From ff9876f5a2e404dde6b5983fe772aecb9d030aed Mon Sep 17 00:00:00 2001 From: terratrue-daniel <97548386+terratrue-daniel@users.noreply.github.com> Date: Tue, 28 Nov 2023 11:43:52 -0800 Subject: [PATCH 001/263] fix(ingest/mssql): Add MONEY and SMALLMONEY data types as Number (#9313) --- .../ingestion/source/sql/mssql/source.py | 3 ++ .../golden_mces_mssql_no_db_to_file.json | 34 ++++++++++++++++--- .../integration/sql_server/setup/setup.sql | 4 +-- 3 files changed, 34 insertions(+), 7 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index 710825c8ba55d..fa5310b1110e0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -48,6 +48,7 @@ ) from datahub.metadata.schema_classes import ( BooleanTypeClass, + NumberTypeClass, StringTypeClass, UnionTypeClass, ) @@ -55,6 +56,8 @@ logger: logging.Logger = logging.getLogger(__name__) register_custom_type(sqlalchemy.dialects.mssql.BIT, BooleanTypeClass) +register_custom_type(sqlalchemy.dialects.mssql.MONEY, NumberTypeClass) +register_custom_type(sqlalchemy.dialects.mssql.SMALLMONEY, NumberTypeClass) register_custom_type(sqlalchemy.dialects.mssql.SQL_VARIANT, UnionTypeClass) register_custom_type(sqlalchemy.dialects.mssql.UNIQUEIDENTIFIER, StringTypeClass) diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json index 2fe7a76fd01ae..66ef9b097c973 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_no_db_to_file.json @@ -112,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", + "job_id": "3565ea3e-9a3a-4cb0-acd5-213d740479a0", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-10-27 10:11:55.540000", - "date_modified": "2023-10-27 10:11:55.667000", + "date_created": "2023-11-27 23:08:29.350000", + "date_modified": "2023-11-27 23:08:29.833000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-10-27 10:11:55.460000", - "date_modified": "2023-10-27 10:11:55.460000" + "date_created": "2023-11-27 23:08:29.077000", + "date_modified": "2023-11-27 23:08:29.077000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", @@ -3575,6 +3575,18 @@ "nativeDataType": "NVARCHAR()", "recursive": false, "isPartOfKey": false + }, + { + "fieldPath": "Price", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "MONEY()", + "recursive": false, + "isPartOfKey": false } ] } @@ -3816,6 +3828,18 @@ "nativeDataType": "NVARCHAR()", "recursive": false, "isPartOfKey": false + }, + { + "fieldPath": "Price", + "nullable": true, + "type": { + "type": { + "com.linkedin.pegasus2avro.schema.NumberType": {} + } + }, + "nativeDataType": "SMALLMONEY()", + "recursive": false, + "isPartOfKey": false } ] } diff --git 
a/metadata-ingestion/tests/integration/sql_server/setup/setup.sql b/metadata-ingestion/tests/integration/sql_server/setup/setup.sql index a17d52f9a39b1..77ecabc5a3fff 100644 --- a/metadata-ingestion/tests/integration/sql_server/setup/setup.sql +++ b/metadata-ingestion/tests/integration/sql_server/setup/setup.sql @@ -2,11 +2,11 @@ CREATE DATABASE NewData; GO USE NewData; GO -CREATE TABLE ProductsNew (ID int, ProductName nvarchar(max)); +CREATE TABLE ProductsNew (ID int, ProductName nvarchar(max), Price money); GO CREATE SCHEMA FooNew; GO -CREATE TABLE FooNew.ItemsNew (ID int, ItemName nvarchar(max)); +CREATE TABLE FooNew.ItemsNew (ID int, ItemName nvarchar(max), Price smallmoney); GO CREATE TABLE FooNew.PersonsNew ( ID int NOT NULL PRIMARY KEY, From 08fb730676dc5c807e43d8c8be4f8cab8ad830d0 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Wed, 29 Nov 2023 02:19:49 +0530 Subject: [PATCH 002/263] fix(ingest): drop deprecated database_alias from sql sources (#9299) Co-authored-by: Harshal Sheth --- docs/how/updating-datahub.md | 2 +- .../src/datahub/ingestion/source/metabase.py | 2 + .../ingestion/source/redshift/common.py | 12 - .../ingestion/source/redshift/config.py | 13 +- .../ingestion/source/redshift/lineage.py | 3 +- .../ingestion/source/redshift/redshift.py | 7 +- .../ingestion/source/redshift/usage.py | 4 - .../ingestion/source/sql/mssql/source.py | 7 +- .../src/datahub/ingestion/source/sql/mysql.py | 6 +- .../datahub/ingestion/source/sql/oracle.py | 2 - .../datahub/ingestion/source/sql/postgres.py | 3 - .../ingestion/source/sql/presto_on_hive.py | 2 - .../ingestion/source/sql/sql_config.py | 11 +- .../src/datahub/ingestion/source/sql/trino.py | 11 +- .../src/datahub/ingestion/source/superset.py | 2 + .../mysql/mysql_to_file_dbalias.yml | 1 - .../tests/integration/mysql/test_mysql.py | 24 - .../presto_on_hive_mces_golden_1.json | 317 ++++++++------ .../presto_on_hive_mces_golden_2.json | 292 +++++++------ .../presto_on_hive_mces_golden_3.json | 411 ++++++++++-------- .../presto_on_hive_mces_golden_4.json | 374 +++++++++------- .../presto_on_hive_mces_golden_5.json | 317 ++++++++------ .../presto-on-hive/presto_on_hive_to_file.yml | 1 - .../presto-on-hive/test_presto_on_hive.py | 4 +- .../integration/snowflake/test_snowflake.py | 1 + .../golden_mces_mssql_to_file.json | 56 +-- ...golden_mces_mssql_with_lower_case_urn.json | 56 +-- .../sql_server/source_files/mssql_to_file.yml | 1 - .../mssql_with_lower_case_urn.yml | 1 - .../tests/integration/trino/test_trino.py | 1 - .../tests/unit/test_postgres_source.py | 17 - 31 files changed, 1052 insertions(+), 909 deletions(-) delete mode 100644 metadata-ingestion/src/datahub/ingestion/source/redshift/common.py diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 21c4cef2e848b..3263a9f7c15fb 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -8,7 +8,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources. - +- The `database_alias` config is no longer supported in SQL sources, namely Redshift, MySQL, Oracle, Postgres, Trino, and Presto-on-Hive.
The config will automatically be ignored if it's present in your recipe. It has been deprecated since v0.9.6. ### Potential Downtime ### Deprecations diff --git a/metadata-ingestion/src/datahub/ingestion/source/metabase.py b/metadata-ingestion/src/datahub/ingestion/source/metabase.py index 24145d60210ff..9f09a4322bb5d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metabase.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metabase.py @@ -54,6 +54,8 @@ class MetabaseConfig(DatasetLineageProviderConfigBase): password: Optional[pydantic.SecretStr] = Field( default=None, description="Metabase password." ) + # TODO: Check and remove this if no longer needed. + # Config database_alias is removed from sql sources. database_alias_map: Optional[dict] = Field( default=None, description="Database name map to use when constructing dataset URN.", diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/common.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/common.py deleted file mode 100644 index 80657c69f88fa..0000000000000 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/common.py +++ /dev/null @@ -1,12 +0,0 @@ -from datahub.ingestion.source.redshift.config import RedshiftConfig - -redshift_datetime_format = "%Y-%m-%d %H:%M:%S" - - -def get_db_name(config: RedshiftConfig) -> str: - db_name = config.database - db_alias = config.database_alias - - db_name = db_alias or db_name - assert db_name is not None, "database name or alias must be specified" - return db_name diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py index 9cbf1823db939..95038ef2c6212 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py @@ -8,7 +8,7 @@ from datahub.configuration import ConfigModel from datahub.configuration.common import AllowDenyPattern from datahub.configuration.source_common import DatasetLineageProviderConfigBase -from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated +from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.data_lake_common.path_spec import PathSpec from datahub.ingestion.source.sql.postgres import BasePostgresConfig from datahub.ingestion.source.state.stateful_ingestion_base import ( @@ -87,10 +87,7 @@ class RedshiftConfig( hidden_from_schema=True, ) - _database_alias_deprecation = pydantic_field_deprecated( - "database_alias", - message="database_alias is deprecated. 
Use platform_instance instead.", - ) + _database_alias_removed = pydantic_removed_field("database_alias") default_schema: str = Field( default="public", @@ -151,10 +148,8 @@ def check_email_is_set_on_usage(cls, values): return values @root_validator(skip_on_failure=True) - def check_database_or_database_alias_set(cls, values): - assert values.get("database") or values.get( - "database_alias" - ), "either database or database_alias must be set" + def check_database_is_set(cls, values): + assert values.get("database"), "database must be set" return values @root_validator(skip_on_failure=True) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index c9ddfbe92ab2a..05011b2d7a769 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -16,7 +16,6 @@ from datahub.emitter.mce_builder import make_dataset_urn_with_platform_instance from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.source.aws.s3_util import strip_s3_prefix -from datahub.ingestion.source.redshift.common import get_db_name from datahub.ingestion.source.redshift.config import LineageMode, RedshiftConfig from datahub.ingestion.source.redshift.query import RedshiftQuery from datahub.ingestion.source.redshift.redshift_schema import ( @@ -266,7 +265,7 @@ def _populate_lineage_map( try: cll: Optional[List[sqlglot_l.ColumnLineageInfo]] = None raw_db_name = database - alias_db_name = get_db_name(self.config) + alias_db_name = self.config.database for lineage_row in RedshiftDataDictionary.get_lineage_rows( conn=connection, query=query diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py index c7d01021773b1..0b1bde6ca8c0a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py @@ -38,7 +38,6 @@ DatasetContainerSubTypes, DatasetSubTypes, ) -from datahub.ingestion.source.redshift.common import get_db_name from datahub.ingestion.source.redshift.config import RedshiftConfig from datahub.ingestion.source.redshift.lineage import RedshiftLineageExtractor from datahub.ingestion.source.redshift.profile import RedshiftProfiler @@ -393,8 +392,8 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: def get_workunits_internal(self) -> Iterable[Union[MetadataWorkUnit, SqlWorkUnit]]: connection = RedshiftSource.get_redshift_connection(self.config) - database = get_db_name(self.config) - logger.info(f"Processing db {self.config.database} with name {database}") + database = self.config.database + logger.info(f"Processing db {database}") self.report.report_ingestion_stage_start(METADATA_EXTRACTION) self.db_tables[database] = defaultdict() self.db_views[database] = defaultdict() @@ -628,7 +627,7 @@ def gen_view_dataset_workunits( ) -> Iterable[MetadataWorkUnit]: yield from self.gen_dataset_workunits( table=view, - database=get_db_name(self.config), + database=self.config.database, schema=schema, sub_type=DatasetSubTypes.VIEW, custom_properties={}, diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py index bbb1876102578..c789e605b9c29 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py +++ 
b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py @@ -359,10 +359,6 @@ def _gen_access_events_from_history_query( self.report.num_usage_stat_skipped += 1 continue - # Replace database name with the alias name if one is provided in the config. - if self.config.database_alias: - access_event.database = self.config.database_alias - if not self._should_process_event(access_event, all_tables=all_tables): self.report.num_usage_stat_skipped += 1 continue diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index fa5310b1110e0..6eea5a4c31fa6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -138,7 +138,7 @@ def host(self): @property def db(self): - return self.database_alias or self.database + return self.database @platform_name("Microsoft SQL Server", id="mssql") @@ -660,10 +660,7 @@ def get_identifier( regular = f"{schema}.{entity}" qualified_table_name = regular if self.config.database: - if self.config.database_alias: - qualified_table_name = f"{self.config.database_alias}.{regular}" - else: - qualified_table_name = f"{self.config.database}.{regular}" + qualified_table_name = f"{self.config.database}.{regular}" if self.current_database: qualified_table_name = f"{self.current_database}.{regular}" return ( diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py index 891b64066721b..2126717f835a2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mysql.py @@ -54,11 +54,7 @@ class MySQLConnectionConfig(SQLAlchemyConnectionConfig): class MySQLConfig(MySQLConnectionConfig, TwoTierSQLAlchemyConfig): def get_identifier(self, *, schema: str, table: str) -> str: - regular = f"{schema}.{table}" - if self.database_alias: - return f"{self.database_alias}.{table}" - else: - return regular + return f"{schema}.{table}" @platform_name("MySQL") diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py index f2e1fe00ec8a3..7ee54200c6493 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py @@ -88,8 +88,6 @@ def get_sql_alchemy_url(self): def get_identifier(self, schema: str, table: str) -> str: regular = f"{schema}.{table}" if self.add_database_name_to_urn: - if self.database_alias: - return f"{self.database_alias}.{regular}" if self.database: return f"{self.database}.{regular}" return regular diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py index c8418075928ef..5d1e37fbb68a3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/postgres.py @@ -139,7 +139,6 @@ class PostgresSource(SQLAlchemySource): - Metadata for databases, schemas, views, and tables - Column types associated with each table - Also supports PostGIS extensions - - database_alias (optional) can be used to change the name of database to be ingested - Table, row, and column statistics via optional SQL profiling """ @@ -271,8 +270,6 @@ def get_identifier( ) -> str: regular = f"{schema}.{entity}" if self.config.database: - if 
self.config.database_alias: - return f"{self.config.database_alias}.{regular}" return f"{self.config.database}.{regular}" current_database = self.get_db_name(inspector) return f"{current_database}.{regular}" diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py b/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py index ceb9ecacb25d2..9657fdab9e2e3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/presto_on_hive.py @@ -329,8 +329,6 @@ def __init__(self, config: PrestoOnHiveConfig, ctx: PipelineContext) -> None: ) def get_db_name(self, inspector: Inspector) -> str: - if self.config.database_alias: - return f"{self.config.database_alias}" if self.config.database: return f"{self.config.database}" else: diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py index 6a76ae847218d..54edab6f3b84b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py @@ -11,7 +11,7 @@ DatasetSourceConfigMixin, LowerCaseDatasetUrnConfigMixin, ) -from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated +from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.ge_profiling_config import GEProfilingConfig from datahub.ingestion.source.state.stale_entity_removal_handler import ( StatefulStaleMetadataRemovalConfig, @@ -129,10 +129,6 @@ class SQLAlchemyConnectionConfig(ConfigModel): host_port: str = Field(description="host URL") database: Optional[str] = Field(default=None, description="database (catalog)") - database_alias: Optional[str] = Field( - default=None, - description="[Deprecated] Alias to apply to database when ingesting.", - ) scheme: str = Field(description="scheme") sqlalchemy_uri: Optional[str] = Field( default=None, @@ -149,10 +145,7 @@ class SQLAlchemyConnectionConfig(ConfigModel): ), ) - _database_alias_deprecation = pydantic_field_deprecated( - "database_alias", - message="database_alias is deprecated. 
Use platform_instance instead.", - ) + _database_alias_removed = pydantic_removed_field("database_alias") def get_sql_alchemy_url( self, uri_opts: Optional[Dict[str, Any]] = None, database: Optional[str] = None diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py b/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py index 2b693d9d80d91..cb2e05765bfff 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/trino.py @@ -136,12 +136,9 @@ class TrinoConfig(BasicSQLAlchemyConfig): scheme: str = Field(default="trino", description="", hidden_from_docs=True) def get_identifier(self: BasicSQLAlchemyConfig, schema: str, table: str) -> str: - regular = f"{schema}.{table}" - identifier = regular - if self.database_alias: - identifier = f"{self.database_alias}.{regular}" - elif self.database: - identifier = f"{self.database}.{regular}" + identifier = f"{schema}.{table}" + if self.database: # TODO: this should be required field + identifier = f"{self.database}.{identifier}" return ( f"{self.platform_instance}.{identifier}" if self.platform_instance @@ -173,8 +170,6 @@ def __init__( super().__init__(config, ctx, platform) def get_db_name(self, inspector: Inspector) -> str: - if self.config.database_alias: - return f"{self.config.database_alias}" if self.config.database: return f"{self.config.database}" else: diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py index 1ae971e4a82d0..7f607666db313 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/superset.py +++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py @@ -96,6 +96,8 @@ class SupersetConfig(StatefulIngestionConfigBase, ConfigModel): default=DEFAULT_ENV, description="Environment to use in namespace when constructing URNs", ) + # TODO: Check and remove this if no longer needed. + # Config database_alias is removed from sql sources. database_alias: Dict[str, str] = Field( default={}, description="Can be used to change mapping for database names in superset to what you have in datahub", diff --git a/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml b/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml index 1c324641fe158..89b87505ab527 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml +++ b/metadata-ingestion/tests/integration/mysql/mysql_to_file_dbalias.yml @@ -6,7 +6,6 @@ source: username: root password: example database: metagalaxy - database_alias: foogalaxy host_port: localhost:53307 schema_pattern: allow: diff --git a/metadata-ingestion/tests/integration/mysql/test_mysql.py b/metadata-ingestion/tests/integration/mysql/test_mysql.py index 8c8626a2d2297..23fd97ff2671e 100644 --- a/metadata-ingestion/tests/integration/mysql/test_mysql.py +++ b/metadata-ingestion/tests/integration/mysql/test_mysql.py @@ -75,27 +75,3 @@ def test_mysql_ingest_no_db( output_path=tmp_path / "mysql_mces.json", golden_path=test_resources_dir / golden_file, ) - - -@freeze_time(FROZEN_TIME) -@pytest.mark.integration -def test_mysql_ingest_with_db_alias( - mysql_runner, pytestconfig, test_resources_dir, tmp_path, mock_time -): - # Run the metadata ingestion pipeline. - config_file = (test_resources_dir / "mysql_to_file_dbalias.yml").resolve() - run_datahub_cmd(["ingest", "-c", f"{config_file}"], tmp_path=tmp_path) - - # Verify the output. 
- # Assert that all events generated have instance specific urns - import re - - urn_pattern = "^" + re.escape( - "urn:li:dataset:(urn:li:dataPlatform:mysql,foogalaxy." - ) - mce_helpers.assert_mcp_entity_urn( - filter="ALL", - entity_type="dataset", - regex_pattern=urn_pattern, - file=tmp_path / "mysql_mces_dbalias.json", - ) diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json index 45d13229b2d85..5607075ed568f 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_1.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, 
{ "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,42 +154,45 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "container": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -191,12 +202,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,7 +295,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } 
}, { @@ -300,7 +313,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -312,19 +326,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -334,12 +349,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,7 +499,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -500,7 +517,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,19 +530,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,12 +553,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - 
"transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,7 +695,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -692,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,19 +726,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -726,12 +749,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,7 +878,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -871,7 +896,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -883,19 +909,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - 
"runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -905,12 +932,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,7 +1052,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1041,7 +1070,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1053,19 +1083,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1075,12 +1106,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,7 +1196,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1181,7 +1214,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1193,19 +1227,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": 
"urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1215,12 +1250,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,7 +1350,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1331,7 +1368,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1343,19 +1381,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1365,12 +1404,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1440,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1457,7 +1498,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1474,7 +1516,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1486,19 +1529,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + 
"urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,12 +1552,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1630,7 +1675,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1647,7 +1693,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1664,7 +1711,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1676,19 +1724,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json index 4ec71eb8c39c6..45f78eb61c15b 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_2.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" 
} }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,42 +154,45 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "container": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": 
"presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -191,12 +202,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,7 +295,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -300,7 +313,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -312,19 +326,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -334,12 +349,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - 
"create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,7 +499,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -500,7 +517,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,19 +530,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,12 +553,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,7 +695,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -692,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,19 +726,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -726,12 +749,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,17 
+856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,7 +878,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -871,7 +896,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -883,19 +909,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -905,12 +932,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,7 +1052,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1041,7 +1070,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1053,19 +1083,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": 
"urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1075,12 +1106,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,7 +1196,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1181,7 +1214,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1193,19 +1227,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1215,12 +1250,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,7 +1350,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1331,7 +1368,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { 
@@ -1343,19 +1381,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1365,12 +1404,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1453,7 +1493,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1470,7 +1511,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1487,7 +1529,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1499,19 +1542,20 @@ "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json index 824524782a8e3..ad1e46eb8fbb0 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_3.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": 
"UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,63 +154,67 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": 
"urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "container": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -211,7 +223,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.map_test", + "schemaName": "metastore.db1.map_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,12 +295,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -300,52 +313,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": 
"urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -354,7 +370,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.union_test", + "schemaName": "metastore.db1.union_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,12 +499,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -500,52 +517,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": 
"urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -554,7 +574,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.nested_struct_test", + "schemaName": "metastore.db1.nested_struct_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,12 +695,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -692,52 +713,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 
1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -746,7 +770,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test", + "schemaName": "metastore.db1.array_struct_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,12 +878,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -871,52 +896,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": 
"urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -925,7 +953,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.struct_test", + "schemaName": "metastore.db1.struct_test", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,12 +1052,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1041,52 +1070,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1095,7 +1127,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1._test_table_underscore", + "schemaName": "metastore.db1._test_table_underscore", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,12 +1196,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1181,52 +1214,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": 
"container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1235,7 +1271,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.pokes", + "schemaName": "metastore.db1.pokes", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,12 +1350,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1331,52 +1368,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": 
"urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1385,7 +1425,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test_presto_view", + "schemaName": "metastore.db1.array_struct_test_presto_view", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1440,12 +1480,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1457,12 +1498,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { @@ -1474,52 +1516,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", 
"aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1528,7 +1573,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test_view", + "schemaName": "metastore.db1.array_struct_test_view", "platform": "urn:li:dataPlatform:hive", "version": 0, "created": { @@ -1630,12 +1675,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1647,12 +1693,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { @@ -1664,31 +1711,33 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,hive.db1.array_struct_test_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,metastore.db1.array_struct_test_view,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json index 3f2980457daa4..007f45238e23f 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_4.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", + "entityUrn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "presto-on-hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,63 +154,67 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": 
"urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "container": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", + "entityUrn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -211,7 +223,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.map_test", + "schemaName": "metastore.db1.map_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,12 +295,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -300,52 +313,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.map_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.map_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -354,7 +370,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.union_test", + "schemaName": "metastore.db1.union_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,12 +499,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -500,52 +517,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.union_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.union_test,PROD)", "changeType": "UPSERT", "aspectName": 
"browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -554,7 +574,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.nested_struct_test", + "schemaName": "metastore.db1.nested_struct_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", - "transient_lastDdlTime": "1688395014", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,12 +695,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -692,52 +713,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.nested_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.nested_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -746,7 +770,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test", + "schemaName": "metastore.db1.array_struct_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395011", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,12 +878,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -871,52 +896,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -925,7 +953,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.struct_test", + "schemaName": "metastore.db1.struct_test", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1005,15 +1033,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,12 +1052,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1041,52 +1070,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.struct_test,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.struct_test,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1095,7 +1127,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1._test_table_underscore", + "schemaName": "metastore.db1._test_table_underscore", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1145,15 +1177,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "transient_lastDdlTime": "1688395008", "rawDataSize": "0", "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-07-03" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,12 +1196,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1181,52 +1214,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1._test_table_underscore,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1._test_table_underscore,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": 
"urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1235,7 +1271,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.pokes", + "schemaName": "metastore.db1.pokes", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1688395005", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-07-03", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,12 +1350,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1331,52 +1368,55 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.pokes,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.pokes,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": 
"urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "container": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1385,7 +1425,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "hive.db1.array_struct_test_presto_view", + "schemaName": "metastore.db1.array_struct_test_presto_view", "platform": "urn:li:dataPlatform:presto-on-hive", "version": 0, "created": { @@ -1453,12 +1493,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1470,12 +1511,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "viewProperties", "aspect": { @@ -1487,31 +1529,33 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,hive.db1.array_struct_test_presto_view,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:presto-on-hive,metastore.db1.array_struct_test_presto_view,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab", - "urn": "urn:li:container:e998a77f6edaa92d1326dec9d37c96ab" + "id": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d", + "urn": "urn:li:container:f4ec3d97ca6750de28020a0d393c289d" }, { - "id": "urn:li:container:bb66ab4651750f727700446f9b3aa2df", - "urn": "urn:li:container:bb66ab4651750f727700446f9b3aa2df" + "id": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f", + "urn": "urn:li:container:5bd3e4d159b00200dfe53d79a486ce7f" } ] } 
}, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json index a0dd4ab82bf24..111fc0038bdb8 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_mces_golden_5.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,19 +9,20 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive" + "database": "metastore" }, - "name": "hive" + "name": "metastore" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -31,12 +32,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -46,12 +48,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -63,12 +66,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", + "entityUrn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -78,12 +82,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -91,7 +96,7 @@ "customProperties": { "platform": "hive", "env": "PROD", - "database": "hive", + "database": "metastore", "schema": "db1" }, "name": "db1" @@ -99,12 +104,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": 
"urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -114,12 +120,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -129,12 +136,13 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -146,42 +154,45 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "container": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { "entityType": "container", - "entityUrn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", + "entityUrn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -191,12 +202,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,15 +276,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956983", + "totalSize": "0", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", - "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/map_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "map_test", "tags": [] @@ -283,7 +295,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -300,7 +313,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + 
"runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -312,19 +326,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -334,12 +349,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -464,15 +480,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956983", + "numRows": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", - "numRows": "0", "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/union_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "union_test", "tags": [] @@ -483,7 +499,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -500,7 +517,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -512,19 +530,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -534,12 +553,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -656,15 +676,15 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956983", + "totalSize": "0", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", + "transient_lastDdlTime": "1700805676", "rawDataSize": "0", "numRows": "0", - "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": 
"hdfs://namenode:8020/user/hive/warehouse/db1.db/nested_struct_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "nested_struct_test", "tags": [] @@ -675,7 +695,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -692,7 +713,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -704,19 +726,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -726,12 +749,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -832,17 +856,17 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "comment": "This table has array of structs", - "transient_lastDdlTime": "1690956980", - "numFiles": "1", - "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", - "rawDataSize": "32", + "another.comment": "This table has no partitions", "numRows": "1", + "rawDataSize": "32", "totalSize": "33", - "another.comment": "This table has no partitions", + "numFiles": "1", + "transient_lastDdlTime": "1700805674", + "comment": "This table has array of structs", + "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/array_struct_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "array_struct_test", "description": "This table has array of structs", @@ -854,7 +878,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -871,7 +896,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -883,19 +909,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": 
"no-run-id-provided" } }, { @@ -905,12 +932,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1005,7 +1033,7 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956977", + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "rawDataSize": "0", @@ -1013,7 +1041,7 @@ "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/struct_test", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "struct_test", "tags": [] @@ -1024,7 +1052,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1041,7 +1070,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1053,19 +1083,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1075,12 +1106,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1145,7 +1177,7 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956977", + "transient_lastDdlTime": "1700805671", "numFiles": "0", "COLUMN_STATS_ACCURATE": "{\"BASIC_STATS\":\"true\"}", "rawDataSize": "0", @@ -1153,7 +1185,7 @@ "totalSize": "0", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/_test_table_underscore", - "create_date": "2023-08-02" + "create_date": "2023-11-24" }, "name": "_test_table_underscore", "tags": [] @@ -1164,7 +1196,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1181,7 +1214,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1193,19 +1227,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": 
"urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1215,12 +1250,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1299,10 +1335,10 @@ { "com.linkedin.pegasus2avro.dataset.DatasetProperties": { "customProperties": { - "transient_lastDdlTime": "1690956974", + "transient_lastDdlTime": "1700805669", "table_type": "MANAGED_TABLE", "table_location": "hdfs://namenode:8020/user/hive/warehouse/db1.db/pokes", - "create_date": "2023-08-02", + "create_date": "2023-11-24", "partitioned_columns": "baz" }, "name": "pokes", @@ -1314,7 +1350,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1331,7 +1368,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1343,19 +1381,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1365,12 +1404,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1440,7 +1480,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1457,7 +1498,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1474,7 +1516,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1486,19 +1529,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { 
- "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1508,12 +1552,13 @@ "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "container": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1630,7 +1675,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1647,7 +1693,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1664,7 +1711,8 @@ }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } }, { @@ -1676,19 +1724,20 @@ "json": { "path": [ { - "id": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918", - "urn": "urn:li:container:939ecec0f01fb6bb1ca15fe6f0ead918" + "id": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589", + "urn": "urn:li:container:1cfce89b5a05e1da5092d88ad9eb4589" }, { - "id": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f", - "urn": "urn:li:container:f5e571e4a9acce86333e6b427ba1651f" + "id": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae", + "urn": "urn:li:container:9ba2e350c97c893a91bcaee4838cdcae" } ] } }, "systemMetadata": { "lastObserved": 1632398400000, - "runId": "presto-on-hive-test" + "runId": "presto-on-hive-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml index d4df1364513c8..233fb7fa36057 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml +++ b/metadata-ingestion/tests/integration/presto-on-hive/presto_on_hive_to_file.yml @@ -5,7 +5,6 @@ source: config: host_port: localhost:5432 database: metastore - database_alias: hive username: postgres scheme: "postgresql+psycopg2" diff --git a/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py b/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py index 31d801ccf7dee..23110ef12ae54 100644 --- a/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py +++ b/metadata-ingestion/tests/integration/presto-on-hive/test_presto_on_hive.py @@ -88,9 +88,8 @@ def test_presto_on_hive_ingest( "type": data_platform, "config": { "host_port": "localhost:5432", - "database": "db1", "metastore_db_name": "metastore", - "database_alias": "hive", + "database_pattern": {"allow": ["db1"]}, "username": "postgres", "scheme": "postgresql+psycopg2", "include_views": True, @@ -152,7 +151,6 @@ def test_presto_on_hive_instance_ingest( "config": { "host_port": "localhost:5432", "database": "metastore", - "database_alias": "hive", "username": "postgres", "scheme": "postgresql+psycopg2", "include_views": True, diff --git 
a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py index 4c00e48ede9fb..1b58696e4014c 100644 --- a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py +++ b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py @@ -142,6 +142,7 @@ def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph): type="datahub", config=datahub_classifier_config ) ], + max_workers=1, ), profiling=GEProfilingConfig( enabled=True, diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json index 804a8d74d0d51..9ce3664eff6a1 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_to_file.json @@ -112,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", + "job_id": "3b767c17-c921-4331-93d9-eb0e006045a4", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-10-27 10:11:55.540000", - "date_modified": "2023-10-27 10:11:55.667000", + "date_created": "2023-11-23 11:04:47.927000", + "date_modified": "2023-11-23 11:04:48.090000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -1245,7 +1245,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1262,7 +1262,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1278,7 +1278,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.dbo.Products", + "schemaName": "DemoData.dbo.Products", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1334,7 +1334,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1352,7 +1352,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.dbo.Products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.dbo.Products,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1486,7 +1486,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1503,7 +1503,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "aspects": [ { 
"com.linkedin.pegasus2avro.common.Status": { @@ -1520,7 +1520,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.Foo.Items", + "schemaName": "DemoData.Foo.Items", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1576,7 +1576,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1594,7 +1594,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Items,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1619,7 +1619,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1636,7 +1636,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1652,7 +1652,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.Foo.Persons", + "schemaName": "DemoData.Foo.Persons", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1733,7 +1733,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1751,7 +1751,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1776,7 +1776,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1793,7 +1793,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1809,7 +1809,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "DemoDataAlias.Foo.SalesReason", + "schemaName": "DemoData.Foo.SalesReason", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1868,12 +1868,12 @@ { "name": "FK_TempSales_SalesReason", "foreignFields": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD),ID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD),ID)" ], "sourceFields": [ - 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD),TempID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD),TempID)" ], - "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.Persons,PROD)" + "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.Persons,PROD)" } ] } @@ -1889,7 +1889,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1907,7 +1907,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoDataAlias.Foo.SalesReason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,DemoData.Foo.SalesReason,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-10-27 10:11:55.460000", - "date_modified": "2023-10-27 10:11:55.460000" + "date_created": "2023-11-23 11:04:47.857000", + "date_modified": "2023-11-23 11:04:47.857000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", diff --git a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json index 9d1b288057a16..037a341b7d66e 100644 --- a/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json +++ b/metadata-ingestion/tests/integration/sql_server/golden_files/golden_mces_mssql_with_lower_case_urn.json @@ -112,11 +112,11 @@ "aspect": { "json": { "customProperties": { - "job_id": "1f2f14ba-db84-4fa1-910e-7df71bede642", + "job_id": "3b767c17-c921-4331-93d9-eb0e006045a4", "job_name": "Weekly Demo Data Backup", "description": "No description available.", - "date_created": "2023-10-27 10:11:55.540000", - "date_modified": "2023-10-27 10:11:55.667000", + "date_created": "2023-11-23 11:04:47.927000", + "date_modified": "2023-11-23 11:04:48.090000", "step_id": "1", "step_name": "Set database to read only", "subsystem": "TSQL", @@ -1245,7 +1245,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1262,7 +1262,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1278,7 +1278,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.dbo.products", + "schemaName": "demodata.dbo.products", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1334,7 +1334,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1352,7 +1352,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.dbo.products,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.dbo.products,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1486,7 +1486,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1503,7 +1503,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1520,7 +1520,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.foo.items", + "schemaName": "demodata.foo.items", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1576,7 +1576,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1594,7 +1594,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.items,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.items,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1619,7 +1619,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1636,7 +1636,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1652,7 +1652,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.foo.persons", + "schemaName": "demodata.foo.persons", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1733,7 +1733,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1751,7 +1751,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1776,7 +1776,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -1793,7 +1793,7 @@ { "proposedSnapshot": { "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot": { - "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "urn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "aspects": [ { "com.linkedin.pegasus2avro.common.Status": { @@ -1809,7 +1809,7 @@ }, { "com.linkedin.pegasus2avro.schema.SchemaMetadata": { - "schemaName": "demodataalias.foo.salesreason", + "schemaName": "demodata.foo.salesreason", "platform": "urn:li:dataPlatform:mssql", "version": 0, "created": { @@ -1868,12 +1868,12 @@ { "name": "FK_TempSales_SalesReason", "foreignFields": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD),ID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD),ID)" ], "sourceFields": [ - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD),TempID)" + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD),TempID)" ], - "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.persons,PROD)" + "foreignDataset": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.persons,PROD)" } ] } @@ -1889,7 +1889,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1907,7 +1907,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodataalias.foo.salesreason,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:mssql,demodata.foo.salesreason,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1961,8 +1961,8 @@ "code": "CREATE PROCEDURE [Foo].[Proc.With.SpecialChar] @ID INT\nAS\n SELECT @ID AS ThatDB;\n", "input parameters": "['@ID']", "parameter @ID": "{'type': 'int'}", - "date_created": "2023-10-27 10:11:55.460000", - "date_modified": "2023-10-27 10:11:55.460000" + "date_created": "2023-11-23 11:04:47.857000", + "date_modified": "2023-11-23 11:04:47.857000" }, "externalUrl": "", "name": "demodata.Foo.Proc.With.SpecialChar", diff --git a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml index d347422353d47..c53e3cf6b8045 100644 --- a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml +++ b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_to_file.yml @@ -7,7 +7,6 @@ source: password: test!Password database: DemoData host_port: localhost:51433 - database_alias: DemoDataAlias # use_odbc: True # uri_args: # driver: "ODBC Driver 17 for SQL Server" diff --git a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml index 8d17c49163ca1..4e96d137670ba 100644 --- a/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml +++ b/metadata-ingestion/tests/integration/sql_server/source_files/mssql_with_lower_case_urn.yml @@ -7,7 +7,6 @@ source: password: test!Password 
database: DemoData host_port: localhost:51433 - database_alias: DemoDataAlias convert_urns_to_lowercase: true # use_odbc: True # uri_args: diff --git a/metadata-ingestion/tests/integration/trino/test_trino.py b/metadata-ingestion/tests/integration/trino/test_trino.py index 177c273c0d242..8ab3ed8056e90 100644 --- a/metadata-ingestion/tests/integration/trino/test_trino.py +++ b/metadata-ingestion/tests/integration/trino/test_trino.py @@ -70,7 +70,6 @@ def test_trino_ingest( "config": TrinoConfig( host_port="localhost:5300", database="postgresqldb", - database_alias="library_catalog", username="foo", schema_pattern=AllowDenyPattern(allow=["^librarydb"]), profile_pattern=AllowDenyPattern( diff --git a/metadata-ingestion/tests/unit/test_postgres_source.py b/metadata-ingestion/tests/unit/test_postgres_source.py index fac491cbaea04..91a62b603bb58 100644 --- a/metadata-ingestion/tests/unit/test_postgres_source.py +++ b/metadata-ingestion/tests/unit/test_postgres_source.py @@ -65,23 +65,6 @@ def tests_get_inspectors_with_sqlalchemy_uri_provided(create_engine_mock): assert create_engine_mock.call_args_list[0][0][0] == "custom_url" -def test_database_alias_takes_precendence(): - config = PostgresConfig.parse_obj( - { - **_base_config(), - "database_alias": "ops_database", - "database": "postgres", - } - ) - mock_inspector = mock.MagicMock() - assert ( - PostgresSource(config, PipelineContext(run_id="test")).get_identifier( - schema="superset", entity="logs", inspector=mock_inspector - ) - == "ops_database.superset.logs" - ) - - def test_database_in_identifier(): config = PostgresConfig.parse_obj({**_base_config(), "database": "postgres"}) mock_inspector = mock.MagicMock() From 966cb175f7d826ee9331cea652164249c9cf6bfb Mon Sep 17 00:00:00 2001 From: Hendrik Richert Date: Tue, 28 Nov 2023 21:52:11 +0100 Subject: [PATCH 003/263] feat(dev): Make repositories configurable for enterprise developers (#9230) Co-authored-by: Hendrik Richert Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- datahub-frontend/build.gradle | 18 +++++++++- datahub-upgrade/build.gradle | 16 +++++++++ docker/datahub-frontend/Dockerfile | 16 ++++++--- docker/datahub-gms/Dockerfile | 35 +++++++++++++++---- docker/datahub-ingestion-base/Dockerfile | 24 +++++++++++-- docker/datahub-ingestion-base/build.gradle | 21 +++++++++-- docker/datahub-ingestion/Dockerfile | 9 +++++ docker/datahub-ingestion/Dockerfile-slim-only | 5 +++ docker/datahub-ingestion/build.gradle | 16 +++++++-- docker/datahub-mae-consumer/Dockerfile | 27 +++++++++++--- docker/datahub-mce-consumer/Dockerfile | 27 +++++++++++--- docker/datahub-upgrade/Dockerfile | 31 ++++++++++++---- docker/elasticsearch-setup/Dockerfile | 14 ++++++++ docker/elasticsearch-setup/build.gradle | 12 ++++++- docker/kafka-setup/Dockerfile | 25 +++++++++---- docker/kafka-setup/build.gradle | 19 ++++++++++ docker/mysql-setup/Dockerfile | 13 +++++++ docker/mysql-setup/build.gradle | 10 ++++++ docker/postgres-setup/Dockerfile | 13 +++++++ docker/postgres-setup/build.gradle | 10 ++++++ .../custom-test-model/build.gradle | 6 +++- .../datahub-protobuf-example/build.gradle | 6 +++- metadata-jobs/mae-consumer-job/build.gradle | 18 +++++++++- metadata-jobs/mce-consumer-job/build.gradle | 18 +++++++++- metadata-models-custom/build.gradle | 6 +++- metadata-service/war/build.gradle | 16 +++++++++ repositories.gradle | 30 ++++++++++++---- 27 files changed, 409 insertions(+), 52 deletions(-) diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 
eb81b31745536..9a5fb3210a311 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -89,6 +89,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } task unversionZip(type: Copy, dependsOn: [':datahub-web-react:build', dist]) { @@ -104,4 +120,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 3356445cda7e1..71baa8af99468 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -108,6 +108,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":datahub-upgrade:docker").dependsOn([bootJar]) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index aaace5ae38ca3..9c26d73f4f40b 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -3,14 +3,22 @@ ARG APP_ENV=prod FROM alpine:3 AS base +# Configurable repositories +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + RUN addgroup -S datahub && adduser -S datahub -G datahub +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ ENV LD_LIBRARY_PATH="/lib:/lib64" @@ -22,8 +30,8 @@ COPY ./docker/monitoring/client-prometheus-config.yaml 
/datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend ENV JMX_VERSION=0.18.0 -RUN wget https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ - && wget https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar +RUN wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ + && wget ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index c5696bbd2d1d2..1e13fa492c7f0 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -1,11 +1,23 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + +FROM golang:1-alpine3.18 AS binary FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -16,16 +28,25 @@ FROM alpine:3 AS base # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 + +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - 
&& wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 25afe9b8b3dce..e0f9fdc997071 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -1,11 +1,23 @@ ARG APP_ENV=full ARG BASE_IMAGE=base +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG DEBIAN_REPO_URL=http://deb.debian.org/debian +ARG PIP_MIRROR_URL=null + FROM golang:1-alpine3.18 AS dockerize-binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,11 +26,19 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM python:3.10 as base +ARG DEBIAN_REPO_URL +ARG PIP_MIRROR_URL +ARG GITHUB_REPO_URL + ENV LIBRDKAFKA_VERSION=1.6.2 ENV CONFLUENT_KAFKA_VERSION=1.6.1 ENV DEBIAN_FRONTEND noninteractive +# Optionally set corporate mirror for apk and pip +RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i "s#http.*://deb.debian.org/debian#${DEBIAN_REPO_URL}#g" /etc/apt/sources.list.d/debian.sources ; fi +RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi + RUN apt-get update && apt-get install -y -qq \ make \ python3-ldap \ @@ -33,7 +53,7 @@ RUN apt-get update && apt-get install -y -qq \ unzip \ ldap-utils \ && python -m pip install --no-cache --upgrade pip wheel setuptools \ - && wget -q https://github.com/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz -O - | \ + && wget -q ${GITHUB_REPO_URL}/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz -O - | \ tar -xz -C /root \ && cd /root/librdkafka-${LIBRDKAFKA_VERSION} \ && ./configure --prefix /usr && make && make install && cd .. 
&& rm -rf /root/librdkafka-${LIBRDKAFKA_VERSION} \ @@ -84,4 +104,4 @@ FROM ${BASE_IMAGE} as slim-install FROM ${APP_ENV}-install USER datahub -ENV PATH="/datahub-ingestion/.local/bin:$PATH" \ No newline at end of file +ENV PATH="/datahub-ingestion/.local/bin:$PATH" diff --git a/docker/datahub-ingestion-base/build.gradle b/docker/datahub-ingestion-base/build.gradle index c4d8a962dcd32..e0168290c48f8 100644 --- a/docker/datahub-ingestion-base/build.gradle +++ b/docker/datahub-ingestion-base/build.gradle @@ -25,7 +25,24 @@ docker { }.exclude { i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } - buildArgs([APP_ENV: docker_target]) + + def dockerBuildArgs = [APP_ENV: docker_target] + + // Add build args if they are defined (needed for some CI or enterprise environments) + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('debianAptRepositoryUrl')) { + dockerBuildArgs.DEBIAN_REPO_URL = project.getProperty('debianAptRepositoryUrl') + } + if (project.hasProperty('pipMirrorUrl')) { + dockerBuildArgs.PIP_MIRROR_URL = project.getProperty('pipMirrorUrl') + } + + buildArgs(dockerBuildArgs) } tasks.getByName('docker').dependsOn('build') @@ -42,4 +59,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 1aee79a428a98..9516c31a19e21 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -2,6 +2,8 @@ ARG APP_ENV=full ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head +ARG PIP_MIRROR_URL=null +ARG DEBIAN_REPO_URL=http://deb.debian.org/debian FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 @@ -20,16 +22,23 @@ USER datahub ENV PATH="/datahub-ingestion/.local/bin:$PATH" FROM base as slim-install +ARG PIP_MIRROR_URL + +RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" FROM base as full-install-build +ARG PIP_MIRROR_URL +ARG DEBIAN_REPO_URL USER 0 +RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i "s#http.*://deb.debian.org/debian#${DEBIAN_REPO_URL}#g" /etc/apt/sources.list.d/debian.sources ; fi RUN apt-get update && apt-get install -y -qq maven USER datahub COPY ./docker/datahub-ingestion/pyspark_jars.sh . 
+RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN pip install --no-cache --user ".[base]" && \ pip install --no-cache --user "./airflow-plugin[acryl-datahub-airflow-plugin]" && \ pip install --no-cache --user ".[all]" diff --git a/docker/datahub-ingestion/Dockerfile-slim-only b/docker/datahub-ingestion/Dockerfile-slim-only index cb8c27ab463c4..4112f470c25be 100644 --- a/docker/datahub-ingestion/Dockerfile-slim-only +++ b/docker/datahub-ingestion/Dockerfile-slim-only @@ -1,6 +1,7 @@ # Defining environment ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head-slim +ARG PIP_MIRROR_URL=null FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 @@ -17,6 +18,10 @@ USER datahub ENV PATH="/datahub-ingestion/.local/bin:$PATH" FROM base as slim-install + +ARG PIP_MIRROR_URL + +RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" FROM slim-install as final diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index 247b896d6955c..52db594e2ef85 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -32,8 +32,18 @@ docker { }.exclude { i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } - buildArgs([DOCKER_VERSION: version, - RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')]) + + def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')] + + // Add build args if they are defined (needed for some CI or enterprise environments) + if (project.hasProperty('pipMirrorUrl')) { + dockerBuildArgs.PIP_MIRROR_URL = project.getProperty('pipMirrorUrl') + } + if (project.hasProperty('debianAptRepositoryUrl')) { + dockerBuildArgs.DEBIAN_REPO_URL = project.getProperty('debianAptRepositoryUrl') + } + + buildArgs(dockerBuildArgs) } tasks.getByName('docker').dependsOn(['build', ':docker:datahub-ingestion-base:docker', @@ -51,4 +61,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 07af7c66a7783..3bacd3b2dc81a 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -1,11 +1,22 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,15 +25,23 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index 97861d6be3141..bb22ab82f4402 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -1,11 +1,22 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,15 +25,23 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && apk --no-cache add jattach --repository http://dl-cdn.alpinelinux.org/alpine/edge/community/ \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index fa8e65009662b..551d61f41b979 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -1,11 +1,22 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + FROM golang:1-alpine3.18 AS binary +# Re-declaring arg from above to make it available in this stage (will inherit default value) +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -14,17 +25,25 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base +# Re-declaring args from above to make them available in this stage (will inherit default values) +ARG ALPINE_REPO_URL +ARG GITHUB_REPO_URL +ARG MAVEN_CENTRAL_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ - && curl -sS https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ - && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ - && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ + && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ + && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ + && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index c8fb2eba911b8..f4dd1cb9b018e 100644 --- 
a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -3,11 +3,19 @@ # Defining environment ARG APP_ENV=prod +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine + FROM golang:1-alpine3.18 AS binary +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update add openssl git tar curl sqlite @@ -16,6 +24,12 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 AS base + +ARG ALPINE_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk add --no-cache curl jq bash coreutils COPY --from=binary /go/bin/dockerize /usr/local/bin diff --git a/docker/elasticsearch-setup/build.gradle b/docker/elasticsearch-setup/build.gradle index ac935ca42fd12..f9dff3032b56d 100644 --- a/docker/elasticsearch-setup/build.gradle +++ b/docker/elasticsearch-setup/build.gradle @@ -27,6 +27,16 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') @@ -42,4 +52,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index e7f084739a576..f6a4b62a79356 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -1,28 +1,41 @@ ARG KAFKA_DOCKER_VERSION=7.4.1 +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 +ARG APACHE_DOWNLOAD_URL=null + # Using as a base image because to get the needed jars for confluent utils FROM confluentinc/cp-base-new:$KAFKA_DOCKER_VERSION as confluent_base -ARG MAVEN_REPO="https://repo1.maven.org/maven2" +ARG MAVEN_CENTRAL_REPO_URL ARG SNAKEYAML_VERSION="2.0" RUN rm /usr/share/java/cp-base-new/snakeyaml-*.jar \ - && wget -P /usr/share/java/cp-base-new $MAVEN_REPO/org/yaml/snakeyaml/$SNAKEYAML_VERSION/snakeyaml-$SNAKEYAML_VERSION.jar + && wget -P /usr/share/java/cp-base-new $MAVEN_CENTRAL_REPO_URL/org/yaml/snakeyaml/$SNAKEYAML_VERSION/snakeyaml-$SNAKEYAML_VERSION.jar # Based on https://github.com/blacktop's alpine kafka build FROM python:3-alpine +ARG ALPINE_REPO_URL +ARG APACHE_DOWNLOAD_URL +ARG GITHUB_REPO_URL + ENV KAFKA_VERSION 3.4.1 ENV SCALA_VERSION 2.13 LABEL name="kafka" version=${KAFKA_VERSION} +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk add --no-cache bash coreutils -RUN apk --no-cache add openjdk11-jre-headless --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community +RUN apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community RUN apk add --no-cache -t .build-deps git curl ca-certificates jq gcc musl-dev libffi-dev zip RUN mkdir -p /opt \ - && mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred') \ + && if [ "${APACHE_DOWNLOAD_URL}" != "null" ] ; then mirror="${APACHE_DOWNLOAD_URL}/" ; else mirror=$(curl --stderr /dev/null https://www.apache.org/dyn/closer.cgi\?as_json\=1 | jq -r '.preferred'); fi \ && curl -sSL "${mirror}kafka/${KAFKA_VERSION}/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" \ | tar -xzf - -C /opt \ && mv /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION} /opt/kafka \ @@ -39,8 +52,8 @@ RUN ls -la COPY --from=confluent_base /usr/share/java/cp-base-new/ /usr/share/java/cp-base-new/ COPY --from=confluent_base /etc/cp-base-new/log4j.properties /etc/cp-base-new/log4j.properties -ADD --chown=kafka:kafka https://github.com/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /usr/share/java/cp-base-new -ADD --chown=kafka:kafka https://github.com/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /opt/kafka/libs +ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /usr/share/java/cp-base-new +ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /opt/kafka/libs ENV METADATA_AUDIT_EVENT_NAME="MetadataAuditEvent_v4" ENV METADATA_CHANGE_EVENT_NAME="MetadataChangeEvent_v4" diff --git a/docker/kafka-setup/build.gradle b/docker/kafka-setup/build.gradle index 25f9847190de3..d7bc5c2d7d13f 100644 --- a/docker/kafka-setup/build.gradle +++ b/docker/kafka-setup/build.gradle @@ -26,6 +26,25 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + 
dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + if (project.hasProperty('apacheDownloadUrl')) { + dockerBuildArgs.APACHE_DOWNLOAD_URL = project.getProperty('apacheDownloadUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') diff --git a/docker/mysql-setup/Dockerfile b/docker/mysql-setup/Dockerfile index 56bab61180489..8b7ca704c32cd 100644 --- a/docker/mysql-setup/Dockerfile +++ b/docker/mysql-setup/Dockerfile @@ -1,8 +1,16 @@ +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. +ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine + FROM golang:1-alpine3.18 AS binary +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -12,6 +20,11 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 COPY --from=binary /go/bin/dockerize /usr/local/bin +ARG ALPINE_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk add --no-cache mysql-client bash mariadb-connector-c sqlite diff --git a/docker/mysql-setup/build.gradle b/docker/mysql-setup/build.gradle index 1598866914c0e..5c70a2f0d9a2d 100644 --- a/docker/mysql-setup/build.gradle +++ b/docker/mysql-setup/build.gradle @@ -27,6 +27,16 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') diff --git a/docker/postgres-setup/Dockerfile b/docker/postgres-setup/Dockerfile index 7f4d53ae044d4..e10f70571501e 100644 --- a/docker/postgres-setup/Dockerfile +++ b/docker/postgres-setup/Dockerfile @@ -1,8 +1,16 @@ +# Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
+ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine + FROM golang:1-alpine3.18 AS binary +ARG ALPINE_REPO_URL + ENV DOCKERIZE_VERSION v0.6.1 WORKDIR /go/src/github.com/jwilder +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + RUN apk --no-cache --update add openssl git tar curl WORKDIR /go/src/github.com/jwilder/dockerize @@ -12,6 +20,11 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM alpine:3 COPY --from=binary /go/bin/dockerize /usr/local/bin +ARG ALPINE_REPO_URL + +# Optionally set corporate mirror for apk +RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi + # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk add --no-cache postgresql-client sqlite diff --git a/docker/postgres-setup/build.gradle b/docker/postgres-setup/build.gradle index e24e206c99145..5c42a002f45be 100644 --- a/docker/postgres-setup/build.gradle +++ b/docker/postgres-setup/build.gradle @@ -27,6 +27,16 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByName('docker').dependsOn('build') diff --git a/entity-registry/custom-test-model/build.gradle b/entity-registry/custom-test-model/build.gradle index 778e2e42b95c4..8e17de0709188 100644 --- a/entity-registry/custom-test-model/build.gradle +++ b/entity-registry/custom-test-model/build.gradle @@ -2,7 +2,11 @@ import org.yaml.snakeyaml.Yaml buildscript { repositories{ - mavenCentral() + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() + } } dependencies { classpath("org.yaml:snakeyaml:1.33") diff --git a/metadata-integration/java/datahub-protobuf-example/build.gradle b/metadata-integration/java/datahub-protobuf-example/build.gradle index 71cbb67061887..4e53d8ed763ba 100644 --- a/metadata-integration/java/datahub-protobuf-example/build.gradle +++ b/metadata-integration/java/datahub-protobuf-example/build.gradle @@ -4,7 +4,11 @@ plugins { } repositories { - mavenCentral() + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() + } mavenLocal() } diff --git a/metadata-jobs/mae-consumer-job/build.gradle b/metadata-jobs/mae-consumer-job/build.gradle index 5e735e118493c..a8920d50b068e 100644 --- a/metadata-jobs/mae-consumer-job/build.gradle +++ b/metadata-jobs/mae-consumer-job/build.gradle @@ -58,6 +58,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if 
(project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":metadata-jobs:mae-consumer-job:docker").dependsOn([bootJar]) @@ -66,4 +82,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index ef042188bc3d8..2f60d1ae985fb 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ b/metadata-jobs/mce-consumer-job/build.gradle @@ -69,6 +69,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":metadata-jobs:mce-consumer-job:docker").dependsOn([bootJar]) @@ -77,4 +93,4 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) diff --git a/metadata-models-custom/build.gradle b/metadata-models-custom/build.gradle index 95a00766039a8..71d3b0fd1f736 100644 --- a/metadata-models-custom/build.gradle +++ b/metadata-models-custom/build.gradle @@ -2,7 +2,11 @@ import org.yaml.snakeyaml.Yaml buildscript { repositories{ - mavenCentral() + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() + } } dependencies { classpath("org.yaml:snakeyaml:1.33") diff --git a/metadata-service/war/build.gradle b/metadata-service/war/build.gradle index 35730ad6dfa9f..fc29b0bb46092 100644 --- a/metadata-service/war/build.gradle +++ b/metadata-service/war/build.gradle @@ -83,6 +83,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":metadata-service:war:docker").dependsOn([build, war]) diff --git a/repositories.gradle b/repositories.gradle index 69eaea6ca12bc..d82563c2659a0 100644 --- a/repositories.gradle +++ b/repositories.gradle @@ -1,15 +1,31 @@ repositories { gradlePluginPortal() mavenLocal() - mavenCentral() - maven { - url 
"https://packages.confluent.io/maven/" + + if (project.hasProperty('apacheMavenRepositoryUrl')) { + maven { url project.getProperty('apacheMavenRepositoryUrl') } + } else { + mavenCentral() } - maven { - url "https://plugins.gradle.org/m2/" + + if (project.hasProperty('confluentMavenRepositoryUrl')) { + maven { + url project.getProperty('confluentMavenRepositoryUrl') + } + } else { + maven { + url "https://packages.confluent.io/maven/" + } } - maven { - url "https://linkedin.jfrog.io/artifactory/open-source/" // GMA, pegasus + + if (project.hasProperty('linkedinOpenSourceRepositoryUrl')) { + maven { + url project.getProperty('linkedinOpenSourceRepositoryUrl') + } + } else { + maven { + url "https://linkedin.jfrog.io/artifactory/open-source/" // GMA, pegasus + } } } From 3d7962cf170632911467c280cb1fb173330e2568 Mon Sep 17 00:00:00 2001 From: Adriano Vega Llobell Date: Tue, 28 Nov 2023 23:58:42 +0100 Subject: [PATCH 004/263] fix(ingest/sql): improve handling of views with dots in their names (#9183) --- .../src/datahub/ingestion/source/sql/sql_common.py | 2 +- metadata-ingestion/tests/unit/test_sql_common.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 80f828e9ea2fd..67af6b2010c83 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -1054,7 +1054,7 @@ def _run_sql_parser( return view_definition_lineage_helper(raw_lineage, view_urn) def get_db_schema(self, dataset_identifier: str) -> Tuple[Optional[str], str]: - database, schema, _view = dataset_identifier.split(".") + database, schema, _view = dataset_identifier.split(".", 2) return database, schema def get_profiler_instance(self, inspector: Inspector) -> "DatahubGEProfiler": diff --git a/metadata-ingestion/tests/unit/test_sql_common.py b/metadata-ingestion/tests/unit/test_sql_common.py index 808b38192411d..e23d290b611f4 100644 --- a/metadata-ingestion/tests/unit/test_sql_common.py +++ b/metadata-ingestion/tests/unit/test_sql_common.py @@ -102,3 +102,17 @@ def test_use_source_schema_for_foreign_key_if_not_specified(): def test_get_platform_from_sqlalchemy_uri(uri: str, expected_platform: str) -> None: platform: str = get_platform_from_sqlalchemy_uri(uri) assert platform == expected_platform + + +def test_get_db_schema_with_dots_in_view_name(): + config: SQLCommonConfig = _TestSQLAlchemyConfig() + ctx: PipelineContext = PipelineContext(run_id="test_ctx") + platform: str = "TEST" + source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) + + database, schema = source.get_db_schema( + dataset_identifier="database.schema.long.view.name1" + ) + + assert database == "database" + assert schema == "schema" From 2031bd4de12d0e42974fb46e1839145dd86cb40e Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 28 Nov 2023 18:31:56 -0500 Subject: [PATCH 005/263] docs(ingest): update docs on adding stateful ingestion (#9327) --- .../add_stateful_ingestion_to_source.md | 197 ++++++------------ 1 file changed, 66 insertions(+), 131 deletions(-) diff --git a/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md b/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md index 9e39d24fb8578..a152697988c6f 100644 --- a/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md +++ 
b/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source.md @@ -5,160 +5,75 @@ the [Redunant Run Elimination](./stateful.md#redundant-run-elimination) use-case capability available for the sources. This document describes how to add support for these two use-cases to new sources. ## Adding Stale Metadata Removal to a Source -Adding the stale metadata removal use-case to a new source involves -1. Defining the new checkpoint state that stores the list of entities emitted from a specific ingestion run. -2. Modifying the `SourceConfig` associated with the source to use a custom `stateful_ingestion` config param. -3. Modifying the `SourceReport` associated with the source to include soft-deleted entities in the report. -4. Modifying the `Source` to - 1. Instantiate the StaleEntityRemovalHandler object - 2. Add entities from the current run to the state object - 3. Emit stale metadata removal workunits + +Adding the stale metadata removal use-case to a new source involves modifying the source config, source report, and the source itself. + +For a full example of all changes required: [Adding stale metadata removal to the MongoDB source](https://github.com/datahub-project/datahub/pull/9118). The [datahub.ingestion.source.state.stale_entity_removal_handler](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/stale_entity_removal_handler.py) module provides the supporting infrastructure for all the steps described above and substantially simplifies the implementation on the source side. Below is a detailed explanation of each of these steps along with examples. -### 1. Defining the checkpoint state for the source. -The checkpoint state class is responsible for tracking the entities emitted from each ingestion run. If none of the existing states do not meet the needs of the new source, a new checkpoint state must be created. The state must -inherit from the `StaleEntityCheckpointStateBase` abstract class shown below, and implement each of the abstract methods. -```python -class StaleEntityCheckpointStateBase(CheckpointStateBase, ABC, Generic[Derived]): - """ - Defines the abstract interface for the checkpoint states that are used for stale entity removal. - Examples include sql_common state for tracking table and & view urns, - dbt that tracks node & assertion urns, kafka state tracking topic urns. - """ - - @classmethod - @abstractmethod - def get_supported_types(cls) -> List[str]: - pass - - @abstractmethod - def add_checkpoint_urn(self, type: str, urn: str) -> None: - """ - Adds an urn into the list used for tracking the type. - :param type: The type of the urn such as a 'table', 'view', - 'node', 'topic', 'assertion' that the concrete sub-class understands. - :param urn: The urn string - :return: None. - """ - pass - - @abstractmethod - def get_urns_not_in( - self, type: str, other_checkpoint_state: Derived - ) -> Iterable[str]: - """ - Gets the urns present in this checkpoint but not the other_checkpoint for the given type. - :param type: The type of the urn such as a 'table', 'view', - 'node', 'topic', 'assertion' that the concrete sub-class understands. - :param other_checkpoint_state: the checkpoint state to compute the urn set difference against. - :return: an iterable to the set of urns present in this checkpoing state but not in the other_checkpoint. 
- """ - pass -``` - -Examples: -* [BaseSQLAlchemyCheckpointState](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/sql_common_state.py#L17) - -### 2. Modifying the SourceConfig +### 1. Modify the source config The source's config must inherit from `StatefulIngestionConfigBase`, and should declare a field named `stateful_ingestion` of type `Optional[StatefulStaleMetadataRemovalConfig]`. -Examples: -- The `KafkaSourceConfig` +Example: + ```python -from typing import List, Optional -import pydantic -from datahub.ingestion.source.state.stale_entity_removal_handler import StatefulStaleMetadataRemovalConfig -from datahub.ingestion.source.state.stateful_ingestion_base import ( +from datahub.ingestion.source.state.stale_entity_removal_handler import ( + StatefulStaleMetadataRemovalConfig, StatefulIngestionConfigBase, ) -class KafkaSourceConfig(StatefulIngestionConfigBase): +class MySourceConfig(StatefulIngestionConfigBase): # ...... stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = None ``` -### 3. Modifying the SourceReport -The report class of the source should inherit from `StaleEntityRemovalSourceReport` whose definition is shown below. -```python -from typing import List -from dataclasses import dataclass, field -from datahub.ingestion.source.state.stateful_ingestion_base import StatefulIngestionReport -@dataclass -class StaleEntityRemovalSourceReport(StatefulIngestionReport): - soft_deleted_stale_entities: List[str] = field(default_factory=list) +### 2. Modify the source report - def report_stale_entity_soft_deleted(self, urn: str) -> None: - self.soft_deleted_stale_entities.append(urn) -``` +The report class of the source should inherit from `StaleEntityRemovalSourceReport` instead of `SourceReport`. -Examples: -* The `KafkaSourceReport` ```python -from dataclasses import dataclass -from datahub.ingestion.source.state.stale_entity_removal_handler import StaleEntityRemovalSourceReport +from datahub.ingestion.source.state.stale_entity_removal_handler import ( + StaleEntityRemovalSourceReport, +) + @dataclass -class KafkaSourceReport(StaleEntityRemovalSourceReport): - # + pass ``` -### 4. Modifying the Source -The source must inherit from `StatefulIngestionSourceBase`. +### 3. Modify the source -#### 4.1 Instantiate StaleEntityRemovalHandler in the `__init__` method of the source. +1. The source must inherit from `StatefulIngestionSourceBase` instead of `Source`. +2. The source should contain a custom `get_workunit_processors` method. -Examples: -1. The `KafkaSource` ```python from datahub.ingestion.source.state.stateful_ingestion_base import StatefulIngestionSourceBase from datahub.ingestion.source.state.stale_entity_removal_handler import StaleEntityRemovalHandler -class KafkaSource(StatefulIngestionSourceBase): - def __init__(self, config: KafkaSourceConfig, ctx: PipelineContext): - # - # Create and register the stateful ingestion stale entity removal handler. - self.stale_entity_removal_handler = StaleEntityRemovalHandler( - source=self, - config=self.source_config, - state_type_class=KafkaCheckpointState, - pipeline_name=self.ctx.pipeline_name, - run_id=self.ctx.run_id, - ) -``` -#### 4.2 Adding entities from current run to the state object. -Use the `add_entity_to_state` method of the `StaleEntityRemovalHandler`. 
-Examples:
-```python
-# Kafka
-self.stale_entity_removal_handler.add_entity_to_state(
-    type="topic",
-    urn=topic_urn,)
-
-# DBT
-self.stale_entity_removal_handler.add_entity_to_state(
-    type="dataset",
-    urn=node_datahub_urn
-)
-self.stale_entity_removal_handler.add_entity_to_state(
-    type="assertion",
-    urn=node_datahub_urn,
-)
-```
-
-#### 4.3 Emitting soft-delete workunits associated with the stale entities.
-```python
-def get_workunits(self) -> Iterable[MetadataWorkUnit]:
-    #
-    # Emit the rest of the workunits for the source.
-    # NOTE: Populating the current state happens during the execution of this code.
-    # ...
-
-    # Clean up stale entities at the end
-    yield from self.stale_entity_removal_handler.gen_removed_entity_workunits()
+        self.config = config
+        self.report = MySourceReport()
+
+        # other initialization code here
+
+    def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:
+        return [
+            *super().get_workunit_processors(),
+            StaleEntityRemovalHandler.create(
+                self, self.config, self.ctx
+            ).workunit_processor,
+        ]
+
+    # other methods here
 ```

## Adding Redundant Run Elimination to a Source

This use-case applies to the sources that drive the ingestion by querying logs over a specific time period (such as snowflake usage, bigquery usage etc.). It typically involves expensive and long-running queries. To add redundant run elimination to a new source to prevent the expensive reruns for the same time range (potentially due to a user error or a scheduler malfunction), the following steps are required.
+
1. Update the `SourceConfig`
2. Update the `SourceReport`
3. Modify the `Source` to
   1. Instantiate the RedundantRunSkipHandler object.
   2. Check if the current run should be skipped.
   3. Update the state for the current run (start & end times).

The [datahub.ingestion.source.state.redundant_run_skip_handler](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/redundant_run_skip_handler.py) module provides the supporting infrastructure required for all the steps described above.

NOTE: The handler currently uses a simple state, the [BaseUsageCheckpointState](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/usage_common_state.py), across all sources it supports (unlike the StaleEntityRemovalHandler).
+
### 1. Modifying the SourceConfig
+
The `SourceConfig` must inherit from the [StatefulRedundantRunSkipConfig](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion/src/datahub/ingestion/source/state/redundant_run_skip_handler.py#L23) class.

Examples:
+
1.
Snowflake Usage
+
 ```python
 @dataclass
 class SnowflakeUsageReport(BaseSnowflakeReport, StatefulIngestionReport):
     # 
 ```
+
 ### 3. Modifying the Source
+
 The source must inherit from `StatefulIngestionSourceBase`.
+
 #### 3.1 Instantiate RedundantRunSkipHandler in the `__init__` method of the source.
+
 The source should create an instance of the `RedundantRunSkipHandler` in its `__init__` method.
 
 Examples:
 
 Snowflake Usage
+
 ```python
 from datahub.ingestion.source.state.redundant_run_skip_handler import (
     RedundantRunSkipHandler,
 )
 class SnowflakeUsageSource(StatefulIngestionSourceBase):
-    
+
     def __init__(self, config: SnowflakeUsageConfig, ctx: PipelineContext):
         super(SnowflakeUsageSource, self).__init__(config, ctx)
         self.config: SnowflakeUsageConfig = config
@@ -226,10 +155,13 @@ class SnowflakeUsageSource(StatefulIngestionSourceBase):
             run_id=self.ctx.run_id,
         )
 ```
+
 #### 3.2 Checking if the current run should be skipped.
+
 The source can query whether the current run should be skipped using the `should_skip_this_run` method of `RedundantRunSkipHandler`. This should be done from the `get_workunits` method, before doing any other work.
 
 Example code:
+
 ```python
 def get_workunits(self) -> Iterable[MetadataWorkUnit]:
     # Skip a redundant run
@@ -239,10 +171,13 @@ def get_workunits(self) -> Iterable[MetadataWorkUnit]:
         return
     # Generate the workunits.
 ```
+
 #### 3.3 Updating the state for the current run.
+
 The source should use the `update_state` method of `RedundantRunSkipHandler` to update the current run's state. This step can be performed in the `get_workunits` method if the run has not been skipped.
 
 Example code:
+
 ```python
 def get_workunits(self) -> Iterable[MetadataWorkUnit]:
     # Skip a redundant run
@@ -250,7 +185,7 @@ Example code:
         cur_start_time_millis=self.config.start_time
     ):
         return
-    
+
     # Generate the workunits.
     #
     # Update checkpoint state for this run. 
@@ -258,4 +193,4 @@ Example code: start_time_millis=self.config.start_time, end_time_millis=self.config.end_time, ) -``` \ No newline at end of file +``` From 3a840371ccdb84ea1a264ef69b0b87709f2e1adc Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 28 Nov 2023 21:21:15 -0600 Subject: [PATCH 006/263] fix(docker): docker compose health checks port fix (#9326) --- docker/docker-compose-with-cassandra.yml | 2 +- docker/docker-compose-without-neo4j.yml | 10 +++++----- docker/docker-compose.yml | 12 ++++++------ docker/quickstart/docker-compose-m1.quickstart.yml | 12 ++++++------ .../docker-compose-without-neo4j-m1.quickstart.yml | 10 +++++----- .../docker-compose-without-neo4j.quickstart.yml | 10 +++++----- docker/quickstart/docker-compose.quickstart.yml | 12 ++++++------ 7 files changed, 34 insertions(+), 34 deletions(-) diff --git a/docker/docker-compose-with-cassandra.yml b/docker/docker-compose-with-cassandra.yml index 39f4341600572..48239fcd87831 100644 --- a/docker/docker-compose-with-cassandra.yml +++ b/docker/docker-compose-with-cassandra.yml @@ -43,7 +43,7 @@ services: dockerfile: docker/datahub-gms/Dockerfile env_file: ./datahub-gms/env/docker.cassandra.env healthcheck: - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 20s interval: 1s retries: 20 diff --git a/docker/docker-compose-without-neo4j.yml b/docker/docker-compose-without-neo4j.yml index 235e89e340551..6191994eaa1ea 100644 --- a/docker/docker-compose-without-neo4j.yml +++ b/docker/docker-compose-without-neo4j.yml @@ -44,7 +44,7 @@ services: dockerfile: docker/datahub-gms/Dockerfile env_file: datahub-gms/env/docker-without-neo4j.env healthcheck: - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 90s interval: 1s retries: 3 @@ -119,7 +119,7 @@ services: limits: memory: 1G healthcheck: - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s start_period: 20s interval: 1s retries: 3 @@ -134,7 +134,7 @@ services: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 env_file: schema-registry/env/docker.env healthcheck: - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} start_period: 60s interval: 1s retries: 3 @@ -150,7 +150,7 @@ services: - ${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092}:9092 env_file: broker/env/docker.env healthcheck: - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} start_period: 60s interval: 1s retries: 5 @@ -168,7 +168,7 @@ services: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 env_file: zookeeper/env/docker.env healthcheck: - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} start_period: 30s interval: 5s retries: 3 diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 46da8c6fdbd2a..95f56fe47e3cc 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -42,7 +42,7 @@ services: context: ../ dockerfile: docker/datahub-gms/Dockerfile healthcheck: - test: curl 
-sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 90s interval: 1s retries: 3 @@ -124,7 +124,7 @@ services: limits: memory: 1G healthcheck: - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s start_period: 20s interval: 1s retries: 3 @@ -140,7 +140,7 @@ services: - ${DATAHUB_MAPPED_NEO4J_BOLT_PORT:-7687}:7687 env_file: neo4j/env/docker.env healthcheck: - test: wget http://neo4j:$${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474} + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} start_period: 5s interval: 1s retries: 5 @@ -155,7 +155,7 @@ services: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 env_file: schema-registry/env/docker.env healthcheck: - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} start_period: 60s interval: 1s retries: 3 @@ -171,7 +171,7 @@ services: - ${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092}:9092 env_file: broker/env/docker.env healthcheck: - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} start_period: 60s interval: 1s retries: 5 @@ -189,7 +189,7 @@ services: - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181 env_file: zookeeper/env/docker.env healthcheck: - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} start_period: 10s interval: 5s retries: 3 diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 4df32395cf82d..7b7ca4052f324 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -111,7 +111,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -171,7 +171,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -258,7 +258,7 @@ services: interval: 1s retries: 5 start_period: 5s - test: wget http://neo4j:$${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474} + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} timeout: 5s hostname: neo4j image: neo4j/neo4j-arm64-experimental:4.0.6-arm64 @@ -280,7 +280,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s 
hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -295,7 +295,7 @@ services: interval: 5s retries: 3 start_period: 10s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index b1cb6c208a42d..53dacaf6ef63b 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -106,7 +106,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -164,7 +164,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -253,7 +253,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -268,7 +268,7 @@ services: interval: 5s retries: 3 start_period: 30s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 6eac53229e82a..1ca91aa19206d 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -106,7 +106,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -164,7 +164,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch 
image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -253,7 +253,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -268,7 +268,7 @@ services: interval: 5s retries: 3 start_period: 30s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index 86d70abd2b815..c77b4418b6f36 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -22,7 +22,7 @@ services: interval: 1s retries: 5 start_period: 60s - test: nc -z broker $${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092} + test: nc -z broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} timeout: 5s hostname: broker image: confluentinc/cp-kafka:7.4.0 @@ -111,7 +111,7 @@ services: interval: 1s retries: 3 start_period: 90s - test: curl -sS --fail http://datahub-gms:${DATAHUB_MAPPED_GMS_PORT:-8080}/health + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} @@ -171,7 +171,7 @@ services: interval: 1s retries: 3 start_period: 20s - test: curl -sS --fail http://elasticsearch:$${DATAHUB_MAPPED_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + test: curl -sS --fail http://elasticsearch:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s timeout: 5s hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} @@ -258,7 +258,7 @@ services: interval: 1s retries: 5 start_period: 5s - test: wget http://neo4j:$${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474} + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} timeout: 5s hostname: neo4j image: neo4j:4.4.9-community @@ -280,7 +280,7 @@ services: interval: 1s retries: 3 start_period: 60s - test: nc -z schema-registry ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081} + test: nc -z schema-registry ${DATAHUB_SCHEMA_REGISTRY_PORT:-8081} timeout: 5s hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 @@ -295,7 +295,7 @@ services: interval: 5s retries: 3 start_period: 10s - test: echo srvr | nc zookeeper $${DATAHUB_MAPPED_ZK_PORT:-2181} + test: echo srvr | nc zookeeper $${DATAHUB_ZK_PORT:-2181} timeout: 5s hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 From bc24136763a35d0d128162a0cbf74b9c69fc49ae Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Wed, 29 Nov 2023 09:13:21 +0530 Subject: [PATCH 007/263] =?UTF-8?q?fix(ui):=20vulnerability=20(React):=20I?= =?UTF-8?q?nefficient=20Regular=20Expression=20Complexit=E2=80=A6=20(#9324?= =?UTF-8?q?)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- datahub-web-react/package.json | 4 ++- datahub-web-react/yarn.lock | 64 ++++------------------------------ 2 files changed, 10 insertions(+), 58 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 62186125b4ad2..fd01fccbdff6c 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ 
-149,6 +149,8 @@ "resolutions": { "@ant-design/colors": "6.0.0", "refractor": "3.3.1", - "json-schema": "0.4.0" + "json-schema": "0.4.0", + "prismjs": "^1.27.0", + "nth-check": "^2.0.1" } } diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index b9a6c62c88de3..3bab8aebdf3fb 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -5998,7 +5998,7 @@ bonjour-service@^1.0.11: fast-deep-equal "^3.1.3" multicast-dns "^7.2.5" -boolbase@^1.0.0, boolbase@~1.0.0: +boolbase@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= @@ -6437,15 +6437,6 @@ cli-width@^3.0.0: resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== -clipboard@^2.0.0: - version "2.0.8" - resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.8.tgz#ffc6c103dd2967a83005f3f61976aa4655a4cdba" - integrity sha512-Y6WO0unAIQp5bLmk1zdThRhgJt/x3ks6f30s3oE3H1mgIEU33XyQjEf8gsf6DxC7NPX8Y1SsNWjUjL/ywLnnbQ== - dependencies: - good-listener "^1.2.2" - select "^1.1.2" - tiny-emitter "^2.0.0" - cliui@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" @@ -7389,11 +7380,6 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= -delegate@^3.1.2: - version "3.2.0" - resolved "https://registry.yarnpkg.com/delegate/-/delegate-3.2.0.tgz#b66b71c3158522e8ab5744f720d8ca0c2af59166" - integrity sha512-IofjkYBZaZivn0V8nnsMJGBr4jVLxHDheKSW88PyxS5QC4Vo9ZbZVvhzlSxY87fVq3STR6r+4cGepyHkcWOQSw== - depd@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" @@ -8967,13 +8953,6 @@ globby@^13.1.1: merge2 "^1.4.1" slash "^4.0.0" -good-listener@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/good-listener/-/good-listener-1.2.2.tgz#d53b30cdf9313dffb7dc9a0d477096aa6d145c50" - integrity sha1-1TswzfkxPf+33JoNR3CWqm0UXFA= - dependencies: - delegate "^3.1.2" - got@^9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -10793,12 +10772,7 @@ json-schema-traverse@^1.0.0: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== -json-schema@0.2.3, json-schema@0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" - integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== - -json-schema@^0.4.0: +json-schema@0.2.3, json-schema@0.4.0, json-schema@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== @@ -12122,14 +12096,7 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -nth-check@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" - integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== - dependencies: - boolbase "~1.0.0" - -nth-check@^2.0.1: +nth-check@^1.0.2, nth-check@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== @@ -13262,17 +13229,10 @@ pretty-format@^28.1.3: ansi-styles "^5.0.0" react-is "^18.0.0" -prismjs@^1.22.0: - version "1.24.1" - resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.24.1.tgz#c4d7895c4d6500289482fa8936d9cdd192684036" - integrity sha512-mNPsedLuk90RVJioIky8ANZEwYm5w9LcvCXrxHlwf4fNVSn8jEipMybMkWUyyF0JhnC+C4VcOVSBuHRKs1L5Ow== - -prismjs@~1.23.0: - version "1.23.0" - resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.23.0.tgz#d3b3967f7d72440690497652a9d40ff046067f33" - integrity sha512-c29LVsqOaLbBHuIbsTxaKENh1N2EQBOHaWv7gkHN4dgRbxSREqDnDbtFJYdpPauS4YCplMSNCABQ6Eeor69bAA== - optionalDependencies: - clipboard "^2.0.0" +prismjs@^1.22.0, prismjs@^1.27.0, prismjs@~1.23.0: + version "1.29.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.29.0.tgz#f113555a8fa9b57c35e637bba27509dcf802dd12" + integrity sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q== process-nextick-args@~2.0.0: version "2.0.1" @@ -15039,11 +14999,6 @@ select-hose@^2.0.0: resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= -select@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" - integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0= - selfsigned@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" @@ -16067,11 +16022,6 @@ thunky@^1.0.2: resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== -tiny-emitter@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" - integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q== - tiny-invariant@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875" From e4c05fa9c81e9bb98f988aaa3b02f8b252df7933 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Tue, 28 Nov 2023 22:53:57 -0500 Subject: [PATCH 008/263] fix(ui): Fix UI glitch in policies creator (#9266) --- .../policy/PolicyPrivilegeForm.tsx | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx b/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx index b8e1505fceaec..ac73a1f5ece7c 100644 --- a/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx +++ b/datahub-web-react/src/app/permissions/policy/PolicyPrivilegeForm.tsx @@ -319,7 +319,7 @@ export default function PolicyPrivilegeForm({ .filter((privs) => privs.resourceType !== 'all') .map((resPrivs) 
=> { return ( - + {resPrivs.resourceTypeDisplayName} ); @@ -355,7 +355,9 @@ export default function PolicyPrivilegeForm({ )} > {resourceSearchResults?.map((result) => ( - {renderSearchResult(result)} + + {renderSearchResult(result)} + ))} @@ -389,7 +391,9 @@ export default function PolicyPrivilegeForm({ dropdownStyle={isShowingDomainNavigator ? { display: 'none' } : {}} > {domainSearchResults?.map((result) => ( - {renderSearchResult(result)} + + {renderSearchResult(result)} + ))} @@ -412,9 +416,14 @@ export default function PolicyPrivilegeForm({ )} > - {privilegeOptions.map((priv) => ( - {priv.displayName} - ))} + {privilegeOptions.map((priv, index) => { + const key = `${priv.type}-${index}`; + return ( + + {priv.displayName} + + ); + })} All Privileges From 10b7a951da2955dc0a80021d0ed40e6f00c51b16 Mon Sep 17 00:00:00 2001 From: allizex <150264485+allizex@users.noreply.github.com> Date: Wed, 29 Nov 2023 05:40:34 +0100 Subject: [PATCH 009/263] fix(sidebar): remove a space reserved for scroll bars when sidebar is collapsed (#9322) --- datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx index 0d3d40c4a71af..822e75b65febc 100644 --- a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx +++ b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx @@ -26,12 +26,12 @@ const SidebarHeader = styled.div` white-space: nowrap; `; -const SidebarBody = styled.div` +const SidebarBody = styled.div<{ visible: boolean }>` height: calc(100% - 47px); padding-left: 16px; padding-right: 12px; padding-bottom: 200px; - overflow: auto; + overflow: ${(props) => (props.visible ? 'auto' : 'hidden')}; white-space: nowrap; `; @@ -50,7 +50,7 @@ const BrowseSidebar = ({ visible, width }: Props) => { Navigate - + {entityAggregations && !entityAggregations.length &&
No results found
} {entityAggregations?.map((entityAggregation) => ( From ab10e6bc58471ec3ee8870377dc2d2a0f2527406 Mon Sep 17 00:00:00 2001 From: terratrue-daniel <97548386+terratrue-daniel@users.noreply.github.com> Date: Wed, 29 Nov 2023 00:02:26 -0800 Subject: [PATCH 010/263] feat(ingest/mssql): enable TLS encryption for SQLServer using pytds (#9256) --- metadata-ingestion/docs/sources/mssql/mssql_recipe.yml | 8 ++++++++ metadata-ingestion/setup.py | 2 +- .../src/datahub/ingestion/source/sql/mssql/source.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml index 5f1e24ce1e956..93be7a86d72cc 100644 --- a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml +++ b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml @@ -9,6 +9,14 @@ source: username: user password: pass + # Options + # Uncomment if you need to use encryption with pytds + # See https://python-tds.readthedocs.io/en/latest/pytds.html#pytds.connect + # options: + # connect_args: + # cafile: server-ca.pem + # validate_host: true + sink: # sink configs diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 2b002164a49b9..8d9892d8e11b1 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -350,7 +350,7 @@ "mlflow": {"mlflow-skinny>=2.3.0"}, "mode": {"requests", "tenacity>=8.0.1"} | sqllineage_lib, "mongodb": {"pymongo[srv]>=3.11", "packaging"}, - "mssql": sql_common | {"sqlalchemy-pytds>=0.3"}, + "mssql": sql_common | {"sqlalchemy-pytds>=0.3", "pyOpenSSL"}, "mssql-odbc": sql_common | {"pyodbc"}, "mysql": mysql, # mariadb should have same dependency as mysql diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py index 6eea5a4c31fa6..2442df595d967 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/mssql/source.py @@ -155,7 +155,7 @@ class SQLServerSource(SQLAlchemySource): - Metadata for databases, schemas, views and tables - Column types associated with each table/view - Table, row, and column statistics via optional SQL profiling - We have two options for the underlying library used to connect to SQL Server: (1) [python-tds](https://github.com/denisenkom/pytds) and (2) [pyodbc](https://github.com/mkleehammer/pyodbc). The TDS library is pure Python and hence easier to install, but only PyODBC supports encrypted connections. + We have two options for the underlying library used to connect to SQL Server: (1) [python-tds](https://github.com/denisenkom/pytds) and (2) [pyodbc](https://github.com/mkleehammer/pyodbc). The TDS library is pure Python and hence easier to install. 
""" def __init__(self, config: SQLServerConfig, ctx: PipelineContext): From c946d26a624e39655d98e93a044d067030819d19 Mon Sep 17 00:00:00 2001 From: Mide Ojikutu Date: Wed, 29 Nov 2023 08:02:57 +0000 Subject: [PATCH 011/263] fix(datahub-frontend): Add playCaffeine as replacement for removed playEhcache dependency (#8344) --- build.gradle | 1 + datahub-frontend/play.gradle | 1 + 2 files changed, 2 insertions(+) diff --git a/build.gradle b/build.gradle index 7c5deb4783943..c1278a6dab1a0 100644 --- a/build.gradle +++ b/build.gradle @@ -167,6 +167,7 @@ project.ext.externalDependency = [ 'parquetHadoop': 'org.apache.parquet:parquet-hadoop:1.13.1', 'picocli': 'info.picocli:picocli:4.5.0', 'playCache': "com.typesafe.play:play-cache_2.12:$playVersion", + 'playCaffeineCache': "com.typesafe.play:play-caffeine-cache_2.12:$playVersion", 'playWs': 'com.typesafe.play:play-ahc-ws-standalone_2.12:2.1.10', 'playDocs': "com.typesafe.play:play-docs_2.12:$playVersion", 'playGuice': "com.typesafe.play:play-guice_2.12:$playVersion", diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index daecba16cbf72..dd1ceee411f74 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -58,6 +58,7 @@ dependencies { implementation externalDependency.shiroCore implementation externalDependency.playCache + implementation externalDependency.playCaffeineCache implementation externalDependency.playWs implementation externalDependency.playServer implementation externalDependency.playAkkaHttpServer From 4dd6738ae7707ab8b085c2b2c1502f0a8c86d361 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 29 Nov 2023 04:25:33 -0500 Subject: [PATCH 012/263] fix(ingest): bump pyhive to fix headers issue (#9328) --- metadata-ingestion/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 8d9892d8e11b1..4f5f09fb148fa 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -214,7 +214,8 @@ # - 0.6.13 adds a small fix for Databricks # - 0.6.14 uses pure-sasl instead of sasl so it builds on Python 3.11 # - 0.6.15 adds support for thrift > 0.14 (cherry-picked from https://github.com/apache/thrift/pull/2491) - "acryl-pyhive[hive_pure_sasl]==0.6.15", + # - 0.6.16 fixes a regression in 0.6.15 (https://github.com/acryldata/PyHive/pull/9) + "acryl-pyhive[hive-pure-sasl]==0.6.16", # As per https://github.com/datahub-project/datahub/issues/8405 # and https://github.com/dropbox/PyHive/issues/417, version 0.14.0 # of thrift broke PyHive's hive+http transport. 
From 0795f0b2e8b40502c6fedb469f4cc5b3e2e8146e Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Wed, 29 Nov 2023 09:16:48 -0600 Subject: [PATCH 013/263] feat(gradle): quickstart postgres gradle task (#9329) --- docker/build.gradle | 34 +++++++++++++++++++ ...ompose-without-neo4j.postgres.override.yml | 2 +- 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/docker/build.gradle b/docker/build.gradle index 56634a5fe0c67..c7f783af6c997 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -15,6 +15,7 @@ ext { ':metadata-service:war', ':datahub-frontend', ] + debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job', ':metadata-jobs:mae-consumer-job'] debug_compose_args = [ @@ -27,6 +28,13 @@ ext { 'datahub-gms', 'datahub-frontend-react' ] + + // Postgres + pg_quickstart_modules = quickstart_modules - [':docker:mysql-setup'] + [':docker:postgres-setup'] + pg_compose_args = [ + '-f', 'docker-compose-without-neo4j.yml', + '-f', 'docker-compose-without-neo4j.postgres.override.yml' + ] } task quickstart(type: Exec, dependsOn: ':metadata-ingestion:install') { @@ -125,3 +133,29 @@ task debugReload(type: Exec) { def cmd = ['docker compose -p datahub'] + debug_compose_args + ['restart'] + debug_reloadable commandLine 'bash', '-c', cmd.join(" ") } + +task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { + dependsOn(pg_quickstart_modules.collect { it + ':dockerTag' }) + shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' + + environment "DATAHUB_TELEMETRY_ENABLED", "false" + environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" + environment "DATAHUB_POSTGRES_VERSION", "15.5" + + // OpenSearch + environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' + environment "DATAHUB_SEARCH_TAG", '2.9.0' + environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' + environment "USE_AWS_ELASTICSEARCH", 'true' + + def cmd = [ + 'source ../metadata-ingestion/venv/bin/activate && ', + 'datahub docker quickstart', + '--no-pull-images', + '--standalone_consumers', + '--version', "v${version}", + '--dump-logs-on-failure' + ] + pg_compose_args + + commandLine 'bash', '-c', cmd.join(" ") +} diff --git a/docker/docker-compose-without-neo4j.postgres.override.yml b/docker/docker-compose-without-neo4j.postgres.override.yml index e4c754b30afd7..369b5a155fc36 100644 --- a/docker/docker-compose-without-neo4j.postgres.override.yml +++ b/docker/docker-compose-without-neo4j.postgres.override.yml @@ -53,7 +53,7 @@ services: postgres: container_name: postgres hostname: postgres - image: postgres:12.3 + image: postgres:${DATAHUB_POSTGRES_VERSION:-12.3} env_file: postgres/env/docker.env ports: - '5432:5432' From f8db90926e927b890ce9be674b8b45d55e4bffc4 Mon Sep 17 00:00:00 2001 From: noggi Date: Wed, 29 Nov 2023 09:26:14 -0800 Subject: [PATCH 014/263] Upload metadata model to s3 (#9325) --- .github/workflows/metadata-model.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index 4bae5ccc9a266..eb098a327e4cb 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -37,6 +37,19 @@ jobs: run: ./metadata-ingestion/scripts/install_deps.sh - name: Run model generation run: ./gradlew :metadata-models:build + - name: Generate metadata files + if: ${{ needs.setup.outputs.publish == 'true' }} + run: ./gradlew :metadata-ingestion:modelDocGen + - name: Configure AWS 
Credentials + if: ${{ needs.setup.outputs.publish == 'true' }} + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-access-key-id: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY }} + aws-region: us-west-2 + - name: Upload metadata to S3 + if: ${{ needs.setup.outputs.publish == 'true' }} + run: aws s3 cp ./metadata-ingestion/generated/docs/metadata_model_mces.json s3://${{ secrets.ACRYL_CI_ARTIFACTS_BUCKET }}/datahub/demo/metadata/ - name: Upload metadata to DataHub if: ${{ needs.setup.outputs.publish == 'true' }} env: From fe444aff2638ca232b18827be9de25183cf1c347 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Wed, 29 Nov 2023 13:52:26 -0500 Subject: [PATCH 015/263] fix(ui) Set explicit height on logo images to fix render bug (#9344) --- datahub-web-react/src/app/shared/LogoCountCard.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/datahub-web-react/src/app/shared/LogoCountCard.tsx b/datahub-web-react/src/app/shared/LogoCountCard.tsx index ebf0d9cd4f54e..e67898520e7b8 100644 --- a/datahub-web-react/src/app/shared/LogoCountCard.tsx +++ b/datahub-web-react/src/app/shared/LogoCountCard.tsx @@ -7,6 +7,7 @@ import { HomePageButton } from './components'; const PlatformLogo = styled(Image)` max-height: 32px; + height: 32px; width: auto; object-fit: contain; background-color: transparent; From 5e52e31fc96f71204f8b58a9f4c2f75a489f5c46 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Wed, 29 Nov 2023 14:56:30 -0500 Subject: [PATCH 016/263] fix(ingest/browse): Re-emit browse path v2 aspects to avoid race condition (#9227) --- .../src/datahub/ingestion/api/source.py | 4 +++- .../src/datahub/ingestion/api/source_helpers.py | 15 +++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/api/source.py b/metadata-ingestion/src/datahub/ingestion/api/source.py index 8940642f7008a..a272b6e3cffcf 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/source.py +++ b/metadata-ingestion/src/datahub/ingestion/api/source.py @@ -33,6 +33,7 @@ auto_materialize_referenced_tags, auto_status_aspect, auto_workunit_reporter, + re_emit_browse_path_v2, ) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent @@ -278,13 +279,14 @@ def _get_browse_path_processor(self, dry_run: bool) -> MetadataWorkUnitProcessor if isinstance(config, PlatformInstanceConfigMixin) and config.platform_instance: platform_instance = config.platform_instance - return partial( + browse_path_processor = partial( auto_browse_path_v2, platform=platform, platform_instance=platform_instance, drop_dirs=[s for s in browse_path_drop_dirs if s is not None], dry_run=dry_run, ) + return lambda stream: re_emit_browse_path_v2(browse_path_processor(stream)) class TestableSource(Source): diff --git a/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py b/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py index fae260226195c..66365ef0cdc45 100644 --- a/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py +++ b/metadata-ingestion/src/datahub/ingestion/api/source_helpers.py @@ -198,6 +198,21 @@ def auto_lowercase_urns( yield wu +def re_emit_browse_path_v2( + stream: Iterable[MetadataWorkUnit], +) -> Iterable[MetadataWorkUnit]: + """Re-emit browse paths v2 aspects, to avoid race condition where server overwrites with default.""" + browse_path_v2_workunits = [] + + for wu in stream: + yield wu + if 
wu.is_primary_source and wu.get_aspect_of_type(BrowsePathsV2Class): + browse_path_v2_workunits.append(wu) + + for wu in browse_path_v2_workunits: + yield wu + + def auto_browse_path_v2( stream: Iterable[MetadataWorkUnit], *, From 863894b80a36776438cfc4c8728fedba013ddd31 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 29 Nov 2023 16:25:48 -0500 Subject: [PATCH 017/263] feat(ingest/ldap): make ingestion robust to string departmentId (#9258) --- metadata-ingestion/src/datahub/ingestion/source/ldap.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ldap.py b/metadata-ingestion/src/datahub/ingestion/source/ldap.py index e1d035a96d42f..72985688273f6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ldap.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ldap.py @@ -1,4 +1,5 @@ """LDAP Source""" +import contextlib import dataclasses from typing import Any, Dict, Iterable, List, Optional @@ -390,10 +391,10 @@ def build_corp_user_mce( country_code = get_attr_or_none( attrs, self.config.user_attrs_map["countryCode"] ) - if department_id_str: - department_id = int(department_id_str) - else: - department_id = None + department_id = None + with contextlib.suppress(ValueError): + if department_id_str: + department_id = int(department_id_str) custom_props_map = {} if self.config.custom_props_list: From dd09f5e68f76003ee54ab776eefbfb71f335ba15 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Thu, 30 Nov 2023 00:56:26 +0100 Subject: [PATCH 018/263] doc(ingest/teradata): Adding Teradata to list of Integrations (#9336) --- docs-website/filterTagIndexes.json | 11 +++++++++++ .../src/datahub/ingestion/source/redshift/redshift.py | 4 ++++ 2 files changed, 15 insertions(+) diff --git a/docs-website/filterTagIndexes.json b/docs-website/filterTagIndexes.json index c154b586fe66e..419f16e8d8a52 100644 --- a/docs-website/filterTagIndexes.json +++ b/docs-website/filterTagIndexes.json @@ -605,6 +605,17 @@ "Features": "Notifications, Alerting" } }, + { + "Path": "docs/generated/ingestion/sources/teradata", + "imgPath": "img/logos/platforms/teradata.svg", + "Title": "Teradata", + "Description": "Teradata is a data warehousing and analytics tool that allows users to store, manage, and analyze large amounts of data in a scalable and cost-effective manner.", + "tags": { + "Platform Type": "BI Tool", + "Connection Type": "Pull", + "Features": "Stateful Ingestion, Column Level Lineage, UI Ingestion, Lower Casing, Status Aspect" + } + }, { "Path": "docs/generated/ingestion/sources/trino", "imgPath": "img/logos/platforms/trino.png", diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py index 0b1bde6ca8c0a..04f0edf504595 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py @@ -114,6 +114,10 @@ @capability(SourceCapability.DATA_PROFILING, "Optionally enabled via configuration") @capability(SourceCapability.DESCRIPTIONS, "Enabled by default") @capability(SourceCapability.LINEAGE_COARSE, "Optionally enabled via configuration") +@capability( + SourceCapability.LINEAGE_FINE, + "Optionally enabled via configuration (`mixed` or `sql_based` lineage needs to be enabled)", +) @capability( SourceCapability.USAGE_STATS, "Enabled by default, can be disabled via configuration `include_usage_statistics`", From 
c00ce518c2f6eccab73bf0e3598761362b7df8d0 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Thu, 30 Nov 2023 06:07:33 +0530 Subject: [PATCH 019/263] fix(ui): Complexity in chalk/ansi-regex and minimatch ReDoS Vulnerability solution (#9323) Co-authored-by: John Joyce --- datahub-web-react/package.json | 2 ++ datahub-web-react/yarn.lock | 52 ++++++---------------------------- 2 files changed, 10 insertions(+), 44 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index fd01fccbdff6c..0b889810a809a 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -150,6 +150,8 @@ "@ant-design/colors": "6.0.0", "refractor": "3.3.1", "json-schema": "0.4.0", + "ansi-regex": "3.0.1", + "minimatch": "3.0.5", "prismjs": "^1.27.0", "nth-check": "^2.0.1" } diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 3bab8aebdf3fb..9924c223c1b0a 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -5343,25 +5343,10 @@ ansi-html-community@^0.0.8: resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= - -ansi-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" - integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= - -ansi-regex@^5.0.0, ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-regex@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== +ansi-regex@3.0.1, ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" + integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== ansi-styles@^2.2.1: version "2.2.1" @@ -6011,13 +5996,6 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - braces@^2.3.1: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" @@ -11783,27 +11761,13 @@ minimalistic-assert@^1.0.0: resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.0.4: - 
version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== +minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^5.0.1: + version "3.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3" + integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw== dependencies: brace-expansion "^1.1.7" -minimatch@^5.0.1: - version "5.1.6" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" - integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== - dependencies: - brace-expansion "^2.0.1" - minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" From cb722533279d58d32cfdfd4fb5afe64c7e6552f7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 01:07:37 +0000 Subject: [PATCH 020/263] build(deps): bump tmpl from 1.0.4 to 1.0.5 in /datahub-web-react (#9345) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- datahub-web-react/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 9924c223c1b0a..e222209ead6bc 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -16016,9 +16016,9 @@ tmp@^0.0.33: os-tmpdir "~1.0.2" tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== to-fast-properties@^2.0.0: version "2.0.0" From c3499f8661c1a06cca1d165371db20d71aea4396 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 30 Nov 2023 07:37:50 +0530 Subject: [PATCH 021/263] fix(): Address @babel/traverse vulnerabilities (#9343) Co-authored-by: John Joyce --- datahub-web-react/package.json | 1 + datahub-web-react/yarn.lock | 129 +++++++++++++++++++++++++-------- 2 files changed, 98 insertions(+), 32 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 0b889810a809a..40bcad19284d9 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -150,6 +150,7 @@ "@ant-design/colors": "6.0.0", "refractor": "3.3.1", "json-schema": "0.4.0", + "@babel/traverse": ">=7.23.2", "ansi-regex": "3.0.1", "minimatch": "3.0.5", "prismjs": "^1.27.0", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 
e222209ead6bc..b755281d17697 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -191,6 +191,14 @@ dependencies: "@babel/highlight" "^7.22.5" +"@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" + integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA== + dependencies: + "@babel/highlight" "^7.23.4" + chalk "^2.4.2" + "@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" @@ -226,7 +234,7 @@ eslint-visitor-keys "^2.1.0" semver "^6.3.0" -"@babel/generator@^7.12.13", "@babel/generator@^7.14.0", "@babel/generator@^7.22.5", "@babel/generator@^7.7.2": +"@babel/generator@^7.14.0", "@babel/generator@^7.22.5", "@babel/generator@^7.7.2": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.5.tgz#1e7bf768688acfb05cf30b2369ef855e82d984f7" integrity sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA== @@ -236,6 +244,16 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" +"@babel/generator@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.5.tgz#17d0a1ea6b62f351d281350a5f80b87a810c4755" + integrity sha512-BPssCHrBD+0YrxviOa3QzpqwhNIXKEtOa2jQrm4FlmkC2apYgRnQcmPWiGZDlGxiNtltnUFolMe8497Esry+jA== + dependencies: + "@babel/types" "^7.23.5" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + "@babel/helper-annotate-as-pure@^7.0.0", "@babel/helper-annotate-as-pure@^7.18.6", "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" @@ -297,12 +315,17 @@ resolve "^1.14.2" semver "^6.1.2" +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + "@babel/helper-environment-visitor@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" integrity sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q== -"@babel/helper-function-name@^7.12.13", "@babel/helper-function-name@^7.22.5": +"@babel/helper-function-name@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz#ede300828905bb15e582c037162f99d5183af1be" integrity sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ== @@ -310,6 +333,14 @@ "@babel/template" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== + 
dependencies: + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" @@ -393,23 +424,40 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-split-export-declaration@^7.12.13", "@babel/helper-split-export-declaration@^7.22.5": +"@babel/helper-split-export-declaration@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz#88cf11050edb95ed08d596f7a044462189127a08" integrity sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ== dependencies: "@babel/types" "^7.22.5" +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + "@babel/helper-string-parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== +"@babel/helper-string-parser@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" + integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== + "@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-option@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" @@ -443,16 +491,30 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/highlight@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" + integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A== + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + "@babel/parser@7.12.16": version "7.12.16" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.16.tgz#cc31257419d2c3189d394081635703f549fc1ed4" integrity sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw== -"@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.12.13", "@babel/parser@^7.14.0", 
"@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": +"@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.14.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" + integrity sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ== + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" @@ -1322,6 +1384,15 @@ dependencies: regenerator-runtime "^0.13.11" +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + "@babel/template@^7.22.5", "@babel/template@^7.3.3", "@babel/template@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" @@ -1331,34 +1402,19 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.12.13.tgz#689f0e4b4c08587ad26622832632735fb8c4e0c0" - integrity sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA== - dependencies: - "@babel/code-frame" "^7.12.13" - "@babel/generator" "^7.12.13" - "@babel/helper-function-name" "^7.12.13" - "@babel/helper-split-export-declaration" "^7.12.13" - "@babel/parser" "^7.12.13" - "@babel/types" "^7.12.13" - debug "^4.1.0" - globals "^11.1.0" - lodash "^4.17.19" - -"@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.5.tgz#44bd276690db6f4940fdb84e1cb4abd2f729ccd1" - integrity sha512-7DuIjPgERaNo6r+PZwItpjCZEa5vyw4eJGufeLxrPdBXBoLcCJCIasvK6pK/9DVNrLZTLFhUGqaC6X/PA007TQ== +"@babel/traverse@7.12.13", "@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.5.tgz#f546bf9aba9ef2b042c0e00d245990c15508e7ec" + integrity sha512-czx7Xy5a6sapWWRx61m1Ke1Ra4vczu1mCTtJam5zRTBOonfdJ+S/B6HYmGYu3fJtr8GGET3si6IhgWVBhJ/m8w== dependencies: - "@babel/code-frame" "^7.22.5" - "@babel/generator" "^7.22.5" - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-function-name" "^7.22.5" + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.5" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" 
"^7.23.0" "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.5" - "@babel/parser" "^7.22.5" - "@babel/types" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.5" + "@babel/types" "^7.23.5" debug "^4.1.0" globals "^11.1.0" @@ -1371,7 +1427,7 @@ lodash "^4.17.19" to-fast-properties "^2.0.0" -"@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.13", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": +"@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" integrity sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA== @@ -1380,6 +1436,15 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" + integrity sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w== + dependencies: + "@babel/helper-string-parser" "^7.23.4" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@bcoe/v8-coverage@^0.2.3": version "0.2.3" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" @@ -6211,7 +6276,7 @@ chalk@^1.0.0, chalk@^1.1.3: strip-ansi "^3.0.0" supports-color "^2.0.0" -chalk@^2.0.0, chalk@^2.4.1: +chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== From efaf21d571262a58c0c3e624c523213be4310c43 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 03:51:44 -0500 Subject: [PATCH 022/263] docs(ingest/looker): mark platform instance as a supported capability (#9347) --- .../src/datahub/ingestion/source/looker/looker_config.py | 7 +++++-- .../src/datahub/ingestion/source/looker/looker_source.py | 2 +- .../src/datahub/ingestion/source/looker/lookml_source.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py index e6ddea9a30489..514f22b4f2158 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py @@ -9,7 +9,10 @@ from datahub.configuration import ConfigModel from datahub.configuration.common import AllowDenyPattern, ConfigurationError -from datahub.configuration.source_common import DatasetSourceConfigMixin, EnvConfigMixin +from datahub.configuration.source_common import ( + EnvConfigMixin, + PlatformInstanceConfigMixin, +) from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.looker.looker_lib_wrapper import LookerAPIConfig from datahub.ingestion.source.state.stale_entity_removal_handler import ( @@ -98,7 +101,7 @@ class 
LookerViewNamingPattern(NamingPattern): ] -class LookerCommonConfig(DatasetSourceConfigMixin): +class LookerCommonConfig(EnvConfigMixin, PlatformInstanceConfigMixin): explore_naming_pattern: LookerNamingPattern = pydantic.Field( description=f"Pattern for providing dataset names to explores. {LookerNamingPattern.allowed_docstring()}", default=LookerNamingPattern(pattern="{model}.explore.{name}"), diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index 4a98e8874bca0..7e8fbfde12042 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -99,7 +99,7 @@ @support_status(SupportStatus.CERTIFIED) @config_class(LookerDashboardSourceConfig) @capability(SourceCapability.DESCRIPTIONS, "Enabled by default") -@capability(SourceCapability.PLATFORM_INSTANCE, "Not supported", supported=False) +@capability(SourceCapability.PLATFORM_INSTANCE, "Use the `platform_instance` field") @capability( SourceCapability.OWNERSHIP, "Enabled by default, configured using `extract_owners`" ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 2bd469b3f9bcd..4e91d17feaa9f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -1455,7 +1455,7 @@ class LookerManifest: @support_status(SupportStatus.CERTIFIED) @capability( SourceCapability.PLATFORM_INSTANCE, - "Supported using the `connection_to_platform_map`", + "Use the `platform_instance` and `connection_to_platform_map` fields", ) @capability(SourceCapability.LINEAGE_COARSE, "Supported by default") @capability( From 65d5034a80d60f85f57a5157b730eda9d83c5516 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 30 Nov 2023 22:50:08 +0530 Subject: [PATCH 023/263] fix(): Address HIGH vulnerability with Axios (#9353) --- datahub-web-react/package.json | 2 +- datahub-web-react/yarn.lock | 116 ++++++++++++++++----------------- 2 files changed, 58 insertions(+), 60 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 40bcad19284d9..22c88f9647dc2 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -80,7 +80,7 @@ "reactour": "1.18.7", "remirror": "^2.0.23", "sinon": "^11.1.1", - "start-server-and-test": "1.12.2", + "start-server-and-test": "^2.0.3", "styled-components": "^5.2.1", "turndown-plugin-gfm": "^1.0.2", "typescript": "^4.8.4", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index b755281d17697..d33299a79b13a 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -3760,14 +3760,14 @@ resolved "https://registry.yarnpkg.com/@seznam/compose-react-refs/-/compose-react-refs-1.0.6.tgz#6ec4e70bdd6e32f8e70b4100f27267cf306bd8df" integrity sha512-izzOXQfeQLonzrIQb8u6LQ8dk+ymz3WXTIXjvOlTXHq6sbzROg3NWU+9TTAOpEoK9Bth24/6F/XrfHJ5yR5n6Q== -"@sideway/address@^4.1.0": - version "4.1.2" - resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.2.tgz#811b84333a335739d3969cfc434736268170cad1" - integrity sha512-idTz8ibqWFrPU8kMirL0CoPH/A29XOzzAzpyN3zQ4kAWnzmNfFmRaoMNN6VI8ske5M73HZyhIaW4OuSFIdM4oA== +"@sideway/address@^4.1.3": + version "4.1.4" + resolved 
"https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0" + integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== dependencies: "@hapi/hoek" "^9.0.0" -"@sideway/formula@^3.0.0": +"@sideway/formula@^3.0.1": version "3.0.1" resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f" integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== @@ -5712,12 +5712,14 @@ axe-core@^4.6.2: resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== -axios@^0.21.1: - version "0.21.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" - integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== +axios@^1.6.1: + version "1.6.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2" + integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A== dependencies: - follow-redirects "^1.14.0" + follow-redirects "^1.15.0" + form-data "^4.0.0" + proxy-from-env "^1.1.0" axobject-query@^3.1.1: version "3.2.1" @@ -7301,7 +7303,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0: dependencies: ms "2.0.0" -debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: +debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -7315,13 +7317,6 @@ debug@4.1.1: dependencies: ms "^2.1.1" -debug@4.3.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" - integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== - dependencies: - ms "2.1.2" - debug@^3.2.6, debug@^3.2.7: version "3.2.7" resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" @@ -8232,22 +8227,7 @@ events@^3.2.0: resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== -execa@5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/execa/-/execa-5.0.0.tgz#4029b0007998a841fbd1032e5f4de86a3c1e3376" - integrity sha512-ov6w/2LCiuyO4RLYGdpFGjkcs0wMTgGE8PrkTHikeUy5iJekXyPIKUjifk5CsE0pt7sMCrMZ3YNqoCj6idQOnQ== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -execa@^5.0.0: +execa@5.1.1, execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== @@ -8661,11 +8641,16 @@ focus-outline-manager@^1.0.2: resolved 
"https://registry.yarnpkg.com/focus-outline-manager/-/focus-outline-manager-1.0.2.tgz#7bf3658865341fb6b08d042a037b9d2868b119b5" integrity sha512-bHWEmjLsTjGP9gVs7P3Hyl+oY5NlMW8aTSPdTJ+X2GKt6glDctt9fUCLbRV+d/l8NDC40+FxMjp9WlTQXaQALw== -follow-redirects@^1.0.0, follow-redirects@^1.14.0: +follow-redirects@^1.0.0: version "1.15.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.0.tgz#06441868281c86d0dda4ad8bdaead2d02dca89d4" integrity sha512-aExlJShTV4qOUOL7yF1U5tvLCB0xQuudbf6toyYA0E/acBNw71mvjFTnLaRp50aQaYocMR0a/RMMBIHeZnGyjQ== +follow-redirects@^1.15.0: + version "1.15.3" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" + integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== + for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -8695,7 +8680,7 @@ fork-ts-checker-webpack-plugin@^6.5.0: semver "^7.3.2" tapable "^1.0.0" -form-data@4.0.0: +form-data@4.0.0, form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== @@ -10711,15 +10696,15 @@ jiti@^1.18.2: resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" integrity sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg== -joi@^17.3.0: - version "17.4.1" - resolved "https://registry.npmjs.org/joi/-/joi-17.4.1.tgz#15d2f23c8cbe4d1baded2dd190c58f8dbe11cca0" - integrity sha512-gDPOwQ5sr+BUxXuPDGrC1pSNcVR/yGGcTI0aCnjYxZEa3za60K/iCQ+OFIkEHWZGVCUcUlXlFKvMmrlmxrG6UQ== +joi@^17.11.0: + version "17.11.0" + resolved "https://registry.yarnpkg.com/joi/-/joi-17.11.0.tgz#aa9da753578ec7720e6f0ca2c7046996ed04fc1a" + integrity sha512-NgB+lZLNoqISVy1rZocE9PZI36bL/77ie924Ri43yEvi9GUUMPeyVIr8KdFTMUlby1p0PBYMk9spIxEUQYqrJQ== dependencies: "@hapi/hoek" "^9.0.0" "@hapi/topo" "^5.0.0" - "@sideway/address" "^4.1.0" - "@sideway/formula" "^3.0.0" + "@sideway/address" "^4.1.3" + "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" js-cookie@^2.2.1: @@ -11833,7 +11818,7 @@ minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@ dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== @@ -13486,6 +13471,11 @@ proxy-addr@~2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" @@ -14883,13 +14873,20 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^6.3.3, rxjs@^6.6.0, rxjs@^6.6.3: +rxjs@^6.3.3, rxjs@^6.6.0: version "6.6.7" resolved 
"https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== dependencies: tslib "^1.9.0" +rxjs@^7.8.1: + version "7.8.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== + dependencies: + tslib "^2.1.0" + safari-14-idb-fix@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/safari-14-idb-fix/-/safari-14-idb-fix-1.0.6.tgz#cbaabc33a4500c44b5c432d6c525b0ed9b68bb65" @@ -15495,18 +15492,19 @@ stacktrace-js@^2.0.2: stack-generator "^2.0.5" stacktrace-gps "^3.0.4" -start-server-and-test@1.12.2: - version "1.12.2" - resolved "https://registry.npmjs.org/start-server-and-test/-/start-server-and-test-1.12.2.tgz#13afe6f22d7347e0fd47a739cdd085786fced14b" - integrity sha512-rjJF8N/8XVukEYR44Ehm8LAZIDjWCQKXX54W8UQ8pXz3yDKPCdqTqJy7VYnCAknPw65cmLfPxz8M2+K/zCAvVQ== +start-server-and-test@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/start-server-and-test/-/start-server-and-test-2.0.3.tgz#15c53c85e23cba7698b498b8a2598cab95f3f802" + integrity sha512-QsVObjfjFZKJE6CS6bSKNwWZCKBG6975/jKRPPGFfFh+yOQglSeGXiNWjzgQNXdphcBI9nXbyso9tPfX4YAUhg== dependencies: + arg "^5.0.2" bluebird "3.7.2" check-more-types "2.24.0" - debug "4.3.1" - execa "5.0.0" + debug "4.3.4" + execa "5.1.1" lazy-ass "1.6.0" ps-tree "1.2.0" - wait-on "5.3.0" + wait-on "7.2.0" state-local@^1.0.6: version "1.0.7" @@ -16739,16 +16737,16 @@ w3c-xmlserializer@^2.0.0: dependencies: xml-name-validator "^3.0.0" -wait-on@5.3.0: - version "5.3.0" - resolved "https://registry.npmjs.org/wait-on/-/wait-on-5.3.0.tgz#584e17d4b3fe7b46ac2b9f8e5e102c005c2776c7" - integrity sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg== +wait-on@7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-7.2.0.tgz#d76b20ed3fc1e2bebc051fae5c1ff93be7892928" + integrity sha512-wCQcHkRazgjG5XoAq9jbTMLpNIjoSlZslrJ2+N9MxDsGEv1HnFoVjOCexL0ESva7Y9cu350j+DWADdk54s4AFQ== dependencies: - axios "^0.21.1" - joi "^17.3.0" + axios "^1.6.1" + joi "^17.11.0" lodash "^4.17.21" - minimist "^1.2.5" - rxjs "^6.6.3" + minimist "^1.2.8" + rxjs "^7.8.1" walker@^1.0.7: version "1.0.7" From ae1169d6d5831751b6d26d08052472d4adfdbf43 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 30 Nov 2023 22:53:28 +0530 Subject: [PATCH 024/263] fix(ui): show formatted total result count in Search (#9356) --- datahub-web-react/src/app/search/SearchResults.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/search/SearchResults.tsx b/datahub-web-react/src/app/search/SearchResults.tsx index b93e835970196..11bb494588753 100644 --- a/datahub-web-react/src/app/search/SearchResults.tsx +++ b/datahub-web-react/src/app/search/SearchResults.tsx @@ -28,6 +28,7 @@ import SearchSortSelect from './sorting/SearchSortSelect'; import { combineSiblingsInSearchResults } from './utils/combineSiblingsInSearchResults'; import SearchQuerySuggester from './suggestions/SearchQuerySugggester'; import { ANTD_GRAY_V2 } from '../entity/shared/constants'; +import { formatNumberWithoutAbbreviation } from '../shared/formatNumber'; const SearchResultsWrapper = styled.div<{ v2Styles: boolean }>` display: flex; @@ -210,7 +211,13 @@ export const 
SearchResults = ({ {lastResultIndex > 0 ? (page - 1) * pageSize + 1 : 0} - {lastResultIndex} {' '} - of {totalResults} results + of{' '} + + {totalResults >= 10000 + ? `${formatNumberWithoutAbbreviation(10000)}+` + : formatNumberWithoutAbbreviation(totalResults)} + {' '} + results From a7dc9c9d2292898d9668a3e39b0db42837397f94 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 18:11:36 -0500 Subject: [PATCH 025/263] feat(sdk): autogenerate urn types (#9257) --- docs-website/sphinx/apidocs/urns.rst | 7 + docs-website/sphinx/conf.py | 4 + docs-website/sphinx/index.rst | 1 + docs-website/sphinx/requirements.txt | 2 +- docs/how/updating-datahub.md | 52 ++- .../dataset_add_column_documentation.py | 14 +- .../library/dataset_add_column_tag.py | 14 +- .../library/dataset_add_column_term.py | 14 +- .../examples/library/upsert_group.py | 8 +- metadata-ingestion/scripts/avro_codegen.py | 407 +++++++++++++++++- .../scripts/custom_package_codegen.py | 1 + .../dataprocess/dataprocess_instance.py | 2 +- .../datahub/ingestion/source/csv_enricher.py | 5 +- .../source/metadata/business_glossary.py | 2 +- .../src/datahub/ingestion/source/tableau.py | 4 +- .../utilities/_custom_package_loader.py | 5 + .../src/datahub/utilities/docs_build.py | 9 + .../src/datahub/utilities/sqlglot_lineage.py | 14 +- .../src/datahub/utilities/urn_encoder.py | 4 + .../src/datahub/utilities/urns/_urn_base.py | 234 ++++++++++ .../datahub/utilities/urns/corp_group_urn.py | 41 +- .../datahub/utilities/urns/corpuser_urn.py | 41 +- .../datahub/utilities/urns/data_flow_urn.py | 89 +--- .../datahub/utilities/urns/data_job_urn.py | 52 +-- .../utilities/urns/data_platform_urn.py | 35 +- .../urns/data_process_instance_urn.py | 47 +- .../src/datahub/utilities/urns/dataset_urn.py | 113 +---- .../src/datahub/utilities/urns/domain_urn.py | 41 +- .../src/datahub/utilities/urns/error.py | 3 +- .../src/datahub/utilities/urns/field_paths.py | 15 + .../datahub/utilities/urns/notebook_urn.py | 47 +- .../src/datahub/utilities/urns/tag_urn.py | 41 +- .../src/datahub/utilities/urns/urn.py | 163 +------ .../src/datahub/utilities/urns/urn_iter.py | 10 +- .../state/test_checkpoint.py | 8 +- .../stateful_ingestion/test_kafka_state.py | 2 +- metadata-ingestion/tests/unit/test_urn.py | 45 -- .../unit/{ => urns}/test_corp_group_urn.py | 5 +- .../unit/{ => urns}/test_corpuser_urn.py | 5 +- .../unit/{ => urns}/test_data_flow_urn.py | 10 +- .../unit/{ => urns}/test_data_job_urn.py | 5 +- .../test_data_process_instance_urn.py | 9 +- .../tests/unit/{ => urns}/test_dataset_urn.py | 15 +- .../tests/unit/{ => urns}/test_domain_urn.py | 5 +- .../unit/{ => urns}/test_notebook_urn.py | 5 +- .../tests/unit/{ => urns}/test_tag_urn.py | 5 +- .../tests/unit/urns/test_urn.py | 56 +++ .../src/main/resources/entity-registry.yml | 4 +- 48 files changed, 856 insertions(+), 864 deletions(-) create mode 100644 docs-website/sphinx/apidocs/urns.rst create mode 100644 metadata-ingestion/src/datahub/utilities/docs_build.py create mode 100644 metadata-ingestion/src/datahub/utilities/urns/_urn_base.py create mode 100644 metadata-ingestion/src/datahub/utilities/urns/field_paths.py delete mode 100644 metadata-ingestion/tests/unit/test_urn.py rename metadata-ingestion/tests/unit/{ => urns}/test_corp_group_urn.py (87%) rename metadata-ingestion/tests/unit/{ => urns}/test_corpuser_urn.py (88%) rename metadata-ingestion/tests/unit/{ => urns}/test_data_flow_urn.py (77%) rename metadata-ingestion/tests/unit/{ => urns}/test_data_job_urn.py (90%) rename 
metadata-ingestion/tests/unit/{ => urns}/test_data_process_instance_urn.py (90%) rename metadata-ingestion/tests/unit/{ => urns}/test_dataset_urn.py (81%) rename metadata-ingestion/tests/unit/{ => urns}/test_domain_urn.py (87%) rename metadata-ingestion/tests/unit/{ => urns}/test_notebook_urn.py (86%) rename metadata-ingestion/tests/unit/{ => urns}/test_tag_urn.py (87%) create mode 100644 metadata-ingestion/tests/unit/urns/test_urn.py diff --git a/docs-website/sphinx/apidocs/urns.rst b/docs-website/sphinx/apidocs/urns.rst new file mode 100644 index 0000000000000..2bd70deb22c7e --- /dev/null +++ b/docs-website/sphinx/apidocs/urns.rst @@ -0,0 +1,7 @@ +URNs +====== + +.. automodule:: datahub.metadata.urns + :exclude-members: LI_DOMAIN, URN_PREFIX, url_encode, validate, get_type, get_entity_id, get_entity_id_as_string, get_domain, underlying_key_aspect_type + :member-order: alphabetical + :inherited-members: diff --git a/docs-website/sphinx/conf.py b/docs-website/sphinx/conf.py index 3f118aadeea81..49cd20d5ef44d 100644 --- a/docs-website/sphinx/conf.py +++ b/docs-website/sphinx/conf.py @@ -3,6 +3,10 @@ # For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html +# See https://stackoverflow.com/a/65147676 +import builtins + +builtins.__sphinx_build__ = True # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information diff --git a/docs-website/sphinx/index.rst b/docs-website/sphinx/index.rst index fe11648dff555..18d92f4053b94 100644 --- a/docs-website/sphinx/index.rst +++ b/docs-website/sphinx/index.rst @@ -14,6 +14,7 @@ Welcome to DataHub Python SDK's documentation! apidocs/builder apidocs/clients apidocs/models + apidocs/urns Indices and tables diff --git a/docs-website/sphinx/requirements.txt b/docs-website/sphinx/requirements.txt index 94ddd40579f0e..2e064330138d9 100644 --- a/docs-website/sphinx/requirements.txt +++ b/docs-website/sphinx/requirements.txt @@ -1,4 +1,4 @@ --e ../../metadata-ingestion[datahub-rest,sql-parsing] +-e ../../metadata-ingestion[datahub-rest,sql-parser] beautifulsoup4==4.11.2 Sphinx==6.1.3 sphinx-click==4.4.0 diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 3263a9f7c15fb..dad05fd0153f2 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -9,6 +9,9 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources. - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. It has been deprecated since v0.9.6. - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the direct constructor, e.g. `TagUrn("tag", ["tag_name"])`, is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead (see the sketch after this diff).
+ The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. + ### Potential Downtime ### Deprecations @@ -23,18 +26,19 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - #9044 - GraphQL APIs for adding ownership now expect either an `ownershipTypeUrn` referencing a customer ownership type or a (deprecated) `type`. Where before adding an ownership without a concrete type was allowed, this is no longer the case. For simplicity you can use the `type` parameter which will get translated to a custom ownership type internally if one exists for the type being added. - #9010 - In Redshift source's config `incremental_lineage` is set default to off. - #8810 - Removed support for SQLAlchemy 1.3.x. Only SQLAlchemy 1.4.x is supported now. -- #8942 - Removed `urn:li:corpuser:datahub` owner for the `Measure`, `Dimension` and `Temporal` tags emitted +- #8942 - Removed `urn:li:corpuser:datahub` owner for the `Measure`, `Dimension` and `Temporal` tags emitted by Looker and LookML source connectors. - #8853 - The Airflow plugin no longer supports Airflow 2.0.x or Python 3.7. See the docs for more details. - #8853 - Introduced the Airflow plugin v2. If you're using Airflow 2.3+, the v2 plugin will be enabled by default, and so you'll need to switch your requirements to include `pip install 'acryl-datahub-airflow-plugin[plugin-v2]'`. To continue using the v1 plugin, set the `DATAHUB_AIRFLOW_PLUGIN_USE_V1_PLUGIN` environment variable to `true`. - #8943 - The Unity Catalog ingestion source has a new option `include_metastore`, which will cause all urns to be changed when disabled. -This is currently enabled by default to preserve compatibility, but will be disabled by default and then removed in the future. -If stateful ingestion is enabled, simply setting `include_metastore: false` will perform all required cleanup. -Otherwise, we recommend soft deleting all databricks data via the DataHub CLI: -`datahub delete --platform databricks --soft` and then reingesting with `include_metastore: false`. + This is currently enabled by default to preserve compatibility, but will be disabled by default and then removed in the future. + If stateful ingestion is enabled, simply setting `include_metastore: false` will perform all required cleanup. + Otherwise, we recommend soft deleting all databricks data via the DataHub CLI: + `datahub delete --platform databricks --soft` and then reingesting with `include_metastore: false`. - #8846 - Changed enum values in resource filters used by policies. `RESOURCE_TYPE` became `TYPE` and `RESOURCE_URN` became `URN`. -Any existing policies using these filters (i.e. defined for particular `urns` or `types` such as `dataset`) need to be upgraded -manually, for example by retrieving their respective `dataHubPolicyInfo` aspect and changing part using filter i.e. + Any existing policies using these filters (i.e. defined for particular `urns` or `types` such as `dataset`) need to be upgraded + manually, for example by retrieving their respective `dataHubPolicyInfo` aspect and changing part using filter i.e. + ```yaml "resources": { "filter": { @@ -49,7 +53,9 @@ manually, for example by retrieving their respective `dataHubPolicyInfo` aspect ] } ``` + into + ```yaml "resources": { "filter": { @@ -64,22 +70,25 @@ into ] } ``` + for example, using `datahub put` command. 
Policies can be also removed and re-created via UI. + - #9077 - The BigQuery ingestion source by default sets `match_fully_qualified_names: true`. -This means that any `dataset_pattern` or `schema_pattern` specified will be matched on the fully -qualified dataset name, i.e. `.`. We attempt to support the old -pattern format by prepending `.*\\.` to dataset patterns lacking a period, so in most cases this -should not cause any issues. However, if you have a complex dataset pattern, we recommend you -manually convert it to the fully qualified format to avoid any potential issues. + This means that any `dataset_pattern` or `schema_pattern` specified will be matched on the fully + qualified dataset name, i.e. `.`. We attempt to support the old + pattern format by prepending `.*\\.` to dataset patterns lacking a period, so in most cases this + should not cause any issues. However, if you have a complex dataset pattern, we recommend you + manually convert it to the fully qualified format to avoid any potential issues. - #9110 - The Unity Catalog source will now generate urns based on `env` properly. If you have -been setting `env` in your recipe to something besides `PROD`, we will now generate urns -with that new env variable, invalidating your existing urns. + been setting `env` in your recipe to something besides `PROD`, we will now generate urns + with that new env variable, invalidating your existing urns. ### Potential Downtime ### Deprecations ### Other Notable Changes + - Session token configuration has changed, all previously created session tokens will be invalid and users will be prompted to log in. Expiration time has also been shortened which may result in more login prompts with the default settings. There should be no other interruption due to this change. @@ -88,13 +97,16 @@ with that new env variable, invalidating your existing urns. ### Breaking Changes ### Potential Downtime + - #8611 Search improvements requires reindexing indices. A `system-update` job will run which will set indices to read-only and create a backup/clone of each index. During the reindexing new components will be prevented from start-up until the reindex completes. The logs of this job will indicate a % complete per index. Depending on index sizes and infrastructure this process can take 5 minutes to hours however as a rough estimate 1 hour for every 2.3 million entities. ### Deprecations + - #8525: In LDAP ingestor, the `manager_pagination_enabled` changed to general `pagination_enabled` - MAE Events are no longer produced. MAE events have been deprecated for over a year. ### Other Notable Changes + - In this release we now enable you to create and delete pinned announcements on your DataHub homepage! If you have the “Manage Home Page Posts” platform privilege you’ll see a new section in settings called “Home Page Posts” where you can create and delete text posts and link posts that your users see on the home page. - The new search and browse experience, which was first made available in the previous release behind a feature flag, is now on by default. Check out our release notes for v0.10.5 to get more information and documentation on this new Browse experience. - In addition to the ranking changes mentioned above, this release includes changes to the highlighting of search entities to understand why they match your query. You can also sort your results alphabetically or by last updated times, in addition to relevance. In this release, we suggest a correction if your query has a typo in it. 
@@ -121,12 +133,13 @@ with that new env variable, invalidating your existing urns. This determines which Okta profile attribute is used for the corresponding DataHub user and thus may change what DataHub users are generated by the Okta source. And in a follow up `okta_profile_to_username_regex` has been set to `.*` which taken together with previous change brings the defaults in line with OIDC. - #8331: For all sql-based sources that support profiling, you can no longer specify -`profile_table_level_only` together with `include_field_xyz` config options to ingest -certain column-level metrics. Instead, set `profile_table_level_only` to `false` and -individually enable / disable desired field metrics. + `profile_table_level_only` together with `include_field_xyz` config options to ingest + certain column-level metrics. Instead, set `profile_table_level_only` to `false` and + individually enable / disable desired field metrics. - #8451: The `bigquery-beta` and `snowflake-beta` source aliases have been dropped. Use `bigquery` and `snowflake` as the source type instead. - #8472: Ingestion runs created with Pipeline.create will show up in the DataHub ingestion tab as CLI-based runs. To revert to the previous behavior of not showing these runs in DataHub, pass `no_default_report=True`. -- #8513: `snowflake` connector will use user's `email` attribute as is in urn. To revert to previous behavior disable `email_as_user_identifier` in recipe. +- #8513: `snowflake` connector will use user's `email` attribute as is in urn. To revert to previous behavior disable `email_as_user_identifier` in recipe. + ### Potential Downtime - BrowsePathsV2 upgrade will now be handled by the `system-update` job in non-blocking mode. This process generates data needed for the new search @@ -153,9 +166,11 @@ individually enable / disable desired field metrics. ### Potential Downtime ### Deprecations + - #8045: With the introduction of custom ownership types, the `Owner` aspect has been updated where the `type` field is deprecated in favor of a new field `typeUrn`. This latter field is an urn reference to the new OwnershipType entity. GraphQL endpoints have been updated to use the new field. For pre-existing ownership aspect records, DataHub now has logic to map the old field to the new field. ### Other notable Changes + - #8191: Updates GMS's health check endpoint to account for its dependency on external components. Notably, at this time, elasticsearch. This means that DataHub operators can now use GMS health status more reliably. ## 0.10.3 @@ -170,6 +185,7 @@ individually enable / disable desired field metrics. ### Potential Downtime ### Deprecations + - The signature of `Source.get_workunits()` is changed from `Iterable[WorkUnit]` to the more restrictive `Iterable[MetadataWorkUnit]`. - Legacy usage creation via the `UsageAggregation` aspect, `/usageStats?action=batchIngest` GMS endpoint, and `UsageStatsWorkUnit` metadata-ingestion class are all deprecated. 
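To make the #9257 urn note in the diff above concrete, here is a minimal sketch of the new constructor and parsing interface. It assumes only the `datahub.metadata.urns` import path and the behavior described in this patch; the attribute names `platform`, `name`, and `env` are taken from the codegen templates later in this patch.

```python
# Minimal sketch of the autogenerated urn interface from #9257 (assumed behavior).
from datahub.metadata.urns import DatasetUrn, TagUrn

# Old style, no longer supported: TagUrn("tag", ["tag_name"]).
# New style: pass the entity id parts directly to the constructor.
tag = TagUrn("my_tag")
assert str(tag) == "urn:li:tag:my_tag"

# Bare platform names are coerced into full data platform urns on construction,
# and urns can be parsed back from their string form.
dataset = DatasetUrn(platform="hive", name="db.table", env="PROD")
assert dataset.platform == "urn:li:dataPlatform:hive"
assert dataset == DatasetUrn.from_string(
    "urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)"
)
```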
diff --git a/metadata-ingestion/examples/library/dataset_add_column_documentation.py b/metadata-ingestion/examples/library/dataset_add_column_documentation.py index a6dbf58c09c81..bf871e2dcdb8e 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_documentation.py +++ b/metadata-ingestion/examples/library/dataset_add_column_documentation.py @@ -14,24 +14,12 @@ EditableSchemaMetadataClass, InstitutionalMemoryClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> owner, ownership_type, dataset documentation_to_add = ( "Name of the user who was deleted. This description is updated via PythonSDK." diff --git a/metadata-ingestion/examples/library/dataset_add_column_tag.py b/metadata-ingestion/examples/library/dataset_add_column_tag.py index ede1809c7bad9..94204bc39b874 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_tag.py +++ b/metadata-ingestion/examples/library/dataset_add_column_tag.py @@ -15,24 +15,12 @@ GlobalTagsClass, TagAssociationClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> the column, dataset and the tag to set column = "user_name" dataset_urn = make_dataset_urn(platform="hive", name="fct_users_created", env="PROD") diff --git a/metadata-ingestion/examples/library/dataset_add_column_term.py b/metadata-ingestion/examples/library/dataset_add_column_term.py index 115517bcfa06e..9796fa9d5404c 100644 --- a/metadata-ingestion/examples/library/dataset_add_column_term.py +++ b/metadata-ingestion/examples/library/dataset_add_column_term.py @@ -15,24 +15,12 @@ GlossaryTermAssociationClass, GlossaryTermsClass, ) +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - """A helper function to extract simple . 
path notation from the v2 field path""" - if not field_path.startswith("[version=2.0]"): - # not a v2, we assume this is a simple path - return field_path - # this is a v2 field path - tokens = [ - t for t in field_path.split(".") if not (t.startswith("[") or t.endswith("]")) - ] - - return ".".join(tokens) - - # Inputs -> the column, dataset and the term to set column = "address.zipcode" dataset_urn = make_dataset_urn(platform="hive", name="realestate_db.sales", env="PROD") diff --git a/metadata-ingestion/examples/library/upsert_group.py b/metadata-ingestion/examples/library/upsert_group.py index 86a03b72c1289..84844e142f46c 100644 --- a/metadata-ingestion/examples/library/upsert_group.py +++ b/metadata-ingestion/examples/library/upsert_group.py @@ -5,7 +5,7 @@ CorpGroupGenerationConfig, ) from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig -from datahub.utilities.urns.corpuser_urn import CorpuserUrn +from datahub.metadata.urns import CorpUserUrn log = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) @@ -13,10 +13,10 @@ group_email = "foogroup@acryl.io" group = CorpGroup( id=group_email, - owners=[str(CorpuserUrn.create_from_id("datahub"))], + owners=[str(CorpUserUrn("datahub"))], members=[ - str(CorpuserUrn.create_from_id("bar@acryl.io")), - str(CorpuserUrn.create_from_id("joe@acryl.io")), + str(CorpUserUrn("bar@acryl.io")), + str(CorpUserUrn("joe@acryl.io")), ], display_name="Foo Group", email=group_email, diff --git a/metadata-ingestion/scripts/avro_codegen.py b/metadata-ingestion/scripts/avro_codegen.py index de8836559217b..c6f6bac128b79 100644 --- a/metadata-ingestion/scripts/avro_codegen.py +++ b/metadata-ingestion/scripts/avro_codegen.py @@ -1,6 +1,8 @@ import collections +import copy import json import re +import textwrap from pathlib import Path from typing import Dict, Iterable, List, Optional, Tuple, Union @@ -115,11 +117,20 @@ def patch_schema(schema: dict, urn_arrays: Dict[str, List[Tuple[str, str]]]) -> # Patch normal urn types. field: avro.schema.Field for field in nested.fields: - java_class: Optional[str] = field.props.get("java", {}).get("class") + field_props: dict = field.props # type: ignore + java_props: dict = field_props.get("java", {}) + java_class: Optional[str] = java_props.get("class") if java_class and java_class.startswith( "com.linkedin.pegasus2avro.common.urn." ): - field.set_prop("Urn", java_class.split(".")[-1]) + type = java_class.split(".")[-1] + entity_types = field_props.get("Relationship", {}).get( + "entityTypes", [] + ) + + field.set_prop("Urn", type) + if entity_types: + field.set_prop("entityTypes", entity_types) # Patch array urn types. if nested.name in urn_arrays: @@ -130,7 +141,7 @@ def patch_schema(schema: dict, urn_arrays: Dict[str, List[Tuple[str, str]]]) -> field.set_prop("Urn", type) field.set_prop("urn_is_array", True) - return patched.to_json() + return patched.to_json() # type: ignore def merge_schemas(schemas_obj: List[dict]) -> str: @@ -141,6 +152,7 @@ def merge_schemas(schemas_obj: List[dict]) -> str: class NamesWithDups(avro.schema.Names): def add_name(self, name_attr, space_attr, new_schema): to_add = avro.schema.Name(name_attr, space_attr, self.default_namespace) + assert to_add.fullname self.names[to_add.fullname] = new_schema return to_add @@ -228,7 +240,6 @@ def make_load_schema_methods(schemas: Iterable[str]) -> str: def save_raw_schemas(schema_save_dir: Path, schemas: Dict[str, dict]) -> None: # Save raw avsc files. 
- schema_save_dir.mkdir() for name, schema in schemas.items(): (schema_save_dir / f"{name}.avsc").write_text(json.dumps(schema, indent=2)) @@ -333,6 +344,342 @@ class AspectBag(TypedDict, total=False): schema_class_file.write_text("\n".join(schema_classes_lines)) +def write_urn_classes(key_aspects: List[dict], urn_dir: Path) -> None: + urn_dir.mkdir() + + (urn_dir / "__init__.py").write_text("\n# This file is intentionally left empty.") + + code = """ +# This file contains classes corresponding to entity URNs. + +from typing import ClassVar, List, Optional, Type, TYPE_CHECKING + +import functools +from deprecated.sphinx import deprecated as _sphinx_deprecated + +from datahub.utilities.urn_encoder import UrnEncoder +from datahub.utilities.urns._urn_base import _SpecificUrn, Urn +from datahub.utilities.urns.error import InvalidUrnError + +deprecated = functools.partial(_sphinx_deprecated, version="0.12.0.2") +""" + + for aspect in key_aspects: + entity_type = aspect["Aspect"]["keyForEntity"] + if aspect["Aspect"]["entityCategory"] == "internal": + continue + + code += generate_urn_class(entity_type, aspect) + + (urn_dir / "urn_defs.py").write_text(code) + + +def capitalize_entity_name(entity_name: str) -> str: + # Examples: + # corpuser -> CorpUser + # corpGroup -> CorpGroup + # mlModelDeployment -> MlModelDeployment + + if entity_name == "corpuser": + return "CorpUser" + + return f"{entity_name[0].upper()}{entity_name[1:]}" + + +def python_type(avro_type: str) -> str: + if avro_type == "string": + return "str" + elif ( + isinstance(avro_type, dict) + and avro_type.get("type") == "enum" + and avro_type.get("name") == "FabricType" + ): + # TODO: make this stricter using an enum + return "str" + raise ValueError(f"unknown type {avro_type}") + + +def field_type(field: dict) -> str: + return python_type(field["type"]) + + +def field_name(field: dict) -> str: + manual_mapping = { + "origin": "env", + "platformName": "platform_name", + } + + name: str = field["name"] + if name in manual_mapping: + return manual_mapping[name] + + # If the name is mixed case, convert to snake case. + if name.lower() != name: + # Inject an underscore before each capital letter, and then convert to lowercase. + return re.sub(r"(? 
"{class_name}": + return cls(id) +""" +_extra_urn_methods: Dict[str, List[str]] = { + "corpGroup": [_create_from_id.format(class_name="CorpGroupUrn")], + "corpuser": [_create_from_id.format(class_name="CorpUserUrn")], + "dataFlow": [ + """ +@classmethod +def create_from_ids( + cls, + orchestrator: str, + flow_id: str, + env: str, + platform_instance: Optional[str] = None, +) -> "DataFlowUrn": + return cls( + orchestrator=orchestrator, + flow_id=f"{platform_instance}.{flow_id}" if platform_instance else flow_id, + cluster=env, + ) + +@deprecated(reason="Use .orchestrator instead") +def get_orchestrator_name(self) -> str: + return self.orchestrator + +@deprecated(reason="Use .flow_id instead") +def get_flow_id(self) -> str: + return self.flow_id + +@deprecated(reason="Use .cluster instead") +def get_env(self) -> str: + return self.cluster +""", + ], + "dataJob": [ + """ +@classmethod +def create_from_ids(cls, data_flow_urn: str, job_id: str) -> "DataJobUrn": + return cls(data_flow_urn, job_id) + +def get_data_flow_urn(self) -> "DataFlowUrn": + return DataFlowUrn.from_string(self.flow) + +@deprecated(reason="Use .job_id instead") +def get_job_id(self) -> str: + return self.job_id +""" + ], + "dataPlatform": [_create_from_id.format(class_name="DataPlatformUrn")], + "dataProcessInstance": [ + _create_from_id.format(class_name="DataProcessInstanceUrn"), + """ +@deprecated(reason="Use .id instead") +def get_dataprocessinstance_id(self) -> str: + return self.id +""", + ], + "dataset": [ + """ +@classmethod +def create_from_ids( + cls, + platform_id: str, + table_name: str, + env: str, + platform_instance: Optional[str] = None, +) -> "DatasetUrn": + return DatasetUrn( + platform=platform_id, + name=f"{platform_instance}.{table_name}" if platform_instance else table_name, + env=env, + ) + +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path as _get_simple_field_path_from_v2_field_path + +get_simple_field_path_from_v2_field_path = staticmethod(deprecated(reason='Use the function from the field_paths module instead')(_get_simple_field_path_from_v2_field_path)) + +def get_data_platform_urn(self) -> "DataPlatformUrn": + return DataPlatformUrn.from_string(self.platform) + +@deprecated(reason="Use .name instead") +def get_dataset_name(self) -> str: + return self.name + +@deprecated(reason="Use .env instead") +def get_env(self) -> str: + return self.env +""" + ], + "domain": [_create_from_id.format(class_name="DomainUrn")], + "notebook": [ + """ +@deprecated(reason="Use .notebook_tool instead") +def get_platform_id(self) -> str: + return self.notebook_tool + +@deprecated(reason="Use .notebook_id instead") +def get_notebook_id(self) -> str: + return self.notebook_id +""" + ], + "tag": [_create_from_id.format(class_name="TagUrn")], +} + + +def generate_urn_class(entity_type: str, key_aspect: dict) -> str: + """Generate a class definition for this entity. + + The class definition has the following structure: + - A class attribute ENTITY_TYPE, which is the entity type string. + - A class attribute URN_PARTS, which is the number of parts in the URN. + - A constructor that takes the URN parts as arguments. The field names + will match the key aspect's field names. It will also have a _allow_coercion + flag, which will allow for some normalization (e.g. upper case env). + Then, each part will be validated (including nested calls for urn subparts). + - Utilities for converting to/from the key aspect. 
+ - Any additional methods that are required for this entity type, defined above. + These are primarily for backwards compatibility. + - Getter methods for each field. + """ + + class_name = f"{capitalize_entity_name(entity_type)}Urn" + + fields = copy.deepcopy(key_aspect["fields"]) + if entity_type == "container": + # The annotations say guid is optional, but it is required. + # This is a quick fix of the annotations. + assert field_name(fields[0]) == "guid" + assert fields[0]["type"] == ["null", "string"] + fields[0]["type"] = "string" + + _init_arg_parts: List[str] = [] + for field in fields: + default = '"PROD"' if field_name(field) == "env" else None + _arg_part = f"{field_name(field)}: {field_type(field)}" + if default: + _arg_part += f" = {default}" + _init_arg_parts.append(_arg_part) + init_args = ", ".join(_init_arg_parts) + + super_init_args = ", ".join(field_name(field) for field in fields) + + arg_count = len(fields) + parse_ids_mapping = ", ".join( + f"{field_name(field)}=entity_ids[{i}]" for i, field in enumerate(fields) + ) + + key_aspect_class = f"{key_aspect['name']}Class" + to_key_aspect_args = ", ".join( + # The LHS bypasses any field name aliases. + f"{field['name']}=self.{field_name(field)}" + for field in fields + ) + from_key_aspect_args = ", ".join( + f"{field_name(field)}=key_aspect.{field['name']}" for field in fields + ) + + init_coercion = "" + init_validation = "" + for field in fields: + init_validation += f'if not {field_name(field)}:\n raise InvalidUrnError("{field_name(field)} cannot be empty")\n' + + # Generalized mechanism for validating embedded urns. + field_urn_type_class = None + if field_name(field) == "platform": + field_urn_type_class = "DataPlatformUrn" + elif field.get("Urn"): + if len(field.get("entityTypes", [])) == 1: + field_entity_type = field["entityTypes"][0] + field_urn_type_class = f"{capitalize_entity_name(field_entity_type)}Urn" + else: + field_urn_type_class = "Urn" + + if field_urn_type_class: + init_validation += f"{field_name(field)} = str({field_name(field)})\n" + init_validation += ( + f"assert {field_urn_type_class}.from_string({field_name(field)})\n" + ) + else: + init_validation += ( + f"assert not UrnEncoder.contains_reserved_char({field_name(field)})\n" + ) + + if field_name(field) == "env": + init_coercion += "env = env.upper()\n" + # TODO add ALL_ENV_TYPES validation + elif entity_type == "dataPlatform" and field_name(field) == "platform_name": + init_coercion += 'if platform_name.startswith("urn:li:dataPlatform:"):\n' + init_coercion += " platform_name = DataPlatformUrn.from_string(platform_name).platform_name\n" + + if field_name(field) == "platform": + init_coercion += "platform = DataPlatformUrn(platform).urn()\n" + elif field_urn_type_class is None: + # For all non-urns, run the value through the UrnEncoder. + init_coercion += ( + f"{field_name(field)} = UrnEncoder.encode_string({field_name(field)})\n" + ) + if not init_coercion: + init_coercion = "pass" + + # TODO include the docs for each field + + code = f""" +if TYPE_CHECKING: + from datahub.metadata.schema_classes import {key_aspect_class} + +class {class_name}(_SpecificUrn): + ENTITY_TYPE: ClassVar[str] = "{entity_type}" + URN_PARTS: ClassVar[int] = {arg_count} + + def __init__(self, {init_args}, *, _allow_coercion: bool = True) -> None: + if _allow_coercion: + # Field coercion logic (if any is required). +{textwrap.indent(init_coercion.strip(), prefix=" "*4*3)} + + # Validation logic. 
+{textwrap.indent(init_validation.strip(), prefix=" "*4*2)} + + super().__init__(self.ENTITY_TYPE, [{super_init_args}]) + + @classmethod + def _parse_ids(cls, entity_ids: List[str]) -> "{class_name}": + if len(entity_ids) != cls.URN_PARTS: + raise InvalidUrnError(f"{class_name} should have {{cls.URN_PARTS}} parts, got {{len(entity_ids)}}: {{entity_ids}}") + return cls({parse_ids_mapping}, _allow_coercion=False) + + @classmethod + def underlying_key_aspect_type(cls) -> Type["{key_aspect_class}"]: + from datahub.metadata.schema_classes import {key_aspect_class} + + return {key_aspect_class} + + def to_key_aspect(self) -> "{key_aspect_class}": + from datahub.metadata.schema_classes import {key_aspect_class} + + return {key_aspect_class}({to_key_aspect_args}) + + @classmethod + def from_key_aspect(cls, key_aspect: "{key_aspect_class}") -> "{class_name}": + return cls({from_key_aspect_args}) +""" + + for extra_method in _extra_urn_methods.get(entity_type, []): + code += textwrap.indent(extra_method, prefix=" " * 4) + + for i, field in enumerate(fields): + code += f""" + @property + def {field_name(field)}(self) -> {field_type(field)}: + return self.entity_ids[{i}] +""" + + return code + + @click.command() @click.argument( "entity_registry", type=click.Path(exists=True, dir_okay=False), required=True @@ -367,6 +714,7 @@ def generate( if schema.get("Aspect") } + # Copy entity registry info into the corresponding key aspect. for entity in entities: # This implicitly requires that all keyAspects are resolvable. aspect = aspects[entity.keyAspect] @@ -428,6 +776,7 @@ def generate( import importlib from typing import TYPE_CHECKING +from datahub.utilities.docs_build import IS_SPHINX_BUILD from datahub.utilities._custom_package_loader import get_custom_models_package _custom_package_path = get_custom_models_package() @@ -437,16 +786,64 @@ def generate( # Required explicitly because __all__ doesn't include _ prefixed names. from ._schema_classes import _Aspect, __SCHEMA_TYPES + + if IS_SPHINX_BUILD: + # Set __module__ to the current module so that Sphinx will document the + # classes as belonging to this module instead of the custom package. + for _cls in list(globals().values()): + if hasattr(_cls, "__module__") and "datahub.metadata._schema_classes" in _cls.__module__: + _cls.__module__ = __name__ else: _custom_package = importlib.import_module(_custom_package_path) globals().update(_custom_package.__dict__) +""" + ) + + (Path(outdir) / "urns.py").write_text( + """ +# This is a specialized shim layer that allows us to dynamically load custom URN types from elsewhere. + +import importlib +from typing import TYPE_CHECKING + +from datahub.utilities.docs_build import IS_SPHINX_BUILD +from datahub.utilities._custom_package_loader import get_custom_urns_package +from datahub.utilities.urns._urn_base import Urn # noqa: F401 +_custom_package_path = get_custom_urns_package() + +if TYPE_CHECKING or not _custom_package_path: + from ._urns.urn_defs import * # noqa: F401 + + if IS_SPHINX_BUILD: + # Set __module__ to the current module so that Sphinx will document the + # classes as belonging to this module instead of the custom package. + for _cls in list(globals().values()): + if hasattr(_cls, "__module__") and ("datahub.metadata._urns.urn_defs" in _cls.__module__ or _cls is Urn): + _cls.__module__ = __name__ +else: + _custom_package = importlib.import_module(_custom_package_path) + globals().update(_custom_package.__dict__) """ ) + # Generate URN classes. 
+ urn_dir = Path(outdir) / "_urns" + write_urn_classes( + [aspect for aspect in aspects.values() if aspect["Aspect"].get("keyForEntity")], + urn_dir, + ) + + # Save raw schema files in codegen as well. + schema_save_dir = Path(outdir) / "schemas" + schema_save_dir.mkdir() + for schema_out_file, schema in schemas.items(): + (schema_save_dir / f"{schema_out_file}.avsc").write_text( + json.dumps(schema, indent=2) + ) + # Keep a copy of a few raw avsc files. required_avsc_schemas = {"MetadataChangeEvent", "MetadataChangeProposal"} - schema_save_dir = Path(outdir) / "schemas" save_raw_schemas( schema_save_dir, { diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index 4a674550d49df..a5883c9ae9020 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -90,6 +90,7 @@ def generate( entry_points={{ "datahub.custom_packages": [ "models={python_package_name}.models.schema_classes", + "urns={python_package_name}.models._urns.urn_defs", ], }}, ) diff --git a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py index 2f07e4a112f93..6a2f733dcf8f7 100644 --- a/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py +++ b/metadata-ingestion/src/datahub/api/entities/dataprocess/dataprocess_instance.py @@ -75,7 +75,7 @@ class DataProcessInstance: def __post_init__(self): self.urn = DataProcessInstanceUrn.create_from_id( - dataprocessinstance_id=DataProcessInstanceKey( + id=DataProcessInstanceKey( cluster=self.cluster, orchestrator=self.orchestrator, id=self.id, diff --git a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py index 611f0c5c52cc6..a2db8ceb4a89a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py +++ b/metadata-ingestion/src/datahub/ingestion/source/csv_enricher.py @@ -45,6 +45,7 @@ TagAssociationClass, ) from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path from datahub.utilities.urns.urn import Urn, guess_entity_type DATASET_ENTITY_TYPE = DatasetUrn.ENTITY_TYPE @@ -436,9 +437,7 @@ def process_sub_resource_row( field_match = False for field_info in current_editable_schema_metadata.editableSchemaFieldInfo: if ( - DatasetUrn.get_simple_field_path_from_v2_field_path( - field_info.fieldPath - ) + get_simple_field_path_from_v2_field_path(field_info.fieldPath) == field_path ): # we have some editable schema metadata for this field diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index b5d9d96354fc5..97877df63707f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -113,7 +113,7 @@ def create_id(path: List[str], default_id: Optional[str], enable_auto_id: bool) id_: str = ".".join(path) - if UrnEncoder.contains_reserved_char(id_): + if UrnEncoder.contains_extended_reserved_char(id_): enable_auto_id = True if enable_auto_id: diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index 08df7599510f4..da44d09121c6c 100644 
--- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -1086,9 +1086,7 @@ def get_upstream_columns_of_fields_in_datasource( def is_snowflake_urn(self, urn: str) -> bool: return ( - DatasetUrn.create_from_string(urn) - .get_data_platform_urn() - .get_platform_name() + DatasetUrn.create_from_string(urn).get_data_platform_urn().platform_name == "snowflake" ) diff --git a/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py b/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py index 1b66258557406..bb029db3b65b7 100644 --- a/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py +++ b/metadata-ingestion/src/datahub/utilities/_custom_package_loader.py @@ -10,6 +10,7 @@ _CUSTOM_PACKAGE_GROUP_KEY = "datahub.custom_packages" _MODELS_KEY = "models" +_URNS_KEY = "urns" class CustomPackageException(Exception): @@ -41,3 +42,7 @@ def _get_custom_package_for_name(name: str) -> Optional[str]: def get_custom_models_package() -> Optional[str]: return _get_custom_package_for_name(_MODELS_KEY) + + +def get_custom_urns_package() -> Optional[str]: + return _get_custom_package_for_name(_URNS_KEY) diff --git a/metadata-ingestion/src/datahub/utilities/docs_build.py b/metadata-ingestion/src/datahub/utilities/docs_build.py new file mode 100644 index 0000000000000..18cb3629516ba --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/docs_build.py @@ -0,0 +1,9 @@ +from typing import TYPE_CHECKING + +try: + # Via https://stackoverflow.com/a/65147676 + if not TYPE_CHECKING and __sphinx_build__: + IS_SPHINX_BUILD = True + +except NameError: + IS_SPHINX_BUILD = False diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index efe2d26aae3d9..c2cccf9f1e389 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -37,7 +37,7 @@ TimeTypeClass, ) from datahub.utilities.file_backed_collections import ConnectionWrapper, FileBackedDict -from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.field_paths import get_simple_field_path_from_v2_field_path logger = logging.getLogger(__name__) @@ -443,15 +443,14 @@ def _convert_schema_aspect_to_info( cls, schema_metadata: SchemaMetadataClass ) -> SchemaInfo: return { - DatasetUrn.get_simple_field_path_from_v2_field_path(col.fieldPath): ( + get_simple_field_path_from_v2_field_path(col.fieldPath): ( # The actual types are more of a "nice to have". col.nativeDataType or "str" ) for col in schema_metadata.fields # TODO: We can't generate lineage to columns nested within structs yet. - if "." - not in DatasetUrn.get_simple_field_path_from_v2_field_path(col.fieldPath) + if "." not in get_simple_field_path_from_v2_field_path(col.fieldPath) } @classmethod @@ -459,17 +458,14 @@ def convert_graphql_schema_metadata_to_info( cls, schema: GraphQLSchemaMetadata ) -> SchemaInfo: return { - DatasetUrn.get_simple_field_path_from_v2_field_path(field["fieldPath"]): ( + get_simple_field_path_from_v2_field_path(field["fieldPath"]): ( # The actual types are more of a "nice to have". field["nativeDataType"] or "str" ) for field in schema["fields"] # TODO: We can't generate lineage to columns nested within structs yet. - if "." - not in DatasetUrn.get_simple_field_path_from_v2_field_path( - field["fieldPath"] - ) + if "." 
+            not in get_simple_field_path_from_v2_field_path(field["fieldPath"])
         }
 
     def close(self) -> None:
diff --git a/metadata-ingestion/src/datahub/utilities/urn_encoder.py b/metadata-ingestion/src/datahub/utilities/urn_encoder.py
index 706d50d942055..093c9ade8c152 100644
--- a/metadata-ingestion/src/datahub/utilities/urn_encoder.py
+++ b/metadata-ingestion/src/datahub/utilities/urn_encoder.py
@@ -23,4 +23,8 @@ def encode_char(c: str) -> str:
 
     @staticmethod
     def contains_reserved_char(value: str) -> bool:
+        return bool(set(value).intersection(RESERVED_CHARS))
+
+    @staticmethod
+    def contains_extended_reserved_char(value: str) -> bool:
         return bool(set(value).intersection(RESERVED_CHARS_EXTENDED))
diff --git a/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py
new file mode 100644
index 0000000000000..fbde0d6e6d69a
--- /dev/null
+++ b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py
@@ -0,0 +1,234 @@
+import functools
+import urllib.parse
+from abc import abstractmethod
+from typing import ClassVar, Dict, List, Optional, Type, TypeVar
+
+from deprecated import deprecated
+
+from datahub.utilities.urns.error import InvalidUrnError
+
+URN_TYPES: Dict[str, Type["_SpecificUrn"]] = {}
+
+
+def _split_entity_id(entity_id: str) -> List[str]:
+    if not (entity_id.startswith("(") and entity_id.endswith(")")):
+        return [entity_id]
+
+    parts = []
+    start_paren_count = 1
+    part_start = 1
+    for i in range(1, len(entity_id)):
+        c = entity_id[i]
+        if c == "(":
+            start_paren_count += 1
+        elif c == ")":
+            start_paren_count -= 1
+            if start_paren_count < 0:
+                raise InvalidUrnError(f"{entity_id}, mismatched paren nesting")
+        elif c == ",":
+            if start_paren_count != 1:
+                continue
+
+            if i - part_start <= 0:
+                raise InvalidUrnError(f"{entity_id}, empty part disallowed")
+            parts.append(entity_id[part_start:i])
+            part_start = i + 1
+
+    if start_paren_count != 0:
+        raise InvalidUrnError(f"{entity_id}, mismatched paren nesting")
+
+    parts.append(entity_id[part_start:-1])
+
+    return parts
+
+
+_UrnSelf = TypeVar("_UrnSelf", bound="Urn")
+
+
+@functools.total_ordering
+class Urn:
+    """
+    URNs are globally unique identifiers used to refer to entities.
+
+    It will be in format of urn:li:<entity_type>:<entity_id> or
+    urn:li:<entity_type>:(<id1>,<id2>,...)
+
+    A note on encoding: certain characters, particularly commas and parentheses, are
+    not allowed in string portions of the URN. However, these are allowed when the urn
+    has another urn embedded within it. The main URN class ignores this possibility,
+    and assumes that the user provides a valid URN string. However, the specific URN
+    classes, such as DatasetUrn, will automatically encode these characters using
+    url-encoding when the URN is created and _allow_coercion is enabled (the default).
+    However, all from_string methods will try to preserve the string as-is, and will
+    raise an error if the string is invalid.
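+
+    Example urn strings (values are illustrative):
+
+        urn:li:corpuser:jdoe
+        urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)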
+ """ + + # retained for backwards compatibility + URN_PREFIX: ClassVar[str] = "urn" + LI_DOMAIN: ClassVar[str] = "li" + + _entity_type: str + _entity_ids: List[str] + + def __init__(self, entity_type: str, entity_id: List[str]) -> None: + self._entity_type = entity_type + self._entity_ids = entity_id + + if not self._entity_ids: + raise InvalidUrnError("Empty entity id.") + for part in self._entity_ids: + if not part: + raise InvalidUrnError("Empty entity id.") + + @property + def entity_type(self) -> str: + return self._entity_type + + @property + def entity_ids(self) -> List[str]: + return self._entity_ids + + @classmethod + def from_string(cls: Type[_UrnSelf], urn_str: str) -> "_UrnSelf": + """ + Creates an Urn from its string representation. + + Args: + urn_str: The string representation of the Urn. + + Returns: + Urn of the given string representation. + + Raises: + InvalidUrnError: If the string representation is in invalid format. + """ + + # TODO: Add handling for url encoded urns e.g. urn%3A ... + + if not urn_str.startswith("urn:li:"): + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. Urns should start with 'urn:li:'" + ) + + parts: List[str] = urn_str.split(":", maxsplit=3) + if len(parts) != 4: + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. Expect 4 parts from urn string but found {len(parts)}" + ) + if "" in parts: + raise InvalidUrnError( + f"Invalid urn string: {urn_str}. There should not be empty parts in urn string." + ) + + _urn, _li, entity_type, entity_ids_str = parts + entity_ids = _split_entity_id(entity_ids_str) + + UrnCls: Optional[Type["_SpecificUrn"]] = URN_TYPES.get(entity_type) + if UrnCls: + if not issubclass(UrnCls, cls): + # We want to return a specific subtype of Urn. If we're called + # with Urn.from_string(), that's fine. However, if we're called as + # DatasetUrn.from_string('urn:li:corpuser:foo'), that should throw an error. + raise InvalidUrnError( + f"Passed an urn of type {entity_type} to the from_string method of {cls.__name__}. Use Urn.from_string() or {UrnCls.__name__}.from_string() instead." + ) + return UrnCls._parse_ids(entity_ids) # type: ignore + + # Fallback for unknown types. 
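+        # No _SpecificUrn subclass is registered for this entity type, so only
+        # the generic Urn class is allowed to represent it; specific subclasses
+        # reject unknown types below.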
+ if cls != Urn: + raise InvalidUrnError( + f"Unknown urn type {entity_type} for urn {urn_str} of type {cls}" + ) + return cls(entity_type, entity_ids) + + def urn(self) -> str: + """Get the string representation of the urn.""" + + if len(self._entity_ids) == 1: + return f"urn:li:{self._entity_type}:{self._entity_ids[0]}" + + return f"urn:li:{self._entity_type}:({','.join(self._entity_ids)})" + + def __str__(self) -> str: + return self.urn() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.urn()})" + + def urn_url_encoded(self) -> str: + return Urn.url_encode(self.urn()) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Urn): + return False + return self.urn() == other.urn() + + def __lt__(self, other: object) -> bool: + if not isinstance(other, Urn): + raise TypeError( + f"'<' not supported between instances of '{type(self)}' and '{type(other)}'" + ) + return self.urn() < other.urn() + + def __hash__(self) -> int: + return hash(self.urn()) + + @classmethod + @deprecated(reason="prefer .from_string") + def create_from_string(cls: Type[_UrnSelf], urn_str: str) -> "_UrnSelf": + return cls.from_string(urn_str) + + @deprecated(reason="prefer .entity_ids") + def get_entity_id(self) -> List[str]: + return self._entity_ids + + @deprecated(reason="prefer .entity_type") + def get_type(self) -> str: + return self._entity_type + + @deprecated(reason="no longer needed") + def get_domain(self) -> str: + return "li" + + @deprecated(reason="no longer needed") + def get_entity_id_as_string(self) -> str: + urn = self.urn() + prefix = "urn:li:" + assert urn.startswith(prefix) + id_with_type = urn[len(prefix) :] + return id_with_type.split(":", maxsplit=1)[1] + + @classmethod + @deprecated(reason="no longer needed") + def validate(cls, urn_str: str) -> None: + Urn.create_from_string(urn_str) + + @staticmethod + def url_encode(urn: str) -> str: + # safe='' encodes '/' as '%2F' + return urllib.parse.quote(urn, safe="") + + +class _SpecificUrn(Urn): + ENTITY_TYPE: str = "" + + def __init_subclass__(cls) -> None: + # Validate the subclass. + entity_type = cls.ENTITY_TYPE + if not entity_type: + raise ValueError(f'_SpecificUrn subclass {cls} must define "ENTITY_TYPE"') + + # Register the urn type. + if entity_type in URN_TYPES: + raise ValueError(f"duplicate urn type registered: {entity_type}") + URN_TYPES[entity_type] = cls + + return super().__init_subclass__() + + @classmethod + def underlying_key_aspect_type(cls) -> Type: + raise NotImplementedError() + + @classmethod + @abstractmethod + def _parse_ids(cls: Type[_UrnSelf], entity_ids: List[str]) -> _UrnSelf: + raise NotImplementedError() diff --git a/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py b/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py index 94fa133becf6c..37c1076925945 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/corp_group_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class CorpGroupUrn(Urn): - """ - expected corp group urn format: urn:li:corpGroup:. 
example: "urn:li:corpGroup:data" - """ - - ENTITY_TYPE: str = "corpGroup" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "CorpGroupUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, group_id: str) -> "CorpGroupUrn": - return cls(CorpGroupUrn.ENTITY_TYPE, [group_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != CorpGroupUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {CorpGroupUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import CorpGroupUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py b/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py index 653b99f4af9bf..5f9ecf65951b9 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/corpuser_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class CorpuserUrn(Urn): - """ - expected corp user urn format: urn:li:corpuser:. example: "urn:li:corpuser:tom" - """ - - ENTITY_TYPE: str = "corpuser" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "CorpuserUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, user_id: str) -> "CorpuserUrn": - return cls(CorpuserUrn.ENTITY_TYPE, [user_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != CorpuserUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {CorpuserUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import CorpUserUrn as CorpuserUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py index f0dda5d8db493..5b2b45927c339 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_flow_urn.py @@ -1,88 +1 @@ -from typing import List, Optional - -from datahub.configuration.source_common import ALL_ENV_TYPES -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataFlowUrn(Urn): - """ - expected data flow urn format: urn:li:dataFlow:(,,). 
example: - urn:li:dataFlow:(airflow,ingest_user,prod) - """ - - ENTITY_TYPE: str = "dataFlow" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataFlowUrn": - """ - Create a DataFlowUrn from the its string representation - :param urn_str: the string representation of the DataFlowUrn - :return: DataFlowUrn of the given string representation - :raises InvalidUrnError is the string representation is in invalid format - """ - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - def get_orchestrator_name(self) -> str: - """ - :return: the orchestrator name for the Dataflow - """ - return self.get_entity_id()[0] - - def get_flow_id(self) -> str: - """ - :return: the data flow id from this DataFlowUrn - """ - return self.get_entity_id()[1] - - def get_env(self) -> str: - """ - :return: the environment where the DataFlow is run - """ - return self.get_entity_id()[2] - - @classmethod - def create_from_ids( - cls, - orchestrator: str, - flow_id: str, - env: str, - platform_instance: Optional[str] = None, - ) -> "DataFlowUrn": - entity_id: List[str] - if platform_instance: - entity_id = [ - orchestrator, - f"{platform_instance}.{flow_id}", - env, - ] - else: - entity_id = [orchestrator, flow_id, env] - return cls(DataFlowUrn.ENTITY_TYPE, entity_id) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataFlowUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataFlowUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - # expected entity id format (,,) - if len(entity_id) != 3: - raise InvalidUrnError( - f"Expect 3 parts in the entity id but found {entity_id}" - ) - - env = entity_id[2].upper() - if env not in ALL_ENV_TYPES: - raise InvalidUrnError( - f"Invalid env:{env}. Allowed envs are {ALL_ENV_TYPES}" - ) +from datahub.metadata.urns import DataFlowUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py index 9459646893b92..53e3419ee7ecb 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_job_urn.py @@ -1,51 +1 @@ -from typing import List - -from datahub.utilities.urns.data_flow_urn import DataFlowUrn -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataJobUrn(Urn): - """ - expected Data job urn format: urn:li:dataJob:(,). 
example: - "urn:li:dataJob:(urn:li:dataFlow:(airflow,sample_flow,prod),sample_job)" - """ - - ENTITY_TYPE: str = "dataJob" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - def get_data_flow_urn(self) -> DataFlowUrn: - return DataFlowUrn.create_from_string(self.get_entity_id()[0]) - - def get_job_id(self) -> str: - return self.get_entity_id()[1] - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataJobUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_ids(cls, data_flow_urn: str, job_id: str) -> "DataJobUrn": - return cls(DataJobUrn.ENTITY_TYPE, [data_flow_urn, job_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataJobUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataJobUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 2: - raise InvalidUrnError( - f"Expect 2 part in entity id, but found{len(entity_id)}" - ) - - data_flow_urn_str = entity_id[0] - DataFlowUrn.validate(data_flow_urn_str) +from datahub.metadata.urns import DataJobUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py index 79cf54dfe920a..9d37e38f256e7 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_platform_urn.py @@ -1,34 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataPlatformUrn(Urn): - """ - expected dataset urn format: urn:li:dataPlatform:. 
example: "urn:li:dataPlatform:hive" - """ - - ENTITY_TYPE: str = "dataPlatform" - - def __init__(self, entity_type: str, entity_id: List[str], domain: str = "li"): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataPlatformUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, platform_id: str) -> "DataPlatformUrn": - return cls(DataPlatformUrn.ENTITY_TYPE, [platform_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataPlatformUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataPlatformUrn.ENTITY_TYPE} but found {entity_type}" - ) - - def get_platform_name(self) -> str: - return self.get_entity_id()[0] +from datahub.metadata.urns import DataPlatformUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py b/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py index 6367d48d6d441..df6ba797d069c 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/data_process_instance_urn.py @@ -1,46 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DataProcessInstanceUrn(Urn): - """ - expected domain urn format: urn:li:dataProcessInstance: - """ - - ENTITY_TYPE: str = "dataProcessInstance" - - def __init__( - self, entity_type: str, entity_id: List[str], domain_id: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain_id) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DataProcessInstanceUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, dataprocessinstance_id: str) -> "DataProcessInstanceUrn": - return cls(DataProcessInstanceUrn.ENTITY_TYPE, [dataprocessinstance_id]) - - def get_dataprocessinstance_id(self) -> str: - """ - :return: the dataprocess instance id from this DatasetUrn - """ - return self.get_entity_id()[0] - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DataProcessInstanceUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DataProcessInstanceUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import DataProcessInstanceUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py b/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py index 3ed33c068496e..6078ffefc03d8 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/dataset_urn.py @@ -1,112 +1 @@ -from typing import List, Optional - -from datahub.configuration.source_common import ALL_ENV_TYPES -from datahub.utilities.urn_encoder import UrnEncoder -from datahub.utilities.urns.data_platform_urn import DataPlatformUrn -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DatasetUrn(Urn): - """ - expected dataset urn format: urn:li:dataset:(,,env). 
example: - urn:li:dataset:(urn:li:dataPlatform:hive,member,prod) - """ - - ENTITY_TYPE: str = "dataset" - - def __init__(self, entity_type: str, entity_id: List[str], domain: str = "li"): - super().__init__(entity_type, UrnEncoder.encode_string_array(entity_id), domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DatasetUrn": - """ - Create a DatasetUrn from the its string representation - :param urn_str: the string representation of the DatasetUrn - :return: DatasetUrn of the given string representation - :raises InvalidUrnError is the string representation is in invalid format - """ - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - def get_data_platform_urn(self) -> DataPlatformUrn: - """ - :return: the DataPlatformUrn of where the Dataset is created - """ - return DataPlatformUrn.create_from_string(self.get_entity_id()[0]) - - def get_dataset_name(self) -> str: - """ - :return: the dataset name from this DatasetUrn - """ - return self.get_entity_id()[1] - - def get_env(self) -> str: - """ - :return: the environment where the Dataset is created - """ - return self.get_entity_id()[2] - - @classmethod - def create_from_ids( - cls, - platform_id: str, - table_name: str, - env: str, - platform_instance: Optional[str] = None, - ) -> "DatasetUrn": - entity_id: List[str] - if platform_instance: - entity_id = [ - str(DataPlatformUrn.create_from_id(platform_id)), - f"{platform_instance}.{table_name}", - env, - ] - else: - entity_id = [ - str(DataPlatformUrn.create_from_id(platform_id)), - table_name, - env, - ] - return cls(DatasetUrn.ENTITY_TYPE, entity_id) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DatasetUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DatasetUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - # expected entity id format (,,) - if len(entity_id) != 3: - raise InvalidUrnError( - f"Expect 3 parts in the entity id but found {entity_id}" - ) - - platform_urn_str = entity_id[0] - - DataPlatformUrn.validate(platform_urn_str) - env = entity_id[2].upper() - if env not in ALL_ENV_TYPES: - raise InvalidUrnError( - f"Invalid env:{env}. Allowed envs are {ALL_ENV_TYPES}" - ) - - """A helper function to extract simple . path notation from the v2 field path""" - - @staticmethod - def get_simple_field_path_from_v2_field_path(field_path: str) -> str: - if field_path.startswith("[version=2.0]"): - # this is a v2 field path - tokens = [ - t - for t in field_path.split(".") - if not (t.startswith("[") or t.endswith("]")) - ] - path = ".".join(tokens) - return path - else: - # not a v2, we assume this is a simple path - return field_path +from datahub.metadata.urns import DatasetUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py b/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py index dc875ce84f973..442a6b27729bb 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/domain_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class DomainUrn(Urn): - """ - expected domain urn format: urn:li:domain:. 
example: "urn:li:domain:product" - """ - - ENTITY_TYPE: str = "domain" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "DomainUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, domain_id: str) -> "DomainUrn": - return cls(DomainUrn.ENTITY_TYPE, [domain_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != DomainUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {DomainUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import DomainUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/error.py b/metadata-ingestion/src/datahub/utilities/urns/error.py index 12b7c02ab2d9a..a5c17c40787ca 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/error.py +++ b/metadata-ingestion/src/datahub/utilities/urns/error.py @@ -1,3 +1,2 @@ class InvalidUrnError(Exception): - def __init__(self, msg: str): - super().__init__(msg) + pass diff --git a/metadata-ingestion/src/datahub/utilities/urns/field_paths.py b/metadata-ingestion/src/datahub/utilities/urns/field_paths.py new file mode 100644 index 0000000000000..c2ecfa3031140 --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/urns/field_paths.py @@ -0,0 +1,15 @@ +def get_simple_field_path_from_v2_field_path(field_path: str) -> str: + """A helper function to extract simple . path notation from the v2 field path""" + + if field_path.startswith("[version=2.0]"): + # this is a v2 field path + tokens = [ + t + for t in field_path.split(".") + if not (t.startswith("[") or t.endswith("]")) + ] + path = ".".join(tokens) + return path + else: + # not a v2, we assume this is a simple path + return field_path diff --git a/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py b/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py index fcf2c92450309..60a4f5396aa46 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/notebook_urn.py @@ -1,46 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class NotebookUrn(Urn): - """ - expected dataset urn format: urn:li:notebook:(,). 
example: "urn:li:notebook:(querybook,1234)" - """ - - ENTITY_TYPE: str = "notebook" - - def __init__( - self, entity_type: str, entity_id: List[str], domain: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, domain) - - @classmethod - def create_from_string(cls, urn_str: str) -> "NotebookUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_ids(cls, platform_id: str, notebook_id: str) -> "NotebookUrn": - return cls(NotebookUrn.ENTITY_TYPE, [platform_id, notebook_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != NotebookUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {NotebookUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 2: - raise InvalidUrnError( - f"Expect 2 parts in entity id, but found{len(entity_id)}" - ) - - def get_platform_id(self) -> str: - return self.get_entity_id()[0] - - def get_notebook_id(self) -> str: - return self.get_entity_id()[1] +from datahub.metadata.urns import NotebookUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py b/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py index e2baeea45e807..0ac632ee40a01 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/tag_urn.py @@ -1,40 +1 @@ -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class TagUrn(Urn): - """ - expected tag urn format: urn:li:tag:. example: "urn:li:tag:product" - """ - - ENTITY_TYPE: str = "tag" - - def __init__( - self, entity_type: str, entity_id: List[str], tag: str = Urn.LI_DOMAIN - ): - super().__init__(entity_type, entity_id, tag) - - @classmethod - def create_from_string(cls, urn_str: str) -> "TagUrn": - urn: Urn = super().create_from_string(urn_str) - return cls(urn.get_type(), urn.get_entity_id(), urn.get_domain()) - - @classmethod - def create_from_id(cls, tag_id: str) -> "TagUrn": - return cls(TagUrn.ENTITY_TYPE, [tag_id]) - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - if entity_type != TagUrn.ENTITY_TYPE: - raise InvalidUrnError( - f"Entity type should be {TagUrn.ENTITY_TYPE} but found {entity_type}" - ) - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - if len(entity_id) != 1: - raise InvalidUrnError( - f"Expect 1 part in entity id, but found{len(entity_id)}" - ) +from datahub.metadata.urns import TagUrn # noqa: F401 diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn.py b/metadata-ingestion/src/datahub/utilities/urns/urn.py index db6898d55ad2b..2e5cebfd0e8f5 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn.py @@ -1,167 +1,6 @@ -import urllib.parse -from typing import List - -from datahub.utilities.urns.error import InvalidUrnError +from datahub.metadata.urns import Urn # noqa: F401 def guess_entity_type(urn: str) -> str: assert urn.startswith("urn:li:"), "urns must start with urn:li:" return urn.split(":")[2] - - -class Urn: - """ - URNs are Globally Unique Identifiers (GUID) used to represent an entity. - It will be in format of urn::: - """ - - URN_PREFIX: str = "urn" - # all the Datahub urn use li domain for now. 
- LI_DOMAIN: str = "li" - - _entity_type: str - _domain: str - _entity_id: List[str] - - def __init__( - self, entity_type: str, entity_id: List[str], urn_domain: str = LI_DOMAIN - ): - if not entity_id: - raise InvalidUrnError("Empty entity id.") - self._validate_entity_type(entity_type) - self._validate_entity_id(entity_id) - self._entity_type = entity_type - self._domain = urn_domain - self._entity_id = entity_id - - @classmethod - def create_from_string(cls, urn_str: str) -> "Urn": - """ - Create a Urn from the its string representation - :param urn_str: the string representation of the Urn - :return: Urn of the given string representation - :raises InvalidUrnError if the string representation is in invalid format - """ - - # expect urn string in format of urn::: - cls.validate(urn_str) - parts: List[str] = urn_str.split(":", 3) - - return cls(parts[2], cls._get_entity_id_from_str(parts[3]), parts[1]) - - @classmethod - def validate(cls, urn_str: str) -> None: - """ - Validate if a string is in valid Urn format - :param urn_str: to be validated urn string - :raises InvalidUrnError if the string representation is in invalid format - """ - parts: List[str] = urn_str.split(":", 3) - if len(parts) != 4: - raise InvalidUrnError( - f"Invalid urn string: {urn_str}. Expect 4 parts from urn string but found {len(parts)}" - ) - - if "" in parts: - raise InvalidUrnError( - f"Invalid urn string: {urn_str}. There should not be empty parts in urn string." - ) - - if parts[0] != Urn.URN_PREFIX: - raise InvalidUrnError( - f'Invalid urn string: {urn_str}. Expect urn starting with "urn" but found {parts[0]}' - ) - - if "" in cls._get_entity_id_from_str(parts[3]): - raise InvalidUrnError( - f"Invalid entity id in urn string: {urn_str}. There should not be empty parts in entity id." - ) - - cls._validate_entity_type(parts[2]) - cls._validate_entity_id(cls._get_entity_id_from_str(parts[3])) - - @staticmethod - def url_encode(urn: str) -> str: - # safe='' encodes '/' as '%2F' - return urllib.parse.quote(urn, safe="") - - def get_type(self) -> str: - return self._entity_type - - def get_entity_id(self) -> List[str]: - return self._entity_id - - def get_entity_id_as_string(self) -> str: - """ - :return: string representation of the entity ids. If there are more than one part in the entity id part, it will - return in this format (,,...) 
- """ - return self._entity_id_to_string() - - def get_domain(self) -> str: - return self._domain - - @staticmethod - def _get_entity_id_from_str(entity_id: str) -> List[str]: - if not (entity_id.startswith("(") and entity_id.endswith(")")): - return [entity_id] - - parts = [] - start_paren_count = 1 - part_start = 1 - for i in range(1, len(entity_id)): - c = entity_id[i] - if c == "(": - start_paren_count += 1 - elif c == ")": - start_paren_count -= 1 - if start_paren_count < 0: - raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") - elif c == ",": - if start_paren_count != 1: - continue - - if i - part_start <= 0: - raise InvalidUrnError(f"{entity_id}, empty part disallowed") - parts.append(entity_id[part_start:i]) - part_start = i + 1 - - if start_paren_count != 0: - raise InvalidUrnError(f"{entity_id}, mismatched paren nesting") - - parts.append(entity_id[part_start:-1]) - - return parts - - @staticmethod - def _validate_entity_type(entity_type: str) -> None: - pass - - @staticmethod - def _validate_entity_id(entity_id: List[str]) -> None: - pass - - def __str__(self) -> str: - return f"{self.URN_PREFIX}:{self._domain}:{self._entity_type}:{self._entity_id_to_string()}" - - def _entity_id_to_string(self) -> str: - if len(self._entity_id) == 1: - return self._entity_id[0] - result = "" - for part in self._entity_id: - result = result + str(part) + "," - return f"({result[:-1]})" - - def __hash__(self) -> int: - return hash((self._domain, self._entity_type) + tuple(self._entity_id)) - - def __eq__(self, other: object) -> bool: - return ( - ( - self._entity_id == other._entity_id - and self._domain == other._domain - and self._entity_type == other._entity_type - ) - if isinstance(other, Urn) - else False - ) diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py index 169a4ac3649a3..4f228494f416b 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py +++ b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py @@ -131,9 +131,11 @@ def _modify_at_path( def _lowercase_dataset_urn(dataset_urn: str) -> str: - cur_urn = DatasetUrn.create_from_string(dataset_urn) - cur_urn._entity_id[1] = cur_urn._entity_id[1].lower() - return str(cur_urn) + cur_urn = DatasetUrn.from_string(dataset_urn) + new_urn = DatasetUrn( + platform=cur_urn.platform, name=cur_urn.name.lower(), env=cur_urn.env + ) + return str(new_urn) def lowercase_dataset_urns( @@ -149,7 +151,7 @@ def modify_urn(urn: str) -> str: return _lowercase_dataset_urn(urn) elif guess_entity_type(urn) == "schemaField": cur_urn = Urn.create_from_string(urn) - cur_urn._entity_id[0] = _lowercase_dataset_urn(cur_urn._entity_id[0]) + cur_urn._entity_ids[0] = _lowercase_dataset_urn(cur_urn._entity_ids[0]) return str(cur_urn) return urn diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py index 712ae2066b728..ecea318339345 100644 --- a/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py +++ b/metadata-ingestion/tests/unit/stateful_ingestion/state/test_checkpoint.py @@ -4,7 +4,6 @@ import pydantic import pytest -from datahub.emitter.mce_builder import make_dataset_urn from datahub.ingestion.source.state.checkpoint import Checkpoint, CheckpointStateBase from datahub.ingestion.source.state.sql_common_state import ( BaseSQLAlchemyCheckpointState, @@ -59,12 +58,15 @@ def _assert_checkpoint_deserialization( def 
_make_sql_alchemy_checkpoint_state() -> BaseSQLAlchemyCheckpointState: + # Note that the urns here purposely use a lowercase env, even though it's + # technically incorrect. This is purely for backwards compatibility testing, but + # all existing code uses correctly formed envs. base_sql_alchemy_checkpoint_state_obj = BaseSQLAlchemyCheckpointState() base_sql_alchemy_checkpoint_state_obj.add_checkpoint_urn( - type="table", urn=make_dataset_urn("mysql", "db1.t1", "prod") + type="table", urn="urn:li:dataset:(urn:li:dataPlatform:mysql,db1.t1,prod)" ) base_sql_alchemy_checkpoint_state_obj.add_checkpoint_urn( - type="view", urn=make_dataset_urn("mysql", "db1.v1", "prod") + type="view", urn="urn:li:dataset:(urn:li:dataPlatform:mysql,db1.v1,prod)" ) return base_sql_alchemy_checkpoint_state_obj diff --git a/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py b/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py index f4517ba2df9c9..3b0e4e31d4b4a 100644 --- a/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py +++ b/metadata-ingestion/tests/unit/stateful_ingestion/test_kafka_state.py @@ -25,6 +25,6 @@ def test_kafka_state_migration() -> None: } ) assert state.urns == [ - "urn:li:dataset:(urn:li:dataPlatform:kafka,test_topic1,test)", + "urn:li:dataset:(urn:li:dataPlatform:kafka,test_topic1,TEST)", "urn:li:dataset:(urn:li:dataPlatform:kafka,topic_2,DEV)", ] diff --git a/metadata-ingestion/tests/unit/test_urn.py b/metadata-ingestion/tests/unit/test_urn.py deleted file mode 100644 index 8bab01e437fdb..0000000000000 --- a/metadata-ingestion/tests/unit/test_urn.py +++ /dev/null @@ -1,45 +0,0 @@ -import unittest - -from datahub.utilities.urns.error import InvalidUrnError -from datahub.utilities.urns.urn import Urn - - -class TestUrn(unittest.TestCase): - def test_parse_urn(self) -> None: - simple_urn_str = "urn:li:dataPlatform:abc" - urn = Urn.create_from_string(simple_urn_str) - assert urn.get_entity_id_as_string() == "abc" - assert urn.get_entity_id() == ["abc"] - assert urn.get_type() == "dataPlatform" - assert urn.get_domain() == "li" - assert urn.__str__() == simple_urn_str - assert urn == Urn("dataPlatform", ["abc"]) - - complex_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - urn = Urn.create_from_string(complex_urn_str) - assert urn.get_entity_id_as_string() == "(urn:li:dataPlatform:abc,def,prod)" - assert urn.get_entity_id() == ["urn:li:dataPlatform:abc", "def", "prod"] - assert urn.get_type() == "dataset" - assert urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - - def test_url_encode_urn(self) -> None: - urn_with_slash: Urn = Urn.create_from_string( - "urn:li:dataset:(urn:li:dataPlatform:abc,def/ghi,prod)" - ) - assert ( - Urn.url_encode(str(urn_with_slash)) - == "urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Aabc%2Cdef%2Fghi%2Cprod%29" - ) - - def test_invalid_urn(self) -> None: - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:()") - - with self.assertRaises(InvalidUrnError): - Urn.create_from_string("urn:li:abc:(abc,)") diff --git a/metadata-ingestion/tests/unit/test_corp_group_urn.py b/metadata-ingestion/tests/unit/urns/test_corp_group_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_corp_group_urn.py rename to metadata-ingestion/tests/unit/urns/test_corp_group_urn.py index 
9cfd925ef34eb..1897a0e8686f0 100644 --- a/metadata-ingestion/tests/unit/test_corp_group_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_corp_group_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.corp_group_urn import CorpGroupUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestCorpGroupUrn(unittest.TestCase): def test_parse_urn(self) -> None: corp_group_urn_str = "urn:li:corpGroup:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert corp_group_urn.get_entity_id() == ["abc"] assert str(corp_group_urn) == corp_group_urn_str - assert corp_group_urn == CorpGroupUrn("corpGroup", ["abc"]) + assert corp_group_urn == CorpGroupUrn(name="abc") assert corp_group_urn == CorpGroupUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_corpuser_urn.py b/metadata-ingestion/tests/unit/urns/test_corpuser_urn.py similarity index 88% rename from metadata-ingestion/tests/unit/test_corpuser_urn.py rename to metadata-ingestion/tests/unit/urns/test_corpuser_urn.py index 40b83214a785b..7a2a4f4ff4493 100644 --- a/metadata-ingestion/tests/unit/test_corpuser_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_corpuser_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.corpuser_urn import CorpuserUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestCorpuserUrn(unittest.TestCase): def test_parse_urn(self) -> None: corpuser_urn_str = "urn:li:corpuser:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert corpuser_urn.get_entity_id() == ["abc"] assert str(corpuser_urn) == corpuser_urn_str - assert corpuser_urn == CorpuserUrn("corpuser", ["abc"]) + assert corpuser_urn == CorpuserUrn("abc") assert corpuser_urn == CorpuserUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_data_flow_urn.py b/metadata-ingestion/tests/unit/urns/test_data_flow_urn.py similarity index 77% rename from metadata-ingestion/tests/unit/test_data_flow_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_flow_urn.py index 8b739d39abf67..524411121d418 100644 --- a/metadata-ingestion/tests/unit/test_data_flow_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_flow_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.data_flow_urn import DataFlowUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDataFlowUrn(unittest.TestCase): def test_parse_urn(self) -> None: data_flow_urn_str = "urn:li:dataFlow:(airflow,def,prod)" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert data_flow_urn.get_flow_id() == "def" assert data_flow_urn.get_env() == "prod" assert data_flow_urn.__str__() == "urn:li:dataFlow:(airflow,def,prod)" - assert data_flow_urn == DataFlowUrn("dataFlow", ["airflow", "def", "prod"]) + assert data_flow_urn == DataFlowUrn("airflow", "def", "prod") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): @@ -20,8 +23,3 @@ def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): DataFlowUrn.create_from_string("urn:li:dataFlow:(airflow,flow_id)") - - with self.assertRaises(InvalidUrnError): - DataFlowUrn.create_from_string( - "urn:li:dataFlow:(airflow,flow_id,invalidEnv)" - ) diff --git 
a/metadata-ingestion/tests/unit/test_data_job_urn.py b/metadata-ingestion/tests/unit/urns/test_data_job_urn.py similarity index 90% rename from metadata-ingestion/tests/unit/test_data_job_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_job_urn.py index 0cd9084a51522..bf039cd2a91f9 100644 --- a/metadata-ingestion/tests/unit/test_data_job_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_job_urn.py @@ -1,10 +1,13 @@ import unittest +import pytest + from datahub.utilities.urns.data_flow_urn import DataFlowUrn from datahub.utilities.urns.data_job_urn import DataJobUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDataJobUrn(unittest.TestCase): def test_parse_urn(self) -> None: data_job_urn_str = ( @@ -17,7 +20,7 @@ def test_parse_urn(self) -> None: assert data_job_urn.get_job_id() == "job_id" assert data_job_urn.__str__() == data_job_urn_str assert data_job_urn == DataJobUrn( - "dataJob", ["urn:li:dataFlow:(airflow,flow_id,prod)", "job_id"] + "urn:li:dataFlow:(airflow,flow_id,prod)", "job_id" ) def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_data_process_instance_urn.py b/metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py similarity index 90% rename from metadata-ingestion/tests/unit/test_data_process_instance_urn.py rename to metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py index e6cd201e12c7a..a86f8dd99416f 100644 --- a/metadata-ingestion/tests/unit/test_data_process_instance_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_data_process_instance_urn.py @@ -1,10 +1,13 @@ import unittest +import pytest + from datahub.utilities.urns.data_process_instance_urn import DataProcessInstanceUrn from datahub.utilities.urns.error import InvalidUrnError -class TestDomainUrn(unittest.TestCase): +@pytest.mark.filterwarnings("ignore::DeprecationWarning") +class TestDataProcessInstanceUrn(unittest.TestCase): def test_parse_urn(self) -> None: dataprocessinstance_urn_str = "urn:li:dataProcessInstance:abc" dataprocessinstance_urn = DataProcessInstanceUrn.create_from_string( @@ -14,9 +17,7 @@ def test_parse_urn(self) -> None: assert dataprocessinstance_urn.get_entity_id() == ["abc"] assert str(dataprocessinstance_urn) == dataprocessinstance_urn_str - assert dataprocessinstance_urn == DataProcessInstanceUrn( - "dataProcessInstance", ["abc"] - ) + assert dataprocessinstance_urn == DataProcessInstanceUrn("abc") assert dataprocessinstance_urn == DataProcessInstanceUrn.create_from_id("abc") assert "abc" == dataprocessinstance_urn.get_dataprocessinstance_id() diff --git a/metadata-ingestion/tests/unit/test_dataset_urn.py b/metadata-ingestion/tests/unit/urns/test_dataset_urn.py similarity index 81% rename from metadata-ingestion/tests/unit/test_dataset_urn.py rename to metadata-ingestion/tests/unit/urns/test_dataset_urn.py index e1e37409d8a63..53065143a6ae4 100644 --- a/metadata-ingestion/tests/unit/test_dataset_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_dataset_urn.py @@ -1,26 +1,25 @@ import unittest +import pytest + from datahub.utilities.urns.data_platform_urn import DataPlatformUrn from datahub.utilities.urns.dataset_urn import DatasetUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDatasetUrn(unittest.TestCase): def test_parse_urn(self) -> None: - dataset_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + dataset_urn_str = 
"urn:li:dataset:(urn:li:dataPlatform:abc,def,PROD)" dataset_urn = DatasetUrn.create_from_string(dataset_urn_str) assert ( dataset_urn.get_data_platform_urn() == DataPlatformUrn.create_from_string("urn:li:dataPlatform:abc") ) assert dataset_urn.get_dataset_name() == "def" - assert dataset_urn.get_env() == "prod" - assert ( - dataset_urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" - ) - assert dataset_urn == DatasetUrn( - "dataset", ["urn:li:dataPlatform:abc", "def", "prod"] - ) + assert dataset_urn.get_env() == "PROD" + assert dataset_urn.__str__() == dataset_urn_str + assert dataset_urn == DatasetUrn("urn:li:dataPlatform:abc", "def", "prod") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): diff --git a/metadata-ingestion/tests/unit/test_domain_urn.py b/metadata-ingestion/tests/unit/urns/test_domain_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_domain_urn.py rename to metadata-ingestion/tests/unit/urns/test_domain_urn.py index e5e4dffc525cd..843a5bf40f5c6 100644 --- a/metadata-ingestion/tests/unit/test_domain_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_domain_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.domain_urn import DomainUrn from datahub.utilities.urns.error import InvalidUrnError +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestDomainUrn(unittest.TestCase): def test_parse_urn(self) -> None: domain_urn_str = "urn:li:domain:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert domain_urn.get_entity_id() == ["abc"] assert str(domain_urn) == domain_urn_str - assert domain_urn == DomainUrn("domain", ["abc"]) + assert domain_urn == DomainUrn("abc") assert domain_urn == DomainUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/test_notebook_urn.py b/metadata-ingestion/tests/unit/urns/test_notebook_urn.py similarity index 86% rename from metadata-ingestion/tests/unit/test_notebook_urn.py rename to metadata-ingestion/tests/unit/urns/test_notebook_urn.py index 6b245e29ceae9..3ec580f02142b 100644 --- a/metadata-ingestion/tests/unit/test_notebook_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_notebook_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.error import InvalidUrnError from datahub.utilities.urns.notebook_urn import NotebookUrn +@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestNotebookUrn(unittest.TestCase): def test_parse_urn(self) -> None: notebook_urn_str = "urn:li:notebook:(querybook,123)" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert notebook_urn.get_notebook_id() == "123" assert str(notebook_urn) == notebook_urn_str - assert notebook_urn == NotebookUrn("notebook", ["querybook", "123"]) + assert notebook_urn == NotebookUrn("querybook", "123") def test_invalid_urn(self) -> None: with self.assertRaises(InvalidUrnError): diff --git a/metadata-ingestion/tests/unit/test_tag_urn.py b/metadata-ingestion/tests/unit/urns/test_tag_urn.py similarity index 87% rename from metadata-ingestion/tests/unit/test_tag_urn.py rename to metadata-ingestion/tests/unit/urns/test_tag_urn.py index 630420dc1263f..fa3664bcc0218 100644 --- a/metadata-ingestion/tests/unit/test_tag_urn.py +++ b/metadata-ingestion/tests/unit/urns/test_tag_urn.py @@ -1,9 +1,12 @@ import unittest +import pytest + from datahub.utilities.urns.error import InvalidUrnError from datahub.utilities.urns.tag_urn import TagUrn 
+@pytest.mark.filterwarnings("ignore::DeprecationWarning") class TestTagUrn(unittest.TestCase): def test_parse_urn(self) -> None: tag_urn_str = "urn:li:tag:abc" @@ -12,7 +15,7 @@ def test_parse_urn(self) -> None: assert tag_urn.get_entity_id() == ["abc"] assert str(tag_urn) == tag_urn_str - assert tag_urn == TagUrn("tag", ["abc"]) + assert tag_urn == TagUrn("abc") assert tag_urn == TagUrn.create_from_id("abc") def test_invalid_urn(self) -> None: diff --git a/metadata-ingestion/tests/unit/urns/test_urn.py b/metadata-ingestion/tests/unit/urns/test_urn.py new file mode 100644 index 0000000000000..1bf48082fec8c --- /dev/null +++ b/metadata-ingestion/tests/unit/urns/test_urn.py @@ -0,0 +1,56 @@ +import pytest + +from datahub.metadata.urns import DatasetUrn, Urn +from datahub.utilities.urns.error import InvalidUrnError + +pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning") + + +def test_parse_urn() -> None: + simple_urn_str = "urn:li:dataPlatform:abc" + urn = Urn.create_from_string(simple_urn_str) + assert urn.get_entity_id_as_string() == "abc" + assert urn.get_entity_id() == ["abc"] + assert urn.get_type() == "dataPlatform" + assert urn.get_domain() == "li" + assert urn.__str__() == simple_urn_str + assert urn == Urn("dataPlatform", ["abc"]) + + complex_urn_str = "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + urn = Urn.create_from_string(complex_urn_str) + assert urn.get_entity_id_as_string() == "(urn:li:dataPlatform:abc,def,prod)" + assert urn.get_entity_id() == ["urn:li:dataPlatform:abc", "def", "prod"] + assert urn.get_type() == "dataset" + assert urn.__str__() == "urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)" + + +def test_url_encode_urn() -> None: + urn_with_slash: Urn = Urn.create_from_string( + "urn:li:dataset:(urn:li:dataPlatform:abc,def/ghi,prod)" + ) + assert ( + Urn.url_encode(str(urn_with_slash)) + == "urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Aabc%2Cdef%2Fghi%2Cprod%29" + ) + + +def test_invalid_urn() -> None: + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:()") + + with pytest.raises(InvalidUrnError): + Urn.create_from_string("urn:li:abc:(abc,)") + + +def test_urn_type_dispatch() -> None: + urn = Urn.from_string("urn:li:dataset:(urn:li:dataPlatform:abc,def,prod)") + assert isinstance(urn, DatasetUrn) + + with pytest.raises(InvalidUrnError, match="Passed an urn of type corpuser"): + DatasetUrn.from_string("urn:li:corpuser:foo") diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index a5296d074093b..1ba238b737236 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -400,7 +400,7 @@ entities: - dataHubUpgradeRequest - dataHubUpgradeResult - name: inviteToken - category: core + category: internal keyAspect: inviteTokenKey aspects: - inviteToken @@ -425,7 +425,7 @@ entities: aspects: - postInfo - name: dataHubStepState - category: core + category: internal keyAspect: dataHubStepStateKey aspects: - dataHubStepStateProperties From a8476ee657a3c116b65de8cd14a731acff164503 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 18:34:48 -0500 Subject: [PATCH 026/263] fix(airflow): support inlet datajobs correctly in v1 plugin (#9331) --- docs/lineage/airflow.md | 42 ++++++++-- .../datahub_listener.py | 4 + 
.../datahub_plugin_v22.py | 43 ++++++---- .../integration/goldens/v1_basic_iolets.json | 64 ++++----------- .../integration/goldens/v1_simple_dag.json | 78 ++++++------------- .../integration/goldens/v2_basic_iolets.json | 18 ++--- .../v2_basic_iolets_no_dag_listener.json | 14 ++-- .../integration/goldens/v2_simple_dag.json | 34 ++++---- .../v2_simple_dag_no_dag_listener.json | 28 +++---- .../goldens/v2_snowflake_operator.json | 14 ++-- .../goldens/v2_sqlite_operator.json | 62 +++++++-------- .../v2_sqlite_operator_no_dag_listener.json | 70 ++++++++--------- .../tests/integration/test_plugin.py | 52 ++++++++++--- .../datahub/api/entities/datajob/datajob.py | 3 +- 14 files changed, 269 insertions(+), 257 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 32da518d6c04c..8fd38f560bfbb 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -8,7 +8,7 @@ If you're looking to schedule DataHub ingestion using Airflow, see the guide on The DataHub Airflow plugin supports: -- Automatic column-level lineage extraction from various operators e.g. `SqlOperator`s (including `MySqlOperator`, `PostgresOperator`, `SnowflakeOperator`, and more), `S3FileTransformOperator`, and a few others. +- Automatic column-level lineage extraction from various operators e.g. SQL operators (including `MySqlOperator`, `PostgresOperator`, `SnowflakeOperator`, and more), `S3FileTransformOperator`, and more. - Airflow DAG and tasks, including properties, ownership, and tags. - Task run information, including task successes and failures. - Manual lineage annotations using `inlets` and `outlets` on Airflow operators. @@ -76,12 +76,6 @@ enabled = True # default | log_level | _no change_ | [debug] Set the log level for the plugin. | | debug_emitter | false | [debug] If true, the plugin will log the emitted events. | -### Automatic lineage extraction - -To automatically extract lineage information, the v2 plugin builds on top of Airflow's built-in [OpenLineage extractors](https://openlineage.io/docs/integrations/airflow/default-extractors). - -The SQL-related extractors have been updated to use DataHub's SQL parser, which is more robust than the built-in one and uses DataHub's metadata information to generate column-level lineage. We discussed the DataHub SQL parser, including why schema-aware parsing works better and how it performs on benchmarks, during the [June 2023 community town hall](https://youtu.be/1QVcUmRQK5E?si=U27zygR7Gi_KdkzE&t=2309). - ## DataHub Plugin v1 ### Installation @@ -152,6 +146,40 @@ conn_id = datahub_rest_default # or datahub_kafka_default Emitting DataHub ... ``` +## Automatic lineage extraction + +Only the v2 plugin supports automatic lineage extraction. If you're using the v1 plugin, you must use manual lineage annotation or emit lineage directly. + +To automatically extract lineage information, the v2 plugin builds on top of Airflow's built-in [OpenLineage extractors](https://openlineage.io/docs/integrations/airflow/default-extractors). +As such, we support a superset of the default operators that Airflow/OpenLineage supports. + +The SQL-related extractors have been updated to use [DataHub's SQL lineage parser](https://blog.datahubproject.io/extracting-column-level-lineage-from-sql-779b8ce17567), which is more robust than the built-in one and uses DataHub's metadata information to generate column-level lineage. + +Supported operators: + +- `SQLExecuteQueryOperator`, including any subclasses. 
Note that in newer versions of Airflow (generally Airflow 2.5+), most SQL operators inherit from this class. +- `AthenaOperator` and `AWSAthenaOperator` +- `BigQueryOperator` and `BigQueryExecuteQueryOperator` +- `MySqlOperator` +- `PostgresOperator` +- `RedshiftSQLOperator` +- `SnowflakeOperator` and `SnowflakeOperatorAsync` +- `SqliteOperator` +- `TrinoOperator` + + + ## Manual Lineage Annotation ### Using `inlets` and `outlets` diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index c39eef2635658..debc91700d3db 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -296,6 +296,7 @@ def _extract_lineage( logger.debug("Merging start datajob into finish datajob") datajob.inlets.extend(original_datajob.inlets) datajob.outlets.extend(original_datajob.outlets) + datajob.upstream_urns.extend(original_datajob.upstream_urns) datajob.fine_grained_lineages.extend(original_datajob.fine_grained_lineages) for k, v in original_datajob.properties.items(): @@ -304,6 +305,9 @@ def _extract_lineage( # Deduplicate inlets/outlets. datajob.inlets = list(sorted(set(datajob.inlets), key=lambda x: str(x))) datajob.outlets = list(sorted(set(datajob.outlets), key=lambda x: str(x))) + datajob.upstream_urns = list( + sorted(set(datajob.upstream_urns), key=lambda x: str(x)) + ) # Write all other OL facets as DataHub properties. if task_metadata: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py index f9a2119f51e32..51a4151bc8207 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin_v22.py @@ -18,6 +18,10 @@ ) from datahub_airflow_plugin._config import get_lineage_config from datahub_airflow_plugin.client.airflow_generator import AirflowGenerator +from datahub_airflow_plugin.entities import ( + entities_to_datajob_urn_list, + entities_to_dataset_urn_list, +) from datahub_airflow_plugin.hooks.datahub import DatahubGenericHook from datahub_airflow_plugin.lineage.datahub import DatahubLineageConfig @@ -94,7 +98,8 @@ def datahub_task_status_callback(context, status): # This code is from the original airflow lineage code -> # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_task_inlets_advanced(task, context) + task_inlets = get_task_inlets_advanced(task, context) + task_outlets = get_task_outlets(task) emitter = ( DatahubGenericHook(config.datahub_conn_id).get_underlying_hook().make_emitter() @@ -116,13 +121,15 @@ def datahub_task_status_callback(context, status): capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = get_task_outlets(task) - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) + datajob.inlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_inlets]) + ) + datajob.outlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_outlets]) + ) + datajob.upstream_urns.extend( + entities_to_datajob_urn_list([let.urn for let in task_inlets]) + ) 
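Aside on the change above: judging by their names and the golden-file diffs that follow, `entities_to_dataset_urn_list` and `entities_to_datajob_urn_list` partition a task's mixed `inlets` by entity type, so that dataset URNs populate `datajob.inlets` while dataJob URNs populate `datajob.upstream_urns`, mirroring the v2 listener's handling. Below is a minimal sketch of a DAG that exercises this path, modeled on the `simple_dag` test DAG reflected in the golden files further down; the DAG name and the `schedule_interval` argument are illustrative, not taken from this patch.

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.bash import BashOperator

from datahub_airflow_plugin.entities import Dataset, Urn

with DAG("simple_dag", start_date=datetime(2023, 1, 1), schedule_interval=None) as dag:
    task_1 = BashOperator(
        task_id="task_1",
        bash_command="echo 'task 1'",
        inlets=[
            # Dataset entities are emitted as input datasets of the DataJob.
            Dataset(platform="snowflake", name="mydb.schema.tableA", env="PROD"),
            # Raw URNs are passed through; a dataset URN also becomes an input.
            Urn("urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)"),
            # With this fix, a dataJob URN in the inlets is recorded as an
            # upstream task (inputDatajobs) in the v1 plugin as well, instead
            # of being mis-filed as an input dataset.
            Urn("urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)"),
        ],
        outlets=[Dataset(platform="snowflake", name="mydb.schema.tableD", env="PROD")],
    )
```

Accordingly, the v1 golden files that follow move `urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)` out of `inputDatasets`/`inputs` and into `inputDatajobs`, which is exactly the fixture diff below.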
task.log.info(f"Emitting Datahub Datajob: {datajob}") datajob.emit(emitter, callback=_make_emit_callback(task.log)) @@ -169,7 +176,8 @@ def datahub_pre_execution(context): # This code is from the original airflow lineage code -> # https://github.com/apache/airflow/blob/main/airflow/lineage/__init__.py - inlets = get_task_inlets_advanced(task, context) + task_inlets = get_task_inlets_advanced(task, context) + task_outlets = get_task_outlets(task) datajob = AirflowGenerator.generate_datajob( cluster=config.cluster, @@ -178,14 +186,15 @@ def datahub_pre_execution(context): capture_tags=config.capture_tags_info, capture_owner=config.capture_ownership_info, ) - - for inlet in inlets: - datajob.inlets.append(inlet.urn) - - task_outlets = get_task_outlets(task) - - for outlet in task_outlets: - datajob.outlets.append(outlet.urn) + datajob.inlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_inlets]) + ) + datajob.outlets.extend( + entities_to_dataset_urn_list([let.urn for let in task_outlets]) + ) + datajob.upstream_urns.extend( + entities_to_datajob_urn_list([let.urn for let in task_inlets]) + ) task.log.info(f"Emitting Datahub dataJob {datajob}") datajob.emit(emitter, callback=_make_emit_callback(task.log)) diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json index 6b460e99b1f28..a21df71efcdac 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -95,14 +95,15 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -151,17 +152,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -257,14 +247,15 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - 
"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableE,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -313,17 +304,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -389,9 +369,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.143271", - "start_date": "2023-11-08 09:55:05.801617+00:00", - "end_date": "2023-11-08 09:55:05.944888+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -408,7 +388,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437305801, + "time": 1701222667932, "actor": "urn:li:corpuser:datahub" } } @@ -437,8 +417,7 @@ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableB,DEV)", "urn:li:dataset:(urn:li:dataPlatform:snowflake,cloud.mydb.schema.tableC,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ] } } @@ -501,17 +480,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -541,7 +509,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437305801, + "timestampMillis": 1701222667932, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -558,7 +526,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437305944, + "timestampMillis": 1701222668122, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json index 7ec172e3678dc..6116722350541 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v1_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -94,13 +94,14 @@ "json": { "inputDatasets": [ 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -127,17 +128,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -220,13 +210,14 @@ "json": { "inputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ], "outputDatasets": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)" ], - "inputDatajobs": [], + "inputDatajobs": [ + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + ], "fineGrainedLineages": [] } } @@ -253,17 +244,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -318,9 +298,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.120524", - "start_date": "2023-11-08 09:54:06.065112+00:00", - "end_date": "2023-11-08 09:54:06.185636+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -337,7 +317,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437246065, + "time": 1701222595752, "actor": "urn:li:corpuser:datahub" } } @@ -364,8 +344,7 @@ "json": { "inputs": [ "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableA,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)" + "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)" ] } } @@ -405,17 +384,6 @@ } } }, -{ - "entityType": "dataJob", - "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableD,PROD)", @@ -434,7 +402,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437246065, + "timestampMillis": 1701222595752, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -451,7 +419,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437246185, + "timestampMillis": 1701222595962, "partitionSpec": { "type": 
"FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -476,7 +444,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/home/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "None", @@ -687,9 +655,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "0.099975", - "start_date": "2023-11-08 09:54:09.744583+00:00", - "end_date": "2023-11-08 09:54:09.844558+00:00", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "1", "max_tries": "0", @@ -706,7 +674,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699437249744, + "time": 1701222599804, "actor": "urn:li:corpuser:datahub" } } @@ -731,7 +699,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437249744, + "timestampMillis": 1701222599804, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -748,7 +716,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699437249844, + "timestampMillis": 1701222599959, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json index 6767a368f366a..7c52cbcddc13c 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -75,7 +75,7 @@ "downstream_task_ids": "[]", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, 
{\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": 
\"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -218,9 +218,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:07:55.311482+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -237,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671275311, + "time": 1701223416947, "actor": "urn:li:corpuser:datahub" } } @@ -358,7 +358,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671275311, + "timestampMillis": 1701223416947, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -387,7 +387,7 @@ "downstream_task_ids": "[]", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableB', env='DEV', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableC', env='PROD', platform_instance='cloud'), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None), Dataset(platform='snowflake', name='mydb.schema.tableE', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, 
\"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": 
\"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"task_id\": \"run_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'This is where you might run your data tooling.'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"env\": \"DEV\", \"name\": \"mydb.schema.tableB\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableC\", \"platform\": \"snowflake\", \"platform_instance\": \"cloud\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}, {\"env\": \"PROD\", \"name\": \"mydb.schema.tableE\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=basic_iolets&_flt_3_task_id=run_data_task", "name": "run_data_task", @@ -528,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671276777, + "timestampMillis": 1701223417702, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json index 63b0a05935554..150f95d5171c7 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_basic_iolets_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/basic_iolets.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -218,9 +218,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:11:17.444435+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -237,7 +237,7 @@ "name": "basic_iolets_run_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643477444, + "time": 1701223185349, "actor": "urn:li:corpuser:datahub" } } @@ -358,7 +358,7 @@ "aspectName": 
"dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643477444, + "timestampMillis": 1701223185349, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -528,7 +528,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643478123, + "timestampMillis": 1701223186055, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json index c558f79c32e15..0248ab0473c9e 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -76,7 +76,7 @@ "downstream_task_ids": "['run_another_data_task']", "inlets": "[Dataset(platform='snowflake', name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, 
\"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -183,9 +183,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:06:07.193282+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -202,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671167193, + "time": 1701223349283, "actor": "urn:li:corpuser:datahub" } } @@ -287,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671167193, + "timestampMillis": 1701223349283, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -316,7 +316,7 @@ "downstream_task_ids": "['run_another_data_task']", "inlets": "[Dataset(platform='snowflake', 
name='mydb.schema.tableA', env='PROD', platform_instance=None), Urn(_urn='urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)'), Urn(_urn='urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)')]", "outlets": "[Dataset(platform='snowflake', name='mydb.schema.tableD', env='PROD', platform_instance=None)]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 1'\", \"dag\": \"<>\", \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"task_id\": \"task_1\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, 
\"bash_command\": \"echo 'task 1'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [\"run_another_data_task\"], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableA\", \"platform\": \"snowflake\"}, {\"_urn\": \"urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)\"}, {\"_urn\": \"urn:li:dataJob:(urn:li:dataFlow:(airflow,test_dag,PROD),test_task)\"}], \"outlets\": [{\"env\": \"PROD\", \"name\": \"mydb.schema.tableD\", \"platform\": \"snowflake\"}], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"task_1\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=task_1", "name": "task_1", @@ -421,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671168726, + "timestampMillis": 1701223349928, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -453,7 +453,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, 
\"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -522,9 +522,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-30 13:06:19.970466+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -541,7 +541,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1698671179970, + "time": 1701223355004, "actor": "urn:li:corpuser:datahub" } } @@ -566,7 +566,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671179970, + "timestampMillis": 1701223355004, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -595,7 +595,7 @@ "downstream_task_ids": "[]", "inlets": "[]", "outlets": "[]", - "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_is_setup\": false, \"_is_teardown\": false, \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": {}, \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, 
\"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" + "openlineage_run_facet_unknownSourceAttribute": "{\"_producer\": \"https://github.com/OpenLineage/OpenLineage/tree/1.2.0/integration/airflow\", \"_schemaURL\": \"https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/BaseFacet\", \"unknownItems\": [{\"name\": \"BashOperator\", \"properties\": {\"_BaseOperator__from_mapped\": false, \"_BaseOperator__init_kwargs\": {\"bash_command\": \"echo 'task 2'\", \"dag\": \"<>\", \"task_id\": \"run_another_data_task\"}, \"_BaseOperator__instantiated\": true, \"_dag\": \"<>\", \"_lock_for_execution\": true, \"_log\": \"<>\", \"append_env\": false, \"bash_command\": \"echo 'task 2'\", \"depends_on_past\": false, \"do_xcom_push\": true, \"downstream_task_ids\": [], \"email_on_failure\": true, \"email_on_retry\": true, \"executor_config\": {}, \"ignore_first_depends_on_past\": true, \"inlets\": [], \"outlets\": [], \"output_encoding\": \"utf-8\", \"owner\": \"airflow\", \"params\": \"<>\", \"pool\": \"default_pool\", \"pool_slots\": 1, \"priority_weight\": 1, \"queue\": \"default\", \"retries\": 0, \"retry_delay\": \"<>\", \"retry_exponential_backoff\": false, \"skip_on_exit_code\": [99], \"start_date\": \"<>\", \"task_group\": \"<>\", \"task_id\": \"run_another_data_task\", \"trigger_rule\": \"all_success\", \"upstream_task_ids\": [\"task_1\"], \"wait_for_downstream\": false, \"wait_for_past_depends_before_skipping\": false, \"weight_rule\": \"downstream\"}, \"type\": \"operator\"}]}" }, "externalUrl": "http://airflow.example.com/taskinstance/list/?flt1_dag_id_equals=simple_dag&_flt_3_task_id=run_another_data_task", "name": "run_another_data_task", @@ -662,7 +662,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1698671180730, + "timestampMillis": 1701223355580, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json index ec0f3cab1e81f..7860251fc22dc 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_simple_dag_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -183,9 +183,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:10:10.856995+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -202,7 +202,7 @@ "name": "simple_dag_task_1_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643410856, + "time": 1701223113232, "actor": "urn:li:corpuser:datahub" } } @@ -287,7 +287,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643410856, + "timestampMillis": 1701223113232, "partitionSpec": { 
"type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -421,7 +421,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643411390, + "timestampMillis": 1701223113778, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -446,7 +446,7 @@ "catchup": "False", "description": "'A simple DAG that runs a few fake data tasks.'", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/simple_dag.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -580,9 +580,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:10:15.128009+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -599,7 +599,7 @@ "name": "simple_dag_run_another_data_task_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643415128, + "time": 1701223119777, "actor": "urn:li:corpuser:datahub" } } @@ -624,7 +624,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643415128, + "timestampMillis": 1701223119777, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -720,7 +720,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643415856, + "timestampMillis": 1701223120456, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json index 0a704ed10c911..1bf0820c7cb41 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_snowflake_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/snowflake_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -226,9 +226,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-09-30 06:55:36.844976+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -245,7 +245,7 @@ "name": "snowflake_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1696056936844, + "time": 1701223475050, "actor": "urn:li:corpuser:datahub" } } @@ -318,7 +318,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056936844, + "timestampMillis": 1701223475050, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -496,7 +496,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1696056938096, + "timestampMillis": 1701223476665, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git 
a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json index 3b4b60174f99f..3965ee4a10ad0 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/shubham/airflow1/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -193,9 +193,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:10.262813+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -212,7 +212,7 @@ "name": "sqlite_operator_create_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401750262, + "time": 1701223533895, "actor": "urn:li:corpuser:datahub" } } @@ -261,7 +261,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401750262, + "timestampMillis": 1701223533895, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -442,7 +442,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401750651, + "timestampMillis": 1701223534302, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -557,9 +557,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:15.013834+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -576,7 +576,7 @@ "name": "sqlite_operator_populate_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401755013, + "time": 1701223539348, "actor": "urn:li:corpuser:datahub" } } @@ -625,7 +625,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401755013, + "timestampMillis": 1701223539348, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -735,7 +735,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401755600, + "timestampMillis": 1701223540058, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -920,9 +920,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:20.216818+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -939,7 +939,7 @@ "name": "sqlite_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401760216, + "time": 1701223548187, "actor": "urn:li:corpuser:datahub" } } @@ -1012,7 +1012,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401760216, + "timestampMillis": 1701223548187, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1248,7 +1248,7 @@ 
"aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401761237, + "timestampMillis": 1701223549416, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1365,9 +1365,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:26.243934+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1384,7 +1384,7 @@ "name": "sqlite_operator_cleanup_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401766243, + "time": 1701223557795, "actor": "urn:li:corpuser:datahub" } } @@ -1433,7 +1433,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401766243, + "timestampMillis": 1701223557795, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1545,7 +1545,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401767373, + "timestampMillis": 1701223559079, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1662,9 +1662,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-10-15 20:29:32.075613+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1681,7 +1681,7 @@ "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1697401772075, + "time": 1701223564459, "actor": "urn:li:corpuser:datahub" } } @@ -1730,7 +1730,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401772075, + "timestampMillis": 1701223564459, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1842,7 +1842,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1697401773454, + "timestampMillis": 1701223566107, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json index 99a8aadb7fd9c..a9f9fbac56fff 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/goldens/v2_sqlite_operator_no_dag_listener.json @@ -11,7 +11,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -193,9 +193,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:17.805860+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -212,7 +212,7 @@ "name": "sqlite_operator_create_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643537805, + "time": 
1701223251992, "actor": "urn:li:corpuser:datahub" } } @@ -261,7 +261,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643537805, + "timestampMillis": 1701223251992, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -442,7 +442,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643538759, + "timestampMillis": 1701223253042, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -467,7 +467,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -614,9 +614,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:22.560376+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -633,7 +633,7 @@ "name": "sqlite_operator_populate_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643542560, + "time": 1701223258947, "actor": "urn:li:corpuser:datahub" } } @@ -682,7 +682,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643542560, + "timestampMillis": 1701223258947, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -792,7 +792,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643543925, + "timestampMillis": 1701223260414, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -817,7 +817,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1034,9 +1034,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:29.429032+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1053,7 +1053,7 @@ "name": "sqlite_operator_transform_cost_table_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643549429, + "time": 1701223266595, "actor": "urn:li:corpuser:datahub" } } @@ -1126,7 +1126,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643549429, + "timestampMillis": 1701223266595, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1362,7 +1362,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643551423, + "timestampMillis": 1701223268728, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1387,7 +1387,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, 
tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1536,9 +1536,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:37.423556+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1555,7 +1555,7 @@ "name": "sqlite_operator_cleanup_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643557423, + "time": 1701223275045, "actor": "urn:li:corpuser:datahub" } } @@ -1604,7 +1604,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643557423, + "timestampMillis": 1701223275045, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1716,7 +1716,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643559607, + "timestampMillis": 1701223277378, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -1741,7 +1741,7 @@ "catchup": "False", "description": "None", "doc_md": "None", - "fileloc": "'/Users/hsheth/projects/datahub/metadata-ingestion-modules/airflow-plugin/tests/integration/dags/sqlite_operator.py'", + "fileloc": "", "is_paused_upon_creation": "None", "start_date": "DateTime(2023, 1, 1, 0, 0, 0, tzinfo=Timezone('UTC'))", "tags": "[]", @@ -1890,9 +1890,9 @@ "json": { "customProperties": { "run_id": "manual_run_test", - "duration": "None", - "start_date": "2023-11-10 19:12:43.792375+00:00", - "end_date": "None", + "duration": "", + "start_date": "", + "end_date": "", "execution_date": "2023-09-27 21:34:38+00:00", "try_number": "0", "max_tries": "0", @@ -1909,7 +1909,7 @@ "name": "sqlite_operator_cleanup_processed_costs_manual_run_test", "type": "BATCH_AD_HOC", "created": { - "time": 1699643563792, + "time": 1701223282010, "actor": "urn:li:corpuser:datahub" } } @@ -1958,7 +1958,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643563792, + "timestampMillis": 1701223282010, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -2070,7 +2070,7 @@ "aspectName": "dataProcessInstanceRunEvent", "aspect": { "json": { - "timestampMillis": 1699643566350, + "timestampMillis": 1701223284766, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" diff --git a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py index a2b7fd151a1e4..0c5d11f693eef 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/integration/test_plugin.py @@ -1,6 +1,7 @@ import contextlib import dataclasses import functools +import json import logging import os import pathlib @@ -8,12 +9,13 @@ import signal import subprocess import time -from typing import Iterator, Sequence +from typing import Any, Iterator, Sequence import pytest import requests import tenacity from airflow.models.connection import Connection +from datahub.ingestion.sink.file import write_metadata_file from datahub.testing.compare_metadata_json import assert_metadata_files_equal from datahub_airflow_plugin._airflow_shims import ( @@ -358,26 +360,58 @@ def test_airflow_plugin( print("Sleeping for a few seconds to let the plugin finish...") time.sleep(10) + _sanitize_output_file(airflow_instance.metadata_file) + check_golden_file( pytestconfig=pytestconfig, 
output_path=airflow_instance.metadata_file, golden_path=golden_path, ignore_paths=[ # Timing-related items. - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['start_date'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['end_date'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['duration'\]", - # Host-specific items. - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['pid'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['hostname'\]", - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['unixname'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['start_date'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['end_date'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['duration'\]", # TODO: If we switched to Git urls, maybe we could get this to work consistently. - r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['fileloc'\]", + # r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['fileloc'\]", r"root\[\d+\]\['aspect'\]\['json'\]\['customProperties'\]\['openlineage_.*'\]", ], ) +def _sanitize_output_file(output_path: pathlib.Path) -> None: + # Overwrite some custom properties in the output file to make it easier to compare. + + props_job = { + "fileloc": "", + } + props_process = { + "start_date": "", + "end_date": "", + "duration": "", + } + + def _sanitize(obj: Any) -> None: + if isinstance(obj, dict) and "customProperties" in obj: + replacement_props = ( + props_process if "run_id" in obj["customProperties"] else props_job + ) + obj["customProperties"] = { + k: replacement_props.get(k, v) + for k, v in obj["customProperties"].items() + } + elif isinstance(obj, dict): + for v in obj.values(): + _sanitize(v) + elif isinstance(obj, list): + for v in obj: + _sanitize(v) + + objs = json.loads(output_path.read_text()) + _sanitize(objs) + + write_metadata_file(output_path, objs) + + if __name__ == "__main__": # When run directly, just set up a local airflow instance. import tempfile diff --git a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py index 6c42e830e223b..1ec74b94179d5 100644 --- a/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py +++ b/metadata-ingestion/src/datahub/api/entities/datajob/datajob.py @@ -40,7 +40,8 @@ class DataJob: group_owners Set[str]): A list of group ids that own this job. 
inlets (List[str]): List of urns the DataProcessInstance consumes outlets (List[str]): List of urns the DataProcessInstance produces - input_datajob_urns: List[DataJobUrn] = field(default_factory=list) + fine_grained_lineages: Column lineage for the inlets and outlets + upstream_urns: List[DataJobUrn] = field(default_factory=list) """ id: str From f9fd9467ef14cd5b39cac4c71e214d9088f0f9a1 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 30 Nov 2023 21:00:43 -0500 Subject: [PATCH 027/263] feat(ingest): clean up DataHubRestEmitter return type (#9286) Co-authored-by: Andrew Sikowitz --- .../config/HomePageOnboardingConfig.tsx | 3 +-- docs/how/updating-datahub.md | 1 + .../datahub_airflow_plugin/hooks/datahub.py | 23 ++++++++++++++----- .../airflow-plugin/tests/unit/test_airflow.py | 8 +++---- .../src/datahub/emitter/generic_emitter.py | 7 ++---- .../src/datahub/emitter/rest_emitter.py | 19 ++++++++------- .../datahub/ingestion/sink/datahub_rest.py | 20 ++++++++++++---- .../tests/test_helpers/graph_helpers.py | 12 +++++----- .../tests/unit/test_rest_emitter.py | 6 +++++ 9 files changed, 62 insertions(+), 37 deletions(-) diff --git a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx index 28a0465a1b2f7..8b361db5ab344 100644 --- a/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx +++ b/datahub-web-react/src/app/onboarding/config/HomePageOnboardingConfig.tsx @@ -94,8 +94,7 @@ export const HomePageOnboardingConfig: OnboardingStep[] = [ Here are your organization's Data Platforms. Data Platforms represent specific third-party Data Systems or Tools. Examples include Data Warehouses like Snowflake, - Orchestrators like - Airflow, and Dashboarding tools like Looker. + Orchestrators like Airflow, and Dashboarding tools like Looker. ), }, diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index dad05fd0153f2..df179b0d0d2f7 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -11,6 +11,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. It has been deprecated since v0.9.6. - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the director constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. +- #9286: The `DataHubRestEmitter.emit` method no longer returns anything. It previously returned a tuple of timestamps. 
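For emitter callers that previously used the returned timestamps (e.g. for timing), a minimal migration sketch follows; the endpoint, urn, and aspect below are illustrative placeholders, not part of this change:

```python
from datetime import datetime

from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DataHubRestEmitter
from datahub.metadata.schema_classes import StatusClass

# Assumed local GMS endpoint; substitute your own server (and token if needed).
emitter = DataHubRestEmitter("http://localhost:8080")

mcp = MetadataChangeProposalWrapper(
    entityUrn="urn:li:dataset:(urn:li:dataPlatform:hive,example.table,PROD)",
    aspect=StatusClass(removed=False),
)

# emit() now returns None, so measure elapsed time at the call site instead.
start = datetime.now()
emitter.emit(mcp)
print(f"emit took {(datetime.now() - start).total_seconds():.3f}s")
```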
### Potential Downtime diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py index 9604931795ccb..b60f20c5bf8b2 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/hooks/datahub.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: from airflow.models.connection import Connection from datahub.emitter.kafka_emitter import DatahubKafkaEmitter - from datahub.emitter.rest_emitter import DatahubRestEmitter + from datahub.emitter.rest_emitter import DataHubRestEmitter from datahub.emitter.synchronized_file_emitter import SynchronizedFileEmitter from datahub.ingestion.sink.datahub_kafka import KafkaSinkConfig @@ -63,6 +63,13 @@ def test_connection(self) -> Tuple[bool, str]: return True, "Successfully connected to DataHub." def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: + # We have a few places in the codebase that use this method directly, despite + # it being "private". For now, we retain backwards compatibility by keeping + # this method around, but should stop using it in the future. + host, token, extra_args = self._get_config_v2() + return host, token, extra_args.get("timeout_sec") + + def _get_config_v2(self) -> Tuple[str, Optional[str], Dict]: conn: "Connection" = self.get_connection(self.datahub_rest_conn_id) host = conn.host @@ -74,14 +81,18 @@ def _get_config(self) -> Tuple[str, Optional[str], Optional[int]]: "host parameter should not contain a port number if the port is specified separately" ) host = f"{host}:{conn.port}" - password = conn.password - timeout_sec = conn.extra_dejson.get("timeout_sec") - return (host, password, timeout_sec) + token = conn.password + + extra_args = conn.extra_dejson + return (host, token, extra_args) - def make_emitter(self) -> "DatahubRestEmitter": + def make_emitter(self) -> "DataHubRestEmitter": import datahub.emitter.rest_emitter - return datahub.emitter.rest_emitter.DatahubRestEmitter(*self._get_config()) + host, token, extra_args = self._get_config_v2() + return datahub.emitter.rest_emitter.DataHubRestEmitter( + host, token, **extra_args + ) def emit( self, diff --git a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py index 7fbf707995994..93b4af0501985 100644 --- a/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py +++ b/metadata-ingestion-modules/airflow-plugin/tests/unit/test_airflow.py @@ -99,19 +99,19 @@ def patch_airflow_connection(conn: Connection) -> Iterator[Connection]: yield conn -@mock.patch("datahub.emitter.rest_emitter.DatahubRestEmitter", autospec=True) +@mock.patch("datahub.emitter.rest_emitter.DataHubRestEmitter", autospec=True) def test_datahub_rest_hook(mock_emitter): with patch_airflow_connection(datahub_rest_connection_config) as config: assert config.conn_id hook = DatahubRestHook(config.conn_id) hook.emit_mces([lineage_mce]) - mock_emitter.assert_called_once_with(config.host, None, None) + mock_emitter.assert_called_once_with(config.host, None) instance = mock_emitter.return_value instance.emit.assert_called_with(lineage_mce) -@mock.patch("datahub.emitter.rest_emitter.DatahubRestEmitter", autospec=True) +@mock.patch("datahub.emitter.rest_emitter.DataHubRestEmitter", autospec=True) def test_datahub_rest_hook_with_timeout(mock_emitter): with patch_airflow_connection( 
datahub_rest_connection_config_with_timeout @@ -120,7 +120,7 @@ def test_datahub_rest_hook_with_timeout(mock_emitter): hook = DatahubRestHook(config.conn_id) hook.emit_mces([lineage_mce]) - mock_emitter.assert_called_once_with(config.host, None, 5) + mock_emitter.assert_called_once_with(config.host, None, timeout_sec=5) instance = mock_emitter.return_value instance.emit.assert_called_with(lineage_mce) diff --git a/metadata-ingestion/src/datahub/emitter/generic_emitter.py b/metadata-ingestion/src/datahub/emitter/generic_emitter.py index 28138c6182758..54b3d6841fe9c 100644 --- a/metadata-ingestion/src/datahub/emitter/generic_emitter.py +++ b/metadata-ingestion/src/datahub/emitter/generic_emitter.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Optional, Union +from typing import Callable, Optional, Union from typing_extensions import Protocol @@ -21,10 +21,7 @@ def emit( # required. However, this would be a breaking change that may need # more careful consideration. callback: Optional[Callable[[Exception, str], None]] = None, - # TODO: The rest emitter returns timestamps as the return type. For now - # we smooth over that detail using Any, but eventually we should - # standardize on a return type. - ) -> Any: + ) -> None: raise NotImplementedError def flush(self) -> None: diff --git a/metadata-ingestion/src/datahub/emitter/rest_emitter.py b/metadata-ingestion/src/datahub/emitter/rest_emitter.py index afb19df9791af..4598c7faa2105 100644 --- a/metadata-ingestion/src/datahub/emitter/rest_emitter.py +++ b/metadata-ingestion/src/datahub/emitter/rest_emitter.py @@ -1,10 +1,9 @@ -import datetime import functools import json import logging import os from json.decoder import JSONDecodeError -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union import requests from deprecated import deprecated @@ -60,6 +59,7 @@ def __init__( self, gms_server: str, token: Optional[str] = None, + timeout_sec: Optional[float] = None, connect_timeout_sec: Optional[float] = None, read_timeout_sec: Optional[float] = None, retry_status_codes: Optional[List[int]] = None, @@ -103,11 +103,12 @@ def __init__( if disable_ssl_verification: self._session.verify = False - if connect_timeout_sec: - self._connect_timeout_sec = connect_timeout_sec - - if read_timeout_sec: - self._read_timeout_sec = read_timeout_sec + self._connect_timeout_sec = ( + connect_timeout_sec or timeout_sec or _DEFAULT_CONNECT_TIMEOUT_SEC + ) + self._read_timeout_sec = ( + read_timeout_sec or timeout_sec or _DEFAULT_READ_TIMEOUT_SEC + ) if self._connect_timeout_sec < 1 or self._read_timeout_sec < 1: logger.warning( @@ -208,8 +209,7 @@ def emit( UsageAggregation, ], callback: Optional[Callable[[Exception, str], None]] = None, - ) -> Tuple[datetime.datetime, datetime.datetime]: - start_time = datetime.datetime.now() + ) -> None: try: if isinstance(item, UsageAggregation): self.emit_usage(item) @@ -226,7 +226,6 @@ def emit( else: if callback: callback(None, "success") # type: ignore - return start_time, datetime.datetime.now() def emit_mce(self, mce: MetadataChangeEvent) -> None: url = f"{self._gms_server}/entities?action=ingest" diff --git a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py index d3abde0d36993..fedd8520dde4d 100644 --- a/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py +++ b/metadata-ingestion/src/datahub/ingestion/sink/datahub_rest.py @@ -4,10 
+4,10 @@ import logging from concurrent.futures import ThreadPoolExecutor from dataclasses import dataclass -from datetime import timedelta +from datetime import datetime, timedelta from enum import auto from threading import BoundedSemaphore -from typing import Union +from typing import Tuple, Union from datahub.cli.cli_utils import set_env_variables_override_config from datahub.configuration.common import ( @@ -181,6 +181,18 @@ def _write_done_callback( self.report.report_failure({"e": e}) write_callback.on_failure(record_envelope, Exception(e), {}) + def _emit_wrapper( + self, + record: Union[ + MetadataChangeEvent, + MetadataChangeProposal, + MetadataChangeProposalWrapper, + ], + ) -> Tuple[datetime, datetime]: + start_time = datetime.now() + self.emitter.emit(record) + return start_time, datetime.now() + def write_record_async( self, record_envelope: RecordEnvelope[ @@ -194,7 +206,7 @@ def write_record_async( ) -> None: record = record_envelope.record if self.config.mode == SyncOrAsync.ASYNC: - write_future = self.executor.submit(self.emitter.emit, record) + write_future = self.executor.submit(self._emit_wrapper, record) write_future.add_done_callback( functools.partial( self._write_done_callback, record_envelope, write_callback @@ -204,7 +216,7 @@ def write_record_async( else: # execute synchronously try: - (start, end) = self.emitter.emit(record) + (start, end) = self._emit_wrapper(record) write_callback.on_success(record_envelope, success_metadata={}) except Exception as e: write_callback.on_failure(record_envelope, e, failure_metadata={}) diff --git a/metadata-ingestion/tests/test_helpers/graph_helpers.py b/metadata-ingestion/tests/test_helpers/graph_helpers.py index 4c2c46c2f97ce..2e73f5e2c6cdb 100644 --- a/metadata-ingestion/tests/test_helpers/graph_helpers.py +++ b/metadata-ingestion/tests/test_helpers/graph_helpers.py @@ -1,6 +1,5 @@ -from datetime import datetime from pathlib import Path -from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type, Union +from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union from datahub.emitter.mce_builder import Aspect from datahub.emitter.mcp import MetadataChangeProposalWrapper @@ -22,7 +21,9 @@ class MockDataHubGraph(DataHubGraph): - def __init__(self, entity_graph: Dict[str, Dict[str, Any]] = {}) -> None: + def __init__( + self, entity_graph: Optional[Dict[str, Dict[str, Any]]] = None + ) -> None: self.emitted: List[ Union[ MetadataChangeEvent, @@ -30,7 +31,7 @@ def __init__(self, entity_graph: Dict[str, Dict[str, Any]] = {}) -> None: MetadataChangeProposalWrapper, ] ] = [] - self.entity_graph = entity_graph + self.entity_graph = entity_graph or {} def import_file(self, file: Path) -> None: """Imports metadata from any MCE/MCP file. Does not clear prior loaded data. 
@@ -110,9 +111,8 @@ def emit( UsageAggregationClass, ], callback: Union[Callable[[Exception, str], None], None] = None, - ) -> Tuple[datetime, datetime]: + ) -> None: self.emitted.append(item) # type: ignore - return (datetime.now(), datetime.now()) def emit_mce(self, mce: MetadataChangeEvent) -> None: self.emitted.append(mce) diff --git a/metadata-ingestion/tests/unit/test_rest_emitter.py b/metadata-ingestion/tests/unit/test_rest_emitter.py index e56cbd2c41c6b..b4d7cb17b66f5 100644 --- a/metadata-ingestion/tests/unit/test_rest_emitter.py +++ b/metadata-ingestion/tests/unit/test_rest_emitter.py @@ -20,6 +20,12 @@ def test_datahub_rest_emitter_timeout_construction(): assert emitter._read_timeout_sec == 4 +def test_datahub_rest_emitter_general_timeout_construction(): + emitter = DatahubRestEmitter(MOCK_GMS_ENDPOINT, timeout_sec=2, read_timeout_sec=4) + assert emitter._connect_timeout_sec == 2 + assert emitter._read_timeout_sec == 4 + + def test_datahub_rest_emitter_retry_construction(): emitter = DatahubRestEmitter( MOCK_GMS_ENDPOINT, From 4d9eb12cba3a36ca30a7b07fea9aeb6a13443522 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 1 Dec 2023 04:03:10 -0500 Subject: [PATCH 028/263] feat(ingest/dbt): support custom ownership types in dbt meta (#9332) --- metadata-ingestion/docs/sources/dbt/dbt.md | 2 +- .../src/datahub/utilities/mapping.py | 10 ++++++++- metadata-ingestion/tests/unit/test_mapping.py | 22 +++++++++++++++++-- 3 files changed, 30 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/docs/sources/dbt/dbt.md b/metadata-ingestion/docs/sources/dbt/dbt.md index 43ced13c3b1f8..6cc8772871c2f 100644 --- a/metadata-ingestion/docs/sources/dbt/dbt.md +++ b/metadata-ingestion/docs/sources/dbt/dbt.md @@ -62,7 +62,7 @@ We support the following operations: 1. add_tag - Requires `tag` property in config. 2. add_term - Requires `term` property in config. 3. add_terms - Accepts an optional `separator` property in config. -4. add_owner - Requires `owner_type` property in config which can be either user or group. Optionally accepts the `owner_category` config property which you can set to one of `['TECHNICAL_OWNER', 'BUSINESS_OWNER', 'DATA_STEWARD', 'DATAOWNER']` (defaults to `DATAOWNER`). +4. add_owner - Requires `owner_type` property in config which can be either user or group. Optionally accepts the `owner_category` config property which can be set to either a [custom ownership type](../../../../docs/ownership/ownership-types.md) urn like `urn:li:ownershipType:architect` or one of `['TECHNICAL_OWNER', 'BUSINESS_OWNER', 'DATA_STEWARD', 'DATAOWNER']` (defaults to `DATAOWNER`). 5. add_doc_link - Requires `link` and `description` properties in config. Upon ingestion run, this will overwrite current links in the institutional knowledge section with this new link. The anchor text is defined here in the meta_mappings as `description`. 
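For illustration, a hypothetical `meta_mapping` recipe snippet that uses `add_owner` with a custom ownership type urn (the `data_governor` meta key is invented for this example; the urn matches the one exercised in the tests below):

```yaml
meta_mapping:
  data_governor:
    match: ".*"
    operation: "add_owner"
    config:
      owner_type: user
      owner_category: "urn:li:ownershipType:architect"
```

With this mapping, a dbt node whose meta includes `data_governor: alice` would be ingested with `urn:li:corpuser:alice` as an owner carrying the custom ownership type.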
Note: diff --git a/metadata-ingestion/src/datahub/utilities/mapping.py b/metadata-ingestion/src/datahub/utilities/mapping.py index f91c01d901ac1..00f7d370d1676 100644 --- a/metadata-ingestion/src/datahub/utilities/mapping.py +++ b/metadata-ingestion/src/datahub/utilities/mapping.py @@ -191,6 +191,7 @@ def convert_to_aspects( OwnerClass( owner=x.get("urn"), type=x.get("category"), + typeUrn=x.get("categoryUrn"), source=OwnershipSourceClass(type=self.owner_source_type) if self.owner_source_type else None, @@ -281,18 +282,25 @@ def get_operation_value( operation_config.get(Constants.OWNER_CATEGORY) or OwnershipTypeClass.DATAOWNER ) - owner_category = owner_category.upper() + owner_category_urn = None + if owner_category.startswith("urn:li:"): + owner_category_urn = owner_category + owner_category = OwnershipTypeClass.DATAOWNER + else: + owner_category = owner_category.upper() if self.strip_owner_email_id: owner_id = self.sanitize_owner_ids(owner_id) if operation_config[Constants.OWNER_TYPE] == Constants.USER_OWNER: return { "urn": mce_builder.make_owner_urn(owner_id, OwnerType.USER), "category": owner_category, + "categoryUrn": owner_category_urn, } elif operation_config[Constants.OWNER_TYPE] == Constants.GROUP_OWNER: return { "urn": mce_builder.make_owner_urn(owner_id, OwnerType.GROUP), "category": owner_category, + "categoryUrn": owner_category_urn, } elif ( operation_type == Constants.ADD_TERM_OPERATION diff --git a/metadata-ingestion/tests/unit/test_mapping.py b/metadata-ingestion/tests/unit/test_mapping.py index 5c258f16535f8..de35451c9ec4b 100644 --- a/metadata-ingestion/tests/unit/test_mapping.py +++ b/metadata-ingestion/tests/unit/test_mapping.py @@ -174,7 +174,11 @@ def test_operation_processor_advanced_matching_owners(): def test_operation_processor_ownership_category(): - raw_props = {"user_owner": "@test_user", "business_owner": "alice"} + raw_props = { + "user_owner": "@test_user", + "business_owner": "alice", + "architect": "bob", + } processor = OperationProcessor( operation_defs={ "user_owner": { @@ -193,6 +197,14 @@ def test_operation_processor_ownership_category(): "owner_category": OwnershipTypeClass.BUSINESS_OWNER, }, }, + "architect": { + "match": ".*", + "operation": "add_owner", + "config": { + "owner_type": "user", + "owner_category": "urn:li:ownershipType:architect", + }, + }, }, owner_source_type="SOURCE_CONTROL", ) @@ -200,7 +212,7 @@ def test_operation_processor_ownership_category(): assert "add_owner" in aspect_map ownership_aspect: OwnershipClass = aspect_map["add_owner"] - assert len(ownership_aspect.owners) == 2 + assert len(ownership_aspect.owners) == 3 new_owner: OwnerClass = ownership_aspect.owners[0] assert new_owner.owner == "urn:li:corpGroup:test_user" assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" @@ -211,6 +223,12 @@ def test_operation_processor_ownership_category(): assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" assert new_owner.type and new_owner.type == OwnershipTypeClass.BUSINESS_OWNER + new_owner = ownership_aspect.owners[2] + assert new_owner.owner == "urn:li:corpuser:bob" + assert new_owner.source and new_owner.source.type == "SOURCE_CONTROL" + assert new_owner.type == OwnershipTypeClass.DATAOWNER # dummy value + assert new_owner.typeUrn == "urn:li:ownershipType:architect" + def test_operation_processor_advanced_matching_tags(): raw_props = { From 82f375ded6c98160ad9edbe6488cbc16b2a01d22 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 1 Dec 2023 04:03:20 -0500 Subject: [PATCH 029/263] 
docs(ingest/lookml): clarify that ssh key has no passphrase (#9348) --- docs/quick-ingestion-guides/looker/setup.md | 3 ++- metadata-ingestion/docs/sources/looker/lookml_pre.md | 2 +- metadata-ingestion/src/datahub/configuration/git.py | 4 +++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/quick-ingestion-guides/looker/setup.md b/docs/quick-ingestion-guides/looker/setup.md index c08de116895ea..81c2c9e4ba08c 100644 --- a/docs/quick-ingestion-guides/looker/setup.md +++ b/docs/quick-ingestion-guides/looker/setup.md @@ -129,7 +129,8 @@ Follow the below steps to create the GitHub Deploy Key. ### Generate a private-public SSH key pair ```bash - ssh-keygen -t rsa -f looker_datahub_deploy_key +ssh-keygen -t rsa -f looker_datahub_deploy_key +# If prompted, don't add a passphrase to the key ``` This will typically generate two files like the one below. diff --git a/metadata-ingestion/docs/sources/looker/lookml_pre.md b/metadata-ingestion/docs/sources/looker/lookml_pre.md index d78a30fe6ec37..68a4828a5ce2a 100644 --- a/metadata-ingestion/docs/sources/looker/lookml_pre.md +++ b/metadata-ingestion/docs/sources/looker/lookml_pre.md @@ -6,7 +6,7 @@ To use LookML ingestion through the UI, or automate github checkout through the In a nutshell, there are three steps: -1. Generate a private-public ssh key pair. This will typically generate two files, e.g. looker_datahub_deploy_key (this is the private key) and looker_datahub_deploy_key.pub (this is the public key) +1. Generate a private-public ssh key pair. This will typically generate two files, e.g. looker_datahub_deploy_key (this is the private key) and looker_datahub_deploy_key.pub (this is the public key). Do not add a passphrase. ![Image](https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/gitssh/ssh-key-generation.png) 2. Add the public key to your Looker git repo as a deploy key with read access (no need to provision write access). Follow the guide [here](https://docs.github.com/en/developers/overview/managing-deploy-keys#deploy-keys) for that. diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index 0c7d64d4aafcf..80eb41c100b10 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -77,7 +77,9 @@ class GitInfo(GitReference): deploy_key_file: Optional[FilePath] = Field( None, - description="A private key file that contains an ssh key that has been configured as a deploy key for this repository. Use a file where possible, else see deploy_key for a config field that accepts a raw string.", + description="A private key file that contains an ssh key that has been configured as a deploy key for this repository. " + "Use a file where possible, else see deploy_key for a config field that accepts a raw string. 
" + "We expect the key not have a passphrase.", ) deploy_key: Optional[SecretStr] = Field( None, From 3142efcad5a06c06d5546b05b7f259c1eba109c5 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Fri, 1 Dec 2023 14:55:26 +0530 Subject: [PATCH 030/263] fix(migrate): connect with token without dry-run (#9317) --- metadata-ingestion/src/datahub/cli/migrate.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/metadata-ingestion/src/datahub/cli/migrate.py b/metadata-ingestion/src/datahub/cli/migrate.py index e83a8ed8feaad..30f82987a6b65 100644 --- a/metadata-ingestion/src/datahub/cli/migrate.py +++ b/metadata-ingestion/src/datahub/cli/migrate.py @@ -23,7 +23,7 @@ SchemaKey, ) from datahub.emitter.rest_emitter import DatahubRestEmitter -from datahub.ingestion.graph.client import DataHubGraph, DataHubGraphConfig +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph from datahub.metadata.schema_classes import ( ContainerKeyClass, ContainerPropertiesClass, @@ -141,13 +141,7 @@ def dataplatform2instance_func( migration_report = MigrationReport(run_id, dry_run, keep) system_metadata = SystemMetadataClass(runId=run_id) - # initialize for dry-run - graph = DataHubGraph(config=DataHubGraphConfig(server="127.0.0.1")) - - if not dry_run: - graph = DataHubGraph( - config=DataHubGraphConfig(server=cli_utils.get_session_and_host()[1]) - ) + graph = get_default_graph() urns_to_migrate = [] From 864d3dfa16b6abbb09361f52112dbb4b95bf6775 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 09:18:07 -0800 Subject: [PATCH 031/263] fix(ui): Minor: fix unnecessary lineage tab scroll by removing -1 margin on lists (#9364) --- .../src/app/entity/shared/tabs/Entity/components/EntityList.tsx | 1 - .../app/recommendations/renderer/component/EntityNameList.tsx | 1 - 2 files changed, 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx index 758b070864a9a..3a9061fd97d6e 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Entity/components/EntityList.tsx @@ -8,7 +8,6 @@ import { EntityType } from '../../../../../../types.generated'; const StyledList = styled(List)` padding-left: 40px; padding-right: 40px; - margin-top: -1px; .ant-list-items > .ant-list-item { padding-right: 0px; padding-left: 0px; diff --git a/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx b/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx index 4ff78e64625b1..9e8454ae22317 100644 --- a/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx +++ b/datahub-web-react/src/app/recommendations/renderer/component/EntityNameList.tsx @@ -11,7 +11,6 @@ import { capitalizeFirstLetterOnly } from '../../../shared/textUtil'; export const StyledList = styled(List)` overflow-y: auto; height: 100%; - margin-top: -1px; box-shadow: ${(props) => props.theme.styles['box-shadow']}; flex: 1; .ant-list-items > .ant-list-item { From 36c7813f89b1f20898e07f24c5f209f5c57947d7 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Fri, 1 Dec 2023 23:18:39 +0530 Subject: [PATCH 032/263] feat(ui): Support dynamic entity profile tab names (#9352) --- .../app/entity/shared/containers/profile/EntityProfile.tsx | 1 + .../entity/shared/containers/profile/header/EntityTabs.tsx | 
5 +++-- datahub-web-react/src/app/entity/shared/types.ts | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx index 74c127cb05dd9..d7b7a4da804ef 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/EntityProfile.tsx @@ -238,6 +238,7 @@ export const EntityProfile = ({ visible: () => true, enabled: () => true, }, + getDynamicName: () => '', })) || []; const visibleTabs = [...sortedTabs, ...autoRenderTabs].filter((tab) => diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index ea5c263ef7abc..096f1db617d92 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -44,10 +44,11 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { onTabClick={(tab: string) => routeToTab({ tabName: tab })} > {tabs.map((tab) => { + const tabName = (tab.getDynamicName && tab.getDynamicName(entityData, baseEntity)) || tab.name; if (!tab.display?.enabled(entityData, baseEntity)) { - return ; + return ; } - return ; + return ; })} ); diff --git a/datahub-web-react/src/app/entity/shared/types.ts b/datahub-web-react/src/app/entity/shared/types.ts index 6596711d4e82a..ae8ab747f7cb6 100644 --- a/datahub-web-react/src/app/entity/shared/types.ts +++ b/datahub-web-react/src/app/entity/shared/types.ts @@ -50,6 +50,7 @@ export type EntityTab = { }; properties?: any; id?: string; + getDynamicName?: (GenericEntityProperties, T) => string; }; export type EntitySidebarSection = { From 7b0a8f422b02c47ffb4fe2ddd5f61c7230de0c03 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 1 Dec 2023 14:23:11 -0500 Subject: [PATCH 033/263] docs: add setup instructions for mac dependencies (#9346) Co-authored-by: Hyejin Yoon <0327jane@gmail.com> --- docs/developers.md | 140 ++++++++++++++++++++++++++++----------------- 1 file changed, 89 insertions(+), 51 deletions(-) diff --git a/docs/developers.md b/docs/developers.md index 52fd7d356a44c..c3c3a59283e66 100644 --- a/docs/developers.md +++ b/docs/developers.md @@ -4,33 +4,53 @@ title: "Local Development" # DataHub Developer's Guide -## Pre-requirements - - [Java 11 SDK](https://openjdk.org/projects/jdk/11/) - - [Python 3.10] (https://www.python.org/downloads/release/python-3100/) - - [Docker](https://www.docker.com/) - - [Docker Compose](https://docs.docker.com/compose/) - - Docker engine with at least 8GB of memory to run tests. +## Requirements - :::note +- Both [Java 11 JDK](https://openjdk.org/projects/jdk/11/) and [Java 8 JDK](https://openjdk.java.net/projects/jdk8/) +- [Python 3.10](https://www.python.org/downloads/release/python-3100/) +- [Docker](https://www.docker.com/) +- [Docker Compose](https://docs.docker.com/compose/) +- Docker engine with at least 8GB of memory to run tests. - Do not try to use a JDK newer than JDK 11. The build process does not work with newer JDKs currently. +:::caution - ::: +Do not try to use a JDK newer than JDK 11. The build process does not currently work with newer JDKs versions. + +::: + +On macOS, these can be installed using [Homebrew](https://brew.sh/). 
+ +```shell +# Install Java 8 and 11 +brew tap homebrew/cask-versions +brew install java11 +brew install --cask zulu8 + +# Install Python +brew install python@3.10 # you may need to add this to your PATH +# alternatively, you can use pyenv to manage your python versions + +# Install docker and docker compose +brew install --cask docker +``` ## Building the Project Fork and clone the repository if haven't done so already -``` + +```shell git clone https://github.com/{username}/datahub.git ``` Change into the repository's root directory -``` + +```shell cd datahub ``` Use [gradle wrapper](https://docs.gradle.org/current/userguide/gradle_wrapper.html) to build the project -``` + +```shell ./gradlew build ``` @@ -38,29 +58,37 @@ Note that the above will also run run tests and a number of validations which ma We suggest partially compiling DataHub according to your needs: - - Build Datahub's backend GMS (Generalized metadata service): -``` -./gradlew :metadata-service:war:build -``` - - Build Datahub's frontend: -``` -./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint -``` - - Build DataHub's command line tool: -``` -./gradlew :metadata-ingestion:installDev -``` - - Build DataHub's documentation: -``` -./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript -# To preview the documentation -./gradlew :docs-website:serve -``` +- Build Datahub's backend GMS (Generalized metadata service): + + ``` + ./gradlew :metadata-service:war:build + ``` + +- Build Datahub's frontend: + + ``` + ./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint + ``` + +- Build DataHub's command line tool: -## Deploying local versions + ``` + ./gradlew :metadata-ingestion:installDev + ``` + +- Build DataHub's documentation: + + ``` + ./gradlew :docs-website:yarnLintFix :docs-website:build -x :metadata-ingestion:runPreFlightScript + # To preview the documentation + ./gradlew :docs-website:serve + ``` + +## Deploying Local Versions Run just once to have the local `datahub` cli tool installed in your $PATH -``` + +```shell cd smoke-test/ python3 -m venv venv source venv/bin/activate @@ -70,34 +98,40 @@ cd ../ ``` Once you have compiled & packaged the project or appropriate module you can deploy the entire system via docker-compose by running: -``` + +```shell ./gradlew quickstart ``` Replace whatever container you want in the existing deployment. I.e, replacing datahub's backend (GMS): -``` + +```shell (cd docker && COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub -f docker-compose-without-neo4j.yml -f docker-compose-without-neo4j.override.yml -f docker-compose.dev.yml up -d --no-deps --force-recreate --build datahub-gms) ``` Running the local version of the frontend -``` + +```shell (cd docker && COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub -f docker-compose-without-neo4j.yml -f docker-compose-without-neo4j.override.yml -f docker-compose.dev.yml up -d --no-deps --force-recreate --build datahub-frontend-react) ``` + ## IDE Support -The recommended IDE for DataHub development is [IntelliJ IDEA](https://www.jetbrains.com/idea/). -You can run the following command to generate or update the IntelliJ project file -``` + +The recommended IDE for DataHub development is [IntelliJ IDEA](https://www.jetbrains.com/idea/). +You can run the following command to generate or update the IntelliJ project file. + +```shell ./gradlew idea ``` + Open `datahub.ipr` in IntelliJ to start developing! 
For consistency please import and auto format the code using [LinkedIn IntelliJ Java style](../gradle/idea/LinkedIn%20Style.xml). - ## Windows Compatibility -For optimal performance and compatibility, we strongly recommend building on a Mac or Linux system. +For optimal performance and compatibility, we strongly recommend building on a Mac or Linux system. Please note that we do not actively support Windows in a non-virtualized environment. If you must use Windows, one workaround is to build within a virtualized environment, such as a VM(Virtual Machine) or [WSL(Windows Subsystem for Linux)](https://learn.microsoft.com/en-us/windows/wsl). This approach can help ensure that your build environment remains isolated and s ## Common Build Issues -### Getting `Unsupported class file major version 57` +#### Getting `Unsupported class file major version 57` You're probably using a Java version that's too new for gradle. Run the following command to check your Java version -``` + +```shell java --version ``` + While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11). -### Getting `cannot find symbol` error for `javax.annotation.Generated` +#### Getting `cannot find symbol` error for `javax.annotation.Generated` Similar to the previous issue, please use Java 1.8 to build the project. You can install multiple versions of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details. -### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error +#### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error -This is a [known issue](https://github.com/linkedin/rest.li/issues/287) when building the project on Windows due to a bug in the Pegasus plugin. Please refer to [Windows Compatibility](/docs/developers.md#windows-compatibility). +This is a [known issue](https://github.com/linkedin/rest.li/issues/287) when building the project on Windows due to a bug in the Pegasus plugin. Please refer to [Windows Compatibility](/docs/developers.md#windows-compatibility). -### Various errors related to `generateDataTemplate` or other `generate` tasks +#### Various errors related to `generateDataTemplate` or other `generate` tasks -As we generate quite a few files from the models, it is possible that old generated files may conflict with new model changes. When this happens, a simple `./gradlew clean` should resolve the issue. +As we generate quite a few files from the models, it is possible that old generated files may conflict with new model changes. When this happens, a simple `./gradlew clean` should resolve the issue. -### `Execution failed for task ':metadata-service:restli-servlet-impl:checkRestModel'` +#### `Execution failed for task ':metadata-service:restli-servlet-impl:checkRestModel'` This generally means that an [incompatible change](https://linkedin.github.io/rest.li/modeling/compatibility_check) was introduced to the rest.li API in GMS. 
You'll need to rebuild the snapshots/IDL by running the following command once -``` + +```shell ./gradlew :metadata-service:restli-servlet-impl:build -Prest.model.compatibility=ignore ``` -### `java.io.IOException: No space left on device` +#### `java.io.IOException: No space left on device` This means you're running out of space on your disk to build. Please free up some space or try a different disk. -### `Build failed` for task `./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint` +#### `Build failed` for task `./gradlew :datahub-frontend:dist -x yarnTest -x yarnLint` + This could mean that you need to update your [Yarn](https://yarnpkg.com/getting-started/install) version From f3abfd175e1c142750686b3c8f7b08acadd83a4d Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 13:21:28 -0800 Subject: [PATCH 034/263] feat(ui): Add caching to search, entity profile for better UX (#9362) --- datahub-web-react/src/Mocks.tsx | 149 +++++++++++++++--- .../styled/search/EmbeddedListSearch.tsx | 1 + .../search/EmbeddedListSearchResults.tsx | 11 +- .../containers/profile/header/EntityTabs.tsx | 1 + .../profile/useGetDataForProfile.ts | 1 + ...rateUseSearchResultsViaRelationshipHook.ts | 1 + .../src/app/search/SearchPage.tsx | 2 + .../src/app/search/SearchResultList.tsx | 4 +- .../src/app/search/SearchResults.tsx | 11 +- .../search/SearchResultsLoadingSection.tsx | 33 ++++ .../app/search/__tests__/SearchPage.test.tsx | 95 ++--------- .../src/app/search/filters/BasicFilters.tsx | 4 + .../filters/BasicFiltersLoadingSection.tsx | 27 ++++ .../src/app/search/filters/SearchFilters.tsx | 20 ++- 14 files changed, 241 insertions(+), 119 deletions(-) create mode 100644 datahub-web-react/src/app/search/SearchResultsLoadingSection.tsx create mode 100644 datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index a2e14308e8cee..ada9a06ab5b95 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -41,10 +41,12 @@ import { FetchedEntity } from './app/lineage/types'; import { DEFAULT_APP_CONFIG } from './appConfigContext'; export const user1 = { + __typename: 'CorpUser', username: 'sdas', urn: 'urn:li:corpuser:1', type: EntityType.CorpUser, info: { + __typename: 'CorpUserInfo', email: 'sdas@domain.com', active: true, displayName: 'sdas', @@ -53,18 +55,19 @@ export const user1 = { lastName: 'Das', fullName: 'Shirshanka Das', }, - editableInfo: { - pictureLink: 'https://crunchconf.com/img/2019/speakers/1559291783-ShirshankaDas.png', - }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -74,14 +77,23 @@ export const user1 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + properties: null, + editableProperties: null, }; const user2 = { + __typename: 'CorpUser', username: 'john', urn: 'urn:li:corpuser:3', type: EntityType.CorpUser, - info: { + properties: { + __typename: 'CorpUserInfo', email: 
'john@domain.com', active: true, displayName: 'john', @@ -90,25 +102,41 @@ const user2 = { lastName: 'Joyce', fullName: 'John Joyce', }, - editableInfo: { - pictureLink: null, - }, editableProperties: { displayName: 'Test', title: 'test', pictureLink: null, teams: [], skills: [], + __typename: 'CorpUserEditableProperties', + email: 'john@domain.com', + }, + groups: { + __typename: 'EntityRelationshipsResult', + relationships: [ + { + __typename: 'EntityRelationship', + entity: { + __typename: 'CorpGroup', + urn: 'urn:li:corpgroup:group1', + name: 'group1', + properties: null, + }, + }, + ], }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -118,7 +146,13 @@ const user2 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + info: null, }; export const dataPlatform = { @@ -149,6 +183,7 @@ export const dataPlatformInstance = { }; export const dataset1 = { + __typename: 'Dataset', urn: 'urn:li:dataset:1', type: EntityType.Dataset, platform: { @@ -260,6 +295,7 @@ export const dataset1 = { }; export const dataset2 = { + __typename: 'Dataset', urn: 'urn:li:dataset:2', type: EntityType.Dataset, platform: { @@ -358,17 +394,23 @@ export const dataset3 = { urn: 'urn:li:dataset:3', type: EntityType.Dataset, platform: { + __typename: 'DataPlatform', urn: 'urn:li:dataPlatform:kafka', name: 'Kafka', + displayName: 'Kafka', info: { + __typename: 'DataPlatformInfo', displayName: 'Kafka', type: PlatformType.MessageBroker, datasetNameDelimiter: '.', logoUrl: '', }, type: EntityType.DataPlatform, + lastIngested: null, + properties: null, }, privileges: { + __typename: 'EntityPrivileges', canEditLineage: false, canEditEmbed: false, canEditQueries: false, @@ -381,54 +423,78 @@ export const dataset3 = { origin: 'PROD', uri: 'www.google.com', properties: { + __typename: 'DatasetProperties', name: 'Yet Another Dataset', + qualifiedName: 'Yet Another Dataset', description: 'This and here we have yet another Dataset (YAN). 
Are there more?', origin: 'PROD', - customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:3' }], + customProperties: [ + { + __typename: 'CustomPropertiesEntry', + key: 'propertyAKey', + value: 'propertyAValue', + associatedUrn: 'urn:li:dataset:3', + }, + ], externalUrl: 'https://data.hub', }, parentContainers: { + __typename: 'ParentContainersResult', count: 0, containers: [], }, editableProperties: null, created: { + __typename: 'AuditStamp', time: 0, + actor: null, }, lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, ownership: { + __typename: 'Ownership', owners: [ { + __typename: 'Owner', owner: { ...user1, }, type: 'DATAOWNER', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, { + __typename: 'Owner', owner: { ...user2, }, type: 'DELEGATE', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, ], lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, }, globalTags: { __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -439,14 +505,18 @@ export const dataset3 = { ], }, glossaryTerms: { + __typename: 'GlossaryTerms', terms: [ { + __typename: 'GlossaryTermAssociation', term: { + __typename: 'GlossaryTerm', type: EntityType.GlossaryTerm, urn: 'urn:li:glossaryTerm:sample-glossary-term', name: 'sample-glossary-term', hierarchicalName: 'example.sample-glossary-term', properties: { + __typename: 'GlossaryTermProperties', name: 'sample-glossary-term', description: 'sample definition', definition: 'sample definition', @@ -463,13 +533,21 @@ export const dataset3 = { incoming: null, outgoing: null, institutionalMemory: { + __typename: 'InstitutionalMemory', elements: [ { + __typename: 'InstitutionalMemoryMetadata', url: 'https://www.google.com', - author: { urn: 'urn:li:corpuser:datahub', username: 'datahub', type: EntityType.CorpUser }, + author: { + __typename: 'CorpUser', + urn: 'urn:li:corpuser:datahub', + username: 'datahub', + type: EntityType.CorpUser, + }, description: 'This only points to Google', label: 'This only points to Google', created: { + __typename: 'AuditStamp', actor: 'urn:li:corpuser:1', time: 1612396473001, }, @@ -482,12 +560,14 @@ export const dataset3 = { operations: null, datasetProfiles: [ { + __typename: 'DatasetProfile', rowCount: 10, columnCount: 5, sizeInBytes: 10000, timestampMillis: 0, fieldProfiles: [ { + __typename: 'DatasetFieldProfile', fieldPath: 'testColumn', uniqueCount: 1, uniqueProportion: 0.129, @@ -507,6 +587,7 @@ export const dataset3 = { viewProperties: null, autoRenderAspects: [ { + __typename: 'AutoRenderAspect', aspectName: 'autoRenderAspect', payload: '{ "values": [{ "autoField1": "autoValue1", "autoField2": "autoValue2" }] }', renderSpec: { @@ -529,7 +610,11 @@ export const dataset3 = { siblings: null, statsSummary: null, embed: null, - browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + browsePathV2: { __typename: 'BrowsePathV2', path: [{ name: 'test', entity: null }] }, + access: null, + dataProduct: null, + lastProfile: null, + lastOperation: null, } as Dataset; export const dataset3WithSchema = { @@ -1839,7 +1924,6 @@ export const mocks = [ browse: { entities: [ { - __typename: 'Dataset', ...dataset1, }, ], @@ -1986,7 +2070,6 @@ export 
const mocks = [ searchResults: [ { entity: { - __typename: 'Dataset', ...dataset1, }, matchedFields: [ @@ -1999,7 +2082,6 @@ export const mocks = [ }, { entity: { - __typename: 'Dataset', ...dataset2, }, }, @@ -2075,6 +2157,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2248,6 +2331,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2259,10 +2343,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -2270,6 +2356,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2278,12 +2365,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -2829,6 +2917,7 @@ export const mocks = [ // ], // }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2908,6 +2997,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3205,6 +3295,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3216,10 +3307,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -3227,6 +3320,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3235,12 +3329,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3290,6 +3385,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3301,6 +3397,7 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ @@ -3308,10 +3405,12 @@ export const mocks = [ value: 'PROD', count: 3, entity: null, + __typename: 'AggregationMetadata', }, ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3320,12 +3419,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, 
entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3367,6 +3467,7 @@ export const mocks = [ __typename: 'AuthenticatedUser', corpUser: { ...user2 }, platformPrivileges: { + __typename: 'PlatformPrivileges', viewAnalytics: true, managePolicies: true, manageIdentities: true, diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx index e27a63b98f012..26228e8c44515 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx @@ -188,6 +188,7 @@ export const EmbeddedListSearch = ({ variables: { input: searchInput, }, + fetchPolicy: 'cache-first', }); useEffect(() => { diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx index e4d43f34dcba7..1daf2a4c59b70 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx @@ -1,14 +1,15 @@ import React from 'react'; import { Pagination, Typography } from 'antd'; +import { LoadingOutlined } from '@ant-design/icons'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata, SearchResults as SearchResultType } from '../../../../../../types.generated'; import { SearchCfg } from '../../../../../../conf'; -import { ReactComponent as LoadingSvg } from '../../../../../../images/datahub-logo-color-loading_pendulum.svg'; import { EntityAndType } from '../../../types'; import { UnionType } from '../../../../../search/utils/constants'; import { SearchFiltersSection } from '../../../../../search/SearchFiltersSection'; import { EntitySearchResults, EntityActionProps } from './EntitySearchResults'; import MatchingViewsLabel from './MatchingViewsLabel'; +import { ANTD_GRAY } from '../../../constants'; const SearchBody = styled.div` height: 100%; @@ -59,6 +60,12 @@ const LoadingContainer = styled.div` flex: 1; `; +const StyledLoading = styled(LoadingOutlined)` + font-size: 36px; + color: ${ANTD_GRAY[7]}; + padding-bottom: 18px; +]`; + interface Props { page: number; searchResponse?: SearchResultType | null; @@ -121,7 +128,7 @@ export const EmbeddedListSearchResults = ({ {loading && ( - + )} {!loading && ( diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index 096f1db617d92..58693eca8af0e 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -39,6 +39,7 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { return ( routeToTab({ tabName: tab })} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts b/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts index 5a7d4f24dfd2a..ae87eeb1a8450 100644 
--- a/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts +++ b/datahub-web-react/src/app/entity/shared/containers/profile/useGetDataForProfile.ts @@ -32,6 +32,7 @@ export default function useGetDataForProfile({ urn, entityType, useEntityQuer refetch, } = useEntityQuery({ variables: { urn }, + fetchPolicy: 'cache-first', }); const dataPossiblyCombinedWithSiblings = isHideSiblingMode diff --git a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts index f3b904956b224..e26aa01c385e8 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts +++ b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts @@ -45,6 +45,7 @@ export default function generateUseSearchResultsViaRelationshipHook({ variables: { input: inputFields, }, + fetchPolicy: 'cache-first', skip: !filtersExist(filters, orFilters), // If you don't include any filters, we shound't return anything :). Might as well skip! }); diff --git a/datahub-web-react/src/app/search/SearchPage.tsx b/datahub-web-react/src/app/search/SearchPage.tsx index 6387f0ef8c05e..541355a3e2cb4 100644 --- a/datahub-web-react/src/app/search/SearchPage.tsx +++ b/datahub-web-react/src/app/search/SearchPage.tsx @@ -62,6 +62,7 @@ export const SearchPage = () => { searchFlags: { getSuggestions: true }, }, }, + fetchPolicy: 'cache-and-network', }); const total = data?.searchAcrossEntities?.total || 0; @@ -217,6 +218,7 @@ export const SearchPage = () => { )} {showSearchFiltersV2 && ( ` `; type Props = { + loading: boolean; query: string; searchResults: CombinedSearchResult[]; totalResultCount: number; @@ -64,6 +65,7 @@ type Props = { }; export const SearchResultList = ({ + loading, query, searchResults, totalResultCount, @@ -104,7 +106,7 @@ export const SearchResultList = ({ id="search-result-list" dataSource={searchResults} split={false} - locale={{ emptyText: }} + locale={{ emptyText: (!loading && ) || <> }} renderItem={(item, index) => ( ` display: flex; @@ -109,6 +109,7 @@ const SearchResultListContainer = styled.div<{ v2Styles: boolean }>` `; interface Props { + loading: boolean; unionType?: UnionType; query: string; viewUrn?: string; @@ -124,7 +125,6 @@ interface Props { } | null; facets?: Array | null; selectedFilters: Array; - loading: boolean; error: any; onChangeFilters: (filters: Array) => void; onChangeUnionType: (unionType: UnionType) => void; @@ -142,6 +142,7 @@ interface Props { } export const SearchResults = ({ + loading, unionType = UnionType.AND, query, viewUrn, @@ -149,7 +150,6 @@ export const SearchResults = ({ searchResponse, facets, selectedFilters, - loading, error, onChangeUnionType, onChangeFilters, @@ -180,7 +180,6 @@ export const SearchResults = ({ return ( <> - {loading && } {!showSearchFiltersV2 && ( @@ -247,10 +246,12 @@ export const SearchResults = ({ )} {(error && ) || - (!loading && ( + (loading && !combinedSiblingSearchResults.length && ) || + (combinedSiblingSearchResults && ( {totalResults > 0 && } + + + + + + + ); +} diff --git a/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx b/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx index 0111a264d1e17..5d921c82913ac 100644 --- a/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx +++ b/datahub-web-react/src/app/search/__tests__/SearchPage.test.tsx @@ -1,42 +1,23 @@ import 
React from 'react'; -import { act } from 'react-dom/test-utils'; -import { fireEvent, render, waitFor } from '@testing-library/react'; +import { render, waitFor } from '@testing-library/react'; +import { InMemoryCache } from '@apollo/client'; import { MockedProvider } from '@apollo/client/testing'; import { Route } from 'react-router'; - import { SearchPage } from '../SearchPage'; import TestPageContainer from '../../../utils/test-utils/TestPageContainer'; import { mocksWithSearchFlagsOff } from '../../../Mocks'; import { PageRoutes } from '../../../conf/Global'; +import possibleTypesResult from '../../../possibleTypes.generated'; -describe('SearchPage', () => { - it('renders loading', async () => { - const promise = Promise.resolve(); - const { getByText } = render( - - - } /> - - , - ); - await waitFor(() => expect(getByText('Loading...')).toBeInTheDocument()); - await act(() => promise); - }); +const cache = new InMemoryCache({ + // need to define possibleTypes to allow us to use Apollo cache with union types + possibleTypes: possibleTypesResult.possibleTypes, +}); +describe('SearchPage', () => { it('renders the selected filters as checked', async () => { const { getByTestId, queryByTestId } = render( - + @@ -56,14 +37,7 @@ describe('SearchPage', () => { it('renders the selected filters as checked using legacy URL scheme for entity (entity instead of _entityType)', async () => { const { getByTestId, queryByTestId } = render( - + @@ -83,14 +57,7 @@ describe('SearchPage', () => { it('renders multiple checked filters at once', async () => { const { getByTestId, queryByTestId } = render( - + @@ -108,44 +75,4 @@ describe('SearchPage', () => { const hdfsPlatformBox = getByTestId('facet-platform-hdfs'); expect(hdfsPlatformBox).toHaveProperty('checked', true); }); - - it('clicking a filter selects a new filter', async () => { - const promise = Promise.resolve(); - const { getByTestId, queryByTestId } = render( - - - } /> - - , - ); - - await waitFor(() => expect(queryByTestId('facet-_entityType-DATASET')).toBeInTheDocument()); - - const datasetEntityBox = getByTestId('facet-_entityType-DATASET'); - expect(datasetEntityBox).toHaveProperty('checked', true); - - const chartEntityBox = getByTestId('facet-_entityType-CHART'); - expect(chartEntityBox).toHaveProperty('checked', false); - act(() => { - fireEvent.click(chartEntityBox); - }); - - await waitFor(() => expect(queryByTestId('facet-_entityType-DATASET')).toBeInTheDocument()); - - const datasetEntityBox2 = getByTestId('facet-_entityType-DATASET'); - expect(datasetEntityBox2).toHaveProperty('checked', true); - - const chartEntityBox2 = getByTestId('facet-_entityType-CHART'); - expect(chartEntityBox2).toHaveProperty('checked', true); - await act(() => promise); - }); }); diff --git a/datahub-web-react/src/app/search/filters/BasicFilters.tsx b/datahub-web-react/src/app/search/filters/BasicFilters.tsx index e8f56e5c2cd5e..84750387853bb 100644 --- a/datahub-web-react/src/app/search/filters/BasicFilters.tsx +++ b/datahub-web-react/src/app/search/filters/BasicFilters.tsx @@ -24,6 +24,7 @@ import { } from '../../onboarding/config/SearchOnboardingConfig'; import { useFilterRendererRegistry } from './render/useFilterRenderer'; import { FilterScenarioType } from './render/types'; +import BasicFiltersLoadingSection from './BasicFiltersLoadingSection'; const NUM_VISIBLE_FILTER_DROPDOWNS = 5; @@ -56,6 +57,7 @@ const FILTERS_TO_REMOVE = [ ]; interface Props { + loading: boolean; availableFilters: FacetMetadata[] | null; activeFilters: FacetFilterInput[]; 
onChangeFilters: (newFilters: FacetFilterInput[]) => void; @@ -64,6 +66,7 @@ interface Props { } export default function BasicFilters({ + loading, availableFilters, activeFilters, onChangeFilters, @@ -88,6 +91,7 @@ export default function BasicFilters({ + {loading && !visibleFilters?.length && } {visibleFilters?.map((filter) => { return filterRendererRegistry.hasRenderer(filter.field) ? ( filterRendererRegistry.render(filter.field, { diff --git a/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx b/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx new file mode 100644 index 0000000000000..f82a66d4f0c6d --- /dev/null +++ b/datahub-web-react/src/app/search/filters/BasicFiltersLoadingSection.tsx @@ -0,0 +1,27 @@ +import * as React from 'react'; +import { Skeleton } from 'antd'; +import styled from 'styled-components'; + +const Container = styled.div` + display: flex; + align-items: center; +`; + +const CardSkeleton = styled(Skeleton.Input)` + && { + padding: 2px 12px 2px 0px; + height: 32px; + border-radius: 8px; + } +`; + +export default function BasicFiltersLoadingSection() { + return ( + + + + + + + ); +} diff --git a/datahub-web-react/src/app/search/filters/SearchFilters.tsx b/datahub-web-react/src/app/search/filters/SearchFilters.tsx index 97e71ae701aac..bcc987159e0e6 100644 --- a/datahub-web-react/src/app/search/filters/SearchFilters.tsx +++ b/datahub-web-react/src/app/search/filters/SearchFilters.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata } from '../../../types.generated'; import { ANTD_GRAY } from '../../entity/shared/constants'; @@ -13,6 +13,7 @@ const SearchFiltersWrapper = styled.div<{ removePadding: boolean }>` `; interface Props { + loading: boolean; mode: FilterMode; availableFilters: FacetMetadata[]; activeFilters: FacetFilterInput[]; @@ -24,6 +25,7 @@ interface Props { } export default function SearchFilters({ + loading, mode, availableFilters, activeFilters, @@ -33,6 +35,17 @@ export default function SearchFilters({ onChangeUnionType, onChangeMode, }: Props) { + const [finalAvailableFilters, setFinalAvailableFilters] = useState(availableFilters); + + /** + * Only update the active filters if we are done loading. Prevents jitter! 
+ */ + useEffect(() => { + if (!loading && finalAvailableFilters !== availableFilters) { + setFinalAvailableFilters(availableFilters); + } + }, [availableFilters, loading, finalAvailableFilters]); + const isShowingBasicFilters = mode === FilterModes.BASIC; return ( {isShowingBasicFilters && ( Date: Fri, 1 Dec 2023 13:21:54 -0800 Subject: [PATCH 035/263] =?UTF-8?q?refactor(ui):=20Remove=20primary=20colo?= =?UTF-8?q?r=20for=20sort=20selector=20+=20add=20t=E2=80=A6=20(#9363)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/app/search/context/constants.ts | 8 ++-- .../app/search/sorting/SearchSortSelect.tsx | 42 ++++++++++--------- 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/datahub-web-react/src/app/search/context/constants.ts b/datahub-web-react/src/app/search/context/constants.ts index 5f841b8536e19..96e5d7c787203 100644 --- a/datahub-web-react/src/app/search/context/constants.ts +++ b/datahub-web-react/src/app/search/context/constants.ts @@ -7,19 +7,19 @@ export const LAST_OPERATION_TIME_FIELD = 'lastOperationTime'; export const DEFAULT_SORT_OPTION = RELEVANCE; export const SORT_OPTIONS = { - [RELEVANCE]: { label: 'Relevance', field: RELEVANCE, sortOrder: SortOrder.Descending }, + [RELEVANCE]: { label: 'Relevance (Default)', field: RELEVANCE, sortOrder: SortOrder.Descending }, [`${ENTITY_NAME_FIELD}_${SortOrder.Ascending}`]: { - label: 'A to Z', + label: 'Name A to Z', field: ENTITY_NAME_FIELD, sortOrder: SortOrder.Ascending, }, [`${ENTITY_NAME_FIELD}_${SortOrder.Descending}`]: { - label: 'Z to A', + label: 'Name Z to A', field: ENTITY_NAME_FIELD, sortOrder: SortOrder.Descending, }, [`${LAST_OPERATION_TIME_FIELD}_${SortOrder.Descending}`]: { - label: 'Last Modified in Platform', + label: 'Last Modified In Source', field: LAST_OPERATION_TIME_FIELD, sortOrder: SortOrder.Descending, }, diff --git a/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx b/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx index 683292a20b5b4..fc9486926214f 100644 --- a/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx +++ b/datahub-web-react/src/app/search/sorting/SearchSortSelect.tsx @@ -1,8 +1,9 @@ import Icon, { CaretDownFilled } from '@ant-design/icons'; -import { Select } from 'antd'; +import { Select, Tooltip } from 'antd'; import React from 'react'; import styled from 'styled-components'; import { ReactComponent as SortIcon } from '../../../images/sort.svg'; +import { ANTD_GRAY } from '../../entity/shared/constants'; import { DEFAULT_SORT_OPTION, SORT_OPTIONS } from '../context/constants'; import { useSearchContext } from '../context/SearchContext'; @@ -13,19 +14,20 @@ const SelectWrapper = styled.span` .ant-select-selection-item { // !important is necessary because updating Select styles for antd is impossible - color: ${(props) => props.theme.styles['primary-color']} !important; + color: ${ANTD_GRAY[8]} !important; font-weight: 700; } - svg { - color: ${(props) => props.theme.styles['primary-color']}; + .ant-select-selection-placeholder { + color: ${ANTD_GRAY[8]}; + font-weight: 700; } `; const StyledIcon = styled(Icon)` - color: ${(props) => props.theme.styles['primary-color']}; + color: ${ANTD_GRAY[8]}; font-size: 16px; - margin-right: -6px; + margin-right: -8px; `; export default function SearchSortSelect() { @@ -34,18 +36,20 @@ export default function SearchSortSelect() { const options = Object.entries(SORT_OPTIONS).map(([value, option]) => ({ value, label: option.label })); return ( - - - 
setSelectedSortOption(sortOption)} + dropdownStyle={{ minWidth: 'max-content' }} + placement="bottomRight" + suffixIcon={} + /> + + ); } From 14a463b1ce1b5b60bea8496f5f4aee16b8b7aa39 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Fri, 1 Dec 2023 15:53:01 -0800 Subject: [PATCH 036/263] feat(ui): Supporting subtypes for data jobs (#9361) Co-authored-by: Andrew Sikowitz --- .../datahub/graphql/GmsGraphQLEngine.java | 8 ---- .../datahub/graphql/SubTypesResolver.java | 47 ------------------- .../graphql/types/chart/ChartType.java | 3 +- .../types/chart/mappers/ChartMapper.java | 4 ++ .../types/common/mappers/SubTypesMapper.java | 22 +++++++++ .../container/mappers/ContainerMapper.java | 9 +--- .../dashboard/mappers/DashboardMapper.java | 13 ++--- .../graphql/types/datajob/DataJobType.java | 3 +- .../types/datajob/mappers/DataJobMapper.java | 4 ++ .../graphql/types/dataset/DatasetType.java | 3 +- .../types/dataset/mappers/DatasetMapper.java | 4 ++ .../src/main/resources/entity.graphql | 5 ++ .../src/app/entity/chart/ChartEntity.tsx | 1 + .../src/app/entity/dataJob/DataJobEntity.tsx | 2 + .../app/entity/dataJob/preview/Preview.tsx | 4 +- datahub-web-react/src/graphql/browse.graphql | 6 +++ .../src/graphql/dataProcess.graphql | 3 ++ .../src/graphql/fragments.graphql | 9 ++++ datahub-web-react/src/graphql/lineage.graphql | 3 ++ datahub-web-react/src/graphql/preview.graphql | 6 +++ datahub-web-react/src/graphql/scroll.graphql | 6 +++ datahub-web-react/src/graphql/search.graphql | 3 ++ .../src/main/resources/entity-registry.yml | 1 + 23 files changed, 93 insertions(+), 76 deletions(-) delete mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index b0b26f073876c..9ea8126a07ab2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1174,10 +1174,6 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("testResults", new TestResultsResolver(entityClient)) .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("subTypes", new SubTypesResolver( - this.entityClient, - "dataset", - "subTypes")) .dataFetcher("runs", new EntityRunsResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) @@ -1433,10 +1429,6 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("subTypes", new SubTypesResolver( - this.entityClient, - "chart", - "subTypes")) ); builder.type("ChartInfo", typeWiring -> typeWiring .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java deleted file mode 100644 index c74d84d8be323..0000000000000 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java +++ /dev/null @@ -1,47 +0,0 @@ -package com.linkedin.datahub.graphql; - -import com.linkedin.common.SubTypes; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.r2.RemoteInvocationException; -import graphql.schema.DataFetcher; -import graphql.schema.DataFetchingEnvironment; -import java.net.URISyntaxException; -import java.util.Collections; -import java.util.concurrent.CompletableFuture; -import javax.annotation.Nullable; -import lombok.AllArgsConstructor; -import lombok.extern.slf4j.Slf4j; - - -@Slf4j -@AllArgsConstructor -public class SubTypesResolver implements DataFetcher> { - - EntityClient _entityClient; - String _entityType; - String _aspectName; - - @Override - @Nullable - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - SubTypes subType = null; - final String urnStr = ((Entity) environment.getSource()).getUrn(); - try { - final Urn urn = Urn.createFromString(urnStr); - EntityResponse entityResponse = _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(_aspectName), context.getAuthentication()).get(urn); - if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) { - subType = new SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data()); - } - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e); - } - return subType; - }); - } -} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index cfec8f8a2391f..fa0e3cd856803 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -77,7 +77,8 @@ public class ChartType implements SearchableEntityType, Browsable INPUT_FIELDS_ASPECT_NAME, EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index 657c9b688aed2..e0ffc57ddf519 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -11,6 +11,7 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.AccessLevel; @@ -34,6 
+35,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -97,6 +99,8 @@ public Chart apply(@Nonnull final EntityResponse entityResponse) { chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java new file mode 100644 index 0000000000000..9aa94eae62999 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -0,0 +1,22 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.SubTypes; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.ArrayList; +import javax.annotation.Nonnull; + +public class SubTypesMapper implements ModelMapper { + + public static final SubTypesMapper INSTANCE = new SubTypesMapper(); + + public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) { + return INSTANCE.apply(metadata); + } + + @Override + public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { + final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); + result.setTypeNames(new ArrayList<>(input.getTypeNames())); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index aeaa8f4f85c14..b81259e78be3e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -21,6 +21,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -97,7 +98,7 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME); if (envelopedSubTypes != null) { - 
result.setSubTypes(mapSubTypes(new SubTypes(envelopedSubTypes.getValue().data()))); + result.setSubTypes(SubTypesMapper.map(new SubTypes(envelopedSubTypes.getValue().data()))); } final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); @@ -150,12 +151,6 @@ private static com.linkedin.datahub.graphql.generated.ContainerEditablePropertie return editableContainerProperties; } - private static com.linkedin.datahub.graphql.generated.SubTypes mapSubTypes(final SubTypes gmsSubTypes) { - final com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(gmsSubTypes.getTypeNames()); - return subTypes; - } - private static DataPlatform mapPlatform(final DataPlatformInstance platformInstance) { // Set dummy platform to be resolved. final DataPlatform dummyPlatform = new DataPlatform(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 32e4341ece4aa..432624ac4699f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -33,6 +33,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -91,7 +92,8 @@ public Dashboard apply(@Nonnull final EntityResponse entityResponse) { dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> @@ -204,13 +206,4 @@ private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) final Domains domains = new Domains(dataMap); dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); } - - private void mapSubTypes(@Nonnull Dashboard dashboard, DataMap dataMap) { - SubTypes pegasusSubTypes = new SubTypes(dataMap); - if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); - dashboard.setSubTypes(subTypes); - } - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index bde79f6dce6e8..f6f37978bb36a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -75,7 +75,8 @@ public class DataJobType implements SearchableEntityType, Brows DEPRECATION_ASPECT_NAME, DATA_PLATFORM_INSTANCE_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("flow"); private final EntityClient _entityClient; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 4845fc1876348..61802ad9cfe5c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -9,6 +9,7 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataFlow; @@ -27,6 +28,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -103,6 +105,8 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) { result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); } }); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 0fc4399ac902d..6f339d3985133 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -86,7 +86,8 @@ public class DatasetType implements SearchableEntityType, Brows EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME + ACCESS_DATASET_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME ); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 4867aa1d89825..3e39c14c29ede 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -11,6 +11,7 @@ import com.linkedin.common.Ownership; import com.linkedin.common.Siblings; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -29,6 +30,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -114,6 +116,8 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 035f756a10d55..4f3769d908815 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -5689,6 +5689,11 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { """ type: EntityType! 
+ """ + Sub Types that this entity implements + """ + subTypes: SubTypes + """ The timestamp for the last time this entity was ingested """ diff --git a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx index fc898dec9d93a..d2d35aad7c29f 100644 --- a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx +++ b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx @@ -168,6 +168,7 @@ export class ChartEntity implements Entity { return ( { { Date: Mon, 4 Dec 2023 09:50:46 +0100 Subject: [PATCH 037/263] fix(ingest/bigquery): Fix format arguments for table lineage test (#9340) (#9341) --- .../src/datahub/ingestion/source/bigquery_v2/lineage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index e9acf5ea86044..eddd08c92b808 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -894,8 +894,8 @@ def test_capability(self, project_id: str) -> None: for entry in self.audit_log_api.get_bigquery_log_entries_via_gcp_logging( gcp_logging_client, filter=BQ_FILTER_RULE_TEMPLATE_V2_LINEAGE.format( - self.start_time.strftime(BQ_DATETIME_FORMAT), - self.end_time.strftime(BQ_DATETIME_FORMAT), + start_time=self.start_time.strftime(BQ_DATETIME_FORMAT), + end_time=self.end_time.strftime(BQ_DATETIME_FORMAT), ), log_page_size=self.config.log_page_size, limit=1, From 7857944bb52ff29ee7d30d8fba21262aa4510b0a Mon Sep 17 00:00:00 2001 From: ethan-cartwright Date: Mon, 4 Dec 2023 11:32:45 -0500 Subject: [PATCH 038/263] fix(siblingsHook): add logic to account for non dbt upstreams (#9154) Co-authored-by: Ethan Cartwright --- .../hook/siblings/SiblingAssociationHook.java | 3 +-- .../siblings/SiblingAssociationHookTest.java | 22 +++++++++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index 7cbe53dee9fe4..064f987ff1ba9 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -205,9 +205,8 @@ private void handleSourceDatasetEvent(MetadataChangeLog event, DatasetUrn source // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt model if (dbtUpstreams.size() == 1) { setSiblingsAndSoftDeleteSibling(dbtUpstreams.get(0).getDataset(), sourceUrn); - } else { + } else if (dbtUpstreams.size() > 1) { log.error("{} has an unexpected number of dbt upstreams: {}. 
Not adding any as siblings.", sourceUrn.toString(), dbtUpstreams.size()); - } } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index 6a2a05aa4b8c0..93e98b7343cd4 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -305,6 +305,28 @@ public void testInvokeWhenSourceUrnHasTwoUpstreamsOneDbt() throws Exception { Mockito.verify(_mockEntityClient, Mockito.times(2)).ingestProposal(Mockito.any(), eq(true)); } + @Test + public void testInvokeWhenSourceUrnHasTwoUpstreamsNoDbt() throws Exception { + + MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + final UpstreamLineage upstreamLineage = new UpstreamLineage(); + final UpstreamArray upstreamArray = new UpstreamArray(); + Upstream snowflakeUpstream1 = + createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream snowflakeUpstream2 = + createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", DatasetLineageType.TRANSFORMED); + upstreamArray.add(snowflakeUpstream1); + upstreamArray.add(snowflakeUpstream2); + upstreamLineage.setUpstreams(upstreamArray); + + event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); + event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + _siblingAssociationHook.invoke(event); + + + Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); + } + private MetadataChangeLog createEvent(String entityType, String aspectName, ChangeType changeType) { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(entityType); From f9b24e07241bd5dc3e6d93698a90000fc08150fb Mon Sep 17 00:00:00 2001 From: purnimagarg1 <139125209+purnimagarg1@users.noreply.github.com> Date: Mon, 4 Dec 2023 22:58:41 +0530 Subject: [PATCH 039/263] feat: Support CSV ingestion through the UI (#9280) Co-authored-by: Gabe Lyons --- .../src/app/ingest/source/builder/CSVInfo.tsx | 27 ++++++++ .../ingest/source/builder/RecipeBuilder.tsx | 5 +- .../source/builder/RecipeForm/constants.ts | 8 ++- .../ingest/source/builder/RecipeForm/csv.ts | 60 ++++++++++++++++++ .../app/ingest/source/builder/constants.ts | 4 ++ .../app/ingest/source/builder/sources.json | 7 ++ .../src/app/ingest/source/conf/csv/csv.ts | 22 +++++++ .../src/app/ingest/source/conf/sources.tsx | 2 + datahub-web-react/src/images/csv-logo.png | Bin 0 -> 12029 bytes .../main/resources/boot/data_platforms.json | 10 +++ 10 files changed, 143 insertions(+), 2 deletions(-) create mode 100644 datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx create mode 100644 datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts create mode 100644 datahub-web-react/src/app/ingest/source/conf/csv/csv.ts create mode 100644 datahub-web-react/src/images/csv-logo.png diff --git a/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx b/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx new file mode 100644 index 0000000000000..87d632bb228b5 --- /dev/null +++ 
b/datahub-web-react/src/app/ingest/source/builder/CSVInfo.tsx @@ -0,0 +1,27 @@ +import React from 'react'; +import { Alert } from 'antd'; + +const CSV_FORMAT_LINK = 'https://datahubproject.io/docs/generated/ingestion/sources/csv'; + +export const CSVInfo = () => { + const link = ( + + link + + ); + + return ( + + Add the URL of your CSV file to be ingested. This will work for any web-hosted CSV file. For + example, You can create a file in google sheets following the format at this {link} and then + construct the CSV URL by publishing your google sheet in the CSV format. + + } + /> + ); +}; diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx b/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx index bee9b04cee100..db1f0fdd4dfa6 100644 --- a/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx +++ b/datahub-web-react/src/app/ingest/source/builder/RecipeBuilder.tsx @@ -7,8 +7,9 @@ import { ANTD_GRAY } from '../../../entity/shared/constants'; import { YamlEditor } from './YamlEditor'; import RecipeForm from './RecipeForm/RecipeForm'; import { SourceBuilderState, SourceConfig } from './types'; -import { LOOKER, LOOK_ML } from './constants'; +import { CSV, LOOKER, LOOK_ML } from './constants'; import { LookerWarning } from './LookerWarning'; +import { CSVInfo } from './CSVInfo'; export const ControlsContainer = styled.div` display: flex; @@ -81,6 +82,8 @@ function RecipeBuilder(props: Props) { return (
{(type === LOOKER || type === LOOK_ML) && } + {type === CSV && } + {sourceConfigs?.displayName} Recipe diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts index 351876fe6b16a..844bf50926764 100644 --- a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/constants.ts @@ -83,7 +83,7 @@ import { PROJECT_NAME, } from './lookml'; import { PRESTO, PRESTO_HOST_PORT, PRESTO_DATABASE, PRESTO_USERNAME, PRESTO_PASSWORD } from './presto'; -import { BIGQUERY_BETA, DBT_CLOUD, MYSQL, POWER_BI, UNITY_CATALOG, VERTICA } from '../constants'; +import { BIGQUERY_BETA, CSV, DBT_CLOUD, MYSQL, POWER_BI, UNITY_CATALOG, VERTICA } from '../constants'; import { BIGQUERY_BETA_PROJECT_ID, DATASET_ALLOW, DATASET_DENY, PROJECT_ALLOW, PROJECT_DENY } from './bigqueryBeta'; import { MYSQL_HOST_PORT, MYSQL_PASSWORD, MYSQL_USERNAME } from './mysql'; import { MSSQL, MSSQL_DATABASE, MSSQL_HOST_PORT, MSSQL_PASSWORD, MSSQL_USERNAME } from './mssql'; @@ -140,6 +140,7 @@ import { INCLUDE_VIEW_LINEAGE, INCLUDE_PROJECTIONS_LINEAGE, } from './vertica'; +import { CSV_ARRAY_DELIMITER, CSV_DELIMITER, CSV_FILE_URL, CSV_WRITE_SEMANTICS } from './csv'; export enum RecipeSections { Connection = 0, @@ -453,6 +454,11 @@ export const RECIPE_FIELDS: RecipeFields = { ], filterSectionTooltip: 'Include or exclude specific Schemas, Tables, Views and Projections from ingestion.', }, + [CSV]: { + fields: [CSV_FILE_URL], + filterFields: [], + advancedFields: [CSV_ARRAY_DELIMITER, CSV_DELIMITER, CSV_WRITE_SEMANTICS], + }, }; export const CONNECTORS_WITH_FORM = new Set(Object.keys(RECIPE_FIELDS)); diff --git a/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts new file mode 100644 index 0000000000000..fba4f3b9d0164 --- /dev/null +++ b/datahub-web-react/src/app/ingest/source/builder/RecipeForm/csv.ts @@ -0,0 +1,60 @@ +import { RecipeField, FieldType } from './common'; + +const validateURL = (fieldName) => { + return { + validator(_, value) { + const URLPattern = new RegExp(/^(?:http(s)?:\/\/)?[\w.-]+(?:\.[\w.-]+)+[\w\-._~:/?#[\]@!$&'()*+,;=.]+$/); + const isURLValid = URLPattern.test(value); + if (!value || isURLValid) { + return Promise.resolve(); + } + return Promise.reject(new Error(`A valid ${fieldName} is required.`)); + }, + }; +}; + +export const CSV_FILE_URL: RecipeField = { + name: 'filename', + label: 'File URL', + tooltip: 'File URL of the CSV file to ingest.', + type: FieldType.TEXT, + fieldPath: 'source.config.filename', + placeholder: 'File URL', + required: true, + rules: [() => validateURL('File URL')], +}; + +export const CSV_ARRAY_DELIMITER: RecipeField = { + name: 'array_delimiter', + label: 'Array delimiter', + tooltip: 'Delimiter to use when parsing array fields (tags, terms and owners)', + type: FieldType.TEXT, + fieldPath: 'source.config.array_delimiter', + placeholder: 'Array delimiter', + rules: null, +}; + +export const CSV_DELIMITER: RecipeField = { + name: 'delimiter', + label: 'Delimiter', + tooltip: 'Delimiter to use when parsing CSV', + type: FieldType.TEXT, + fieldPath: 'source.config.delimiter', + placeholder: 'Delimiter', + rules: null, +}; + +export const CSV_WRITE_SEMANTICS: RecipeField = { + name: 'write_semantics', + label: 'Write Semantics', + tooltip: + 'Whether the new tags, terms and owners to be added will override the existing ones added 
only by this source or not. Value for this config can be "PATCH" or "OVERRIDE"', + type: FieldType.SELECT, + options: [ + { label: 'PATCH', value: 'PATCH' }, + { label: 'OVERRIDE', value: 'OVERRIDE' }, + ], + fieldPath: 'source.config.write_semantics', + placeholder: 'Write Semantics', + rules: null, +}; diff --git a/datahub-web-react/src/app/ingest/source/builder/constants.ts b/datahub-web-react/src/app/ingest/source/builder/constants.ts index fdb094d721304..08538729de40b 100644 --- a/datahub-web-react/src/app/ingest/source/builder/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/constants.ts @@ -30,6 +30,7 @@ import verticaLogo from '../../../../images/verticalogo.png'; import mlflowLogo from '../../../../images/mlflowlogo.png'; import dynamodbLogo from '../../../../images/dynamodblogo.png'; import fivetranLogo from '../../../../images/fivetranlogo.png'; +import csvLogo from '../../../../images/csv-logo.png'; export const ATHENA = 'athena'; export const ATHENA_URN = `urn:li:dataPlatform:${ATHENA}`; @@ -108,6 +109,8 @@ export const VERTICA = 'vertica'; export const VERTICA_URN = `urn:li:dataPlatform:${VERTICA}`; export const FIVETRAN = 'fivetran'; export const FIVETRAN_URN = `urn:li:dataPlatform:${FIVETRAN}`; +export const CSV = 'csv-enricher'; +export const CSV_URN = `urn:li:dataPlatform:${CSV}`; export const PLATFORM_URN_TO_LOGO = { [ATHENA_URN]: athenaLogo, @@ -142,6 +145,7 @@ export const PLATFORM_URN_TO_LOGO = { [UNITY_CATALOG_URN]: databricksLogo, [VERTICA_URN]: verticaLogo, [FIVETRAN_URN]: fivetranLogo, + [CSV_URN]: csvLogo, }; export const SOURCE_TO_PLATFORM_URN = { diff --git a/datahub-web-react/src/app/ingest/source/builder/sources.json b/datahub-web-react/src/app/ingest/source/builder/sources.json index 9619abebbd54e..2dc2598c1a0ab 100644 --- a/datahub-web-react/src/app/ingest/source/builder/sources.json +++ b/datahub-web-react/src/app/ingest/source/builder/sources.json @@ -223,6 +223,13 @@ "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/fivetran/", "recipe": "source:\n type: fivetran\n config:\n # Fivetran log connector destination server configurations\n fivetran_log_config:\n destination_platform: snowflake\n destination_config:\n # Coordinates\n account_id: snowflake_account_id\n warehouse: warehouse_name\n database: snowflake_db\n log_schema: fivetran_log_schema\n\n # Credentials\n username: ${SNOWFLAKE_USER}\n password: ${SNOWFLAKE_PASS}\n role: snowflake_role\n\n # Optional - filter for certain connector names instead of ingesting everything.\n # connector_patterns:\n # allow:\n # - connector_name\n\n # Optional -- This mapping is optional and only required to configure platform-instance for source\n # A mapping of Fivetran connector id to data platform instance\n # sources_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV\n\n # Optional -- This mapping is optional and only required to configure platform-instance for destination.\n # A mapping of Fivetran destination id to data platform instance\n # destination_to_platform_instance:\n # calendar_elected:\n # platform_instance: cloud_postgres_instance\n # env: DEV" }, + { + "urn": "urn:li:dataPlatform:csv-enricher", + "name": "csv-enricher", + "displayName": "CSV", + "docsUrl": "https://datahubproject.io/docs/generated/ingestion/sources/csv'", + "recipe": "source: \n type: csv-enricher \n config: \n # URL of your csv file to ingest \n filename: \n array_delimiter: '|' \n delimiter: ',' \n write_semantics: PATCH" + }, { "urn": 
"urn:li:dataPlatform:custom", "name": "custom", diff --git a/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts b/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts new file mode 100644 index 0000000000000..e1dc22c086fb4 --- /dev/null +++ b/datahub-web-react/src/app/ingest/source/conf/csv/csv.ts @@ -0,0 +1,22 @@ +import { SourceConfig } from '../types'; +import csvLogo from '../../../../../images/csv-logo.png'; + +const placeholderRecipe = `\ +source: + type: csv-enricher + config: + filename: # URL of your csv file to ingest, e.g. https://docs.google.com/spreadsheets/d/DOCID/export?format=csv + array_delimiter: | + delimiter: , + write_semantics: PATCH +`; + +const csvConfig: SourceConfig = { + type: 'csv-enricher', + placeholderRecipe, + displayName: 'CSV', + docsUrl: 'https://datahubproject.io/docs/generated/ingestion/sources/csv', + logoUrl: csvLogo, +}; + +export default csvConfig; diff --git a/datahub-web-react/src/app/ingest/source/conf/sources.tsx b/datahub-web-react/src/app/ingest/source/conf/sources.tsx index a3cdb0a8f5843..4dbeeb5c975e9 100644 --- a/datahub-web-react/src/app/ingest/source/conf/sources.tsx +++ b/datahub-web-react/src/app/ingest/source/conf/sources.tsx @@ -16,6 +16,7 @@ import { SourceConfig } from './types'; import hiveConfig from './hive/hive'; import oracleConfig from './oracle/oracle'; import tableauConfig from './tableau/tableau'; +import csvConfig from './csv/csv'; const baseUrl = window.location.origin; @@ -46,6 +47,7 @@ export const SOURCE_TEMPLATE_CONFIGS: Array<SourceConfig> = [ glueConfig, oracleConfig, hiveConfig, + csvConfig, { type: 'custom', placeholderRecipe: DEFAULT_PLACEHOLDER_RECIPE, diff --git a/datahub-web-react/src/images/csv-logo.png b/datahub-web-react/src/images/csv-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..b5fdc189cf58ec8c62a14ff58fcc271c36bb1a4f GIT binary patch literal 12029 zcmdUVbyQUGx9=H-loXVdlt!eKln@XYT0kiYNf}@O0f!DnLPcPZZfPkMkeCq=L_$L8 z8XBcL2bg(>-@WhMx9(f-ci%sE-TT&pb>PH4-?R5;$9L~fjG?|J^(EFz002<KwA76N zfC&7P2p}f~5Bq*YIPgH>sb%gB05okEzmN`(5(n@ki_bkXA7c+EALQewjsOygyy=2) z^|pWP>3GxQsdL8W9aaF~0$}QQP5d%9uz{&r4}y;fM-$ai5%F5jjoP8vbhU!>d2kZ$ zxdIB5gF&W*6a(F*=mSGq<w>Q9!<DlM3-{Cg;vqE>w)<L6^KZIzqv^SE@gK$>T3-D% z!tlyZg@i90NbAY|8WOs$qW*MD%DXCfYy9Zc*Q%kOtGanDkXq^#zwTQVC>qZTL;WZI zto%})roLjtEAH3VikXZrNd9${#!hTE?S3T|l50Lv6~tXQuok`JHcO%6JS_?{dYuY{ zXp@gJ)MpN7G$kX|?%4ub8{L$DB%S|8g;DODio&Ak!)xDgo;ck+JKn@(xHwzK8-1`j ztAi9TVqfw0O4``YZC0Cq@x8O9G4ViXi*#ZlGm8+x6beg!YupgHdAf&qpCt#4?~d%= zE7`eCWsbUKbsJurXjXlajA3)pw9Yh&nX@~?YY*tY6HL0<jgEP?L?WET+~0#Hfx)59 zOIQ`OBv7#T9m5u1AnoZkO<f}bRgvG@L#Uu8!nO)ym;qjxFc&n87e?X$#PgPhQ2{94 ztML#OQ~{ABpoKC~S;DaWf?%_dP}IAGd;T)Hnvy`xho;~8yNJ6eNeQzU|GHewUB-VR zfE0kCE_DK2Fe3=dfB3fl);F8<p&_*;-yR6iyE0vtWoS+0=14&5pjfYNzVKt)mV?*U zmX8PNOtY6BFa!IZqc^kK-ZrG%-JoxMA*C)clg+Dl{|=EXL^j-LrI)JmjGVG#rtfpb zf<LloWKI;=Fm67!A$#;nnav0-c-IfA@CV^ofBF%Vaer-F&-!#dHIT*AV%l9x+m2#5 z=4c5UVG#vU@qJOIPkgfrO$`%zsWWcquRm>7a+?{b8C`5Y-s7r8)4+G$V=@d~x2bIc zUrOz6su)Q{RD*|#Z$K^?dj@FK^wt?)2&i>o)||Dgw!kl?y1&@4f}(AtRTxC&)a@r4 z0@L#fo~FJKYhqefLBhX>P%|=0G{l)r>;2<ooztFZgy+4Br3r<kHpR|#qbX}ip<#z| zyWw>ZnMG`50WnWb@zlv%%%tI(IPfIL(XvA*#Rj&h=BJ<nXG)|~R&n-cQM?|wi_oZO z8w3tK4`iZcKIQEu;NCu-yD{6<a+ax(I8<-d;;#PXvbJm1;V2&b#?Ug&W(thxmNYNS z#gFpe0Q=t>LjM;h`)6zJ2Az)acKxcl-3AYlv#Ouu_v^a|m{ERfFr5cfNweZh^6d<d zF7)+U4rmz1=0u_Qn8cA}iIu9KXQbet$;1MoSDJtTAYdr`HSJpx=i|BK$$!L~z9D%G zNpA2e6Jae(XUAC$&csLVVcRiPL51W@MoPiio4Q<c*uBM|`aeN<!EWTq?=*o>0u5M# z@Ene5;q$NK7Ao`<ViWG~`d5ReW0*rZ3Z9ML6}5hcWPnpQ&AC>2YD}|h!6Q8%PP1R2 
zWk1&*j6Fui$LO0aU`V*P;Qf_|3YhqN2nML3EBH#v3w*fy%W#znRrZ742I6QH^s5$! z3wXilwK;#HDgJSC#I5NiU1jG5^4n<#>vk;tk$EE*_|;U}mY$6SgZDd?#l3e5GYLpG zI6En@QSG)SyR<736GZMornO;5KTo!NLoDCf!C<<##Mh5Cm!lF-y2CGRqGU89aU+xD z4ohl|w=;^hj<P<sQ(D!fVh}&U34+?=U|A--ZV4O7D@5%9xd@Q|%y$1rH2weOW<$9x z(xLaHrde9$R_e35<OY#`>R`fyo7B_JySMx;86Nozy1hTLV8kBy)n<kEp;7o_O<Jo% zPTNP<9u$bWb#I4!mUxy}yHG}|e)sx~YoQ81zr6j6O|yuq18NM;=R?^&h45}#8}Etd zsMk}`NpqWR|8Rd9|BY(<O~)3O3wXuwnFvg$fKUPrI80qBf4QC)boat5HyzcM<o?y* z)`?6D{V#?#<xARM?;=zyzR3h_T<z;bLt(viecfp21zI?Kz|5xwzZ5!30w%oDCqb2_ z7tw7h``-fKMz!b`Devv}1JK(*92r_wd|^!Oxj??2>WaO;%mug%vFW{abgftaf&Amg zIO!0Pt(!xv7x?WYn<kx#N3x;lql2=a{#DpTs8zH9xxvZq?8T2l_!4O6Hh@wqUDY!n zYsqo{&3YG_rIyA3I@C5i!!`XqvRiXiA@XH)m;iLodsEkF;Tfdv^0<azh3|yTDX3t4 z1;;u6_7@P;yoF$@`2Q)NUh<duWx^)hVZ5Lk4F)eBae=ufubqx@XsH!d)b42aR@4Gr zC{dyNcTc`^w&oPi@?@hofal4Yy!rOe=BRD{+)Shbsv!9mBq+>&6K|aCR`my~3*X=b zz`!L+XPtl)K^2&DeNO6_`HI1+shn0pN<w#!=EY!eU0WgKjUbO{_kxjWDc~RwKhx?G z3L6;G6_-0fXCix#_7J>DhuM(mGl=yWAIx|Z*09iu$<Qk8g{s`SzQxE3t2SBe!B}dQ zeuV;aV;F^a-d+p$U}~2wfG#ZuNeRAJc(>$Vu7l;zgxEi;qW%m0{=$_Dgg!u&NkS;@ z0A4Z??8H1k!b=sFc~$Z_GL8(JGSL;9S!cQ*dC!!&r^wd1=_!bL_PF}OJ&Ctxk30~S z>^j=5K<>js?^zw<CaO8~cTW+@Edn>_&oYl5*PnWRs1;zCAIv-C;Sr16wwsmcYD7>g zbNyfwdP(4VD)J6tAN0-r(eVz}xUvSZlE0jK<P+V`zk4kjOiIwWK8!0&k6{<VReF$g zO5OwJn%m{t4s3i%_bT@9;bcO@#<jo<lmFvVv<UYd<#E|w`UcK<=E4EN2Z%+C9+H>c z!#4N(PTid-LrdM*C1OS>dyED;k?v+;{(_F~lud7eUFxY{>RBN9Ps!4hAEf*OWP*Bl z$wWAN6uVw55>nM$V0~GAvp94=8Z)WY4u0mg2i?g1P3Ao3#lE7rrjX%5@*SgSCW%v} zVZ;l+t4n<n0UFPiM%q1(7E1J=sbPry5Jk22K~m@ToIN%CEuKdSCS}T`j1Ja;e?smx zobd)|aCFwJQosH=^=Vmi$YX2SkP4_yh2$pPBhTU2ZAzK$wdq5%<c6SaKTqf^_J<WO zVf(m)l9>r8{HLl#Y-gJ~(TlQVdUz;NX7c@BYn-NxgV9<Y^}`X!ISk&fPVTyb_3=VY zyh!qZ_Ujs<u{Hj{wh(aJ#+IRWVsCk}2IF`W4=7%z{iT4D=w5PWSg;I7?;=(bT0`mJ z0)sM*HA~?2a4o_Q*A+G^-B|B%OQ{n7A3bae@e=}r!`b1?%1H&AxCaK%w+2R*)o&px zz57sYFP5+QW&hZNWXR=aCn)a6S!px`UEBVRnH(>>bjH_lhI^NMNsp}BX@3yh{9Lm; zW?MDz&>byn*nNJ>fae`8nx<X>wy|=0`-tQGgf~eiF}UWmr;61HFqG0I+A4Y!LQ~f9 z1$6W_P}u`AIC;C3YpZenOG^*obB_D5_^Uc!oU6$z&B09-0w;4$QdoZ_brbjYWzV>m zezLn_fwKdxcyhtvzWbj65q^}(+q?q=Qc#8zC_JRJlYQOvTe&~zu5tCg2PT;!o6@cS z@qWhRU561j9Gu49+4*4X;N4F#q9&=YL=<9=J`GvaLA1SiVc5I}Vno?V1z()_pkV_P zQ(CD(kn;>gCpw8GaAUbj_RnTDS4e$KKAwpoQ-9`m^OO11mHjtU`boy&8AUs*-pA^E zy$q~zgyFxX9yf+BJu_U@l{)7XINz^|k^lr_nXCR}=coJ%?Csx0@JZ=>F<oH`ye@HO z&Y*#QNyeYDJ@MP-_T%zDa-sVvhKh-FP0H=292#<!+5>~AjR!m#sG;l+bUzNKlRHN| zkz^6WX`{pCpf)dYE(Ho$#jh(lO@n$8jX?5Vd(6aV-F9zuD#P>5*}i+jYxuzfUEdum z@?aNMBJEGN5zvSUPgEmAtyFjiUulK;PawD4N@Huekcz6uVHml#Hn|^1ryO78hlw7} zCnOd<do8<_zGe98{NoW-)Q`+=T%~}(N^$<)>qYc01|xmK!hSPH7m$17oH97yBY4?1 z`x&z5ZJpZLqoUjPnDnMkw{nOqdsjbIM2IJK+<<Z)@~;#+JD{}E+mW%8nGU$npACED zc_@FY-T+d-(NO`!OC%=a$p^m4vUdvo_PDq@#Yc-F@(MF)u!`|{{Hj72r@`D+SEE{0 zsuR*GRu^ePL!CW|xH8PW7Y+)Td&uwSJ2}0loSL1GqZVW(-QKGFOKNw}4c<%7WaTXu z9xjBLCmyM~BLovW&uG$pQzuXw{6s+xe)L7XcJL+tg^=DMgkZ%J^zf?jZy9OQM^bIV z%}!58l^uTfmG9M~tL(H-p}Z3eTyzw2Skma7bRl+cY{}eaK6hr4zf~;>G|Xw0Q0TEx zw~OFGayOBTDV~o;-R*0}H?6a#-viSd{qbpJv(m4hEh_0pl@|ILL@(7d2)eo$(q&!H z0ka`0QgnnwFcm8MBY)EebRb^HzVL>&c1W%{euzZVmMIHl=f|8XQA`cankt7ohuFhV zN~;Pt`_6#;(($fV#ZiIMM^D#m*v-JKaLaWgkCBkO5;#ap^-dO_{FlzPpEtrjT$8~r z#$t@R7R^0%Pw<>6n5`K*)CBPBY~@|Irn;uoZaS^L^`olz$qF|V%9nj|kQg&}g<Jq4 z0K}0n8dmf{Y(MS-A1UYKxTdJ)*wicYLI>kehoqkPgCmKd+VHX2fLFNeLl=%T${<uH z8Z|WCED9?kR9S~yOGt6BHgyN<CYM(!G8^j6Y>X)l3!R0A@|*~HsF{g!zuVSe<Yn@N z;Hb=D+mBBLw&heea#pZs?M3_k(?tr0*<ztxS8URj$30e$!peq~BQpYG3I#ak%V?_z z6q-=6=j7o(nLh-jS-6DZW5<<tk0;@erSj%TlFFDjL+nv|e0D2|hOX8?bj(9Siq5&Z z0e&#tpHjt`sUfFpATV8A-!^zDXwz)pUC;`OVKp0?EsG-s-ykMlzw1g>;pg9|5vmpw zY6s7lR3RrM`*S9=*p6D3K5V8ulkhBgO$aCE5nzB%GM7_5-58K#d3NHNc!U@EwJb2F 
zc|S{52F4l8=0TcXdwEcCU2eCLH%altAPU^l_yncyNf%;oZ+!61={8&<&>s_T@26d* z>=#J>B4y^U+=eyI%Fan2&Yy`Wwf<FR(}#(pw&HwwtYrH*@<HIZ1TvM4u{cexT<m#f z>(R|QlAE_&TxXe;dtY+q;OBos&6{`ITcjkb%D&hEzf3mXS5F<k+E`(>Xh?l}q8#4N z@=z4wk!Nv!!!<>$^W!F7*YVy_Uk|M=YdHqREY@A~N9kvS{cldb57gg1bmv~%q^(D^ zjQ!38;#WVl%~liSZ$ZO~;L+I$ZqJ$~Jq{|pqvzyjOVqDtc4OYInb+;#k6Bf&&$qO9 z=8o82BW5p<yX}z!i9`=xyTNes=SmY!oF-oZ#(8kBU{LOhpYUekRNDDI0ZgZK)`_4y zv4g_-her6zzBa9y*qvSRH2W+8@Q4-OI=bGqS~b4Up!&jiT*8SHAkuWU-kSr_S(Nrz zu)TCT(BK#*bK)LmAYlIe`1MO?Nb~;`H_sR#L(oKZw_)QZ17~~@C|umoPh9s&*N0Ko zsWHCK*Y`fzpFRF_JJgb#iP!wGwFn_d+lJGh#Wd@5sOJ<AOgMB4GbtkoWZ_NisQ~z+ zH!Di#1}4fH)%_y0XV*=$g4NR}*kx{wsXZTm`&J~p=2Yst-!-+e5ShKktU??8ovYzb z8^rdMfQNBd?Zq+}d~|fctrxTEc@(7Ta_^7ta55;=q`*X{+Sa9H?HZ9z;*e5($1i_% z4YHH7<bg#7*0b()Cf-ALOd9jd4--z!%nycv=6`MzP9sjkyM&sA=LP}pVfV0GgM{Ij zB)Y)}iwh;P`g;E<8_vUH0^Y0~U6I{B_UQMqIhotEXIXDdwP^c0Rcziqt$cWw=c+B- zc-8o_oZMb|fBnf7+YgiU${7bDHmUw%z|z~xDB{voC!F$9ega%!lek2~QQ3+@+{B#d zjtp$$$DM=)M2N)p)?mN^?m>T8modeh$*jqT;bPrStqfyNH}CXepAu?*D5v6o_fjD} ziY<cKC7N8eM!L)``aF*qSe07+z*<kLDS+3j@AA8aJNyeo+Bb4XBxvE9bo0jCP4)p5 z5h1h9MMo1amRTCI^14&1sTJmw7)`1P!K{z)X0ts#YaiTMfT5Cp{tu^7F{{#*S8VQG zhj7ua9(`SK`93lII%I>{=?i1!cSZINsIfo(tC!++h49eX$o8#;w=YG==WY+lMzvt5 zK8U@y_!{%AyO`o}CNmRVs^%L_y0(PC4%tlHX6)W<k({Sl@Z=v>y4Gp;OwJ&?r=&9- zE$~P*0n9q3(@L^6%t{mzZ2+BeBJXzl0Q~vZ3OrN!2K@WYO-1=r?CO_Hg}6Yo5h8RF zQc8TgnT6T!xcamW%Ra=PqZw-)KFIv4SY<;(<$OLVYewJ!;AeRLt0=Zb<Ki)^te?@J z*km)(HW-he%+`LNkxdB~Z_~Xc+F@9cP2=_xv&)<F{>72RfhEH!lWKbHEq#LPD-cjB z^=Ud)shya0#ieRV9Iw1_$6imy=x0PbL6eNKB*|Joy4a{9o2Xz?eEBYSCdrxit`~<1 zVgE#E)|d*36qndhRh4C*XQ9(~S}kVz^6GX0IdGts-2^yz5_{FhoiL2{Vfs880#2kT z&a6QUk5B>fHOzz89<a@`_y?IxRWXa@nly$*(Oh+d+Z1~*k$5MrQ-lg02<=s^pYK^$ zVr*}EKWn0r&1$jZ5hIn)m>%6X0Rh9wpJpu(L_gmHRf!(uU#SY&&AG)#3f$(p4{$jU zn8(+Bc)hn)h)DMk4j$wu&4Gj=kHIO+kh|yVEq?qFDbN*t;><LXDPwCIx6Y>h?;3)k z@o7!YBlH{RoNcST;4*`#45J>dc7_=2<4S`BoUD;wUhSYTmSa`kbyEg2%PGs8=L)zV zeeWNI+(_WC!?4ziY#nmsy?bI1)+CC&dv^8)uD<EK+q7OxAQNeh6Ck*Ny#o1nTAC}C zX{Olfzp_sKLr<k{I9Zt<_76K(I6DZO=U?BC%Q9#<lbv0n!*d*aO;<qZ{7KrdF@373 zb44oLZrH#@uP=73vGGF}N_>Y#t(!4whuEFC7amVzEK^b02{4>Sv-(Vt@)Ei~@Y42E z)q0|OfsRV~wDLpV(mNcQ;X5l#C61wATEHM0p)SDlVS@$m5ggyblhMt4!B+ba#o|M^ z72>?unM;1l;P?V$th%4CO3+!;28pFu8wK+;dAgBBtQDWEk_jT6l+Ou+_o}`p5^m{{ z0_@!eD3jQ{1~NhqqeO`3)>}mNXoKZuvB%(2(2;-t^J924VSJIsx;rw-z`#gOe=mBr z<sn$$J5K2}-li2MhKAAHjQH`zKkF%W(Rrl*VO;n?2lHm1(_q>iYO0}Sw#iPUK>$Vg zD5M!N&>3#=UeD{9fi+b3JlRb5;}KzEi8A^|Kc?~$SU$XXEiwF9G7e@^M;+m)YvMc_ z5cYxv{2m3|eQ*y1SxdhKPX|1I-#$`JT{Xc&?--u?{P}uq`u8von~O`bB(OZoLm~YD zqWaw4a}qYoUT%{{kfShPWobxk`n?yuL#=YCprbUkQ@*BMEj9hl0~)cm`i<Kg^d$|@ zCiMfiiWJ!HFMQ<norIBd+I?K3_ODB5z<&zmAL;`_(tI+FbiKoaOb(<}7LNARp7(}U zZpxF&D(*E3UjbE6$vEg{z(xg7lhY}8&UPc|Egkz8-|QpsJhc<_0|@?_bT&rk=1G@e zkr`3ykORbKl{Bt4b}jm=gRgADC9qt7A(AxE1JqL(<`YFUgN?F8u%VlVluEOxK4}(u zX$mMX#BQ5yn)x`Px)XjNpu_{Z1oTOqBv@#CD@=!k!QVG-2o31f22j38GW~<qE_&#J zW0klefbCaHr(AlE%_&TnMcHi;sFR!-{O<noeIN^3{*h1tjs^CWyqsN;Vyt_%+nuUX zo_otla<F-Yvc;xYh(nkP+24;@2ge;PW86*f%zA~>6+&lvkL!o{3zab&D>~H(+{Y4; zJF9N2Q9V@Z#y!~hRQR;v3^7YN&h%DO{JvE}uvM;4XECJGzIVi}?%rEsp5U|*lgA)> z$>+A!Um9143ck{xwNu>)WiEdsZYCG}Q0&S&OB!$TO<p~5U2itqzgs~x$HNeUQpCCp zf#pLc)g3-i=e(fK`SbvCGXI4k6Ca?8RH@%^2^4WAHiA@?+bSw53|*iN6OpQ7lYLz# zJie6@x^k7Fr3iYqA<ND_>Y!Bgn(&VM0=h#3#9`?Wu5f3fE$;7}k|rngFRTdt<(49J z@GU0|fz1umw5M9$okp@P?qMy`0UDo!8^P|0IFXCKya_k>6`m5j6Q5-Yg7D~NqJ%_l zJZDL8;<xS%s)v=f!G*7VbvTod-ekk*la7{QP$MpkeZD>jvXGr<LblxjLETr#(*!hz z*@ZD?VX)QY`3=}AJT#m5k*e+W!Um*-*?1qWvf0*ZwCfSRabz-%JGlrsKmF~iY}fNH zFlg{J2UmsC2HG7C60*;V-#9kwBv0swf`JqtW{z=kXIvxJzy5uY{Os!97AEIgk_~?{ zB4V5GnIg{SXnL}*e<8FsOMwwfVV=XxavC1)=1NpJArMn2qqM&jzSiLBrdecPZiy4* 
zsp_sNy425dvB((Ms=h9s3x-W2cZ=Mnv4zHOELc_ui_^BIMI5I@Oh_|Mo#;dg+fzNj z3)X2;rMOiM96v<Iiu}%5gvD&#A!y7B(4Gg1B6@<}3`{*Ju!t{f1wGf0<EJO`@br){ z`%;+)%lOflX<>N(ZDoPSpz?ss#Lrf2v)YX;HFDQ*g*`-73?sey-9<^&4K&rmrk}P; z=Wb^H(%{@~@VnCI83wK&3uZG1sGc<%s-b9F?A{ENAw!B2Dnqz`coBN(6KmH^QM6P; zVT5kXW=#?Y{&*b3_eED>T93*2CZ}4oPV^bW_CgTOK9kvl3{TC>7ePxuZ$r0`!R<hB z@y~GvJn|ST?K`G&J~gdXu{^?$MJx;_wTi-4pYb-vS$`NF6;)R<AUJRK)f|EaP*{7~ z$U9H!FxLASL6GeO;-qF(*K00~iB@qZ0}#R&X9xN*8aIL6ha<?Z(MIb;!b6;2Gh)1E zX~>nYf6rl9Fo-G4VK~HB=N}qtFWLN(dOB+!-5JIFVG78UZnLIY5>!3&UiKqf6X7iG z&e@yzeJjpeX}g5r)Ym6$-mKYv%z85vJ9)%ZehxARLN2%fQZ|1-2JD%R{W?&K9<RdZ zlpoGV-R1-1NM4e*81aacwaoYa%WW6R8FF_$eohztaiy%qG#n{eze?Qd@Y140cPA_v z*}9mtZ?)H{*;yn^XU?g7kGvGx^5PG@pJG<I!+H1B{kUMhy~`Jk9#WARvg{<UrTjEP zS#_AJR0V5vxwB%{5|~}Fr#3OE!oE)=n#5t)w|!y*tgknULM4_bk3|DI@R9i6O!V`{ z6qu}*Sh8Tf*_jo#U=MyKLNLvp7YTg}<a6?o4|xNOB60*uS>Y}>OA^u3I{lqhQWXt( zyVqv@L8Tu`0wtFPB&V{@FD-)Yt`=v>Dut^Fr$WyAr4`6^=>W|c&8=mj77{WRsY%W6 zKofGHcd@$UDYwtrgZC3It&bh<p%{qz!99MD>qyX{%?dLx6y2b!U({gAIe!(lD*fTg zh$Fs~Iv2SwJgf5hTXyz|-@&~k{~jWhH)ZvYy<SbmxDct>awH}u@zz$~5;x@0Fu)s2 zf|br{Chr5&xO0*185o??g7-TuZ)woM_+A(^xI0KYo&9(ox9%$K06}t`AFu!X;Zqj{ zkNHB{H?Sk~HIwdbFPgd*Y+AD#mlaZgEx<)F)R0nvpCUL`O)K#Edsbb#)mS&vyws_X zlYWv)*&*Zgalyqn{aTfi%`G1Xo4s!Xncu)JC~CMra*&|i4dnia@Vo)eqrom@s1L(- zJ8vzmTV~gUCH;s^$gk##_C!feukjJDS%$Px#TsnTUy^aLF-jP1(@L&<gZ$(3v{H}8 z{Z(*894G}mxx_D~`>@d>Sd`c78j?!_ld@NNqK}#Yi{S5m(JYDEpLy+#gK6W>(<5J? zL$+8W+=h*Ef=pep24f1eHl#PY!7q~mw}xDqIg2hp*k0Wk9sDme5w@y4rFSD>R<Y}Y zX_>QE=L@&_F4&LF$)`*bO;WH8s8D+xZz-Ff532E$)bH|(vd`1bT-U2@W1l-Ig2Ltn z`2_~`bPnC8Ro|o#NsKdll1Y@0DOhD|ykWYAd)>Nc2JU8IKM2z>0@02)46Zyt2R1RC ztPdjF9m!W{ufO_q-~_3B)DWNg<afnBnBmjYJJ%9Nwm2<y7Vp>^JYQ7~A6VJ<z}<g7 z;k(vvu1+q(dfctA?LC0$0)tF6gJO#eYA96e%;3iY8|odt{XGL0M?%4&$E8nVT@>L` z62<%Q+%z(=(p*sT`(w$GDtW0Dy7T<CQqGX4uFvi*)EWjWfvub@tKM_+C@b<F_jOmb zM(5wB%HA?w{VNo-mvBT&eNs}&NBFym5~s$ehg&XgW;>^PtYvrg31xDi)JDTV<gS5U za6SR-_R>q7QGawLO$yO}<57fMe*pFpMam8j%kXXlOEx^NbHKq>IG2M6CoL-Ad3jv) z<2ADB-mjWDuzsz^EHa%!e(QG%7Jr{BOa<TA-UY2L!3^AD7jycE3K6KyPOt*?Kv^<! 
zG!_>`{luZxzXD%77blpKqd^!WTZ=m8{W9C9WGj3+-|uY4bFT04HJ@`Kyvv>&lduAU z2s$f9QS1Cm2KP5Dr|2l7(n>yxnHJCNm~6MMUoA0sOV@8VC%w3KGuY8Rcu9$9Zx@lT z7o71DZJ^zliU=j`52ni7<D-ic*i~0eHF)F5(_HjjEP5wOH)5mfZK-1XMN`WDw@ljZ zPRq{PeyrZom2YY2)v4lv@srb`c^&dQFL4V_ufzQ5kgD~?c!7{Ox~6Z7i!#eIXT3!c zf?)?q2%<JUmNo9$rG0mOUvz=ji=>S3zkjfihp(m#2v2aef^s{uwtA!0V{%8K(9n^g z`)h;8Iow-n5h0WPOjh0R#PGn}3DraWJdYax)8m}xrKlF#@NbbegFUd_7S{|}3oEK~ zl_rR-+LHee=D5s16F#ti4<F%c)trK4Yhc?{LUp!K2^Yxgc^o!?>n5stf~)7gVQ^^X zlz->MK+g&IrTY$a1#lY{TM^)4IvmqX!XfqIk`8k@MzZXgQ|;4}sxD`zUC-9c%4<I= z*q8+&UmIepR3(8YMIkA23S#}}n5l9bU;YT?p4@?%+8)&!jN5Vm#NV1&?QKp=j@O<` z3xhYC%}WQskg?C%)6u&w$DPEq@)3EWYnPM6Modq0S;Lx;g<i!WauJt(C0V@Y&bLoL zVyQN&32-H7m?Zslui>cc?&f<JQc~?6YE$O`>%i#Hhhw*|swIu8X`#Oj<@wSnq=%~7 zo-<m6)Nz`CMUf+);cHI&w(jaZM1fGY|KJS0|AQY7e)@=a3JAy0eshsvYqVBb4EC?P zpVB6<({O~jWHz?%^}r8&Qb0V;qCWE5RnSD*M&KJ5P7--*?eDzRS9*|o0D$Y(5~Uej zKJ4mx`L`O;z9sG3xx5;>1nxr0d&37bMob&QIb+PX%GWVLsWll-M4j+rqmonD1hD;X z<^S!`A912Pidzpkit>jrl)Iis_vun@yWd%nVFuo*3WRfmxx8#i-y#DPAftxKbXw14 zY4z9Dee^(?v5xv5&u?CH2|vZhU2&PF2p{f3%zi9-!*|XI4KrHd`i_(`VJWLh&S^)O zJ#q?OdO=5sal-dS{OVxWj1`|!Y<97iZUZM)J>w_i#IAy$cdtm7%NEM1-76*}v4IjS z`=$m=%|{OgFl=ebU^57W{Q>cGssYrCZ+O)-J;X*n$yX#>rxtg0wgzJ<S27kc38;zW z5Zw}bq7?^PH$Z|zQ)s%=a5T!f*G+|2+~frMVc0T^PY$`~rp<2ixMcT-Ir^q8*wg{r zq!fjrWN-V?S)@KvQ(4d8O%GS|v9u1xI#NK1x%T(ts2&PJo8hRxXfU_s=z&pWW0Sgr zX#5s_n>I$1?u3hBxT>dd{9WEvvAHxEEcs4w^|DW2eu;j8RVGs=-n3~#AvoZg=h~&Y z(;@g)Vf7w7K;v)`O9QMmhQ!f_Ui$QU1dolTQ__oVw(l)s+e7>bllJ)<#=4bGGSd$| zxe}c3lI_G|aS&v;;R=I`D7c4Q+jcuEbjyU;N*_ybTi>m)kH^O<Kfx`xVWXt(TiuP3 zxh&VPve0ibs{!A!iC)Mst3JraOg`S8>QMPB9R0Nh7mQb%>?Jl>tj%xk#_WQ%XNx7+ zgLPv8oFt$vZuH@yQR3=UIpF5w2o-y4D)<V+a655h*i{vU^3OARwv-W~x=*35(&q(2 z`5HuP$(#la3np+(ddte9ce)MFW6C^SKtR=rIseklP%8n-5c6=)tKtHQ>DnED_-pVT z)e6x&l<!)ZN?%$ouh%Dm(-8fJ?g!4CcRMf)<b@0|I}OD8B>DyCqs>7nbWP+tPSG>( z^Y@$rG&VEuw~PMmvv*z*ir6(4)N2Z3w!Vb9A~-VAE=&V$x#0vnkwJ%Ba!*4Ft>`$p zA_8M-y;NOjB%aZy8aQxo`^|d!dExYqh`?zy<Sc@2^ZE}fV*cnUgJ?+W+)%24kPrQW zDtXl1AbQUmo<5aWo!*f@klfg!RTIEFoPjITu-P8?P9E`<u$V_EKeqtxxNhZWhV;)* z_e1Ao@O>U7LtiF>Sxr#98z4LGe{mT93#ae@JC*X^FC+Jm1_%$_1BsNkbV#24!K!L+ zumjP`eIR#bpnP~_G+vq(D0KHna>3FI?dp}b7>(c{_n<AQMIB^Ap`5ZP;KK%sSoMpi zz)3mnvC6-kKJuQvJp%^ecM}Q#aLxNDhkiUAWTq6DFGzv^fzw}}+Cy3Z95MQzVSjSt zuTRs&^RkZ-Z_|QTwf~i1!M#PAzPh^9aeuQuzB8@mQ%hz<zG(W~+E!lyC9K;<q?z+A zv<^Bc*e+tO_{_ESlZEytA{8m>hx2c&XJl3hY_N@QM{UVN_&SjBIdJ_yW|{tb)%;UR z>VLnRlR#1%nYh`B@3g2EY9a-a>nqv~GAE5r|6iD~@qLy5py$p8y6}*O`GjPGtfV1Q zL_a$GnIoXZ_gNI=qPUE_ew2cw+~@}4wFN6k;2;GFRz1f1>6zoS-UX#s_+oUVbYbRO zf$cxdFX+MOC#Ixf?-(~jD7V`|=1_t8vW>Yc{t%p=I)mpyVv_S&XLV^mF-XEh(NvH= z>TAVT*^ZCwT(Fcu^Ix)mGQ1a4s}l~gk|0@fNM2Zl9nrBw!Wy@=K4DB|7g0h^1^7o3 yllGgZg0CH-$0=36P`O}C{bv%iVW@pAf-+YEBx&bEe-cPt2VnQ~)hktPpZ^a=hZQ0K literal 0 HcmV?d00001 diff --git a/metadata-service/war/src/main/resources/boot/data_platforms.json b/metadata-service/war/src/main/resources/boot/data_platforms.json index 3c70eda8561b8..0574f3fda4017 100644 --- a/metadata-service/war/src/main/resources/boot/data_platforms.json +++ b/metadata-service/war/src/main/resources/boot/data_platforms.json @@ -574,5 +574,15 @@ "type": "OTHERS", "logoUrl": "/assets/platforms/fivetranlogo.png" } + }, + { + "urn": "urn:li:dataPlatform:csv", + "aspect": { + "datasetNameDelimiter": ".", + "name": "csv", + "displayName": "CSV", + "type": "OTHERS", + "logoUrl": "/assets/platforms/csv-logo.png" + } } ] From 6a1801089116e04333ab20c80183ff73c0b2374c Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Mon, 4 Dec 2023 23:46:42 +0530 Subject: [PATCH 040/263] fix(vulns): 
node-fetch forwards secure headers to untrusted sites (#9375) unrelated smoke test failing --- datahub-web-react/package.json | 2 +- datahub-web-react/yarn.lock | 1986 ++++++++++++++------------------ 2 files changed, 893 insertions(+), 1095 deletions(-) diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 22c88f9647dc2..c26338ea285fb 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -124,7 +124,7 @@ }, "devDependencies": { "@babel/plugin-proposal-private-property-in-object": "^7.21.11", - "@graphql-codegen/cli": "1.20.0", + "@graphql-codegen/cli": "^5.0.0", "@graphql-codegen/near-operation-file-preset": "^1.17.13", "@graphql-codegen/typescript-operations": "1.17.13", "@graphql-codegen/typescript-react-apollo": "2.2.1", diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index d33299a79b13a..41b542da97550 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -184,6 +184,13 @@ signedsource "^1.0.0" yargs "^15.3.1" +"@ardatan/sync-fetch@^0.0.1": + version "0.0.1" + resolved "https://registry.yarnpkg.com/@ardatan/sync-fetch/-/sync-fetch-0.0.1.tgz#3385d3feedceb60a896518a1db857ec1e945348f" + integrity sha512-xhlTqH0m31mnsG0tIP4ETgfSB6gXDaYYsUWTrlUV93fFQPI9dd8hE0Ot6MHLCtqgB32hwJAC3YZMWlXZw7AleA== + dependencies: + node-fetch "^2.6.1" + "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" @@ -204,6 +211,11 @@ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" integrity sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA== +"@babel/compat-data@^7.22.9": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" + integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== + "@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.14.0", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.5.tgz#d67d9747ecf26ee7ecd3ebae1ee22225fe902a89" @@ -225,6 +237,27 @@ json5 "^2.2.2" semver "^6.3.0" +"@babel/core@^7.22.9": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.5.tgz#6e23f2acbcb77ad283c5ed141f824fd9f70101c7" + integrity sha512-Cwc2XjUrG4ilcfOw4wBAK+enbdgwAcAJCfGUItPBKR7Mjw4aEfAFYrLxeRp4jWgtNIKn3n2AlBOfwwafl+42/g== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.5" + "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helpers" "^7.23.5" + "@babel/parser" "^7.23.5" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + "@babel/eslint-parser@^7.16.3": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.22.5.tgz#fa032503b9e2d188e25b1b95d29e8b8431042d78" @@ -244,7 +277,7 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/generator@^7.23.5": 
+"@babel/generator@^7.18.13", "@babel/generator@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.5.tgz#17d0a1ea6b62f351d281350a5f80b87a810c4755" integrity sha512-BPssCHrBD+0YrxviOa3QzpqwhNIXKEtOa2jQrm4FlmkC2apYgRnQcmPWiGZDlGxiNtltnUFolMe8497Esry+jA== @@ -279,6 +312,17 @@ lru-cache "^5.1.1" semver "^6.3.0" +"@babel/helper-compilation-targets@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz#0698fc44551a26cf29f18d4662d5bf545a6cfc52" + integrity sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw== + dependencies: + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.15" + browserslist "^4.21.9" + lru-cache "^5.1.1" + semver "^6.3.1" + "@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0", "@babel/helper-create-class-features-plugin@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.5.tgz#2192a1970ece4685fbff85b48da2c32fcb130b7c" @@ -362,6 +406,13 @@ dependencies: "@babel/types" "^7.22.5" +"@babel/helper-module-imports@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== + dependencies: + "@babel/types" "^7.22.15" + "@babel/helper-module-transforms@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz#0f65daa0716961b6e96b164034e737f60a80d2ef" @@ -376,6 +427,17 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-module-transforms@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" + integrity sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ== + dependencies: + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.20" + "@babel/helper-optimise-call-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" @@ -448,16 +510,21 @@ resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== -"@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" - integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== - "@babel/helper-validator-identifier@^7.22.20": version "7.22.20" resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== +"@babel/helper-validator-identifier@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" + integrity sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ== + +"@babel/helper-validator-option@^7.22.15": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307" + integrity sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw== + "@babel/helper-validator-option@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" @@ -482,6 +549,15 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helpers@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.5.tgz#52f522840df8f1a848d06ea6a79b79eefa72401e" + integrity sha512-oO7us8FzTEsG3U6ag9MfdF1iA/7Z6dz+MtFhifZk8C8o453rGJFFWUP1t+ULM9TUIAzC9uxXEiXjOiVMyd7QPg== + dependencies: + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" + "@babel/highlight@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.5.tgz#aa6c05c5407a67ebce408162b7ede789b4d22031" @@ -500,17 +576,12 @@ chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@7.12.16": - version "7.12.16" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.16.tgz#cc31257419d2c3189d394081635703f549fc1ed4" - integrity sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw== - "@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.14.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== -"@babel/parser@^7.22.15", "@babel/parser@^7.23.5": +"@babel/parser@^7.16.8", "@babel/parser@^7.22.15", "@babel/parser@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" integrity sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ== @@ -673,6 +744,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" +"@babel/plugin-syntax-import-assertions@^7.20.0": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.23.3.tgz#9c05a7f592982aff1a2768260ad84bcd3f0c77fc" + integrity sha512-lPgDSU+SJLK3xmFDTV2ZRQAiM7UuUjGidwBywFavObCiZc1BeAAcMtHJKUya92hPHO+at63JJPLygilZard8jw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-import-assertions@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" @@ -1384,7 +1462,7 
@@ dependencies: regenerator-runtime "^0.13.11" -"@babel/template@^7.22.15": +"@babel/template@^7.18.10", "@babel/template@^7.22.15": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== @@ -1402,7 +1480,7 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@7.12.13", "@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.22.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": +"@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.16.8", "@babel/traverse@^7.22.5", "@babel/traverse@^7.23.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.5.tgz#f546bf9aba9ef2b042c0e00d245990c15508e7ec" integrity sha512-czx7Xy5a6sapWWRx61m1Ke1Ra4vczu1mCTtJam5zRTBOonfdJ+S/B6HYmGYu3fJtr8GGET3si6IhgWVBhJ/m8w== @@ -1418,15 +1496,6 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/types@7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.12.13.tgz#8be1aa8f2c876da11a9cf650c0ecf656913ad611" - integrity sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ== - dependencies: - "@babel/helper-validator-identifier" "^7.12.11" - lodash "^4.17.19" - to-fast-properties "^2.0.0" - "@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" @@ -1436,7 +1505,7 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" -"@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": +"@babel/types@^7.16.8", "@babel/types@^7.18.13", "@babel/types@^7.22.15", "@babel/types@^7.23.0", "@babel/types@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" integrity sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w== @@ -1727,16 +1796,6 @@ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.3.0.tgz#ea89004119dc42db2e1dba0f97d553f7372f6fcb" integrity sha512-AHPmaAx+RYfZz0eYu6Gviiagpmiyw98ySSlQvCUhVGDRtDFe4DBS0x1bSjdF3gqUDYOczB+yYvBTtEylYSdRhg== -"@endemolshinegroup/cosmiconfig-typescript-loader@3.0.2": - version "3.0.2" - resolved "https://registry.yarnpkg.com/@endemolshinegroup/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-3.0.2.tgz#eea4635828dde372838b0909693ebd9aafeec22d" - integrity sha512-QRVtqJuS1mcT56oHpVegkKBlgtWjXw/gHNWO3eL9oyB5Sc7HBoc2OLG/nYpVfT/Jejvo3NUrD0Udk7XgoyDKkA== - dependencies: - lodash.get "^4" - make-error "^1" - ts-node "^9" - tslib "^2" - "@eslint-community/eslint-utils@^4.2.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" @@ -1777,66 +1836,55 @@ "@graphql-codegen/plugin-helpers" "^1.18.2" tslib "~2.0.1" -"@graphql-codegen/cli@1.20.0": - version "1.20.0" - resolved "https://registry.yarnpkg.com/@graphql-codegen/cli/-/cli-1.20.0.tgz#e1bb62fce07caaf1395ca6e94ffc0f2ba1f57938" - integrity 
sha512-5pLtZoaqEmEui6PR7IArmD23VLD3++UQby6iNe4NFG4eMcRai2raIM0E4a/MSn7SjyfSRguekYMMC5JKS1VgQw== - dependencies: - "@graphql-codegen/core" "1.17.9" - "@graphql-codegen/plugin-helpers" "^1.18.2" - "@graphql-tools/apollo-engine-loader" "^6" - "@graphql-tools/code-file-loader" "^6" - "@graphql-tools/git-loader" "^6" - "@graphql-tools/github-loader" "^6" - "@graphql-tools/graphql-file-loader" "^6" - "@graphql-tools/json-file-loader" "^6" - "@graphql-tools/load" "^6" - "@graphql-tools/prisma-loader" "^6" - "@graphql-tools/url-loader" "^6" - "@graphql-tools/utils" "^7.0.0" - ansi-escapes "^4.3.1" - camel-case "^4.1.2" +"@graphql-codegen/cli@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@graphql-codegen/cli/-/cli-5.0.0.tgz#761dcf08cfee88bbdd9cdf8097b2343445ec6f0a" + integrity sha512-A7J7+be/a6e+/ul2KI5sfJlpoqeqwX8EzktaKCeduyVKgOLA6W5t+NUGf6QumBDXU8PEOqXk3o3F+RAwCWOiqA== + dependencies: + "@babel/generator" "^7.18.13" + "@babel/template" "^7.18.10" + "@babel/types" "^7.18.13" + "@graphql-codegen/core" "^4.0.0" + "@graphql-codegen/plugin-helpers" "^5.0.1" + "@graphql-tools/apollo-engine-loader" "^8.0.0" + "@graphql-tools/code-file-loader" "^8.0.0" + "@graphql-tools/git-loader" "^8.0.0" + "@graphql-tools/github-loader" "^8.0.0" + "@graphql-tools/graphql-file-loader" "^8.0.0" + "@graphql-tools/json-file-loader" "^8.0.0" + "@graphql-tools/load" "^8.0.0" + "@graphql-tools/prisma-loader" "^8.0.0" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.8.0" chalk "^4.1.0" - chokidar "^3.4.3" - common-tags "^1.8.0" - constant-case "^3.0.3" - cosmiconfig "^7.0.0" + cosmiconfig "^8.1.3" debounce "^1.2.0" - dependency-graph "^0.9.0" detect-indent "^6.0.0" - glob "^7.1.6" - graphql-config "^3.2.0" - indent-string "^4.0.0" - inquirer "^7.3.3" + graphql-config "^5.0.2" + inquirer "^8.0.0" is-glob "^4.0.1" + jiti "^1.17.1" json-to-pretty-yaml "^1.2.2" - latest-version "5.1.0" - listr "^0.14.3" - listr-update-renderer "^0.5.0" + listr2 "^4.0.5" log-symbols "^4.0.0" - lower-case "^2.0.1" - minimatch "^3.0.4" - mkdirp "^1.0.4" - pascal-case "^3.1.1" - request "^2.88.2" + micromatch "^4.0.5" + shell-quote "^1.7.3" string-env-interpolation "^1.0.1" ts-log "^2.2.3" - tslib "~2.0.1" - upper-case "^2.0.2" - valid-url "^1.0.9" - wrap-ansi "^7.0.0" - yaml "^1.10.0" - yargs "^16.1.1" + tslib "^2.4.0" + yaml "^2.3.1" + yargs "^17.0.0" -"@graphql-codegen/core@1.17.9": - version "1.17.9" - resolved "https://registry.yarnpkg.com/@graphql-codegen/core/-/core-1.17.9.tgz#c03e71018ff04d26f5139a2d90a32b31d3bb2b43" - integrity sha512-7nwy+bMWqb0iYJ2DKxA9UiE16meeJ2Ch2XWS/N/ZnA0snTR+GZ20USI8z6YqP1Fuist7LvGO1MbitO2qBT8raA== +"@graphql-codegen/core@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@graphql-codegen/core/-/core-4.0.0.tgz#b29c911746a532a675e33720acb4eb2119823e01" + integrity sha512-JAGRn49lEtSsZVxeIlFVIRxts2lWObR+OQo7V2LHDJ7ohYYw3ilv7nJ8pf8P4GTg/w6ptcYdSdVVdkI8kUHB/Q== dependencies: - "@graphql-codegen/plugin-helpers" "^1.18.2" - "@graphql-tools/merge" "^6" - "@graphql-tools/utils" "^6" - tslib "~2.0.1" + "@graphql-codegen/plugin-helpers" "^5.0.0" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.0" + tslib "~2.5.0" "@graphql-codegen/fragment-matcher@^5.0.0": version "5.0.0" @@ -1868,7 +1916,7 @@ lodash "~4.17.0" tslib "~2.3.0" -"@graphql-codegen/plugin-helpers@^5.0.0": +"@graphql-codegen/plugin-helpers@^5.0.0", "@graphql-codegen/plugin-helpers@^5.0.1": version "5.0.1" resolved 
"https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-5.0.1.tgz#e2429fcfba3f078d5aa18aa062d46c922bbb0d55" integrity sha512-6L5sb9D8wptZhnhLLBcheSPU7Tg//DGWgc5tQBWX46KYTOTQHGqDpv50FxAJJOyFVJrveN9otWk9UT9/yfY4ww== @@ -1929,125 +1977,181 @@ parse-filepath "^1.0.2" tslib "~2.3.0" -"@graphql-tools/apollo-engine-loader@^6": - version "6.2.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-6.2.5.tgz#b9e65744f522bb9f6ca50651e5622820c4f059a8" - integrity sha512-CE4uef6PyxtSG+7OnLklIr2BZZDgjO89ZXK47EKdY7jQy/BQD/9o+8SxPsgiBc+2NsDJH2I6P/nqoaJMOEat6g== +"@graphql-tools/apollo-engine-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-8.0.0.tgz#ac1f351cbe41508411784f25757f5557b0f27489" + integrity sha512-axQTbN5+Yxs1rJ6cWQBOfw3AEeC+fvIuZSfJLPLLvFJLj4pUm9fhxey/g6oQZAAQJqKPfw+tLDUQvnfvRK8Kmg== dependencies: - "@graphql-tools/utils" "^7.0.0" - cross-fetch "3.0.6" - tslib "~2.0.1" + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.9.0" + tslib "^2.4.0" -"@graphql-tools/batch-execute@^7.1.2": - version "7.1.2" - resolved "https://registry.yarnpkg.com/@graphql-tools/batch-execute/-/batch-execute-7.1.2.tgz#35ba09a1e0f80f34f1ce111d23c40f039d4403a0" - integrity sha512-IuR2SB2MnC2ztA/XeTMTfWcA0Wy7ZH5u+nDkDNLAdX+AaSyDnsQS35sCmHqG0VOGTl7rzoyBWLCKGwSJplgtwg== +"@graphql-tools/batch-execute@^9.0.1": + version "9.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/batch-execute/-/batch-execute-9.0.2.tgz#5ac3257501e7941fad40661bb5e1110d6312f58b" + integrity sha512-Y2uwdZI6ZnatopD/SYfZ1eGuQFI7OU2KGZ2/B/7G9ISmgMl5K+ZZWz/PfIEXeiHirIDhyk54s4uka5rj2xwKqQ== dependencies: - "@graphql-tools/utils" "^7.7.0" - dataloader "2.0.0" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/utils" "^10.0.5" + dataloader "^2.2.2" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/code-file-loader@^6": - version "6.3.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/code-file-loader/-/code-file-loader-6.3.1.tgz#42dfd4db5b968acdb453382f172ec684fa0c34ed" - integrity sha512-ZJimcm2ig+avgsEOWWVvAaxZrXXhiiSZyYYOJi0hk9wh5BxZcLUNKkTp6EFnZE/jmGUwuos3pIjUD3Hwi3Bwhg== +"@graphql-tools/code-file-loader@^8.0.0": + version "8.0.3" + resolved "https://registry.yarnpkg.com/@graphql-tools/code-file-loader/-/code-file-loader-8.0.3.tgz#8e1e8c2fc05c94614ce25c3cee36b3b4ec08bb64" + integrity sha512-gVnnlWs0Ua+5FkuHHEriFUOI3OIbHv6DS1utxf28n6NkfGMJldC4j0xlJRY0LS6dWK34IGYgD4HelKYz2l8KiA== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.5.1" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/graphql-tag-pluck" "8.1.0" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/delegate@^7.0.1", "@graphql-tools/delegate@^7.1.5": - version "7.1.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/delegate/-/delegate-7.1.5.tgz#0b027819b7047eff29bacbd5032e34a3d64bd093" - integrity sha512-bQu+hDd37e+FZ0CQGEEczmRSfQRnnXeUxI/0miDV+NV/zCbEdIJj5tYFNrKT03W6wgdqx8U06d8L23LxvGri/g== +"@graphql-tools/delegate@^10.0.0", "@graphql-tools/delegate@^10.0.3": + version "10.0.3" + resolved "https://registry.yarnpkg.com/@graphql-tools/delegate/-/delegate-10.0.3.tgz#2d0e133da94ca92c24e0c7360414e5592321cf2d" + integrity sha512-Jor9oazZ07zuWkykD3OOhT/2XD74Zm6Ar0ENZMk75MDD51wB2UWUIMljtHxbJhV5A6UBC2v8x6iY0xdCGiIlyw== dependencies: - "@ardatan/aggregate-error" "0.0.6" - 
"@graphql-tools/batch-execute" "^7.1.2" - "@graphql-tools/schema" "^7.1.5" - "@graphql-tools/utils" "^7.7.1" - dataloader "2.0.0" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/batch-execute" "^9.0.1" + "@graphql-tools/executor" "^1.0.0" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.5" + dataloader "^2.2.2" + tslib "^2.5.0" + +"@graphql-tools/executor-graphql-ws@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-1.1.0.tgz#7727159ebaa9df4dc793d0d02e74dd1ca4a7cc60" + integrity sha512-yM67SzwE8rYRpm4z4AuGtABlOp9mXXVy6sxXnTJRoYIdZrmDbKVfIY+CpZUJCqS0FX3xf2+GoHlsj7Qswaxgcg== + dependencies: + "@graphql-tools/utils" "^10.0.2" + "@types/ws" "^8.0.0" + graphql-ws "^5.14.0" + isomorphic-ws "^5.0.0" + tslib "^2.4.0" + ws "^8.13.0" -"@graphql-tools/git-loader@^6": - version "6.2.6" - resolved "https://registry.yarnpkg.com/@graphql-tools/git-loader/-/git-loader-6.2.6.tgz#c2226f4b8f51f1c05c9ab2649ba32d49c68cd077" - integrity sha512-ooQTt2CaG47vEYPP3CPD+nbA0F+FYQXfzrB1Y1ABN9K3d3O2RK3g8qwslzZaI8VJQthvKwt0A95ZeE4XxteYfw== +"@graphql-tools/executor-http@^1.0.0": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-http/-/executor-http-1.0.4.tgz#d4b3b32430c24b0167760d3b6ffb91846a3b6956" + integrity sha512-lSoPFWrGU6XT9nGGBogUI8bSOtP0yce2FhXTrU5akMZ35BDCNWbkmgryzRhxoAH/yDOaZtKkHQB3xrYX3uo5zA== + dependencies: + "@graphql-tools/utils" "^10.0.2" + "@repeaterjs/repeater" "^3.0.4" + "@whatwg-node/fetch" "^0.9.0" + extract-files "^11.0.0" + meros "^1.2.1" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/executor-legacy-ws@^1.0.0": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-1.0.4.tgz#27fcccba782daf605d4cf34ffa85a675f43c33f6" + integrity sha512-b7aGuRekZDS+m3af3BIvMKxu15bmVPMt5eGQVuP2v5pxmbaPTh+iv5mx9b3Plt32z5Ke5tycBnNm5urSFtW8ng== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/utils" "^10.0.0" + "@types/ws" "^8.0.0" + isomorphic-ws "5.0.0" + tslib "^2.4.0" + ws "8.14.2" -"@graphql-tools/github-loader@^6": - version "6.2.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/github-loader/-/github-loader-6.2.5.tgz#460dff6f5bbaa26957a5ea3be4f452b89cc6a44b" - integrity sha512-DLuQmYeNNdPo8oWus8EePxWCfCAyUXPZ/p1PWqjrX/NGPyH2ZObdqtDAfRHztljt0F/qkBHbGHCEk2TKbRZTRw== +"@graphql-tools/executor@^1.0.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor/-/executor-1.2.0.tgz#6c45f4add765769d9820c4c4405b76957ba39c79" + integrity sha512-SKlIcMA71Dha5JnEWlw4XxcaJ+YupuXg0QCZgl2TOLFz4SkGCwU/geAsJvUJFwK2RbVLpQv/UMq67lOaBuwDtg== dependencies: - "@graphql-tools/graphql-tag-pluck" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - cross-fetch "3.0.6" - tslib "~2.0.1" + "@graphql-tools/utils" "^10.0.0" + "@graphql-typed-document-node/core" "3.2.0" + "@repeaterjs/repeater" "^3.0.4" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/graphql-file-loader@^6", "@graphql-tools/graphql-file-loader@^6.0.0": - version "6.2.7" - resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-file-loader/-/graphql-file-loader-6.2.7.tgz#d3720f2c4f4bb90eb2a03a7869a780c61945e143" - integrity sha512-5k2SNz0W87tDcymhEMZMkd6/vs6QawDyjQXWtqkuLTBF3vxjxPD1I4dwHoxgWPIjjANhXybvulD7E+St/7s9TQ== +"@graphql-tools/git-loader@^8.0.0": + version "8.0.3" + resolved 
"https://registry.yarnpkg.com/@graphql-tools/git-loader/-/git-loader-8.0.3.tgz#a86d352b23a646c28d27282fef7694b846b31c44" + integrity sha512-Iz9KbRUAkuOe8JGTS0qssyJ+D5Snle17W+z9anwWrLFrkBhHrRFUy5AdjZqgJuhls0x30QkZBnnCtnHDBdQ4nA== dependencies: - "@graphql-tools/import" "^6.2.6" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@graphql-tools/graphql-tag-pluck" "8.1.0" + "@graphql-tools/utils" "^10.0.0" + is-glob "4.0.3" + micromatch "^4.0.4" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/graphql-tag-pluck@^6.2.6", "@graphql-tools/graphql-tag-pluck@^6.5.1": - version "6.5.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-6.5.1.tgz#5fb227dbb1e19f4b037792b50f646f16a2d4c686" - integrity sha512-7qkm82iFmcpb8M6/yRgzjShtW6Qu2OlCSZp8uatA3J0eMl87TxyJoUmL3M3UMMOSundAK8GmoyNVFUrueueV5Q== +"@graphql-tools/github-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/github-loader/-/github-loader-8.0.0.tgz#683195800618364701cfea9bc6f88674486f053b" + integrity sha512-VuroArWKcG4yaOWzV0r19ElVIV6iH6UKDQn1MXemND0xu5TzrFme0kf3U9o0YwNo0kUYEk9CyFM0BYg4he17FA== dependencies: - "@babel/parser" "7.12.16" - "@babel/traverse" "7.12.13" - "@babel/types" "7.12.13" - "@graphql-tools/utils" "^7.0.0" - tslib "~2.1.0" + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/executor-http" "^1.0.0" + "@graphql-tools/graphql-tag-pluck" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + "@whatwg-node/fetch" "^0.9.0" + tslib "^2.4.0" + value-or-promise "^1.0.12" -"@graphql-tools/import@^6.2.6": - version "6.3.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-6.3.1.tgz#731c47ab6c6ac9f7994d75c76b6c2fa127d2d483" - integrity sha512-1szR19JI6WPibjYurMLdadHKZoG9C//8I/FZ0Dt4vJSbrMdVNp8WFxg4QnZrDeMG4MzZc90etsyF5ofKjcC+jw== +"@graphql-tools/graphql-file-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-file-loader/-/graphql-file-loader-8.0.0.tgz#a2026405bce86d974000455647511bf65df4f211" + integrity sha512-wRXj9Z1IFL3+zJG1HWEY0S4TXal7+s1vVhbZva96MSp0kbb/3JBF7j0cnJ44Eq0ClccMgGCDFqPFXty4JlpaPg== dependencies: + "@graphql-tools/import" "7.0.0" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" + +"@graphql-tools/graphql-tag-pluck@8.1.0", "@graphql-tools/graphql-tag-pluck@^8.0.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-8.1.0.tgz#0745b6f0103eb725f10c5d4c1a9438670bb8e05b" + integrity sha512-kt5l6H/7QxQcIaewInTcune6NpATojdFEW98/8xWcgmy7dgXx5vU9e0AicFZIH+ewGyZzTpwFqO2RI03roxj2w== + dependencies: + "@babel/core" "^7.22.9" + "@babel/parser" "^7.16.8" + "@babel/plugin-syntax-import-assertions" "^7.20.0" + "@babel/traverse" "^7.16.8" + "@babel/types" "^7.16.8" + "@graphql-tools/utils" "^10.0.0" + tslib "^2.4.0" + +"@graphql-tools/import@7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-7.0.0.tgz#a6a91a90a707d5f46bad0fd3fde2f407b548b2be" + integrity sha512-NVZiTO8o1GZs6OXzNfjB+5CtQtqsZZpQOq+Uu0w57kdUkT4RlQKlwhT8T81arEsbV55KpzkpFsOZP7J1wdmhBw== + dependencies: + "@graphql-tools/utils" "^10.0.0" resolve-from "5.0.0" - tslib "~2.2.0" + tslib "^2.4.0" -"@graphql-tools/json-file-loader@^6", "@graphql-tools/json-file-loader@^6.0.0": - version "6.2.6" - resolved "https://registry.yarnpkg.com/@graphql-tools/json-file-loader/-/json-file-loader-6.2.6.tgz#830482cfd3721a0799cbf2fe5b09959d9332739a" - integrity 
sha512-CnfwBSY5926zyb6fkDBHnlTblHnHI4hoBALFYXnrg0Ev4yWU8B04DZl/pBRUc459VNgO2x8/mxGIZj2hPJG1EA== +"@graphql-tools/json-file-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/json-file-loader/-/json-file-loader-8.0.0.tgz#9b1b62902f766ef3f1c9cd1c192813ea4f48109c" + integrity sha512-ki6EF/mobBWJjAAC84xNrFMhNfnUFD6Y0rQMGXekrUgY0NdeYXHU0ZUgHzC9O5+55FslqUmAUHABePDHTyZsLg== dependencies: - "@graphql-tools/utils" "^7.0.0" - tslib "~2.0.1" + "@graphql-tools/utils" "^10.0.0" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" -"@graphql-tools/load@^6", "@graphql-tools/load@^6.0.0": - version "6.2.8" - resolved "https://registry.yarnpkg.com/@graphql-tools/load/-/load-6.2.8.tgz#16900fb6e75e1d075cad8f7ea439b334feb0b96a" - integrity sha512-JpbyXOXd8fJXdBh2ta0Q4w8ia6uK5FHzrTNmcvYBvflFuWly2LDTk2abbSl81zKkzswQMEd2UIYghXELRg8eTA== +"@graphql-tools/load@^8.0.0": + version "8.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/load/-/load-8.0.1.tgz#498f2230448601cb87894b8a93df7867daef69ea" + integrity sha512-qSMsKngJhDqRbuWyo3NvakEFqFL6+eSjy8ooJ1o5qYD26N7dqXkKzIMycQsX7rBK19hOuINAUSaRcVWH6hTccw== dependencies: - "@graphql-tools/merge" "^6.2.12" - "@graphql-tools/utils" "^7.5.0" - globby "11.0.3" - import-from "3.0.0" - is-glob "4.0.1" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.11" p-limit "3.1.0" - tslib "~2.2.0" - unixify "1.0.0" - valid-url "1.0.9" + tslib "^2.4.0" -"@graphql-tools/merge@^6", "@graphql-tools/merge@^6.0.0", "@graphql-tools/merge@^6.2.12": - version "6.2.14" - resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-6.2.14.tgz#694e2a2785ba47558e5665687feddd2935e9d94e" - integrity sha512-RWT4Td0ROJai2eR66NHejgf8UwnXJqZxXgDWDI+7hua5vNA2OW8Mf9K1Wav1ZkjWnuRp4ztNtkZGie5ISw55ow== +"@graphql-tools/merge@^9.0.0", "@graphql-tools/merge@^9.0.1": + version "9.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-9.0.1.tgz#693f15da152339284469b1ce5c6827e3ae350a29" + integrity sha512-hIEExWO9fjA6vzsVjJ3s0cCQ+Q/BEeMVJZtMXd7nbaVefVy0YDyYlEkeoYYNV3NVVvu1G9lr6DM1Qd0DGo9Caw== dependencies: - "@graphql-tools/schema" "^7.0.0" - "@graphql-tools/utils" "^7.7.0" - tslib "~2.2.0" + "@graphql-tools/utils" "^10.0.10" + tslib "^2.4.0" "@graphql-tools/optimize@^1.0.1": version "1.3.1" @@ -2056,31 +2160,28 @@ dependencies: tslib "^2.4.0" -"@graphql-tools/prisma-loader@^6": - version "6.3.0" - resolved "https://registry.yarnpkg.com/@graphql-tools/prisma-loader/-/prisma-loader-6.3.0.tgz#c907e17751ff2b26e7c2bc75d0913ebf03f970da" - integrity sha512-9V3W/kzsFBmUQqOsd96V4a4k7Didz66yh/IK89B1/rrvy9rYj+ULjEqR73x9BYZ+ww9FV8yP8LasWAJwWaqqJQ== +"@graphql-tools/prisma-loader@^8.0.0": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/prisma-loader/-/prisma-loader-8.0.2.tgz#3a7126ec2389a7aa7846bd0e441629ac5a1934fc" + integrity sha512-8d28bIB0bZ9Bj0UOz9sHagVPW+6AHeqvGljjERtwCnWl8OCQw2c2pNboYXISLYUG5ub76r4lDciLLTU+Ks7Q0w== dependencies: - "@graphql-tools/url-loader" "^6.8.2" - "@graphql-tools/utils" "^7.0.0" - "@types/http-proxy-agent" "^2.0.2" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.8" "@types/js-yaml" "^4.0.0" "@types/json-stable-stringify" "^1.0.32" - "@types/jsonwebtoken" "^8.5.0" + "@whatwg-node/fetch" "^0.9.0" chalk "^4.1.0" debug "^4.3.1" - dotenv "^8.2.0" - graphql-request "^3.3.0" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - isomorphic-fetch "^3.0.0" + dotenv "^16.0.0" + graphql-request "^6.0.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.0" + 
jose "^5.0.0" js-yaml "^4.0.0" json-stable-stringify "^1.0.1" - jsonwebtoken "^8.5.1" lodash "^4.17.20" - replaceall "^0.1.6" scuid "^1.1.0" - tslib "~2.1.0" + tslib "^2.4.0" yaml-ast-parser "^0.0.43" "@graphql-tools/relay-operation-optimizer@^6.3.0": @@ -2092,39 +2193,34 @@ "@graphql-tools/utils" "9.1.3" tslib "^2.4.0" -"@graphql-tools/schema@^7.0.0", "@graphql-tools/schema@^7.1.5": - version "7.1.5" - resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-7.1.5.tgz#07b24e52b182e736a6b77c829fc48b84d89aa711" - integrity sha512-uyn3HSNSckf4mvQSq0Q07CPaVZMNFCYEVxroApOaw802m9DcZPgf9XVPy/gda5GWj9AhbijfRYVTZQgHnJ4CXA== +"@graphql-tools/schema@^10.0.0": + version "10.0.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-10.0.2.tgz#21bc2ee25a65fb4890d2e5f9f22ef1f733aa81da" + integrity sha512-TbPsIZnWyDCLhgPGnDjt4hosiNU2mF/rNtSk5BVaXWnZqvKJ6gzJV4fcHcvhRIwtscDMW2/YTnK6dLVnk8pc4w== dependencies: - "@graphql-tools/utils" "^7.1.2" - tslib "~2.2.0" - value-or-promise "1.0.6" - -"@graphql-tools/url-loader@^6", "@graphql-tools/url-loader@^6.0.0", "@graphql-tools/url-loader@^6.8.2": - version "6.10.1" - resolved "https://registry.yarnpkg.com/@graphql-tools/url-loader/-/url-loader-6.10.1.tgz#dc741e4299e0e7ddf435eba50a1f713b3e763b33" - integrity sha512-DSDrbhQIv7fheQ60pfDpGD256ixUQIR6Hhf9Z5bRjVkXOCvO5XrkwoWLiU7iHL81GB1r0Ba31bf+sl+D4nyyfw== - dependencies: - "@graphql-tools/delegate" "^7.0.1" - "@graphql-tools/utils" "^7.9.0" - "@graphql-tools/wrap" "^7.0.4" - "@microsoft/fetch-event-source" "2.0.1" - "@types/websocket" "1.0.2" - abort-controller "3.0.0" - cross-fetch "3.1.4" - extract-files "9.0.0" - form-data "4.0.0" - graphql-ws "^4.4.1" - is-promise "4.0.0" - isomorphic-ws "4.0.1" - lodash "4.17.21" - meros "1.1.4" - subscriptions-transport-ws "^0.9.18" - sync-fetch "0.3.0" - tslib "~2.2.0" - valid-url "1.0.9" - ws "7.4.5" + "@graphql-tools/merge" "^9.0.1" + "@graphql-tools/utils" "^10.0.10" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/url-loader@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/url-loader/-/url-loader-8.0.0.tgz#8d952d5ebb7325e587cb914aaebded3dbd078cf6" + integrity sha512-rPc9oDzMnycvz+X+wrN3PLrhMBQkG4+sd8EzaFN6dypcssiefgWKToXtRKI8HHK68n2xEq1PyrOpkjHFJB+GwA== + dependencies: + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/delegate" "^10.0.0" + "@graphql-tools/executor-graphql-ws" "^1.0.0" + "@graphql-tools/executor-http" "^1.0.0" + "@graphql-tools/executor-legacy-ws" "^1.0.0" + "@graphql-tools/utils" "^10.0.0" + "@graphql-tools/wrap" "^10.0.0" + "@types/ws" "^8.0.0" + "@whatwg-node/fetch" "^0.9.0" + isomorphic-ws "^5.0.0" + tslib "^2.4.0" + value-or-promise "^1.0.11" + ws "^8.12.0" "@graphql-tools/utils@9.1.3": version "9.1.3" @@ -2143,16 +2239,17 @@ dset "^3.1.2" tslib "^2.4.0" -"@graphql-tools/utils@^6": - version "6.2.4" - resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-6.2.4.tgz#38a2314d2e5e229ad4f78cca44e1199e18d55856" - integrity sha512-ybgZ9EIJE3JMOtTrTd2VcIpTXtDrn2q6eiYkeYMKRVh3K41+LZa6YnR2zKERTXqTWqhobROwLt4BZbw2O3Aeeg== +"@graphql-tools/utils@^10.0.10", "@graphql-tools/utils@^10.0.11", "@graphql-tools/utils@^10.0.2", "@graphql-tools/utils@^10.0.5", "@graphql-tools/utils@^10.0.8": + version "10.0.11" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-10.0.11.tgz#1238fbe37e8d6c662c48ab2477c98269d6fd851a" + integrity sha512-vVjXgKn6zjXIlYBd7yJxCVMYGb5j18gE3hx3Qw3mNsSEsYQXbJbPdlwb7Fc9FogsJei5AaqiQerqH4kAosp1nQ== dependencies: - 
"@ardatan/aggregate-error" "0.0.6" - camel-case "4.1.1" - tslib "~2.0.1" + "@graphql-typed-document-node/core" "^3.1.1" + cross-inspect "1.0.0" + dset "^3.1.2" + tslib "^2.4.0" -"@graphql-tools/utils@^7.0.0", "@graphql-tools/utils@^7.1.2", "@graphql-tools/utils@^7.5.0", "@graphql-tools/utils@^7.7.0", "@graphql-tools/utils@^7.7.1", "@graphql-tools/utils@^7.8.1", "@graphql-tools/utils@^7.9.0", "@graphql-tools/utils@^7.9.1": +"@graphql-tools/utils@^7.9.1": version "7.10.0" resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-7.10.0.tgz#07a4cb5d1bec1ff1dc1d47a935919ee6abd38699" integrity sha512-d334r6bo9mxdSqZW6zWboEnnOOFRrAPVQJ7LkU8/6grglrbcu6WhwCLzHb90E94JI3TD3ricC3YGbUqIi9Xg0w== @@ -2161,27 +2258,27 @@ camel-case "4.1.2" tslib "~2.2.0" -"@graphql-tools/wrap@^7.0.4": - version "7.0.8" - resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-7.0.8.tgz#ad41e487135ca3ea1ae0ea04bb3f596177fb4f50" - integrity sha512-1NDUymworsOlb53Qfh7fonDi2STvqCtbeE68ntKY9K/Ju/be2ZNxrFSbrBHwnxWcN9PjISNnLcAyJ1L5tCUyhg== +"@graphql-tools/wrap@^10.0.0": + version "10.0.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-10.0.1.tgz#9e3d27d2723962c26c4377d5d7ab0d3038bf728c" + integrity sha512-Cw6hVrKGM2OKBXeuAGltgy4tzuqQE0Nt7t/uAqnuokSXZhMHXJUb124Bnvxc2gPZn5chfJSDafDe4Cp8ZAVJgg== dependencies: - "@graphql-tools/delegate" "^7.1.5" - "@graphql-tools/schema" "^7.1.5" - "@graphql-tools/utils" "^7.8.1" - tslib "~2.2.0" - value-or-promise "1.0.6" + "@graphql-tools/delegate" "^10.0.3" + "@graphql-tools/schema" "^10.0.0" + "@graphql-tools/utils" "^10.0.0" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-typed-document-node/core@3.2.0", "@graphql-typed-document-node/core@^3.1.1", "@graphql-typed-document-node/core@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== "@graphql-typed-document-node/core@^3.0.0": version "3.1.0" resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950" integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg== -"@graphql-typed-document-node/core@^3.1.1": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" - integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== - "@hapi/hoek@^9.0.0": version "9.2.0" resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.0.tgz#f3933a44e365864f4dad5db94158106d511e8131" @@ -2213,11 +2310,6 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== -"@iarna/toml@^2.2.5": - version "2.2.5" - resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" - integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== - "@icons/material@^0.2.4": version "0.2.4" resolved "https://registry.yarnpkg.com/@icons/material/-/material-0.2.4.tgz#e90c9f71768b3736e76d7dd6783fc6c2afa88bc8" @@ -2546,11 +2638,6 @@ refractor "^3.3.1" unist-util-visit "^2.0.3" 
-"@microsoft/fetch-event-source@2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@microsoft/fetch-event-source/-/fetch-event-source-2.0.1.tgz#9ceecc94b49fbaa15666e38ae8587f64acce007d" - integrity sha512-W6CLUJ2eBMw3Rec70qrsEW0jOm/3twwJv21mrmj2yORiaVmVYGS4sSS5yUwvQc1ZlDLYGPnClVWmUUMagKNsfA== - "@miragejs/graphql@^0.1.11": version "0.1.12" resolved "https://registry.npmjs.org/@miragejs/graphql/-/graphql-0.1.12.tgz#60679c4ad807fc4a001bc88aba396ba3fa5a958b" @@ -2700,6 +2787,33 @@ dependencies: svgmoji "^3.2.0" +"@peculiar/asn1-schema@^2.3.6": + version "2.3.8" + resolved "https://registry.yarnpkg.com/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz#04b38832a814e25731232dd5be883460a156da3b" + integrity sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA== + dependencies: + asn1js "^3.0.5" + pvtsutils "^1.3.5" + tslib "^2.6.2" + +"@peculiar/json-schema@^1.1.12": + version "1.1.12" + resolved "https://registry.yarnpkg.com/@peculiar/json-schema/-/json-schema-1.1.12.tgz#fe61e85259e3b5ba5ad566cb62ca75b3d3cd5339" + integrity sha512-coUfuoMeIB7B8/NMekxaDzLhaYmp0HZNPEjYRm9goRou8UZIC3z21s0sL9AWoCw4EG876QyO3kYrc61WNF9B/w== + dependencies: + tslib "^2.0.0" + +"@peculiar/webcrypto@^1.4.0": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@peculiar/webcrypto/-/webcrypto-1.4.3.tgz#078b3e8f598e847b78683dc3ba65feb5029b93a7" + integrity sha512-VtaY4spKTdN5LjJ04im/d/joXuvLbQdgy5Z4DXF4MFZhQ+MTrejbNMkfZBp1Bs3O5+bFqnJgyGdPuZQflvIa5A== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + pvtsutils "^1.3.2" + tslib "^2.5.0" + webcrypto-core "^1.7.7" + "@pmmmwh/react-refresh-webpack-plugin@^0.5.3": version "0.5.10" resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz#2eba163b8e7dbabb4ce3609ab5e32ab63dda3ef8" @@ -3701,6 +3815,11 @@ dependencies: type-fest "^2.0.0" +"@repeaterjs/repeater@^3.0.4": + version "3.0.5" + resolved "https://registry.yarnpkg.com/@repeaterjs/repeater/-/repeater-3.0.5.tgz#b77571685410217a548a9c753aa3cdfc215bfc78" + integrity sha512-l3YHBLAol6d/IKnB9LhpD0cEZWAoe3eFKUyTYWmFmCO2Q/WOckxLQAUyMZWwZV2M/m3+4vgRoaolFqaII82/TA== + "@rollup/plugin-babel@^5.2.0": version "5.3.1" resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" @@ -3748,13 +3867,6 @@ resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.2.tgz#31b9c510d8cada9683549e1dbb4284cca5001faf" integrity sha512-V+MvGwaHH03hYhY+k6Ef/xKd6RYlc4q8WBx+2ANmipHJcKuktNcI/NgEsJgdSUF6Lw32njT6OnrRsKYCdgHjYw== -"@samverschueren/stream-to-observable@^0.3.0": - version "0.3.1" - resolved "https://registry.yarnpkg.com/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.1.tgz#a21117b19ee9be70c379ec1877537ef2e1c63301" - integrity sha512-c/qwwcHyafOQuVQJj0IlBjf5yYgBI7YPJ77k4fOJYesb41jio65eaJODRUmfYKhTOFBrIZ66kgvGPlNbjuoRdQ== - dependencies: - any-observable "^0.3.0" - "@seznam/compose-react-refs@^1.0.6": version "1.0.6" resolved "https://registry.yarnpkg.com/@seznam/compose-react-refs/-/compose-react-refs-1.0.6.tgz#6ec4e70bdd6e32f8e70b4100f27267cf306bd8df" @@ -3782,11 +3894,6 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved 
"https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" - integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== - "@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3": version "1.8.3" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" @@ -3979,13 +4086,6 @@ "@svgr/plugin-svgo" "^5.5.0" loader-utils "^2.0.0" -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== - dependencies: - defer-to-connect "^1.0.1" - "@testing-library/dom@^7.28.1": version "7.31.0" resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.31.0.tgz#938451abd3ca27e1b69bb395d4a40759fd7f5b3b" @@ -4300,13 +4400,6 @@ resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== -"@types/http-proxy-agent@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@types/http-proxy-agent/-/http-proxy-agent-2.0.2.tgz#942c1f35c7e1f0edd1b6ffae5d0f9051cfb32be1" - integrity sha512-2S6IuBRhqUnH1/AUx9k8KWtY3Esg4eqri946MnxTG5HwehF1S5mqLln8fcyMiuQkY72p2gH3W+rIPqp5li0LyQ== - dependencies: - "@types/node" "*" - "@types/http-proxy@^1.17.5", "@types/http-proxy@^1.17.8": version "1.17.11" resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.11.tgz#0ca21949a5588d55ac2b659b69035c84bd5da293" @@ -4366,13 +4459,6 @@ resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= -"@types/jsonwebtoken@^8.5.0": - version "8.5.1" - resolved "https://registry.yarnpkg.com/@types/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#56958cb2d80f6d74352bd2e501a018e2506a8a84" - integrity sha512-rNAPdomlIUX0i0cg2+I+Q1wOUr531zHBQ+cV/28PJ39bSPKjahatZZ2LMuhiguETkCgLVzfruw/ZvNMNkKoSzw== - dependencies: - "@types/node" "*" - "@types/lodash@^4.14.172": version "4.14.195" resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.195.tgz#bafc975b252eb6cea78882ce8a7b6bf22a6de632" @@ -4659,10 +4745,10 @@ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.3.tgz#9c088679876f374eb5983f150d4787aa6fb32d7e" integrity sha512-FvUupuM3rlRsRtCN+fDudtmytGO6iHJuuRKS1Ss0pG5z8oX0diNEw94UEL7hgDbpN94rgaK5R7sWm6RrSkZuAQ== -"@types/websocket@1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@types/websocket/-/websocket-1.0.2.tgz#d2855c6a312b7da73ed16ba6781815bf30c6187a" - integrity sha512-B5m9aq7cbbD/5/jThEr33nUY8WEfVi6A2YKCTOvw5Ldy7mtsOkqRvGjnzy6g7iMMDsgu7xREuCzqATLDLQVKcQ== +"@types/ws@^8.0.0": + version "8.5.10" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.10.tgz#4acfb517970853fa6574a3a6886791d04a396787" + integrity sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A== dependencies: "@types/node" "*" @@ -5213,6 +5299,57 @@ "@webassemblyjs/ast" "1.11.6" "@xtuc/long" "4.2.2" +"@whatwg-node/events@^0.0.3": + version "0.0.3" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.0.3.tgz#13a65dd4f5893f55280f766e29ae48074927acad" + integrity 
sha512-IqnKIDWfXBJkvy/k6tzskWTc2NK3LcqHlb+KHGCrjOCH4jfQckRX0NAiIcC/vIqQkzLYw2r2CTSwAxcrtcD6lA== + +"@whatwg-node/events@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.1.1.tgz#0ca718508249419587e130da26d40e29d99b5356" + integrity sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w== + +"@whatwg-node/fetch@^0.8.0": + version "0.8.8" + resolved "https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.8.8.tgz#48c6ad0c6b7951a73e812f09dd22d75e9fa18cae" + integrity sha512-CdcjGC2vdKhc13KKxgsc6/616BQ7ooDIgPeTuAiE8qfCnS0mGzcfCOoZXypQSz73nxI+GWc7ZReIAVhxoE1KCg== + dependencies: + "@peculiar/webcrypto" "^1.4.0" + "@whatwg-node/node-fetch" "^0.3.6" + busboy "^1.6.0" + urlpattern-polyfill "^8.0.0" + web-streams-polyfill "^3.2.1" + +"@whatwg-node/fetch@^0.9.0": + version "0.9.14" + resolved "https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.9.14.tgz#262039fd8aea52a9c8aac2ec20f316382eae1a3c" + integrity sha512-wurZC82zzZwXRDSW0OS9l141DynaJQh7Yt0FD1xZ8niX7/Et/7RoiLiltbVU1fSF1RR9z6ndEaTUQBAmddTm1w== + dependencies: + "@whatwg-node/node-fetch" "^0.5.0" + urlpattern-polyfill "^9.0.0" + +"@whatwg-node/node-fetch@^0.3.6": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.3.6.tgz#e28816955f359916e2d830b68a64493124faa6d0" + integrity sha512-w9wKgDO4C95qnXZRwZTfCmLWqyRnooGjcIwG0wADWjw9/HN0p7dtvtgSvItZtUyNteEvgTrd8QojNEqV6DAGTA== + dependencies: + "@whatwg-node/events" "^0.0.3" + busboy "^1.6.0" + fast-querystring "^1.1.1" + fast-url-parser "^1.1.3" + tslib "^2.3.1" + +"@whatwg-node/node-fetch@^0.5.0": + version "0.5.1" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.5.1.tgz#36a2bc31e5fc8cffa17826c192a8829d4c0ccc1e" + integrity sha512-sQz/s3NyyzIZxQ7PHxDFUMM1k4kQQbi2jU8ILdTbt5+S59ME8aI7XF30O9qohRIIYdSrUvm/OwKQmVP1y6e2WQ== + dependencies: + "@whatwg-node/events" "^0.1.0" + busboy "^1.6.0" + fast-querystring "^1.1.1" + fast-url-parser "^1.1.3" + tslib "^2.3.1" + "@wry/context@^0.6.0": version "0.6.0" resolved "https://registry.yarnpkg.com/@wry/context/-/context-0.6.0.tgz#f903eceb89d238ef7e8168ed30f4511f92d83e06" @@ -5263,13 +5400,6 @@ abab@^2.0.3, abab@^2.0.5: resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== -abort-controller@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" - integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== - dependencies: - event-target-shim "^5.0.0" - accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: version "1.3.8" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" @@ -5336,6 +5466,21 @@ agent-base@6: dependencies: debug "4" +agent-base@^7.0.2, agent-base@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.0.tgz#536802b76bc0b34aa50195eb2442276d613e3434" + integrity sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg== + dependencies: + debug "^4.3.4" + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity 
sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + ajv-formats@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" @@ -5391,12 +5536,7 @@ analytics@^0.8.9: "@analytics/core" "^0.12.7" "@analytics/storage-utils" "^0.4.2" -ansi-escapes@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" - integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== - -ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: +ansi-escapes@^4.2.1, ansi-escapes@^4.3.0, ansi-escapes@^4.3.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== @@ -5408,16 +5548,11 @@ ansi-html-community@^0.0.8: resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== -ansi-regex@3.0.1, ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: +ansi-regex@3.0.1, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - integrity sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= - ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -5486,11 +5621,6 @@ antd@4.24.7: rc-util "^5.22.5" scroll-into-view-if-needed "^2.2.25" -any-observable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/any-observable/-/any-observable-0.3.0.tgz#af933475e5806a67d0d7df090dd5e8bef65d119b" - integrity sha512-/FQM1EDkTsf63Ub2C6O7GuYFDsSXUwsaZDurV0np41ocwq0jthUAYCmhBX9f+KwlaCgIuWyr/4WlUQUBfKfZog== - any-promise@^1.0.0: version "1.3.0" resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" @@ -5635,6 +5765,15 @@ asn1@~0.2.3: dependencies: safer-buffer "~2.1.0" +asn1js@^3.0.1, asn1js@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/asn1js/-/asn1js-3.0.5.tgz#5ea36820443dbefb51cc7f88a2ebb5b462114f38" + integrity sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ== + dependencies: + pvtsutils "^1.3.2" + pvutils "^1.1.3" + tslib "^2.4.0" + assert-plus@1.0.0, assert-plus@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" @@ -5650,10 +5789,10 @@ ast-types-flow@^0.0.7: resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= -async-limiter@~1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" - integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== async-validator@^4.1.0: version "4.2.5" @@ -5942,11 +6081,6 @@ babel-preset-react-app@^10.0.1: babel-plugin-macros "^3.1.0" babel-plugin-transform-react-remove-prop-types "^0.4.24" -backo2@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" - integrity sha1-MasayLEpNjRj41s+u2n038+6eUc= - bail@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" @@ -6012,6 +6146,15 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + blacklist@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/blacklist/-/blacklist-1.1.4.tgz#b2dd09d6177625b2caa69835a37b28995fa9a2f2" @@ -6101,6 +6244,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4 node-releases "^2.0.12" update-browserslist-db "^1.0.11" +browserslist@^4.21.9: + version "4.22.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b" + integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A== + dependencies: + caniuse-lite "^1.0.30001565" + electron-to-chromium "^1.4.601" + node-releases "^2.0.14" + update-browserslist-db "^1.0.13" + bser@2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" @@ -6113,17 +6266,12 @@ btoa@^1.2.1: resolved "https://registry.yarnpkg.com/btoa/-/btoa-1.2.1.tgz#01a9909f8b2c93f6bf680ba26131eb30f7fa3d73" integrity sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g== -buffer-equal-constant-time@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" - integrity sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk= - buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== -buffer@^5.7.0: +buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -6136,6 +6284,13 @@ builtin-modules@^3.1.0: resolved 
"https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== +busboy@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" @@ -6161,19 +6316,6 @@ cache-base@^1.0.1: union-value "^1.0.0" unset-value "^1.0.0" -cacheable-request@^6.0.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^3.0.0" - lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" - call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" @@ -6187,14 +6329,6 @@ callsites@^3.0.0: resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== -camel-case@4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.1.tgz#1fc41c854f00e2f7d0139dfeba1542d6896fe547" - integrity sha512-7fa2WcG4fYFkclIvEmxBbTvmibwF2/agfEBc6q3lOpVu0A13ltLsA+Hr/8Hp6kp5f+G7hKi6t8lys6XxP+1K6Q== - dependencies: - pascal-case "^3.1.1" - tslib "^1.10.0" - camel-case@4.1.2, camel-case@^4.1.1, camel-case@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" @@ -6238,6 +6372,11 @@ caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001503: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001508.tgz#4461bbc895c692a96da399639cc1e146e7302a33" integrity sha512-sdQZOJdmt3GJs1UMNpCCCyeuS2IEGLXnHyAo9yIO5JJDjbjoVRij4M1qep6P6gFpptD1PqIYgzM+gwJbOi92mw== +caniuse-lite@^1.0.30001565: + version "1.0.30001566" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001566.tgz#61a8e17caf3752e3e426d4239c549ebbb37fef0d" + integrity sha512-ggIhCsTxmITBAMmK8yZjEhCO5/47jKXPu6Dha/wuCS4JePVL+3uiDEBuhu2aIoT+bqTOR8L76Ip1ARL9xYsEJA== + capital-case@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669" @@ -6267,17 +6406,6 @@ ccount@^1.0.0: resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== -chalk@^1.0.0, chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: 
version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" @@ -6295,7 +6423,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -6398,7 +6526,7 @@ check-types@^11.1.1: resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== -chokidar@^3.4.2, chokidar@^3.4.3, chokidar@^3.5.3: +chokidar@^3.4.2, chokidar@^3.5.3: version "3.5.3" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -6455,12 +6583,10 @@ clean-css@^5.2.2: dependencies: source-map "~0.6.0" -cli-cursor@^2.0.0, cli-cursor@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" - integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= - dependencies: - restore-cursor "^2.0.0" +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== cli-cursor@^3.1.0: version "3.1.0" @@ -6469,13 +6595,18 @@ cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" -cli-truncate@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-0.2.1.tgz#9f15cfbb0705005369216c626ac7d05ab90dd574" - integrity sha1-nxXPuwcFAFNpIWxiasfQWrkN1XQ= +cli-spinners@^2.5.0: + version "2.9.2" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" + integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== + +cli-truncate@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" + integrity sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg== dependencies: - slice-ansi "0.0.4" - string-width "^1.0.1" + slice-ansi "^3.0.0" + string-width "^4.2.0" cli-width@^3.0.0: version "3.0.0" @@ -6500,6 +6631,15 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clone-deep@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" @@ -6509,12 +6649,10 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" -clone-response@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= - dependencies: - mimic-response "^1.0.0" +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== clsx@^1.2.1: version "1.2.1" @@ -6535,11 +6673,6 @@ coa@^2.0.2: chalk "^2.4.1" q "^1.1.2" -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= - codemirror@^5.62.0: version "5.65.10" resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.65.10.tgz#4276a93b8534ce91f14b733ba9a1ac949666eac9" @@ -6597,7 +6730,7 @@ colord@^2.9.1: resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== -colorette@^2.0.10: +colorette@^2.0.10, colorette@^2.0.16: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== @@ -6699,7 +6832,7 @@ connect-history-api-fallback@^2.0.0: resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== -constant-case@^3.0.3, constant-case@^3.0.4: +constant-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1" integrity sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ== @@ -6725,6 +6858,11 @@ convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + cookie-signature@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" @@ -6788,13 +6926,6 @@ core-util-is@1.0.2, core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= -cosmiconfig-toml-loader@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig-toml-loader/-/cosmiconfig-toml-loader-1.0.0.tgz#0681383651cceff918177debe9084c0d3769509b" - integrity sha512-H/2gurFWVi7xXvCyvsWRLCMekl4tITJcX0QEsDMpzxtuxDyM59xLatYNg4s/k9AA/HdtCYfj2su8mgA0GSDLDA== - dependencies: - "@iarna/toml" "^2.2.5" - cosmiconfig-typescript-loader@^1.0.0: version "1.0.9" resolved 
"https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-1.0.9.tgz#69c523f7e8c3d9f27f563d02bbeadaf2f27212d3" @@ -6803,17 +6934,6 @@ cosmiconfig-typescript-loader@^1.0.0: cosmiconfig "^7" ts-node "^10.7.0" -cosmiconfig@7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3" - integrity sha512-pondGvTuVYDk++upghXJabWzL6Kxu6f26ljFw64Swq9v6sQPUL3EUlVDV56diOjpCayKihL6hVe8exIACU4XcA== - dependencies: - "@types/parse-json" "^4.0.0" - import-fresh "^3.2.1" - parse-json "^5.0.0" - path-type "^4.0.0" - yaml "^1.10.0" - cosmiconfig@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" @@ -6836,6 +6956,16 @@ cosmiconfig@^7, cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: path-type "^4.0.0" yaml "^1.10.0" +cosmiconfig@^8.1.0, cosmiconfig@^8.1.3: + version "8.3.6" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.3.6.tgz#060a2b871d66dba6c8538ea1118ba1ac16f5fae3" + integrity sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA== + dependencies: + import-fresh "^3.3.0" + js-yaml "^4.1.0" + parse-json "^5.2.0" + path-type "^4.0.0" + craco-antd@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/craco-antd/-/craco-antd-2.0.0.tgz#f38977f4de1714e984ad4f68aae2bcce81bdab79" @@ -6883,21 +7013,7 @@ cronstrue@^1.122.0: resolved "https://registry.yarnpkg.com/cronstrue/-/cronstrue-1.122.0.tgz#bd6838077b476d28f61d381398b47b8c3912a126" integrity sha512-PFuhZd+iPQQ0AWTXIEYX+t3nFGzBrWxmTWUKJOrsGRewaBSLKZ4I1f8s2kryU75nNxgyugZgiGh2OJsCTA/XlA== -cross-fetch@3.0.6: - version "3.0.6" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.0.6.tgz#3a4040bc8941e653e0e9cf17f29ebcd177d3365c" - integrity sha512-KBPUbqgFjzWlVcURG+Svp9TlhA5uliYtiNx/0r8nv0pdypeQCRJ9IaSIc3q/x3q8t3F75cHuwxVql1HFGHCNJQ== - dependencies: - node-fetch "2.6.1" - -cross-fetch@3.1.4: - version "3.1.4" - resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.4.tgz#9723f3a3a247bf8b89039f3a380a9244e8fa2f39" - integrity sha512-1eAtFWdIubi6T4XPy6ei9iUFoKpUkIF971QLN8lIvvvwueI65+Nw5haMNKUwfJxabqlIIDODJKGrQ66gxC0PbQ== - dependencies: - node-fetch "2.6.1" - -cross-fetch@^3.0.6, cross-fetch@^3.1.5: +cross-fetch@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== @@ -7271,21 +7387,16 @@ data-urls@^2.0.0: whatwg-mimetype "^2.3.0" whatwg-url "^8.0.0" -dataloader@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-2.0.0.tgz#41eaf123db115987e21ca93c005cd7753c55fe6f" - integrity sha512-YzhyDAwA4TaQIhM5go+vCLmU0UikghC/t9DTQYZR2M/UvZ1MdOhPezSDZcjj9uqQJOMqjLcpWtyW2iNINdlatQ== +dataloader@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-2.2.2.tgz#216dc509b5abe39d43a9b9d97e6e5e473dfbe3e0" + integrity sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g== date-fns@2.x: version "2.29.3" resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.29.3.tgz#27402d2fc67eb442b511b70bbdf98e6411cd68a8" integrity sha512-dDCnyH2WnnKusqvZZ6+jA1O51Ibt8ZMRNkDZdyAyK4YfbDwa/cEmuztzG5pk6hqlp9aSBPYcjOlktquahGwGeA== -date-fns@^1.27.2: - version "1.30.1" - resolved 
"https://registry.yarnpkg.com/date-fns/-/date-fns-1.30.1.tgz#2e71bf0b119153dbb4cc4e88d9ea5acfb50dc05c" - integrity sha512-hBSVCvSmWC+QypYObzwGOd9wqdDpOt+0wl0KbU+R+uuZBS1jN8VsD1ss3irQDknRj5NvxiTF6oj/nDRnN/UQNw== - dayjs@1.x, dayjs@^1.11.7: version "1.11.7" resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.7.tgz#4b296922642f70999544d1144a2c25730fce63e2" @@ -7339,23 +7450,11 @@ decode-uri-component@^0.2.0: resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== -decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" - integrity sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= - dependencies: - mimic-response "^1.0.0" - dedent@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== - deep-is@^0.1.3, deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" @@ -7373,10 +7472,12 @@ default-gateway@^6.0.3: dependencies: execa "^5.0.0" -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== +defaults@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== + dependencies: + clone "^1.0.2" define-lazy-prop@^2.0.0: version "2.0.0" @@ -7433,11 +7534,6 @@ dependency-graph@^0.11.0: resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.11.0.tgz#ac0ce7ed68a54da22165a85e97a01d53f5eb2e27" integrity sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg== -dependency-graph@^0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318" - integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w== - dequal@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" @@ -7660,6 +7756,11 @@ dotenv@^10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== +dotenv@^16.0.0: + version "16.3.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" + integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== + dotenv@^8.2.0: version "8.6.0" resolved 
"https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" @@ -7670,11 +7771,6 @@ dset@^3.1.2: resolved "https://registry.yarnpkg.com/dset/-/dset-3.1.3.tgz#c194147f159841148e8e34ca41f638556d9542d2" integrity sha512-20TuZZHCEZ2O71q9/+8BwKwZ0QtD9D8ObhrihJPr+vLLYlSuAU3/zL4cSlgbfeoGHTjCSJBa7NGcrF9/Bx/WJQ== -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= - duplexer@^0.1.2, duplexer@~0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -7688,13 +7784,6 @@ ecc-jsbn@~0.1.1: jsbn "~0.1.0" safer-buffer "^2.1.0" -ecdsa-sig-formatter@1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" - integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== - dependencies: - safe-buffer "^5.0.1" - ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -7712,10 +7801,10 @@ electron-to-chromium@^1.4.431: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.441.tgz#94dd9c1cbf081d83f032a4f1cd9f787e21fc24ce" integrity sha512-LlCgQ8zgYZPymf5H4aE9itwiIWH4YlCiv1HFLmmcBeFYi5E+3eaIFnjHzYtcFQbaKfAW+CqZ9pgxo33DZuoqPg== -elegant-spinner@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/elegant-spinner/-/elegant-spinner-1.0.1.tgz#db043521c95d7e303fd8f345bedc3349cfb0729e" - integrity sha1-2wQ1IcldfjA/2PNFvtwzSc+wcp4= +electron-to-chromium@^1.4.601: + version "1.4.601" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.601.tgz#cac69868548aee89961ffe63ff5a7716f0685b75" + integrity sha512-SpwUMDWe9tQu8JX5QCO1+p/hChAi9AE9UpoC3rcHVc+gdCGlbT3SGb5I1klgb952HRIyvt9wZhSz9bNBYz9swA== emittery@^0.10.2: version "0.10.2" @@ -7772,13 +7861,6 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - enhanced-resolve@^5.15.0: version "5.15.0" resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" @@ -7874,7 +7956,7 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -8207,16 +8289,6 @@ event-stream@=3.3.4: stream-combiner "~0.0.4" through "~2.3.1" -event-target-shim@^5.0.0: - version "5.0.1" - resolved 
"https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" - integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== - -eventemitter3@^3.1.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" - integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== - eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" @@ -8355,10 +8427,10 @@ extract-domain@2.2.1: resolved "https://registry.yarnpkg.com/extract-domain/-/extract-domain-2.2.1.tgz#1deeae633a5cbf05ae2fd7b3ff87cb98cbc4cb5b" integrity sha512-lOq1adCJha0tFFBci4quxC4XLa6+Rs2WgAwTo9qbO9OsElvJmGgCvOzmHo/yg5CiqeP4+sHjkXYGkrCcIEprMg== -extract-files@9.0.0, extract-files@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-9.0.0.tgz#8a7744f2437f81f5ed3250ed9f1550de902fe54a" - integrity sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ== +extract-files@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-11.0.0.tgz#b72d428712f787eef1f5193aff8ab5351ca8469a" + integrity sha512-FuoE1qtbJ4bBVvv94CC7s0oTnKUGvQs+Rjf1L2SJFfS+HTVVjhPFtehPdQ0JiGPqVNfSSZvL5yzHHQq2Z4WNhQ== extsprintf@1.3.0: version "1.3.0" @@ -8380,15 +8452,20 @@ faker@5.5.3: resolved "https://registry.npmjs.org/faker/-/faker-5.5.3.tgz#c57974ee484431b25205c2c8dc09fda861e51e0e" integrity sha512-wLTv2a28wjUyWkbnX7u/ABZBkUkIF2fCd73V6P2oFqEGEktDfzWx4UxrSqtPRw0xPRAcjeAOIiJWqZm3pP4u3g== +fast-decode-uri-component@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz#46f8b6c22b30ff7a81357d4f59abfae938202543" + integrity sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg== + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^3.1.1, fast-glob@^3.2.12, fast-glob@^3.2.9: - version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" - integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== +fast-glob@^3.2.11: + version "3.3.0" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.0.tgz#7c40cb491e1e2ed5664749e87bfb516dbe8727c0" + integrity sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -8396,10 +8473,10 @@ fast-glob@^3.1.1, fast-glob@^3.2.12, fast-glob@^3.2.9: merge2 "^1.3.0" micromatch "^4.0.4" -fast-glob@^3.2.11: - version "3.3.0" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.0.tgz#7c40cb491e1e2ed5664749e87bfb516dbe8727c0" - integrity sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA== +fast-glob@^3.2.12, fast-glob@^3.2.9: + version "3.2.12" + resolved 
"https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -8422,11 +8499,25 @@ fast-loops@^1.1.3: resolved "https://registry.yarnpkg.com/fast-loops/-/fast-loops-1.1.3.tgz#ce96adb86d07e7bf9b4822ab9c6fac9964981f75" integrity sha512-8EZzEP0eKkEEVX+drtd9mtuQ+/QrlfW/5MlwcwK5Nds6EkZ/tRzEexkzUY2mIssnAyVLT+TKHuRXmFNNXYUd6g== +fast-querystring@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/fast-querystring/-/fast-querystring-1.1.2.tgz#a6d24937b4fc6f791b4ee31dcb6f53aeafb89f53" + integrity sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg== + dependencies: + fast-decode-uri-component "^1.0.1" + fast-shallow-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fast-shallow-equal/-/fast-shallow-equal-1.0.0.tgz#d4dcaf6472440dcefa6f88b98e3251e27f25628b" integrity sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw== +fast-url-parser@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d" + integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ== + dependencies: + punycode "^1.3.2" + fastest-stable-stringify@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/fastest-stable-stringify/-/fastest-stable-stringify-2.0.2.tgz#3757a6774f6ec8de40c4e86ec28ea02417214c76" @@ -8478,21 +8569,6 @@ fbjs@^3.0.0: setimmediate "^1.0.5" ua-parser-js "^0.7.30" -figures@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" - integrity sha1-y+Hjr/zxzUS4DK3+0o3Hk6lwHS4= - dependencies: - escape-string-regexp "^1.0.5" - object-assign "^4.1.0" - -figures@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" - integrity sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI= - dependencies: - escape-string-regexp "^1.0.5" - figures@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" @@ -8680,19 +8756,19 @@ fork-ts-checker-webpack-plugin@^6.5.0: semver "^7.3.2" tapable "^1.0.0" -form-data@4.0.0, form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" mime-types "^2.1.12" -form-data@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity 
sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== dependencies: asynckit "^0.4.0" combined-stream "^1.0.8" @@ -8836,20 +8912,6 @@ get-package-type@^0.1.0: resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - get-stream@^6.0.0: version "6.0.1" resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" @@ -8946,19 +9008,7 @@ globals@^13.19.0: dependencies: type-fest "^0.20.2" -globby@11.0.3: - version "11.0.3" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" - integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - -globby@^11.0.4, globby@^11.1.0: +globby@^11.0.3, globby@^11.0.4, globby@^11.1.0: version "11.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== @@ -8981,23 +9031,6 @@ globby@^13.1.1: merge2 "^1.4.1" slash "^4.0.0" -got@^9.6.0: - version "9.6.0" - resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" - integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== - dependencies: - "@sindresorhus/is" "^0.14.0" - "@szmarczak/http-timer" "^1.1.2" - cacheable-request "^6.0.0" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^4.1.0" - lowercase-keys "^1.0.1" - mimic-response "^1.0.1" - p-cancelable "^1.0.0" - to-readable-stream "^1.0.0" - url-parse-lax "^3.0.0" - graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" @@ -9013,31 +9046,30 @@ graphemer@^1.4.0: resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -graphql-config@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-3.3.0.tgz#24c3672a427cb67c0c717ca3b9d70e9f0c9e752b" - integrity sha512-mSQIsPMssr7QrgqhnjI+CyVH6oQgCrgS6irHsTvwf7RFDRnR2k9kqpQOQgVoOytBSn0DOYryS0w0SAg9xor/Jw== - dependencies: - "@endemolshinegroup/cosmiconfig-typescript-loader" "3.0.2" - "@graphql-tools/graphql-file-loader" "^6.0.0" - "@graphql-tools/json-file-loader" "^6.0.0" - 
"@graphql-tools/load" "^6.0.0" - "@graphql-tools/merge" "^6.0.0" - "@graphql-tools/url-loader" "^6.0.0" - "@graphql-tools/utils" "^7.0.0" - cosmiconfig "7.0.0" - cosmiconfig-toml-loader "1.0.0" - minimatch "3.0.4" - string-env-interpolation "1.0.1" - -graphql-request@^3.3.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-3.4.0.tgz#3a400cd5511eb3c064b1873afb059196bbea9c2b" - integrity sha512-acrTzidSlwAj8wBNO7Q/UQHS8T+z5qRGquCQRv9J1InwR01BBWV9ObnoE+JS5nCCEj8wSGS0yrDXVDoRiKZuOg== +graphql-config@^5.0.2: + version "5.0.3" + resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-5.0.3.tgz#d9aa2954cf47a927f9cb83cdc4e42ae55d0b321e" + integrity sha512-BNGZaoxIBkv9yy6Y7omvsaBUHOzfFcII3UN++tpH8MGOKFPFkCPZuwx09ggANMt8FgyWP1Od8SWPmrUEZca4NQ== + dependencies: + "@graphql-tools/graphql-file-loader" "^8.0.0" + "@graphql-tools/json-file-loader" "^8.0.0" + "@graphql-tools/load" "^8.0.0" + "@graphql-tools/merge" "^9.0.0" + "@graphql-tools/url-loader" "^8.0.0" + "@graphql-tools/utils" "^10.0.0" + cosmiconfig "^8.1.0" + jiti "^1.18.2" + minimatch "^4.2.3" + string-env-interpolation "^1.0.1" + tslib "^2.4.0" + +graphql-request@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-6.1.0.tgz#f4eb2107967af3c7a5907eb3131c671eac89be4f" + integrity sha512-p+XPfS4q7aIpKVcgmnZKhMNqhltk20hfXtkaIkTfjjmiKMJ5xrt5c743cL03y/K7y1rg3WrIC49xGiEQ4mxdNw== dependencies: - cross-fetch "^3.0.6" - extract-files "^9.0.0" - form-data "^3.0.0" + "@graphql-typed-document-node/core" "^3.2.0" + cross-fetch "^3.1.5" graphql-tag@2.10.3: version "2.10.3" @@ -9051,10 +9083,10 @@ graphql-tag@^2.10.1, graphql-tag@^2.11.0, graphql-tag@^2.12.0: dependencies: tslib "^2.1.0" -graphql-ws@^4.4.1: - version "4.5.1" - resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-4.5.1.tgz#d9dc6e047c6d4ddb928ccbfb3ca3022580a89925" - integrity sha512-GE7vCMKe2D7fc0ugkM1V8QMneHcbV9c3BpPBzdlW/Uzkqv0F/zZq9DDHxLzg55ZhE5OSLL+n/gyqAMPgH59hcw== +graphql-ws@^5.14.0: + version "5.14.2" + resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-5.14.2.tgz#7db6f6138717a544d9480f0213f65f2841ed1c52" + integrity sha512-LycmCwhZ+Op2GlHz4BZDsUYHKRiiUz+3r9wbhBATMETNlORQJAaFlAgTFoeRh6xQoQegwYwIylVD1Qns9/DA3w== graphql.macro@^1.4.2: version "1.4.2" @@ -9101,13 +9133,6 @@ harmony-reflect@^1.4.6: resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= - dependencies: - ansi-regex "^2.0.0" - has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -9395,11 +9420,6 @@ htmlparser2@^6.1.0: domutils "^2.5.2" entities "^2.0.0" -http-cache-semantics@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" - integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== - http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" @@ -9440,6 +9460,14 @@ 
http-proxy-agent@^4.0.1: agent-base "6" debug "4" +http-proxy-agent@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.0.tgz#e9096c5afd071a3fce56e6252bb321583c124673" + integrity sha512-+ZT+iBxVUQ1asugqnD6oWoRiS25AkjNfG085dKJGtGxkdwLQrMKU5wJr2bOOFAXzKcTuqq+7fZlTMgG3SRfIYQ== + dependencies: + agent-base "^7.1.0" + debug "^4.3.4" + http-proxy-middleware@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.0.tgz#20d1ac3409199c83e5d0383ba6436b04e7acb9fe" @@ -9488,6 +9516,14 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" +https-proxy-agent@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz#e2645b846b90e96c6e6f347fb5b2e41f1590b09b" + integrity sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA== + dependencies: + agent-base "^7.0.2" + debug "4" + human-signals@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" @@ -9541,7 +9577,7 @@ ieee754@^1.1.13: resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^5.1.4, ignore@^5.2.0: +ignore@^5.2.0: version "5.2.4" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== @@ -9561,7 +9597,7 @@ immutable@~3.7.6: resolved "https://registry.yarnpkg.com/immutable/-/immutable-3.7.6.tgz#13b4d3cb12befa15482a26fe1b2ebae640071e4b" integrity sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw== -import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== @@ -9569,13 +9605,6 @@ import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-from@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/import-from/-/import-from-3.0.0.tgz#055cfec38cd5a27d8057ca51376d7d3bf0891966" - integrity sha512-CiuXOFFSzkU5x/CR0+z7T91Iht4CXgfCxVOFRhh2Zyhg5wOpWvvDLQUsWl+gcN+QscYBjez8hDCt85O7RLDttQ== - dependencies: - resolve-from "^5.0.0" - import-from@4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/import-from/-/import-from-4.0.0.tgz#2710b8d66817d232e16f4166e319248d3d5492e2" @@ -9594,11 +9623,6 @@ imurmurhash@^0.1.4: resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= -indent-string@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" - integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= - indent-string@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" @@ -9627,7 +9651,7 @@ inherits@2.0.3: resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= -ini@^1.3.5, ini@~1.3.0: +ini@^1.3.5: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== @@ -9645,24 +9669,26 @@ inline-style-prefixer@^6.0.0: css-in-js-utils "^3.1.0" fast-loops "^1.1.3" -inquirer@^7.3.3: - version "7.3.3" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003" - integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== +inquirer@^8.0.0: + version "8.2.6" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.6.tgz#733b74888195d8d400a67ac332011b5fae5ea562" + integrity sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg== dependencies: ansi-escapes "^4.2.1" - chalk "^4.1.0" + chalk "^4.1.1" cli-cursor "^3.1.0" cli-width "^3.0.0" external-editor "^3.0.3" figures "^3.0.0" - lodash "^4.17.19" + lodash "^4.17.21" mute-stream "0.0.8" + ora "^5.4.1" run-async "^2.4.0" - rxjs "^6.6.0" + rxjs "^7.5.5" string-width "^4.1.0" strip-ansi "^6.0.0" through "^2.3.6" + wrap-ansi "^6.0.1" internal-slot@^1.0.3: version "1.0.3" @@ -9852,18 +9878,6 @@ is-finite@~1.0.1: dependencies: number-is-nan "^1.0.0" -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= - is-fullwidth-code-point@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" @@ -9874,14 +9888,7 @@ is-generator-fn@^2.0.0: resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== -is-glob@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== - dependencies: - is-extglob "^2.1.1" - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: +is-glob@4.0.3, is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== @@ -9900,6 +9907,11 @@ is-integer@~1.0.4: dependencies: is-finite "^1.0.0" +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + is-lower-case@^2.0.2: version "2.0.2" resolved 
"https://registry.yarnpkg.com/is-lower-case/-/is-lower-case-2.0.2.tgz#1c0884d3012c841556243483aa5d522f47396d2a" @@ -9939,13 +9951,6 @@ is-obj@^1.0.1: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= -is-observable@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-observable/-/is-observable-1.1.0.tgz#b3e986c8f44de950867cab5403f5a3465005975e" - integrity sha512-NqCa4Sa2d+u7BWc6CukaObG3Fh+CU9bvixbpcXYhy2VvYS7vVGIdAgnIS5Ks3A/cqk4rebLJ9s8zBstT2aKnIA== - dependencies: - symbol-observable "^1.1.0" - is-path-inside@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -9973,16 +9978,6 @@ is-potential-custom-element-name@^1.0.1: resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== -is-promise@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-4.0.0.tgz#42ff9f84206c1991d26debf520dd5c01042dd2f3" - integrity sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ== - -is-promise@^2.1.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" - integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== - is-regex@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" @@ -10015,11 +10010,6 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - is-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" @@ -10114,18 +10104,10 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= -isomorphic-fetch@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4" - integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA== - dependencies: - node-fetch "^2.6.1" - whatwg-fetch "^3.4.1" - -isomorphic-ws@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-4.0.1.tgz#55fd4cd6c5e6491e76dc125938dd863f5cd4f2dc" - integrity sha512-BhBvN2MBpWTaSHdWRb/bwdZJ1WaehQ2L1KngkCkfLUGF0mAWAT1sQUQacEmQ0jXkFw/czDXPNQSL5u2/Krsz1w== +isomorphic-ws@5.0.0, isomorphic-ws@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf" + integrity sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw== isomorphic.js@^0.2.4: version "0.2.5" @@ -10179,11 +10161,6 @@ istanbul-reports@^3.1.3: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -iterall@^1.2.1: - version "1.3.0" - resolved 
"https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea" - integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg== - jake@^10.8.5: version "10.8.7" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" @@ -10691,6 +10668,11 @@ jest@^27.4.3: import-local "^3.0.2" jest-cli "^27.5.1" +jiti@^1.17.1: + version "1.21.0" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.0.tgz#7c97f8fe045724e136a397f7340475244156105d" + integrity sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q== + jiti@^1.18.2: version "1.18.2" resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" @@ -10707,6 +10689,11 @@ joi@^17.11.0: "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" +jose@^5.0.0: + version "5.1.3" + resolved "https://registry.yarnpkg.com/jose/-/jose-5.1.3.tgz#303959d85c51b5cb14725f930270b72be56abdca" + integrity sha512-GPExOkcMsCLBTi1YetY2LmkoY559fss0+0KVa6kOfb2YFe84nAM7Nm/XzuZozah4iHgmBGrCOHL5/cy670SBRw== + js-cookie@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8" @@ -10780,11 +10767,6 @@ jsesc@~0.5.0: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= - json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" @@ -10844,7 +10826,7 @@ json5@^1.0.2: dependencies: minimist "^1.2.0" -json5@^2.1.2, json5@^2.2.0, json5@^2.2.2: +json5@^2.1.2, json5@^2.2.0, json5@^2.2.2, json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== @@ -10875,22 +10857,6 @@ jsonpointer@^5.0.0: resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== -jsonwebtoken@^8.5.1: - version "8.5.1" - resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" - integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== - dependencies: - jws "^3.2.2" - lodash.includes "^4.3.0" - lodash.isboolean "^3.0.3" - lodash.isinteger "^4.0.4" - lodash.isnumber "^3.0.3" - lodash.isplainobject "^4.0.6" - lodash.isstring "^4.0.1" - lodash.once "^4.0.0" - ms "^2.1.1" - semver "^5.6.0" - jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" @@ -10921,30 +10887,6 @@ just-extend@^4.0.2: resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== -jwa@^1.4.1: - 
version "1.4.1" - resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" - integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== - dependencies: - buffer-equal-constant-time "1.0.1" - ecdsa-sig-formatter "1.0.11" - safe-buffer "^5.0.1" - -jws@^3.2.2: - version "3.2.2" - resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" - integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== - dependencies: - jwa "^1.4.1" - safe-buffer "^5.0.1" - -keyv@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" - integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== - dependencies: - json-buffer "3.0.0" - kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" @@ -10991,13 +10933,6 @@ language-tags@=1.0.5: dependencies: language-subtag-registry "~0.3.2" -latest-version@5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face" - integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== - dependencies: - package-json "^6.3.0" - launch-editor@^2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.0.tgz#4c0c1a6ac126c572bd9ff9a30da1d2cae66defd7" @@ -11082,49 +11017,19 @@ lines-and-columns@^1.1.6: resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= -listr-silent-renderer@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" - integrity sha1-kktaN1cVN3C/Go4/v3S4u/P5JC4= - -listr-update-renderer@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/listr-update-renderer/-/listr-update-renderer-0.5.0.tgz#4ea8368548a7b8aecb7e06d8c95cb45ae2ede6a2" - integrity sha512-tKRsZpKz8GSGqoI/+caPmfrypiaq+OQCbd+CovEC24uk1h952lVj5sC7SqyFUm+OaJ5HN/a1YLt5cit2FMNsFA== - dependencies: - chalk "^1.1.3" - cli-truncate "^0.2.1" - elegant-spinner "^1.0.1" - figures "^1.7.0" - indent-string "^3.0.0" - log-symbols "^1.0.2" - log-update "^2.3.0" - strip-ansi "^3.0.1" - -listr-verbose-renderer@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/listr-verbose-renderer/-/listr-verbose-renderer-0.5.0.tgz#f1132167535ea4c1261102b9f28dac7cba1e03db" - integrity sha512-04PDPqSlsqIOaaaGZ+41vq5FejI9auqTInicFRndCBgE3bXG8D6W1I+mWhk+1nqbHmyhla/6BUrd5OSiHwKRXw== - dependencies: - chalk "^2.4.1" - cli-cursor "^2.1.0" - date-fns "^1.27.2" - figures "^2.0.0" - -listr@^0.14.3: - version "0.14.3" - resolved "https://registry.yarnpkg.com/listr/-/listr-0.14.3.tgz#2fea909604e434be464c50bddba0d496928fa586" - integrity sha512-RmAl7su35BFd/xoMamRjpIE4j3v+L28o8CT5YhAXQJm1fD+1l9ngXY8JAQRJ+tFK2i5njvi0iRUKV09vPwA0iA== - dependencies: - "@samverschueren/stream-to-observable" "^0.3.0" - is-observable "^1.1.0" - is-promise "^2.1.0" - is-stream "^1.1.0" - listr-silent-renderer "^1.1.1" - listr-update-renderer "^0.5.0" - listr-verbose-renderer "^0.5.0" - p-map "^2.0.0" - rxjs "^6.3.3" +listr2@^4.0.5: + 
version "4.0.5" + resolved "https://registry.yarnpkg.com/listr2/-/listr2-4.0.5.tgz#9dcc50221583e8b4c71c43f9c7dfd0ef546b75d5" + integrity sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA== + dependencies: + cli-truncate "^2.1.0" + colorette "^2.0.16" + log-update "^4.0.0" + p-map "^4.0.0" + rfdc "^1.3.0" + rxjs "^7.5.5" + through "^2.3.8" + wrap-ansi "^7.0.0" loader-runner@^4.2.0: version "4.3.0" @@ -11212,7 +11117,7 @@ lodash.forin@^4.4.0: resolved "https://registry.npmjs.org/lodash.forin/-/lodash.forin-4.4.0.tgz#5d3f20ae564011fbe88381f7d98949c9c9519731" integrity sha1-XT8grlZAEfvog4H32YlJyclRlzE= -lodash.get@^4, lodash.get@^4.4.2: +lodash.get@^4.4.2: version "4.4.2" resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= @@ -11222,21 +11127,11 @@ lodash.has@^4.5.2: resolved "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz#d19f4dc1095058cccbe2b0cdf4ee0fe4aa37c862" integrity sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI= -lodash.includes@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" - integrity sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8= - lodash.invokemap@^4.6.0: version "4.6.0" resolved "https://registry.npmjs.org/lodash.invokemap/-/lodash.invokemap-4.6.0.tgz#1748cda5d8b0ef8369c4eb3ec54c21feba1f2d62" integrity sha1-F0jNpdiw74NpxOs+xUwh/rofLWI= -lodash.isboolean@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" - integrity sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY= - lodash.isempty@^4.4.0: version "4.4.0" resolved "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz#6f86cbedd8be4ec987be9aaf33c9684db1b31e7e" @@ -11257,21 +11152,11 @@ lodash.isinteger@^4.0.4: resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M= -lodash.isnumber@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" - integrity sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w= - lodash.isplainobject@^4.0.6: version "4.0.6" resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= -lodash.isstring@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" - integrity sha1-1SfftUVuynzJu5XV2ur4i6VKVFE= - lodash.lowerfirst@^4.3.1: version "4.3.1" resolved "https://registry.npmjs.org/lodash.lowerfirst/-/lodash.lowerfirst-4.3.1.tgz#de3c7b12e02c6524a0059c2f6cb7c5c52655a13d" @@ -11297,11 +11182,6 @@ lodash.merge@^4.6.2: resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash.once@^4.0.0: - version "4.1.1" - resolved "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" - integrity sha1-DdOXEhPHxW34gJd9UEyI+0cal6w= - lodash.pick@4.4.0, lodash.pick@^4.4.0: version "4.4.0" resolved 
"https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" @@ -11332,19 +11212,12 @@ lodash.values@^4.3.0: resolved "https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz#a3a6c2b0ebecc5c2cba1c17e6e620fe81b53d347" integrity sha1-o6bCsOvsxcLLocF+bmIP6BtT00c= -lodash@4.17.21, lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: +lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== -log-symbols@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" - integrity sha1-N2/3tY6jCGoPCfrMdGF+ylAeGhg= - dependencies: - chalk "^1.0.0" - -log-symbols@^4.0.0: +log-symbols@^4.0.0, log-symbols@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== @@ -11352,14 +11225,15 @@ log-symbols@^4.0.0: chalk "^4.1.0" is-unicode-supported "^0.1.0" -log-update@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" - integrity sha1-iDKP19HOeTiykoN0bwsbwSayRwg= +log-update@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1" + integrity sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg== dependencies: - ansi-escapes "^3.0.0" - cli-cursor "^2.0.0" - wrap-ansi "^3.0.1" + ansi-escapes "^4.3.0" + cli-cursor "^3.1.0" + slice-ansi "^4.0.0" + wrap-ansi "^6.2.0" longest-streak@^2.0.0: version "2.0.4" @@ -11380,23 +11254,13 @@ lower-case-first@^2.0.2: dependencies: tslib "^2.0.3" -lower-case@^2.0.1, lower-case@^2.0.2: +lower-case@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== dependencies: tslib "^2.0.3" -lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - -lowercase-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" - integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== - lowlight@^1.17.0: version "1.20.0" resolved "https://registry.yarnpkg.com/lowlight/-/lowlight-1.20.0.tgz#ddb197d33462ad0d93bf19d17b6c301aa3941888" @@ -11446,7 +11310,7 @@ make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: dependencies: semver "^6.0.0" -make-error@^1, make-error@^1.1.1, make-error@^1.3.6: +make-error@^1.1.1, make-error@^1.3.6: version "1.3.6" resolved 
"https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -11652,10 +11516,10 @@ merge2@^1.3.0, merge2@^1.4.1: resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== -meros@1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/meros/-/meros-1.1.4.tgz#c17994d3133db8b23807f62bec7f0cb276cfd948" - integrity sha512-E9ZXfK9iQfG9s73ars9qvvvbSIkJZF5yOo9j4tcwM5tN8mUKfj/EKN5PzOr3ZH0y5wL7dLAHw3RVEfpQV9Q7VQ== +meros@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/meros/-/meros-1.3.0.tgz#c617d2092739d55286bf618129280f362e6242f2" + integrity sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w== messageformat-parser@^4.1.3: version "4.1.3" @@ -11764,21 +11628,11 @@ mime@1.6.0, mime@^1.4.1: resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mimic-fn@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" - integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== -mimic-response@^1.0.0, mimic-response@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" - integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== - min-document@^2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685" @@ -11811,7 +11665,7 @@ minimalistic-assert@^1.0.0: resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.0.4, minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^5.0.1: +minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^4.2.3, minimatch@^5.0.1: version "3.0.5" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3" integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw== @@ -11875,11 +11729,6 @@ mkdirp@^0.5.1, mkdirp@~0.5.1: dependencies: minimist "^1.2.5" -mkdirp@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - mocked-env@1.3.2: version "1.3.2" resolved "https://registry.npmjs.org/mocked-env/-/mocked-env-1.3.2.tgz#548eb2fde141d083de70dc6b231cd9f3210d8731" @@ -12049,11 +11898,6 @@ no-case@^3.0.4: lower-case "^2.0.2" tslib "^2.0.3" 
-node-fetch@2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" - integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== - node-fetch@2.6.7, node-fetch@^2.6.1: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -12076,6 +11920,11 @@ node-releases@^2.0.12: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== +node-releases@^2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" + integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== + normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" @@ -12093,11 +11942,6 @@ normalize-range@^0.1.2: resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= -normalize-url@^4.1.0: - version "4.5.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" - integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== - normalize-url@^6.0.1: version "6.1.0" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" @@ -12258,20 +12102,13 @@ on-headers@~1.0.2: resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== -once@^1.3.0, once@^1.3.1, once@^1.4.0: +once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= dependencies: wrappy "1" -onetime@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" - integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= - dependencies: - mimic-fn "^1.0.0" - onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" @@ -12328,6 +12165,21 @@ optionator@^0.9.1: type-check "^0.4.0" word-wrap "^1.2.3" +ora@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + orderedmap@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/orderedmap/-/orderedmap-2.1.0.tgz#819457082fa3a06abd316d83a281a1ca467437cd" @@ -12338,11 +12190,6 @@ os-tmpdir@~1.0.2: resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity 
sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= -p-cancelable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" - integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== - p-limit@3.1.0, p-limit@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" @@ -12378,10 +12225,12 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" -p-map@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" p-retry@^4.5.0: version "4.6.2" @@ -12396,16 +12245,6 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -package-json@^6.3.0: - version "6.5.0" - resolved "https://registry.yarnpkg.com/package-json/-/package-json-6.5.0.tgz#6feedaca35e75725876d0b0e64974697fed145b0" - integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== - dependencies: - got "^9.6.0" - registry-auth-token "^4.0.0" - registry-url "^5.0.0" - semver "^6.2.0" - param-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" @@ -13183,11 +13022,6 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= - pretender@^3.4.3: version "3.4.3" resolved "https://registry.npmjs.org/pretender/-/pretender-3.4.3.tgz#a3b4160516007075d29127262f3a0063d19896e9" @@ -13493,19 +13327,28 @@ psl@^1.1.28, psl@^1.1.33: resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" +punycode@^1.3.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +pvtsutils@^1.3.2, 
pvtsutils@^1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/pvtsutils/-/pvtsutils-1.3.5.tgz#b8705b437b7b134cd7fd858f025a23456f1ce910" + integrity sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA== + dependencies: + tslib "^2.6.1" + +pvutils@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/pvutils/-/pvutils-1.1.3.tgz#f35fc1d27e7cd3dfbd39c0826d173e806a03f5a3" + integrity sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ== + q@^1.1.2: version "1.5.1" resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" @@ -13959,16 +13802,6 @@ rc-virtual-list@^3.2.0, rc-virtual-list@^3.4.8: rc-resize-observer "^1.0.0" rc-util "^5.15.0" -rc@^1.2.8: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - react-app-polyfill@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" @@ -14335,6 +14168,15 @@ readable-stream@^3.0.6: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^3.4.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" @@ -14440,20 +14282,6 @@ regexpu-core@^5.3.1: unicode-match-property-ecmascript "^2.0.0" unicode-match-property-value-ecmascript "^2.1.0" -registry-auth-token@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-4.2.1.tgz#6d7b4006441918972ccd5fedcd41dc322c79b250" - integrity sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw== - dependencies: - rc "^1.2.8" - -registry-url@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-5.1.0.tgz#e98334b50d5434b81136b44ec638d9c2009c5009" - integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== - dependencies: - rc "^1.2.8" - regjsparser@^0.9.1: version "0.9.1" resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" @@ -14642,11 +14470,6 @@ repeat-string@^1.0.0, repeat-string@^1.6.1: resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= -replaceall@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/replaceall/-/replaceall-0.1.6.tgz#81d81ac7aeb72d7f5c4942adf2697a3220688d8e" - integrity sha1-gdgax663LX9cSUKt8ml6MiBojY4= - request@^2.88.2: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" @@ -14759,21 +14582,6 @@ resolve@^2.0.0-next.4: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -responselike@^1.0.2: - 
version "1.0.2" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= - dependencies: - lowercase-keys "^1.0.0" - -restore-cursor@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" - integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= - dependencies: - onetime "^2.0.0" - signal-exit "^3.0.2" - restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -14797,6 +14605,11 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rfdc@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" + integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== + rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" @@ -14873,14 +14686,7 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^6.3.3, rxjs@^6.6.0: - version "6.6.7" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" - integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== - dependencies: - tslib "^1.9.0" - -rxjs@^7.8.1: +rxjs@^7.5.5, rxjs@^7.8.1: version "7.8.1" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== @@ -15037,11 +14843,16 @@ semver@^5.6.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" @@ -15224,10 +15035,23 @@ slash@^4.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== -slice-ansi@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" - integrity sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU= +slice-ansi@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787" + integrity sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" snake-case@^3.0.4: version "3.0.4" @@ -15332,7 +15156,7 @@ source-map-resolve@^0.6.0: atob "^2.1.2" decode-uri-component "^0.2.0" -source-map-support@^0.5.17, source-map-support@^0.5.6, source-map-support@~0.5.20: +source-map-support@^0.5.6, source-map-support@~0.5.20: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== @@ -15536,6 +15360,11 @@ stream-combiner@~0.0.4: dependencies: duplexer "~0.1.1" +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + strict-uri-encode@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" @@ -15546,7 +15375,7 @@ string-convert@^0.2.0: resolved "https://registry.yarnpkg.com/string-convert/-/string-convert-0.2.1.tgz#6982cc3049fbb4cd85f8b24568b9d9bf39eeff97" integrity sha1-aYLMMEn7tM2F+LJFaLnZvznu/5c= -string-env-interpolation@1.0.1, string-env-interpolation@^1.0.1: +string-env-interpolation@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz#ad4397ae4ac53fe6c91d1402ad6f6a52862c7152" integrity sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg== @@ -15572,23 +15401,6 @@ string-natural-compare@^3.0.1: resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== -string-width@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -string-width@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - string-width@^4.1.0, string-width@^4.2.0: version "4.2.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" @@ -15598,6 +15410,15 @@ string-width@^4.1.0, string-width@^4.2.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" 
+string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7, string.prototype.matchall@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz#3bf85722021816dcd1bf38bb714915887ca79fd3" @@ -15662,20 +15483,6 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= - dependencies: - ansi-regex "^3.0.0" - strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -15717,7 +15524,7 @@ strip-indent@^3.0.0: dependencies: min-indent "^1.0.0" -strip-json-comments@^2.0.1, strip-json-comments@~2.0.1: +strip-json-comments@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= @@ -15768,17 +15575,6 @@ stylis@4.1.3, stylis@^4.0.6: resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.1.3.tgz#fd2fbe79f5fed17c55269e16ed8da14c84d069f7" integrity sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA== -subscriptions-transport-ws@^0.9.18: - version "0.9.18" - resolved "https://registry.yarnpkg.com/subscriptions-transport-ws/-/subscriptions-transport-ws-0.9.18.tgz#bcf02320c911fbadb054f7f928e51c6041a37b97" - integrity sha512-tztzcBTNoEbuErsVQpTN2xUNN/efAZXyCyL5m3x4t6SKrEiTL2N8SaKWBFWM4u56pL79ULif3zjyeq+oV+nOaA== - dependencies: - backo2 "^1.0.2" - eventemitter3 "^3.1.0" - iterall "^1.2.1" - symbol-observable "^1.0.4" - ws "^5.2.0" - sucrase@^3.32.0: version "3.32.0" resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.32.0.tgz#c4a95e0f1e18b6847127258a75cf360bc568d4a7" @@ -15792,11 +15588,6 @@ sucrase@^3.32.0: pirates "^4.0.1" ts-interface-checker "^0.1.9" -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= - supports-color@^5.3.0, supports-color@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -15887,11 +15678,6 @@ swap-case@^2.0.2: dependencies: tslib "^2.0.3" -symbol-observable@^1.0.4, symbol-observable@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804" - integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ== - symbol-observable@^2.0.0: version "2.0.3" resolved 
"https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-2.0.3.tgz#5b521d3d07a43c351055fa43b8355b62d33fd16a" @@ -15902,14 +15688,6 @@ symbol-tree@^3.2.4: resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== -sync-fetch@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/sync-fetch/-/sync-fetch-0.3.0.tgz#77246da949389310ad978ab26790bb05f88d1335" - integrity sha512-dJp4qg+x4JwSEW1HibAuMi0IIrBI3wuQr2GimmqB7OXR50wmwzfdusG+p39R9w3R6aFtZ2mzvxvWKQ3Bd/vx3g== - dependencies: - buffer "^5.7.0" - node-fetch "^2.6.1" - tailwindcss@^3.0.2: version "3.3.2" resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.2.tgz#2f9e35d715fdf0bbf674d90147a0684d7054a2d3" @@ -16039,7 +15817,7 @@ throttle-debounce@^3.0.1: resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-3.0.1.tgz#32f94d84dfa894f786c9a1f290e7a645b6a19abb" integrity sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg== -through@2, through@^2.3.6, through@~2.3, through@~2.3.1: +through@2, through@^2.3.6, through@^2.3.8, through@~2.3, through@~2.3.1: version "2.3.8" resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= @@ -16095,11 +15873,6 @@ to-object-path@^0.3.0: dependencies: kind-of "^3.0.2" -to-readable-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" - integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== - to-regex-range@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" @@ -16222,18 +15995,6 @@ ts-node@^10.7.0: v8-compile-cache-lib "^3.0.1" yn "3.1.1" -ts-node@^9: - version "9.1.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-9.1.1.tgz#51a9a450a3e959401bda5f004a72d54b936d376d" - integrity sha512-hPlt7ZACERQGf03M253ytLY3dHbGNGrAq9qIHWUY9XHYl1z7wYngSr3OQ5xmui8o2AaxsONxIzjafLUiWBo1Fg== - dependencies: - arg "^4.1.0" - create-require "^1.1.0" - diff "^4.0.1" - make-error "^1.1.1" - source-map-support "^0.5.17" - yn "3.1.1" - tsconfig-paths@^3.14.1: version "3.14.2" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" @@ -16244,26 +16005,26 @@ tsconfig-paths@^3.14.1: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0: +tslib@^1.10.0, tslib@^1.8.1: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2, tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0: +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0: version "2.4.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.1.tgz#0d0bfbaac2880b91e22df0768e55be9753a5b17e" integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA== +tslib@^2.3.1, tslib@^2.5.0, tslib@^2.6.1, tslib@^2.6.2: + version "2.6.2" + resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== + tslib@~2.0.1: version "2.0.3" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.0.3.tgz#8e0741ac45fc0c226e58a17bfc3e64b9bc6ca61c" integrity sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ== -tslib@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" - integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== - tslib@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.2.0.tgz#fb2c475977e35e241311ede2693cee1ec6698f5c" @@ -16509,10 +16270,10 @@ universalify@^2.0.0: resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== -unixify@1.0.0: +unixify@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unixify/-/unixify-1.0.0.tgz#3a641c8c2ffbce4da683a5c70f03a462940c2090" - integrity sha1-OmQcjC/7zk2mg6XHDwOkYpQMIJA= + integrity sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg== dependencies: normalize-path "^2.1.1" @@ -16547,6 +16308,14 @@ update-browserslist-db@^1.0.11: escalade "^3.1.1" picocolors "^1.0.0" +update-browserslist-db@^1.0.13: + version "1.0.13" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" + integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + upper-case-first@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/upper-case-first/-/upper-case-first-2.0.2.tgz#992c3273f882abd19d1e02894cc147117f844324" @@ -16573,12 +16342,15 @@ urix@^0.1.0: resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= - dependencies: - prepend-http "^2.0.0" +urlpattern-polyfill@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz#99f096e35eff8bf4b5a2aa7d58a1523d6ebc7ce5" + integrity sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ== + +urlpattern-polyfill@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-9.0.0.tgz#bc7e386bb12fd7898b58d1509df21d3c29ab3460" + integrity sha512-WHN8KDQblxd32odxeIgo83rdVDE2bvdkb86it7bMhYZwWKJz0+O0RK/eZiHYnM+zgt/U7hAHOlCQGfjjvSkw2g== use-callback-ref@^1.2.5: version "1.3.0" @@ -16661,20 +16433,15 @@ v8-to-istanbul@^8.1.0: convert-source-map "^1.6.0" source-map "^0.7.3" -valid-url@1.0.9, valid-url@^1.0.9: - version "1.0.9" - resolved "https://registry.yarnpkg.com/valid-url/-/valid-url-1.0.9.tgz#1c14479b40f1397a75782f115e4086447433a200" - integrity sha1-HBRHm0DxOXp1eC8RXkCGRHQzogA= - value-equal@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== -value-or-promise@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.6.tgz#218aa4794aa2ee24dcf48a29aba4413ed584747f" - integrity sha512-9r0wQsWD8z/BxPOvnwbPf05ZvFngXyouE9EKB+5GbYix+BYnAwrIChCUyFIinfbf2FL/U71z+CPpbnmTdxrwBg== +value-or-promise@^1.0.11, value-or-promise@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.12.tgz#0e5abfeec70148c78460a849f6b003ea7986f15c" + integrity sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q== vary@~1.1.2: version "1.1.2" @@ -16770,16 +16537,39 @@ wbuf@^1.1.0, wbuf@^1.7.3: dependencies: minimalistic-assert "^1.0.0" +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== + dependencies: + defaults "^1.0.3" + web-namespaces@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec" integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== +web-streams-polyfill@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" + integrity sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== + web-vitals@^0.2.4: version "0.2.4" resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-0.2.4.tgz#ec3df43c834a207fd7cdefd732b2987896e08511" integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg== +webcrypto-core@^1.7.7: + version "1.7.7" + resolved "https://registry.yarnpkg.com/webcrypto-core/-/webcrypto-core-1.7.7.tgz#06f24b3498463e570fed64d7cab149e5437b162c" + integrity sha512-7FjigXNsBfopEj+5DV2nhNpfic2vumtjjgPmeDKk45z+MJwXKKfhPB7118Pfzrmh4jqOMST6Ch37iPAHoImg5g== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + asn1js "^3.0.1" + pvtsutils "^1.3.2" + tslib "^2.4.0" + webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" @@ -16935,7 +16725,7 @@ whatwg-encoding@^1.0.5: dependencies: iconv-lite "0.4.24" -whatwg-fetch@^3.4.1, whatwg-fetch@^3.6.2: +whatwg-fetch@^3.6.2: version "3.6.2" resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== @@ -17180,15 +16970,7 @@ workbox-window@6.6.1: "@types/trusted-types" "^2.0.2" workbox-core "6.6.1" -wrap-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-3.0.1.tgz#288a04d87eda5c286e060dfe8f135ce8d007f8ba" - integrity sha1-KIoE2H7aXChuBg3+jxNc6NAH+Lo= - dependencies: - string-width "^2.1.1" - strip-ansi "^4.0.0" - -wrap-ansi@^6.2.0: +wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: version "6.2.0" resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== @@ -17221,17 +17003,10 @@ write-file-atomic@^3.0.0: signal-exit "^3.0.2" typedarray-to-buffer "^3.1.5" -ws@7.4.5: - version "7.4.5" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1" - integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g== - -ws@^5.2.0: - version "5.2.2" - resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.2.tgz#dffef14866b8e8dc9133582514d1befaf96e980f" - integrity sha512-jaHFD6PFv6UgoIVda6qZllptQsMlDEJkTQcybzzXDYM1XO9Y8em691FGMPmM46WGyLU4z9KMgQN+qrux/nhlHA== - dependencies: - async-limiter "~1.0.0" +ws@8.14.2, ws@^8.12.0: + version "8.14.2" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.14.2.tgz#6c249a806eb2db7a20d26d51e7709eab7b2e6c7f" + integrity sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g== ws@^7.4.6: version "7.5.9" @@ -17308,6 +17083,11 @@ yaml@^2.1.1: resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.1.tgz#02fe0975d23cd441242aa7204e09fc28ac2ac33b" integrity sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ== +yaml@^2.3.1: + version "2.3.4" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" + integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA== + yamljs@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/yamljs/-/yamljs-0.3.0.tgz#dc060bf267447b39f7304e9b2bfbe8b5a7ddb03b" @@ -17329,6 +17109,11 @@ yargs-parser@^20.2.2: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== +yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + yargs@^15.3.1: version "15.4.1" resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" @@ -17346,7 +17131,7 @@ yargs@^15.3.1: y18n "^4.0.0" yargs-parser "^18.1.2" -yargs@^16.1.1, yargs@^16.2.0: +yargs@^16.2.0: version "16.2.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== @@ -17359,6 +17144,19 @@ yargs@^16.1.1, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@^17.0.0: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yjs@^13.5.23: version "13.5.44" resolved "https://registry.yarnpkg.com/yjs/-/yjs-13.5.44.tgz#1c79ec7407963e07f44174cffcfde5b58a62b0da" From c0ef72886828044c40eb9db8a140e7e8afecb2d1 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz 
<andrew.sikowitz@acryl.io> Date: Mon, 4 Dec 2023 13:21:42 -0500 Subject: [PATCH 041/263] fix(ingest/powerbi): Allow old parser to parse [db].[schema].[table] table references (#9360) --- .../ingestion/source/powerbi/config.py | 1 + .../source/powerbi/m_query/resolver.py | 28 +++++--- .../tests/unit/test_powerbi_parser.py | 65 +++++++++++++++++++ 3 files changed, 84 insertions(+), 10 deletions(-) create mode 100644 metadata-ingestion/tests/unit/test_powerbi_parser.py diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py index b8cc34c234ffa..f71afac737ca6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py @@ -314,6 +314,7 @@ class PowerBiDashboardSourceConfig( description="Configure how is ownership ingested", ) modified_since: Optional[str] = pydantic.Field( + default=None, description="Get only recently modified workspaces based on modified_since datetime '2023-02-10T00:00:00.0000000Z', excludePersonalWorkspaces and excludeInActiveWorkspaces limit to last 30 days", ) extract_dashboards: bool = pydantic.Field( diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py index e200ff41f71c2..930841f1f0df2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/m_query/resolver.py @@ -617,16 +617,25 @@ def create_urn_using_old_parser( tables: List[str] = native_sql_parser.get_tables(query) - for table in tables: - schema_and_table: List[str] = table.split(".") - if len(schema_and_table) == 1: - # schema name is not present. 
set default schema
-                schema_and_table.insert(0, MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA)
-
-            qualified_table_name = (
-                f"{db_name}.{schema_and_table[0]}.{schema_and_table[1]}"
-            )
+        for parsed_table in tables:
+            # Strip the SQL Server [bracket] quoting from each dot-separated part.
+            components = [v.strip("[]") for v in parsed_table.split(".")]
+            if len(components) == 3:
+                database, schema, table = components
+            elif len(components) == 2:
+                schema, table = components
+                database = db_name
+            elif len(components) == 1:
+                (table,) = components
+                database = db_name
+                schema = MSSqlDataPlatformTableCreator.DEFAULT_SCHEMA
+            else:
+                logger.warning(
+                    f"Unsupported table format found {parsed_table} in query {query}"
+                )
+                continue
+            qualified_table_name = f"{database}.{schema}.{table}"
             urn = urn_creator(
                 config=self.config,
                 platform_instance_resolver=self.platform_instance_resolver,
@@ -634,7 +643,6 @@ def create_urn_using_old_parser(
                 server=server,
                 qualified_table_name=qualified_table_name,
             )
-
             dataplatform_tables.append(
                 DataPlatformTable(
                     data_platform_pair=self.get_platform_pair(),
diff --git a/metadata-ingestion/tests/unit/test_powerbi_parser.py b/metadata-ingestion/tests/unit/test_powerbi_parser.py
new file mode 100644
index 0000000000000..e53e8d7aee16f
--- /dev/null
+++ b/metadata-ingestion/tests/unit/test_powerbi_parser.py
@@ -0,0 +1,65 @@
+import pytest
+
+from datahub.ingestion.api.common import PipelineContext
+from datahub.ingestion.source.powerbi.config import PowerBiDashboardSourceConfig
+from datahub.ingestion.source.powerbi.dataplatform_instance_resolver import (
+    ResolvePlatformInstanceFromDatasetTypeMapping,
+)
+from datahub.ingestion.source.powerbi.m_query.resolver import (
+    MSSqlDataPlatformTableCreator,
+)
+
+
+@pytest.fixture
+def creator():
+    config = PowerBiDashboardSourceConfig(
+        tenant_id="test-tenant-id",
+        client_id="test-client-id",
+        client_secret="test-client-secret",
+    )
+    return MSSqlDataPlatformTableCreator(
+        ctx=PipelineContext(run_id="test-run-id"),
+        config=config,
+        platform_instance_resolver=ResolvePlatformInstanceFromDatasetTypeMapping(
+            config
+        ),
+    )
+
+
+def test_parse_three_part_table_reference(creator):
+    v = creator.create_urn_using_old_parser(
+        "SELECT * FROM [dwhdbt].[dbo2].[my_table] where oper_day_date > getdate() - 5",
+        db_name="default_db",
+        server="server",
+    )
+    assert len(v) == 1
+    assert (
+        v[0].urn
+        == "urn:li:dataset:(urn:li:dataPlatform:mssql,dwhdbt.dbo2.my_table,PROD)"
+    )
+
+
+def test_parse_two_part_table_reference(creator):
+    v = creator.create_urn_using_old_parser(
+        "SELECT * FROM my_schema.my_table",
+        db_name="default_db",
+        server="server",
+    )
+    assert len(v) == 1
+    assert (
+        v[0].urn
+        == "urn:li:dataset:(urn:li:dataPlatform:mssql,default_db.my_schema.my_table,PROD)"
+    )
+
+
+def test_parse_one_part_table_reference(creator):
+    v = creator.create_urn_using_old_parser(
+        "SELECT * FROM my_table",
+        db_name="default_db",
+        server="server",
+    )
+    assert len(v) == 1
+    assert (
+        v[0].urn
+        == "urn:li:dataset:(urn:li:dataPlatform:mssql,default_db.dbo.my_table,PROD)"
+    )
From 4ec3208918791b517a6d18c41905ee2dbe189a12 Mon Sep 17 00:00:00 2001
From: Harshal Sheth <hsheth2@gmail.com>
Date: Mon, 4 Dec 2023 14:31:58 -0500
Subject: [PATCH 042/263] feat(ingest): support stdin in `datahub put` (#9359)

---
 .../src/datahub/cli/ingest_cli.py             |  4 +++
 metadata-ingestion/src/datahub/cli/put_cli.py | 27 ++++++++++---------
 .../src/datahub/cli/specific/file_loader.py   |  1 +
 .../datahub/configuration/config_loader.py    | 22 
++++++++++----- .../src/datahub/configuration/json_loader.py | 11 ++++++++ .../source/metadata/business_glossary.py | 2 +- .../ingestion/source/metadata/lineage.py | 2 +- .../tests/unit/config/test_config_loader.py | 9 +++++-- 8 files changed, 55 insertions(+), 23 deletions(-) create mode 100644 metadata-ingestion/src/datahub/configuration/json_loader.py diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index dd0287004a368..b7827ec9f050b 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -147,6 +147,9 @@ async def run_pipeline_to_completion(pipeline: Pipeline) -> int: squirrel_original_config=True, squirrel_field="__raw_config", allow_stdin=True, + allow_remote=True, + process_directives=True, + resolve_env_vars=True, ) raw_pipeline_config = pipeline_config.pop("__raw_config") @@ -268,6 +271,7 @@ def deploy( pipeline_config = load_config_file( config, allow_stdin=True, + allow_remote=True, resolve_env_vars=False, ) diff --git a/metadata-ingestion/src/datahub/cli/put_cli.py b/metadata-ingestion/src/datahub/cli/put_cli.py index 6a1d82388dc2a..324d7f94db258 100644 --- a/metadata-ingestion/src/datahub/cli/put_cli.py +++ b/metadata-ingestion/src/datahub/cli/put_cli.py @@ -1,11 +1,11 @@ -import json import logging -from typing import Any, Optional +from typing import Optional import click from click_default_group import DefaultGroup from datahub.cli.cli_utils import post_entity +from datahub.configuration.config_loader import load_config_file from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.graph.client import get_default_graph from datahub.metadata.schema_classes import ( @@ -36,22 +36,23 @@ def put() -> None: @click.option("--urn", required=True, type=str) @click.option("-a", "--aspect", required=True, type=str) @click.option("-d", "--aspect-data", required=True, type=str) -@click.pass_context @upgrade.check_upgrade @telemetry.with_telemetry() -def aspect(ctx: Any, urn: str, aspect: str, aspect_data: str) -> None: +def aspect(urn: str, aspect: str, aspect_data: str) -> None: """Update a single aspect of an entity""" entity_type = guess_entity_type(urn) - with open(aspect_data) as fp: - aspect_obj = json.load(fp) - status = post_entity( - urn=urn, - aspect_name=aspect, - entity_type=entity_type, - aspect_value=aspect_obj, - ) - click.secho(f"Update succeeded with status {status}", fg="green") + aspect_obj = load_config_file( + aspect_data, allow_stdin=True, resolve_env_vars=False, process_directives=False + ) + + status = post_entity( + urn=urn, + aspect_name=aspect, + entity_type=entity_type, + aspect_value=aspect_obj, + ) + click.secho(f"Update succeeded with status {status}", fg="green") @put.command() diff --git a/metadata-ingestion/src/datahub/cli/specific/file_loader.py b/metadata-ingestion/src/datahub/cli/specific/file_loader.py index a9787343fdb91..cad32eb0a22a1 100644 --- a/metadata-ingestion/src/datahub/cli/specific/file_loader.py +++ b/metadata-ingestion/src/datahub/cli/specific/file_loader.py @@ -21,5 +21,6 @@ def load_file(config_file: Path) -> Union[dict, list]: squirrel_original_config=False, resolve_env_vars=False, allow_stdin=False, + process_directives=False, ) return res diff --git a/metadata-ingestion/src/datahub/configuration/config_loader.py b/metadata-ingestion/src/datahub/configuration/config_loader.py index 30ca4ff6aed2d..2f41af6f7286e 100644 --- 
a/metadata-ingestion/src/datahub/configuration/config_loader.py +++ b/metadata-ingestion/src/datahub/configuration/config_loader.py @@ -11,6 +11,7 @@ from expandvars import UnboundVariable, expandvars from datahub.configuration.common import ConfigurationError, ConfigurationMechanism +from datahub.configuration.json_loader import JsonConfigurationMechanism from datahub.configuration.toml import TomlConfigurationMechanism from datahub.configuration.yaml import YamlConfigurationMechanism @@ -100,33 +101,42 @@ def load_config_file( squirrel_original_config: bool = False, squirrel_field: str = "__orig_config", allow_stdin: bool = False, - resolve_env_vars: bool = True, - process_directives: bool = True, + allow_remote: bool = True, # TODO: Change the default to False. + resolve_env_vars: bool = True, # TODO: Change the default to False. + process_directives: bool = False, ) -> dict: config_mech: ConfigurationMechanism if allow_stdin and config_file == "-": # If we're reading from stdin, we assume that the input is a YAML file. + # Note that YAML is a superset of JSON, so this will also read JSON files. config_mech = YamlConfigurationMechanism() raw_config_file = sys.stdin.read() else: config_file_path = pathlib.Path(config_file) if config_file_path.suffix in {".yaml", ".yml"}: config_mech = YamlConfigurationMechanism() + elif config_file_path.suffix == ".json": + config_mech = JsonConfigurationMechanism() elif config_file_path.suffix == ".toml": config_mech = TomlConfigurationMechanism() else: raise ConfigurationError( - f"Only .toml and .yml are supported. Cannot process file type {config_file_path.suffix}" + f"Only .toml, .yml, and .json are supported. Cannot process file type {config_file_path.suffix}" ) + url_parsed = parse.urlparse(str(config_file)) - if url_parsed.scheme in ("http", "https"): # URLs will return http/https + if allow_remote and url_parsed.scheme in ( + "http", + "https", + ): # URLs will return http/https + # If the URL is remote, we need to fetch it. 
try: response = requests.get(str(config_file)) raw_config_file = response.text except Exception as e: raise ConfigurationError( - f"Cannot read remote file {config_file_path}, error:{e}" - ) + f"Cannot read remote file {config_file_path}: {e}" + ) from e else: if not config_file_path.is_file(): raise ConfigurationError(f"Cannot open config file {config_file_path}") diff --git a/metadata-ingestion/src/datahub/configuration/json_loader.py b/metadata-ingestion/src/datahub/configuration/json_loader.py new file mode 100644 index 0000000000000..35667eb5951fc --- /dev/null +++ b/metadata-ingestion/src/datahub/configuration/json_loader.py @@ -0,0 +1,11 @@ +import json +from typing import IO + +from datahub.configuration import ConfigurationMechanism + + +class JsonConfigurationMechanism(ConfigurationMechanism): + """Ability to load configuration from json files""" + + def load_config(self, config_fp: IO) -> dict: + return json.load(config_fp) diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index 97877df63707f..6baa70aa581d6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -495,7 +495,7 @@ def create(cls, config_dict, ctx): def load_glossary_config( cls, file_name: Union[str, pathlib.Path] ) -> BusinessGlossaryConfig: - config = load_config_file(file_name) + config = load_config_file(file_name, resolve_env_vars=True) glossary_cfg = BusinessGlossaryConfig.parse_obj(config) return glossary_cfg diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py index f33c6e0edae3d..659444fe610e0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/lineage.py @@ -147,7 +147,7 @@ def create( @staticmethod def load_lineage_config(file_name: str) -> LineageConfig: - config = load_config_file(file_name) + config = load_config_file(file_name, resolve_env_vars=True) lineage_config = LineageConfig.parse_obj(config) return lineage_config diff --git a/metadata-ingestion/tests/unit/config/test_config_loader.py b/metadata-ingestion/tests/unit/config/test_config_loader.py index 3253c96b876aa..f9a4076e18363 100644 --- a/metadata-ingestion/tests/unit/config/test_config_loader.py +++ b/metadata-ingestion/tests/unit/config/test_config_loader.py @@ -134,7 +134,7 @@ def test_load_success(pytestconfig, filename, golden_config, env, referenced_env assert list_referenced_env_variables(raw_config) == referenced_env_vars with mock.patch.dict(os.environ, env): - loaded_config = load_config_file(filepath) + loaded_config = load_config_file(filepath, resolve_env_vars=True) assert loaded_config == golden_config # TODO check referenced env vars @@ -183,7 +183,12 @@ def test_write_file_directive(pytestconfig): fake_ssl_key = "my-secret-key-value" with mock.patch.dict(os.environ, {"DATAHUB_SSL_KEY": fake_ssl_key}): - loaded_config = load_config_file(filepath, squirrel_original_config=False) + loaded_config = load_config_file( + filepath, + squirrel_original_config=False, + resolve_env_vars=True, + process_directives=True, + ) # Check that the rest of the dict is unmodified. 
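For reference, the stdin path above means an aspect payload can now be piped straight into `datahub put` by passing `-` as the data argument; a minimal sketch, where the URN and the `status` payload are illustrative rather than taken from the patch:

```shell
echo '{"removed": false}' | datahub put --urn "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)" --aspect status --aspect-data -
```

A plain `.json` file works as well, since `load_config_file` now dispatches on the `.json` suffix via `JsonConfigurationMechanism`.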
From 7517c77ffdbafc193dc7529881fc42ebe3f2ab2a Mon Sep 17 00:00:00 2001
From: Harshal Sheth <hsheth2@gmail.com>
Date: Mon, 4 Dec 2023 20:00:11 -0500
Subject: [PATCH 043/263] fix(ingest): resolve issue with caplog and asyncio (#9377)

---
 .../src/datahub/ingestion/source/looker/lookml_source.py | 2 +-
 .../tests/unit/api/source_helpers/test_source_helpers.py | 9 +++++++--
 .../tests/unit/utilities/test_perf_timer.py              | 2 +-
 3 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py
index 4e91d17feaa9f..93c405f0a39f2 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py
@@ -550,7 +550,7 @@ def resolve_includes(
 @dataclass
 class LookerViewFile:
     absolute_file_path: str
-    connection: Optional[str]
+    connection: Optional[LookerConnectionDefinition]
     includes: List[str]
     resolved_includes: List[ProjectInclude]
     views: List[Dict]

diff --git a/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py b/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py
index b667af8bb41e9..26e8639bed6e7 100644
--- a/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py
+++ b/metadata-ingestion/tests/unit/api/source_helpers/test_source_helpers.py
@@ -3,6 +3,7 @@ from typing import Any, Dict, Iterable, List, Union
 from unittest.mock import patch
 
+import pytest
 from freezegun import freeze_time
 
 import datahub.metadata.schema_classes as models
@@ -482,7 +483,7 @@ def test_auto_browse_path_v2_dry_run(telemetry_ping_mock):
 
 @freeze_time("2023-01-02 00:00:00")
-def test_auto_empty_dataset_usage_statistics(caplog):
+def test_auto_empty_dataset_usage_statistics(caplog: pytest.LogCaptureFixture) -> None:
     has_urn = make_dataset_urn("my_platform", "has_aspect")
     empty_urn = make_dataset_urn("my_platform", "no_aspect")
     config = BaseTimeWindowConfig()
@@ -499,6 +500,7 @@ def test_auto_empty_dataset_usage_statistics(caplog):
             ),
         ).as_workunit()
     ]
+    caplog.clear()
     with caplog.at_level(logging.WARNING):
         new_wus = list(
             auto_empty_dataset_usage_statistics(
@@ -530,7 +532,9 @@
 
 @freeze_time("2023-01-02 00:00:00")
-def test_auto_empty_dataset_usage_statistics_invalid_timestamp(caplog):
+def test_auto_empty_dataset_usage_statistics_invalid_timestamp(
+    caplog: pytest.LogCaptureFixture,
+) -> None:
     urn = make_dataset_urn("my_platform", "my_dataset")
     config = BaseTimeWindowConfig()
     wus = [
@@ -546,6 +550,7 @@
             ),
         ).as_workunit()
     ]
+    caplog.clear()
     with caplog.at_level(logging.WARNING):
         new_wus = list(
             auto_empty_dataset_usage_statistics(

diff --git a/metadata-ingestion/tests/unit/utilities/test_perf_timer.py b/metadata-ingestion/tests/unit/utilities/test_perf_timer.py
index d5fde314c2b57..6129b3e37d8bc 100644
--- a/metadata-ingestion/tests/unit/utilities/test_perf_timer.py
+++ b/metadata-ingestion/tests/unit/utilities/test_perf_timer.py
@@ -5,7 +5,7 @@
 
 from datahub.utilities.perf_timer import PerfTimer
 
-approx = partial(pytest.approx, rel=1e-2)
+approx = partial(pytest.approx, rel=2e-2)
 
 
 def test_perf_timer_simple():
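The `caplog.clear()` calls added above follow the usual pytest pattern for isolating log assertions from records captured during setup; a self-contained sketch of that pattern (not part of the patch):

```python
import logging

import pytest


def test_emits_one_warning(caplog: pytest.LogCaptureFixture) -> None:
    caplog.clear()  # drop records captured before the assertion window
    with caplog.at_level(logging.WARNING):
        logging.getLogger(__name__).warning("boom")
    assert len(caplog.records) == 1
```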
fix(ingest/airflow): compat with pluggy 1.0 (#9365) --- docs/lineage/airflow.md | 14 ++++++++ .../datahub_airflow_plugin/_airflow_shims.py | 5 +++ .../_datahub_listener_module.py | 35 ++++++++++++++++--- .../datahub_airflow_plugin/datahub_plugin.py | 6 ++-- .../airflow-plugin/tox.ini | 6 +++- 5 files changed, 57 insertions(+), 9 deletions(-) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 8fd38f560bfbb..da3a36bc87be5 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -246,6 +246,20 @@ If your URLs aren't being generated correctly (usually they'll start with `http: base_url = http://airflow.mycorp.example.com ``` +### TypeError ... missing 3 required positional arguments + +If you see errors like the following with the v2 plugin: + +```shell +ERROR - on_task_instance_success() missing 3 required positional arguments: 'previous_state', 'task_instance', and 'session' +Traceback (most recent call last): + File "/home/airflow/.local/lib/python3.8/site-packages/datahub_airflow_plugin/datahub_listener.py", line 124, in wrapper + f(*args, **kwargs) +TypeError: on_task_instance_success() missing 3 required positional arguments: 'previous_state', 'task_instance', and 'session' +``` + +The solution is to upgrade `acryl-datahub-airflow-plugin>=0.12.0.4` or upgrade `pluggy>=1.2.0`. See this [PR](https://github.com/datahub-project/datahub/pull/9365) for details. + ## Compatibility We no longer officially support Airflow <2.1. However, you can use older versions of `acryl-datahub-airflow-plugin` with older versions of Airflow. diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py index 10f014fbd586f..d384958cf3ddb 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py @@ -2,6 +2,7 @@ import airflow.version import packaging.version +import pluggy from airflow.models.baseoperator import BaseOperator from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED @@ -27,9 +28,13 @@ # Approach suggested by https://stackoverflow.com/a/11887885/5004662. 
AIRFLOW_VERSION = packaging.version.parse(airflow.version.version) +PLUGGY_VERSION = packaging.version.parse(pluggy.__version__) HAS_AIRFLOW_STANDALONE_CMD = AIRFLOW_VERSION >= packaging.version.parse("2.2.0.dev0") HAS_AIRFLOW_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.3.0.dev0") HAS_AIRFLOW_DAG_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.5.0.dev0") +NEEDS_AIRFLOW_LISTENER_MODULE = AIRFLOW_VERSION < packaging.version.parse( + "2.5.0.dev0" +) or PLUGGY_VERSION <= packaging.version.parse("1.0.0") def get_task_inlets(operator: "Operator") -> List: diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py index f39d37b122228..e16563400e397 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_datahub_listener_module.py @@ -1,7 +1,34 @@ -from datahub_airflow_plugin.datahub_listener import get_airflow_plugin_listener +from datahub_airflow_plugin.datahub_listener import ( + get_airflow_plugin_listener, + hookimpl, +) _listener = get_airflow_plugin_listener() if _listener: - on_task_instance_running = _listener.on_task_instance_running - on_task_instance_success = _listener.on_task_instance_success - on_task_instance_failed = _listener.on_task_instance_failed + # The run_in_thread decorator messes with pluggy's interface discovery, + # which causes the hooks to be called with no arguments and results in TypeErrors. + # This is only an issue with Pluggy <= 1.0.0. + # See https://github.com/pytest-dev/pluggy/issues/358 + # Note that pluggy 1.0.0 is in the constraints file for Airflow 2.4 and 2.5. 
+ + @hookimpl + def on_task_instance_running(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_running(previous_state, task_instance, session) + + @hookimpl + def on_task_instance_success(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_success(previous_state, task_instance, session) + + @hookimpl + def on_task_instance_failed(previous_state, task_instance, session): + assert _listener + _listener.on_task_instance_failed(previous_state, task_instance, session) + + if hasattr(_listener, "on_dag_run_running"): + + @hookimpl + def on_dag_run_running(dag_run, session): + assert _listener + _listener.on_dag_run_running(dag_run, session) diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py index c96fab31647f5..2b0b751bd787b 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py @@ -6,8 +6,8 @@ from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED from datahub_airflow_plugin._airflow_shims import ( - HAS_AIRFLOW_DAG_LISTENER_API, HAS_AIRFLOW_LISTENER_API, + NEEDS_AIRFLOW_LISTENER_MODULE, ) assert AIRFLOW_PATCHED @@ -50,7 +50,7 @@ class DatahubPlugin(AirflowPlugin): name = "datahub_plugin" if _USE_AIRFLOW_LISTENER_INTERFACE: - if HAS_AIRFLOW_DAG_LISTENER_API: + if not NEEDS_AIRFLOW_LISTENER_MODULE: from datahub_airflow_plugin.datahub_listener import ( # type: ignore[misc] get_airflow_plugin_listener, ) @@ -60,8 +60,6 @@ class DatahubPlugin(AirflowPlugin): else: # On Airflow < 2.5, we need the listener to be a module. # This is just a quick shim layer to make that work. - # The DAG listener API was added at the same time as this method - # was fixed, so we're reusing the same check variable. # # Related Airflow change: https://github.com/apache/airflow/pull/27113. import datahub_airflow_plugin._datahub_listener_module as _listener_module # type: ignore[misc] diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 2f05854940d10..1010bd2933e45 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -14,7 +14,11 @@ deps = # Airflow version airflow21: apache-airflow~=2.1.0 airflow22: apache-airflow~=2.2.0 - airflow24: apache-airflow~=2.4.0 + # On Airflow 2.4 and 2.5, Airflow's constraints file pins pluggy to 1.0.0, + # which has caused issues for us before. As such, we now pin it explicitly + # to prevent regressions. 
+ # See https://github.com/datahub-project/datahub/pull/9365 + airflow24: apache-airflow~=2.4.0,pluggy==1.0.0 airflow26: apache-airflow~=2.6.0 airflow27: apache-airflow~=2.7.0 commands = From d123b6174743f080a0eb8264b224569eaf952550 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth <treff7es@gmail.com> Date: Tue, 5 Dec 2023 17:16:35 +0100 Subject: [PATCH 045/263] feat(ingest/athena): Enable Athena view ingestion and view lineage (#9354) --- .../datahub/ingestion/source/sql/athena.py | 25 +++++++++++++++++-- .../ingestion/source/sql/sql_common.py | 2 ++ .../src/datahub/utilities/sqlglot_lineage.py | 2 ++ 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py index ac0e2bd4bb8a9..c3759875b2769 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py @@ -8,7 +8,8 @@ from pyathena.common import BaseCursor from pyathena.model import AthenaTableMetadata from pyathena.sqlalchemy_athena import AthenaRestDialect -from sqlalchemy import create_engine, inspect, types +from sqlalchemy import create_engine, exc, inspect, text, types +from sqlalchemy.engine import reflection from sqlalchemy.engine.reflection import Inspector from sqlalchemy.types import TypeEngine from sqlalchemy_bigquery import STRUCT @@ -64,6 +65,22 @@ class CustomAthenaRestDialect(AthenaRestDialect): # regex to identify complex types in DDL strings which are embedded in `<>`. _complex_type_pattern = re.compile(r"(<.+>)") + @typing.no_type_check + @reflection.cache + def get_view_definition(self, connection, view_name, schema=None, **kw): + # This method was backported from PyAthena v3.0.7 to allow to retrieve the view definition + # from Athena. This is required until we support sqlalchemy > 2.0 + # https://github.com/laughingman7743/PyAthena/blob/509dd37d0fd15ad603993482cc47b8549b82facd/pyathena/sqlalchemy/base.py#L1118 + raw_connection = self._raw_connection(connection) + schema = schema if schema else raw_connection.schema_name # type: ignore + query = f"""SHOW CREATE VIEW "{schema}"."{view_name}";""" + try: + res = connection.scalars(text(query)) + except exc.OperationalError as e: + raise exc.NoSuchTableError(f"{schema}.{view_name}") from e + else: + return "\n".join([r for r in res]) + @typing.no_type_check def _get_column_type( self, type_: Union[str, Dict[str, Any]] @@ -236,7 +253,7 @@ class AthenaConfig(SQLCommonConfig): # overwrite default behavior of SQLAlchemyConfing include_views: Optional[bool] = pydantic.Field( - default=False, description="Whether views should be ingested." + default=True, description="Whether views should be ingested." 
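For an existing deployment that hits the TypeError, the doc section above boils down to one of two pip upgrades, run inside the Airflow environment (the version pins come straight from the doc text):

```shell
pip install --upgrade 'acryl-datahub-airflow-plugin>=0.12.0.4'
# or, if the plugin version must stay fixed:
pip install --upgrade 'pluggy>=1.2.0'
```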
From d123b6174743f080a0eb8264b224569eaf952550 Mon Sep 17 00:00:00 2001
From: Tamas Nemeth <treff7es@gmail.com>
Date: Tue, 5 Dec 2023 17:16:35 +0100
Subject: [PATCH 045/263] feat(ingest/athena): Enable Athena view ingestion and view lineage (#9354)

---
 .../datahub/ingestion/source/sql/athena.py    | 25 +++++++++++++++++--
 .../ingestion/source/sql/sql_common.py        |  2 ++
 .../src/datahub/utilities/sqlglot_lineage.py  |  2 ++
 3 files changed, 27 insertions(+), 2 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
index ac0e2bd4bb8a9..c3759875b2769 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
@@ -8,7 +8,8 @@
 from pyathena.common import BaseCursor
 from pyathena.model import AthenaTableMetadata
 from pyathena.sqlalchemy_athena import AthenaRestDialect
-from sqlalchemy import create_engine, inspect, types
+from sqlalchemy import create_engine, exc, inspect, text, types
+from sqlalchemy.engine import reflection
 from sqlalchemy.engine.reflection import Inspector
 from sqlalchemy.types import TypeEngine
 from sqlalchemy_bigquery import STRUCT
@@ -64,6 +65,22 @@ class CustomAthenaRestDialect(AthenaRestDialect):
     # regex to identify complex types in DDL strings which are embedded in `<>`.
     _complex_type_pattern = re.compile(r"(<.+>)")
 
+    @typing.no_type_check
+    @reflection.cache
+    def get_view_definition(self, connection, view_name, schema=None, **kw):
+        # This method was backported from PyAthena v3.0.7 to allow to retrieve the view definition
+        # from Athena. This is required until we support sqlalchemy > 2.0
+        # https://github.com/laughingman7743/PyAthena/blob/509dd37d0fd15ad603993482cc47b8549b82facd/pyathena/sqlalchemy/base.py#L1118
+        raw_connection = self._raw_connection(connection)
+        schema = schema if schema else raw_connection.schema_name  # type: ignore
+        query = f"""SHOW CREATE VIEW "{schema}"."{view_name}";"""
+        try:
+            res = connection.scalars(text(query))
+        except exc.OperationalError as e:
+            raise exc.NoSuchTableError(f"{schema}.{view_name}") from e
+        else:
+            return "\n".join([r for r in res])
+
     @typing.no_type_check
     def _get_column_type(
         self, type_: Union[str, Dict[str, Any]]
@@ -236,7 +253,7 @@ class AthenaConfig(SQLCommonConfig):
 
     # overwrite default behavior of SQLAlchemyConfing
     include_views: Optional[bool] = pydantic.Field(
-        default=False, description="Whether views should be ingested."
+        default=True, description="Whether views should be ingested."
     )
 
     _s3_staging_dir_population = pydantic_renamed_field(
@@ -303,6 +320,10 @@ def get_inspectors(self) -> Iterable[Inspector]:
             inspector = inspect(conn)
             yield inspector
 
+    def get_db_schema(self, dataset_identifier: str) -> Tuple[Optional[str], str]:
+        schema, _view = dataset_identifier.split(".", 1)
+        return None, schema
+
     def get_table_properties(
         self, inspector: Inspector, schema: str, table: str
     ) -> Tuple[Optional[str], Dict[str, str], Optional[str]]:

diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py
index 67af6b2010c83..590bc7f696784 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py
@@ -371,6 +371,8 @@ def get_db_name(self, inspector: Inspector) -> str:
         engine = inspector.engine
 
         if engine and hasattr(engine, "url") and hasattr(engine.url, "database"):
+            if engine.url.database is None:
+                return ""
             return str(engine.url.database).strip('"').lower()
         else:
             raise Exception("Unable to get database name from Sqlalchemy inspector")

diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
index c2cccf9f1e389..fc3efef2ba532 100644
--- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
+++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
@@ -962,6 +962,8 @@ def _get_dialect(platform: str) -> str:
         return "hive"
     if platform == "mssql":
         return "tsql"
+    if platform == "athena":
+        return "trino"
     else:
         return platform
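With `include_views` now defaulting to true, an Athena recipe picks up views (and view lineage via the trino dialect) without extra flags; a minimal sketch, where the region, workgroup, and result location are placeholders:

```yaml
source:
  type: athena
  config:
    aws_region: us-east-1                             # placeholder
    work_group: primary                               # placeholder
    query_result_location: "s3://my-athena-results/"  # placeholder
    include_views: true                               # now the default; shown for clarity
```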
From 3ee82590cd2ab7da08b5ad8b19b1e4dd988023d9 Mon Sep 17 00:00:00 2001
From: Tamas Nemeth <treff7es@gmail.com>
Date: Tue, 5 Dec 2023 17:42:29 +0100
Subject: [PATCH 046/263] fix(ingest/redshift): Identify materialized views properly + fix connection args support (#9368)

---
 .../docs/sources/redshift/redshift_recipe.yml |  4 +-
 metadata-ingestion/setup.py                   |  8 +-
 .../ingestion/source/redshift/config.py       | 23 ++++-
 .../ingestion/source/redshift/query.py        | 18 +++-
 .../ingestion/source/redshift/redshift.py     |  6 +-
 .../source/redshift/redshift_schema.py        | 98 +++++++++++++------
 6 files changed, 109 insertions(+), 48 deletions(-)

diff --git a/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml b/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml
index be704e6759d41..a561405d3de47 100644
--- a/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml
+++ b/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml
@@ -40,8 +40,8 @@ source:
 
   options:
     connect_args:
-      sslmode: "prefer" # or "require" or "verify-ca"
-      sslrootcert: ~ # needed to unpin the AWS Redshift certificate
+      # check all available options here: https://pypi.org/project/redshift-connector/
+      ssl_insecure: "false" # Specifies if IDP hosts server certificate will be verified
 
 sink:
   # sink configs

diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index 4f5f09fb148fa..416b255fb763f 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -181,8 +181,8 @@
 redshift_common = {
     # Clickhouse 0.8.3 adds support for SQLAlchemy 1.4.x
     "sqlalchemy-redshift>=0.8.3",
-    "psycopg2-binary",
     "GeoAlchemy2",
+    "redshift-connector",
     *sqllineage_lib,
     *path_spec_common,
 }
@@ -365,11 +365,7 @@
     | {"psycopg2-binary", "pymysql>=1.0.2"},
     "pulsar": {"requests"},
     "redash": {"redash-toolbelt", "sql-metadata"} | sqllineage_lib,
-    "redshift": sql_common
-    | redshift_common
-    | usage_common
-    | {"redshift-connector"}
-    | sqlglot_lib,
+    "redshift": sql_common | redshift_common | usage_common | sqlglot_lib,
     "s3": {*s3_base, *data_lake_profiling},
     "gcs": {*s3_base, *data_lake_profiling},
     "sagemaker": aws_common,

diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py
index 95038ef2c6212..51ad8a050adc2 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py
@@ -82,7 +82,7 @@ class RedshiftConfig(
     # large Redshift warehouses. As an example, see this query for the columns:
     # https://github.com/sqlalchemy-redshift/sqlalchemy-redshift/blob/60b4db04c1d26071c291aeea52f1dcb5dd8b0eb0/sqlalchemy_redshift/dialect.py#L745.
     scheme: str = Field(
-        default="redshift+psycopg2",
+        default="redshift+redshift_connector",
         description="",
         hidden_from_schema=True,
     )
@@ -170,3 +170,24 @@ def backward_compatibility_configs_set(cls, values: Dict) -> Dict:
                 "The config option `match_fully_qualified_names` will be deprecated in future and the default behavior will assume `match_fully_qualified_names: True`."
             )
         return values
+
+    @root_validator(skip_on_failure=True)
+    def connection_config_compatibility_set(cls, values: Dict) -> Dict:
+        if (
+            ("options" in values and "connect_args" in values["options"])
+            and "extra_client_options" in values
+            and len(values["extra_client_options"]) > 0
+        ):
+            raise ValueError(
+                "Cannot set both `connect_args` and `extra_client_options` in the config. Please use `extra_client_options` only."
+            )
+
+        if "options" in values and "connect_args" in values["options"]:
+            values["extra_client_options"] = values["options"]["connect_args"]
+
+        if values["extra_client_options"]:
+            if values["options"]:
+                values["options"]["connect_args"] = values["extra_client_options"]
+            else:
+                values["options"] = {"connect_args": values["extra_client_options"]}
+        return values

diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py
index a96171caf9835..92e36fffd6bb4 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py
@@ -179,14 +179,18 @@ class RedshiftQuery:
     additional_table_metadata: str = """
         select
-            database,
-            schema,
+            ti.database,
+            ti.schema,
             "table",
             size,
             tbl_rows,
             estimated_visible_rows,
             skew_rows,
-            last_accessed
+            last_accessed,
+            case
+                when smi.name is not null then 1
+                else 0
+            end as is_materialized
         from
             pg_catalog.svv_table_info as ti
         left join (
            select
                tbl,
                max(endtime) as last_accessed
            from
                pg_catalog.stl_insert
            group by
                tbl) as la on
            (la.tbl = ti.table_id)
-        ;
-        """
+        left join stv_mv_info smi on
+            smi.db_name = ti.database
+            and smi.schema = ti.schema
+            and smi.name = ti.table
+        ;
+"""
 
     @staticmethod
     def stl_scan_based_lineage_query(

diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py
index 04f0edf504595..eb635b1292b81 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py
@@ -6,7 +6,6 @@
 import humanfriendly
 
 # These imports verify that the dependencies are available.
-import psycopg2  # noqa: F401
 import pydantic
 import redshift_connector
 
@@ -352,7 +351,6 @@ def create(cls, config_dict, ctx):
     def get_redshift_connection(
         config: RedshiftConfig,
     ) -> redshift_connector.Connection:
-        client_options = config.extra_client_options
         host, port = config.host_port.split(":")
         conn = redshift_connector.connect(
             host=host,
             port=int(port),
             user=config.username,
             database=config.database,
             password=config.password.get_secret_value() if config.password else None,
-            **client_options,
+            **config.extra_client_options,
         )
 
         conn.autocommit = True
@@ -641,7 +639,7 @@ def gen_view_dataset_workunits(
         dataset_urn = self.gen_dataset_urn(datahub_dataset_name)
         if view.ddl:
             view_properties_aspect = ViewProperties(
-                materialized=view.type == "VIEW_MATERIALIZED",
+                materialized=view.materialized,
                 viewLanguage="SQL",
                 viewLogic=view.ddl,
             )

diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py
index 4a13d17d2cc0f..ca81682ae00e4 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py
@@ -40,6 +40,7 @@ class RedshiftTable(BaseTable):
 @dataclass
 class RedshiftView(BaseTable):
     type: Optional[str] = None
+    materialized: bool = False
     columns: List[RedshiftColumn] = field(default_factory=list)
     last_altered: Optional[datetime] = None
     size_in_bytes: Optional[int] = None
@@ -66,6 +67,7 @@ class RedshiftExtraTableMeta:
     estimated_visible_rows: Optional[int] = None
     skew_rows: Optional[float] = None
     last_accessed: Optional[datetime] = None
+    is_materialized: bool = False
 
 
 @dataclass
@@ -148,6 +150,7 @@ def enrich_tables(
                 ],
                 skew_rows=meta[field_names.index("skew_rows")],
                 last_accessed=meta[field_names.index("last_accessed")],
+                is_materialized=meta[field_names.index("is_materialized")],
             )
             if table_meta.schema not in table_enrich:
                 table_enrich.setdefault(table_meta.schema, {})
@@ -173,42 +176,23 @@ def get_tables_and_views(
         logger.info(f"Fetched {len(db_tables)} tables/views from Redshift")
         for table in db_tables:
             schema = table[field_names.index("schema")]
+            table_name = table[field_names.index("relname")]
+
             if table[field_names.index("tabletype")] not in [
                 "MATERIALIZED VIEW",
                 "VIEW",
             ]:
                 if schema not in tables:
                     tables.setdefault(schema, [])
-                table_name = table[field_names.index("relname")]
-
-                creation_time: Optional[datetime] = None
-                if table[field_names.index("creation_time")]:
-                    creation_time = table[field_names.index("creation_time")].replace(
-                        tzinfo=timezone.utc
-                    )
-                last_altered: Optional[datetime] = None
-                size_in_bytes: Optional[int] = None
-                rows_count: Optional[int] = None
-                if schema in enriched_table and table_name in enriched_table[schema]:
-                    if enriched_table[schema][table_name].last_accessed:
-                        # Mypy seems to be not clever enough to understand the above check
-                        last_accessed = enriched_table[schema][table_name].last_accessed
-                        assert last_accessed
-                        last_altered = last_accessed.replace(tzinfo=timezone.utc)
-                    elif creation_time:
-                        last_altered = creation_time
-
-                    if enriched_table[schema][table_name].size:
-                        # Mypy seems to be not clever enough to understand the above check
-                        size = enriched_table[schema][table_name].size
-                        if size:
-                            size_in_bytes = size * 1024 * 1024
-
-                    if enriched_table[schema][table_name].estimated_visible_rows:
-                        rows = enriched_table[schema][table_name].estimated_visible_rows
-                        assert rows
-                        rows_count = int(rows)
+                (
+                    creation_time,
+                    last_altered,
+                    rows_count,
+                    size_in_bytes,
+                ) = RedshiftDataDictionary.get_table_stats(
+                    enriched_table, field_names, schema, table
+                )
 
                 tables[schema].append(
                     RedshiftTable(
@@ -231,16 +215,37 @@ def get_tables_and_views(
             else:
                 if schema not in views:
                     views[schema] = []
+                (
+                    creation_time,
+                    last_altered,
+                    rows_count,
+                    size_in_bytes,
+                ) = RedshiftDataDictionary.get_table_stats(
+                    enriched_table=enriched_table,
+                    field_names=field_names,
+                    schema=schema,
+                    table=table,
+                )
+
+                materialized = False
+                if schema in enriched_table and table_name in enriched_table[schema]:
+                    if enriched_table[schema][table_name].is_materialized:
+                        materialized = True
 
                 views[schema].append(
                     RedshiftView(
                         type=table[field_names.index("tabletype")],
                         name=table[field_names.index("relname")],
                         ddl=table[field_names.index("view_definition")],
-                        created=table[field_names.index("creation_time")],
+                        created=creation_time,
                         comment=table[field_names.index("table_description")],
+                        last_altered=last_altered,
+                        size_in_bytes=size_in_bytes,
+                        rows_count=rows_count,
+                        materialized=materialized,
                     )
                 )
+
         for schema_key, schema_tables in tables.items():
             logger.info(
                 f"In schema: {schema_key} discovered {len(schema_tables)} tables"
             )
@@ -250,6 +255,39 @@
 
         return tables, views
 
+    @staticmethod
+    def get_table_stats(enriched_table, field_names, schema, table):
+        table_name = table[field_names.index("relname")]
+
+        creation_time: Optional[datetime] = None
+        if table[field_names.index("creation_time")]:
+            creation_time = table[field_names.index("creation_time")].replace(
+                tzinfo=timezone.utc
+            )
+        last_altered: Optional[datetime] = None
+        size_in_bytes: Optional[int] = None
+        rows_count: Optional[int] = None
+        if schema in enriched_table and table_name in enriched_table[schema]:
+            if enriched_table[schema][table_name].last_accessed:
+                # Mypy seems to be not clever enough to understand the above check
+                last_accessed = enriched_table[schema][table_name].last_accessed
+                assert last_accessed
+                last_altered = last_accessed.replace(tzinfo=timezone.utc)
+            elif creation_time:
+                last_altered = creation_time
+
+            if enriched_table[schema][table_name].size:
+                # Mypy seems to be not clever enough to understand the above check
+                size = enriched_table[schema][table_name].size
+                if size:
+                    size_in_bytes = size * 1024 * 1024
+
+            if enriched_table[schema][table_name].estimated_visible_rows:
+                rows = enriched_table[schema][table_name].estimated_visible_rows
+                assert rows
+                rows_count = int(rows)
+        return creation_time, last_altered, rows_count, size_in_bytes
+
     @staticmethod
     def get_schema_fields_for_column(
         column: RedshiftColumn,
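Since the new validator mirrors `options.connect_args` into `extra_client_options` (and rejects setting both), recipes only need one of the two forms; a sketch with placeholder connection details:

```yaml
source:
  type: redshift
  config:
    host_port: my-cluster.example.com:5439   # placeholder
    database: dev                            # placeholder
    extra_client_options:
      ssl_insecure: "false"   # forwarded straight to redshift_connector.connect()
```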
From 806f09ae23b1a569006be9eaf8d13165e67742b3 Mon Sep 17 00:00:00 2001
From: Andrew Sikowitz <andrew.sikowitz@acryl.io>
Date: Tue, 5 Dec 2023 12:33:00 -0500
Subject: [PATCH 047/263] test(ingest/unity): Unity catalog data generation (#8949)

---
 metadata-ingestion/setup.py                    |   1 +
 .../performance/bigquery/bigquery_events.py    |  10 +-
 .../tests/performance/data_generation.py       | 153 ++++++++++-----
 .../tests/performance/data_model.py            |  54 +++++-
 .../tests/performance/databricks/generator.py  | 177 ++++++++++++++++++
 .../databricks/unity_proxy_mock.py             |  73 ++++----
 .../tests/unit/test_bigquery_source.py         |   2 +-
 .../tests/unit/test_bigquery_usage.py          |  14 +-
 8 files changed, 383 insertions(+), 101 deletions(-)
 create mode 100644 metadata-ingestion/tests/performance/databricks/generator.py

diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index 416b255fb763f..69cbe8d823450 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -262,6 +262,7 @@
     "databricks-sdk>=0.9.0",
     "pyspark~=3.3.0",
     "requests",
+    "databricks-sql-connector",
 }
 
 mysql = sql_common | {"pymysql>=1.0.2"}

diff --git a/metadata-ingestion/tests/performance/bigquery/bigquery_events.py b/metadata-ingestion/tests/performance/bigquery/bigquery_events.py
index d9b5571a8015f..0e0bfe78c260f 100644
--- a/metadata-ingestion/tests/performance/bigquery/bigquery_events.py
+++ b/metadata-ingestion/tests/performance/bigquery/bigquery_events.py
@@ -2,7 +2,7 @@
 import random
 import uuid
 from collections import defaultdict
-from typing import Dict, Iterable, List, cast
+from typing import Dict, Iterable, List, Set
 
 from typing_extensions import get_args
@@ -15,7 +15,7 @@
 )
 from datahub.ingestion.source.bigquery_v2.bigquery_config import BigQueryV2Config
 from datahub.ingestion.source.bigquery_v2.usage import OPERATION_STATEMENT_TYPES
-from tests.performance.data_model import Query, StatementType, Table, View
+from tests.performance.data_model import Query, StatementType, Table
 
 # https://cloud.google.com/bigquery/docs/reference/auditlogs/rest/Shared.Types/BigQueryAuditMetadata.TableDataRead.Reason
 READ_REASONS = [
@@ -86,7 +86,7 @@ def generate_events(
                         ref_from_table(parent, table_to_project)
                         for field in query.fields_accessed
                         if field.table.is_view()
-                        for parent in cast(View, field.table).parents
+                        for parent in field.table.upstreams
                    )
                ),
                referencedViews=referencedViews,
@@ -96,7 +96,7 @@
                 query_on_view=True if referencedViews else False,
             )
         )
-        table_accesses = defaultdict(set)
+        table_accesses: Dict[BigQueryTableRef, Set[str]] = defaultdict(set)
         for field in query.fields_accessed:
             if not field.table.is_view():
                 table_accesses[ref_from_table(field.table, table_to_project)].add(
                     field.column
                 )
             else:
                 # assuming that same fields are accessed in parent tables
-                for parent in cast(View, field.table).parents:
+                for parent in field.table.upstreams:
                     table_accesses[ref_from_table(parent, table_to_project)].add(
                         field.column
                     )

diff --git a/metadata-ingestion/tests/performance/data_generation.py b/metadata-ingestion/tests/performance/data_generation.py
index 67b156896909a..9b80d6260d408 100644
--- a/metadata-ingestion/tests/performance/data_generation.py
+++ b/metadata-ingestion/tests/performance/data_generation.py
@@ -8,16 +8,16 @@
 This is a work in progress, built piecemeal as needed.
 """
 import random
-import uuid
+from abc import ABCMeta, abstractmethod
+from collections import OrderedDict
 from dataclasses import dataclass
 from datetime import datetime, timedelta, timezone
-from typing import Iterable, List, TypeVar, Union, cast
+from typing import Collection, Iterable, List, Optional, TypeVar, Union, cast
 
 from faker import Faker
 
 from tests.performance.data_model import (
     Column,
-    ColumnMapping,
     ColumnType,
     Container,
     FieldAccess,
@@ -40,17 +40,46 @@
     "UNKNOWN",
 ]
 
+ID_COLUMN = "id"  # Use to allow joins between all tables
+
+
+class Distribution(metaclass=ABCMeta):
+    @abstractmethod
+    def _sample(self) -> int:
+        raise NotImplementedError
+
+    def sample(
+        self, *, floor: Optional[int] = None, ceiling: Optional[int] = None
+    ) -> int:
+        value = self._sample()
+        if floor is not None:
+            value = max(value, floor)
+        if ceiling is not None:
+            value = min(value, ceiling)
+        return value
+
 
 @dataclass(frozen=True)
-class NormalDistribution:
+class NormalDistribution(Distribution):
     mu: float
     sigma: float
 
-    def sample(self) -> int:
+    def _sample(self) -> int:
         return int(random.gauss(mu=self.mu, sigma=self.sigma))
 
-    def sample_with_floor(self, floor: int = 1) -> int:
-        return max(int(random.gauss(mu=self.mu, sigma=self.sigma)), floor)
+
+@dataclass(frozen=True)
+class LomaxDistribution(Distribution):
+    """See https://en.wikipedia.org/wiki/Lomax_distribution.
+
+    Equivalent to pareto(scale, shape) - scale; scale * beta_prime(1, shape)
+    """
+
+    scale: float
+    shape: float
+
+    def _sample(self) -> int:
+        return int(self.scale * (random.paretovariate(self.shape) - 1))
 
 
 @dataclass
@@ -72,9 +101,9 @@ def generate_data(
     num_containers: Union[List[int], int],
     num_tables: int,
     num_views: int,
-    columns_per_table: NormalDistribution = NormalDistribution(5, 2),
-    parents_per_view: NormalDistribution = NormalDistribution(2, 1),
-    view_definition_length: NormalDistribution = NormalDistribution(150, 50),
+    columns_per_table: Distribution = NormalDistribution(5, 2),
+    parents_per_view: Distribution = NormalDistribution(2, 1),
+    view_definition_length: Distribution = NormalDistribution(150, 50),
     time_range: timedelta = timedelta(days=14),
 ) -> SeedMetadata:
     # Assemble containers
@@ -85,43 +114,32 @@ def generate_data(
     for i, num_in_layer in enumerate(num_containers):
         layer = [
             Container(
-                f"{i}-container-{j}",
+                f"{_container_type(i)}_{j}",
                 parent=random.choice(containers[-1]) if containers else None,
             )
             for j in range(num_in_layer)
         ]
         containers.append(layer)
 
-    # Assemble tables
+    # Assemble tables and views, lineage, and definitions
     tables = [
-        Table(
-            f"table-{i}",
-            container=random.choice(containers[-1]),
-            columns=[
-                f"column-{j}-{uuid.uuid4()}"
-                for j in range(columns_per_table.sample_with_floor())
-            ],
-            column_mapping=None,
-        )
-        for i in range(num_tables)
+        _generate_table(i, containers[-1], columns_per_table) for i in range(num_tables)
     ]
     views = [
         View(
-            f"view-{i}",
-            container=random.choice(containers[-1]),
-            columns=[
-                f"column-{j}-{uuid.uuid4()}"
-                for j in range(columns_per_table.sample_with_floor())
-            ],
-            column_mapping=None,
-            definition=f"{uuid.uuid4()}-{'*' * view_definition_length.sample_with_floor(10)}",
-            parents=random.sample(tables, parents_per_view.sample_with_floor()),
+            **{  # type: ignore
+                **_generate_table(i, containers[-1], columns_per_table).__dict__,
+                "name": f"view_{i}",
+                "definition": f"--{'*' * view_definition_length.sample(floor=0)}",
+            },
         )
         for i in range(num_views)
     ]
 
-    for table in tables + views:
-        _generate_column_mapping(table)
+    for view in views:
+        view.upstreams = random.sample(tables, k=parents_per_view.sample(floor=1))
+
+    generate_lineage(tables, views)
 
     now = datetime.now(tz=timezone.utc)
     return SeedMetadata(
@@ -133,6 +151,33 @@
     )
 
 
+def generate_lineage(
+    tables: Collection[Table],
+    views: Collection[Table],
+    # Percentiles: 75th=0, 80th=1, 95th=2, 99th=4, 99.99th=15
+    upstream_distribution: Distribution = LomaxDistribution(scale=3, shape=5),
+) -> None:
+    num_upstreams = [upstream_distribution.sample(ceiling=100) for _ in tables]
+    # Prioritize tables with a lot of upstreams themselves
+    factor = 1 + len(tables) // 10
+    table_weights = [1 + (num_upstreams[i] * factor) for i in range(len(tables))]
+    view_weights = [1] * len(views)
+
+    # TODO: Python 3.9 use random.sample with counts
+    sample = []
+    for table, weight in zip(tables, table_weights):
+        for _ in range(weight):
+            sample.append(table)
+    for view, weight in zip(views, view_weights):
+        for _ in range(weight):
+            sample.append(view)
+    for i, table in enumerate(tables):
+        table.upstreams = random.sample(  # type: ignore
+            sample,
+            k=num_upstreams[i],
+        )
+
+
 def generate_queries(
     seed_metadata: SeedMetadata,
     num_selects: int,
@@ -146,12 +191,12 @@ def generate_queries(
 ) -> Iterable[Query]:
     faker = Faker()
     query_texts = [
-        faker.paragraph(query_length.sample_with_floor(30) // 30)
+        faker.paragraph(query_length.sample(floor=30) // 30)
         for _ in range(num_unique_queries)
     ]
 
     all_tables = seed_metadata.tables + seed_metadata.views
-    users = [f"user-{i}@xyz.com" for i in range(num_users)]
+    users = [f"user_{i}@xyz.com" for i in range(num_users)]
     for i in range(num_selects):  # Pure SELECT statements
         tables = _sample_list(all_tables, tables_per_select)
         all_columns = [
@@ -191,21 +236,43 @@
     )
 
 
-def _generate_column_mapping(table: Table) -> ColumnMapping:
-    d = {}
-    for column in table.columns:
-        d[column] = Column(
-            name=column,
+def _container_type(i: int) -> str:
+    if i == 0:
+        return "database"
+    elif i == 1:
+        return "schema"
+    else:
+        return f"{i}container"
+
+
+def _generate_table(
+    i: int, parents: List[Container], columns_per_table: Distribution
+) -> Table:
+    num_columns = columns_per_table.sample(floor=1)
+
+    columns = OrderedDict({ID_COLUMN: Column(ID_COLUMN, ColumnType.INTEGER, False)})
+    for j in range(num_columns):
+        name = f"column_{j}"
+        columns[name] = Column(
+            name=name,
             type=random.choice(list(ColumnType)),
             nullable=random.random() < 0.1,  # Fixed 10% chance for now
         )
-    table.column_mapping = d
-    return d
+    return Table(
+        f"table_{i}",
+        container=random.choice(parents),
+        columns=columns,
+        upstreams=[],
+    )
 
 
 def _sample_list(lst: List[T], dist: NormalDistribution, floor: int = 1) -> List[T]:
-    return random.sample(lst, min(dist.sample_with_floor(floor), len(lst)))
+    return random.sample(lst, min(dist.sample(floor=floor), len(lst)))
 
 
 def _random_time_between(start: datetime, end: datetime) -> datetime:
     return start + timedelta(seconds=(end - start).total_seconds() * random.random())
+
+
+if __name__ == "__main__":
+    z = generate_data(10, 1000, 10)

diff --git a/metadata-ingestion/tests/performance/data_model.py b/metadata-ingestion/tests/performance/data_model.py
index 9425fa827070e..728bb6ddde215 100644
--- a/metadata-ingestion/tests/performance/data_model.py
+++ b/metadata-ingestion/tests/performance/data_model.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass
+import typing
+from collections import OrderedDict
+from dataclasses import dataclass, field
 from datetime import datetime
 from enum import Enum
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Union
 
 from typing_extensions import Literal
 
@@ -37,29 +39,63 @@ class ColumnType(str, Enum):
 @dataclass
 class Column:
     name: str
-    type: ColumnType
-    nullable: bool
+    type: ColumnType = ColumnType.STRING
+    nullable: bool = False
 
 
 ColumnRef = str
 ColumnMapping = Dict[ColumnRef, Column]
 
 
-@dataclass
+@dataclass(init=False)
 class Table:
     name: str
     container: Container
-    columns: List[ColumnRef]
-    column_mapping: Optional[ColumnMapping]
+    columns: typing.OrderedDict[ColumnRef, Column] = field(repr=False)
+    upstreams: List["Table"] = field(repr=False)
+
+    def __init__(
+        self,
+        name: str,
+        container: Container,
+        columns: Union[List[str], Dict[str, Column]],
+        upstreams: List["Table"],
+    ):
+        self.name = name
+        self.container = container
+        self.upstreams = upstreams
+        if isinstance(columns, list):
+            self.columns = OrderedDict((col, Column(col)) for col in columns)
+        elif isinstance(columns, dict):
+            self.columns = OrderedDict(columns)
+
+    @property
+    def name_components(self) -> List[str]:
+        lst = [self.name]
+        container: Optional[Container] = self.container
+        while container:
+            lst.append(container.name)
+            container = container.parent
+        return lst[::-1]
 
     def is_view(self) -> bool:
         return False
 
 
-@dataclass
+@dataclass(init=False)
 class View(Table):
     definition: str
-    parents: List[Table]
+
+    def __init__(
+        self,
+        name: str,
+        container: Container,
+        columns: Union[List[str], Dict[str, Column]],
+        upstreams: List["Table"],
+        definition: str,
+    ):
+        super().__init__(name, container, columns, upstreams)
+        self.definition = definition
 
     def is_view(self) -> bool:
         return True

diff --git a/metadata-ingestion/tests/performance/databricks/generator.py b/metadata-ingestion/tests/performance/databricks/generator.py
new file mode 100644
index 0000000000000..29df325d856a1
--- /dev/null
+++ b/metadata-ingestion/tests/performance/databricks/generator.py
@@ -0,0 +1,177 @@
+import logging
+import random
+import string
+from concurrent.futures import ThreadPoolExecutor, wait
+from datetime import datetime
+from typing import Callable, List, TypeVar, Union
+from urllib.parse import urlparse
+
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.core import DatabricksError
+from databricks.sdk.service.catalog import ColumnTypeName
+from performance.data_generation import Distribution, LomaxDistribution, SeedMetadata
+from performance.data_model import ColumnType, Container, Table, View
+from performance.databricks.unity_proxy_mock import _convert_column_type
+from sqlalchemy import create_engine
+
+from datahub.ingestion.source.sql.sql_config import make_sqlalchemy_uri
+
+logger = logging.getLogger(__name__)
+T = TypeVar("T")
+
+MAX_WORKERS = 200
+
+
+class DatabricksDataGenerator:
+    def __init__(self, host: str, token: str, warehouse_id: str):
+        self.client = WorkspaceClient(host=host, token=token)
+        self.warehouse_id = warehouse_id
+        url = make_sqlalchemy_uri(
+            scheme="databricks",
+            username="token",
+            password=token,
+            at=urlparse(host).netloc,
+            db=None,
+            uri_opts={"http_path": f"/sql/1.0/warehouses/{warehouse_id}"},
+        )
+        engine = create_engine(
+            url, connect_args={"timeout": 600}, pool_size=MAX_WORKERS
+        )
+        self.connection = engine.connect()
+
+    def clear_data(self, seed_metadata: SeedMetadata) -> None:
+        for container in seed_metadata.containers[0]:
+            try:
+                self.client.catalogs.delete(container.name, force=True)
+            except DatabricksError:
+                pass
+
+    def create_data(
+        self,
+        seed_metadata: SeedMetadata,
+        # Percentiles: 1st=0, 10th=7, 25th=21, 50th=58, 75th=152, 90th=364, 99th=2063, 99.99th=46316
+        num_rows_distribution: Distribution = LomaxDistribution(scale=100, shape=1.5),
+    ) -> None:
+        """Create data in Databricks based on SeedMetadata."""
+        for container in seed_metadata.containers[0]:
+            self._create_catalog(container)
+        for container in seed_metadata.containers[1]:
+            self._create_schema(container)
+
+        _thread_pool_execute("create tables", seed_metadata.tables, self._create_table)
+        _thread_pool_execute("create views", seed_metadata.views, self._create_view)
+        _thread_pool_execute(
+            "populate tables",
+            seed_metadata.tables,
+            lambda t: self._populate_table(
+                t, num_rows_distribution.sample(ceiling=1_000_000)
+            ),
+        )
+        _thread_pool_execute(
+            "create table lineage", seed_metadata.tables, self._create_table_lineage
+        )
+
+    def _create_catalog(self, catalog: Container) -> None:
+        try:
+            self.client.catalogs.get(catalog.name)
+        except DatabricksError:
+            self.client.catalogs.create(catalog.name)
+
+    def _create_schema(self, schema: Container) -> None:
+        try:
+            self.client.schemas.get(f"{schema.parent.name}.{schema.name}")
+        except DatabricksError:
+            self.client.schemas.create(schema.name, schema.parent.name)
+
+    def _create_table(self, table: Table) -> None:
+        try:
+            self.client.tables.delete(".".join(table.name_components))
+        except DatabricksError:
+            pass
+
+        columns = ", ".join(
+            f"{name} {_convert_column_type(column.type).value}"
+            for name, column in table.columns.items()
+        )
+        self._execute_sql(f"CREATE TABLE {_quote_table(table)} ({columns})")
+        self._assert_table_exists(table)
+
+    def _create_view(self, view: View) -> None:
+        self._execute_sql(_generate_view_definition(view))
+        self._assert_table_exists(view)
+
+    def _assert_table_exists(self, table: Table) -> None:
+        self.client.tables.get(".".join(table.name_components))
+
+    def _populate_table(self, table: Table, num_rows: int) -> None:
+        values = [
+            ", ".join(
+                str(_generate_value(column.type)) for column in table.columns.values()
+            )
+            for _ in range(num_rows)
+        ]
+        values_str = ", ".join(f"({value})" for value in values)
+        self._execute_sql(f"INSERT INTO {_quote_table(table)} VALUES {values_str}")
+
+    def _create_table_lineage(self, table: Table) -> None:
+        for upstream in table.upstreams:
+            self._execute_sql(_generate_insert_lineage(table, upstream))
+
+    def _execute_sql(self, sql: str) -> None:
+        print(sql)
+        self.connection.execute(sql)
+
+
+def _thread_pool_execute(desc: str, lst: List[T], fn: Callable[[T], None]) -> None:
+    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+        futures = [executor.submit(fn, item) for item in lst]
+        wait(futures)
+        for future in futures:
+            try:
+                future.result()
+            except Exception as e:
+                logger.error(f"Error executing '{desc}': {e}", exc_info=True)
+
+
+def _generate_value(t: ColumnType) -> Union[int, float, str, bool]:
+    ctn = _convert_column_type(t)
+    if ctn == ColumnTypeName.INT:
+        return random.randint(-(2**31), 2**31 - 1)
+    elif ctn == ColumnTypeName.DOUBLE:
+        return random.uniform(-(2**31), 2**31 - 1)
+    elif ctn == ColumnTypeName.STRING:
+        return (
+            "'" + "".join(random.choice(string.ascii_letters) for _ in range(8)) + "'"
+        )
+    elif ctn == ColumnTypeName.BOOLEAN:
+        return random.choice([True, False])
+    elif ctn == ColumnTypeName.TIMESTAMP:
+        return random.randint(0, int(datetime.now().timestamp()))
+    else:
+        raise NotImplementedError(f"Unsupported type {ctn}")
+
+
+def _generate_insert_lineage(table: Table, upstream: Table) -> str:
+    select = []
+    for column in table.columns.values():
+        matching_cols = [c for c in upstream.columns.values() if c.type == column.type]
+        if matching_cols:
+            upstream_col = random.choice(matching_cols)
+            select.append(f"{upstream_col.name} AS {column.name}")
+        else:
+            select.append(f"{_generate_value(column.type)} AS {column.name}")
+
+    return f"INSERT INTO {_quote_table(table)} SELECT {', '.join(select)} FROM {_quote_table(upstream)}"
+
+
+def _generate_view_definition(view: View) -> str:
+    from_statement = f"FROM {_quote_table(view.upstreams[0])} t0"
+    join_statement = " ".join(
+        f"JOIN {_quote_table(upstream)} t{i+1} ON t0.id = t{i+1}.id"
+        for i, upstream in enumerate(view.upstreams[1:])
+    )
+    return f"CREATE VIEW {_quote_table(view)} AS SELECT * {from_statement} {join_statement} {view.definition}"
+
+
+def _quote_table(table: Table) -> str:
+    return ".".join(f"`{component}`" for component in table.name_components)

diff --git a/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py b/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py
index 593163e12bf0a..ee1caf6783ec1 100644
--- a/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py
+++ b/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py
@@ -88,22 +88,21 @@ def schemas(self, catalog: Catalog) -> Iterable[Schema]:
     def tables(self, schema: Schema) -> Iterable[Table]:
         for table in self._schema_to_table[schema.name]:
             columns = []
-            if table.column_mapping:
-                for i, col_name in enumerate(table.columns):
-                    column = table.column_mapping[col_name]
-                    columns.append(
-                        Column(
-                            id=column.name,
-                            name=column.name,
-                            type_name=self._convert_column_type(column.type),
-                            type_text=column.type.value,
-                            nullable=column.nullable,
-                            position=i,
-                            comment=None,
-                            type_precision=0,
-                            type_scale=0,
-                        )
+            for i, col_name in enumerate(table.columns):
+                column = table.columns[col_name]
+                columns.append(
+                    Column(
+                        id=column.name,
+                        name=column.name,
+                        type_name=_convert_column_type(column.type),
+                        type_text=column.type.value,
+                        nullable=column.nullable,
+                        position=i,
+                        comment=None,
+                        type_precision=0,
+                        type_scale=0,
                     )
+                )
 
             yield Table(
                 id=f"{schema.id}.{table.name}",
@@ -145,7 +144,7 @@ def query_history(
             yield Query(
                 query_id=str(i),
                 query_text=query.text,
-                statement_type=self._convert_statement_type(query.type),
+                statement_type=_convert_statement_type(query.type),
                 start_time=query.timestamp,
                 end_time=query.timestamp,
                 user_id=hash(query.actor),
@@ -160,24 +159,24 @@ def table_lineage(self, table: Table) -> None:
     def get_column_lineage(self, table: Table) -> None:
         pass
 
-    @staticmethod
-    def _convert_column_type(t: ColumnType) -> ColumnTypeName:
-        if t == ColumnType.INTEGER:
-            return ColumnTypeName.INT
-        elif t == ColumnType.FLOAT:
-            return ColumnTypeName.DOUBLE
-        elif t == ColumnType.STRING:
-            return ColumnTypeName.STRING
-        elif t == ColumnType.BOOLEAN:
-            return ColumnTypeName.BOOLEAN
-        elif t == ColumnType.DATETIME:
-            return ColumnTypeName.TIMESTAMP
-        else:
-            raise ValueError(f"Unknown column type: {t}")
-
-    @staticmethod
-    def _convert_statement_type(t: StatementType) -> QueryStatementType:
-        if t == "CUSTOM" or t == "UNKNOWN":
-            return QueryStatementType.OTHER
-        else:
-            return QueryStatementType[t]
+
+def _convert_column_type(t: ColumnType) -> ColumnTypeName:
+    if t == ColumnType.INTEGER:
+        return ColumnTypeName.INT
+    elif t == ColumnType.FLOAT:
+        return ColumnTypeName.DOUBLE
+    elif t == ColumnType.STRING:
+        return ColumnTypeName.STRING
+    elif t == ColumnType.BOOLEAN:
+        return ColumnTypeName.BOOLEAN
+    elif t == ColumnType.DATETIME:
+        return ColumnTypeName.TIMESTAMP
+    else:
+        raise ValueError(f"Unknown column type: {t}")
+
+
+def _convert_statement_type(t: StatementType) -> QueryStatementType:
+    if t == "CUSTOM" or t == "UNKNOWN":
+        return QueryStatementType.OTHER
+    else:
+        return QueryStatementType[t]

diff --git a/metadata-ingestion/tests/unit/test_bigquery_source.py b/metadata-ingestion/tests/unit/test_bigquery_source.py
index 4cfa5c48d2377..3cdb73d77d0a1 100644
--- a/metadata-ingestion/tests/unit/test_bigquery_source.py
+++ b/metadata-ingestion/tests/unit/test_bigquery_source.py
@@ -324,7 +324,7 @@ def test_get_projects_list_failure(
         {"project_id_pattern": {"deny": ["^test-project$"]}}
     )
     source = BigqueryV2Source(config=config, ctx=PipelineContext(run_id="test"))
-    caplog.records.clear()
+    caplog.clear()
     with caplog.at_level(logging.ERROR):
         projects = source._get_projects()
         assert len(caplog.records) == 1

diff --git a/metadata-ingestion/tests/unit/test_bigquery_usage.py b/metadata-ingestion/tests/unit/test_bigquery_usage.py
index 1eb5d8b00e27c..c0055763bc15b 100644
--- a/metadata-ingestion/tests/unit/test_bigquery_usage.py
+++ b/metadata-ingestion/tests/unit/test_bigquery_usage.py
@@ -1,7 +1,7 @@
 import logging
 import random
 from datetime import datetime, timedelta, timezone
-from typing import Iterable, cast
+from typing import Iterable
 from unittest.mock import MagicMock, patch
 
 import pytest
@@ -45,15 +45,16 @@
 ACTOR_2, ACTOR_2_URN = "b@acryl.io", "urn:li:corpuser:b"
 DATABASE_1 = Container("database_1")
 DATABASE_2 = Container("database_2")
-TABLE_1 = Table("table_1", DATABASE_1, ["id", "name", "age"], None)
-TABLE_2 = Table("table_2", DATABASE_1, ["id", "table_1_id", "value"], None)
+TABLE_1 = Table("table_1", DATABASE_1, columns=["id", "name", "age"], upstreams=[])
+TABLE_2 = Table(
+    "table_2", DATABASE_1, columns=["id", "table_1_id", "value"], upstreams=[]
+)
 VIEW_1 = View(
     name="view_1",
     container=DATABASE_1,
     columns=["id", "name", "total"],
     definition="VIEW DEFINITION 1",
-    parents=[TABLE_1, TABLE_2],
-    column_mapping=None,
+    upstreams=[TABLE_1, TABLE_2],
 )
 ALL_TABLES = [TABLE_1, TABLE_2, VIEW_1]
 
@@ -842,6 +843,7 @@ def test_usage_counts_no_columns(
             )
         ),
     ]
+    caplog.clear()
     with caplog.at_level(logging.WARNING):
         workunits = usage_extractor._get_workunits_internal(
             events, [TABLE_REFS[TABLE_1.name]]
@@ -938,7 +940,7 @@ def test_operational_stats(
                     ).to_urn("PROD")
                     for field in query.fields_accessed
                     if field.table.is_view()
-                    for parent in cast(View, field.table).parents
+                    for parent in field.table.upstreams
                 )
             ),
         ),
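Together, the `Distribution` hierarchy and `generate_data` make skewed test workloads one-liners; a usage sketch with arbitrary numbers:

```python
from tests.performance.data_generation import (
    LomaxDistribution,
    NormalDistribution,
    generate_data,
)

seed = generate_data(
    num_containers=[1, 10],  # one database layer, ten schemas
    num_tables=500,
    num_views=50,
    columns_per_table=NormalDistribution(5, 2),
)

# Heavy-tailed sampling: most draws are small, a few are very large.
sizes = [
    LomaxDistribution(scale=100, shape=1.5).sample(ceiling=1_000_000)
    for _ in range(5)
]
```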
From c66619ccc7be509e37e804588023c51984b4fb33 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Tue, 5 Dec 2023 14:03:24 -0600
Subject: [PATCH 048/263] fix(elasticsearch): set datahub usage events shard & replica count (#9388)

---
 docker/elasticsearch-setup/create-indices.sh               | 7 ++++++-
 .../resources/index/usage-event/aws_es_index_template.json | 4 +++-
 .../main/resources/index/usage-event/index_template.json   | 4 +++-
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/docker/elasticsearch-setup/create-indices.sh b/docker/elasticsearch-setup/create-indices.sh
index 5c4eb3ce3851e..81cf405bf4b3d 100755
--- a/docker/elasticsearch-setup/create-indices.sh
+++ b/docker/elasticsearch-setup/create-indices.sh
@@ -5,6 +5,8 @@ set -e
 : ${DATAHUB_ANALYTICS_ENABLED:=true}
 : ${USE_AWS_ELASTICSEARCH:=false}
 : ${ELASTICSEARCH_INSECURE:=false}
+: ${DUE_SHARDS:=1}
+: ${DUE_REPLICAS:=1}
 
 # protocol: http or https?
 if [[ $ELASTICSEARCH_USE_SSL == true ]]; then
@@ -74,7 +76,10 @@ function create_if_not_exists {
     # use the file at given path as definition, but first replace all occurences of `PREFIX`
     # placeholder within the file with the actual prefix value
     TMP_SOURCE_PATH="/tmp/$RESOURCE_DEFINITION_NAME"
-    sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" | tee -a "$TMP_SOURCE_PATH"
+    sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" \
+      | sed -e "s/DUE_SHARDS/$DUE_SHARDS/g" \
+      | sed -e "s/DUE_REPLICAS/$DUE_REPLICAS/g" \
+      | tee -a "$TMP_SOURCE_PATH"
     curl "${CURL_ARGS[@]}" -XPUT "$ELASTICSEARCH_URL/$RESOURCE_ADDRESS" -H 'Content-Type: application/json' --data "@$TMP_SOURCE_PATH"
 
   elif [ $RESOURCE_STATUS -eq 403 ]; then

diff --git a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json
index 21e98e4e96b5f..16d1e14720b2d 100644
--- a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json
+++ b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json
@@ -20,6 +20,8 @@
     }
   },
   "settings": {
-    "index.opendistro.index_state_management.rollover_alias": "PREFIXdatahub_usage_event"
+    "index.opendistro.index_state_management.rollover_alias": "PREFIXdatahub_usage_event",
+    "index.number_of_shards": DUE_SHARDS,
+    "index.number_of_replicas": DUE_REPLICAS
   }
 }
\ No newline at end of file

diff --git a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json
index 44f6e644713eb..e3c6a8c37e573 100644
--- a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json
+++ b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json
@@ -23,7 +23,9 @@
       }
     },
     "settings": {
-      "index.lifecycle.name": "PREFIXdatahub_usage_event_policy"
+      "index.lifecycle.name": "PREFIXdatahub_usage_event_policy",
+      "index.number_of_shards": DUE_SHARDS,
+      "index.number_of_replicas": DUE_REPLICAS
    }
  }
}
\ No newline at end of file
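Because the shard and replica counts default via `: ${DUE_SHARDS:=1}` and `: ${DUE_REPLICAS:=1}`, operators can scale the usage-event index purely through the setup container's environment; a docker-compose sketch, where the service name is an assumption about the deployment:

```yaml
services:
  elasticsearch-setup:
    environment:
      - DUE_SHARDS=3
      - DUE_REPLICAS=2
```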
WORD_GRAM + WORD_GRAM, + DOUBLE } @Nonnull diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java index 3618108970afa..b95cb1085283f 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java @@ -142,7 +142,7 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { assertEquals(new TestEntityInfo().schema().getFullName(), testEntityInfo.getPegasusSchema().getFullName()); // Assert on Searchable Fields - assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 10); + assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 11); assertEquals("customProperties", testEntityInfo.getSearchableFieldSpecMap().get( new PathSpec("customProperties").toString()).getSearchableAnnotation().getFieldName()); assertEquals(SearchableAnnotation.FieldType.KEYWORD, testEntityInfo.getSearchableFieldSpecMap().get( @@ -189,6 +189,10 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { new PathSpec("foreignKey").toString()).getSearchableAnnotation().getFieldName()); assertEquals(true, testEntityInfo.getSearchableFieldSpecMap().get( new PathSpec("foreignKey").toString()).getSearchableAnnotation().isQueryByDefault()); + assertEquals("doubleField", testEntityInfo.getSearchableFieldSpecMap().get( + new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldName()); + assertEquals(SearchableAnnotation.FieldType.DOUBLE, testEntityInfo.getSearchableFieldSpecMap().get( + new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldType()); // Assert on Relationship Fields diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index 35cef71edd953..13a0f57ccea99 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -134,6 +134,8 @@ private static Map<String, Object> getMappingsForField(@Nonnull final Searchable mappingForField.put(TYPE, ESUtils.DATE_FIELD_TYPE); } else if (fieldType == FieldType.OBJECT) { mappingForField.put(TYPE, ESUtils.OBJECT_FIELD_TYPE); + } else if (fieldType == FieldType.DOUBLE) { + mappingForField.put(TYPE, ESUtils.DOUBLE_FIELD_TYPE); } else { log.info("FieldType {} has no mappings implemented", fieldType); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index 0d2ce236d9f54..d9f2f0e5aac94 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -18,7 +18,7 @@ public void testMappingsBuilder() { Map<String, Object> result = MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec()); assertEquals(result.size(), 1); Map<String, Object> properties = (Map<String, Object>) result.get("properties"); - assertEquals(properties.size(), 19); + assertEquals(properties.size(), 20); assertEquals(properties.get("urn"), ImmutableMap.of("type", 
"keyword", "fields", ImmutableMap.of("delimited", @@ -123,5 +123,9 @@ public void testMappingsBuilder() { assertEquals(feature1.get("type"), "double"); Map<String, Object> feature2 = (Map<String, Object>) properties.get("feature2"); assertEquals(feature2.get("type"), "double"); + + // DOUBLE + Map<String, Object> doubleField = (Map<String, Object>) properties.get("doubleField"); + assertEquals(doubleField.get("type"), "double"); } } diff --git a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl index 6dff14133ee60..db293140ad650 100644 --- a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl +++ b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl @@ -90,4 +90,10 @@ record TestEntityInfo includes CustomProperties { } } esObjectField: optional map[string, string] + + @Searchable = { + "fieldName": "doubleField", + "fieldType": "DOUBLE" + } + doubleField: optional double } From e14474176f20e38b2c4c883949c561223181b57c Mon Sep 17 00:00:00 2001 From: Aseem Bansal <asmbansal2@gmail.com> Date: Wed, 6 Dec 2023 11:02:42 +0530 Subject: [PATCH 050/263] feat(lint): add spotless for java lint (#9373) --- build.gradle | 39 +- datahub-frontend/app/auth/AuthModule.java | 366 +- datahub-frontend/app/auth/AuthUtils.java | 247 +- datahub-frontend/app/auth/Authenticator.java | 57 +- datahub-frontend/app/auth/ConfigUtil.java | 10 +- datahub-frontend/app/auth/CookieConfigs.java | 19 +- datahub-frontend/app/auth/JAASConfigs.java | 19 +- .../app/auth/NativeAuthenticationConfigs.java | 16 +- .../app/auth/cookie/CustomCookiesModule.java | 5 +- datahub-frontend/app/auth/sso/SsoConfigs.java | 34 +- datahub-frontend/app/auth/sso/SsoManager.java | 15 +- .../app/auth/sso/SsoProvider.java | 22 +- .../sso/oidc/OidcAuthorizationGenerator.java | 69 +- .../app/auth/sso/oidc/OidcCallbackLogic.java | 273 +- .../app/auth/sso/oidc/OidcConfigs.java | 194 +- .../app/auth/sso/oidc/OidcProvider.java | 33 +- .../sso/oidc/OidcResponseErrorHandler.java | 77 +- .../oidc/custom/CustomOidcAuthenticator.java | 76 +- .../app/client/AuthServiceClient.java | 152 +- .../app/client/KafkaTrackingProducer.java | 228 +- .../app/config/ConfigurationProvider.java | 18 +- .../app/controllers/Application.java | 179 +- .../controllers/AuthenticationController.java | 590 +-- .../controllers/CentralLogoutController.java | 32 +- .../controllers/SsoCallbackController.java | 99 +- .../app/controllers/TrackingController.java | 82 +- datahub-frontend/app/security/AuthUtil.java | 37 +- .../app/security/AuthenticationManager.java | 16 +- .../app/security/DummyLoginModule.java | 18 +- datahub-frontend/app/utils/ConfigUtil.java | 20 +- datahub-frontend/app/utils/SearchUtil.java | 39 +- datahub-frontend/build.gradle | 2 - .../test/app/ApplicationTest.java | 68 +- .../test/security/DummyLoginModuleTest.java | 8 +- .../test/security/OidcConfigurationTest.java | 567 ++- .../test/utils/SearchUtilTest.java | 23 +- .../linkedin/datahub/graphql/Constants.java | 42 +- .../datahub/graphql/GmsGraphQLEngine.java | 3369 ++++++++++------- .../datahub/graphql/GmsGraphQLEngineArgs.java | 72 +- .../datahub/graphql/GmsGraphQLPlugin.java | 25 +- .../datahub/graphql/GraphQLEngine.java | 241 +- .../datahub/graphql/QueryContext.java | 43 +- .../datahub/graphql/RelationshipKey.java | 1 - .../datahub/graphql/TimeSeriesAspectArgs.java | 6 +- .../datahub/graphql/UsageStatsKey.java | 1 - .../datahub/graphql/VersionedAspectKey.java | 2 +- .../graphql/WeaklyTypedAspectsResolver.java 
| 104 +- .../resolver/AnalyticsChartTypeResolver.java | 23 +- .../analytics/resolver/GetChartsResolver.java | 265 +- .../resolver/GetHighlightsResolver.java | 114 +- .../GetMetadataAnalyticsResolver.java | 93 +- .../resolver/IsAnalyticsEnabledResolver.java | 9 +- .../analytics/service/AnalyticsService.java | 179 +- .../analytics/service/AnalyticsUtil.java | 182 +- .../authorization/AuthorizationUtils.java | 183 +- .../exception/AuthenticationException.java | 16 +- .../exception/AuthorizationException.java | 5 +- .../DataHubDataFetcherExceptionHandler.java | 3 +- .../exception/DataHubGraphQLError.java | 12 +- .../exception/DataHubGraphQLException.java | 1 - .../exception/ValidationException.java | 16 +- .../graphql/featureflags/FeatureFlags.java | 1 - .../datahub/graphql/resolvers/AuthUtils.java | 24 +- .../resolvers/AuthenticatedResolver.java | 27 +- .../graphql/resolvers/BatchLoadUtils.java | 14 +- .../graphql/resolvers/EntityTypeMapper.java | 10 +- .../datahub/graphql/resolvers/MeResolver.java | 175 +- .../graphql/resolvers/ResolverUtils.java | 328 +- .../assertion/AssertionRunEventResolver.java | 128 +- .../assertion/DeleteAssertionResolver.java | 109 +- .../assertion/EntityAssertionsResolver.java | 102 +- .../resolvers/auth/AccessTokenUtil.java | 11 +- .../auth/CreateAccessTokenResolver.java | 133 +- .../auth/GetAccessTokenResolver.java | 75 +- .../auth/ListAccessTokensResolver.java | 123 +- .../auth/RevokeAccessTokenResolver.java | 58 +- .../resolvers/browse/BrowsePathsResolver.java | 86 +- .../resolvers/browse/BrowseResolver.java | 98 +- .../browse/EntityBrowsePathsResolver.java | 22 +- .../resolvers/chart/BrowseV2Resolver.java | 102 +- .../chart/ChartStatsSummaryResolver.java | 15 +- .../resolvers/config/AppConfigResolver.java | 111 +- .../container/ContainerEntitiesResolver.java | 93 +- .../container/ParentContainersResolver.java | 53 +- .../DashboardStatsSummaryResolver.java | 106 +- .../DashboardUsageStatsResolver.java | 74 +- .../dashboard/DashboardUsageStatsUtils.java | 184 +- .../BatchSetDataProductResolver.java | 101 +- .../CreateDataProductResolver.java | 69 +- .../DataProductAuthorizationUtils.java | 47 +- .../DeleteDataProductResolver.java | 55 +- .../ListDataProductAssetsResolver.java | 176 +- .../UpdateDataProductResolver.java | 77 +- .../dataset/DatasetHealthResolver.java | 130 +- .../dataset/DatasetStatsSummaryResolver.java | 96 +- .../dataset/DatasetUsageStatsResolver.java | 40 +- .../UpdateDeprecationResolver.java | 113 +- .../domain/CreateDomainResolver.java | 156 +- .../domain/DeleteDomainResolver.java | 65 +- .../domain/DomainEntitiesResolver.java | 100 +- .../resolvers/domain/ListDomainsResolver.java | 88 +- .../domain/ParentDomainsResolver.java | 72 +- .../resolvers/domain/SetDomainResolver.java | 86 +- .../resolvers/domain/UnsetDomainResolver.java | 74 +- .../resolvers/embed/UpdateEmbedResolver.java | 91 +- .../entity/EntityExistsResolver.java | 30 +- .../entity/EntityPrivilegesResolver.java | 71 +- .../glossary/AddRelatedTermsResolver.java | 135 +- .../glossary/CreateGlossaryNodeResolver.java | 116 +- .../glossary/CreateGlossaryTermResolver.java | 160 +- .../DeleteGlossaryEntityResolver.java | 61 +- .../GetRootGlossaryNodesResolver.java | 97 +- .../GetRootGlossaryTermsResolver.java | 88 +- .../glossary/ParentNodesResolver.java | 96 +- .../glossary/RemoveRelatedTermsResolver.java | 124 +- .../group/AddGroupMembersResolver.java | 72 +- .../resolvers/group/CreateGroupResolver.java | 41 +- .../resolvers/group/EntityCountsResolver.java | 62 +- 
.../resolvers/group/ListGroupsResolver.java | 82 +- .../group/RemoveGroupMembersResolver.java | 70 +- .../resolvers/group/RemoveGroupResolver.java | 46 +- .../resolvers/ingest/IngestionAuthUtils.java | 13 +- .../ingest/IngestionResolverUtils.java | 63 +- ...ncelIngestionExecutionRequestResolver.java | 101 +- ...eateIngestionExecutionRequestResolver.java | 194 +- .../CreateTestConnectionRequestResolver.java | 104 +- .../GetIngestionExecutionRequestResolver.java | 60 +- ...estionSourceExecutionRequestsResolver.java | 120 +- .../execution/RollbackIngestionResolver.java | 52 +- .../ingest/secret/CreateSecretResolver.java | 79 +- .../ingest/secret/DeleteSecretResolver.java | 27 +- .../secret/GetSecretValuesResolver.java | 97 +- .../ingest/secret/ListSecretsResolver.java | 107 +- .../resolvers/ingest/secret/SecretUtils.java | 11 +- .../source/DeleteIngestionSourceResolver.java | 29 +- .../source/GetIngestionSourceResolver.java | 52 +- .../source/ListIngestionSourcesResolver.java | 108 +- .../source/UpsertIngestionSourceResolver.java | 118 +- .../resolvers/jobs/DataJobRunsResolver.java | 127 +- .../resolvers/jobs/EntityRunsResolver.java | 140 +- .../lineage/UpdateLineageResolver.java | 230 +- .../resolvers/load/AspectResolver.java | 22 +- .../load/BatchGetEntitiesResolver.java | 28 +- .../load/EntityLineageResultResolver.java | 73 +- .../EntityRelationshipsResultResolver.java | 54 +- .../load/EntityTypeBatchResolver.java | 34 +- .../resolvers/load/EntityTypeResolver.java | 79 +- .../load/LoadableTypeBatchResolver.java | 41 +- .../resolvers/load/LoadableTypeResolver.java | 41 +- .../resolvers/load/OwnerTypeResolver.java | 42 +- .../load/TimeSeriesAspectResolver.java | 115 +- .../resolvers/mutate/AddLinkResolver.java | 61 +- .../resolvers/mutate/AddOwnerResolver.java | 45 +- .../resolvers/mutate/AddOwnersResolver.java | 66 +- .../resolvers/mutate/AddTagResolver.java | 80 +- .../resolvers/mutate/AddTagsResolver.java | 79 +- .../resolvers/mutate/AddTermResolver.java | 70 +- .../resolvers/mutate/AddTermsResolver.java | 75 +- .../mutate/BatchAddOwnersResolver.java | 62 +- .../mutate/BatchAddTagsResolver.java | 127 +- .../mutate/BatchAddTermsResolver.java | 115 +- .../mutate/BatchRemoveOwnersResolver.java | 77 +- .../mutate/BatchRemoveTagsResolver.java | 64 +- .../mutate/BatchRemoveTermsResolver.java | 64 +- .../mutate/BatchSetDomainResolver.java | 61 +- .../BatchUpdateDeprecationResolver.java | 68 +- .../BatchUpdateSoftDeletedResolver.java | 55 +- .../resolvers/mutate/DescriptionUtils.java | 440 ++- .../resolvers/mutate/MoveDomainResolver.java | 110 +- .../mutate/MutableTypeBatchResolver.java | 43 +- .../resolvers/mutate/MutableTypeResolver.java | 50 +- .../resolvers/mutate/MutationUtils.java | 80 +- .../resolvers/mutate/RemoveLinkResolver.java | 55 +- .../resolvers/mutate/RemoveOwnerResolver.java | 59 +- .../resolvers/mutate/RemoveTagResolver.java | 79 +- .../resolvers/mutate/RemoveTermResolver.java | 79 +- .../mutate/UpdateDescriptionResolver.java | 661 ++-- .../resolvers/mutate/UpdateNameResolver.java | 219 +- .../mutate/UpdateParentNodeResolver.java | 113 +- .../mutate/UpdateUserSettingResolver.java | 71 +- .../resolvers/mutate/util/DeleteUtils.java | 54 +- .../mutate/util/DeprecationUtils.java | 58 +- .../resolvers/mutate/util/DomainUtils.java | 217 +- .../resolvers/mutate/util/EmbedUtils.java | 25 +- .../resolvers/mutate/util/GlossaryUtils.java | 109 +- .../resolvers/mutate/util/LabelUtils.java | 409 +- .../resolvers/mutate/util/LinkUtils.java | 98 +- .../resolvers/mutate/util/OwnerUtils.java | 
243 +- .../resolvers/mutate/util/SiblingsUtils.java | 21 +- .../operation/ReportOperationResolver.java | 105 +- .../CreateOwnershipTypeResolver.java | 41 +- .../DeleteOwnershipTypeResolver.java | 28 +- .../ownership/ListOwnershipTypesResolver.java | 86 +- .../UpdateOwnershipTypeResolver.java | 51 +- .../policy/DeletePolicyResolver.java | 35 +- .../policy/GetGrantedPrivilegesResolver.java | 33 +- .../policy/ListPoliciesResolver.java | 33 +- .../resolvers/policy/PolicyAuthUtils.java | 10 +- .../policy/UpsertPolicyResolver.java | 48 +- .../mappers/PolicyInfoPolicyMapper.java | 45 +- .../mappers/PolicyUpdateInputInfoMapper.java | 43 +- .../resolvers/post/CreatePostResolver.java | 38 +- .../resolvers/post/DeletePostResolver.java | 19 +- .../resolvers/post/ListPostsResolver.java | 73 +- .../resolvers/query/CreateQueryResolver.java | 76 +- .../resolvers/query/DeleteQueryResolver.java | 46 +- .../resolvers/query/ListQueriesResolver.java | 83 +- .../resolvers/query/UpdateQueryResolver.java | 109 +- .../ListRecommendationsResolver.java | 111 +- .../resolvers/role/AcceptRoleResolver.java | 47 +- .../role/BatchAssignRoleResolver.java | 30 +- .../role/CreateInviteTokenResolver.java | 30 +- .../role/GetInviteTokenResolver.java | 30 +- .../resolvers/role/ListRolesResolver.java | 66 +- .../AggregateAcrossEntitiesResolver.java | 112 +- .../AutoCompleteForMultipleResolver.java | 133 +- .../search/AutoCompleteResolver.java | 114 +- .../resolvers/search/AutocompleteUtils.java | 102 +- .../search/GetQuickFiltersResolver.java | 172 +- .../search/ScrollAcrossEntitiesResolver.java | 124 +- .../search/ScrollAcrossLineageResolver.java | 124 +- .../search/SearchAcrossEntitiesResolver.java | 109 +- .../search/SearchAcrossLineageResolver.java | 138 +- .../resolvers/search/SearchResolver.java | 74 +- .../graphql/resolvers/search/SearchUtils.java | 301 +- .../UpdateCorpUserViewsSettingsResolver.java | 76 +- .../view/GlobalViewsSettingsResolver.java | 36 +- .../UpdateGlobalViewsSettingsResolver.java | 70 +- .../step/BatchGetStepStatesResolver.java | 109 +- .../step/BatchUpdateStepStatesResolver.java | 65 +- .../resolvers/tag/CreateTagResolver.java | 93 +- .../resolvers/tag/DeleteTagResolver.java | 55 +- .../resolvers/tag/SetTagColorResolver.java | 104 +- .../resolvers/test/CreateTestResolver.java | 85 +- .../resolvers/test/DeleteTestResolver.java | 32 +- .../resolvers/test/ListTestsResolver.java | 81 +- .../resolvers/test/TestResultsResolver.java | 55 +- .../graphql/resolvers/test/TestUtils.java | 14 +- .../resolvers/test/UpdateTestResolver.java | 53 +- .../timeline/GetSchemaBlameResolver.java | 62 +- .../GetSchemaVersionListResolver.java | 53 +- .../type/AspectInterfaceTypeResolver.java | 20 +- .../type/EntityInterfaceTypeResolver.java | 26 +- .../type/HyperParameterValueTypeResolver.java | 36 +- .../type/PlatformSchemaUnionTypeResolver.java | 22 +- .../resolvers/type/ResultsTypeResolver.java | 18 +- ...TimeSeriesAspectInterfaceTypeResolver.java | 3 +- .../CreateNativeUserResetTokenResolver.java | 41 +- .../resolvers/user/ListUsersResolver.java | 77 +- .../resolvers/user/RemoveUserResolver.java | 46 +- .../user/UpdateUserStatusResolver.java | 46 +- .../resolvers/view/CreateViewResolver.java | 79 +- .../resolvers/view/DeleteViewResolver.java | 40 +- .../view/ListGlobalViewsResolver.java | 86 +- .../resolvers/view/ListMyViewsResolver.java | 100 +- .../resolvers/view/UpdateViewResolver.java | 63 +- .../graphql/resolvers/view/ViewUtils.java | 92 +- .../graphql/scalar/LongScalarType.java | 3 +- 
.../graphql/types/BatchMutableType.java | 18 +- .../graphql/types/BrowsableEntityType.java | 58 +- .../datahub/graphql/types/EntityType.java | 16 +- .../datahub/graphql/types/LoadableType.java | 68 +- .../datahub/graphql/types/MutableType.java | 25 +- .../graphql/types/SearchableEntityType.java | 83 +- .../graphql/types/aspect/AspectMapper.java | 1 - .../graphql/types/aspect/AspectType.java | 72 +- .../types/assertion/AssertionMapper.java | 45 +- .../types/assertion/AssertionType.java | 112 +- .../types/auth/AccessTokenMetadataType.java | 25 +- .../mappers/AccessTokenMetadataMapper.java | 9 +- .../graphql/types/chart/ChartType.java | 366 +- .../types/chart/mappers/ChartMapper.java | 339 +- .../chart/mappers/ChartUpdateInputMapper.java | 109 +- .../chart/mappers/InputFieldsMapper.java | 53 +- .../common/mappers/AuditStampMapper.java | 26 +- .../common/mappers/BrowsePathsV2Mapper.java | 6 +- .../mappers/ChangeAuditStampsMapper.java | 4 +- .../types/common/mappers/CostMapper.java | 26 +- .../types/common/mappers/CostValueMapper.java | 29 +- .../mappers/CustomPropertiesMapper.java | 36 +- .../DataPlatformInstanceAspectMapper.java | 10 +- .../common/mappers/DeprecationMapper.java | 32 +- .../types/common/mappers/EmbedMapper.java | 1 - .../mappers/FineGrainedLineagesMapper.java | 42 +- .../mappers/InstitutionalMemoryMapper.java | 27 +- .../InstitutionalMemoryMetadataMapper.java | 46 +- ...stitutionalMemoryMetadataUpdateMapper.java | 37 +- .../InstitutionalMemoryUpdateMapper.java | 34 +- .../types/common/mappers/OperationMapper.java | 91 +- .../types/common/mappers/OwnerMapper.java | 74 +- .../common/mappers/OwnerUpdateMapper.java | 76 +- .../types/common/mappers/OwnershipMapper.java | 33 +- .../common/mappers/OwnershipSourceMapper.java | 30 +- .../common/mappers/OwnershipUpdateMapper.java | 37 +- .../mappers/SearchFlagsInputMapper.java | 10 +- .../types/common/mappers/SiblingsMapper.java | 12 +- .../types/common/mappers/StatusMapper.java | 21 +- .../types/common/mappers/StringMapMapper.java | 31 +- .../types/common/mappers/SubTypesMapper.java | 9 +- .../mappers/UpstreamLineagesMapper.java | 12 +- .../common/mappers/UrnToEntityMapper.java | 7 +- .../common/mappers/util/MappingHelper.java | 8 +- .../types/common/mappers/util/RunInfo.java | 1 - .../mappers/util/SystemMetadataUtils.java | 19 +- .../mappers/util/UpdateMappingHelper.java | 1 - .../types/container/ContainerType.java | 108 +- .../container/mappers/ContainerMapper.java | 80 +- .../types/corpgroup/CorpGroupType.java | 329 +- .../types/corpgroup/CorpGroupUtils.java | 24 +- .../CorpGroupEditablePropertiesMapper.java | 18 +- .../mappers/CorpGroupInfoMapper.java | 76 +- .../corpgroup/mappers/CorpGroupMapper.java | 119 +- .../mappers/CorpGroupPropertiesMapper.java | 9 +- .../graphql/types/corpuser/CorpUserType.java | 355 +- .../graphql/types/corpuser/CorpUserUtils.java | 24 +- .../mappers/CorpUserEditableInfoMapper.java | 46 +- .../corpuser/mappers/CorpUserInfoMapper.java | 48 +- .../corpuser/mappers/CorpUserMapper.java | 212 +- .../mappers/CorpUserPropertiesMapper.java | 12 +- .../mappers/CorpUserStatusMapper.java | 9 +- .../types/dashboard/DashboardType.java | 366 +- .../dashboard/mappers/DashboardMapper.java | 325 +- .../mappers/DashboardUpdateInputMapper.java | 110 +- .../mappers/DashboardUsageMetricMapper.java | 7 +- .../graphql/types/dataflow/DataFlowType.java | 340 +- .../dataflow/mappers/DataFlowMapper.java | 239 +- .../mappers/DataFlowUpdateInputMapper.java | 44 +- .../graphql/types/datajob/DataJobType.java | 346 +- 
.../types/datajob/mappers/DataJobMapper.java | 247 +- .../mappers/DataJobUpdateInputMapper.java | 104 +- .../types/dataplatform/DataPlatformType.java | 86 +- .../mappers/DataPlatformInfoMapper.java | 36 +- .../mappers/DataPlatformMapper.java | 64 +- .../mappers/DataPlatformPropertiesMapper.java | 37 +- .../DataPlatformInstanceType.java | 182 +- .../mappers/DataPlatformInstanceMapper.java | 107 +- .../mappers/DataProcessInstanceMapper.java | 64 +- .../DataProcessInstanceRunEventMapper.java | 65 +- .../DataProcessInstanceRunResultMapper.java | 42 +- .../types/dataproduct/DataProductType.java | 87 +- .../mappers/DataProductMapper.java | 67 +- .../graphql/types/dataset/DatasetType.java | 487 +-- .../graphql/types/dataset/DatasetUtils.java | 16 +- .../types/dataset/VersionedDatasetType.java | 64 +- .../mappers/AssertionRunEventMapper.java | 13 +- .../mappers/DatasetDeprecationMapper.java | 31 +- .../types/dataset/mappers/DatasetMapper.java | 305 +- .../dataset/mappers/DatasetProfileMapper.java | 21 +- .../mappers/DatasetUpdateInputMapper.java | 78 +- .../EditableSchemaFieldInfoMapper.java | 51 +- .../mappers/EditableSchemaMetadataMapper.java | 35 +- .../mappers/ForeignKeyConstraintMapper.java | 18 +- .../dataset/mappers/PlatformSchemaMapper.java | 109 +- .../dataset/mappers/SchemaFieldMapper.java | 119 +- .../types/dataset/mappers/SchemaMapper.java | 71 +- .../dataset/mappers/SchemaMetadataMapper.java | 65 +- .../mappers/VersionedDatasetMapper.java | 110 +- .../types/domain/DomainAssociationMapper.java | 38 +- .../graphql/types/domain/DomainMapper.java | 25 +- .../graphql/types/domain/DomainType.java | 74 +- .../types/glossary/GlossaryNodeType.java | 52 +- .../types/glossary/GlossaryTermType.java | 246 +- .../types/glossary/GlossaryTermUtils.java | 28 +- .../glossary/mappers/GlossaryNodeMapper.java | 16 +- .../mappers/GlossaryTermInfoMapper.java | 53 +- .../glossary/mappers/GlossaryTermMapper.java | 107 +- .../mappers/GlossaryTermPropertiesMapper.java | 17 +- .../glossary/mappers/GlossaryTermsMapper.java | 75 +- .../mappers/AutoCompleteResultsMapper.java | 33 +- .../types/mappers/BrowsePathMapper.java | 30 +- .../types/mappers/BrowsePathsMapper.java | 25 +- .../types/mappers/BrowseResultMapper.java | 11 +- .../types/mappers/InputModelMapper.java | 8 +- .../graphql/types/mappers/MapperUtils.java | 87 +- .../graphql/types/mappers/ModelMapper.java | 7 +- .../types/mappers/TimeSeriesAspectMapper.java | 6 +- .../UrnScrollAcrossLineageResultsMapper.java | 15 +- .../types/mappers/UrnScrollResultsMapper.java | 9 +- .../UrnSearchAcrossLineageResultsMapper.java | 31 +- .../types/mappers/UrnSearchResultsMapper.java | 14 +- .../types/mlmodel/MLFeatureTableType.java | 220 +- .../graphql/types/mlmodel/MLFeatureType.java | 142 +- .../types/mlmodel/MLModelGroupType.java | 222 +- .../graphql/types/mlmodel/MLModelType.java | 213 +- .../graphql/types/mlmodel/MLModelUtils.java | 63 +- .../types/mlmodel/MLPrimaryKeyType.java | 143 +- .../types/mlmodel/mappers/BaseDataMapper.java | 25 +- .../CaveatsAndRecommendationsMapper.java | 43 +- .../mlmodel/mappers/CaveatsDetailsMapper.java | 28 +- .../mappers/EthicalConsiderationsMapper.java | 34 +- .../mappers/HyperParameterMapMapper.java | 30 +- .../HyperParameterValueTypeMapper.java | 49 +- .../mlmodel/mappers/IntendedUseMapper.java | 36 +- .../mlmodel/mappers/MLFeatureMapper.java | 166 +- .../mappers/MLFeaturePropertiesMapper.java | 58 +- .../mlmodel/mappers/MLFeatureTableMapper.java | 170 +- .../MLFeatureTablePropertiesMapper.java | 71 +- 
.../mlmodel/mappers/MLHyperParamMapper.java | 30 +- .../types/mlmodel/mappers/MLMetricMapper.java | 27 +- .../mappers/MLModelFactorPromptsMapper.java | 43 +- .../mlmodel/mappers/MLModelFactorsMapper.java | 43 +- .../mlmodel/mappers/MLModelGroupMapper.java | 160 +- .../mappers/MLModelGroupPropertiesMapper.java | 35 +- .../types/mlmodel/mappers/MLModelMapper.java | 257 +- .../mappers/MLModelPropertiesMapper.java | 100 +- .../mlmodel/mappers/MLPrimaryKeyMapper.java | 152 +- .../mappers/MLPrimaryKeyPropertiesMapper.java | 58 +- .../types/mlmodel/mappers/MetricsMapper.java | 23 +- .../mappers/QuantitativeAnalysesMapper.java | 29 +- .../mlmodel/mappers/ResultsTypeMapper.java | 30 +- .../mlmodel/mappers/SourceCodeUrlMapper.java | 28 +- .../mlmodel/mappers/VersionTagMapper.java | 26 +- .../graphql/types/notebook/NotebookType.java | 159 +- .../notebook/mappers/NotebookMapper.java | 139 +- .../mappers/NotebookUpdateInputMapper.java | 34 +- .../types/ownership/OwnershipType.java | 34 +- .../types/ownership/OwnershipTypeMapper.java | 13 +- .../types/policy/DataHubPolicyMapper.java | 44 +- .../types/policy/DataHubPolicyType.java | 26 +- .../graphql/types/post/PostMapper.java | 5 +- .../graphql/types/query/QueryMapper.java | 23 +- .../graphql/types/query/QueryType.java | 33 +- .../DataFlowDataJobsRelationshipsMapper.java | 36 +- .../DownstreamEntityRelationshipsMapper.java | 36 +- .../EntityRelationshipLegacyMapper.java | 38 +- .../UpstreamEntityRelationshipsMapper.java | 34 +- .../graphql/types/role/DataHubRoleType.java | 26 +- .../types/role/mappers/DataHubRoleMapper.java | 5 +- .../graphql/types/rolemetadata/RoleType.java | 158 +- .../rolemetadata/mappers/AccessMapper.java | 60 +- .../rolemetadata/mappers/RoleMapper.java | 120 +- .../types/schemafield/SchemaFieldType.java | 25 +- .../datahub/graphql/types/tag/TagType.java | 278 +- .../types/tag/mappers/GlobalTagsMapper.java | 51 +- .../mappers/TagAssociationUpdateMapper.java | 35 +- .../graphql/types/tag/mappers/TagMapper.java | 88 +- .../tag/mappers/TagUpdateInputMapper.java | 21 +- .../graphql/types/test/TestMapper.java | 14 +- .../datahub/graphql/types/test/TestType.java | 39 +- .../timeline/mappers/SchemaBlameMapper.java | 72 +- .../mappers/SchemaVersionListMapper.java | 50 +- .../types/timeline/utils/TimelineUtils.java | 30 +- .../types/usage/FieldUsageCountsMapper.java | 7 +- .../types/usage/UsageAggregationMapper.java | 11 +- .../usage/UsageAggregationMetricsMapper.java | 23 +- .../UsageQueryResultAggregationMapper.java | 27 +- .../types/usage/UsageQueryResultMapper.java | 17 +- .../types/usage/UserUsageCountsMapper.java | 12 +- .../graphql/types/view/DataHubViewMapper.java | 38 +- .../graphql/types/view/DataHubViewType.java | 26 +- .../datahub/graphql/util/DateUtil.java | 51 +- .../graphql/util/SearchInsightsUtil.java | 3 +- .../linkedin/datahub/graphql/TestUtils.java | 92 +- .../graphql/resolvers/ResolverUtilsTest.java | 112 +- .../resolvers/UpdateLineageResolverTest.java | 78 +- .../AssertionRunEventResolverTest.java | 102 +- .../DeleteAssertionResolverTest.java | 154 +- .../EntityAssertionsResolverTest.java | 179 +- .../auth/ListAccessTokensResolverTest.java | 31 +- .../browse/BrowseV2ResolverTest.java | 230 +- .../browse/EntityBrowsePathsResolverTest.java | 20 +- .../ContainerEntitiesResolverTest.java | 78 +- .../ParentContainersResolverTest.java | 149 +- .../dashboard/DashboardStatsSummaryTest.java | 162 +- .../dataset/DatasetHealthResolverTest.java | 207 +- .../DatasetStatsSummaryResolverTest.java | 96 +- 
.../BatchUpdateSoftDeletedResolverTest.java | 122 +- .../BatchUpdateDeprecationResolverTest.java | 188 +- .../UpdateDeprecationResolverTest.java | 183 +- .../domain/BatchSetDomainResolverTest.java | 234 +- .../domain/CreateDomainProposalMatcher.java | 21 +- .../domain/CreateDomainResolverTest.java | 187 +- .../domain/DeleteDomainResolverTest.java | 44 +- .../domain/DomainEntitiesResolverTest.java | 83 +- .../domain/ListDomainsResolverTest.java | 141 +- .../domain/MoveDomainResolverTest.java | 67 +- .../domain/ParentDomainsResolverTest.java | 97 +- .../domain/SetDomainResolverTest.java | 196 +- .../domain/UnsetDomainResolverTest.java | 155 +- .../embed/UpdateEmbedResolverTest.java | 106 +- .../entity/EntityExistsResolverTest.java | 7 +- .../entity/EntityPrivilegesResolverTest.java | 24 +- .../glossary/AddRelatedTermsResolverTest.java | 107 +- .../CreateGlossaryNodeResolverTest.java | 80 +- .../CreateGlossaryTermResolverTest.java | 176 +- .../DeleteGlossaryEntityResolverTest.java | 35 +- .../GetRootGlossaryNodesResolverTest.java | 68 +- .../GetRootGlossaryTermsResolverTest.java | 63 +- .../resolvers/glossary/GlossaryUtilsTest.java | 176 +- .../glossary/ParentNodesResolverTest.java | 293 +- .../RemoveRelatedTermsResolverTest.java | 88 +- .../glossary/UpdateNameResolverTest.java | 68 +- .../UpdateParentNodeResolverTest.java | 66 +- .../group/AddGroupMembersResolverTest.java | 9 +- .../group/CreateGroupResolverTest.java | 9 +- .../group/RemoveGroupMembersResolverTest.java | 9 +- .../resolvers/ingest/IngestTestUtils.java | 44 +- .../ingest/IngestionAuthUtilsTest.java | 34 +- ...IngestionExecutionRequestResolverTest.java | 75 +- ...IngestionExecutionRequestResolverTest.java | 75 +- ...eateTestConnectionRequestResolverTest.java | 37 +- ...IngestionExecutionRequestResolverTest.java | 82 +- ...onSourceExecutionRequestsResolverTest.java | 138 +- .../RollbackIngestionResolverTest.java | 28 +- .../CreateSecretResolverMatcherTest.java | 19 +- .../secret/CreateSecretResolverTest.java | 58 +- .../secret/DeleteSecretResolverTest.java | 16 +- .../secret/GetSecretValuesResolverTest.java | 66 +- .../secret/ListSecretsResolverTest.java | 121 +- .../DeleteIngestionSourceResolverTest.java | 25 +- .../GetIngestionSourceResolverTest.java | 65 +- .../ListIngestionSourceResolverTest.java | 126 +- .../UpsertIngestionSourceResolverTest.java | 77 +- .../mutate/MutableTypeBatchResolverTest.java | 269 +- .../resolvers/mutate/SiblingsUtilsTest.java | 58 +- .../mutate/UpdateUserSettingResolverTest.java | 17 +- .../ReportOperationResolverTest.java | 57 +- .../owner/AddOwnersResolverTest.java | 333 +- .../owner/BatchAddOwnersResolverTest.java | 353 +- .../owner/BatchRemoveOwnersResolverTest.java | 161 +- .../CreateOwnershipTypeResolverTest.java | 52 +- .../DeleteOwnershipTypeResolverTest.java | 49 +- .../ListOwnershipTypesResolverTest.java | 82 +- .../UpdateOwnershipTypeResolverTest.java | 95 +- .../post/CreatePostResolverTest.java | 51 +- .../post/DeletePostResolverTest.java | 9 +- .../resolvers/post/ListPostsResolverTest.java | 60 +- .../query/CreateQueryResolverTest.java | 253 +- .../query/DeleteQueryResolverTest.java | 102 +- .../query/ListQueriesResolverTest.java | 119 +- .../query/UpdateQueryResolverTest.java | 288 +- .../role/AcceptRoleResolverTest.java | 24 +- .../role/BatchAssignRoleResolverTest.java | 9 +- .../role/CreateInviteTokenResolverTest.java | 15 +- .../role/GetInviteTokenResolverTest.java | 15 +- .../resolvers/role/ListRolesResolverTest.java | 60 +- .../AggregateAcrossEntitiesResolverTest.java | 487 ++- 
.../AutoCompleteForMultipleResolverTest.java | 243 +- .../search/GetQuickFiltersResolverTest.java | 214 +- .../SearchAcrossEntitiesResolverTest.java | 686 ++-- .../SearchAcrossLineageResolverTest.java | 42 +- .../resolvers/search/SearchResolverTest.java | 299 +- .../resolvers/search/SearchUtilsTest.java | 580 +-- ...dateCorpUserViewsSettingsResolverTest.java | 152 +- .../view/GlobalViewsSettingsResolverTest.java | 47 +- ...UpdateGlobalViewsSettingsResolverTest.java | 100 +- .../step/BatchGetStepStatesResolverTest.java | 51 +- .../BatchUpdateStepStatesResolverTest.java | 12 +- .../resolvers/tag/AddTagsResolverTest.java | 156 +- .../tag/BatchAddTagsResolverTest.java | 255 +- .../tag/BatchRemoveTagsResolverTest.java | 206 +- .../resolvers/tag/CreateTagResolverTest.java | 49 +- .../resolvers/tag/DeleteTagResolverTest.java | 21 +- .../tag/SetTagColorResolverTest.java | 108 +- .../resolvers/term/AddTermsResolverTest.java | 169 +- .../term/BatchAddTermsResolverTest.java | 195 +- .../term/BatchRemoveTermsResolverTest.java | 165 +- .../test/CreateTestResolverTest.java | 46 +- .../test/DeleteTestResolverTest.java | 21 +- .../resolvers/test/ListTestsResolverTest.java | 82 +- .../test/UpdateTestResolverTest.java | 42 +- ...reateNativeUserResetTokenResolverTest.java | 15 +- .../view/CreateViewResolverTest.java | 172 +- .../view/DeleteViewResolverTest.java | 70 +- .../view/ListGlobalViewsResolverTest.java | 80 +- .../view/ListMyViewsResolverTest.java | 156 +- .../view/UpdateViewResolverTest.java | 261 +- .../graphql/resolvers/view/ViewUtilsTest.java | 159 +- .../types/assertion/AssertionTypeTest.java | 102 +- .../types/container/ContainerTypeTest.java | 187 +- .../DataPlatformInstanceTest.java | 356 +- .../dataset/mappers/DatasetMapperTest.java | 293 +- .../mappers/DatasetProfileMapperTest.java | 322 +- .../graphql/types/domain/DomainTypeTest.java | 128 +- .../types/notebook/NotebookTypeTest.java | 255 +- .../graphql/types/query/QueryTypeTest.java | 277 +- .../types/view/DataHubViewTypeTest.java | 303 +- .../datahub/graphql/utils/DateUtilTest.java | 67 +- .../graphql/utils/MutationsUtilsTest.java | 28 +- .../utils/SystemMetadataUtilsTest.java | 105 +- .../com/linkedin/datahub/upgrade/Upgrade.java | 19 +- .../datahub/upgrade/UpgradeCleanupStep.java | 12 +- .../linkedin/datahub/upgrade/UpgradeCli.java | 7 +- .../upgrade/UpgradeCliApplication.java | 23 +- .../datahub/upgrade/UpgradeContext.java | 26 +- .../datahub/upgrade/UpgradeManager.java | 14 +- .../datahub/upgrade/UpgradeReport.java | 18 +- .../datahub/upgrade/UpgradeResult.java | 29 +- .../linkedin/datahub/upgrade/UpgradeStep.java | 24 +- .../datahub/upgrade/UpgradeStepResult.java | 43 +- .../datahub/upgrade/UpgradeUtils.java | 6 +- .../common/steps/ClearGraphServiceStep.java | 1 - .../common/steps/ClearSearchServiceStep.java | 4 +- .../common/steps/GMSDisableWriteModeStep.java | 1 - .../common/steps/GMSEnableWriteModeStep.java | 1 - .../common/steps/GMSQualificationStep.java | 64 +- .../config/BackfillBrowsePathsV2Config.java | 4 +- .../upgrade/config/BuildIndicesConfig.java | 22 +- .../upgrade/config/CleanIndicesConfig.java | 22 +- .../upgrade/config/NoCodeCleanupConfig.java | 15 +- .../upgrade/config/NoCodeUpgradeConfig.java | 7 +- .../config/RemoveUnknownAspectsConfig.java | 1 - .../upgrade/config/RestoreBackupConfig.java | 21 +- .../upgrade/config/RestoreIndicesConfig.java | 11 +- .../upgrade/config/SystemUpdateConfig.java | 31 +- .../upgrade/impl/DefaultUpgradeContext.java | 1 - .../upgrade/impl/DefaultUpgradeManager.java | 59 +- 
.../upgrade/impl/DefaultUpgradeReport.java | 3 +- .../upgrade/impl/DefaultUpgradeResult.java | 1 - .../impl/DefaultUpgradeStepResult.java | 1 - .../upgrade/nocode/CreateAspectTableStep.java | 60 +- .../upgrade/nocode/DataMigrationStep.java | 103 +- .../datahub/upgrade/nocode/NoCodeUpgrade.java | 5 +- .../nocode/RemoveAspectV2TableStep.java | 5 +- .../nocode/UpgradeQualificationStep.java | 15 +- .../nocodecleanup/DeleteAspectTableStep.java | 5 +- .../DeleteLegacyGraphRelationshipsStep.java | 8 +- .../DeleteLegacySearchIndicesStep.java | 4 +- .../nocodecleanup/NoCodeCleanupUpgrade.java | 15 +- .../NoCodeUpgradeQualificationStep.java | 17 +- .../RemoveClientIdAspectStep.java | 8 +- .../RemoveUnknownAspects.java | 1 - .../restorebackup/ClearAspectV2TableStep.java | 5 +- .../upgrade/restorebackup/RestoreBackup.java | 4 +- .../restorebackup/RestoreStorageStep.java | 83 +- .../backupreader/BackupReader.java | 6 +- .../backupreader/BackupReaderArgs.java | 9 +- .../EbeanAspectBackupIterator.java | 20 +- .../backupreader/LocalParquetReader.java | 17 +- .../backupreader/ParquetReaderWrapper.java | 26 +- .../backupreader/ReaderWrapper.java | 17 +- .../restoreindices/RestoreIndices.java | 15 +- .../upgrade/restoreindices/SendMAEStep.java | 99 +- .../datahub/upgrade/system/SystemUpdate.java | 75 +- .../system/elasticsearch/BuildIndices.java | 79 +- .../system/elasticsearch/CleanIndices.java | 56 +- .../steps/BuildIndicesPostStep.java | 35 +- .../steps/BuildIndicesPreStep.java | 62 +- .../elasticsearch/steps/BuildIndicesStep.java | 2 - .../elasticsearch/steps/CleanIndicesStep.java | 79 +- .../steps/DataHubStartupStep.java | 8 +- .../system/elasticsearch/util/IndexUtils.java | 45 +- .../entity/steps/BackfillBrowsePathsV2.java | 1 - .../steps/BackfillBrowsePathsV2Step.java | 90 +- .../DatahubUpgradeNoSchemaRegistryTest.java | 105 +- .../upgrade/UpgradeCliApplicationTest.java | 69 +- ...pgradeCliApplicationTestConfiguration.java | 24 +- docker/build.gradle | 2 +- .../linkedin/metadata/models/AspectSpec.java | 50 +- .../metadata/models/ConfigEntitySpec.java | 12 +- .../metadata/models/DataSchemaFactory.java | 60 +- .../metadata/models/DefaultEntitySpec.java | 5 +- .../metadata/models/DefaultEventSpec.java | 1 - .../linkedin/metadata/models/EntitySpec.java | 5 +- .../metadata/models/EntitySpecBuilder.java | 252 +- .../metadata/models/EntitySpecUtils.java | 18 +- .../linkedin/metadata/models/EventSpec.java | 17 +- .../metadata/models/EventSpecBuilder.java | 25 +- .../linkedin/metadata/models/FieldSpec.java | 12 +- .../metadata/models/FieldSpecUtils.java | 21 +- .../models/ModelValidationException.java | 4 +- .../metadata/models/PartialEntitySpec.java | 20 +- .../models/PropertyOverrideComparator.java | 1 - .../models/RelationshipFieldSpec.java | 9 +- .../RelationshipFieldSpecExtractor.java | 45 +- .../metadata/models/SearchScoreFieldSpec.java | 3 +- .../models/SearchScoreFieldSpecExtractor.java | 25 +- .../metadata/models/SearchableFieldSpec.java | 3 +- .../models/SearchableFieldSpecExtractor.java | 127 +- .../models/TimeseriesFieldCollectionSpec.java | 3 +- .../metadata/models/TimeseriesFieldSpec.java | 3 +- .../models/TimeseriesFieldSpecExtractor.java | 84 +- .../models/annotation/AnnotationUtils.java | 4 +- .../models/annotation/AspectAnnotation.java | 23 +- .../models/annotation/EntityAnnotation.java | 25 +- .../models/annotation/EventAnnotation.java | 23 +- .../annotation/RelationshipAnnotation.java | 49 +- .../annotation/SearchScoreAnnotation.java | 24 +- .../annotation/SearchableAnnotation.java | 62 +- 
.../annotation/TimeseriesFieldAnnotation.java | 21 +- .../TimeseriesFieldCollectionAnnotation.java | 18 +- .../models/extractor/AspectExtractor.java | 28 +- .../models/extractor/FieldExtractor.java | 55 +- .../models/registry/ConfigEntityRegistry.java | 71 +- .../models/registry/EntityRegistry.java | 19 +- .../models/registry/EntityRegistryUtils.java | 12 +- .../models/registry/LineageRegistry.java | 103 +- .../models/registry/MergedEntityRegistry.java | 98 +- .../models/registry/PatchEntityRegistry.java | 141 +- .../registry/PluginEntityRegistryLoader.java | 151 +- .../registry/SnapshotEntityRegistry.java | 34 +- .../models/registry/config/Entity.java | 10 +- .../config/EntityRegistryLoadResult.java | 4 +- .../models/registry/config/Event.java | 2 +- .../template/ArrayMergingTemplate.java | 88 +- .../template/AspectTemplateEngine.java | 40 +- .../template/CompoundKeyTemplate.java | 17 +- .../models/registry/template/Template.java | 28 +- .../template/common/GlobalTagsTemplate.java | 4 +- .../common/GlossaryTermsTemplate.java | 24 +- .../template/common/OwnershipTemplate.java | 18 +- .../dataflow/DataFlowInfoTemplate.java | 1 - .../template/datajob/DataJobInfoTemplate.java | 1 - .../datajob/DataJobInputOutputTemplate.java | 70 +- .../DataProductPropertiesTemplate.java | 4 +- .../dataset/DatasetPropertiesTemplate.java | 1 - .../EditableSchemaMetadataTemplate.java | 86 +- .../dataset/UpstreamLineageTemplate.java | 8 +- .../registry/template/util/TemplateUtil.java | 28 +- .../models/DataSchemaFactoryTest.java | 16 +- .../models/EntitySpecBuilderTest.java | 392 +- .../registry/ConfigEntityRegistryTest.java | 23 +- .../models/registry/LineageRegistryTest.java | 72 +- .../registry/PatchEntityRegistryTest.java | 45 +- .../PluginEntityRegistryLoaderTest.java | 276 +- .../models/registry/TestConstants.java | 4 +- gradle/checkstyle/checkstyle.xml | 198 - gradle/checkstyle/suppressions.xml | 7 - .../ingestion/IngestionScheduler.java | 241 +- .../ingestion/IngestionSchedulerTest.java | 193 +- .../java/com/datahub/util/ModelUtils.java | 235 +- .../java/com/datahub/util/RecordUtils.java | 291 +- .../main/java/com/datahub/util/Statement.java | 1 - .../util/exception/ESQueryException.java | 4 +- .../exception/InvalidSchemaException.java | 4 +- .../exception/ModelConversionException.java | 4 +- .../util/validator/AspectValidator.java | 24 +- .../util/validator/DeltaValidator.java | 23 +- .../util/validator/DocumentValidator.java | 48 +- .../util/validator/EntityValidator.java | 68 +- .../util/validator/RelationshipValidator.java | 119 +- .../util/validator/SnapshotValidator.java | 56 +- .../util/validator/ValidationUtils.java | 140 +- .../java/com/linkedin/metadata/Constants.java | 99 +- .../java/com/linkedin/util/Configuration.java | 40 +- .../com/linkedin/common/uri/Uri.java | 48 +- .../com/linkedin/common/uri/UriCoercer.java | 19 +- .../com/linkedin/common/url/Url.java | 48 +- .../com/linkedin/common/url/UrlCoercer.java | 19 +- .../linkedin/common/urn/AzkabanFlowUrn.java | 31 +- .../linkedin/common/urn/AzkabanJobUrn.java | 30 +- .../com/linkedin/common/urn/ChartUrn.java | 30 +- .../com/linkedin/common/urn/CorpGroupUrn.java | 34 +- .../com/linkedin/common/urn/CorpuserUrn.java | 33 +- .../com/linkedin/common/urn/DashboardUrn.java | 30 +- .../com/linkedin/common/urn/DataFlowUrn.java | 31 +- .../com/linkedin/common/urn/DataJobUrn.java | 31 +- .../linkedin/common/urn/DataPlatformUrn.java | 28 +- .../linkedin/common/urn/DataProcessUrn.java | 37 +- .../linkedin/common/urn/DatasetFieldUrn.java | 62 +- 
.../com/linkedin/common/urn/DatasetUrn.java | 36 +- .../com/linkedin/common/urn/FabricUrn.java | 29 +- .../linkedin/common/urn/GlossaryNodeUrn.java | 94 +- .../linkedin/common/urn/GlossaryTermUrn.java | 35 +- .../com/linkedin/common/urn/MLFeatureUrn.java | 30 +- .../com/linkedin/common/urn/MLModelUrn.java | 40 +- .../com/linkedin/common/urn/NotebookUrn.java | 30 +- .../com/linkedin/common/urn/TagUrn.java | 94 +- .../linkedin/common/urn/TestEntityUrn.java | 101 +- .../com/linkedin/common/urn/TupleKey.java | 72 +- .../com/linkedin/common/urn/Urn.java | 140 +- .../com/linkedin/common/urn/UrnCoercer.java | 29 +- .../com/linkedin/common/urn/UrnUtils.java | 114 +- .../com/linkedin/common/urn/UrnValidator.java | 13 +- .../com/linkedin/common/urn/VersionedUrn.java | 99 +- .../common/urn/VersionedUrnUtils.java | 18 +- .../linkedin/util/VersionedUrnCoercer.java | 1 - .../common/urn/DatasetFieldUrnTest.java | 23 +- .../linkedin/common/util/ModelUtilsTest.java | 38 +- .../linkedin/common/util/RecordUtilsTest.java | 103 +- .../common/util/VersionedUrnUtilsTest.java | 5 +- .../com/datahub/authentication/Actor.java | 23 +- .../com/datahub/authentication/ActorType.java | 8 +- .../authentication/Authentication.java | 15 +- .../authentication/AuthenticationContext.java | 3 +- .../AuthenticationException.java | 1 - .../AuthenticationExpiredException.java | 1 - .../authentication/AuthenticationRequest.java | 11 +- .../authentication/AuthenticatorContext.java | 13 +- .../com/datahub/authorization/AuthUtil.java | 26 +- .../authorization/AuthorizationRequest.java | 19 +- .../authorization/AuthorizationResult.java | 29 +- .../authorization/AuthorizedActors.java | 1 - .../authorization/AuthorizerContext.java | 14 +- .../ConjunctivePrivilegeGroup.java | 6 +- .../DisjunctivePrivilegeGroup.java | 4 +- .../authorization/EntityFieldType.java | 30 +- .../com/datahub/authorization/EntitySpec.java | 22 +- .../authorization/EntitySpecResolver.java | 7 +- .../datahub/authorization/FieldResolver.java | 27 +- .../authorization/ResolvedEntitySpec.java | 23 +- .../main/java/com/datahub/plugins/Plugin.java | 7 +- .../com/datahub/plugins/PluginConstant.java | 3 +- .../auth/authentication/Authenticator.java | 30 +- .../auth/authorization/Authorizer.java | 22 +- .../producer/BaseMetadataEventProducer.java | 28 +- .../dao/producer/KafkaEventProducer.java | 70 +- .../dao/producer/KafkaHealthChecker.java | 181 +- .../producer/KafkaMetadataEventProducer.java | 62 +- .../dao/producer/KafkaProducerCallback.java | 1 - metadata-events/mxe-avro/build.gradle | 2 +- .../main/java/com/linkedin/mxe/Configs.java | 38 +- .../com/linkedin/mxe/TopicConvention.java | 52 +- .../com/linkedin/mxe/TopicConventionImpl.java | 64 +- .../main/java/com/linkedin/mxe/Topics.java | 20 +- .../com/linkedin/metadata/EventUtils.java | 188 +- .../linkedin/metadata/EventUtilsTests.java | 55 +- .../java/datahub-client/build.gradle | 2 - .../main/java/datahub/client/Callback.java | 9 +- .../src/main/java/datahub/client/Emitter.java | 54 +- .../client/MetadataResponseFuture.java | 7 +- .../datahub/client/MetadataWriteResponse.java | 18 +- .../java/datahub/client/file/FileEmitter.java | 117 +- .../client/file/FileEmitterConfig.java | 9 +- .../datahub/client/kafka/AvroSerializer.java | 27 +- .../datahub/client/kafka/KafkaEmitter.java | 67 +- .../client/kafka/KafkaEmitterConfig.java | 29 +- .../patch/AbstractMultiFieldPatchBuilder.java | 25 +- .../client/patch/PatchOperationType.java | 5 +- .../common/CustomPropertiesPatchBuilder.java | 29 +- 
.../patch/common/GlobalTagsPatchBuilder.java | 11 +- .../common/GlossaryTermsPatchBuilder.java | 14 +- .../patch/common/OwnershipPatchBuilder.java | 32 +- .../dataflow/DataFlowInfoPatchBuilder.java | 54 +- .../datajob/DataJobInfoPatchBuilder.java | 51 +- .../DataJobInputOutputPatchBuilder.java | 96 +- .../DatasetPropertiesPatchBuilder.java | 66 +- .../EditableSchemaMetadataPatchBuilder.java | 53 +- .../dataset/UpstreamLineagePatchBuilder.java | 25 +- .../CustomPropertiesPatchBuilderSupport.java | 8 +- .../IntermediatePatchBuilder.java | 13 +- .../java/datahub/client/rest/RestEmitter.java | 316 +- .../client/rest/RestEmitterConfig.java | 46 +- .../java/datahub/event/EventFormatter.java | 52 +- .../event/EventValidationException.java | 1 + .../event/MetadataChangeProposalWrapper.java | 17 +- .../java/datahub/event/StringEscapeUtils.java | 122 +- .../datahub/event/UpsertAspectRequest.java | 17 +- .../datahub/client/file/FileEmitterTest.java | 125 +- .../client/kafka/AvroSerializerTest.java | 17 +- .../client/kafka/KafkaEmitterTest.java | 73 +- .../kafka/containers/KafkaContainer.java | 71 +- .../containers/SchemaRegistryContainer.java | 76 +- .../client/kafka/containers/Utils.java | 25 +- .../kafka/containers/ZookeeperContainer.java | 80 +- .../java/datahub/client/patch/PatchTest.java | 354 +- .../datahub/client/rest/RestEmitterTest.java | 425 ++- .../datahub/event/EventFormatterTest.java | 44 +- .../MetadataChangeProposalWrapperTest.java | 121 +- .../datahub/server/TestDataHubServer.java | 24 +- .../google/protobuf/ExtensionRegistry.java | 543 ++- .../datahub/protobuf/DirectoryWalker.java | 67 +- .../java/datahub/protobuf/Proto2DataHub.java | 685 ++-- .../datahub/protobuf/ProtobufDataset.java | 465 +-- .../java/datahub/protobuf/ProtobufUtils.java | 354 +- .../datahub/protobuf/model/FieldTypeEdge.java | 75 +- .../protobuf/model/ProtobufElement.java | 45 +- .../datahub/protobuf/model/ProtobufEnum.java | 131 +- .../datahub/protobuf/model/ProtobufField.java | 452 +-- .../datahub/protobuf/model/ProtobufGraph.java | 800 ++-- .../protobuf/model/ProtobufMessage.java | 194 +- .../protobuf/model/ProtobufOneOfField.java | 87 +- .../visitors/ProtobufExtensionUtil.java | 307 +- .../visitors/ProtobufModelVisitor.java | 28 +- .../protobuf/visitors/VisitContext.java | 87 +- .../visitors/dataset/DatasetVisitor.java | 204 +- .../visitors/dataset/DeprecationVisitor.java | 77 +- .../visitors/dataset/DescriptionVisitor.java | 9 +- .../visitors/dataset/DomainVisitor.java | 23 +- .../dataset/InstitutionalMemoryVisitor.java | 216 +- .../dataset/KafkaTopicPropertyVisitor.java | 28 +- .../visitors/dataset/OwnershipVisitor.java | 78 +- .../visitors/dataset/PropertyVisitor.java | 61 +- .../dataset/TagAssociationVisitor.java | 20 +- .../dataset/TermAssociationVisitor.java | 17 +- .../field/ProtobufExtensionFieldVisitor.java | 119 +- .../visitors/field/SchemaFieldVisitor.java | 26 +- .../protobuf/visitors/tags/TagVisitor.java | 59 +- .../datahub/protobuf/ProtobufDatasetTest.java | 1113 ++++-- .../datahub/protobuf/ProtobufUtilsTest.java | 72 +- .../java/datahub/protobuf/TestFixtures.java | 115 +- .../protobuf/model/ProtobufEnumTest.java | 125 +- .../protobuf/model/ProtobufFieldTest.java | 398 +- .../protobuf/model/ProtobufGraphTest.java | 161 +- .../protobuf/model/ProtobufMessageTest.java | 318 +- .../model/ProtobufOneOfFieldTest.java | 219 +- .../protobuf/visitors/VisitContextTest.java | 53 +- .../visitors/dataset/DatasetVisitorTest.java | 85 +- .../dataset/DescriptionVisitorTest.java | 27 +- 
.../visitors/dataset/DomainVisitorTest.java | 29 +- .../InstitutionalMemoryVisitorTest.java | 110 +- .../KafkaTopicPropertyVisitorTest.java | 47 +- .../dataset/OwnershipVisitorTest.java | 88 +- .../visitors/dataset/PropertyVisitorTest.java | 100 +- .../dataset/TermAssociationVisitorTest.java | 58 +- .../ProtobufExtensionFieldVisitorTest.java | 445 ++- .../field/SchemaFieldVisitorTest.java | 107 +- .../protobuf/visitors/tag/TagVisitorTest.java | 132 +- .../examples/DataJobLineageAdd.java | 52 +- .../datahubproject/examples/DatasetAdd.java | 113 +- .../examples/DatasetCustomPropertiesAdd.java | 58 +- .../DatasetCustomPropertiesAddRemove.java | 53 +- .../DatasetCustomPropertiesReplace.java | 28 +- .../io/datahubproject/examples/TagCreate.java | 49 +- .../test/spark/lineage/HdfsIn2HdfsOut1.java | 32 +- .../test/spark/lineage/HdfsIn2HdfsOut2.java | 42 +- .../lineage/HdfsIn2HiveCreateInsertTable.java | 65 +- .../spark/lineage/HdfsIn2HiveCreateTable.java | 58 +- .../test/spark/lineage/HiveInHiveOut.java | 80 +- .../spark/lineage/HiveInHiveOut_test1.java | 79 +- .../main/java/test/spark/lineage/Utils.java | 2 +- .../datahub/spark/DatahubSparkListener.java | 346 +- .../java/datahub/spark/DatasetExtractor.java | 451 ++- .../consumer/impl/CoalesceJobsEmitter.java | 70 +- .../spark/consumer/impl/McpEmitter.java | 141 +- .../java/datahub/spark/model/AppEndEvent.java | 11 +- .../datahub/spark/model/AppStartEvent.java | 45 +- .../datahub/spark/model/DatasetLineage.java | 15 +- .../datahub/spark/model/LineageConsumer.java | 3 +- .../datahub/spark/model/LineageEvent.java | 5 +- .../datahub/spark/model/LineageUtils.java | 60 +- .../spark/model/SQLQueryExecEndEvent.java | 13 +- .../spark/model/SQLQueryExecStartEvent.java | 42 +- .../model/dataset/CatalogTableDataset.java | 11 +- .../spark/model/dataset/HdfsPathDataset.java | 27 +- .../spark/model/dataset/JdbcDataset.java | 9 +- .../spark/model/dataset/SparkDataset.java | 8 +- .../datahub/spark/TestCoalesceJobLineage.java | 126 +- .../datahub/spark/TestSparkJobsLineage.java | 293 +- .../aspect/utils/DeprecationUtils.java | 47 +- .../metadata/client/JavaEntityClient.java | 1290 ++++--- .../client/SystemJavaEntityClient.java | 48 +- .../com/linkedin/metadata/dao/AspectKey.java | 14 +- .../linkedin/metadata/dao/BaseReadDAO.java | 56 +- .../linkedin/metadata/entity/AspectDao.java | 280 +- .../metadata/entity/AspectMigrationsDao.java | 12 +- .../metadata/entity/EntityAspect.java | 37 +- .../entity/EntityAspectIdentifier.java | 12 +- .../metadata/entity/EntityServiceImpl.java | 1983 ++++++---- .../linkedin/metadata/entity/EntityUtils.java | 104 +- .../metadata/entity/NewModelUtils.java | 60 +- .../AspectStorageValidationUtil.java | 13 +- .../entity/cassandra/CassandraAspect.java | 22 +- .../entity/cassandra/CassandraAspectDao.java | 430 ++- .../cassandra/CassandraRetentionService.java | 195 +- .../ebean/AspectStorageValidationUtil.java | 18 +- .../metadata/entity/ebean/EbeanAspectDao.java | 445 ++- .../metadata/entity/ebean/EbeanAspectV1.java | 14 +- .../metadata/entity/ebean/EbeanAspectV2.java | 41 +- .../entity/ebean/EbeanRetentionService.java | 214 +- .../ebean/transactions/AspectsBatchImpl.java | 92 +- .../ebean/transactions/PatchBatchItem.java | 304 +- .../ebean/transactions/UpsertBatchItem.java | 282 +- .../EntityRegistryUrnValidator.java | 50 +- .../validation/RecordTemplateValidator.java | 72 +- .../validation/ValidationException.java | 4 +- .../entity/validation/ValidationUtils.java | 60 +- .../metadata/event/EntityEventProducer.java | 23 +- 
.../metadata/event/EventProducer.java | 39 +- .../metadata/graph/JavaGraphClient.java | 77 +- .../metadata/graph/SiblingGraphService.java | 220 +- .../metadata/graph/dgraph/DgraphExecutor.java | 147 +- .../graph/dgraph/DgraphGraphService.java | 1261 +++--- .../metadata/graph/dgraph/DgraphSchema.java | 216 +- .../graph/elastic/ESGraphQueryDAO.java | 365 +- .../graph/elastic/ESGraphWriteDAO.java | 36 +- .../elastic/ElasticSearchGraphService.java | 161 +- .../GraphRelationshipMappingsBuilder.java | 17 +- .../graph/elastic/TimeFilterUtils.java | 90 +- .../graph/neo4j/Neo4jGraphService.java | 470 ++- .../candidatesource/MostPopularSource.java | 73 +- .../candidatesource/RecentlyEditedSource.java | 82 +- .../candidatesource/RecentlyViewedSource.java | 85 +- .../search/EntityLineageResultCacheKey.java | 23 +- .../metadata/search/LineageSearchService.java | 631 +-- .../metadata/search/SearchService.java | 195 +- .../search/cache/CacheableSearcher.java | 40 +- .../cache/CachedEntityLineageResult.java | 7 +- .../search/cache/EntityDocCountCache.java | 22 +- .../client/CachingEntitySearchService.java | 232 +- .../elasticsearch/ElasticSearchService.java | 165 +- .../indexbuilder/ESIndexBuilder.java | 400 +- .../indexbuilder/EntityIndexBuilders.java | 68 +- .../indexbuilder/MappingsBuilder.java | 148 +- .../indexbuilder/ReindexConfig.java | 446 ++- .../indexbuilder/SettingsBuilder.java | 286 +- .../elasticsearch/query/ESBrowseDAO.java | 194 +- .../elasticsearch/query/ESSearchDAO.java | 280 +- .../request/AggregationQueryBuilder.java | 49 +- .../request/AutocompleteRequestHandler.java | 99 +- .../query/request/CustomizedQueryHandler.java | 49 +- .../query/request/PITAwareSearchRequest.java | 1 - .../query/request/SearchAfterWrapper.java | 10 +- .../query/request/SearchFieldConfig.java | 303 +- .../query/request/SearchQueryBuilder.java | 488 ++- .../query/request/SearchRequestHandler.java | 365 +- .../elasticsearch/update/BulkListener.java | 50 +- .../elasticsearch/update/ESBulkProcessor.java | 318 +- .../elasticsearch/update/ESWriteDAO.java | 27 +- .../search/features/FeatureExtractor.java | 9 +- .../metadata/search/features/Features.java | 11 +- .../metadata/search/ranker/SearchRanker.java | 50 +- .../metadata/search/ranker/SimpleRanker.java | 4 +- .../SearchDocumentTransformer.java | 182 +- .../search/utils/BrowsePathUtils.java | 210 +- .../search/utils/BrowsePathV2Utils.java | 115 +- .../metadata/search/utils/ESUtils.java | 336 +- .../metadata/search/utils/FilterUtils.java | 17 +- .../metadata/search/utils/GZIPUtil.java | 6 +- .../metadata/search/utils/SearchUtils.java | 95 +- .../service/UpdateIndicesService.java | 310 +- .../metadata/shared/ElasticSearchIndexed.java | 24 +- .../systemmetadata/ESSystemMetadataDAO.java | 62 +- .../ElasticSearchSystemMetadataService.java | 129 +- .../systemmetadata/SystemMetadataEntry.java | 1 - .../SystemMetadataMappingsBuilder.java | 3 +- .../timeline/MissingEntityAspect.java | 3 +- .../timeline/TimelineServiceImpl.java | 388 +- .../DatasetSchemaFieldChangeEvent.java | 10 +- .../SchemaFieldGlossaryTermChangeEvent.java | 13 +- .../schema/SchemaFieldTagChangeEvent.java | 13 +- .../data/entity/DomainChangeEvent.java | 11 +- .../data/entity/GlossaryTermChangeEvent.java | 11 +- .../data/entity/OwnerChangeEvent.java | 9 +- .../timeline/data/entity/TagChangeEvent.java | 11 +- .../timeline/eventgenerator/Aspect.java | 13 +- ...AssertionRunEventChangeEventGenerator.java | 28 +- .../ChangeEventGeneratorUtils.java | 88 +- ...sInstanceRunEventChangeEventGenerator.java | 37 +- 
...DatasetPropertiesChangeEventGenerator.java | 94 +- .../DeprecationChangeEventGenerator.java | 32 +- ...DatasetPropertiesChangeEventGenerator.java | 69 +- ...bleSchemaMetadataChangeEventGenerator.java | 209 +- .../EntityChangeEventGenerator.java | 36 +- .../EntityChangeEventGeneratorFactory.java | 15 +- .../EntityChangeEventGeneratorRegistry.java | 22 +- .../EntityKeyChangeEventGenerator.java | 7 +- .../GlobalTagsChangeEventGenerator.java | 124 +- .../GlossaryTermInfoChangeEventGenerator.java | 187 +- .../GlossaryTermsChangeEventGenerator.java | 153 +- ...stitutionalMemoryChangeEventGenerator.java | 171 +- .../OwnershipChangeEventGenerator.java | 173 +- .../SchemaMetadataChangeEventGenerator.java | 427 ++- .../SingleDomainChangeEventGenerator.java | 28 +- .../StatusChangeEventGenerator.java | 30 +- .../ElasticSearchTimeseriesAspectService.java | 195 +- .../elastic/indexbuilder/MappingsBuilder.java | 37 +- .../TimeseriesAspectIndexBuilders.java | 44 +- .../elastic/query/ESAggregatedStatsDAO.java | 192 +- .../TimeseriesAspectTransformer.java | 135 +- .../linkedin/metadata/version/GitVersion.java | 1 - .../metadata/AspectGenerationUtils.java | 10 +- .../metadata/AspectIngestionUtils.java | 57 +- .../linkedin/metadata/AspectUtilsTest.java | 24 +- .../linkedin/metadata/CassandraTestUtils.java | 96 +- .../linkedin/metadata/DockerTestUtils.java | 24 +- .../com/linkedin/metadata/EbeanTestUtils.java | 7 +- .../metadata/TestEntitySpecBuilder.java | 4 +- .../com/linkedin/metadata/TestEntityUtil.java | 65 +- .../metadata/client/JavaEntityClientTest.java | 223 +- .../update/BulkListenerTest.java | 52 +- .../update/ESBulkProcessorTest.java | 18 +- .../entity/AspectMigrationsDaoTest.java | 22 +- .../CassandraAspectMigrationsDaoTest.java | 24 +- .../entity/CassandraEntityServiceTest.java | 83 +- .../entity/DeleteEntityServiceTest.java | 70 +- .../entity/DeleteEntityUtilsTest.java | 264 +- .../entity/EbeanAspectMigrationsDaoTest.java | 36 +- .../entity/EbeanEntityServiceTest.java | 287 +- .../metadata/entity/EntityServiceTest.java | 2909 +++++++------- .../metadata/entity/TestEntityRegistry.java | 9 +- .../extractor/AspectExtractorTest.java | 8 +- .../extractor/FieldExtractorTest.java | 82 +- .../com/linkedin/metadata/graph/EdgeTest.java | 77 +- .../metadata/graph/GraphServiceTestBase.java | 2449 ++++++------ .../graph/dgraph/DgraphContainer.java | 419 +- .../graph/dgraph/DgraphGraphServiceTest.java | 1390 +++---- .../graph/neo4j/Neo4jGraphServiceTest.java | 199 +- .../graph/neo4j/Neo4jTestServerBuilder.java | 7 +- .../graph/search/ESGraphQueryDAOTest.java | 165 +- .../search/SearchGraphServiceTestBase.java | 309 +- .../graph/search/TimeFilterUtilsTest.java | 7 +- .../SearchGraphServiceElasticSearchTest.java | 11 +- .../SearchGraphServiceOpenSearchTest.java | 10 +- .../sibling/SiblingGraphServiceTest.java | 523 +-- .../RecommendationsServiceTest.java | 91 +- ...ySearchAggregationCandidateSourceTest.java | 63 +- .../RecommendationUtilsTest.java | 13 +- .../candidatesource/TestSource.java | 8 +- .../LineageSearchResultCacheKeyTest.java | 30 +- .../search/LineageServiceTestBase.java | 1019 +++-- .../search/SearchServiceTestBase.java | 273 +- .../metadata/search/TestEntityTestBase.java | 130 +- .../search/cache/CacheableSearcherTest.java | 126 +- .../elasticsearch/ElasticSearchSuite.java | 32 +- .../GoldenElasticSearchTest.java | 63 +- .../IndexBuilderElasticSearchTest.java | 26 +- .../LineageDataFixtureElasticSearchTest.java | 59 +- .../LineageServiceElasticSearchTest.java | 24 +- 
.../SampleDataFixtureElasticSearchTest.java | 45 +- .../SearchDAOElasticSearchTest.java | 24 +- .../SearchServiceElasticSearchTest.java | 25 +- ...ystemMetadataServiceElasticSearchTest.java | 12 +- .../TestEntityElasticSearchTest.java | 21 +- ...eseriesAspectServiceElasticSearchTest.java | 13 +- .../search/fixtures/GoldenTestBase.java | 297 +- .../fixtures/LineageDataFixtureTestBase.java | 83 +- .../fixtures/SampleDataFixtureTestBase.java | 3277 +++++++++------- .../indexbuilder/IndexBuilderTestBase.java | 421 +- .../indexbuilder/MappingsBuilderTest.java | 67 +- .../opensearch/GoldenOpenSearchTest.java | 59 +- .../IndexBuilderOpenSearchTest.java | 26 +- .../LineageDataFixtureOpenSearchTest.java | 55 +- .../LineageServiceOpenSearchTest.java | 21 +- .../search/opensearch/OpenSearchSuite.java | 31 +- .../SampleDataFixtureOpenSearchTest.java | 44 +- .../opensearch/SearchDAOOpenSearchTest.java | 18 +- .../SearchServiceOpenSearchTest.java | 21 +- .../SystemMetadataServiceOpenSearchTest.java | 10 +- .../opensearch/TestEntityOpenSearchTest.java | 21 +- ...TimeseriesAspectServiceOpenSearchTest.java | 9 +- .../metadata/search/query/BrowseDAOTest.java | 42 +- .../search/query/SearchDAOTestBase.java | 647 ++-- .../request/AggregationQueryBuilderTest.java | 149 +- .../AutocompleteRequestHandlerTest.java | 19 +- .../request/CustomizedQueryHandlerTest.java | 357 +- .../query/request/SearchQueryBuilderTest.java | 383 +- .../request/SearchRequestHandlerTest.java | 576 +-- .../SearchDocumentTransformerTest.java | 65 +- .../search/utils/BrowsePathUtilsTest.java | 65 +- .../search/utils/BrowsePathV2UtilsTest.java | 167 +- .../metadata/search/utils/ESUtilsTest.java | 361 +- .../search/utils/SearchUtilsTest.java | 207 +- .../SystemMetadataServiceTestBase.java | 28 +- .../CassandraTimelineServiceTest.java | 32 +- .../timeline/EbeanTimelineServiceTest.java | 35 +- .../timeline/TimelineServiceTest.java | 100 +- ...chemaMetadataChangeEventGeneratorTest.java | 64 +- .../TimeseriesAspectServiceTestBase.java | 1025 +++-- .../io/datahubproject/test/DataGenerator.java | 687 ++-- .../test/fixtures/search/EntityExporter.java | 87 +- .../test/fixtures/search/FixtureReader.java | 170 +- .../test/fixtures/search/FixtureWriter.java | 111 +- .../test/fixtures/search/LineageExporter.java | 359 +- .../SampleDataFixtureConfiguration.java | 493 +-- .../fixtures/search/SearchFixtureUtils.java | 243 +- .../SearchLineageFixtureConfiguration.java | 378 +- .../test/models/Anonymized.java | 75 +- .../test/models/DatasetAnonymized.java | 66 +- .../test/models/GraphAnonymized.java | 20 +- .../search/ElasticsearchTestContainer.java | 70 +- .../test/search/OpenSearchTestContainer.java | 73 +- .../test/search/SearchTestContainer.java | 11 +- .../test/search/SearchTestUtils.java | 279 +- .../config/SearchCommonTestConfiguration.java | 89 +- .../SearchTestContainerConfiguration.java | 127 +- .../kafka/MaeConsumerApplication.java | 43 +- .../kafka/MaeConsumerApplicationTest.java | 11 +- ...eConsumerApplicationTestConfiguration.java | 27 +- .../kafka/DataHubUsageEventsProcessor.java | 30 +- .../metadata/kafka/MclConsumerConfig.java | 16 +- .../kafka/MetadataChangeLogProcessor.java | 55 +- .../boot/ApplicationStartupListener.java | 12 +- .../boot/MCLBootstrapManagerFactory.java | 8 +- .../DataHubUsageEventsProcessorCondition.java | 11 +- .../kafka/config/EntityHydratorConfig.java | 12 +- .../MetadataChangeLogProcessorCondition.java | 5 +- .../kafka/elasticsearch/ElasticEvent.java | 2 +- .../elasticsearch/ElasticsearchConnector.java | 19 +- 
.../ElasticsearchConnectorFactory.java | 7 +- .../kafka/elasticsearch/JsonElasticEvent.java | 17 +- .../kafka/elasticsearch/MCEElasticEvent.java | 20 +- .../kafka/hook/MetadataChangeLogHook.java | 17 +- .../kafka/hook/UpdateIndicesHook.java | 18 +- .../event/EntityChangeEventGeneratorHook.java | 120 +- .../ingestion/IngestionSchedulerHook.java | 45 +- .../hook/siblings/SiblingAssociationHook.java | 284 +- .../metadata/kafka/hydrator/BaseHydrator.java | 9 +- .../kafka/hydrator/ChartHydrator.java | 16 +- .../kafka/hydrator/CorpUserHydrator.java | 15 +- .../kafka/hydrator/DashboardHydrator.java | 16 +- .../kafka/hydrator/DataFlowHydrator.java | 16 +- .../kafka/hydrator/DataJobHydrator.java | 16 +- .../kafka/hydrator/DatasetHydrator.java | 5 +- .../kafka/hydrator/EntityHydrator.java | 28 +- .../DataHubUsageEventTransformer.java | 48 +- .../kafka/hook/EntityRegistryTestUtil.java | 20 +- .../kafka/hook/GraphIndexUtilsTest.java | 102 +- .../hook/MCLProcessingTestDataGenerator.java | 24 +- .../kafka/hook/UpdateIndicesHookTest.java | 227 +- .../EntityChangeEventGeneratorHookTest.java | 318 +- .../hook/event/PlatformEventMatcher.java | 37 +- .../ingestion/IngestionSchedulerHookTest.java | 34 +- .../siblings/SiblingAssociationHookTest.java | 311 +- .../kafka/hook/spring/MCLSpringTest.java | 37 +- .../spring/MCLSpringTestConfiguration.java | 40 +- .../kafka/MceConsumerApplication.java | 55 +- .../metadata/restli/EbeanServerConfig.java | 99 +- .../metadata/restli/RestliServletConfig.java | 78 +- .../kafka/MceConsumerApplicationTest.java | 39 +- ...eConsumerApplicationTestConfiguration.java | 44 +- .../metadata/kafka/McpConsumerConfig.java | 16 +- .../kafka/MetadataChangeEventsProcessor.java | 52 +- .../MetadataChangeProposalsProcessor.java | 45 +- .../boot/ApplicationStartupListener.java | 14 +- .../boot/MCPBootstrapManagerFactory.java | 13 +- ...adataChangeProposalProcessorCondition.java | 5 +- .../datahub/event/PlatformEventProcessor.java | 22 +- .../datahub/event/hook/PlatformEventHook.java | 13 +- .../model/validation/ModelValidationTask.java | 40 +- metadata-models/build.gradle | 4 - .../linkedin/metadata/ModelValidation.java | 28 +- .../metadata/ModelValidationConstants.java | 1 - .../AuthenticationConfiguration.java | 26 +- .../AuthenticationConstants.java | 23 +- .../AuthenticatorConfiguration.java | 11 +- .../TokenServiceConfiguration.java | 5 +- .../AuthorizationConfiguration.java | 16 +- .../AuthorizerConfiguration.java | 19 +- .../DefaultAuthorizerConfiguration.java | 10 +- .../filter/AuthenticationFilter.java | 217 +- .../authentication/AuthTestConfiguration.java | 22 +- .../AuthenticationFilterTest.java | 36 +- .../authenticator/AuthenticatorChain.java | 73 +- .../DataHubJwtTokenAuthenticator.java | 67 +- .../DataHubSystemAuthenticator.java | 59 +- .../DataHubTokenAuthenticator.java | 56 +- .../HealthStatusAuthenticator.java | 41 +- .../authenticator/NoOpAuthenticator.java | 39 +- .../authentication/group/GroupService.java | 125 +- .../invite/InviteTokenService.java | 76 +- .../authentication/post/PostService.java | 33 +- .../token/DataHubJwtSigningKeyResolver.java | 33 +- .../token/StatefulTokenService.java | 92 +- .../token/StatelessTokenService.java | 71 +- .../authentication/token/TokenClaims.java | 58 +- .../authentication/token/TokenException.java | 4 +- .../token/TokenExpiredException.java | 4 +- .../authentication/token/TokenType.java | 12 +- .../authentication/token/TokenVersion.java | 45 +- .../user/NativeUserService.java | 83 +- .../authorization/AuthorizerChain.java | 51 +- 
.../authorization/DataHubAuthorizer.java | 144 +- .../DefaultEntitySpecResolver.java | 12 +- .../datahub/authorization/FilterUtils.java | 41 +- .../datahub/authorization/PolicyEngine.java | 149 +- .../datahub/authorization/PolicyFetcher.java | 123 +- ...PlatformInstanceFieldResolverProvider.java | 31 +- .../DomainFieldResolverProvider.java | 74 +- .../EntityFieldResolverProvider.java | 12 +- .../EntityTypeFieldResolverProvider.java | 7 +- .../EntityUrnFieldResolverProvider.java | 7 +- .../GroupMembershipFieldResolverProvider.java | 45 +- .../OwnerFieldResolverProvider.java | 20 +- .../authorization/role/RoleService.java | 49 +- .../datahub/telemetry/TrackingService.java | 94 +- .../authenticator/AuthenticatorChainTest.java | 28 +- .../DataHubJwtTokenAuthenticatorTest.java | 24 +- .../DataHubSystemAuthenticatorTest.java | 110 +- .../DataHubTokenAuthenticatorTest.java | 291 +- .../group/GroupServiceTest.java | 186 +- .../invite/InviteTokenServiceTest.java | 125 +- .../authentication/post/PostServiceTest.java | 25 +- .../DataHubJwtSigningKeyResolverTest.java | 35 +- .../token/StatefulTokenServiceTest.java | 97 +- .../token/StatelessTokenServiceTest.java | 78 +- .../user/NativeUserServiceTest.java | 163 +- .../authorization/DataHubAuthorizerTest.java | 496 ++- .../authorization/PolicyEngineTest.java | 593 ++- .../authorization/RoleServiceTest.java | 44 +- ...formInstanceFieldResolverProviderTest.java | 152 +- ...upMembershipFieldResolverProviderTest.java | 210 +- .../telemetry/TrackingServiceTest.java | 70 +- .../authentication/AuthServiceController.java | 300 +- .../metadata/config/AssetsConfiguration.java | 6 +- .../config/AuthPluginConfiguration.java | 7 +- .../metadata/config/DataHubConfiguration.java | 10 +- .../metadata/config/EntityProfileConfig.java | 4 +- .../EntityRegistryPluginConfiguration.java | 3 +- .../config/IngestionConfiguration.java | 14 +- .../metadata/config/PluginConfiguration.java | 24 +- .../metadata/config/PreProcessHooks.java | 1 - .../metadata/config/QueriesTabConfig.java | 5 +- .../config/RetentionPluginConfiguration.java | 3 +- .../config/SearchResultVisualConfig.java | 9 +- .../metadata/config/TestsConfiguration.java | 10 +- .../metadata/config/ViewsConfiguration.java | 10 +- .../metadata/config/VisualConfiguration.java | 21 +- .../config/cache/CacheConfiguration.java | 1 - .../EntityDocCountCacheConfiguration.java | 1 - .../cache/HomepageCacheConfiguration.java | 1 - .../cache/PrimaryCacheConfiguration.java | 1 - .../cache/SearchCacheConfiguration.java | 1 - .../SearchLineageCacheConfiguration.java | 1 - .../cache/client/ClientCacheConfig.java | 15 +- .../client/ClientCacheConfiguration.java | 4 +- .../cache/client/EntityClientCacheConfig.java | 17 +- .../cache/client/UsageClientCacheConfig.java | 10 +- .../config/kafka/ConsumerConfiguration.java | 1 - .../config/kafka/ProducerConfiguration.java | 1 - .../kafka/SchemaRegistryConfiguration.java | 1 - .../search/BuildIndicesConfiguration.java | 1 - .../config/search/CustomConfiguration.java | 11 +- .../search/ElasticSearchConfiguration.java | 1 - .../search/ExactMatchConfiguration.java | 1 - .../search/GraphQueryConfiguration.java | 2 +- .../config/search/PartialConfiguration.java | 1 - .../config/search/SearchConfiguration.java | 1 - .../config/search/WordGramConfiguration.java | 1 - .../search/custom/BoolQueryConfiguration.java | 18 +- .../custom/CustomSearchConfiguration.java | 7 +- .../search/custom/QueryConfiguration.java | 24 +- .../telemetry/TelemetryConfiguration.java | 33 +- 
.../spring/YamlPropertySourceFactory.java | 12 +- .../BatchWriteOperationsOptions.java | 1 - .../factory/auth/AuthorizerChainFactory.java | 93 +- .../AwsRequestSigningApacheInterceptor.java | 69 +- .../auth/DataHubAuthorizerFactory.java | 20 +- .../auth/DataHubTokenServiceFactory.java | 17 +- .../gms/factory/auth/GroupServiceFactory.java | 7 +- .../auth/InviteTokenServiceFactory.java | 3 +- .../auth/NativeUserServiceFactory.java | 15 +- .../gms/factory/auth/PostServiceFactory.java | 3 +- .../gms/factory/auth/RoleServiceFactory.java | 7 +- .../auth/SystemAuthenticationFactory.java | 10 +- .../gms/factory/common/CacheConfig.java | 22 +- .../common/DatasetUrnNameCasingFactory.java | 3 +- .../ElasticSearchGraphServiceFactory.java | 25 +- ...ticSearchSystemMetadataServiceFactory.java | 13 +- .../ElasticsearchSSLContextFactory.java | 154 +- .../gms/factory/common/GitVersionFactory.java | 1 - .../factory/common/GraphClientFactory.java | 1 - .../factory/common/GraphServiceFactory.java | 8 +- .../common/IndexConventionFactory.java | 1 - .../LocalCassandraSessionConfigFactory.java | 24 +- .../common/LocalEbeanServerConfigFactory.java | 4 +- .../factory/common/Neo4jDriverFactory.java | 8 +- .../common/Neo4jGraphServiceFactory.java | 6 +- .../common/RestHighLevelClientFactory.java | 103 +- .../common/SiblingGraphServiceFactory.java | 1 - .../common/SystemMetadataServiceFactory.java | 3 +- .../common/TopicConventionFactory.java | 24 +- .../factory/config/ConfigurationProvider.java | 72 +- .../config/HealthCheckConfiguration.java | 1 - .../DataProductServiceFactory.java | 5 +- .../entity/CassandraSessionFactory.java | 33 +- .../factory/entity/EbeanServerFactory.java | 4 +- .../entity/EntityAspectDaoFactory.java | 3 +- .../EntityAspectMigrationsDaoFactory.java | 3 +- .../factory/entity/EntityServiceFactory.java | 43 +- .../entity/JavaEntityClientFactory.java | 18 +- .../entity/RestliEntityClientFactory.java | 25 +- .../entity/RetentionServiceFactory.java | 14 +- .../indices/UpdateIndicesServiceFactory.java | 29 +- .../ConfigEntityRegistryFactory.java | 3 +- .../entityregistry/EntityRegistryFactory.java | 3 +- .../PluginEntityRegistryFactory.java | 6 +- .../factory/graphql/GraphQLEngineFactory.java | 27 +- .../ingestion/IngestionSchedulerFactory.java | 15 +- .../DataHubKafkaEventProducerFactory.java | 11 +- .../kafka/DataHubKafkaProducerFactory.java | 58 +- .../kafka/KafkaEventConsumerFactory.java | 201 +- .../kafka/SimpleKafkaConsumerFactory.java | 22 +- .../kafka/ThreadPoolContainerCustomizer.java | 7 +- .../AwsGlueSchemaRegistryFactory.java | 15 +- .../DUHESchemaRegistryFactory.java | 42 +- .../InternalSchemaRegistryFactory.java | 23 +- .../KafkaSchemaRegistryFactory.java | 16 +- .../schemaregistry/SchemaRegistryConfig.java | 1 - .../lineage/LineageServiceFactory.java | 8 +- .../OwnershipTypeServiceFactory.java | 5 +- .../factory/query/QueryServiceFactory.java | 3 +- .../RecommendationServiceFactory.java | 28 +- .../MostPopularCandidateSourceFactory.java | 7 +- .../RecentlyEditedCandidateSourceFactory.java | 7 +- ...ecentlySearchedCandidateSourceFactory.java | 1 - .../RecentlyViewedCandidateSourceFactory.java | 7 +- .../TopPlatformsCandidateSourceFactory.java | 1 - .../TopTagsCandidateSourceFactory.java | 1 - .../TopTermsCandidateSourceFactory.java | 1 - .../BaseElasticSearchComponentsFactory.java | 20 +- .../CachingEntitySearchServiceFactory.java | 13 +- .../ElasticSearchBulkProcessorFactory.java | 20 +- .../ElasticSearchIndexBuilderFactory.java | 56 +- .../search/ElasticSearchServiceFactory.java | 60 
+- .../search/EntityIndexBuildersFactory.java | 40 +- .../search/EntitySearchServiceFactory.java | 1 - .../search/LineageSearchServiceFactory.java | 19 +- .../SearchDocumentTransformerFactory.java | 3 +- .../factory/search/SearchServiceFactory.java | 9 +- .../search/SettingsBuilderFactory.java | 3 +- .../search/ranker/SearchRankerFactory.java | 1 - .../search/views/ViewServiceFactory.java | 3 +- .../factory/secret/SecretServiceFactory.java | 1 - .../settings/SettingsServiceFactory.java | 3 +- .../gms/factory/telemetry/DailyReport.java | 67 +- .../factory/telemetry/MixpanelApiFactory.java | 1 - .../MixpanelMessageBuilderFactory.java | 2 - .../telemetry/ScheduledAnalyticsFactory.java | 19 +- .../gms/factory/telemetry/TelemetryUtils.java | 55 +- .../telemetry/TrackingServiceFactory.java | 14 +- ...tyChangeEventGeneratorRegistryFactory.java | 34 +- .../timeline/TimelineServiceFactory.java | 9 +- ...cSearchTimeseriesAspectServiceFactory.java | 16 +- .../TimeseriesAspectServiceFactory.java | 1 - .../gms/factory/usage/UsageClientFactory.java | 24 +- .../metadata/boot/BootstrapManager.java | 42 +- .../linkedin/metadata/boot/BootstrapStep.java | 31 +- .../boot/OnBootApplicationListener.java | 67 +- .../linkedin/metadata/boot/UpgradeStep.java | 43 +- .../dependencies/BootstrapDependency.java | 5 +- .../factories/BootstrapManagerFactory.java | 73 +- .../IngestRetentionPoliciesStepFactory.java | 12 +- .../kafka/DataHubUpgradeKafkaListener.java | 70 +- .../boot/kafka/MockDUHEDeserializer.java | 83 +- .../boot/kafka/MockDUHESerializer.java | 83 +- .../boot/steps/BackfillBrowsePathsV2Step.java | 72 +- .../boot/steps/IndexDataPlatformsStep.java | 72 +- .../IngestDataPlatformInstancesStep.java | 31 +- .../boot/steps/IngestDataPlatformsStep.java | 83 +- .../IngestDefaultGlobalSettingsStep.java | 76 +- .../boot/steps/IngestOwnershipTypesStep.java | 45 +- .../boot/steps/IngestPoliciesStep.java | 82 +- .../steps/IngestRetentionPoliciesStep.java | 50 +- .../metadata/boot/steps/IngestRolesStep.java | 68 +- .../boot/steps/IngestRootUserStep.java | 50 +- .../boot/steps/RemoveClientIdAspectStep.java | 5 +- .../steps/RestoreColumnLineageIndices.java | 158 +- .../boot/steps/RestoreDbtSiblingsIndices.java | 116 +- .../boot/steps/RestoreGlossaryIndices.java | 171 +- .../steps/UpgradeDefaultBrowsePathsStep.java | 65 +- .../boot/steps/WaitForSystemUpdateStep.java | 3 +- .../restli/server/RAPServletFactory.java | 85 +- .../restli/server/RestliHandlerServlet.java | 30 +- .../kafka/SimpleKafkaConsumerFactoryTest.java | 30 +- .../gms/factory/search/CacheTest.java | 236 +- ...ElasticSearchBulkProcessorFactoryTest.java | 19 +- ...ticSearchIndexBuilderFactoryEmptyTest.java | 34 +- ...earchIndexBuilderFactoryOverridesTest.java | 41 +- .../ElasticSearchIndexBuilderFactoryTest.java | 22 +- .../secret/SecretServiceFactoryTest.java | 33 +- .../steps/BackfillBrowsePathsV2StepTest.java | 208 +- .../IngestDataPlatformInstancesStepTest.java | 118 +- .../IngestDefaultGlobalSettingsStepTest.java | 92 +- .../RestoreColumnLineageIndicesTest.java | 415 +- .../steps/RestoreGlossaryIndicesTest.java | 447 ++- .../UpgradeDefaultBrowsePathsStepTest.java | 398 +- .../telemetry/TelemetryUtilsTest.java | 10 +- .../datahub/graphql/GraphQLController.java | 158 +- .../datahub/graphql/GraphiQLController.java | 7 +- .../datahub/graphql/SpringQueryContext.java | 6 +- .../openapi-analytics-servlet/build.gradle | 2 - .../config/OpenapiAnalyticsConfig.java | 8 +- .../delegates/DatahubUsageEventsImpl.java | 62 +- .../OpenAPIAnalyticsTestConfiguration.java | 
61 +- .../delegates/DatahubUsageEventsImplTest.java | 39 +- .../openapi-entity-servlet/build.gradle | 4 +- .../datahubproject/CustomSpringCodegen.java | 54 +- .../delegates/EntityApiDelegateImpl.java | 1236 +++--- .../openapi/util/OpenApiEntitiesUtil.java | 535 +-- .../OpenAPIEntityTestConfiguration.java | 195 +- .../delegates/EntityApiDelegateImplTest.java | 439 ++- .../openapi/util/OpenApiEntitiesUtilTest.java | 67 +- .../GlobalControllerExceptionHandler.java | 1 - .../openapi/config/SpringWebConfig.java | 24 +- .../StringToChangeCategoryConverter.java | 27 +- .../openapi/dto/RollbackRunResultDto.java | 1 - .../openapi/dto/UpsertAspectRequest.java | 17 +- .../openapi/dto/UrnResponseMap.java | 1 - .../openapi/entities/EntitiesController.java | 226 +- .../openapi/health/HealthCheckController.java | 51 +- .../openapi/health/HealthController.java | 1 - .../elastic/OperationsController.java | 43 +- .../entities/PlatformEntitiesController.java | 38 +- .../RelationshipsController.java | 148 +- .../openapi/timeline/TimelineController.java | 25 +- .../openapi/util/ElasticsearchUtils.java | 3 +- .../openapi/util/MappingUtil.java | 387 +- .../openapi/util/ReflectionCache.java | 224 +- .../java/entities/EntitiesControllerTest.java | 234 +- .../src/test/java/mock/MockEntityService.java | 125 +- .../plugins/auth/configuration/AuthParam.java | 21 +- .../auth/configuration/AuthPluginConfig.java | 15 +- .../AuthenticatorPluginConfig.java | 12 +- .../configuration/AuthorizerPluginConfig.java | 12 +- .../provider/AuthPluginConfigProvider.java | 4 +- .../AuthenticatorPluginConfigProvider.java | 21 +- .../AuthorizerPluginConfigProvider.java | 21 +- .../plugins/common/ConfigValidationUtils.java | 33 +- .../datahub/plugins/common/PluginConfig.java | 37 +- .../plugins/common/PluginConfigProvider.java | 1 - .../common/PluginPermissionManager.java | 9 +- .../datahub/plugins/common/PluginType.java | 12 +- .../datahub/plugins/common/SecurityMode.java | 29 +- .../datahub/plugins/common/YamlMapper.java | 9 +- .../datahub/plugins/configuration/Config.java | 20 +- .../plugins/configuration/ConfigProvider.java | 13 +- .../plugins/configuration/PluginConfig.java | 34 +- .../plugins/factory/PluginConfigFactory.java | 7 +- .../plugins/loader/IsolatedClassLoader.java | 63 +- .../datahub/plugins/loader/JarExtractor.java | 7 +- .../loader/PluginPermissionManagerImpl.java | 5 +- .../com/datahub/plugins/auth/TestConfig.java | 9 +- .../plugins/auth/TestConfigProvider.java | 65 +- .../auth/TestConfigValidationUtils.java | 1 - .../plugins/auth/TestIsolatedClassLoader.java | 138 +- .../plugins/auth/TestPluginConfigFactory.java | 21 +- .../auth/TestPluginPermissionManager.java | 43 +- .../plugins/test/TestAuthenticator.java | 17 +- .../datahub/plugins/test/TestAuthorizer.java | 12 +- .../test/TestLenientModeAuthenticator.java | 9 +- ...linkedin.analytics.analytics.restspec.json | 2 + .../com.linkedin.entity.aspects.restspec.json | 6 + ...com.linkedin.entity.entities.restspec.json | 26 +- ...m.linkedin.entity.entitiesV2.restspec.json | 3 + ...n.entity.entitiesVersionedV2.restspec.json | 2 + .../com.linkedin.entity.runs.restspec.json | 4 + ...nkedin.lineage.relationships.restspec.json | 4 + ...nkedin.operations.operations.restspec.json | 5 + ...m.linkedin.platform.platform.restspec.json | 2 + ...om.linkedin.usage.usageStats.restspec.json | 4 + ...linkedin.analytics.analytics.snapshot.json | 2 + .../com.linkedin.entity.aspects.snapshot.json | 6 + ...com.linkedin.entity.entities.snapshot.json | 26 +- 
...m.linkedin.entity.entitiesV2.snapshot.json | 3 + ...n.entity.entitiesVersionedV2.snapshot.json | 2 + .../com.linkedin.entity.runs.snapshot.json | 4 + ...nkedin.lineage.relationships.snapshot.json | 4 + ...nkedin.operations.operations.snapshot.json | 5 + ...m.linkedin.platform.platform.snapshot.json | 2 + ...om.linkedin.usage.usageStats.snapshot.json | 4 + .../main/java/com/linkedin/BatchGetUtils.java | 77 +- .../linkedin/common/client/BaseClient.java | 37 +- .../linkedin/common/client/ClientCache.java | 208 +- .../linkedin/entity/client/EntityClient.java | 378 +- .../entity/client/EntityClientCache.java | 235 +- .../entity/client/RestliEntityClient.java | 583 ++- .../entity/client/SystemEntityClient.java | 142 +- .../client/SystemRestliEntityClient.java | 28 +- .../java/com/linkedin/usage/UsageClient.java | 98 +- .../com/linkedin/usage/UsageClientCache.java | 92 +- .../common/client/BaseClientTest.java | 100 +- .../metadata/filter/RestliLoggingFilter.java | 8 +- .../resources/analytics/Analytics.java | 45 +- .../resources/entity/AspectResource.java | 293 +- .../entity/BatchIngestionRunResource.java | 492 ++- .../resources/entity/EntityResource.java | 837 ++-- .../resources/entity/EntityV2Resource.java | 119 +- .../entity/EntityVersionedV2Resource.java | 86 +- .../resources/entity/ResourceUtils.java | 9 +- .../resources/lineage/Relationships.java | 153 +- .../operations/OperationsResource.java | 206 +- .../metadata/resources/operations/Utils.java | 36 +- .../resources/platform/PlatformResource.java | 32 +- .../resources/restli/RestliConstants.java | 2 +- .../resources/restli/RestliUtils.java | 49 +- .../metadata/resources/usage/UsageStats.java | 313 +- .../resources/entity/AspectResourceTest.java | 73 +- .../operations/OperationsResourceTest.java | 124 +- .../mock/MockTimeseriesAspectService.java | 53 +- .../schema-registry-api/build.gradle | 2 - .../generated/CompatibilityCheckResponse.java | 45 +- .../openapi/generated/Config.java | 55 +- .../generated/ConfigUpdateRequest.java | 55 +- .../openapi/generated/ErrorMessage.java | 43 +- .../openapi/generated/Mode.java | 49 +- .../openapi/generated/ModeUpdateRequest.java | 49 +- .../generated/RegisterSchemaRequest.java | 71 +- .../generated/RegisterSchemaResponse.java | 37 +- .../openapi/generated/Schema.java | 75 +- .../openapi/generated/SchemaReference.java | 63 +- .../SchemaRegistryServerVersion.java | 43 +- .../openapi/generated/SchemaString.java | 64 +- .../openapi/generated/ServerClusterId.java | 43 +- .../openapi/generated/SubjectVersion.java | 43 +- .../java/io/swagger/api/CompatibilityApi.java | 311 +- .../api/CompatibilityApiController.java | 40 +- .../main/java/io/swagger/api/ConfigApi.java | 614 ++- .../io/swagger/api/ConfigApiController.java | 40 +- .../main/java/io/swagger/api/ContextsApi.java | 131 +- .../io/swagger/api/ContextsApiController.java | 40 +- .../main/java/io/swagger/api/DefaultApi.java | 162 +- .../io/swagger/api/DefaultApiController.java | 40 +- .../src/main/java/io/swagger/api/ModeApi.java | 529 ++- .../io/swagger/api/ModeApiController.java | 40 +- .../main/java/io/swagger/api/SchemasApi.java | 653 +++- .../io/swagger/api/SchemasApiController.java | 40 +- .../main/java/io/swagger/api/SubjectsApi.java | 1051 +++-- .../io/swagger/api/SubjectsApiController.java | 40 +- .../src/main/java/io/swagger/api/V1Api.java | 114 +- .../java/io/swagger/api/V1ApiController.java | 40 +- .../registry/SchemaRegistryController.java | 126 +- .../config/SpringWebSchemaRegistryConfig.java | 8 +- 
.../openapi/test/OpenAPISpringTestServer.java | 1 - .../OpenAPISpringTestServerConfiguration.java | 10 +- .../test/SchemaRegistryControllerTest.java | 82 +- ...maRegistryControllerTestConfiguration.java | 5 +- .../DataHubUsageEventConstants.java | 3 +- .../datahubusage/DataHubUsageEventType.java | 1 - .../linkedin/metadata/entity/AspectUtils.java | 86 +- .../metadata/entity/DeleteEntityService.java | 787 ++-- .../metadata/entity/DeleteEntityUtils.java | 162 +- .../metadata/entity/EntityService.java | 188 +- .../metadata/entity/IngestProposalResult.java | 1 - .../metadata/entity/IngestResult.java | 14 +- .../linkedin/metadata/entity/ListResult.java | 3 +- .../metadata/entity/RetentionService.java | 136 +- .../metadata/entity/RollbackResult.java | 2 - .../metadata/entity/RollbackRunResult.java | 1 - .../metadata/entity/UpdateAspectResult.java | 4 +- .../restoreindices/RestoreIndicesArgs.java | 86 +- .../restoreindices/RestoreIndicesResult.java | 18 +- .../retention/BulkApplyRetentionArgs.java | 10 +- .../retention/BulkApplyRetentionResult.java | 18 +- .../transactions/AbstractBatchItem.java | 124 +- .../entity/transactions/AspectsBatch.java | 26 +- .../com/linkedin/metadata/graph/Edge.java | 28 +- .../linkedin/metadata/graph/GraphClient.java | 18 +- .../linkedin/metadata/graph/GraphFilters.java | 1 - .../metadata/graph/GraphIndexUtils.java | 94 +- .../linkedin/metadata/graph/GraphService.java | 296 +- .../metadata/graph/RelatedEntity.java | 8 +- .../RecommendationsService.java | 46 +- .../DomainsCandidateSource.java | 5 +- .../EntitySearchAggregationSource.java | 87 +- .../RecentlySearchedSource.java | 51 +- .../candidatesource/RecommendationSource.java | 33 +- .../candidatesource/RecommendationUtils.java | 8 +- .../candidatesource/TopPlatformsSource.java | 39 +- .../candidatesource/TopTagsSource.java | 4 +- .../candidatesource/TopTermsSource.java | 4 +- .../ranker/RecommendationModuleRanker.java | 8 +- .../ranker/SimpleRecommendationRanker.java | 8 +- .../registry/SchemaRegistryService.java | 3 +- .../registry/SchemaRegistryServiceImpl.java | 55 +- .../metadata/resource/ResourceReference.java | 13 +- .../metadata/resource/SubResourceType.java | 4 +- .../metadata/search/EntitySearchService.java | 153 +- .../metadata/search/utils/QueryUtils.java | 75 +- .../metadata/secret/SecretService.java | 7 +- .../metadata/service/BaseService.java | 81 +- .../metadata/service/DataProductService.java | 266 +- .../metadata/service/DomainService.java | 169 +- .../metadata/service/GlossaryTermService.java | 260 +- .../metadata/service/LineageService.java | 474 ++- .../metadata/service/OwnerService.java | 106 +- .../service/OwnershipTypeService.java | 126 +- .../metadata/service/QueryService.java | 124 +- .../metadata/service/SettingsService.java | 95 +- .../linkedin/metadata/service/TagService.java | 241 +- .../metadata/service/ViewService.java | 108 +- .../metadata/shared/ValidationUtils.java | 152 +- .../systemmetadata/SystemMetadataService.java | 10 +- .../metadata/timeline/SemanticVersion.java | 16 +- .../metadata/timeline/TimelineService.java | 7 +- .../timeline/data/ChangeCategory.java | 11 +- .../metadata/timeline/data/ChangeEvent.java | 50 +- .../timeline/data/ChangeOperation.java | 36 +- .../timeline/data/ChangeTransaction.java | 6 +- .../timeline/data/PatchOperation.java | 1 - .../timeline/data/SemanticChangeType.java | 6 +- .../timeline/data/SemanticDifference.java | 3 +- .../timeseries/TimeseriesAspectService.java | 101 +- .../metadata/service/DomainServiceTest.java | 351 +- 
 .../service/GlossaryTermServiceTest.java | 579 +-- .../metadata/service/LineageServiceTest.java | 376 +- .../metadata/service/OwnerServiceTest.java | 255 +- .../service/OwnershipTypeServiceTest.java | 423 +-- .../metadata/service/QueryServiceTest.java | 660 ++-- .../metadata/service/SettingsServiceTest.java | 345 +- .../metadata/service/TagServiceTest.java | 497 +-- .../metadata/service/ViewServiceTest.java | 664 ++-- .../java/com/datahub/gms/servlet/Config.java | 92 +- .../gms/servlet/ConfigSearchExport.java | 198 +- .../java/com/datahub/gms/util/CSVWriter.java | 66 +- .../authorization/PoliciesConfig.java | 1005 ++--- .../restli/DefaultRestliClientFactory.java | 56 +- .../linkedin/metadata/restli/RestliUtil.java | 35 +- .../metadata/utils/AuditStampUtils.java | 20 +- .../linkedin/metadata/utils/BrowseUtil.java | 28 +- .../metadata/utils/ConcurrencyUtils.java | 75 +- .../utils/DataPlatformInstanceUtils.java | 22 +- .../metadata/utils/EntityKeyUtils.java | 103 +- .../metadata/utils/GenericRecordUtils.java | 31 +- .../metadata/utils/IngestionUtils.java | 22 +- .../linkedin/metadata/utils/PegasusUtils.java | 71 +- .../linkedin/metadata/utils/SearchUtil.java | 107 +- .../metadata/utils/SystemMetadataUtils.java | 13 +- .../utils/elasticsearch/IndexConvention.java | 11 +- .../elasticsearch/IndexConventionImpl.java | 31 +- .../exception/UnsupportedGraphEntities.java | 4 +- .../metadata/utils/log/LogMessageFilter.java | 11 +- .../metadata/utils/metrics/MetricUtils.java | 10 +- .../metadata/utils/EntityKeyUtilsTest.java | 27 +- .../metadata/utils/IngestionUtilsTest.java | 14 +- .../metadata/utils/SearchUtilTest.java | 42 +- .../IndexConventionImplTest.java | 48 +- .../src/main/java/mock/MockAspectSpec.java | 18 +- .../main/java/mock/MockEntityRegistry.java | 2 - .../src/main/java/mock/MockEntitySpec.java | 28 +- .../java/com/datahub/utils/TestUtils.java | 5 +- .../com/datahub/test/testing/urn/BarUrn.java | 7 +- .../test/testing/urn/BarUrnCoercer.java | 4 +- .../test/testing/urn/BaseUrnCoercer.java | 4 +- .../com/datahub/test/testing/urn/BazUrn.java | 7 +- .../test/testing/urn/BazUrnCoercer.java | 4 +- .../com/datahub/test/testing/urn/FooUrn.java | 7 +- .../test/testing/urn/FooUrnCoercer.java | 4 +- .../datahub/test/testing/urn/PizzaUrn.java | 7 +- .../test/testing/urn/PizzaUrnCoercer.java | 4 +- .../testing/urn/SingleAspectEntityUrn.java | 1 -
 1711 files changed, 91903 insertions(+), 71109 deletions(-)
 delete mode 100644 gradle/checkstyle/checkstyle.xml
 delete mode 100644 gradle/checkstyle/suppressions.xml

diff --git a/build.gradle b/build.gradle
index c1278a6dab1a0..f5e5403e822e7 100644
--- a/build.gradle
+++ b/build.gradle
@@ -21,6 +21,7 @@ buildscript {
   ext.kafkaVersion = '2.3.0'
   ext.hazelcastVersion = '5.3.6'
   ext.ebeanVersion = '12.16.1'
+  ext.googleJavaFormatVersion = '1.18.1'
 
   ext.docker_registry = 'linkedin'
 
@@ -42,6 +43,7 @@ plugins {
   id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2'
   id 'com.github.johnrengelman.shadow' version '6.1.0'
   id 'com.palantir.docker' version '0.35.0' apply false
+  id "com.diffplug.spotless" version "6.23.3"
   // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/
   // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0"
 }
@@ -225,13 +227,11 @@ project.ext.externalDependency = [
     'common': 'commons-io:commons-io:2.7',
     'jline':'jline:jline:1.4.1',
     'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0'
-
 ]
 
 allprojects {
   apply plugin: 'idea'
   apply plugin: 'eclipse'
-  apply plugin: 'checkstyle'
   // apply plugin: 'org.gradlex.java-ecosystem-capabilities'
 }
 
@@ -253,6 +253,7 @@ subprojects {
 
   apply plugin: 'maven-publish'
   apply plugin: 'com.gorylenko.gradle-git-properties'
+  apply plugin: 'com.diffplug.spotless'
 
   gitProperties {
     keys = ['git.commit.id','git.commit.id.describe','git.commit.time']
@@ -266,6 +267,7 @@ subprojects {
   plugins.withType(JavaPlugin) {
     dependencies {
       constraints {
+        implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion")
         implementation('io.netty:netty-all:4.1.100.Final')
         implementation('org.apache.commons:commons-compress:1.21')
         implementation('org.apache.velocity:velocity-engine-core:2.3')
@@ -274,13 +276,32 @@ subprojects {
         implementation("com.fasterxml.jackson.core:jackson-dataformat-cbor:$jacksonVersion")
       }
     }
-
-    checkstyle {
-      configDirectory = file("${project.rootDir}/gradle/checkstyle")
-      sourceSets = [ getProject().sourceSets.main, getProject().sourceSets.test ]
-      toolVersion = "8.0"
-      maxWarnings = 0
-      ignoreFailures = false
+    spotless {
+      java {
+        googleJavaFormat()
+        target project.fileTree(project.projectDir) {
+          include '**/*.java'
+          exclude 'build/**/*.java'
+          exclude '**/generated/**/*.*'
+          exclude '**/mainGeneratedDataTemplate/**/*.*'
+          exclude '**/mainGeneratedRest/**/*.*'
+        }
+      }
+    }
+    afterEvaluate {
+      def spotlessJavaTask = tasks.findByName('spotlessJava')
+      def processTask = tasks.findByName('processResources')
+      if (processTask != null) {
+        spotlessJavaTask.dependsOn processTask
+      }
+      def compileJavaTask = tasks.findByName('compileJava')
+      if (compileJavaTask != null) {
+        spotlessJavaTask.dependsOn compileJavaTask
+      }
+      // TODO - Do not run this in CI. How?
+      // tasks.withType(JavaCompile) {
+      //   finalizedBy(tasks.findByName('spotlessApply'))
+      // }
     }
   }
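With the configuration above, formatting is enforced by google-java-format via Spotless; the afterEvaluate block only wires task ordering so spotlessJava runs after compileJava and processResources. A usage sketch — these are the standard entry-point tasks registered by the com.diffplug.spotless plugin, not commands taken from this patch:

    ./gradlew spotlessCheck   # fail the build if any Java source deviates from google-java-format
    ./gradlew spotlessApply   # rewrite non-conforming sources in place

A conventional answer to the TODO in the hunk above is to run spotlessCheck in CI and reserve spotlessApply for local development.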
diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java
index fe04c3629fe58..ef33bde8f61d3 100644
--- a/datahub-frontend/app/auth/AuthModule.java
+++ b/datahub-frontend/app/auth/AuthModule.java
@@ -1,5 +1,9 @@
 package auth;
 
+import static auth.AuthUtils.*;
+import static auth.sso.oidc.OidcConfigs.*;
+import static utils.ConfigUtil.*;
+
 import auth.sso.SsoConfigs;
 import auth.sso.SsoManager;
 import auth.sso.oidc.OidcConfigs;
@@ -18,12 +22,10 @@
 import com.linkedin.util.Configuration;
 import config.ConfigurationProvider;
 import controllers.SsoCallbackController;
-
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
@@ -42,205 +44,227 @@
 import play.cache.SyncCacheApi;
 import utils.ConfigUtil;
 
-import static auth.AuthUtils.*;
-import static auth.sso.oidc.OidcConfigs.*;
-import static utils.ConfigUtil.*;
+/** Responsible for configuring, validating, and providing authentication related components. */
+public class AuthModule extends AbstractModule {
 
+  /**
+   * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration
+   * value provides a stable encryption base from which to derive the encryption key.
+   *
+   * <p>We hash this value (SHA256), then take the first 16 bytes as the AES key.
+   */
+  private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key";
-/**
- * Responsible for configuring, validating, and providing authentication related components.
- */
-public class AuthModule extends AbstractModule {
+  private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider";
+  private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval";
+  private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries";
+
+  private final com.typesafe.config.Config _configs;
+
+  public AuthModule(final Environment environment, final com.typesafe.config.Config configs) {
+    _configs = configs;
+  }
 
+  @Override
+  protected void configure() {
     /**
-     * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration
-     * value provides a stable encryption base from which to derive the encryption key.
-     *
-     * We hash this value (SHA256), then take the first 16 bytes as the AES key.
+     * In Pac4J, you are given the option to store the profiles of authenticated users in either (i)
+     * PlayCacheSessionStore, which saves your data in the Play cache, or (ii)
+     * PlayCookieSessionStore, which saves your data in the Play session cookie. However, there is a
+     * problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the
+     * Pac4j profile in a cookie: whenever the profile returned by Pac4j is greater than 4096
+     * characters, the response will be rejected by the browser. Default to PlayCookieSessionStore
+     * so that the datahub-frontend container remains a stateless service.
      */
-    private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key";
-    private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider";
-    private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval";
-    private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries";
+    String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF);
 
-    private final com.typesafe.config.Config _configs;
-
-    public AuthModule(final Environment environment, final com.typesafe.config.Config configs) {
-        _configs = configs;
+    if (sessionStoreProvider.equals("PlayCacheSessionStore")) {
+      final PlayCacheSessionStore playCacheSessionStore =
+          new PlayCacheSessionStore(getProvider(SyncCacheApi.class));
+      bind(SessionStore.class).toInstance(playCacheSessionStore);
+      bind(PlaySessionStore.class).toInstance(playCacheSessionStore);
+    } else {
+      PlayCookieSessionStore playCacheCookieStore;
+      try {
+        // To generate a valid encryption key from an input value, we first
+        // hash the input to generate a fixed-length string. Then, we convert
+        // it to hex and slice the first 16 bytes, because AES key length must strictly
+        // have a specific length.
+        final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF);
+        final String aesKeyHash =
+            DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8));
+        final String aesEncryptionKey = aesKeyHash.substring(0, 16);
+        playCacheCookieStore =
+            new PlayCookieSessionStore(new ShiroAesDataEncrypter(aesEncryptionKey.getBytes()));
+      } catch (Exception e) {
+        throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e);
+      }
+      bind(SessionStore.class).toInstance(playCacheCookieStore);
+      bind(PlaySessionStore.class).toInstance(playCacheCookieStore);
     }
-
-    @Override
-    protected void configure() {
-        /**
-         * In Pac4J, you are given the option to store the profiles of authenticated users in either
-         * (i) PlayCacheSessionStore - saves your data in the Play cache or
-         * (ii) PlayCookieSessionStore saves your data in the Play session cookie
-         * However there is problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j profile in cookie.
-         * Whenever the profile returned by Pac4j is greater than 4096 characters, the response will be rejected by the browser.
-         * Default to PlayCacheCookieStore so that datahub-frontend container remains as a stateless service
-         */
-        String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF);
-
-        if (sessionStoreProvider.equals("PlayCacheSessionStore")) {
-            final PlayCacheSessionStore playCacheSessionStore = new PlayCacheSessionStore(getProvider(SyncCacheApi.class));
-            bind(SessionStore.class).toInstance(playCacheSessionStore);
-            bind(PlaySessionStore.class).toInstance(playCacheSessionStore);
-        } else {
-            PlayCookieSessionStore playCacheCookieStore;
-            try {
-                // To generate a valid encryption key from an input value, we first
-                // hash the input to generate a fixed-length string. Then, we convert
-                // it to hex and slice the first 16 bytes, because AES key length must strictly
-                // have a specific length.
-                final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF);
-                final String aesKeyHash = DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8));
-                final String aesEncryptionKey = aesKeyHash.substring(0, 16);
-                playCacheCookieStore = new PlayCookieSessionStore(
-                    new ShiroAesDataEncrypter(aesEncryptionKey.getBytes()));
-            } catch (Exception e) {
-                throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e);
-            }
-            bind(SessionStore.class).toInstance(playCacheCookieStore);
-            bind(PlaySessionStore.class).toInstance(playCacheCookieStore);
-        }
-
-        try {
-            bind(SsoCallbackController.class).toConstructor(SsoCallbackController.class.getConstructor(
-                SsoManager.class,
-                Authentication.class,
-                SystemEntityClient.class,
-                AuthServiceClient.class,
-                com.typesafe.config.Config.class));
-        } catch (NoSuchMethodException | SecurityException e) {
-            throw new RuntimeException("Failed to bind to SsoCallbackController. Cannot find constructor", e);
-        }
-        // logout
-        final LogoutController logoutController = new LogoutController();
-        logoutController.setDefaultUrl("/");
-        bind(LogoutController.class).toInstance(logoutController);
+    try {
+      bind(SsoCallbackController.class)
+          .toConstructor(
+              SsoCallbackController.class.getConstructor(
+                  SsoManager.class,
+                  Authentication.class,
+                  SystemEntityClient.class,
+                  AuthServiceClient.class,
+                  com.typesafe.config.Config.class));
+    } catch (NoSuchMethodException | SecurityException e) {
+      throw new RuntimeException(
+          "Failed to bind to SsoCallbackController. Cannot find constructor", e);
    }
+    // logout
+    final LogoutController logoutController = new LogoutController();
+    logoutController.setDefaultUrl("/");
+    bind(LogoutController.class).toInstance(logoutController);
+  }
 
-    @Provides @Singleton
-    protected Config provideConfig(SsoManager ssoManager) {
-        if (ssoManager.isSsoEnabled()) {
-            final Clients clients = new Clients();
-            final List<Client> clientList = new ArrayList<>();
-            clientList.add(ssoManager.getSsoProvider().client());
-            clients.setClients(clientList);
-            final Config config = new Config(clients);
-            config.setHttpActionAdapter(new PlayHttpActionAdapter());
-            return config;
-        }
-        return new Config();
+  @Provides
+  @Singleton
+  protected Config provideConfig(SsoManager ssoManager) {
+    if (ssoManager.isSsoEnabled()) {
+      final Clients clients = new Clients();
+      final List<Client> clientList = new ArrayList<>();
+      clientList.add(ssoManager.getSsoProvider().client());
+      clients.setClients(clientList);
+      final Config config = new Config(clients);
+      config.setHttpActionAdapter(new PlayHttpActionAdapter());
+      return config;
    }
+    return new Config();
+  }
 
-    @Provides @Singleton
-    protected SsoManager provideSsoManager() {
-        SsoManager manager = new SsoManager();
-        // Seed the SSO manager with a default SSO provider.
-        if (isSsoEnabled(_configs)) {
-            SsoConfigs ssoConfigs = new SsoConfigs(_configs);
-            if (ssoConfigs.isOidcEnabled()) {
-                // Register OIDC Provider, add to list of managers.
-                OidcConfigs oidcConfigs = new OidcConfigs(_configs);
-                OidcProvider oidcProvider = new OidcProvider(oidcConfigs);
-                // Set the default SSO provider to this OIDC client.
-                manager.setSsoProvider(oidcProvider);
-            }
-        }
-        return manager;
+  @Provides
+  @Singleton
+  protected SsoManager provideSsoManager() {
+    SsoManager manager = new SsoManager();
+    // Seed the SSO manager with a default SSO provider.
+    if (isSsoEnabled(_configs)) {
+      SsoConfigs ssoConfigs = new SsoConfigs(_configs);
+      if (ssoConfigs.isOidcEnabled()) {
+        // Register OIDC Provider, add to list of managers.
+        OidcConfigs oidcConfigs = new OidcConfigs(_configs);
+        OidcProvider oidcProvider = new OidcProvider(oidcConfigs);
+        // Set the default SSO provider to this OIDC client.
+        manager.setSsoProvider(oidcProvider);
+      }
    }
+    return manager;
+  }
 
-    @Provides
-    @Singleton
-    protected Authentication provideSystemAuthentication() {
-        // Returns an instance of Authentication used to authenticate system initiated calls to Metadata Service.
-        String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH);
-        String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH);
-        final Actor systemActor =
-            new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported.
-        return new Authentication(systemActor, String.format("Basic %s:%s", systemClientId, systemSecret),
-            Collections.emptyMap());
-    }
+  @Provides
+  @Singleton
+  protected Authentication provideSystemAuthentication() {
+    // Returns an instance of Authentication used to authenticate system initiated calls to Metadata
+    // Service.
+    String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH);
+    String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH);
+    final Actor systemActor =
+        new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported.
+    return new Authentication(
+        systemActor,
+        String.format("Basic %s:%s", systemClientId, systemSecret),
+        Collections.emptyMap());
+  }
 
-    @Provides
-    @Singleton
-    protected ConfigurationProvider provideConfigurationProvider() {
-        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ConfigurationProvider.class);
-        return context.getBean(ConfigurationProvider.class);
-    }
+  @Provides
+  @Singleton
+  protected ConfigurationProvider provideConfigurationProvider() {
+    AnnotationConfigApplicationContext context =
+        new AnnotationConfigApplicationContext(ConfigurationProvider.class);
+    return context.getBean(ConfigurationProvider.class);
+  }
 
-    @Provides
-    @Singleton
-    protected SystemEntityClient provideEntityClient(final Authentication systemAuthentication,
-        final ConfigurationProvider configurationProvider) {
-        return new SystemRestliEntityClient(buildRestliClient(),
-            new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)),
-            _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), systemAuthentication,
-            configurationProvider.getCache().getClient().getEntityClient());
-    }
+  @Provides
+  @Singleton
+  protected SystemEntityClient provideEntityClient(
+      final Authentication systemAuthentication,
+      final ConfigurationProvider configurationProvider) {
+    return new SystemRestliEntityClient(
+        buildRestliClient(),
+        new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)),
+        _configs.getInt(ENTITY_CLIENT_NUM_RETRIES),
+        systemAuthentication,
+        configurationProvider.getCache().getClient().getEntityClient());
+  }
 
-    @Provides
-    @Singleton
-    protected CloseableHttpClient provideHttpClient() {
-        return HttpClients.createDefault();
-    }
+  @Provides
+  @Singleton
+  protected CloseableHttpClient provideHttpClient() {
+    return HttpClients.createDefault();
+  }
 
-    @Provides
-    @Singleton
-    protected AuthServiceClient provideAuthClient(Authentication systemAuthentication, CloseableHttpClient httpClient) {
-        // Init a GMS auth client
-        final String metadataServiceHost =
-            _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH)
-                : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST);
-
-        final int metadataServicePort =
-            _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH)
-                : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT));
-
-        final Boolean metadataServiceUseSsl =
-            _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) ? _configs.getBoolean(
-                METADATA_SERVICE_USE_SSL_CONFIG_PATH)
-                : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL));
-
-        return new AuthServiceClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl,
-            systemAuthentication, httpClient);
-    }
+  @Provides
+  @Singleton
+  protected AuthServiceClient provideAuthClient(
+      Authentication systemAuthentication, CloseableHttpClient httpClient) {
+    // Init a GMS auth client
+    final String metadataServiceHost =
+        _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH)
+            ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH)
+            : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST);
+
+    final int metadataServicePort =
+        _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH)
+            ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH)
+            : Integer.parseInt(
+                Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT));
 
-    private com.linkedin.restli.client.Client buildRestliClient() {
-        final String metadataServiceHost = utils.ConfigUtil.getString(
+    final Boolean metadataServiceUseSsl =
+        _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH)
+            ? _configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH)
+            : Boolean.parseBoolean(
+                Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL));
+
+    return new AuthServiceClient(
+        metadataServiceHost,
+        metadataServicePort,
+        metadataServiceUseSsl,
+        systemAuthentication,
+        httpClient);
+  }
+
+  private com.linkedin.restli.client.Client buildRestliClient() {
+    final String metadataServiceHost =
+        utils.ConfigUtil.getString(
            _configs,
            METADATA_SERVICE_HOST_CONFIG_PATH,
            utils.ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
-        final int metadataServicePort = utils.ConfigUtil.getInt(
+    final int metadataServicePort =
+        utils.ConfigUtil.getInt(
            _configs,
            utils.ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
            utils.ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
-        final boolean metadataServiceUseSsl = utils.ConfigUtil.getBoolean(
+    final boolean metadataServiceUseSsl =
+        utils.ConfigUtil.getBoolean(
            _configs,
            utils.ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
-            ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL
-        );
-        final String metadataServiceSslProtocol = utils.ConfigUtil.getString(
+            ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL);
+    final String metadataServiceSslProtocol =
+        utils.ConfigUtil.getString(
            _configs,
            utils.ConfigUtil.METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH,
-            ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL
-        );
-        return DefaultRestliClientFactory.getRestLiClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, metadataServiceSslProtocol);
-    }
+            ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL);
+    return DefaultRestliClientFactory.getRestLiClient(
+        metadataServiceHost,
+        metadataServicePort,
+        metadataServiceUseSsl,
+        metadataServiceSslProtocol);
+  }
 
-    protected boolean isSsoEnabled(com.typesafe.config.Config configs) {
-        // If OIDC is enabled, we infer SSO to be enabled.
-        return configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
-            && Boolean.TRUE.equals(
-                Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
-    }
+  protected boolean isSsoEnabled(com.typesafe.config.Config configs) {
+    // If OIDC is enabled, we infer SSO to be enabled.
+    return configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
+        && Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
+  }
 
-    protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) {
-        // If OIDC is enabled, we infer SSO to be enabled.
-        return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
-            && Boolean.TRUE.equals(
+  protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) {
+    // If Metadata Service auth is enabled in the config, it is enforced here.
+    return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
+        && Boolean.TRUE.equals(
            Boolean.parseBoolean(configs.getString(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)));
-    }
+  }
 }
-
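The key derivation described in the configure() comment above (hash the Play secret, hex-encode it, keep the first 16 bytes) can be reproduced with the JDK alone; a minimal runnable sketch, using java.security.MessageDigest in place of the commons-codec DigestUtils call from the patch, with a made-up stand-in secret:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;

    public final class AesKeyDerivationSketch {
      public static void main(String[] args) throws Exception {
        // Stand-in for the configured play.http.secret.key value (illustrative only).
        String aesKeyBase = "example-play-http-secret";
        // SHA-256 the secret to obtain a fixed-length digest.
        byte[] digest =
            MessageDigest.getInstance("SHA-256")
                .digest(aesKeyBase.getBytes(StandardCharsets.UTF_8));
        // Hex-encode the digest, as DigestUtils.sha256Hex does.
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
          hex.append(String.format("%02x", b));
        }
        // Keep the first 16 hex characters: 16 ASCII bytes, exactly the key length AES-128 expects.
        String aesEncryptionKey = hex.substring(0, 16);
        System.out.println(aesEncryptionKey);
      }
    }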
diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java
index 386eee725c83d..283a2164584b9 100644
--- a/datahub-frontend/app/auth/AuthUtils.java
+++ b/datahub-frontend/app/auth/AuthUtils.java
@@ -1,137 +1,136 @@
 package auth;
 
 import com.linkedin.common.urn.CorpuserUrn;
-import lombok.extern.slf4j.Slf4j;
-import play.mvc.Http;
-
-import javax.annotation.Nonnull;
 import java.time.Duration;
 import java.time.temporal.ChronoUnit;
 import java.util.HashMap;
 import java.util.Map;
+import javax.annotation.Nonnull;
+import lombok.extern.slf4j.Slf4j;
+import play.mvc.Http;
 
 @Slf4j
 public class AuthUtils {
 
-    /**
-     * The config path that determines whether Metadata Service Authentication is enabled.
-     *
-     * When enabled, the frontend server will proxy requests to the Metadata Service without requiring them to have a valid
-     * frontend-issued Session Cookie. This effectively means delegating the act of authentication to the Metadata Service. It
-     * is critical that if Metadata Service authentication is enabled at the frontend service layer, it is also enabled in the
-     * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself.
-     *
-     * When disabled, the frontend server will require that all requests have a valid Session Cookie associated with them. Otherwise,
-     * requests will be denied with an Unauthorized error.
-     */
-    public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = "metadataService.auth.enabled";
-
-    /**
-     * The attribute inside session cookie representing a GMS-issued access token
-     */
-    public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token";
-
-    /**
-     * An ID used to identify system callers that are internal to DataHub. Provided via configuration.
-     */
-    public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId";
-
-    /**
-     * An Secret used to authenticate system callers that are internal to DataHub. Provided via configuration.
-     */
-    public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret";
-
-    /**
-     * Cookie name for redirect url that is manually separated from the session to reduce size
-     */
-    public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL";
-
-    public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub");
-
-    public static final String LOGIN_ROUTE = "/login";
-    public static final String USER_NAME = "username";
-    public static final String PASSWORD = "password";
-    public static final String ACTOR = "actor";
-    public static final String ACCESS_TOKEN = "token";
-    public static final String FULL_NAME = "fullName";
-    public static final String EMAIL = "email";
-    public static final String TITLE = "title";
-    public static final String INVITE_TOKEN = "inviteToken";
-    public static final String RESET_TOKEN = "resetToken";
-
-    /**
-     * Determines whether the inbound request should be forward to downstream Metadata Service. Today, this simply
-     * checks for the presence of an "Authorization" header or the presence of a valid session cookie issued
-     * by the frontend.
-     *
-     * Note that this method DOES NOT actually verify the authentication token of an inbound request. That will
-     * be handled by the downstream Metadata Service. Until then, the request should be treated as UNAUTHENTICATED.
-     *
-     * Returns true if the request is eligible to be forwarded to GMS, false otherwise.
-     */
-    public static boolean isEligibleForForwarding(Http.Request req) {
-        return hasValidSessionCookie(req) || hasAuthHeader(req);
+  /**
+   * The config path that determines whether Metadata Service Authentication is enabled.
+   *
+   * <p>When enabled, the frontend server will proxy requests to the Metadata Service without
+   * requiring them to have a valid frontend-issued Session Cookie. This effectively means
+   * delegating the act of authentication to the Metadata Service. It is critical that if Metadata
+   * Service authentication is enabled at the frontend service layer, it is also enabled in the
+   * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself.
+   *
+   * <p>When disabled, the frontend server will require that all requests have a valid Session
+   * Cookie associated with them. Otherwise, requests will be denied with an Unauthorized error.
+   */
+  public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH =
+      "metadataService.auth.enabled";
+
+  /** The attribute inside the session cookie representing a GMS-issued access token */
+  public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token";
+
+  /**
+   * An ID used to identify system callers that are internal to DataHub. Provided via configuration.
+   */
+  public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId";
+
+  /**
+   * A secret used to authenticate system callers that are internal to DataHub. Provided via
+   * configuration.
+   */
+  public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret";
+
+  /** Cookie name for redirect url that is manually separated from the session to reduce size */
+  public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL";
+
+  public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub");
+
+  public static final String LOGIN_ROUTE = "/login";
+  public static final String USER_NAME = "username";
+  public static final String PASSWORD = "password";
+  public static final String ACTOR = "actor";
+  public static final String ACCESS_TOKEN = "token";
+  public static final String FULL_NAME = "fullName";
+  public static final String EMAIL = "email";
+  public static final String TITLE = "title";
+  public static final String INVITE_TOKEN = "inviteToken";
+  public static final String RESET_TOKEN = "resetToken";
+
+  /**
+   * Determines whether the inbound request should be forwarded to the downstream Metadata Service.
+   * Today, this simply checks for the presence of an "Authorization" header or the presence of a
+   * valid session cookie issued by the frontend.
+   *
+   * <p>Note that this method DOES NOT actually verify the authentication token of an inbound
+   * request. That will be handled by the downstream Metadata Service. Until then, the request
+   * should be treated as UNAUTHENTICATED.
+   *
+   * <p>Returns true if the request is eligible to be forwarded to GMS, false otherwise.
+   */
+  public static boolean isEligibleForForwarding(Http.Request req) {
+    return hasValidSessionCookie(req) || hasAuthHeader(req);
+  }
+
+  /**
+   * Returns true if a request has a valid session cookie issued by the frontend server. Note that
+   * this DOES NOT verify whether the token within the session cookie will be accepted by the
+   * downstream GMS service.
+   *
+   * <p>Note that we depend on the presence of 2 cookies, one accessible to the browser and one not,
+   * as well as their agreement to determine authentication status.
+   */
+  public static boolean hasValidSessionCookie(final Http.Request req) {
+    Map<String, String> sessionCookie = req.session().data();
+    return sessionCookie.containsKey(ACCESS_TOKEN)
+        && sessionCookie.containsKey(ACTOR)
+        && req.getCookie(ACTOR).isPresent()
+        && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value());
+  }
+
+  /** Returns true if a request includes the Authorization header, false otherwise */
+  public static boolean hasAuthHeader(final Http.Request req) {
+    return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION);
+  }
+
+  /**
+   * Creates a client authentication cookie (actor cookie) with a specified TTL in hours.
+   *
+   * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub"
+   * @param ttlInHours the number of hours until the actor cookie expires after being set
+   */
+  public static Http.Cookie createActorCookie(
+      @Nonnull final String actorUrn,
+      @Nonnull final Integer ttlInHours,
+      @Nonnull final String sameSite,
+      final boolean isSecure) {
+    return Http.Cookie.builder(ACTOR, actorUrn)
+        .withHttpOnly(false)
+        .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS))
+        .withSameSite(convertSameSiteValue(sameSite))
+        .withSecure(isSecure)
+        .build();
+  }
+
+  public static Map<String, String> createSessionMap(
+      final String userUrnStr, final String accessToken) {
+    final Map<String, String> sessionAttributes = new HashMap<>();
+    sessionAttributes.put(ACTOR, userUrnStr);
+    sessionAttributes.put(ACCESS_TOKEN, accessToken);
+    return sessionAttributes;
+  }
+
+  private AuthUtils() {}
+
+  private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) {
+    try {
+      return Http.Cookie.SameSite.valueOf(sameSiteValue);
+    } catch (IllegalArgumentException e) {
+      log.warn(
+          String.format(
+              "Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue),
+          e);
+      return Http.Cookie.SameSite.LAX;
    }
-
-    /**
-     * Returns true if a request has a valid session cookie issued by the frontend server.
-     * Note that this DOES NOT verify whether the token within the session cookie will be accepted
-     * by the downstream GMS service.
-     *
-     * Note that we depend on the presence of 2 cookies, one accessible to the browser and one not,
-     * as well as their agreement to determine authentication status.
-     */
-    public static boolean hasValidSessionCookie(final Http.Request req) {
-        Map<String, String> sessionCookie = req.session().data();
-        return sessionCookie.containsKey(ACCESS_TOKEN)
-            && sessionCookie.containsKey(ACTOR)
-            && req.getCookie(ACTOR).isPresent()
-            && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value());
-    }
-
-    /**
-     * Returns true if a request includes the Authorization header, false otherwise
-     */
-    public static boolean hasAuthHeader(final Http.Request req) {
-        return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION);
-    }
-
-    /**
-     * Creates a client authentication cookie (actor cookie) with a specified TTL in hours.
-     *
-     * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub"
-     * @param ttlInHours the number of hours until the actor cookie expires after being set
-     */
-    public static Http.Cookie createActorCookie(
-        @Nonnull final String actorUrn,
-        @Nonnull final Integer ttlInHours,
-        @Nonnull final String sameSite,
-        final boolean isSecure
-    ) {
-        return Http.Cookie.builder(ACTOR, actorUrn)
-            .withHttpOnly(false)
-            .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS))
-            .withSameSite(convertSameSiteValue(sameSite))
-            .withSecure(isSecure)
-            .build();
-    }
-
-    public static Map<String, String> createSessionMap(final String userUrnStr, final String accessToken) {
-        final Map<String, String> sessionAttributes = new HashMap<>();
-        sessionAttributes.put(ACTOR, userUrnStr);
-        sessionAttributes.put(ACCESS_TOKEN, accessToken);
-        return sessionAttributes;
-    }
-
-    private AuthUtils() { }
-
-    private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) {
-        try {
-            return Http.Cookie.SameSite.valueOf(sameSiteValue);
-        } catch (IllegalArgumentException e) {
-            log.warn(String.format("Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), e);
-            return Http.Cookie.SameSite.LAX;
-        }
-    }
-
+  }
 }
diff --git a/datahub-frontend/app/auth/Authenticator.java b/datahub-frontend/app/auth/Authenticator.java
index ae847b318dce2..8536fc7e01695 100644
--- a/datahub-frontend/app/auth/Authenticator.java
+++ b/datahub-frontend/app/auth/Authenticator.java
@@ -1,48 +1,49 @@
 package auth;
 
+import static auth.AuthUtils.*;
+
 import com.typesafe.config.Config;
 import java.util.Optional;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import javax.inject.Inject;
 import play.mvc.Http;
 import play.mvc.Result;
 import play.mvc.Security;
 
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
-import static auth.AuthUtils.*;
-
-
 /**
  * Implementation of base Play Authentication used to determine if a request to a route should be
  * authenticated.
  */
 public class Authenticator extends Security.Authenticator {
 
-    private final boolean metadataServiceAuthEnabled;
+  private final boolean metadataServiceAuthEnabled;
 
-    @Inject
-    public Authenticator(@Nonnull Config config) {
-        this.metadataServiceAuthEnabled = config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
+  @Inject
+  public Authenticator(@Nonnull Config config) {
+    this.metadataServiceAuthEnabled =
+        config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
            && config.getBoolean(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH);
+  }
+
+  @Override
+  public Optional<String> getUsername(@Nonnull Http.Request req) {
+    if (this.metadataServiceAuthEnabled) {
+      // If Metadata Service auth is enabled, we only want to verify presence of the
+      // "Authorization" header OR the presence of a frontend generated session cookie.
+      // At this time, the actor is still considered to be unauthenticated.
+      return Optional.ofNullable(
+          AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null);
+    } else {
+      // If Metadata Service auth is not enabled, verify the presence of a valid session cookie.
+      return Optional.ofNullable(
+          AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null);
    }
+  }
 
-    @Override
-    public Optional<String> getUsername(@Nonnull Http.Request req) {
-        if (this.metadataServiceAuthEnabled) {
-            // If Metadata Service auth is enabled, we only want to verify presence of the
-            // "Authorization" header OR the presence of a frontend generated session cookie.
- // At this time, the actor is still considered to be unauthenicated. - return Optional.ofNullable(AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); - } else { - // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. - return Optional.ofNullable(AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null); - } - } - - @Override - @Nonnull - public Result onUnauthorized(@Nullable Http.Request req) { - return unauthorized(); - } + @Override + @Nonnull + public Result onUnauthorized(@Nullable Http.Request req) { + return unauthorized(); + } } diff --git a/datahub-frontend/app/auth/ConfigUtil.java b/datahub-frontend/app/auth/ConfigUtil.java index e0999ee00be38..9fbed91ce6a10 100644 --- a/datahub-frontend/app/auth/ConfigUtil.java +++ b/datahub-frontend/app/auth/ConfigUtil.java @@ -3,20 +3,20 @@ import com.typesafe.config.Config; import java.util.Optional; - public class ConfigUtil { - private ConfigUtil() { - } + private ConfigUtil() {} public static String getRequired(final Config configs, final String path) { if (!configs.hasPath(path)) { - throw new IllegalArgumentException(String.format("Missing required config with path %s", path)); + throw new IllegalArgumentException( + String.format("Missing required config with path %s", path)); } return configs.getString(path); } - public static String getOptional(final Config configs, final String path, final String defaultVal) { + public static String getOptional( + final Config configs, final String path, final String defaultVal) { if (!configs.hasPath(path)) { return defaultVal; } diff --git a/datahub-frontend/app/auth/CookieConfigs.java b/datahub-frontend/app/auth/CookieConfigs.java index b6da9b7a1833c..63b2ce61aaf9b 100644 --- a/datahub-frontend/app/auth/CookieConfigs.java +++ b/datahub-frontend/app/auth/CookieConfigs.java @@ -1,6 +1,5 @@ package auth; - import com.typesafe.config.Config; public class CookieConfigs { @@ -16,12 +15,18 @@ public class CookieConfigs { private final boolean _authCookieSecure; public CookieConfigs(final Config configs) { - _ttlInHours = configs.hasPath(SESSION_TTL_CONFIG_PATH) ? configs.getInt(SESSION_TTL_CONFIG_PATH) - : DEFAULT_SESSION_TTL_HOURS; - _authCookieSameSite = configs.hasPath(AUTH_COOKIE_SAME_SITE) ? configs.getString(AUTH_COOKIE_SAME_SITE) - : DEFAULT_AUTH_COOKIE_SAME_SITE; - _authCookieSecure = configs.hasPath(AUTH_COOKIE_SECURE) ? configs.getBoolean(AUTH_COOKIE_SECURE) - : DEFAULT_AUTH_COOKIE_SECURE; + _ttlInHours = + configs.hasPath(SESSION_TTL_CONFIG_PATH) + ? configs.getInt(SESSION_TTL_CONFIG_PATH) + : DEFAULT_SESSION_TTL_HOURS; + _authCookieSameSite = + configs.hasPath(AUTH_COOKIE_SAME_SITE) + ? configs.getString(AUTH_COOKIE_SAME_SITE) + : DEFAULT_AUTH_COOKIE_SAME_SITE; + _authCookieSecure = + configs.hasPath(AUTH_COOKIE_SECURE) + ? 
configs.getBoolean(AUTH_COOKIE_SECURE)
+ : DEFAULT_AUTH_COOKIE_SECURE;
 }

 public int getTtlInHours() {
diff --git a/datahub-frontend/app/auth/JAASConfigs.java b/datahub-frontend/app/auth/JAASConfigs.java
index f39c20aceb6f9..529bf98e1fdcf 100644
--- a/datahub-frontend/app/auth/JAASConfigs.java
+++ b/datahub-frontend/app/auth/JAASConfigs.java
@@ -6,17 +6,18 @@
 */
public class JAASConfigs {

- public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled";
+ public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled";

- private Boolean _isEnabled = true;
+ private Boolean _isEnabled = true;

- public JAASConfigs(final com.typesafe.config.Config configs) {
- if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) {
- _isEnabled = false;
- }
+ public JAASConfigs(final com.typesafe.config.Config configs) {
+ if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH)
+ && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) {
+ _isEnabled = false;
 }
+ }

- public boolean isJAASEnabled() {
- return _isEnabled;
- }
+ public boolean isJAASEnabled() {
+ return _isEnabled;
+ }
}
diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
index 3114da92d7d79..772c2c8f92f28 100644
--- a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
+++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
@@ -1,23 +1,27 @@
package auth;

-/**
- * Currently, this config enables or disable native user authentication.
- */
+/** Currently, this config enables or disables native user authentication. */
public class NativeAuthenticationConfigs {

 public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled";
- public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = "auth.native.signUp.enforceValidEmail";
+ public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH =
+ "auth.native.signUp.enforceValidEmail";

 private Boolean _isEnabled = true;
 private Boolean _isEnforceValidEmailEnabled = true;

 public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) {
 if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH)) {
- _isEnabled = Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString());
+ _isEnabled =
+ Boolean.parseBoolean(
+ configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString());
 }
 if (configs.hasPath(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)) {
 _isEnforceValidEmailEnabled =
- Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH).toString());
+ Boolean.parseBoolean(
+ configs
+ .getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)
+ .toString());
 }
 }

diff --git a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
index a6dbd69a93889..223ac669bd6ea 100644
--- a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
+++ b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
@@ -7,16 +7,15 @@
import play.api.mvc.FlashCookieBaker;
import play.api.mvc.SessionCookieBaker;

-
public class CustomCookiesModule extends AbstractModule {

 @Override
 public void configure() {
 bind(CookieSigner.class).toProvider(CookieSignerProvider.class);
- // We override the session cookie baker to not use a fallback, this prevents using an old URL Encoded cookie
+ // We override the session cookie
baker to not use a fallback; this prevents using an old
+ // URL-encoded cookie
 bind(SessionCookieBaker.class).to(CustomSessionCookieBaker.class);
 // We don't care about flash cookies, we don't use them
 bind(FlashCookieBaker.class).to(DefaultFlashCookieBaker.class);
 }
-
}
diff --git a/datahub-frontend/app/auth/sso/SsoConfigs.java b/datahub-frontend/app/auth/sso/SsoConfigs.java
index 062054173bddb..1f8455e773ffb 100644
--- a/datahub-frontend/app/auth/sso/SsoConfigs.java
+++ b/datahub-frontend/app/auth/sso/SsoConfigs.java
@@ -2,24 +2,19 @@

import static auth.ConfigUtil.*;

-
-/**
- * Class responsible for extracting and validating top-level SSO related configurations.
- */
+/** Class responsible for extracting and validating top-level SSO related configurations. */
public class SsoConfigs {

- /**
- * Required configs
- */
+ /** Required configs */
 private static final String AUTH_BASE_URL_CONFIG_PATH = "auth.baseUrl";
+
 private static final String AUTH_BASE_CALLBACK_PATH_CONFIG_PATH = "auth.baseCallbackPath";
 private static final String AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH = "auth.successRedirectPath";
 public static final String OIDC_ENABLED_CONFIG_PATH = "auth.oidc.enabled";

- /**
- * Default values
- */
+ /** Default values */
 private static final String DEFAULT_BASE_CALLBACK_PATH = "/callback";
+
 private static final String DEFAULT_SUCCESS_REDIRECT_PATH = "/";

 private final String _authBaseUrl;
@@ -29,17 +24,14 @@ public class SsoConfigs {

 public SsoConfigs(final com.typesafe.config.Config configs) {
 _authBaseUrl = getRequired(configs, AUTH_BASE_URL_CONFIG_PATH);
- _authBaseCallbackPath = getOptional(
- configs,
- AUTH_BASE_CALLBACK_PATH_CONFIG_PATH,
- DEFAULT_BASE_CALLBACK_PATH);
- _authSuccessRedirectPath = getOptional(
- configs,
- AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH,
- DEFAULT_SUCCESS_REDIRECT_PATH);
- _oidcEnabled = configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
- && Boolean.TRUE.equals(
- Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
+ _authBaseCallbackPath =
+ getOptional(configs, AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, DEFAULT_BASE_CALLBACK_PATH);
+ _authSuccessRedirectPath =
+ getOptional(configs, AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, DEFAULT_SUCCESS_REDIRECT_PATH);
+ _oidcEnabled =
+ configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
+ && Boolean.TRUE.equals(
+ Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
 }

 public String getAuthBaseUrl() {
diff --git a/datahub-frontend/app/auth/sso/SsoManager.java b/datahub-frontend/app/auth/sso/SsoManager.java
index 739ce3f1ba450..bf33f4148a553 100644
--- a/datahub-frontend/app/auth/sso/SsoManager.java
+++ b/datahub-frontend/app/auth/sso/SsoManager.java
@@ -2,19 +2,16 @@

import javax.annotation.Nonnull;

-
-/**
- * Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists.
- */
+/** Singleton class that stores & serves a reference to a single {@link SsoProvider} if one exists. */
public class SsoManager {

 private SsoProvider<?> _provider; // Only one active provider at a time.

- public SsoManager() { }
+ public SsoManager() {}

 /**
- * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been
- * provided to the manager.
+ * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been provided to the
+ * manager.
 *
 * @return true if SSO logic is enabled, false otherwise.
 */
@@ -34,8 +31,8 @@ public void setSsoProvider(@Nonnull final SsoProvider<?> provider) {

 /**
 * Gets the active {@link SsoProvider} instance.
*
- * @return the {@SsoProvider} that should be used during authentication and on
- * IdP callback, or null if SSO is not enabled.
+ * @return the {@link SsoProvider} that should be used during authentication and on IdP callback, or
+ * null if SSO is not enabled.
 */
 public SsoProvider<?> getSsoProvider() {
 return _provider;
diff --git a/datahub-frontend/app/auth/sso/SsoProvider.java b/datahub-frontend/app/auth/sso/SsoProvider.java
index f7454d599ba99..a0947b52b92ae 100644
--- a/datahub-frontend/app/auth/sso/SsoProvider.java
+++ b/datahub-frontend/app/auth/sso/SsoProvider.java
@@ -3,15 +3,10 @@
import org.pac4j.core.client.Client;
import org.pac4j.core.credentials.Credentials;

-/**
- * A thin interface over a Pac4j {@link Client} object and its
- * associated configurations.
- */
+/** A thin interface over a Pac4j {@link Client} object and its associated configurations. */
public interface SsoProvider<C extends SsoConfigs> {

- /**
- * The protocol used for SSO.
- */
+ /** The protocol used for SSO. */
 enum SsoProtocol {
 OIDC("oidc");
 // SAML -- not yet supported.
@@ -28,19 +23,12 @@ public String getCommonName() {
 }
 }

- /**
- * Returns the configs required by the provider.
- */
+ /** Returns the configs required by the provider. */
 C configs();

- /**
- * Returns the SSO protocol associated with the provider instance.
- */
+ /** Returns the SSO protocol associated with the provider instance. */
 SsoProtocol protocol();

- /**
- * Retrieves an initialized Pac4j {@link Client}.
- */
+ /** Retrieves an initialized Pac4j {@link Client}. */
 Client<? extends Credentials> client();
-
}
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
index baca144610ec4..fa676d2d16c90 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
@@ -1,9 +1,9 @@
package auth.sso.oidc;

+import com.nimbusds.jwt.JWT;
+import com.nimbusds.jwt.JWTParser;
import java.util.Map.Entry;
import java.util.Optional;
-
-import com.nimbusds.jwt.JWTParser;
import org.pac4j.core.authorization.generator.AuthorizationGenerator;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.profile.AttributeLocation;
@@ -14,44 +14,43 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

-import com.nimbusds.jwt.JWT;
-
public class OidcAuthorizationGenerator implements AuthorizationGenerator {

- private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class);
-
- private final ProfileDefinition<?> profileDef;
+ private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class);

- private final OidcConfigs oidcConfigs;
+ private final ProfileDefinition<?> profileDef;

- public OidcAuthorizationGenerator(final ProfileDefinition<?> profileDef, final OidcConfigs oidcConfigs) {
- this.profileDef = profileDef;
- this.oidcConfigs = oidcConfigs;
- }
+ private final OidcConfigs oidcConfigs;

- @Override
- public Optional<UserProfile> generate(WebContext context, UserProfile profile) {
- if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) {
- try {
- final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue());
-
- CommonProfile commonProfile = new CommonProfile();
-
- for (final Entry<String, Object> entry : jwt.getJWTClaimsSet().getClaims().entrySet()) {
- final String claimName = entry.getKey();
-
- if (profile.getAttribute(claimName) == null) {
-
profileDef.convertAndAdd(commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue());
- }
- }
-
- return Optional.of(commonProfile);
- } catch (Exception e) {
- logger.warn("Cannot parse access token claims", e);
- }
+ public OidcAuthorizationGenerator(
+ final ProfileDefinition<?> profileDef, final OidcConfigs oidcConfigs) {
+ this.profileDef = profileDef;
+ this.oidcConfigs = oidcConfigs;
+ }
+
+ @Override
+ public Optional<UserProfile> generate(WebContext context, UserProfile profile) {
+ if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) {
+ try {
+ final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue());
+
+ CommonProfile commonProfile = new CommonProfile();
+
+ for (final Entry<String, Object> entry : jwt.getJWTClaimsSet().getClaims().entrySet()) {
+ final String claimName = entry.getKey();
+
+ if (profile.getAttribute(claimName) == null) {
+ profileDef.convertAndAdd(
+ commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue());
+ }
 }
-
- return Optional.ofNullable(profile);
+
+ return Optional.of(commonProfile);
+ } catch (Exception e) {
+ logger.warn("Cannot parse access token claims", e);
+ }
 }
-
+
+ return Optional.ofNullable(profile);
+ }
}
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
index 7164710f4e0de..fa562f54312ec 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
@@ -1,6 +1,13 @@
package auth.sso.oidc;

+import static auth.AuthUtils.*;
+import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
+import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME;
+import static org.pac4j.play.store.PlayCookieSessionStore.*;
+import static play.mvc.Results.internalServerError;
+
import auth.CookieConfigs;
+import auth.sso.SsoManager;
import client.AuthServiceClient;
import com.datahub.authentication.Authentication;
import com.linkedin.common.AuditStamp;
@@ -59,23 +66,16 @@
import org.pac4j.core.util.Pac4jConstants;
import org.pac4j.play.PlayWebContext;
import play.mvc.Result;
-import auth.sso.SsoManager;
-
-import static auth.AuthUtils.*;
-import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME;
-import static org.pac4j.play.store.PlayCookieSessionStore.*;
-import static play.mvc.Results.internalServerError;
-

/**
- * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects back to D
- * DataHub after an authentication attempt.
+ * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects
+ * back to DataHub after an authentication attempt.
 *
- * On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract
- * basic information about the user including their name, email, groups, & more. If just-in-time provisioning
- * is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for the user, along with any Groups
- * ({@link CorpGroupSnapshot}) that can be extracted, only doing so if the user does not already exist.
+ * <p>On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract
+ * basic information about the user including their name, email, groups, & more.
If just-in-time + * provisioning is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for + * the user, along with any Groups ({@link CorpGroupSnapshot}) that can be extracted, only doing so + * if the user does not already exist. */ @Slf4j public class OidcCallbackLogic extends DefaultCallbackLogic<Result, PlayWebContext> { @@ -86,9 +86,12 @@ public class OidcCallbackLogic extends DefaultCallbackLogic<Result, PlayWebConte private final AuthServiceClient _authClient; private final CookieConfigs _cookieConfigs; - public OidcCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, - final CookieConfigs cookieConfigs) { + public OidcCallbackLogic( + final SsoManager ssoManager, + final Authentication systemAuthentication, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { _ssoManager = ssoManager; _systemAuthentication = systemAuthentication; _entityClient = entityClient; @@ -97,14 +100,27 @@ public OidcCallbackLogic(final SsoManager ssoManager, final Authentication syste } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { setContextRedirectUrl(context); final Result result = - super.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, + super.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, defaultClient); // Handle OIDC authentication errors. @@ -119,14 +135,25 @@ public Result perform(PlayWebContext context, Config config, @SuppressWarnings("unchecked") private void setContextRedirectUrl(PlayWebContext context) { - Optional<Cookie> redirectUrl = context.getRequestCookies().stream() - .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())).findFirst(); + Optional<Cookie> redirectUrl = + context.getRequestCookies().stream() + .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())) + .findFirst(); redirectUrl.ifPresent( - cookie -> context.getSessionStore().set(context, Pac4jConstants.REQUESTED_URL, - JAVA_SER_HELPER.deserializeFromBytes(uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); + cookie -> + context + .getSessionStore() + .set( + context, + Pac4jConstants.REQUESTED_URL, + JAVA_SER_HELPER.deserializeFromBytes( + uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); } - private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result result, final PlayWebContext context, + private Result handleOidcCallback( + final OidcConfigs oidcConfigs, + final Result result, + final PlayWebContext context, final ProfileManager<UserProfile> profileManager) { log.debug("Beginning OIDC Callback Handling..."); @@ -134,14 +161,17 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re if (profileManager.isAuthenticated()) { // If authenticated, the user should have a profile. 
final CommonProfile profile = (CommonProfile) profileManager.get(true).get(); - log.debug(String.format("Found authenticated user with profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Found authenticated user with profile %s", profile.getAttributes().toString())); // Extract the User name required to log into DataHub. final String userName = extractUserNameOrThrow(oidcConfigs, profile); final CorpuserUrn corpUserUrn = new CorpuserUrn(userName); try { - // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does not exist. + // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does + // not exist. if (oidcConfigs.isJitProvisioningEnabled()) { log.debug("Just-in-time provisioning is enabled. Beginning provisioning process..."); CorpUserSnapshot extractedUser = extractUser(corpUserUrn, profile); @@ -150,7 +180,8 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re // Extract groups & provision them. List<CorpGroupSnapshot> extractedGroups = extractGroups(profile); tryProvisionGroups(extractedGroups); - // Add users to groups on DataHub. Note that this clears existing group membership for a user if it already exists. + // Add users to groups on DataHub. Note that this clears existing group membership for a + // user if it already exists. updateGroupMembership(corpUserUrn, createGroupMembership(extractedGroups)); } } else if (oidcConfigs.isPreProvisioningRequired()) { @@ -160,55 +191,69 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re } // Update user status to active on login. // If we want to prevent certain users from logging in, here's where we'll want to do it. - setUserStatus(corpUserUrn, new CorpUserStatus().setStatus(Constants.CORP_USER_STATUS_ACTIVE) - .setLastModified(new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()))); + setUserStatus( + corpUserUrn, + new CorpUserStatus() + .setStatus(Constants.CORP_USER_STATUS_ACTIVE) + .setLastModified( + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()))); } catch (Exception e) { log.error("Failed to perform post authentication steps. Redirecting to error page.", e); return internalServerError( - String.format("Failed to perform post authentication steps. Error message: %s", e.getMessage())); + String.format( + "Failed to perform post authentication steps. Error message: %s", e.getMessage())); } // Successfully logged in - Generate GMS login token final String accessToken = _authClient.generateSessionTokenForUser(corpUserUrn.getId()); return result - .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) - .withCookies( - createActorCookie( - corpUserUrn.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) + .withCookies( + createActorCookie( + corpUserUrn.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); } return internalServerError( "Failed to authenticate current user. 
Cannot find valid identity provider profile in session.");
 }

- private String extractUserNameOrThrow(final OidcConfigs oidcConfigs, final CommonProfile profile) {
+ private String extractUserNameOrThrow(
+ final OidcConfigs oidcConfigs, final CommonProfile profile) {
 // Ensure that the attribute exists (was returned by IdP)
 if (!profile.containsAttribute(oidcConfigs.getUserNameClaim())) {
- throw new RuntimeException(String.format(
- "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s",
- oidcConfigs.getUserNameClaim(), oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString()));
+ throw new RuntimeException(
+ String.format(
+ "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s",
+ oidcConfigs.getUserNameClaim(),
+ oidcConfigs.getUserNameClaimRegex(),
+ profile.getAttributes().toString()));
 }

 final String userNameClaim = (String) profile.getAttribute(oidcConfigs.getUserNameClaim());

- final Optional<String> mappedUserName = extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim);
-
- return mappedUserName.orElseThrow(() -> new RuntimeException(
- String.format("Failed to extract DataHub username from username claim %s using regex %s. Profile: %s",
- userNameClaim, oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString())));
+ final Optional<String> mappedUserName =
+ extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim);
+
+ return mappedUserName.orElseThrow(
+ () ->
+ new RuntimeException(
+ String.format(
+ "Failed to extract DataHub username from username claim %s using regex %s. Profile: %s",
+ userNameClaim,
+ oidcConfigs.getUserNameClaimRegex(),
+ profile.getAttributes().toString())));
 }

- /**
- * Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}.
- */
+ /** Attempts to map an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. */
 private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {
- log.debug(String.format("Attempting to extract user from OIDC profile %s", profile.getAttributes().toString()));
+ log.debug(
+ String.format(
+ "Attempting to extract user from OIDC profile %s", profile.getAttributes().toString()));

 // Extracts these based on the default set of OIDC claims, described here:
 // https://developer.okta.com/blog/2017/07/25/oidc-primer-part-1
@@ -217,7 +262,9 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {
 String email = profile.getEmail();
 URI picture = profile.getPictureUrl();
 String displayName = profile.getDisplayName();
- String fullName = (String) profile.getAttribute("name"); // Name claim is sometimes provided, including by Google.
+ String fullName =
+ (String)
+ profile.getAttribute("name"); // Name claim is sometimes provided, including by Google.
 if (fullName == null && firstName != null && lastName != null) {
 fullName = String.format("%s %s", firstName, lastName);
 }
@@ -231,7 +278,8 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {
 userInfo.setFullName(fullName, SetMode.IGNORE_NULL);
 userInfo.setEmail(email, SetMode.IGNORE_NULL);
 // If there is a display name, use it. Otherwise fall back to full name.
- userInfo.setDisplayName(displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL);
+ userInfo.setDisplayName(
+ displayName == null ?
userInfo.getFullName() : displayName, SetMode.IGNORE_NULL);

 final CorpUserEditableInfo editableInfo = new CorpUserEditableInfo();
 try {
@@ -254,15 +302,18 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {

 private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
- log.debug(String.format("Attempting to extract groups from OIDC profile %s", profile.getAttributes().toString()));
+ log.debug(
+ String.format(
+ "Attempting to extract groups from OIDC profile %s",
+ profile.getAttributes().toString()));

 final OidcConfigs configs = (OidcConfigs) _ssoManager.getSsoProvider().configs();

- // First, attempt to extract a list of groups from the profile, using the group name attribute config.
+ // First, attempt to extract a list of groups from the profile, using the group name attribute
+ // config.
 final List<CorpGroupSnapshot> extractedGroups = new ArrayList<>();
 final List<String> groupsClaimNames =
- new ArrayList<String>(Arrays.asList(configs.getGroupsClaimName().split(","))).stream()
- .map(String::trim)
- .collect(Collectors.toList());
+ new ArrayList<String>(Arrays.asList(configs.getGroupsClaimName().split(",")))
+ .stream().map(String::trim).collect(Collectors.toList());

 for (final String groupsClaimName : groupsClaimNames) {

@@ -273,14 +324,16 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
 final Object groupAttribute = profile.getAttribute(groupsClaimName);
 if (groupAttribute instanceof Collection) {
 // List of group names
- groupNames = (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class);
+ groupNames =
+ (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class);
 } else if (groupAttribute instanceof String) {
 // Single group name
 groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class));
 } else {
 log.error(
- String.format("Fail to parse OIDC group claim with name %s. Unknown type %s provided.", groupsClaimName,
- groupAttribute.getClass()));
+ String.format(
+ "Failed to parse OIDC group claim with name %s. Unknown type %s provided.",
+ groupsClaimName, groupAttribute.getClass()));
 // Skip over group attribute. Do not throw.
 groupNames = Collections.emptyList();
 }
@@ -297,7 +350,8 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
 corpGroupInfo.setDisplayName(groupName);

 // To deal with the possibility of spaces, we url encode the URN group name.
- final String urlEncodedGroupName = URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString());
+ final String urlEncodedGroupName =
+ URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString());
 final CorpGroupUrn groupUrn = new CorpGroupUrn(urlEncodedGroupName);
 final CorpGroupSnapshot corpGroupSnapshot = new CorpGroupSnapshot();
 corpGroupSnapshot.setUrn(groupUrn);
@@ -306,18 +360,23 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
 corpGroupSnapshot.setAspects(aspects);
 groupSnapshots.add(corpGroupSnapshot);
 } catch (UnsupportedEncodingException ex) {
- log.error(String.format("Failed to URL encoded extracted group name %s. Skipping", groupName));
+ log.error(
+ String.format(
+ "Failed to URL-encode extracted group name %s.
Skipping", groupName)); } } if (groupSnapshots.isEmpty()) { - log.warn(String.format("Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); + log.warn( + String.format( + "Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); } else { extractedGroups.addAll(groupSnapshots); } } catch (Exception e) { - log.error(String.format( - "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", - groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); + log.error( + String.format( + "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", + groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); } } } @@ -327,7 +386,8 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) { private GroupMembership createGroupMembership(final List<CorpGroupSnapshot> extractedGroups) { final GroupMembership groupMembershipAspect = new GroupMembership(); groupMembershipAspect.setGroups( - new UrnArray(extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + new UrnArray( + extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); return groupMembershipAspect; } @@ -345,30 +405,39 @@ private void tryProvisionUser(CorpUserSnapshot corpUserSnapshot) { // If we find more than the key aspect, then the entity "exists". if (existingCorpUserSnapshot.getAspects().size() <= 1) { log.debug( - String.format("Extracted user that does not yet exist %s. Provisioning...", corpUserSnapshot.getUrn())); + String.format( + "Extracted user that does not yet exist %s. Provisioning...", + corpUserSnapshot.getUrn())); // 2. The user does not exist. Provision them. final Entity newEntity = new Entity(); newEntity.setValue(Snapshot.create(corpUserSnapshot)); _entityClient.update(newEntity, _systemAuthentication); log.debug(String.format("Successfully provisioned user %s", corpUserSnapshot.getUrn())); } - log.debug(String.format("User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); + log.debug( + String.format( + "User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); // Otherwise, the user exists. Skip provisioning. } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. - throw new RuntimeException(String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); + throw new RuntimeException( + String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); } } private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) { - log.debug(String.format("Attempting to provision groups with urns %s", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + log.debug( + String.format( + "Attempting to provision groups with urns %s", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); // 1. Check if this user already exists. 
try { - final Set<Urn> urnsToFetch = corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); - final Map<Urn, Entity> existingGroups = _entityClient.batchGet(urnsToFetch, _systemAuthentication); + final Set<Urn> urnsToFetch = + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); + final Map<Urn, Entity> existingGroups = + _entityClient.batchGet(urnsToFetch, _systemAuthentication); log.debug(String.format("Fetched GMS groups with urns %s", existingGroups.keySet())); @@ -381,15 +450,21 @@ private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) { // If more than the key aspect exists, then the group already "exists". if (corpGroupSnapshot.getAspects().size() <= 1) { - log.debug(String.format("Extracted group that does not yet exist %s. Provisioning...", - corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + corpGroupSnapshot.getUrn())); groupsToCreate.add(extractedGroup); } - log.debug(String.format("Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); } else { // Should not occur until we stop returning default Key aspects for unrecognized entities. log.debug( - String.format("Extracted group that does not yet exist %s. Provisioning...", extractedGroup.getUrn())); + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + extractedGroup.getUrn())); groupsToCreate.add(extractedGroup); } } @@ -400,15 +475,20 @@ private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) { log.debug(String.format("Provisioning groups with urns %s", groupsToCreateUrns)); // Now batch create all entities identified to create. - _entityClient.batchUpdate(groupsToCreate.stream() - .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) - .collect(Collectors.toSet()), _systemAuthentication); + _entityClient.batchUpdate( + groupsToCreate.stream() + .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) + .collect(Collectors.toSet()), + _systemAuthentication); log.debug(String.format("Successfully provisioned groups with urns %s", groupsToCreateUrns)); } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. - throw new RuntimeException(String.format("Failed to provision groups with urns %s.", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), e); + throw new RuntimeException( + String.format( + "Failed to provision groups with urns %s.", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), + e); } } @@ -423,12 +503,14 @@ private void updateGroupMembership(Urn urn, GroupMembership groupMembership) { try { _entityClient.ingestProposal(proposal, _systemAuthentication); } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to update group membership for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to update group membership for user with urn %s", urn), e); } } private void verifyPreProvisionedUser(CorpuserUrn urn) { - // Validate that the user exists in the system (there is more than just a key aspect for them, as of today). + // Validate that the user exists in the system (there is more than just a key aspect for them, + // as of today). 
try { final Entity corpUser = _entityClient.get(urn, _systemAuthentication); @@ -436,9 +518,14 @@ private void verifyPreProvisionedUser(CorpuserUrn urn) { // If we find more than the key aspect, then the entity "exists". if (corpUser.getValue().getCorpUserSnapshot().getAspects().size() <= 1) { - log.debug(String.format("Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); - throw new RuntimeException(String.format("User with urn %s has not yet been provisioned in DataHub. " - + "Please contact your DataHub admin to provision an account.", urn)); + log.debug( + String.format( + "Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); + throw new RuntimeException( + String.format( + "User with urn %s has not yet been provisioned in DataHub. " + + "Please contact your DataHub admin to provision an account.", + urn)); } // Otherwise, the user exists. } catch (RemoteInvocationException e) { diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index eb037db2ef9c0..6877ca187da97 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -1,104 +1,122 @@ package auth.sso.oidc; +import static auth.ConfigUtil.*; + import auth.sso.SsoConfigs; import java.util.Optional; import lombok.Getter; -import static auth.ConfigUtil.*; - - -/** - * Class responsible for extracting and validating OIDC related configurations. - */ +/** Class responsible for extracting and validating OIDC related configurations. */ @Getter public class OidcConfigs extends SsoConfigs { - /** - * Required configs - */ - public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; - public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; - public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + /** Required configs */ + public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; + + public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; + public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + + /** Optional configs */ + public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; + + public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; + public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; + public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; + public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = + "auth.oidc.clientAuthenticationMethod"; + public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = + "auth.oidc.jitProvisioningEnabled"; + public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = + "auth.oidc.preProvisioningRequired"; + public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; + public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = + "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. 
+ public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; + public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; + public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; + public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; + public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; + public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = + "auth.oidc.extractJwtAccessTokenClaims"; + public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; - /** - * Optional configs - */ - public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; - public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; - public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; - public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; - public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = "auth.oidc.clientAuthenticationMethod"; - public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = "auth.oidc.jitProvisioningEnabled"; - public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = "auth.oidc.preProvisioningRequired"; - public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; - public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. - public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; - public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; - public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; - public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; - public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; - public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "auth.oidc.extractJwtAccessTokenClaims"; - public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; + /** Default values */ + private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - /** - * Default values - */ - private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; - private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; // Often "group" must be included for groups. - private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; - private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; - private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; - private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; - private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; // False since extraction of groups can overwrite existing group membership. - private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; - private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; + private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; + private static final String DEFAULT_OIDC_SCOPE = + "openid profile email"; // Often "group" must be included for groups. 
+ private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; + private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; + private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; + private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; + private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = + "false"; // False since extraction of groups can overwrite existing group membership. + private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; + private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; - private String clientId; - private String clientSecret; - private String discoveryUri; - private String userNameClaim; - private String userNameClaimRegex; - private String scope; - private String clientName; - private String clientAuthenticationMethod; - private boolean jitProvisioningEnabled; - private boolean preProvisioningRequired; - private boolean extractGroupsEnabled; - private String groupsClaimName; - private Optional<String> responseType; - private Optional<String> responseMode; - private Optional<Boolean> useNonce; - private Optional<String> customParamResource; - private String readTimeout; - private Optional<Boolean> extractJwtAccessTokenClaims; - private Optional<String> preferredJwsAlgorithm; + private String clientId; + private String clientSecret; + private String discoveryUri; + private String userNameClaim; + private String userNameClaimRegex; + private String scope; + private String clientName; + private String clientAuthenticationMethod; + private boolean jitProvisioningEnabled; + private boolean preProvisioningRequired; + private boolean extractGroupsEnabled; + private String groupsClaimName; + private Optional<String> responseType; + private Optional<String> responseMode; + private Optional<Boolean> useNonce; + private Optional<String> customParamResource; + private String readTimeout; + private Optional<Boolean> extractJwtAccessTokenClaims; + private Optional<String> preferredJwsAlgorithm; - public OidcConfigs(final com.typesafe.config.Config configs) { - super(configs); - clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); - clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); - discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); - userNameClaim = getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); - userNameClaimRegex = - getOptional(configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); - scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); - clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); - clientAuthenticationMethod = getOptional(configs, OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, + public OidcConfigs(final com.typesafe.config.Config configs) { + super(configs); + clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); + clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); + discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); + userNameClaim = + getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); + userNameClaimRegex = + getOptional( + configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); + scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); + clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); + clientAuthenticationMethod = + 
getOptional( + configs, + OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD); - jitProvisioningEnabled = Boolean.parseBoolean( - getOptional(configs, OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); - preProvisioningRequired = Boolean.parseBoolean( - getOptional(configs, OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); - extractGroupsEnabled = Boolean.parseBoolean( + jitProvisioningEnabled = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, + DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); + preProvisioningRequired = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, + DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); + extractGroupsEnabled = + Boolean.parseBoolean( getOptional(configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED)); - groupsClaimName = getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); - responseType = getOptional(configs, OIDC_RESPONSE_TYPE); - responseMode = getOptional(configs, OIDC_RESPONSE_MODE); - useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); - customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); - readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); - extractJwtAccessTokenClaims = getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); - preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); - } + groupsClaimName = + getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); + responseType = getOptional(configs, OIDC_RESPONSE_TYPE); + responseMode = getOptional(configs, OIDC_RESPONSE_MODE); + useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); + customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); + readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); + extractJwtAccessTokenClaims = + getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); + preferredJwsAlgorithm = + Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java index fd0a2e1877154..39a65a46cbf91 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java @@ -10,15 +10,15 @@ import org.pac4j.oidc.credentials.OidcCredentials; import org.pac4j.oidc.profile.OidcProfileDefinition; - /** * Implementation of {@link SsoProvider} supporting the OIDC protocol. * - * This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC related - * configuration options, which reside in an instance of {@link OidcConfigs}. + * <p>This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC + * related configuration options, which reside in an instance of {@link OidcConfigs}. * - * It is responsible for initializing this client from a configuration object ({@link OidcConfigs}. Note that - * this class is not related to the logic performed when an IdP performs a callback to DataHub. + * <p>It is responsible for initializing this client from a configuration object ({@link + * OidcConfigs}. 
Note that this class is not related to the logic performed when an IdP performs a + * callback to DataHub. */ @Slf4j public class OidcProvider implements SsoProvider<OidcConfigs> { @@ -53,7 +53,8 @@ private Client<OidcCredentials> createPac4jClient() { oidcConfiguration.setClientId(_oidcConfigs.getClientId()); oidcConfiguration.setSecret(_oidcConfigs.getClientSecret()); oidcConfiguration.setDiscoveryURI(_oidcConfigs.getDiscoveryUri()); - oidcConfiguration.setClientAuthenticationMethodAsString(_oidcConfigs.getClientAuthenticationMethod()); + oidcConfiguration.setClientAuthenticationMethodAsString( + _oidcConfigs.getClientAuthenticationMethod()); oidcConfiguration.setScope(_oidcConfigs.getScope()); try { oidcConfiguration.setReadTimeout(Integer.parseInt(_oidcConfigs.getReadTimeout())); @@ -63,18 +64,24 @@ private Client<OidcCredentials> createPac4jClient() { _oidcConfigs.getResponseType().ifPresent(oidcConfiguration::setResponseType); _oidcConfigs.getResponseMode().ifPresent(oidcConfiguration::setResponseMode); _oidcConfigs.getUseNonce().ifPresent(oidcConfiguration::setUseNonce); - _oidcConfigs.getCustomParamResource() + _oidcConfigs + .getCustomParamResource() .ifPresent(value -> oidcConfiguration.setCustomParams(ImmutableMap.of("resource", value))); - _oidcConfigs.getPreferredJwsAlgorithm().ifPresent(preferred -> { - log.info("Setting preferredJwsAlgorithm: " + preferred); - oidcConfiguration.setPreferredJwsAlgorithm(preferred); - }); + _oidcConfigs + .getPreferredJwsAlgorithm() + .ifPresent( + preferred -> { + log.info("Setting preferredJwsAlgorithm: " + preferred); + oidcConfiguration.setPreferredJwsAlgorithm(preferred); + }); final CustomOidcClient oidcClient = new CustomOidcClient(oidcConfiguration); oidcClient.setName(OIDC_CLIENT_NAME); - oidcClient.setCallbackUrl(_oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); + oidcClient.setCallbackUrl( + _oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); oidcClient.setCallbackUrlResolver(new PathParameterCallbackUrlResolver()); - oidcClient.addAuthorizationGenerator(new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); + oidcClient.addAuthorizationGenerator( + new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); return oidcClient; } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java index 014632c17e690..9881b5e095b78 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java @@ -1,57 +1,58 @@ package auth.sso.oidc; +import static play.mvc.Results.internalServerError; +import static play.mvc.Results.unauthorized; + +import java.util.Optional; import org.pac4j.play.PlayWebContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.mvc.Result; -import java.util.Optional; - -import static play.mvc.Results.internalServerError; -import static play.mvc.Results.unauthorized; - - public class OidcResponseErrorHandler { - private OidcResponseErrorHandler() { - - } - - private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); + private OidcResponseErrorHandler() {} - private static final String ERROR_FIELD_NAME = "error"; - private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; + private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); - public static Result handleError(final 
PlayWebContext context) { + private static final String ERROR_FIELD_NAME = "error"; + private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; - _logger.warn("OIDC responded with an error: '{}'. Error description: '{}'", - getError(context), - getErrorDescription(context)); + public static Result handleError(final PlayWebContext context) { - if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { - return unauthorized(String.format("Access denied. " - + "The OIDC service responded with 'Access denied'. " - + "It seems that you don't have access to this application yet. Please apply for access. \n\n" - + "If you already have been assigned this application, it may be so that your OIDC request is still in action. " - + "Error details: '%s':'%s'", - context.getRequestParameter("error"), - context.getRequestParameter("error_description"))); - } + _logger.warn( + "OIDC responded with an error: '{}'. Error description: '{}'", + getError(context), + getErrorDescription(context)); - return internalServerError( - String.format("Internal server error. The OIDC service responded with an error: '%s'.\n" - + "Error description: '%s'", getError(context).orElse(""), getErrorDescription(context).orElse(""))); + if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { + return unauthorized( + String.format( + "Access denied. " + + "The OIDC service responded with 'Access denied'. " + + "It seems that you don't have access to this application yet. Please apply for access. \n\n" + + "If you already have been assigned this application, it may be so that your OIDC request is still in action. " + + "Error details: '%s':'%s'", + context.getRequestParameter("error"), + context.getRequestParameter("error_description"))); } - public static boolean isError(final PlayWebContext context) { - return getError(context).isPresent() && !getError(context).get().isEmpty(); - } + return internalServerError( + String.format( + "Internal server error. 
The OIDC service responded with an error: '%s'.\n" + + "Error description: '%s'", + getError(context).orElse(""), getErrorDescription(context).orElse(""))); + } - public static Optional<String> getError(final PlayWebContext context) { - return context.getRequestParameter(ERROR_FIELD_NAME); - } + public static boolean isError(final PlayWebContext context) { + return getError(context).isPresent() && !getError(context).get().isEmpty(); + } - public static Optional<String> getErrorDescription(final PlayWebContext context) { - return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); - } + public static Optional<String> getError(final PlayWebContext context) { + return context.getRequestParameter(ERROR_FIELD_NAME); + } + + public static Optional<String> getErrorDescription(final PlayWebContext context) { + return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java index 8c8c250fb7e63..01f8f16171d13 100644 --- a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java +++ b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java @@ -1,8 +1,8 @@ package auth.sso.oidc.custom; -import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; +import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.ParseException; import com.nimbusds.oauth2.sdk.TokenErrorResponse; import com.nimbusds.oauth2.sdk.TokenRequest; @@ -37,7 +37,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class CustomOidcAuthenticator implements Authenticator<OidcCredentials> { private static final Logger logger = LoggerFactory.getLogger(OidcAuthenticator.class); @@ -61,14 +60,17 @@ public CustomOidcAuthenticator(final OidcClient<OidcConfiguration> client) { this.client = client; // check authentication methods - final List<ClientAuthenticationMethod> metadataMethods = configuration.findProviderMetadata().getTokenEndpointAuthMethods(); + final List<ClientAuthenticationMethod> metadataMethods = + configuration.findProviderMetadata().getTokenEndpointAuthMethods(); - final ClientAuthenticationMethod preferredMethod = getPreferredAuthenticationMethod(configuration); + final ClientAuthenticationMethod preferredMethod = + getPreferredAuthenticationMethod(configuration); final ClientAuthenticationMethod chosenMethod; if (CommonHelper.isNotEmpty(metadataMethods)) { if (preferredMethod != null) { - if (ClientAuthenticationMethod.NONE.equals(preferredMethod) || metadataMethods.contains(preferredMethod)) { + if (ClientAuthenticationMethod.NONE.equals(preferredMethod) + || metadataMethods.contains(preferredMethod)) { chosenMethod = preferredMethod; } else { throw new TechnicalException( @@ -83,8 +85,10 @@ public CustomOidcAuthenticator(final OidcClient<OidcConfiguration> client) { chosenMethod = firstSupportedMethod(metadataMethods); } } else { - chosenMethod = preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); - logger.info("Provider metadata does not provide Token endpoint authentication methods. Using: {}", + chosenMethod = + preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); + logger.info( + "Provider metadata does not provide Token endpoint authentication methods. 
Using: {}",
+          chosenMethod);
     }
 
@@ -103,38 +107,41 @@ public CustomOidcAuthenticator(final OidcClient<OidcConfiguration> client) {
   }
 
   /**
-   * The preferred {@link ClientAuthenticationMethod} specified in the given
-   * {@link OidcConfiguration}, or <code>null</code> meaning that the a
-   * provider-supported method should be chosen.
+   * The preferred {@link ClientAuthenticationMethod} specified in the given {@link
+   * OidcConfiguration}, or <code>null</code> meaning that a provider-supported method should be
+   * chosen.
    */
-  private static ClientAuthenticationMethod getPreferredAuthenticationMethod(OidcConfiguration config) {
+  private static ClientAuthenticationMethod getPreferredAuthenticationMethod(
+      OidcConfiguration config) {
     final ClientAuthenticationMethod configurationMethod = config.getClientAuthenticationMethod();
     if (configurationMethod == null) {
       return null;
     }
     if (!SUPPORTED_METHODS.contains(configurationMethod)) {
-      throw new TechnicalException("Configured authentication method (" + configurationMethod + ") is not supported.");
+      throw new TechnicalException(
+          "Configured authentication method (" + configurationMethod + ") is not supported.");
     }
     return configurationMethod;
   }
 
   /**
-   * The first {@link ClientAuthenticationMethod} from the given list of
-   * methods that is supported by this implementation.
+   * The first {@link ClientAuthenticationMethod} from the given list of methods that is supported
+   * by this implementation.
    *
-   * @throws TechnicalException
-   *           if none of the provider-supported methods is supported.
+   * @throws TechnicalException if none of the provider-supported methods is supported.
    */
-  private static ClientAuthenticationMethod firstSupportedMethod(final List<ClientAuthenticationMethod> metadataMethods) {
+  private static ClientAuthenticationMethod firstSupportedMethod(
+      final List<ClientAuthenticationMethod> metadataMethods) {
     Optional<ClientAuthenticationMethod> firstSupported =
         metadataMethods.stream().filter((m) -> SUPPORTED_METHODS.contains(m)).findFirst();
     if (firstSupported.isPresent()) {
       return firstSupported.get();
     } else {
-      throw new TechnicalException("None of the Token endpoint provider metadata authentication methods are supported: "
-          + metadataMethods);
+      throw new TechnicalException(
+          "None of the Token endpoint provider metadata authentication methods are supported: "
+              + metadataMethods);
     }
   }
 
@@ -145,21 +152,30 @@ public void validate(final OidcCredentials credentials, final WebContext context
     if (code != null) {
       try {
         final String computedCallbackUrl = client.computeFinalCallbackUrl(context);
-        CodeVerifier verifier = (CodeVerifier) configuration.getValueRetriever()
-            .retrieve(client.getCodeVerifierSessionAttributeName(), client, context).orElse(null);
+        CodeVerifier verifier =
+            (CodeVerifier)
+                configuration
+                    .getValueRetriever()
+                    .retrieve(client.getCodeVerifierSessionAttributeName(), client, context)
+                    .orElse(null);
         // Token request
-        final TokenRequest request = createTokenRequest(new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier));
+        final TokenRequest request =
+            createTokenRequest(
+                new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier));
         HTTPRequest tokenHttpRequest = request.toHTTPRequest();
         tokenHttpRequest.setConnectTimeout(configuration.getConnectTimeout());
         tokenHttpRequest.setReadTimeout(configuration.getReadTimeout());
        final HTTPResponse httpResponse = tokenHttpRequest.send();
-        logger.debug("Token response: status={}, content={}", httpResponse.getStatusCode(),
logger.debug( + "Token response: status={}, content={}", + httpResponse.getStatusCode(), httpResponse.getContent()); final TokenResponse response = OIDCTokenResponseParser.parse(httpResponse); if (response instanceof TokenErrorResponse) { - throw new TechnicalException("Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); + throw new TechnicalException( + "Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); } logger.debug("Token response successful"); final OIDCTokenResponse tokenSuccessResponse = (OIDCTokenResponse) response; @@ -178,11 +194,15 @@ public void validate(final OidcCredentials credentials, final WebContext context private TokenRequest createTokenRequest(final AuthorizationGrant grant) { if (clientAuthentication != null) { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - this.clientAuthentication, grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + this.clientAuthentication, + grant); } else { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - new ClientID(configuration.getClientId()), grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + new ClientID(configuration.getClientId()), + grant); } } } diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index 24183f5c625da..4d40f45cd09b4 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -3,7 +3,6 @@ import com.datahub.authentication.Authentication; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; - import java.nio.charset.StandardCharsets; import java.util.Objects; import javax.annotation.Nonnull; @@ -17,17 +16,16 @@ import org.apache.http.util.EntityUtils; import play.mvc.Http; - -/** - * This class is responsible for coordinating authentication with the backend Metadata Service. - */ +/** This class is responsible for coordinating authentication with the backend Metadata Service. 
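+ * <p>For illustration only: given the constructor below, a client could be wired up as {@code new
+ * AuthServiceClient("localhost", 8080, false, systemAuthentication, HttpClients.createDefault())},
+ * where the host, port, and SSL flag are placeholder values normally resolved from configuration.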
*/ @Slf4j public class AuthServiceClient { private static final String GENERATE_SESSION_TOKEN_ENDPOINT = "auth/generateSessionTokenForUser"; private static final String SIGN_UP_ENDPOINT = "auth/signUp"; - private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials"; - private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials"; + private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/resetNativeUserCredentials"; + private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/verifyNativeUserCredentials"; private static final String TRACK_ENDPOINT = "auth/track"; private static final String ACCESS_TOKEN_FIELD = "accessToken"; private static final String USER_ID_FIELD = "userId"; @@ -39,7 +37,8 @@ public class AuthServiceClient { private static final String INVITE_TOKEN_FIELD = "inviteToken"; private static final String RESET_TOKEN_FIELD = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD = "doesPasswordMatch"; private final String metadataServiceHost; @@ -48,8 +47,11 @@ public class AuthServiceClient { private final Authentication systemAuthentication; private final CloseableHttpClient httpClient; - public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, - @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication, + public AuthServiceClient( + @Nonnull final String metadataServiceHost, + @Nonnull final Integer metadataServicePort, + @Nonnull final Boolean useSsl, + @Nonnull final Authentication systemAuthentication, @Nonnull final CloseableHttpClient httpClient) { this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost); this.metadataServicePort = Objects.requireNonNull(metadataServicePort); @@ -59,10 +61,11 @@ public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull fin } /** - * Call the Auth Service to generate a session token for a particular user with a unique actor id, or throws an exception if generation fails. + * Call the Auth Service to generate a session token for a particular user with a unique actor id, + * or throws an exception if generation fails. * - * Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of an Actor of type - * USER. + * <p>Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of + * an Actor of type USER. */ @Nonnull public String generateSessionTokenForUser(@Nonnull final String userId) { @@ -72,15 +75,21 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - GENERATE_SESSION_TOKEN_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + GENERATE_SESSION_TOKEN_ENDPOINT)); // Build JSON request to generate a token on behalf of a user. 
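      // For illustration: the payload assembled below is a single-field JSON object of the shape
      // {"userId": "<actor id>"}, pretty-printed by Jackson and posted as a UTF-8 string entity.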
final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_ID_FIELD, userId); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -94,7 +103,8 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { return getAccessTokenFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", + String.format( + "Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { @@ -110,11 +120,14 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { } } - /** - * Call the Auth Service to create a native Datahub user. - */ - public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullName, @Nonnull final String email, - @Nonnull final String title, @Nonnull final String password, @Nonnull final String inviteToken) { + /** Call the Auth Service to create a native Datahub user. */ + public boolean signUp( + @Nonnull final String userUrn, + @Nonnull final String fullName, + @Nonnull final String email, + @Nonnull final String title, + @Nonnull final String password, + @Nonnull final String inviteToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(fullName, "fullName must not be null"); Objects.requireNonNull(email, "email must not be null"); @@ -126,9 +139,11 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - SIGN_UP_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, SIGN_UP_ENDPOINT)); // Build JSON request to sign up a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -139,7 +154,8 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN objectNode.put(TITLE_FIELD, title); objectNode.put(PASSWORD_FIELD, password); objectNode.put(INVITE_TOKEN_FIELD, inviteToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -152,11 +168,15 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN final String jsonStr = EntityUtils.toString(entity); return getIsNativeUserCreatedFromJson(jsonStr); } else { - String content = response.getEntity().getContent() == null ? "" : new String( - response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); + String content = + response.getEntity().getContent() == null + ? 
"" + : new String( + response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s Body: %s", response.getStatusLine().toString(), - response.getEntity().toString(), content)); + String.format( + "Bad response from the Metadata Service: %s %s Body: %s", + response.getStatusLine().toString(), response.getEntity().toString(), content)); } } catch (Exception e) { throw new RuntimeException(String.format("Failed to create user %s", userUrn), e); @@ -171,10 +191,10 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN } } - /** - * Call the Auth Service to reset credentials for a native DataHub user. - */ - public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password, + /** Call the Auth Service to reset credentials for a native DataHub user. */ + public boolean resetNativeUserCredentials( + @Nonnull final String userUrn, + @Nonnull final String password, @Nonnull final String resetToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); @@ -184,9 +204,14 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -194,7 +219,8 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); objectNode.put(RESET_TOKEN_FIELD, resetToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -208,8 +234,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul return getAreNativeUserCredentialsResetFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to reset credentials for user", e); @@ -224,10 +251,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul } } - /** - * Call the Auth Service to verify the credentials for a native Datahub user. - */ - public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) { + /** Call the Auth Service to verify the credentials for a native Datahub user. 
*/ + public boolean verifyNativeUserCredentials( + @Nonnull final String userUrn, @Nonnull final String password) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); CloseableHttpResponse response = null; @@ -235,16 +261,22 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -258,8 +290,9 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu return getDoesPasswordMatchFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to verify credentials for user", e); @@ -274,18 +307,18 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu } } - /** - * Call the Auth Service to track an analytics event - */ + /** Call the Auth Service to track an analytics event */ public void track(@Nonnull final String event) { Objects.requireNonNull(event, "event must not be null"); CloseableHttpResponse response = null; try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - TRACK_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, TRACK_ENDPOINT)); // Build JSON request to track event. 
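      // Note that, unlike the other endpoints, no JSON body is assembled here: the caller-supplied
      // event string is assumed to already be serialized JSON and is forwarded as-is.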
request.setEntity(new StringEntity(event, StandardCharsets.UTF_8)); @@ -298,8 +331,9 @@ public void track(@Nonnull final String event) { if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK || entity == null) { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to track event", e); diff --git a/datahub-frontend/app/client/KafkaTrackingProducer.java b/datahub-frontend/app/client/KafkaTrackingProducer.java index 59e91a6d5a0f7..b7173684b6350 100644 --- a/datahub-frontend/app/client/KafkaTrackingProducer.java +++ b/datahub-frontend/app/client/KafkaTrackingProducer.java @@ -3,6 +3,15 @@ import com.linkedin.metadata.config.kafka.ProducerConfiguration; import com.typesafe.config.Config; import config.ConfigurationProvider; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import javax.inject.Inject; +import javax.inject.Singleton; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -15,98 +24,141 @@ import play.api.inject.ApplicationLifecycle; import utils.ConfigUtil; -import javax.inject.Inject; - -import javax.annotation.Nonnull; -import javax.inject.Singleton; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; - @Singleton public class KafkaTrackingProducer { - private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); - private static final List<String> KAFKA_SSL_PROTOCOLS = Collections.unmodifiableList( - Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(), - SecurityProtocol.SASL_PLAINTEXT.name())); - - private final Boolean _isEnabled; - private final KafkaProducer<String, String> _producer; - - @Inject - public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle, final ConfigurationProvider configurationProvider) { - _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); - - if (_isEnabled) { - _logger.debug("Analytics tracking is enabled"); - _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); - - lifecycle.addStopHook( - () -> { - _producer.flush(); - _producer.close(); - return CompletableFuture.completedFuture(null); - }); - } else { - _logger.debug("Analytics tracking is disabled"); - _producer = null; - } - } - - public Boolean isEnabled() { - return _isEnabled; + private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); + private static final List<String> KAFKA_SSL_PROTOCOLS = + Collections.unmodifiableList( + Arrays.asList( + SecurityProtocol.SSL.name(), + SecurityProtocol.SASL_SSL.name(), + SecurityProtocol.SASL_PLAINTEXT.name())); + + private final Boolean _isEnabled; + private final KafkaProducer<String, String> _producer; + + @Inject + public KafkaTrackingProducer( + @Nonnull Config config, + ApplicationLifecycle lifecycle, + final 
ConfigurationProvider configurationProvider) { + _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); + + if (_isEnabled) { + _logger.debug("Analytics tracking is enabled"); + _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); + + lifecycle.addStopHook( + () -> { + _producer.flush(); + _producer.close(); + return CompletableFuture.completedFuture(null); + }); + } else { + _logger.debug("Analytics tracking is disabled"); + _producer = null; } - - public void send(ProducerRecord<String, String> record) { - _producer.send(record); + } + + public Boolean isEnabled() { + return _isEnabled; + } + + public void send(ProducerRecord<String, String> record) { + _producer.send(record); + } + + private static KafkaProducer createKafkaProducer( + Config config, ProducerConfiguration producerConfiguration) { + final Properties props = new Properties(); + props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); + props.put( + ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, + config.getString("analytics.kafka.delivery.timeout.ms")); + props.put( + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + config.getString("analytics.kafka.bootstrap.server")); + props.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. + props.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. + props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); + props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); + + final String securityProtocolConfig = "analytics.kafka.security.protocol"; + if (config.hasPath(securityProtocolConfig) + && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { + props.put( + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); + setConfig( + config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); + + setConfig( + config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.keystore.location"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.keystore.password"); + + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, + "analytics.kafka.ssl.truststore.type"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.truststore.location"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.truststore.password"); + + setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); + setConfig( + config, + props, + SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, + "analytics.kafka.ssl.endpoint.identification.algorithm"); + + final String securityProtocol = config.getString(securityProtocolConfig); + if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) + || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { + setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); + setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); + setConfig( + config, + props, + SaslConfigs.SASL_KERBEROS_SERVICE_NAME, + 
"analytics.kafka.sasl.kerberos.service.name"); + setConfig( + config, + props, + SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.login.callback.handler.class"); + setConfig( + config, + props, + SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.client.callback.handler.class"); + } } - private static KafkaProducer createKafkaProducer(Config config, ProducerConfiguration producerConfiguration) { - final Properties props = new Properties(); - props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); - props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.getString("analytics.kafka.delivery.timeout.ms")); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("analytics.kafka.bootstrap.server")); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. - props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); - props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); - - final String securityProtocolConfig = "analytics.kafka.security.protocol"; - if (config.hasPath(securityProtocolConfig) - && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { - props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); - setConfig(config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); - - setConfig(config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.keystore.location"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.keystore.password"); - - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "analytics.kafka.ssl.truststore.type"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.truststore.location"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.truststore.password"); - - setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); - setConfig(config, props, SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "analytics.kafka.ssl.endpoint.identification.algorithm"); - - final String securityProtocol = config.getString(securityProtocolConfig); - if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) - || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { - setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); - setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); - setConfig(config, props, SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "analytics.kafka.sasl.kerberos.service.name"); - setConfig(config, props, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.login.callback.handler.class"); - setConfig(config, props, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.client.callback.handler.class"); - } - } - - return new org.apache.kafka.clients.producer.KafkaProducer<String, String>(props); - } + return new org.apache.kafka.clients.producer.KafkaProducer<String, String>(props); + } - private static void setConfig(Config config, Properties props, 
String key, String configKey) {
-    Optional.ofNullable(ConfigUtil.getString(config, configKey, null))
-        .ifPresent(v -> props.put(key, v));
-  }
+  private static void setConfig(Config config, Properties props, String key, String configKey) {
+    Optional.ofNullable(ConfigUtil.getString(config, configKey, null))
+        .ifPresent(v -> props.put(key, v));
+  }
 }
diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java
index 8f526c831b5c9..3d87267f8ebe3 100644
--- a/datahub-frontend/app/config/ConfigurationProvider.java
+++ b/datahub-frontend/app/config/ConfigurationProvider.java
@@ -4,28 +4,22 @@
 import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import lombok.Data;
-
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.context.annotation.PropertySource;
-
 /**
- * Minimal sharing between metadata-service and frontend
- * Does not use the factories module to avoid transitive dependencies.
+ * Minimal sharing between metadata-service and frontend. Does not use the factories module to
+ * avoid transitive dependencies.
  */
 @EnableConfigurationProperties
 @PropertySource(value = "application.yml", factory = YamlPropertySourceFactory.class)
 @ConfigurationProperties
 @Data
 public class ConfigurationProvider {
-  /**
-   * Kafka related configs.
-   */
-  private KafkaConfiguration kafka;
+  /** Kafka related configs. */
+  private KafkaConfiguration kafka;
 
-  /**
-   * Configuration for caching
-   */
-  private CacheConfiguration cache;
+  /** Configuration for caching */
+  private CacheConfiguration cache;
 }
diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java
index 5c76f2572a936..60971bf06e27b 100644
--- a/datahub-frontend/app/controllers/Application.java
+++ b/datahub-frontend/app/controllers/Application.java
@@ -1,5 +1,8 @@
 package controllers;
 
+import static auth.AuthUtils.ACTOR;
+import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME;
+
 import akka.actor.ActorSystem;
 import akka.stream.ActorMaterializer;
 import akka.stream.Materializer;
@@ -9,41 +12,35 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.util.Pair;
 import com.typesafe.config.Config;
-
+import java.io.InputStream;
+import java.time.Duration;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
 import java.util.stream.Collectors;
-
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import javax.inject.Inject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import play.Environment;
 import play.http.HttpEntity;
+import play.libs.Json;
 import play.libs.ws.InMemoryBodyWritable;
 import play.libs.ws.StandaloneWSClient;
-import play.libs.Json;
 import play.libs.ws.ahc.StandaloneAhcWSClient;
 import play.mvc.Controller;
 import play.mvc.Http;
 import play.mvc.ResponseHeader;
 import play.mvc.Result;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-import javax.inject.Inject;
-import java.io.InputStream;
 import play.mvc.Security;
 import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient;
 import play.shaded.ahc.org.asynchttpclient.AsyncHttpClientConfig;
 import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClient;
 import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClientConfig;
 import utils.ConfigUtil;
-import java.time.Duration;
-
-import static auth.AuthUtils.ACTOR;
-import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME;
-
 public class Application extends Controller {
 
   private final Logger _logger = LoggerFactory.getLogger(Application.class.getName());
@@ -61,22 +58,17 @@ public Application(Environment environment, @Nonnull Config config) {
   /**
    * Serves the build output index.html for any given path
    *
-   * @param path takes a path string, which essentially is ignored - routing is managed client side
+   * @param path takes a path string, which essentially is ignored; routing is managed client side
    * @return {Result} build output index.html resource
    */
   @Nonnull
   private Result serveAsset(@Nullable String path) {
     try {
       InputStream indexHtml = _environment.resourceAsStream("public/index.html");
-      return ok(indexHtml)
-        .withHeader("Cache-Control", "no-cache")
-        .as("text/html");
+      return ok(indexHtml).withHeader("Cache-Control", "no-cache").as("text/html");
     } catch (Exception e) {
       _logger.warn("Cannot load public/index.html resource. Static assets or assets jar missing?");
-      return notFound()
-        .withHeader("Cache-Control", "no-cache")
-        .as("text/html");
+      return notFound().withHeader("Cache-Control", "no-cache").as("text/html");
     }
   }
 
@@ -99,66 +91,87 @@ public Result index(@Nullable String path) {
   /**
    * Proxies requests to the Metadata Service
    *
-   * TODO: Investigate using mutual SSL authentication to call Metadata Service.
+   * <p>TODO: Investigate using mutual SSL authentication to call Metadata Service.
    */
   @Security.Authenticated(Authenticator.class)
-  public CompletableFuture<Result> proxy(String path, Http.Request request) throws ExecutionException, InterruptedException {
+  public CompletableFuture<Result> proxy(String path, Http.Request request)
+      throws ExecutionException, InterruptedException {
     final String authorizationHeaderValue = getAuthorizationHeaderValueToProxy(request);
     final String resolvedUri = mapPath(request.uri());
 
-    final String metadataServiceHost = ConfigUtil.getString(
-        _config,
-        ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH,
-        ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
-    final int metadataServicePort = ConfigUtil.getInt(
-        _config,
-        ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
-        ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
-    final boolean metadataServiceUseSsl = ConfigUtil.getBoolean(
-        _config,
-        ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
-        ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL
-    );
+    final String metadataServiceHost =
+        ConfigUtil.getString(
+            _config,
+            ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH,
+            ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
+    final int metadataServicePort =
+        ConfigUtil.getInt(
+            _config,
+            ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
+            ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
+    final boolean metadataServiceUseSsl =
+        ConfigUtil.getBoolean(
+            _config,
+            ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
+            ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL);
 
     // TODO: Fully support custom internal SSL.
    final String protocol = metadataServiceUseSsl ?
"https" : "http"; final Map<String, List<String>> headers = request.getHeaders().toMap(); - if (headers.containsKey(Http.HeaderNames.HOST) && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { - headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); + if (headers.containsKey(Http.HeaderNames.HOST) + && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { + headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); } - return _ws.url(String.format("%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) + return _ws.url( + String.format( + "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) .setMethod(request.method()) - .setHeaders(headers - .entrySet() - .stream() - // Remove X-DataHub-Actor to prevent malicious delegation. - .filter(entry -> !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) - // Remove Host s.th. service meshes do not route to wrong host - .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ) + .setHeaders( + headers.entrySet().stream() + // Remove X-DataHub-Actor to prevent malicious delegation. + .filter( + entry -> + !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase( + entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) + // Remove Host s.th. 
service meshes do not route to wrong host + .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) .addHeader(Http.HeaderNames.AUTHORIZATION, authorizationHeaderValue) - .addHeader(AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) - .setBody(new InMemoryBodyWritable(ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), "application/json")) + .addHeader( + AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) + .setBody( + new InMemoryBodyWritable( + ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), + "application/json")) .setRequestTimeout(Duration.ofSeconds(120)) .execute() - .thenApply(apiResponse -> { - final ResponseHeader header = new ResponseHeader(apiResponse.getStatus(), apiResponse.getHeaders() - .entrySet() - .stream() - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); - final HttpEntity body = new HttpEntity.Strict(apiResponse.getBodyAsBytes(), Optional.ofNullable(apiResponse.getContentType())); - return new Result(header, body); - }).toCompletableFuture(); + .thenApply( + apiResponse -> { + final ResponseHeader header = + new ResponseHeader( + apiResponse.getStatus(), + apiResponse.getHeaders().entrySet().stream() + .filter( + entry -> + !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter( + entry -> + !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); + final HttpEntity body = + new HttpEntity.Strict( + apiResponse.getBodyAsBytes(), + Optional.ofNullable(apiResponse.getContentType())); + return new Result(header, body); + }) + .toCompletableFuture(); } /** @@ -173,11 +186,13 @@ public Result appConfig() { config.put("appVersion", _config.getString("app.version")); config.put("isInternal", _config.getBoolean("linkedin.internal")); config.put("shouldShowDatasetLineage", _config.getBoolean("linkedin.show.dataset.lineage")); - config.put("suggestionConfidenceThreshold", + config.put( + "suggestionConfidenceThreshold", Integer.valueOf(_config.getString("linkedin.suggestion.confidence.threshold"))); config.set("wikiLinks", wikiLinks()); config.set("tracking", trackingInfo()); - // In a staging environment, we can trigger this flag to be true so that the UI can handle based on + // In a staging environment, we can trigger this flag to be true so that the UI can handle based + // on // such config and alert users that their changes will not affect production data config.put("isStagingBanner", _config.getBoolean("ui.show.staging.banner")); config.put("isLiveDataWarning", _config.getBoolean("ui.show.live.data.banner")); @@ -206,6 +221,7 @@ public Result appConfig() { /** * Creates a JSON object of profile / avatar properties + * * @return Json avatar / profile image properties */ @Nonnull @@ -273,23 +289,26 @@ private StandaloneWSClient createWsClient() { } /** - * Returns the value of the Authorization Header to be provided when proxying requests to the downstream Metadata Service. 
+ * Returns the value of the Authorization Header to be provided when proxying requests to the + * downstream Metadata Service. * - * Currently, the Authorization header value may be derived from + * <p>Currently, the Authorization header value may be derived from * - * a) The value of the "token" attribute of the Session Cookie provided by the client. This value is set - * when creating the session token initially from a token granted by the Metadata Service. + * <p>a) The value of the "token" attribute of the Session Cookie provided by the client. This + * value is set when creating the session token initially from a token granted by the Metadata + * Service. * - * Or if the "token" attribute cannot be found in a session cookie, then we fallback to + * <p>Or if the "token" attribute cannot be found in a session cookie, then we fallback to * - * b) The value of the Authorization - * header provided in the original request. This will be used in cases where clients are making programmatic requests - * to Metadata Service APIs directly, without providing a session cookie (ui only). + * <p>b) The value of the Authorization header provided in the original request. This will be used + * in cases where clients are making programmatic requests to Metadata Service APIs directly, + * without providing a session cookie (ui only). * - * If neither are found, an empty string is returned. + * <p>If neither are found, an empty string is returned. */ private String getAuthorizationHeaderValueToProxy(Http.Request request) { - // If the session cookie has an authorization token, use that. If there's an authorization header provided, simply + // If the session cookie has an authorization token, use that. If there's an authorization + // header provided, simply // use that. String value = ""; if (request.session().data().containsKey(SESSION_COOKIE_GMS_TOKEN_NAME)) { @@ -301,11 +320,13 @@ private String getAuthorizationHeaderValueToProxy(Http.Request request) { } /** - * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This is sent along - * with any requests that have a valid frontend session cookie to identify the calling actor, for backwards compatibility. + * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This + * is sent along with any requests that have a valid frontend session cookie to identify the + * calling actor, for backwards compatibility. * - * If Metadata Service authentication is enabled, this value is not required because Actor context will most often come - * from the authentication credentials provided in the Authorization header. + * <p>If Metadata Service authentication is enabled, this value is not required because Actor + * context will most often come from the authentication credentials provided in the Authorization + * header. 
*/ private String getDataHubActorHeader(Http.Request request) { String actor = request.session().data().get(ACTOR); diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index e28d4ba2ee37e..9c232e965a003 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -1,5 +1,9 @@ package controllers; +import static auth.AuthUtils.*; +import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; +import static org.pac4j.play.store.PlayCookieSessionStore.*; + import auth.AuthUtils; import auth.CookieConfigs; import auth.JAASConfigs; @@ -35,325 +39,337 @@ import play.mvc.Results; import security.AuthenticationManager; -import static auth.AuthUtils.*; -import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; -import static org.pac4j.play.store.PlayCookieSessionStore.*; - - // TODO add logging. public class AuthenticationController extends Controller { - public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; - private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; - private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; - private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; - - private static final String SSO_NO_REDIRECT_MESSAGE = "SSO is configured, however missing redirect from idp"; - - private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); - private final CookieConfigs _cookieConfigs; - private final JAASConfigs _jaasConfigs; - private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; - private final boolean _verbose; - - @Inject - private org.pac4j.core.config.Config _ssoConfig; - - @Inject - private PlaySessionStore _playSessionStore; - - @Inject - private SsoManager _ssoManager; - - @Inject - AuthServiceClient _authClient; - - @Inject - public AuthenticationController(@Nonnull Config configs) { - _cookieConfigs = new CookieConfigs(configs); - _jaasConfigs = new JAASConfigs(configs); - _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); - _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; + private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; + private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; + private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; + + private static final String SSO_NO_REDIRECT_MESSAGE = + "SSO is configured, however missing redirect from idp"; + + private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); + private final CookieConfigs _cookieConfigs; + private final JAASConfigs _jaasConfigs; + private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; + private final boolean _verbose; + + @Inject private org.pac4j.core.config.Config _ssoConfig; + + @Inject private PlaySessionStore _playSessionStore; + + @Inject private SsoManager _ssoManager; + + @Inject AuthServiceClient _authClient; + + @Inject + public AuthenticationController(@Nonnull Config configs) { + _cookieConfigs = new CookieConfigs(configs); + _jaasConfigs = new JAASConfigs(configs); + _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); + _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && 
configs.getBoolean(AUTH_VERBOSE_LOGGING); + } + + /** + * Route used to perform authentication, or redirect to log in if authentication fails. + * + * <p>If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider + * (Indirect auth). If not, we will fall back to the default username / password login experience + * (Direct auth). + */ + @Nonnull + public Result authenticate(Http.Request request) { + + // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is + // authenticated. + + final Optional<String> maybeRedirectPath = + Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); + final String redirectPath = maybeRedirectPath.orElse("/"); + + if (AuthUtils.hasValidSessionCookie(request)) { + return Results.redirect(redirectPath); } - /** - * Route used to perform authentication, or redirect to log in if authentication fails. - * - * If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider (Indirect auth). - * If not, we will fall back to the default username / password login experience (Direct auth). - */ - @Nonnull - public Result authenticate(Http.Request request) { - - // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is authenticated. - - final Optional<String> maybeRedirectPath = Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); - final String redirectPath = maybeRedirectPath.orElse("/"); - - if (AuthUtils.hasValidSessionCookie(request)) { - return Results.redirect(redirectPath); - } - - // 1. If SSO is enabled, redirect to IdP if not authenticated. - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, redirectPath).orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - - // 2. If either JAAS auth or Native auth is enabled, fallback to it - if (_jaasConfigs.isJAASEnabled() || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { - return Results.redirect( - LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); - } - - // 3. If no auth enabled, fallback to using default user account & redirect. - // Generate GMS session token, TODO: - final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); - return Results.redirect(redirectPath).withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) - .withCookies( - createActorCookie( - DEFAULT_ACTOR_URN.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + // 1. If SSO is enabled, redirect to IdP if not authenticated. + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, redirectPath) + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); } - /** - * Redirect to the identity provider for authentication. - */ - @Nonnull - public Result sso(Http.Request request) { - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, "/").orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - return Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + // 2. 
If either JAAS auth or Native auth is enabled, fall back to it
+    if (_jaasConfigs.isJAASEnabled()
+        || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) {
+      return Results.redirect(
+          LOGIN_ROUTE
+              + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath)));
    }

-    /**
-     * Redirect to the identity provider for authentication.
-     */
-    @Nonnull
-    public Result sso(Http.Request request) {
-        if (_ssoManager.isSsoEnabled()) {
-            return redirectToIdentityProvider(request, "/").orElse(
-                Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))
-            );
-        }
-        return Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE));
+    // 3. If no auth enabled, fall back to using the default user account & redirect.
+    // Generate GMS session token, TODO:
+    final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId());
+    return Results.redirect(redirectPath)
+        .withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken))
+        .withCookies(
+            createActorCookie(
+                DEFAULT_ACTOR_URN.toString(),
+                _cookieConfigs.getTtlInHours(),
+                _cookieConfigs.getAuthCookieSameSite(),
+                _cookieConfigs.getAuthCookieSecure()));
+  }
+
+  /** Redirect to the identity provider for authentication. */
+  @Nonnull
+  public Result sso(Http.Request request) {
+    if (_ssoManager.isSsoEnabled()) {
+      return redirectToIdentityProvider(request, "/")
+          .orElse(
+              Results.redirect(
+                  LOGIN_ROUTE
+                      + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)));
+    }
+    return Results.redirect(
+        LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE));
+  }
+
+  /**
+   * Log in a user based on a username + password.
+   *
+   * <p>TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the
+   * default.
+ */ + @Nonnull + public Result logIn(Http.Request request) { + boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); + _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; + if (noAuthEnabled) { + String message = "Neither JAAS nor native authentication is enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); } - /** - * Sign up a native user based on a name, email, title, and password. The invite token must match an existing invite token. - * - */ - @Nonnull - public Result signUp(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } + final JsonNode json = request.body().asJson(); + final String username = json.findPath(USER_NAME).textValue(); + final String password = json.findPath(PASSWORD).textValue(); - final JsonNode json = request.body().asJson(); - final String fullName = json.findPath(FULL_NAME).textValue(); - final String email = json.findPath(EMAIL).textValue(); - final String title = json.findPath(TITLE).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); + if (StringUtils.isBlank(username)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(fullName)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); - return Results.badRequest(invalidCredsJson); - } + JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); + boolean loginSucceeded = tryLogin(username, password); - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { - Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); - if (!emailValidator.isValid(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - } + if (!loginSucceeded) { + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn actorUrn = new CorpuserUrn(username); + final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); + return createSession(actorUrn.toString(), accessToken); + } + + /** + * Sign up a native user based on a name, email, title, and password. The invite token must match + * an existing invite token. 
+ */ + @Nonnull + public Result signUp(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); + } - if (StringUtils.isBlank(title)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String fullName = json.findPath(FULL_NAME).textValue(); + final String email = json.findPath(EMAIL).textValue(); + final String title = json.findPath(TITLE).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); - if (StringUtils.isBlank(inviteToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Invite token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(fullName)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } + if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { + Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); + if (!emailValidator.isValid(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } } - /** - * Reset a native user's credentials based on a username, old password, and new password. 
- * - */ - @Nonnull - public Result resetNativeUserCredentials(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return badRequest(error); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final JsonNode json = request.body().asJson(); - final String email = json.findPath(EMAIL).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String resetToken = json.findPath(RESET_TOKEN).textValue(); + if (StringUtils.isBlank(title)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(inviteToken)) { + JsonNode invalidCredsJson = + Json.newObject().put("message", "Invite token must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + /** Reset a native user's credentials based on a username, old password, and new password. 
*/ + @Nonnull + public Result resetNativeUserCredentials(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return badRequest(error); + } - if (StringUtils.isBlank(resetToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String email = json.findPath(EMAIL).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String resetToken = json.findPath(RESET_TOKEN).textValue(); - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); } - private Optional<Result> redirectToIdentityProvider(Http.RequestHeader request, String redirectPath) { - final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); - final Client client = _ssoManager.getSsoProvider().client(); - configurePac4jSessionStore(playWebContext, client, redirectPath); - try { - final Optional<RedirectionAction> action = client.getRedirectionAction(playWebContext); - return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); - } catch (Exception e) { - if (_verbose) { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", e); - } else { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); - } - return Optional.of(Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8)))); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); } - private void configurePac4jSessionStore(PlayWebContext context, Client client, String redirectPath) { - // Set the originally requested path for post-auth redirection. We split off into a separate cookie from the session - // to reduce size of the session cookie - FoundAction foundAction = new FoundAction(redirectPath); - byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); - String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); - context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); - // This is to prevent previous login attempts from being cached. - // We replicate the logic here, which is buried in the Pac4j client. 
- if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) != null) { - _logger.debug("Found previous login attempt. Removing it manually to prevent unexpected errors."); - _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); - } + if (StringUtils.isBlank(resetToken)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); + return Results.badRequest(invalidCredsJson); } - private String encodeRedirectUri(final String redirectUri) { - return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + private Optional<Result> redirectToIdentityProvider( + Http.RequestHeader request, String redirectPath) { + final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); + final Client client = _ssoManager.getSsoProvider().client(); + configurePac4jSessionStore(playWebContext, client, redirectPath); + try { + final Optional<RedirectionAction> action = client.getRedirectionAction(playWebContext); + return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); + } catch (Exception e) { + if (_verbose) { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", + e); + } else { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); + } + return Optional.of( + Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8)))); } - - private boolean tryLogin(String username, String password) { - boolean loginSucceeded = false; - - // First try jaas login, if enabled - if (_jaasConfigs.isJAASEnabled()) { - try { - _logger.debug("Attempting jaas authentication"); - AuthenticationManager.authenticateJaasUser(username, password); - _logger.debug("Jaas authentication successful. Login succeeded"); - loginSucceeded = true; - } catch (Exception e) { - if (_verbose) { - _logger.debug("Jaas authentication error. Login failed", e); - } else { - _logger.debug("Jaas authentication error. Login failed"); - } - } - } - - // If jaas login fails or is disabled, try native auth login - if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { - final Urn userUrn = new CorpuserUrn(username); - final String userUrnString = userUrn.toString(); - loginSucceeded = loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); + } + + private void configurePac4jSessionStore( + PlayWebContext context, Client client, String redirectPath) { + // Set the originally requested path for post-auth redirection. 
We split off into a separate + // cookie from the session + // to reduce size of the session cookie + FoundAction foundAction = new FoundAction(redirectPath); + byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); + String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); + context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); + // This is to prevent previous login attempts from being cached. + // We replicate the logic here, which is buried in the Pac4j client. + if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) + != null) { + _logger.debug( + "Found previous login attempt. Removing it manually to prevent unexpected errors."); + _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); + } + } + + private String encodeRedirectUri(final String redirectUri) { + return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + } + + private boolean tryLogin(String username, String password) { + boolean loginSucceeded = false; + + // First try jaas login, if enabled + if (_jaasConfigs.isJAASEnabled()) { + try { + _logger.debug("Attempting jaas authentication"); + AuthenticationManager.authenticateJaasUser(username, password); + _logger.debug("Jaas authentication successful. Login succeeded"); + loginSucceeded = true; + } catch (Exception e) { + if (_verbose) { + _logger.debug("Jaas authentication error. Login failed", e); + } else { + _logger.debug("Jaas authentication error. Login failed"); } - - return loginSucceeded; + } } - private Result createSession(String userUrnString, String accessToken) { - return Results.ok().withSession(createSessionMap(userUrnString, accessToken)) - .withCookies( - createActorCookie( - userUrnString, - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); - + // If jaas login fails or is disabled, try native auth login + if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { + final Urn userUrn = new CorpuserUrn(username); + final String userUrnString = userUrn.toString(); + loginSucceeded = + loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); } -} \ No newline at end of file + + return loginSucceeded; + } + + private Result createSession(String userUrnString, String accessToken) { + return Results.ok() + .withSession(createSessionMap(userUrnString, accessToken)) + .withCookies( + createActorCookie( + userUrnString, + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } +} diff --git a/datahub-frontend/app/controllers/CentralLogoutController.java b/datahub-frontend/app/controllers/CentralLogoutController.java index 5e24fe9f8220c..eea1c662ebf89 100644 --- a/datahub-frontend/app/controllers/CentralLogoutController.java +++ b/datahub-frontend/app/controllers/CentralLogoutController.java @@ -2,18 +2,15 @@ import com.typesafe.config.Config; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import javax.inject.Inject; import lombok.extern.slf4j.Slf4j; import org.pac4j.play.LogoutController; import play.mvc.Http; import play.mvc.Result; import play.mvc.Results; -import javax.inject.Inject; -import java.nio.charset.StandardCharsets; - -/** - * Responsible for handling logout logic with oidc providers - */ +/** Responsible for handling logout logic with oidc providers */ @Slf4j public class CentralLogoutController extends 
LogoutController { private static final String AUTH_URL_CONFIG_PATH = "/login"; @@ -28,26 +25,27 @@ public CentralLogoutController(Config config) { setLogoutUrlPattern(DEFAULT_BASE_URL_PATH + ".*"); setLocalLogout(true); setCentralLogout(true); - } - /** - * logout() method should not be called if oidc is not enabled - */ + /** logout() method should not be called if oidc is not enabled */ public Result executeLogout(Http.Request request) { if (_isOidcEnabled) { try { return logout(request).toCompletableFuture().get().withNewSession(); } catch (Exception e) { - log.error("Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", e); + log.error( + "Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", + e); return redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8))) - .withNewSession(); + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8))) + .withNewSession(); } } - return Results.redirect(AUTH_URL_CONFIG_PATH) - .withNewSession(); + return Results.redirect(AUTH_URL_CONFIG_PATH).withNewSession(); } } diff --git a/datahub-frontend/app/controllers/SsoCallbackController.java b/datahub-frontend/app/controllers/SsoCallbackController.java index 7a4b5585cc21a..9f4445b1aa5c7 100644 --- a/datahub-frontend/app/controllers/SsoCallbackController.java +++ b/datahub-frontend/app/controllers/SsoCallbackController.java @@ -1,6 +1,9 @@ package controllers; import auth.CookieConfigs; +import auth.sso.SsoManager; +import auth.sso.SsoProvider; +import auth.sso.oidc.OidcCallbackLogic; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemEntityClient; @@ -18,17 +21,13 @@ import org.pac4j.play.PlayWebContext; import play.mvc.Http; import play.mvc.Result; -import auth.sso.oidc.OidcCallbackLogic; -import auth.sso.SsoManager; -import auth.sso.SsoProvider; import play.mvc.Results; - /** * A dedicated Controller for handling redirects to DataHub by 3rd-party Identity Providers after * off-platform authentication. * - * Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines + * <p>Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines * the handling logic to invoke. */ @Slf4j @@ -46,56 +45,88 @@ public SsoCallbackController( _ssoManager = ssoManager; setDefaultUrl("/"); // By default, redirects to Home Page on log in. setSaveInSession(false); - setCallbackLogic(new SsoCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, new CookieConfigs(configs))); + setCallbackLogic( + new SsoCallbackLogic( + ssoManager, + systemAuthentication, + entityClient, + authClient, + new CookieConfigs(configs))); } public CompletionStage<Result> handleCallback(String protocol, Http.Request request) { if (shouldHandleCallback(protocol)) { log.debug(String.format("Handling SSO callback. Protocol: %s", protocol)); - return callback(request).handle((res, e) -> { - if (e != null) { - log.error("Caught exception while attempting to handle SSO callback! 
It's likely that SSO integration is mis-configured.", e); - return Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode( - "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", - StandardCharsets.UTF_8))) - .discardingCookie("actor") - .withNewSession(); - } - return res; - }); + return callback(request) + .handle( + (res, e) -> { + if (e != null) { + log.error( + "Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", + e); + return Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", + StandardCharsets.UTF_8))) + .discardingCookie("actor") + .withNewSession(); + } + return res; + }); } - return CompletableFuture.completedFuture(Results.internalServerError( - String.format("Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); + return CompletableFuture.completedFuture( + Results.internalServerError( + String.format( + "Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); } - - /** - * Logic responsible for delegating to protocol-specific callback logic. - */ + /** Logic responsible for delegating to protocol-specific callback logic. */ public class SsoCallbackLogic implements CallbackLogic<Result, PlayWebContext> { private final OidcCallbackLogic _oidcCallbackLogic; - SsoCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, final CookieConfigs cookieConfigs) { - _oidcCallbackLogic = new OidcCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); + SsoCallbackLogic( + final SsoManager ssoManager, + final Authentication systemAuthentication, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { + _oidcCallbackLogic = + new OidcCallbackLogic( + ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { if (SsoProvider.SsoProtocol.OIDC.equals(_ssoManager.getSsoProvider().protocol())) { - return _oidcCallbackLogic.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, defaultClient); + return _oidcCallbackLogic.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, + defaultClient); } // Should never occur. - throw new UnsupportedOperationException("Failed to find matching SSO Provider. Only one supported is OIDC."); + throw new UnsupportedOperationException( + "Failed to find matching SSO Provider. 
Only one supported is OIDC."); } } private boolean shouldHandleCallback(final String protocol) { - return _ssoManager.isSsoEnabled() && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + return _ssoManager.isSsoEnabled() + && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); } } diff --git a/datahub-frontend/app/controllers/TrackingController.java b/datahub-frontend/app/controllers/TrackingController.java index 776ab5cad58ff..254a8cc640d0c 100644 --- a/datahub-frontend/app/controllers/TrackingController.java +++ b/datahub-frontend/app/controllers/TrackingController.java @@ -1,14 +1,15 @@ package controllers; +import static auth.AuthUtils.ACTOR; + import auth.Authenticator; import client.AuthServiceClient; +import client.KafkaTrackingProducer; import com.fasterxml.jackson.databind.JsonNode; import com.typesafe.config.Config; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.inject.Singleton; - - import org.apache.kafka.clients.producer.ProducerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -16,57 +17,52 @@ import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import client.KafkaTrackingProducer; - -import static auth.AuthUtils.ACTOR; - // TODO: Migrate this to metadata-service. @Singleton public class TrackingController extends Controller { - private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); + private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); - private final String _topic; + private final String _topic; - @Inject - KafkaTrackingProducer _producer; + @Inject KafkaTrackingProducer _producer; - @Inject - AuthServiceClient _authClient; + @Inject AuthServiceClient _authClient; - @Inject - public TrackingController(@Nonnull Config config) { - _topic = config.getString("analytics.tracking.topic"); - } + @Inject + public TrackingController(@Nonnull Config config) { + _topic = config.getString("analytics.tracking.topic"); + } - @Security.Authenticated(Authenticator.class) - @Nonnull - public Result track(Http.Request request) throws Exception { - if (!_producer.isEnabled()) { - // If tracking is disabled, simply return a 200. - return status(200); - } + @Security.Authenticated(Authenticator.class) + @Nonnull + public Result track(Http.Request request) throws Exception { + if (!_producer.isEnabled()) { + // If tracking is disabled, simply return a 200. + return status(200); + } - JsonNode event; - try { - event = request.body().asJson(); - } catch (Exception e) { - return badRequest(); - } - final String actor = request.session().data().get(ACTOR); - try { - _logger.debug(String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); - final ProducerRecord<String, String> record = new ProducerRecord<>( - _topic, - actor, - event.toString()); - _producer.send(record); - _authClient.track(event.toString()); - return ok(); - } catch (Exception e) { - _logger.error(String.format("Failed to emit product analytics event. actor: %s, event: %s", actor, event)); - return internalServerError(e.getMessage()); - } + JsonNode event; + try { + event = request.body().asJson(); + } catch (Exception e) { + return badRequest(); + } + final String actor = request.session().data().get(ACTOR); + try { + _logger.debug( + String.format("Emitting product analytics event. 
actor: %s, event: %s", actor, event)); + final ProducerRecord<String, String> record = + new ProducerRecord<>(_topic, actor, event.toString()); + _producer.send(record); + _authClient.track(event.toString()); + return ok(); + } catch (Exception e) { + _logger.error( + String.format( + "Failed to emit product analytics event. actor: %s, event: %s", actor, event)); + return internalServerError(e.getMessage()); } + } } diff --git a/datahub-frontend/app/security/AuthUtil.java b/datahub-frontend/app/security/AuthUtil.java index 8af90b37a6f31..55752644ada70 100644 --- a/datahub-frontend/app/security/AuthUtil.java +++ b/datahub-frontend/app/security/AuthUtil.java @@ -8,52 +8,53 @@ import javax.crypto.spec.SecretKeySpec; import org.apache.commons.codec.digest.HmacAlgorithms; - -/** - * Auth Utils - * Adheres to HSEC requirement for creating application tokens - */ +/** Auth Utils Adheres to HSEC requirement for creating application tokens */ public final class AuthUtil { private static final String HMAC_SHA256_ALGORITHM = HmacAlgorithms.HMAC_SHA_256.toString(); private static final String DELIIMITER = ":"; private static final String HEX_CHARS = "0123456789ABCDEF"; - private AuthUtil() { } + private AuthUtil() {} /** * Generate hash string using the secret HMAC Key + * * @param value value to be hashed * @param hmacKey secret HMAC key * @return Hashed string using the secret key * @throws NoSuchAlgorithmException * @throws InvalidKeyException */ - public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlgorithmException, InvalidKeyException { - //Time-stamp at Encryption time + public static String generateHash(String value, byte[] hmacKey) + throws NoSuchAlgorithmException, InvalidKeyException { + // Time-stamp at Encryption time long tStamp = System.currentTimeMillis(); String uTValue = new String(); String cValue; String finalEncValue; - //Concatenated Values + // Concatenated Values uTValue = uTValue.concat(value).concat(":").concat(Long.toString(tStamp)); cValue = uTValue; - //Digest - HMAC-SHA256 + // Digest - HMAC-SHA256 SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); byte[] rawHmac = mac.doFinal(uTValue.getBytes()); String hmacString = getHex(rawHmac); - finalEncValue = Base64.getEncoder().encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); + finalEncValue = + Base64.getEncoder() + .encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); return finalEncValue; } /** * Validate the one-way hash string + * * @param hashedValue Hashed value to be validated * @param hmacKey HMAC Key used to create the hash * @param sessionWindow previously defined session window to validate if the hash is expired @@ -62,7 +63,7 @@ public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlg */ public static String verifyHash(String hashedValue, byte[] hmacKey, long sessionWindow) throws GeneralSecurityException { - //Username:Timestamp:SignedHMAC(Username:Timestamp) + // Username:Timestamp:SignedHMAC(Username:Timestamp) String[] decryptedHash = decryptBase64Hash(hashedValue); String username = decryptedHash[0]; String timestamp = decryptedHash[1]; @@ -70,7 +71,7 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session long newTStamp = System.currentTimeMillis(); String newUTValue = username.concat(DELIIMITER).concat(timestamp); - //Digest - HMAC-SHA1 Verify + // Digest - HMAC-SHA1 Verify SecretKeySpec 
signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); @@ -87,8 +88,10 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session return decryptedHash[0]; } + /** * Decrypt base64 hash + * * @param value base 64 hash string * @return Decrypted base 64 string */ @@ -96,8 +99,10 @@ private static String[] decryptBase64Hash(String value) { String decodedBase64 = new String(Base64.getDecoder().decode(value)); return decodedBase64.split(DELIIMITER); } + /** * Get Hex string from byte array + * * @param raw byte array * @return Hex representation of the byte array */ @@ -114,14 +119,16 @@ private static String getHex(byte[] raw) { return hex.toString(); } + /** * Compares two HMAC byte arrays + * * @param a HMAC byte array 1 * @param b HMAC byte array 2 * @return true if the two HMAC are identical */ private static boolean isEqual(byte[] a, byte[] b) { - if (a == null || b == null || a.length != b.length) { + if (a == null || b == null || a.length != b.length) { return false; } @@ -133,4 +140,4 @@ private static boolean isEqual(byte[] a, byte[] b) { return result == 0; } -} \ No newline at end of file +} diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java index 67bcf7e404335..f46dc57c232bd 100644 --- a/datahub-frontend/app/security/AuthenticationManager.java +++ b/datahub-frontend/app/security/AuthenticationManager.java @@ -15,13 +15,12 @@ import org.eclipse.jetty.jaas.PropertyUserStoreManager; import play.Logger; - public class AuthenticationManager { - private AuthenticationManager(boolean verbose) { - } + private AuthenticationManager(boolean verbose) {} - public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws Exception { + public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) + throws Exception { Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty"); JAASLoginService jaasLoginService = new JAASLoginService("WHZ-Authentication"); PropertyUserStoreManager propertyUserStoreManager = new PropertyUserStoreManager(); @@ -29,10 +28,12 @@ public static void authenticateJaasUser(@Nonnull String userName, @Nonnull Strin jaasLoginService.setBeans(Collections.singletonList(propertyUserStoreManager)); JAASLoginService.INSTANCE.set(jaasLoginService); try { - LoginContext lc = new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); + LoginContext lc = + new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); lc.login(); } catch (LoginException le) { - AuthenticationException authenticationException = new AuthenticationException(le.getMessage()); + AuthenticationException authenticationException = + new AuthenticationException(le.getMessage()); authenticationException.setRootCause(le); throw authenticationException; } @@ -52,7 +53,8 @@ public void handle(@Nonnull Callback[] callbacks) { NameCallback nc = null; PasswordCallback pc = null; for (Callback callback : callbacks) { - Logger.debug("The submitted callback is of type: " + callback.getClass() + " : " + callback); + Logger.debug( + "The submitted callback is of type: " + callback.getClass() + " : " + callback); if (callback instanceof NameCallback) { nc = (NameCallback) callback; nc.setName(this.username); diff --git a/datahub-frontend/app/security/DummyLoginModule.java 
b/datahub-frontend/app/security/DummyLoginModule.java
index 56822f0805be4..c46fa29e1599a 100644
--- a/datahub-frontend/app/security/DummyLoginModule.java
+++ b/datahub-frontend/app/security/DummyLoginModule.java
@@ -1,21 +1,22 @@
 package security;

+import java.util.Map;
 import javax.security.auth.Subject;
 import javax.security.auth.callback.CallbackHandler;
 import javax.security.auth.login.LoginException;
 import javax.security.auth.spi.LoginModule;

-import java.util.Map;
-
 /**
- * This LoginModule performs dummy authentication.
- * Any username and password can work for authentication
+ * This LoginModule performs dummy authentication. Any username and password can work for
+ * authentication.
  */
 public class DummyLoginModule implements LoginModule {

-  public void initialize(final Subject subject, final CallbackHandler callbackHandler,
-      final Map<String, ?> sharedState, final Map<String, ?> options) {
-  }
+  public void initialize(
+      final Subject subject,
+      final CallbackHandler callbackHandler,
+      final Map<String, ?> sharedState,
+      final Map<String, ?> options) {}

   public boolean login() throws LoginException {
     return true;
@@ -32,5 +33,4 @@ public boolean abort() throws LoginException {
   public boolean logout() throws LoginException {
     return true;
   }
-
-}
\ No newline at end of file
+}
diff --git a/datahub-frontend/app/utils/ConfigUtil.java b/datahub-frontend/app/utils/ConfigUtil.java
index b99a5e123b9eb..5c80389c96da4 100644
--- a/datahub-frontend/app/utils/ConfigUtil.java
+++ b/datahub-frontend/app/utils/ConfigUtil.java
@@ -3,18 +3,16 @@
 import com.linkedin.util.Configuration;
 import com.typesafe.config.Config;

-
 public class ConfigUtil {

-  private ConfigUtil() {
-
-  }
+  private ConfigUtil() {}

   // New configurations, provided via application.conf file.
public static final String METADATA_SERVICE_HOST_CONFIG_PATH = "metadataService.host";
   public static final String METADATA_SERVICE_PORT_CONFIG_PATH = "metadataService.port";
   public static final String METADATA_SERVICE_USE_SSL_CONFIG_PATH = "metadataService.useSsl";
-  public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = "metadataService.sslProtocol";
+  public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH =
+      "metadataService.sslProtocol";

   // Legacy env-var based config values, for backwards compatibility:
   public static final String GMS_HOST_ENV_VAR = "DATAHUB_GMS_HOST";
@@ -27,10 +25,14 @@ private ConfigUtil() {
   public static final String DEFAULT_GMS_PORT = "8080";
   public static final String DEFAULT_GMS_USE_SSL = "False";

-  public static final String DEFAULT_METADATA_SERVICE_HOST = Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost");
-  public static final Integer DEFAULT_METADATA_SERVICE_PORT = Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080"));
-  public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False"));
-  public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR);
+  public static final String DEFAULT_METADATA_SERVICE_HOST =
+      Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost");
+  public static final Integer DEFAULT_METADATA_SERVICE_PORT =
+      Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080"));
+  public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL =
+      Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False"));
+  public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL =
+      Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR);

   public static boolean getBoolean(Config config, String key) {
     return config.hasPath(key) && config.getBoolean(key);
diff --git a/datahub-frontend/app/utils/SearchUtil.java b/datahub-frontend/app/utils/SearchUtil.java
index 2c52ff5b40156..803c70a63646a 100644
--- a/datahub-frontend/app/utils/SearchUtil.java
+++ b/datahub-frontend/app/utils/SearchUtil.java
@@ -2,29 +2,26 @@

 import javax.annotation.Nonnull;

-
-/**
- * Utility functions for Search
- */
+/** Utility functions for Search */
 public class SearchUtil {
-  private SearchUtil() {
-    //utility class
-  }
+  private SearchUtil() {
+    // utility class
+  }

-  /**
-   * Returns the string with the forward slash escaped
-   * More details on reserved characters in Elasticsearch can be found at,
-   * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters
-   *
-   * @param input
-   * @return
-   */
-  @Nonnull
-  public static String escapeForwardSlash(@Nonnull String input) {
-    if (input.contains("/")) {
-      input = input.replace("/", "\\\\/");
-    }
-    return input;
+  /**
+   * Returns the string with the forward slash escaped. More details on reserved characters in
+   * Elasticsearch can be found at:
+   * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters
+   *
+   * @param input
+   * @return
+   */
+  @Nonnull
+  public static String escapeForwardSlash(@Nonnull String input) {
+    if (input.contains("/")) {
+      input = input.replace("/", "\\\\/");
     }
+    return input;
+  }
 }
diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle
index 9a5fb3210a311..a1b97701dbf88 100644
---
a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -55,8 +55,6 @@ tasks.withType(Checkstyle) { exclude "**/generated/**" } -checkstyleMain.source = "app/" - /* PLAY UPGRADE NOTE diff --git a/datahub-frontend/test/app/ApplicationTest.java b/datahub-frontend/test/app/ApplicationTest.java index f27fefdb79669..a5da0951d1632 100644 --- a/datahub-frontend/test/app/ApplicationTest.java +++ b/datahub-frontend/test/app/ApplicationTest.java @@ -1,11 +1,22 @@ package app; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static play.mvc.Http.Status.NOT_FOUND; +import static play.mvc.Http.Status.OK; +import static play.test.Helpers.fakeRequest; +import static play.test.Helpers.route; + import com.nimbusds.jwt.JWT; import com.nimbusds.jwt.JWTClaimsSet; import com.nimbusds.jwt.JWTParser; import controllers.routes; +import java.io.IOException; +import java.net.InetAddress; import java.text.ParseException; import java.util.Date; +import java.util.List; +import java.util.Map; import no.nav.security.mock.oauth2.MockOAuth2Server; import no.nav.security.mock.oauth2.token.DefaultOAuth2TokenCallback; import okhttp3.mockwebserver.MockResponse; @@ -26,22 +37,9 @@ import play.mvc.Http; import play.mvc.Result; import play.test.Helpers; - import play.test.TestBrowser; import play.test.WithBrowser; -import java.io.IOException; -import java.net.InetAddress; -import java.util.List; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static play.mvc.Http.Status.NOT_FOUND; -import static play.mvc.Http.Status.OK; -import static play.test.Helpers.fakeRequest; -import static play.test.Helpers.route; - @TestInstance(TestInstance.Lifecycle.PER_CLASS) @SetEnvironmentVariable(key = "DATAHUB_SECRET", value = "test") @SetEnvironmentVariable(key = "KAFKA_BOOTSTRAP_SERVER", value = "") @@ -56,11 +54,15 @@ public class ApplicationTest extends WithBrowser { @Override protected Application provideApplication() { return new GuiceApplicationBuilder() - .configure("metadataService.port", String.valueOf(gmsServerPort())) - .configure("auth.baseUrl", "http://localhost:" + providePort()) - .configure("auth.oidc.discoveryUri", "http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration") - .in(new Environment(Mode.TEST)).build(); + .configure("metadataService.port", String.valueOf(gmsServerPort())) + .configure("auth.baseUrl", "http://localhost:" + providePort()) + .configure( + "auth.oidc.discoveryUri", + "http://localhost:" + + oauthServerPort() + + "/testIssuer/.well-known/openid-configuration") + .in(new Environment(Mode.TEST)) + .build(); } @Override @@ -90,16 +92,20 @@ public int gmsServerPort() { public void init() throws IOException { _gmsServer = new MockWebServer(); _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"value\":\"%s\"}", TEST_USER))); - _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); + _gmsServer.enqueue( + new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); _gmsServer.start(gmsServerPort()); _oauthServer = new MockOAuth2Server(); _oauthServer.enqueueCallback( - new DefaultOAuth2TokenCallback(ISSUER_ID, "testUser", List.of(), Map.of( - "email", "testUser@myCompany.com", - "groups", "myGroup" - ), 600) - ); + new DefaultOAuth2TokenCallback( + ISSUER_ID, + "testUser", + List.of(), + Map.of( + 
"email", "testUser@myCompany.com", + "groups", "myGroup"), + 600)); _oauthServer.start(InetAddress.getByName("localhost"), oauthServerPort()); // Discovery url to authorization server metadata @@ -147,8 +153,9 @@ public void testIndexNotFound() { @Test public void testOpenIdConfig() { - assertEquals("http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration", _wellKnownUrl); + assertEquals( + "http://localhost:" + oauthServerPort() + "/testIssuer/.well-known/openid-configuration", + _wellKnownUrl); } @Test @@ -166,8 +173,13 @@ public void testHappyPathOidc() throws ParseException { Map<String, String> data = (Map<String, String>) claims.getClaim("data"); assertEquals(TEST_TOKEN, data.get("token")); assertEquals(TEST_USER, data.get("actor")); - // Default expiration is 24h, so should always be less than current time + 1 day since it stamps the time before this executes - assertTrue(claims.getExpirationTime().compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) < 0); + // Default expiration is 24h, so should always be less than current time + 1 day since it stamps + // the time before this executes + assertTrue( + claims + .getExpirationTime() + .compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) + < 0); } @Test diff --git a/datahub-frontend/test/security/DummyLoginModuleTest.java b/datahub-frontend/test/security/DummyLoginModuleTest.java index 6727513d884af..9bf2b5dd4d11c 100644 --- a/datahub-frontend/test/security/DummyLoginModuleTest.java +++ b/datahub-frontend/test/security/DummyLoginModuleTest.java @@ -1,14 +1,12 @@ package security; -import com.sun.security.auth.callback.TextCallbackHandler; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import com.sun.security.auth.callback.TextCallbackHandler; import java.util.HashMap; import javax.security.auth.Subject; import javax.security.auth.login.LoginException; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class DummyLoginModuleTest { diff --git a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java index ed16014b58e59..a27a1462a8a27 100644 --- a/datahub-frontend/test/security/OidcConfigurationTest.java +++ b/datahub-frontend/test/security/OidcConfigurationTest.java @@ -1,5 +1,8 @@ package security; +import static auth.sso.oidc.OidcConfigs.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import auth.sso.oidc.OidcConfigs; import auth.sso.oidc.OidcProvider; import com.typesafe.config.Config; @@ -19,296 +22,290 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.junit.jupiter.api.Test; import org.pac4j.oidc.client.OidcClient; -import static auth.sso.oidc.OidcConfigs.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - - public class OidcConfigurationTest { - private static final com.typesafe.config.Config CONFIG = new Config() { - - private final Map<String, Object> _map = new HashMap<>(); - - @Override - public ConfigObject root() { - return null; - } - - @Override - public ConfigOrigin origin() { - return null; - } - - @Override - public Config withFallback(ConfigMergeable other) { - return null; - } - - @Override - public Config resolve() { - return null; - } - - @Override - public Config resolve(ConfigResolveOptions options) { - return null; - } - - @Override - public boolean isResolved() { - return false; - } - - @Override - public Config 
resolveWith(Config source) { - return null; - } - - @Override - public Config resolveWith(Config source, ConfigResolveOptions options) { - return null; - } - - @Override - public void checkValid(Config reference, String... restrictToPaths) { - - } - - @Override - public boolean hasPath(String path) { - return true; - } - - @Override - public boolean hasPathOrNull(String path) { - return false; - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public Set<Map.Entry<String, ConfigValue>> entrySet() { - return null; - } - - @Override - public boolean getIsNull(String path) { - return false; - } - - @Override - public boolean getBoolean(String path) { - return false; - } - - @Override - public Number getNumber(String path) { - return null; - } - - @Override - public int getInt(String path) { - return 0; - } - - @Override - public long getLong(String path) { - return 0; - } - - @Override - public double getDouble(String path) { - return 0; - } - - @Override - public String getString(String path) { - return (String) _map.getOrDefault(path, "1"); - } - - @Override - public <T extends Enum<T>> T getEnum(Class<T> enumClass, String path) { - return null; - } - - @Override - public ConfigObject getObject(String path) { - return null; - } - - @Override - public Config getConfig(String path) { - return null; - } - - @Override - public Object getAnyRef(String path) { - return null; - } - - @Override - public ConfigValue getValue(String path) { - return null; - } - - @Override - public Long getBytes(String path) { - return null; - } - - @Override - public ConfigMemorySize getMemorySize(String path) { - return null; - } - - @Override - public Long getMilliseconds(String path) { - return null; - } - - @Override - public Long getNanoseconds(String path) { - return null; - } - - @Override - public long getDuration(String path, TimeUnit unit) { - return 0; - } - - @Override - public Duration getDuration(String path) { - return null; - } - - @Override - public Period getPeriod(String path) { - return null; - } - - @Override - public TemporalAmount getTemporal(String path) { - return null; - } - - @Override - public ConfigList getList(String path) { - return null; - } - - @Override - public List<Boolean> getBooleanList(String path) { - return null; - } - - @Override - public List<Number> getNumberList(String path) { - return null; - } - - @Override - public List<Integer> getIntList(String path) { - return null; - } - - @Override - public List<Long> getLongList(String path) { - return null; - } - - @Override - public List<Double> getDoubleList(String path) { - return null; - } - - @Override - public List<String> getStringList(String path) { - return null; - } - - @Override - public <T extends Enum<T>> List<T> getEnumList(Class<T> enumClass, String path) { - return null; - } - - @Override - public List<? extends ConfigObject> getObjectList(String path) { - return null; - } - - @Override - public List<? extends Config> getConfigList(String path) { - return null; - } - - @Override - public List<? 
extends Object> getAnyRefList(String path) { - return null; - } - - @Override - public List<Long> getBytesList(String path) { - return null; - } - - @Override - public List<ConfigMemorySize> getMemorySizeList(String path) { - return null; - } - - @Override - public List<Long> getMillisecondsList(String path) { - return null; - } - - @Override - public List<Long> getNanosecondsList(String path) { - return null; - } - - @Override - public List<Long> getDurationList(String path, TimeUnit unit) { - return null; - } - - @Override - public List<Duration> getDurationList(String path) { - return null; - } - - @Override - public Config withOnlyPath(String path) { - return null; - } - - @Override - public Config withoutPath(String path) { - return null; - } - - @Override - public Config atPath(String path) { - return null; - } - - @Override - public Config atKey(String key) { - return null; - } - - @Override - public Config withValue(String path, ConfigValue value) { - _map.put(path, value.unwrapped()); - return this; - } - }; + private static final com.typesafe.config.Config CONFIG = + new Config() { + + private final Map<String, Object> _map = new HashMap<>(); + + @Override + public ConfigObject root() { + return null; + } + + @Override + public ConfigOrigin origin() { + return null; + } + + @Override + public Config withFallback(ConfigMergeable other) { + return null; + } + + @Override + public Config resolve() { + return null; + } + + @Override + public Config resolve(ConfigResolveOptions options) { + return null; + } + + @Override + public boolean isResolved() { + return false; + } + + @Override + public Config resolveWith(Config source) { + return null; + } + + @Override + public Config resolveWith(Config source, ConfigResolveOptions options) { + return null; + } + + @Override + public void checkValid(Config reference, String... 
restrictToPaths) {} + + @Override + public boolean hasPath(String path) { + return true; + } + + @Override + public boolean hasPathOrNull(String path) { + return false; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public Set<Map.Entry<String, ConfigValue>> entrySet() { + return null; + } + + @Override + public boolean getIsNull(String path) { + return false; + } + + @Override + public boolean getBoolean(String path) { + return false; + } + + @Override + public Number getNumber(String path) { + return null; + } + + @Override + public int getInt(String path) { + return 0; + } + + @Override + public long getLong(String path) { + return 0; + } + + @Override + public double getDouble(String path) { + return 0; + } + + @Override + public String getString(String path) { + return (String) _map.getOrDefault(path, "1"); + } + + @Override + public <T extends Enum<T>> T getEnum(Class<T> enumClass, String path) { + return null; + } + + @Override + public ConfigObject getObject(String path) { + return null; + } + + @Override + public Config getConfig(String path) { + return null; + } + + @Override + public Object getAnyRef(String path) { + return null; + } + + @Override + public ConfigValue getValue(String path) { + return null; + } + + @Override + public Long getBytes(String path) { + return null; + } + + @Override + public ConfigMemorySize getMemorySize(String path) { + return null; + } + + @Override + public Long getMilliseconds(String path) { + return null; + } + + @Override + public Long getNanoseconds(String path) { + return null; + } + + @Override + public long getDuration(String path, TimeUnit unit) { + return 0; + } + + @Override + public Duration getDuration(String path) { + return null; + } + + @Override + public Period getPeriod(String path) { + return null; + } + + @Override + public TemporalAmount getTemporal(String path) { + return null; + } + + @Override + public ConfigList getList(String path) { + return null; + } + + @Override + public List<Boolean> getBooleanList(String path) { + return null; + } + + @Override + public List<Number> getNumberList(String path) { + return null; + } + + @Override + public List<Integer> getIntList(String path) { + return null; + } + + @Override + public List<Long> getLongList(String path) { + return null; + } + + @Override + public List<Double> getDoubleList(String path) { + return null; + } + + @Override + public List<String> getStringList(String path) { + return null; + } + + @Override + public <T extends Enum<T>> List<T> getEnumList(Class<T> enumClass, String path) { + return null; + } + + @Override + public List<? extends ConfigObject> getObjectList(String path) { + return null; + } + + @Override + public List<? extends Config> getConfigList(String path) { + return null; + } + + @Override + public List<? 
extends Object> getAnyRefList(String path) { + return null; + } + + @Override + public List<Long> getBytesList(String path) { + return null; + } + + @Override + public List<ConfigMemorySize> getMemorySizeList(String path) { + return null; + } + + @Override + public List<Long> getMillisecondsList(String path) { + return null; + } + + @Override + public List<Long> getNanosecondsList(String path) { + return null; + } + + @Override + public List<Long> getDurationList(String path, TimeUnit unit) { + return null; + } + + @Override + public List<Duration> getDurationList(String path) { + return null; + } + + @Override + public Config withOnlyPath(String path) { + return null; + } + + @Override + public Config withoutPath(String path) { + return null; + } + + @Override + public Config atPath(String path) { + return null; + } + + @Override + public Config atKey(String key) { + return null; + } + + @Override + public Config withValue(String path, ConfigValue value) { + _map.put(path, value.unwrapped()); + return this; + } + }; @Test public void readTimeoutPropagation() { diff --git a/datahub-frontend/test/utils/SearchUtilTest.java b/datahub-frontend/test/utils/SearchUtilTest.java index 428566ae3f424..6767fa5637469 100644 --- a/datahub-frontend/test/utils/SearchUtilTest.java +++ b/datahub-frontend/test/utils/SearchUtilTest.java @@ -1,17 +1,18 @@ package utils; -import org.junit.jupiter.api.Test; - import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; + public class SearchUtilTest { - @Test - public void testEscapeForwardSlash() { - // escape "/" - assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); - // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to retain the regex behaviour with "*" - assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); - assertEquals("", ""); - assertEquals("foo", "foo"); - } + @Test + public void testEscapeForwardSlash() { + // escape "/" + assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); + // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to + // retain the regex behaviour with "*" + assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); + assertEquals("", ""); + assertEquals("foo", "foo"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index 4488f27c19d80..e45bed33eb023 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -1,29 +1,27 @@ package com.linkedin.datahub.graphql; -/** - * Constants relating to GraphQL type system & execution. - */ +/** Constants relating to GraphQL type system & execution. 
*/ public class Constants { - private Constants() { }; + private Constants() {} + ; - public static final String URN_FIELD_NAME = "urn"; - public static final String URNS_FIELD_NAME = "urns"; - public static final String GMS_SCHEMA_FILE = "entity.graphql"; - public static final String SEARCH_SCHEMA_FILE = "search.graphql"; - public static final String APP_SCHEMA_FILE = "app.graphql"; - public static final String AUTH_SCHEMA_FILE = "auth.graphql"; - public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; - public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; - public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; - public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql"; - public static final String TESTS_SCHEMA_FILE = "tests.graphql"; - public static final String STEPS_SCHEMA_FILE = "step.graphql"; - public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; - public static final String BROWSE_PATH_DELIMITER = "/"; - public static final String BROWSE_PATH_V2_DELIMITER = "␟"; - public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; - - public static final String ENTITY_FILTER_NAME = "_entityType"; + public static final String URN_FIELD_NAME = "urn"; + public static final String URNS_FIELD_NAME = "urns"; + public static final String GMS_SCHEMA_FILE = "entity.graphql"; + public static final String SEARCH_SCHEMA_FILE = "search.graphql"; + public static final String APP_SCHEMA_FILE = "app.graphql"; + public static final String AUTH_SCHEMA_FILE = "auth.graphql"; + public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; + public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; + public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; + public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql"; + public static final String TESTS_SCHEMA_FILE = "tests.graphql"; + public static final String STEPS_SCHEMA_FILE = "step.graphql"; + public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; + public static final String BROWSE_PATH_DELIMITER = "/"; + public static final String BROWSE_PATH_V2_DELIMITER = "␟"; + public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; + public static final String ENTITY_FILTER_NAME = "_entityType"; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 9ea8126a07ab2..f0cb56b1a99ce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; +import static graphql.scalars.ExtendedScalars.*; + import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.group.GroupService; import com.datahub.authentication.invite.InviteTokenService; @@ -68,7 +72,6 @@ import com.linkedin.datahub.graphql.generated.ListQueriesResult; import com.linkedin.datahub.graphql.generated.ListTestsResult; import com.linkedin.datahub.graphql.generated.ListViewsResult; -import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; import 
com.linkedin.datahub.graphql.generated.MLFeatureTable;
@@ -78,6 +81,7 @@
 import com.linkedin.datahub.graphql.generated.MLModelProperties;
 import com.linkedin.datahub.graphql.generated.MLPrimaryKey;
 import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties;
+import com.linkedin.datahub.graphql.generated.MatchedField;
 import com.linkedin.datahub.graphql.generated.Notebook;
 import com.linkedin.datahub.graphql.generated.Owner;
 import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
@@ -284,7 +288,6 @@
 import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType;
 import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper;
 import com.linkedin.datahub.graphql.types.domain.DomainType;
-import com.linkedin.datahub.graphql.types.rolemetadata.RoleType;
 import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType;
 import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType;
 import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType;
@@ -297,6 +300,7 @@
 import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType;
 import com.linkedin.datahub.graphql.types.query.QueryType;
 import com.linkedin.datahub.graphql.types.role.DataHubRoleType;
+import com.linkedin.datahub.graphql.types.rolemetadata.RoleType;
 import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType;
 import com.linkedin.datahub.graphql.types.tag.TagType;
 import com.linkedin.datahub.graphql.types.test.TestType;
@@ -352,205 +356,191 @@
 import org.dataloader.DataLoader;
 import org.dataloader.DataLoaderOptions;
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-import static graphql.scalars.ExtendedScalars.*;
-
-
 /**
- * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS graph.
+ * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the GMS
+ * graph.
*/ @Slf4j @Getter public class GmsGraphQLEngine { - private final EntityClient entityClient; - private final SystemEntityClient systemEntityClient; - private final GraphClient graphClient; - private final UsageClient usageClient; - private final SiblingGraphService siblingGraphService; - - private final EntityService entityService; - private final AnalyticsService analyticsService; - private final RecommendationsService recommendationsService; - private final EntityRegistry entityRegistry; - private final StatefulTokenService statefulTokenService; - private final SecretService secretService; - private final GitVersion gitVersion; - private final boolean supportsImpactAnalysis; - private final TimeseriesAspectService timeseriesAspectService; - private final TimelineService timelineService; - private final NativeUserService nativeUserService; - private final GroupService groupService; - private final RoleService roleService; - private final InviteTokenService inviteTokenService; - private final PostService postService; - private final SettingsService settingsService; - private final ViewService viewService; - private final OwnershipTypeService ownershipTypeService; - private final LineageService lineageService; - private final QueryService queryService; - private final DataProductService dataProductService; - - private final FeatureFlags featureFlags; - - private final IngestionConfiguration ingestionConfiguration; - private final AuthenticationConfiguration authenticationConfiguration; - private final AuthorizationConfiguration authorizationConfiguration; - private final VisualConfiguration visualConfiguration; - private final TelemetryConfiguration telemetryConfiguration; - private final TestsConfiguration testsConfiguration; - private final DataHubConfiguration datahubConfiguration; - private final ViewsConfiguration viewsConfiguration; - - private final DatasetType datasetType; - - private final RoleType roleType; - - private final CorpUserType corpUserType; - private final CorpGroupType corpGroupType; - private final ChartType chartType; - private final DashboardType dashboardType; - private final DataPlatformType dataPlatformType; - private final TagType tagType; - private final MLModelType mlModelType; - private final MLModelGroupType mlModelGroupType; - private final MLFeatureType mlFeatureType; - private final MLFeatureTableType mlFeatureTableType; - private final MLPrimaryKeyType mlPrimaryKeyType; - private final DataFlowType dataFlowType; - private final DataJobType dataJobType; - private final GlossaryTermType glossaryTermType; - private final GlossaryNodeType glossaryNodeType; - private final AspectType aspectType; - private final ContainerType containerType; - private final DomainType domainType; - private final NotebookType notebookType; - private final AssertionType assertionType; - private final VersionedDatasetType versionedDatasetType; - private final DataPlatformInstanceType dataPlatformInstanceType; - private final AccessTokenMetadataType accessTokenMetadataType; - private final TestType testType; - private final DataHubPolicyType dataHubPolicyType; - private final DataHubRoleType dataHubRoleType; - private final SchemaFieldType schemaFieldType; - private final DataHubViewType dataHubViewType; - private final QueryType queryType; - private final DataProductType dataProductType; - private final OwnershipType ownershipType; - - /** - * A list of GraphQL Plugins that extend the core engine - */ - private final List<GmsGraphQLPlugin> graphQLPlugins; - - /** - * Configures 
the graph objects that can be fetched primary key. - */ - public final List<EntityType<?, ?>> entityTypes; - - /** - * Configures all graph objects - */ - public final List<LoadableType<?, ?>> loadableTypes; - - /** - * Configures the graph objects for owner - */ - public final List<LoadableType<?, ?>> ownerTypes; - - /** - * Configures the graph objects that can be searched. - */ - public final List<SearchableEntityType<?, ?>> searchableTypes; - - /** - * Configures the graph objects that can be browsed. - */ - public final List<BrowsableEntityType<?, ?>> browsableTypes; - - public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { - - this.graphQLPlugins = List.of( + private final EntityClient entityClient; + private final SystemEntityClient systemEntityClient; + private final GraphClient graphClient; + private final UsageClient usageClient; + private final SiblingGraphService siblingGraphService; + + private final EntityService entityService; + private final AnalyticsService analyticsService; + private final RecommendationsService recommendationsService; + private final EntityRegistry entityRegistry; + private final StatefulTokenService statefulTokenService; + private final SecretService secretService; + private final GitVersion gitVersion; + private final boolean supportsImpactAnalysis; + private final TimeseriesAspectService timeseriesAspectService; + private final TimelineService timelineService; + private final NativeUserService nativeUserService; + private final GroupService groupService; + private final RoleService roleService; + private final InviteTokenService inviteTokenService; + private final PostService postService; + private final SettingsService settingsService; + private final ViewService viewService; + private final OwnershipTypeService ownershipTypeService; + private final LineageService lineageService; + private final QueryService queryService; + private final DataProductService dataProductService; + + private final FeatureFlags featureFlags; + + private final IngestionConfiguration ingestionConfiguration; + private final AuthenticationConfiguration authenticationConfiguration; + private final AuthorizationConfiguration authorizationConfiguration; + private final VisualConfiguration visualConfiguration; + private final TelemetryConfiguration telemetryConfiguration; + private final TestsConfiguration testsConfiguration; + private final DataHubConfiguration datahubConfiguration; + private final ViewsConfiguration viewsConfiguration; + + private final DatasetType datasetType; + + private final RoleType roleType; + + private final CorpUserType corpUserType; + private final CorpGroupType corpGroupType; + private final ChartType chartType; + private final DashboardType dashboardType; + private final DataPlatformType dataPlatformType; + private final TagType tagType; + private final MLModelType mlModelType; + private final MLModelGroupType mlModelGroupType; + private final MLFeatureType mlFeatureType; + private final MLFeatureTableType mlFeatureTableType; + private final MLPrimaryKeyType mlPrimaryKeyType; + private final DataFlowType dataFlowType; + private final DataJobType dataJobType; + private final GlossaryTermType glossaryTermType; + private final GlossaryNodeType glossaryNodeType; + private final AspectType aspectType; + private final ContainerType containerType; + private final DomainType domainType; + private final NotebookType notebookType; + private final AssertionType assertionType; + private final VersionedDatasetType versionedDatasetType; + private final 
DataPlatformInstanceType dataPlatformInstanceType;
+ private final AccessTokenMetadataType accessTokenMetadataType;
+ private final TestType testType;
+ private final DataHubPolicyType dataHubPolicyType;
+ private final DataHubRoleType dataHubRoleType;
+ private final SchemaFieldType schemaFieldType;
+ private final DataHubViewType dataHubViewType;
+ private final QueryType queryType;
+ private final DataProductType dataProductType;
+ private final OwnershipType ownershipType;
+
+ /** A list of GraphQL Plugins that extend the core engine */
+ private final List<GmsGraphQLPlugin> graphQLPlugins;
+
+ /** Configures the graph objects that can be fetched by primary key. */
+ public final List<EntityType<?, ?>> entityTypes;
+
+ /** Configures all graph objects */
+ public final List<LoadableType<?, ?>> loadableTypes;
+
+ /** Configures the graph objects for owner */
+ public final List<LoadableType<?, ?>> ownerTypes;
+
+ /** Configures the graph objects that can be searched. */
+ public final List<SearchableEntityType<?, ?>> searchableTypes;
+
+ /** Configures the graph objects that can be browsed. */
+ public final List<BrowsableEntityType<?, ?>> browsableTypes;
+
+ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
+
+ this.graphQLPlugins =
+ List.of(
 // Add new plugins here
- );
-
- this.graphQLPlugins.forEach(plugin -> plugin.init(args));
-
- this.entityClient = args.entityClient;
- this.systemEntityClient = args.systemEntityClient;
- this.graphClient = args.graphClient;
- this.usageClient = args.usageClient;
- this.siblingGraphService = args.siblingGraphService;
-
- this.analyticsService = args.analyticsService;
- this.entityService = args.entityService;
- this.recommendationsService = args.recommendationsService;
- this.statefulTokenService = args.statefulTokenService;
- this.secretService = args.secretService;
- this.entityRegistry = args.entityRegistry;
- this.gitVersion = args.gitVersion;
- this.supportsImpactAnalysis = args.supportsImpactAnalysis;
- this.timeseriesAspectService = args.timeseriesAspectService;
- this.timelineService = args.timelineService;
- this.nativeUserService = args.nativeUserService;
- this.groupService = args.groupService;
- this.roleService = args.roleService;
- this.inviteTokenService = args.inviteTokenService;
- this.postService = args.postService;
- this.viewService = args.viewService;
- this.ownershipTypeService = args.ownershipTypeService;
- this.settingsService = args.settingsService;
- this.lineageService = args.lineageService;
- this.queryService = args.queryService;
- this.dataProductService = args.dataProductService;
-
- this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration);
- this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration);
- this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration);
- this.visualConfiguration = args.visualConfiguration;
- this.telemetryConfiguration = args.telemetryConfiguration;
- this.testsConfiguration = args.testsConfiguration;
- this.datahubConfiguration = args.datahubConfiguration;
- this.viewsConfiguration = args.viewsConfiguration;
- this.featureFlags = args.featureFlags;
-
- this.datasetType = new DatasetType(entityClient);
- this.roleType = new RoleType(entityClient);
- this.corpUserType = new CorpUserType(entityClient, featureFlags);
- this.corpGroupType = new CorpGroupType(entityClient);
- this.chartType = new ChartType(entityClient);
- this.dashboardType = new DashboardType(entityClient);
- this.dataPlatformType 
= new DataPlatformType(entityClient); - this.tagType = new TagType(entityClient); - this.mlModelType = new MLModelType(entityClient); - this.mlModelGroupType = new MLModelGroupType(entityClient); - this.mlFeatureType = new MLFeatureType(entityClient); - this.mlFeatureTableType = new MLFeatureTableType(entityClient); - this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); - this.dataFlowType = new DataFlowType(entityClient); - this.dataJobType = new DataJobType(entityClient); - this.glossaryTermType = new GlossaryTermType(entityClient); - this.glossaryNodeType = new GlossaryNodeType(entityClient); - this.aspectType = new AspectType(entityClient); - this.containerType = new ContainerType(entityClient); - this.domainType = new DomainType(entityClient); - this.notebookType = new NotebookType(entityClient); - this.assertionType = new AssertionType(entityClient); - this.versionedDatasetType = new VersionedDatasetType(entityClient); - this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); - this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); - this.testType = new TestType(entityClient); - this.dataHubPolicyType = new DataHubPolicyType(entityClient); - this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(); - this.dataHubViewType = new DataHubViewType(entityClient); - this.queryType = new QueryType(entityClient); - this.dataProductType = new DataProductType(entityClient); - this.ownershipType = new OwnershipType(entityClient); - - // Init Lists - this.entityTypes = ImmutableList.of( + ); + + this.graphQLPlugins.forEach(plugin -> plugin.init(args)); + + this.entityClient = args.entityClient; + this.systemEntityClient = args.systemEntityClient; + this.graphClient = args.graphClient; + this.usageClient = args.usageClient; + this.siblingGraphService = args.siblingGraphService; + + this.analyticsService = args.analyticsService; + this.entityService = args.entityService; + this.recommendationsService = args.recommendationsService; + this.statefulTokenService = args.statefulTokenService; + this.secretService = args.secretService; + this.entityRegistry = args.entityRegistry; + this.gitVersion = args.gitVersion; + this.supportsImpactAnalysis = args.supportsImpactAnalysis; + this.timeseriesAspectService = args.timeseriesAspectService; + this.timelineService = args.timelineService; + this.nativeUserService = args.nativeUserService; + this.groupService = args.groupService; + this.roleService = args.roleService; + this.inviteTokenService = args.inviteTokenService; + this.postService = args.postService; + this.viewService = args.viewService; + this.ownershipTypeService = args.ownershipTypeService; + this.settingsService = args.settingsService; + this.lineageService = args.lineageService; + this.queryService = args.queryService; + this.dataProductService = args.dataProductService; + + this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); + this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); + this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration); + this.visualConfiguration = args.visualConfiguration; + this.telemetryConfiguration = args.telemetryConfiguration; + this.testsConfiguration = args.testsConfiguration; + this.datahubConfiguration = args.datahubConfiguration; + this.viewsConfiguration = args.viewsConfiguration; + this.featureFlags = args.featureFlags; + + this.datasetType = new 
DatasetType(entityClient); + this.roleType = new RoleType(entityClient); + this.corpUserType = new CorpUserType(entityClient, featureFlags); + this.corpGroupType = new CorpGroupType(entityClient); + this.chartType = new ChartType(entityClient); + this.dashboardType = new DashboardType(entityClient); + this.dataPlatformType = new DataPlatformType(entityClient); + this.tagType = new TagType(entityClient); + this.mlModelType = new MLModelType(entityClient); + this.mlModelGroupType = new MLModelGroupType(entityClient); + this.mlFeatureType = new MLFeatureType(entityClient); + this.mlFeatureTableType = new MLFeatureTableType(entityClient); + this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); + this.dataFlowType = new DataFlowType(entityClient); + this.dataJobType = new DataJobType(entityClient); + this.glossaryTermType = new GlossaryTermType(entityClient); + this.glossaryNodeType = new GlossaryNodeType(entityClient); + this.aspectType = new AspectType(entityClient); + this.containerType = new ContainerType(entityClient); + this.domainType = new DomainType(entityClient); + this.notebookType = new NotebookType(entityClient); + this.assertionType = new AssertionType(entityClient); + this.versionedDatasetType = new VersionedDatasetType(entityClient); + this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); + this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); + this.testType = new TestType(entityClient); + this.dataHubPolicyType = new DataHubPolicyType(entityClient); + this.dataHubRoleType = new DataHubRoleType(entityClient); + this.schemaFieldType = new SchemaFieldType(); + this.dataHubViewType = new DataHubViewType(entityClient); + this.queryType = new QueryType(entityClient); + this.dataProductType = new DataProductType(entityClient); + this.ownershipType = new OwnershipType(entityClient); + + // Init Lists + this.entityTypes = + ImmutableList.of( datasetType, roleType, corpUserType, @@ -582,1262 +572,1867 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { dataHubViewType, queryType, dataProductType, - ownershipType - ); - this.loadableTypes = new ArrayList<>(entityTypes); - // Extend loadable types with types from the plugins - // This allows us to offer search and browse capabilities out of the box for those types - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - this.loadableTypes.addAll(pluginLoadableTypes); - } - } - this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); - this.searchableTypes = loadableTypes.stream() + ownershipType); + this.loadableTypes = new ArrayList<>(entityTypes); + // Extend loadable types with types from the plugins + // This allows us to offer search and browse capabilities out of the box for those types + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + Collection<? 
extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + this.loadableTypes.addAll(pluginLoadableTypes); + } + } + this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); + this.searchableTypes = + loadableTypes.stream() .filter(type -> (type instanceof SearchableEntityType<?, ?>)) .map(type -> (SearchableEntityType<?, ?>) type) .collect(Collectors.toList()); - this.browsableTypes = loadableTypes.stream() + this.browsableTypes = + loadableTypes.stream() .filter(type -> (type instanceof BrowsableEntityType<?, ?>)) .map(type -> (BrowsableEntityType<?, ?>) type) .collect(Collectors.toList()); - } + } - /** - * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from - * a {@link LoadableType}. - */ - public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(final Collection<? extends LoadableType<?, ?>> loadableTypes) { - return loadableTypes - .stream() - .collect(Collectors.toMap( + /** + * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from a {@link + * LoadableType}. + */ + public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers( + final Collection<? extends LoadableType<?, ?>> loadableTypes) { + return loadableTypes.stream() + .collect( + Collectors.toMap( LoadableType::name, - (graphType) -> (context) -> createDataLoader(graphType, context) - )); - } + (graphType) -> (context) -> createDataLoader(graphType, context))); + } - /** - * Final call to wire up any extra resolvers the plugin might want to add on - * @param builder - */ - private void configurePluginResolvers(final RuntimeWiring.Builder builder) { - this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); - } - - - public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { - configureQueryResolvers(builder); - configureMutationResolvers(builder); - configureGenericEntityResolvers(builder); - configureDatasetResolvers(builder); - configureCorpUserResolvers(builder); - configureCorpGroupResolvers(builder); - configureDashboardResolvers(builder); - configureNotebookResolvers(builder); - configureChartResolvers(builder); - configureTypeResolvers(builder); - configureTypeExtensions(builder); - configureTagAssociationResolver(builder); - configureGlossaryTermAssociationResolver(builder); - configureDataJobResolvers(builder); - configureDataFlowResolvers(builder); - configureMLFeatureTableResolvers(builder); - configureGlossaryRelationshipResolvers(builder); - configureIngestionSourceResolvers(builder); - configureAnalyticsResolvers(builder); - configureContainerResolvers(builder); - configureDataPlatformInstanceResolvers(builder); - configureGlossaryTermResolvers(builder); - configureOrganisationRoleResolvers(builder); - configureGlossaryNodeResolvers(builder); - configureDomainResolvers(builder); - configureDataProductResolvers(builder); - configureAssertionResolvers(builder); - configurePolicyResolvers(builder); - configureDataProcessInstanceResolvers(builder); - configureVersionedDatasetResolvers(builder); - configureAccessAccessTokenMetadataResolvers(builder); - configureTestResultResolvers(builder); - configureRoleResolvers(builder); - configureSchemaFieldResolvers(builder); - configureEntityPathResolvers(builder); - configureViewResolvers(builder); - configureQueryEntityResolvers(builder); - configureOwnershipTypeResolver(builder); - configurePluginResolvers(builder); - } - - private void 
configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { - builder.type("Role", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("RoleAssociation", typeWiring -> typeWiring - .dataFetcher("role", - new LoadableTypeResolver<>(roleType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleAssociation) - env.getSource()).getRole().getUrn())) - ); - builder.type("RoleUser", typeWiring -> typeWiring - .dataFetcher("user", - new LoadableTypeResolver<>(corpUserType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleUser) - env.getSource()).getUser().getUrn())) - ); + /** + * Final call to wire up any extra resolvers the plugin might want to add on + * + * @param builder + */ + private void configurePluginResolvers(final RuntimeWiring.Builder builder) { + this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); + } + + public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { + configureQueryResolvers(builder); + configureMutationResolvers(builder); + configureGenericEntityResolvers(builder); + configureDatasetResolvers(builder); + configureCorpUserResolvers(builder); + configureCorpGroupResolvers(builder); + configureDashboardResolvers(builder); + configureNotebookResolvers(builder); + configureChartResolvers(builder); + configureTypeResolvers(builder); + configureTypeExtensions(builder); + configureTagAssociationResolver(builder); + configureGlossaryTermAssociationResolver(builder); + configureDataJobResolvers(builder); + configureDataFlowResolvers(builder); + configureMLFeatureTableResolvers(builder); + configureGlossaryRelationshipResolvers(builder); + configureIngestionSourceResolvers(builder); + configureAnalyticsResolvers(builder); + configureContainerResolvers(builder); + configureDataPlatformInstanceResolvers(builder); + configureGlossaryTermResolvers(builder); + configureOrganisationRoleResolvers(builder); + configureGlossaryNodeResolvers(builder); + configureDomainResolvers(builder); + configureDataProductResolvers(builder); + configureAssertionResolvers(builder); + configurePolicyResolvers(builder); + configureDataProcessInstanceResolvers(builder); + configureVersionedDatasetResolvers(builder); + configureAccessAccessTokenMetadataResolvers(builder); + configureTestResultResolvers(builder); + configureRoleResolvers(builder); + configureSchemaFieldResolvers(builder); + configureEntityPathResolvers(builder); + configureViewResolvers(builder); + configureQueryEntityResolvers(builder); + configureOwnershipTypeResolver(builder); + configurePluginResolvers(builder); + } + + private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { + builder.type( + "Role", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "RoleAssociation", + typeWiring -> + typeWiring.dataFetcher( + "role", + new LoadableTypeResolver<>( + roleType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleAssociation) env.getSource()) + .getRole() + .getUrn()))); + builder.type( + "RoleUser", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleUser) env.getSource()) + .getUser() + .getUrn()))); + } + + public GraphQLEngine.Builder builder() { + final GraphQLEngine.Builder builder = GraphQLEngine.builder(); + builder + .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) + 
.addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) + .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); + + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + List<String> pluginSchemaFiles = plugin.getSchemaFiles(); + if (pluginSchemaFiles != null) { + pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); + } + Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + pluginLoadableTypes.forEach( + loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); + } } - - public GraphQLEngine.Builder builder() { - final GraphQLEngine.Builder builder = GraphQLEngine.builder(); - builder - .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) - .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); - - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - List<String> pluginSchemaFiles = plugin.getSchemaFiles(); - if (pluginSchemaFiles != null) { - pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); - } - Collection<? 
extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - pluginLoadableTypes.forEach(loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); - } - } - builder - .addDataLoaders(loaderSuppliers(loadableTypes)) - .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) - .configureRuntimeWiring(this::configureRuntimeWiring); - return builder; + builder + .addDataLoaders(loaderSuppliers(loadableTypes)) + .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) + .configureRuntimeWiring(this::configureRuntimeWiring); + return builder; + } + + public static String fileBasedSchema(String fileName) { + String schema; + try { + InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); + schema = IOUtils.toString(is, StandardCharsets.UTF_8); + is.close(); + } catch (IOException e) { + throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); } - - public static String fileBasedSchema(String fileName) { - String schema; - try { - InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); - schema = IOUtils.toString(is, StandardCharsets.UTF_8); - is.close(); - } catch (IOException e) { - throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); - } - return schema; + return schema; + } + + private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { + final boolean isAnalyticsEnabled = analyticsService != null; + builder + .type( + "Query", + typeWiring -> + typeWiring.dataFetcher( + "isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) + .type( + "AnalyticsChart", + typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); + if (isAnalyticsEnabled) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "getAnalyticsCharts", new GetChartsResolver(analyticsService, entityClient)) + .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) + .dataFetcher( + "getMetadataAnalyticsCharts", + new GetMetadataAnalyticsResolver(entityClient))); } + } - private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { - final boolean isAnalyticsEnabled = analyticsService != null; - builder.type("Query", typeWiring -> typeWiring.dataFetcher("isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) - .type("AnalyticsChart", typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); - if (isAnalyticsEnabled) { - builder.type("Query", typeWiring -> typeWiring.dataFetcher("getAnalyticsCharts", - new GetChartsResolver(analyticsService, entityClient)) - .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) - .dataFetcher("getMetadataAnalyticsCharts", new GetMetadataAnalyticsResolver(entityClient))); - } - } - - private void configureContainerResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Container", typeWiring -> typeWiring + private void configureContainerResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Container", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, + .dataFetcher( + "platform", + new 
LoadableTypeResolver<>( + dataPlatformType, (env) -> ((Container) env.getSource()).getPlatform().getUrn())) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Container container = env.getSource(); - return container.getContainer() != null ? container.getContainer().getUrn() : null; - }) - ) + final Container container = env.getSource(); + return container.getContainer() != null + ? container.getContainer().getUrn() + : null; + })) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Container container = env.getSource(); - return container.getDataPlatformInstance() != null ? container.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataPlatformInstance", typeWiring -> typeWiring - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn())) - ); - } - - private void configureQueryResolvers(final RuntimeWiring.Builder builder) { - builder.type("Query", typeWiring -> typeWiring - .dataFetcher("appConfig", - new AppConfigResolver(gitVersion, analyticsService != null, - this.ingestionConfiguration, - this.authenticationConfiguration, - this.authorizationConfiguration, - this.supportsImpactAnalysis, - this.visualConfiguration, - this.telemetryConfiguration, - this.testsConfiguration, - this.datahubConfiguration, - this.viewsConfiguration, - this.featureFlags - )) - .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) - .dataFetcher("search", new SearchResolver(this.entityClient)) - .dataFetcher("searchAcrossEntities", new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) - .dataFetcher("scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) - .dataFetcher("aggregateAcrossEntities", new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) - .dataFetcher("autoCompleteForMultiple", new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) - .dataFetcher("browse", new BrowseResolver(browsableTypes)) - .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) - .dataFetcher("dataset", getResolver(datasetType)) - .dataFetcher("role", getResolver(roleType)) - .dataFetcher("versionedDataset", getResolver(versionedDatasetType, - (env) -> new VersionedUrn().setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) - .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) - .dataFetcher("notebook", getResolver(notebookType)) - .dataFetcher("corpUser", getResolver(corpUserType)) - .dataFetcher("corpGroup", getResolver(corpGroupType)) - .dataFetcher("dashboard", getResolver(dashboardType)) - .dataFetcher("chart", getResolver(chartType)) - .dataFetcher("tag", getResolver(tagType)) - .dataFetcher("dataFlow", getResolver(dataFlowType)) - .dataFetcher("dataJob", 
getResolver(dataJobType)) - .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) - .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) - .dataFetcher("domain", getResolver((domainType))) - .dataFetcher("dataPlatform", getResolver(dataPlatformType)) - .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) - .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) - .dataFetcher("mlFeature", getResolver(mlFeatureType)) - .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) - .dataFetcher("mlModel", getResolver(mlModelType)) - .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) - .dataFetcher("assertion", getResolver(assertionType)) - .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) - .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) - .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) - .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) - .dataFetcher("listRecommendations", new ListRecommendationsResolver(recommendationsService)) - .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) - .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) - .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) - .dataFetcher("container", getResolver(containerType)) - .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) - .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) - .dataFetcher("getSecretValues", new GetSecretValuesResolver(this.entityClient, this.secretService)) - .dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) - .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) - .dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) - .dataFetcher("getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) - .dataFetcher("test", getResolver(testType)) - .dataFetcher("listTests", new ListTestsResolver(entityClient)) - .dataFetcher("getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) - .dataFetcher("getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) - .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) - .dataFetcher("entity", getEntityResolver()) - .dataFetcher("entities", getEntitiesResolver()) - .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) - .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) - .dataFetcher("batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) - .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) - .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) - .dataFetcher("globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) - .dataFetcher("getQuickFilters", new GetQuickFiltersResolver(this.entityClient, this.viewService)) - .dataFetcher("dataProduct", getResolver(dataProductType)) - .dataFetcher("listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) - 
.dataFetcher("browseV2", new BrowseV2Resolver(this.entityClient, this.viewService)) - ); - } - - private DataFetcher getEntitiesResolver() { - return new BatchGetEntitiesResolver(entityTypes, - (env) -> { - List<String> urns = env.getArgument(URNS_FIELD_NAME); - return urns.stream().map((urn) -> { + final Container container = env.getSource(); + return container.getDataPlatformInstance() != null + ? container.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataPlatformInstance", + typeWiring -> + typeWiring.dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn()))); + } + + private void configureQueryResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "appConfig", + new AppConfigResolver( + gitVersion, + analyticsService != null, + this.ingestionConfiguration, + this.authenticationConfiguration, + this.authorizationConfiguration, + this.supportsImpactAnalysis, + this.visualConfiguration, + this.telemetryConfiguration, + this.testsConfiguration, + this.datahubConfiguration, + this.viewsConfiguration, + this.featureFlags)) + .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) + .dataFetcher("search", new SearchResolver(this.entityClient)) + .dataFetcher( + "searchAcrossEntities", + new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "scrollAcrossEntities", + new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "aggregateAcrossEntities", + new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) + .dataFetcher( + "autoCompleteForMultiple", + new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) + .dataFetcher("browse", new BrowseResolver(browsableTypes)) + .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) + .dataFetcher("dataset", getResolver(datasetType)) + .dataFetcher("role", getResolver(roleType)) + .dataFetcher( + "versionedDataset", + getResolver( + versionedDatasetType, + (env) -> + new VersionedUrn() + .setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) + .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) + .dataFetcher("notebook", getResolver(notebookType)) + .dataFetcher("corpUser", getResolver(corpUserType)) + .dataFetcher("corpGroup", getResolver(corpGroupType)) + .dataFetcher("dashboard", getResolver(dashboardType)) + .dataFetcher("chart", getResolver(chartType)) + .dataFetcher("tag", getResolver(tagType)) + .dataFetcher("dataFlow", getResolver(dataFlowType)) + .dataFetcher("dataJob", getResolver(dataJobType)) + .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) + .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) + .dataFetcher("domain", getResolver((domainType))) + .dataFetcher("dataPlatform", getResolver(dataPlatformType)) + .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) + .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) + .dataFetcher("mlFeature", getResolver(mlFeatureType)) + .dataFetcher("mlPrimaryKey", 
getResolver(mlPrimaryKeyType)) + .dataFetcher("mlModel", getResolver(mlModelType)) + .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) + .dataFetcher("assertion", getResolver(assertionType)) + .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) + .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) + .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) + .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) + .dataFetcher( + "listRecommendations", new ListRecommendationsResolver(recommendationsService)) + .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) + .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) + .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) + .dataFetcher("container", getResolver(containerType)) + .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) + .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) + .dataFetcher( + "getSecretValues", + new GetSecretValuesResolver(this.entityClient, this.secretService)) + .dataFetcher( + "listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) + .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) + .dataFetcher( + "getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) + .dataFetcher("test", getResolver(testType)) + .dataFetcher("listTests", new ListTestsResolver(entityClient)) + .dataFetcher( + "getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) + .dataFetcher( + "getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) + .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) + .dataFetcher("entity", getEntityResolver()) + .dataFetcher("entities", getEntitiesResolver()) + .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) + .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) + .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) + .dataFetcher( + "batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) + .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) + .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) + .dataFetcher( + "globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) + .dataFetcher( + "getQuickFilters", + new GetQuickFiltersResolver(this.entityClient, this.viewService)) + .dataFetcher("dataProduct", getResolver(dataProductType)) + .dataFetcher( + "listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher( + "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) + .dataFetcher( + "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))); + } + + private DataFetcher getEntitiesResolver() { + return new BatchGetEntitiesResolver( + entityTypes, + (env) -> { + List<String> urns = env.getArgument(URNS_FIELD_NAME); + return urns.stream() + .map( + (urn) -> { try { - Urn entityUrn = Urn.createFromString(urn); - return UrnToEntityMapper.map(entityUrn); + Urn entityUrn = Urn.createFromString(urn); + return 
UrnToEntityMapper.map(entityUrn); } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); + throw new RuntimeException("Failed to get entity", e); } - }).collect(Collectors.toList()); - }); - } + }) + .collect(Collectors.toList()); + }); + } + + private DataFetcher getEntityResolver() { + return new EntityTypeResolver( + entityTypes, + (env) -> { + try { + Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); + return UrnToEntityMapper.map(urn); + } catch (Exception e) { + throw new RuntimeException("Failed to get entity", e); + } + }); + } + + private DataFetcher getResolver(LoadableType<?, String> loadableType) { + return getResolver(loadableType, this::getUrnField); + } + + private <T, K> DataFetcher getResolver( + LoadableType<T, K> loadableType, Function<DataFetchingEnvironment, K> keyProvider) { + return new LoadableTypeResolver<>(loadableType, keyProvider); + } + + private String getUrnField(DataFetchingEnvironment env) { + return env.getArgument(URN_FIELD_NAME); + } + + private void configureMutationResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Mutation", + typeWiring -> + typeWiring + .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) + .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) + .dataFetcher( + "createTag", new CreateTagResolver(this.entityClient, this.entityService)) + .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) + .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) + .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) + .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) + .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) + .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) + .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) + .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) + .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) + .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) + .dataFetcher("addTag", new AddTagResolver(entityService)) + .dataFetcher("addTags", new AddTagsResolver(entityService)) + .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) + .dataFetcher("removeTag", new RemoveTagResolver(entityService)) + .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) + .dataFetcher("addTerm", new AddTermResolver(entityService)) + .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) + .dataFetcher("addTerms", new AddTermsResolver(entityService)) + .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) + .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) + .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) + .dataFetcher( + "updateDescription", + new UpdateDescriptionResolver(entityService, this.entityClient)) + .dataFetcher("addOwner", new AddOwnerResolver(entityService)) + .dataFetcher("addOwners", new AddOwnersResolver(entityService)) + .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) + .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) + .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) + 
.dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) + .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) + .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) + .dataFetcher( + "removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) + .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) + .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) + .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) + .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) + .dataFetcher( + "createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) + .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) + .dataFetcher( + "setDomain", new SetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) + .dataFetcher( + "updateDeprecation", + new UpdateDeprecationResolver(this.entityClient, this.entityService)) + .dataFetcher( + "batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) + .dataFetcher( + "unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) + .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) + .dataFetcher( + "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) + .dataFetcher( + "revokeAccessToken", + new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) + .dataFetcher( + "createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "createIngestionExecutionRequest", + new CreateIngestionExecutionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "cancelIngestionExecutionRequest", + new CancelIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher( + "createTestConnectionRequest", + new CreateTestConnectionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "deleteAssertion", + new DeleteAssertionResolver(this.entityClient, this.entityService)) + .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) + .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) + .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) + .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) + .dataFetcher( + "createGlossaryTerm", + new CreateGlossaryTermResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createGlossaryNode", + new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateParentNode", + new UpdateParentNodeResolver(this.entityService, this.entityClient)) + .dataFetcher( + "deleteGlossaryEntity", + new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateName", new UpdateNameResolver(this.entityService, this.entityClient)) + .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) + .dataFetcher( + "removeRelatedTerms", new 
RemoveRelatedTermsResolver(this.entityService)) + .dataFetcher( + "createNativeUserResetToken", + new CreateNativeUserResetTokenResolver(this.nativeUserService)) + .dataFetcher( + "batchUpdateSoftDeleted", + new BatchUpdateSoftDeletedResolver(this.entityService)) + .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) + .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) + .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) + .dataFetcher( + "createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) + .dataFetcher( + "acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) + .dataFetcher("createPost", new CreatePostResolver(this.postService)) + .dataFetcher("deletePost", new DeletePostResolver(this.postService)) + .dataFetcher( + "batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) + .dataFetcher("createView", new CreateViewResolver(this.viewService)) + .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) + .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) + .dataFetcher( + "updateGlobalViewsSettings", + new UpdateGlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateCorpUserViewsSettings", + new UpdateCorpUserViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateLineage", + new UpdateLineageResolver(this.entityService, this.lineageService)) + .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) + .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) + .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) + .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) + .dataFetcher( + "createDataProduct", new CreateDataProductResolver(this.dataProductService)) + .dataFetcher( + "updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) + .dataFetcher( + "deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) + .dataFetcher( + "batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) + .dataFetcher( + "createOwnershipType", + new CreateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "updateOwnershipType", + new UpdateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "deleteOwnershipType", + new DeleteOwnershipTypeResolver(this.ownershipTypeService))); + } + + private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "SearchResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((SearchResult) env.getSource()).getEntity()))) + .type( + "MatchedField", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((MatchedField) env.getSource()).getEntity()))) + .type( + "SearchAcrossLineageResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity()))) + .type( + "AggregationMetadata", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((AggregationMetadata) env.getSource()).getEntity()))) + .type( + "RecommendationContent", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((RecommendationContent) env.getSource()).getEntity()))) + .type( + 
"BrowseResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, (env) -> ((BrowseResults) env.getSource()).getEntities()))) + .type( + "ParentDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new EntityTypeBatchResolver( + entityTypes, + (env) -> { + final ParentDomainsResult result = env.getSource(); + return result != null ? result.getDomains() : null; + }))) + .type( + "EntityRelationshipLegacy", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity()))) + .type( + "EntityRelationship", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((EntityRelationship) env.getSource()).getEntity()))) + .type( + "BrowseResultGroupV2", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity()))) + .type( + "BrowsePathEntry", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity()))) + .type( + "LineageRelationship", + typeWiring -> + typeWiring + .dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((LineageRelationship) env.getSource()).getEntity())) + .dataFetcher( + "createdActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getCreatedActor() != null + ? relationship.getCreatedActor() + : null; + })) + .dataFetcher( + "updatedActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getUpdatedActor() != null + ? 
relationship.getUpdatedActor() + : null; + }))) + .type( + "ListDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new LoadableTypeBatchResolver<>( + domainType, + (env) -> + ((ListDomainsResult) env.getSource()) + .getDomains().stream() + .map(Domain::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryTermsResult", + typeWiring -> + typeWiring.dataFetcher( + "terms", + new LoadableTypeBatchResolver<>( + glossaryTermType, + (env) -> + ((GetRootGlossaryTermsResult) env.getSource()) + .getTerms().stream() + .map(GlossaryTerm::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryNodesResult", + typeWiring -> + typeWiring.dataFetcher( + "nodes", + new LoadableTypeBatchResolver<>( + glossaryNodeType, + (env) -> + ((GetRootGlossaryNodesResult) env.getSource()) + .getNodes().stream() + .map(GlossaryNode::getUrn) + .collect(Collectors.toList())))) + .type( + "AutoCompleteResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResults) env.getSource()).getEntities()))) + .type( + "AutoCompleteResultForEntity", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities()))) + .type( + "PolicyMatchCriterionValue", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity()))) + .type( + "ListTestsResult", + typeWiring -> + typeWiring.dataFetcher( + "tests", + new LoadableTypeBatchResolver<>( + testType, + (env) -> + ((ListTestsResult) env.getSource()) + .getTests().stream() + .map(Test::getUrn) + .collect(Collectors.toList())))) + .type( + "QuickFilter", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((QuickFilter) env.getSource()).getEntity()))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "ownershipType", + new EntityTypeResolver( + entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))); + } - private DataFetcher getEntityResolver() { - return new EntityTypeResolver(entityTypes, - (env) -> { - try { - Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); - return UrnToEntityMapper.map(urn); - } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); - } - }); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dataset} type. + */ + private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Dataset", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dataset) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getContainer() != null + ? 
dataset.getContainer().getUrn() + : null; + })) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getDataPlatformInstance() != null + ? dataset.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "datasetProfiles", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "datasetProfile", + DatasetProfileMapper::map)) + .dataFetcher( + "operations", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "operation", + OperationMapper::map, + new SortCriterion() + .setField(OPERATION_EVENT_TIME_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))) + .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) + .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) + .dataFetcher( + "health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher( + "assertions", new EntityAssertionsResolver(entityClient, graphClient)) + .dataFetcher("testResults", new TestResultsResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("runs", new EntityRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "UserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn()))) + .type( + "ForeignKeyConstraint", + typeWiring -> + typeWiring.dataFetcher( + "foreignDataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> + ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn()))) + .type( + "SiblingProperties", + typeWiring -> + typeWiring.dataFetcher( + "siblings", + new EntityTypeBatchResolver( + new ArrayList<>(entityTypes), + (env) -> ((SiblingProperties) env.getSource()).getSiblings()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))) + .type( + "DatasetStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } - private DataFetcher getResolver(LoadableType<?, String> loadableType) { - return getResolver(loadableType, this::getUrnField); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.VersionedDataset} type. 
+ */ + private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "VersionedDataset", + typeWiring -> typeWiring.dataFetcher("relationships", new StaticDataFetcher(null))); + } - private <T, K> DataFetcher getResolver(LoadableType<T, K> loadableType, - Function<DataFetchingEnvironment, K> keyProvider) { - return new LoadableTypeResolver<>(loadableType, keyProvider); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. + */ + private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "AccessToken", + typeWiring -> + typeWiring.dataFetcher( + "metadata", + new LoadableTypeResolver<>( + accessTokenMetadataType, + (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn()))); + builder.type( + "ListAccessTokenResult", + typeWiring -> + typeWiring.dataFetcher( + "tokens", + new LoadableTypeBatchResolver<>( + accessTokenMetadataType, + (env) -> + ((ListAccessTokenResult) env.getSource()) + .getTokens().stream() + .map(AccessTokenMetadata::getUrn) + .collect(Collectors.toList())))); + } + + private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTerm", + typeWiring -> + typeWiring + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryNode", + typeWiring -> + typeWiring + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "SchemaFieldEntity", + typeWiring -> + typeWiring.dataFetcher( + "parent", + new EntityTypeResolver( + entityTypes, (env) -> ((SchemaFieldEntity) env.getSource()).getParent()))); + } + + private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "EntityPath", + typeWiring -> + typeWiring.dataFetcher( + "path", + new BatchGetEntitiesResolver( + entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); + } - private String getUrnField(DataFetchingEnvironment env) { - return env.getArgument(URN_FIELD_NAME); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpUser} type. 
+ */ + private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpUser", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "CorpUserInfo", + typeWiring -> + typeWiring.dataFetcher( + "manager", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn()))); + } - private void configureMutationResolvers(final RuntimeWiring.Builder builder) { - builder.type("Mutation", typeWiring -> typeWiring - .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) - .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) - .dataFetcher("createTag", new CreateTagResolver(this.entityClient, this.entityService)) - .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) - .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) - .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) - .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) - .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) - .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) - .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) - .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) - .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) - .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) - .dataFetcher("addTag", new AddTagResolver(entityService)) - .dataFetcher("addTags", new AddTagsResolver(entityService)) - .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) - .dataFetcher("removeTag", new RemoveTagResolver(entityService)) - .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) - .dataFetcher("addTerm", new AddTermResolver(entityService)) - .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) - .dataFetcher("addTerms", new AddTermsResolver(entityService)) - .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) - .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) - .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) - .dataFetcher("updateDescription", new UpdateDescriptionResolver(entityService, this.entityClient)) - .dataFetcher("addOwner", new AddOwnerResolver(entityService)) - .dataFetcher("addOwners", new AddOwnersResolver(entityService)) - .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) - .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) - .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) - .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) - .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) - .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) - .dataFetcher("removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) - .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) - .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) - .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) - .dataFetcher("updateUserStatus", new 
UpdateUserStatusResolver(this.entityClient)) - .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) - .dataFetcher("setDomain", new SetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) - .dataFetcher("updateDeprecation", new UpdateDeprecationResolver(this.entityClient, this.entityService)) - .dataFetcher("batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) - .dataFetcher("unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) - .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) - .dataFetcher("createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) - .dataFetcher("revokeAccessToken", new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) - .dataFetcher("createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) - .dataFetcher("createIngestionExecutionRequest", new CreateIngestionExecutionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("cancelIngestionExecutionRequest", new CancelIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("createTestConnectionRequest", new CreateTestConnectionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("deleteAssertion", new DeleteAssertionResolver(this.entityClient, this.entityService)) - .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) - .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) - .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) - .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) - .dataFetcher("createGlossaryTerm", new CreateGlossaryTermResolver(this.entityClient, this.entityService)) - .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) - .dataFetcher("updateParentNode", new UpdateParentNodeResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteGlossaryEntity", - new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) - .dataFetcher("updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) - .dataFetcher("removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) - .dataFetcher("createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) - .dataFetcher("batchUpdateSoftDeleted", new BatchUpdateSoftDeletedResolver(this.entityService)) - .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) - .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) - .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) - .dataFetcher("createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("acceptRole", new AcceptRoleResolver(this.roleService, 
this.inviteTokenService)) - .dataFetcher("createPost", new CreatePostResolver(this.postService)) - .dataFetcher("deletePost", new DeletePostResolver(this.postService)) - .dataFetcher("batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) - .dataFetcher("createView", new CreateViewResolver(this.viewService)) - .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) - .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) - .dataFetcher("updateGlobalViewsSettings", new UpdateGlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateCorpUserViewsSettings", new UpdateCorpUserViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateLineage", new UpdateLineageResolver(this.entityService, this.lineageService)) - .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) - .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) - .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) - .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) - .dataFetcher("createDataProduct", new CreateDataProductResolver(this.dataProductService)) - .dataFetcher("updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) - .dataFetcher("deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) - .dataFetcher("batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) - .dataFetcher("createOwnershipType", new CreateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("updateOwnershipType", new UpdateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("deleteOwnershipType", new DeleteOwnershipTypeResolver(this.ownershipTypeService)) - ); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpGroup} type. 
+ */ + private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpGroup", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder + .type( + "CorpGroupInfo", + typeWiring -> + typeWiring + .dataFetcher( + "admins", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getAdmins().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()))) + .dataFetcher( + "members", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getMembers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList())))) + .type( + "ListGroupsResult", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> + ((ListGroupsResult) env.getSource()) + .getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList())))); + } + + private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "Tag", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "TagAssociation", + typeWiring -> + typeWiring.dataFetcher( + "tag", + new LoadableTypeResolver<>( + tagType, + (env) -> + ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()) + .getTag() + .getUrn()))); + } + + private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTermAssociation", + typeWiring -> + typeWiring.dataFetcher( + "term", + new LoadableTypeResolver<>( + glossaryTermType, + (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn()))); + } - private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("SearchResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchResult) env.getSource()).getEntity())) - ) - .type("MatchedField", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((MatchedField) env.getSource()).getEntity())) - ) - .type("SearchAcrossLineageResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity())) - ) - .type("AggregationMetadata", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((AggregationMetadata) env.getSource()).getEntity())) - ) - .type("RecommendationContent", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((RecommendationContent) env.getSource()).getEntity())) - ) - .type("BrowseResults", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((BrowseResults) env.getSource()).getEntities())) - ) - .type("ParentDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new EntityTypeBatchResolver(entityTypes, - (env) -> { - final ParentDomainsResult result = env.getSource(); - return result != null ? 
result.getDomains() : null; - })) - ) - .type("EntityRelationshipLegacy", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity())) - ) - .type("EntityRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationship) env.getSource()).getEntity())) - ) - .type("BrowseResultGroupV2", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity())) - ) - .type("BrowsePathEntry", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowsePathEntry) env.getSource()).getEntity())) - ) - .type("LineageRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((LineageRelationship) env.getSource()).getEntity())) - .dataFetcher("createdActor", - new EntityTypeResolver(entityTypes, - (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getCreatedActor() != null ? relationship.getCreatedActor() : null; - }) - ) - .dataFetcher("updatedActor", - new EntityTypeResolver(entityTypes, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Notebook} type. + */ + private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Notebook", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getUpdatedActor() != null ? 
relationship.getUpdatedActor() : null; - }) - ) - ) - .type("ListDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new LoadableTypeBatchResolver<>(domainType, - (env) -> ((ListDomainsResult) env.getSource()).getDomains().stream() - .map(Domain::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryTermsResult", typeWiring -> typeWiring - .dataFetcher("terms", new LoadableTypeBatchResolver<>(glossaryTermType, - (env) -> ((GetRootGlossaryTermsResult) env.getSource()).getTerms().stream() - .map(GlossaryTerm::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryNodesResult", typeWiring -> typeWiring - .dataFetcher("nodes", new LoadableTypeBatchResolver<>(glossaryNodeType, - (env) -> ((GetRootGlossaryNodesResult) env.getSource()).getNodes().stream() - .map(GlossaryNode::getUrn) - .collect(Collectors.toList()))) - ) - .type("AutoCompleteResults", typeWiring -> typeWiring - .dataFetcher("entities", - new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResults) env.getSource()).getEntities())) - ) - .type("AutoCompleteResultForEntity", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities())) - ) - .type("PolicyMatchCriterionValue", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity())) - ) - .type("ListTestsResult", typeWiring -> typeWiring - .dataFetcher("tests", new LoadableTypeBatchResolver<>(testType, - (env) -> ((ListTestsResult) env.getSource()).getTests().stream() - .map(Test::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuickFilter", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((QuickFilter) env.getSource()).getEntity())) - ) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("ownershipType", new EntityTypeResolver(entityTypes, - (env) -> ((Owner) env.getSource()).getOwnershipType())) - ); - } + final Notebook notebook = env.getSource(); + return notebook.getDataPlatformInstance() != null + ? notebook.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dataset} type. - */ - private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Dataset", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dashboard} type. 
+ */ + private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Dashboard", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dataset) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getContainer() != null ? dataset.getContainer().getUrn() : null; - }) - ) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Dashboard dashboard = env.getSource(); + return dashboard.getDataPlatformInstance() != null + ? dashboard.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getDataPlatformInstance() != null ? dataset.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("datasetProfiles", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "datasetProfile", - DatasetProfileMapper::map - ) - ) - .dataFetcher("operations", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "operation", - OperationMapper::map, - new SortCriterion().setField(OPERATION_EVENT_TIME_FIELD_NAME).setOrder(SortOrder.DESCENDING) - ) - ) - .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) - .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) - .dataFetcher("health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("assertions", new EntityAssertionsResolver(entityClient, graphClient)) - .dataFetcher("testResults", new TestResultsResolver(entityClient)) - .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("runs", new EntityRunsResolver(entityClient)) + final Dashboard dashboard = env.getSource(); + return dashboard.getContainer() != null + ? 
dashboard.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) + .dataFetcher( + "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("owner", new OwnerTypeResolver<>(ownerTypes, - (env) -> ((Owner) env.getSource()).getOwner())) - ) - .type("UserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>(corpUserType, - (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn())) - ) - .type("ForeignKeyConstraint", typeWiring -> typeWiring - .dataFetcher("foreignDataset", new LoadableTypeResolver<>(datasetType, - (env) -> ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn())) - ) - .type("SiblingProperties", typeWiring -> typeWiring - .dataFetcher("siblings", - new EntityTypeBatchResolver( - new ArrayList<>(entityTypes), - (env) -> ((SiblingProperties) env.getSource()).getSiblings())) - ) - .type("InstitutionalMemoryMetadata", typeWiring -> typeWiring - .dataFetcher("author", new LoadableTypeResolver<>(corpUserType, - (env) -> ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())) - ) - .type("DatasetStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "DashboardInfo", + typeWiring -> + typeWiring.dataFetcher( + "charts", + new LoadableTypeBatchResolver<>( + chartType, + (env) -> + ((DashboardInfo) env.getSource()) + .getCharts().stream() + .map(Chart::getUrn) + .collect(Collectors.toList())))); + builder.type( + "DashboardUserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn()))); + builder.type( + "DashboardStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.VersionedDataset} type. - */ - private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("VersionedDataset", typeWiring -> typeWiring - .dataFetcher("relationships", new StaticDataFetcher(null))); - - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. 
- */ - private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { - builder.type("AccessToken", typeWiring -> typeWiring - .dataFetcher("metadata", new LoadableTypeResolver<>(accessTokenMetadataType, - (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn())) - ); - builder.type("ListAccessTokenResult", typeWiring -> typeWiring - .dataFetcher("tokens", new LoadableTypeBatchResolver<>(accessTokenMetadataType, - (env) -> ((ListAccessTokenResult) env.getSource()).getTokens().stream() - .map(AccessTokenMetadata::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryNode", typeWiring -> typeWiring - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { - builder.type("SchemaFieldEntity", typeWiring -> typeWiring - .dataFetcher("parent", new EntityTypeResolver(entityTypes, - (env) -> ((SchemaFieldEntity) env.getSource()).getParent())) - ); - } - - private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { - builder.type("EntityPath", typeWiring -> typeWiring - .dataFetcher("path", new BatchGetEntitiesResolver(entityTypes, - (env) -> ((EntityPath) env.getSource()).getPath())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpUser} type. - */ - private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpUser", typeWiring -> typeWiring - .dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("CorpUserInfo", typeWiring -> typeWiring - .dataFetcher("manager", new LoadableTypeResolver<>(corpUserType, - (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpGroup} type. 
- */ - private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpGroup", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); - builder.type("CorpGroupInfo", typeWiring -> typeWiring - .dataFetcher("admins", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getAdmins().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - .dataFetcher("members", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getMembers().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - ) - .type("ListGroupsResult", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(corpGroupType, - (env) -> ((ListGroupsResult) env.getSource()).getGroups().stream() - .map(CorpGroup::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("Tag", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - builder.type("TagAssociation", typeWiring -> typeWiring - .dataFetcher("tag", - new LoadableTypeResolver<>(tagType, - (env) -> ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()).getTag().getUrn())) - ); - } - - private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTermAssociation", typeWiring -> typeWiring - .dataFetcher("term", - new LoadableTypeResolver<>(glossaryTermType, - (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn())) - ); - } + DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Notebook} type. + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Chart} type. */ - private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { - builder.type("Notebook", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Notebook notebook = env.getSource(); - return notebook.getDataPlatformInstance() != null ? notebook.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dashboard} type. 
- */ - private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { - builder.type("Dashboard", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getDataPlatformInstance() != null ? dashboard.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>(containerType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getContainer() != null ? dashboard.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) - .dataFetcher("statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("DashboardInfo", typeWiring -> typeWiring - .dataFetcher("charts", new LoadableTypeBatchResolver<>(chartType, - (env) -> ((DashboardInfo) env.getSource()).getCharts().stream() - .map(Chart::getUrn) - .collect(Collectors.toList()))) - ); - builder.type("DashboardUserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>( - corpUserType, - (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn())) - ); - builder.type("DashboardStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, - (env) -> { - DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Chart} type. - */ - private void configureChartResolvers(final RuntimeWiring.Builder builder) { - builder.type("Chart", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getDataPlatformInstance() != null ? chart.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>( - containerType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getContainer() != null ? 
chart.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("ChartInfo", typeWiring -> typeWiring - .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((ChartInfo) env.getSource()).getInputs().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - ); - } - - /** - * Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. - */ - private void configureTypeResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Entity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("EntityWithRelationships", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("BrowsableEntity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(browsableTypes.stream() - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("OwnerType", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(ownerTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType<?, ?>) graphType) - .collect(Collectors.toList()) - ))) - .type("PlatformSchema", typeWiring -> typeWiring - .typeResolver(new PlatformSchemaUnionTypeResolver()) - ) - .type("HyperParameterValueType", typeWiring -> typeWiring - .typeResolver(new HyperParameterValueTypeResolver()) - ) - .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) - .type("TimeSeriesAspect", typeWiring -> typeWiring - .typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) - .type("ResultsType", typeWiring -> typeWiring - .typeResolver(new ResultsTypeResolver())); - } - - /** - * Configures custom type extensions leveraged within our GraphQL schema. - */ - private void configureTypeExtensions(final RuntimeWiring.Builder builder) { - builder.scalar(GraphQLLong); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataJob} type. 
- */ - private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataJob", typeWiring -> typeWiring + private void configureChartResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Chart", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("dataFlow", new LoadableTypeResolver<>(dataFlowType, - (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Chart chart = env.getSource(); + return chart.getDataPlatformInstance() != null + ? chart.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final DataJob dataJob = env.getSource(); - return dataJob.getDataPlatformInstance() != null ? dataJob.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + final Chart chart = env.getSource(); + return chart.getContainer() != null + ? chart.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher( + "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ) - .type("DataJobInputOutput", typeWiring -> typeWiring - .dataFetcher("inputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("outputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getOutputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("inputDatajobs", new LoadableTypeBatchResolver<>(dataJobType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatajobs().stream() - .map(DataJob::getUrn) - .collect(Collectors.toList()))) - ); - } + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "ChartInfo", + typeWiring -> + typeWiring.dataFetcher( + "inputs", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((ChartInfo) env.getSource()) + .getInputs().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList())))); + } + + /** Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. 
*/ + private void configureTypeResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Entity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "EntityWithRelationships", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "BrowsableEntity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + browsableTypes.stream() + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "OwnerType", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + ownerTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType<?, ?>) graphType) + .collect(Collectors.toList())))) + .type( + "PlatformSchema", + typeWiring -> typeWiring.typeResolver(new PlatformSchemaUnionTypeResolver())) + .type( + "HyperParameterValueType", + typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) + .type( + "TimeSeriesAspect", + typeWiring -> typeWiring.typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) + .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver())); + } + + /** Configures custom type extensions leveraged within our GraphQL schema. */ + private void configureTypeExtensions(final RuntimeWiring.Builder builder) { + builder.scalar(GraphQLLong); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataFlow} type. - */ - private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataFlow", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataJob} type. + */ + private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataJob", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "dataFlow", + new LoadableTypeResolver<>( + dataFlowType, + (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataPlatformInstance() != null + ? 
dataJob.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))) + .type( + "DataJobInputOutput", + typeWiring -> + typeWiring + .dataFetcher( + "inputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "outputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getOutputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "inputDatajobs", + new LoadableTypeBatchResolver<>( + dataJobType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatajobs().stream() + .map(DataJob::getUrn) + .collect(Collectors.toList())))); + } + + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataFlow} type. + */ + private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataFlow", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final DataFlow dataFlow = env.getSource(); - return dataFlow.getDataPlatformInstance() != null ? dataFlow.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } + final DataFlow dataFlow = env.getSource(); + return dataFlow.getDataPlatformInstance() != null + ? dataFlow.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.MLFeatureTable} type. - */ - private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { - builder - .type("MLFeatureTable", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.MLFeatureTable} type. 
+ */ + private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "MLFeatureTable", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeatureTable entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeatureTableProperties", + typeWiring -> + typeWiring + .dataFetcher( + "mlFeatures", + new LoadableTypeBatchResolver<>( + mlFeatureType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlFeatures().stream() + .map(MLFeature::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of())) + .dataFetcher( + "mlPrimaryKeys", + new LoadableTypeBatchResolver<>( + mlPrimaryKeyType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() + != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlPrimaryKeys().stream() + .map(MLPrimaryKey::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of()))) + .type( + "MLFeatureProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, (env) -> { - final MLFeatureTable entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeatureTableProperties", typeWiring -> typeWiring - .dataFetcher("mlFeatures", - new LoadableTypeBatchResolver<>(mlFeatureType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null - ? ((MLFeatureTableProperties) env.getSource()).getMlFeatures().stream() - .map(MLFeature::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - .dataFetcher("mlPrimaryKeys", - new LoadableTypeBatchResolver<>(mlPrimaryKeyType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() != null - ? 
((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys().stream() - .map(MLPrimaryKey::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - ) - .type("MLFeatureProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLFeatureProperties) env.getSource()).getSources() == null) { + if (((MLFeatureProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLFeatureProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLPrimaryKeyProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { + } + return ((MLFeatureProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLPrimaryKeyProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> { + if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLPrimaryKeyProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLModel", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + } + return ((MLPrimaryKeyProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLModel", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModel mlModel = env.getSource(); + return mlModel.getDataPlatformInstance() != null + ? mlModel.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLModelProperties", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + mlModelGroupType, (env) -> { - final MLModel mlModel = env.getSource(); - return mlModel.getDataPlatformInstance() != null ? 
mlModel.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLModelProperties", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(mlModelGroupType, - (env) -> { - MLModelProperties properties = env.getSource(); - if (properties.getGroups() != null) { + MLModelProperties properties = env.getSource(); + if (properties.getGroups() != null) { return properties.getGroups().stream() .map(MLModelGroup::getUrn) .collect(Collectors.toList()); - } - return Collections.emptyList(); - }) - ) - ) - .type("MLModelGroup", typeWiring -> typeWiring + } + return Collections.emptyList(); + }))) + .type( + "MLModelGroup", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModelGroup entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeature", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeature entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLPrimaryKey", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLPrimaryKey entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? 
entity.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "GlossaryTerm", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "GlossaryNode", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureDomainResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Domain", + typeWiring -> + typeWiring + .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) + .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "DomainAssociation", + typeWiring -> + typeWiring.dataFetcher( + "domain", + new LoadableTypeResolver<>( + domainType, + (env) -> + ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()) + .getDomain() + .getUrn()))); + } + + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProduct", + typeWiring -> + typeWiring + .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Assertion", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final MLModelGroup entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeature", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Assertion assertion = env.getSource(); + return assertion.getDataPlatformInstance() != null + ? assertion.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); + } + + private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { + // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
+ builder.type( + "ActorFilter", + typeWiring -> + typeWiring + .dataFetcher( + "resolvedUsers", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - final MLFeature entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLPrimaryKey", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final ActorFilter filter = env.getSource(); + return filter.getUsers(); + })) + .dataFetcher( + "resolvedGroups", + new LoadableTypeBatchResolver<>( + corpGroupType, (env) -> { - final MLPrimaryKey entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))) - .type("GlossaryNode", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureDomainResolvers(final RuntimeWiring.Builder builder) { - builder.type("Domain", typeWiring -> typeWiring - .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) - .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("DomainAssociation", typeWiring -> typeWiring - .dataFetcher("domain", - new LoadableTypeResolver<>(domainType, - (env) -> ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()).getDomain().getUrn())) - ); - } - - private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProduct", typeWiring -> typeWiring - .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - } - - private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { - builder.type("Assertion", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Assertion assertion = env.getSource(); - return assertion.getDataPlatformInstance() != null ? assertion.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); - } - - private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { - // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
- builder.type("ActorFilter", typeWiring -> typeWiring.dataFetcher("resolvedUsers", - new LoadableTypeBatchResolver<>(corpUserType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getUsers(); - })).dataFetcher("resolvedGroups", new LoadableTypeBatchResolver<>(corpGroupType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getGroups(); - })).dataFetcher("resolvedRoles", new LoadableTypeBatchResolver<>(dataHubRoleType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getRoles(); - })).dataFetcher("resolvedOwnershipTypes", new LoadableTypeBatchResolver<>(ownershipType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getResourceOwnersTypes(); - }))); - } - - private void configureRoleResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataHubRole", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureViewResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataHubView", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListViewsResult", typeWiring -> typeWiring - .dataFetcher("views", new LoadableTypeBatchResolver<>( - dataHubViewType, - (env) -> ((ListViewsResult) env.getSource()).getViews().stream() - .map(DataHubView::getUrn) - .collect(Collectors.toList()))) - ) - .type("CorpUserViewsSettings", typeWiring -> typeWiring - .dataFetcher("defaultView", new LoadableTypeResolver<>( + final ActorFilter filter = env.getSource(); + return filter.getGroups(); + })) + .dataFetcher( + "resolvedRoles", + new LoadableTypeBatchResolver<>( + dataHubRoleType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getRoles(); + })) + .dataFetcher( + "resolvedOwnershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getResourceOwnersTypes(); + }))); + } + + private void configureRoleResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataHubRole", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureViewResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataHubView", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListViewsResult", + typeWiring -> + typeWiring.dataFetcher( + "views", + new LoadableTypeBatchResolver<>( + dataHubViewType, + (env) -> + ((ListViewsResult) env.getSource()) + .getViews().stream() + .map(DataHubView::getUrn) + .collect(Collectors.toList())))) + .type( + "CorpUserViewsSettings", + typeWiring -> + typeWiring.dataFetcher( + "defaultView", + new LoadableTypeResolver<>( dataHubViewType, (env) -> { - final CorpUserViewsSettings settings = env.getSource(); - if (settings.getDefaultView() != null) { - return settings.getDefaultView().getUrn(); - } - return null; - } - ) - )); - } - - private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("QueryEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListQueriesResult", typeWiring -> typeWiring - .dataFetcher("queries", new LoadableTypeBatchResolver<>( - queryType, - (env) -> ((ListQueriesResult) env.getSource()).getQueries().stream() - 
.map(QueryEntity::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuerySubject", typeWiring -> typeWiring - .dataFetcher("dataset", new LoadableTypeResolver<>( - datasetType, - (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn())) - ); - - } - - private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { - builder - .type("OwnershipTypeEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListOwnershipTypesResult", typeWiring -> typeWiring - .dataFetcher("ownershipTypes", new LoadableTypeBatchResolver<>(ownershipType, - (env) -> ((ListOwnershipTypesResult) env.getSource()).getOwnershipTypes().stream() - .map(OwnershipTypeEntity::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProcessInstance", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + final CorpUserViewsSettings settings = env.getSource(); + if (settings.getDefaultView() != null) { + return settings.getDefaultView().getUrn(); + } + return null; + }))); + } + + private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "QueryEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListQueriesResult", + typeWiring -> + typeWiring.dataFetcher( + "queries", + new LoadableTypeBatchResolver<>( + queryType, + (env) -> + ((ListQueriesResult) env.getSource()) + .getQueries().stream() + .map(QueryEntity::getUrn) + .collect(Collectors.toList())))) + .type( + "QuerySubject", + typeWiring -> + typeWiring.dataFetcher( + "dataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn()))); + } + + private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { + builder + .type( + "OwnershipTypeEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListOwnershipTypesResult", + typeWiring -> + typeWiring.dataFetcher( + "ownershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> + ((ListOwnershipTypesResult) env.getSource()) + .getOwnershipTypes().stream() + .map(OwnershipTypeEntity::getUrn) + .collect(Collectors.toList())))); + } + + private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProcessInstance", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("state", new TimeSeriesAspectResolver(this.entityClient, "dataProcessInstance", - DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, DataProcessInstanceRunEventMapper::map))); - } - - private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { - builder.type("TestResult", typeWiring -> typeWiring - .dataFetcher("test", new LoadableTypeResolver<>(testType, - (env) -> { - final TestResult testResult = env.getSource(); - return testResult.getTest() != null ? 
testResult.getTest().getUrn() : null; - })) - ); - } - - private <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader(final LoadableType<T, K> graphType, final QueryContext queryContext) { - BatchLoaderContextProvider contextProvider = () -> queryContext; - DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); - return DataLoader.newDataLoader((keys, context) -> CompletableFuture.supplyAsync(() -> { - try { - log.debug(String.format("Batch loading entities of type: %s, keys: %s", graphType.name(), keys)); - return graphType.batchLoad(keys, context.getContext()); - } catch (Exception e) { - log.error(String.format("Failed to load Entities of type: %s, keys: %s", graphType.name(), keys) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to retrieve entities of type %s", graphType.name()), e); - } - }), loaderOptions); - } - - private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { - builder.type("IngestionSource", typeWiring -> typeWiring - .dataFetcher("executions", new IngestionSourceExecutionRequestsResolver(entityClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> { - final IngestionSource ingestionSource = env.getSource(); - return ingestionSource.getPlatform() != null ? ingestionSource.getPlatform().getUrn() : null; - }) - )); - } + .dataFetcher( + "state", + new TimeSeriesAspectResolver( + this.entityClient, + "dataProcessInstance", + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + DataProcessInstanceRunEventMapper::map))); + } + + private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "TestResult", + typeWiring -> + typeWiring.dataFetcher( + "test", + new LoadableTypeResolver<>( + testType, + (env) -> { + final TestResult testResult = env.getSource(); + return testResult.getTest() != null ? testResult.getTest().getUrn() : null; + }))); + } + + private <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader( + final LoadableType<T, K> graphType, final QueryContext queryContext) { + BatchLoaderContextProvider contextProvider = () -> queryContext; + DataLoaderOptions loaderOptions = + DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); + return DataLoader.newDataLoader( + (keys, context) -> + CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + String.format( + "Batch loading entities of type: %s, keys: %s", + graphType.name(), keys)); + return graphType.batchLoad(keys, context.getContext()); + } catch (Exception e) { + log.error( + String.format( + "Failed to load Entities of type: %s, keys: %s", + graphType.name(), keys) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to retrieve entities of type %s", graphType.name()), + e); + } + }), + loaderOptions); + } + + private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "IngestionSource", + typeWiring -> + typeWiring + .dataFetcher( + "executions", new IngestionSourceExecutionRequestsResolver(entityClient)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> { + final IngestionSource ingestionSource = env.getSource(); + return ingestionSource.getPlatform() != null + ? 
ingestionSource.getPlatform().getUrn() + : null; + }))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index 157fb10ce7078..4829194a8ce4d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -38,41 +38,41 @@ @Data public class GmsGraphQLEngineArgs { - EntityClient entityClient; - SystemEntityClient systemEntityClient; - GraphClient graphClient; - UsageClient usageClient; - AnalyticsService analyticsService; - EntityService entityService; - RecommendationsService recommendationsService; - StatefulTokenService statefulTokenService; - TimeseriesAspectService timeseriesAspectService; - EntityRegistry entityRegistry; - SecretService secretService; - NativeUserService nativeUserService; - IngestionConfiguration ingestionConfiguration; - AuthenticationConfiguration authenticationConfiguration; - AuthorizationConfiguration authorizationConfiguration; - GitVersion gitVersion; - TimelineService timelineService; - boolean supportsImpactAnalysis; - VisualConfiguration visualConfiguration; - TelemetryConfiguration telemetryConfiguration; - TestsConfiguration testsConfiguration; - DataHubConfiguration datahubConfiguration; - ViewsConfiguration viewsConfiguration; - SiblingGraphService siblingGraphService; - GroupService groupService; - RoleService roleService; - InviteTokenService inviteTokenService; - PostService postService; - ViewService viewService; - OwnershipTypeService ownershipTypeService; - SettingsService settingsService; - LineageService lineageService; - QueryService queryService; - FeatureFlags featureFlags; - DataProductService dataProductService; + EntityClient entityClient; + SystemEntityClient systemEntityClient; + GraphClient graphClient; + UsageClient usageClient; + AnalyticsService analyticsService; + EntityService entityService; + RecommendationsService recommendationsService; + StatefulTokenService statefulTokenService; + TimeseriesAspectService timeseriesAspectService; + EntityRegistry entityRegistry; + SecretService secretService; + NativeUserService nativeUserService; + IngestionConfiguration ingestionConfiguration; + AuthenticationConfiguration authenticationConfiguration; + AuthorizationConfiguration authorizationConfiguration; + GitVersion gitVersion; + TimelineService timelineService; + boolean supportsImpactAnalysis; + VisualConfiguration visualConfiguration; + TelemetryConfiguration telemetryConfiguration; + TestsConfiguration testsConfiguration; + DataHubConfiguration datahubConfiguration; + ViewsConfiguration viewsConfiguration; + SiblingGraphService siblingGraphService; + GroupService groupService; + RoleService roleService; + InviteTokenService inviteTokenService; + PostService postService; + ViewService viewService; + OwnershipTypeService ownershipTypeService; + SettingsService settingsService; + LineageService lineageService; + QueryService queryService; + FeatureFlags featureFlags; + DataProductService dataProductService; - //any fork specific args should go below this line + // any fork specific args should go below this line } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java index e7ef0c402a1de..472d9465aeee1 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java @@ -5,41 +5,42 @@ import java.util.Collection; import java.util.List; - /** - * An interface that allows the Core GMS GraphQL Engine to be extended without requiring - * code changes in the GmsGraphQLEngine class if new entities, relationships or resolvers - * need to be introduced. This is useful if you are maintaining a fork of DataHub and - * don't want to deal with merge conflicts. + * An interface that allows the Core GMS GraphQL Engine to be extended without requiring code + * changes in the GmsGraphQLEngine class if new entities, relationships or resolvers need to be + * introduced. This is useful if you are maintaining a fork of DataHub and don't want to deal with + * merge conflicts. */ public interface GmsGraphQLPlugin { /** * Initialization method that allows the plugin to instantiate + * * @param args */ void init(GmsGraphQLEngineArgs args); /** - * Return a list of schema files that contain graphql definitions - * that are served by this plugin + * Return a list of schema files that contain graphql definitions that are served by this plugin + * * @return */ List<String> getSchemaFiles(); /** * Return a list of LoadableTypes that this plugin serves + * * @return */ Collection<? extends LoadableType<?, ?>> getLoadableTypes(); /** - * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific resolvers. + * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific + * resolvers. + * * @param wiringBuilder : the builder being used to configure the runtime wiring * @param baseEngine : a reference to the core engine and its graphql types */ - default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) { - - } - + default void configureExtraResolvers( + final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java index 74c4c541b972b..f95727a1e8fd1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql; +import static graphql.schema.idl.RuntimeWiring.*; + import com.linkedin.datahub.graphql.exception.DataHubDataFetcherExceptionHandler; import graphql.ExecutionInput; import graphql.ExecutionResult; @@ -22,152 +24,157 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; -import static graphql.schema.idl.RuntimeWiring.*; - /** - * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and executing - * GQL queries. - - * <p>This class provides a {@link Builder} builder for constructing {@link GraphQL} instances provided one or more - * schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. + * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and + * executing GQL queries. + * + * <p>This class provides a {@link Builder} builder for constructing {@link GraphQL} instances + * provided one or more schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. 
* - * <p>In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set of variables. + * <p>In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set + * of variables. */ public class GraphQLEngine { - private final GraphQL _graphQL; - private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _dataLoaderSuppliers; + private final GraphQL _graphQL; + private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _dataLoaderSuppliers; - private GraphQLEngine(@Nonnull final List<String> schemas, - @Nonnull final RuntimeWiring runtimeWiring, - @Nonnull final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) { + private GraphQLEngine( + @Nonnull final List<String> schemas, + @Nonnull final RuntimeWiring runtimeWiring, + @Nonnull final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) { - _dataLoaderSuppliers = dataLoaderSuppliers; + _dataLoaderSuppliers = dataLoaderSuppliers; - /* - * Parse schema - */ - SchemaParser schemaParser = new SchemaParser(); - TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); - schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); + /* + * Parse schema + */ + SchemaParser schemaParser = new SchemaParser(); + TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); + schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); - /* - * Configure resolvers (data fetchers) - */ - SchemaGenerator schemaGenerator = new SchemaGenerator(); - GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); + /* + * Configure resolvers (data fetchers) + */ + SchemaGenerator schemaGenerator = new SchemaGenerator(); + GraphQLSchema graphQLSchema = + schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); - /* - * Instantiate engine - */ - _graphQL = new GraphQL.Builder(graphQLSchema) + /* + * Instantiate engine + */ + _graphQL = + new GraphQL.Builder(graphQLSchema) .defaultDataFetcherExceptionHandler(new DataHubDataFetcherExceptionHandler()) .instrumentation(new TracingInstrumentation()) .build(); - } + } + + public ExecutionResult execute( + @Nonnull final String query, + @Nullable final Map<String, Object> variables, + @Nonnull final QueryContext context) { + /* + * Init DataLoaderRegistry - should be created for each request. + */ + DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - public ExecutionResult execute(@Nonnull final String query, - @Nullable final Map<String, Object> variables, - @Nonnull final QueryContext context) { - /* - * Init DataLoaderRegistry - should be created for each request. - */ - DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - - /* - * Construct execution input - */ - ExecutionInput executionInput = ExecutionInput.newExecutionInput() + /* + * Construct execution input + */ + ExecutionInput executionInput = + ExecutionInput.newExecutionInput() .query(query) .variables(variables) .dataLoaderRegistry(register) .context(context) .build(); - /* - * Execute GraphQL Query - */ - return _graphQL.execute(executionInput); - } + /* + * Execute GraphQL Query + */ + return _graphQL.execute(executionInput); + } + + public GraphQL getGraphQL() { + return _graphQL; + } + + public static Builder builder() { + return new Builder(); + } + + /** Used to construct a {@link GraphQLEngine}. 
*/ + public static class Builder { - public GraphQL getGraphQL() { - return _graphQL; + private final List<String> _schemas = new ArrayList<>(); + private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _loaderSuppliers = + new HashMap<>(); + private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); + + /** + * Used to add a schema file containing the GQL types resolved by the engine. + * + * <p>If multiple files are provided, their schemas will be merged together. + */ + public Builder addSchema(final String schema) { + _schemas.add(schema); + return this; } - public static Builder builder() { - return new Builder(); + /** + * Used to register a {@link DataLoader} to be used within the configured resolvers. + * + * <p>The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} + * when invoked. + * + * <p>If multiple loaders are registered with the name, the latter will override the former. + */ + public Builder addDataLoader( + final String name, final Function<QueryContext, DataLoader<?, ?>> dataLoaderSupplier) { + _loaderSuppliers.put(name, dataLoaderSupplier); + return this; } /** - * Used to construct a {@link GraphQLEngine}. + * Used to register multiple {@link DataLoader}s for use within the configured resolvers. + * + * <p>The included {@link Supplier} provided is expected to return a new instance of {@link + * DataLoader} when invoked. + * + * <p>If multiple loaders are registered with the name, the latter will override the former. */ - public static class Builder { - - private final List<String> _schemas = new ArrayList<>(); - private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _loaderSuppliers = new HashMap<>(); - private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); - - /** - * Used to add a schema file containing the GQL types resolved by the engine. - * - * If multiple files are provided, their schemas will be merged together. - */ - public Builder addSchema(final String schema) { - _schemas.add(schema); - return this; - } - - /** - * Used to register a {@link DataLoader} to be used within the configured resolvers. - * - * The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoader(final String name, final Function<QueryContext, DataLoader<?, ?>> dataLoaderSupplier) { - _loaderSuppliers.put(name, dataLoaderSupplier); - return this; - } - - /** - * Used to register multiple {@link DataLoader}s for use within the configured resolvers. - * - * The included {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoaders(Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) { - _loaderSuppliers.putAll(dataLoaderSuppliers); - return this; - } - - /** - * Used to configure the runtime wiring (data fetchers & type resolvers) - * used in resolving the Graph QL schema. - * - * The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register any required - * data + type resolvers. - */ - public Builder configureRuntimeWiring(final Consumer<RuntimeWiring.Builder> builderFunc) { - builderFunc.accept(_runtimeWiringBuilder); - return this; - } - - /** - * Builds a {@link GraphQLEngine}. 
- */ - public GraphQLEngine build() { - return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); - } + public Builder addDataLoaders( + Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) { + _loaderSuppliers.putAll(dataLoaderSuppliers); + return this; } - private DataLoaderRegistry createDataLoaderRegistry(final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers, - final QueryContext context) { - final DataLoaderRegistry registry = new DataLoaderRegistry(); - for (String key : dataLoaderSuppliers.keySet()) { - registry.register(key, dataLoaderSuppliers.get(key).apply(context)); - } - return registry; + /** + * Used to configure the runtime wiring (data fetchers & type resolvers) used in resolving the + * Graph QL schema. + * + * <p>The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register + * any required data + type resolvers. + */ + public Builder configureRuntimeWiring(final Consumer<RuntimeWiring.Builder> builderFunc) { + builderFunc.accept(_runtimeWiringBuilder); + return this; } + /** Builds a {@link GraphQLEngine}. */ + public GraphQLEngine build() { + return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); + } + } + + private DataLoaderRegistry createDataLoaderRegistry( + final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers, + final QueryContext context) { + final DataLoaderRegistry registry = new DataLoaderRegistry(); + for (String key : dataLoaderSuppliers.keySet()) { + registry.register(key, dataLoaderSuppliers.get(key).apply(context)); + } + return registry; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java index 4803ef08fdddc..9f110e713ed57 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java @@ -4,38 +4,25 @@ import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; - -/** - * Provided as input to GraphQL resolvers; used to carry information about GQL request context. - */ +/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */ public interface QueryContext { - /** - * Returns true if the current actor is authenticated, false otherwise. - */ - boolean isAuthenticated(); + /** Returns true if the current actor is authenticated, false otherwise. */ + boolean isAuthenticated(); - /** - * Returns the {@link Authentication} associated with the current query context. - */ - Authentication getAuthentication(); + /** Returns the {@link Authentication} associated with the current query context. */ + Authentication getAuthentication(); - /** - * Returns the current authenticated actor, null if there is none. - */ - default Actor getActor() { - return getAuthentication().getActor(); - } + /** Returns the current authenticated actor, null if there is none. */ + default Actor getActor() { + return getAuthentication().getActor(); + } - /** - * Returns the current authenticated actor, null if there is none. - */ - default String getActorUrn() { - return getActor().toUrnStr(); - } + /** Returns the current authenticated actor, null if there is none. 
*/ + default String getActorUrn() { + return getActor().toUrnStr(); + } - /** - * Returns the authorizer used to authorize specific actions. - */ - Authorizer getAuthorizer(); + /** Returns the authorizer used to authorize specific actions. */ + Authorizer getAuthorizer(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java index df7f0884852d4..425c86ab0f0f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java @@ -4,7 +4,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class RelationshipKey { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java index d51de6652bb0a..c3ad37ddcb201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java @@ -10,11 +10,7 @@ public class TimeSeriesAspectArgs { private Long count; private TimeRange timeRange; - public TimeSeriesAspectArgs( - String urn, - String aspectName, - Long count, - TimeRange timeRange) { + public TimeSeriesAspectArgs(String urn, String aspectName, Long count, TimeRange timeRange) { this.urn = urn; this.aspectName = aspectName; this.count = count; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java index 5f703f520bde4..c7302c9772c5e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java @@ -3,7 +3,6 @@ import com.linkedin.usage.UsageTimeRange; import lombok.Data; - @Data public class UsageStatsKey { private String resource; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java index b0c0436ffd891..6f81de5f04d8f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java @@ -8,7 +8,7 @@ public class VersionedAspectKey { private String urn; private Long version; - public VersionedAspectKey(String urn, String aspectName, Long version) { + public VersionedAspectKey(String urn, String aspectName, Long version) { this.urn = urn; this.version = version; this.aspectName = aspectName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index a78d89e59bc7b..22ee4d4d4845c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import 
com.linkedin.data.codec.JacksonDataCodec; @@ -26,68 +28,84 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @AllArgsConstructor public class WeaklyTypedAspectsResolver implements DataFetcher<CompletableFuture<List<RawAspect>>> { - private final EntityClient _entityClient; - private final EntityRegistry _entityRegistry; - private static final JacksonDataCodec CODEC = new JacksonDataCodec(); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + private static final JacksonDataCodec CODEC = new JacksonDataCodec(); - private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { - return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); - } + private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { + return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); + } - @Override - public CompletableFuture<List<RawAspect>> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - List<RawAspect> results = new ArrayList<>(); + @Override + public CompletableFuture<List<RawAspect>> get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + List<RawAspect> results = new ArrayList<>(); - final QueryContext context = environment.getContext(); - final String urnStr = ((Entity) environment.getSource()).getUrn(); - final EntityType entityType = ((Entity) environment.getSource()).getType(); - final String entityTypeName = EntityTypeMapper.getName(entityType); - final AspectParams input = bindArgument(environment.getArgument("input"), AspectParams.class); + final QueryContext context = environment.getContext(); + final String urnStr = ((Entity) environment.getSource()).getUrn(); + final EntityType entityType = ((Entity) environment.getSource()).getType(); + final String entityTypeName = EntityTypeMapper.getName(entityType); + final AspectParams input = + bindArgument(environment.getArgument("input"), AspectParams.class); - EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); - entitySpec.getAspectSpecs().stream().filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)).forEach(aspectSpec -> { - try { - Urn urn = Urn.createFromString(urnStr); - RawAspect result = new RawAspect(); - EntityResponse entityResponse = - _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(aspectSpec.getName()), context.getAuthentication()).get(urn); - if (entityResponse == null || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { + EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); + entitySpec.getAspectSpecs().stream() + .filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)) + .forEach( + aspectSpec -> { + try { + Urn urn = Urn.createFromString(urnStr); + RawAspect result = new RawAspect(); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(aspectSpec.getName()), + context.getAuthentication()) + .get(urn); + if (entityResponse == null + || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { return; - } + } - DataMap resolvedAspect = entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); - if (resolvedAspect == null) { + DataMap resolvedAspect = + 
entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); + if (resolvedAspect == null) { return; - } + } - result.setPayload(CODEC.mapToString(resolvedAspect)); - result.setAspectName(aspectSpec.getName()); + result.setPayload(CODEC.mapToString(resolvedAspect)); + result.setAspectName(aspectSpec.getName()); - DataMap renderSpec = aspectSpec.getRenderSpec(); + DataMap renderSpec = aspectSpec.getRenderSpec(); - if (renderSpec != null) { + if (renderSpec != null) { AspectRenderSpec resultRenderSpec = new AspectRenderSpec(); resultRenderSpec.setDisplayType(renderSpec.getString("displayType")); resultRenderSpec.setDisplayName(renderSpec.getString("displayName")); resultRenderSpec.setKey(renderSpec.getString("key")); result.setRenderSpec(resultRenderSpec); - } + } - results.add(result); - } catch (IOException | RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + aspectSpec.getName() + " for urn " + urnStr + " ", e); - } - }); - return results; + results.add(result); + } catch (IOException | RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + + aspectSpec.getName() + + " for urn " + + urnStr + + " ", + e); + } + }); + return results; }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java index 7728dcae5d8ee..3bf932c4281e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java @@ -7,18 +7,17 @@ import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - public class AnalyticsChartTypeResolver implements TypeResolver { - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TimeSeriesChart) { - return env.getSchema().getObjectType("TimeSeriesChart"); - } else if (env.getObject() instanceof BarChart) { - return env.getSchema().getObjectType("BarChart"); - } else if (env.getObject() instanceof TableChart) { - return env.getSchema().getObjectType("TableChart"); - } else { - throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TimeSeriesChart) { + return env.getSchema().getObjectType("TimeSeriesChart"); + } else if (env.getObject() instanceof BarChart) { + return env.getSchema().getObjectType("BarChart"); + } else if (env.getObject() instanceof TableChart) { + return env.getSchema().getObjectType("TableChart"); + } else { + throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index b8a5dd1121a10..3f635872747a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -27,15 +27,11 @@ import java.util.Collections; 
import java.util.List; import java.util.Optional; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered on the Analytics screen of the DataHub application. */ @Slf4j @RequiredArgsConstructor public final class GetChartsResolver implements DataFetcher<List<AnalyticsChartGroup>> { @@ -47,15 +43,17 @@ public final class GetChartsResolver implements DataFetcher<List<AnalyticsChartG public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) throws Exception { Authentication authentication = ResolverUtils.getAuthentication(environment); try { - return ImmutableList.of(AnalyticsChartGroup.builder() - .setGroupId("DataHubUsageAnalytics") - .setTitle("DataHub Usage Analytics") - .setCharts(getProductAnalyticsCharts(authentication)) - .build(), AnalyticsChartGroup.builder() - .setGroupId("GlobalMetadataAnalytics") - .setTitle("Data Landscape Summary") - .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) - .build()); + return ImmutableList.of( + AnalyticsChartGroup.builder() + .setGroupId("DataHubUsageAnalytics") + .setTitle("DataHub Usage Analytics") + .setCharts(getProductAnalyticsCharts(authentication)) + .build(), + AnalyticsChartGroup.builder() + .setGroupId("GlobalMetadataAnalytics") + .setTitle("Data Landscape Summary") + .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) + .build()); } catch (Exception e) { log.error("Failed to retrieve analytics charts!", e); return Collections.emptyList(); // Simply return nothing. @@ -63,85 +61,115 @@ public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) } private TimeSeriesChart getActiveUsersTimeSeriesChart( - final DateTime beginning, - final DateTime end, - final String title, - final DateInterval interval - ) { + final DateTime beginning, + final DateTime end, + final String title, + final DateInterval interval) { final DateRange dateRange = - new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); + new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); final List<NamedLine> timeSeriesLines = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), dateRange, interval, - Optional.empty(), ImmutableMap.of(), Collections.emptyMap(), Optional.of("browserId")); + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + dateRange, + interval, + Optional.empty(), + ImmutableMap.of(), + Collections.emptyMap(), + Optional.of("browserId")); return TimeSeriesChart.builder() - .setTitle(title) - .setDateRange(dateRange) - .setInterval(interval) - .setLines(timeSeriesLines) - .build(); + .setTitle(title) + .setDateRange(dateRange) + .setInterval(interval) + .setLines(timeSeriesLines) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ - private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentication) throws Exception { + /** TODO: Config Driven Charts Instead of Hardcoded. 
*/ + private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentication) + throws Exception { final List<AnalyticsChart> charts = new ArrayList<>(); DateUtil dateUtil = new DateUtil(); final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek(); final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth(); final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange(); - charts.add(getActiveUsersTimeSeriesChart( + charts.add( + getActiveUsersTimeSeriesChart( startOfNextWeek.minusWeeks(10), startOfNextWeek.minusMillis(1), "Weekly Active Users", - DateInterval.WEEK - )); - charts.add(getActiveUsersTimeSeriesChart( + DateInterval.WEEK)); + charts.add( + getActiveUsersTimeSeriesChart( startOfNextMonth.minusMonths(12), startOfNextMonth.minusMillis(1), "Monthly Active Users", - DateInterval.MONTH - )); + DateInterval.MONTH)); String searchesTitle = "Searches Last Week"; DateInterval dailyInterval = DateInterval.DAY; String searchEventType = "SearchEvent"; final List<NamedLine> searchesTimeseries = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), trailingWeekDateRange, dailyInterval, - Optional.empty(), ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + trailingWeekDateRange, + dailyInterval, + Optional.empty(), + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), Optional.empty()); - charts.add(TimeSeriesChart.builder() - .setTitle(searchesTitle) - .setDateRange(trailingWeekDateRange) - .setInterval(dailyInterval) - .setLines(searchesTimeseries) - .build()); + charts.add( + TimeSeriesChart.builder() + .setTitle(searchesTitle) + .setDateRange(trailingWeekDateRange) + .setInterval(dailyInterval) + .setLines(searchesTimeseries) + .build()); final String topSearchTitle = "Top Search Queries"; final List<String> columns = ImmutableList.of("Query", "Count"); final List<Row> topSearchQueries = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "query.keyword", ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), - Optional.empty(), 10, AnalyticsUtil::buildCellWithSearchLandingPage); - charts.add(TableChart.builder().setTitle(topSearchTitle).setColumns(columns).setRows(topSearchQueries).build()); + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "query.keyword", + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), + Optional.empty(), + 10, + AnalyticsUtil::buildCellWithSearchLandingPage); + charts.add( + TableChart.builder() + .setTitle(topSearchTitle) + .setColumns(columns) + .setRows(topSearchQueries) + .build()); final String sectionViewsTitle = "Section Views across Entity Types"; final List<NamedBar> sectionViewsPerEntityType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "section.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), Collections.emptyMap(), - Optional.empty(), true); - charts.add(BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); + ImmutableMap.of("type", 
ImmutableList.of("EntitySectionViewEvent")), + Collections.emptyMap(), + Optional.empty(), + true); + charts.add( + BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); final String actionsByTypeTitle = "Actions by Entity Type"; final List<NamedBar> eventsByEventType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "actionType.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), Collections.emptyMap(), Optional.empty(), + ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), + Collections.emptyMap(), + Optional.empty(), true); charts.add(BarChart.builder().setTitle(actionsByTypeTitle).setBars(eventsByEventType).build()); @@ -149,61 +177,128 @@ private List<AnalyticsChart> getProductAnalyticsCharts(Authentication authentica final List<String> columns5 = ImmutableList.of("Dataset", "#Views"); final List<Row> topViewedDatasets = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "entityUrn.keyword", ImmutableMap.of("type", ImmutableList.of("EntityViewEvent"), "entityType.keyword", - ImmutableList.of(EntityType.DATASET.name())), Collections.emptyMap(), Optional.empty(), 10, + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "entityUrn.keyword", + ImmutableMap.of( + "type", + ImmutableList.of("EntityViewEvent"), + "entityType.keyword", + ImmutableList.of(EntityType.DATASET.name())), + Collections.emptyMap(), + Optional.empty(), + 10, AnalyticsUtil::buildCellWithEntityLandingPage); - AnalyticsUtil.hydrateDisplayNameForTable(_entityClient, topViewedDatasets, Constants.DATASET_ENTITY_NAME, - ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), AnalyticsUtil::getDatasetName, authentication); - charts.add(TableChart.builder().setTitle(topViewedTitle).setColumns(columns5).setRows(topViewedDatasets).build()); + AnalyticsUtil.hydrateDisplayNameForTable( + _entityClient, + topViewedDatasets, + Constants.DATASET_ENTITY_NAME, + ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), + AnalyticsUtil::getDatasetName, + authentication); + charts.add( + TableChart.builder() + .setTitle(topViewedTitle) + .setColumns(columns5) + .setRows(topViewedDatasets) + .build()); return charts; } - private List<AnalyticsChart> getGlobalMetadataAnalyticsCharts(Authentication authentication) throws Exception { + private List<AnalyticsChart> getGlobalMetadataAnalyticsCharts(Authentication authentication) + throws Exception { final List<AnalyticsChart> charts = new ArrayList<>(); // Chart 1: Entities per domain final List<NamedBar> entitiesPerDomain = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("domains.keyword", "platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerDomain, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); - AnalyticsUtil.hydrateDisplayNameForSegments(_entityClient, entitiesPerDomain, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, 
authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("domains.keyword", "platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerDomain, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); + AnalyticsUtil.hydrateDisplayNameForSegments( + _entityClient, + entitiesPerDomain, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerDomain.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); + charts.add( + BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); } // Chart 2: Entities per platform final List<NamedBar> entitiesPerPlatform = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerPlatform, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerPlatform, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerPlatform.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Platform").setBars(entitiesPerPlatform).build()); + charts.add( + BarChart.builder() + .setTitle("Entities per Platform") + .setBars(entitiesPerPlatform) + .build()); } // Chart 3: Entities per term final List<NamedBar> entitiesPerTerm = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("glossaryTerms.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerTerm, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("glossaryTerms.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerTerm, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName, + authentication); if (!entitiesPerTerm.isEmpty()) { charts.add(BarChart.builder().setTitle("Entities per Term").setBars(entitiesPerTerm).build()); } // Chart 4: Entities per fabric type final 
List<NamedBar> entitiesPerEnv = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("origin.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("origin.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); if (entitiesPerEnv.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); + charts.add( + BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); } return charts; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java index c631a13b0bcb6..7000ab7adff5d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java @@ -14,15 +14,11 @@ import java.util.Map; import java.util.Optional; import java.util.function.Function; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Highlights to be rendered on the Analytics screen of the DataHub application. */ @RequiredArgsConstructor @Slf4j public final class GetHighlightsResolver implements DataFetcher<List<Highlight>> { @@ -40,69 +36,72 @@ public final List<Highlight> get(DataFetchingEnvironment environment) throws Exc } private Highlight getTimeBasedHighlight( - final String title, - final String changeString, - final DateTime endDateTime, - final Function<DateTime, DateTime> periodStartFunc - ) { + final String title, + final String changeString, + final DateTime endDateTime, + final Function<DateTime, DateTime> periodStartFunc) { DateTime startDate = periodStartFunc.apply(endDateTime); DateTime timeBeforeThat = periodStartFunc.apply(startDate); - DateRange dateRangeThis = new DateRange( - String.valueOf(startDate.getMillis()), - String.valueOf(endDateTime.getMillis()) - ); - DateRange dateRangeLast = new DateRange( - String.valueOf(timeBeforeThat.getMillis()), - String.valueOf(startDate.getMillis()) - ); - - int activeUsersThisRange = _analyticsService.getHighlights( + DateRange dateRangeThis = + new DateRange( + String.valueOf(startDate.getMillis()), String.valueOf(endDateTime.getMillis())); + DateRange dateRangeLast = + new DateRange( + String.valueOf(timeBeforeThat.getMillis()), String.valueOf(startDate.getMillis())); + + int activeUsersThisRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeThis), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); - int activeUsersLastRange = _analyticsService.getHighlights( + Optional.of("browserId")); + int activeUsersLastRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeLast), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); + Optional.of("browserId")); String bodyText = ""; if (activeUsersLastRange > 0) { - double percentChange = 
(double) (activeUsersThisRange - activeUsersLastRange) - / (double) activeUsersLastRange * 100; + double percentChange = + (double) (activeUsersThisRange - activeUsersLastRange) + / (double) activeUsersLastRange + * 100; String directionChange = percentChange > 0 ? "increase" : "decrease"; - bodyText = Double.isInfinite(percentChange) ? "" + bodyText = + Double.isInfinite(percentChange) + ? "" : String.format(changeString, percentChange, directionChange); } - return Highlight.builder().setTitle(title).setValue(activeUsersThisRange).setBody(bodyText).build(); + return Highlight.builder() + .setTitle(title) + .setValue(activeUsersThisRange) + .setBody(bodyText) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ + /** TODO: Config Driven Charts Instead of Hardcoded. */ private List<Highlight> getHighlights() { final List<Highlight> highlights = new ArrayList<>(); DateTime endDate = DateTime.now(); - highlights.add(getTimeBasedHighlight( + highlights.add( + getTimeBasedHighlight( "Weekly Active Users", "%.2f%% %s from last week", endDate, - (date) -> date.minusWeeks(1) - )); - highlights.add(getTimeBasedHighlight( + (date) -> date.minusWeeks(1))); + highlights.add( + getTimeBasedHighlight( "Monthly Active Users", "%.2f%% %s from last month", endDate, - (date) -> date.minusMonths(1) - )); + (date) -> date.minusMonths(1))); // Entity metdata statistics getEntityMetadataStats("Datasets", EntityType.DATASET).ifPresent(highlights::add); @@ -121,10 +120,13 @@ private Optional<Highlight> getEntityMetadataStats(String title, EntityType enti if (numEntities == 0) { return Optional.empty(); } - int numEntitiesWithOwners = getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); - int numEntitiesWithTags = getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); + int numEntitiesWithOwners = + getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); + int numEntitiesWithTags = + getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); int numEntitiesWithGlossaryTerms = - getNumEntitiesFiltered(index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); + getNumEntitiesFiltered( + index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); int numEntitiesWithDescription = getNumEntitiesFiltered(index, ImmutableMap.of("hasDescription", ImmutableList.of("true"))); @@ -137,22 +139,36 @@ private Optional<Highlight> getEntityMetadataStats(String title, EntityType enti if (entityType == EntityType.DOMAIN) { // Don't show percent with domain when asking for stats regarding domains bodyText = - String.format("%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription); + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription); } else { int numEntitiesWithDomains = getNumEntitiesFiltered(index, ImmutableMap.of("hasDomain", ImmutableList.of("true"))); double percentWithDomains = 100.0 * numEntitiesWithDomains / numEntities; - bodyText = String.format( - "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription, percentWithDomains); + 
bodyText = + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription, + percentWithDomains); } } - return Optional.of(Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); + return Optional.of( + Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); } private int getNumEntitiesFiltered(String index, Map<String, List<String>> filters) { - return _analyticsService.getHighlights(index, Optional.empty(), filters, - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty()); + return _analyticsService.getHighlights( + index, + Optional.empty(), + filters, + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index f61c2eb77739b..31a8359f8f0e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.analytics.resolver; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -30,12 +32,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered on the Analytics screen of the DataHub application. 
*/ @RequiredArgsConstructor @Slf4j public final class GetMetadataAnalyticsResolver implements DataFetcher<List<AnalyticsChartGroup>> { @@ -45,7 +42,8 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher<List<Anal @Override public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) throws Exception { final Authentication authentication = ResolverUtils.getAuthentication(environment); - final MetadataAnalyticsInput input = bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); + final MetadataAnalyticsInput input = + bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); try { final AnalyticsChartGroup group = new AnalyticsChartGroup(); @@ -59,7 +57,8 @@ public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) } } - private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, Authentication authentication) throws Exception { + private List<AnalyticsChart> getCharts( + MetadataAnalyticsInput input, Authentication authentication) throws Exception { final List<AnalyticsChart> charts = new ArrayList<>(); List<String> entities = Collections.emptyList(); @@ -77,48 +76,76 @@ private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, Authenticat filter = QueryUtils.newFilter("domains.keyword", input.getDomain()); } - SearchResult searchResult = _entityClient.searchAcrossEntities(entities, query, filter, 0, 0, - null, null, authentication); + SearchResult searchResult = + _entityClient.searchAcrossEntities( + entities, query, filter, 0, 0, null, null, authentication); - List<AggregationMetadata> aggregationMetadataList = searchResult.getMetadata().getAggregations(); + List<AggregationMetadata> aggregationMetadataList = + searchResult.getMetadata().getAggregations(); Optional<AggregationMetadata> domainAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("domains")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("domains")) + .findFirst(); if (StringUtils.isEmpty(input.getDomain()) && domainAggregation.isPresent()) { List<NamedBar> domainChart = buildBarChart(domainAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, domainChart, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + domainChart, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build()); } Optional<AggregationMetadata> platformAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("platform")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("platform")) + .findFirst(); if (platformAggregation.isPresent()) { List<NamedBar> platformChart = buildBarChart(platformAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, platformChart, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); - charts.add(BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + platformChart, + 
Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); + charts.add( + BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); } Optional<AggregationMetadata> termAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("glossaryTerms")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("glossaryTerms")) + .findFirst(); if (termAggregation.isPresent()) { List<NamedBar> termChart = buildBarChart(termAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, termChart, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + termChart, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName, + authentication); charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build()); } Optional<AggregationMetadata> envAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("origin")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("origin")) + .findFirst(); if (envAggregation.isPresent()) { List<NamedBar> termChart = buildBarChart(envAggregation.get()); if (termChart.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build()); + charts.add( + BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build()); } } @@ -126,16 +153,20 @@ private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, Authenticat } private List<NamedBar> buildBarChart(AggregationMetadata aggregation) { - return aggregation.getAggregations() - .entrySet() - .stream() + return aggregation.getAggregations().entrySet().stream() .sorted(Collections.reverseOrder(Map.Entry.comparingByValue())) .limit(10) - .map(entry -> NamedBar.builder() - .setName(entry.getKey()) - .setSegments(ImmutableList.of( - BarSegment.builder().setLabel("#Entities").setValue(entry.getValue().intValue()).build())) - .build()) + .map( + entry -> + NamedBar.builder() + .setName(entry.getKey()) + .setSegments( + ImmutableList.of( + BarSegment.builder() + .setLabel("#Entities") + .setValue(entry.getValue().intValue()) + .build())) + .build()) .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java index 8e3bffc9ccf08..c7f5c0bbc63eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java @@ -3,20 +3,17 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -/** - * Returns true if analytics feature flag is enabled, false otherwise. - */ +/** Returns true if analytics feature flag is enabled, false otherwise. 
*/ public class IsAnalyticsEnabledResolver implements DataFetcher<Boolean> { private final Boolean _isAnalyticsEnabled; public IsAnalyticsEnabledResolver(final Boolean isAnalyticsEnabled) { - _isAnalyticsEnabled = isAnalyticsEnabled; + _isAnalyticsEnabled = isAnalyticsEnabled; } @Override public final Boolean get(DataFetchingEnvironment environment) throws Exception { - return _isAnalyticsEnabled; + return _isAnalyticsEnabled; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index 4135a7b0da148..03333bda05f61 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -40,7 +40,6 @@ import org.opensearch.search.aggregations.metrics.Cardinality; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class AnalyticsService { @@ -72,25 +71,35 @@ public String getUsageIndexName() { return _indexConvention.getIndexName(DATAHUB_USAGE_EVENT_INDEX); } - public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange, DateInterval granularity, + public List<NamedLine> getTimeseriesChart( + String indexName, + DateRange dateRange, + DateInterval granularity, Optional<String> dimension, // Length 1 for now - Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn) { + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn) { log.debug( - String.format("Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", - indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + String.format("filters: %s, uniqueOn: %s", filters, - uniqueOn)); - - AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); - - AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(DATE_HISTOGRAM) - .field("timestamp") - .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); + String.format( + "Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", + indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + + AggregationBuilder filteredAgg = + getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); + + AggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram(DATE_HISTOGRAM) + .field("timestamp") + .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); uniqueOn.ifPresent(s -> dateHistogram.subAggregation(getUniqueQuery(s))); if (dimension.isPresent()) { filteredAgg.subAggregation( - AggregationBuilders.terms(DIMENSION).field(dimension.get()).subAggregation(dateHistogram)); + AggregationBuilders.terms(DIMENSION) + .field(dimension.get()) + .subAggregation(dateHistogram)); } else { filteredAgg.subAggregation(dateHistogram); } @@ -99,39 +108,55 @@ public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange, Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { if (dimension.isPresent()) { - return 
aggregationResult.<Terms>get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedLine(bucket.getKeyAsString(), - extractPointsFromAggregations(bucket.getAggregations(), uniqueOn.isPresent()))) + return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedLine( + bucket.getKeyAsString(), + extractPointsFromAggregations( + bucket.getAggregations(), uniqueOn.isPresent()))) .collect(Collectors.toList()); } else { return ImmutableList.of( - new NamedLine("total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); + new NamedLine( + "total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); } } catch (Exception e) { - log.error(String.format("Caught exception while getting time series chart: %s", e.getMessage())); + log.error( + String.format("Caught exception while getting time series chart: %s", e.getMessage())); return ImmutableList.of(); } } private int extractCount(MultiBucketsAggregation.Bucket bucket, boolean didUnique) { - return didUnique ? (int) bucket.getAggregations().<Cardinality>get(UNIQUE).getValue() : (int) bucket.getDocCount(); + return didUnique + ? (int) bucket.getAggregations().<Cardinality>get(UNIQUE).getValue() + : (int) bucket.getDocCount(); } - private List<NumericDataPoint> extractPointsFromAggregations(Aggregations aggregations, boolean didUnique) { - return aggregations.<Histogram>get(DATE_HISTOGRAM).getBuckets() - .stream() - .map(bucket -> new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) + private List<NumericDataPoint> extractPointsFromAggregations( + Aggregations aggregations, boolean didUnique) { + return aggregations.<Histogram>get(DATE_HISTOGRAM).getBuckets().stream() + .map( + bucket -> + new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } - public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRange, List<String> dimensions, + public List<NamedBar> getBarChart( + String indexName, + Optional<DateRange> dateRange, + List<String> dimensions, // Length 1 or 2 - Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn, + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn, boolean showMissing) { log.debug( - String.format("Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", indexName, dateRange, - dimensions) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", + indexName, dateRange, dimensions) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); assert (dimensions.size() == 1 || dimensions.size() == 2); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); @@ -142,7 +167,8 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang } if (dimensions.size() == 2) { - TermsAggregationBuilder secondTermAgg = AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); + TermsAggregationBuilder secondTermAgg = + AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); if (showMissing) { secondTermAgg.missing(NA); } @@ -161,14 +187,24 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang List<BarSegment> barSegments = extractBarSegmentsFromAggregations(aggregationResult, DIMENSION, uniqueOn.isPresent()); 
return barSegments.stream() - .map(segment -> new NamedBar(segment.getLabel(), - ImmutableList.of(BarSegment.builder().setLabel("Count").setValue(segment.getValue()).build()))) + .map( + segment -> + new NamedBar( + segment.getLabel(), + ImmutableList.of( + BarSegment.builder() + .setLabel("Count") + .setValue(segment.getValue()) + .build()))) .collect(Collectors.toList()); } else { - return aggregationResult.<Terms>get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedBar(bucket.getKeyAsString(), - extractBarSegmentsFromAggregations(bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) + return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedBar( + bucket.getKeyAsString(), + extractBarSegmentsFromAggregations( + bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) .collect(Collectors.toList()); } } catch (Exception e) { @@ -177,31 +213,41 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang } } - private List<BarSegment> extractBarSegmentsFromAggregations(Aggregations aggregations, String aggregationKey, - boolean didUnique) { - return aggregations.<Terms>get(aggregationKey).getBuckets() - .stream() + private List<BarSegment> extractBarSegmentsFromAggregations( + Aggregations aggregations, String aggregationKey, boolean didUnique) { + return aggregations.<Terms>get(aggregationKey).getBuckets().stream() .map(bucket -> new BarSegment(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } public Row buildRow(String groupByValue, Function<String, Cell> groupByValueToCell, int count) { List<String> values = ImmutableList.of(groupByValue, String.valueOf(count)); - List<Cell> cells = ImmutableList.of(groupByValueToCell.apply(groupByValue), - Cell.builder().setValue(String.valueOf(count)).build()); + List<Cell> cells = + ImmutableList.of( + groupByValueToCell.apply(groupByValue), + Cell.builder().setValue(String.valueOf(count)).build()); return new Row(values, cells); } - public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRange, String groupBy, - Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn, - int maxRows, Function<String, Cell> groupByValueToCell) { + public List<Row> getTopNTableChart( + String indexName, + Optional<DateRange> dateRange, + String groupBy, + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn, + int maxRows, + Function<String, Cell> groupByValueToCell) { log.debug( - String.format("Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", indexName, dateRange, - groupBy) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", + indexName, dateRange, groupBy) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); - TermsAggregationBuilder termAgg = AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); + TermsAggregationBuilder termAgg = + AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); if (uniqueOn.isPresent()) { termAgg.order(BucketOrder.aggregation(UNIQUE, false)); termAgg.subAggregation(getUniqueQuery(uniqueOn.get())); @@ -212,10 +258,13 @@ public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRan Aggregations 
aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { - return aggregationResult.<Terms>get(DIMENSION).getBuckets() - .stream() - .map(bucket -> buildRow(bucket.getKeyAsString(), groupByValueToCell, - extractCount(bucket, uniqueOn.isPresent()))) + return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream() + .map( + bucket -> + buildRow( + bucket.getKeyAsString(), + groupByValueToCell, + extractCount(bucket, uniqueOn.isPresent()))) .collect(Collectors.toList()); } catch (Exception e) { log.error(String.format("Caught exception while getting top n chart: %s", e.getMessage())); @@ -223,11 +272,16 @@ public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRan } } - public int getHighlights(String indexName, Optional<DateRange> dateRange, Map<String, List<String>> filters, - Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn) { + public int getHighlights( + String indexName, + Optional<DateRange> dateRange, + Map<String, List<String>> filters, + Map<String, List<String>> mustNotFilters, + Optional<String> uniqueOn) { log.debug( - String.format("Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + String.format( - "filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); uniqueOn.ifPresent(s -> filteredAgg.subAggregation(getUniqueQuery(s))); @@ -246,7 +300,8 @@ public int getHighlights(String indexName, Optional<DateRange> dateRange, Map<St } } - private SearchRequest constructSearchRequest(String indexName, AggregationBuilder aggregationBuilder) { + private SearchRequest constructSearchRequest( + String indexName, AggregationBuilder aggregationBuilder) { SearchRequest searchRequest = new SearchRequest(indexName); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); @@ -257,7 +312,8 @@ private SearchRequest constructSearchRequest(String indexName, AggregationBuilde private Filter executeAndExtract(SearchRequest searchRequest) { try { - final SearchResponse searchResponse = _elasticClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _elasticClient.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well return searchResponse.getAggregations().<Filter>get(FILTERED); } catch (Exception e) { @@ -266,11 +322,14 @@ private Filter executeAndExtract(SearchRequest searchRequest) { } } - private AggregationBuilder getFilteredAggregation(Map<String, List<String>> mustFilters, - Map<String, List<String>> mustNotFilters, Optional<DateRange> dateRange) { + private AggregationBuilder getFilteredAggregation( + Map<String, List<String>> mustFilters, + Map<String, List<String>> mustNotFilters, + Optional<DateRange> dateRange) { BoolQueryBuilder filteredQuery = QueryBuilders.boolQuery(); mustFilters.forEach((key, values) -> filteredQuery.must(QueryBuilders.termsQuery(key, values))); - mustNotFilters.forEach((key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); + mustNotFilters.forEach( + (key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); dateRange.ifPresent(range -> filteredQuery.must(dateRangeQuery(range))); return AggregationBuilders.filter(FILTERED, filteredQuery); } diff 
--git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java index 42f4e25c010ef..be7f4d2f0897a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java @@ -31,16 +31,17 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; - @Slf4j public class AnalyticsUtil { - private AnalyticsUtil() { - } + private AnalyticsUtil() {} public static Cell buildCellWithSearchLandingPage(String query) { Cell result = new Cell(); result.setValue(query); - result.setLinkParams(LinkParams.builder().setSearchParams(SearchParams.builder().setQuery(query).build()).build()); + result.setLinkParams( + LinkParams.builder() + .setSearchParams(SearchParams.builder().setQuery(query).build()) + .build()); return result; } @@ -50,70 +51,138 @@ public static Cell buildCellWithEntityLandingPage(String urn) { try { Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn)); result.setEntity(entity); - result.setLinkParams(LinkParams.builder() - .setEntityProfileParams(EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) - .build()); + result.setLinkParams( + LinkParams.builder() + .setEntityProfileParams( + EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) + .build()); } catch (URISyntaxException e) { log.error("Malformed urn {} in table", urn, e); } return result; } - public static void hydrateDisplayNameForBars(EntityClient entityClient, List<NamedBar> bars, String entityName, - Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName, - Authentication authentication) throws Exception { + public static void hydrateDisplayNameForBars( + EntityClient entityClient, + List<NamedBar> bars, + String entityName, + Set<String> aspectNames, + Function<EntityResponse, Optional<String>> extractDisplayName, + Authentication authentication) + throws Exception { Map<String, String> urnToDisplayName = - getUrnToDisplayName(entityClient, bars.stream().map(NamedBar::getName).collect(Collectors.toList()), entityName, - aspectNames, extractDisplayName, authentication); + getUrnToDisplayName( + entityClient, + bars.stream().map(NamedBar::getName).collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName, + authentication); // For each urn, try to find it's name, use the urn if not found - bars.forEach(namedBar -> namedBar.setName(urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); + bars.forEach( + namedBar -> + namedBar.setName( + urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); } - public static void hydrateDisplayNameForSegments(EntityClient entityClient, List<NamedBar> bars, String entityName, - Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName, - Authentication authentication) throws Exception { - Map<String, String> urnToDisplayName = getUrnToDisplayName(entityClient, - bars.stream().flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)).collect(Collectors.toList()), - entityName, aspectNames, extractDisplayName, authentication); + public static void hydrateDisplayNameForSegments( + EntityClient entityClient, + List<NamedBar> bars, + String entityName, + Set<String> aspectNames, + 
Function<EntityResponse, Optional<String>> extractDisplayName, + Authentication authentication) + throws Exception { + Map<String, String> urnToDisplayName = + getUrnToDisplayName( + entityClient, + bars.stream() + .flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)) + .collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName, + authentication); // For each urn, try to find it's name, use the urn if not found - bars.forEach(namedBar -> namedBar.getSegments() - .forEach(segment -> segment.setLabel(urnToDisplayName.getOrDefault(segment.getLabel(), segment.getLabel())))); + bars.forEach( + namedBar -> + namedBar + .getSegments() + .forEach( + segment -> + segment.setLabel( + urnToDisplayName.getOrDefault( + segment.getLabel(), segment.getLabel())))); } - public static void hydrateDisplayNameForTable(EntityClient entityClient, List<Row> rows, String entityName, - Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName, - Authentication authentication) throws Exception { - Map<String, String> urnToDisplayName = getUrnToDisplayName(entityClient, rows.stream() - .flatMap(row -> row.getCells().stream().filter(cell -> cell.getEntity() != null).map(Cell::getValue)) - .collect(Collectors.toList()), entityName, aspectNames, extractDisplayName, authentication); + public static void hydrateDisplayNameForTable( + EntityClient entityClient, + List<Row> rows, + String entityName, + Set<String> aspectNames, + Function<EntityResponse, Optional<String>> extractDisplayName, + Authentication authentication) + throws Exception { + Map<String, String> urnToDisplayName = + getUrnToDisplayName( + entityClient, + rows.stream() + .flatMap( + row -> + row.getCells().stream() + .filter(cell -> cell.getEntity() != null) + .map(Cell::getValue)) + .collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName, + authentication); // For each urn, try to find it's name, use the urn if not found - rows.forEach(row -> row.getCells().forEach(cell -> { - if (cell.getEntity() != null) { - cell.setValue(urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue())); - } - })); + rows.forEach( + row -> + row.getCells() + .forEach( + cell -> { + if (cell.getEntity() != null) { + cell.setValue( + urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue())); + } + })); } - public static Map<String, String> getUrnToDisplayName(EntityClient entityClient, List<String> urns, String entityName, - Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName, - Authentication authentication) throws Exception { - Set<Urn> uniqueUrns = urns.stream().distinct().map(urnStr -> { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - return null; - } - }).filter(Objects::nonNull).collect(Collectors.toSet()); - Map<Urn, EntityResponse> aspects = entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication); - return aspects.entrySet() - .stream() - .map(entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue()))) + public static Map<String, String> getUrnToDisplayName( + EntityClient entityClient, + List<String> urns, + String entityName, + Set<String> aspectNames, + Function<EntityResponse, Optional<String>> extractDisplayName, + Authentication authentication) + throws Exception { + Set<Urn> uniqueUrns = + urns.stream() + .distinct() + .map( + urnStr -> { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + return null; + } + 
}) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + Map<Urn, EntityResponse> aspects = + entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication); + return aspects.entrySet().stream() + .map( + entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue()))) .filter(pair -> pair.getValue().isPresent()) .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().get())); } public static Optional<String> getDomainName(EntityResponse entityResponse) { - EnvelopedAspect domainProperties = entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + EnvelopedAspect domainProperties = + entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (domainProperties == null) { return Optional.empty(); } @@ -126,13 +195,17 @@ public static Optional<String> getPlatformName(EntityResponse entityResponse) { if (envelopedDataPlatformInfo == null) { return Optional.empty(); } - DataPlatformInfo dataPlatformInfo = new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data()); + DataPlatformInfo dataPlatformInfo = + new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data()); return Optional.of( - dataPlatformInfo.getDisplayName() == null ? dataPlatformInfo.getName() : dataPlatformInfo.getDisplayName()); + dataPlatformInfo.getDisplayName() == null + ? dataPlatformInfo.getName() + : dataPlatformInfo.getDisplayName()); } public static Optional<String> getDatasetName(EntityResponse entityResponse) { - EnvelopedAspect envelopedDatasetKey = entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME); + EnvelopedAspect envelopedDatasetKey = + entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME); if (envelopedDatasetKey == null) { return Optional.empty(); } @@ -141,7 +214,8 @@ public static Optional<String> getDatasetName(EntityResponse entityResponse) { } public static Optional<String> getTermName(EntityResponse entityResponse) { - EnvelopedAspect envelopedTermInfo = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); + EnvelopedAspect envelopedTermInfo = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); if (envelopedTermInfo != null) { GlossaryTermInfo glossaryTermInfo = new GlossaryTermInfo(envelopedTermInfo.getValue().data()); if (glossaryTermInfo.hasName()) { @@ -150,11 +224,13 @@ public static Optional<String> getTermName(EntityResponse entityResponse) { } // if name is not set on GlossaryTermInfo or there is no GlossaryTermInfo - EnvelopedAspect envelopedGlossaryTermKey = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); + EnvelopedAspect envelopedGlossaryTermKey = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); if (envelopedGlossaryTermKey == null) { return Optional.empty(); } - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); + GlossaryTermKey glossaryTermKey = + new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); return Optional.of(glossaryTermKey.getName()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index 03e63c7fb472f..6ba3777d476cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -1,36 +1,37 @@ package com.linkedin.datahub.graphql.authorization; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.AuthUtil; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; - import java.time.Clock; import java.util.List; import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class AuthorizationUtils { private static final Clock CLOCK = Clock.systemUTC(); public static AuditStamp createAuditStamp(@Nonnull QueryContext context) { - return new AuditStamp().setTime(CLOCK.millis()).setActor(UrnUtils.getUrn(context.getActorUrn())); + return new AuditStamp() + .setTime(CLOCK.millis()) + .setActor(UrnUtils.getUrn(context.getActorUrn())); } public static boolean canManageUsersAndGroups(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); } public static boolean canManagePolicies(@Nonnull QueryContext context) { @@ -38,7 +39,8 @@ public static boolean canManagePolicies(@Nonnull QueryContext context) { } public static boolean canGeneratePersonalAccessToken(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } public static boolean canManageTokens(@Nonnull QueryContext context) { @@ -46,21 +48,20 @@ public static boolean canManageTokens(@Nonnull QueryContext context) { } /** - * Returns true if the current used is able to create Domains. This is true if the user has the 'Manage Domains' or 'Create Domains' platform privilege. + * Returns true if the current user is able to create Domains. This is true if the user has the + * 'Manage Domains' or 'Create Domains' platform privilege. 
*/ public static boolean canCreateDomains(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageDomains(@Nonnull QueryContext context) { @@ -68,21 +69,20 @@ public static boolean canManageDomains(@Nonnull QueryContext context) { } /** - * Returns true if the current used is able to create Tags. This is true if the user has the 'Manage Tags' or 'Create Tags' platform privilege. + * Returns true if the current used is able to create Tags. This is true if the user has the + * 'Manage Tags' or 'Create Tags' platform privilege. */ public static boolean canCreateTags(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageTags(@Nonnull QueryContext context) { @@ -90,48 +90,59 @@ public static boolean canManageTags(@Nonnull QueryContext context) { } public static boolean canDeleteEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) { - return isAuthorized(context, Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), PoliciesConfig.DELETE_ENTITY_PRIVILEGE); + return isAuthorized( + context, + Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), + PoliciesConfig.DELETE_ENTITY_PRIVILEGE); } public static boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } - public static boolean canEditGroupMembers(@Nonnull String groupUrnStr, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); + public static boolean canEditGroupMembers( + @Nonnull String groupUrnStr, @Nonnull QueryContext context) { + final 
DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized(context.getAuthorizer(), context.getActorUrn(), CORP_GROUP_ENTITY_NAME, - groupUrnStr, orPrivilegeGroups); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + CORP_GROUP_ENTITY_NAME, + groupUrnStr, + orPrivilegeGroups); } public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageGlobalViews(@Nonnull QueryContext context) { @@ -142,31 +153,39 @@ public static boolean canManageOwnershipTypes(@Nonnull QueryContext context) { return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); } - public static boolean canEditEntityQueries(@Nonnull List<Urn> entityUrns, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); - return entityUrns.stream().allMatch(entityUrn -> - isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - entityUrn.getEntityType(), - entityUrn.toString(), - orPrivilegeGroups - )); - } - - public static boolean canCreateQuery(@Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { + public static boolean canEditEntityQueries( + @Nonnull List<Urn> entityUrns, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); + return entityUrns.stream() + .allMatch( + 
entityUrn -> + isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + entityUrn.getEntityType(), + entityUrn.toString(), + orPrivilegeGroups)); + } + + public static boolean canCreateQuery( + @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to create a query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canUpdateQuery(@Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { + public static boolean canUpdateQuery( + @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to update any query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canDeleteQuery(@Nonnull Urn entityUrn, @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { + public static boolean canDeleteQuery( + @Nonnull Urn entityUrn, @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to remove any query. return canEditEntityQueries(subjectUrns, context); } @@ -177,15 +196,16 @@ public static boolean isAuthorized( @Nonnull PoliciesConfig.Privilege privilege) { final Authorizer authorizer = context.getAuthorizer(); final String actor = context.getActorUrn(); - final ConjunctivePrivilegeGroup andGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); - return AuthUtil.isAuthorized(authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); + final ConjunctivePrivilegeGroup andGroup = + new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); + return AuthUtil.isAuthorized( + authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); } public static boolean isAuthorized( @Nonnull Authorizer authorizer, @Nonnull String actor, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { return AuthUtil.isAuthorized(authorizer, actor, Optional.empty(), privilegeGroup); } @@ -194,13 +214,10 @@ public static boolean isAuthorized( @Nonnull String actor, @Nonnull String resourceType, @Nonnull String resource, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { final EntitySpec resourceSpec = new EntitySpec(resourceType, resource); return AuthUtil.isAuthorized(authorizer, actor, Optional.of(resourceSpec), privilegeGroup); } - private AuthorizationUtils() { } - + private AuthorizationUtils() {} } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java index a09dc8741cd29..69e0ed0625b2f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. 
*/ public class AuthenticationException extends GraphQLException { - public AuthenticationException(String message) { - super(message); - } + public AuthenticationException(String message) { + super(message); + } - public AuthenticationException(String message, Throwable cause) { - super(message, cause); - } + public AuthenticationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java index 803af09e079d1..30568e45938c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java @@ -1,9 +1,6 @@ package com.linkedin.datahub.graphql.exception; - -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. */ public class AuthorizationException extends DataHubGraphQLException { public AuthorizationException(String message) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java index 8d3f5d5cea9eb..7c3ea1d581b6e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java @@ -13,7 +13,8 @@ public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler { @Override - public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { + public DataFetcherExceptionHandlerResult onException( + DataFetcherExceptionHandlerParameters handlerParameters) { Throwable exception = handlerParameters.getException(); SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java index 15c539a608cc0..f007a8b7c7adb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.exception; +import static graphql.Assert.*; + import graphql.ErrorType; import graphql.GraphQLError; import graphql.GraphqlErrorHelper; @@ -11,9 +13,6 @@ import java.util.List; import java.util.Map; -import static graphql.Assert.*; - - @PublicApi public class DataHubGraphQLError implements GraphQLError { @@ -23,7 +22,11 @@ public class DataHubGraphQLError implements GraphQLError { private final List<SourceLocation> locations; private final Map<String, Object> extensions; - public DataHubGraphQLError(String message, ResultPath path, SourceLocation sourceLocation, DataHubGraphQLErrorCode errorCode) { + public DataHubGraphQLError( + String message, + ResultPath path, + SourceLocation sourceLocation, + DataHubGraphQLErrorCode errorCode) { this.path = assertNotNull(path).toList(); this.errorCode = 
assertNotNull(errorCode); this.locations = Collections.singletonList(sourceLocation); @@ -90,4 +93,3 @@ public int hashCode() { return GraphqlErrorHelper.hashCode(this); } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java index 3d3c54e2febb2..75096a8c4148e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java @@ -2,7 +2,6 @@ import graphql.GraphQLException; - public class DataHubGraphQLException extends GraphQLException { private final DataHubGraphQLErrorCode code; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java index 2ee9838af5428..87a1aebb02f2e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when an unexpected value is provided by the client. - */ +/** Exception thrown when an unexpected value is provided by the client. */ public class ValidationException extends GraphQLException { - public ValidationException(String message) { - super(message); - } + public ValidationException(String message) { + super(message); + } - public ValidationException(String message, Throwable cause) { - super(message, cause); - } + public ValidationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index 4d6133f18df05..07bd1fba5d8a8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.PreProcessHooks; import lombok.Data; - @Data public class FeatureFlags { private boolean showSimplifiedHomepageByDefault = false; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java index e228cb8445c02..9faf00e0211bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java @@ -1,26 +1,25 @@ package com.linkedin.datahub.graphql.resolvers; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthorizationRequest; +import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; import com.linkedin.metadata.authorization.PoliciesConfig; import java.util.List; import java.util.Optional; -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; 
-import com.datahub.plugins.auth.authorization.Authorizer; public class AuthUtils { - public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static boolean isAuthorized( - String principal, - List<String> privilegeGroup, - Authorizer authorizer) { + String principal, List<String> privilegeGroup, Authorizer authorizer) { for (final String privilege : privilegeGroup) { - final AuthorizationRequest request = new AuthorizationRequest(principal, privilege, Optional.empty()); + final AuthorizationRequest request = + new AuthorizationRequest(principal, privilege, Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); if (AuthorizationResult.Type.DENY.equals(result.getType())) { return false; @@ -29,6 +28,5 @@ public static boolean isAuthorized( return true; } - - private AuthUtils() { } + private AuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java index 2520b55c24e25..570ea322be7a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java @@ -2,29 +2,28 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthenticationException; - import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - /** - * Checks whether the user is currently authenticated & if so delegates execution to a child resolver. + * Checks whether the user is currently authenticated & if so delegates execution to a child + * resolver. 
*/ @Deprecated public final class AuthenticatedResolver<T> implements DataFetcher<T> { - private final DataFetcher<T> _resolver; + private final DataFetcher<T> _resolver; - public AuthenticatedResolver(final DataFetcher<T> resolver) { - _resolver = resolver; - } + public AuthenticatedResolver(final DataFetcher<T> resolver) { + _resolver = resolver; + } - @Override - public final T get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); - if (context.isAuthenticated()) { - return _resolver.get(environment); - } - throw new AuthenticationException("Failed to authenticate the current user."); + @Override + public final T get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + if (context.isAuthenticated()) { + return _resolver.get(environment); } + throw new AuthenticationException("Failed to authenticate the current user."); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java index 930c98ee7113a..5ab07701c15a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java @@ -2,18 +2,17 @@ import com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; -import org.dataloader.DataLoader; -import org.dataloader.DataLoaderRegistry; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; public class BatchLoadUtils { - private BatchLoadUtils() { } + private BatchLoadUtils() {} public static CompletableFuture<List<Entity>> batchLoadEntitiesOfSameType( List<Entity> entities, @@ -24,9 +23,10 @@ public static CompletableFuture<List<Entity>> batchLoadEntitiesOfSameType( } // Assume all entities are of the same type final com.linkedin.datahub.graphql.types.EntityType filteredEntity = - Iterables.getOnlyElement(entityTypes.stream() - .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) - .collect(Collectors.toList())); + Iterables.getOnlyElement( + entityTypes.stream() + .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) + .collect(Collectors.toList())); final DataLoader loader = dataLoaderRegistry.getDataLoader(filteredEntity.name()); List keyList = new ArrayList(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java index b0f23e63177e6..aba781f9e1dc7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java @@ -7,9 +7,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** - * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service Storage Entities + * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service + * Storage Entities */ public class EntityTypeMapper { @@ -44,10 +44,10 @@ public class 
EntityTypeMapper { .build(); private static final Map<String, EntityType> ENTITY_NAME_TO_TYPE = - ENTITY_TYPE_TO_NAME.entrySet().stream().collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); + ENTITY_TYPE_TO_NAME.entrySet().stream() + .collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); - private EntityTypeMapper() { - } + private EntityTypeMapper() {} public static EntityType getType(String name) { String lowercaseName = name.toLowerCase(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index 02921b453e315..b480e287adb9b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -23,17 +26,12 @@ import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * GraphQL resolver responsible for resolving information about the currently - * logged in User, including - * - * 1. User profile information - * 2. User privilege information, i.e. which features to display in the UI. + * GraphQL resolver responsible for resolving information about the currently logged in User, + * including * + * <p>1. User profile information 2. User privilege information, i.e. which features to display in + * the UI. */ public class MeResolver implements DataFetcher<CompletableFuture<AuthenticatedUser>> { @@ -48,114 +46,123 @@ public MeResolver(final EntityClient entityClient, final FeatureFlags featureFla @Override public CompletableFuture<AuthenticatedUser> get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - // 1. Get currently logged in user profile. - final Urn userUrn = Urn.createFromString(context.getActorUrn()); - final EntityResponse gmsUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, - Collections.singleton(userUrn), null, context.getAuthentication()).get(userUrn); - final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); - - // 2. 
Get platform privileges - final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); - platformPrivileges.setViewAnalytics(canViewAnalytics(context)); - platformPrivileges.setManagePolicies(canManagePolicies(context)); - platformPrivileges.setManageIdentities(canManageUsersGroups(context)); - platformPrivileges.setGeneratePersonalAccessTokens(canGeneratePersonalAccessToken(context)); - platformPrivileges.setManageDomains(canManageDomains(context)); - platformPrivileges.setManageIngestion(canManageIngestion(context)); - platformPrivileges.setManageSecrets(canManageSecrets(context)); - platformPrivileges.setManageTokens(canManageTokens(context)); - platformPrivileges.setManageTests(canManageTests(context)); - platformPrivileges.setManageGlossaries(canManageGlossaries(context)); - platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); - platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); - platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); - platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); - platformPrivileges.setManageGlobalViews(AuthorizationUtils.canManageGlobalViews(context)); - platformPrivileges.setManageOwnershipTypes(AuthorizationUtils.canManageOwnershipTypes(context)); - platformPrivileges.setManageGlobalAnnouncements(AuthorizationUtils.canManageGlobalAnnouncements(context)); - - // Construct and return authenticated user object. - final AuthenticatedUser authUser = new AuthenticatedUser(); - authUser.setCorpUser(corpUser); - authUser.setPlatformPrivileges(platformPrivileges); - return authUser; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to fetch authenticated user!", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // 1. Get currently logged in user profile. + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + final EntityResponse gmsUser = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + null, + context.getAuthentication()) + .get(userUrn); + final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); + + // 2. 
Get platform privileges + final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); + platformPrivileges.setViewAnalytics(canViewAnalytics(context)); + platformPrivileges.setManagePolicies(canManagePolicies(context)); + platformPrivileges.setManageIdentities(canManageUsersGroups(context)); + platformPrivileges.setGeneratePersonalAccessTokens( + canGeneratePersonalAccessToken(context)); + platformPrivileges.setManageDomains(canManageDomains(context)); + platformPrivileges.setManageIngestion(canManageIngestion(context)); + platformPrivileges.setManageSecrets(canManageSecrets(context)); + platformPrivileges.setManageTokens(canManageTokens(context)); + platformPrivileges.setManageTests(canManageTests(context)); + platformPrivileges.setManageGlossaries(canManageGlossaries(context)); + platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); + platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); + platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); + platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); + platformPrivileges.setManageGlobalViews( + AuthorizationUtils.canManageGlobalViews(context)); + platformPrivileges.setManageOwnershipTypes( + AuthorizationUtils.canManageOwnershipTypes(context)); + platformPrivileges.setManageGlobalAnnouncements( + AuthorizationUtils.canManageGlobalAnnouncements(context)); + + // Construct and return authenticated user object. + final AuthenticatedUser authUser = new AuthenticatedUser(); + authUser.setCorpUser(corpUser); + authUser.setPlatformPrivileges(platformPrivileges); + return authUser; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to fetch authenticated user!", e); + } + }); } - /** - * Returns true if the authenticated user has privileges to view analytics. - */ + /** Returns true if the authenticated user has privileges to view analytics. */ private boolean canViewAnalytics(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage policies analytics. - */ + /** Returns true if the authenticated user has privileges to manage policies analytics. */ private boolean canManagePolicies(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage users & groups. - */ + /** Returns true if the authenticated user has privileges to manage users & groups. 
*/ private boolean canManageUsersGroups(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to generate personal access tokens - */ + /** Returns true if the authenticated user has privileges to generate personal access tokens */ private boolean canGeneratePersonalAccessToken(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage (add or remove) tests. - */ + /** Returns true if the authenticated user has privileges to manage (add or remove) tests. */ private boolean canManageTests(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage domains - */ + /** Returns true if the authenticated user has privileges to manage domains */ private boolean canManageDomains(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage access tokens - */ + /** Returns true if the authenticated user has privileges to manage access tokens */ private boolean canManageTokens(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); } - /** - * Returns true if the authenticated user has privileges to manage glossaries - */ + /** Returns true if the authenticated user has privileges to manage glossaries */ private boolean canManageGlossaries(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage user credentials - */ + /** Returns true if the authenticated user has privileges to manage user credentials */ private boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } /** * Returns true if the provided actor is authorized for a particular privilege, false otherwise. 
*/ - private boolean isAuthorized(final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); + private boolean isAuthorized( + final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { + final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); return AuthorizationResult.Type.ALLOW.equals(result.getType()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java index 2c2e71ee92eaa..244012d320b43 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -30,184 +32,198 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.metadata.Constants.*; - - public class ResolverUtils { - private static final Set<String> KEYWORD_EXCLUDED_FILTERS = ImmutableSet.of( - "runId", - "_entityType" - ); - private static final ObjectMapper MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + private static final Set<String> KEYWORD_EXCLUDED_FILTERS = + ImmutableSet.of("runId", "_entityType"); + private static final ObjectMapper MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); + + private ResolverUtils() {} + + @Nonnull + public static <T> T bindArgument(Object argument, Class<T> clazz) { + return MAPPER.convertValue(argument, clazz); + } + + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } - - private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); - - private ResolverUtils() { } - - @Nonnull - public static <T> T bindArgument(Object argument, Class<T> clazz) { - return MAPPER.convertValue(argument, clazz); + return input; + } + + @Nonnull + public static Authentication getAuthentication(DataFetchingEnvironment environment) { + return ((QueryContext) environment.getContext()).getAuthentication(); + } + + /** + * @apiNote DO NOT use this method if the facet filters do not include 
`.keyword` suffix to ensure + * that it is matched against a keyword filter in ElasticSearch. + * @param facetFilterInputs The list of facet filters inputs + * @param validFacetFields The set of valid fields against which to filter for. + * @return A map of filter definitions to be used in ElasticSearch. + */ + @Nonnull + public static Map<String, String> buildFacetFilters( + @Nullable List<FacetFilterInput> facetFilterInputs, @Nonnull Set<String> validFacetFields) { + if (facetFilterInputs == null) { + return Collections.emptyMap(); } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; - } - - @Nonnull - public static Authentication getAuthentication(DataFetchingEnvironment environment) { - return ((QueryContext) environment.getContext()).getAuthentication(); - } - - /** - * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure - * that it is matched against a keyword filter in ElasticSearch. - * - * @param facetFilterInputs The list of facet filters inputs - * @param validFacetFields The set of valid fields against which to filter for. - * @return A map of filter definitions to be used in ElasticSearch. - */ - @Nonnull - public static Map<String, String> buildFacetFilters(@Nullable List<FacetFilterInput> facetFilterInputs, - @Nonnull Set<String> validFacetFields) { - if (facetFilterInputs == null) { - return Collections.emptyMap(); - } - - final Map<String, String> facetFilters = new HashMap<>(); - - facetFilterInputs.forEach(facetFilterInput -> { - if (!validFacetFields.contains(facetFilterInput.getField())) { - throw new ValidationException(String.format("Unrecognized facet with name %s provided", facetFilterInput.getField())); - } - if (!facetFilterInput.getValues().isEmpty()) { - facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); - } + final Map<String, String> facetFilters = new HashMap<>(); + + facetFilterInputs.forEach( + facetFilterInput -> { + if (!validFacetFields.contains(facetFilterInput.getField())) { + throw new ValidationException( + String.format( + "Unrecognized facet with name %s provided", facetFilterInput.getField())); + } + if (!facetFilterInput.getValues().isEmpty()) { + facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); + } }); - return facetFilters; - } + return facetFilters; + } - public static List<Criterion> criterionListFromAndFilter(List<FacetFilterInput> andFilters) { - return andFilters != null && !andFilters.isEmpty() - ? andFilters.stream() + public static List<Criterion> criterionListFromAndFilter(List<FacetFilterInput> andFilters) { + return andFilters != null && !andFilters.isEmpty() + ? andFilters.stream() .map(filter -> criterionFromFilter(filter)) - .collect(Collectors.toList()) : Collections.emptyList(); + .collect(Collectors.toList()) + : Collections.emptyList(); + } + + // In the case that user sends filters to be or-d together, we need to build a series of + // conjunctive criterion + // arrays, rather than just one for the AND case. 
+ public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( + @Nonnull List<AndFilterInput> orFilters) { + return new ConjunctiveCriterionArray( + orFilters.stream() + .map( + orFilter -> { + CriterionArray andCriterionForOr = + new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); + return new ConjunctiveCriterion().setAnd(andCriterionForOr); + }) + .collect(Collectors.toList())); + } + + @Nullable + public static Filter buildFilter( + @Nullable List<FacetFilterInput> andFilters, @Nullable List<AndFilterInput> orFilters) { + if ((andFilters == null || andFilters.isEmpty()) + && (orFilters == null || orFilters.isEmpty())) { + return null; + } + // Or filters are the new default. We will check them first. + // If we have OR filters, we need to build a series of CriterionArrays + if (orFilters != null && !orFilters.isEmpty()) { + return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); } - // In the case that user sends filters to be or-d together, we need to build a series of conjunctive criterion - // arrays, rather than just one for the AND case. - public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( - @Nonnull List<AndFilterInput> orFilters - ) { - return new ConjunctiveCriterionArray(orFilters.stream().map(orFilter -> { - CriterionArray andCriterionForOr = new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); - return new ConjunctiveCriterion().setAnd( - andCriterionForOr - ); - } - ).collect(Collectors.toList())); + // If or filters are not set, someone may be using the legacy and filters + final List<Criterion> andCriterions = criterionListFromAndFilter(andFilters); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + } + + public static Criterion criterionFromFilter(final FacetFilterInput filter) { + return criterionFromFilter(filter, false); + } + + // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) + public static Criterion criterionFromFilter( + final FacetFilterInput filter, final Boolean skipKeywordSuffix) { + Criterion result = new Criterion(); + + if (skipKeywordSuffix) { + result.setField(filter.getField()); + } else { + result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); } - @Nullable - public static Filter buildFilter(@Nullable List<FacetFilterInput> andFilters, @Nullable List<AndFilterInput> orFilters) { - if ((andFilters == null || andFilters.isEmpty()) && (orFilters == null || orFilters.isEmpty())) { - return null; - } - - // Or filters are the new default. We will check them first. - // If we have OR filters, we need to build a series of CriterionArrays - if (orFilters != null && !orFilters.isEmpty()) { - return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); - } - - // If or filters are not set, someone may be using the legacy and filters - final List<Criterion> andCriterions = criterionListFromAndFilter(andFilters); - return new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + // `value` is deprecated in place of `values`- this is to support old query patterns. 
If values + // is provided, + // this statement will be skipped + if (filter.getValues() == null && filter.getValue() != null) { + result.setValues(new StringArray(filter.getValue())); + result.setValue(filter.getValue()); + } else if (filter.getValues() != null) { + result.setValues(new StringArray(filter.getValues())); + if (!filter.getValues().isEmpty()) { + result.setValue(filter.getValues().get(0)); + } else { + result.setValue(""); + } + } else { + result.setValues(new StringArray()); + result.setValue(""); } - public static Criterion criterionFromFilter(final FacetFilterInput filter) { - return criterionFromFilter(filter, false); + if (filter.getCondition() != null) { + result.setCondition(Condition.valueOf(filter.getCondition().toString())); + } else { + result.setCondition(Condition.EQUAL); } - // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) - public static Criterion criterionFromFilter(final FacetFilterInput filter, final Boolean skipKeywordSuffix) { - Criterion result = new Criterion(); - - if (skipKeywordSuffix) { - result.setField(filter.getField()); - } else { - result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); - } - - // `value` is deprecated in place of `values`- this is to support old query patterns. If values is provided, - // this statement will be skipped - if (filter.getValues() == null && filter.getValue() != null) { - result.setValues(new StringArray(filter.getValue())); - result.setValue(filter.getValue()); - } else if (filter.getValues() != null) { - result.setValues(new StringArray(filter.getValues())); - if (!filter.getValues().isEmpty()) { - result.setValue(filter.getValues().get(0)); - } else { - result.setValue(""); - } - } else { - result.setValues(new StringArray()); - result.setValue(""); - } - - - if (filter.getCondition() != null) { - result.setCondition(Condition.valueOf(filter.getCondition().toString())); - } else { - result.setCondition(Condition.EQUAL); - } - - if (filter.getNegated() != null) { - result.setNegated(filter.getNegated()); - } - - return result; + if (filter.getNegated() != null) { + result.setNegated(filter.getNegated()); } - private static String getFilterField(final String originalField, final boolean skipKeywordSuffix) { - if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { - return originalField; - } - return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + return result; + } + + private static String getFilterField( + final String originalField, final boolean skipKeywordSuffix) { + if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { + return originalField; } + return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + } - public static Filter buildFilterWithUrns(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) { - Criterion urnMatchCriterion = new Criterion().setField("urn") + public static Filter buildFilterWithUrns(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) { + Criterion urnMatchCriterion = + new Criterion() + .setField("urn") .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); - if (inputFilters == null) { - return QueryUtils.newFilter(urnMatchCriterion); - } - - // Add urn match criterion to each or clause - if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { - for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { - conjunctiveCriterion.getAnd().add(urnMatchCriterion); - } - return inputFilters; - } - return 
QueryUtils.newFilter(urnMatchCriterion); + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + if (inputFilters == null) { + return QueryUtils.newFilter(urnMatchCriterion); + } + + // Add urn match criterion to each or clause + if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { + for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { + conjunctiveCriterion.getAnd().add(urnMatchCriterion); + } + return inputFilters; } + return QueryUtils.newFilter(urnMatchCriterion); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index b5b13cc00b40d..2a074b950d0ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; @@ -26,13 +28,9 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * GraphQL Resolver used for fetching AssertionRunEvents. - */ -public class AssertionRunEventResolver implements DataFetcher<CompletableFuture<AssertionRunEventsResult>> { +/** GraphQL Resolver used for fetching AssertionRunEvents. */ +public class AssertionRunEventResolver + implements DataFetcher<CompletableFuture<AssertionRunEventsResult>> { private final EntityClient _client; @@ -42,58 +40,72 @@ public AssertionRunEventResolver(final EntityClient client) { @Override public CompletableFuture<AssertionRunEventsResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String urn = ((Assertion) environment.getSource()).getUrn(); - final String maybeStatus = environment.getArgumentOrDefault("status", null); - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; + final String urn = ((Assertion) environment.getSource()).getUrn(); + final String maybeStatus = environment.getArgumentOrDefault("status", null); + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? 
bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; - try { - // Step 1: Fetch aspects from GMS - List<EnvelopedAspect> aspects = _client.getTimeseriesAspectValues( - urn, - Constants.ASSERTION_ENTITY_NAME, - Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - buildFilter(maybeFilters, maybeStatus), - context.getAuthentication()); + try { + // Step 1: Fetch aspects from GMS + List<EnvelopedAspect> aspects = + _client.getTimeseriesAspectValues( + urn, + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilter(maybeFilters, maybeStatus), + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. - List<AssertionRunEvent> runEvents = aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); + // Step 2: Bind profiles into GraphQL strong types. + List<AssertionRunEvent> runEvents = + aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); - // Step 3: Package and return response. - final AssertionRunEventsResult result = new AssertionRunEventsResult(); - result.setTotal(runEvents.size()); - result.setFailed(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.FAILURE.equals( - runEvent.getResult().getType() - )).count())); - result.setSucceeded(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.SUCCESS.equals(runEvent.getResult().getType() - )).count())); - result.setRunEvents(runEvents); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 3: Package and return response. 
+ final AssertionRunEventsResult result = new AssertionRunEventsResult(); + result.setTotal(runEvents.size()); + result.setFailed( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.FAILURE.equals( + runEvent.getResult().getType())) + .count())); + result.setSucceeded( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.SUCCESS.equals( + runEvent.getResult().getType())) + .count())); + result.setRunEvents(runEvents); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }); } @Nullable - public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable final String status) { + public static Filter buildFilter( + @Nullable FilterInput filtersInput, @Nullable final String status) { if (filtersInput == null && status == null) { return null; } @@ -107,8 +119,14 @@ public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable f if (filtersInput != null) { facetFilters.addAll(filtersInput.getAnd()); } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(facetFilters.stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + facetFilters.stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 8006ae7d2a464..89912b2814e40 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionInfo; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; @@ -19,63 +19,76 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver that deletes an Assertion. - */ +/** GraphQL Resolver that deletes an Assertion. 
*/ @Slf4j -public class DeleteAssertionResolver implements DataFetcher<CompletableFuture<Boolean>> { +public class DeleteAssertionResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteAssertionResolver(final EntityClient entityClient, final EntityService entityService) { + public DeleteAssertionResolver( + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn assertionUrn = Urn.createFromString(environment.getArgument("urn")); - return CompletableFuture.supplyAsync(() -> { - - // 1. check the entity exists. If not, return false. - if (!_entityService.exists(assertionUrn)) { - return true; - } - - if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { - try { - _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(assertionUrn, context.getAuthentication()); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for assertion with urn %s", assertionUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // 1. check the entity exists. If not, return false. + if (!_entityService.exists(assertionUrn)) { return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against assertion with urn %s", assertionUrn), e); } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + + if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { + try { + _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + assertionUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for assertion with urn %s", + assertionUrn), + e); + } + }); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against assertion with urn %s", assertionUrn), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - /** - * Determine whether the current user is allowed to remove an assertion. - */ - private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final Urn assertionUrn) { + /** Determine whether the current user is allowed to remove an assertion. */ + private boolean isAuthorizedToDeleteAssertion( + final QueryContext context, final Urn assertionUrn) { // 2. 
fetch the assertion info AssertionInfo info = - (AssertionInfo) EntityUtils.getAspectFromEntity( - assertionUrn.toString(), Constants.ASSERTION_INFO_ASPECT_NAME, _entityService, null); + (AssertionInfo) + EntityUtils.getAspectFromEntity( + assertionUrn.toString(), + Constants.ASSERTION_INFO_ASPECT_NAME, + _entityService, + null); if (info != null) { // 3. check whether the actor has permission to edit the assertions on the assertee @@ -86,11 +99,14 @@ private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final return true; } - private boolean isAuthorizedToDeleteAssertionFromAssertee(final QueryContext context, final Urn asserteeUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToDeleteAssertionFromAssertee( + final QueryContext context, final Urn asserteeUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), @@ -104,7 +120,8 @@ private Urn getAsserteeUrnFromInfo(final AssertionInfo info) { case DATASET: return info.getDatasetAssertion().getDataset(); default: - throw new RuntimeException(String.format("Unsupported Assertion Type %s provided", info.getType())); + throw new RuntimeException( + String.format("Unsupported Assertion Type %s provided", info.getType())); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java index ff573bb59fba1..9814589df7651 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java @@ -26,11 +26,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of Assertions associated with an Entity. - */ -public class EntityAssertionsResolver implements DataFetcher<CompletableFuture<EntityAssertionsResult>> { +/** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. 
*/ +public class EntityAssertionsResolver + implements DataFetcher<CompletableFuture<EntityAssertionsResult>> { private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; @@ -44,54 +42,60 @@ public EntityAssertionsResolver(final EntityClient entityClient, final GraphClie @Override public CompletableFuture<EntityAssertionsResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 200); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 200); - try { - // Step 1: Fetch set of assertions associated with the target entity from the Graph Store - final EntityRelationships relationships = _graphClient.getRelatedEntities( - entityUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - start, - count, - context.getActorUrn() - ); + try { + // Step 1: Fetch set of assertions associated with the target entity from the Graph + // Store + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + start, + count, + context.getActorUrn()); - final List<Urn> assertionUrns = relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + final List<Urn> assertionUrns = + relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the assertion entities based on the urns from step 1 - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the assertion entities based on the urns from step 1 + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS assertion model to GraphQL model - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List<Assertion> assertions = gmsResults.stream() - .filter(Objects::nonNull) - .map(AssertionMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS assertion model to GraphQL model + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List<Assertion> assertions = + gmsResults.stream() + .filter(Objects::nonNull) + .map(AssertionMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final EntityAssertionsResult result = new EntityAssertionsResult(); - result.setCount(relationships.getCount()); - result.setStart(relationships.getStart()); - result.setTotal(relationships.getTotal()); - result.setAssertions(assertions); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new 
RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 4: Package and return result + final EntityAssertionsResult result = new EntityAssertionsResult(); + result.setCount(relationships.getCount()); + result.setStart(relationships.getStart()); + result.setTotal(relationships.getTotal()); + result.setAssertions(assertions); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java index 8f5be1000bb45..9015ad0ebb210 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java @@ -5,13 +5,9 @@ import java.time.temporal.ChronoUnit; import java.util.Optional; - - public class AccessTokenUtil { - /** - * Convert an {@link AccessTokenDuration} into its milliseconds equivalent. - */ + /** Convert an {@link AccessTokenDuration} into its milliseconds equivalent. */ public static Optional<Long> mapDurationToMs(final AccessTokenDuration duration) { switch (duration) { case ONE_HOUR: @@ -29,9 +25,10 @@ public static Optional<Long> mapDurationToMs(final AccessTokenDuration duration) case NO_EXPIRY: return Optional.empty(); default: - throw new RuntimeException(String.format("Unrecognized access token duration %s provided", duration)); + throw new RuntimeException( + String.format("Unrecognized access token duration %s provided", duration)); } } - private AccessTokenUtil() { } + private AccessTokenUtil() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java index cd55d81aec6ad..14a1b9a1f7a01 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatefulTokenService; @@ -10,10 +12,10 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AccessToken; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.AccessTokenType; import com.linkedin.datahub.graphql.generated.CreateAccessTokenInput; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.metadata.Constants; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -22,12 +24,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for creating 
personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for creating personal & service principal v2-type (stateful) access tokens. */ @Slf4j public class CreateAccessTokenResolver implements DataFetcher<CompletableFuture<AccessToken>> { @@ -38,62 +35,85 @@ public CreateAccessTokenResolver(final StatefulTokenService statefulTokenService } @Override - public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final CreateAccessTokenInput input = bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); - - log.info("User {} requesting new access token for user {} ", context.getActorUrn(), input.getActorUrn()); - - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Date date = new Date(); - final long createdAtInMs = date.getTime(); - final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - - final String tokenName = input.getName(); - final String tokenDescription = input.getDescription(); - - final String accessToken = - _statefulTokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null), - createdAtInMs, tokenName, tokenDescription, context.getActorUrn()); - log.info("Generated access token for {} of type {} with duration {}", input.getActorUrn(), input.getType(), - input.getDuration()); - try { - final String tokenHash = _statefulTokenService.hash(accessToken); - - final AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - result.setMetadata(metadata); - - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new access token with name %s", input.getName()), - e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final CreateAccessTokenInput input = + bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); + + log.info( + "User {} requesting new access token for user {} ", + context.getActorUrn(), + input.getActorUrn()); + + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. 
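
The inline "warn" above is worth unpacking: TokenType.valueOf(input.getType().toString()) couples the GraphQL AccessTokenType enum to the backend TokenType enum purely by constant name, so a constant added to one enum but not the other only surfaces as a runtime IllegalArgumentException. A minimal sketch of a stricter mapping, assuming TokenType exposes PERSONAL and SERVICE constants (a hypothetical helper, not part of this patch):

    // Hypothetical helper (not in this patch): map each GraphQL constant
    // explicitly instead of relying on matching enum names. An unmapped
    // constant now reaches the default branch with a descriptive error
    // rather than an IllegalArgumentException thrown from Enum.valueOf.
    private static TokenType mapTokenType(final AccessTokenType graphQlType) {
      switch (graphQlType) {
        case PERSONAL:
          return TokenType.PERSONAL;
        case SERVICE:
          return TokenType.SERVICE;
        default:
          throw new UnsupportedOperationException(
              String.format("Unsupported AccessTokenType %s provided", graphQlType));
      }
    }
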
+ final String actorUrn = input.getActorUrn(); + final Date date = new Date(); + final long createdAtInMs = date.getTime(); + final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + + final String tokenName = input.getName(); + final String tokenDescription = input.getDescription(); + + final String accessToken = + _statefulTokenService.generateAccessToken( + type, + createActor(input.getType(), actorUrn), + expiresInMs.orElse(null), + createdAtInMs, + tokenName, + tokenDescription, + context.getActorUrn()); + log.info( + "Generated access token for {} of type {} with duration {}", + input.getActorUrn(), + input.getType(), + input.getDuration()); + try { + final String tokenHash = _statefulTokenService.hash(accessToken); + + final AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn( + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + result.setMetadata(metadata); + + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new access token with name %s", input.getName()), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final CreateAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final CreateAccessTokenInput input) { if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final CreateAccessTokenInput input) { + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final CreateAccessTokenInput input) { return AuthorizationUtils.canManageTokens(context) - || input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + || input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -101,6 +121,7 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java index 5ac4ec8ac3a6b..aed6bd6cb98af 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatelessTokenService; @@ -18,12 +20,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for generating personal & service principal access tokens - */ +/** Resolver for generating personal & service principal access tokens */ @Slf4j public class GetAccessTokenResolver implements DataFetcher<CompletableFuture<AccessToken>> { @@ -34,39 +31,49 @@ public GetAccessTokenResolver(final StatelessTokenService tokenService) { } @Override - public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final GetAccessTokenInput input = bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); + public CompletableFuture<AccessToken> get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final GetAccessTokenInput input = + bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - final String accessToken = - _tokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); - AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - return result; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. 
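
Just below, the requested lifetime is normalized through AccessTokenUtil.mapDurationToMs (reformatted earlier in this patch): every finite AccessTokenDuration yields a millisecond count wrapped in an Optional, while NO_EXPIRY yields Optional.empty(), which the caller unwraps via orElse(null) to request a non-expiring token. An illustrative sketch of that contract, under those assumptions:

    // Illustrative only, based on the switch in AccessTokenUtil above.
    Optional<Long> oneHour = AccessTokenUtil.mapDurationToMs(AccessTokenDuration.ONE_HOUR);
    // oneHour holds Duration.of(1, ChronoUnit.HOURS).toMillis(), i.e. 3_600_000L.

    Optional<Long> never = AccessTokenUtil.mapDurationToMs(AccessTokenDuration.NO_EXPIRY);
    // never.isPresent() == false, so generateAccessToken(..., never.orElse(null), ...)
    // receives null and the resulting token carries no expiration.
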
+ final String actorUrn = input.getActorUrn(); + final Optional<Long> expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + final String accessToken = + _tokenService.generateAccessToken( + type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); + AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + return result; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final GetAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final GetAccessTokenInput input) { // Currently only an actor can generate a personal token for themselves. if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final GetAccessTokenInput input) { - return input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final GetAccessTokenInput input) { + return input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -74,14 +81,16 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, createUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } private Urn createUrn(final String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to validate provided urn %s", urnStr)); + throw new IllegalArgumentException( + String.format("Failed to validate provided urn %s", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index f9ba552d349e0..5cfa80e394c5f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,14 +25,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for listing personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for listing personal & service principal v2-type (stateful) access tokens. */ @Slf4j -public class ListAccessTokensResolver implements DataFetcher<CompletableFuture<ListAccessTokenResult>> { +public class ListAccessTokensResolver + implements DataFetcher<CompletableFuture<ListAccessTokenResult>> { private static final String EXPIRES_AT_FIELD_NAME = "expiresAt"; @@ -41,60 +39,87 @@ public ListAccessTokensResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListAccessTokenResult> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final ListAccessTokenInput input = bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - final List<FacetFilterInput> filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + public CompletableFuture<ListAccessTokenResult> get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final ListAccessTokenInput input = + bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + final List<FacetFilterInput> filters = + input.getFilters() == null ? 
Collections.emptyList() : input.getFilters(); - log.info("User {} listing access tokens with filters {}", context.getActorUrn(), filters.toString()); + log.info( + "User {} listing access tokens with filters {}", + context.getActorUrn(), + filters.toString()); - if (AuthorizationUtils.canManageTokens(context) || isListingSelfTokens(filters, context)) { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(EXPIRES_AT_FIELD_NAME).setOrder(SortOrder.DESCENDING); - final SearchResult searchResult = _entityClient.search(Constants.ACCESS_TOKEN_ENTITY_NAME, "", - buildFilter(filters, Collections.emptyList()), sortCriterion, start, count, - getAuthentication(environment), new SearchFlags().setFulltext(true)); + if (AuthorizationUtils.canManageTokens(context) + || isListingSelfTokens(filters, context)) { + try { + final SortCriterion sortCriterion = + new SortCriterion() + .setField(EXPIRES_AT_FIELD_NAME) + .setOrder(SortOrder.DESCENDING); + final SearchResult searchResult = + _entityClient.search( + Constants.ACCESS_TOKEN_ENTITY_NAME, + "", + buildFilter(filters, Collections.emptyList()), + sortCriterion, + start, + count, + getAuthentication(environment), + new SearchFlags().setFulltext(true)); - final List<AccessTokenMetadata> tokens = searchResult.getEntities().stream().map(entity -> { - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(entity.getEntity().toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - return metadata; - }).collect(Collectors.toList()); + final List<AccessTokenMetadata> tokens = + searchResult.getEntities().stream() + .map( + entity -> { + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn(entity.getEntity().toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + return metadata; + }) + .collect(Collectors.toList()); - final ListAccessTokenResult result = new ListAccessTokenResult(); - result.setTokens(tokens); - result.setStart(searchResult.getFrom()); - result.setCount(searchResult.getPageSize()); - result.setTotal(searchResult.getNumEntities()); + final ListAccessTokenResult result = new ListAccessTokenResult(); + result.setTokens(tokens); + result.setStart(searchResult.getFrom()); + result.setCount(searchResult.getPageSize()); + result.setTotal(searchResult.getNumEntities()); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list access tokens", e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list access tokens", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Utility method to answer: Does the existing security context have permissions to generate their personal tokens - * AND is the request coming in requesting those personal tokens? - * <p> - * Note: We look for the actorUrn field because a token generated by someone else means that the generator actor has - * manage all access token privileges which means that he/she will be bound to just listing their own tokens. + * Utility method to answer: Does the existing security context have permissions to generate their + * personal tokens AND is the request coming in requesting those personal tokens? 
+ * + * <p>Note: We look for the actorUrn field because a token generated by someone else means that + * the generator actor has manage all access token privileges which means that he/she will be + * bound to just listing their own tokens. * * @param filters The filters being used in the request. * @param context Current security context. * @return A boolean stating if the current user can list its personal tokens. */ - private boolean isListingSelfTokens(final List<FacetFilterInput> filters, final QueryContext context) { - return AuthorizationUtils.canGeneratePersonalAccessToken(context) && filters.stream() - .anyMatch(filter -> filter.getField().equals("ownerUrn") && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); + private boolean isListingSelfTokens( + final List<FacetFilterInput> filters, final QueryContext context) { + return AuthorizationUtils.canGeneratePersonalAccessToken(context) + && filters.stream() + .anyMatch( + filter -> + filter.getField().equals("ownerUrn") + && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java index 252c0eaba6e85..8d0a23e665b1b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.token.StatefulTokenService; import com.google.common.collect.ImmutableSet; import com.linkedin.access.token.DataHubAccessTokenInfo; @@ -18,42 +20,39 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for revoking personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for revoking personal & service principal v2-type (stateful) access tokens. 
*/ @Slf4j public class RevokeAccessTokenResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; private final StatefulTokenService _statefulTokenService; - public RevokeAccessTokenResolver(final EntityClient entityClient, final StatefulTokenService statefulTokenService) { + public RevokeAccessTokenResolver( + final EntityClient entityClient, final StatefulTokenService statefulTokenService) { _entityClient = entityClient; _statefulTokenService = statefulTokenService; } @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); - log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); + log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); - if (isAuthorizedToRevokeToken(context, tokenId)) { - try { - _statefulTokenService.revokeAccessToken(tokenId); - } catch (Exception e) { - throw new RuntimeException("Failed to revoke access token", e); - } - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToRevokeToken(context, tokenId)) { + try { + _statefulTokenService.revokeAccessToken(tokenId); + } catch (Exception e) { + throw new RuntimeException("Failed to revoke access token", e); + } + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private boolean isAuthorizedToRevokeToken(final QueryContext context, final String tokenId) { @@ -62,12 +61,17 @@ private boolean isAuthorizedToRevokeToken(final QueryContext context, final Stri private boolean isOwnerOfAccessToken(final QueryContext context, final String tokenId) { try { - final EntityResponse entityResponse = _entityClient.getV2(Constants.ACCESS_TOKEN_ENTITY_NAME, - Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), - ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), context.getAuthentication()); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), + ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { + final DataMap data = + entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(data); return tokenInfo.getOwnerUrn().toString().equals(context.getActorUrn()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java index 4a1964b36032c..40c91b43850f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java @@ -1,61 +1,65 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.BrowsePathsInput; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.Collections; -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowsePathsResolver implements DataFetcher<CompletableFuture<List<BrowsePath>>> { - private static final Logger _logger = LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); - - private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; - - public BrowsePathsResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } - - @Override - public CompletableFuture<List<BrowsePath>> get(DataFetchingEnvironment environment) { - final BrowsePathsInput input = bindArgument(environment.getArgument("input"), BrowsePathsInput.class); - - 
return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Fetch browse paths. entity type: %s, urn: %s", - input.getType(), - input.getUrn())); - if (_typeToEntity.containsKey(input.getType())) { - return _typeToEntity.get(input.getType()).browsePaths(input.getUrn(), environment.getContext()); - } - // Browse path is impl detail. - return Collections.emptyList(); - } catch (Exception e) { - _logger.error("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()) + " " + e.getMessage()); - throw new RuntimeException("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()), e); + private static final Logger _logger = + LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); + + private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; + + public BrowsePathsResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } + + @Override + public CompletableFuture<List<BrowsePath>> get(DataFetchingEnvironment environment) { + final BrowsePathsInput input = + bindArgument(environment.getArgument("input"), BrowsePathsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Fetch browse paths. entity type: %s, urn: %s", + input.getType(), input.getUrn())); + if (_typeToEntity.containsKey(input.getType())) { + return _typeToEntity + .get(input.getType()) + .browsePaths(input.getUrn(), environment.getContext()); } + // Browse path is impl detail. + return Collections.emptyList(); + } catch (Exception e) { + _logger.error( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java index 9c95eceb1e78f..287d0eef8aec8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java @@ -1,77 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowseInput; import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowseResolver implements DataFetcher<CompletableFuture<BrowseResults>> { - 
private static final int DEFAULT_START = 0; - private static final int DEFAULT_COUNT = 10; + private static final int DEFAULT_START = 0; + private static final int DEFAULT_COUNT = 10; - private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); + private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); - private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; + private final Map<EntityType, BrowsableEntityType<?, ?>> _typeToEntity; - public BrowseResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } + public BrowseResolver(@Nonnull final List<BrowsableEntityType<?, ?>> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture<BrowseResults> get(DataFetchingEnvironment environment) { - final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); + @Override + public CompletableFuture<BrowseResults> get(DataFetchingEnvironment environment) { + final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); - final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; - final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; + final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; + final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Executing browse. entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count)); - return _typeToEntity.get(input.getType()).browse( - input.getPath(), - input.getFilters(), - start, - count, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count) + " " + e.getMessage()); - throw new RuntimeException("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count), e); - } + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Executing browse. 
entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count)); + return _typeToEntity + .get(input.getType()) + .browse( + input.getPath(), input.getFilters(), start, count, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java index 81f82c93f1fa7..396d91c37d81c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java @@ -1,14 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.browse; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.types.BrowsableEntityType; -import com.linkedin.datahub.graphql.generated.BrowsePath; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; public class EntityBrowsePathsResolver implements DataFetcher<CompletableFuture<List<BrowsePath>>> { @@ -24,12 +24,14 @@ public CompletableFuture<List<BrowsePath>> get(DataFetchingEnvironment environme final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - return CompletableFuture.supplyAsync(() -> { - try { - return _browsableType.browsePaths(urn, context); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _browsableType.browsePaths(urn, context); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index 76abddc9a99a9..292d6108b7a04 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.chart; +import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import 
com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2; @@ -17,18 +21,13 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -52,30 +51,40 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); - return CompletableFuture.supplyAsync(() -> { - try { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - final String pathStr = input.getPath().size() > 0 ? BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + return CompletableFuture.supplyAsync( + () -> { + try { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + final String pathStr = + input.getPath().size() > 0 + ? BROWSE_PATH_V2_DELIMITER + + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) + : ""; + final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); - BrowseResultV2 browseResults = _entityClient.browseV2( - entityName, - pathStr, - maybeResolvedView != null - ? SearchUtils.combineFilters(filter, maybeResolvedView.getDefinition().getFilter()) - : filter, - sanitizedQuery, - start, - count, - context.getAuthentication() - ); - return mapBrowseResults(browseResults); - } catch (Exception e) { - throw new RuntimeException("Failed to execute browse V2", e); - } - }); + BrowseResultV2 browseResults = + _entityClient.browseV2( + entityName, + pathStr, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + filter, maybeResolvedView.getDefinition().getFilter()) + : filter, + sanitizedQuery, + start, + count, + context.getAuthentication()); + return mapBrowseResults(browseResults); + } catch (Exception e) { + throw new RuntimeException("Failed to execute browse V2", e); + } + }); } private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { @@ -85,28 +94,29 @@ private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { results.setCount(browseResults.getPageSize()); List<BrowseResultGroupV2> groups = new ArrayList<>(); - browseResults.getGroups().forEach(group -> { - BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); - browseGroup.setName(group.getName()); - browseGroup.setCount(group.getCount()); - browseGroup.setHasSubGroups(group.isHasSubGroups()); - if (group.hasUrn() && group.getUrn() != null) { - browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); - } - groups.add(browseGroup); - }); + browseResults + .getGroups() + .forEach( + group -> { + BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); + browseGroup.setName(group.getName()); + browseGroup.setCount(group.getCount()); + browseGroup.setHasSubGroups(group.isHasSubGroups()); + if (group.hasUrn() && group.getUrn() != null) { + browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); + } + groups.add(browseGroup); + }); results.setGroups(groups); BrowseResultMetadata resultMetadata = new BrowseResultMetadata(); - resultMetadata.setPath(Arrays.stream(browseResults.getMetadata().getPath() - .split(BROWSE_PATH_V2_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()) - ); + resultMetadata.setPath( + Arrays.stream(browseResults.getMetadata().getPath().split(BROWSE_PATH_V2_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList())); resultMetadata.setTotalNumEntities(browseResults.getMetadata().getTotalNumEntities()); results.setMetadata(resultMetadata); return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java index 207da02de6ec2..a2d04a26bfa97 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java @@ -11,24 +11,23 @@ import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; - @Slf4j -public class ChartStatsSummaryResolver implements DataFetcher<CompletableFuture<ChartStatsSummary>> { +public class ChartStatsSummaryResolver + implements DataFetcher<CompletableFuture<ChartStatsSummary>> { private final TimeseriesAspectService timeseriesAspectService; private final Cache<Urn, ChartStatsSummary> summaryCache; public ChartStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) - .build(); + this.summaryCache = + CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(6, TimeUnit.HOURS).build(); } @Override - public CompletableFuture<ChartStatsSummary> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ChartStatsSummary> get(DataFetchingEnvironment environment) 
+ throws Exception { // Not yet implemented return CompletableFuture.completedFuture(null); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index f6bc68caa0821..34f7f133f6fb9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -35,10 +35,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * Resolver responsible for serving app configurations to the React UI. - */ +/** Resolver responsible for serving app configurations to the React UI. */ public class AppConfigResolver implements DataFetcher<CompletableFuture<AppConfig>> { private final GitVersion _gitVersion; @@ -82,7 +79,8 @@ public AppConfigResolver( } @Override - public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); @@ -103,19 +101,20 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen final PoliciesConfig policiesConfig = new PoliciesConfig(); policiesConfig.setEnabled(_authorizationConfiguration.getDefaultAuthorizer().isEnabled()); - policiesConfig.setPlatformPrivileges(com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES - .stream() - .map(this::mapPrivilege) - .collect(Collectors.toList())); + policiesConfig.setPlatformPrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES.stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); - policiesConfig.setResourcePrivileges(com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES - .stream() - .map(this::mapResourcePrivileges) - .collect(Collectors.toList()) - ); + policiesConfig.setResourcePrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES.stream() + .map(this::mapResourcePrivileges) + .collect(Collectors.toList())); final IdentityManagementConfig identityManagementConfig = new IdentityManagementConfig(); - identityManagementConfig.setEnabled(true); // Identity Management always enabled. TODO: Understand if there's a case where this should change. + identityManagementConfig.setEnabled( + true); // Identity Management always enabled. TODO: Understand if there's a case where this + // should change. 
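
The privilege wiring a few lines up follows an idiom used throughout this resolver: backend definitions from com.linkedin.metadata.authorization.PoliciesConfig are converted into GraphQL model objects with a stream().map(...).collect(...) pass over the mapPrivilege helper (reformatted further down in this file). A condensed sketch of the shape of that conversion, inlining the helper and copying only the fields visible in this patch:

    // Condensed sketch: each backend privilege definition is copied
    // field-by-field into its GraphQL counterpart before being attached
    // to the AppConfig payload served to the React UI.
    final List<Privilege> platformPrivileges =
        com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES.stream()
            .map(
                backend -> {
                  final Privilege privilege = new Privilege();
                  privilege.setType(backend.getType());
                  privilege.setDisplayName(backend.getDisplayName());
                  // ...any remaining fields are copied the same way...
                  return privilege;
                })
            .collect(Collectors.toList());
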
final ManagedIngestionConfig ingestionConfig = new ManagedIngestionConfig(); ingestionConfig.setEnabled(_ingestionConfiguration.isEnabled()); @@ -133,7 +132,8 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); - queriesTabConfig.setQueriesTabResultSize(_visualConfiguration.getQueriesTab().getQueriesTabResultSize()); + queriesTabConfig.setQueriesTabResultSize( + _visualConfiguration.getQueriesTab().getQueriesTabResultSize()); visualConfig.setQueriesTab(queriesTabConfig); } if (_visualConfiguration != null && _visualConfiguration.getEntityProfile() != null) { @@ -148,7 +148,8 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen if (_visualConfiguration != null && _visualConfiguration.getSearchResult() != null) { SearchResultsVisualConfig searchResultsVisualConfig = new SearchResultsVisualConfig(); if (_visualConfiguration.getSearchResult().getEnableNameHighlight() != null) { - searchResultsVisualConfig.setEnableNameHighlight(_visualConfiguration.getSearchResult().getEnableNameHighlight()); + searchResultsVisualConfig.setEnableNameHighlight( + _visualConfiguration.getSearchResult().getEnableNameHighlight()); } visualConfig.setSearchResult(searchResultsVisualConfig); } @@ -166,14 +167,15 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen viewsConfig.setEnabled(_viewsConfiguration.isEnabled()); appConfig.setViewsConfig(viewsConfig); - final FeatureFlagsConfig featureFlagsConfig = FeatureFlagsConfig.builder() - .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) - .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) - .setShowBrowseV2(_featureFlags.isShowBrowseV2()) - .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) - .setShowAccessManagement(_featureFlags.isShowAccessManagement()) - .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) - .build(); + final FeatureFlagsConfig featureFlagsConfig = + FeatureFlagsConfig.builder() + .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) + .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) + .setShowBrowseV2(_featureFlags.isShowBrowseV2()) + .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) + .setShowAccessManagement(_featureFlags.isShowAccessManagement()) + .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) + .build(); appConfig.setFeatureFlags(featureFlagsConfig); @@ -185,14 +187,17 @@ private ResourcePrivileges mapResourcePrivileges( final ResourcePrivileges graphQLPrivileges = new ResourcePrivileges(); graphQLPrivileges.setResourceType(resourcePrivileges.getResourceType()); graphQLPrivileges.setResourceTypeDisplayName(resourcePrivileges.getResourceTypeDisplayName()); - graphQLPrivileges.setEntityType(mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); + graphQLPrivileges.setEntityType( + mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); graphQLPrivileges.setPrivileges( - resourcePrivileges.getPrivileges().stream().map(this::mapPrivilege).collect(Collectors.toList()) - ); + resourcePrivileges.getPrivileges().stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); return graphQLPrivileges; } - private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { + private Privilege mapPrivilege( + com.linkedin.metadata.authorization.PoliciesConfig.Privilege 
privilege) { final Privilege graphQLPrivilege = new Privilege(); graphQLPrivilege.setType(privilege.getType()); graphQLPrivilege.setDisplayName(privilege.getDisplayName()); @@ -202,29 +207,53 @@ private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfi private EntityType mapResourceTypeToEntityType(final String resourceType) { // TODO: Is there a better way to instruct the UI to present a searchable resource? - if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES.getResourceType().equals(resourceType)) { + if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATASET; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DASHBOARD; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CHART; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_FLOW; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_JOB; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.TAG; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_TERM; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_NODE; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DOMAIN; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CONTAINER; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES + .getResourceType() + .equals(resourceType)) { 
return EntityType.CORP_GROUP; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_USER; } else { return null; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 4b8bd37a4fabe..58f7715c3e627 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; @@ -20,21 +22,16 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves the entities contained within a particular Container. */ @Slf4j public class ContainerEntitiesResolver implements DataFetcher<CompletableFuture<SearchResults>> { - static final List<String> CONTAINABLE_ENTITY_NAMES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME - ); + static final List<String> CONTAINABLE_ENTITY_NAMES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME); private static final String CONTAINER_FIELD_NAME = "container"; private static final String INPUT_ARG_NAME = "input"; private static final String DEFAULT_QUERY = "*"; @@ -55,45 +52,53 @@ public ContainerEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Container) environment.getSource()).getUrn(); - final ContainerEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final ContainerEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : "*"; final int start = input.getStart() != null ? input.getStart() : 0; final int count = input.getCount() != null ?
input.getCount() : 20; - return CompletableFuture.supplyAsync(() -> { - - try { - - final Criterion filterCriterion = new Criterion() - .setField(CONTAINER_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - CONTAINABLE_ENTITY_NAMES, - query, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with container with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + + final Criterion filterCriterion = + new Criterion() + .setField(CONTAINER_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + CONTAINABLE_ENTITY_NAMES, + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + start, + count, + null, + null, + context.getAuthentication())); + + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve entities associated with container with urn %s", urn), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java index 90fad4ca4578a..9502fb8e5cb93 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -12,15 +14,13 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; - -public class ParentContainersResolver implements DataFetcher<CompletableFuture<ParentContainersResult>> { +public class ParentContainersResolver + implements DataFetcher<CompletableFuture<ParentContainersResult>> { private final EntityClient _entityClient; @@ -28,21 +28,25 @@ public ParentContainersResolver(final EntityClient entityClient) { _entityClient = entityClient; } - private void aggregateParentContainers(List<Container> containers, String urn, QueryContext context) { + private void aggregateParentContainers( + List<Container> containers, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + 
Collections.singleton(CONTAINER_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null + && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); - EntityResponse response = _entityClient.getV2(containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); if (response != null) { Container mappedContainer = ContainerMapper.map(response); containers.add(mappedContainer); @@ -61,16 +65,17 @@ public CompletableFuture<ParentContainersResult> get(DataFetchingEnvironment env final String urn = ((Entity) environment.getSource()).getUrn(); final List<Container> containers = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - aggregateParentContainers(containers, urn, context); - final ParentContainersResult result = new ParentContainersResult(); - result.setCount(containers.size()); - result.setContainers(containers); - return result; - } catch (DataHubGraphQLException e) { - throw new RuntimeException("Failed to load all containers", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + aggregateParentContainers(containers, urn, context); + final ParentContainersResult result = new ParentContainersResult(); + result.setCount(containers.size()); + result.setContainers(containers); + return result; + } catch (DataHubGraphQLException e) { + throw new RuntimeException("Failed to load all containers", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java index db125384745a1..b5480359bde6a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.CorpUser; -import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardStatsSummary; +import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUserUsageCounts; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.query.filter.Filter; @@ -19,10 +21,9 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - @Slf4j -public class DashboardStatsSummaryResolver implements DataFetcher<CompletableFuture<DashboardStatsSummary>> { +public class DashboardStatsSummaryResolver + 
implements DataFetcher<CompletableFuture<DashboardStatsSummary>> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; @@ -32,63 +33,72 @@ public class DashboardStatsSummaryResolver implements DataFetcher<CompletableFut public DashboardStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. - .build(); + this.summaryCache = + CacheBuilder.newBuilder() + .maximumSize(10000) + .expireAfterWrite( + 6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. + .build(); } @Override - public CompletableFuture<DashboardStatsSummary> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DashboardStatsSummary> get(DataFetchingEnvironment environment) + throws Exception { final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - - try { - - final DashboardStatsSummary result = new DashboardStatsSummary(); - - // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. - List<DashboardUsageMetrics> dashboardUsageMetrics = - getDashboardUsageMetrics(resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); - if (dashboardUsageMetrics.size() > 0) { - result.setViewCount(getDashboardViewCount(resourceUrn)); - } - - // Obtain unique user statistics, by rolling up unique users over the past month. - List<DashboardUserUsageCounts> userUsageCounts = getDashboardUsagePerUser(resourceUrn); - result.setUniqueUserCountLast30Days(userUsageCounts.size()); - result.setTopUsersLast30Days( - trimUsers(userUsageCounts.stream().map(DashboardUserUsageCounts::getUser).collect(Collectors.toList()))); - - this.summaryCache.put(resourceUrn, result); - return result; - - } catch (Exception e) { - log.error(String.format("Failed to load dashboard usage summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (this.summaryCache.getIfPresent(resourceUrn) != null) { + return this.summaryCache.getIfPresent(resourceUrn); + } + + try { + + final DashboardStatsSummary result = new DashboardStatsSummary(); + + // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. + List<DashboardUsageMetrics> dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + if (dashboardUsageMetrics.size() > 0) { + result.setViewCount(getDashboardViewCount(resourceUrn)); + } + + // Obtain unique user statistics, by rolling up unique users over the past month. 
+ List<DashboardUserUsageCounts> userUsageCounts = getDashboardUsagePerUser(resourceUrn); + result.setUniqueUserCountLast30Days(userUsageCounts.size()); + result.setTopUsersLast30Days( + trimUsers( + userUsageCounts.stream() + .map(DashboardUserUsageCounts::getUser) + .collect(Collectors.toList()))); + + this.summaryCache.put(resourceUrn, result); + return result; + + } catch (Exception e) { + log.error( + String.format( + "Failed to load dashboard usage summary for resource %s", + resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. + } + }); } private int getDashboardViewCount(final Urn resourceUrn) { - List<DashboardUsageMetrics> dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), - null, - null, - 1, - this.timeseriesAspectService); + List<DashboardUsageMetrics> dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); return dashboardUsageMetrics.get(0).getViewsCount(); } private List<DashboardUserUsageCounts> getDashboardUsagePerUser(final Urn resourceUrn) { long now = System.currentTimeMillis(); long nowMinusOneMonth = timeMinusOneMonth(now); - Filter bucketStatsFilter = createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); + Filter bucketStatsFilter = + createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); return getUserUsageCounts(bucketStatsFilter, this.timeseriesAspectService); } @@ -98,4 +108,4 @@ private List<CorpUser> trimUsers(final List<CorpUser> originalUsers) { } return originalUsers; } - } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java index 24e1db33e9d40..07d028b07b01d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; @@ -26,16 +28,14 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - /** * Resolver used for resolving the usage statistics of a Dashboard. 
- * <p> - * Returns daily as well as absolute usage metrics of Dashboard + * + * <p>Returns daily as well as absolute usage metrics of a Dashboard */ @Slf4j -public class DashboardUsageStatsResolver implements DataFetcher<CompletableFuture<DashboardUsageQueryResult>> { +public class DashboardUsageStatsResolver + implements DataFetcher<CompletableFuture<DashboardUsageQueryResult>> { private static final String ES_FIELD_EVENT_GRANULARITY = "eventGranularity"; private final TimeseriesAspectService timeseriesAspectService; @@ -44,34 +44,40 @@ public DashboardUsageStatsResolver(TimeseriesAspectServi } @Override - public CompletableFuture<DashboardUsageQueryResult> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DashboardUsageQueryResult> get(DataFetchingEnvironment environment) + throws Exception { final String dashboardUrn = ((Entity) environment.getSource()).getUrn(); final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); // Max number of aspects to return for absolute dashboard usage. final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - return CompletableFuture.supplyAsync(() -> { - DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); + return CompletableFuture.supplyAsync( + () -> { + DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); - // Time Bucket Stats - Filter bucketStatsFilter = createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); - List<DashboardUsageAggregation> dailyUsageBuckets = getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); + // Time Bucket Stats + Filter bucketStatsFilter = + createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); + List<DashboardUsageAggregation> dailyUsageBuckets = + getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); - usageQueryResult.setBuckets(dailyUsageBuckets); - usageQueryResult.setAggregations(aggregations); + usageQueryResult.setBuckets(dailyUsageBuckets); + usageQueryResult.setAggregations(aggregations); - // Absolute usage metrics - List<DashboardUsageMetrics> dashboardUsageMetrics = - getDashboardUsageMetrics(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); - usageQueryResult.setMetrics(dashboardUsageMetrics); - return usageQueryResult; - }); + // Absolute usage metrics + List<DashboardUsageMetrics> dashboardUsageMetrics = + getDashboardUsageMetrics( + dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); + usageQueryResult.setMetrics(dashboardUsageMetrics); + return usageQueryResult; + }); } - private List<DashboardUsageMetrics> getDashboardUsageMetrics(String dashboardUrn, Long maybeStartTimeMillis, - Long maybeEndTimeMillis, Integer maybeLimit) { + private List<DashboardUsageMetrics> getDashboardUsageMetrics( + String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, Integer maybeLimit) { List<DashboardUsageMetrics> dashboardUsageMetrics; try { Filter filter = new Filter(); @@ -79,16 +85,26 @@ private List<DashboardUsageMetrics> getDashboardUsageMetrics(String dashboardUrn
// Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); List<EnvelopedAspect> aspects = - timeseriesAspectService.getAspectValues(Urn.createFromString(dashboardUrn), Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit, + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java index 462c18ea33dd4..4f170a296c47e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java @@ -32,7 +32,6 @@ import java.util.List; import java.util.stream.Collectors; - public class DashboardUsageStatsUtils { public static final String ES_FIELD_URN = "urn"; @@ -49,15 +48,17 @@ public static List<DashboardUsageMetrics> getDashboardUsageMetrics( List<DashboardUsageMetrics> dashboardUsageMetrics; try { Filter filter = createUsageFilter(dashboardUrn, null, null, false); - List<EnvelopedAspect> aspects = timeseriesAspectService.getAspectValues( - Urn.createFromString(dashboardUrn), - Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + List<EnvelopedAspect> aspects = + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + filter); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } @@ -69,8 +70,10 @@ public static DashboardUsageQueryResultAggregations getAggregations( List<DashboardUsageAggregation> dailyUsageBuckets, TimeseriesAspectService timeseriesAspectService) { - List<DashboardUserUsageCounts> userUsageCounts = getUserUsageCounts(filter, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations 
= new DashboardUsageQueryResultAggregations(); + List<DashboardUserUsageCounts> userUsageCounts = + getUserUsageCounts(filter, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + new DashboardUsageQueryResultAggregations(); aggregations.setUsers(userUsageCounts); aggregations.setUniqueUserCount(userUsageCounts.size()); @@ -99,29 +102,47 @@ public static DashboardUsageQueryResultAggregations getAggregations( } public static List<DashboardUsageAggregation> getBuckets( - Filter filter, - String dashboardUrn, - TimeseriesAspectService timeseriesAspectService) { + Filter filter, String dashboardUrn, TimeseriesAspectService timeseriesAspectService) { AggregationSpec usersCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountAggregation = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("viewsCount"); AggregationSpec executionsCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("executionsCount"); AggregationSpec usersCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("viewsCount"); AggregationSpec executionsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{usersCountAggregation, viewsCountAggregation, executionsCountAggregation, - usersCountCardinalityAggregation, viewsCountCardinalityAggregation, executionsCountCardinalityAggregation}; - GenericTable dailyStats = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, - createUsageGroupingBuckets(CalendarInterval.DAY)); + new AggregationSpec[] { + usersCountAggregation, + viewsCountAggregation, + executionsCountAggregation, + usersCountCardinalityAggregation, + viewsCountCardinalityAggregation, + executionsCountCardinalityAggregation + }; + GenericTable dailyStats = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + createUsageGroupingBuckets(CalendarInterval.DAY)); List<DashboardUsageAggregation> buckets = new ArrayList<>(); for (StringArray row : dailyStats.getRows()) { @@ -130,7 +151,8 @@ public static List<DashboardUsageAggregation> getBuckets( usageAggregation.setDuration(WindowDuration.DAY); usageAggregation.setResource(dashboardUrn); - DashboardUsageAggregationMetrics usageAggregationMetrics = new DashboardUsageAggregationMetrics(); + DashboardUsageAggregationMetrics usageAggregationMetrics = + new DashboardUsageAggregationMetrics(); if 
(!row.get(1).equals(ES_NULL_VALUE) && !row.get(4).equals(ES_NULL_VALUE)) { try { @@ -156,7 +178,8 @@ public static List<DashboardUsageAggregation> getBuckets( usageAggregationMetrics.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert executionsCount from ES to object", e); + throw new IllegalArgumentException( + "Failed to convert executionsCount from ES to object", e); } } usageAggregation.setMetrics(usageAggregationMetrics); @@ -165,34 +188,59 @@ public static List<DashboardUsageAggregation> getBuckets( return buckets; } - public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, TimeseriesAspectService timeseriesAspectService) { + public static List<DashboardUserUsageCounts> getUserUsageCounts( + Filter filter, TimeseriesAspectService timeseriesAspectService) { // Sum aggregation on userCounts.count AggregationSpec sumUsageCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.usageCount"); AggregationSpec sumViewCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.viewsCount"); AggregationSpec sumExecutionCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.executionsCount"); AggregationSpec usageCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.usageCount"); AggregationSpec viewCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.viewsCount"); AggregationSpec executionCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY) + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) .setFieldPath("userCounts.executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{sumUsageCountsCountAggSpec, sumViewCountsCountAggSpec, sumExecutionCountsCountAggSpec, - usageCountsCardinalityAggSpec, viewCountsCardinalityAggSpec, executionCountsCardinalityAggSpec}; + new AggregationSpec[] { + sumUsageCountsCountAggSpec, + sumViewCountsCountAggSpec, + sumExecutionCountsCountAggSpec, + usageCountsCardinalityAggSpec, + viewCountsCardinalityAggSpec, + executionCountsCardinalityAggSpec + }; // String grouping bucket on userCounts.user GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("userCounts.user") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend - GenericTable result = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - 
Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, groupingBuckets); + GenericTable result = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List<DashboardUserUsageCounts> userUsageCounts = new ArrayList<>(); for (StringArray row : result.getRows()) { @@ -208,7 +256,8 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T userUsageCount.setUsageCount(Integer.valueOf(row.get(1))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user usage count from ES to int", e); } } if (!row.get(2).equals(ES_NULL_VALUE) && row.get(5).equals(ES_NULL_VALUE)) { @@ -217,7 +266,8 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T userUsageCount.setViewsCount(Integer.valueOf(row.get(2))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user views count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user views count from ES to int", e); } } if (!row.get(3).equals(ES_NULL_VALUE) && !row.get(6).equals(ES_NULL_VALUE)) { @@ -226,7 +276,8 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T userUsageCount.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user executions count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user executions count from ES to int", e); } } userUsageCounts.add(userUsageCount); @@ -239,17 +290,15 @@ public static List<DashboardUserUsageCounts> getUserUsageCounts(Filter filter, T private static GroupingBucket[] createUsageGroupingBuckets(CalendarInterval calenderInterval) { GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setKey(ES_FIELD_TIMESTAMP) + timestampBucket + .setKey(ES_FIELD_TIMESTAMP) .setType(GroupingBucketType.DATE_GROUPING_BUCKET) .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(calenderInterval)); - return new GroupingBucket[]{timestampBucket}; + return new GroupingBucket[] {timestampBucket}; } public static Filter createUsageFilter( - String dashboardUrn, - Long startTime, - Long endTime, - boolean byBucket) { + String dashboardUrn, Long startTime, Long endTime, boolean byBucket) { Filter filter = new Filter(); final ArrayList<Criterion> criteria = new ArrayList<>(); @@ -260,44 +309,55 @@ public static Filter createUsageFilter( if (startTime != null) { // Add filter for start time - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(Long.toString(startTime)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(Long.toString(startTime)); criteria.add(startTimeCriterion); } if (endTime != null) { // Add filter for end time - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(Long.toString(endTime)); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(Long.toString(endTime)); 
criteria.add(endTimeCriterion); } if (byBucket) { - // Add filter for presence of eventGranularity - only consider bucket stats and not absolute stats + // Add filter for presence of eventGranularity - only consider bucket stats and not absolute + // stats // since unit is mandatory, we assume if eventGranularity contains unit, then it is not null Criterion onlyTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.CONTAIN).setValue("unit"); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.CONTAIN) + .setValue("unit"); criteria.add(onlyTimeBucketsCriterion); } else { // Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); } - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } - public static Long timeMinusOneMonth(long time) { final long oneHourMillis = 60 * 60 * 1000; final long oneDayMillis = 24 * oneHourMillis; return time - (31 * oneDayMillis + 1); } - private DashboardUsageStatsUtils() { } + private DashboardUsageStatsUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java index 9c32fa1c08076..f5d4f949e5710 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -8,15 +10,12 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -27,54 +26,80 @@ public class BatchSetDataProductResolver implements DataFetcher<CompletableFutur @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDataProductInput input = bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); + final BatchSetDataProductInput input = + bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); final String maybeDataProductUrn = 
input.getDataProductUrn(); final List<String> resources = input.getResourceUrns(); - return CompletableFuture.supplyAsync(() -> { - - verifyResources(resources, context); - verifyDataProduct(maybeDataProductUrn, context); + return CompletableFuture.supplyAsync( + () -> { + verifyResources(resources, context); + verifyDataProduct(maybeDataProductUrn, context); - try { - List<Urn> resourceUrns = resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (maybeDataProductUrn != null) { - batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); - } else { - batchUnsetDataProduct(resourceUrns, context); - } - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + List<Urn> resourceUrns = + resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + if (maybeDataProductUrn != null) { + batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); + } else { + batchUnsetDataProduct(resourceUrns, context); + } + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void verifyResources(List<String> resources, QueryContext context) { for (String resource : resources) { - if (!_dataProductService.verifyEntityExists(UrnUtils.getUrn(resource), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, %s in resources does not exist", resource)); + if (!_dataProductService.verifyEntityExists( + UrnUtils.getUrn(resource), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, %s in resources does not exist", resource)); } Urn resourceUrn = UrnUtils.getUrn(resource); - if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity( + context, resourceUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } } private void verifyDataProduct(String maybeDataProductUrn, QueryContext context) { - if (maybeDataProductUrn != null && !_dataProductService.verifyEntityExists(UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, Data Product urn %s does not exist", maybeDataProductUrn)); + if (maybeDataProductUrn != null + && !_dataProductService.verifyEntityExists( + UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, Data Product urn %s does not exist", + maybeDataProductUrn)); } } - private void batchSetDataProduct(@Nonnull String dataProductUrn, List<Urn> resources, QueryContext context) { - log.debug("Batch setting Data Product. 
dataProduct urn: {}, resources: {}", dataProductUrn, resources); + private void batchSetDataProduct( + @Nonnull String dataProductUrn, List<Urn> resources, QueryContext context) { + log.debug( + "Batch setting Data Product. dataProduct urn: {}, resources: {}", + dataProductUrn, + resources); try { - _dataProductService.batchSetDataProduct(UrnUtils.getUrn(dataProductUrn), resources, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.batchSetDataProduct( + UrnUtils.getUrn(dataProductUrn), + resources, + context.getAuthentication(), + UrnUtils.getUrn(context.getActorUrn())); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Data Product %s to resources with urns %s!", dataProductUrn, resources), e); + throw new RuntimeException( + String.format( + "Failed to batch set Data Product %s to resources with urns %s!", + dataProductUrn, resources), + e); } } @@ -82,10 +107,14 @@ private void batchUnsetDataProduct(List<Urn> resources, QueryContext context) { log.debug("Batch unsetting Data Product. resources: {}", resources); try { for (Urn resource : resources) { - _dataProductService.unsetDataProduct(resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.unsetDataProduct( + resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch unset data product for resources with urns %s!", resources), e); + throw new RuntimeException( + String.format( + "Failed to batch unset data product for resources with urns %s!", resources), + e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index f644ff31a571b..10c487a839f35 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -12,13 +14,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class CreateDataProductResolver implements DataFetcher<CompletableFuture<DataProduct>> { @@ -26,37 +25,45 @@ public class CreateDataProductResolver implements DataFetcher<CompletableFuture< private final DataProductService _dataProductService; @Override - public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateDataProductInput input = 
bindArgument(environment.getArgument("input"), CreateDataProductInput.class); + final CreateDataProductInput input = + bindArgument(environment.getArgument("input"), CreateDataProductInput.class); final Authentication authentication = context.getAuthentication(); final Urn domainUrn = UrnUtils.getUrn(input.getDomainUrn()); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Domain provided dos not exist"); - } - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - final Urn dataProductUrn = _dataProductService.createDataProduct( - input.getProperties().getName(), - input.getProperties().getDescription(), - authentication); - _dataProductService.setDomain(dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new DataProduct from input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Domain provided does not exist"); + } + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action.
Please contact your DataHub administrator."); + } + + try { + final Urn dataProductUrn = + _dataProductService.createDataProduct( + input.getProperties().getName(), + input.getProperties().getDescription(), + authentication); + _dataProductService.setDomain( + dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); + EntityResponse response = + _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); + if (response != null) { + return DataProductMapper.map(response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new DataProduct from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java index 596e292e7fe33..f6fe11a587a39 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java @@ -7,25 +7,27 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.metadata.authorization.PoliciesConfig; -import lombok.extern.slf4j.Slf4j; - import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DataProductAuthorizationUtils { - private DataProductAuthorizationUtils() { + private DataProductAuthorizationUtils() {} - } - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDataProductsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -35,11 +37,14 @@ public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryCo orPrivilegeGroups); } - public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext context, Urn domainUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToManageDataProducts( + @Nonnull QueryContext context, Urn domainUrn) { + final DisjunctivePrivilegeGroup 
orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -49,10 +54,10 @@ public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext con orPrivilegeGroups); } - public static boolean isAuthorizedToEditDataProduct(@Nonnull QueryContext context, Urn dataProductUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP - )); + public static boolean isAuthorizedToEditDataProduct( + @Nonnull QueryContext context, Urn dataProductUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(ALL_PRIVILEGES_GROUP)); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java index fd31e2199c22a..ea13f96cfc1bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java @@ -9,11 +9,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class DeleteDataProductResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -21,32 +20,38 @@ public class DeleteDataProductResolver implements DataFetcher<CompletableFuture< private final DataProductService _dataProductService; @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } - - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - } - - try { - _dataProductService.deleteDataProduct(dataProductUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Data Product", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + dataProductUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Data Product provided does not exist"); + } + + Domains domains = + _dataProductService.getDataProductDomains( + dataProductUrn, context.getAuthentication()); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + } + + try { + _dataProductService.deleteDataProduct(dataProductUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Data Product", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index 831d449bef9ef..a0f1698bf99e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -22,18 +25,14 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** * Resolver responsible for getting the assets belonging to a Data Product.
Get the assets from the @@ -41,7 +40,8 @@ */ @Slf4j @RequiredArgsConstructor -public class ListDataProductAssetsResolver implements DataFetcher<CompletableFuture<SearchResults>> { +public class ListDataProductAssetsResolver + implements DataFetcher<CompletableFuture<SearchResults>> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; @@ -52,7 +52,10 @@ public class ListDataProductAssetsResolver implements DataFetcher<CompletableFut public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); // get urn from either input or source (in the case of "entities" field) - final String urn = environment.getArgument("urn") != null ? environment.getArgument("urn") : ((DataProduct) environment.getSource()).getUrn(); + final String urn = + environment.getArgument("urn") != null + ? environment.getArgument("urn") + : ((DataProduct) environment.getSource()).getUrn(); final Urn dataProductUrn = UrnUtils.getUrn(urn); final SearchAcrossEntitiesInput input = bindArgument(environment.getArgument("input"), SearchAcrossEntitiesInput.class); @@ -60,32 +63,52 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) // 1. Get urns of assets belonging to Data Product using an aspect query List<Urn> assetUrns = new ArrayList<>(); try { - final EntityResponse entityResponse = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data(); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse != null + && entityResponse + .getAspects() + .containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { + final DataMap data = + entityResponse + .getAspects() + .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); final DataProductProperties dataProductProperties = new DataProductProperties(data); if (dataProductProperties.hasAssets()) { - assetUrns.addAll(dataProductProperties.getAssets().stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList())); + assetUrns.addAll( + dataProductProperties.getAssets().stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList())); } } } catch (Exception e) { log.error(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); - throw new RuntimeException(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to list data product assets with urn %s", dataProductUrn), e); } // 2. Get list of entities that we should query based on filters or assets from aspect. - List<String> entitiesToQuery = assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); - - - final List<EntityType> inputEntityTypes = (input.getTypes() == null || input.getTypes().isEmpty()) ? 
ImmutableList.of() : input.getTypes(); - final List<String> inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).distinct().collect(Collectors.toList()); - - final List<String> finalEntityNames = inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; + List<String> entitiesToQuery = + assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); + + final List<EntityType> inputEntityTypes = + (input.getTypes() == null || input.getTypes().isEmpty()) + ? ImmutableList.of() + : input.getTypes(); + final List<String> inputEntityNames = + inputEntityTypes.stream() + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + + final List<String> finalEntityNames = + inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); @@ -93,49 +116,64 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - // if no assets in data product properties, exit early before search and return empty results - if (assetUrns.size() == 0) { - SearchResults results = new SearchResults(); - results.setStart(start); - results.setCount(count); - results.setTotal(0); - results.setSearchResults(ImmutableList.of()); - return results; - } - - // add urns from the aspect to our filters - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); - - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } - - try { - log.debug( - "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - finalEntityNames, - sanitizedQuery, - finalFilter, - start, - count, - searchFlags, - null, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // if no assets in data product properties, exit early before search and return empty + // results + if (assetUrns.size() == 0) { + SearchResults results = new SearchResults(); + results.setStart(start); + results.setCount(count); + results.setTotal(0); + results.setSearchResults(ImmutableList.of()); + return results; + } + + // add urns from the aspect to our filters + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); + + 
SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } + + try { + log.debug( + "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + finalEntityNames, + sanitizedQuery, + finalFilter, + start, + count, + searchFlags, + null, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java index 79afddbb873fb..304ef96d90aa5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,13 +15,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class UpdateDataProductResolver implements DataFetcher<CompletableFuture<DataProduct>> { @@ -27,43 +26,51 @@ public class UpdateDataProductResolver implements DataFetcher<CompletableFuture< private final DataProductService _dataProductService; @Override - public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DataProduct> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateDataProductInput input = bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); + final UpdateDataProductInput input = + bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } + return 
CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + dataProductUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Data Product provided does not exist"); + } - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - } + Domains domains = + _dataProductService.getDataProductDomains( + dataProductUrn, context.getAuthentication()); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + } - try { - final Urn urn = _dataProductService.updateDataProduct( - dataProductUrn, - input.getName(), - input.getDescription(), - authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(urn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); - } - }); + try { + final Urn urn = + _dataProductService.updateDataProduct( + dataProductUrn, input.getName(), input.getDescription(), authentication); + EntityResponse response = + _dataProductService.getDataProductEntityResponse(urn, authentication); + if (response != null) { + return DataProductMapper.map(response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java index 1587df4c9899b..604c46a1f7c01 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java @@ -39,13 +39,11 @@ import lombok.Data; import lombok.extern.slf4j.Slf4j; - /** * Resolver used for resolving the Health state of a Dataset. * - * Currently, the health status is calculated via the validation on a Dataset. If there are no validations found, the - * health status will be undefined for the Dataset. - * + * <p>Currently, the health status is calculated via the validation on a Dataset. If there are no + * validations found, the health status will be undefined for the Dataset.
*/ @Slf4j public class DatasetHealthResolver implements DataFetcher<CompletableFuture<List<Health>>> { @@ -60,47 +58,48 @@ public class DatasetHealthResolver implements DataFetcher<CompletableFuture<List private final Cache<String, CachedHealth> _statusCache; public DatasetHealthResolver( - final GraphClient graphClient, - final TimeseriesAspectService timeseriesAspectService) { + final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService) { this(graphClient, timeseriesAspectService, new Config(true)); - } + public DatasetHealthResolver( final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService, final Config config) { _graphClient = graphClient; _timeseriesAspectService = timeseriesAspectService; - _statusCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(1, TimeUnit.MINUTES) - .build(); + _statusCache = + CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(1, TimeUnit.MINUTES).build(); _config = config; } @Override - public CompletableFuture<List<Health>> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<List<Health>> get(final DataFetchingEnvironment environment) + throws Exception { final Dataset parent = environment.getSource(); - return CompletableFuture.supplyAsync(() -> { - try { - final CachedHealth cachedStatus = _statusCache.get(parent.getUrn(), () -> ( - computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); - return cachedStatus.healths; - } catch (Exception e) { - throw new RuntimeException("Failed to resolve dataset's health status.", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final CachedHealth cachedStatus = + _statusCache.get( + parent.getUrn(), + () -> + (computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); + return cachedStatus.healths; + } catch (Exception e) { + throw new RuntimeException("Failed to resolve dataset's health status.", e); + } + }); } /** * Computes the "resolved health status" for a Dataset by * - * - fetching active (non-deleted) assertions - * - fetching latest assertion run for each - * - checking whether any of the assertions latest runs are failing - * + * <p>- fetching active (non-deleted) assertions - fetching latest assertion run for each - + * checking whether any of the assertions latest runs are failing */ - private CachedHealth computeHealthStatusForDataset(final String datasetUrn, final QueryContext context) { + private CachedHealth computeHealthStatusForDataset( + final String datasetUrn, final QueryContext context) { final List<Health> healthStatuses = new ArrayList<>(); if (_config.getAssertionsEnabled()) { @@ -113,31 +112,33 @@ private CachedHealth computeHealthStatusForDataset(final String datasetUrn, fina } /** - * Returns the resolved "assertions health", which is currently a static function of whether the most recent run of - * all dataset assertions has succeeded. + * Returns the resolved "assertions health", which is currently a static function of whether the + * most recent run of all dataset assertions has succeeded. * * @param datasetUrn the dataset to compute health for * @param context the query context * @return an instance of {@link Health} for the Dataset, null if one cannot be computed. 
*/ @Nullable - private Health computeAssertionHealthForDataset(final String datasetUrn, final QueryContext context) { + private Health computeAssertionHealthForDataset( + final String datasetUrn, final QueryContext context) { // Get active assertion urns - final EntityRelationships relationships = _graphClient.getRelatedEntities( - datasetUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - 0, - 500, - context.getActorUrn() - ); + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + datasetUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + context.getActorUrn()); if (relationships.getTotal() > 0) { // If there are assertions defined, then we should return a non-null health for this asset. - final Set<String> activeAssertionUrns = relationships.getRelationships() - .stream() - .map(relationship -> relationship.getEntity().toString()).collect(Collectors.toSet()); + final Set<String> activeAssertionUrns = + relationships.getRelationships().stream() + .map(relationship -> relationship.getEntity().toString()) + .collect(Collectors.toSet()); final GenericTable assertionRunResults = getAssertionRunsTable(datasetUrn); @@ -146,22 +147,24 @@ private Health computeAssertionHealthForDataset(final String datasetUrn, final Q return null; } - final List<String> failingAssertionUrns = getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); + final List<String> failingAssertionUrns = + getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); // Finally compute & return the health. final Health health = new Health(); health.setType(HealthStatusType.ASSERTIONS); if (failingAssertionUrns.size() > 0) { health.setStatus(HealthStatus.FAIL); - health.setMessage(String.format("%s of %s assertions are failing", failingAssertionUrns.size(), - activeAssertionUrns.size())); + health.setMessage( + String.format( + "%s of %s assertions are failing", + failingAssertionUrns.size(), activeAssertionUrns.size())); health.setCauses(failingAssertionUrns); } else { health.setStatus(HealthStatus.PASS); health.setMessage("All assertions are passing"); } return health; - } return null; } @@ -175,7 +178,8 @@ private GenericTable getAssertionRunsTable(final String asserteeUrn) { createAssertionGroupingBuckets()); } - private List<String> getFailingAssertionUrns(final GenericTable assertionRunsResult, final Set<String> candidateAssertionUrns) { + private List<String> getFailingAssertionUrns( + final GenericTable assertionRunsResult, final Set<String> candidateAssertionUrns) { // Create the buckets based on the result return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns); } @@ -191,12 +195,15 @@ private Filter createAssertionsFilter(final String datasetUrn) { // Add filter for result == result Criterion startTimeCriterion = - new Criterion().setField("status").setCondition(Condition.EQUAL).setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); + new Criterion() + .setField("status") + .setCondition(Condition.EQUAL) + .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); criteria.add(startTimeCriterion); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)) - ))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } @@ -205,31 +212,38 @@ private AggregationSpec[] 
createAssertionAggregationSpecs() { AggregationSpec resultTypeAggregation = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type"); AggregationSpec timestampAggregation = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("timestampMillis"); - return new AggregationSpec[]{resultTypeAggregation, timestampAggregation}; + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("timestampMillis"); + return new AggregationSpec[] {resultTypeAggregation, timestampAggregation}; } private GroupingBucket[] createAssertionGroupingBuckets() { // String grouping bucket on "assertionUrn" GroupingBucket assertionUrnBucket = new GroupingBucket(); assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - return new GroupingBucket[]{assertionUrnBucket}; + return new GroupingBucket[] {assertionUrnBucket}; } - private List<String> resultToFailedAssertionUrns(final StringArrayArray rows, final Set<String> activeAssertionUrns) { + private List<String> resultToFailedAssertionUrns( + final StringArrayArray rows, final Set<String> activeAssertionUrns) { final List<String> failedAssertionUrns = new ArrayList<>(); for (StringArray row : rows) { // Result structure should be assertionUrn, event.result.type, timestampMillis if (row.size() != 3) { - throw new RuntimeException(String.format( - "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", row.size())); + throw new RuntimeException( + String.format( + "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", + row.size())); } final String assertionUrn = row.get(0); final String resultType = row.get(1); - // If assertion is "active" (not deleted) & is failing, then we report a degradation in health. - if (activeAssertionUrns.contains(assertionUrn) && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { + // If assertion is "active" (not deleted) & is failing, then we report a degradation in + // health. + if (activeAssertionUrns.contains(assertionUrn) + && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { failedAssertionUrns.add(assertionUrn); } } @@ -246,4 +260,4 @@ public static class Config { private static class CachedHealth { private final List<Health> healths; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java index 2873866bb34f7..74fbd9c2c868a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java @@ -24,13 +24,13 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - /** * This resolver is a thin wrapper around the {@link DatasetUsageStatsResolver} which simply * computes some aggregate usage metrics for a Dashboard. 
*/ @Slf4j -public class DatasetStatsSummaryResolver implements DataFetcher<CompletableFuture<DatasetStatsSummary>> { +public class DatasetStatsSummaryResolver + implements DataFetcher<CompletableFuture<DatasetStatsSummary>> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; @@ -40,53 +40,64 @@ public class DatasetStatsSummaryResolver implements DataFetcher<CompletableFutur public DatasetStatsSummaryResolver(final UsageClient usageClient) { this.usageClient = usageClient; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. - .build(); + this.summaryCache = + CacheBuilder.newBuilder() + .maximumSize(10000) + .expireAfterWrite( + 6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. + .build(); } @Override - public CompletableFuture<DatasetStatsSummary> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DatasetStatsSummary> get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } + return CompletableFuture.supplyAsync( + () -> { + if (this.summaryCache.getIfPresent(resourceUrn) != null) { + return this.summaryCache.getIfPresent(resourceUrn); + } - try { + try { - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view profile information for dataset {}", + if (!isAuthorized(resourceUrn, context)) { + log.debug( + "User {} is not authorized to view profile information for dataset {}", context.getActorUrn(), resourceUrn.toString()); - return null; - } - - com.linkedin.usage.UsageQueryResult - usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH); - - final DatasetStatsSummary result = new DatasetStatsSummary(); - result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); - result.setUniqueUserCountLast30Days(usageQueryResult.getAggregations().getUniqueUserCount()); - if (usageQueryResult.getAggregations().hasUsers()) { - result.setTopUsersLast30Days(trimUsers(usageQueryResult.getAggregations().getUsers() - .stream() - .filter(UserUsageCounts::hasUser) - .sorted((a, b) -> (b.getCount() - a.getCount())) - .map(userCounts -> createPartialUser(Objects.requireNonNull(userCounts.getUser()))) - .collect(Collectors.toList()))); - } - this.summaryCache.put(resourceUrn, result); - return result; - } catch (Exception e) { - log.error(String.format("Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. 
- } - }); + return null; + } + + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH); + + final DatasetStatsSummary result = new DatasetStatsSummary(); + result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); + result.setUniqueUserCountLast30Days( + usageQueryResult.getAggregations().getUniqueUserCount()); + if (usageQueryResult.getAggregations().hasUsers()) { + result.setTopUsersLast30Days( + trimUsers( + usageQueryResult.getAggregations().getUsers().stream() + .filter(UserUsageCounts::hasUser) + .sorted((a, b) -> (b.getCount() - a.getCount())) + .map( + userCounts -> + createPartialUser(Objects.requireNonNull(userCounts.getUser()))) + .collect(Collectors.toList()))); + } + this.summaryCache.put(resourceUrn, result); + return result; + } catch (Exception e) { + log.error( + String.format( + "Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. + } + }); } private List<CorpUser> trimUsers(final List<CorpUser> originalUsers) { @@ -103,8 +114,9 @@ private CorpUser createPartialUser(final Urn userUrn) { } private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); + return AuthorizationUtils.isAuthorized( + context, + Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java index e4bec8e896fdf..75288ec989c79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java @@ -17,7 +17,6 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DatasetUsageStatsResolver implements DataFetcher<CompletableFuture<UsageQueryResult>> { @@ -28,30 +27,35 @@ public DatasetUsageStatsResolver(final UsageClient usageClient) { } @Override - public CompletableFuture<UsageQueryResult> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<UsageQueryResult> get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final UsageTimeRange range = UsageTimeRange.valueOf(environment.getArgument("range")); - return CompletableFuture.supplyAsync(() -> { - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view usage information for dataset {}", - context.getActorUrn(), - resourceUrn.toString()); - return null; - } - try { - com.linkedin.usage.UsageQueryResult - usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), range); - return UsageQueryResultMapper.map(usageQueryResult); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); - } - }); + return CompletableFuture.supplyAsync( 
+ () -> { + if (!isAuthorized(resourceUrn, context)) { + log.debug( + "User {} is not authorized to view usage information for dataset {}", + context.getActorUrn(), + resourceUrn.toString()); + return null; + } + try { + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats(resourceUrn.toString(), range); + return UsageQueryResultMapper.map(usageQueryResult); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + } + }); } private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, + return AuthorizationUtils.isAuthorized( + context, Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index 75c09d0cf7e43..62c88c506ba61 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -1,16 +1,20 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateDeprecationInput; import com.linkedin.datahub.graphql.resolvers.AuthUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -23,13 +27,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor @@ -37,48 +37,61 @@ public class UpdateDeprecationResolver implements DataFetcher<CompletableFuture< private static final String EMPTY_STRING = ""; private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateDeprecationInput input = bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); + final UpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); final Urn entityUrn = Urn.createFromString(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateUpdateDeprecationInput( - entityUrn, - _entityService - ); - try { - Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DEPRECATION_ASPECT_NAME, - _entityService, - new Deprecation()); - updateDeprecation(deprecation, input, context); - - // Create the Deprecation aspect - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(entityUrn, DEPRECATION_ASPECT_NAME, deprecation); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to update Deprecation for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to update Deprecation for resource with entity urn %s", entityUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + validateUpdateDeprecationInput(entityUrn, _entityService); + try { + Deprecation deprecation = + (Deprecation) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), + DEPRECATION_ASPECT_NAME, + _entityService, + new Deprecation()); + updateDeprecation(deprecation, input, context); + + // Create the Deprecation aspect + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + entityUrn, DEPRECATION_ASPECT_NAME, deprecation); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to update Deprecation for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to update Deprecation for resource with entity urn %s", entityUrn), + e); + } + }); } - private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext context, final Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToUpdateDeprecationForEntity( + final QueryContext context, final Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -88,20 +101,19 @@ private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext cont orPrivilegeGroups); } - public static Boolean validateUpdateDeprecationInput( - Urn entityUrn, - EntityService entityService - ) { + public static Boolean validateUpdateDeprecationInput(Urn entityUrn, EntityService entityService) { if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( - String.format("Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); + String.format( + "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); } return true; } - private static void updateDeprecation(Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { + private static void updateDeprecation( + Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { deprecation.setDeprecated(input.getDeprecated()); deprecation.setDecommissionTime(input.getDecommissionTime(), SetMode.REMOVE_IF_NULL); if (input.getNote() != null) { @@ -115,9 +127,10 @@ private static void updateDeprecation(Deprecation deprecation, UpdateDeprecation } catch (URISyntaxException e) { // Should never happen. throw new RuntimeException( - String.format("Failed to convert authorized actor into an Urn. actor urn: %s", - context.getActorUrn()), + String.format( + "Failed to convert authorized actor into an Urn. 
actor urn: %s", + context.getActorUrn()), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java index 1930cdc1f8667..9099394d32bd0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -23,22 +28,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS privilege. + * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS + * privilege. */ @Slf4j @RequiredArgsConstructor @@ -51,71 +49,101 @@ public class CreateDomainResolver implements DataFetcher<CompletableFuture<Strin public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateDomainInput input = bindArgument(environment.getArgument("input"), CreateDomainInput.class); - final Urn parentDomain = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canCreateDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - // Create the Domain Key - final DomainKey key = new DomainKey(); - - // Take user provided id OR generate a random UUID for the domain. - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setId(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Domain already exists!"); - } - - if (parentDomain != null && !_entityClient.exists(parentDomain, context.getAuthentication())) { - throw new IllegalArgumentException("Parent Domain does not exist!"); - } - - if (DomainUtils.hasNameConflict(input.getName(), parentDomain, context, _entityClient)) { - throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. 
Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, DOMAIN_ENTITY_NAME, - DOMAIN_PROPERTIES_ASPECT_NAME, mapDomainProperties(input, context)); - proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); - - String domainUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - OwnerUtils.addCreatorAsOwner(context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return domainUrn; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to create Domain with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Domain with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + final CreateDomainInput input = + bindArgument(environment.getArgument("input"), CreateDomainInput.class); + final Urn parentDomain = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + // Create the Domain Key + final DomainKey key = new DomainKey(); + + // Take user provided id OR generate a random UUID for the domain. + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setId(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Domain already exists!"); + } + + if (parentDomain != null + && !_entityClient.exists(parentDomain, context.getAuthentication())) { + throw new IllegalArgumentException("Parent Domain does not exist!"); + } + + if (DomainUtils.hasNameConflict( + input.getName(), parentDomain, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in this domain. 
Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + DOMAIN_ENTITY_NAME, + DOMAIN_PROPERTIES_ASPECT_NAME, + mapDomainProperties(input, context)); + proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); + + String domainUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + OwnerUtils.addCreatorAsOwner( + context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); + return domainUrn; + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to create Domain with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create Domain with id: %s, name: %s", + input.getId(), input.getName()), + e); + } + }); } - private DomainProperties mapDomainProperties(final CreateDomainInput input, final QueryContext context) { + private DomainProperties mapDomainProperties( + final CreateDomainInput input, final QueryContext context) { final DomainProperties result = new DomainProperties(); result.setName(input.getName()); result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - result.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + result.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); if (input.getParentDomain() != null) { try { result.setParentDomain(Urn.createFromString(input.getParentDomain())); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()), e); + throw new RuntimeException( + String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()), + e); } } return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java index 9ab90e8b4ff72..c863f2e581dcb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java @@ -11,10 +11,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class DeleteDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -25,37 +22,49 @@ public DeleteDomainResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String domainUrn = 
environment.getArgument("urn"); final Urn urn = Urn.createFromString(domainUrn); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (AuthorizationUtils.canManageDomains(context) + || AuthorizationUtils.canDeleteEntity(urn, context)) { + try { + // Make sure there are no child domains + if (DomainUtils.hasChildDomains(urn, context, _entityClient)) { + throw new RuntimeException( + String.format("Cannot delete domain %s which has child domains", domainUrn)); + } - if (AuthorizationUtils.canManageDomains(context) || AuthorizationUtils.canDeleteEntity(urn, context)) { - try { - // Make sure there are no child domains - if (DomainUtils.hasChildDomains(urn, context, _entityClient)) { - throw new RuntimeException(String.format("Cannot delete domain %s which has child domains", domainUrn)); - } + _entityClient.deleteEntity(urn, context.getAuthentication()); + log.info( + String.format("I've successfully deleted the entity %s with urn", domainUrn)); - _entityClient.deleteEntity(urn, context.getAuthentication()); - log.info(String.format("I've successfully deleted the entity %s with urn", domainUrn)); + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Domain with urn %s", + urn), + e); + } + }); - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for Domain with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against domain with urn %s", domainUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", domainUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 0bf551c4683e6..8f6d109e71b2c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; @@ -19,13 +22,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolves the entities in a particular Domain. - */ +/** Resolves the entities in a particular Domain. */ @Slf4j public class DomainEntitiesResolver implements DataFetcher<CompletableFuture<SearchResults>> { @@ -49,50 +46,65 @@ public DomainEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<SearchResults> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Domain) environment.getSource()).getUrn(); - final DomainEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final DomainEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : DEFAULT_QUERY; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? 
input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - try { - - final CriterionArray criteria = new CriterionArray(); - final Criterion filterCriterion = new Criterion() - .setField(DOMAINS_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - criteria.add(filterCriterion); - if (input.getFilters() != null) { - input.getFilters().forEach(filter -> { - criteria.add(new Criterion().setField(filter.getField()).setValue(filter.getValue())); - }); - } - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - query, - new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(criteria))), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with Domain with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + + final CriterionArray criteria = new CriterionArray(); + final Criterion filterCriterion = + new Criterion() + .setField(DOMAINS_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + criteria.add(filterCriterion); + if (input.getFilters() != null) { + input + .getFilters() + .forEach( + filter -> { + criteria.add( + new Criterion() + .setField(filter.getField()) + .setValue(filter.getValue())); + }); + } + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(criteria))), + start, + count, + null, + null, + context.getAuthentication())); + + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to resolve entities associated with Domain with urn %s", urn), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java index 3a751e502eb10..5453603f4cc9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,18 +21,14 @@ import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS + * platform privilege. 
*/ public class ListDomainsResolver implements DataFetcher<CompletableFuture<ListDomainsResult>> { private static final Integer DEFAULT_START = 0; @@ -43,47 +42,56 @@ public ListDomainsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListDomainsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListDomainsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - final ListDomainsInput input = bindArgument(environment.getArgument("input"), ListDomainsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final Urn parentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); + return CompletableFuture.supplyAsync( + () -> { + final ListDomainsInput input = + bindArgument(environment.getArgument("input"), ListDomainsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final Urn parentDomainUrn = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); - try { - // First, get all domain Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.DOMAIN_ENTITY_NAME, - query, - filter, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all domain Urns. + final SearchResult gmsResult = + _entityClient.search( + Constants.DOMAIN_ENTITY_NAME, + query, + filter, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Now that we have entities we can bind this to a result. - final ListDomainsResult result = new ListDomainsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setDomains(mapUnresolvedDomains(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list domains", e); - } - }); + // Now that we have entities we can bind this to a result. 
+            final ListDomainsResult result = new ListDomainsResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setDomains(
+                mapUnresolvedDomains(
+                    gmsResult.getEntities().stream()
+                        .map(SearchEntity::getEntity)
+                        .collect(Collectors.toList())));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list domains", e);
+          }
+        });
   }
 
-  // This method maps urns returned from the list endpoint into Partial Domain objects which will be resolved be a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial Domain objects which will be
+  // resolved by a separate Batch resolver.
   private List<Domain> mapUnresolvedDomains(final List<Urn> entityUrns) {
     final List<Domain> results = new ArrayList<>();
     for (final Urn urn : entityUrns) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java
index dcaa7d61ed90c..8406e19810468 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -9,51 +11,53 @@
 import com.linkedin.entity.client.EntityClient;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 
-import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME;
-
 public class ParentDomainsResolver implements DataFetcher<CompletableFuture<ParentDomainsResult>> {
 
-    private final EntityClient _entityClient;
+  private final EntityClient _entityClient;
 
-    public ParentDomainsResolver(final EntityClient entityClient) {
-        _entityClient = entityClient;
+  public ParentDomainsResolver(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public CompletableFuture<ParentDomainsResult> get(DataFetchingEnvironment environment) {
+    final QueryContext context = environment.getContext();
+    final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn());
+    final List<Entity> parentDomains = new ArrayList<>();
+    final Set<String> visitedParentUrns = new HashSet<>();
+
+    if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) {
+      throw new IllegalArgumentException(
+          String.format("Failed to resolve parents for entity type %s", urn));
     }
 
-    @Override
-    public CompletableFuture<ParentDomainsResult> get(DataFetchingEnvironment environment) {
-        final QueryContext context = environment.getContext();
-        final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn());
-        final List<Entity> parentDomains = new ArrayList<>();
-        final Set<String> visitedParentUrns = new HashSet<>();
-
-        if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) {
-            throw new IllegalArgumentException(String.format("Failed to resolve parents for entity type %s", urn));
-        }
-
-        return CompletableFuture.supplyAsync(() -> {
-            try {
-                Entity parentDomain = 
DomainUtils.getParentDomain(urn, context, _entityClient); - - while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { - parentDomains.add(parentDomain); - visitedParentUrns.add(parentDomain.getUrn()); - parentDomain = DomainUtils.getParentDomain(Urn.createFromString(parentDomain.getUrn()), context, _entityClient); - } - - final ParentDomainsResult result = new ParentDomainsResult(); - result.setCount(parentDomains.size()); - result.setDomains(parentDomains); - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load parent domains for entity %s", urn), e); + return CompletableFuture.supplyAsync( + () -> { + try { + Entity parentDomain = DomainUtils.getParentDomain(urn, context, _entityClient); + + while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { + parentDomains.add(parentDomain); + visitedParentUrns.add(parentDomain.getUrn()); + parentDomain = + DomainUtils.getParentDomain( + Urn.createFromString(parentDomain.getUrn()), context, _entityClient); } + + final ParentDomainsResult result = new ParentDomainsResult(); + result.setCount(parentDomains.size()); + result.setDomains(parentDomains); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load parent domains for entity %s", urn), e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 56a76dcb1e07f..1c52f707c61a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,19 +19,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor public class SetDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { @@ -37,49 +38,56 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); final Urn domainUrn = Urn.createFromString(environment.getArgument("domainUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateSetDomainInput( - entityUrn, - domainUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - setDomain(domains, domainUrn); + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + validateSetDomainInput(entityUrn, domainUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + setDomain(domains, domainUrn); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set Domain to resource with entity urn {}, domain urn {}: {}", entityUrn, domainUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set Domain to resource with entity urn %s, domain urn %s", entityUrn, domainUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to set Domain to resource with entity urn {}, domain urn {}: {}", + entityUrn, + domainUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to set Domain to resource with entity urn %s, domain urn %s", + entityUrn, domainUrn), + e); + } + }); } public static Boolean validateSetDomainInput( - Urn entityUrn, - Urn domainUrn, - EntityService entityService - ) { + Urn entityUrn, Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. Domain does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Domain does not exist.", + entityUrn, domainUrn)); } if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. 
Entity does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Entity does not exist.", + entityUrn, domainUrn)); } return true; @@ -90,4 +98,4 @@ private static void setDomain(Domains domains, Urn domainUrn) { newDomain.add(domainUrn); domains.setDomains(newDomain); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index 01dd4f1254f8e..b2a82ac7608d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -17,19 +20,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class UnsetDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { @@ -37,39 +38,40 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - validateUnsetDomainInput( - entityUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - unsetDomain(domains); + validateUnsetDomainInput(entityUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + unsetDomain(domains); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to unset Domains for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to unset Domains for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), + e); + } + }); } - public static Boolean validateUnsetDomainInput( - Urn entityUrn, - EntityService entityService - ) { + public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( @@ -85,4 +87,4 @@ private static void unsetDomain(@Nonnull Domains domains) { } domains.getDomains().clear(); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index dbaf6000477aa..e1b264606074c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Embed; import com.linkedin.common.urn.Urn; @@ -19,14 +23,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for updating the embed render URL for an asset. - */ +/** Resolver used for updating the embed render URL for an asset. 
*/
 @Slf4j
 @RequiredArgsConstructor
 public class UpdateEmbedResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -37,62 +34,70 @@ public class UpdateEmbedResolver implements DataFetcher<CompletableFuture<Boolea
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
 
     final QueryContext context = environment.getContext();
-    final UpdateEmbedInput input = bindArgument(environment.getArgument("input"), UpdateEmbedInput.class);
+    final UpdateEmbedInput input =
+        bindArgument(environment.getArgument("input"), UpdateEmbedInput.class);
     final Urn entityUrn = UrnUtils.getUrn(input.getUrn());
 
-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          validateUpdateEmbedInput(input, _entityService);
+          try {
+            final Embed embed =
+                (Embed)
+                    EntityUtils.getAspectFromEntity(
+                        entityUrn.toString(), EMBED_ASPECT_NAME, _entityService, new Embed());
 
-      if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) {
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      validateUpdateEmbedInput(
-          input,
-          _entityService
-      );
-      try {
-        final Embed embed = (Embed) EntityUtils.getAspectFromEntity(
-            entityUrn.toString(),
-            EMBED_ASPECT_NAME,
-            _entityService,
-            new Embed());
+            updateEmbed(embed, input);
 
-        updateEmbed(embed, input);
-
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed);
-        _entityService.ingestProposal(
-            proposal,
-            new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()),
-            false
-        );
-        return true;
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to update Embed for to resource with entity urn %s", entityUrn), e);
-      }
-    });
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed);
+            _entityService.ingestProposal(
+                proposal,
+                new AuditStamp()
+                    .setActor(UrnUtils.getUrn(context.getActorUrn()))
+                    .setTime(System.currentTimeMillis()),
+                false);
+            return true;
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format(
+                    "Failed to update Embed for resource with entity urn %s", entityUrn),
+                e);
+          }
+        });
   }
 
   /**
-   * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link IllegalArgumentException} if the input
-   * is not valid.
+   * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link
+   * IllegalArgumentException} if the input is not valid.
    *
-   * For an input to be valid, the target URN must exist.
+   * <p>For an input to be valid, the target URN must exist.
    *
    * @param input the input to validate
    * @param entityService an instance of {@link EntityService} used to validate the input.
    */
-  private static void validateUpdateEmbedInput(@Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) {
+  private static void validateUpdateEmbedInput(
+      @Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) {
     if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) {
       throw new IllegalArgumentException(
-          String.format("Failed to update embed for entity with urn %s. 
Entity does not exist!", input.getUrn())); + String.format( + "Failed to update embed for entity with urn %s. Entity does not exist!", + input.getUrn())); } } /** * Applies an instance of {@link UpdateEmbedInput} to a base instance of {@link Embed}. + * * @param embed an embed to update * @param input the updates to apply */ - private static void updateEmbed(@Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { + private static void updateEmbed( + @Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { embed.setRenderUrl(input.getRenderUrl(), SetMode.IGNORE_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index 613f97182c5dd..d2bd2f3fb8a17 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.entity.EntityService; @@ -8,12 +10,7 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for returning whether an entity exists. - */ +/** Resolver responsible for returning whether an entity exists. */ public class EntityExistsResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityService _entityService; @@ -22,7 +19,8 @@ public EntityExistsResolver(final EntityService entityService) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { String entityUrnString = bindArgument(environment.getArgument("urn"), String.class); // resolver can be used as its own endpoint or when hydrating an entity if (entityUrnString == null && environment.getSource() != null) { @@ -31,12 +29,14 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) Objects.requireNonNull(entityUrnString, "Entity urn must not be null!"); final Urn entityUrn = Urn.createFromString(entityUrnString); - return CompletableFuture.supplyAsync(() -> { - try { - return _entityService.exists(entityUrn); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to check whether entity %s exists", entityUrn.toString())); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _entityService.exists(entityUrn); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to check whether entity %s exists", entityUrn.toString())); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java index d8190a160f268..751c6096de1a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java @@ -9,17 +9,16 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPrivileges; -import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.extern.slf4j.Slf4j; - import java.util.Collections; import java.util.concurrent.CompletableFuture; +import lombok.extern.slf4j.Slf4j; @Slf4j public class EntityPrivilegesResolver implements DataFetcher<CompletableFuture<EntityPrivileges>> { @@ -36,25 +35,28 @@ public CompletableFuture<EntityPrivileges> get(DataFetchingEnvironment environme final String urnString = ((Entity) environment.getSource()).getUrn(); final Urn urn = UrnUtils.getUrn(urnString); - return CompletableFuture.supplyAsync(() -> { - switch (urn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return getGlossaryTermPrivileges(urn, context); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return getGlossaryNodePrivileges(urn, context); - case Constants.DATASET_ENTITY_NAME: - return getDatasetPrivileges(urn, context); - case Constants.CHART_ENTITY_NAME: - return getChartPrivileges(urn, context); - case Constants.DASHBOARD_ENTITY_NAME: - return getDashboardPrivileges(urn, context); - case Constants.DATA_JOB_ENTITY_NAME: - return getDataJobPrivileges(urn, context); - default: - log.warn("Tried to get entity privileges for entity type {} but nothing is implemented for it yet", urn.getEntityType()); - return new EntityPrivileges(); - } - }); + return CompletableFuture.supplyAsync( + () -> { + switch (urn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return getGlossaryTermPrivileges(urn, context); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return getGlossaryNodePrivileges(urn, context); + case Constants.DATASET_ENTITY_NAME: + return getDatasetPrivileges(urn, context); + case Constants.CHART_ENTITY_NAME: + return getChartPrivileges(urn, context); + case Constants.DASHBOARD_ENTITY_NAME: + return getDashboardPrivileges(urn, context); + case Constants.DATA_JOB_ENTITY_NAME: + return getDataJobPrivileges(urn, context); + default: + log.warn( + "Tried to get entity privileges for entity type {} but nothing is implemented for it yet", + urn.getEntityType()); + return new EntityPrivileges(); + } + }); } private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext context) { @@ -66,7 +68,8 @@ private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext con } Urn parentNodeUrn = GlossaryUtils.getParentUrn(termUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; @@ -80,25 +83,29 @@ private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext con result.setCanManageChildren(true); return result; } - Boolean canManageChildren = 
GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); + Boolean canManageChildren = + GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); result.setCanManageChildren(canManageChildren); Urn parentNodeUrn = GlossaryUtils.getParentUrn(nodeUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; } private boolean canEditEntityLineage(Urn urn, QueryContext context) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup orPrivilegesGroup = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup orPrivilegesGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 69b5b14edfbee..535dbbf70a4cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; @@ -9,22 +12,18 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -36,70 +35,89 @@ public class AddRelatedTermsResolver implements DataFetcher<CompletableFuture<Bo public 
CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List<Urn> termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - validateRelatedTermsInput(urn, termUrns); - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - glossaryRelatedTerms = new GlossaryRelatedTerms(); - } - - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List<Urn> termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + validateRelatedTermsInput(urn, termUrns); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + glossaryRelatedTerms = new GlossaryRelatedTerms(); + } + + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add related terms to %s", input.getUrn()), e); } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add related terms to %s", input.getUrn()), e); - } - } - 
throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } public Boolean validateRelatedTermsInput(Urn urn, List<Urn> termUrns) { - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); } for (Urn termUrn : termUrns) { if (termUrn.equals(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. Tried to create related term with itself.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. Tried to create related term with itself.", urn)); } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); } else if (!_entityService.exists(termUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", urn, termUrn)); } } return true; } - private Boolean updateRelatedTerms(List<Urn> termUrns, GlossaryTermUrnArray existingTermUrns, Urn urn, GlossaryRelatedTerms glossaryRelatedTerms, Urn actor) { + private Boolean updateRelatedTerms( + List<Urn> termUrns, + GlossaryTermUrnArray existingTermUrns, + Urn urn, + GlossaryRelatedTerms glossaryRelatedTerms, + Urn actor) { List<Urn> termsToAdd = new ArrayList<>(); for (Urn termUrn : termUrns) { if (existingTermUrns.stream().anyMatch(association -> association.equals(termUrn))) { @@ -117,7 +135,12 @@ private Boolean updateRelatedTerms(List<Urn> termUrns, GlossaryTermUrnArray exis existingTermUrns.add(newUrn); } - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); return true; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index cc0ab4e03a4e8..815b4662e1ed2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import 
com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -19,18 +24,11 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,41 +41,67 @@ public class CreateGlossaryNodeResolver implements DataFetcher<CompletableFuture public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - try { - final GlossaryNodeKey key = new GlossaryNodeKey(); - - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Node already exists!"); + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + try { + final GlossaryNodeKey key = new GlossaryNodeKey(); + + final String id = + input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Node already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_NODE_ENTITY_NAME, + GLOSSARY_NODE_INFO_ASPECT_NAME, + mapGlossaryNodeInfo(input)); + + String glossaryNodeUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryNodeUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryNodeUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryNode with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNode with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, mapGlossaryNodeInfo(input)); - - String glossaryNodeUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - - OwnerUtils.addCreatorAsOwner(context, glossaryNodeUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryNodeUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryNode with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryNode with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput input) { @@ -90,10 +114,12 @@ private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index ad69e0c5876e2..90979fe918f71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,9 +30,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; @@ -37,12 +39,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -57,42 +55,69 @@ public class CreateGlossaryTermResolver implements DataFetcher<CompletableFuture public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - // Ensure there isn't another glossary term with the same name at this level of the glossary - validateGlossaryTermName(parentNode, context, input.getName()); - try { - final GlossaryTermKey key = new GlossaryTermKey(); - - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Term already exists!"); - } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, mapGlossaryTermInfo(input)); - - String glossaryTermUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + // Ensure there isn't another glossary term with the same name at this level of the + // glossary + validateGlossaryTermName(parentNode, context, input.getName()); + try { + final GlossaryTermKey key = new GlossaryTermKey(); + + final String id = + input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Term already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_TERM_ENTITY_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + mapGlossaryTermInfo(input)); + + String glossaryTermUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryTermUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryTermUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryTerm with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryTerm with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - OwnerUtils.addCreatorAsOwner(context, glossaryTermUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryTermUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryTerm with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryTerm with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput input) { @@ -106,7 +131,10 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; @@ -114,25 +142,22 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp private Filter buildParentNodeFilter(final Urn parentNodeUrn) { final Map<String, String> criterionMap = new HashMap<>(); - criterionMap.put(PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); + criterionMap.put( + PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); return QueryUtils.newFilter(criterionMap); } private Map<Urn, EntityResponse> getTermsWithSameParent(Urn parentNode, QueryContext context) { try { final Filter filter = buildParentNodeFilter(parentNode); - final SearchResult searchResult = _entityClient.filter( - GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + _entityClient.filter( + GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final List<Urn> termUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List<Urn> termUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); return _entityClient.batchGetV2( GLOSSARY_TERM_ENTITY_NAME, @@ -147,14 +172,17 @@ private Map<Urn, EntityResponse> getTermsWithSameParent(Urn parentNode, QueryCon private void validateGlossaryTermName(Urn parentNode, QueryContext context, String name) { Map<Urn, EntityResponse> entities = getTermsWithSameParent(parentNode, context); - entities.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); - GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); - if (termInfo.hasName() && termInfo.getName().equals(name)) { - throw new IllegalArgumentException("Glossary Term with this name already exists at this level of the Business Glossary"); - } - } - }); + entities.forEach( + (urn, entityResponse) -> { + if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); + if (termInfo.hasName() && termInfo.getName().equals(name)) { + throw new IllegalArgumentException( + "Glossary Term with this name already exists at this level of the Business Glossary"); + } + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index 0929c7138528d..f623f0e34b366 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -11,50 +11,59 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DeleteGlossaryEntityResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteGlossaryEntityResolver(final EntityClient entityClient, EntityService entityService) { + public DeleteGlossaryEntityResolver( + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("urn")); final Urn parentNodeUrn = GlossaryUtils.getParentUrn(entityUrn, context, _entityClient); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { - throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); - } - - try { - _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + if (!_entityService.exists(entityUrn)) { + throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); + } - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { try { - _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for glossary entity with urn %s", + entityUrn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for glossary entity with urn %s", entityUrn), e); + throw new RuntimeException( + String.format( + "Failed to perform delete against glossary entity with urn %s", entityUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against glossary entity with urn %s", entityUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java index 1457a308c8774..e7990b1a343d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryNodesResolver implements DataFetcher<CompletableFuture<GetRootGlossaryNodesResult>> { +public class GetRootGlossaryNodesResolver + implements DataFetcher<CompletableFuture<GetRootGlossaryNodesResult>> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryNodesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<GetRootGlossaryNodesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<GetRootGlossaryNodesResult> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsNodesResult = _entityClient.filter( - Constants.GLOSSARY_NODE_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); - - final List<Urn> glossaryNodeUrns = gmsNodesResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); - - final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); - result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); - result.setCount(glossaryNodeUrns.size()); - result.setStart(gmsNodesResult.getFrom()); - result.setTotal(gmsNodesResult.getNumEntities()); - - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsNodesResult = + _entityClient.filter( + Constants.GLOSSARY_NODE_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); + + final List<Urn> glossaryNodeUrns = + 
gmsNodesResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); + result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); + result.setCount(glossaryNodeUrns.size()); + result.setStart(gmsNodesResult.getFrom()); + result.setTotal(gmsNodesResult.getNumEntities()); + + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } @@ -101,4 +103,3 @@ private List<GlossaryNode> mapUnresolvedGlossaryNodes(final List<Urn> entityUrns return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java index f7684e477f830..40e4363dcff93 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryTermsResolver implements DataFetcher<CompletableFuture<GetRootGlossaryTermsResult>> { +public class GetRootGlossaryTermsResolver + implements DataFetcher<CompletableFuture<GetRootGlossaryTermsResult>> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryTermsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<GetRootGlossaryTermsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<GetRootGlossaryTermsResult> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); + return CompletableFuture.supplyAsync( + () -> { + final 
GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsTermsResult = _entityClient.filter( - Constants.GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsTermsResult = + _entityClient.filter( + Constants.GLOSSARY_TERM_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); - final List<Urn> glossaryTermUrns = gmsTermsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List<Urn> glossaryTermUrns = + gmsTermsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); - result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); - result.setCount(glossaryTermUrns.size()); - result.setStart(gmsTermsResult.getFrom()); - result.setTotal(gmsTermsResult.getNumEntities()); + final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); + result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); + result.setCount(glossaryTermUrns.size()); + result.setStart(gmsTermsResult.getFrom()); + result.setTotal(gmsTermsResult.getNumEntities()); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); - } - }); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java index d513d70f39f58..850469f996515 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -14,18 +18,13 @@ import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; - -public class ParentNodesResolver implements DataFetcher<CompletableFuture<ParentNodesResult>> { +public class ParentNodesResolver implements DataFetcher<CompletableFuture<ParentNodesResult>> { private final EntityClient _entityClient; @@ -36,19 +35,23 @@ public ParentNodesResolver(final EntityClient entityClient) { private void aggregateParentNodes(List<GlossaryNode> nodes, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(dataMap); if (nodeInfo.hasParentNode()) { Urn parentNodeUrn = nodeInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); nodes.add(mappedNode); @@ -64,19 +67,23 @@ private void aggregateParentNodes(List<GlossaryNode> nodes, String urn, QueryCon private GlossaryNode getTermParentNode(String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); if (termInfo.hasParentNode()) { Urn parentNodeUrn = termInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + 
parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); return mappedNode; @@ -95,27 +102,28 @@ public CompletableFuture<ParentNodesResult> get(DataFetchingEnvironment environm final String urn = ((Entity) environment.getSource()).getUrn(); final List<GlossaryNode> nodes = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - final String type = Urn.createFromString(urn).getEntityType(); + return CompletableFuture.supplyAsync( + () -> { + try { + final String type = Urn.createFromString(urn).getEntityType(); - if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { - final GlossaryNode parentNode = getTermParentNode(urn, context); - if (parentNode != null) { - nodes.add(parentNode); - aggregateParentNodes(nodes, parentNode.getUrn(), context); - } - } else { - aggregateParentNodes(nodes, urn, context); - } + if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { + final GlossaryNode parentNode = getTermParentNode(urn, context); + if (parentNode != null) { + nodes.add(parentNode); + aggregateParentNodes(nodes, parentNode.getUrn(), context); + } + } else { + aggregateParentNodes(nodes, urn, context); + } - final ParentNodesResult result = new ParentNodesResult(); - result.setCount(nodes.size()); - result.setNodes(nodes); - return result; - } catch (DataHubGraphQLException | URISyntaxException e) { - throw new RuntimeException(("Failed to load parent nodes")); - } - }); + final ParentNodesResult result = new ParentNodesResult(); + result.setCount(nodes.size()); + result.setNodes(nodes); + return result; + } catch (DataHubGraphQLException | URISyntaxException e) { + throw new RuntimeException(("Failed to load parent nodes")); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index 417ef4292d0f7..8c9b792b74e0d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,15 +17,11 @@ import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -34,57 +33,82 @@ public class RemoveRelatedTermsResolver implements DataFetcher<CompletableFuture public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final 
RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List<Urn> termUrnsToRemove = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List<Urn> termUrnsToRemove = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); - } + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", + urn, urn)); + } - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - throw new RuntimeException(String.format("Related Terms for this Urn do not exist: %s", urn)); - } + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + throw new RuntimeException( + String.format("Related Terms for this Urn do not exist: %s", urn)); + } - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + urn, + 
Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to remove related terms from %s", input.getUrn()), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to removes related terms from %s", input.getUrn()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java index daff0962bc2e8..acfc2cd14f8d4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -17,13 +20,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver that adds a set of native members to a group, if the user and group both exist. - */ +/** Resolver that adds a set of native members to a group, if the user and group both exist.
*/ public class AddGroupMembersResolver implements DataFetcher<CompletableFuture<Boolean>> { private final GroupService _groupService; @@ -33,9 +30,11 @@ public AddGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { - final AddGroupMembersInput input = bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); + final AddGroupMembersInput input = + bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -52,30 +51,37 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) String.format("Failed to add members to group %s. Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership for group %s when adding group members", groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually added to it", - groupUrnStr)); - } + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), context.getAuthentication()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership for group %s when adding group members", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually added to it", + groupUrnStr)); + } - try { - // Add each user to the group - final List<Urn> userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - userUrnList.forEach(userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add group members to group %s", groupUrnStr)); - } - }); + try { + // Add each user to the group + final List<Urn> userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + userUrnList.forEach( + userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add group members to group %s", groupUrnStr)); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index 75f2a61287ecc..e487ee00608d4 
100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,10 +14,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -// Currently, this resolver will override the group details, but not group membership, if a group with the same name already exists. +// Currently, this resolver will override the group details, but not group membership, if a group +// with the same name already exists. public class CreateGroupResolver implements DataFetcher<CompletableFuture<String>> { private final GroupService _groupService; @@ -33,19 +33,22 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - final CreateGroupInput input = bindArgument(environment.getArgument("input"), CreateGroupInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, check if the group already exists. - // Create the Group key. - final CorpGroupKey key = new CorpGroupKey(); - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". - return _groupService.createNativeGroup(key, input.getName(), input.getDescription(), authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create group", e); - } - }); + final CreateGroupInput input = + bindArgument(environment.getArgument("input"), CreateGroupInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, check if the group already exists. + // Create the Group key. + final CorpGroupKey key = new CorpGroupKey(); + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". 
+ return _groupService.createNativeGroup( + key, input.getName(), input.getDescription(), authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create group", e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index d0874b21fb106..93582fb956bd8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; @@ -14,9 +16,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class EntityCountsResolver implements DataFetcher<CompletableFuture<EntityCountResults>> { private final EntityClient _entityClient; @@ -27,31 +26,42 @@ public EntityCountsResolver(final EntityClient entityClient) { @Override @WithSpan - public CompletableFuture<EntityCountResults> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<EntityCountResults> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final EntityCountInput input = bindArgument(environment.getArgument("input"), EntityCountInput.class); - final EntityCountResults results = new EntityCountResults(); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all counts - Map<String, Long> gmsResult = _entityClient.batchGetTotalEntityCount( - input.getTypes().stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), context.getAuthentication()); - - // bind to a result. - List<EntityCountResult> resultList = gmsResult.entrySet().stream().map(entry -> { - EntityCountResult result = new EntityCountResult(); - result.setCount(Math.toIntExact(entry.getValue())); - result.setEntityType(EntityTypeMapper.getType(entry.getKey())); - return result; - }).collect(Collectors.toList()); - results.setCounts(resultList); - return results; - } catch (Exception e) { - throw new RuntimeException("Failed to get entity counts", e); - } - }); + final EntityCountInput input = + bindArgument(environment.getArgument("input"), EntityCountInput.class); + final EntityCountResults results = new EntityCountResults(); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all counts + Map<String, Long> gmsResult = + _entityClient.batchGetTotalEntityCount( + input.getTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + context.getAuthentication()); + + // bind to a result. 
+ List<EntityCountResult> resultList = + gmsResult.entrySet().stream() + .map( + entry -> { + EntityCountResult result = new EntityCountResult(); + result.setCount(Math.toIntExact(entry.getValue())); + result.setEntityType(EntityTypeMapper.getType(entry.getKey())); + return result; + }) + .collect(Collectors.toList()); + results.setCounts(resultList); + return results; + } catch (Exception e) { + throw new RuntimeException("Failed to get entity counts", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index 67cc84a33a954..a6ad8698679f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -24,10 +27,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListGroupsResolver implements DataFetcher<CompletableFuture<ListGroupsResult>> { private static final Integer DEFAULT_START = 0; @@ -41,51 +40,68 @@ public ListGroupsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListGroupsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListGroupsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListGroupsInput input = bindArgument(environment.getArgument("input"), ListGroupsInput.class); + final ListGroupsInput input = + bindArgument(environment.getArgument("input"), ListGroupsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all group Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_GROUP_ENTITY_NAME, + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all group Urns. + final SearchResult gmsResult = + _entityClient.search( + CORP_GROUP_ENTITY_NAME, query, null, - new SortCriterion().setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, count, context.getAuthentication(), + new SortCriterion() + .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), new SearchFlags().setFulltext(true)); - // Then, get hydrate all groups. 
- final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), null, context.getAuthentication()); + // Then, hydrate all groups. + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - // Now that we have entities we can bind this to a result. - final ListGroupsResult result = new ListGroupsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setGroups(mapUnresolvedGroups(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list groups", e); - } - }); + // Now that we have entities we can bind this to a result. + final ListGroupsResult result = new ListGroupsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setGroups( + mapUnresolvedGroups( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list groups", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - // This method maps urns returned from the list endpoint into Partial Group objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Group objects which will be + // resolved by a separate Batch resolver.
private List<CorpGroup> mapUnresolvedGroups(final List<Urn> entityUrns) { final List<CorpGroup> results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java index 287b4aa7b5dbd..9fb63b3eb463d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -17,10 +20,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class RemoveGroupMembersResolver implements DataFetcher<CompletableFuture<Boolean>> { private final GroupService _groupService; @@ -30,9 +29,11 @@ public RemoveGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { - final RemoveGroupMembersInput input = bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); + final RemoveGroupMembersInput input = + bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -43,37 +44,42 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) } final Urn groupUrn = Urn.createFromString(groupUrnStr); - final List<Urn> userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + final List<Urn> userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); if (!_groupService.groupExists(groupUrn)) { // The group doesn't exist. throw new DataHubGraphQLException( - String.format("Failed to add remove members from group %s. Group does not exist.", groupUrnStr), + String.format( + "Failed to remove members from group %s. 
Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership when removing group members from group %s", - groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually removed from it", - groupUrnStr)); - } - try { - _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), context.getAuthentication()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership when removing group members from group %s", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually removed from it", + groupUrnStr)); + } + try { + _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java index 99481868e30ce..e69d6b471f3c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java @@ -10,10 +10,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class RemoveGroupResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -24,30 +21,39 @@ public RemoveGroupResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { final String groupUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(groupUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { try { - 
_entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for group with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for group with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against group with urn %s", groupUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against group with urn %s", groupUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java index 6a4af7563a8d8..036780d446701 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java @@ -1,25 +1,30 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; + import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class IngestionAuthUtils { public static boolean canManageIngestion(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, + ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), + authorizer); } public static boolean canManageSecrets(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); } - private IngestionAuthUtils() { } + private IngestionAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java index 1140c031f1d35..ffa9dcf42d176 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java @@ -25,11 +25,11 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IngestionResolverUtils { - public static List<ExecutionRequest> mapExecutionRequests(final Collection<EntityResponse> requests) { + public static List<ExecutionRequest> mapExecutionRequests( + final Collection<EntityResponse> requests) { List<ExecutionRequest> result = new ArrayList<>(); for (final EntityResponse request : requests) { result.add(mapExecutionRequest(request)); @@ -46,10 +46,13 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe result.setId(entityUrn.getId()); // Map input aspect. Must be present. - final EnvelopedAspect envelopedInput = aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); + final EnvelopedAspect envelopedInput = + aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); if (envelopedInput != null) { - final ExecutionRequestInput executionRequestInput = new ExecutionRequestInput(envelopedInput.getValue().data()); - final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); + final ExecutionRequestInput executionRequestInput = + new ExecutionRequestInput(envelopedInput.getValue().data()); + final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = + new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); inputResult.setTask(executionRequestInput.getTask()); if (executionRequestInput.hasSource()) { @@ -63,23 +66,29 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe } // Map result aspect. Optional. - final EnvelopedAspect envelopedResult = aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + final EnvelopedAspect envelopedResult = + aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); if (envelopedResult != null) { - final ExecutionRequestResult executionRequestResult = new ExecutionRequestResult(envelopedResult.getValue().data()); + final ExecutionRequestResult executionRequestResult = + new ExecutionRequestResult(envelopedResult.getValue().data()); result.setResult(mapExecutionRequestResult(executionRequestResult)); } return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource + mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); result.setType(execRequestSource.getType()); return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult + mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = + new 
com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); result.setStatus(execRequestResult.getStatus()); result.setStartTimeMs(execRequestResult.getStartTimeMs()); result.setDurationMs(execRequestResult.getDurationMs()); @@ -90,7 +99,8 @@ public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapE return result; } - public static StructuredReport mapStructuredReport(final StructuredExecutionReport structuredReport) { + public static StructuredReport mapStructuredReport( + final StructuredExecutionReport structuredReport) { StructuredReport structuredReportResult = new StructuredReport(); structuredReportResult.setType(structuredReport.getType()); structuredReportResult.setSerializedValue(structuredReport.getSerializedValue()); @@ -98,7 +108,8 @@ public static StructuredReport mapStructuredReport(final StructuredExecutionRepo return structuredReportResult; } - public static List<IngestionSource> mapIngestionSources(final Collection<EntityResponse> entities) { + public static List<IngestionSource> mapIngestionSources( + final Collection<EntityResponse> entities) { final List<IngestionSource> results = new ArrayList<>(); for (EntityResponse response : entities) { try { @@ -118,16 +129,19 @@ public static IngestionSource mapIngestionSource(final EntityResponse ingestionS final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); if (envelopedInfo == null) { - throw new IllegalStateException("No ingestion source info aspect exists for urn: " + entityUrn); + throw new IllegalStateException( + "No ingestion source info aspect exists for urn: " + entityUrn); } // Bind into a strongly typed object. - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); return mapIngestionSourceInfo(entityUrn, ingestionSourceInfo); } - public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHubIngestionSourceInfo info) { + public static IngestionSource mapIngestionSourceInfo( + final Urn urn, final DataHubIngestionSourceInfo info) { final IngestionSource result = new IngestionSource(); result.setUrn(urn.toString()); result.setName(info.getName()); @@ -139,29 +153,30 @@ public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHu return result; } - public static IngestionConfig mapIngestionSourceConfig(final DataHubIngestionSourceConfig config) { + public static IngestionConfig mapIngestionSourceConfig( + final DataHubIngestionSourceConfig config) { final IngestionConfig result = new IngestionConfig(); result.setRecipe(config.getRecipe()); result.setVersion(config.getVersion()); result.setExecutorId(config.getExecutorId()); result.setDebugMode(config.isDebugMode()); if (config.getExtraArgs() != null) { - List<StringMapEntry> extraArgs = config.getExtraArgs() - .keySet() - .stream() - .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) - .collect(Collectors.toList()); + List<StringMapEntry> extraArgs = + config.getExtraArgs().keySet().stream() + .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) + .collect(Collectors.toList()); result.setExtraArgs(extraArgs); } return result; } - public static IngestionSchedule mapIngestionSourceSchedule(final DataHubIngestionSourceSchedule schedule) { + public static IngestionSchedule mapIngestionSourceSchedule( + final DataHubIngestionSourceSchedule schedule) { 
final IngestionSchedule result = new IngestionSchedule(); result.setInterval(schedule.getInterval()); result.setTimezone(schedule.getTimezone()); return result; } - private IngestionResolverUtils() { } + private IngestionResolverUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java index 7f9cb6176989f..e346f2b077c98 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -22,15 +26,9 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Cancels a requested ingestion execution by emitting a KILL signal. - */ -public class CancelIngestionExecutionRequestResolver implements DataFetcher<CompletableFuture<String>> { +/** Cancels a requested ingestion execution by emitting a KILL signal. 
*/ +public class CancelIngestionExecutionRequestResolver + implements DataFetcher<CompletableFuture<String>> { private static final String KILL_EXECUTION_REQUEST_SIGNAL = "KILL"; @@ -44,45 +42,58 @@ public CancelIngestionExecutionRequestResolver(final EntityClient entityClient) public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { - if (IngestionAuthUtils.canManageIngestion(context)) { + final CancelIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); - final CancelIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); + try { + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map<Urn, EntityResponse> response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); - try { - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map<Urn, EntityResponse> response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", ingestionSourceUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = - response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - // Build the arguments map. - final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); - execSignal.setSignal(KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. - execSignal.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execSignal.setCreatedAt(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(Urn.createFromString(context.getActorUrn())) - ); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn( - input.getExecutionRequestUrn()), EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, execSignal); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to submit cancel signal %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Build the arguments map. 
+ final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); + execSignal.setSignal( + KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. + execSignal.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execSignal.setCreatedAt( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(Urn.createFromString(context.getActorUrn()))); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(input.getExecutionRequestUrn()), + EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, + execSignal); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to submit cancel signal %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java index ea20b837e0a1f..8ef5447cd9433 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; @@ -30,15 +34,9 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ -public class CreateIngestionExecutionRequestResolver implements DataFetcher<CompletableFuture<String>> { +/** Creates an on-demand ingestion execution request. 
*/ +public class CreateIngestionExecutionRequestResolver + implements DataFetcher<CompletableFuture<String>> { private static final String RUN_INGEST_TASK_NAME = "RUN_INGEST"; private static final String MANUAL_EXECUTION_SOURCE_NAME = "MANUAL_INGESTION_SOURCE"; @@ -49,7 +47,8 @@ public class CreateIngestionExecutionRequestResolver implements DataFetcher<Comp private final EntityClient _entityClient; private final IngestionConfiguration _ingestionConfiguration; - public CreateIngestionExecutionRequestResolver(final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { + public CreateIngestionExecutionRequestResolver( + final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { _entityClient = entityClient; _ingestionConfiguration = ingestionConfiguration; } @@ -58,86 +57,108 @@ public CreateIngestionExecutionRequestResolver(final EntityClient entityClient, public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final CreateIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - // Fetch the original ingestion source - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map<Urn, EntityResponse> response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); - - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - - if (!ingestionSourceInfo.getConfig().hasRecipe()) { - throw new DataHubGraphQLException( - String.format("Failed to find valid ingestion source with urn %s. Missing recipe", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - // Build the arguments map. - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task - execInput.setSource( - new ExecutionRequestSource().setType(MANUAL_EXECUTION_SOURCE_NAME).setIngestionSource(ingestionSourceUrn)); - execInput.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map<String, String> arguments = new HashMap<>(); - String recipe = ingestionSourceInfo.getConfig().getRecipe(); - recipe = injectRunId(recipe, executionRequestUrn.toString()); - recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); - arguments.put(RECIPE_ARG_NAME, recipe); - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().hasVersion() - ? 
ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion() - ); - if (ingestionSourceInfo.getConfig().hasVersion()) { - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); - } - String debugMode = "false"; - if (ingestionSourceInfo.getConfig().hasDebugMode()) { - debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; - } - if (ingestionSourceInfo.getConfig().hasExtraArgs()) { - arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final CreateIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + // Fetch the original ingestion source + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map<Urn, EntityResponse> response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + + if (!ingestionSourceInfo.getConfig().hasRecipe()) { + throw new DataHubGraphQLException( + String.format( + "Failed to find valid ingestion source with urn %s. Missing recipe", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + // Build the arguments map. + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task + execInput.setSource( + new ExecutionRequestSource() + .setType(MANUAL_EXECUTION_SOURCE_NAME) + .setIngestionSource(ingestionSourceUrn)); + execInput.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map<String, String> arguments = new HashMap<>(); + String recipe = ingestionSourceInfo.getConfig().getRecipe(); + recipe = injectRunId(recipe, executionRequestUrn.toString()); + recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); + arguments.put(RECIPE_ARG_NAME, recipe); + arguments.put( + VERSION_ARG_NAME, + ingestionSourceInfo.getConfig().hasVersion() + ? ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); + if (ingestionSourceInfo.getConfig().hasVersion()) { + arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); + } + String debugMode = "false"; + if (ingestionSourceInfo.getConfig().hasDebugMode()) { + debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? 
"true" : "false"; + } + if (ingestionSourceInfo.getConfig().hasExtraArgs()) { + arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + } + arguments.put(DEBUG_MODE_ARG_NAME, debugMode); + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new ingestion execution request %s", input), e); + } } - arguments.put(DEBUG_MODE_ARG_NAME, debugMode); - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, - EXECUTION_REQUEST_ENTITY_NAME, EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new ingestion execution request %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Injects an override run id into a recipe for tracking purposes. Any existing run id will be overwritten. + * Injects an override run id into a recipe for tracking purposes. Any existing run id will be + * overwritten. * - * TODO: Determine if this should be handled in the executor itself. + * <p>TODO: Determine if this should be handled in the executor itself. * * @param runId the run id to place into the recipe * @return a modified recipe JSON string @@ -149,7 +170,8 @@ private String injectRunId(final String originalJson, final String runId) { return obj.toString(); } catch (JSONException e) { // This should ideally never be hit. 
- throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided."); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided."); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java index 1886db62ae450..2505ce28c5c2b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.linkedin.metadata.config.IngestionConfiguration; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -10,26 +13,19 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.execution.ExecutionRequestInput; import com.linkedin.execution.ExecutionRequestSource; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.IngestionUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ +/** Creates an on-demand ingestion execution request. */ public class CreateTestConnectionRequestResolver implements DataFetcher<CompletableFuture<String>> { private static final String TEST_CONNECTION_TASK_NAME = "TEST_CONNECTION"; @@ -41,7 +37,8 @@ public class CreateTestConnectionRequestResolver implements DataFetcher<Completa private final EntityClient _entityClient; private final IngestionConfiguration _ingestionConfiguration; - public CreateTestConnectionRequestResolver(final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { + public CreateTestConnectionRequestResolver( + final EntityClient entityClient, final IngestionConfiguration ingestionConfiguration) { _entityClient = entityClient; _ingestionConfiguration = ingestionConfiguration; } @@ -50,41 +47,54 @@ public CreateTestConnectionRequestResolver(final EntityClient entityClient, fina public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - final CreateTestConnectionRequestInput input = - bindArgument(environment.getArgument("input"), CreateTestConnectionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(TEST_CONNECTION_TASK_NAME); - execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); - execInput.setExecutorId(DEFAULT_EXECUTOR_ID); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map<String, String> arguments = new HashMap<>(); - arguments.put(RECIPE_ARG_NAME, IngestionUtils.injectPipelineName(input.getRecipe(), executionRequestUrn.toString())); - if (input.getVersion() != null) { - arguments.put(VERSION_ARG_NAME, input.getVersion()); - } - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, EXECUTION_REQUEST_ENTITY_NAME, - EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new test ingestion connection request %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + final CreateTestConnectionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateTestConnectionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(TEST_CONNECTION_TASK_NAME); + execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); + execInput.setExecutorId(DEFAULT_EXECUTOR_ID); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map<String, String> arguments = new HashMap<>(); + arguments.put( + RECIPE_ARG_NAME, + IngestionUtils.injectPipelineName( + input.getRecipe(), executionRequestUrn.toString())); + if (input.getVersion() != null) { + arguments.put(VERSION_ARG_NAME, input.getVersion()); + } + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to create new test ingestion connection request %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java index 
8880330d63495..722ffe3aba6b8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java @@ -19,12 +19,10 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Retrieves an Ingestion Execution Request by primary key (urn). - */ +/** Retrieves an Ingestion Execution Request by primary key (urn). */ @Slf4j -public class GetIngestionExecutionRequestResolver implements DataFetcher<CompletableFuture<ExecutionRequest>> { +public class GetIngestionExecutionRequestResolver + implements DataFetcher<CompletableFuture<ExecutionRequest>> { private final EntityClient _entityClient; @@ -33,32 +31,40 @@ public GetIngestionExecutionRequestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ExecutionRequest> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ExecutionRequest> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch specific execution request - final Urn urn = Urn.createFromString(urnStr); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No execution request found - throw new DataHubGraphQLException(String.format("Failed to find Execution Request with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Execution request found - return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve execution request", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch specific execution request + final Urn urn = Urn.createFromString(urnStr); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No execution request found + throw new DataHubGraphQLException( + String.format("Failed to find Execution Request with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Execution request found + return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve execution request", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index c72f273a9027e..01100a24d6b15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -29,11 +29,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. */ @Slf4j -public class IngestionSourceExecutionRequestsResolver implements DataFetcher<CompletableFuture<IngestionSourceExecutionRequests>> { +public class IngestionSourceExecutionRequestsResolver + implements DataFetcher<CompletableFuture<IngestionSourceExecutionRequests>> { private static final String INGESTION_SOURCE_FIELD_NAME = "ingestionSource"; private static final String REQUEST_TIME_MS_FIELD_NAME = "requestTimeMs"; @@ -45,64 +44,77 @@ public IngestionSourceExecutionRequestsResolver(final EntityClient entityClient) } @Override - public CompletableFuture<IngestionSourceExecutionRequests> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<IngestionSourceExecutionRequests> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final String urn = ((IngestionSource) environment.getSource()).getUrn(); - final Integer start = environment.getArgument("start") != null ? environment.getArgument("start") : 0; - final Integer count = environment.getArgument("count") != null ? environment.getArgument("count") : 10; + final Integer start = + environment.getArgument("start") != null ? environment.getArgument("start") : 0; + final Integer count = + environment.getArgument("count") != null ? environment.getArgument("count") : 10; - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + try { - try { + // 1. Fetch the related edges + final Criterion filterCriterion = + new Criterion() + .setField(INGESTION_SOURCE_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(urn); - // 1. 
Fetch the related edges - final Criterion filterCriterion = new Criterion() - .setField(INGESTION_SOURCE_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(urn); + final SearchResult executionsSearchResult = + _entityClient.filter( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + new SortCriterion() + .setField(REQUEST_TIME_MS_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication()); - final SearchResult executionsSearchResult = _entityClient.filter( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - new SortCriterion().setField(REQUEST_TIME_MS_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication() - ); + // 2. Batch fetch the related ExecutionRequests + final Set<Urn> relatedExecRequests = + executionsSearchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); - // 2. Batch fetch the related ExecutionRequests - final Set<Urn> relatedExecRequests = executionsSearchResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + relatedExecRequests, + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - relatedExecRequests, - ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - - // 3. Map the GMS ExecutionRequests into GraphQL Execution Requests - final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); - result.setStart(executionsSearchResult.getFrom()); - result.setCount(executionsSearchResult.getPageSize()); - result.setTotal(executionsSearchResult.getNumEntities()); - result.setExecutionRequests(IngestionResolverUtils.mapExecutionRequests( - executionsSearchResult.getEntities() - .stream() - .map(searchResult -> entities.get(searchResult.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()) - )); - return result; - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve executions associated with ingestion source with urn %s", urn), e); - } - }); + // 3. 
Map the GMS ExecutionRequests into GraphQL Execution Requests + final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); + result.setStart(executionsSearchResult.getFrom()); + result.setCount(executionsSearchResult.getPageSize()); + result.setTotal(executionsSearchResult.getNumEntities()); + result.setExecutionRequests( + IngestionResolverUtils.mapExecutionRequests( + executionsSearchResult.getEntities().stream() + .map(searchResult -> entities.get(searchResult.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve executions associated with ingestion source with urn %s", + urn), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 05fcacf7c0946..0b909dee51374 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; @@ -7,11 +9,8 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class RollbackIngestionResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; @@ -20,33 +19,36 @@ public RollbackIngestionResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - final RollbackIngestionInput input = bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); - final String runId = input.getRunId(); + final RollbackIngestionInput input = + bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); + final String runId = input.getRunId(); - rollbackIngestion(runId, context); - return true; - }); + rollbackIngestion(runId, context); + return true; + }); } - public CompletableFuture<Boolean> rollbackIngestion(final String runId, final QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to rollback ingestion execution", e); - } - }); - + public CompletableFuture<Boolean> rollbackIngestion( + final String runId, final QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.rollbackIngestion(runId, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to rollback ingestion execution", e); + } + }); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index e1745031d9dae..577780e53ce86 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -17,23 +21,16 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the MANAGE_SECRETS privilege. + * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the + * MANAGE_SECRETS privilege. 
*/ public class CreateSecretResolver implements DataFetcher<CompletableFuture<String>> { private final EntityClient _entityClient; private final SecretService _secretService; - public CreateSecretResolver( - final EntityClient entityClient, - final SecretService secretService - ) { + public CreateSecretResolver(final EntityClient entityClient, final SecretService secretService) { _entityClient = entityClient; _secretService = secretService; } @@ -41,36 +38,46 @@ public CreateSecretResolver( @Override public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateSecretInput input = bindArgument(environment.getArgument("input"), CreateSecretInput.class); + final CreateSecretInput input = + bindArgument(environment.getArgument("input"), CreateSecretInput.class); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { - if (IngestionAuthUtils.canManageSecrets(context)) { + try { + // Create the Ingestion source key --> use the display name as a unique id to ensure + // it's not duplicated. + final DataHubSecretKey key = new DataHubSecretKey(); + key.setId(input.getName()); - try { - // Create the Ingestion source key --> use the display name as a unique id to ensure it's not duplicated. - final DataHubSecretKey key = new DataHubSecretKey(); - key.setId(input.getName()); + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Secret already exists!"); + } - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Secret already exists!"); - } - - // Create the secret value. - final DataHubSecretValue value = new DataHubSecretValue(); - value.setName(input.getName()); - value.setValue(_secretService.encrypt(input.getValue())); - value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + // Create the secret value. + final DataHubSecretValue value = new DataHubSecretValue(); + value.setName(input.getName()); + value.setValue(_secretService.encrypt(input.getValue())); + value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + value.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, SECRETS_ENTITY_NAME, - SECRET_VALUE_ASPECT_NAME, value); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new secret with name %s", input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, SECRETS_ENTITY_NAME, SECRET_VALUE_ASPECT_NAME, value); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new secret with name %s", input.getName()), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java index b35931420c078..228d5a094cdef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java @@ -9,10 +9,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. - */ +/** Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. */ public class DeleteSecretResolver implements DataFetcher<CompletableFuture<String>> { private final EntityClient _entityClient; @@ -27,15 +24,19 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageSecrets(context)) { final String secretUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(secretUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return secretUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against secret with urn %s", secretUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return secretUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against secret with urn %s", secretUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java index 85c6c6754470d..67564aa721bda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -23,11 +25,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / decryption. - * Requires the MANAGE_SECRETS privilege. + * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / + * decryption. Requires the MANAGE_SECRETS privilege. */ public class GetSecretValuesResolver implements DataFetcher<CompletableFuture<List<SecretValue>>> { @@ -35,60 +35,67 @@ public class GetSecretValuesResolver implements DataFetcher<CompletableFuture<Li private final SecretService _secretService; public GetSecretValuesResolver( - final EntityClient entityClient, - final SecretService secretService - ) { + final EntityClient entityClient, final SecretService secretService) { _entityClient = entityClient; _secretService = secretService; } @Override - public CompletableFuture<List<SecretValue>> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<List<SecretValue>> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final GetSecretValuesInput input = bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); + final GetSecretValuesInput input = + bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch secrets - final Set<Urn> urns = input.getSecrets() - .stream() - .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) - .collect(Collectors.toSet()); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch secrets + final Set<Urn> urns = + input.getSecrets().stream() + .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) + .collect(Collectors.toSet()); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(urns), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>(urns), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); - // Now for each secret, decrypt and return the value. If no secret was found, then we will simply omit it from the list. - // There is no ordering guarantee for the list. 
- return entities.values() - .stream() - .map(entity -> { - EnvelopedAspect aspect = entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); - if (aspect != null) { - // Aspect is present. - final DataHubSecretValue secretValue = new DataHubSecretValue(aspect.getValue().data()); - // Now decrypt the encrypted secret. - final String decryptedSecretValue = decryptSecret(secretValue.getValue()); - return new SecretValue(secretValue.getName(), decryptedSecretValue); - } else { - // No secret exists - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + // Now for each secret, decrypt and return the value. If no secret was found, then we + // will simply omit it from the list. + // There is no ordering guarantee for the list. + return entities.values().stream() + .map( + entity -> { + EnvelopedAspect aspect = + entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); + if (aspect != null) { + // Aspect is present. + final DataHubSecretValue secretValue = + new DataHubSecretValue(aspect.getValue().data()); + // Now decrypt the encrypted secret. + final String decryptedSecretValue = decryptSecret(secretValue.getValue()); + return new SecretValue(secretValue.getName(), decryptedSecretValue); + } else { + // No secret exists + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private String decryptSecret(final String encryptedSecret) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index b0d8c9fd34303..eb054295af09b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; @@ -31,13 +34,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. - */ +/** Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. 
*/ @Slf4j public class ListSecretsResolver implements DataFetcher<CompletableFuture<ListSecretsResult>> { @@ -52,55 +49,66 @@ public ListSecretsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListSecretsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListSecretsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final ListSecretsInput input = bindArgument(environment.getArgument("input"), ListSecretsInput.class); + final ListSecretsInput input = + bindArgument(environment.getArgument("input"), ListSecretsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all secrets - final SearchResult gmsResult = _entityClient.search( - Constants.SECRETS_ENTITY_NAME, - query, - null, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - // Then, resolve all secrets - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); - - // Now that we have entities we can bind this to a result. - final ListSecretsResult result = new ListSecretsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setSecrets(mapEntities(gmsResult.getEntities().stream() - .map(entity -> entities.get(entity.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()))); - return result; - - } catch (Exception e) { - throw new RuntimeException("Failed to list secrets", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all secrets + final SearchResult gmsResult = + _entityClient.search( + Constants.SECRETS_ENTITY_NAME, + query, + null, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + + // Then, resolve all secrets + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); + + // Now that we have entities we can bind this to a result. 
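+            // (start/count/total echo the paging values returned by the GMS search;
+            // only entities that resolved to a non-null response are mapped below)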
+ final ListSecretsResult result = new ListSecretsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setSecrets( + mapEntities( + gmsResult.getEntities().stream() + .map(entity -> entities.get(entity.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + + } catch (Exception e) { + throw new RuntimeException("Failed to list secrets", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private List<Secret> mapEntities(final List<EntityResponse> entities) { @@ -113,7 +121,8 @@ private List<Secret> mapEntities(final List<EntityResponse> entities) { final EnvelopedAspect envelopedInfo = aspects.get(Constants.SECRET_VALUE_ASPECT_NAME); // Bind into a strongly typed object. - final DataHubSecretValue secretValue = new DataHubSecretValue(envelopedInfo.getValue().data()); + final DataHubSecretValue secretValue = + new DataHubSecretValue(envelopedInfo.getValue().data()); // Map using the strongly typed object. results.add(mapSecretValue(entityUrn, secretValue)); @@ -128,4 +137,4 @@ private Secret mapSecretValue(final Urn urn, final DataHubSecretValue value) { result.setDescription(value.getDescription(GetMode.NULL)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java index e510a9fff80aa..225a5801adec9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java @@ -8,10 +8,7 @@ import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; - -/** - * Utility methods to encrypt and decrypt DataHub secrets. - */ +/** Utility methods to encrypt and decrypt DataHub secrets. 
*/ public class SecretUtils { static String encrypt(String value, String secret) { @@ -30,7 +27,8 @@ static String encrypt(String value, String secret) { } Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding"); cipher.init(Cipher.ENCRYPT_MODE, secretKey); - return Base64.getEncoder().encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); + return Base64.getEncoder() + .encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); } catch (Exception e) { throw new RuntimeException("Failed to encrypt value using provided secret!"); } @@ -59,6 +57,5 @@ static String decrypt(String encryptedValue, String secret) { return null; } - private SecretUtils() { - } + private SecretUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java index 38050331318ca..0666fab52dd4e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java @@ -9,10 +9,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - /** - * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires MANAGE_INGESTION - * privilege. + * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires + * MANAGE_INGESTION privilege. */ public class DeleteIngestionSourceResolver implements DataFetcher<CompletableFuture<String>> { @@ -28,15 +27,21 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageIngestion(context)) { final String ingestionSourceUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(ingestionSourceUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return ingestionSourceUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against ingestion source with urn %s", ingestionSourceUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return ingestionSourceUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against ingestion source with urn %s", + ingestionSourceUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java index 562d06b79d2c7..3b6790212ba23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java @@ -19,9 +19,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -/** - * Gets a particular Ingestion Source by urn. - */ +/** Gets a particular Ingestion Source by urn. */ @Slf4j public class GetIngestionSourceResolver implements DataFetcher<CompletableFuture<IngestionSource>> { @@ -32,31 +30,37 @@ public GetIngestionSourceResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<IngestionSource> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<IngestionSource> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = Urn.createFromString(urnStr); - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No ingestion source found - throw new DataHubGraphQLException(String.format("Failed to find Ingestion Source with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Ingestion source found - return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve ingestion source", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn urn = Urn.createFromString(urnStr); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No ingestion source found + throw new DataHubGraphQLException( + String.format("Failed to find Ingestion Source with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Ingestion source found + return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve ingestion source", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java index d019473606e58..51c9e30aadcce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -26,12 +28,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. - */ -public class ListIngestionSourcesResolver implements DataFetcher<CompletableFuture<ListIngestionSourcesResult>> { +/** Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. */ +public class ListIngestionSourcesResolver + implements DataFetcher<CompletableFuture<ListIngestionSourcesResult>> { private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,57 +43,74 @@ public ListIngestionSourcesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListIngestionSourcesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListIngestionSourcesResult> get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { - final ListIngestionSourcesInput input = bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); + final ListIngestionSourcesInput input = + bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List<FacetFilterInput> filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + final List<FacetFilterInput> filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all ingestion sources Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.INGESTION_SOURCE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - null, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all ingestion sources Urns. 
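+            // (the search below passes a null SortCriterion and turns any facet
+            // filter inputs into a GMS Filter via buildFilter; ordering is applied
+            // afterwards on the key aspect's created time)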
+ final SearchResult gmsResult = + _entityClient.search( + Constants.INGESTION_SOURCE_ENTITY_NAME, + query, + buildFilter(filters, Collections.emptyList()), + null, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, resolve all ingestion sources - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), - context.getAuthentication()); + // Then, resolve all ingestion sources + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), + context.getAuthentication()); - final Collection<EntityResponse> sortedEntities = entities.values() - .stream() - .sorted(Comparator.comparingLong(s -> -s.getAspects().get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME).getCreated().getTime())) - .collect(Collectors.toList()); + final Collection<EntityResponse> sortedEntities = + entities.values().stream() + .sorted( + Comparator.comparingLong( + s -> + -s.getAspects() + .get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME) + .getCreated() + .getTime())) + .collect(Collectors.toList()); - // Now that we have entities we can bind this to a result. - final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setIngestionSources(IngestionResolverUtils.mapIngestionSources(sortedEntities)); - return result; + // Now that we have entities we can bind this to a result. + final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setIngestionSources( + IngestionResolverUtils.mapIngestionSources(sortedEntities)); + return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list ingestion sources", e); - } - }); + } catch (Exception e) { + throw new RuntimeException("Failed to list ingestion sources", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java index 68e334bd976f8..6194452e4b6fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -19,23 +23,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; +import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. - */ +/** Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. */ @Slf4j public class UpsertIngestionSourceResolver implements DataFetcher<CompletableFuture<String>> { @@ -49,46 +45,60 @@ public UpsertIngestionSourceResolver(final EntityClient entityClient) { public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final Optional<String> ingestionSourceUrn = Optional.ofNullable(environment.getArgument("urn")); - final UpdateIngestionSourceInput input = bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); - - // Create the policy info. - final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); - final MetadataChangeProposal proposal; - if (ingestionSourceUrn.isPresent()) { - // Update existing ingestion source - try { - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(ingestionSourceUrn.get()), INGESTION_INFO_ASPECT_NAME, info); - } catch (URISyntaxException e) { - throw new DataHubGraphQLException( - String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), - DataHubGraphQLErrorCode.BAD_REQUEST); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final Optional<String> ingestionSourceUrn = + Optional.ofNullable(environment.getArgument("urn")); + final UpdateIngestionSourceInput input = + bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); + + // Create the policy info. 
+ final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); + final MetadataChangeProposal proposal; + if (ingestionSourceUrn.isPresent()) { + // Update existing ingestion source + try { + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(ingestionSourceUrn.get()), + INGESTION_INFO_ASPECT_NAME, + info); + } catch (URISyntaxException e) { + throw new DataHubGraphQLException( + String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } else { + // Create new ingestion source + // Since we are creating a new Ingestion Source, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); + key.setId(uuidStr); + proposal = + buildMetadataChangeProposalWithKey( + key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); + } + + try { + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform update against ingestion source with urn %s", + input.toString()), + e); + } } - } else { - // Create new ingestion source - // Since we are creating a new Ingestion Source, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); - key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); - } - - try { - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against ingestion source with urn %s", input.toString()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } - private DataHubIngestionSourceInfo mapIngestionSourceInfo(final UpdateIngestionSourceInput input) { + private DataHubIngestionSourceInfo mapIngestionSourceInfo( + final UpdateIngestionSourceInput input) { final DataHubIngestionSourceInfo result = new DataHubIngestionSourceInfo(); result.setType(input.getType()); result.setName(input.getName()); @@ -113,15 +123,17 @@ private DataHubIngestionSourceConfig mapConfig(final UpdateIngestionSourceConfig result.setDebugMode(input.getDebugMode()); } if (input.getExtraArgs() != null) { - Map<String, String> extraArgs = input.getExtraArgs() - .stream() - .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + Map<String, String> extraArgs = + input.getExtraArgs().stream() + .collect( + Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); result.setExtraArgs(new StringMap(extraArgs)); } return result; } - private DataHubIngestionSourceSchedule mapSchedule(final UpdateIngestionSourceScheduleInput input) { + private DataHubIngestionSourceSchedule mapSchedule( + final UpdateIngestionSourceScheduleInput input) { final DataHubIngestionSourceSchedule result = new DataHubIngestionSourceSchedule(); result.setInterval(input.getInterval()); result.setTimezone(input.getTimezone()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index ea61b5e258d8b..06bad27e27062 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -32,10 +32,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -/** - * GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job - */ -public class DataJobRunsResolver implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { +/** GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job */ +public class DataJobRunsResolver + implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { private static final String PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME = "parentTemplate"; private static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; @@ -48,74 +47,76 @@ public DataJobRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture<DataProcessInstanceResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final QueryContext context = environment.getContext(); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
+ final Filter filter = buildTaskRunsEntityFilter(entityUrn); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List<Urn> dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. - final Filter filter = buildTaskRunsEntityFilter(entityUrn); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List<Urn> dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + // Step 2: Hydrate the incident entities + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 2: Hydrate the incident entities - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 3: Map GMS incident model to GraphQL model + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List<DataProcessInstance> dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 3: Map GMS incident model to GraphQL model - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List<DataProcessInstance> dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); - - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } private Filter buildTaskRunsEntityFilter(final String entityUrn) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + CriterionArray 
array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index 3ecf396f808b3..d595b1e513d75 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -33,11 +33,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of task runs associated with a Dataset. - */ -public class EntityRunsResolver implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { +/** GraphQL Resolver used for fetching the list of task runs associated with a Dataset. */ +public class EntityRunsResolver + implements DataFetcher<CompletableFuture<DataProcessInstanceResult>> { private static final String INPUT_FIELD_NAME = "inputs.keyword"; private static final String OUTPUT_FIELD_NAME = "outputs.keyword"; @@ -51,76 +49,84 @@ public EntityRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture<DataProcessInstanceResult> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); - final RelationshipDirection direction = RelationshipDirection.valueOf(environment.getArgumentOrDefault("direction", - RelationshipDirection.INCOMING.toString())); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final RelationshipDirection direction = + RelationshipDirection.valueOf( + environment.getArgumentOrDefault( + "direction", RelationshipDirection.INCOMING.toString())); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. - final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List<Urn> dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! 
+ // We use the search index so that we can easily sort by the last updated time. + final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List<Urn> dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the incident entities - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the incident entities + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS instance model to GraphQL model - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List<DataProcessInstance> dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS instance model to GraphQL model + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List<DataProcessInstance> dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } - private Filter buildTaskRunsEntityFilter(final String entityUrn, final RelationshipDirection direction) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(direction.equals(RelationshipDirection.INCOMING) ? INPUT_FIELD_NAME : OUTPUT_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + private Filter buildTaskRunsEntityFilter( + final String entityUrn, final RelationshipDirection direction) { + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField( + direction.equals(RelationshipDirection.INCOMING) + ? 
INPUT_FIELD_NAME + : OUTPUT_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index 8fc3a60900662..a0caef20a4755 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.lineage; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -16,10 +18,6 @@ import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -29,8 +27,9 @@ import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,11 +42,13 @@ public class UpdateLineageResolver implements DataFetcher<CompletableFuture<Bool public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - final UpdateLineageInput input = bindArgument(environment.getArgument("input"), UpdateLineageInput.class); + final UpdateLineageInput input = + bindArgument(environment.getArgument("input"), UpdateLineageInput.class); final List<LineageEdge> edgesToAdd = input.getEdgesToAdd(); final List<LineageEdge> edgesToRemove = input.getEdgesToRemove(); - // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage for each entity + // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage + // for each entity checkPrivileges(context, edgesToAdd, edgesToRemove); // organize data to make updating lineage cleaner @@ -57,77 +58,118 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw downstreamUrns.addAll(downstreamToUpstreamsToAdd.keySet()); downstreamUrns.addAll(downstreamToUpstreamsToRemove.keySet()); - return CompletableFuture.supplyAsync(() -> { - // build MCP for every downstreamUrn - for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); - } - - final List<Urn> upstreamUrnsToAdd = downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); - final 
List<Urn> upstreamUrnsToRemove = downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); - try { - switch (downstreamUrn.getEntityType()) { - case Constants.DATASET_ENTITY_NAME: - // need to filter out dataJobs since this is a valid lineage edge, but will be handled in the downstream direction for DataJobInputOutputs - final List<Urn> filteredUpstreamUrnsToAdd = filterOutDataJobUrns(upstreamUrnsToAdd); - final List<Urn> filteredUpstreamUrnsToRemove = filterOutDataJobUrns(upstreamUrnsToRemove); - - _lineageService.updateDatasetLineage(downstreamUrn, filteredUpstreamUrnsToAdd, filteredUpstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.CHART_ENTITY_NAME: - _lineageService.updateChartLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DASHBOARD_ENTITY_NAME: - _lineageService.updateDashboardLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DATA_JOB_ENTITY_NAME: - _lineageService.updateDataJobUpstreamLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - default: + return CompletableFuture.supplyAsync( + () -> { + // build MCP for every downstreamUrn + for (Urn downstreamUrn : downstreamUrns) { + if (!_entityService.exists(downstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); + } + + final List<Urn> upstreamUrnsToAdd = + downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); + final List<Urn> upstreamUrnsToRemove = + downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); + try { + switch (downstreamUrn.getEntityType()) { + case Constants.DATASET_ENTITY_NAME: + // need to filter out dataJobs since this is a valid lineage edge, but will be + // handled in the downstream direction for DataJobInputOutputs + final List<Urn> filteredUpstreamUrnsToAdd = + filterOutDataJobUrns(upstreamUrnsToAdd); + final List<Urn> filteredUpstreamUrnsToRemove = + filterOutDataJobUrns(upstreamUrnsToRemove); + + _lineageService.updateDatasetLineage( + downstreamUrn, + filteredUpstreamUrnsToAdd, + filteredUpstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.CHART_ENTITY_NAME: + _lineageService.updateChartLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.DASHBOARD_ENTITY_NAME: + _lineageService.updateDashboardLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.DATA_JOB_ENTITY_NAME: + _lineageService.updateDataJobUpstreamLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + default: + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", downstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", downstreamUrn), e); - } - } - - Map<Urn, List<Urn>> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); - Map<Urn, List<Urn>> upstreamToDownstreamsToRemove = getUpstreamToDownstreamMap(edgesToRemove); - Set<Urn> upstreamUrns = new HashSet<>(); - upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); - 
upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); - - // build MCP for upstreamUrn if necessary - for (Urn upstreamUrn : upstreamUrns) { - if (!_entityService.exists(upstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); - } - - final List<Urn> downstreamUrnsToAdd = upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); - final List<Urn> downstreamUrnsToRemove = upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); - try { - if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { - // need to filter out dataJobs since this is a valid lineage edge, but is handled in the upstream direction for DataJobs - final List<Urn> filteredDownstreamUrnsToAdd = filterOutDataJobUrns(downstreamUrnsToAdd); - final List<Urn> filteredDownstreamUrnsToRemove = filterOutDataJobUrns(downstreamUrnsToRemove); - - _lineageService.updateDataJobDownstreamLineage( - upstreamUrn, filteredDownstreamUrnsToAdd, filteredDownstreamUrnsToRemove, actor, context.getAuthentication() - ); + + Map<Urn, List<Urn>> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); + Map<Urn, List<Urn>> upstreamToDownstreamsToRemove = + getUpstreamToDownstreamMap(edgesToRemove); + Set<Urn> upstreamUrns = new HashSet<>(); + upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); + upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + + // build MCP for upstreamUrn if necessary + for (Urn upstreamUrn : upstreamUrns) { + if (!_entityService.exists(upstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); + } + + final List<Urn> downstreamUrnsToAdd = + upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); + final List<Urn> downstreamUrnsToRemove = + upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); + try { + if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { + // need to filter out dataJobs since this is a valid lineage edge, but is handled in + // the upstream direction for DataJobs + final List<Urn> filteredDownstreamUrnsToAdd = + filterOutDataJobUrns(downstreamUrnsToAdd); + final List<Urn> filteredDownstreamUrnsToRemove = + filterOutDataJobUrns(downstreamUrnsToRemove); + + _lineageService.updateDataJobDownstreamLineage( + upstreamUrn, + filteredDownstreamUrnsToAdd, + filteredDownstreamUrnsToRemove, + actor, + context.getAuthentication()); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", upstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", upstreamUrn), e); - } - } - return true; - }); + return true; + }); } private List<Urn> filterOutDataJobUrns(@Nonnull final List<Urn> urns) { - return urns.stream().filter( - upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME) - ).collect(Collectors.toList()); + return urns.stream() + .filter(upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); } private Map<Urn, List<Urn>> getDownstreamToUpstreamsMap(@Nonnull final List<LineageEdge> edges) { @@ -156,7 +198,10 @@ private Map<Urn, List<Urn>> getUpstreamToDownstreamMap(@Nonnull final List<Linea return upstreamToDownstreams; } - private boolean isAuthorized(@Nonnull final QueryContext context, 
@Nonnull final Urn urn, @Nonnull final DisjunctivePrivilegeGroup orPrivilegesGroup) { + private boolean isAuthorized( + @Nonnull final QueryContext context, + @Nonnull final Urn urn, + @Nonnull final DisjunctivePrivilegeGroup orPrivilegesGroup) { return AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), @@ -168,39 +213,42 @@ private boolean isAuthorized(@Nonnull final QueryContext context, @Nonnull final private void checkLineageEdgePrivileges( @Nonnull final QueryContext context, @Nonnull final LineageEdge lineageEdge, - @Nonnull final DisjunctivePrivilegeGroup editLineagePrivileges - ) { + @Nonnull final DisjunctivePrivilegeGroup editLineagePrivileges) { Urn upstreamUrn = UrnUtils.getUrn(lineageEdge.getUpstreamUrn()); if (!isAuthorized(context, upstreamUrn, editLineagePrivileges)) { throw new AuthorizationException( - String.format("Unauthorized to edit %s lineage. Please contact your DataHub administrator.", upstreamUrn.getEntityType()) - ); + String.format( + "Unauthorized to edit %s lineage. Please contact your DataHub administrator.", + upstreamUrn.getEntityType())); } Urn downstreamUrn = UrnUtils.getUrn(lineageEdge.getDownstreamUrn()); if (!isAuthorized(context, downstreamUrn, editLineagePrivileges)) { throw new AuthorizationException( - String.format("Unauthorized to edit %s lineage. Please contact your DataHub administrator.", downstreamUrn.getEntityType()) - ); + String.format( + "Unauthorized to edit %s lineage. Please contact your DataHub administrator.", + downstreamUrn.getEntityType())); } } /** - * Loop over each edge to add and each edge to remove and ensure that the user has edit lineage privilege or edit entity privilege - * for every upstream and downstream urn. Throws an AuthorizationException if the actor doesn't have permissions. + * Loop over each edge to add and each edge to remove and ensure that the user has edit lineage + * privilege or edit entity privilege for every upstream and downstream urn. Throws an + * AuthorizationException if the actor doesn't have permissions. 
*/ private void checkPrivileges( @Nonnull final QueryContext context, @Nonnull final List<LineageEdge> edgesToAdd, - @Nonnull final List<LineageEdge> edgesToRemove - ) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup editLineagePrivileges = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + @Nonnull final List<LineageEdge> edgesToRemove) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup editLineagePrivileges = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); for (LineageEdge edgeToAdd : edgesToAdd) { checkLineageEdgePrivileges(context, edgeToAdd, editLineagePrivileges); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java index 023686b1d10c9..7f031cb481852 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java @@ -8,21 +8,19 @@ import java.util.concurrent.CompletableFuture; import org.dataloader.DataLoader; - /** * Generic GraphQL resolver responsible for * - * 1. Generating a single input AspectLoadKey. - * 2. Resolving a single {@link Aspect}. - * + * <p>1. Generating a single input AspectLoadKey. 2. Resolving a single {@link Aspect}. 
*/ public class AspectResolver implements DataFetcher<CompletableFuture<Aspect>> { - @Override - public CompletableFuture<Aspect> get(DataFetchingEnvironment environment) { - final DataLoader<VersionedAspectKey, Aspect> loader = environment.getDataLoaderRegistry().getDataLoader("Aspect"); - final String fieldName = environment.getField().getName(); - final Long version = environment.getArgument("version"); - final String urn = ((Entity) environment.getSource()).getUrn(); - return loader.load(new VersionedAspectKey(urn, fieldName, version)); - } + @Override + public CompletableFuture<Aspect> get(DataFetchingEnvironment environment) { + final DataLoader<VersionedAspectKey, Aspect> loader = + environment.getDataLoaderRegistry().getDataLoader("Aspect"); + final String fieldName = environment.getField().getName(); + final Long version = environment.getArgument("version"); + final String urn = ((Entity) environment.getSource()).getUrn(); + return loader.load(new VersionedAspectKey(urn, fieldName, version)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java index 20e0e4ae1c22a..ecf36769dfa9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java @@ -5,7 +5,6 @@ import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -21,8 +20,7 @@ public class BatchGetEntitiesResolver implements DataFetcher<CompletableFuture<L public BatchGetEntitiesResolver( final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, - final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider - ) { + final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider) { _entityTypes = entityTypes; _entitiesProvider = entitiesProvider; } @@ -32,22 +30,28 @@ public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment) final List<Entity> entities = _entitiesProvider.apply(environment); Map<EntityType, List<Entity>> entityTypeToEntities = new HashMap<>(); - entities.forEach((entity) -> { - EntityType type = entity.getType(); - List<Entity> entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); - entitiesList.add(entity); - entityTypeToEntities.put(type, entitiesList); - }); + entities.forEach( + (entity) -> { + EntityType type = entity.getType(); + List<Entity> entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); + entitiesList.add(entity); + entityTypeToEntities.put(type, entitiesList); + }); List<CompletableFuture<List<Entity>>> entitiesFutures = new ArrayList<>(); for (Map.Entry<EntityType, List<Entity>> entry : entityTypeToEntities.entrySet()) { - CompletableFuture<List<Entity>> entitiesFuture = BatchLoadUtils - .batchLoadEntitiesOfSameType(entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); + CompletableFuture<List<Entity>> entitiesFuture = + BatchLoadUtils.batchLoadEntitiesOfSameType( + entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); entitiesFutures.add(entitiesFuture); } return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0])) - 
.thenApply(v -> entitiesFutures.stream().flatMap(future -> future.join().stream()).collect(Collectors.toList())); + .thenApply( + v -> + entitiesFutures.stream() + .flatMap(future -> future.join().stream()) + .collect(Collectors.toList())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index d44f2b77029f3..c63ec819e8f6a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityLineageResult; @@ -17,15 +19,14 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub + * graph. Lineage relationship denotes whether an entity is directly upstream or downstream of + * another entity */ @Slf4j -public class EntityLineageResultResolver implements DataFetcher<CompletableFuture<EntityLineageResult>> { +public class EntityLineageResultResolver + implements DataFetcher<CompletableFuture<EntityLineageResult>> { private final SiblingGraphService _siblingGraphService; @@ -39,38 +40,34 @@ public CompletableFuture<EntityLineageResult> get(DataFetchingEnvironment enviro final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); final LineageDirection lineageDirection = input.getDirection(); - @Nullable - final Integer start = input.getStart(); // Optional! - @Nullable - final Integer count = input.getCount(); // Optional! - @Nullable - final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! - @Nullable - final Long startTimeMillis = input.getStartTimeMillis(); // Optional! - @Nullable - final Long endTimeMillis = input.getEndTimeMillis(); // Optional! + @Nullable final Integer start = input.getStart(); // Optional! + @Nullable final Integer count = input.getCount(); // Optional! + @Nullable final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! + @Nullable final Long startTimeMillis = input.getStartTimeMillis(); // Optional! + @Nullable final Long endTimeMillis = input.getEndTimeMillis(); // Optional! com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - return mapEntityRelationships( - _siblingGraphService.getLineage( - Urn.createFromString(urn), - resolvedDirection, - start != null ? start : 0, - count != null ? count : 100, - 1, - separateSiblings != null ? 
input.getSeparateSiblings() : false, - new HashSet<>(), - startTimeMillis, - endTimeMillis)); - } catch (URISyntaxException e) { - log.error("Failed to fetch lineage for {}", urn); - throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return mapEntityRelationships( + _siblingGraphService.getLineage( + Urn.createFromString(urn), + resolvedDirection, + start != null ? start : 0, + count != null ? count : 100, + 1, + separateSiblings != null ? input.getSeparateSiblings() : false, + new HashSet<>(), + startTimeMillis, + endTimeMillis)); + } catch (URISyntaxException e) { + log.error("Failed to fetch lineage for {}", urn); + throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); + } + }); } private EntityLineageResult mapEntityRelationships( @@ -80,10 +77,10 @@ private EntityLineageResult mapEntityRelationships( result.setCount(entityLineageResult.getCount()); result.setTotal(entityLineageResult.getTotal()); result.setFiltered(entityLineageResult.getFiltered()); - result.setRelationships(entityLineageResult.getRelationships() - .stream() - .map(this::mapEntityRelationship) - .collect(Collectors.toList())); + result.setRelationships( + entityLineageResult.getRelationships().stream() + .map(this::mapEntityRelationship) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java index 43b28ef85f78a..223548d5d6242 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.load; -import com.linkedin.common.EntityRelationship; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationships; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; @@ -17,13 +18,11 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * GraphQL Resolver responsible for fetching relationships between entities in the DataHub graph. 
*/ -public class EntityRelationshipsResultResolver implements DataFetcher<CompletableFuture<EntityRelationshipsResult>> { +public class EntityRelationshipsResultResolver + implements DataFetcher<CompletableFuture<EntityRelationshipsResult>> { private final GraphClient _graphClient; @@ -35,24 +34,22 @@ public EntityRelationshipsResultResolver(final GraphClient graphClient) { public CompletableFuture<EntityRelationshipsResult> get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - final RelationshipsInput input = bindArgument(environment.getArgument("input"), RelationshipsInput.class); + final RelationshipsInput input = + bindArgument(environment.getArgument("input"), RelationshipsInput.class); final List<String> relationshipTypes = input.getTypes(); - final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = input.getDirection(); + final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = + input.getDirection(); final Integer start = input.getStart(); // Optional! final Integer count = input.getCount(); // Optional! - final RelationshipDirection resolvedDirection = RelationshipDirection.valueOf(relationshipDirection.toString()); - return CompletableFuture.supplyAsync(() -> mapEntityRelationships( - fetchEntityRelationships( - urn, - relationshipTypes, - resolvedDirection, - start, - count, - context.getActorUrn() - ), - resolvedDirection - )); + final RelationshipDirection resolvedDirection = + RelationshipDirection.valueOf(relationshipDirection.toString()); + return CompletableFuture.supplyAsync( + () -> + mapEntityRelationships( + fetchEntityRelationships( + urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()), + resolvedDirection)); } private EntityRelationships fetchEntityRelationships( @@ -68,23 +65,28 @@ private EntityRelationships fetchEntityRelationships( private EntityRelationshipsResult mapEntityRelationships( final EntityRelationships entityRelationships, - final RelationshipDirection relationshipDirection - ) { + final RelationshipDirection relationshipDirection) { final EntityRelationshipsResult result = new EntityRelationshipsResult(); result.setStart(entityRelationships.getStart()); result.setCount(entityRelationships.getCount()); result.setTotal(entityRelationships.getTotal()); - result.setRelationships(entityRelationships.getRelationships().stream().map(entityRelationship -> mapEntityRelationship( - com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf(relationshipDirection.name()), - entityRelationship) - ).collect(Collectors.toList())); + result.setRelationships( + entityRelationships.getRelationships().stream() + .map( + entityRelationship -> + mapEntityRelationship( + com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf( + relationshipDirection.name()), + entityRelationship)) + .collect(Collectors.toList())); return result; } private com.linkedin.datahub.graphql.generated.EntityRelationship mapEntityRelationship( final com.linkedin.datahub.graphql.generated.RelationshipDirection direction, final EntityRelationship entityRelationship) { - final com.linkedin.datahub.graphql.generated.EntityRelationship result = new com.linkedin.datahub.graphql.generated.EntityRelationship(); + final com.linkedin.datahub.graphql.generated.EntityRelationship result = + new com.linkedin.datahub.graphql.generated.EntityRelationship(); final Entity partialEntity = 
UrnToEntityMapper.map(entityRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java index 6a32e0b14e313..d298c344240c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java @@ -8,31 +8,27 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Function; - /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + * <p>1. Retrieving a single input urn. 2. Resolving a single Entity */ public class EntityTypeBatchResolver implements DataFetcher<CompletableFuture<List<Entity>>> { - private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; - private final Function<DataFetchingEnvironment, List<Entity>> _entitiesProvider; + private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; + private final Function<DataFetchingEnvironment, List<Entity>> _entitiesProvider; - public EntityTypeBatchResolver( - final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, - final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider - ) { - _entityTypes = entityTypes; - _entitiesProvider = entitiesProvider; - } + public EntityTypeBatchResolver( + final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, + final Function<DataFetchingEnvironment, List<Entity>> entitiesProvider) { + _entityTypes = entityTypes; + _entitiesProvider = entitiesProvider; + } - @Override - public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment) { - final List<Entity> entities = _entitiesProvider.apply(environment); - return BatchLoadUtils.batchLoadEntitiesOfSameType(entities, _entityTypes, environment.getDataLoaderRegistry()); - } + @Override + public CompletableFuture<List<Entity>> get(DataFetchingEnvironment environment) { + final List<Entity> entities = _entitiesProvider.apply(environment); + return BatchLoadUtils.batchLoadEntitiesOfSameType( + entities, _entityTypes, environment.getDataLoaderRegistry()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java index 29d5d78e0ea96..3c285f30661bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java @@ -5,64 +5,65 @@ import com.linkedin.datahub.graphql.generated.Entity; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import org.dataloader.DataLoader; /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + * <p>1. Retrieving a single input urn. 2. 
Resolving a single Entity */ public class EntityTypeResolver implements DataFetcher<CompletableFuture<Entity>> { - private static final List<String> IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); - private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; - private final Function<DataFetchingEnvironment, Entity> _entityProvider; + private static final List<String> IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); + private final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> _entityTypes; + private final Function<DataFetchingEnvironment, Entity> _entityProvider; - public EntityTypeResolver( - final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, - final Function<DataFetchingEnvironment, Entity> entity - ) { - _entityTypes = entityTypes; - _entityProvider = entity; - } + public EntityTypeResolver( + final List<com.linkedin.datahub.graphql.types.EntityType<?, ?>> entityTypes, + final Function<DataFetchingEnvironment, Entity> entity) { + _entityTypes = entityTypes; + _entityProvider = entity; + } + private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { + return environment.getField().getSelectionSet().getSelections().stream() + .filter( + selection -> { + if (!(selection instanceof graphql.language.Field)) { + return true; + } + return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); + }) + .count() + == 0; + } - private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { - return environment.getField().getSelectionSet().getSelections().stream().filter(selection -> { - if (!(selection instanceof graphql.language.Field)) { - return true; - } - return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); - }).count() == 0; + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final Entity resolvedEntity = _entityProvider.apply(environment); + if (resolvedEntity == null) { + return CompletableFuture.completedFuture(null); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final Entity resolvedEntity = _entityProvider.apply(environment); - if (resolvedEntity == null) { - return CompletableFuture.completedFuture(null); - } - - final Object javaObject = _entityProvider.apply(environment); + final Object javaObject = _entityProvider.apply(environment); - if (isOnlySelectingIdentityFields(environment)) { - return CompletableFuture.completedFuture(javaObject); - } + if (isOnlySelectingIdentityFields(environment)) { + return CompletableFuture.completedFuture(javaObject); + } - final com.linkedin.datahub.graphql.types.EntityType filteredEntity = Iterables.getOnlyElement(_entityTypes.stream() + final com.linkedin.datahub.graphql.types.EntityType filteredEntity = + Iterables.getOnlyElement( + _entityTypes.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); - return loader.load(key); - } + return loader.load(key); + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java index 02a92544855a3..ee2f7c3abe97d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java @@ -3,41 +3,42 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a batch of urns. - * 2. Resolving a single {@link LoadableType}. + * <p>1. Retrieving a batch of urns. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + * <p>Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param <T> the generated GraphQL POJO corresponding to the resolved type. * @param <K> the key type for the DataLoader */ public class LoadableTypeBatchResolver<T, K> implements DataFetcher<CompletableFuture<List<T>>> { - private final LoadableType<T, K> _loadableType; - private final Function<DataFetchingEnvironment, List<K>> _keyProvider; + private final LoadableType<T, K> _loadableType; + private final Function<DataFetchingEnvironment, List<K>> _keyProvider; - public LoadableTypeBatchResolver(final LoadableType<T, K> loadableType, final Function<DataFetchingEnvironment, List<K>> keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeBatchResolver( + final LoadableType<T, K> loadableType, + final Function<DataFetchingEnvironment, List<K>> keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture<List<T>> get(DataFetchingEnvironment environment) { - final List<K> keys = _keyProvider.apply(environment); - if (keys == null) { - return null; - } - final DataLoader<K, T> loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.loadMany(keys); + @Override + public CompletableFuture<List<T>> get(DataFetchingEnvironment environment) { + final List<K> keys = _keyProvider.apply(environment); + if (keys == null) { + return null; } + final DataLoader<K, T> loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.loadMany(keys); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java index 53702f9cafe8b..3868b1a35b64f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java @@ -3,40 +3,41 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + * <p>1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + * <p>Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param <T> the generated GraphQL POJO corresponding to the resolved type. * @param <K> the key type for the DataLoader */ public class LoadableTypeResolver<T, K> implements DataFetcher<CompletableFuture<T>> { - private final LoadableType<T, K> _loadableType; - private final Function<DataFetchingEnvironment, K> _keyProvider; + private final LoadableType<T, K> _loadableType; + private final Function<DataFetchingEnvironment, K> _keyProvider; - public LoadableTypeResolver(final LoadableType<T, K> loadableType, final Function<DataFetchingEnvironment, K> keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeResolver( + final LoadableType<T, K> loadableType, + final Function<DataFetchingEnvironment, K> keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture<T> get(DataFetchingEnvironment environment) { - final K key = _keyProvider.apply(environment); - if (key == null) { - return null; - } - final DataLoader<K, T> loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.load(key); + @Override + public CompletableFuture<T> get(DataFetchingEnvironment environment) { + final K key = _keyProvider.apply(environment); + if (key == null) { + return null; } + final DataLoader<K, T> loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java index a4867819a2401..e85eaca127d62 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.load; +import com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.OwnerType; import com.linkedin.datahub.graphql.types.LoadableType; @@ -8,38 +9,41 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; -import org.dataloader.DataLoader; import java.util.stream.Collectors; -import com.google.common.collect.Iterables; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + * <p>1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. 
* - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + * <p>Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param <T> the generated GraphQL POJO corresponding to the resolved type. */ public class OwnerTypeResolver<T> implements DataFetcher<CompletableFuture<T>> { - private final List<LoadableType<?, ?>> _loadableTypes; - private final Function<DataFetchingEnvironment, OwnerType> _urnProvider; + private final List<LoadableType<?, ?>> _loadableTypes; + private final Function<DataFetchingEnvironment, OwnerType> _urnProvider; - public OwnerTypeResolver(final List<LoadableType<?, ?>> loadableTypes, final Function<DataFetchingEnvironment, OwnerType> urnProvider) { - _loadableTypes = loadableTypes; - _urnProvider = urnProvider; - } + public OwnerTypeResolver( + final List<LoadableType<?, ?>> loadableTypes, + final Function<DataFetchingEnvironment, OwnerType> urnProvider) { + _loadableTypes = loadableTypes; + _urnProvider = urnProvider; + } - @Override - public CompletableFuture<T> get(DataFetchingEnvironment environment) { - final OwnerType ownerType = _urnProvider.apply(environment); - final LoadableType<?, ?> filteredEntity = Iterables.getOnlyElement(_loadableTypes.stream() + @Override + public CompletableFuture<T> get(DataFetchingEnvironment environment) { + final OwnerType ownerType = _urnProvider.apply(environment); + final LoadableType<?, ?> filteredEntity = + Iterables.getOnlyElement( + _loadableTypes.stream() .filter(entity -> ownerType.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader<String, T> loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - return loader.load(((Entity) ownerType).getUrn()); - } + final DataLoader<String, T> loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + return loader.load(((Entity) ownerType).getUrn()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index f13ebf8373e91..0d00823697c25 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authorization.EntitySpec; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -27,24 +29,21 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. - * The purpose of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" API - * to a single place. - * - * It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and limit arguments - * used for filtering the specific TimeSeries Aspects to be fetched. 
+ * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. The purpose + * of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" + * API to a single place. * - * On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping - * a generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This wil - * be invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. + * <p>It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and + * limit arguments used for filtering the specific TimeSeries Aspects to be fetched. * + * <p>On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping a + * generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This will be + * invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. */ @Slf4j -public class TimeSeriesAspectResolver implements DataFetcher<CompletableFuture<List<TimeSeriesAspect>>> { +public class TimeSeriesAspectResolver + implements DataFetcher<CompletableFuture<List<TimeSeriesAspect>>> { private final EntityClient _client; private final String _entityName; @@ -73,13 +72,13 @@ public TimeSeriesAspectResolver( _sort = sort; } - /** - * Check whether the actor is authorized to fetch the timeseries aspect given the resource urn - */ + /** Check whether the actor is authorized to fetch the timeseries aspect given the resource urn */ private boolean isAuthorized(QueryContext context, String urn) { - if (_entityName.equals(Constants.DATASET_ENTITY_NAME) && _aspectName.equals( - Constants.DATASET_PROFILE_ASPECT_NAME)) { - return AuthorizationUtils.isAuthorized(context, Optional.of(new EntitySpec(_entityName, urn)), + if (_entityName.equals(Constants.DATASET_ENTITY_NAME) + && _aspectName.equals(Constants.DATASET_PROFILE_ASPECT_NAME)) { + return AuthorizationUtils.isAuthorized( + context, + Optional.of(new EntitySpec(_entityName, urn)), PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE); } return true; @@ -87,46 +86,62 @@ private boolean isAuthorized(QueryContext context, String urn) { @Override public CompletableFuture<List<TimeSeriesAspect>> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); - // Fetch the urn, assuming the parent has an urn field. - // todo: what if the parent urn isn't projected? - final String urn = ((Entity) environment.getSource()).getUrn(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + // Fetch the urn, assuming the parent has an urn field. + // todo: what if the parent urn isn't projected? + final String urn = ((Entity) environment.getSource()).getUrn(); - if (!isAuthorized(context, urn)) { - return Collections.emptyList(); - } + if (!isAuthorized(context, urn)) { + return Collections.emptyList(); + } - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - // Max number of aspects to return. - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? 
bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; - final SortCriterion maybeSort = _sort; + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + // Max number of aspects to return. + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; + final SortCriterion maybeSort = _sort; - try { - // Step 1: Get aspects. - List<EnvelopedAspect> aspects = - _client.getTimeseriesAspectValues(urn, _entityName, _aspectName, maybeStartTimeMillis, maybeEndTimeMillis, - maybeLimit, buildFilters(maybeFilters), maybeSort, context.getAuthentication()); + try { + // Step 1: Get aspects. + List<EnvelopedAspect> aspects = + _client.getTimeseriesAspectValues( + urn, + _entityName, + _aspectName, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilters(maybeFilters), + maybeSort, + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. - return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve aspects from GMS", e); - } - }); + // Step 2: Bind profiles into GraphQL strong types. + return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve aspects from GMS", e); + } + }); } private Filter buildFilters(@Nullable FilterInput maybeFilters) { if (maybeFilters == null) { return null; } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(maybeFilters.getAnd().stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + maybeFilters.getAnd().stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java index 619ca95e7d9ed..bee46f8a18cf2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.linkedin.common.urn.CorpuserUrn; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddLinkResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -35,41 +33,42 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw String linkLabel = input.getLabel(); Urn 
targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) + && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService); + try { - log.debug("Adding Link. input: {}", input.toString()); + log.debug("Adding Link. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.addLink( - linkUrl, - linkLabel, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add link to resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to add link to resource with input %s", input.toString()), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LinkUtils.addLink(linkUrl, linkLabel, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to add link to resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to add link to resource with input %s", input.toString()), e); + } + }); } - // Returns whether this is a glossary entity and whether you can edit this glossary entity with the + // Returns whether this is a glossary entity and whether you can edit this glossary entity with + // the // Manage all children or Manage direct children privileges private boolean canUpdateGlossaryEntityLinks(Urn targetUrn, QueryContext context) { - final boolean isGlossaryEntity = targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); + final boolean isGlossaryEntity = + targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); if (!isGlossaryEntity) { return false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java index 3f2dab0a5ba71..9c0d009ff9b0e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnerResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -42,28 +41,32 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw OwnerInput ownerInput = ownerInputBuilder.build(); if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); + return CompletableFuture.supplyAsync( + () -> { + OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); - try { + try { - log.debug("Adding Owner. input: {}", input); + log.debug("Adding Owner. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - ImmutableList.of(ownerInput), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owner to resource with input %s", input), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + ImmutableList.of(ownerInput), + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owner to resource with input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java index 4e5b5bdb2a651..c64b2403364c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -18,9 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnersResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,37 +27,37 @@ public class AddOwnersResolver implements DataFetcher<CompletableFuture<Boolean> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final AddOwnersInput input = bindArgument(environment.getArgument("input"), AddOwnersInput.class); + final AddOwnersInput input 
= + bindArgument(environment.getArgument("input"), AddOwnersInput.class); List<OwnerInput> owners = input.getOwners(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - OwnerUtils.validateAddOwnerInput( - owners, - targetUrn, - _entityService - ); - try { - - log.debug("Adding Owners. input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - owners, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owners to resource with input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + OwnerUtils.validateAddOwnerInput(owners, targetUrn, _entityService); + try { + + log.debug("Adding Owners. input: {}", input); + + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + owners, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owners to resource with input %s", input), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java index 78d2341492b39..f4e3f7ed49056 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -27,44 +25,54 @@ public class AddTagResolver implements DataFetcher<CompletableFuture<Boolean>> { @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final TagAssociationInput input = + 
bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { - if (!tagUrn.getEntityType().equals("tag")) { - log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals("tag")) { + log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.info("Adding Tag. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info("Adding Tag. 
input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java index 7174f3edffee6..4135e774172c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,9 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -32,40 +30,47 @@ public class AddTagsResolver implements DataFetcher<CompletableFuture<Boolean>> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final AddTagsInput input = bindArgument(environment.getArgument("input"), AddTagsInput.class); - List<Urn> tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List<Urn> tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - tagUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { - log.info("Adding Tags. 
input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - tagUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + LabelUtils.validateResourceAndLabel( + tagUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { + log.info("Adding Tags. input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + tagUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java index 056b5db4324c3..a776fda558a42 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,8 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -25,39 +25,49 @@ public class AddTermResolver implements DataFetcher<CompletableFuture<Boolean>> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java index 2f58b6b09e681..4fbe74a0349b4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -19,8 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,41 +29,48 @@ public class AddTermsResolver implements DataFetcher<CompletableFuture<Boolean>> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final AddTermsInput input = bindArgument(environment.getArgument("input"), AddTermsInput.class); - List<Urn> termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List<Urn> termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, 
input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - termUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + LabelUtils.validateResourceAndLabel( + termUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - termUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + termUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java index 5beaeecae673f..94182835de159 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddOwnersResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,26 +28,30 @@ public class BatchAddOwnersResolver implements DataFetcher<CompletableFuture<Boo @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final BatchAddOwnersInput input = bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); + final BatchAddOwnersInput input = + bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); final 
List<OwnerInput> owners = input.getOwners(); final List<ResourceRefInput> resources = input.getResources(); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateOwners(owners); - validateInputResources(resources, context); + // First, validate the batch + validateOwners(owners); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchAddOwners(owners, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + try { + // Then execute the bulk add + batchAddOwners(owners, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } private void validateOwners(List<OwnerInput> owners) { @@ -67,23 +70,32 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be applied to subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be applied to subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddOwners(List<OwnerInput> owners, List<ResourceRefInput> resources, QueryContext context) { + private void batchAddOwners( + List<OwnerInput> owners, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch adding owners. 
owners: {}, resources: {}", owners, resources); try { - OwnerUtils.addOwnersToResources(owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.addOwnersToResources( + owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - owners, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + owners, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java index 9c5cddb3c50bc..239ada1653695 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -12,21 +14,16 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTagsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -36,62 +33,64 @@ public class BatchAddTagsResolver implements DataFetcher<CompletableFuture<Boole @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTagsInput input = bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); - final List<Urn> tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTagsInput input = + bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); + final List<Urn> tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTags(tagUrns); - - if (resources.size() == 1 && resources.get(0).getSubResource() != null) { - return handleAddTagsToSingleSchemaField(context, resources, tagUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", 
input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // First, validate the batch + validateTags(tagUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return handleAddTagsToSingleSchemaField(context, resources, tagUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + batchAddTags(tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } /** * When adding tags to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * tag to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the tag to one of its siblings. If that fails, keep trying all siblings until one passes or all + * fail. Then we throw if none succeed. */ private Boolean handleAddTagsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List<ResourceRefInput> resources, - @Nonnull final List<Urn> tagUrns - ) { + @Nonnull final List<Urn> tagUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); final List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, new HashSet<>(), siblingUrns); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, new HashSet<>(), siblingUrns); } /** - * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. - * Try adding until we attempt all siblings or one passes. Throw if none pass. + * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. Try + * adding until we attempt all siblings or one passes. Throw if none pass. 
*/ private Boolean attemptBatchAddTagsWithSiblings( @Nonnull final List<Urn> tagUrns, @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet<Urn> attemptedUrns, - @Nonnull final List<Urn> siblingUrns - ) { + @Nonnull final List<Urn> siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List<ResourceRefInput> resources = new ArrayList<>(); resources.add(resource); @@ -106,13 +105,19 @@ private Boolean attemptBatchAddTagsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add tags for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } @@ -132,20 +137,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddTags(List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) { - log.debug("Batch adding Tags. tags: {}, resources: {}", resources, tagUrns); - try { - LabelUtils.addTagsToResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + private void batchAddTags( + List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) { + log.debug("Batch adding Tags. 
tags: {}, resources: {}", resources, tagUrns); + try { + LabelUtils.addTagsToResources( + tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); - } + } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java index a46f37b110f4e..b6d799c13345d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -12,21 +14,16 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTermsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -36,49 +33,52 @@ public class BatchAddTermsResolver implements DataFetcher<CompletableFuture<Bool @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTermsInput input = bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); - final List<Urn> termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTermsInput input = + bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); + final List<Urn> termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTerms(termUrns); - - if (resources.size() == 1 && resources.get(0).getSubResource() != null) { - return handleAddTermsToSingleSchemaField(context, resources, termUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // First, validate the batch + validateTerms(termUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return 
handleAddTermsToSingleSchemaField(context, resources, termUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + batchAddTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } /** * When adding terms to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * term to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the term to one of its siblings. If that fails, keep trying all siblings until one passes or + * all fail. Then we throw if none succeed. */ private Boolean handleAddTermsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List<ResourceRefInput> resources, - @Nonnull final List<Urn> termUrns - ) { + @Nonnull final List<Urn> termUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); final List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, new HashSet<>(), siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, new HashSet<>(), siblingUrns); } /** @@ -90,8 +90,7 @@ private Boolean attemptBatchAddTermsWithSiblings( @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet<Urn> attemptedUrns, - @Nonnull final List<Urn> siblingUrns - ) { + @Nonnull final List<Urn> siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List<ResourceRefInput> resources = new ArrayList<>(); resources.add(resource); @@ -106,13 +105,19 @@ private Boolean attemptBatchAddTermsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add terms for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } @@ -132,20 +137,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - 
throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddTerms(List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { + private void batchAddTerms( + List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch adding Terms. terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.addTermsToResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.addTermsToResources( + termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java index debd68646910f..30e04ac36ee0f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveOwnersResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -29,27 +28,33 @@ public class BatchRemoveOwnersResolver implements DataFetcher<CompletableFuture< @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final BatchRemoveOwnersInput input = bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); + final BatchRemoveOwnersInput input = + bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); final List<String> owners = input.getOwnerUrns(); final List<ResourceRefInput> resources = input.getResources(); - final Optional<Urn> maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + final Optional<Urn> maybeOwnershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? 
Optional.empty() + : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk remove - batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk remove + batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) { @@ -62,26 +67,40 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be removed from subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be removed from subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveOwners(List<String> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn, - List<ResourceRefInput> resources, QueryContext context) { + private void batchRemoveOwners( + List<String> ownerUrns, + Optional<Urn> maybeOwnershipTypeUrn, + List<ResourceRefInput> resources, + QueryContext context) { log.debug("Batch removing owners. 
owners: {}, resources: {}", ownerUrns, resources); try { - OwnerUtils.removeOwnersFromResources(ownerUrns.stream().map(UrnUtils::getUrn).collect( - Collectors.toList()), maybeOwnershipTypeUrn, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.removeOwnersFromResources( + ownerUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + maybeOwnershipTypeUrn, + resources, + UrnUtils.getUrn(context.getActorUrn()), + _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch remove Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch remove Owners %s to resources with urns %s!", + ownerUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java index ab432f0afcaec..7500f29a0c67f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTagsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -28,26 +27,29 @@ public class BatchRemoveTagsResolver implements DataFetcher<CompletableFuture<Bo @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTagsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); - final List<Urn> tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTagsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); + final List<Urn> tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchRemoveTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTags(tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + 
"Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTags(List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) { + private void batchRemoveTags( + List<Urn> tagUrns, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch removing Tags. tags: {}, resources: {}", resources, tagUrns); try { - LabelUtils.removeTagsFromResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTagsFromResources( + tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java index c8870cc44bf9e..3706e4e911b17 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTermsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -28,26 +27,29 @@ public class BatchRemoveTermsResolver implements DataFetcher<CompletableFuture<B @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTermsInput input = 
bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); - final List<Urn> termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTermsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); + final List<Urn> termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchRemoveTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTerms(List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { + private void batchRemoveTerms( + List<Urn> termUrns, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch removing Terms. 
terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.removeTermsFromResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTermsFromResources( + termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java index 9b6167c673d8d..551878371b489 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchSetDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -30,25 +29,29 @@ public class BatchSetDomainResolver implements DataFetcher<CompletableFuture<Boo @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDomainInput input = bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); + final BatchSetDomainInput input = + bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); final String maybeDomainUrn = input.getDomainUrn(); final List<ResourceRefInput> resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the domain - validateDomain(maybeDomainUrn); - validateInputResources(resources, context); + // First, validate the domain + validateDomain(maybeDomainUrn); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchSetDomains(maybeDomainUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchSetDomains(maybeDomainUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateDomain(@Nullable String maybeDomainUrn) { @@ -66,23 +69,31 @@ 
private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchSetDomains(String maybeDomainUrn, List<ResourceRefInput> resources, QueryContext context) { + private void batchSetDomains( + String maybeDomainUrn, List<ResourceRefInput> resources, QueryContext context) { log.debug("Batch adding Domains. domainUrn: {}, resources: {}", maybeDomainUrn, resources); try { - DomainUtils.setDomainForResources(maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), + DomainUtils.setDomainForResources( + maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - maybeDomainUrn, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + maybeDomainUrn, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java index 5961dc9087a63..e76617d119621 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateDeprecationResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -30,23 +29,32 @@ public class BatchUpdateDeprecationResolver implements DataFetcher<CompletableFu @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateDeprecationInput input = bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); + final BatchUpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); final List<ResourceRefInput> resources 
= input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the resources - validateInputResources(resources, context); + // First, validate the resources + validateInputResources(resources, context); - try { - // Then execute the bulk update - batchUpdateDeprecation(input.getDeprecated(), input.getNote(), input.getDecommissionTime(), resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk update + batchUpdateDeprecation( + input.getDeprecated(), + input.getNote(), + input.getDecommissionTime(), + resources, + context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List<ResourceRefInput> resources, QueryContext context) { @@ -58,17 +66,25 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DeprecationUtils.isAuthorizedToUpdateDeprecationForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchUpdateDeprecation(boolean deprecated, + private void batchUpdateDeprecation( + boolean deprecated, @Nullable String note, @Nullable Long decommissionTime, List<ResourceRefInput> resources, QueryContext context) { - log.debug("Batch updating deprecation. deprecated: {}, note: {}, decommissionTime: {}, resources: {}", deprecated, note, decommissionTime, resources); + log.debug( + "Batch updating deprecation. 
deprecated: {}, note: {}, decommissionTime: {}, resources: {}", + deprecated, + note, + decommissionTime, + resources); try { DeprecationUtils.updateDeprecationForResources( deprecated, @@ -78,10 +94,14 @@ private void batchUpdateDeprecation(boolean deprecated, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch update deprecated to %s for resources with urns %s!", - deprecated, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch update deprecated to %s for resources with urns %s!", + deprecated, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java index 69b2b92fb9cca..5a25e6d83e648 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -14,9 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateSoftDeletedResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -26,24 +25,32 @@ public class BatchUpdateSoftDeletedResolver implements DataFetcher<CompletableFu @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateSoftDeletedInput input = bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); + final BatchUpdateSoftDeletedInput input = + bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); final List<String> urns = input.getUrns(); final boolean deleted = input.getDeleted(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the entities exist - validateInputUrns(urns, context); + // First, validate the entities exist + validateInputUrns(urns, context); - try { - // Then execute the bulk soft delete - batchUpdateSoftDeleted(deleted, urns, context); - return true; - } catch (Exception e) { - log.error("Failed to perform batch soft delete against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform batch soft delete against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk soft delete + batchUpdateSoftDeleted(deleted, urns, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform batch soft delete against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform batch soft delete against input %s", input.toString()), + e); + } + 
}); } private void validateInputUrns(List<String> urnStrs, QueryContext context) { @@ -55,10 +62,12 @@ private void validateInputUrns(List<String> urnStrs, QueryContext context) { private void validateInputUrn(String urnStr, QueryContext context) { final Urn urn = UrnUtils.getUrn(urnStr); if (!DeleteUtils.isAuthorizedToDeleteEntity(context, urn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } if (!_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); + throw new IllegalArgumentException( + String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); } } @@ -66,14 +75,12 @@ private void batchUpdateSoftDeleted(boolean removed, List<String> urnStrs, Query log.debug("Batch soft deleting assets. urns: {}", urnStrs); try { DeleteUtils.updateStatusForResources( - removed, - urnStrs, - UrnUtils.getUrn(context.getActorUrn()), - _entityService); + removed, urnStrs, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to batch update soft deleted status entities with urns %s!", urnStrs), + String.format( + "Failed to batch update soft deleted status entities with urns %s!", urnStrs), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index 59d5d6939c04c..d0796389d2280 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -1,13 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.container.EditableContainerProperties; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.dataproduct.DataProductProperties; import com.linkedin.domain.DomainProperties; @@ -30,148 +31,191 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DescriptionUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DescriptionUtils() { } + private DescriptionUtils() {} public static void 
updateFieldDescription( String newDescription, Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService - ) { - EditableSchemaMetadata editableSchemaMetadata = - (EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); - - editableFieldInfo.setDescription(newDescription); - - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + EntityService entityService) { + EditableSchemaMetadata editableSchemaMetadata = + (EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); + + editableFieldInfo.setDescription(newDescription); + + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } public static void updateContainerDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableContainerProperties containerProperties = - (EditableContainerProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableContainerProperties()); + (EditableContainerProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableContainerProperties()); containerProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, containerProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + containerProperties, + actor, + entityService); } public static void updateDomainDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DomainProperties domainProperties = - (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, entityService, null); + (DomainProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + entityService, + null); if (domainProperties == null) { - // If there are no properties for the domain already, then we should throw since the properties model also requires a name. + // If there are no properties for the domain already, then we should throw since the + // properties model also requires a name. 
throw new IllegalArgumentException("Properties for this Domain do not yet exist!"); } domainProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + entityService); } public static void updateTagDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { TagProperties tagProperties = - (TagProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); + (TagProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); if (tagProperties == null) { - // If there are no properties for the tag already, then we should throw since the properties model also requires a name. + // If there are no properties for the tag already, then we should throw since the properties + // model also requires a name. throw new IllegalArgumentException("Properties for this Tag do not yet exist!"); } tagProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); + persistAspect( + resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); } public static void updateCorpGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = - (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, entityService, new CorpGroupEditableInfo()); + (CorpGroupEditableInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + entityService, + new CorpGroupEditableInfo()); if (corpGroupEditableInfo != null) { corpGroupEditableInfo.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, corpGroupEditableInfo, actor, entityService); + persistAspect( + resourceUrn, + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + corpGroupEditableInfo, + actor, + entityService); } public static void updateGlossaryTermDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + entityService, + null); if (glossaryTermInfo == null) { - // If there are no properties for the term already, then we should throw since the properties model also requires a name. + // If there are no properties for the term already, then we should throw since the properties + // model also requires a name. 
throw new IllegalArgumentException("Properties for this Glossary Term do not yet exist!"); } - glossaryTermInfo.setDefinition(newDescription); // We call description 'definition' for glossary terms. Not great, we know. :( - persistAspect(resourceUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, entityService); + glossaryTermInfo.setDefinition( + newDescription); // We call description 'definition' for glossary terms. Not great, we know. + // :( + persistAspect( + resourceUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + entityService); } public static void updateGlossaryNodeDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setDefinition(newDescription); - persistAspect(resourceUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, entityService); + persistAspect( + resourceUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + entityService); } public static void updateNotebookDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableNotebookProperties notebookProperties = (EditableNotebookProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableNotebookProperties notebookProperties = + (EditableNotebookProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + entityService, + null); if (notebookProperties != null) { notebookProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, notebookProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + notebookProperties, + actor, + entityService); } public static Boolean validateFieldDescriptionInput( Urn resourceUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { + EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); @@ -179,51 +223,41 @@ public static Boolean validateFieldDescriptionInput( return true; } - public static Boolean validateDomainInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. 
%s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateContainerInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateLabelInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateCorpGroupInput( - Urn corpUserUrn, - EntityService entityService - ) { + public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { if (!entityService.exists(corpUserUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); } return true; } - public static Boolean validateNotebookInput( - Urn notebookUrn, - EntityService entityService) { + public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { if (!entityService.exists(notebookUrn)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", notebookUrn, notebookUrn)); @@ -231,11 +265,15 @@ public static Boolean validateNotebookInput( return true; } - public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateFieldDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -245,11 +283,14 @@ public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -259,25 +300,31 @@ public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContex orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateContainerDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - targetUrn.getEntityType(), - targetUrn.toString(), - orPrivilegeGroups); - } + public static boolean isAuthorizedToUpdateContainerDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + targetUrn.getEntityType(), + targetUrn.toString(), + orPrivilegeGroups); + } - public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + 
ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -288,79 +335,122 @@ public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext cont } public static void updateMlModelDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelProperties editableProperties = + (EditableMLModelProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlModelGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelGroupProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelGroupProperties editableProperties = + (EditableMLModelGroupProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelGroupProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } + public static void updateMlFeatureDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureProperties editableProperties = + (EditableMLFeatureProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static 
void updateMlFeatureTableDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureTableProperties editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureTableProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureTableProperties editableProperties = + (EditableMLFeatureTableProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureTableProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlPrimaryKeyDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLPrimaryKeyProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLPrimaryKeyProperties editableProperties = + (EditableMLPrimaryKeyProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLPrimaryKeyProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateDataProductDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - DataProductProperties properties = (DataProductProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, entityService, new DataProductProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + DataProductProperties properties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + entityService, + new DataProductProperties()); if (properties != null) { properties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + properties, + actor, + entityService); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e5e3a5a0ee42e..e4c5c132be4f7 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java @@ -19,11 +19,10 @@ import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class MoveDomainResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -33,57 +32,78 @@ public class MoveDomainResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final MoveDomainInput input = ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); + final MoveDomainInput input = + ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(input.getResourceUrn()); - final Urn newParentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Urn newParentDomainUrn = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canManageDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canManageDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - try { - if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Resource is not a domain."); - } + try { + if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Resource is not a domain."); + } - DomainProperties properties = (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, - null - ); + DomainProperties properties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); - if (properties == null) { - throw new IllegalArgumentException("Domain properties do not exist."); - } + if (properties == null) { + throw new IllegalArgumentException("Domain properties do not exist."); + } - if (newParentDomainUrn != null) { - if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Parent entity is not a domain."); - } - if (!_entityService.exists(newParentDomainUrn)) { - throw new IllegalArgumentException("Parent entity does not exist."); - } - } + if (newParentDomainUrn != null) { + if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Parent entity is not a domain."); + } + if (!_entityService.exists(newParentDomainUrn)) { + throw new IllegalArgumentException("Parent entity does not exist."); + } + } - if (DomainUtils.hasNameConflict(properties.getName(), newParentDomainUrn, context, _entityClient)) { - throw new DataHubGraphQLException( - String.format("\"%s\" already exists in the destination domain. Please pick a unique name.", properties.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } + if (DomainUtils.hasNameConflict( + properties.getName(), newParentDomainUrn, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in the destination domain. 
Please pick a unique name.", + properties.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } - properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - MutationUtils.persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, properties, actor, _entityService); - return true; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to move domain {} to parent {} : {}", input.getResourceUrn(), input.getParentDomain(), e.getMessage()); - throw new RuntimeException(String.format("Failed to move domain %s to %s", input.getResourceUrn(), input.getParentDomain()), e); - } - }); + properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + MutationUtils.persistAspect( + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + properties, + actor, + _entityService); + return true; + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to move domain {} to parent {} : {}", + input.getResourceUrn(), + input.getParentDomain(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to move domain %s to %s", + input.getResourceUrn(), input.getParentDomain()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java index 30bd940a7dfed..064b532a792c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java @@ -1,20 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.codahale.metrics.Timer; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.metadata.utils.metrics.MetricUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Generic GraphQL resolver responsible for performing updates against particular types. 
* @@ -23,7 +21,8 @@ */ public class MutableTypeBatchResolver<I, B, T> implements DataFetcher<CompletableFuture<List<T>>> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); private final BatchMutableType<I, B, T> _batchMutableType; @@ -33,21 +32,23 @@ public MutableTypeBatchResolver(final BatchMutableType<I, B, T> batchMutableType @Override public CompletableFuture<List<T>> get(DataFetchingEnvironment environment) throws Exception { - final B[] input = bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); - - return CompletableFuture.supplyAsync(() -> { - Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); - - try { - return _batchMutableType.batchUpdate(input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error("Failed to perform batchUpdate", e); - throw new IllegalArgumentException(e); - } finally { - timer.stop(); - } - }); + final B[] input = + bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); + + return CompletableFuture.supplyAsync( + () -> { + Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); + + try { + return _batchMutableType.batchUpdate(input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error("Failed to perform batchUpdate", e); + throw new IllegalArgumentException(e); + } finally { + timer.stop(); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java index 115a68e808de6..c62282c906597 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.MutableType; import graphql.schema.DataFetcher; @@ -8,8 +10,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** * Generic GraphQL resolver responsible for performing updates against particular types. 
* @@ -18,28 +18,34 @@ */ public class MutableTypeResolver<I, T> implements DataFetcher<CompletableFuture<T>> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeResolver.class.getName()); - private final MutableType<I, T> _mutableType; + private final MutableType<I, T> _mutableType; - public MutableTypeResolver(final MutableType<I, T> mutableType) { - _mutableType = mutableType; - } + public MutableTypeResolver(final MutableType<I, T> mutableType) { + _mutableType = mutableType; + } - @Override - public CompletableFuture<T> get(DataFetchingEnvironment environment) throws Exception { - final String urn = environment.getArgument("urn"); - final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug(String.format("Mutating entity. input: %s", input)); - return _mutableType.update(urn, input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error(String.format("Failed to perform update against input %s", input) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } + @Override + public CompletableFuture<T> get(DataFetchingEnvironment environment) throws Exception { + final String urn = environment.getArgument("urn"); + final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug(String.format("Mutating entity. input: %s", input)); + return _mutableType.update(urn, input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error( + String.format("Failed to perform update against input %s", input) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java index c862fcfa83594..4a915b2a477cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringMap; @@ -19,49 +21,56 @@ import java.util.Optional; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class MutationUtils { - private MutationUtils() { } + private MutationUtils() {} - public static void persistAspect(Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); + public static void persistAspect( + Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); 
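// The proposal assembled above packages the raw aspect for the generic ingestion path;
// ingestProposal then persists it together with an audit stamp for the acting user. The
// trailing boolean is assumed here to be the async flag (based on the EntityService
// signature this code compiles against, not stated in this patch), so passing false
// requests synchronous ingestion.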
entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); } /** - * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param urn * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithUrn(Urn urn, String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithUrn( + Urn urn, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); return setProposalProperties(proposal, urn.getEntityType(), aspectName, aspect); } /** - * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param entityKey * @param entityType * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithKey(RecordTemplate entityKey, String entityType, - String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithKey( + RecordTemplate entityKey, String entityType, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(entityKey)); return setProposalProperties(proposal, entityType, aspectName, aspect); } - private static MetadataChangeProposal setProposalProperties(MetadataChangeProposal proposal, - String entityType, String aspectName, RecordTemplate aspect) { + private static MetadataChangeProposal setProposalProperties( + MetadataChangeProposal proposal, + String entityType, + String aspectName, + RecordTemplate aspect) { proposal.setEntityType(entityType); proposal.setAspectName(aspectName); proposal.setAspect(GenericRecordUtils.serializeAspect(aspect)); @@ -77,18 +86,16 @@ private static MetadataChangeProposal setProposalProperties(MetadataChangePropos } public static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional<EditableSchemaFieldInfo> fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional<EditableSchemaFieldInfo> fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); @@ -104,34 +111,37 @@ public static Boolean validateSubresourceExists( Urn targetUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { + EntityService entityService) { if (subResourceType.equals(SubResourceType.DATASET_FIELD)) { - SchemaMetadata schemaMetadata = (SchemaMetadata) entityService.getAspect(targetUrn, - Constants.SCHEMA_METADATA_ASPECT_NAME, 
0); + SchemaMetadata schemaMetadata = + (SchemaMetadata) + entityService.getAspect(targetUrn, Constants.SCHEMA_METADATA_ASPECT_NAME, 0); if (schemaMetadata == null) { throw new IllegalArgumentException( - String.format("Failed to update %s & field %s. %s has no schema.", targetUrn, subResource, targetUrn) - ); + String.format( + "Failed to update %s & field %s. %s has no schema.", + targetUrn, subResource, targetUrn)); } Optional<SchemaField> fieldMatch = - schemaMetadata.getFields().stream().filter(field -> field.getFieldPath().equals(subResource)).findFirst(); + schemaMetadata.getFields().stream() + .filter(field -> field.getFieldPath().equals(subResource)) + .findFirst(); if (!fieldMatch.isPresent()) { - throw new IllegalArgumentException(String.format( - "Failed to update %s & field %s. Field %s does not exist in the datasets schema.", - targetUrn, subResource, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to update %s & field %s. Field %s does not exist in the datasets schema.", + targetUrn, subResource, subResource)); } return true; } - throw new IllegalArgumentException(String.format( - "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.", - targetUrn, subResource, SubResourceType.values() - )); + throw new IllegalArgumentException( + String.format( + "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.", + targetUrn, subResource, SubResourceType.values())); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java index 23c08043af5d3..f84d1b3a66f6f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,9 +15,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveLinkResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -24,36 +23,38 @@ public class RemoveLinkResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final RemoveLinkInput input = bindArgument(environment.getArgument("input"), RemoveLinkInput.class); + final RemoveLinkInput input = + bindArgument(environment.getArgument("input"), RemoveLinkInput.class); String linkUrl = input.getLinkUrl(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { - log.debug("Removing Link input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.removeLink( - linkUrl, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove link from resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to remove link from resource with input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService); + try { + log.debug("Removing Link input: {}", input); + + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LinkUtils.removeLink(linkUrl, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to remove link from resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to remove link from resource with input %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java index 2d5faaab44458..9827aa0666d19 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveOwnerResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -27,36 +26,42 @@ public class RemoveOwnerResolver implements DataFetcher<CompletableFuture<Boolea @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final RemoveOwnerInput input = bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); + final RemoveOwnerInput input = + bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); Urn ownerUrn = Urn.createFromString(input.getOwnerUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - Optional<Urn> maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + Optional<Urn> maybeOwnershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? Optional.empty() + : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateRemoveInput( - targetUrn, - _entityService - ); - try { - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.removeOwnersFromResources( - ImmutableList.of(ownerUrn), - maybeOwnershipTypeUrn, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove owner from resource with input {}", input); - throw new RuntimeException(String.format("Failed to remove owner from resource with input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + OwnerUtils.validateRemoveInput(targetUrn, _entityService); + try { + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.removeOwnersFromResources( + ImmutableList.of(ownerUrn), + maybeOwnershipTypeUrn, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to remove owner from resource with input {}", input); + throw new RuntimeException( + String.format( + "Failed to remove owner from resource with input %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java index 33a95c3576061..7e2919e0ca1f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTagResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -26,44 +25,54 @@ public class RemoveTagResolver implements DataFetcher<CompletableFuture<Boolean> @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final TagAssociationInput input = + bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - true - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + true); + try { - if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { - log.error("Failed to remove %s. It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { + log.error("Failed to remove %s. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.debug("Removing Tag. input: %s", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTagsFromResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.debug("Removing Tag. input: %s", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.removeTagsFromResources( + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java index 8f18b0ecd6198..ec38360df6d8e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTermResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -26,45 +25,55 @@ public class RemoveTermResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), 
TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - true - ); + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + true); - try { + try { - if (!termUrn.getEntityType().equals("glossaryTerm")) { - log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); - return false; - } + if (!termUrn.getEntityType().equals("glossaryTerm")) { + log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); + return false; + } - log.info(String.format("Removing Term. input: {}", input)); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTermsFromResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info(String.format("Removing Term. 
input: {}", input)); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.removeTermsFromResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java index d6e6e5610da56..13a8427633cae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -12,19 +14,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class UpdateDescriptionResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -33,7 +30,8 @@ public class UpdateDescriptionResolver implements DataFetcher<CompletableFuture< @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final DescriptionUpdateInput input = bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); + final DescriptionUpdateInput input = + bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating description. input: {}", input.toString()); switch (targetUrn.getEntityType()) { @@ -67,380 +65,383 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw return updateDataProductDescription(targetUrn, input, environment.getContext()); default: throw new RuntimeException( - String.format("Failed to update description. Unsupported resource type %s provided.", targetUrn)); + String.format( + "Failed to update description. Unsupported resource type %s provided.", targetUrn)); } } - private CompletableFuture<Boolean> updateContainerDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - DescriptionUtils.validateContainerInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateContainerDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateContainerDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + DescriptionUtils.validateContainerInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateContainerDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateDomainDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateDomainInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDomainDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateDomainDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateDomainInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDomainDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - // If updating schema field description fails, try again on a sibling until there are no more siblings to try. Then throw if necessary. 
+ // If updating schema field description fails, try again on a sibling until there are no more + // siblings to try. Then throw if necessary. private Boolean attemptUpdateDatasetSchemaFieldDescription( @Nonnull final Urn targetUrn, @Nonnull final DescriptionUpdateInput input, @Nonnull final QueryContext context, @Nonnull final HashSet<Urn> attemptedUrns, - @Nonnull final List<Urn> siblingUrns - ) { + @Nonnull final List<Urn> siblingUrns) { attemptedUrns.add(targetUrn); try { - DescriptionUtils.validateFieldDescriptionInput(targetUrn, input.getSubResource(), input.getSubResourceType(), - _entityService); + DescriptionUtils.validateFieldDescriptionInput( + targetUrn, input.getSubResource(), input.getSubResourceType(), _entityService); final Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateFieldDescription(input.getDescription(), targetUrn, input.getSubResource(), actor, - _entityService); + DescriptionUtils.updateFieldDescription( + input.getDescription(), targetUrn, input.getSubResource(), actor, _entityService); return true; } catch (Exception e) { final Optional<Urn> siblingUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, attemptedUrns); if (siblingUrn.isPresent()) { - log.warn("Failed to update description for input {}, trying sibling urn {} now.", input.toString(), siblingUrn.get()); - return attemptUpdateDatasetSchemaFieldDescription(siblingUrn.get(), input, context, attemptedUrns, siblingUrns); + log.warn( + "Failed to update description for input {}, trying sibling urn {} now.", + input.toString(), + siblingUrn.get()); + return attemptUpdateDatasetSchemaFieldDescription( + siblingUrn.get(), input, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } - private CompletableFuture<Boolean> updateDatasetSchemaFieldDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + private CompletableFuture<Boolean> updateDatasetSchemaFieldDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - if (input.getSubResourceType() == null) { - throw new IllegalArgumentException("Update description without subresource is not currently supported"); - } + if (input.getSubResourceType() == null) { + throw new IllegalArgumentException( + "Update description without subresource is not currently supported"); + } - List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService); + List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService); - return attemptUpdateDatasetSchemaFieldDescription(targetUrn, input, context, new HashSet<>(), siblingUrns); - }); + return attemptUpdateDatasetSchemaFieldDescription( + targetUrn, input, context, new HashSet<>(), siblingUrns); + }); } - private CompletableFuture<Boolean> updateTagDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateTagDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateTagDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateTagDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateGlossaryTermDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryTermDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateGlossaryTermDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryTermDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateGlossaryNodeDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryNodeDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateGlossaryNodeDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryNodeDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateCorpGroupDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateCorpGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateCorpGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateCorpGroupDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateNotebookDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateNotebookInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateNotebookDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateNotebookDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateNotebookInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateNotebookDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlModelDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlModelDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlModelGroupDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlModelGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelGroupDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlFeatureDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlFeatureDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlPrimaryKeyDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlPrimaryKeyDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlPrimaryKeyDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlPrimaryKeyDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateMlFeatureTableDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureTableDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateMlFeatureTableDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureTableDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture<Boolean> updateDataProductDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDataProductDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture<Boolean> updateDataProductDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDataProductDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index 0e316ac1296ee..dd44c2718b3a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -16,22 +19,18 @@ import com.linkedin.domain.DomainProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -41,178 +40,232 @@ public class UpdateNameResolver implements DataFetcher<CompletableFuture<Boolean @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws 
Exception { - final UpdateNameInput input = bindArgument(environment.getArgument("input"), UpdateNameInput.class); + final UpdateNameInput input = + bindArgument(environment.getArgument("input"), UpdateNameInput.class); Urn targetUrn = Urn.createFromString(input.getUrn()); log.info("Updating name. input: {}", input); - return CompletableFuture.supplyAsync(() -> { - if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); - } + return CompletableFuture.supplyAsync( + () -> { + if (!_entityService.exists(targetUrn)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + } - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermName(targetUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeName(targetUrn, input, environment.getContext()); - case Constants.DOMAIN_ENTITY_NAME: - return updateDomainName(targetUrn, input, environment.getContext()); - case Constants.CORP_GROUP_ENTITY_NAME: - return updateGroupName(targetUrn, input, environment.getContext()); - case Constants.DATA_PRODUCT_ENTITY_NAME: - return updateDataProductName(targetUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update name. Unsupported resource type %s provided.", targetUrn)); - } - }); + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermName(targetUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeName(targetUrn, input, environment.getContext()); + case Constants.DOMAIN_ENTITY_NAME: + return updateDomainName(targetUrn, input, environment.getContext()); + case Constants.CORP_GROUP_ENTITY_NAME: + return updateGroupName(targetUrn, input, environment.getContext()); + case Constants.DATA_PRODUCT_ENTITY_NAME: + return updateDataProductName(targetUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update name. 
Unsupported resource type %s provided.", targetUrn)); + } + }); } private Boolean updateGlossaryTermName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { throw new IllegalArgumentException("Glossary Term does not exist"); } glossaryTermInfo.setName(input.getName()); Urn actor = UrnUtils.getUrn(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateGlossaryNodeName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private Boolean updateDomainName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateDomainName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageDomains(context)) { try { - DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, null); + DomainProperties domainProperties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (domainProperties == null) { throw new IllegalArgumentException("Domain does not exist"); } - if (DomainUtils.hasNameConflict(input.getName(), DomainUtils.getParentDomainSafely(domainProperties), context, _entityClient)) { + if (DomainUtils.hasNameConflict( + input.getName(), + DomainUtils.getParentDomainSafely(domainProperties), + context, + _entityClient)) { throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); + String.format( + "\"%s\" already exists in this domain. Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); } domainProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + _entityService); return true; } catch (DataHubGraphQLException e) { throw e; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private Boolean updateGroupName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateGroupName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageUsersAndGroups(context)) { try { - CorpGroupInfo corpGroupInfo = (CorpGroupInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.CORP_GROUP_INFO_ASPECT_NAME, _entityService, null); + CorpGroupInfo corpGroupInfo = + (CorpGroupInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.CORP_GROUP_INFO_ASPECT_NAME, + _entityService, + null); if (corpGroupInfo == null) { throw new IllegalArgumentException("Group does not exist"); } corpGroupInfo.setDisplayName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); + persistAspect( + targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateDataProductName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { try { - DataProductProperties dataProductProperties = (DataProductProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, _entityService, null); + DataProductProperties dataProductProperties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (dataProductProperties == null) { throw new IllegalArgumentException("Data Product does not exist"); } - Domains dataProductDomains = (Domains) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); - if (dataProductDomains != null && dataProductDomains.hasDomains() && dataProductDomains.getDomains().size() > 0) { + Domains dataProductDomains = + (Domains) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); + if (dataProductDomains != null + && dataProductDomains.hasDomains() + && dataProductDomains.getDomains().size() > 0) { // get first domain since we only allow one domain right now Urn domainUrn = UrnUtils.getUrn(dataProductDomains.getDomains().get(0).toString()); - // if they can't edit a data product from either the parent domain permission or from permission on the data product itself, throw error + // if they can't edit a data product from either the parent domain permission or from + // permission on the data product itself, throw error if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn) && !DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } else { // should not happen since data products need to have a domain if (!DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } dataProductProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + dataProductProperties, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 5d78bc38eafe8..848118e6cc0f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -8,21 +11,17 @@ import com.linkedin.datahub.graphql.generated.UpdateParentNodeInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -32,54 +31,72 @@ public class UpdateParentNodeResolver implements DataFetcher<CompletableFuture<B @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { - final UpdateParentNodeInput input = bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); + final UpdateParentNodeInput input = + bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); 
final QueryContext context = environment.getContext(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating parent node. input: {}", input.toString()); if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); } GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryNode.", targetUrn, parentNodeUrn)); + if (!_entityService.exists(parentNodeUrn) + || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryNode.", + targetUrn, parentNodeUrn)); } } GlossaryNodeUrn finalParentNodeUrn = parentNodeUrn; - return CompletableFuture.supplyAsync(() -> { - Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - // need to be able to manage current parent node and new parent node - if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) - && GlossaryUtils.canManageChildrenEntities(context, finalParentNodeUrn, _entityClient)) { - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update parentNode. Unsupported resource type %s provided.", targetUrn)); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + // need to be able to manage current parent node and new parent node + if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) + && GlossaryUtils.canManageChildrenEntities( + context, finalParentNodeUrn, _entityClient)) { + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update parentNode. Unsupported resource type %s provided.", + targetUrn)); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private Boolean updateGlossaryTermParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { - // If there is no info aspect for the term already, then we should throw since the model also requires a name. + // If there is no info aspect for the term already, then we should throw since the model + // also requires a name. throw new IllegalArgumentException("Info for this Glossary Term does not yet exist!"); } @@ -89,12 +106,18 @@ private Boolean updateGlossaryTermParentNode( glossaryTermInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } @@ -102,11 +125,15 @@ private Boolean updateGlossaryNodeParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Info for this Glossary Node does not yet exist!"); } @@ -117,12 +144,18 @@ private Boolean updateGlossaryNodeParentNode( glossaryNodeInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java index 875bc43e7c100..53b215bce7746 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,15 +21,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} - * instead. - */ +/** Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} instead. */ @Slf4j @RequiredArgsConstructor public class UpdateUserSettingResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -35,35 +31,46 @@ public class UpdateUserSettingResolver implements DataFetcher<CompletableFuture< @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateUserSettingInput input = bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); + final UpdateUserSettingInput input = + bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); UserSetting name = input.getName(); final boolean value = input.getValue(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - return CompletableFuture.supplyAsync(() -> { - try { - // In the future with more settings, we'll need to do a read-modify-write - // for now though, we can just write since there is only 1 setting - CorpUserSettings newSettings = new CorpUserSettings(); - newSettings.setAppearance(new CorpUserAppearanceSettings()); - if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { - newSettings.setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); - } else { - log.error("User Setting name {} not currently supported", name); - throw new RuntimeException(String.format("User Setting name %s not currently supported", name)); - } + return CompletableFuture.supplyAsync( + () -> { + try { + // In the future with more settings, we'll need to do a read-modify-write + // for now though, we can just write since there is only 1 setting + CorpUserSettings newSettings = new CorpUserSettings(); + newSettings.setAppearance(new CorpUserAppearanceSettings()); + if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { + newSettings.setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); + } else { + log.error("User Setting name {} not currently supported", name); + throw new RuntimeException( + String.format("User Setting name %s not currently supported", name)); + } - MetadataChangeProposal proposal = - buildMetadataChangeProposalWithUrn(actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); - _entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); + _entityService.ingestProposal(proposal, 
EntityUtils.getAuditStamp(actor), false); - return true; - } catch (Exception e) { - log.error("Failed to perform user settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform user settings update against input %s", input.toString()), e); - } - }); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform user settings update against input %s", input.toString()), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 7d4c5bee61e19..3fffe9fa019e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -19,22 +20,21 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DeleteUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeleteUtils() { } + private DeleteUtils() {} public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -45,11 +45,7 @@ public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, } public static void updateStatusForResources( - boolean removed, - List<String> urnStrs, - Urn actor, - EntityService entityService - ) { + boolean removed, List<String> urnStrs, Urn actor, 
EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (String urnStr : urnStrs) { changes.add(buildSoftDeleteProposal(removed, urnStr, actor, entityService)); @@ -58,17 +54,13 @@ public static void updateStatusForResources( } private static MetadataChangeProposal buildSoftDeleteProposal( - boolean removed, - String urnStr, - Urn actor, - EntityService entityService - ) { - Status status = (Status) EntityUtils.getAspectFromEntity( - urnStr, - Constants.STATUS_ASPECT_NAME, - entityService, - new Status()); + boolean removed, String urnStr, Urn actor, EntityService entityService) { + Status status = + (Status) + EntityUtils.getAspectFromEntity( + urnStr, Constants.STATUS_ASPECT_NAME, entityService, new Status()); status.setRemoved(removed); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java index bd82bbb8e514f..3114e5241711c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; @@ -22,22 +23,22 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; - - @Slf4j public class DeprecationUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeprecationUtils() { } + private DeprecationUtils() {} - public static boolean isAuthorizedToUpdateDeprecationForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDeprecationForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final 
DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -53,11 +54,12 @@ public static void updateDeprecationForResources( @Nullable Long decommissionTime, List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildUpdateDeprecationProposal(deprecated, note, decommissionTime, resource, actor, entityService)); + changes.add( + buildUpdateDeprecationProposal( + deprecated, note, decommissionTime, resource, actor, entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } @@ -68,21 +70,11 @@ private static MetadataChangeProposal buildUpdateDeprecationProposal( @Nullable Long decommissionTime, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { String resourceUrn = resource.getResourceUrn(); - Deprecation deprecation = getDeprecation( - entityService, - resourceUrn, - actor, - note, - deprecated, - decommissionTime - ); + Deprecation deprecation = + getDeprecation(entityService, resourceUrn, actor, note, deprecated, decommissionTime); return MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn(resourceUrn), - Constants.DEPRECATION_ASPECT_NAME, - deprecation - ); + UrnUtils.getUrn(resourceUrn), Constants.DEPRECATION_ASPECT_NAME, deprecation); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index 585fbdf53a2ba..fb88d6c29f662 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -1,15 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; @@ -30,7 +32,6 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; - import com.linkedin.r2.RemoteInvocationException; import java.util.ArrayList; import java.util.Collections; @@ -40,13 +41,8 @@ import java.util.stream.Collectors; 
import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - // TODO: Move to consuming from DomainService. @Slf4j public class DomainUtils { @@ -54,17 +50,20 @@ public class DomainUtils { private static final String HAS_PARENT_DOMAIN_INDEX_FIELD_NAME = "hasParentDomain"; private static final String NAME_INDEX_FIELD_NAME = "name"; - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DomainUtils() { } + private DomainUtils() {} - public static boolean isAuthorizedToUpdateDomainsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -78,8 +77,8 @@ public static void setDomainForResources( @Nullable Urn domainUrn, List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildSetDomainProposal(domainUrn, resource, actor, entityService)); @@ -88,27 +87,27 @@ public static void setDomainForResources( } private static MetadataChangeProposal buildSetDomainProposal( - @Nullable Urn domainUrn, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.DOMAINS_ASPECT_NAME, - entityService, - new Domains()); + @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, EntityService entityService) { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.DOMAINS_ASPECT_NAME, + entityService, + new Domains()); final UrnArray newDomains = new UrnArray(); if (domainUrn != null) { newDomains.add(domainUrn); } domains.setDomains(newDomains); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } public static void validateDomain(Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate Domain with urn %s. Urn does not exist.", domainUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate Domain with urn %s. 
Urn does not exist.", domainUrn)); } } @@ -119,14 +118,12 @@ private static List<Criterion> buildRootDomainCriteria() { new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("false") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("") - .setCondition(Condition.IS_NULL) - ); + .setCondition(Condition.IS_NULL)); return criteria; } @@ -138,14 +135,12 @@ private static List<Criterion> buildParentDomainCriteria(@Nonnull final Urn pare new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("true") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue(parentDomainUrn.toString()) - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); return criteria; } @@ -158,36 +153,38 @@ private static Criterion buildNameCriterion(@Nonnull final String name) { } /** - * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain criterion. - * The reason for the OR on root is elastic can have a null|false value to represent an root domain in the index. + * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain + * criterion. The reason for the OR on root is elastic can have a null|false value to represent an + * root domain in the index. + * * @param name an optional name to AND in to each condition of the filter * @param parentDomainUrn the parent domain (null means root). * @return the Filter */ - public static Filter buildNameAndParentDomainFilter(@Nullable final String name, @Nullable final Urn parentDomainUrn) { + public static Filter buildNameAndParentDomainFilter( + @Nullable final String name, @Nullable final Urn parentDomainUrn) { if (parentDomainUrn == null) { - return new Filter().setOr( - new ConjunctiveCriterionArray( - buildRootDomainCriteria().stream().map(parentCriterion -> { - final CriterionArray array = new CriterionArray(parentCriterion); - if (name != null) { - array.add(buildNameCriterion(name)); - } - return new ConjunctiveCriterion().setAnd(array); - }).collect(Collectors.toList()) - ) - ); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + buildRootDomainCriteria().stream() + .map( + parentCriterion -> { + final CriterionArray array = new CriterionArray(parentCriterion); + if (name != null) { + array.add(buildNameCriterion(name)); + } + return new ConjunctiveCriterion().setAnd(array); + }) + .collect(Collectors.toList()))); } final CriterionArray andArray = new CriterionArray(buildParentDomainCriteria(parentDomainUrn)); if (name != null) { andArray.add(buildNameCriterion(name)); } - return new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(andArray) - ) - ); + return new Filter() + .setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(andArray))); } public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn) { @@ -196,6 +193,7 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn /** * Check if a domain has any child domains + * * @param domainUrn the URN of the domain to check * @param context query context (includes authorization context to authorize the request) * @param entityClient client used to perform the check @@ -204,18 +202,14 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn public static 
boolean hasChildDomains( @Nonnull final Urn domainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) throws RemoteInvocationException { + @Nonnull final EntityClient entityClient) + throws RemoteInvocationException { Filter parentDomainFilter = buildParentDomainFilter(domainUrn); // Search for entities matching parent domain // Limit count to 1 for existence check - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - parentDomainFilter, - null, - 0, - 1, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1, context.getAuthentication()); return (searchResult.getNumEntities() > 0); } @@ -223,23 +217,18 @@ private static Map<Urn, EntityResponse> getDomainsByNameAndParent( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { final Filter filter = buildNameAndParentDomainFilter(name, parentDomainUrn); - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final Set<Urn> domainUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Set<Urn> domainUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); return entityClient.batchGetV2( DOMAIN_ENTITY_NAME, @@ -255,51 +244,63 @@ public static boolean hasNameConflict( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { - final Map<Urn, EntityResponse> entities = getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); + @Nonnull final EntityClient entityClient) { + final Map<Urn, EntityResponse> entities = + getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); // Even though we searched by name, do one more pass to check the name is unique - return entities.values().stream().anyMatch(entityResponse -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data(); - DomainProperties domainProperties = new DomainProperties(dataMap); - return (domainProperties.hasName() && domainProperties.getName().equals(name)); - } - return false; - }); + return entities.values().stream() + .anyMatch( + entityResponse -> { + if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); + DomainProperties domainProperties = new DomainProperties(dataMap); + return (domainProperties.hasName() && domainProperties.getName().equals(name)); + } + return false; + }); } @Nullable public static Entity getParentDomain( @Nonnull final Urn urn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { - final EntityResponse entityResponse = entityClient.getV2( - DOMAIN_ENTITY_NAME, - urn, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - - if 
(entityResponse != null && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); + final EntityResponse entityResponse = + entityClient.getV2( + DOMAIN_ENTITY_NAME, + urn, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + + if (entityResponse != null + && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties = + new DomainProperties( + entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); final Urn parentDomainUrn = getParentDomainSafely(properties); return parentDomainUrn != null ? UrnToEntityMapper.map(parentDomainUrn) : null; } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve parent domain for entity %s", urn), e); + throw new RuntimeException( + String.format("Failed to retrieve parent domain for entity %s", urn), e); } return null; } /** - * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where moving a domain - * to the root leaves the parentDomain field set but makes hasParentDomain false. This helper makes sure that queries - * to elastic where hasParentDomain=false and parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where + * moving a domain to the root leaves the parentDomain field set but makes hasParentDomain false. + * This helper makes sure that queries to elastic where hasParentDomain=false and + * parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * * @param properties the domain properties aspect * @return the parentDomain or null */ @@ -307,4 +308,4 @@ public static Entity getParentDomain( public static Urn getParentDomainSafely(@Nonnull final DomainProperties properties) { return properties.hasParentDomain() ? 
properties.getParentDomain() : null; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java index 8aa4a8d756bea..15c93904fc3bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java @@ -3,7 +3,6 @@ import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -11,20 +10,22 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EmbedUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private EmbedUtils() { } + private EmbedUtils() {} - public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateEmbedForEntity( + @Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -33,4 +34,4 @@ public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn enti entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java index 655e5333cb34e..996bd3da120d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.glossary.GlossaryNodeInfo; @@ -15,32 +15,36 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.authorization.PoliciesConfig.Privilege; import com.linkedin.r2.RemoteInvocationException; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class GlossaryUtils { - private GlossaryUtils() { } + private GlossaryUtils() {} /** - * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the user has global control - * of their Business Glossary to create, edit, move, and delete Terms and Nodes. + * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the + * user has global control of their Business Glossary to create, edit, move, and delete Terms and + * Nodes. */ public static boolean canManageGlossaries(@Nonnull QueryContext context) { - return AuthorizationUtils.isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return AuthorizationUtils.isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } /** - * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes under a parent Node. - * They can do this with either the global MANAGE_GLOSSARIES privilege, or if they have the MANAGE_GLOSSARY_CHILDREN privilege - * on the relevant parent node in the Glossary. + * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes + * under a parent Node. They can do this with either the global MANAGE_GLOSSARIES privilege, or if + * they have the MANAGE_GLOSSARY_CHILDREN privilege on the relevant parent node in the Glossary. */ - public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, @Nonnull EntityClient entityClient) { + public static boolean canManageChildrenEntities( + @Nonnull QueryContext context, + @Nullable Urn parentNodeUrn, + @Nonnull EntityClient entityClient) { if (canManageGlossaries(context)) { return true; } @@ -48,28 +52,31 @@ public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @ return false; // if no parent node, we must rely on the canManageGlossaries method above } - //Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege - if (hasManagePrivilege(context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { + // Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege + if (hasManagePrivilege( + context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } - //Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no parent associated. + // Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no + // parent associated. 
Urn currentParentNodeUrn = parentNodeUrn; while (currentParentNodeUrn != null) { - if (hasManagePrivilege(context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { + if (hasManagePrivilege( + context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } currentParentNodeUrn = getParentUrn(currentParentNodeUrn, context, entityClient); } return false; - } - public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())) - )); + public static boolean hasManagePrivilege( + @Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -83,13 +90,24 @@ public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullabl * Returns the urn of the parent node for a given Glossary Term. Returns null if it doesn't exist. */ @Nullable - private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getTermParentUrn( + @Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_TERM_ENTITY_NAME, termUrn, - ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { - GlossaryTermInfo termInfo = new GlossaryTermInfo(response.getAspects() - .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + termUrn, + ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { + GlossaryTermInfo termInfo = + new GlossaryTermInfo( + response + .getAspects() + .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) + .getValue() + .data()); return termInfo.getParentNode(); } return null; @@ -102,13 +120,24 @@ private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext * Returns the urn of the parent node for a given Glossary Node. Returns null if it doesn't exist. 
*/ @Nullable - private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getNodeParentUrn( + @Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_NODE_ENTITY_NAME, nodeUrn, - ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { - GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(response.getAspects() - .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + nodeUrn, + ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { + GlossaryNodeInfo nodeInfo = + new GlossaryNodeInfo( + response + .getAspects() + .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) + .getValue() + .data()); return nodeInfo.getParentNode(); } return null; @@ -118,17 +147,21 @@ private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext } /** - * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null otherwise. + * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null + * otherwise. */ @Nullable - public static Urn getParentUrn(@Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + public static Urn getParentUrn( + @Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { switch (urn.getEntityType()) { case Constants.GLOSSARY_TERM_ENTITY_NAME: return getTermParentUrn(urn, context, entityClient); case Constants.GLOSSARY_NODE_ENTITY_NAME: return getNodeParentUrn(urn, context, entityClient); default: - log.warn("Tried to get the parent node urn of a non-glossary entity type: {}", urn.getEntityType()); + log.warn( + "Tried to get the parent node urn of a non-glossary entity type: {}", + urn.getEntityType()); return null; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index a93c7d5b333da..8765b91f65d9d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -13,8 +17,6 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import 
com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.metadata.Constants; @@ -30,53 +32,56 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming GlossaryTermService, TagService. @Slf4j public class LabelUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LabelUtils() { } + private LabelUtils() {} public static void removeTermFromResource( - Urn labelUrn, - Urn resourceUrn, - String subResource, - Urn actor, - EntityService entityService - ) { + Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, EntityService entityService) { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermIfExists(terms, labelUrn); persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermIfExists(editableFieldInfo.getGlossaryTerms(), labelUrn); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } public static void removeTagsFromResources( - List<Urn> tags, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> tags, List<ResourceRefInput> resources, Urn actor, EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildRemoveTagsProposal(tags, resource, actor, entityService)); @@ -85,11 +90,8 @@ public static void removeTagsFromResources( } public static void addTagsToResources( - List<Urn> tagUrns, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> tagUrns, List<ResourceRefInput> resources, Urn actor, EntityService 
entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildAddTagsProposal(tagUrns, resource, actor, entityService)); @@ -98,11 +100,8 @@ public static void addTagsToResources( } public static void removeTermsFromResources( - List<Urn> termUrns, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> termUrns, List<ResourceRefInput> resources, Urn actor, EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildRemoveTermsProposal(termUrns, resource, actor, entityService)); @@ -111,11 +110,8 @@ public static void removeTermsFromResources( } public static void addTermsToResources( - List<Urn> termUrns, - List<ResourceRefInput> resources, - Urn actor, - EntityService entityService - ) throws Exception { + List<Urn> termUrns, List<ResourceRefInput> resources, Urn actor, EntityService entityService) + throws Exception { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildAddTermsProposal(termUrns, resource, actor, entityService)); @@ -128,12 +124,16 @@ public static void addTermsToResource( Urn resourceUrn, String subResource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -144,10 +144,15 @@ public static void addTermsToResource( persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -155,7 +160,12 @@ public static void addTermsToResource( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), labelUrns); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } @@ -181,17 +191,22 @@ private static 
GlossaryTermAssociationArray removeTermIfExists(GlossaryTerms ter return termArray; } - public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTags( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -201,19 +216,23 @@ public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Ur orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateTerms(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTerms( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType() - )) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? 
PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -230,37 +249,56 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { for (Urn urn : labelUrns) { - validateResourceAndLabel(urn, resourceUrn, subResource, subResourceType, labelEntityType, entityService, isRemoving); + validateResourceAndLabel( + urn, + resourceUrn, + subResource, + subResourceType, + labelEntityType, + entityService, + isRemoving); } } - public static void validateLabel(Urn labelUrn, String labelEntityType, EntityService entityService) { + public static void validateLabel( + Urn labelUrn, String labelEntityType, EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn type does not match entity type %s..", - labelUrn, - labelEntityType)); + throw new IllegalArgumentException( + String.format( + "Failed to validate label with urn %s. Urn type does not match entity type %s..", + labelUrn, labelEntityType)); } if (!entityService.exists(labelUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); } } // TODO: Move this out into a separate utilities class. - public static void validateResource(Urn resourceUrn, String subResource, SubResourceType subResourceType, EntityService entityService) { + public static void validateResource( + Urn resourceUrn, + String subResource, + SubResourceType subResourceType, + EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); } if ((subResource != null && subResource.length() > 0) || subResourceType != null) { if (subResource == null || subResource.length() == 0) { - throw new IllegalArgumentException(String.format( - "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", resourceUrn, subResourceType)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", + resourceUrn, subResourceType)); } if (subResourceType == null) { - throw new IllegalArgumentException(String.format( - "Failed to updates resource with urn %s. SubResource (%s) provided without a subResourceType.", resourceUrn, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to updates resource with urn %s. 
SubResource (%s) provided without a subResourceType.", + resourceUrn, subResource)); } validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); } @@ -273,8 +311,7 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { if (!isRemoving) { validateLabel(labelUrn, labelEntityType, entityService); } @@ -282,11 +319,8 @@ public static void validateResourceAndLabel( } private static MetadataChangeProposal buildAddTagsProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildAddTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -297,11 +331,8 @@ private static MetadataChangeProposal buildAddTagsProposal( } private static MetadataChangeProposal buildRemoveTagsProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildRemoveTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -312,82 +343,90 @@ private static MetadataChangeProposal buildRemoveTagsProposal( } private static MetadataChangeProposal buildRemoveTagsToEntityProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } removeTagsIfExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - entityService, - new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + 
entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildAddTagsToEntityProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } addTagsIfNotExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildAddTagsToSubResourceProposal( - List<Urn> tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } - private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throws URISyntaxException { + private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) + throws URISyntaxException { if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } @@ -396,7 +435,8 @@ 
private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throw List<Urn> tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -415,11 +455,8 @@ private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throw } private static MetadataChangeProposal buildAddTermsProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding terms to a top-level entity return buildAddTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -430,11 +467,8 @@ private static MetadataChangeProposal buildAddTermsProposal( } private static MetadataChangeProposal buildRemoveTermsProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Removing terms from a top-level entity return buildRemoveTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -445,14 +479,15 @@ private static MetadataChangeProposal buildRemoveTermsProposal( } private static MetadataChangeProposal buildAddTermsToEntityProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -460,20 +495,23 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal( } addTermsIfNotExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildAddTermsToSubResourceProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = 
getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -481,42 +519,48 @@ private static MetadataChangeProposal buildAddTermsToSubResourceProposal( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildRemoveTermsToEntityProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermsIfExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal( - List<Urn> termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermsIfExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + 
Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static void addTermsIfNotExists(GlossaryTerms terms, List<Urn> termUrns) @@ -547,7 +591,8 @@ private static void addTermsIfNotExists(GlossaryTerms terms, List<Urn> termUrns) } } - private static GlossaryTermAssociationArray removeTermsIfExists(GlossaryTerms terms, List<Urn> termUrns) { + private static GlossaryTermAssociationArray removeTermsIfExists( + GlossaryTerms terms, List<Urn> termUrns) { if (!terms.hasTerms()) { terms.setTerms(new GlossaryTermAssociationArray()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java index 9ec0f9b8e6070..b93c72edbcfc5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; @@ -9,59 +12,59 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.entity.EntityUtils; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class LinkUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LinkUtils() { } + private LinkUtils() {} public static void addLink( - String linkUrl, - String linkLabel, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, String linkLabel, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); addLink(institutionalMemoryAspect, linkUrl, linkLabel, actor); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, 
entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } public static void removeLink( - String linkUrl, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); removeLink(institutionalMemoryAspect, linkUrl); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } - private static void addLink(InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { + private static void addLink( + InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { if (!institutionalMemoryAspect.hasElements()) { institutionalMemoryAspect.setElements(new InstitutionalMemoryMetadataArray()); } @@ -90,10 +93,12 @@ private static void removeLink(InstitutionalMemory institutionalMemoryAspect, St } public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -104,21 +109,22 @@ public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U } public static Boolean validateAddRemoveInput( - String linkUrl, - Urn resourceUrn, - EntityService entityService - ) { + String linkUrl, Urn resourceUrn, EntityService entityService) { try { new Url(linkUrl); } catch (Exception e) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Expected a corp group urn.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. Expected a corp group urn.", + resourceUrn)); } if (!entityService.exists(resourceUrn)) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Resource does not exist.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. 
Resource does not exist.", + resourceUrn)); } return true; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 7233995804423..15c3c14c7b8f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -28,104 +30,124 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming from OwnerService @Slf4j public class OwnerUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static final String SYSTEM_ID = "__system__"; - private OwnerUtils() { } + private OwnerUtils() {} public static void addOwnersToResources( List<OwnerInput> owners, List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildAddOwnersProposal(owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); + changes.add( + buildAddOwnersProposal( + owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } public static void removeOwnersFromResources( - List<Urn> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn, List<ResourceRefInput> resources, + List<Urn> ownerUrns, + Optional<Urn> maybeOwnershipTypeUrn, + List<ResourceRefInput> resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildRemoveOwnersProposal(ownerUrns, maybeOwnershipTypeUrn, UrnUtils.getUrn(resource.getResourceUrn()), - actor, entityService)); + changes.add( + buildRemoveOwnersProposal( + ownerUrns, + maybeOwnershipTypeUrn, + UrnUtils.getUrn(resource.getResourceUrn()), + actor, + entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } - - static MetadataChangeProposal buildAddOwnersProposal(List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, entityService, - new Ownership()); + static MetadataChangeProposal buildAddOwnersProposal( + List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new 
Ownership()); for (OwnerInput input : owners) { - addOwner(ownershipAspect, UrnUtils.getUrn(input.getOwnerUrn()), input.getType(), UrnUtils.getUrn(input.getOwnershipTypeUrn())); + addOwner( + ownershipAspect, + UrnUtils.getUrn(input.getOwnerUrn()), + input.getType(), + UrnUtils.getUrn(input.getOwnershipTypeUrn())); } - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } public static MetadataChangeProposal buildRemoveOwnersProposal( - List<Urn> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn, Urn resourceUrn, + List<Urn> ownerUrns, + Optional<Urn> maybeOwnershipTypeUrn, + Urn resourceUrn, Urn actor, - EntityService entityService - ) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, - entityService, - new Ownership()); + EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); removeOwnersIfExists(ownershipAspect, ownerUrns, maybeOwnershipTypeUrn); - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } - private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { + private static void addOwner( + Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { if (!ownershipAspect.hasOwners()) { ownershipAspect.setOwners(new OwnerArray()); } final OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); - ownerArray.removeIf(owner -> { - // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) + ownerArray.removeIf( + owner -> { + // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(ownershipUrn); - } + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(ownershipUrn); + } - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(ownershipUrn.toString()); - }); + // Fall back to mapping deprecated type to the new ownership entity, if it matches remove + return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(ownershipUrn.toString()); + }); Owner newOwner = new Owner(); // For backwards compatibility we have to always set the deprecated type. // If the type exists we assume it's an old ownership type that we can map to. // Else if it's a net new custom ownership type set old type to CUSTOM. - com.linkedin.common.OwnershipType gmsType = type != null ? 
com.linkedin.common.OwnershipType.valueOf(type.toString()) - : com.linkedin.common.OwnershipType.CUSTOM; + com.linkedin.common.OwnershipType gmsType = + type != null + ? com.linkedin.common.OwnershipType.valueOf(type.toString()) + : com.linkedin.common.OwnershipType.CUSTOM; newOwner.setType(gmsType); newOwner.setTypeUrn(ownershipUrn); @@ -135,8 +157,8 @@ private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipT ownershipAspect.setOwners(ownerArray); } - private static void removeOwnersIfExists(Ownership ownership, List<Urn> ownerUrns, - Optional<Urn> maybeOwnershipTypeUrn) { + private static void removeOwnersIfExists( + Ownership ownership, List<Urn> ownerUrns, Optional<Urn> maybeOwnershipTypeUrn) { if (!ownership.hasOwners()) { ownership.setOwners(new OwnerArray()); } @@ -144,23 +166,26 @@ private static void removeOwnersIfExists(Ownership ownership, List<Urn> ownerUrn OwnerArray ownerArray = ownership.getOwners(); for (Urn ownerUrn : ownerUrns) { if (maybeOwnershipTypeUrn.isPresent()) { - ownerArray.removeIf(owner -> { - // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(maybeOwnershipTypeUrn.get().toString()); - }); + ownerArray.removeIf( + owner -> { + // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) + + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } + + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); + } + + // Fall back to mapping deprecated type to the new ownership entity, if it matches + // remove + return mapOwnershipTypeToEntity( + OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(maybeOwnershipTypeUrn.get().toString()); + }); } else { ownerArray.removeIf(owner -> owner.getOwner().equals(ownerUrn)); } @@ -168,10 +193,12 @@ private static void removeOwnersIfExists(Ownership ownership, List<Urn> ownerUrn } public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -182,10 +209,7 @@ public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, } public static Boolean validateAddOwnerInput( - List<OwnerInput> owners, - Urn resourceUrn, - EntityService entityService - ) { + List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) { for (OwnerInput owner : owners) { boolean result = validateAddOwnerInput(owner, resourceUrn, entityService); if (!result) { @@ -196,13 +220,12 @@ 
public static Boolean validateAddOwnerInput(
   }
 
   public static Boolean validateAddOwnerInput(
-      OwnerInput owner,
-      Urn resourceUrn,
-      EntityService entityService
-  ) {
+      OwnerInput owner, Urn resourceUrn, EntityService entityService) {
     if (!entityService.exists(resourceUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn));
+      throw new IllegalArgumentException(
+          String.format(
+              "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn));
     }
     validateOwner(owner, entityService);
@@ -210,45 +233,55 @@ public static Boolean validateAddOwnerInput(
     return true;
   }
 
-  public static void validateOwner(
-      OwnerInput owner,
-      EntityService entityService
-  ) {
+  public static void validateOwner(OwnerInput owner, EntityService entityService) {
     OwnerEntityType ownerEntityType = owner.getOwnerEntityType();
     Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn());
 
-    if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) {
+    if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType)
+        && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) {
       throw new IllegalArgumentException(
-          String.format("Failed to change ownership for resource(s). Expected a corp group urn, found %s", ownerUrn));
+          String.format(
+              "Failed to change ownership for resource(s). Expected a corp group urn, found %s",
+              ownerUrn));
     }
 
-    if (OwnerEntityType.CORP_USER.equals(ownerEntityType) && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) {
+    if (OwnerEntityType.CORP_USER.equals(ownerEntityType)
+        && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) {
       throw new IllegalArgumentException(
-          String.format("Failed to change ownership for resource(s). Expected a corp user urn, found %s.", ownerUrn));
+          String.format(
+              "Failed to change ownership for resource(s). Expected a corp user urn, found %s.",
+              ownerUrn));
     }
 
     if (!entityService.exists(ownerUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Owner with urn %s does not exist.", ownerUrn));
+      throw new IllegalArgumentException(
+          String.format(
+              "Failed to change ownership for resource(s). Owner with urn %s does not exist.",
+              ownerUrn));
    }
 
-    if (owner.getOwnershipTypeUrn() != null && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) {
-      throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Custom Ownership type with "
-          + "urn %s does not exist.", owner.getOwnershipTypeUrn()));
+    if (owner.getOwnershipTypeUrn() != null
+        && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) {
+      throw new IllegalArgumentException(
+          String.format(
+              "Failed to change ownership for resource(s). Custom Ownership type with "
+                  + "urn %s does not exist.",
+              owner.getOwnershipTypeUrn()));
     }
 
     if (owner.getType() == null && owner.getOwnershipTypeUrn() == null) {
-      throw new IllegalArgumentException("Failed to change ownership for resource(s). Expected either "
-          + "type or ownershipTypeUrn to be specified.");
+      throw new IllegalArgumentException(
+          "Failed to change ownership for resource(s). Expected either "
+              + "type or ownershipTypeUrn to be specified.");
     }
   }
 
-  public static Boolean validateRemoveInput(
-      Urn resourceUrn,
-      EntityService entityService
-  ) {
+  public static Boolean validateRemoveInput(Urn resourceUrn, EntityService entityService) {
     if (!entityService.exists(resourceUrn)) {
-      throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn));
+      throw new IllegalArgumentException(
+          String.format(
+              "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn));
     }
     return true;
   }
@@ -264,15 +297,17 @@ public static void addCreatorAsOwner(
       String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name());
 
       if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) {
-        throw new RuntimeException(String.format("Unknown ownership type urn %s", ownershipTypeUrn));
+        throw new RuntimeException(
+            String.format("Unknown ownership type urn %s", ownershipTypeUrn));
       }
 
       addOwnersToResources(
-          ImmutableList.of(new OwnerInput(actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)),
+          ImmutableList.of(
+              new OwnerInput(
+                  actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)),
           ImmutableList.of(new ResourceRefInput(urn, null, null)),
           actorUrn,
-          entityService
-      );
+          entityService);
     } catch (Exception e) {
       log.error(String.format("Failed to add creator as owner of tag %s", urn), e);
     }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java
index f740836694dbe..0dd737d3b2292 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java
@@ -1,32 +1,35 @@
 package com.linkedin.datahub.graphql.resolvers.mutate.util;
 
+import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME;
+
 import com.linkedin.common.Siblings;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.entity.EntityService;
-
-import javax.annotation.Nonnull;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
-
-import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME;
+import javax.annotation.Nonnull;
 
 public class SiblingsUtils {
 
-  private SiblingsUtils() { }
+  private SiblingsUtils() {}
 
-  public static List<Urn> getSiblingUrns(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) {
-    final Siblings siblingAspectOfEntity = (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME);
+  public static List<Urn> getSiblingUrns(
+      @Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) {
+    final Siblings siblingAspectOfEntity =
+        (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME);
     if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) {
       return siblingAspectOfEntity.getSiblings();
     }
 
     return new ArrayList<>();
   }
 
-  public static Optional<Urn> getNextSiblingUrn(@Nonnull final List<Urn> siblingUrns, @Nonnull final HashSet<Urn> usedUrns) {
-    final List<Urn> unusedSiblingUrns = siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList());
+  public static Optional<Urn> getNextSiblingUrn(
+      @Nonnull final List<Urn> siblingUrns, @Nonnull final HashSet<Urn> usedUrns) {
+    final List<Urn> unusedSiblingUrns =
+        siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList());
 
     return unusedSiblingUrns.stream().findFirst();
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java
index c0fe697c6654c..abc479ed18ebf 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java
@@ -1,5 +1,12 @@
 package com.linkedin.datahub.graphql.resolvers.operation;
 
+import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
+import com.datahub.authorization.ConjunctivePrivilegeGroup;
+import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.Operation;
 import com.linkedin.common.OperationSourceType;
@@ -10,8 +17,6 @@
 import com.linkedin.data.template.StringMap;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
-import com.datahub.authorization.ConjunctivePrivilegeGroup;
-import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode;
 import com.linkedin.datahub.graphql.exception.DataHubGraphQLException;
@@ -30,22 +35,12 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Resolver used for reporting Asset Operations
- */
+/** Resolver used for reporting Asset Operations */
 @Slf4j
 @RequiredArgsConstructor
 public class ReportOperationResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
-  private static final List<String> SUPPORTED_ENTITY_TYPES = ImmutableList.of(
-      DATASET_ENTITY_NAME
-  );
+  private static final List<String> SUPPORTED_ENTITY_TYPES = ImmutableList.of(DATASET_ENTITY_NAME);
 
   private final EntityClient _entityClient;
 
@@ -53,32 +48,36 @@ public class ReportOperationResolver implements DataFetcher<CompletableFuture<Bo
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final ReportOperationInput input = bindArgument(environment.getArgument("input"), ReportOperationInput.class);
-
-    return CompletableFuture.supplyAsync(() -> {
-
-      Urn entityUrn = UrnUtils.getUrn(input.getUrn());
-
-      if (!isAuthorizedToReportOperationForResource(entityUrn, context)) {
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-
-      validateInput(entityUrn, input);
-
-      try {
-        // Create an MCP to emit the operation
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, OPERATION_ASPECT_NAME,
-            mapOperation(input, context));
-        _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to report operation. {}", e.getMessage());
-        throw new RuntimeException("Failed to report operation", e);
-      }
-    });
+    final ReportOperationInput input =
+        bindArgument(environment.getArgument("input"), ReportOperationInput.class);
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          Urn entityUrn = UrnUtils.getUrn(input.getUrn());
+
+          if (!isAuthorizedToReportOperationForResource(entityUrn, context)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+
+          validateInput(entityUrn, input);
+
+          try {
+            // Create an MCP to emit the operation
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithUrn(
+                    entityUrn, OPERATION_ASPECT_NAME, mapOperation(input, context));
+            _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+            return true;
+          } catch (Exception e) {
+            log.error("Failed to report operation. {}", e.getMessage());
+            throw new RuntimeException("Failed to report operation", e);
+          }
+        });
   }
 
-  private Operation mapOperation(final ReportOperationInput input, final QueryContext context) throws URISyntaxException {
+  private Operation mapOperation(final ReportOperationInput input, final QueryContext context)
+      throws URISyntaxException {
     final Operation result = new Operation();
     result.setActor(UrnUtils.getUrn(context.getActorUrn()));
@@ -86,13 +85,17 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont
     result.setCustomOperationType(input.getCustomOperationType(), SetMode.IGNORE_NULL);
     result.setNumAffectedRows(input.getNumAffectedRows(), SetMode.IGNORE_NULL);
 
-    long timestampMillis = input.getTimestampMillis() != null ? input.getTimestampMillis() : System.currentTimeMillis();
+    long timestampMillis =
+        input.getTimestampMillis() != null
+            ? input.getTimestampMillis()
+            : System.currentTimeMillis();
     result.setLastUpdatedTimestamp(timestampMillis);
     result.setTimestampMillis(timestampMillis);
     result.setSourceType(OperationSourceType.valueOf(input.getSourceType().toString()));
 
     if (input.getPartition() != null) {
-      result.setPartitionSpec(new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition()));
+      result.setPartitionSpec(
+          new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition()));
     }
 
     if (input.getCustomProperties() != null) {
@@ -102,7 +105,8 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont
     return result;
   }
 
-  private StringMap mapCustomProperties(final List<StringMapEntryInput> properties) throws URISyntaxException {
+  private StringMap mapCustomProperties(final List<StringMapEntryInput> properties)
+      throws URISyntaxException {
     final StringMap result = new StringMap();
     for (StringMapEntryInput entry : properties) {
       result.put(entry.getKey(), entry.getValue());
@@ -113,16 +117,21 @@ private StringMap mapCustomProperties(final List<StringMapEntryInput> properties
   private void validateInput(final Urn entityUrn, final ReportOperationInput input) {
     if (!SUPPORTED_ENTITY_TYPES.contains(entityUrn.getEntityType())) {
       throw new DataHubGraphQLException(
-          String.format("Unable to report operation. Invalid entity type %s provided.", entityUrn.getEntityType()),
+          String.format(
+              "Unable to report operation. Invalid entity type %s provided.",
+              entityUrn.getEntityType()),
           DataHubGraphQLErrorCode.BAD_REQUEST);
     }
   }
 
-  private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, final QueryContext context) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of(
-        ALL_PRIVILEGES_GROUP,
-        new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType()))
-    ));
+  private boolean isAuthorizedToReportOperationForResource(
+      final Urn resourceUrn, final QueryContext context) {
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                ALL_PRIVILEGES_GROUP,
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType()))));
 
     return AuthorizationUtils.isAuthorized(
         context.getAuthorizer(),
@@ -131,4 +140,4 @@ private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn,
         resourceUrn.toString(),
         orPrivilegeGroups);
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java
index 4cfe58072aae9..a0cffa5eca44c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java
@@ -1,13 +1,15 @@
 package com.linkedin.datahub.graphql.resolvers.ownership;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput;
+import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo;
-import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.metadata.service.OwnershipTypeService;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
@@ -16,17 +18,16 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
-public class CreateOwnershipTypeResolver implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> {
+public class CreateOwnershipTypeResolver
+    implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> {
 
   private final OwnershipTypeService _ownershipTypeService;
 
   @Override
-  public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final CreateOwnershipTypeInput input =
         bindArgument(environment.getArgument("input"), CreateOwnershipTypeInput.class);
@@ -36,19 +37,25 @@ public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment enviro
           "Unauthorized to perform this action. Please contact your DataHub administrator.");
     }
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        final Urn urn = _ownershipTypeService.createOwnershipType(input.getName(), input.getDescription(),
-            context.getAuthentication(), System.currentTimeMillis());
-        return createOwnershipType(urn, input);
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final Urn urn =
+                _ownershipTypeService.createOwnershipType(
+                    input.getName(),
+                    input.getDescription(),
+                    context.getAuthentication(),
+                    System.currentTimeMillis());
+            return createOwnershipType(urn, input);
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input), e);
+          }
+        });
   }
 
-  private OwnershipTypeEntity createOwnershipType(@Nonnull final Urn urn,
-      @Nonnull final CreateOwnershipTypeInput input) {
+  private OwnershipTypeEntity createOwnershipType(
+      @Nonnull final Urn urn, @Nonnull final CreateOwnershipTypeInput input) {
     return OwnershipTypeEntity.builder()
         .setUrn(urn.toString())
         .setType(EntityType.CUSTOM_OWNERSHIP_TYPE)
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java
index 87cf70193d7fd..c5bb58a7d4b2e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java
@@ -12,7 +12,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class DeleteOwnershipTypeResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -26,21 +25,26 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw
     final Urn urn = UrnUtils.getUrn(ownershipTypeUrn);
     // By default, delete references
     final boolean deleteReferences =
-        environment.getArgument("deleteReferences") == null ? true : environment.getArgument("deleteReferences");
+        environment.getArgument("deleteReferences") == null
+            ? true
+            : environment.getArgument("deleteReferences");
 
     if (!AuthorizationUtils.canManageOwnershipTypes(context)) {
       throw new AuthorizationException(
           "Unauthorized to perform this action. Please contact your DataHub administrator.");
     }
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        _ownershipTypeService.deleteOwnershipType(urn, deleteReferences, context.getAuthentication());
-        log.info(String.format("Successfully deleted ownership type %s with urn", urn));
-        return true;
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            _ownershipTypeService.deleteOwnershipType(
+                urn, deleteReferences, context.getAuthentication());
+            log.info(String.format("Successfully deleted ownership type %s with urn", urn));
+            return true;
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e);
+          }
+        });
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java
index 70441815f0a74..1c8f43a490173 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java
@@ -1,12 +1,14 @@
 package com.linkedin.datahub.graphql.resolvers.ownership;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
-import com.linkedin.datahub.graphql.generated.FacetFilterInput;
-import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import com.linkedin.datahub.graphql.generated.EntityType;
+import com.linkedin.datahub.graphql.generated.FacetFilterInput;
 import com.linkedin.datahub.graphql.generated.ListOwnershipTypesInput;
 import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult;
+import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.query.SearchFlags;
@@ -24,18 +26,14 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
-public class ListOwnershipTypesResolver implements
-    DataFetcher<CompletableFuture<ListOwnershipTypesResult>> {
+public class ListOwnershipTypesResolver
+    implements DataFetcher<CompletableFuture<ListOwnershipTypesResult>> {
 
   private static final String CREATED_AT_FIELD = "createdAt";
-  private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion()
-      .setField(CREATED_AT_FIELD)
-      .setOrder(SortOrder.DESCENDING);
+  private static final SortCriterion DEFAULT_SORT_CRITERION =
+      new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING);
 
   private static final Integer DEFAULT_START = 0;
   private static final Integer DEFAULT_COUNT = 20;
@@ -44,43 +42,47 @@ public class ListOwnershipTypesResolver implements
   private final EntityClient _entityClient;
 
   @Override
-  public CompletableFuture<ListOwnershipTypesResult> get(DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<ListOwnershipTypesResult> get(DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
-    final ListOwnershipTypesInput input = bindArgument(environment.getArgument("input"),
-        ListOwnershipTypesInput.class);
-
-    return CompletableFuture.supplyAsync(() -> {
-      final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart();
-      final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
-      final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();
-      final List<FacetFilterInput> filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters();
-
+    final ListOwnershipTypesInput input =
-      try {
+        bindArgument(environment.getArgument("input"), ListOwnershipTypesInput.class);
 
-        final SearchResult gmsResult = _entityClient.search(
-            Constants.OWNERSHIP_TYPE_ENTITY_NAME,
-            query,
-            buildFilter(filters, Collections.emptyList()),
-            DEFAULT_SORT_CRITERION,
-            start,
-            count,
-            context.getAuthentication(),
-            new SearchFlags().setFulltext(true));
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart();
+          final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
+          final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();
+          final List<FacetFilterInput> filters =
+              input.getFilters() == null ? Collections.emptyList() : input.getFilters();
 
-        final ListOwnershipTypesResult result = new ListOwnershipTypesResult();
-        result.setStart(gmsResult.getFrom());
-        result.setCount(gmsResult.getPageSize());
-        result.setTotal(gmsResult.getNumEntities());
-        result.setOwnershipTypes(mapUnresolvedOwnershipTypes(gmsResult.getEntities().stream()
-            .map(SearchEntity::getEntity)
-            .collect(Collectors.toList())));
-        return result;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to list custom ownership types", e);
-      }
+          try {
-    });
+            final SearchResult gmsResult =
+                _entityClient.search(
+                    Constants.OWNERSHIP_TYPE_ENTITY_NAME,
+                    query,
+                    buildFilter(filters, Collections.emptyList()),
+                    DEFAULT_SORT_CRITERION,
+                    start,
+                    count,
+                    context.getAuthentication(),
+                    new SearchFlags().setFulltext(true));
 
+            final ListOwnershipTypesResult result = new ListOwnershipTypesResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setOwnershipTypes(
+                mapUnresolvedOwnershipTypes(
+                    gmsResult.getEntities().stream()
+                        .map(SearchEntity::getEntity)
+                        .collect(Collectors.toList())));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list custom ownership types", e);
+          }
+        });
   }
 
   private List<OwnershipTypeEntity> mapUnresolvedOwnershipTypes(List<Urn> entityUrns) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java
index 43fd249304397..839121a295d9a 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.ownership;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -17,17 +19,16 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
-public class UpdateOwnershipTypeResolver implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> {
+public class UpdateOwnershipTypeResolver
+    implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> {
 
   private final OwnershipTypeService _ownershipTypeService;
 
   @Override
-  public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String urnStr = environment.getArgument("urn");
     final UpdateOwnershipTypeInput input =
@@ -39,27 +40,35 @@ public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment enviro
           "Unauthorized to perform this action. Please contact your DataHub administrator.");
     }
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        _ownershipTypeService.updateOwnershipType(urn, input.getName(), input.getDescription(),
-            context.getAuthentication(), System.currentTimeMillis());
-        log.info(String.format("Successfully updated Ownership Type %s with urn", urn));
-        return getOwnershipType(urn, context.getAuthentication());
-      } catch (AuthorizationException e) {
-        throw e;
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            _ownershipTypeService.updateOwnershipType(
+                urn,
+                input.getName(),
+                input.getDescription(),
+                context.getAuthentication(),
+                System.currentTimeMillis());
+            log.info(String.format("Successfully updated Ownership Type %s with urn", urn));
+            return getOwnershipType(urn, context.getAuthentication());
+          } catch (AuthorizationException e) {
+            throw e;
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to perform update against View with urn %s", urn), e);
+          }
+        });
   }
 
-  private OwnershipTypeEntity getOwnershipType(@Nonnull final Urn urn,
-      @Nonnull final Authentication authentication) {
-    final EntityResponse maybeResponse = _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication);
+  private OwnershipTypeEntity getOwnershipType(
+      @Nonnull final Urn urn, @Nonnull final Authentication authentication) {
+    final EntityResponse maybeResponse =
+        _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication);
     // If there is no response, there is a problem.
     if (maybeResponse == null) {
       throw new RuntimeException(
-          String.format("Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.",
+          String.format(
+              "Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.",
               urn));
     }
     return OwnershipTypeMapper.map(maybeResponse);
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java
index 485d40e60547e..567745b894ca9 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java
@@ -9,10 +9,7 @@
 import graphql.schema.DataFetchingEnvironment;
 import java.util.concurrent.CompletableFuture;
 
-
-/**
- * Resolver responsible for hard deleting a particular DataHub access control policy.
- */
+/** Resolver responsible for hard deleting a particular DataHub access control policy. */
 public class DeletePolicyResolver implements DataFetcher<CompletableFuture<String>> {
 
   private final EntityClient _entityClient;
@@ -27,18 +24,24 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
     if (PolicyAuthUtils.canManagePolicies(context)) {
       final String policyUrn = environment.getArgument("urn");
       final Urn urn = Urn.createFromString(policyUrn);
-      return CompletableFuture.supplyAsync(() -> {
-        try {
-          _entityClient.deleteEntity(urn, context.getAuthentication());
-          if (context.getAuthorizer() instanceof AuthorizerChain) {
-            ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache();
-          }
-          return policyUrn;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform delete against policy with urn %s", policyUrn), e);
-        }
-      });
+      return CompletableFuture.supplyAsync(
+          () -> {
+            try {
+              _entityClient.deleteEntity(urn, context.getAuthentication());
+              if (context.getAuthorizer() instanceof AuthorizerChain) {
+                ((AuthorizerChain) context.getAuthorizer())
+                    .getDefaultAuthorizer()
+                    .invalidateCache();
+              }
+              return policyUrn;
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to perform delete against policy with urn %s", policyUrn),
+                  e);
+            }
+          });
     }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java
index 11f7793db82c8..3328eff2bdf45 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.policy;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
+
 import com.datahub.authorization.AuthorizerChain;
 import com.datahub.authorization.DataHubAuthorizer;
 import com.datahub.authorization.EntitySpec;
@@ -14,17 +16,15 @@
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-
-
 /**
- * Resolver to support the getGrantedPrivileges end point
- * Fetches all privileges that are granted for the given actor for the given resource (optional)
+ * Resolver to support the getGrantedPrivileges end point Fetches all privileges that are granted
+ * for the given actor for the given resource (optional)
  */
 public class GetGrantedPrivilegesResolver implements DataFetcher<CompletableFuture<Privileges>> {
 
   @Override
-  public CompletableFuture<Privileges> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Privileges> get(final DataFetchingEnvironment environment)
+      throws Exception {
 
     final QueryContext context = environment.getContext();
     final GetGrantedPrivilegesInput input =
@@ -33,22 +33,27 @@ public CompletableFuture<Privileges> get(final DataFetchingEnvironme
     if (!isAuthorized(context, actor)) {
       throw new AuthorizationException("Unauthorized to get privileges for the given author.");
     }
-    final Optional<EntitySpec> resourceSpec = Optional.ofNullable(input.getResourceSpec())
-        .map(spec -> new EntitySpec(EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn()));
+    final Optional<EntitySpec> resourceSpec =
+        Optional.ofNullable(input.getResourceSpec())
+            .map(
+                spec ->
+                    new EntitySpec(
+                        EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn()));
 
     if (context.getAuthorizer() instanceof AuthorizerChain) {
-      DataHubAuthorizer dataHubAuthorizer = ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer();
+      DataHubAuthorizer dataHubAuthorizer =
+          ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer();
       List<String> privileges = dataHubAuthorizer.getGrantedPrivileges(actor, resourceSpec);
-      return CompletableFuture.supplyAsync(() -> Privileges.builder()
-          .setPrivileges(privileges)
-          .build());
+      return CompletableFuture.supplyAsync(
+          () -> Privileges.builder().setPrivileges(privileges).build());
     }
     throw new UnsupportedOperationException(
-        String.format("GetGrantedPrivileges function is not supported on authorizer of type %s",
+        String.format(
+            "GetGrantedPrivileges function is not supported on authorizer of type %s",
             context.getAuthorizer().getClass().getSimpleName()));
   }
 
   private boolean isAuthorized(final QueryContext context, final String actor) {
     return actor.equals(context.getActorUrn());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java
index b44da1c2f832c..87832b8c3aa40 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.policy;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
+
 import com.datahub.authorization.PolicyFetcher;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
@@ -14,9 +16,6 @@
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-
-
 public class ListPoliciesResolver implements DataFetcher<CompletableFuture<ListPoliciesResult>> {
 
   private static final Integer DEFAULT_START = 0;
@@ -30,18 +29,22 @@ public ListPoliciesResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
 
     if (PolicyAuthUtils.canManagePolicies(context)) {
-      final ListPoliciesInput input = bindArgument(environment.getArgument("input"), ListPoliciesInput.class);
+      final ListPoliciesInput input =
+          bindArgument(environment.getArgument("input"), ListPoliciesInput.class);
       final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart();
       final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
       final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();
 
-      return _policyFetcher.fetchPolicies(start, query, count, context.getAuthentication())
-          .thenApply(policyFetchResult -> {
+      return _policyFetcher
+          .fetchPolicies(start, query, count, context.getAuthentication())
+          .thenApply(
+              policyFetchResult -> {
                 final ListPoliciesResult result = new ListPoliciesResult();
                 result.setStart(start);
                 result.setCount(count);
@@ -50,14 +53,18 @@ public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment e
                 return result;
               });
     }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
   }
 
   private List<Policy> mapEntities(final List<PolicyFetcher.Policy> policies) {
-    return policies.stream().map(policy -> {
-      Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo());
-      mappedPolicy.setUrn(policy.getUrn().toString());
-      return mappedPolicy;
-    }).collect(Collectors.toList());
+    return policies.stream()
+        .map(
+            policy -> {
+              Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo());
+              mappedPolicy.setUrn(policy.getUrn().toString());
+              return mappedPolicy;
+            })
+        .collect(Collectors.toList());
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java
index dcc5d1fd23302..d0446d218dac6 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java
@@ -1,19 +1,23 @@
 package com.linkedin.datahub.graphql.resolvers.policy;
 
+import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*;
+
 import com.datahub.plugins.auth.authorization.Authorizer;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*;
 
 public class PolicyAuthUtils {
 
   static boolean canManagePolicies(@Nonnull QueryContext context) {
     final Authorizer authorizer = context.getAuthorizer();
     final String principal = context.getActorUrn();
-    return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), authorizer);
+    return isAuthorized(
+        principal,
+        ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()),
+        authorizer);
   }
 
-  private PolicyAuthUtils() { }
+  private PolicyAuthUtils() {}
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java
index 6dcc143a1a3af..dcdf78ebc15bb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.policy;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+
 import com.datahub.authorization.AuthorizerChain;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -16,10 +19,6 @@
 import java.util.UUID;
 import java.util.concurrent.CompletableFuture;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-
-
 public class UpsertPolicyResolver implements DataFetcher<CompletableFuture<String>> {
 
   private static final String POLICY_ENTITY_NAME = "dataHubPolicy";
@@ -38,7 +37,8 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
 
     if (PolicyAuthUtils.canManagePolicies(context)) {
       final Optional<String> policyUrn = Optional.ofNullable(environment.getArgument("urn"));
-      final PolicyUpdateInput input = bindArgument(environment.getArgument("input"), PolicyUpdateInput.class);
+      final PolicyUpdateInput input =
+          bindArgument(environment.getArgument("input"), PolicyUpdateInput.class);
 
       // Finally, create the MetadataChangeProposal.
       final MetadataChangeProposal proposal;
@@ -48,7 +48,9 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
 
       if (policyUrn.isPresent()) {
         // Update existing policy
-        proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info);
+        proposal =
+            buildMetadataChangeProposalWithUrn(
+                Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info);
       } else {
         // Create new policy
         // Since we are creating a new Policy, we need to generate a unique UUID.
@@ -58,21 +60,29 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
         // Create the Policy key.
         final DataHubPolicyKey key = new DataHubPolicyKey();
         key.setId(uuidStr);
-        proposal = buildMetadataChangeProposalWithKey(key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info);
+        proposal =
+            buildMetadataChangeProposalWithKey(
+                key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info);
       }
 
-      return CompletableFuture.supplyAsync(() -> {
-        try {
-          String urn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-          if (context.getAuthorizer() instanceof AuthorizerChain) {
-            ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache();
-          }
-          return urn;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform update against input %s", input), e);
-        }
-      });
+      return CompletableFuture.supplyAsync(
+          () -> {
+            try {
+              String urn =
+                  _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+              if (context.getAuthorizer() instanceof AuthorizerChain) {
+                ((AuthorizerChain) context.getAuthorizer())
+                    .getDefaultAuthorizer()
+                    .invalidateCache();
+              }
+              return urn;
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to perform update against input %s", input), e);
+            }
+          });
    }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java
index b9a6bf07be8c8..a350fb91f9d3b 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java
@@ -2,6 +2,7 @@
 
 import com.linkedin.common.UrnArray;
 import com.linkedin.common.urn.Urn;
+import com.linkedin.datahub.graphql.generated.ActorFilter;
 import com.linkedin.datahub.graphql.generated.Policy;
 import com.linkedin.datahub.graphql.generated.PolicyMatchCondition;
 import com.linkedin.datahub.graphql.generated.PolicyMatchCriterion;
@@ -9,7 +10,6 @@
 import com.linkedin.datahub.graphql.generated.PolicyMatchFilter;
 import com.linkedin.datahub.graphql.generated.PolicyState;
 import com.linkedin.datahub.graphql.generated.PolicyType;
-import com.linkedin.datahub.graphql.generated.ActorFilter;
 import com.linkedin.datahub.graphql.generated.ResourceFilter;
 import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
@@ -20,9 +20,9 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
 /**
- * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link com.linkedin.datahub.graphql.generated.Policy}.
+ * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link
+ * com.linkedin.datahub.graphql.generated.Policy}.
  */
 public class PolicyInfoPolicyMapper implements ModelMapper<DataHubPolicyInfo, Policy> {
 
@@ -56,16 +56,20 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) {
     result.setResourceOwners(actorFilter.isResourceOwners());
     UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes();
     if (resourceOwnersTypes != null) {
-      result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList()));
+      result.setResourceOwnersTypes(
+          resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList()));
     }
     if (actorFilter.hasGroups()) {
-      result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList()));
+      result.setGroups(
+          actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList()));
     }
     if (actorFilter.hasUsers()) {
-      result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList()));
+      result.setUsers(
+          actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList()));
     }
     if (actorFilter.hasRoles()) {
-      result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList()));
+      result.setRoles(
+          actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList()));
     }
     return result;
   }
@@ -87,14 +91,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter)
 
   private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) {
     return PolicyMatchFilter.builder()
-        .setCriteria(filter.getCriteria()
-            .stream()
-            .map(criterion -> PolicyMatchCriterion.builder()
-                .setField(criterion.getField())
-                .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList()))
-                .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name()))
-                .build())
-            .collect(Collectors.toList()))
+        .setCriteria(
+            filter.getCriteria().stream()
+                .map(
+                    criterion ->
+                        PolicyMatchCriterion.builder()
+                            .setField(criterion.getField())
+                            .setValues(
+                                criterion.getValues().stream()
+                                    .map(this::mapValue)
+                                    .collect(Collectors.toList()))
+                            .setCondition(
+                                PolicyMatchCondition.valueOf(criterion.getCondition().name()))
+                            .build())
+                .collect(Collectors.toList()))
         .build();
   }
 
@@ -102,7 +112,10 @@ private PolicyMatchCriterionValue mapValue(final String value) {
     try {
       // If value is urn, set entity field
       Urn urn = Urn.createFromString(value);
-      return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build();
+      return PolicyMatchCriterionValue.builder()
+          .setValue(value)
+          .setEntity(UrnToEntityMapper.map(urn))
+          .build();
     } catch (URISyntaxException e) {
       // Value is not an urn. Just set value
       return PolicyMatchCriterionValue.builder().setValue(value).build();
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java
index cb323b60dd465..d82d71295d41b 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java
@@ -19,11 +19,9 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
-/**
- * Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}.
- */
-public class PolicyUpdateInputInfoMapper implements ModelMapper<PolicyUpdateInput, DataHubPolicyInfo> {
+/** Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. */
+public class PolicyUpdateInputInfoMapper
+    implements ModelMapper<PolicyUpdateInput, DataHubPolicyInfo> {
 
   public static final PolicyUpdateInputInfoMapper INSTANCE = new PolicyUpdateInputInfoMapper();
 
@@ -52,13 +50,21 @@ private DataHubActorFilter mapActors(final ActorFilterInput actorInput) {
     result.setAllUsers(actorInput.getAllUsers());
     result.setResourceOwners(actorInput.getResourceOwners());
     if (actorInput.getResourceOwnersTypes() != null) {
-      result.setResourceOwnersTypes(new UrnArray(actorInput.getResourceOwnersTypes().stream().map(this::createUrn).collect(Collectors.toList())));
+      result.setResourceOwnersTypes(
+          new UrnArray(
+              actorInput.getResourceOwnersTypes().stream()
+                  .map(this::createUrn)
+                  .collect(Collectors.toList())));
     }
     if (actorInput.getGroups() != null) {
-      result.setGroups(new UrnArray(actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList())));
+      result.setGroups(
+          new UrnArray(
+              actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList())));
     }
     if (actorInput.getUsers() != null) {
-      result.setUsers(new UrnArray(actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList())));
+      result.setUsers(
+          new UrnArray(
+              actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList())));
     }
     return result;
   }
@@ -83,19 +89,26 @@ private DataHubResourceFilter mapResources(final ResourceFilterInp
   }
 
   private PolicyMatchFilter mapFilter(final PolicyMatchFilterInput filter) {
-    return new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray(filter.getCriteria()
-        .stream()
-        .map(criterion -> new PolicyMatchCriterion().setField(criterion.getField())
-            .setValues(new StringArray(criterion.getValues()))
-            .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())))
-        .collect(Collectors.toList())));
+    return new PolicyMatchFilter()
+        .setCriteria(
+            new PolicyMatchCriterionArray(
+                filter.getCriteria().stream()
+                    .map(
+                        criterion ->
+                            new PolicyMatchCriterion()
+                                .setField(criterion.getField())
+                                .setValues(new StringArray(criterion.getValues()))
+                                .setCondition(
+                                    PolicyMatchCondition.valueOf(criterion.getCondition().name())))
+                    .collect(Collectors.toList())));
   }
 
   private Urn createUrn(String urnStr) {
     try {
       return Urn.createFromString(urnStr);
     } catch (URISyntaxException e) {
-      throw new RuntimeException(String.format("Failed to convert urnStr %s into an URN object", urnStr), e);
+      throw new RuntimeException(
+          String.format("Failed to convert urnStr %s into an URN object", urnStr), e);
     }
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java
index 524caf14e9afe..8e0ee335e09f3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.post;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.post.PostService;
 import com.linkedin.common.Media;
@@ -18,16 +20,14 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class CreatePostResolver implements DataFetcher<CompletableFuture<Boolean>> {
   private final PostService _postService;
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
 
     if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) {
@@ -35,7 +35,8 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
           "Unauthorized to create posts. Please contact your DataHub administrator if this needs corrective action.");
     }
 
-    final CreatePostInput input = bindArgument(environment.getArgument("input"), CreatePostInput.class);
+    final CreatePostInput input =
+        bindArgument(environment.getArgument("input"), CreatePostInput.class);
     final PostType type = input.getPostType();
     final UpdatePostContentInput content = input.getContent();
     final PostContentType contentType = content.getContentType();
@@ -45,16 +46,21 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
     final UpdateMediaInput updateMediaInput = content.getMedia();
     final Authentication authentication = context.getAuthentication();
 
-    Media media = updateMediaInput == null ? null
-        : _postService.mapMedia(updateMediaInput.getType().toString(), updateMediaInput.getLocation());
-    PostContent postContent = _postService.mapPostContent(contentType.toString(), title, description, link, media);
-
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        return _postService.createPost(type.toString(), postContent, authentication);
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to create a new post", e);
-      }
-    });
+    Media media =
+        updateMediaInput == null
+            ? null
+            : _postService.mapMedia(
+                updateMediaInput.getType().toString(), updateMediaInput.getLocation());
+    PostContent postContent =
+        _postService.mapPostContent(contentType.toString(), title, description, link, media);
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            return _postService.createPost(type.toString(), postContent, authentication);
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to create a new post", e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java
index d3cd0126fb852..7ab5d1381a1b3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java
@@ -13,14 +13,14 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class DeletePostResolver implements DataFetcher<CompletableFuture<Boolean>> {
   private final PostService _postService;
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
 
     if (!AuthorizationUtils.canManageGlobalAnnouncements(context)) {
@@ -31,12 +31,13 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
     final Urn postUrn = UrnUtils.getUrn(environment.getArgument("urn"));
     final Authentication authentication = context.getAuthentication();
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        return _postService.deletePost(postUrn, authentication);
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to create a new post", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            return _postService.deletePost(postUrn, authentication);
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to create a new post", e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java
index 59f2b458fdc90..5292adbe3aac3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.post;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -22,10 +25,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class ListPostsResolver implements DataFetcher<CompletableFuture<ListPostsResult>> {
@@ -36,38 +35,58 @@ public class ListPostsResolver implements DataFetcher<CompletableFuture<ListPost
   private final EntityClient _entityClient;
 
   @Override
-  public CompletableFuture<ListPostsResult> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<ListPostsResult> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final Authentication authentication = context.getAuthentication();
 
-    final ListPostsInput input = bindArgument(environment.getArgument("input"), ListPostsInput.class);
+    final ListPostsInput input =
+        bindArgument(environment.getArgument("input"), ListPostsInput.class);
     final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart();
     final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
     final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        final SortCriterion sortCriterion =
-            new SortCriterion().setField(LAST_MODIFIED_FIELD_NAME).setOrder(SortOrder.DESCENDING);
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final SortCriterion sortCriterion =
+                new SortCriterion()
+                    .setField(LAST_MODIFIED_FIELD_NAME)
+                    .setOrder(SortOrder.DESCENDING);
 
-        // First, get all Post Urns.
-        final SearchResult gmsResult = _entityClient.search(POST_ENTITY_NAME, query, null, sortCriterion, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
+            // First, get all Post Urns.
+            final SearchResult gmsResult =
+                _entityClient.search(
+                    POST_ENTITY_NAME,
+                    query,
+                    null,
+                    sortCriterion,
+                    start,
+                    count,
+                    context.getAuthentication(),
+                    new SearchFlags().setFulltext(true));
 
-        // Then, get and hydrate all Posts.
-        final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(POST_ENTITY_NAME,
-            new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())),
-            null, authentication);
+            // Then, get and hydrate all Posts.
+            final Map<Urn, EntityResponse> entities =
+                _entityClient.batchGetV2(
+                    POST_ENTITY_NAME,
+                    new HashSet<>(
+                        gmsResult.getEntities().stream()
+                            .map(SearchEntity::getEntity)
+                            .collect(Collectors.toList())),
+                    null,
+                    authentication);
 
-        final ListPostsResult result = new ListPostsResult();
-        result.setStart(gmsResult.getFrom());
-        result.setCount(gmsResult.getPageSize());
-        result.setTotal(gmsResult.getNumEntities());
-        result.setPosts(entities.values().stream().map(PostMapper::map).collect(Collectors.toList()));
-        return result;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to list posts", e);
-      }
-    });
+            final ListPostsResult result = new ListPostsResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setPosts(
+                entities.values().stream().map(PostMapper::map).collect(Collectors.toList()));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list posts", e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java
index 27de443bc100a..48f31fb75d371 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.query;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -7,8 +9,8 @@
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.CreateQueryInput;
-import com.linkedin.datahub.graphql.generated.QueryEntity;
 import com.linkedin.datahub.graphql.generated.CreateQuerySubjectInput;
+import com.linkedin.datahub.graphql.generated.QueryEntity;
 import com.linkedin.datahub.graphql.types.query.QueryMapper;
 import com.linkedin.metadata.service.QueryService;
 import com.linkedin.query.QueryLanguage;
@@ -22,9 +24,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class CreateQueryResolver implements DataFetcher<CompletableFuture<QueryEntity>> {
@@ -32,40 +31,49 @@ public class CreateQueryResolver implements DataFetcher<CompletableFuture<QueryE
   private final QueryService _queryService;
 
   @Override
-  public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
-    final CreateQueryInput input = bindArgument(environment.getArgument("input"), CreateQueryInput.class);
+    final CreateQueryInput input =
+        bindArgument(environment.getArgument("input"), CreateQueryInput.class);
     final Authentication authentication = context.getAuthentication();
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!AuthorizationUtils.canCreateQuery(input.getSubjects()
-          .stream()
-          .map(CreateQuerySubjectInput::getDatasetUrn).map(UrnUtils::getUrn)
-          .collect(Collectors.toList()), context)) {
-        throw new AuthorizationException(
-            "Unauthorized to create Query. Please contact your DataHub administrator for more information.");
-      }
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!AuthorizationUtils.canCreateQuery(
+              input.getSubjects().stream()
+                  .map(CreateQuerySubjectInput::getDatasetUrn)
+                  .map(UrnUtils::getUrn)
+                  .collect(Collectors.toList()),
+              context)) {
+            throw new AuthorizationException(
+                "Unauthorized to create Query. Please contact your DataHub administrator for more information.");
+          }
 
-      try {
-        final Urn queryUrn = _queryService.createQuery(
-            input.getProperties().getName(),
-            input.getProperties().getDescription(),
-            QuerySource.MANUAL,
-            new QueryStatement()
-                .setValue(input.getProperties().getStatement().getValue())
-                .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())),
-            input.getSubjects()
-                .stream()
-                .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn())))
-                .collect(Collectors.toList()),
-            authentication,
-            System.currentTimeMillis());
-        return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication));
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to create a new Query from input %s", input), e);
-      }
-    });
+          try {
+            final Urn queryUrn =
+                _queryService.createQuery(
+                    input.getProperties().getName(),
+                    input.getProperties().getDescription(),
+                    QuerySource.MANUAL,
+                    new QueryStatement()
+                        .setValue(input.getProperties().getStatement().getValue())
+                        .setLanguage(
+                            QueryLanguage.valueOf(
+                                input.getProperties().getStatement().getLanguage().toString())),
+                    input.getSubjects().stream()
+                        .map(
+                            sub ->
+                                new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn())))
+                        .collect(Collectors.toList()),
+                    authentication,
+                    System.currentTimeMillis());
+            return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication));
+          } catch (Exception e) {
+            throw new RuntimeException(
+                String.format("Failed to create a new Query from input %s", input), e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java
index 5c5bb288f32bf..4f5887c91b494 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java
@@ -18,7 +18,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class DeleteQueryResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -26,29 +25,34 @@ public class DeleteQueryResolver implements DataFetcher<CompletableFuture<Boolea
   private final QueryService _queryService;
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn"));
     final Authentication authentication = context.getAuthentication();
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication);
-      final List<Urn> subjectUrns = existingSubjects != null
-          ? existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList())
-          : Collections.emptyList();
-
-      if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) {
-        throw new AuthorizationException(
-            "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action.");
-      }
-
-      try {
-        _queryService.deleteQuery(queryUrn, authentication);
-        return true;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to delete Query", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final QuerySubjects existingSubjects =
+              _queryService.getQuerySubjects(queryUrn, authentication);
+          final List<Urn> subjectUrns =
+              existingSubjects != null
+                  ? existingSubjects.getSubjects().stream()
+                      .map(QuerySubject::getEntity)
+                      .collect(Collectors.toList())
+                  : Collections.emptyList();
+
+          if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) {
+            throw new AuthorizationException(
+                "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action.");
+          }
+
+          try {
+            _queryService.deleteQuery(queryUrn, authentication);
+            return true;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to delete Query", e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java
index c7e70cac15bdb..fec5bb120eeba 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.query;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -29,10 +32,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class ListQueriesResolver implements DataFetcher<CompletableFuture<ListQueriesResult>> {
@@ -48,38 +47,52 @@ public class ListQueriesResolver implements DataFetcher<CompletableFuture<ListQu
   private final EntityClient _entityClient;
 
   @Override
-  public CompletableFuture<ListQueriesResult> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<ListQueriesResult> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
-    final ListQueriesInput input = bindArgument(environment.getArgument("input"), ListQueriesInput.class);
+    final ListQueriesInput input =
+        bindArgument(environment.getArgument("input"), ListQueriesInput.class);
 
     final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart();
     final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
     final String query = input.getQuery() == null ?
DEFAULT_QUERY : input.getQuery();
 
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        final SortCriterion sortCriterion =
-            new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING);
-
-        // First, get all Query Urns.
-        final SearchResult gmsResult = _entityClient.search(QUERY_ENTITY_NAME, query, buildFilters(input), sortCriterion, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true).setSkipHighlighting(true));
-
-        final ListQueriesResult result = new ListQueriesResult();
-        result.setStart(gmsResult.getFrom());
-        result.setCount(gmsResult.getPageSize());
-        result.setTotal(gmsResult.getNumEntities());
-        result.setQueries(mapUnresolvedQueries(gmsResult.getEntities().stream()
-            .map(SearchEntity::getEntity)
-            .collect(Collectors.toList())));
-        return result;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to list Queries", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final SortCriterion sortCriterion =
+                new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING);
+
+            // First, get all Query Urns.
+            final SearchResult gmsResult =
+                _entityClient.search(
+                    QUERY_ENTITY_NAME,
+                    query,
+                    buildFilters(input),
+                    sortCriterion,
+                    start,
+                    count,
+                    context.getAuthentication(),
+                    new SearchFlags().setFulltext(true).setSkipHighlighting(true));
+
+            final ListQueriesResult result = new ListQueriesResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setQueries(
+                mapUnresolvedQueries(
+                    gmsResult.getEntities().stream()
+                        .map(SearchEntity::getEntity)
+                        .collect(Collectors.toList())));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list Queries", e);
+          }
+        });
   }
 
-  // This method maps urns returned from the list endpoint into Partial Query objects which will be resolved be a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial Query objects which will be
+  // resolved by a separate Batch resolver.
   private List<QueryEntity> mapUnresolvedQueries(final List<Urn> queryUrns) {
     final List<QueryEntity> results = new ArrayList<>();
     for (final Urn urn : queryUrns) {
@@ -99,13 +112,23 @@ private Filter buildFilters(@Nonnull final ListQueriesInput input) {
     // Optionally add a source filter.
     if (input.getSource() != null) {
       andConditions.add(
-          new FacetFilterInput(QUERY_SOURCE_FIELD, null, ImmutableList.of(input.getSource().toString()), false, FilterOperator.EQUAL));
+          new FacetFilterInput(
+              QUERY_SOURCE_FIELD,
+              null,
+              ImmutableList.of(input.getSource().toString()),
+              false,
+              FilterOperator.EQUAL));
     }
 
     // Optionally add an entity type filter.
if (input.getDatasetUrn() != null) { andConditions.add( - new FacetFilterInput(QUERY_ENTITIES_FIELD, null, ImmutableList.of(input.getDatasetUrn()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_ENTITIES_FIELD, + null, + ImmutableList.of(input.getDatasetUrn()), + false, + FilterOperator.EQUAL)); } criteria.setAnd(andConditions); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java index ef34e91d8fe77..cc284aaf7b563 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -26,9 +28,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class UpdateQueryResolver implements DataFetcher<CompletableFuture<QueryEntity>> { @@ -36,60 +35,72 @@ public class UpdateQueryResolver implements DataFetcher<CompletableFuture<QueryE private final QueryService _queryService; @Override - public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<QueryEntity> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateQueryInput input = bindArgument(environment.getArgument("input"), UpdateQueryInput.class); + final UpdateQueryInput input = + bindArgument(environment.getArgument("input"), UpdateQueryInput.class); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); + return CompletableFuture.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(queryUrn, authentication); - if (existingSubjects == null) { - // No Query Found - throw new DataHubGraphQLException(String.format("Failed to find query with urn %s", queryUrn), DataHubGraphQLErrorCode.NOT_FOUND); - } + if (existingSubjects == null) { + // No Query Found + throw new DataHubGraphQLException( + String.format("Failed to find query with urn %s", queryUrn), + DataHubGraphQLErrorCode.NOT_FOUND); + } - final List<Urn> subjectUrns = existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()); - final List<Urn> newSubjectUrns = input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) - .collect(Collectors.toList()) - : Collections.emptyList(); - final List<Urn> impactedSubjectUrns = new ArrayList<>(); - impactedSubjectUrns.addAll(subjectUrns); - impactedSubjectUrns.addAll(newSubjectUrns); + final List<Urn> subjectUrns = + existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()); + final List<Urn> newSubjectUrns = + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) + .collect(Collectors.toList()) + : Collections.emptyList(); + final List<Urn> impactedSubjectUrns = new ArrayList<>(); + impactedSubjectUrns.addAll(subjectUrns); + impactedSubjectUrns.addAll(newSubjectUrns); - if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); - } + if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); + } - try { - _queryService.updateQuery( - queryUrn, - input.getProperties() != null ? input.getProperties().getName() : null, - input.getProperties() != null ? input.getProperties().getDescription() : null, - input.getProperties() != null && input.getProperties().getStatement() != null - ? new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())) - : null, - input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()) - : null, - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Query from input %s", input), e); - } - }); + try { + _queryService.updateQuery( + queryUrn, + input.getProperties() != null ? input.getProperties().getName() : null, + input.getProperties() != null ? input.getProperties().getDescription() : null, + input.getProperties() != null && input.getProperties().getStatement() != null + ? new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())) + : null, + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()) + : null, + authentication, + System.currentTimeMillis()); + return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update Query from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index df1a6d4d4b00d..ca1e01b45989d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.recommendation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ContentParams; @@ -31,12 +33,10 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListRecommendationsResolver implements DataFetcher<CompletableFuture<ListRecommendationsResult>> { +public class ListRecommendationsResolver + implements DataFetcher<CompletableFuture<ListRecommendationsResult>> { private static final ListRecommendationsResult EMPTY_RECOMMENDATIONS = new ListRecommendationsResult(Collections.emptyList()); @@ -49,24 +49,28 @@ public CompletableFuture<ListRecommendationsResult> get(DataFetchingEnvironment final ListRecommendationsInput input = bindArgument(environment.getArgument("input"), ListRecommendationsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Listing recommendations for input {}", input); - List<com.linkedin.metadata.recommendation.RecommendationModule> modules = - _recommendationsService.listRecommendations(Urn.createFromString(input.getUserUrn()), - mapRequestContext(input.getRequestContext()), input.getLimit()); - return ListRecommendationsResult.builder() - .setModules(modules.stream() - .map(this::mapRecommendationModule) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList())) - .build(); - } catch (Exception e) { - log.error("Failed to get recommendations for input {}", input, e); - return EMPTY_RECOMMENDATIONS; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug("Listing recommendations for input {}", input); + List<com.linkedin.metadata.recommendation.RecommendationModule> modules = + _recommendationsService.listRecommendations( + Urn.createFromString(input.getUserUrn()), + mapRequestContext(input.getRequestContext()), + input.getLimit()); + return ListRecommendationsResult.builder() + .setModules( + modules.stream() + .map(this::mapRecommendationModule) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList())) + .build(); + } catch (Exception e) { + log.error("Failed to get recommendations for input {}", input, e); + return EMPTY_RECOMMENDATIONS; + } + }); } private com.linkedin.metadata.recommendation.RecommendationRequestContext 
mapRequestContext( @@ -74,22 +78,24 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq com.linkedin.metadata.recommendation.ScenarioType mappedScenarioType; try { mappedScenarioType = - com.linkedin.metadata.recommendation.ScenarioType.valueOf(requestContext.getScenario().toString()); + com.linkedin.metadata.recommendation.ScenarioType.valueOf( + requestContext.getScenario().toString()); } catch (IllegalArgumentException e) { log.error("Failed to map scenario type: {}", requestContext.getScenario(), e); throw e; } com.linkedin.metadata.recommendation.RecommendationRequestContext mappedRequestContext = - new com.linkedin.metadata.recommendation.RecommendationRequestContext().setScenario(mappedScenarioType); + new com.linkedin.metadata.recommendation.RecommendationRequestContext() + .setScenario(mappedScenarioType); if (requestContext.getSearchRequestContext() != null) { SearchRequestContext searchRequestContext = new SearchRequestContext().setQuery(requestContext.getSearchRequestContext().getQuery()); if (requestContext.getSearchRequestContext().getFilters() != null) { - searchRequestContext.setFilters(new CriterionArray(requestContext.getSearchRequestContext() - .getFilters() - .stream() - .map(facetField -> criterionFromFilter(facetField)) - .collect(Collectors.toList()))); + searchRequestContext.setFilters( + new CriterionArray( + requestContext.getSearchRequestContext().getFilters().stream() + .map(facetField -> criterionFromFilter(facetField)) + .collect(Collectors.toList()))); } mappedRequestContext.setSearchRequestContext(searchRequestContext); } @@ -98,12 +104,17 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq try { entityUrn = Urn.createFromString(requestContext.getEntityRequestContext().getUrn()); } catch (URISyntaxException e) { - log.error("Malformed URN while mapping recommendations request: {}", - requestContext.getEntityRequestContext().getUrn(), e); + log.error( + "Malformed URN while mapping recommendations request: {}", + requestContext.getEntityRequestContext().getUrn(), + e); throw new IllegalArgumentException(e); } - EntityRequestContext entityRequestContext = new EntityRequestContext().setUrn(entityUrn) - .setType(EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); + EntityRequestContext entityRequestContext = + new EntityRequestContext() + .setUrn(entityUrn) + .setType( + EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); mappedRequestContext.setEntityRequestContext(entityRequestContext); } return mappedRequestContext; @@ -115,13 +126,16 @@ private Optional<RecommendationModule> mapRecommendationModule( mappedModule.setTitle(module.getTitle()); mappedModule.setModuleId(module.getModuleId()); try { - mappedModule.setRenderType(RecommendationRenderType.valueOf(module.getRenderType().toString())); + mappedModule.setRenderType( + RecommendationRenderType.valueOf(module.getRenderType().toString())); } catch (IllegalArgumentException e) { log.error("Failed to map render type: {}", module.getRenderType(), e); throw e; } mappedModule.setContent( - module.getContent().stream().map(this::mapRecommendationContent).collect(Collectors.toList())); + module.getContent().stream() + .map(this::mapRecommendationContent) + .collect(Collectors.toList())); return Optional.of(mappedModule); } @@ -145,26 +159,31 @@ private RecommendationParams mapRecommendationParams( SearchParams searchParams = new SearchParams(); 
searchParams.setQuery(params.getSearchParams().getQuery()); if (!params.getSearchParams().getFilters().isEmpty()) { - searchParams.setFilters(params.getSearchParams() - .getFilters() - .stream() - .map(criterion -> FacetFilter.builder().setField(criterion.getField()).setValues( - ImmutableList.of(criterion.getValue())).build()) - .collect(Collectors.toList())); + searchParams.setFilters( + params.getSearchParams().getFilters().stream() + .map( + criterion -> + FacetFilter.builder() + .setField(criterion.getField()) + .setValues(ImmutableList.of(criterion.getValue())) + .build()) + .collect(Collectors.toList())); } mappedParams.setSearchParams(searchParams); } if (params.hasEntityProfileParams()) { Urn profileUrn = params.getEntityProfileParams().getUrn(); - mappedParams.setEntityProfileParams(EntityProfileParams.builder() - .setUrn(profileUrn.toString()) - .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) - .build()); + mappedParams.setEntityProfileParams( + EntityProfileParams.builder() + .setUrn(profileUrn.toString()) + .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) + .build()); } if (params.hasContentParams()) { - mappedParams.setContentParams(ContentParams.builder().setCount(params.getContentParams().getCount()).build()); + mappedParams.setContentParams( + ContentParams.builder().setCount(params.getContentParams().getCount()).build()); } return mappedParams; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java index 43d975344ba25..a71da7821f09c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.datahub.authorization.role.RoleService; @@ -13,11 +15,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j - @RequiredArgsConstructor public class AcceptRoleResolver implements DataFetcher<CompletableFuture<Boolean>> { private final RoleService _roleService; @@ -27,25 +25,32 @@ public class AcceptRoleResolver implements DataFetcher<CompletableFuture<Boolean public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final AcceptRoleInput input = bindArgument(environment.getArgument("input"), AcceptRoleInput.class); + final AcceptRoleInput input = + bindArgument(environment.getArgument("input"), AcceptRoleInput.class); final String inviteTokenStr = input.getInviteToken(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); - if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { - throw new RuntimeException(String.format("Invite token %s is invalid", inviteTokenStr)); - } - - final Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); - 
_roleService.batchAssignRoleToActors(Collections.singletonList(authentication.getActor().toUrnStr()), roleUrn, - authentication); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to accept role using invite token %s", inviteTokenStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); + if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { + throw new RuntimeException( + String.format("Invite token %s is invalid", inviteTokenStr)); + } + + final Urn roleUrn = + _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); + _roleService.batchAssignRoleToActors( + Collections.singletonList(authentication.getActor().toUrnStr()), + roleUrn, + authentication); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to accept role using invite token %s", inviteTokenStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java index dc847069afae9..1997d0ac74601 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.linkedin.common.urn.Urn; @@ -13,10 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAssignRoleResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -30,19 +29,22 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw "Unauthorized to assign roles. Please contact your DataHub administrator if this needs corrective action."); } - final BatchAssignRoleInput input = bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); + final BatchAssignRoleInput input = + bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); final String roleUrnStr = input.getRoleUrn(); final List<String> actors = input.getActors(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn roleUrn = roleUrnStr == null ? null : Urn.createFromString(roleUrnStr); - _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn roleUrn = roleUrnStr == null ? 
null : Urn.createFromString(roleUrnStr); + _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java index 6bdf52e2f89f1..61ecf09fc91a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateInviteTokenResolver implements DataFetcher<CompletableFuture<InviteToken>> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to create invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final CreateInviteTokenInput input = bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); + final CreateInviteTokenInput input = + bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java index 0b0cbbb7ba473..066753c4f7559 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class GetInviteTokenResolver implements DataFetcher<CompletableFuture<InviteToken>> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<InviteToken> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to get invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final GetInviteTokenInput input = bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); + final GetInviteTokenInput input = + bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to get invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to get invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index 4746370d8603b..a1dd9219f6549 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubRole; @@ -24,10 +27,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListRolesResolver implements DataFetcher<CompletableFuture<ListRolesResult>> { @@ -38,36 +37,51 @@ public class ListRolesResolver implements DataFetcher<CompletableFuture<ListRole private final EntityClient _entityClient; @Override - public CompletableFuture<ListRolesResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListRolesResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListRolesInput input = bindArgument(environment.getArgument("input"), ListRolesInput.class); + final ListRolesInput input = + bindArgument(environment.getArgument("input"), ListRolesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all role Urns. - final SearchResult gmsResult = - _entityClient.search(DATAHUB_ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all role Urns. 
+ final SearchResult gmsResult = + _entityClient.search( + DATAHUB_ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get and hydrate all users. - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, context.getAuthentication()); + // Then, get and hydrate all users. + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - final ListRolesResult result = new ListRolesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setRoles(mapEntitiesToRoles(entities.values())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list roles", e); - } - }); + final ListRolesResult result = new ListRolesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setRoles(mapEntitiesToRoles(entities.values())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list roles", e); + } + }); } private List<DataHubRole> mapEntitiesToRoles(final Collection<EntityResponse> entities) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index e9140441999e2..6d23456b76b4f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AggregateAcrossEntitiesInput; @@ -14,25 +19,20 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** - * Executes a search query only to get a provided list of aggregations back. 
- * Does not resolve any entities as results. + * Executes a search query only to get a provided list of aggregations back. Does not resolve any + * entities as results. */ @Slf4j @RequiredArgsConstructor -public class AggregateAcrossEntitiesResolver implements DataFetcher<CompletableFuture<AggregateResults>> { +public class AggregateAcrossEntitiesResolver + implements DataFetcher<CompletableFuture<AggregateResults>> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -48,47 +48,63 @@ public CompletableFuture<AggregateResults> get(DataFetchingEnvironment environme // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - - final List<String> facets = input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; - - try { - return mapAggregateResults(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - 0, - 0, // 0 entity count because we don't want resolved entities - searchFlags, - null, - ResolverUtils.getAuthentication(environment), - facets)); - } catch (Exception e) { - log.error( - "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", - input.getTypes(), input.getQuery(), input.getOrFilters()); - throw new RuntimeException( - "Failed to execute aggregate across entities: " + String.format("entity types %s, query %s, filters: %s", - input.getTypes(), input.getQuery(), input.getOrFilters()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + + final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + + final List<String> facets = + input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; + + try { + return mapAggregateResults( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + 0, + 0, // 0 entity count because we don't want resolved entities + searchFlags, + null, + ResolverUtils.getAuthentication(environment), + facets)); + } catch (Exception e) { + log.error( + "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters()); + throw new RuntimeException( + "Failed to execute aggregate across entities: " + + String.format( + "entity types %s, query %s, filters: %s", + input.getTypes(), input.getQuery(), input.getOrFilters()), + e); + } + }); } AggregateResults mapAggregateResults(SearchResult searchResult) { final AggregateResults results = new AggregateResults(); - results.setFacets(searchResult.getMetadata().getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + results.setFacets( + searchResult.getMetadata().getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return results; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index 043ecf5eb97f1..c3e843cefd5c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.ValidationException; @@ -13,87 +17,90 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; +import java.util.List; import java.util.Map; +import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; -import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ -public class AutoCompleteForMultipleResolver implements DataFetcher<CompletableFuture<AutoCompleteMultipleResults>> { +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ +public class AutoCompleteForMultipleResolver + implements DataFetcher<CompletableFuture<AutoCompleteMultipleResults>> { - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); - private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; - private final ViewService 
_viewService; + private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; + private final ViewService _viewService; - public AutoCompleteForMultipleResolver(@Nonnull final List<SearchableEntityType<?, ?>> searchableEntities, @Nonnull final ViewService viewService) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - _viewService = viewService; - } + public AutoCompleteForMultipleResolver( + @Nonnull final List<SearchableEntityType<?, ?>> searchableEntities, + @Nonnull final ViewService viewService) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + _viewService = viewService; + } - @Override - public CompletableFuture<AutoCompleteMultipleResults> get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final AutoCompleteMultipleInput input = bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); + @Override + public CompletableFuture<AutoCompleteMultipleResults> get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final AutoCompleteMultipleInput input = + bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); - if (isBlank(input.getQuery())) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) + if (isBlank(input.getQuery())) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? 
resolveView( + _viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) : null; - List<EntityType> types = getEntityTypes(input.getTypes(), maybeResolvedView); - if (types != null && types.size() > 0) { - return AutocompleteUtils.batchGetAutocompleteResults( - types.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); - } - - // By default, autocomplete only against the Default Set of Autocomplete entities - return AutocompleteUtils.batchGetAutocompleteResults( - AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); + List<EntityType> types = getEntityTypes(input.getTypes(), maybeResolvedView); + if (types != null && types.size() > 0) { + return AutocompleteUtils.batchGetAutocompleteResults( + types.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); } - /** - * Gets the intersection of provided input types and types on the view applied (if any) - */ - @Nullable - List<EntityType> getEntityTypes(final @Nullable List<EntityType> inputTypes, final @Nullable DataHubViewInfo maybeResolvedView) { - List<EntityType> types = inputTypes; - if (maybeResolvedView != null) { - List<EntityType> inputEntityTypes = types != null ? types : new ArrayList<>(); - final List<String> inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - List<String> stringEntityTypes = SearchUtils.intersectEntityTypes(inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); + // By default, autocomplete only against the Default Set of Autocomplete entities + return AutocompleteUtils.batchGetAutocompleteResults( + AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); + } - types = stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); - } + /** Gets the intersection of provided input types and types on the view applied (if any) */ + @Nullable + List<EntityType> getEntityTypes( + final @Nullable List<EntityType> inputTypes, + final @Nullable DataHubViewInfo maybeResolvedView) { + List<EntityType> types = inputTypes; + if (maybeResolvedView != null) { + List<EntityType> inputEntityTypes = types != null ? 
types : new ArrayList<>(); + final List<String> inputEntityNames = + inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List<String> stringEntityTypes = + SearchUtils.intersectEntityTypes( + inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); - return types; + types = + stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); } + + return types; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java index e13545aadc516..235f5f8d27899 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java @@ -1,90 +1,94 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.linkedin.datahub.graphql.types.SearchableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.AutoCompleteInput; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ public class AutoCompleteResolver implements DataFetcher<CompletableFuture<AutoCompleteResults>> { - private static final int DEFAULT_LIMIT = 5; + private static final int DEFAULT_LIMIT = 5; - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); - private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; + private final Map<EntityType, SearchableEntityType<?, ?>> _typeToEntity; - public AutoCompleteResolver(@Nonnull final List<SearchableEntityType<?, ?>> searchableEntities) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - } + public AutoCompleteResolver(@Nonnull final List<SearchableEntityType<?, ?>> searchableEntities) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture<AutoCompleteResults> get(DataFetchingEnvironment environment) { - final AutoCompleteInput input = bindArgument(environment.getArgument("input"), AutoCompleteInput.class); + @Override + public 
CompletableFuture<AutoCompleteResults> get(DataFetchingEnvironment environment) { + final AutoCompleteInput input = + bindArgument(environment.getArgument("input"), AutoCompleteInput.class); - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - if (isBlank(sanitizedQuery)) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + if (isBlank(sanitizedQuery)) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug("Executing autocomplete. " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + "Executing autocomplete. " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), input.getLimit())); - return _typeToEntity.get(input.getType()).autoComplete( - sanitizedQuery, - input.getField(), - filter, - limit, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + return _typeToEntity + .get(input.getType()) + .autoComplete( + sanitizedQuery, input.getField(), filter, limit, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), - input.getLimit()) + " " - + e.getMessage()); - throw new RuntimeException("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", - input.getType(), - input.getField(), - input.getQuery(), - input.getFilters(), - input.getLimit()), e); - } - }); - } + input.getLimit()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", + input.getType(), + input.getField(), + input.getQuery(), + input.getFilters(), + input.getLimit()), + e); + } + }); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java index 40722211de8d3..9cd860781c0d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java @@ -14,69 +14,81 @@ import java.util.List; import java.util.concurrent.CompletableFuture; 
import java.util.stream.Collectors; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; - - public class AutocompleteUtils { private static final Logger _logger = LoggerFactory.getLogger(AutocompleteUtils.class.getName()); private static final int DEFAULT_LIMIT = 5; - private AutocompleteUtils() { } + private AutocompleteUtils() {} public static CompletableFuture<AutoCompleteMultipleResults> batchGetAutocompleteResults( List<SearchableEntityType<?, ?>> entities, String sanitizedQuery, AutoCompleteMultipleInput input, DataFetchingEnvironment environment, - @Nullable DataHubViewInfo view - ) { + @Nullable DataHubViewInfo view) { final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - final List<CompletableFuture<AutoCompleteResultForEntity>> autoCompletesFuture = entities.stream().map(entity -> CompletableFuture.supplyAsync(() -> { - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = view != null - ? SearchUtils.combineFilters(filter, view.getDefinition().getFilter()) - : filter; + final List<CompletableFuture<AutoCompleteResultForEntity>> autoCompletesFuture = + entities.stream() + .map( + entity -> + CompletableFuture.supplyAsync( + () -> { + final Filter filter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = + view != null + ? SearchUtils.combineFilters( + filter, view.getDefinition().getFilter()) + : filter; - try { - final AutoCompleteResults searchResult = entity.autoComplete( - sanitizedQuery, - input.getField(), - finalFilter, - limit, - environment.getContext() - ); - return new AutoCompleteResultForEntity( - entity.type(), - searchResult.getSuggestions(), - searchResult.getEntities() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete all: " - + String.format("field %s, query %s, filters: %s, limit: %s", - input.getField(), - input.getQuery(), - filter, - input.getLimit()), e); - return new AutoCompleteResultForEntity(entity.type(), Collections.emptyList(), Collections.emptyList()); - } - })).collect(Collectors.toList()); + try { + final AutoCompleteResults searchResult = + entity.autoComplete( + sanitizedQuery, + input.getField(), + finalFilter, + limit, + environment.getContext()); + return new AutoCompleteResultForEntity( + entity.type(), + searchResult.getSuggestions(), + searchResult.getEntities()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete all: " + + String.format( + "field %s, query %s, filters: %s, limit: %s", + input.getField(), + input.getQuery(), + filter, + input.getLimit()), + e); + return new AutoCompleteResultForEntity( + entity.type(), Collections.emptyList(), Collections.emptyList()); + } + })) + .collect(Collectors.toList()); return CompletableFuture.allOf(autoCompletesFuture.toArray(new CompletableFuture[0])) - .thenApplyAsync((res) -> { - AutoCompleteMultipleResults result = new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); - List<AutoCompleteResultForEntity> suggestions = autoCompletesFuture.stream() - .map(CompletableFuture::join) - .filter( + .thenApplyAsync( + (res) -> { + AutoCompleteMultipleResults result = + new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); + List<AutoCompleteResultForEntity> suggestions = + autoCompletesFuture.stream() + .map(CompletableFuture::join) + .filter( autoCompleteResultForEntity -> - 
autoCompleteResultForEntity.getSuggestions() != null && autoCompleteResultForEntity.getSuggestions().size() > 0 - ) - .collect(Collectors.toList()); - result.setSuggestions(suggestions); - return result; - }); + autoCompleteResultForEntity.getSuggestions() != null + && autoCompleteResultForEntity.getSuggestions().size() > 0) + .collect(Collectors.toList()); + result.setSuggestions(suggestions); + return result; + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index 17058fd8d7cff..e54955e1857f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.Entity; @@ -18,26 +23,20 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; - +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor -public class GetQuickFiltersResolver implements DataFetcher<CompletableFuture<GetQuickFiltersResult>> { +public class GetQuickFiltersResolver + implements DataFetcher<CompletableFuture<GetQuickFiltersResult>> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -47,41 +46,51 @@ public class GetQuickFiltersResolver implements DataFetcher<CompletableFuture<Ge private static final int SOURCE_ENTITY_COUNT = 3; private static final int DATAHUB_ENTITY_COUNT = 2; - public CompletableFuture<GetQuickFiltersResult> get(final DataFetchingEnvironment environment) throws Exception { - final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); - - return CompletableFuture.supplyAsync(() -> { - final GetQuickFiltersResult result = new GetQuickFiltersResult(); - final List<QuickFilter> quickFilters = new ArrayList<>(); - - try { - final SearchResult searchResult = getSearchResults(ResolverUtils.getAuthentication(environment), input); - final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); - - 
quickFilters.addAll(getPlatformQuickFilters(aggregations)); - quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); - } catch (Exception e) { - log.error("Failed getting quick filters", e); - throw new RuntimeException("Failed to to get quick filters", e); - } - - result.setQuickFilters(quickFilters); - return result; - }); + public CompletableFuture<GetQuickFiltersResult> get(final DataFetchingEnvironment environment) + throws Exception { + final GetQuickFiltersInput input = + bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final GetQuickFiltersResult result = new GetQuickFiltersResult(); + final List<QuickFilter> quickFilters = new ArrayList<>(); + + try { + final SearchResult searchResult = + getSearchResults(ResolverUtils.getAuthentication(environment), input); + final AggregationMetadataArray aggregations = + searchResult.getMetadata().getAggregations(); + + quickFilters.addAll(getPlatformQuickFilters(aggregations)); + quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); + } catch (Exception e) { + log.error("Failed getting quick filters", e); + throw new RuntimeException("Failed to get quick filters", e); + } + + result.setQuickFilters(quickFilters); + return result; + }); } - /** - * Do a star search with view filter applied to get info about all data in this instance. - */ - private SearchResult getSearchResults(@Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) throws Exception { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) - : null; - final List<String> entityNames = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + /** Do a star search with view filter applied to get info about all data in this instance. */ + private SearchResult getSearchResults( + @Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) + throws Exception { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) + : null; + final List<String> entityNames = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); return _entityClient.searchAcrossEntities( maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + ?
SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) : entityNames, "*", maybeResolvedView != null @@ -95,67 +104,88 @@ private SearchResult getSearchResults(@Nonnull final Authentication authenticati } /** - * Get platforms and their count from an aggregations array, sorts by entity count, and map the top 5 to quick filters + * Get platforms and their count from an aggregations array, sorts by entity count, and map the + * top 5 to quick filters */ - private List<QuickFilter> getPlatformQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List<QuickFilter> getPlatformQuickFilters( + @Nonnull final AggregationMetadataArray aggregations) { final List<QuickFilter> platforms = new ArrayList<>(); - final Optional<AggregationMetadata> platformAggregations = aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); + final Optional<AggregationMetadata> platformAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); if (platformAggregations.isPresent()) { final List<FilterValue> sortedPlatforms = - platformAggregations.get().getFilterValues().stream().sorted(Comparator.comparingLong(val -> -val.getFacetCount())).collect(Collectors.toList()); - sortedPlatforms.forEach(platformFilter -> { - if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { - platforms.add(mapQuickFilter(PLATFORM, platformFilter)); - } - }); + platformAggregations.get().getFilterValues().stream() + .sorted(Comparator.comparingLong(val -> -val.getFacetCount())) + .collect(Collectors.toList()); + sortedPlatforms.forEach( + platformFilter -> { + if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { + platforms.add(mapQuickFilter(PLATFORM, platformFilter)); + } + }); } // return platforms sorted alphabetically by their name - return platforms.stream().sorted(Comparator.comparing(QuickFilter::getValue)).collect(Collectors.toList()); + return platforms.stream() + .sorted(Comparator.comparing(QuickFilter::getValue)) + .collect(Collectors.toList()); } /** - * Gets entity type quick filters from search aggregations. First, get source entity type quick filters - * from a prioritized list. Do the same for datathub entity types. + * Gets entity type quick filters from search aggregations. First, get source entity type quick + * filters from a prioritized list. Do the same for datahub entity types.
*/ - private List<QuickFilter> getEntityTypeQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List<QuickFilter> getEntityTypeQuickFilters( + @Nonnull final AggregationMetadataArray aggregations) { final List<QuickFilter> entityTypes = new ArrayList<>(); - final Optional<AggregationMetadata> entityAggregations = aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); + final Optional<AggregationMetadata> entityAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); if (entityAggregations.isPresent()) { final List<QuickFilter> sourceEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, + SOURCE_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(sourceEntityTypeFilters); final List<QuickFilter> dataHubEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, + DATAHUB_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(dataHubEntityTypeFilters); } return entityTypes; } /** - * Create a quick filters list by looping over prioritized list and adding filters that exist until we reach the maxListSize defined + * Create a quick filters list by looping over prioritized list and adding filters that exist + * until we reach the maxListSize defined */ private List<QuickFilter> getQuickFiltersFromList( @Nonnull final List<String> prioritizedList, final int maxListSize, - @Nonnull final AggregationMetadata entityAggregations - ) { + @Nonnull final AggregationMetadata entityAggregations) { final List<QuickFilter> entityTypes = new ArrayList<>(); - prioritizedList.forEach(entityType -> { - if (entityTypes.size() < maxListSize) { - final Optional<FilterValue> entityFilter = entityAggregations.getFilterValues().stream().filter(val -> val.getValue().equals(entityType)).findFirst(); - if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { - entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); - } - } - }); + prioritizedList.forEach( + entityType -> { + if (entityTypes.size() < maxListSize) { + final Optional<FilterValue> entityFilter = + entityAggregations.getFilterValues().stream() + .filter(val -> val.getValue().equals(entityType)) + .findFirst(); + if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { + entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); + } + } + }); return entityTypes; } - private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final FilterValue filterValue) { + private QuickFilter mapQuickFilter( + @Nonnull final String field, @Nonnull final FilterValue filterValue) { final boolean isEntityTypeFilter = field.equals(ENTITY_FILTER_NAME); final QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); @@ -167,9 +197,7 @@ private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final F return quickFilter; } - /** - * If we're working with an entity type filter, we need to convert the value to an EntityType - */ + /** If we're working with an entity type filter, we need to convert the value to an EntityType */ public static String convertFilterValue(String filterValue, boolean isEntityType) { if (isEntityType) { return 
EntityTypeMapper.getType(filterValue).toString(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index d576ffc8ca280..742d1d170de64 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -24,13 +27,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'scrollAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossEntitiesResolver implements DataFetcher<CompletableFuture<ScrollResults>> { @@ -48,57 +45,80 @@ public CompletableFuture<ScrollResults> get(DataFetchingEnvironment environment) bindArgument(environment.getArgument("input"), ScrollAcrossEntitiesInput.class); final List<EntityType> entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - final List<String> entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + final List<String> entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - // escape forward slash since it is a reserved character in Elasticsearch, default to * if blank/empty - final String sanitizedQuery = StringUtils.isNotBlank(input.getQuery()) - ? ResolverUtils.escapeForwardSlash(input.getQuery()) : "*"; + // escape forward slash since it is a reserved character in Elasticsearch, default to * if + // blank/empty + final String sanitizedQuery = + StringUtils.isNotBlank(input.getQuery()) + ? ResolverUtils.escapeForwardSlash(input.getQuery()) + : "*"; - @Nullable - final String scrollId = input.getScrollId(); + @Nullable final String scrollId = input.getScrollId(); final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ?
resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; - return UrnScrollResultsMapper.map(_entityClient.scrollAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - scrollId, - keepAlive, - count, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), e); - } - }); + return UrnScrollResultsMapper.map( + _entityClient.scrollAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + scrollId, + keepAlive, + count, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index 78be1ac309690..adab62c22bb72 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AndFilterInput; @@ -25,13 +28,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'scrollAcrossLineage' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossLineageResolver @@ -53,55 +50,98 @@ public CompletableFuture<ScrollAcrossLineageResults> get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); List<EntityType> entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List<String> entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + List<String> entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final String scrollId = input.getScrollId() != null ? input.getScrollId() : null; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List<AndFilterInput> filters = input.getOrFilters() != null ?
input.getOrFilters() : new ArrayList<>(); + final List<FacetFilterInput> facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); final Integer maxHops = getMaxHops(facetFilters); String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + final Long startTimeMillis = + input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable final Long endTimeMillis = input.getEndTimeMillis() == null ? null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); - SearchFlags searchFlags = null; - final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = new SearchFlags() - .setSkipCache(inputFlags.getSkipCache()) - .setFulltext(inputFlags.getFulltext()) - .setMaxAggValues(inputFlags.getMaxAggValues()); - } - return UrnScrollAcrossLineageResultsMapper.map( - _entityClient.scrollAcrossLineage(urn, resolvedDirection, entityNames, sanitizedQuery, - maxHops, ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), null, scrollId, - keepAlive, count, startTimeMillis, endTimeMillis, searchFlags, ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); - throw new RuntimeException("Failed to execute scroll across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count), e); - } - }); + SearchFlags searchFlags = null; + final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = + input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = + new SearchFlags() + .setSkipCache(inputFlags.getSkipCache()) + .setFulltext(inputFlags.getFulltext()) + .setMaxAggValues(inputFlags.getMaxAggValues()); + } + return UrnScrollAcrossLineageResultsMapper.map( + _entityClient.scrollAcrossLineage( + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), + null, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + 
resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); + throw new RuntimeException( + "Failed to execute scroll across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index 1022b25b3cd99..f8178e3b396cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; @@ -19,13 +22,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossEntitiesResolver implements DataFetcher<CompletableFuture<SearchResults>> { @@ -50,43 +47,65 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - - SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - SortCriterion sortCriterion = input.getSortInput() != null ? mapSortCriterion(input.getSortInput().getSortCriterion()) : null; - - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? 
SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - start, - count, - searchFlags, - sortCriterion, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + + SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + SortCriterion sortCriterion = + input.getSortInput() != null + ? mapSortCriterion(input.getSortInput().getSortCriterion()) + : null; + + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + start, + count, + searchFlags, + sortCriterion, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 9f489183f4af7..0f5d2d90ba0c2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; @@ -25,13 +28,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for 
resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossLineage' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossLineageResolver @@ -54,76 +51,95 @@ public CompletableFuture<SearchAcrossLineageResults> get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); List<EntityType> entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List<String> entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + List<String> entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List<FacetFilterInput> filters = input.getFilters() != null ? input.getFilters() : new ArrayList<>(); + final List<FacetFilterInput> filters = + input.getFilters() != null ? input.getFilters() : new ArrayList<>(); final Integer maxHops = getMaxHops(filters); @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + final Long startTimeMillis = + input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable final Long endTimeMillis = input.getEndTimeMillis() == null ?
null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, - resolvedDirection, - input.getTypes(), - input.getQuery(), - filters, - start, - count); - - final Filter filter = - ResolverUtils.buildFilter( + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), filters, - input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - if (inputFlags.getSkipHighlighting() == null) { - searchFlags.setSkipHighlighting(true); - } - } else { - searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); - } - - return UrnSearchAcrossLineageResultsMapper.map( - _entityClient.searchAcrossLineage( + start, + count); + + final Filter filter = ResolverUtils.buildFilter(filters, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + if (inputFlags.getSkipHighlighting() == null) { + searchFlags.setSkipHighlighting(true); + } + } else { + searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); + } + + return UrnSearchAcrossLineageResultsMapper.map( + _entityClient.searchAcrossLineage( + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + filter, + null, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", urn, resolvedDirection, - entityNames, - sanitizedQuery, - maxHops, - filter, - null, + input.getTypes(), + input.getQuery(), + filters, start, - count, - startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count); - throw new RuntimeException("Failed to execute search across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count), e); - } finally { - log.debug("Returning from search across lineage resolver"); - } - }); + count); + throw new RuntimeException( + "Failed to execute search across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + start, + count), 
+ e); + } finally { + log.debug("Returning from search across lineage resolver"); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 0e66d6e601399..6821423887923 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; @@ -15,17 +18,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; - - -/** - * Resolver responsible for resolving the 'search' field of the Query type - */ +/** Resolver responsible for resolving the 'search' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchResolver implements DataFetcher<CompletableFuture<SearchResults>> { - private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = new SearchFlags() + private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = + new SearchFlags() .setFulltext(true) .setMaxAggValues(20) .setSkipCache(false) @@ -54,22 +52,52 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment) searchFlags = applyDefaultSearchFlags(null, sanitizedQuery, SEARCH_RESOLVER_DEFAULTS); } - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Executing search. entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search. 
entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); - return UrnSearchResultsMapper.map( - _entityClient.search(entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), - input.getOrFilters()), null, start, count, ResolverUtils.getAuthentication(environment), + return UrnSearchResultsMapper.map( + _entityClient.search( + entityName, + sanitizedQuery, + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), + null, + start, + count, + ResolverUtils.getAuthentication(environment), searchFlags)); - } catch (Exception e) { - log.error("Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags), e); - } - }); + } catch (Exception e) { + log.error( + "Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index fb146ef72877d..d04cb57e1a860 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -1,5 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import 
com.linkedin.common.urn.Urn; @@ -28,31 +44,11 @@ import lombok.extern.slf4j.Slf4j; import org.codehaus.plexus.util.CollectionUtils; -import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; - - @Slf4j public class SearchUtils { - private SearchUtils() { - } + private SearchUtils() {} - /** - * Entities that are searched by default in Search Across Entities - */ + /** Entities that are searched by default in Search Across Entities */ public static final List<EntityType> SEARCHABLE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -76,10 +72,7 @@ private SearchUtils() { EntityType.DATA_PRODUCT, EntityType.NOTEBOOK); - - /** - * Entities that are part of autocomplete by default in Auto Complete Across Entities - */ + /** Entities that are part of autocomplete by default in Auto Complete Across Entities */ public static final List<EntityType> AUTO_COMPLETE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -99,63 +92,64 @@ private SearchUtils() { EntityType.NOTEBOOK, EntityType.DATA_PRODUCT); - /** - * A prioritized list of source filter types used to generate quick filters - */ - public static final List<String> PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of( - DATASET_ENTITY_NAME, - DASHBOARD_ENTITY_NAME, - DATA_FLOW_ENTITY_NAME, - DATA_JOB_ENTITY_NAME, - CHART_ENTITY_NAME, - CONTAINER_ENTITY_NAME, - ML_MODEL_ENTITY_NAME, - ML_MODEL_GROUP_ENTITY_NAME, - ML_FEATURE_ENTITY_NAME, - ML_FEATURE_TABLE_ENTITY_NAME, - ML_PRIMARY_KEY_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** A prioritized list of source filter types used to generate quick filters */ + public static final List<String> PRIORITIZED_SOURCE_ENTITY_TYPES = + Stream.of( + DATASET_ENTITY_NAME, + DASHBOARD_ENTITY_NAME, + DATA_FLOW_ENTITY_NAME, + DATA_JOB_ENTITY_NAME, + CHART_ENTITY_NAME, + CONTAINER_ENTITY_NAME, + ML_MODEL_ENTITY_NAME, + ML_MODEL_GROUP_ENTITY_NAME, + ML_FEATURE_ENTITY_NAME, + ML_FEATURE_TABLE_ENTITY_NAME, + ML_PRIMARY_KEY_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); - /** - * A prioritized list of DataHub filter types used to generate quick filters - */ - public static final List<String> PRIORITIZED_DATAHUB_ENTITY_TYPES = Stream.of( - DOMAIN_ENTITY_NAME, - GLOSSARY_TERM_ENTITY_NAME, - CORP_GROUP_ENTITY_NAME, - CORP_USER_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** A prioritized list of DataHub filter types used to generate quick filters */ + public static final List<String> PRIORITIZED_DATAHUB_ENTITY_TYPES = + Stream.of( + 
DOMAIN_ENTITY_NAME, + GLOSSARY_TERM_ENTITY_NAME, + CORP_GROUP_ENTITY_NAME, + CORP_USER_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); /** - * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link Filter} - * in disjunctive normal form. + * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link + * Filter} in disjunctive normal form. * * @param baseFilter the filter to apply the view to * @param viewFilter the view filter, null if it doesn't exist - * * @return a new instance of {@link Filter} representing the applied view. */ @Nonnull - public static Filter combineFilters(@Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { - final Filter finalBaseFilter = baseFilter == null - ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) - : baseFilter; + public static Filter combineFilters( + @Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { + final Filter finalBaseFilter = + baseFilter == null + ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) + : baseFilter; // Join the filter conditions in Disjunctive Normal Form. return combineFiltersInConjunction(finalBaseFilter, viewFilter); } /** - * Returns the intersection of two sets of entity types. (Really just string lists). - * If either is empty, consider the entity types list to mean "all" (take the other set). + * Returns the intersection of two sets of entity types. (Really just string lists). If either is + * empty, consider the entity types list to mean "all" (take the other set). * * @param baseEntityTypes the entity types to apply the view to * @param viewEntityTypes the view info, null if it doesn't exist - * * @return the intersection of the two input sets */ @Nonnull - public static List<String> intersectEntityTypes(@Nonnull final List<String> baseEntityTypes, @Nonnull final List<String> viewEntityTypes) { + public static List<String> intersectEntityTypes( + @Nonnull final List<String> baseEntityTypes, @Nonnull final List<String> viewEntityTypes) { if (baseEntityTypes.isEmpty()) { return viewEntityTypes; } @@ -171,126 +165,29 @@ public static List<String> intersectEntityTypes(@Nonnull final List<String> base * * @param filter1 the first filter in the pair * @param filter2 the second filter in the pair - * - * This method supports either Filter format, where the "or" field is used, instead - * of criteria. If the criteria filter is used, then it will be converted into an "OR" before - * returning the new filter. - * + * <p>This method supports either Filter format, where the "or" field is used, instead of + * criteria. If the criteria filter is used, then it will be converted into an "OR" before + * returning the new filter. * @return the result of joining the 2 filters in a conjunction (AND) - * - * How does it work? It basically cross-products the conjunctions inside of each Filter clause. 
- * - * Example Inputs: - * filter1 -> - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * } - * ] - * } - * ] - * } - * filter2 -> - * { - * or: [ - * { - * and: [ - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * }, - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term2"] - * } - * ] - * } - * ] - * } - * Example Output: - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * ] - * } + * <p>How does it work? It basically cross-products the conjunctions inside of each Filter + * clause. + * <p>Example Inputs: filter1 -> { or: [ { and: [ { field: tags, condition: EQUAL, values: + * ["urn:li:tag:tag"] } ] }, { and: [ { field: glossaryTerms, condition: EQUAL, values: + * ["urn:li:glossaryTerm:term"] } ] } ] } filter2 -> { or: [ { and: [ { field: domain, + * condition: EQUAL, values: ["urn:li:domain:domain"] }, ] }, { and: [ { field: glossaryTerms, + * condition: EQUAL, values: ["urn:li:glossaryTerm:term2"] } ] } ] } Example Output: { or: [ { + * and: [ { field: tags, condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: domain, + * condition: EQUAL, values: ["urn:li:domain:domain"] } ] }, { and: [ { field: tags, + * condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: glossaryTerms, condition: EQUAL, + * values: ["urn:li:glossaryTerm:term2"] } ] }, { and: [ { field: glossaryTerm, condition: + * EQUAL, values: ["urn:li:glossaryTerm:term"] }, { field: domain, condition: EQUAL, values: + * ["urn:li:domain:domain"] } ] }, { and: [ { field: glossaryTerm, condition: EQUAL, values: + * ["urn:li:glossaryTerm:term"] }, { field: glossaryTerms, condition: EQUAL, values: + * ["urn:li:glossaryTerm:term2"] } ] }, ] } */ @Nonnull - private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, @Nonnull final Filter filter2) { + private static Filter combineFiltersInConjunction( + @Nonnull final Filter filter1, @Nonnull final Filter filter2) { final Filter finalFilter1 = convertToV2Filter(filter1); final Filter finalFilter2 = convertToV2Filter(filter2); @@ -310,7 +207,8 @@ private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, for (ConjunctiveCriterion conjunction2 : finalFilter2.getOr()) { final List<Criterion> joinedCriterion = new ArrayList<>(conjunction1.getAnd()); joinedCriterion.addAll(conjunction2.getAnd()); - ConjunctiveCriterion
newConjunction = + new ConjunctiveCriterion().setAnd(new CriterionArray(joinedCriterion)); newDisjunction.add(newConjunction); } } @@ -325,38 +223,45 @@ private static Filter convertToV2Filter(@Nonnull Filter filter) { } else if (filter.hasCriteria()) { // Convert criteria to an OR return new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(filter.getCriteria()) - ))); + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(filter.getCriteria())))); } throw new IllegalArgumentException( - String.format("Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", filter)); + String.format( + "Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", + filter)); } /** - * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the specified - * urn cannot be found. + * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the + * specified urn cannot be found. */ - public static DataHubViewInfo resolveView(@Nonnull ViewService viewService, @Nonnull final Urn viewUrn, + public static DataHubViewInfo resolveView( + @Nonnull ViewService viewService, + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { try { DataHubViewInfo maybeViewInfo = viewService.getViewInfo(viewUrn, authentication); if (maybeViewInfo == null) { - log.warn(String.format("Failed to resolve View with urn %s. View does not exist!", viewUrn)); + log.warn( + String.format("Failed to resolve View with urn %s. View does not exist!", viewUrn)); } return maybeViewInfo; } catch (Exception e) { - throw new RuntimeException(String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), e); + throw new RuntimeException( + String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), + e); } } // Assumption is that filter values for degree are either null, 3+, 2, or 1. 
public static Integer getMaxHops(List<FacetFilterInput> filters) { - Set<String> degreeFilterValues = filters.stream() - .filter(filter -> filter.getField().equals("degree")) - .flatMap(filter -> filter.getValues().stream()) - .collect(Collectors.toSet()); + Set<String> degreeFilterValues = + filters.stream() + .filter(filter -> filter.getField().equals("degree")) + .flatMap(filter -> filter.getValues().stream()) + .collect(Collectors.toSet()); Integer maxHops = null; if (!degreeFilterValues.contains("3+")) { if (degreeFilterValues.contains("2")) { @@ -368,7 +273,8 @@ public static Integer getMaxHops(List<FacetFilterInput> filters) { return maxHops; } - public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { + public static SearchFlags mapInputFlags( + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { SearchFlags searchFlags = null; if (inputFlags != null) { searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); @@ -376,7 +282,8 @@ public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.S return searchFlags; } - public static SortCriterion mapSortCriterion(com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { + public static SortCriterion mapSortCriterion( + com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { SortCriterion result = new SortCriterion(); result.setField(sortCriterion.getField()); result.setOrder(SortOrder.valueOf(sortCriterion.getSortOrder().name())); @@ -388,4 +295,4 @@ public static List<String> getEntityNames(List<EntityType> inputTypes) { (inputTypes == null || inputTypes.isEmpty()) ? SEARCHABLE_ENTITY_TYPES : inputTypes; return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java index 8c21277b66a69..a7e0d93c7bd1e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.settings.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -16,58 +18,61 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Resolver responsible for updating the authenticated user's View-specific settings. - */ +/** Resolver responsible for updating the authenticated user's View-specific settings. 
*/ @Slf4j @RequiredArgsConstructor -public class UpdateCorpUserViewsSettingsResolver implements DataFetcher<CompletableFuture<Boolean>> { +public class UpdateCorpUserViewsSettingsResolver + implements DataFetcher<CompletableFuture<Boolean>> { private final SettingsService _settingsService; @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateCorpUserViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); + final UpdateCorpUserViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { + return CompletableFuture.supplyAsync( + () -> { + try { - final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); + final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); - final CorpUserSettings maybeSettings = _settingsService.getCorpUserSettings( - userUrn, - context.getAuthentication() - ); + final CorpUserSettings maybeSettings = + _settingsService.getCorpUserSettings(userUrn, context.getAuthentication()); - final CorpUserSettings newSettings = maybeSettings == null - ? new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) - : maybeSettings; + final CorpUserSettings newSettings = + maybeSettings == null + ? new CorpUserSettings() + .setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) + : maybeSettings; - // Patch the new corp user settings. This does a R-M-F. - updateCorpUserSettings(newSettings, input); + // Patch the new corp user settings. This does a R-M-F. + updateCorpUserSettings(newSettings, input); - _settingsService.updateCorpUserSettings( - userUrn, - newSettings, - context.getAuthentication() - ); - return true; - } catch (Exception e) { - log.error("Failed to perform user view settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update to user view settings against input %s", input.toString()), e); - } - }); + _settingsService.updateCorpUserSettings( + userUrn, newSettings, context.getAuthentication()); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user view settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform update to user view settings against input %s", + input.toString()), + e); + } + }); } private static void updateCorpUserSettings( @Nonnull final CorpUserSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - final CorpUserViewsSettings newViewSettings = settings.hasViews() - ? settings.getViews() - : new CorpUserViewsSettings(); + final CorpUserViewsSettings newViewSettings = + settings.hasViews() ? settings.getViews() : new CorpUserViewsSettings(); updateCorpUserViewsSettings(newViewSettings, input); settings.setViews(newViewSettings); } @@ -75,9 +80,8 @@ private static void updateCorpUserSettings( private static void updateCorpUserViewsSettings( @Nonnull final CorpUserViewsSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? 
UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java index f1aba3d9247c5..208e871743269 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java @@ -14,11 +14,11 @@ /** * Retrieves the Global Settings related to the Views feature. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + * <p>This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ @Slf4j -public class GlobalViewsSettingsResolver implements - DataFetcher<CompletableFuture<GlobalViewsSettings>> { +public class GlobalViewsSettingsResolver + implements DataFetcher<CompletableFuture<GlobalViewsSettings>> { private final SettingsService _settingsService; @@ -27,25 +27,29 @@ public GlobalViewsSettingsResolver(final SettingsService settingsService) { } @Override - public CompletableFuture<GlobalViewsSettings> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<GlobalViewsSettings> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - final GlobalSettingsInfo globalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); - return globalSettings != null && globalSettings.hasViews() - ? mapGlobalViewsSettings(globalSettings.getViews()) - : new GlobalViewsSettings(); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve Global Views Settings", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final GlobalSettingsInfo globalSettings = + _settingsService.getGlobalSettings(context.getAuthentication()); + return globalSettings != null && globalSettings.hasViews() + ? 
mapGlobalViewsSettings(globalSettings.getViews()) + : new GlobalViewsSettings(); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve Global Views Settings", e); + } + }); } - private static GlobalViewsSettings mapGlobalViewsSettings(@Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { + private static GlobalViewsSettings mapGlobalViewsSettings( + @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { final GlobalViewsSettings result = new GlobalViewsSettings(); if (settings.hasDefaultView()) { result.setDefaultView(settings.getDefaultView().toString()); } return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java index c90ec04b3a2df..7d37683785fc2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,13 +17,10 @@ import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Resolver responsible for updating the Global Views settings. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + * <p>This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ public class UpdateGlobalViewsSettingsResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -32,45 +31,50 @@ public UpdateGlobalViewsSettingsResolver(@Nonnull final SettingsService settings } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateGlobalViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); + final UpdateGlobalViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - if (AuthorizationUtils.canManageGlobalViews(context)) { - try { - // First, fetch the existing global settings. This does a R-M-F. - final GlobalSettingsInfo maybeGlobalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); + return CompletableFuture.supplyAsync( + () -> { + if (AuthorizationUtils.canManageGlobalViews(context)) { + try { + // First, fetch the existing global settings. This does a R-M-F. + final GlobalSettingsInfo maybeGlobalSettings = + _settingsService.getGlobalSettings(context.getAuthentication()); - final GlobalSettingsInfo newGlobalSettings = maybeGlobalSettings != null - ? maybeGlobalSettings - : new GlobalSettingsInfo(); + final GlobalSettingsInfo newGlobalSettings = + maybeGlobalSettings != null ? 
maybeGlobalSettings : new GlobalSettingsInfo(); - final GlobalViewsSettings newGlobalViewsSettings = newGlobalSettings.hasViews() - ? newGlobalSettings.getViews() - : new GlobalViewsSettings(); + final GlobalViewsSettings newGlobalViewsSettings = + newGlobalSettings.hasViews() + ? newGlobalSettings.getViews() + : new GlobalViewsSettings(); - // Next, patch the global views settings. - updateViewsSettings(newGlobalViewsSettings, input); - newGlobalSettings.setViews(newGlobalViewsSettings); + // Next, patch the global views settings. + updateViewsSettings(newGlobalViewsSettings, input); + newGlobalSettings.setViews(newGlobalViewsSettings); - // Finally, write back to GMS. - _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update global view settings! %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Finally, write back to GMS. + _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update global view settings! %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } private static void updateViewsSettings( @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings, @Nonnull final UpdateGlobalViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? 
UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java index 0e93cdfb231fa..9ea6cba0f211a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.EntityKeyUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -27,59 +31,64 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.utils.EntityKeyUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchGetStepStatesResolver implements DataFetcher<CompletableFuture<BatchGetStepStatesResult>> { +public class BatchGetStepStatesResolver + implements DataFetcher<CompletableFuture<BatchGetStepStatesResult>> { private final EntityClient _entityClient; @Override - public CompletableFuture<BatchGetStepStatesResult> get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture<BatchGetStepStatesResult> get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); final BatchGetStepStatesInput input = bindArgument(environment.getArgument("input"), BatchGetStepStatesInput.class); - return CompletableFuture.supplyAsync(() -> { - Map<Urn, String> urnsToIdsMap; - Set<Urn> urns; - Map<Urn, EntityResponse> entityResponseMap; + return CompletableFuture.supplyAsync( + () -> { + Map<Urn, String> urnsToIdsMap; + Set<Urn> urns; + Map<Urn, EntityResponse> entityResponseMap; - try { - urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication); - urns = urnsToIdsMap.keySet(); - entityResponseMap = _entityClient.batchGetV2(DATAHUB_STEP_STATE_ENTITY_NAME, urns, - ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), authentication); - } catch (Exception e) { - throw new RuntimeException(e); - } + try { + urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication); + urns = urnsToIdsMap.keySet(); + entityResponseMap = + _entityClient.batchGetV2( + DATAHUB_STEP_STATE_ENTITY_NAME, + urns, + ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), + authentication); + } catch (Exception e) { + throw new RuntimeException(e); + } - final Map<Urn, DataHubStepStateProperties> stepStatePropertiesMap = new HashMap<>(); - for (Map.Entry<Urn, EntityResponse> entry : entityResponseMap.entrySet()) { - final Urn urn = entry.getKey(); - final DataHubStepStateProperties stepStateProperties = getStepStateProperties(urn, entry.getValue()); - if (stepStateProperties != null) { - stepStatePropertiesMap.put(urn, stepStateProperties); - } - } + final Map<Urn, DataHubStepStateProperties> stepStatePropertiesMap = new HashMap<>(); + 
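+ // Keep only the entries whose response actually carries the step state properties aspect;
+ // getStepStateProperties logs an error and returns null when the aspect is absent.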
for (Map.Entry<Urn, EntityResponse> entry : entityResponseMap.entrySet()) { + final Urn urn = entry.getKey(); + final DataHubStepStateProperties stepStateProperties = + getStepStateProperties(urn, entry.getValue()); + if (stepStateProperties != null) { + stepStatePropertiesMap.put(urn, stepStateProperties); + } + } - final List<StepStateResult> results = stepStatePropertiesMap.entrySet() - .stream() - .map(entry -> buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) - .collect(Collectors.toList()); - final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); - result.setResults(results); - return result; - }); + final List<StepStateResult> results = + stepStatePropertiesMap.entrySet().stream() + .map( + entry -> + buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); + result.setResults(results); + return result; + }); } @Nonnull - private Map<Urn, String> buildUrnToIdMap(@Nonnull final List<String> ids, @Nonnull final Authentication authentication) + private Map<Urn, String> buildUrnToIdMap( + @Nonnull final List<String> ids, @Nonnull final Authentication authentication) throws RemoteInvocationException { final Map<Urn, String> urnToIdMap = new HashMap<>(); for (final String id : ids) { @@ -99,37 +108,37 @@ private Urn getStepStateUrn(@Nonnull final String id) { } @Nullable - private DataHubStepStateProperties getStepStateProperties(@Nonnull final Urn urn, - @Nonnull final EntityResponse entityResponse) { + private DataHubStepStateProperties getStepStateProperties( + @Nonnull final Urn urn, @Nonnull final EntityResponse entityResponse) { final EnvelopedAspectMap aspectMap = entityResponse.getAspects(); // If aspect is not present, log the error and return null. 
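+ // (This aspect is written by BatchUpdateStepStatesResolver, so a missing entry typically
+ // means the step state was never recorded.)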
if (!aspectMap.containsKey(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME)) { log.error("Failed to find step state properties for urn: " + urn); return null; } - return new DataHubStepStateProperties(aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); + return new DataHubStepStateProperties( + aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); } @Nonnull - private StepStateResult buildStepStateResult(@Nonnull final String id, - @Nonnull final DataHubStepStateProperties stepStateProperties) { + private StepStateResult buildStepStateResult( + @Nonnull final String id, @Nonnull final DataHubStepStateProperties stepStateProperties) { final StepStateResult result = new StepStateResult(); result.setId(id); - final List<StringMapEntry> mappedProperties = stepStateProperties - .getProperties() - .entrySet() - .stream() - .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) - .collect(Collectors.toList()); + final List<StringMapEntry> mappedProperties = + stepStateProperties.getProperties().entrySet().stream() + .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) + .collect(Collectors.toList()); result.setProperties(mappedProperties); return result; } @Nonnull - private StringMapEntry buildStringMapEntry(@Nonnull final String key, @Nonnull final String value) { + private StringMapEntry buildStringMapEntry( + @Nonnull final String key, @Nonnull final String value) { final StringMapEntry entry = new StringMapEntry(); entry.setKey(key); entry.setValue(value); return entry; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java index e4c21207ddd34..23d77ebba7457 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -25,19 +29,15 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchUpdateStepStatesResolver implements DataFetcher<CompletableFuture<BatchUpdateStepStatesResult>> { +public class BatchUpdateStepStatesResolver + implements DataFetcher<CompletableFuture<BatchUpdateStepStatesResult>> { private final EntityClient _entityClient; @Override - public CompletableFuture<BatchUpdateStepStatesResult> get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture<BatchUpdateStepStatesResult> get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -46,20 +46,23 @@ public 
CompletableFuture<BatchUpdateStepStatesResult> get(@Nonnull final DataFet final List<StepStateInput> states = input.getStates(); final String actorUrnStr = authentication.getActor().toUrnStr(); - return CompletableFuture.supplyAsync(() -> { - final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); - final AuditStamp auditStamp = new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); - final List<UpdateStepStateResult> results = states - .stream() - .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication)) - .collect(Collectors.toList()); - final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); - result.setResults(results); - return result; - }); + return CompletableFuture.supplyAsync( + () -> { + final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); + final AuditStamp auditStamp = + new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); + final List<UpdateStepStateResult> results = + states.stream() + .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication)) + .collect(Collectors.toList()); + final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); + result.setResults(results); + return result; + }); } - private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStateInput state, + private UpdateStepStateResult buildUpdateStepStateResult( + @Nonnull final StepStateInput state, @Nonnull final AuditStamp auditStamp, @Nonnull final Authentication authentication) { final String id = state.getId(); @@ -70,19 +73,27 @@ private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStat return updateStepStateResult; } - private boolean updateStepState(@Nonnull final String id, - @Nonnull final List<StringMapEntryInput> inputProperties, @Nonnull final AuditStamp auditStamp, + private boolean updateStepState( + @Nonnull final String id, + @Nonnull final List<StringMapEntryInput> inputProperties, + @Nonnull final AuditStamp auditStamp, @Nonnull final Authentication authentication) { final Map<String, String> properties = - inputProperties.stream().collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + inputProperties.stream() + .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); try { final DataHubStepStateKey stepStateKey = new DataHubStepStateKey().setId(id); final DataHubStepStateProperties stepStateProperties = - new DataHubStepStateProperties().setProperties(new StringMap(properties)).setLastModified(auditStamp); + new DataHubStepStateProperties() + .setProperties(new StringMap(properties)) + .setLastModified(auditStamp); final MetadataChangeProposal proposal = - buildMetadataChangeProposal(DATAHUB_STEP_STATE_ENTITY_NAME, stepStateKey, - DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, stepStateProperties); + buildMetadataChangeProposal( + DATAHUB_STEP_STATE_ENTITY_NAME, + stepStateKey, + DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, + stepStateProperties); _entityClient.ingestProposal(proposal, authentication, false); return true; } catch (Exception e) { @@ -90,4 +101,4 @@ private boolean updateStepState(@Nonnull final String id, return false; } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java index 9ee24e6941017..153c95c697a77 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -22,14 +27,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS privilege. + * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS + * privilege. */ @Slf4j @RequiredArgsConstructor @@ -42,43 +42,58 @@ public class CreateTagResolver implements DataFetcher<CompletableFuture<String>> public CompletableFuture<String> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateTagInput input = bindArgument(environment.getArgument("input"), CreateTagInput.class); - - return CompletableFuture.supplyAsync(() -> { + final CreateTagInput input = + bindArgument(environment.getArgument("input"), CreateTagInput.class); - if (!AuthorizationUtils.canCreateTags(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateTags(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - try { - // Create the Tag Key - final TagKey key = new TagKey(); + try { + // Create the Tag Key + final TagKey key = new TagKey(); - // Take user provided id OR generate a random UUID for the Tag. - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); + // Take user provided id OR generate a random UUID for the Tag. + final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Tag already exists!"); - } + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Tag already exists!"); + } - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); - String tagUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); + String tagUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } - OwnerUtils.addCreatorAsOwner(context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return tagUrn; - } catch (Exception e) { - log.error("Failed to create Tag with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + OwnerUtils.addCreatorAsOwner( + context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); + return tagUrn; + } catch (Exception e) { + log.error( + "Failed to create Tag with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), + e); + } + }); } private TagProperties mapTagProperties(final CreateTagInput input) { @@ -87,4 +102,4 @@ private TagProperties mapTagProperties(final CreateTagInput input) { result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java index e6c3cf49df8db..c5b86b013103c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java @@ -11,10 +11,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Tag */ @Slf4j public class DeleteTagResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -25,33 +22,41 @@ public DeleteTagResolver(final EntityClient entityClient) {
} @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String tagUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(tagUrn); - return CompletableFuture.supplyAsync(() -> { - - if (AuthorizationUtils.canManageTags(context) || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (AuthorizationUtils.canManageTags(context) + || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) { try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Tag with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format( - "Caught exception while attempting to clear all entity references for Tag with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against Tag with urn %s", tagUrn), e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", tagUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java index e2aa5905be8bd..7b9290b4532b5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; @@ -21,19 +24,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the color of a Tag. Requires the EDIT_TAG_COLOR + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class SetTagColorResolver implements DataFetcher<CompletableFuture<Boolean>> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { @@ -42,48 +43,55 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw final Urn tagUrn = Urn.createFromString(environment.getArgument("urn")); final String colorHex = environment.getArgument("colorHex"); - return CompletableFuture.supplyAsync(() -> { - - // If user is not authorized, then throw exception. - if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - // If tag does not exist, then throw exception. - if (!_entityService.exists(tagUrn)) { - throw new IllegalArgumentException( - String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); - } - - try { - TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity( - tagUrn.toString(), - TAG_PROPERTIES_ASPECT_NAME, - _entityService, - null); - - if (tagProperties == null) { - throw new IllegalArgumentException("Failed to set tag color. Tag properties does not yet exist!"); - } - - tagProperties.setColorHex(colorHex); - - // Update the TagProperties aspect. 
- final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set color for Tag with urn %s", tagUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // If user is not authorized, then throw exception. + if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + // If tag does not exist, then throw exception. + if (!_entityService.exists(tagUrn)) { + throw new IllegalArgumentException( + String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); + } + + try { + TagProperties tagProperties = + (TagProperties) + EntityUtils.getAspectFromEntity( + tagUrn.toString(), TAG_PROPERTIES_ASPECT_NAME, _entityService, null); + + if (tagProperties == null) { + throw new IllegalArgumentException( + "Failed to set tag color. Tag properties does not yet exist!"); + } + + tagProperties.setColorHex(colorHex); + + // Update the TagProperties aspect. + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); + throw new RuntimeException( + String.format("Failed to set color for Tag with urn %s", tagUrn), e); + } + }); } public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -92,4 +100,4 @@ public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, U entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java index 14ae9f96eb683..b12b345a7b211 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.data.template.SetMode; import 
com.linkedin.datahub.graphql.QueryContext; @@ -15,15 +20,7 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates a Test. Requires the MANAGE_TESTS privilege. - */ +/** Creates or updates a Test. Requires the MANAGE_TESTS privilege. */ public class CreateTestResolver implements DataFetcher<CompletableFuture<String>> { private final EntityClient _entityClient; @@ -36,39 +33,44 @@ public CreateTestResolver(final EntityClient entityClient) { public CompletableFuture<String> get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final CreateTestInput input = bindArgument(environment.getArgument("input"), CreateTestInput.class); - - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - - try { - - // Create new test - // Since we are creating a new Test, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = input.getId() == null ? uuid.toString() : input.getId(); - - // Create the Ingestion source key - final TestKey key = new TestKey(); - key.setId(uuidStr); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), - authentication)) { - throw new IllegalArgumentException("This Test already exists!"); + final CreateTestInput input = + bindArgument(environment.getArgument("input"), CreateTestInput.class); + + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + + try { + + // Create new test + // Since we are creating a new Test, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = input.getId() == null ? uuid.toString() : input.getId(); + + // Create the Test key + final TestKey key = new TestKey(); + key.setId(uuidStr); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), authentication)) { + throw new IllegalArgumentException("This Test already exists!"); + } + + // Create the Test info. + final TestInfo info = mapCreateTestInput(input); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Test with urn %s", input), e); + } } - - // Create the Test info. - final TestInfo info = mapCreateTestInput(input); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against Test with urn %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private static TestInfo mapCreateTestInput(final CreateTestInput input) { @@ -79,5 +81,4 @@ private static TestInfo mapCreateTestInput(final CreateTestInput input) { result.setDefinition(mapDefinition(input.getDefinition())); return result; } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java index e0c878dc652bd..6bc7e479b305c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -7,8 +9,6 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - /** * Resolver responsible for hard deleting a particular DataHub Test. Requires MANAGE_TESTS @@ -23,20 +23,24 @@ public DeleteTestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String testUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(testUrn); - return CompletableFuture.supplyAsync(() -> { - if (canManageTests(context)) { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against Test with urn %s", testUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against Test with urn %s", testUrn), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java index a1e1e48aae847..f345d9ceb21e5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java @@ -1,12 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.datahub.graphql.generated.ListTestsResult; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.SearchFlags; @@ -20,12 +23,9 @@ import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - - /** - * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_TESTS platform + * privilege. */ public class ListTestsResolver implements DataFetcher<CompletableFuture<ListTestsResult>> { @@ -39,45 +39,50 @@ public ListTestsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListTestsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListTestsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - final ListTestsInput input = bindArgument(environment.getArgument("input"), ListTestsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? "" : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + final ListTestsInput input = + bindArgument(environment.getArgument("input"), ListTestsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? "" : input.getQuery(); - try { - // First, get all group Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.TEST_ENTITY_NAME, - query, - Collections.emptyMap(), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all Test urns. 
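+ // Collections.emptyMap() applies no facet filters; start/count paginate the results,
+ // and SearchFlags.setFulltext(true) requests free-text matching of the query string.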
+ final SearchResult gmsResult = + _entityClient.search( + Constants.TEST_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Now that we have entities we can bind this to a result. - final ListTestsResult result = new ListTestsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setTests(mapUnresolvedTests(gmsResult.getEntities())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list tests", e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Now that we have entities we can bind this to a result. + final ListTestsResult result = new ListTestsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setTests(mapUnresolvedTests(gmsResult.getEntities())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list tests", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - // This method maps urns returned from the list endpoint into Partial Test objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Test objects which will be + // resolved by a separate Batch resolver. private List<Test> mapUnresolvedTests(final SearchEntityArray entityArray) { final List<Test> results = new ArrayList<>(); for (final SearchEntity entity : entityArray) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java index 9c4b5a4d4e0fa..6cb55100ec08e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java @@ -20,10 +20,7 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver used for fetching the list of tests for an entity - */ +/** GraphQL Resolver used for fetching the list of tests for an entity */ @Slf4j public class TestResultsResolver implements DataFetcher<CompletableFuture<TestResults>> { @@ -38,42 +35,44 @@ public CompletableFuture<TestResults> get(DataFetchingEnvironment environment) t final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); + return CompletableFuture.supplyAsync( + () -> { + final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); - if (gmsTestResults == null) { - return null; - } + if (gmsTestResults == null) { + return null; + } - TestResults testResults = new TestResults(); - testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); - testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); - return testResults; - }); + TestResults testResults = new TestResults(); + testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); + 
testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); + return testResults; + }); } @Nullable - private com.linkedin.test.TestResults getTestResults(final Urn entityUrn, final QueryContext context) { + private com.linkedin.test.TestResults getTestResults( + final Urn entityUrn, final QueryContext context) { try { - final EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), - context.getAuthentication()); - if (entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { + final EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { return new com.linkedin.test.TestResults( - entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME) - .getValue() - .data()); + entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME).getValue().data()); } return null; } catch (Exception e) { - throw new RuntimeException("Failed to get test results", e); + throw new RuntimeException("Failed to get test results", e); } } - private List<TestResult> mapTestResults(final @Nonnull List<com.linkedin.test.TestResult> gmsResults) { + private List<TestResult> mapTestResults( + final @Nonnull List<com.linkedin.test.TestResult> gmsResults) { final List<TestResult> results = new ArrayList<>(); for (com.linkedin.test.TestResult gmsResult : gmsResults) { results.add(mapTestResult(gmsResult)); @@ -89,4 +88,4 @@ private TestResult mapTestResult(final @Nonnull com.linkedin.test.TestResult gms testResult.setType(TestResultType.valueOf(gmsResult.getType().toString())); return testResult; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java index 248da3e58d8ae..922c28097f83c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; + import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,14 +17,9 @@ import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; - - public class TestUtils { - /** - * Returns true if the authenticated user is able to manage tests. - */ + /** Returns true if the authenticated user is able to manage tests. 
*/
public static boolean canManageTests(@Nonnull QueryContext context) {
return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE);
}
@@ -38,11 +35,12 @@ public static EntityResponse buildEntityResponse(Map<String, RecordTemplate> asp
final EntityResponse entityResponse = new EntityResponse();
final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
for (Map.Entry<String, RecordTemplate> entry : aspects.entrySet()) {
- aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
+ aspectMap.put(
+ entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
}
entityResponse.setAspects(aspectMap);
return entityResponse;
}
- private TestUtils() { }
+ private TestUtils() {}
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
index 1dd8518076796..b5d6e50fe0774 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
@@ -1,5 +1,10 @@
package com.linkedin.datahub.graphql.resolvers.test;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
import com.datahub.authentication.Authentication;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.SetMode;
@@ -13,15 +18,7 @@
import graphql.schema.DataFetchingEnvironment;
import java.util.concurrent.CompletableFuture;
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Updates or updates a Test. Requires the MANAGE_TESTS privilege.
- */
+/** Updates a Test. Requires the MANAGE_TESTS privilege. */
public class UpdateTestResolver implements DataFetcher<CompletableFuture<String>> {
private final EntityClient _entityClient;
@@ -35,26 +32,30 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
final QueryContext context = environment.getContext();
final Authentication authentication = context.getAuthentication();
- return CompletableFuture.supplyAsync(() -> {
-
- if (canManageTests(context)) {
+ return CompletableFuture.supplyAsync(
+ () -> {
+ if (canManageTests(context)) {
- final String urn = environment.getArgument("urn");
- final UpdateTestInput input = bindArgument(environment.getArgument("input"), UpdateTestInput.class);
+ final String urn = environment.getArgument("urn");
+ final UpdateTestInput input =
+ bindArgument(environment.getArgument("input"), UpdateTestInput.class);
- // Update the Test info - currently this simply creates a new test with same urn.
- final TestInfo info = mapUpdateTestInput(input);
+ // Update the Test info - currently this simply creates a new test with same urn.
+ final TestInfo info = mapUpdateTestInput(input); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info); - try { - return _entityClient.ingestProposal(proposal, authentication, false); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to perform update against Test with urn %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info); + try { + return _entityClient.ingestProposal(proposal, authentication, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Test with urn %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } private static TestInfo mapUpdateTestInput(final UpdateTestInput input) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java index 499e7c9ac177d..ea234280ed6c2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GetSchemaBlameInput; import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult; @@ -16,15 +18,13 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. TODO: Add tests for this resolver. */ @Slf4j -public class GetSchemaBlameResolver implements DataFetcher<CompletableFuture<GetSchemaBlameResult>> { +public class GetSchemaBlameResolver + implements DataFetcher<CompletableFuture<GetSchemaBlameResult>> { private final TimelineService _timelineService; public GetSchemaBlameResolver(TimelineService timelineService) { @@ -32,37 +32,37 @@ public GetSchemaBlameResolver(TimelineService timelineService) { } @Override - public CompletableFuture<GetSchemaBlameResult> get(final DataFetchingEnvironment environment) throws Exception { - final GetSchemaBlameInput input = bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class); + public CompletableFuture<GetSchemaBlameResult> get(final DataFetchingEnvironment environment) + throws Exception { + final GetSchemaBlameInput input = + bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class); final String datasetUrnString = input.getDatasetUrn(); final long startTime = 0; final long endTime = 0; final String version = input.getVersion() == null ? 
null : input.getVersion(); - return CompletableFuture.supplyAsync(() -> { - try { - final Set<ChangeCategory> changeCategorySet = Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); - final Urn datasetUrn = Urn.createFromString(datasetUrnString); - final List<ChangeTransaction> changeTransactionList = - _timelineService.getTimeline( - datasetUrn, - changeCategorySet, - startTime, - endTime, - null, - null, - false); - return SchemaBlameMapper.map(changeTransactionList, version); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set<ChangeCategory> changeCategorySet = + Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); + final Urn datasetUrn = Urn.createFromString(datasetUrnString); + final List<ChangeTransaction> changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaBlameMapper.map(changeTransactionList, version); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java index cfad1395a61a8..5063dbbf7ccf3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListInput; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; @@ -16,14 +18,12 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. 
*/ @Slf4j -public class GetSchemaVersionListResolver implements DataFetcher<CompletableFuture<GetSchemaVersionListResult>> { +public class GetSchemaVersionListResolver + implements DataFetcher<CompletableFuture<GetSchemaVersionListResult>> { private final TimelineService _timelineService; public GetSchemaVersionListResolver(TimelineService timelineService) { @@ -31,7 +31,8 @@ public GetSchemaVersionListResolver(TimelineService timelineService) { } @Override - public CompletableFuture<GetSchemaVersionListResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<GetSchemaVersionListResult> get( + final DataFetchingEnvironment environment) throws Exception { final GetSchemaVersionListInput input = bindArgument(environment.getArgument("input"), GetSchemaVersionListInput.class); @@ -39,23 +40,27 @@ public CompletableFuture<GetSchemaVersionListResult> get(final DataFetchingEnvir final long startTime = 0; final long endTime = 0; - return CompletableFuture.supplyAsync(() -> { - try { - final Set<ChangeCategory> changeCategorySet = new HashSet<>(); - changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); - Urn datasetUrn = Urn.createFromString(datasetUrnString); - List<ChangeTransaction> changeTransactionList = - _timelineService.getTimeline(datasetUrn, changeCategorySet, startTime, endTime, null, null, false); - return SchemaVersionListMapper.map(changeTransactionList); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set<ChangeCategory> changeCategorySet = new HashSet<>(); + changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); + Urn datasetUrn = Urn.createFromString(datasetUrnString); + List<ChangeTransaction> changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaVersionListMapper.map(changeTransactionList); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java index 45998bdae45b0..14429696fefd4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java @@ -5,16 +5,18 @@ import graphql.schema.TypeResolver; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface + * type. */ public class AspectInterfaceTypeResolver implements TypeResolver { - public AspectInterfaceTypeResolver() { } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - // TODO(Gabe): Fill this out. This method is not called today. 
We will need to fill this - // out in the case we ever want to return fields of type Aspect in graphql. Right now - // we just use Aspect to define the shared `version` field. - return null; - } + public AspectInterfaceTypeResolver() {} + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this + // out in the case we ever want to return fields of type Aspect in graphql. Right now + // we just use Aspect to define the shared `version` field. + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java index 1a5f06da04014..52c20254332b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java @@ -6,27 +6,29 @@ import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - import java.util.List; import java.util.stream.Collectors; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface + * type. */ public class EntityInterfaceTypeResolver implements TypeResolver { - private final List<EntityType<?, ?>> _entities; + private final List<EntityType<?, ?>> _entities; - public EntityInterfaceTypeResolver(final List<EntityType<?, ?>> entities) { - _entities = entities; - } + public EntityInterfaceTypeResolver(final List<EntityType<?, ?>> entities) { + _entities = entities; + } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - Object javaObject = env.getObject(); - final LoadableType<?, ?> filteredEntity = Iterables.getOnlyElement(_entities.stream() + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + Object javaObject = env.getObject(); + final LoadableType<?, ?> filteredEntity = + Iterables.getOnlyElement( + _entities.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); - } + return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java index a69500f24ee24..aeeb9bafa1f4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java @@ -4,30 +4,30 @@ import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class HyperParameterValueTypeResolver implements TypeResolver { - public static final String 
STRING_BOX = "StringBox"; - public static final String INT_BOX = "IntBox"; - public static final String FLOAT_BOX = "FloatBox"; - public static final String BOOLEAN_BOX = "BooleanBox"; + public static final String STRING_BOX = "StringBox"; + public static final String INT_BOX = "IntBox"; + public static final String FLOAT_BOX = "FloatBox"; + public static final String BOOLEAN_BOX = "BooleanBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else if (env.getObject() instanceof IntBox) { - return env.getSchema().getObjectType(INT_BOX); - } else if (env.getObject() instanceof BooleanBox) { - return env.getSchema().getObjectType(BOOLEAN_BOX); - } else if (env.getObject() instanceof FloatBox) { - return env.getSchema().getObjectType(FLOAT_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else if (env.getObject() instanceof IntBox) { + return env.getSchema().getObjectType(INT_BOX); + } else if (env.getObject() instanceof BooleanBox) { + return env.getSchema().getObjectType(BOOLEAN_BOX); + } else if (env.getObject() instanceof FloatBox) { + return env.getSchema().getObjectType(FLOAT_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java index 25a9a540f51b1..ff190cff1339e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java @@ -8,17 +8,17 @@ public class PlatformSchemaUnionTypeResolver implements TypeResolver { - private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; - private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; + private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; + private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TableSchema) { - return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); - } else if (env.getObject() instanceof KeyValueSchema) { - return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TableSchema) { + return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); + } else if (env.getObject() instanceof KeyValueSchema) { + return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); + } else { + throw new RuntimeException("Unrecognized object type provided to type resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java index 0dc7b0485c51c..c5be5725f1d45 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.type; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class ResultsTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; + public static final String STRING_BOX = "StringBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java index 5263e6b9b7df6..c66588008b103 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java @@ -6,7 +6,8 @@ public class TimeSeriesAspectInterfaceTypeResolver implements TypeResolver { - public TimeSeriesAspectInterfaceTypeResolver() { } + public TimeSeriesAspectInterfaceTypeResolver() {} + @Override public GraphQLObjectType getType(TypeResolutionEnvironment env) { // TODO(John): Fill this out. 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java index d02f1a5f786a7..db26da05a2ba4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -10,14 +13,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Resolver responsible for creating a password reset token that Admins can share with native users to reset their - * credentials. + * Resolver responsible for creating a password reset token that Admins can share with native users + * to reset their credentials. */ -public class CreateNativeUserResetTokenResolver implements DataFetcher<CompletableFuture<ResetToken>> { +public class CreateNativeUserResetTokenResolver + implements DataFetcher<CompletableFuture<ResetToken>> { private final NativeUserService _nativeUserService; public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserService) { @@ -25,7 +26,8 @@ public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserServ } @Override - public CompletableFuture<ResetToken> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ResetToken> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateNativeUserResetTokenInput input = bindArgument(environment.getArgument("input"), CreateNativeUserResetTokenInput.class); @@ -38,15 +40,18 @@ public CompletableFuture<ResetToken> get(final DataFetchingEnvironment environme "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - String resetToken = - _nativeUserService.generateNativeUserPasswordResetToken(userUrnString, context.getAuthentication()); - return new ResetToken(resetToken); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to generate password reset token for user: %s", userUrnString)); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + String resetToken = + _nativeUserService.generateNativeUserPasswordResetToken( + userUrnString, context.getAuthentication()); + return new ResetToken(resetToken); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to generate password reset token for user: %s", userUrnString)); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java index 69da642ad6bb1..215d53299c8ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,10 +26,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListUsersResolver implements DataFetcher<CompletableFuture<ListUsersResult>> { private static final Integer DEFAULT_START = 0; @@ -40,48 +39,60 @@ public ListUsersResolver(final EntityClient entityClient) { } @Override - public CompletableFuture<ListUsersResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListUsersResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListUsersInput input = bindArgument(environment.getArgument("input"), ListUsersInput.class); + final ListUsersInput input = + bindArgument(environment.getArgument("input"), ListUsersInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all policy Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_USER_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all policy Urns. + final SearchResult gmsResult = + _entityClient.search( + CORP_USER_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get hydrate all users. 
- final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME,
- new HashSet<>(gmsResult.getEntities().stream()
- .map(SearchEntity::getEntity)
- .collect(Collectors.toList())
- ), null, context.getAuthentication());
+ // Then, hydrate all users.
+ final Map<Urn, EntityResponse> entities =
+ _entityClient.batchGetV2(
+ CORP_USER_ENTITY_NAME,
+ new HashSet<>(
+ gmsResult.getEntities().stream()
+ .map(SearchEntity::getEntity)
+ .collect(Collectors.toList())),
+ null,
+ context.getAuthentication());
- // Now that we have entities we can bind this to a result.
- final ListUsersResult result = new ListUsersResult();
- result.setStart(gmsResult.getFrom());
- result.setCount(gmsResult.getPageSize());
- result.setTotal(gmsResult.getNumEntities());
- result.setUsers(mapEntities(entities.values()));
- return result;
- } catch (Exception e) {
- throw new RuntimeException("Failed to list users", e);
- }
- });
+ // Now that we have entities we can bind this to a result.
+ final ListUsersResult result = new ListUsersResult();
+ result.setStart(gmsResult.getFrom());
+ result.setCount(gmsResult.getPageSize());
+ result.setTotal(gmsResult.getNumEntities());
+ result.setUsers(mapEntities(entities.values()));
+ return result;
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to list users", e);
+ }
+ });
}
- throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+ throw new AuthorizationException(
+ "Unauthorized to perform this action. Please contact your DataHub administrator.");
}
private List<CorpUser> mapEntities(final Collection<EntityResponse> entities) {
- return entities.stream()
- .map(CorpUserMapper::map)
- .collect(Collectors.toList());
+ return entities.stream().map(CorpUserMapper::map).collect(Collectors.toList());
}
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java
index 718810e4710e7..7131a9d2a9a26 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java
@@ -10,10 +10,7 @@
import java.util.concurrent.CompletableFuture;
import lombok.extern.slf4j.Slf4j;
-
-/**
- * Resolver responsible for hard deleting a particular DataHub Corp User
- */
+/** Resolver responsible for hard deleting a particular DataHub Corp User */
@Slf4j
public class RemoveUserResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -24,30 +21,39 @@ public RemoveUserResolver(final EntityClient entityClient) {
}
@Override
- public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+ throws Exception {
final QueryContext context = environment.getContext();
if (AuthorizationUtils.canManageUsersAndGroups(context)) {
final String userUrn = environment.getArgument("urn");
final Urn urn = Urn.createFromString(userUrn);
- return CompletableFuture.supplyAsync(() -> {
- try {
- _entityClient.deleteEntity(urn, context.getAuthentication());
-
- // Asynchronously Delete all references to the entity (to return quickly)
- CompletableFuture.runAsync(() -> {
+ return CompletableFuture.supplyAsync(
+ () -> {
try {
- _entityClient.deleteEntityReferences(urn, 
context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for user with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against user with urn %s", userUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against user with urn %s", userUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java index ab04d26fb5801..6a0e81a10f40b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,12 +16,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform privilege. + * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform + * privilege. 
*/
public class UpdateUserStatusResolver implements DataFetcher<CompletableFuture<String>> {
@@ -37,20 +37,28 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
final CorpUserStatus newStatus = CorpUserStatus.valueOf(environment.getArgument("status"));
// Create the status aspect
- final com.linkedin.identity.CorpUserStatus statusAspect = new com.linkedin.identity.CorpUserStatus();
+ final com.linkedin.identity.CorpUserStatus statusAspect =
+ new com.linkedin.identity.CorpUserStatus();
statusAspect.setStatus(newStatus.toString());
- statusAspect.setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(Urn.createFromString(context.getActorUrn())));
-
- return CompletableFuture.supplyAsync(() -> {
- try {
- final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(userUrn),
- CORP_USER_STATUS_ASPECT_NAME, statusAspect);
- return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
- } catch (Exception e) {
- throw new RuntimeException(String.format("Failed to update user status for urn", userUrn), e);
- }
- });
+ statusAspect.setLastModified(
+ new AuditStamp()
+ .setTime(System.currentTimeMillis())
+ .setActor(Urn.createFromString(context.getActorUrn())));
+
+ return CompletableFuture.supplyAsync(
+ () -> {
+ try {
+ final MetadataChangeProposal proposal =
+ buildMetadataChangeProposalWithUrn(
+ UrnUtils.getUrn(userUrn), CORP_USER_STATUS_ASPECT_NAME, statusAspect);
+ return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+ } catch (Exception e) {
+ throw new RuntimeException(
+ String.format("Failed to update user status for urn %s", userUrn), e);
+ }
+ });
}
- throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+ throw new AuthorizationException(
+ "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java index 6e39879dd56bc..830c9013835d0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -18,12 +20,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for updating a particular DataHub View - */ +/** Resolver responsible for updating a particular DataHub View */ @Slf4j public class CreateViewResolver implements DataFetcher<CompletableFuture<DataHubView>> { @@ -34,29 +31,34 @@ public CreateViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateViewInput input = bindArgument(environment.getArgument("input"), CreateViewInput.class); + final CreateViewInput input = + bindArgument(environment.getArgument("input"), CreateViewInput.class); - return CompletableFuture.supplyAsync(() -> { - if (ViewUtils.canCreateView( - DataHubViewType.valueOf(input.getViewType().toString()), - context)) { - try { - final Urn urn = _viewService.createView( - DataHubViewType.valueOf(input.getViewType().toString()), - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - return createView(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create View with input: %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + if (ViewUtils.canCreateView( + DataHubViewType.valueOf(input.getViewType().toString()), context)) { + try { + final Urn urn = + _viewService.createView( + DataHubViewType.valueOf(input.getViewType().toString()), + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition(input.getDefinition()), + context.getAuthentication(), + System.currentTimeMillis()); + return createView(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create View with input: %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateViewInput input) { @@ -66,15 +68,20 @@ private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateView .setViewType(input.getViewType()) .setName(input.getName()) .setDescription(input.getDescription()) - .setDefinition(new DataHubViewDefinition( - input.getDefinition().getEntityTypes(), - new DataHubViewFilter( - input.getDefinition().getFilter().getOperator(), - input.getDefinition().getFilter().getFilters().stream().map(filterInput -> - new FacetFilter(filterInput.getField(), filterInput.getCondition(), - filterInput.getValues(), - filterInput.getNegated())) - .collect(Collectors.toList())))) + .setDefinition( + new DataHubViewDefinition( + input.getDefinition().getEntityTypes(), + new DataHubViewFilter( + input.getDefinition().getFilter().getOperator(), + input.getDefinition().getFilter().getFilters().stream() + .map( + filterInput -> + new FacetFilter( + filterInput.getField(), + filterInput.getCondition(), + filterInput.getValues(), + filterInput.getNegated())) + .collect(Collectors.toList())))) .build(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java index 2b8c3b8640aa8..a3b21ad0c9681 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java @@ -11,10 +11,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub View - */ +/** Resolver responsible for hard deleting a particular DataHub View */ @Slf4j public class DeleteViewResolver implements DataFetcher<CompletableFuture<Boolean>> { @@ -25,24 +22,27 @@ public DeleteViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); final Urn urn = Urn.createFromString(urnStr); - return CompletableFuture.supplyAsync(() -> { - try { - if (ViewUtils.canUpdateView(_viewService, urn, context)) { - _viewService.deleteView(urn, context.getAuthentication()); - log.info(String.format("Successfully deleted View %s with urn", urn)); - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (ViewUtils.canUpdateView(_viewService, urn, context)) { + _viewService.deleteView(urn, context.getAuthentication()); + log.info(String.format("Successfully deleted View %s with urn", urn)); + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against View with urn %s", urn), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index 51bbcfcfa25ae..caa37f8264854 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -30,20 +32,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver used for listing global DataHub Views. - */ +/** Resolver used for listing global DataHub Views. */ @Slf4j public class ListGlobalViewsResolver implements DataFetcher<CompletableFuture<ListViewsResult>> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -55,43 +51,50 @@ public ListGlobalViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListGlobalViewsInput input = bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); + final ListGlobalViewsInput input = + bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery();
- try {
+ try {
- final SearchResult gmsResult = _entityClient.search(
- Constants.DATAHUB_VIEW_ENTITY_NAME,
- query,
- buildFilters(),
- DEFAULT_SORT_CRITERION,
- start,
- count,
- context.getAuthentication(),
- new SearchFlags().setFulltext(true));
+ final SearchResult gmsResult =
+ _entityClient.search(
+ Constants.DATAHUB_VIEW_ENTITY_NAME,
+ query,
+ buildFilters(),
+ DEFAULT_SORT_CRITERION,
+ start,
+ count,
+ context.getAuthentication(),
+ new SearchFlags().setFulltext(true));
- final ListViewsResult result = new ListViewsResult();
- result.setStart(gmsResult.getFrom());
- result.setCount(gmsResult.getPageSize());
- result.setTotal(gmsResult.getNumEntities());
- result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream()
- .map(SearchEntity::getEntity)
- .collect(Collectors.toList())));
- return result;
- } catch (Exception e) {
- throw new RuntimeException("Failed to list global Views", e);
- }
- });
+ final ListViewsResult result = new ListViewsResult();
+ result.setStart(gmsResult.getFrom());
+ result.setCount(gmsResult.getPageSize());
+ result.setTotal(gmsResult.getNumEntities());
+ result.setViews(
+ mapUnresolvedViews(
+ gmsResult.getEntities().stream()
+ .map(SearchEntity::getEntity)
+ .collect(Collectors.toList())));
+ return result;
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to list global Views", e);
+ }
+ });
}
- // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver.
+ // This method maps urns returned from the list endpoint into Partial View objects which will be
+ // resolved by a separate Batch resolver.
private List<DataHubView> mapUnresolvedViews(final List<Urn> entityUrns) {
final List<DataHubView> results = new ArrayList<>();
for (final Urn urn : entityUrns) {
@@ -107,7 +110,12 @@ private Filter buildFilters() {
final AndFilterInput globalCriteria = new AndFilterInput();
List<FacetFilterInput> andConditions = new ArrayList<>();
andConditions.add(
- new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(DataHubViewType.GLOBAL.name()), false, FilterOperator.EQUAL));
+ new FacetFilterInput(
+ VIEW_TYPE_FIELD,
+ null,
+ ImmutableList.of(DataHubViewType.GLOBAL.name()),
+ false,
+ FilterOperator.EQUAL));
globalCriteria.setAnd(andConditions);
return buildFilter(Collections.emptyList(), ImmutableList.of(globalCriteria));
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
index d8705e216503c..945d2d50bcc3e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
@@ -1,5 +1,7 @@
package com.linkedin.datahub.graphql.resolvers.view;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
import com.google.common.collect.ImmutableList;
import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.QueryContext;
@@ -30,21 +32,15 @@
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
-/**
- * Resolver used for listing the current user's DataHub Views.
- */
+/** Resolver used for listing the current user's DataHub Views. 
*/ @Slf4j public class ListMyViewsResolver implements DataFetcher<CompletableFuture<ListViewsResult>> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; private static final String CREATOR_URN_FIELD = "createdBy"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -56,44 +52,52 @@ public ListMyViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<ListViewsResult> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListMyViewsInput input = bindArgument(environment.getArgument("input"), ListMyViewsInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final String viewType = input.getViewType() == null ? null : input.getViewType().toString(); - - try { - - final SearchResult gmsResult = _entityClient.search( - Constants.DATAHUB_VIEW_ENTITY_NAME, - query, - buildFilters(viewType, context.getActorUrn()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - final ListViewsResult result = new ListViewsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Views", e); - } - }); + final ListMyViewsInput input = + bindArgument(environment.getArgument("input"), ListMyViewsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final String viewType = + input.getViewType() == null ? 
null : input.getViewType().toString();
+
+ try {
+
+ final SearchResult gmsResult =
+ _entityClient.search(
+ Constants.DATAHUB_VIEW_ENTITY_NAME,
+ query,
+ buildFilters(viewType, context.getActorUrn()),
+ DEFAULT_SORT_CRITERION,
+ start,
+ count,
+ context.getAuthentication(),
+ new SearchFlags().setFulltext(true));
+
+ final ListViewsResult result = new ListViewsResult();
+ result.setStart(gmsResult.getFrom());
+ result.setCount(gmsResult.getPageSize());
+ result.setTotal(gmsResult.getNumEntities());
+ result.setViews(
+ mapUnresolvedViews(
+ gmsResult.getEntities().stream()
+ .map(SearchEntity::getEntity)
+ .collect(Collectors.toList())));
+ return result;
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to list Views", e);
+ }
+ });
}
- // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver.
+ // This method maps urns returned from the list endpoint into Partial View objects which will be
+ // resolved by a separate Batch resolver.
private List<DataHubView> mapUnresolvedViews(final List<Urn> entityUrns) {
final List<DataHubView> results = new ArrayList<>();
for (final Urn urn : entityUrns) {
@@ -110,14 +114,12 @@ private Filter buildFilters(@Nullable final String viewType, final String creato
final AndFilterInput filterCriteria = new AndFilterInput();
final List<FacetFilterInput> andConditions = new ArrayList<>();
andConditions.add(
- new FacetFilterInput(CREATOR_URN_FIELD,
- null,
- ImmutableList.of(creatorUrn),
- false,
- FilterOperator.EQUAL));
+ new FacetFilterInput(
+ CREATOR_URN_FIELD, null, ImmutableList.of(creatorUrn), false, FilterOperator.EQUAL));
if (viewType != null) {
andConditions.add(
- new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL));
+ new FacetFilterInput(
+ VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL));
}
filterCriteria.setAnd(andConditions);
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java
index 61e22da3c9444..5a52a57d9c374 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java
@@ -1,5 +1,7 @@
package com.linkedin.datahub.graphql.resolvers.view;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
import com.datahub.authentication.Authentication;
import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.QueryContext;
@@ -16,11 +18,7 @@
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-/**
- * Resolver responsible for updating a particular DataHub View
- */
+/** Resolver responsible for updating a particular DataHub View */
@Slf4j
public class UpdateViewResolver implements DataFetcher<CompletableFuture<DataHubView>> {
@@ -31,40 +29,47 @@ public UpdateViewResolver(@Nonnull final ViewService viewService) {
}
@Override
- public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) throws Exception {
+ public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment)
+ throws Exception {
final QueryContext context = environment.getContext();
final String urnStr = 
environment.getArgument("urn"); - final UpdateViewInput input = bindArgument(environment.getArgument("input"), UpdateViewInput.class); + final UpdateViewInput input = + bindArgument(environment.getArgument("input"), UpdateViewInput.class); final Urn urn = Urn.createFromString(urnStr); - return CompletableFuture.supplyAsync(() -> { - try { - if (ViewUtils.canUpdateView(_viewService, urn, context)) { - _viewService.updateView( - urn, - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - log.info(String.format("Successfully updated View %s with urn", urn)); - return getView(urn, context.getAuthentication()); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (ViewUtils.canUpdateView(_viewService, urn, context)) { + _viewService.updateView( + urn, + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition(input.getDefinition()), + context.getAuthentication(), + System.currentTimeMillis()); + log.info(String.format("Successfully updated View %s with urn", urn)); + return getView(urn, context.getAuthentication()); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against View with urn %s", urn), e); + } + }); } - private DataHubView getView(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { + private DataHubView getView( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) { final EntityResponse maybeResponse = _viewService.getViewEntityResponse(urn, authentication); // If there is no response, there is a problem. if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); + String.format( + "Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); } return DataHubViewMapper.map(maybeResponse); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index dda0c3bebc2eb..9da5f915ff31d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -26,39 +26,40 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class ViewUtils { /** * Returns true if the authenticated actor is allowed to create a view with the given parameters. * - * The user can create a View if it's a personal View specific to them, or - * if it's a Global view and they have the correct Platform privileges. + * <p>The user can create a View if it's a personal View specific to them, or if it's a Global + * view and they have the correct Platform privileges. 
   *
   * @param type the type of the new View
   * @param context the current GraphQL {@link QueryContext}
-   * @return true if the authenticator actor is allowed to change or delete the view, false otherwise.
+   * @return true if the authenticated actor is allowed to create the view, false
+   *     otherwise.
   */
  public static boolean canCreateView(
-      @Nonnull DataHubViewType type,
-      @Nonnull QueryContext context) {
+      @Nonnull DataHubViewType type, @Nonnull QueryContext context) {
    Objects.requireNonNull(type, "type must not be null");
    Objects.requireNonNull(context, "context must not be null");
    return DataHubViewType.PERSONAL.equals(type)
-        || (DataHubViewType.GLOBAL.equals(type) && AuthorizationUtils.canManageGlobalViews(context));
+        || (DataHubViewType.GLOBAL.equals(type)
+            && AuthorizationUtils.canManageGlobalViews(context));
  }
 
-  /**
-   * Returns true if the authenticated actor is allowed to update or delete
-   * the View with the specified urn.
+  /**
+   * Returns true if the authenticated actor is allowed to update or delete the View with the
+   * specified urn.
   *
   * @param viewService an instance of {@link ViewService}
   * @param viewUrn the urn of the View
   * @param context the current GraphQL {@link QueryContext}
-   * @return true if the authenticator actor is allowed to change or delete the view, false otherwise.
+   * @return true if the authenticated actor is allowed to change or delete the view, false
+   *     otherwise.
   */
-  public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) {
+  public static boolean canUpdateView(
+      @Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) {
    Objects.requireNonNull(viewService, "viewService must not be null");
    Objects.requireNonNull(viewUrn, "viewUrn must not be null");
    Objects.requireNonNull(context, "context must not be null");
@@ -67,16 +68,21 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U
    final DataHubViewInfo viewInfo = viewService.getViewInfo(viewUrn, context.getAuthentication());
    if (viewInfo == null) {
-      throw new IllegalArgumentException(String.format("Failed to modify View. View with urn %s does not exist.", viewUrn));
+      throw new IllegalArgumentException(
+          String.format("Failed to modify View. View with urn %s does not exist.", viewUrn));
    }
 
-    // If the View is Global, then the user must have ability to manage global views OR must be its owner
-    if (DataHubViewType.GLOBAL.equals(viewInfo.getType()) && AuthorizationUtils.canManageGlobalViews(context)) {
+    // If the View is Global, then the user must have ability to manage global views OR must be its
+    // owner
+    if (DataHubViewType.GLOBAL.equals(viewInfo.getType())
+        && AuthorizationUtils.canManageGlobalViews(context)) {
      return true;
    }
 
    // If the View is Personal, then the current actor must be the owner.
- return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); + return isViewOwner( + viewInfo.getCreated().getActor(), + UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); } /** @@ -86,28 +92,32 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U * @return the GMS model */ @Nonnull - public static DataHubViewDefinition mapDefinition(@Nonnull final DataHubViewDefinitionInput input) { + public static DataHubViewDefinition mapDefinition( + @Nonnull final DataHubViewDefinitionInput input) { Objects.requireNonNull(input, "input must not be null"); final DataHubViewDefinition result = new DataHubViewDefinition(); if (input.getFilter() != null) { result.setFilter(mapFilter(input.getFilter()), SetMode.IGNORE_NULL); } - result.setEntityTypes(new StringArray(input.getEntityTypes().stream().map(EntityTypeMapper::getName).collect( - Collectors.toList()))); + result.setEntityTypes( + new StringArray( + input.getEntityTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()))); return result; } /** - * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} object, - * which is then persisted to the backend in an aspect. + * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} + * object, which is then persisted to the backend in an aspect. * - * We intentionally convert from a more rigid model to something more flexible to hedge for the case - * in which the views feature evolves to require more advanced filter capabilities. + * <p>We intentionally convert from a more rigid model to something more flexible to hedge for the + * case in which the views feature evolves to require more advanced filter capabilities. * - * The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), which cannot be - * rendered in full by the UI. We account for this on the read path by logging a warning and returning an empty - * View in such cases. + * <p>The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), + * which cannot be rendered in full by the UI. We account for this on the read path by logging a + * warning and returning an empty View in such cases. 
*/ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { if (LogicalOperator.AND.equals(input.getOperator())) { @@ -121,19 +131,30 @@ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { private static Filter buildAndFilter(@Nonnull List<FacetFilterInput> input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(input.stream().map(ResolverUtils::criterionFromFilter).collect(Collectors.toList())))) - )); + result.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + input.stream() + .map(ResolverUtils::criterionFromFilter) + .collect(Collectors.toList())))))); return result; } private static Filter buildOrFilter(@Nonnull List<FacetFilterInput> input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(input.stream().map(filter -> - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(ResolverUtils.criterionFromFilter(filter)))) - ) - .collect(Collectors.toList()))); + result.setOr( + new ConjunctiveCriterionArray( + input.stream() + .map( + filter -> + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(ResolverUtils.criterionFromFilter(filter))))) + .collect(Collectors.toList()))); return result; } @@ -141,6 +162,5 @@ private static boolean isViewOwner(Urn creatorUrn, Urn actorUrn) { return creatorUrn.equals(actorUrn); } - private ViewUtils() { } - + private ViewUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java index 51fd503fff578..49c8c24c2b6be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java @@ -1,4 +1,3 @@ package com.linkedin.datahub.graphql.scalar; -public class LongScalarType { -} +public class LongScalarType {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java index 3bd8719a37abc..df7c729cb14c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java @@ -1,16 +1,18 @@ package com.linkedin.datahub.graphql.types; import com.linkedin.datahub.graphql.QueryContext; - -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface BatchMutableType<I, B, T> extends MutableType<I, T> { - default Class<B[]> batchInputClass() throws UnsupportedOperationException { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchInputClass method"); - } + default Class<B[]> batchInputClass() throws UnsupportedOperationException { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchInputClass method"); + } - default List<T> batchUpdate(@Nonnull final B[] updateInput, QueryContext context) throws Exception { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchUpdate method"); - } + default List<T> batchUpdate(@Nonnull final B[] updateInput, 
QueryContext context)
+      throws Exception {
+    throw new UnsupportedOperationException(
+        this.getClass().getName() + " does not implement batchUpdate method");
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java
index b50a229be0633..368c126131af2 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java
@@ -5,42 +5,46 @@
 import com.linkedin.datahub.graphql.generated.BrowseResults;
 import com.linkedin.datahub.graphql.generated.Entity;
 import com.linkedin.datahub.graphql.generated.FacetFilterInput;
-
+import java.util.List;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import java.util.List;
 
 /**
  * Extension of {@link EntityType} containing methods required for 'browse' functionality.
  *
- * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` interface.
+ * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity`
+ *     interface.
  * @param <K> the key type for the DataLoader
  */
 public interface BrowsableEntityType<T extends Entity, K> extends EntityType<T, K> {
 
-    /**
-     * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count.
-     *
-     * @param path the path to find browse results under
-     * @param filters list of filters that should be applied to search results, null if non were provided
-     * @param start the initial offset of returned results
-     * @param count the number of results to retrieve
-     * @param context the {@link QueryContext} corresponding to the request.
-     */
-    @Nonnull
-    BrowseResults browse(@Nonnull List<String> path,
-                         @Nullable List<FacetFilterInput> filters,
-                         int start,
-                         int count,
-                         @Nonnull final QueryContext context) throws Exception;
-
-    /**
-     * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & count.
-     *
-     * @param urn the entity urn to fetch browse paths for
-     * @param context the {@link QueryContext} corresponding to the request.
-     */
-    @Nonnull
-    List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception;
+  /**
+   * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count.
+   *
+   * @param path the path to find browse results under
+   * @param filters list of filters that should be applied to search results, null if none were
+   *     provided
+   * @param start the initial offset of returned results
+   * @param count the number of results to retrieve
+   * @param context the {@link QueryContext} corresponding to the request.
+   */
+  @Nonnull
+  BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception;
 
+  /**
+   * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, &
+   * count.
+   *
+   * @param urn the entity urn to fetch browse paths for
+   * @param context the {@link QueryContext} corresponding to the request.
+ */ + @Nonnull + List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java index 4185288776c06..43e4c1be55b71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java @@ -3,20 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import java.util.function.Function; - /** - * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, etc.). + * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, + * etc.). * * @param <T>: The GraphQL object type corresponding to the entity, must be of type {@link Entity} * @param <K> the key type for the DataLoader */ public interface EntityType<T extends Entity, K> extends LoadableType<T, K> { - /** - * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the Graph type, eg. 'DATASET' - */ - com.linkedin.datahub.graphql.generated.EntityType type(); - - Function<Entity, K> getKeyProvider(); + /** + * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the + * Graph type, eg. 'DATASET' + */ + com.linkedin.datahub.graphql.generated.EntityType type(); + Function<Entity, K> getKeyProvider(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java index a21fab09b79c3..9f9fe1f28994c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java @@ -2,10 +2,9 @@ import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; - import graphql.execution.DataFetcherResult; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; /** * GQL graph type that can be loaded from a downstream service by primary key. @@ -15,35 +14,38 @@ */ public interface LoadableType<T, K> { - /** - * Returns generated GraphQL class associated with the type - */ - Class<T> objectClass(); - - /** - * Returns the name of the type, to be used in creating a corresponding GraphQL {@link org.dataloader.DataLoader} - */ - default String name() { - return objectClass().getSimpleName(); - } - - /** - * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity cannot be found. - * - * @param key to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - default DataFetcherResult<T> load(@Nonnull final K key, @Nonnull final QueryContext context) throws Exception { - return batchLoad(ImmutableList.of(key), context).get(0); - }; - - /** - * Retrieves an list of entities given a list of urn strings. The list returned is expected to - * be of same length of the list of urns, where nulls are provided in place of an entity object if an entity cannot be found. - * - * @param keys to retrieve - * @param context the {@link QueryContext} corresponding to the request. 
-     */
-    List<DataFetcherResult<T>> batchLoad(@Nonnull final List<K> keys, @Nonnull final QueryContext context) throws Exception;
-
+  /** Returns generated GraphQL class associated with the type */
+  Class<T> objectClass();
+
+  /**
+   * Returns the name of the type, to be used in creating a corresponding GraphQL {@link
+   * org.dataloader.DataLoader}
+   */
+  default String name() {
+    return objectClass().getSimpleName();
+  }
+
+  /**
+   * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity
+   * cannot be found.
+   *
+   * @param key to retrieve
+   * @param context the {@link QueryContext} corresponding to the request.
+   */
+  default DataFetcherResult<T> load(@Nonnull final K key, @Nonnull final QueryContext context)
+      throws Exception {
+    return batchLoad(ImmutableList.of(key), context).get(0);
+  }
+  ;
+
+  /**
+   * Retrieves a list of entities given a list of urn strings. The list returned is expected to be
+   * of the same length as the list of urns, where nulls are provided in place of an entity object
+   * if an entity cannot be found.
+   *
+   * @param keys to retrieve
+   * @param context the {@link QueryContext} corresponding to the request.
+   */
+  List<DataFetcherResult<T>> batchLoad(
+      @Nonnull final List<K> keys, @Nonnull final QueryContext context) throws Exception;
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
index 94f1200d3a783..fa24192913324 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
@@ -1,7 +1,6 @@
 package com.linkedin.datahub.graphql.types;
 
 import com.linkedin.datahub.graphql.QueryContext;
-
 import javax.annotation.Nonnull;
 
 /**
@@ -10,18 +9,16 @@
  * @param <I>: The input type corresponding to the write.
  */
 public interface MutableType<I, T> {
-  /**
-   * Returns generated GraphQL class associated with the input type
-   */
-
-  Class<I> inputClass();
+  /** Returns generated GraphQL class associated with the input type */
+  Class<I> inputClass();
 
-  /**
-   * Update an entity by urn
-   *
-   * @param urn
-   * @param input input type
-   * @param context the {@link QueryContext} corresponding to the request.
-   */
-  T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) throws Exception;
+  /**
+   * Update an entity by urn
+   *
+   * @param urn
+   * @param input input type
+   * @param context the {@link QueryContext} corresponding to the request.
+ */ + T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java index 96875956d22c1..a5ade054e71eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java @@ -6,52 +6,61 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.query.filter.Filter; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. * - * Extension of {@link EntityType} containing methods required for 'search' functionality. + * <p>Extension of {@link EntityType} containing methods required for 'search' functionality. * - * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. */ @Deprecated public interface SearchableEntityType<T extends Entity, K> extends EntityType<T, K> { - /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. - * - * Retrieves {@link SearchResults} corresponding to a given query string, list of filters, start index, & count. - * - * @param query query text - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Deprecated - SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of filters, & limit. - * - * @param query query text - * @param field the name of the field to autocomplete against, null if one was not provided - * @param filters list of filters that should be applied to search results, null if non were provided - * @param limit the maximum number of autocomplete suggestions to be returned - * @param context the {@link QueryContext} corresponding to the request. - */ - AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception; + /** + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. + * + * <p>Retrieves {@link SearchResults} corresponding to a given query string, list of filters, + * start index, & count. 
+   *
+   * @param query query text
+   * @param filters list of filters that should be applied to search results, null if none were
+   *     provided
+   * @param start the initial offset of returned results
+   * @param count the number of results to retrieve
+   * @param context the {@link QueryContext} corresponding to the request.
+   */
+  @Deprecated
+  SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception;
 
+  /**
+   * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of
+   * filters, & limit.
+   *
+   * @param query query text
+   * @param field the name of the field to autocomplete against, null if one was not provided
+   * @param filters list of filters that should be applied to search results, null if none were
+   *     provided
+   * @param limit the maximum number of autocomplete suggestions to be returned
+   * @param context the {@link QueryContext} corresponding to the request.
+   */
+  AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception;
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java
index c9e2c322ace8d..00e9badf5e345 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java
@@ -7,7 +7,6 @@
 import com.linkedin.metadata.Constants;
 import javax.annotation.Nonnull;
 
-
 public class AspectMapper {
 
   public static final AspectMapper INSTANCE = new AspectMapper();
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java
index f3fdfdaa86f9e..45e80822b12c8 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java
@@ -2,8 +2,8 @@
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
-import com.linkedin.datahub.graphql.VersionedAspectKey;
 import com.linkedin.datahub.graphql.QueryContext;
+import com.linkedin.datahub.graphql.VersionedAspectKey;
 import com.linkedin.datahub.graphql.generated.Aspect;
 import com.linkedin.datahub.graphql.types.LoadableType;
 import com.linkedin.entity.EntityResponse;
@@ -35,45 +35,55 @@ public String name() {
   }
 
   /**
-   * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list returned is expected to
-   * be of same length of the list of keys, where nulls are provided in place of an aspect object if an entity cannot be found.
+   * Retrieves a list of aspects given a list of {@link VersionedAspectKey} structs. The list
+   * returned is expected to be of the same length as the list of keys, where nulls are provided
+   * in place of an aspect object if an entity cannot be found.
    *
    * @param keys to retrieve
    * @param context the {@link QueryContext} corresponding to the request.
*/ - public List<DataFetcherResult<Aspect>> batchLoad(@Nonnull List<VersionedAspectKey> keys, @Nonnull QueryContext context) throws Exception { + public List<DataFetcherResult<Aspect>> batchLoad( + @Nonnull List<VersionedAspectKey> keys, @Nonnull QueryContext context) throws Exception { try { - return keys.stream().map(key -> { - try { - Urn entityUrn = Urn.createFromString(key.getUrn()); + return keys.stream() + .map( + key -> { + try { + Urn entityUrn = Urn.createFromString(key.getUrn()); - Map<Urn, EntityResponse> response = _entityClient.batchGetV2( - entityUrn.getEntityType(), - ImmutableSet.of(entityUrn), - ImmutableSet.of(key.getAspectName()), - context.getAuthentication() - ); + Map<Urn, EntityResponse> response = + _entityClient.batchGetV2( + entityUrn.getEntityType(), + ImmutableSet.of(entityUrn), + ImmutableSet.of(key.getAspectName()), + context.getAuthentication()); - EntityResponse entityResponse = response.get(entityUrn); + EntityResponse entityResponse = response.get(entityUrn); - if (entityResponse == null || entityResponse.getAspects().get(key.getAspectName()) == null) { - // The aspect was not found. Return null. - return DataFetcherResult.<Aspect>newResult().data(null).build(); - } - final EnvelopedAspect aspect = entityResponse.getAspects().get(key.getAspectName()); - return DataFetcherResult.<Aspect>newResult().data(AspectMapper.map(aspect, entityUrn)).build(); - } catch (Exception e) { - if (e instanceof RestLiResponseException) { - // if no aspect is found, restli will return a 404 rather than null - // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls - if (((RestLiResponseException) e).getStatus() == 404) { - return DataFetcherResult.<Aspect>newResult().data(null).build(); - } - } - throw new RuntimeException(String.format("Failed to load Aspect for entity %s", key.getUrn()), e); - } - }).collect(Collectors.toList()); + if (entityResponse == null + || entityResponse.getAspects().get(key.getAspectName()) == null) { + // The aspect was not found. Return null. 
+ return DataFetcherResult.<Aspect>newResult().data(null).build(); + } + final EnvelopedAspect aspect = + entityResponse.getAspects().get(key.getAspectName()); + return DataFetcherResult.<Aspect>newResult() + .data(AspectMapper.map(aspect, entityUrn)) + .build(); + } catch (Exception e) { + if (e instanceof RestLiResponseException) { + // if no aspect is found, restli will return a 404 rather than null + // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls + if (((RestLiResponseException) e).getStatus() == 404) { + return DataFetcherResult.<Aspect>newResult().data(null).build(); + } + } + throw new RuntimeException( + String.format("Failed to load Aspect for entity %s", key.getUrn()), e); + } + }) + .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Aspects", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index e1d81bb31f471..2536f4d2521ee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -25,7 +25,6 @@ import java.util.Collections; import java.util.stream.Collectors; - public class AssertionMapper { public static Assertion map(final EntityResponse entityResponse) { @@ -36,15 +35,18 @@ public static Assertion map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.ASSERTION); - final EnvelopedAspect envelopedAssertionInfo = aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); + final EnvelopedAspect envelopedAssertionInfo = + aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); if (envelopedAssertionInfo != null) { result.setInfo(mapAssertionInfo(new AssertionInfo(envelopedAssertionInfo.getValue().data()))); } - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(Constants.UNKNOWN_DATA_PLATFORM); @@ -60,7 +62,8 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion new com.linkedin.datahub.graphql.generated.AssertionInfo(); assertionInfo.setType(AssertionType.valueOf(gmsAssertionInfo.getType().name())); if (gmsAssertionInfo.hasDatasetAssertion()) { - DatasetAssertionInfo datasetAssertion = mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); + DatasetAssertionInfo datasetAssertion = + mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } return assertionInfo; @@ -69,25 +72,25 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion private static DatasetAssertionInfo mapDatasetAssertionInfo( final com.linkedin.assertion.DatasetAssertionInfo gmsDatasetAssertion) { 
DatasetAssertionInfo datasetAssertion = new DatasetAssertionInfo(); - datasetAssertion.setDatasetUrn( - gmsDatasetAssertion.getDataset().toString()); - datasetAssertion.setScope( - DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); + datasetAssertion.setDatasetUrn(gmsDatasetAssertion.getDataset().toString()); + datasetAssertion.setScope(DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); if (gmsDatasetAssertion.hasFields()) { - datasetAssertion.setFields(gmsDatasetAssertion.getFields() - .stream() - .map(AssertionMapper::mapDatasetSchemaField) - .collect(Collectors.toList())); + datasetAssertion.setFields( + gmsDatasetAssertion.getFields().stream() + .map(AssertionMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } else { datasetAssertion.setFields(Collections.emptyList()); } // Agg if (gmsDatasetAssertion.hasAggregation()) { - datasetAssertion.setAggregation(AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); + datasetAssertion.setAggregation( + AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); } // Op - datasetAssertion.setOperator(AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); + datasetAssertion.setOperator( + AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); // Params if (gmsDatasetAssertion.hasParameters()) { @@ -98,7 +101,8 @@ private static DatasetAssertionInfo mapDatasetAssertionInfo( datasetAssertion.setNativeType(gmsDatasetAssertion.getNativeType()); } if (gmsDatasetAssertion.hasNativeParameters()) { - datasetAssertion.setNativeParameters(StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); + datasetAssertion.setNativeParameters( + StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); } else { datasetAssertion.setNativeParameters(Collections.emptyList()); } @@ -119,7 +123,8 @@ private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { return new SchemaFieldRef(schemaFieldUrn.toString(), schemaFieldUrn.getEntityKey().get(1)); } - private static AssertionStdParameters mapParameters(final com.linkedin.assertion.AssertionStdParameters params) { + private static AssertionStdParameters mapParameters( + final com.linkedin.assertion.AssertionStdParameters params) { final AssertionStdParameters result = new AssertionStdParameters(); if (params.hasValue()) { result.setValue(mapParameter(params.getValue())); @@ -133,13 +138,13 @@ private static AssertionStdParameters mapParameters(final com.linkedin.assertion return result; } - private static AssertionStdParameter mapParameter(final com.linkedin.assertion.AssertionStdParameter param) { + private static AssertionStdParameter mapParameter( + final com.linkedin.assertion.AssertionStdParameter param) { final AssertionStdParameter result = new AssertionStdParameter(); result.setType(AssertionStdParameterType.valueOf(param.getType().name())); result.setValue(param.getValue()); return result; } - private AssertionMapper() { - } + private AssertionMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java index 3493afdd8bd84..ac5cce1191e5d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java @@ -20,69 +20,71 @@ import 
java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AssertionType + implements com.linkedin.datahub.graphql.types.EntityType<Assertion, String> { -public class AssertionType implements com.linkedin.datahub.graphql.types.EntityType<Assertion, String> { + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ASSERTION_KEY_ASPECT_NAME, + Constants.ASSERTION_INFO_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + private final EntityClient _entityClient; - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ASSERTION_KEY_ASPECT_NAME, - Constants.ASSERTION_INFO_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME - ); - private final EntityClient _entityClient; + public AssertionType(final EntityClient entityClient) { + _entityClient = entityClient; + } - public AssertionType(final EntityClient entityClient) { - _entityClient = entityClient; - } + @Override + public EntityType type() { + return EntityType.ASSERTION; + } - @Override - public EntityType type() { - return EntityType.ASSERTION; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<Assertion> objectClass() { - return Assertion.class; - } + @Override + public Class<Assertion> objectClass() { + return Assertion.class; + } - @Override - public List<DataFetcherResult<Assertion>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> assertionUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + @Override + public List<DataFetcherResult<Assertion>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> assertionUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Assertion>newResult() - .data(AssertionMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Assertions", e); - } + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<Assertion>newResult() + .data(AssertionMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Assertions", e); } + } - private Urn getUrn(final String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); - } + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } -} \ No newline at end of file + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java index d9f25a7cec8e1..bfe2ccbe34166 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java @@ -4,9 +4,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.types.auth.mappers.AccessTokenMetadataMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -21,7 +21,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class AccessTokenMetadataType implements com.linkedin.datahub.graphql.types.EntityType<AccessTokenMetadata, String> { @@ -48,13 +47,17 @@ public Class<AccessTokenMetadata> objectClass() { } @Override - public List<DataFetcherResult<AccessTokenMetadata>> batchLoad(@Nonnull List<String> keys, - @Nonnull QueryContext context) throws Exception { - final List<Urn> tokenInfoUrns = keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List<DataFetcherResult<AccessTokenMetadata>> batchLoad( + @Nonnull List<String> keys, @Nonnull QueryContext context) throws Exception { + final List<Urn> tokenInfoUrns = + keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(Constants.ACCESS_TOKEN_ENTITY_NAME, new HashSet<>(tokenInfoUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + new HashSet<>(tokenInfoUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -62,9 +65,13 @@ public List<DataFetcherResult<AccessTokenMetadata>> batchLoad(@Nonnull List<Stri gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null : DataFetcherResult.<AccessTokenMetadata>newResult() - .data(AccessTokenMetadataMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<AccessTokenMetadata>newResult() + .data(AccessTokenMetadataMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Access Token Info", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java index 9b38757879896..a519a65e5cb6b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java @@ -2,8 +2,8 @@ import com.linkedin.access.token.DataHubAccessTokenInfo; import com.linkedin.data.DataMap; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; @@ -11,7 +11,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - public class AccessTokenMetadataMapper implements ModelMapper<EntityResponse, AccessTokenMetadata> { public static final AccessTokenMetadataMapper INSTANCE = new AccessTokenMetadataMapper(); @@ -29,13 +28,15 @@ public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { metadata.setType(EntityType.ACCESS_TOKEN); final EnvelopedAspectMap aspectMap = input.getAspects(); - final MappingHelper<AccessTokenMetadata> mappingHelper = new MappingHelper<>(aspectMap, metadata); + final MappingHelper<AccessTokenMetadata> mappingHelper = + new MappingHelper<>(aspectMap, metadata); mappingHelper.mapToResult(Constants.ACCESS_TOKEN_INFO_NAME, this::mapTokenInfo); return mappingHelper.getResult(); } - private void mapTokenInfo(@Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { + private void mapTokenInfo( + @Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(dataMap); accessTokenMetadata.setName(tokenInfo.getName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index fa0e3cd856803..ba8e96159b0bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.chart; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.ChartUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import 
com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -36,8 +39,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,203 +57,214 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - +public class ChartType + implements SearchableEntityType<Chart, String>, + BrowsableEntityType<Chart, String>, + MutableType<ChartUpdateInput, Chart> { -public class ChartType implements SearchableEntityType<Chart, String>, BrowsableEntityType<Chart, String>, MutableType<ChartUpdateInput, Chart> { + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + CHART_KEY_ASPECT_NAME, + CHART_INFO_ASPECT_NAME, + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, + CHART_QUERY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + private static final Set<String> FACET_FIELDS = + ImmutableSet.of("access", "queryType", "tool", "type"); - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - CHART_KEY_ASPECT_NAME, - CHART_INFO_ASPECT_NAME, - EDITABLE_CHART_PROPERTIES_ASPECT_NAME, - CHART_QUERY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); - - private final EntityClient _entityClient; - - public ChartType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class<ChartUpdateInput> inputClass() { - return ChartUpdateInput.class; - } + public ChartType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.CHART; - } + @Override + public Class<ChartUpdateInput> inputClass() { + return ChartUpdateInput.class; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.CHART; + } - @Override - public Class<Chart> objectClass() { - return Chart.class; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public List<DataFetcherResult<Chart>> batchLoad(@Nonnull 
List<String> urnStrs, @Nonnull QueryContext context) throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> chartMap = - _entityClient.batchGetV2( - CHART_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); + @Override + public Class<Chart> objectClass() { + return Chart.class; + } - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(chartMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsChart -> gmsChart == null ? null : DataFetcherResult.<Chart>newResult() - .data(ChartMapper.map(gmsChart)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Charts", e); - } - } + @Override + public List<DataFetcherResult<Chart>> batchLoad( + @Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map<Urn, EntityResponse> chartMap = + _entityClient.batchGetV2( + CHART_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "chart", - query, - facetFilters, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(chartMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsChart -> + gmsChart == null + ? null + : DataFetcherResult.<Chart>newResult() + .data(ChartMapper.map(gmsChart)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Charts", e); } + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "chart", query, - filters, - limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "chart", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("chart", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "chart", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - private ChartUrn getChartUrn(String urnStr) { - try { - return ChartUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); - } - } + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - @Override - public Chart update(@Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = ChartUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + private ChartUrn getChartUrn(String urnStr) { + try { + return ChartUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); + } + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public Chart update( + @Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = ChartUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator.");
-    }
 
-  private boolean isAuthorized(@Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) {
-    // Decide whether the current principal should be allowed to update the Dataset.
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update);
-    return AuthorizationUtils.isAuthorized(
-        context.getAuthorizer(),
-        context.getAuthentication().getActor().toUrnStr(),
-        PoliciesConfig.CHART_PRIVILEGES.getResourceType(),
-        urn,
-        orPrivilegeGroups);
+      return load(urn, context).getData();
     }
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
+  }
 
-  private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) {
+  private boolean isAuthorized(
+      @Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) {
+    // Decide whether the current principal should be allowed to update the Chart.
+    final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update);
+    return AuthorizationUtils.isAuthorized(
+        context.getAuthorizer(),
+        context.getAuthentication().getActor().toUrnStr(),
+        PoliciesConfig.CHART_PRIVILEGES.getResourceType(),
+        urn,
+        orPrivilegeGroups);
+  }
 
-    final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(
-        PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
-    ));
+  private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) {
 
-    List<String> specificPrivileges = new ArrayList<>();
-    if (updateInput.getOwnership() != null) {
-      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
-    }
-    if (updateInput.getEditableProperties() != null) {
-      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
-    }
-    if (updateInput.getGlobalTags() != null) {
-      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
-    }
-    final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges);
+    final ConjunctivePrivilegeGroup allPrivilegesGroup =
+        new ConjunctivePrivilegeGroup(
+            ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()));
 
-    // If you either have all entity privileges, or have the specific privileges required, you are authorized.
-    return new DisjunctivePrivilegeGroup(ImmutableList.of(
-        allPrivilegesGroup,
-        specificPrivilegeGroup
-    ));
+    List<String> specificPrivileges = new ArrayList<>();
+    if (updateInput.getOwnership() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
     }
+    if (updateInput.getEditableProperties() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
+    }
+    if (updateInput.getGlobalTags() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
+    }
+    final ConjunctivePrivilegeGroup specificPrivilegeGroup =
+        new ConjunctivePrivilegeGroup(specificPrivileges);
 
+    // If you either have all entity privileges, or have the specific privileges required, you are
+    // authorized.
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index e0ffc57ddf519..0ef52c9f45716 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -28,13 +30,13 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -51,184 +53,211 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartMapper implements ModelMapper<EntityResponse, Chart> { + public static final ChartMapper INSTANCE = new ChartMapper(); -public class ChartMapper implements ModelMapper<EntityResponse, Chart> { + public static Chart map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - public static final ChartMapper INSTANCE = new ChartMapper(); + @Override + public Chart apply(@Nonnull final EntityResponse entityResponse) { + final Chart result = new Chart(); + Urn entityUrn = entityResponse.getUrn(); - public static Chart map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CHART); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - @Override - public Chart apply(@Nonnull final EntityResponse entityResponse) { - final Chart result = new Chart(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CHART); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<Chart> mappingHelper = new MappingHelper<>(aspectMap, result); - 
mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); - mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); - mappingHelper.mapToResult(EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (chart, dataMap) -> + MappingHelper<Chart> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); + mappingHelper.mapToResult( + CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); + mappingHelper.mapToResult( + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (chart, dataMap) -> chart.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (chart, dataMap) -> - chart.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (chart, dataMap) -> - chart.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (chart, dataMap) -> chart.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (chart, dataMap) -> + chart.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (chart, dataMap) -> chart.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (chart, dataMap) -> - chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (chart, dataMap) -> chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (chart, dataMap) -> chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (chart, dataMap) -> - chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - 
mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (chart, dataMap) -> chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } - private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final ChartKey gmsKey = new ChartKey(dataMap); - chart.setChartId(gmsKey.getChartId()); - chart.setTool(gmsKey.getDashboardTool()); - chart.setPlatform(DataPlatform.builder() + private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final ChartKey gmsKey = new ChartKey(dataMap); + chart.setChartId(gmsKey.getChartId()); + chart.setTool(gmsKey.getDashboardTool()); + chart.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } - private void mapChartInfo(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); - chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); - chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); - } + private void mapChartInfo( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); + chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); + chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); + } - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} - */ - private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartInfo result = new ChartInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasInputs()) { - result.setInputs(info.getInputs().stream().map(input -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(input.getDatasetUrn().toString()); - return dataset; - }).collect(Collectors.toList())); - } - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External 
URL field for consistency. - result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; - } + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} */ + private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartInfo result = new ChartInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} - */ - private ChartProperties mapChartInfoToProperties(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartProperties result = new ChartProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasInputs()) { + result.setInputs( + info.getInputs().stream() + .map( + input -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(input.getDatasetUrn().toString()); + return dataset; + }) + .collect(Collectors.toList())); } - private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); - chart.setQuery(mapQuery(gmsChartQuery)); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { - final ChartQuery result = new ChartQuery(); - result.setRawQuery(query.getRawQuery()); - result.setType(ChartQueryType.valueOf(query.getType().toString())); - return result; + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); } - - private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); - final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); - chartEditableProperties.setDescription(editableChartProperties.getDescription()); - chart.setEditableProperties(chartEditableProperties); + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if 
(info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. + result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} */ + private ChartProperties mapChartInfoToProperties( + final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartProperties result = new ChartProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - private void mapGlobalTags(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - chart.setGlobalTags(globalTags); - chart.setTags(globalTags); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); + chart.setQuery(mapQuery(gmsChartQuery)); + } + + private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { + final ChartQuery result = new ChartQuery(); + result.setRawQuery(query.getRawQuery()); + result.setType(ChartQueryType.valueOf(query.getType().toString())); + return result; + } + + private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); + final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); + chartEditableProperties.setDescription(editableChartProperties.getDescription()); + chart.setEditableProperties(chartEditableProperties); + } - private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - chart.setContainer(Container - .builder() + private void mapGlobalTags( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + chart.setGlobalTags(globalTags); + chart.setTags(globalTags); + } + + private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + chart.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); - } + private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java index b52ddad0b0071..f2a434b58686c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -17,68 +19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartUpdateInputMapper + implements InputModelMapper<ChartUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); -public class ChartUpdateInputMapper implements 
InputModelMapper<ChartUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static Collection<MetadataChangeProposal> map( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(chartUpdateInput, actor); + } - public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); + @Override + public Collection<MetadataChangeProposal> apply( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - public static Collection<MetadataChangeProposal> map(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(chartUpdateInput, actor); + if (chartUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } - @Override - public Collection<MetadataChangeProposal> apply(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - - if (chartUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper - .aspectToProposal(OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); - } - - if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (chartUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - // Tags overrides global tags if provided - if (chartUpdateInput.getTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } - - if (chartUpdateInput.getEditableProperties() != null) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(); - editableChartProperties.setDescription(chartUpdateInput.getEditableProperties().getDescription()); - if (!editableChartProperties.hasCreated()) { - editableChartProperties.setCreated(auditStamp); - } - editableChartProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); - } + if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (chartUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + 
.collect(Collectors.toList()))); + } + // Tags overrides global tags if provided + if (chartUpdateInput.getTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (chartUpdateInput.getEditableProperties() != null) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(); + editableChartProperties.setDescription( + chartUpdateInput.getEditableProperties().getDescription()); + if (!editableChartProperties.hasCreated()) { + editableChartProperties.setCreated(auditStamp); + } + editableChartProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java index d6ef713f3ade6..4da18403f95cc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java @@ -7,29 +7,36 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class InputFieldsMapper { - public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); - - public static com.linkedin.datahub.graphql.generated.InputFields map(@Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.InputFields apply(@Nonnull final InputFields input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.InputFields result = new com.linkedin.datahub.graphql.generated.InputFields(); - result.setFields(input.getFields().stream().map(field -> { - InputField fieldResult = new InputField(); - - if (field.hasSchemaField()) { - fieldResult.setSchemaField(SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); - } - if (field.hasSchemaFieldUrn()) { - fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); - } - return fieldResult; - }).collect(Collectors.toList())); - - return result; - } + public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); + + public static com.linkedin.datahub.graphql.generated.InputFields map( + @Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.InputFields apply( + @Nonnull final InputFields input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.InputFields result = + new com.linkedin.datahub.graphql.generated.InputFields(); + result.setFields( + input.getFields().stream() + .map( + field -> { + InputField fieldResult = new InputField(); + + if (field.hasSchemaField()) { + fieldResult.setSchemaField( + SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); + } + if (field.hasSchemaFieldUrn()) { + fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); + } + return fieldResult; + }) + .collect(Collectors.toList())); + + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java index beb2b64e1dd7d..1f952bb6a2bd1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java @@ -1,29 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class AuditStampMapper implements ModelMapper<com.linkedin.common.AuditStamp, AuditStamp> { - public static final AuditStampMapper INSTANCE = new AuditStampMapper(); + public static final AuditStampMapper INSTANCE = new AuditStampMapper(); - public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - return INSTANCE.apply(auditStamp); - } + public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { + return INSTANCE.apply(auditStamp); + } - @Override - public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - final AuditStamp result = new AuditStamp(); - result.setActor(auditStamp.getActor().toString()); - result.setTime(auditStamp.getTime()); - return result; - } + @Override + public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { + final AuditStamp result = new AuditStamp(); + result.setActor(auditStamp.getActor().toString()); + result.setTime(auditStamp.getTime()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java index 41ee99fa412ad..79b7cf8e050d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java @@ -4,10 +4,9 @@ import com.linkedin.datahub.graphql.generated.BrowsePathEntry; import com.linkedin.datahub.graphql.generated.BrowsePathV2; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class BrowsePathsV2Mapper implements ModelMapper<BrowsePathsV2, BrowsePathV2> { @@ -20,7 +19,8 @@ public static BrowsePathV2 map(@Nonnull final BrowsePathsV2 metadata) { @Override public BrowsePathV2 apply(@Nonnull final BrowsePathsV2 input) { final BrowsePathV2 result = new BrowsePathV2(); - final List<BrowsePathEntry> path = input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); + final List<BrowsePathEntry> path = + input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); result.setPath(path); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java index 7144730ba9337..e3a09bc8926a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java @@ -3,8 +3,8 @@ import com.linkedin.datahub.graphql.generated.ChangeAuditStamps; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -public class ChangeAuditStampsMapper implements ModelMapper<com.linkedin.common.ChangeAuditStamps, ChangeAuditStamps> { +public class ChangeAuditStampsMapper + implements ModelMapper<com.linkedin.common.ChangeAuditStamps, ChangeAuditStamps> { public static final ChangeAuditStampsMapper INSTANCE = new ChangeAuditStampsMapper(); public static ChangeAuditStamps map(com.linkedin.common.ChangeAuditStamps input) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java index 6c8bdada17b24..806e8e6aadc5b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java @@ -1,26 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.Cost; import com.linkedin.datahub.graphql.generated.CostType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nonnull; import lombok.NonNull; public class CostMapper implements ModelMapper<com.linkedin.common.Cost, Cost> { - public static final CostMapper INSTANCE = new CostMapper(); + public static final CostMapper INSTANCE = new CostMapper(); - public static Cost map(@NonNull final com.linkedin.common.Cost cost) { - return INSTANCE.apply(cost); - } + public static Cost map(@NonNull final com.linkedin.common.Cost cost) { + return INSTANCE.apply(cost); + } - @Override - public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { - final Cost result = new Cost(); - result.setCostType(CostType.valueOf(cost.getCostType().name())); - result.setCostValue(CostValueMapper.map(cost.getCost())); - return result; - } + @Override + public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { + final Cost result = new Cost(); + result.setCostType(CostType.valueOf(cost.getCostType().name())); + result.setCostValue(CostValueMapper.map(cost.getCost())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java index 3f41c92cd1715..56c107f7ec059 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java @@ -2,25 +2,24 @@ import com.linkedin.datahub.graphql.generated.CostValue; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class CostValueMapper implements ModelMapper<com.linkedin.common.CostValue, CostValue> { - public static final CostValueMapper INSTANCE = new CostValueMapper(); + public 
static final CostValueMapper INSTANCE = new CostValueMapper(); - public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { - return INSTANCE.apply(costValue); - } + public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { + return INSTANCE.apply(costValue); + } - @Override - public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { - final CostValue result = new CostValue(); - if (costValue.isCostCode()) { - result.setCostCode(costValue.getCostCode()); - } - if (costValue.isCostId()) { - result.setCostId(costValue.getCostId().floatValue()); - } - return result; + @Override + public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { + final CostValue result = new CostValue(); + if (costValue.isCostCode()) { + result.setCostCode(costValue.getCostCode()); + } + if (costValue.isCostId()) { + result.setCostId(costValue.getCostId().floatValue()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java index 50e4846611a9b..b09678ddeb42e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java @@ -1,36 +1,36 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.CustomPropertiesEntry; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class CustomPropertiesMapper { - public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); + public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); - public static List<CustomPropertiesEntry> map(@Nonnull final Map<String, String> input, @Nonnull Urn urn) { - return INSTANCE.apply(input, urn); - } + public static List<CustomPropertiesEntry> map( + @Nonnull final Map<String, String> input, @Nonnull Urn urn) { + return INSTANCE.apply(input, urn); + } - public List<CustomPropertiesEntry> apply(@Nonnull final Map<String, String> input, @Nonnull Urn urn) { - List<CustomPropertiesEntry> results = new ArrayList<>(); - for (String key : input.keySet()) { - final CustomPropertiesEntry entry = new CustomPropertiesEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - entry.setAssociatedUrn(urn.toString()); - results.add(entry); - } - return results; + public List<CustomPropertiesEntry> apply( + @Nonnull final Map<String, String> input, @Nonnull Urn urn) { + List<CustomPropertiesEntry> results = new ArrayList<>(); + for (String key : input.keySet()) { + final CustomPropertiesEntry entry = new CustomPropertiesEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + entry.setAssociatedUrn(urn.toString()); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java index 1f10cd6ee3658..a2236f7e8586d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java @@ -3,14 +3,16 @@ import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class DataPlatformInstanceAspectMapper implements ModelMapper<com.linkedin.common.DataPlatformInstance, DataPlatformInstance> { +public class DataPlatformInstanceAspectMapper + implements ModelMapper<com.linkedin.common.DataPlatformInstance, DataPlatformInstance> { - public static final DataPlatformInstanceAspectMapper INSTANCE = new DataPlatformInstanceAspectMapper(); + public static final DataPlatformInstanceAspectMapper INSTANCE = + new DataPlatformInstanceAspectMapper(); - public static DataPlatformInstance map(@Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { + public static DataPlatformInstance map( + @Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { return INSTANCE.apply(dataPlatformInstance); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java index 4bbf50bb72362..7a88474166915 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java @@ -1,24 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class DeprecationMapper implements ModelMapper<com.linkedin.common.Deprecation, Deprecation> { - public static final DeprecationMapper INSTANCE = new DeprecationMapper(); +public class DeprecationMapper + implements ModelMapper<com.linkedin.common.Deprecation, Deprecation> { + public static final DeprecationMapper INSTANCE = new DeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { + return INSTANCE.apply(deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java index 478d256df66a4..339c6a848d9f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.graphql.generated.Embed; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; public class EmbedMapper implements ModelMapper<com.linkedin.common.Embed, Embed> { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java index 9f4517c89a6dc..830cbb0e79d79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java @@ -1,44 +1,49 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.FineGrainedLineage; import com.linkedin.datahub.graphql.generated.SchemaFieldRef; import com.linkedin.dataset.FineGrainedLineageArray; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import 
javax.annotation.Nonnull; public class FineGrainedLineagesMapper { public static final FineGrainedLineagesMapper INSTANCE = new FineGrainedLineagesMapper(); - public static List<FineGrainedLineage> map(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { + public static List<FineGrainedLineage> map( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { return INSTANCE.apply(fineGrainedLineages); } - public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { - final List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> result = new ArrayList<>(); + public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { + final List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> result = + new ArrayList<>(); if (fineGrainedLineages.size() == 0) { return result; } for (com.linkedin.dataset.FineGrainedLineage fineGrainedLineage : fineGrainedLineages) { - com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); + com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = + new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); if (fineGrainedLineage.hasUpstreams()) { - resultEntry.setUpstreams(fineGrainedLineage.getUpstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setUpstreams( + fineGrainedLineage.getUpstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } if (fineGrainedLineage.hasDownstreams()) { - resultEntry.setDownstreams(fineGrainedLineage.getDownstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setDownstreams( + fineGrainedLineage.getDownstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } result.add(resultEntry); } @@ -46,8 +51,7 @@ public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply(@No } private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { - return new SchemaFieldRef(schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); + return new SchemaFieldRef( + schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java index 8bcfe7eb3b6d0..4546e0e4d8dc0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java @@ -2,22 +2,25 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.InstitutionalMemory; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import 
javax.annotation.Nonnull; public class InstitutionalMemoryMapper { - public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); + public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); - public static InstitutionalMemory map(@Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(memory, entityUrn); - } + public static InstitutionalMemory map( + @Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(memory, entityUrn); + } - public InstitutionalMemory apply(@Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { - final InstitutionalMemory result = new InstitutionalMemory(); - result.setElements(input.getElements().stream().map(metadata -> - InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)).collect(Collectors.toList())); - return result; - } + public InstitutionalMemory apply( + @Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { + final InstitutionalMemory result = new InstitutionalMemory(); + result.setElements( + input.getElements().stream() + .map(metadata -> InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java index ba4d37173abb8..49a4618507086 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java @@ -1,33 +1,37 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import com.linkedin.datahub.graphql.generated.CorpUser; - +import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import javax.annotation.Nonnull; public class InstitutionalMemoryMetadataMapper { - public static final InstitutionalMemoryMetadataMapper INSTANCE = new InstitutionalMemoryMetadataMapper(); + public static final InstitutionalMemoryMetadataMapper INSTANCE = + new InstitutionalMemoryMetadataMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } + public static InstitutionalMemoryMetadata map( + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } - public InstitutionalMemoryMetadata apply(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, @Nonnull final Urn entityUrn) { - final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); - result.setUrl(input.getUrl().toString()); - result.setDescription(input.getDescription()); // deprecated field - result.setLabel(input.getDescription()); - result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); - result.setCreated(AuditStampMapper.map(input.getCreateStamp())); - result.setAssociatedUrn(entityUrn.toString()); - 
return result; - } + public InstitutionalMemoryMetadata apply( + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, + @Nonnull final Urn entityUrn) { + final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); + result.setUrl(input.getUrl().toString()); + result.setDescription(input.getDescription()); // deprecated field + result.setLabel(input.getDescription()); + result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); + result.setCreated(AuditStampMapper.map(input.getCreateStamp())); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } - private CorpUser getAuthor(String actor) { - CorpUser partialUser = new CorpUser(); - partialUser.setUrn(actor); - return partialUser; - } + private CorpUser getAuthor(String actor) { + CorpUser partialUser = new CorpUser(); + partialUser.setUrn(actor); + return partialUser; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java index 28986dcae5725..87d865471708e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java @@ -1,31 +1,34 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.common.AuditStamp; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadataUpdate; import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class InstitutionalMemoryMetadataUpdateMapper implements ModelMapper<InstitutionalMemoryMetadataUpdate, InstitutionalMemoryMetadata> { +public class InstitutionalMemoryMetadataUpdateMapper + implements ModelMapper<InstitutionalMemoryMetadataUpdate, InstitutionalMemoryMetadata> { - private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = new InstitutionalMemoryMetadataUpdateMapper(); + private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = + new InstitutionalMemoryMetadataUpdateMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemoryMetadata map( + @Nonnull final InstitutionalMemoryMetadataUpdate input) { + return INSTANCE.apply(input); + } - @Override - public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); - metadata.setDescription(input.getDescription()); - metadata.setUrl(new Url(input.getUrl())); - metadata.setCreateStamp(new AuditStamp() + @Override + public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { + final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); + metadata.setDescription(input.getDescription()); + metadata.setUrl(new Url(input.getUrl())); + metadata.setCreateStamp( + new AuditStamp() .setActor(CorpUserUtils.getCorpUserUrn(input.getAuthor())) - 
.setTime(input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt()) - ); - return metadata; - } + .setTime( + input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt())); + return metadata; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java index bf063896290eb..d8b451458e72c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java @@ -1,30 +1,30 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import java.util.stream.Collectors; - -import javax.annotation.Nonnull; - import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadataArray; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class InstitutionalMemoryUpdateMapper implements ModelMapper<InstitutionalMemoryUpdate, InstitutionalMemory> { +public class InstitutionalMemoryUpdateMapper + implements ModelMapper<InstitutionalMemoryUpdate, InstitutionalMemory> { - private static final InstitutionalMemoryUpdateMapper INSTANCE = new InstitutionalMemoryUpdateMapper(); + private static final InstitutionalMemoryUpdateMapper INSTANCE = + new InstitutionalMemoryUpdateMapper(); - public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { + return INSTANCE.apply(input); + } - @Override - public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { - final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); - institutionalMemory.setElements(new InstitutionalMemoryMetadataArray( - input.getElements() - .stream() + @Override + public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { + final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); + institutionalMemory.setElements( + new InstitutionalMemoryMetadataArray( + input.getElements().stream() .map(InstitutionalMemoryMetadataUpdateMapper::map) .collect(Collectors.toList()))); - return institutionalMemory; - } + return institutionalMemory; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java index 986954fab87db..37b625715edd5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java @@ -1,59 +1,66 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.Operation; +import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.OperationSourceType; import com.linkedin.datahub.graphql.generated.OperationType; -import com.linkedin.common.urn.Urn; import 
com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; -public class OperationMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.Operation> { +public class OperationMapper + implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.Operation> { - public static final OperationMapper INSTANCE = new OperationMapper(); + public static final OperationMapper INSTANCE = new OperationMapper(); - public static com.linkedin.datahub.graphql.generated.Operation map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); - } + public static com.linkedin.datahub.graphql.generated.Operation map( + @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.Operation apply( + @Nonnull final EnvelopedAspect envelopedAspect) { + + Operation gmsProfile = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + Operation.class); + + final com.linkedin.datahub.graphql.generated.Operation result = + new com.linkedin.datahub.graphql.generated.Operation(); - @Override - public com.linkedin.datahub.graphql.generated.Operation apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - Operation gmsProfile = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - Operation.class); - - final com.linkedin.datahub.graphql.generated.Operation result = - new com.linkedin.datahub.graphql.generated.Operation(); - - result.setTimestampMillis(gmsProfile.getTimestampMillis()); - result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); - if (gmsProfile.hasActor()) { - result.setActor(gmsProfile.getActor().toString()); - } - result.setOperationType(OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); - result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); - if (gmsProfile.hasSourceType()) { - result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); - } - if (gmsProfile.hasPartitionSpec()) { - result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); - } - if (gmsProfile.hasCustomProperties()) { - result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); - } - if (gmsProfile.hasNumAffectedRows()) { - result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); - } - if (gmsProfile.hasAffectedDatasets()) { - result.setAffectedDatasets(gmsProfile.getAffectedDatasets().stream().map(Urn::toString).collect(Collectors.toList())); - } - - return result; + result.setTimestampMillis(gmsProfile.getTimestampMillis()); + result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); + if (gmsProfile.hasActor()) { + result.setActor(gmsProfile.getActor().toString()); } + result.setOperationType( + OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); + result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); + if (gmsProfile.hasSourceType()) { + result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); + } + if (gmsProfile.hasPartitionSpec()) { + 
result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); + } + if (gmsProfile.hasCustomProperties()) { + result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); + } + if (gmsProfile.hasNumAffectedRows()) { + result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); + } + if (gmsProfile.hasAffectedDatasets()) { + result.setAffectedDatasets( + gmsProfile.getAffectedDatasets().stream() + .map(Urn::toString) + .collect(Collectors.toList())); + } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java index 181bdc176fb94..ea15aefdad3b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -10,51 +12,49 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class OwnerMapper { - public static final OwnerMapper INSTANCE = new OwnerMapper(); + public static final OwnerMapper INSTANCE = new OwnerMapper(); - public static Owner map(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(owner, entityUrn); + public static Owner map( + @Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(owner, entityUrn); + } + + public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + final Owner result = new Owner(); + // Deprecated + result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); + + if (owner.getTypeUrn() == null) { + OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); + owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); } - public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - final Owner result = new Owner(); - // Deprecated - result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); - - if (owner.getTypeUrn() == null) { - OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); - owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); - } - - if (owner.getTypeUrn() != null) { - OwnershipTypeEntity entity = new OwnershipTypeEntity(); - entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); - entity.setUrn(owner.getTypeUrn().toString()); - result.setOwnershipType(entity); - } - if (owner.getOwner().getEntityType().equals("corpuser")) { - CorpUser partialOwner = new CorpUser(); - partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } else { - CorpGroup partialOwner = new CorpGroup(); - 
partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } - if (owner.hasSource()) { - result.setSource(OwnershipSourceMapper.map(owner.getSource())); - } - result.setAssociatedUrn(entityUrn.toString()); - return result; + if (owner.getTypeUrn() != null) { + OwnershipTypeEntity entity = new OwnershipTypeEntity(); + entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); + entity.setUrn(owner.getTypeUrn().toString()); + result.setOwnershipType(entity); + } + if (owner.getOwner().getEntityType().equals("corpuser")) { + CorpUser partialOwner = new CorpUser(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } else { + CorpGroup partialOwner = new CorpGroup(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } + if (owner.hasSource()) { + result.setSource(OwnershipSourceMapper.map(owner.getSource())); } + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java index d978abee5bdfc..a38c16d02f121 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java @@ -1,56 +1,56 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import com.linkedin.common.urn.UrnUtils; -import javax.annotation.Nonnull; - import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; import com.linkedin.common.OwnershipType; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.OwnerUpdate; -import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupUtils; +import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import com.linkedin.common.urn.Urn; - import java.net.URISyntaxException; +import javax.annotation.Nonnull; public class OwnerUpdateMapper implements ModelMapper<OwnerUpdate, Owner> { - private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); - - public static Owner map(@Nonnull final OwnerUpdate input) { - return INSTANCE.apply(input); + private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); + + public static Owner map(@Nonnull final OwnerUpdate input) { + return INSTANCE.apply(input); + } + + @Override + public Owner apply(@Nonnull final OwnerUpdate input) { + final Owner owner = new Owner(); + try { + if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { + owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); + } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { + owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); } - - @Override - public Owner apply(@Nonnull final OwnerUpdate input) { - final Owner owner = new Owner(); - try { - if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { - owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); - } else if 
(Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { - owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); - } - } catch (URISyntaxException e) { - e.printStackTrace(); - } - if (input.getOwnershipTypeUrn() != null) { - owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - } - // For backwards compatibility we have to always set the deprecated type. - // If the type exists we assume it's an old ownership type that we can map to. - // Else if it's a net new custom ownership type set old type to CUSTOM. - OwnershipType type = input.getType() != null ? OwnershipType.valueOf(input.getType().toString()) + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + } + // For backwards compatibility we have to always set the deprecated type. + // If the type exists we assume it's an old ownership type that we can map to. + // Else if it's a net new custom ownership type set old type to CUSTOM. + OwnershipType type = + input.getType() != null + ? OwnershipType.valueOf(input.getType().toString()) : OwnershipType.CUSTOM; - owner.setType(type); - - if (input.getOwnershipTypeUrn() != null) { - owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - owner.setType(OwnershipType.CUSTOM); - } + owner.setType(type); - owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); - return owner; + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + owner.setType(OwnershipType.CUSTOM); } + + owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); + return owner; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java index 6614cfb28a478..31f637a047798 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java @@ -2,30 +2,31 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Ownership; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class OwnershipMapper { - public static final OwnershipMapper INSTANCE = new OwnershipMapper(); + public static final OwnershipMapper INSTANCE = new OwnershipMapper(); - public static Ownership map(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(ownership, entityUrn); - } + public static Ownership map( + @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(ownership, entityUrn); + } - public Ownership apply(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - final Ownership result = new Ownership(); - result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); - result.setOwners(ownership.getOwners() - .stream() - .map(owner -> OwnerMapper.map(owner, entityUrn)) - .collect(Collectors.toList())); - return result; - } + public Ownership apply( + @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + final Ownership result = new Ownership(); + result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); + result.setOwners( + ownership.getOwners().stream() + .map(owner -> OwnerMapper.map(owner, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java index abcc67c35f92a..75eaffb850a8b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java @@ -3,28 +3,28 @@ import com.linkedin.datahub.graphql.generated.OwnershipSource; import com.linkedin.datahub.graphql.generated.OwnershipSourceType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class OwnershipSourceMapper implements ModelMapper<com.linkedin.common.OwnershipSource, OwnershipSource> { +public class OwnershipSourceMapper + implements ModelMapper<com.linkedin.common.OwnershipSource, OwnershipSource> { - public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); + public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); - public static OwnershipSource map(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - return INSTANCE.apply(ownershipSource); - } + public static OwnershipSource map( + @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + return INSTANCE.apply(ownershipSource); + } - @Override - public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - final OwnershipSource result = new OwnershipSource(); - result.setUrl(ownershipSource.getUrl()); - result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); - return result; - } + @Override + public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + final OwnershipSource result = new OwnershipSource(); + result.setUrl(ownershipSource.getUrl()); + result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java index 1162c69d74938..97afbc7ddf855 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java @@ -1,7 +1,5 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import java.util.stream.Collectors; - import com.linkedin.common.AuditStamp; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -9,31 +7,30 @@ import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.generated.OwnershipUpdate; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; public class OwnershipUpdateMapper implements InputModelMapper<OwnershipUpdate, Ownership, Urn> { - private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); + private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); - public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - return INSTANCE.apply(input, actor); - } + public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + return INSTANCE.apply(input, actor); + } - @Override - public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - final Ownership ownership = new Ownership(); + @Override + public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + final Ownership ownership = new Ownership(); - ownership.setOwners(new OwnerArray(input.getOwners() - .stream() - .map(OwnerUpdateMapper::map) - .collect(Collectors.toList()))); + ownership.setOwners( + new OwnerArray( + 
input.getOwners().stream().map(OwnerUpdateMapper::map).collect(Collectors.toList()))); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - ownership.setLastModified(auditStamp); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + ownership.setLastModified(auditStamp); - return ownership; - } + return ownership; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index f3ac008734339..e2d29d0297449 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps GraphQL SearchFlags to Pegasus * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class SearchFlagsInputMapper implements ModelMapper<SearchFlags, com.linkedin.metadata.query.SearchFlags> { +public class SearchFlagsInputMapper + implements ModelMapper<SearchFlags, com.linkedin.metadata.query.SearchFlags> { public static final SearchFlagsInputMapper INSTANCE = new SearchFlagsInputMapper(); - public static com.linkedin.metadata.query.SearchFlags map(@Nonnull final SearchFlags searchFlags) { + public static com.linkedin.metadata.query.SearchFlags map( + @Nonnull final SearchFlags searchFlags) { return INSTANCE.apply(searchFlags); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java index 942171017cea4..0758daf5df2e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java @@ -5,13 +5,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ -public class SiblingsMapper implements ModelMapper<com.linkedin.common.Siblings, SiblingProperties> { +public class SiblingsMapper + implements ModelMapper<com.linkedin.common.Siblings, SiblingProperties> { public static final SiblingsMapper INSTANCE = new SiblingsMapper(); @@ -23,10 +23,8 @@ public static SiblingProperties map(@Nonnull final com.linkedin.common.Siblings public SiblingProperties apply(@Nonnull final com.linkedin.common.Siblings siblings) { final SiblingProperties result = new SiblingProperties(); result.setIsPrimary(siblings.isPrimary()); - result.setSiblings(siblings.getSiblings() - .stream() - .map(UrnToEntityMapper::map) - .collect(Collectors.toList())); + result.setSiblings( + siblings.getSiblings().stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java index 25d01d8de0e4c..2d1efdffc496c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java @@ -2,21 +2,20 @@ import com.linkedin.datahub.graphql.generated.Status; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; public class StatusMapper implements ModelMapper<com.linkedin.common.Status, Status> { - public static final StatusMapper INSTANCE = new StatusMapper(); + public static final StatusMapper INSTANCE = new StatusMapper(); - public static Status map(@Nonnull final com.linkedin.common.Status metadata) { - return INSTANCE.apply(metadata); - } + public static Status map(@Nonnull final com.linkedin.common.Status metadata) { + return INSTANCE.apply(metadata); + } - @Override - public Status apply(@Nonnull final com.linkedin.common.Status input) { - final Status result = new Status(); - result.setRemoved(input.isRemoved()); - return result; - } + @Override + public Status apply(@Nonnull final com.linkedin.common.Status input) { + final Status result = new Status(); + result.setRemoved(input.isRemoved()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java index 32c49a2010414..0e8d6822b7d09 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java @@ -7,29 +7,28 @@ import java.util.Map; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class StringMapMapper implements ModelMapper<Map<String, String>, List<StringMapEntry>> { - public static final StringMapMapper INSTANCE = new StringMapMapper(); + public static final StringMapMapper INSTANCE = new StringMapMapper(); - public static List<StringMapEntry> map(@Nonnull final Map<String, String> input) { - return INSTANCE.apply(input); - } + public static List<StringMapEntry> map(@Nonnull final Map<String, String> input) { + return INSTANCE.apply(input); + } - @Override - public List<StringMapEntry> apply(@Nonnull final Map<String, String> input) { - List<StringMapEntry> results = new ArrayList<>(); - for (String key : input.keySet()) { - final StringMapEntry entry = new StringMapEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - results.add(entry); - } - return results; + @Override + public List<StringMapEntry> apply(@Nonnull final Map<String, String> input) { + List<StringMapEntry> results = new ArrayList<>(); + for (String key : input.keySet()) { + final StringMapEntry entry = new StringMapEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java index 9aa94eae62999..55294e4b46822 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -5,17 +5,20 @@ import java.util.ArrayList; import javax.annotation.Nonnull; -public class SubTypesMapper implements ModelMapper<SubTypes, com.linkedin.datahub.graphql.generated.SubTypes> { +public class SubTypesMapper + implements ModelMapper<SubTypes, com.linkedin.datahub.graphql.generated.SubTypes> { public static final SubTypesMapper INSTANCE = new SubTypesMapper(); - public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) { + public static com.linkedin.datahub.graphql.generated.SubTypes map( + @Nonnull final SubTypes metadata) { return INSTANCE.apply(metadata); } @Override public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { - final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); + final com.linkedin.datahub.graphql.generated.SubTypes result = + new com.linkedin.datahub.graphql.generated.SubTypes(); result.setTypeNames(new ArrayList<>(input.getTypeNames())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java index 8359f1ec86f34..4fdf7edea07d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java @@ -4,22 +4,24 @@ import java.util.List; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class UpstreamLineagesMapper { public static final UpstreamLineagesMapper INSTANCE = new UpstreamLineagesMapper(); - public static List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> map(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + public static List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> map( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { return INSTANCE.apply(upstreamLineage); } - public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { - if (!upstreamLineage.hasFineGrainedLineages() || upstreamLineage.getFineGrainedLineages() == null) { + public List<com.linkedin.datahub.graphql.generated.FineGrainedLineage> apply( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + if (!upstreamLineage.hasFineGrainedLineages() + || upstreamLineage.getFineGrainedLineages() == null) { return new ArrayList<>(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 34bf56a396b62..4c452af126201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.Chart; @@ -35,10 +37,7 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class UrnToEntityMapper implements ModelMapper<com.linkedin.common.urn.Urn, Entity> { +public class UrnToEntityMapper implements ModelMapper<com.linkedin.common.urn.Urn, Entity> { public static final UrnToEntityMapper INSTANCE = new UrnToEntityMapper(); public static Entity map(@Nonnull final com.linkedin.common.urn.Urn urn) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java index 1e284efdb610f..0b156f11e8834 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java @@ -7,14 +7,10 @@ import lombok.AllArgsConstructor; import lombok.Getter; - @AllArgsConstructor public class MappingHelper<O> { - @Nonnull - private final EnvelopedAspectMap _aspectMap; - @Getter - @Nonnull - private final O result; + @Nonnull private final EnvelopedAspectMap _aspectMap; + @Getter @Nonnull private final O result; public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer<O, DataMap> consumer) { if (_aspectMap.containsKey(aspectName)) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java index 7d1b374e1f9b6..00e339a0320ef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java @@ -5,7 +5,6 @@ import lombok.Getter; import lombok.Setter; - @Data @Setter @Getter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java index d08300d648c32..46df032cbffbf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java @@ -1,19 +1,17 @@ package com.linkedin.datahub.graphql.types.common.mappers.util; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; + import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.mxe.SystemMetadata; - import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtils { - private SystemMetadataUtils() { - } + private SystemMetadataUtils() {} @Nullable public static Long getLastIngestedTime(@Nonnull EnvelopedAspectMap aspectMap) { @@ -28,7 +26,8 @@ public static String getLastIngestedRunId(@Nonnull EnvelopedAspectMap aspectMap) } /** - * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects present for the entity. + * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects + * present for the entity. */ @Nonnull public static List<RunInfo> getLastIngestionRuns(@Nonnull EnvelopedAspectMap aspectMap) { @@ -36,12 +35,16 @@ public static List<RunInfo> getLastIngestionRuns(@Nonnull EnvelopedAspectMap asp for (String aspect : aspectMap.keySet()) { if (aspectMap.get(aspect).hasSystemMetadata()) { SystemMetadata systemMetadata = aspectMap.get(aspect).getSystemMetadata(); - if (systemMetadata.hasLastRunId() && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + if (systemMetadata.hasLastRunId() + && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getLastRunId(); RunInfo run = new RunInfo(runId, lastObserved); runs.add(run); - } else if (systemMetadata.hasRunId() && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + } else if (systemMetadata.hasRunId() + && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { // Handle the legacy case: Check original run ids. 
Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getRunId(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java index 108aa7ed5b0c9..606cebba0880f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java @@ -6,7 +6,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import lombok.AllArgsConstructor; - @AllArgsConstructor public class UpdateMappingHelper { private final String entityName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index 20cfe6ac46127..1200493666a59 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -18,8 +18,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.net.URISyntaxException; @@ -33,31 +33,31 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -public class ContainerType implements SearchableEntityType<Container, String>, +public class ContainerType + implements SearchableEntityType<Container, String>, com.linkedin.datahub.graphql.types.EntityType<Container, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME, - Constants.SUB_TYPES_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.GLOSSARY_TERMS_ASPECT_NAME, - Constants.CONTAINER_ASPECT_NAME, - Constants.DOMAINS_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.DATA_PRODUCTS_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + Constants.CONTAINER_PROPERTIES_ASPECT_NAME, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME, + Constants.SUB_TYPES_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + Constants.CONTAINER_ASPECT_NAME, + Constants.DOMAINS_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.DATA_PRODUCTS_ASPECT_NAME); private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "container"; private final EntityClient _entityClient; - public ContainerType(final EntityClient entityClient) { + public ContainerType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -77,28 +77,30 
@@ public Class<Container> objectClass() { } @Override - public List<DataFetcherResult<Container>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> containerUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Container>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> containerUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.CONTAINER_ENTITY_NAME, - new HashSet<>(containerUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.CONTAINER_ENTITY_NAME, + new HashSet<>(containerUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); for (Urn urn : containerUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Container>newResult() - .data(ContainerMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<Container>newResult() + .data(ContainerMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Container", e); @@ -114,24 +116,36 @@ private Urn getUrn(final String urnStr) { } @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index b81259e78be3e..07594c53c6831 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.container.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -15,11 +17,11 @@ import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -33,9 +35,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class ContainerMapper { @Nullable @@ -49,46 +48,61 @@ public static Container map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.CONTAINER); - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } - final EnvelopedAspect envelopedContainerProperties = aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedContainerProperties = + aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); if (envelopedContainerProperties != null) { - result.setProperties(mapContainerProperties(new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); + result.setProperties( + mapContainerProperties( + new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedEditableContainerProperties = aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedEditableContainerProperties = + aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); if (envelopedEditableContainerProperties != null) { - result.setEditableProperties(mapContainerEditableProperties(new EditableContainerProperties(envelopedEditableContainerProperties.getValue().data()))); + result.setEditableProperties( + 
mapContainerEditableProperties( + new EditableContainerProperties( + envelopedEditableContainerProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } final EnvelopedAspect statusAspect = aspects.get(Constants.STATUS_ASPECT_NAME); @@ -103,12 +117,13 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); if (envelopedContainer != null) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(envelopedContainer.getValue().data()); - result.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(envelopedContainer.getValue().data()); + result.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } final EnvelopedAspect envelopedDomains = aspects.get(Constants.DOMAINS_ASPECT_NAME); @@ -120,21 +135,25 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); + result.setDeprecation( + DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); } return result; } - private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties(final ContainerProperties gmsProperties, Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = new com.linkedin.datahub.graphql.generated.ContainerProperties(); + private static 
com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties( + final ContainerProperties gmsProperties, Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.ContainerProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { propertiesResult.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - propertiesResult.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + propertiesResult.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } if (gmsProperties.hasQualifiedName()) { propertiesResult.setQualifiedName(gmsProperties.getQualifiedName().toString()); @@ -143,10 +162,11 @@ private static com.linkedin.datahub.graphql.generated.ContainerProperties mapCon return propertiesResult; } - private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties mapContainerEditableProperties( - final EditableContainerProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.ContainerEditableProperties editableContainerProperties = - new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties + mapContainerEditableProperties(final EditableContainerProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.ContainerEditableProperties + editableContainerProperties = + new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); editableContainerProperties.setDescription(gmsProperties.getDescription()); return editableContainerProperties; } @@ -158,5 +178,5 @@ private static DataPlatform mapPlatform(final DataPlatformInstance platformInsta return dummyPlatform; } - private ContainerMapper() { } + private ContainerMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 285a119be0d43..371cf6b280c20 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.corpgroup; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -7,8 +12,6 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -27,8 +30,8 @@ import 
com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -42,155 +45,193 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - -public class CorpGroupType implements SearchableEntityType<CorpGroup, String>, MutableType<CorpGroupUpdateInput, CorpGroup> { - - private final EntityClient _entityClient; - - public CorpGroupType(final EntityClient entityClient) { - _entityClient = entityClient; +public class CorpGroupType + implements SearchableEntityType<CorpGroup, String>, + MutableType<CorpGroupUpdateInput, CorpGroup> { + + private final EntityClient _entityClient; + + public CorpGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<CorpGroup> objectClass() { + return CorpGroup.class; + } + + public Class<CorpGroupUpdateInput> inputClass() { + return CorpGroupUpdateInput.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_GROUP; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List<DataFetcherResult<CorpGroup>> batchLoad( + final List<String> urns, final QueryContext context) { + try { + final List<Urn> corpGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map<Urn, EntityResponse> corpGroupMap = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + new HashSet<>(corpGroupUrns), + null, + context.getAuthentication()); + + final List<EntityResponse> results = new ArrayList<>(); + for (Urn urn : corpGroupUrns) { + results.add(corpGroupMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpGroup -> + gmsCorpGroup == null + ? 
null + : DataFetcherResult.<CorpGroup>newResult() + .data(CorpGroupMapper.map(gmsCorpGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load CorpGroup", e); } - - @Override - public Class<CorpGroup> objectClass() { - return CorpGroup.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + "corpGroup", + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("corpGroup", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public CorpGroup update( + @Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Urn groupUrn = Urn.createFromString(urn); + Map<Urn, EntityResponse> gmsResponse = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + ImmutableSet.of(groupUrn), + ImmutableSet.of(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), + context.getAuthentication()); + + CorpGroupEditableInfo existingCorpGroupEditableInfo = null; + if (gmsResponse.containsKey(groupUrn) + && gmsResponse + .get(groupUrn) + .getAspects() + .containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { + existingCorpGroupEditableInfo = + new CorpGroupEditableInfo( + gmsResponse + .get(groupUrn) + .getAspects() + .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME) + .getValue() + .data()); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + return load(urn, context).getData(); } - - public Class<CorpGroupUpdateInput> inputClass() { - return CorpGroupUpdateInput.class; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpGroupUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final CorpGroupUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getDescription() != null) { + // Requires the Update Docs privilege. 
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
+    } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) {
+      // Requires the Update Contact info privilege.
+      specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType());
     }
-    @Override
-    public EntityType type() {
-        return EntityType.CORP_GROUP;
-    }
+    final ConjunctivePrivilegeGroup specificPrivilegeGroup =
+        new ConjunctivePrivilegeGroup(specificPrivileges);
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
+    // If you either have all entity privileges, or have the specific privileges required, you are
+    // authorized.
+    return new DisjunctivePrivilegeGroup(
+        ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup));
+  }
-    @Override
-    public List<DataFetcherResult<CorpGroup>> batchLoad(final List<String> urns, final QueryContext context) {
-        try {
-            final List<Urn> corpGroupUrns = urns
-                .stream()
-                .map(UrnUtils::getUrn)
-                .collect(Collectors.toList());
-
-            final Map<Urn, EntityResponse> corpGroupMap = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME,
-                new HashSet<>(corpGroupUrns), null, context.getAuthentication());
-
-            final List<EntityResponse> results = new ArrayList<>();
-            for (Urn urn : corpGroupUrns) {
-                results.add(corpGroupMap.getOrDefault(urn, null));
-            }
-            return results.stream()
-                .map(gmsCorpGroup -> gmsCorpGroup == null ? null
-                    : DataFetcherResult.<CorpGroup>newResult().data(CorpGroupMapper.map(gmsCorpGroup)).build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load CorpGroup", e);
-        }
-    }
+  private RecordTemplate mapCorpGroupEditableInfo(
+      CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) {
+    CorpGroupEditableInfo result = existing != null ? existing : new CorpGroupEditableInfo();
-    @Override
-    public SearchResults search(@Nonnull String query,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final SearchResult
-            searchResult = _entityClient.search("corpGroup", query, Collections.emptyMap(), start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
+    if (input.getDescription() != null) {
+      result.setDescription(input.getDescription());
     }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-                                            @Nullable String field,
-                                            @Nullable Filter filters,
-                                            int limit,
-                                            @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("corpGroup", query, filters, limit,
-            context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
+    if (input.getSlack() != null) {
+      result.setSlack(input.getSlack());
     }
-
-    @Override
-    public CorpGroup update(@Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) throws Exception {
-        if (isAuthorizedToUpdate(urn, input, context)) {
-            // Get existing editable info to merge with
-            Urn groupUrn = Urn.createFromString(urn);
-            Map<Urn, EntityResponse> gmsResponse =
-                _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, ImmutableSet.of(groupUrn), ImmutableSet.of(
-                    CORP_GROUP_EDITABLE_INFO_ASPECT_NAME),
-                    context.getAuthentication());
-
-            CorpGroupEditableInfo existingCorpGroupEditableInfo = null;
-            if (gmsResponse.containsKey(groupUrn) && gmsResponse.get(groupUrn).getAspects().containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) {
-                existingCorpGroupEditableInfo = new CorpGroupEditableInfo(gmsResponse.get(groupUrn).getAspects()
-                    .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME).getValue().data());
-            }
-
-            // Create the MCP
-            final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn),
-                CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo));
-            _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-
-            return load(urn, context).getData();
-        }
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    }
-
-    private boolean isAuthorizedToUpdate(String urn, CorpGroupUpdateInput input, QueryContext context) {
-        // Decide whether the current principal should be allowed to update the Dataset.
-        final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input);
-        return AuthorizationUtils.isAuthorized(
-            context.getAuthorizer(),
-            context.getAuthentication().getActor().toUrnStr(),
-            PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(),
-            urn,
-            orPrivilegeGroups);
-    }
-
-    private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpGroupUpdateInput updateInput) {
-        final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(
-            PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
-        ));
-
-        List<String> specificPrivileges = new ArrayList<>();
-        if (updateInput.getDescription() != null) {
-            // Requires the Update Docs privilege.
-            specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
-        } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) {
-            // Requires the Update Contact info privilege.
-            specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType());
-        }
-
-        final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges);
-
-        // If you either have all entity privileges, or have the specific privileges required, you are authorized.
-        return new DisjunctivePrivilegeGroup(ImmutableList.of(
-            allPrivilegesGroup,
-            specificPrivilegeGroup
-        ));
-    }
-
-    private RecordTemplate mapCorpGroupEditableInfo(CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) {
-        CorpGroupEditableInfo result = existing != null ? existing : new CorpGroupEditableInfo();
-
-        if (input.getDescription() != null) {
-            result.setDescription(input.getDescription());
-        }
-        if (input.getSlack() != null) {
-            result.setSlack(input.getSlack());
-        }
-        if (input.getEmail() != null) {
-            result.setEmail(input.getEmail());
-        }
-        return result;
+    if (input.getEmail() != null) {
+      result.setEmail(input.getEmail());
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java
index c1cd33b0077f6..318506d9d61fa 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java
@@ -1,21 +1,21 @@
 package com.linkedin.datahub.graphql.types.corpgroup;
 
-import java.net.URISyntaxException;
-
 import com.linkedin.common.urn.CorpGroupUrn;
+import java.net.URISyntaxException;
 
 public class CorpGroupUtils {
 
-    private CorpGroupUtils() { }
+  private CorpGroupUtils() {}
 
-    public static CorpGroupUrn getCorpGroupUrn(final String urnStr) {
-        if (urnStr == null) {
-            return null;
-        }
-        try {
-            return CorpGroupUrn.createFromString(urnStr);
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(String.format("Failed to create CorpGroupUrn from string %s", urnStr), e);
-        }
+  public static CorpGroupUrn getCorpGroupUrn(final String urnStr) {
+    if (urnStr == null) {
+      return null;
+    }
+    try {
+      return CorpGroupUrn.createFromString(urnStr);
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(
+          String.format("Failed to create CorpGroupUrn from string %s", urnStr), e);
     }
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java
index f476794bc545e..a6e14535cf0b7 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java
@@ -3,28 +3,32 @@
 import com.linkedin.data.template.GetMode;
 import com.linkedin.datahub.graphql.generated.CorpGroupEditableProperties;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
  */
-public class CorpGroupEditablePropertiesMapper implements ModelMapper<com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> {
+public class CorpGroupEditablePropertiesMapper
+    implements ModelMapper<
+        com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> {
 
-  public static final CorpGroupEditablePropertiesMapper INSTANCE = new CorpGroupEditablePropertiesMapper();
+  public static final CorpGroupEditablePropertiesMapper INSTANCE =
+      new CorpGroupEditablePropertiesMapper();
 
-  public static CorpGroupEditableProperties map(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) {
+  public static CorpGroupEditableProperties map(
+      @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) {
     return INSTANCE.apply(corpGroupEditableInfo);
   }
 
   @Override
-  public CorpGroupEditableProperties apply(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) {
+  public CorpGroupEditableProperties apply(
+      @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) {
     final CorpGroupEditableProperties result = new CorpGroupEditableProperties();
     result.setDescription(corpGroupEditableInfo.getDescription(GetMode.DEFAULT));
     result.setSlack(corpGroupEditableInfo.getSlack(GetMode.DEFAULT));
     result.setEmail(corpGroupEditableInfo.getEmail(GetMode.DEFAULT));
     return result;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java
index 3d2d4aea2b001..04d0cc8ce94e6 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java
@@ -1,48 +1,58 @@
 package com.linkedin.datahub.graphql.types.corpgroup.mappers;
 
-import com.linkedin.datahub.graphql.generated.CorpUser;
 import com.linkedin.datahub.graphql.generated.CorpGroupInfo;
+import com.linkedin.datahub.graphql.generated.CorpUser;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
-import javax.annotation.Nonnull;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
  */
-public class CorpGroupInfoMapper implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupInfo> {
+public class CorpGroupInfoMapper
+    implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupInfo> {
 
-    public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper();
+  public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper();
 
-    public static CorpGroupInfo map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) {
-        return INSTANCE.apply(corpGroupInfo);
-    }
+  public static CorpGroupInfo map(
+      @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) {
+    return INSTANCE.apply(corpGroupInfo);
+  }
 
-    @Override
-    public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) {
-        final CorpGroupInfo result = new CorpGroupInfo();
-        result.setEmail(info.getEmail());
-        result.setDescription(info.getDescription());
-        result.setDisplayName(info.getDisplayName());
-        if (info.hasAdmins()) {
-            result.setAdmins(info.getAdmins().stream().map(urn -> {
-                final CorpUser corpUser = new CorpUser();
-                corpUser.setUrn(urn.toString());
-                return corpUser;
-            }).collect(Collectors.toList()));
-        }
-        if (info.hasMembers()) {
-            result.setMembers(info.getMembers().stream().map(urn -> {
-                final CorpUser corpUser = new CorpUser();
-                corpUser.setUrn(urn.toString());
-                return corpUser;
-            }).collect(Collectors.toList()));
-        }
-        if (info.hasGroups()) {
-            result.setGroups(info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList()));
-        }
-        return result;
+  @Override
+  public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) {
+    final CorpGroupInfo result = new CorpGroupInfo();
+    result.setEmail(info.getEmail());
+    result.setDescription(info.getDescription());
+    result.setDisplayName(info.getDisplayName());
+    if (info.hasAdmins()) {
+      result.setAdmins(
+          info.getAdmins().stream()
+              .map(
+                  urn -> {
+                    final CorpUser corpUser = new CorpUser();
+                    corpUser.setUrn(urn.toString());
+                    return corpUser;
+                  })
+              .collect(Collectors.toList()));
+    }
+    if (info.hasMembers()) {
+      result.setMembers(
+          info.getMembers().stream()
+              .map(
+                  urn -> {
+                    final CorpUser corpUser = new CorpUser();
+                    corpUser.setUrn(urn.toString());
+                    return corpUser;
+                  })
+              .collect(Collectors.toList()));
+    }
+    if (info.hasGroups()) {
+      result.setGroups(
+          info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList()));
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java
index 0fb1b66c644d7..52e200d19923a 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.corpgroup.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.Origin;
 import com.linkedin.common.Ownership;
 import com.linkedin.common.urn.Urn;
@@ -16,78 +18,79 @@
 import com.linkedin.metadata.key.CorpGroupKey;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
  */
 public class CorpGroupMapper implements ModelMapper<EntityResponse, CorpGroup> {
 
-    public static final CorpGroupMapper INSTANCE = new CorpGroupMapper();
+  public static final CorpGroupMapper INSTANCE = new CorpGroupMapper();
 
-    public static CorpGroup map(@Nonnull final EntityResponse entityResponse) {
-        return INSTANCE.apply(entityResponse);
-    }
+  public static CorpGroup map(@Nonnull final EntityResponse entityResponse) {
+    return INSTANCE.apply(entityResponse);
+  }
 
-    @Override
-    public CorpGroup apply(@Nonnull final EntityResponse entityResponse) {
-        final CorpGroup result = new CorpGroup();
-        Urn entityUrn = entityResponse.getUrn();
+  @Override
+  public CorpGroup apply(@Nonnull final EntityResponse entityResponse) {
+    final CorpGroup result = new CorpGroup();
+    Urn entityUrn = entityResponse.getUrn();
 
-        result.setUrn(entityResponse.getUrn().toString());
-        result.setType(EntityType.CORP_GROUP);
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        MappingHelper<CorpGroup> mappingHelper = new MappingHelper<>(aspectMap, result);
-        mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey);
-        mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo);
-        mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo);
-        mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn));
-        if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) {
-            mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType);
-        } else {
-            com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin =
-                new com.linkedin.datahub.graphql.generated.Origin();
-            mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN);
-            result.setOrigin(mappedGroupOrigin);
-        }
-        return mappingHelper.getResult();
+    result.setUrn(entityResponse.getUrn().toString());
+    result.setType(EntityType.CORP_GROUP);
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    MappingHelper<CorpGroup> mappingHelper = new MappingHelper<>(aspectMap, result);
+    mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey);
+    mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo);
+    mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo);
+    mappingHelper.mapToResult(
+        OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn));
+    if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) {
+      mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType);
+    } else {
+      com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin =
+          new com.linkedin.datahub.graphql.generated.Origin();
+      mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN);
+      result.setOrigin(mappedGroupOrigin);
     }
+    return mappingHelper.getResult();
+  }
 
-    private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
-        CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap);
-        corpGroup.setName(corpGroupKey.getName());
-    }
+  private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
+    CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap);
+    corpGroup.setName(corpGroupKey.getName());
+  }
 
-    private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
-        CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap);
-        corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo));
-        corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo));
-    }
+  private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
+    CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap);
+    corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo));
+    corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo));
+  }
 
-    private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
-        corpGroup.setEditableProperties(CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap)));
-    }
+  private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
+    corpGroup.setEditableProperties(
+        CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap)));
+  }
 
-    private void mapOwnership(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) {
-        corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn));
-    }
+  private void mapOwnership(
+      @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) {
+    corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn));
+  }
 
-    private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
-        Origin groupOrigin = new Origin(dataMap);
-        com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin =
-            new com.linkedin.datahub.graphql.generated.Origin();
-        if (groupOrigin.hasType()) {
-            mappedGroupOrigin.setType(
-                com.linkedin.datahub.graphql.generated.OriginType.valueOf(groupOrigin.getType().toString()));
-        } else {
-            mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN);
-        }
-        if (groupOrigin.hasExternalType()) {
-            mappedGroupOrigin.setExternalType(groupOrigin.getExternalType());
-        }
-        corpGroup.setOrigin(mappedGroupOrigin);
+  private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) {
+    Origin groupOrigin = new Origin(dataMap);
+    com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin =
+        new com.linkedin.datahub.graphql.generated.Origin();
+    if (groupOrigin.hasType()) {
+      mappedGroupOrigin.setType(
+          com.linkedin.datahub.graphql.generated.OriginType.valueOf(
+              groupOrigin.getType().toString()));
+    } else {
+      mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN);
+    }
+    if (groupOrigin.hasExternalType()) {
+      mappedGroupOrigin.setExternalType(groupOrigin.getExternalType());
     }
+    corpGroup.setOrigin(mappedGroupOrigin);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java
index 266d8be67cb06..29d0482863971 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java
@@ -3,19 +3,20 @@
 import com.linkedin.data.template.GetMode;
 import com.linkedin.datahub.graphql.generated.CorpGroupProperties;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
  */
-public class CorpGroupPropertiesMapper implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupProperties> {
+public class CorpGroupPropertiesMapper
+    implements ModelMapper<com.linkedin.identity.CorpGroupInfo, CorpGroupProperties> {
 
   public static final CorpGroupPropertiesMapper INSTANCE = new CorpGroupPropertiesMapper();
 
-  public static CorpGroupProperties map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) {
+  public static CorpGroupProperties map(
+      @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) {
     return INSTANCE.apply(corpGroupInfo);
   }
 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java
index db2b49c790f57..5749eef970fce 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.types.corpuser;
 
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
+import com.datahub.authorization.ConjunctivePrivilegeGroup;
+import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.url.Url;
 import com.linkedin.common.urn.Urn;
@@ -8,8 +13,6 @@
 import com.linkedin.data.template.StringArray;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
-import com.datahub.authorization.ConjunctivePrivilegeGroup;
-import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.featureflags.FeatureFlags;
 import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
@@ -29,8 +32,8 @@
 import com.linkedin.identity.CorpUserEditableInfo;
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.execution.DataFetcherResult;
@@ -45,176 +48,206 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-public class CorpUserType implements SearchableEntityType<CorpUser, String>, MutableType<CorpUserUpdateInput, CorpUser> {
+public class CorpUserType
+    implements SearchableEntityType<CorpUser, String>, MutableType<CorpUserUpdateInput, CorpUser> {
+
+  private final EntityClient _entityClient;
+  private final FeatureFlags _featureFlags;
+
+  public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) {
+    _entityClient = entityClient;
+    _featureFlags = featureFlags;
+  }
+
+  @Override
+  public Class<CorpUser> objectClass() {
+    return CorpUser.class;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.CORP_USER;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public List<DataFetcherResult<CorpUser>> batchLoad(
+      final List<String> urns, final QueryContext context) {
+    try {
+      final List<Urn> corpUserUrns =
+          urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+      final Map<Urn, EntityResponse> corpUserMap =
+          _entityClient.batchGetV2(
+              CORP_USER_ENTITY_NAME,
+              new HashSet<>(corpUserUrns),
+              null,
+              context.getAuthentication());
+
+      final List<EntityResponse> results = new ArrayList<>();
+      for (Urn urn : corpUserUrns) {
+        results.add(corpUserMap.getOrDefault(urn, null));
+      }
+      return results.stream()
+          .map(
+              gmsCorpUser ->
+                  gmsCorpUser == null
+                      ? null
+                      : DataFetcherResult.<CorpUser>newResult()
+                          .data(CorpUserMapper.map(gmsCorpUser, _featureFlags))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load Datasets", e);
+    }
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final SearchResult searchResult =
+        _entityClient.search(
+            "corpuser",
+            query,
+            Collections.emptyMap(),
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
+
+  public Class<CorpUserUpdateInput> inputClass() {
+    return CorpUserUpdateInput.class;
+  }
+
+  @Override
+  public CorpUser update(
+      @Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context)
+      throws Exception {
+    if (isAuthorizedToUpdate(urn, input, context)) {
+      // Get existing editable info to merge with
+      Optional<CorpUserEditableInfo> existingCorpUserEditableInfo =
+          _entityClient.getVersionedAspect(
+              urn,
+              CORP_USER_EDITABLE_INFO_NAME,
+              0L,
+              CorpUserEditableInfo.class,
+              context.getAuthentication());
+
+      // Create the MCP
+      final MetadataChangeProposal proposal =
+          buildMetadataChangeProposalWithUrn(
+              UrnUtils.getUrn(urn),
+              CORP_USER_EDITABLE_INFO_NAME,
+              mapCorpUserEditableInfo(input, existingCorpUserEditableInfo));
+      _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+
+      return load(urn, context).getData();
+    }
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
+  }
+
+  private boolean isAuthorizedToUpdate(
+      String urn, CorpUserUpdateInput input, QueryContext context) {
+    // Decide whether the current principal should be allowed to update the Dataset.
+    final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input);
+
+    // Either the updating actor is the user, or the actor has privileges to update the user
+    // information.
+    return context.getActorUrn().equals(urn)
+        || AuthorizationUtils.isAuthorized(
+            context.getAuthorizer(),
+            context.getAuthentication().getActor().toUrnStr(),
+            PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(),
+            urn,
+            orPrivilegeGroups);
+  }
+
+  private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) {
+    final ConjunctivePrivilegeGroup allPrivilegesGroup =
+        new ConjunctivePrivilegeGroup(
+            ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()));
+
+    List<String> specificPrivileges = new ArrayList<>();
+    if (updateInput.getSlack() != null
+        || updateInput.getEmail() != null
+        || updateInput.getPhone() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType());
+    } else if (updateInput.getAboutMe() != null
+        || updateInput.getDisplayName() != null
+        || updateInput.getPictureLink() != null
+        || updateInput.getTeams() != null
+        || updateInput.getTitle() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType());
+    }
 
-    private final EntityClient _entityClient;
-    private final FeatureFlags _featureFlags;
+    final ConjunctivePrivilegeGroup specificPrivilegeGroup =
+        new ConjunctivePrivilegeGroup(specificPrivileges);
 
-    public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) {
-        _entityClient = entityClient;
-        _featureFlags = featureFlags;
-    }
+    // If you either have all entity privileges, or have the specific privileges required, you are
+    // authorized.
+    return new DisjunctivePrivilegeGroup(
+        ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup));
+  }
 
-    @Override
-    public Class<CorpUser> objectClass() {
-        return CorpUser.class;
+  private RecordTemplate mapCorpUserEditableInfo(
+      CorpUserUpdateInput input, Optional<CorpUserEditableInfo> existing) {
+    CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo());
+    if (input.getDisplayName() != null) {
+      result.setDisplayName(input.getDisplayName());
     }
-
-    @Override
-    public EntityType type() {
-        return EntityType.CORP_USER;
+    if (input.getAboutMe() != null) {
+      result.setAboutMe(input.getAboutMe());
    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
+    if (input.getPictureLink() != null) {
+      result.setPictureLink(new Url(input.getPictureLink()));
    }
-
-    @Override
-    public List<DataFetcherResult<CorpUser>> batchLoad(final List<String> urns, final QueryContext context) {
-        try {
-            final List<Urn> corpUserUrns = urns
-                .stream()
-                .map(UrnUtils::getUrn)
-                .collect(Collectors.toList());
-
-            final Map<Urn, EntityResponse> corpUserMap = _entityClient
-                .batchGetV2(CORP_USER_ENTITY_NAME, new HashSet<>(corpUserUrns), null,
-                    context.getAuthentication());
-
-            final List<EntityResponse> results = new ArrayList<>();
-            for (Urn urn : corpUserUrns) {
-                results.add(corpUserMap.getOrDefault(urn, null));
-            }
-            return results.stream()
-                .map(gmsCorpUser -> gmsCorpUser == null ? null
-                    : DataFetcherResult.<CorpUser>newResult().data(CorpUserMapper.map(gmsCorpUser, _featureFlags)).build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load Datasets", e);
-        }
+    if (input.getAboutMe() != null) {
+      result.setAboutMe(input.getAboutMe());
    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull final QueryContext context) throws Exception {
-        final SearchResult searchResult = _entityClient.search("corpuser", query, Collections.emptyMap(), start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
+    if (input.getSkills() != null) {
+      result.setSkills(new StringArray(input.getSkills()));
    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-                                            @Nullable String field,
-                                            @Nullable Filter filters,
-                                            int limit,
-                                            @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
+    if (input.getTeams() != null) {
+      result.setTeams(new StringArray(input.getTeams()));
    }
-
-    public Class<CorpUserUpdateInput> inputClass() {
-        return CorpUserUpdateInput.class;
+    if (input.getTitle() != null) {
+      result.setTitle(input.getTitle());
    }
-
-    @Override
-    public CorpUser update(@Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) throws Exception {
-        if (isAuthorizedToUpdate(urn, input, context)) {
-            // Get existing editable info to merge with
-            Optional<CorpUserEditableInfo> existingCorpUserEditableInfo =
-                _entityClient.getVersionedAspect(urn, CORP_USER_EDITABLE_INFO_NAME, 0L, CorpUserEditableInfo.class,
-                    context.getAuthentication());
-
-            // Create the MCP
-            final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn),
-                CORP_USER_EDITABLE_INFO_NAME, mapCorpUserEditableInfo(input, existingCorpUserEditableInfo));
-            _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-
-            return load(urn, context).getData();
-        }
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    if (input.getPhone() != null) {
+      result.setPhone(input.getPhone());
    }
-
-    private boolean isAuthorizedToUpdate(String urn, CorpUserUpdateInput input, QueryContext context) {
-        // Decide whether the current principal should be allowed to update the Dataset.
-        final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input);
-
-        // Either the updating actor is the user, or the actor has privileges to update the user information.
-        return context.getActorUrn().equals(urn) || AuthorizationUtils.isAuthorized(
-            context.getAuthorizer(),
-            context.getAuthentication().getActor().toUrnStr(),
-            PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(),
-            urn,
-            orPrivilegeGroups);
+    if (input.getSlack() != null) {
+      result.setSlack(input.getSlack());
    }
-
-    private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) {
-        final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(
-            PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
-        ));
-
-        List<String> specificPrivileges = new ArrayList<>();
-        if (updateInput.getSlack() != null
-            || updateInput.getEmail() != null
-            || updateInput.getPhone() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType());
-        } else if (updateInput.getAboutMe() != null
-            || updateInput.getDisplayName() != null
-            || updateInput.getPictureLink() != null
-            || updateInput.getTeams() != null
-            || updateInput.getTitle() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType());
-        }
-
-        final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges);
-
-        // If you either have all entity privileges, or have the specific privileges required, you are authorized.
-        return new DisjunctivePrivilegeGroup(ImmutableList.of(
-            allPrivilegesGroup,
-            specificPrivilegeGroup
-        ));
+    if (input.getEmail() != null) {
+      result.setEmail(input.getEmail());
    }
 
-    private RecordTemplate mapCorpUserEditableInfo(CorpUserUpdateInput input, Optional<CorpUserEditableInfo> existing) {
-        CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo());
-        if (input.getDisplayName() != null) {
-            result.setDisplayName(input.getDisplayName());
-        }
-        if (input.getAboutMe() != null) {
-            result.setAboutMe(input.getAboutMe());
-        }
-        if (input.getPictureLink() != null) {
-            result.setPictureLink(new Url(input.getPictureLink()));
-        }
-        if (input.getAboutMe() != null) {
-            result.setAboutMe(input.getAboutMe());
-        }
-        if (input.getSkills() != null) {
-            result.setSkills(new StringArray(input.getSkills()));
-        }
-        if (input.getTeams() != null) {
-            result.setTeams(new StringArray(input.getTeams()));
-        }
-        if (input.getTitle() != null) {
-            result.setTitle(input.getTitle());
-        }
-        if (input.getPhone() != null) {
-            result.setPhone(input.getPhone());
-        }
-        if (input.getSlack() != null) {
-            result.setSlack(input.getSlack());
-        }
-        if (input.getEmail() != null) {
-            result.setEmail(input.getEmail());
-        }
-
-        return result;
-    }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java
index 0b5b40c3117e0..9cf8da69281a9 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java
@@ -1,21 +1,21 @@
 package com.linkedin.datahub.graphql.types.corpuser;
 
-import java.net.URISyntaxException;
-
 import com.linkedin.common.urn.CorpuserUrn;
+import java.net.URISyntaxException;
 
 public class CorpUserUtils {
 
-    private CorpUserUtils() { }
+  private CorpUserUtils() {}
 
-    public static CorpuserUrn getCorpUserUrn(final String urnStr) {
-        if (urnStr == null) {
-            return null;
-        }
-        try {
-            return CorpuserUrn.createFromString(urnStr);
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(String.format("Failed to create CorpUserUrn from string %s", urnStr), e);
-        }
+  public static CorpuserUrn getCorpUserUrn(final String urnStr) {
+    if (urnStr == null) {
+      return null;
+    }
+    try {
+      return CorpuserUrn.createFromString(urnStr);
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(
+          String.format("Failed to create CorpUserUrn from string %s", urnStr), e);
     }
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java
index 2a9f0efd69bcc..3ee353293393e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java
@@ -2,36 +2,38 @@
 
 import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
  */
-public class CorpUserEditableInfoMapper implements ModelMapper<com.linkedin.identity.CorpUserEditableInfo, CorpUserEditableProperties> {
+public class CorpUserEditableInfoMapper
+    implements ModelMapper<com.linkedin.identity.CorpUserEditableInfo, CorpUserEditableProperties> {
 
-    public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper();
+  public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper();
 
-    public static CorpUserEditableProperties map(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) {
-        return INSTANCE.apply(info);
-    }
+  public static CorpUserEditableProperties map(
+      @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) {
+    return INSTANCE.apply(info);
+  }
 
-    @Override
-    public CorpUserEditableProperties apply(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) {
-        final CorpUserEditableProperties result = new CorpUserEditableProperties();
-        result.setDisplayName(info.getDisplayName());
-        result.setTitle(info.getTitle());
-        result.setAboutMe(info.getAboutMe());
-        result.setSkills(info.getSkills());
-        result.setTeams(info.getTeams());
-        result.setEmail(info.getEmail());
-        result.setPhone(info.getPhone());
-        result.setSlack(info.getSlack());
-        if (info.hasPictureLink()) {
-            result.setPictureLink(info.getPictureLink().toString());
-        }
-        return result;
+  @Override
+  public CorpUserEditableProperties apply(
+      @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) {
+    final CorpUserEditableProperties result = new CorpUserEditableProperties();
+    result.setDisplayName(info.getDisplayName());
+    result.setTitle(info.getTitle());
+    result.setAboutMe(info.getAboutMe());
+    result.setSkills(info.getSkills());
+    result.setTeams(info.getTeams());
+    result.setEmail(info.getEmail());
+    result.setPhone(info.getPhone());
+    result.setSlack(info.getSlack());
+    if (info.hasPictureLink()) {
+      result.setPictureLink(info.getPictureLink().toString());
    }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java
index 96f60c08cd7c2..9044f4d510bcf 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java
@@ -3,38 +3,38 @@
 import com.linkedin.datahub.graphql.generated.CorpUser;
 import com.linkedin.datahub.graphql.generated.CorpUserInfo;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
  */
-public class CorpUserInfoMapper implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserInfo> {
+public class CorpUserInfoMapper
+    implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserInfo> {
 
-    public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper();
+  public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper();
 
-    public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) {
-        return INSTANCE.apply(corpUserInfo);
-    }
+  public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) {
+    return INSTANCE.apply(corpUserInfo);
+  }
 
-    @Override
-    public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) {
-        final CorpUserInfo result = new CorpUserInfo();
-        result.setActive(info.isActive());
-        result.setCountryCode(info.getCountryCode());
-        result.setDepartmentId(info.getDepartmentId());
-        result.setDepartmentName(info.getDepartmentName());
-        result.setEmail(info.getEmail());
-        result.setDisplayName(info.getDisplayName());
-        result.setFirstName(info.getFirstName());
-        result.setLastName(info.getLastName());
-        result.setFullName(info.getFullName());
-        result.setTitle(info.getTitle());
-        if (info.hasManagerUrn()) {
-            result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build());
-        }
-        return result;
+  @Override
+  public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) {
+    final CorpUserInfo result = new CorpUserInfo();
+    result.setActive(info.isActive());
+    result.setCountryCode(info.getCountryCode());
+    result.setDepartmentId(info.getDepartmentId());
+    result.setDepartmentName(info.getDepartmentName());
+    result.setEmail(info.getEmail());
+    result.setDisplayName(info.getDisplayName());
+    result.setFirstName(info.getFirstName());
+    result.setLastName(info.getLastName());
+    result.setFullName(info.getFullName());
+    result.setTitle(info.getTitle());
+    if (info.hasManagerUrn()) {
+      result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build());
    }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java
index adcfb91c9cdf2..98783131a2d52 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.corpuser.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.DataMap;
@@ -26,120 +28,134 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
  * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
  *
- * To be replaced by auto-generated mappers implementations
+ * <p>To be replaced by auto-generated mappers implementations
  */
 public class CorpUserMapper {
 
-    public static final CorpUserMapper INSTANCE = new CorpUserMapper();
-
-    public static CorpUser map(@Nonnull final EntityResponse entityResponse) {
-        return INSTANCE.apply(entityResponse, null);
-    }
-
-    public static CorpUser map(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) {
-        return INSTANCE.apply(entityResponse, featureFlags);
-    }
-
-    public CorpUser apply(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) {
-        final CorpUser result = new CorpUser();
-        Urn entityUrn = entityResponse.getUrn();
-
-        result.setUrn(entityResponse.getUrn().toString());
-        result.setType(EntityType.CORP_USER);
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        MappingHelper<CorpUser> mappingHelper = new MappingHelper<>(aspectMap, result);
-        mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey);
-        mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn));
-        mappingHelper.mapToResult(CORP_USER_EDITABLE_INFO_ASPECT_NAME, (corpUser, dataMap) ->
-            corpUser.setEditableProperties(CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap))));
-        mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) ->
+  public static final CorpUserMapper INSTANCE = new CorpUserMapper();
+
+  public static CorpUser map(@Nonnull final EntityResponse entityResponse) {
+    return INSTANCE.apply(entityResponse, null);
+  }
+
+  public static CorpUser map(
+      @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) {
+    return INSTANCE.apply(entityResponse, featureFlags);
+  }
+
+  public CorpUser apply(
+      @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) {
+    final CorpUser result = new CorpUser();
+    Urn entityUrn = entityResponse.getUrn();
+
+    result.setUrn(entityResponse.getUrn().toString());
+    result.setType(EntityType.CORP_USER);
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    MappingHelper<CorpUser> mappingHelper = new MappingHelper<>(aspectMap, result);
+    mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey);
+    mappingHelper.mapToResult(
+        CORP_USER_INFO_ASPECT_NAME,
+        (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn));
+    mappingHelper.mapToResult(
+        CORP_USER_EDITABLE_INFO_ASPECT_NAME,
+        (corpUser, dataMap) ->
+            corpUser.setEditableProperties(
+                CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap))));
+    mappingHelper.mapToResult(
+        GLOBAL_TAGS_ASPECT_NAME,
+        (corpUser, dataMap) ->
            corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn)));
-        mappingHelper.mapToResult(CORP_USER_STATUS_ASPECT_NAME,
-            (corpUser, dataMap) -> corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap))));
-        mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser);
-
-        mapCorpUserSettings(result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags);
-
-        return mappingHelper.getResult();
+    mappingHelper.mapToResult(
+        CORP_USER_STATUS_ASPECT_NAME,
+        (corpUser, dataMap) ->
+            corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap))));
+    mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser);
+
+    mapCorpUserSettings(
+        result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags);
+
+    return mappingHelper.getResult();
+  }
+
+  private void mapCorpUserSettings(
+      @Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) {
+    CorpUserSettings corpUserSettings = new CorpUserSettings();
+    if (envelopedAspect != null) {
+      corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data());
    }
+    com.linkedin.datahub.graphql.generated.CorpUserSettings result =
+        new com.linkedin.datahub.graphql.generated.CorpUserSettings();
 
-    private void mapCorpUserSettings(@Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) {
-        CorpUserSettings corpUserSettings = new CorpUserSettings();
-        if (envelopedAspect != null) {
-            corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data());
-        }
-        com.linkedin.datahub.graphql.generated.CorpUserSettings result =
-            new com.linkedin.datahub.graphql.generated.CorpUserSettings();
-
-        // Map Appearance Settings -- Appearance settings always exist.
-        result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags));
+    // Map Appearance Settings -- Appearance settings always exist.
+    result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags));
 
-        // Map Views Settings.
-        if (corpUserSettings.hasViews()) {
-            result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews()));
-        }
-
-        corpUser.setSettings(result);
+    // Map Views Settings.
+    if (corpUserSettings.hasViews()) {
+      result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews()));
    }
 
-    @Nonnull
-    private CorpUserAppearanceSettings mapCorpUserAppearanceSettings(
-        @Nonnull final CorpUserSettings corpUserSettings,
-        @Nullable final FeatureFlags featureFlags
-    ) {
-        CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings();
-        if (featureFlags != null) {
-            appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault());
-        } else {
-            appearanceResult.setShowSimplifiedHomepage(false);
-        }
-
-        if (corpUserSettings.hasAppearance()) {
-            appearanceResult.setShowSimplifiedHomepage(corpUserSettings.getAppearance().isShowSimplifiedHomepage());
-        }
-        return appearanceResult;
+    corpUser.setSettings(result);
+  }
+
+  @Nonnull
+  private CorpUserAppearanceSettings mapCorpUserAppearanceSettings(
+      @Nonnull final CorpUserSettings corpUserSettings, @Nullable final FeatureFlags featureFlags) {
+    CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings();
+    if (featureFlags != null) {
+      appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault());
+    } else {
+      appearanceResult.setShowSimplifiedHomepage(false);
    }
 
-    @Nonnull
-    private CorpUserViewsSettings mapCorpUserViewsSettings(@Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) {
-        CorpUserViewsSettings viewsResult = new CorpUserViewsSettings();
-
-        if (viewsSettings.hasDefaultView()) {
-            final DataHubView unresolvedView = new DataHubView();
-            unresolvedView.setUrn(viewsSettings.getDefaultView().toString());
-            unresolvedView.setType(EntityType.DATAHUB_VIEW);
-            viewsResult.setDefaultView(unresolvedView);
-        }
-
-        return viewsResult;
-    }
-
-    private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) {
-        CorpUserKey corpUserKey = new CorpUserKey(dataMap);
-        corpUser.setUsername(corpUserKey.getUsername());
+    if (corpUserSettings.hasAppearance()) {
+      appearanceResult.setShowSimplifiedHomepage(
+          corpUserSettings.getAppearance().isShowSimplifiedHomepage());
    }
-
-    private void mapCorpUserInfo(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) {
-        CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap);
-        corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo));
-        corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo));
-        CorpUserProperties corpUserProperties = corpUser.getProperties();
-        if (corpUserInfo.hasCustomProperties()) {
-            corpUserProperties.setCustomProperties(CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn));
-        }
-        corpUser.setProperties(corpUserProperties);
+    return appearanceResult;
+  }
+
+  @Nonnull
+  private CorpUserViewsSettings mapCorpUserViewsSettings(
+      @Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) {
+    CorpUserViewsSettings viewsResult = new CorpUserViewsSettings();
+
+    if (viewsSettings.hasDefaultView()) {
+      final DataHubView unresolvedView = new DataHubView();
+      unresolvedView.setUrn(viewsSettings.getDefaultView().toString());
+      unresolvedView.setType(EntityType.DATAHUB_VIEW);
+      viewsResult.setDefaultView(unresolvedView);
    }
 
-    private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) {
-        CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap);
-        boolean isNativeUser =
-            corpUserCredentials != null && corpUserCredentials.hasSalt() && corpUserCredentials.hasHashedPassword();
-        corpUser.setIsNativeUser(isNativeUser);
+    return viewsResult;
+  }
+
+  private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) {
+    CorpUserKey corpUserKey = new CorpUserKey(dataMap);
+    corpUser.setUsername(corpUserKey.getUsername());
+  }
+
+  private void mapCorpUserInfo(
+      @Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) {
+    CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap);
+    corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo));
+    corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo));
+    CorpUserProperties corpUserProperties = corpUser.getProperties();
+    if (corpUserInfo.hasCustomProperties()) {
+      corpUserProperties.setCustomProperties(
+          CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn));
    }
+    corpUser.setProperties(corpUserProperties);
+  }
+
+  private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) {
+    CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap);
+    boolean isNativeUser =
+        corpUserCredentials != null
+            && corpUserCredentials.hasSalt()
+            && corpUserCredentials.hasHashedPassword();
+    corpUser.setIsNativeUser(isNativeUser);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java
index c64406a74733b..106e3de661201 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java
@@ -3,18 +3,16 @@
 import com.linkedin.datahub.graphql.generated.CorpUser;
 import com.linkedin.datahub.graphql.generated.CorpUserProperties;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
-/**
- * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
- *
- */
-public class CorpUserPropertiesMapper implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserProperties> {
+/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */
+public class CorpUserPropertiesMapper
+    implements ModelMapper<com.linkedin.identity.CorpUserInfo, CorpUserProperties> {
 
   public static final CorpUserPropertiesMapper INSTANCE = new CorpUserPropertiesMapper();
 
-  public static CorpUserProperties map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) {
+  public static CorpUserProperties map(
+      @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) {
     return INSTANCE.apply(corpUserInfo);
   }
 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java
index d0644fbfdacec..dd9e465a2d4ea 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java
@@ -2,14 +2,15 @@
 
 import com.linkedin.datahub.graphql.generated.CorpUserStatus;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import javax.annotation.Nonnull;
 
-public class CorpUserStatusMapper implements ModelMapper<com.linkedin.identity.CorpUserStatus, CorpUserStatus> {
+public class CorpUserStatusMapper
+    implements ModelMapper<com.linkedin.identity.CorpUserStatus, CorpUserStatus> {
 
   public static final CorpUserStatusMapper INSTANCE = new CorpUserStatusMapper();
 
-  public static CorpUserStatus map(@Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) {
+  public static CorpUserStatus map(
+      @Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) {
     return INSTANCE.apply(corpUserStatus);
   }
 
@@ -18,4 +19,4 @@ public CorpUserStatus apply(@Nonnull final com.linkedin.identity.CorpUserStatus
     // Warning- if the backend provides an unexpected value this will fail.
     return CorpUserStatus.valueOf(status.getStatus());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java
index 104c7c004cb66..d01f9b3945dc3 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.types.dashboard;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+
+import com.datahub.authorization.ConjunctivePrivilegeGroup;
+import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.CorpuserUrn;
@@ -9,8 +14,6 @@
 import com.linkedin.data.template.StringArray;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
-import com.datahub.authorization.ConjunctivePrivilegeGroup;
-import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
 import com.linkedin.datahub.graphql.generated.BrowsePath;
@@ -37,8 +40,8 @@
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.browse.BrowseResult;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.r2.RemoteInvocationException;
@@ -55,191 +58,214 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
+public class DashboardType
+    implements SearchableEntityType<Dashboard, String>,
+        BrowsableEntityType<Dashboard, String>,
+        MutableType<DashboardUpdateInput, Dashboard> {
+
+  private static final Set<String> ASPECTS_TO_RESOLVE =
+      ImmutableSet.of(
+          DASHBOARD_KEY_ASPECT_NAME,
+          DASHBOARD_INFO_ASPECT_NAME,
+          EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME,
+          OWNERSHIP_ASPECT_NAME,
+          INSTITUTIONAL_MEMORY_ASPECT_NAME,
+          GLOBAL_TAGS_ASPECT_NAME,
+          GLOSSARY_TERMS_ASPECT_NAME,
+          STATUS_ASPECT_NAME,
+          CONTAINER_ASPECT_NAME,
+          DOMAINS_ASPECT_NAME,
+          DEPRECATION_ASPECT_NAME,
+          DATA_PLATFORM_INSTANCE_ASPECT_NAME,
+          INPUT_FIELDS_ASPECT_NAME,
+          SUB_TYPES_ASPECT_NAME,
+          EMBED_ASPECT_NAME,
+          DATA_PRODUCTS_ASPECT_NAME,
+          BROWSE_PATHS_V2_ASPECT_NAME);
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "tool");
 
-public class DashboardType implements SearchableEntityType<Dashboard, String>, BrowsableEntityType<Dashboard, String>,
-    MutableType<DashboardUpdateInput, Dashboard> {
-
-    private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of(
-        DASHBOARD_KEY_ASPECT_NAME,
-        DASHBOARD_INFO_ASPECT_NAME,
-        EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME,
-        OWNERSHIP_ASPECT_NAME,
-        INSTITUTIONAL_MEMORY_ASPECT_NAME,
-        GLOBAL_TAGS_ASPECT_NAME,
-        GLOSSARY_TERMS_ASPECT_NAME,
-        STATUS_ASPECT_NAME,
-        CONTAINER_ASPECT_NAME,
-        DOMAINS_ASPECT_NAME,
-        DEPRECATION_ASPECT_NAME,
-        DATA_PLATFORM_INSTANCE_ASPECT_NAME,
-        INPUT_FIELDS_ASPECT_NAME,
-        SUB_TYPES_ASPECT_NAME,
-        EMBED_ASPECT_NAME,
-        DATA_PRODUCTS_ASPECT_NAME,
-        BROWSE_PATHS_V2_ASPECT_NAME
-    );
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "tool");
-
-    private final EntityClient _entityClient;
-
-    public DashboardType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
+  private final EntityClient _entityClient;
 
-    @Override
-    public Class<DashboardUpdateInput> inputClass() {
-        return DashboardUpdateInput.class;
-    }
+  public DashboardType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
 
-    @Override
-    public EntityType type() {
-        return EntityType.DASHBOARD;
-    }
+  @Override
+  public Class<DashboardUpdateInput> inputClass() {
+    return DashboardUpdateInput.class;
+  }
 
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
+  @Override
+  public EntityType type() {
+    return EntityType.DASHBOARD;
+  }
 
-    @Override
-    public Class<Dashboard> objectClass() {
-        return Dashboard.class;
-    }
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
 
-    @Override
-    public List<DataFetcherResult<Dashboard>> batchLoad(@Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception {
-        final List<Urn> urns = urnStrs.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-        try {
-            final Map<Urn, EntityResponse> dashboardMap =
-                _entityClient.batchGetV2(
-                    Constants.DASHBOARD_ENTITY_NAME,
-                    new HashSet<>(urns),
-                    ASPECTS_TO_RESOLVE,
-                    context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = new ArrayList<>();
-            for (Urn urn : urns) {
-                gmsResults.add(dashboardMap.getOrDefault(urn, null));
-            }
-            return gmsResults.stream()
-                .map(gmsDashboard -> gmsDashboard == null ? null : DataFetcherResult.<Dashboard>newResult()
-                    .data(DashboardMapper.map(gmsDashboard))
-                    .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load Dashboards", e);
-        }
-    }
+  @Override
+  public Class<Dashboard> objectClass() {
+    return Dashboard.class;
+  }
 
-    @Override
-    public SearchResults search(@Nonnull String query,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start,
-                                int count,
-                                @Nonnull QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("dashboard", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
+  @Override
+  public List<DataFetcherResult<Dashboard>> batchLoad(
+      @Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception {
+    final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+    try {
+      final Map<Urn, EntityResponse> dashboardMap =
+          _entityClient.batchGetV2(
+              Constants.DASHBOARD_ENTITY_NAME,
+              new HashSet<>(urns),
+              ASPECTS_TO_RESOLVE,
+              context.getAuthentication());
 
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-                                            @Nullable String field,
-                                            @Nullable Filter filters,
-                                            int limit,
-                                            @Nonnull QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
+      final List<EntityResponse> gmsResults = new ArrayList<>();
+      for (Urn urn : urns) {
+        gmsResults.add(dashboardMap.getOrDefault(urn, null));
+      }
+      return gmsResults.stream()
+          .map(
+              gmsDashboard ->
+                  gmsDashboard == null
+                      ? null
+                      : DataFetcherResult.<Dashboard>newResult()
+                          .data(DashboardMapper.map(gmsDashboard))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load Dashboards", e);
    }
+  }
 
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path,
-                                @Nullable List<FacetFilterInput> filters,
-                                int start, int count,
-                                @Nonnull QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
            "dashboard",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
 
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
-    }
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
 
-    private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) {
-        try {
-            return DashboardUrn.createFromString(urnStr);
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr));
-        }
-    }
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "dashboard", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
 
-    @Override
-    public Dashboard update(@Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) throws Exception {
-        if (isAuthorized(urn, input, context)) {
-            final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr());
-            final Collection<MetadataChangeProposal> proposals = DashboardUpdateInputMapper.map(input, actor);
-            proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn)));
-
-            try {
-                _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false);
-            } catch (RemoteInvocationException e) {
-                throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e);
-            }
-
-            return load(urn, context).getData();
-        }
-        throw new AuthorizationException("Unauthorized to perform this action.
Please contact your DataHub administrator."); + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { + try { + return DashboardUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); } + } + + @Override + public Dashboard update( + @Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DashboardUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final DashboardUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DashboardUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 432624ac4699f..704d2ae308c1a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -26,13 +28,13 @@ import com.linkedin.datahub.graphql.types.chart.mappers.InputFieldsMapper; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -49,161 +51,202 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DashboardMapper implements ModelMapper<EntityResponse, Dashboard> { - public static final DashboardMapper INSTANCE = new DashboardMapper(); - - public static Dashboard map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Dashboard apply(@Nonnull final EntityResponse entityResponse) { - final Dashboard result = new Dashboard(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DASHBOARD); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<Dashboard> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); - mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dashboard, dataMap) -> + public static final DashboardMapper INSTANCE = new DashboardMapper(); + + public static Dashboard map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public 
Dashboard apply(@Nonnull final EntityResponse entityResponse) { + final Dashboard result = new Dashboard(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DASHBOARD); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<Dashboard> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); + mappingHelper.mapToResult( + DASHBOARD_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - 
dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final DashboardKey gmsKey = new DashboardKey(dataMap); - dashboard.setDashboardId(gmsKey.getDashboardId()); - dashboard.setTool(gmsKey.getDashboardTool()); - dashboard.setPlatform(DataPlatform.builder() + return mappingHelper.getResult(); + } + + private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final DashboardKey gmsKey = new DashboardKey(dataMap); + dashboard.setDashboardId(gmsKey.getDashboardId()); + dashboard.setTool(gmsKey.getDashboardTool()); + dashboard.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapDashboardInfo( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = + new com.linkedin.dashboard.DashboardInfo(dataMap); + dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); + dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link + * DashboardInfo} + */ + private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardInfo result = new DashboardInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + result.setCharts( + info.getCharts().stream() + .map( + urn -> { + final Chart chart = new Chart(); + chart.setUrn(urn.toString()); + return chart; + }) + .collect(Collectors.toList())); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapDashboardInfo(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = new com.linkedin.dashboard.DashboardInfo(dataMap); - dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); - dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link DashboardInfo} - */ - private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardInfo result = new DashboardInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - result.setCharts(info.getCharts().stream().map(urn -> { - final Chart chart = new Chart(); - chart.setUrn(urn.toString()); - return chart; - }).collect(Collectors.toList())); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link DashboardProperties} - */ - private DashboardProperties mapDashboardInfoToProperties(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardProperties result = new DashboardProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); } - - private void mapEditableDashboardProperties(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(dataMap); - final DashboardEditableProperties dashboardEditableProperties = new DashboardEditableProperties(); - dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); - dashboard.setEditableProperties(dashboardEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link + * DashboardProperties} + */ + private DashboardProperties mapDashboardInfoToProperties( + final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardProperties result = new DashboardProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapGlobalTags(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dashboard.setGlobalTags(globalTags); - dashboard.setTags(globalTags); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dashboard.setContainer(Container - .builder() + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + return result; + } + + private void mapEditableDashboardProperties( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(dataMap); + final DashboardEditableProperties dashboardEditableProperties = + new DashboardEditableProperties(); + dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); + dashboard.setEditableProperties(dashboardEditableProperties); + } + + private void mapGlobalTags( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dashboard.setGlobalTags(globalTags); + dashboard.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dashboard.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); - } + private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java index f084dbc0bc09f..6212663ee87e4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,67 
+19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class DashboardUpdateInputMapper + implements InputModelMapper<DashboardUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); + public static Collection<MetadataChangeProposal> map( + @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(dashboardUpdateInput, actor); + } -public class DashboardUpdateInputMapper implements - InputModelMapper<DashboardUpdateInput, Collection<MetadataChangeProposal>, Urn> { - public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); - - public static Collection<MetadataChangeProposal> map(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dashboardUpdateInput, actor); - } + @Override + public Collection<MetadataChangeProposal> apply( + @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { - @Override - public Collection<MetadataChangeProposal> apply(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { + final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - - if (dashboardUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } - - if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dashboardUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } else { - // Tags override global tags - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dashboardUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dashboardUpdateInput.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); - editableDashboardProperties.setDescription(dashboardUpdateInput.getEditableProperties().getDescription()); - if (!editableDashboardProperties.hasCreated()) { - editableDashboardProperties.setCreated(auditStamp); - } - 
editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); - } + if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (dashboardUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } else { + // Tags override global tags + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dashboardUpdateInput.getEditableProperties() != null) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); + editableDashboardProperties.setDescription( + dashboardUpdateInput.getEditableProperties().getDescription()); + if (!editableDashboardProperties.hasCreated()) { + editableDashboardProperties.setCreated(auditStamp); + } + editableDashboardProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java index d257aef4be565..782ec3d3a6c07 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class DashboardUsageMetricMapper implements TimeSeriesAspectMapper<DashboardUsageMetrics> { public static final DashboardUsageMetricMapper INSTANCE = new DashboardUsageMetricMapper(); @@ -18,8 +17,10 @@ public static DashboardUsageMetrics map(@Nonnull final EnvelopedAspect enveloped @Override public DashboardUsageMetrics apply(EnvelopedAspect envelopedAspect) { com.linkedin.dashboard.DashboardUsageStatistics gmsDashboardUsageStatistics = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), com.linkedin.dashboard.DashboardUsageStatistics.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + com.linkedin.dashboard.DashboardUsageStatistics.class); final com.linkedin.datahub.graphql.generated.DashboardUsageMetrics dashboardUsageMetrics = new com.linkedin.datahub.graphql.generated.DashboardUsageMetrics(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 54f7660064c05..6ec1979cd090d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataflow; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,178 +57,201 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataFlowType + implements SearchableEntityType<DataFlow, String>, + BrowsableEntityType<DataFlow, String>, + MutableType<DataFlowUpdateInput, DataFlow> { + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_FLOW_KEY_ASPECT_NAME, + DATA_FLOW_INFO_ASPECT_NAME, + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); + private static final Set<String> FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); + private final EntityClient _entityClient; -public class DataFlowType implements SearchableEntityType<DataFlow, String>, BrowsableEntityType<DataFlow, String>, - MutableType<DataFlowUpdateInput, DataFlow> { - - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_FLOW_KEY_ASPECT_NAME, - DATA_FLOW_INFO_ASPECT_NAME, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set<String> FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); - private final EntityClient _entityClient; - - public DataFlowType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataFlowType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_FLOW; - } + @Override + public 
EntityType type() { + return EntityType.DATA_FLOW; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<DataFlow> objectClass() { - return DataFlow.class; - } + @Override + public Class<DataFlow> objectClass() { + return DataFlow.class; + } - @Override - public Class<DataFlowUpdateInput> inputClass() { - return DataFlowUpdateInput.class; - } - - @Override - public List<DataFetcherResult<DataFlow>> batchLoad(final List<String> urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> dataFlowMap = - _entityClient.batchGetV2( - Constants.DATA_FLOW_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataFlowMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataFlow -> gmsDataFlow == null ? null : DataFetcherResult.<DataFlow>newResult() - .data(DataFlowMapper.map(gmsDataFlow)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Flows", e); - } - } + @Override + public Class<DataFlowUpdateInput> inputClass() { + return DataFlowUpdateInput.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dataFlow", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List<DataFetcherResult<DataFlow>> batchLoad( + final List<String> urnStrs, @Nonnull final QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map<Urn, EntityResponse> dataFlowMap = + _entityClient.batchGetV2( + Constants.DATA_FLOW_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dataFlowMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataFlow -> + gmsDataFlow == null + ? 
null + : DataFetcherResult.<DataFlow>newResult() + .data(DataFlowMapper.map(gmsDataFlow)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Flows", e); } + } - @Override - public BrowseResults browse(@Nonnull List<String> path, @Nullable List<FacetFilterInput> filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dataFlow", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataFlowUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public DataFlow update(@Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) throws Exception { + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataFlow", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = DataFlowUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + DataFlowUrn.createFromString(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public DataFlow update( + @Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) + throws Exception { - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DataFlowUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 719fa9f0b2bf0..165fae81527ab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -17,12 +19,12 @@ import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -38,120 +40,147 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataFlowMapper implements ModelMapper<EntityResponse, DataFlow> { - public static final DataFlowMapper INSTANCE = new DataFlowMapper(); - - public static DataFlow map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public DataFlow apply(@Nonnull final EntityResponse entityResponse) { - final DataFlow result = new DataFlow(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_FLOW); - Urn entityUrn = entityResponse.getUrn(); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<DataFlow> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); - mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataFlow, dataMap) -> + public static final DataFlowMapper INSTANCE = new DataFlowMapper(); + + public static DataFlow map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataFlow apply(@Nonnull final EntityResponse entityResponse) { + final DataFlow result = new DataFlow(); + 
result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_FLOW); + Urn entityUrn = entityResponse.getUrn(); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<DataFlow> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); + mappingHelper.mapToResult( + DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } + return mappingHelper.getResult(); + } - private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final DataFlowKey gmsKey = new DataFlowKey(dataMap); - dataFlow.setOrchestrator(gmsKey.getOrchestrator()); - dataFlow.setFlowId(gmsKey.getFlowId()); - dataFlow.setCluster(gmsKey.getCluster()); - dataFlow.setPlatform(DataPlatform.builder() + 
private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final DataFlowKey gmsKey = new DataFlowKey(dataMap); + dataFlow.setOrchestrator(gmsKey.getOrchestrator()); + dataFlow.setFlowId(gmsKey.getFlowId()); + dataFlow.setCluster(gmsKey.getCluster()); + dataFlow.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getOrchestrator()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } - - private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = new com.linkedin.datajob.DataFlowInfo(dataMap); - dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); - dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); - } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} - */ - private DataFlowInfo mapDataFlowInfo(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowInfo result = new DataFlowInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getOrchestrator()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = + new com.linkedin.datajob.DataFlowInfo(dataMap); + dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); + dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} + */ + private DataFlowInfo mapDataFlowInfo( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowInfo result = new DataFlowInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} - */ - private DataFlowProperties mapDataFlowInfoToProperties(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowProperties result = new DataFlowProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(dataMap); - final 
DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); - dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); - dataFlow.setEditableProperties(dataFlowEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} + */ + private DataFlowProperties mapDataFlowInfoToProperties( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowProperties result = new DataFlowProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - private void mapGlobalTags(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataFlow.setGlobalTags(globalTags); - dataFlow.setTags(globalTags); - } - - private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } + return result; + } + + private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(dataMap); + final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); + dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); + dataFlow.setEditableProperties(dataFlowEditableProperties); + } + + private void mapGlobalTags( + @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataFlow.setGlobalTags(globalTags); + dataFlow.setTags(globalTags); + } + + private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
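// Illustrative behavior (hypothetical urns): if the Domains aspect holds
// [urn:li:domain:marketing, urn:li:domain:sales], only an association for
// urn:li:domain:marketing is surfaced on the mapped DataFlow.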
+ dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java index c966fc8338ed4..87579a15d586e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,22 +19,18 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DataFlowUpdateInputMapper implements InputModelMapper<DataFlowUpdateInput, - Collection<MetadataChangeProposal>, Urn> { +public class DataFlowUpdateInputMapper + implements InputModelMapper<DataFlowUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final DataFlowUpdateInputMapper INSTANCE = new DataFlowUpdateInputMapper(); - public static Collection<MetadataChangeProposal> map(@Nonnull final DataFlowUpdateInput dataFlowUpdateInput, - @Nonnull final Urn actor) { + public static Collection<MetadataChangeProposal> map( + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(dataFlowUpdateInput, actor); } @Override public Collection<MetadataChangeProposal> apply( - @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); @@ -41,7 +39,8 @@ public Collection<MetadataChangeProposal> apply( if (dataFlowUpdateInput.getOwnership() != null) { proposals.add( - updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -50,28 +49,29 @@ public Collection<MetadataChangeProposal> apply( if (dataFlowUpdateInput.getGlobalTags() != null) { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getGlobalTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (dataFlowUpdateInput.getEditableProperties() != null) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(); - 
editableDataFlowProperties.setDescription(dataFlowUpdateInput.getEditableProperties().getDescription()); + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(); + editableDataFlowProperties.setDescription( + dataFlowUpdateInput.getEditableProperties().getDescription()); editableDataFlowProperties.setCreated(auditStamp); editableDataFlowProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataFlowProperties, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataFlowProperties, EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index f6f37978bb36a..6e71584007504 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.datajob; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,178 +57,201 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataJobType + implements SearchableEntityType<DataJob, String>, + BrowsableEntityType<DataJob, String>, + MutableType<DataJobUpdateInput, DataJob> { + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_JOB_KEY_ASPECT_NAME, + DATA_JOB_INFO_ASPECT_NAME, + DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + private static final Set<String> FACET_FIELDS = ImmutableSet.of("flow"); + private final 
EntityClient _entityClient; -public class DataJobType implements SearchableEntityType<DataJob, String>, BrowsableEntityType<DataJob, String>, - MutableType<DataJobUpdateInput, DataJob> { - - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_JOB_KEY_ASPECT_NAME, - DATA_JOB_INFO_ASPECT_NAME, - DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - private static final Set<String> FACET_FIELDS = ImmutableSet.of("flow"); - private final EntityClient _entityClient; - - public DataJobType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataJobType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_JOB; - } + @Override + public EntityType type() { + return EntityType.DATA_JOB; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<DataJob> objectClass() { - return DataJob.class; - } + @Override + public Class<DataJob> objectClass() { + return DataJob.class; + } - @Override - public Class<DataJobUpdateInput> inputClass() { - return DataJobUpdateInput.class; - } + @Override + public Class<DataJobUpdateInput> inputClass() { + return DataJobUpdateInput.class; + } - @Override - public List<DataFetcherResult<DataJob>> batchLoad(final List<String> urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> dataJobMap = _entityClient.batchGetV2( - Constants.DATA_JOB_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataJobMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataJob -> gmsDataJob == null ? 
null : DataFetcherResult.<DataJob>newResult() - .data(DataJobMapper.map(gmsDataJob)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Jobs", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "dataJob", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List<DataFetcherResult<DataJob>> batchLoad( + final List<String> urnStrs, @Nonnull final QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map<Urn, EntityResponse> dataJobMap = + _entityClient.batchGetV2( + Constants.DATA_JOB_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dataJobMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataJob -> + gmsDataJob == null + ? null + : DataFetcherResult.<DataJob>newResult() + .data(DataJobMapper.map(gmsDataJob)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Jobs", e); } + } - @Override - public BrowseResults browse(@Nonnull List<String> path, @Nullable List<FacetFilterInput> filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dataJob", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public DataJob update(@Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = DataJobUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataJob", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + @Override + public DataJob update( + @Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DataJobUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
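// The groups built below are evaluated in disjunctive normal form:
// AuthorizationUtils.isAuthorized grants access if any single
// ConjunctivePrivilegeGroup inside the DisjunctivePrivilegeGroup is fully
// satisfied for this urn, as the comments in getAuthorizedPrivileges note.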
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
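// Illustrative result, assuming a hypothetical update that carries both
// ownership and tags (so specificPrivileges holds EDIT_ENTITY_OWNERS and
// EDIT_ENTITY_TAGS):
//   OR( AND(EDIT_ENTITY), AND(EDIT_ENTITY_OWNERS, EDIT_ENTITY_TAGS) )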
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 61802ad9cfe5c..0d0e7a613c8d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -21,13 +23,13 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.FineGrainedLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -42,143 +44,164 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataJobMapper implements ModelMapper<EntityResponse, DataJob> { - public static final DataJobMapper INSTANCE = new DataJobMapper(); + public static final DataJobMapper INSTANCE = new DataJobMapper(); - public static DataJob map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static DataJob map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public DataJob apply(@Nonnull final EntityResponse entityResponse) { - final DataJob result = new DataJob(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public DataJob apply(@Nonnull final EntityResponse entityResponse) { + final DataJob result = new DataJob(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_JOB); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_JOB); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - entityResponse.getAspects().forEach((name, aspect) -> { - DataMap data = aspect.getValue().data(); - if 
(DATA_JOB_KEY_ASPECT_NAME.equals(name)) { + entityResponse + .getAspects() + .forEach( + (name, aspect) -> { + DataMap data = aspect.getValue().data(); + if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { final DataJobKey gmsKey = new DataJobKey(data); - result.setDataFlow(new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + result.setDataFlow( + new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); result.setJobId(gmsKey.getJobId()); - } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = new com.linkedin.datajob.DataJobInfo(data); + } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = + new com.linkedin.datajob.DataJobInfo(data); result.setInfo(mapDataJobInfo(gmsDataJobInfo, entityUrn)); result.setProperties(mapDataJobInfoToProperties(gmsDataJobInfo, entityUrn)); - } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = new com.linkedin.datajob.DataJobInputOutput(data); + } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = + new com.linkedin.datajob.DataJobInputOutput(data); result.setInputOutput(mapDataJobInputOutput(gmsDataJobInputOutput)); - } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(data); - final DataJobEditableProperties dataJobEditableProperties = new DataJobEditableProperties(); - dataJobEditableProperties.setDescription(editableDataJobProperties.getDescription()); + } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { + final EditableDataJobProperties editableDataJobProperties = + new EditableDataJobProperties(data); + final DataJobEditableProperties dataJobEditableProperties = + new DataJobEditableProperties(); + dataJobEditableProperties.setDescription( + editableDataJobProperties.getDescription()); result.setEditableProperties(dataJobEditableProperties); - } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { + } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { result.setOwnership(OwnershipMapper.map(new Ownership(data), entityUrn)); - } else if (STATUS_ASPECT_NAME.equals(name)) { + } else if (STATUS_ASPECT_NAME.equals(name)) { result.setStatus(StatusMapper.map(new Status(data))); - } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(data), entityUrn); + } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(data), entityUrn); result.setGlobalTags(globalTags); result.setTags(globalTags); - } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); - } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); - } else if (DOMAINS_ASPECT_NAME.equals(name)) { + } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); + } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); + } else 
if (DOMAINS_ASPECT_NAME.equals(name)) { final Domains domains = new Domains(data); // Currently we only take the first domain if it exists. result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); - } else if (DEPRECATION_ASPECT_NAME.equals(name)) { + } else if (DEPRECATION_ASPECT_NAME.equals(name)) { result.setDeprecation(DeprecationMapper.map(new Deprecation(data))); - } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); - } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { + } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); - } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); - } - }); - - return result; + } + }); + + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} */ + private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobInfo result = new DataJobInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} - */ - private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobInfo result = new DataJobInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} - */ - private DataJobProperties mapDataJobInfoToProperties(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobProperties result = new DataJobProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} */ + private DataJobProperties mapDataJobInfoToProperties( + final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobProperties result = new DataJobProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private 
DataJobInputOutput mapDataJobInputOutput( + final com.linkedin.datajob.DataJobInputOutput inputOutput) { + final DataJobInputOutput result = new DataJobInputOutput(); + if (inputOutput.hasInputDatasets()) { + result.setInputDatasets( + inputOutput.getInputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatasets(ImmutableList.of()); + } + if (inputOutput.hasOutputDatasets()) { + result.setOutputDatasets( + inputOutput.getOutputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setOutputDatasets(ImmutableList.of()); + } + if (inputOutput.hasInputDatajobs()) { + result.setInputDatajobs( + inputOutput.getInputDatajobs().stream() + .map( + urn -> { + final DataJob dataJob = new DataJob(); + dataJob.setUrn(urn.toString()); + return dataJob; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatajobs(ImmutableList.of()); } - private DataJobInputOutput mapDataJobInputOutput(final com.linkedin.datajob.DataJobInputOutput inputOutput) { - final DataJobInputOutput result = new DataJobInputOutput(); - if (inputOutput.hasInputDatasets()) { - result.setInputDatasets(inputOutput.getInputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setInputDatasets(ImmutableList.of()); - } - if (inputOutput.hasOutputDatasets()) { - result.setOutputDatasets(inputOutput.getOutputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setOutputDatasets(ImmutableList.of()); - } - if (inputOutput.hasInputDatajobs()) { - result.setInputDatajobs(inputOutput.getInputDatajobs().stream().map(urn -> { - final DataJob dataJob = new DataJob(); - dataJob.setUrn(urn.toString()); - return dataJob; - }).collect(Collectors.toList())); - } else { - result.setInputDatajobs(ImmutableList.of()); - } - - if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { - result.setFineGrainedLineages(FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); - } - - return result; + if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { + result.setFineGrainedLineages( + FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java index b075c42d411fb..b0f299e00b4ba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,63 +19,61 @@ import java.util.stream.Collectors; import 
javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DataJobUpdateInputMapper implements InputModelMapper<DataJobUpdateInput, Collection<MetadataChangeProposal>, Urn> { - public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); +public class DataJobUpdateInputMapper + implements InputModelMapper<DataJobUpdateInput, Collection<MetadataChangeProposal>, Urn> { + public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); - public static Collection<MetadataChangeProposal> map( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dataJobUpdateInput, actor); - } - - @Override - public Collection<MetadataChangeProposal> apply( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); + public static Collection<MetadataChangeProposal> map( + @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(dataJobUpdateInput, actor); + } - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); + @Override + public Collection<MetadataChangeProposal> apply( + @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); - if (dataJobUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dataJobUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } else { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dataJobUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dataJobUpdateInput.getEditableProperties() != null) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); - editableDataJobProperties.setDescription(dataJobUpdateInput.getEditableProperties().getDescription()); - editableDataJobProperties.setCreated(auditStamp); - editableDataJobProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataJobProperties, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); - } + if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { + final GlobalTags 
globalTags = new GlobalTags(); + if (dataJobUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getGlobalTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); + } else { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dataJobUpdateInput.getEditableProperties() != null) { + final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); + editableDataJobProperties.setDescription( + dataJobUpdateInput.getEditableProperties().getDescription()); + editableDataJobProperties.setCreated(auditStamp); + editableDataJobProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataJobProperties, EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); } + + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java index 57a035d136645..567d275dbee0a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatform; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,56 +19,60 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.*; - - public class DataPlatformType implements EntityType<DataPlatform, String> { - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public DataPlatformType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataPlatformType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public Class<DataPlatform> objectClass() { - return DataPlatform.class; - } + @Override + public Class<DataPlatform> objectClass() { + return DataPlatform.class; + } - @Override - public List<DataFetcherResult<DataPlatform>> batchLoad(final List<String> urns, final QueryContext context) { + @Override + public List<DataFetcherResult<DataPlatform>> batchLoad( + final List<String> urns, final QueryContext context) { - final List<Urn> dataPlatformUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final List<Urn> dataPlatformUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> dataPlatformMap = _entityClient.batchGetV2( - DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), null, context.getAuthentication()); + try { + final Map<Urn, EntityResponse> dataPlatformMap = + _entityClient.batchGetV2( + DATA_PLATFORM_ENTITY_NAME, + new HashSet<>(dataPlatformUrns), + null, + context.getAuthentication()); - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformUrns) { - gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); - } + final List<EntityResponse> 
gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformUrns) { + gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); + } - return gmsResults.stream() - .map(gmsPlatform -> gmsPlatform == null ? null - : DataFetcherResult.<DataPlatform>newResult() - .data(DataPlatformMapper.map(gmsPlatform)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Platforms", e); - } + return gmsResults.stream() + .map( + gmsPlatform -> + gmsPlatform == null + ? null + : DataFetcherResult.<DataPlatform>newResult() + .data(DataPlatformMapper.map(gmsPlatform)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Platforms", e); } + } - @Override - public com.linkedin.datahub.graphql.generated.EntityType type() { - return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; - } + @Override + public com.linkedin.datahub.graphql.generated.EntityType type() { + return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java index 011fb83cddb33..c2dc3bfabd07c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java @@ -6,25 +6,27 @@ import javax.annotation.Nonnull; @Deprecated -public class DataPlatformInfoMapper implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformInfo> { +public class DataPlatformInfoMapper + implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformInfo> { - public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); + public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); - public static DataPlatformInfo map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); - } + public static DataPlatformInfo map( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return INSTANCE.apply(platform); + } - @Override - public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformInfo result = new DataPlatformInfo(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.hasDisplayName()) { - result.setDisplayName(input.getDisplayName()); - } - if (input.hasLogoUrl()) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + @Override + public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformInfo result = new DataPlatformInfo(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.hasDisplayName()) { + result.setDisplayName(input.getDisplayName()); + } + if (input.hasLogoUrl()) { + 
result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java index 8df44e8f6e9e9..f7078f9f37d7c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; @@ -13,36 +15,40 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataPlatformMapper implements ModelMapper<EntityResponse, DataPlatform> { - public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); - - public static DataPlatform map(@Nonnull final EntityResponse platform) { - return INSTANCE.apply(platform); - } - - @Override - public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { - final DataPlatform result = new DataPlatform(); - final DataPlatformKey dataPlatformKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKeyInternal(entityResponse.getUrn(), - new DataPlatformKey().schema()); - result.setType(EntityType.DATA_PLATFORM); - Urn urn = entityResponse.getUrn(); - result.setUrn(urn.toString()); - result.setName(dataPlatformKey.getPlatformName()); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<DataPlatform> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PLATFORM_KEY_ASPECT_NAME, (dataPlatform, dataMap) -> + public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); + + public static DataPlatform map(@Nonnull final EntityResponse platform) { + return INSTANCE.apply(platform); + } + + @Override + public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { + final DataPlatform result = new DataPlatform(); + final DataPlatformKey dataPlatformKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + entityResponse.getUrn(), new DataPlatformKey().schema()); + result.setType(EntityType.DATA_PLATFORM); + Urn urn = entityResponse.getUrn(); + result.setUrn(urn.toString()); + result.setName(dataPlatformKey.getPlatformName()); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<DataPlatform> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PLATFORM_KEY_ASPECT_NAME, + (dataPlatform, dataMap) -> dataPlatform.setName(new DataPlatformKey(dataMap).getPlatformName())); - mappingHelper.mapToResult(DATA_PLATFORM_INFO_ASPECT_NAME, (dataPlatform, dataMap) -> - dataPlatform.setProperties(DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + DATA_PLATFORM_INFO_ASPECT_NAME, + 
(dataPlatform, dataMap) -> + dataPlatform.setProperties( + DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); + return mappingHelper.getResult(); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java index c0a236dc1a402..ad6de5505bed6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java @@ -5,27 +5,28 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +public class DataPlatformPropertiesMapper + implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformProperties> { -public class DataPlatformPropertiesMapper implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformProperties> { + public static final DataPlatformPropertiesMapper INSTANCE = new DataPlatformPropertiesMapper(); - public static final DataPlatformPropertiesMapper - INSTANCE = new DataPlatformPropertiesMapper(); + public static DataPlatformProperties map( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return INSTANCE.apply(platform); + } - public static DataPlatformProperties map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + @Override + public DataPlatformProperties apply( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformProperties result = new DataPlatformProperties(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); } - - @Override - public DataPlatformProperties apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformProperties result = new DataPlatformProperties(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getLogoUrl() != null) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + if (input.getLogoUrl() != null) { + result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java index 87614e1332528..6519a493f3991 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,19 +12,15 @@ import 
com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.datahub.graphql.types.dataplatforminstance.mappers.DataPlatformInstanceMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; -import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,90 +28,100 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; -import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; - -public class DataPlatformInstanceType implements SearchableEntityType<DataPlatformInstance, String>, +public class DataPlatformInstanceType + implements SearchableEntityType<DataPlatformInstance, String>, com.linkedin.datahub.graphql.types.EntityType<DataPlatformInstance, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME - ); - private final EntityClient _entityClient; - - public DataPlatformInstanceType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.DATA_PLATFORM_INSTANCE; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<DataPlatformInstance> objectClass() { - return DataPlatformInstance.class; + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME); + private final EntityClient _entityClient; + + public DataPlatformInstanceType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.DATA_PLATFORM_INSTANCE; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<DataPlatformInstance> objectClass() { + return DataPlatformInstance.class; + } + + @Override + public List<DataFetcherResult<DataPlatformInstance>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> dataPlatformInstanceUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + 
Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + new HashSet<>(dataPlatformInstanceUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<DataPlatformInstance>newResult() + .data(DataPlatformInstanceMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + + } catch (Exception e) { + throw new RuntimeException("Failed to batch load DataPlatformInstance", e); } - - @Override - public List<DataFetcherResult<DataPlatformInstance>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> dataPlatformInstanceUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, - new HashSet<>(dataPlatformInstanceUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<DataPlatformInstance>newResult() - .data(DataPlatformInstanceMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - - } catch (Exception e) { - throw new RuntimeException("Failed to batch load DataPlatformInstance", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, - filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java index ba49f23133f9e..1a2bd0488c4bd 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java @@ -1,28 +1,27 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance.mappers; -import com.linkedin.common.Ownership; +import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; +import com.linkedin.common.Ownership; import com.linkedin.common.Status; -import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; -import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; -import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; +import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataPlatformInstanceKey; - import javax.annotation.Nonnull; public class DataPlatformInstanceMapper { @@ -41,65 +40,75 @@ public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) final EnvelopedAspectMap aspects = entityResponse.getAspects(); MappingHelper<DataPlatformInstance> mappingHelper = new MappingHelper<>(aspects, result); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - this::mapDataPlatformInstanceKey - ); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.OWNERSHIP_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)) - ); - mappingHelper.mapToResult(Constants.GLOBAL_TAGS_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)) - ); - mappingHelper.mapToResult(Constants.STATUS_ASPECT_NAME, 
- (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap))) - ); - mappingHelper.mapToResult(Constants.DEPRECATION_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))) - ); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, this::mapDataPlatformInstanceKey); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.OWNERSHIP_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setOwnership( + OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.GLOBAL_TAGS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.STATUS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + Constants.DEPRECATION_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } - private void mapDataPlatformInstanceKey(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { + private void mapDataPlatformInstanceKey( + @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { final DataPlatformInstanceKey gmsKey = new DataPlatformInstanceKey(dataMap); - dataPlatformInstance.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()) - .build()); + dataPlatformInstance.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); dataPlatformInstance.setInstanceId(gmsKey.getInstance()); } private void mapDataPlatformInstanceProperties( - @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn - ) { - final DataPlatformInstanceProperties gmsProperties = new DataPlatformInstanceProperties(dataMap); + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + final DataPlatformInstanceProperties gmsProperties = + new DataPlatformInstanceProperties(dataMap); final com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties properties = - new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); + new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); properties.setName(gmsProperties.getName()); properties.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } 
dataPlatformInstance.setProperties(properties); } - private void mapGlobalTags(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); dataPlatformInstance.setTags(globalTags); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index ee014f9f66571..48a0cb984862d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.DataProcessInstance; @@ -12,43 +14,43 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class DataProcessInstanceMapper implements ModelMapper<EntityResponse, DataProcessInstance> { - public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); - - public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); + + public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { + final DataProcessInstance result = new DataProcessInstance(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_PROCESS_INSTANCE); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<DataProcessInstance> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + + return mappingHelper.getResult(); + } + + private void mapDataProcessProperties( + @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + DataProcessInstanceProperties dataProcessInstanceProperties = + new DataProcessInstanceProperties(dataMap); + dpi.setName(dataProcessInstanceProperties.getName()); + if (dataProcessInstanceProperties.hasCreated()) { + dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); } - - @Override - public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { - final DataProcessInstance result = new 
DataProcessInstance(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_PROCESS_INSTANCE); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper<DataProcessInstance> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); - - return mappingHelper.getResult(); - } - - private void mapDataProcessProperties(@Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { - DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); - dpi.setName(dataProcessInstanceProperties.getName()); - if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); - } - if (dataProcessInstanceProperties.hasExternalUrl()) { - dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); - } + if (dataProcessInstanceProperties.hasExternalUrl()) { + dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java index ca9a77f7e45cb..fd60711e8c569 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java @@ -6,36 +6,41 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - -public class DataProcessInstanceRunEventMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DataProcessRunEvent> { - - public static final DataProcessInstanceRunEventMapper INSTANCE = new DataProcessInstanceRunEventMapper(); - - public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); +public class DataProcessInstanceRunEventMapper + implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DataProcessRunEvent> { + + public static final DataProcessInstanceRunEventMapper INSTANCE = + new DataProcessInstanceRunEventMapper(); + + public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map( + @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( + @Nonnull final EnvelopedAspect envelopedAspect) { + + DataProcessInstanceRunEvent runEvent = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + DataProcessInstanceRunEvent.class); + + final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = + new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); + + result.setTimestampMillis(runEvent.getTimestampMillis()); + result.setAttempt(runEvent.getAttempt()); + if (runEvent.hasStatus()) { + result.setStatus( + com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf( + runEvent.getStatus().toString())); } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessRunEvent 
apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - DataProcessInstanceRunEvent runEvent = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - DataProcessInstanceRunEvent.class); - - final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = - new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); - - result.setTimestampMillis(runEvent.getTimestampMillis()); - result.setAttempt(runEvent.getAttempt()); - if (runEvent.hasStatus()) { - result.setStatus(com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf(runEvent.getStatus().toString())); - } - if (runEvent.hasResult()) { - result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); - } - - return result; + if (runEvent.hasResult()) { + result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java index 91b03eea2745f..422bea73925a8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java @@ -5,30 +5,34 @@ import com.linkedin.dataprocess.DataProcessInstanceRunResult; import javax.annotation.Nonnull; +public class DataProcessInstanceRunResultMapper + implements ModelMapper< + DataProcessInstanceRunResult, + com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { -public class DataProcessInstanceRunResultMapper implements ModelMapper< - DataProcessInstanceRunResult, com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { + public static final DataProcessInstanceRunResultMapper INSTANCE = + new DataProcessInstanceRunResultMapper(); - public static final DataProcessInstanceRunResultMapper INSTANCE = new DataProcessInstanceRunResultMapper(); + public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map( + @Nonnull final DataProcessInstanceRunResult input) { + return INSTANCE.apply(input); + } - public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map(@Nonnull final DataProcessInstanceRunResult input) { - return INSTANCE.apply(input); - } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply(@Nonnull final DataProcessInstanceRunResult input) { - - final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = - new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); + @Override + public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply( + @Nonnull final DataProcessInstanceRunResult input) { - if (input.hasType()) { - result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); - } + final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = + new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); - if (input.hasNativeResultType()) { - result.setNativeResultType(input.getNativeResultType()); - } + if (input.hasType()) { + 
result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); + } - return result; + if (input.hasNativeResultType()) { + result.setNativeResultType(input.getNativeResultType()); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index eb8ca23f00b37..766f6937ce3e2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -1,5 +1,13 @@ package com.linkedin.datahub.graphql.types.dataproduct; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,11 +26,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import lombok.RequiredArgsConstructor; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,26 +33,23 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import org.apache.commons.lang3.NotImplementedException; @RequiredArgsConstructor -public class DataProductType implements SearchableEntityType<DataProduct, String>, - com.linkedin.datahub.graphql.types.EntityType<DataProduct, String> { - public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME - ); +public class DataProductType + implements SearchableEntityType<DataProduct, String>, + com.linkedin.datahub.graphql.types.EntityType<DataProduct, String> { + public static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; @Override 
@@ -68,13 +68,17 @@ public Class<DataProduct> objectClass() { } @Override - public List<DataFetcherResult<DataProduct>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { - final List<Urn> dataProductUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List<DataFetcherResult<DataProduct>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> dataProductUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(DATA_PRODUCT_ENTITY_NAME, new HashSet<>(dataProductUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATA_PRODUCT_ENTITY_NAME, + new HashSet<>(dataProductUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -82,8 +86,13 @@ public List<DataFetcherResult<DataProduct>> batchLoad(@Nonnull List<String> urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.<DataProduct>newResult().data(DataProductMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<DataProduct>newResult() + .data(DataProductMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); @@ -91,22 +100,28 @@ public List<DataFetcherResult<DataProduct>> batchLoad(@Nonnull List<String> urns } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List<FacetFilterInput> filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Data Product entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Data Product entity type"); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index 254b43ecb96cc..8039ea08dc722 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.types.dataproduct.mappers; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import 
static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -21,17 +28,8 @@ import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - - public class DataProductMapper implements ModelMapper<EntityResponse, DataProduct> { public static final DataProductMapper INSTANCE = new DataProductMapper(); @@ -50,27 +48,44 @@ public DataProduct apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<DataProduct> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, (dataProduct, dataMap) -> - mapDataProductProperties(dataProduct, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setDomain(DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + (dataProduct, dataMap) -> mapDataProductProperties(dataProduct, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + DOMAINS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setDomain( + DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); 
return result; } - private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { + private void mapDataProductProperties( + @Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { DataProductProperties dataProductProperties = new DataProductProperties(dataMap); - com.linkedin.datahub.graphql.generated.DataProductProperties properties = new com.linkedin.datahub.graphql.generated.DataProductProperties(); + com.linkedin.datahub.graphql.generated.DataProductProperties properties = + new com.linkedin.datahub.graphql.generated.DataProductProperties(); - final String name = dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); + final String name = + dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); properties.setName(name); properties.setDescription(dataProductProperties.getDescription()); if (dataProductProperties.hasExternalUrl()) { @@ -81,7 +96,9 @@ private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull } else { properties.setNumAssets(0); } - properties.setCustomProperties(CustomPropertiesMapper.map(dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); + properties.setCustomProperties( + CustomPropertiesMapper.map( + dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); dataProduct.setProperties(properties); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 6f339d3985133..badb24810c82b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -8,19 +13,17 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; -import com.linkedin.datahub.graphql.generated.Dataset; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; -import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.generated.BrowsePath; +import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import 
com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.BrowsableEntityType; @@ -37,13 +40,12 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -56,235 +58,266 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetType implements SearchableEntityType<Dataset, String>, BrowsableEntityType<Dataset, String>, +public class DatasetType + implements SearchableEntityType<Dataset, String>, + BrowsableEntityType<Dataset, String>, BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> { - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - - private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private static final String ENTITY_NAME = "dataset"; - - private final EntityClient _entityClient; - - public DatasetType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class<Dataset> objectClass() { - return Dataset.class; + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
+ DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + ACCESS_DATASET_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + + private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private static final String ENTITY_NAME = "dataset"; + + private final EntityClient _entityClient; + + public DatasetType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<Dataset> objectClass() { + return Dataset.class; + } + + @Override + public Class<DatasetUpdateInput> inputClass() { + return DatasetUpdateInput.class; + } + + @Override + public Class<BatchDatasetUpdateInput[]> batchInputClass() { + return BatchDatasetUpdateInput[].class; + } + + @Override + public EntityType type() { + return EntityType.DATASET; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List<DataFetcherResult<Dataset>> batchLoad( + @Nonnull final List<String> urnStrs, @Nonnull final QueryContext context) { + try { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map<Urn, EntityResponse> datasetMap = + _entityClient.batchGetV2( + Constants.DATASET_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(datasetMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.<Dataset>newResult() + .data(DatasetMapper.map(gmsDataset)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); } - - @Override - public Class<DatasetUpdateInput> inputClass() { - return DatasetUpdateInput.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataset", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + @Override + public List<Dataset> batchUpdate( + @Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { + final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); + + final Collection<MetadataChangeProposal> proposals = + Arrays.stream(input) + .map( + updateInput -> { + if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { + Collection<MetadataChangeProposal> datasetProposals = + DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); + datasetProposals.forEach( + proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); + return datasetProposals; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + + final List<String> urns = + Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } - @Override - public Class<BatchDatasetUpdateInput[]> batchInputClass() { - return BatchDatasetUpdateInput[].class; + return batchLoad(urns, context).stream() + .map(DataFetcherResult::getData) + .collect(Collectors.toList()); + } + + @Override + public Dataset update( + @Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = + DatasetUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - @Override - public EntityType type() { - return EntityType.DATASET; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getInstitutionalMemory() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - @Override - public List<DataFetcherResult<Dataset>> batchLoad(@Nonnull final List<String> urnStrs, - @Nonnull final QueryContext context) { - try { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map<Urn, EntityResponse> datasetMap = - _entityClient.batchGetV2( - Constants.DATASET_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(datasetMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataset -> gmsDataset == null ? 
null : DataFetcherResult.<Dataset>newResult() - .data(DatasetMapper.map(gmsDataset)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + if (updateInput.getDeprecation() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "dataset", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); + if (updateInput.getEditableSchemaMetadata() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); } - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public List<Dataset> batchUpdate(@Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { - final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); - - final Collection<MetadataChangeProposal> proposals = Arrays.stream(input).map(updateInput -> { - if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { - Collection<MetadataChangeProposal> datasetProposals = DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); - datasetProposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); - return datasetProposals; - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }).flatMap(Collection::stream).collect(Collectors.toList()); - - final List<String> urns = Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); - } - - return batchLoad(urns, context).stream().map(DataFetcherResult::getData).collect(Collectors.toList()); - } - - @Override - public Dataset update(@Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = DatasetUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorized(@Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getInstitutionalMemory() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); - } - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDeprecation() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - if (updateInput.getEditableSchemaMetadata() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java index e1aa580276a50..676617bfa2f90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java @@ -1,18 +1,18 @@ package com.linkedin.datahub.graphql.types.dataset; import com.linkedin.common.urn.DatasetUrn; - import java.net.URISyntaxException; public class DatasetUtils { - private DatasetUtils() { } + private DatasetUtils() {} - static DatasetUrn getDatasetUrn(String urnStr) { - try { - return DatasetUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); - } + static DatasetUrn getDatasetUrn(String urnStr) { + try { + return DatasetUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java index e620bfb30b6b7..df019cc5df8fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -22,32 +24,30 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class VersionedDatasetType implements com.linkedin.datahub.graphql.types.EntityType<VersionedDataset, VersionedUrn> { +public class VersionedDatasetType + implements com.linkedin.datahub.graphql.types.EntityType<VersionedDataset, VersionedUrn> { - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
- DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME - ); + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. + DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME); private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; @@ -74,8 +74,8 @@ public Function<Entity, VersionedUrn> getKeyProvider() { } @Override - public List<DataFetcherResult<VersionedDataset>> batchLoad(@Nonnull final List<VersionedUrn> versionedUrns, - @Nonnull final QueryContext context) { + public List<DataFetcherResult<VersionedDataset>> batchLoad( + @Nonnull final List<VersionedUrn> versionedUrns, @Nonnull final QueryContext context) { try { final Map<Urn, EntityResponse> datasetMap = _entityClient.batchGetVersionedV2( @@ -89,9 +89,13 @@ public List<DataFetcherResult<VersionedDataset>> batchLoad(@Nonnull final List<V gmsResults.add(datasetMap.getOrDefault(versionedUrn.getUrn(), null)); } return gmsResults.stream() - .map(gmsDataset -> gmsDataset == null ? null : DataFetcherResult.<VersionedDataset>newResult() - .data(VersionedDatasetMapper.map(gmsDataset)) - .build()) + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.<VersionedDataset>newResult() + .data(VersionedDatasetMapper.map(gmsDataset)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Datasets", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java index 0ec9bed0c8511..5fe7815ea2f8d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class AssertionRunEventMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.AssertionRunEvent> { @@ -29,8 +28,10 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( @Nonnull final EnvelopedAspect envelopedAspect) { AssertionRunEvent gmsAssertionRunEvent = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), AssertionRunEvent.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + AssertionRunEvent.class); final com.linkedin.datahub.graphql.generated.AssertionRunEvent assertionRunEvent = new com.linkedin.datahub.graphql.generated.AssertionRunEvent(); @@ -39,7 +40,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setAssertionUrn(gmsAssertionRunEvent.getAssertionUrn().toString()); assertionRunEvent.setAsserteeUrn(gmsAssertionRunEvent.getAsserteeUrn().toString()); assertionRunEvent.setRunId(gmsAssertionRunEvent.getRunId()); - assertionRunEvent.setStatus(AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); + assertionRunEvent.setStatus( + AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); if (gmsAssertionRunEvent.hasBatchSpec()) { assertionRunEvent.setBatchSpec(mapBatchSpec(gmsAssertionRunEvent.getBatchSpec())); } @@ -50,7 +52,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setResult(mapAssertionResult(gmsAssertionRunEvent.getResult())); } if (gmsAssertionRunEvent.hasRuntimeContext()) { - assertionRunEvent.setRuntimeContext(StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); + assertionRunEvent.setRuntimeContext( + StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); } return assertionRunEvent; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java index 1adcea7e53dc2..1644e0243a181 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java @@ -2,24 +2,25 @@ import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class DatasetDeprecationMapper implements 
ModelMapper<com.linkedin.dataset.DatasetDeprecation, Deprecation> { +public class DatasetDeprecationMapper + implements ModelMapper<com.linkedin.dataset.DatasetDeprecation, Deprecation> { - public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); + public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map( + @Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { + return INSTANCE.apply(deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 3e39c14c29ede..8296bc8244995 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Access; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -22,6 +24,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; @@ -29,15 +32,14 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -53,155 +55,196 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ @Slf4j public class DatasetMapper implements ModelMapper<EntityResponse, Dataset> { - public static final DatasetMapper INSTANCE = new DatasetMapper(); - - public static Dataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); - } - - public Dataset apply(@Nonnull final EntityResponse entityResponse) { - Dataset result = new Dataset(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATASET); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper<Dataset> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + public static final DatasetMapper INSTANCE = new DatasetMapper(); + + public static Dataset map(@Nonnull final EntityResponse dataset) { + return INSTANCE.apply(dataset); + } + + public Dataset apply(@Nonnull final EntityResponse entityResponse) { + Dataset result = new Dataset(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATASET); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper<Dataset> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); - mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, 
this::mapEditableDatasetProperties); + mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(SIBLINGS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); - mappingHelper.mapToResult(UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setFineGrainedLineages(UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + SIBLINGS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); + mappingHelper.mapToResult( + UPSTREAM_LINEAGE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setFineGrainedLineages( + UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); + mappingHelper.mapToResult( + 
EMBED_ASPECT_NAME, + (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataset, dataMap) -> dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> - dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final DatasetKey gmsKey = new DatasetKey(dataMap); - dataset.setName(gmsKey.getName()); - dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() + mappingHelper.mapToResult( + ACCESS_DATASET_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } + + private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final DatasetKey gmsKey = new DatasetKey(dataMap); + dataset.setName(gmsKey.getName()); + dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); + dataset.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + .setUrn(gmsKey.getPlatform().toString()) + .build()); + } + + private void mapDatasetProperties( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final DatasetProperties gmsProperties = new DatasetProperties(dataMap); + final com.linkedin.datahub.graphql.generated.DatasetProperties properties = + new com.linkedin.datahub.graphql.generated.DatasetProperties(); + properties.setDescription(gmsProperties.getDescription()); + dataset.setDescription(gmsProperties.getDescription()); + properties.setOrigin(dataset.getOrigin()); + if (gmsProperties.getExternalUrl() != null) { + properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - - private void mapDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final DatasetProperties gmsProperties = new DatasetProperties(dataMap); - final com.linkedin.datahub.graphql.generated.DatasetProperties properties = - new com.linkedin.datahub.graphql.generated.DatasetProperties(); - properties.setDescription(gmsProperties.getDescription()); - dataset.setDescription(gmsProperties.getDescription()); - properties.setOrigin(dataset.getOrigin()); - if (gmsProperties.getExternalUrl() != null) { - properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); - } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); - if (gmsProperties.getName() != null) { - properties.setName(gmsProperties.getName()); - } else { - properties.setName(dataset.getName()); - } - properties.setQualifiedName(gmsProperties.getQualifiedName()); - dataset.setProperties(properties); - dataset.setDescription(properties.getDescription()); - if (gmsProperties.getUri() != null) { - dataset.setUri(gmsProperties.getUri().toString()); - } - TimeStamp created = gmsProperties.getCreated(); - if (created != null) { - properties.setCreated(created.getTime()); - if 
(created.hasActor()) { - properties.setCreatedActor(created.getActor().toString()); - } - } - TimeStamp lastModified = gmsProperties.getLastModified(); - if (lastModified != null) { - properties.setLastModified(lastModified.getTime()); - if (lastModified.hasActor()) { - properties.setLastModifiedActor(lastModified.getActor().toString()); - } - } + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + if (gmsProperties.getName() != null) { + properties.setName(gmsProperties.getName()); + } else { + properties.setName(dataset.getName()); } - - private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); - final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); - editableProperties.setDescription(editableDatasetProperties.getDescription()); - dataset.setEditableProperties(editableProperties); + properties.setQualifiedName(gmsProperties.getQualifiedName()); + dataset.setProperties(properties); + dataset.setDescription(properties.getDescription()); + if (gmsProperties.getUri() != null) { + dataset.setUri(gmsProperties.getUri().toString()); } - - private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final ViewProperties properties = new ViewProperties(dataMap); - final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = - new com.linkedin.datahub.graphql.generated.ViewProperties(); - graphqlProperties.setMaterialized(properties.isMaterialized()); - graphqlProperties.setLanguage(properties.getViewLanguage()); - graphqlProperties.setLogic(properties.getViewLogic()); - dataset.setViewProperties(graphqlProperties); + TimeStamp created = gmsProperties.getCreated(); + if (created != null) { + properties.setCreated(created.getTime()); + if (created.hasActor()) { + properties.setCreatedActor(created.getActor().toString()); + } } - - private void mapGlobalTags(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataset.setGlobalTags(globalTags); - dataset.setTags(globalTags); + TimeStamp lastModified = gmsProperties.getLastModified(); + if (lastModified != null) { + properties.setLastModified(lastModified.getTime()); + if (lastModified.hasActor()) { + properties.setLastModifiedActor(lastModified.getActor().toString()); + } } - - private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() + } + + private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); + final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); + editableProperties.setDescription(editableDatasetProperties.getDescription()); + dataset.setEditableProperties(editableProperties); + } + + private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final ViewProperties properties = new ViewProperties(dataMap); + final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = + new com.linkedin.datahub.graphql.generated.ViewProperties(); + 
graphqlProperties.setMaterialized(properties.isMaterialized()); + graphqlProperties.setLanguage(properties.getViewLanguage()); + graphqlProperties.setLogic(properties.getViewLogic()); + dataset.setViewProperties(graphqlProperties); + } + + private void mapGlobalTags( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataset.setGlobalTags(globalTags); + dataset.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); - } + private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java index dbaaf27a3f2bc..25639e431fac1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java @@ -8,20 +8,22 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class DatasetProfileMapper implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DatasetProfile> { +public class DatasetProfileMapper + implements TimeSeriesAspectMapper<com.linkedin.datahub.graphql.generated.DatasetProfile> { public static final DatasetProfileMapper INSTANCE = new DatasetProfileMapper(); - public static com.linkedin.datahub.graphql.generated.DatasetProfile map(@Nonnull final EnvelopedAspect envelopedAspect) { + public static com.linkedin.datahub.graphql.generated.DatasetProfile map( + @Nonnull final EnvelopedAspect envelopedAspect) { return INSTANCE.apply(envelopedAspect); } @Override - public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull final EnvelopedAspect envelopedAspect) { + public com.linkedin.datahub.graphql.generated.DatasetProfile apply( + @Nonnull final EnvelopedAspect envelopedAspect) { - DatasetProfile gmsProfile = GenericRecordUtils - .deserializeAspect( + DatasetProfile gmsProfile = + GenericRecordUtils.deserializeAspect( envelopedAspect.getAspect().getValue(), envelopedAspect.getAspect().getContentType(), DatasetProfile.class); @@ -35,13 +37,16 @@ public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull fina result.setTimestampMillis(gmsProfile.getTimestampMillis()); if (gmsProfile.hasFieldProfiles()) { result.setFieldProfiles( - gmsProfile.getFieldProfiles().stream().map(DatasetProfileMapper::mapFieldProfile).collect(Collectors.toList())); + gmsProfile.getFieldProfiles().stream() + .map(DatasetProfileMapper::mapFieldProfile) + .collect(Collectors.toList())); } return result; } - private static 
com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile(DatasetFieldProfile gmsProfile) { + private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile( + DatasetFieldProfile gmsProfile) { final com.linkedin.datahub.graphql.generated.DatasetFieldProfile result = new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(); result.setFieldPath(gmsProfile.getFieldPath()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 78c1299ed9bd9..0b05d420030b5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -22,23 +24,19 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetUpdateInputMapper implements InputModelMapper<DatasetUpdateInput, Collection<MetadataChangeProposal>, Urn> { +public class DatasetUpdateInputMapper + implements InputModelMapper<DatasetUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final DatasetUpdateInputMapper INSTANCE = new DatasetUpdateInputMapper(); public static Collection<MetadataChangeProposal> map( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(datasetUpdateInput, actor); } @Override public Collection<MetadataChangeProposal> apply( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { final Collection<MetadataChangeProposal> proposals = new ArrayList<>(6); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATASET_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -46,8 +44,10 @@ public Collection<MetadataChangeProposal> apply( auditStamp.setTime(System.currentTimeMillis()); if (datasetUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if (datasetUpdateInput.getDeprecation() != null) { @@ -58,29 +58,32 @@ public Collection<MetadataChangeProposal> apply( } deprecation.setNote(datasetUpdateInput.getDeprecation().getNote()); deprecation.setActor(actor, SetMode.IGNORE_NULL); - proposals.add(updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); } if (datasetUpdateInput.getInstitutionalMemory() != null) { - proposals.add(updateMappingHelper.aspectToProposal(InstitutionalMemoryUpdateMapper - .map(datasetUpdateInput.getInstitutionalMemory()), 
INSTITUTIONAL_MEMORY_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + InstitutionalMemoryUpdateMapper.map(datasetUpdateInput.getInstitutionalMemory()), + INSTITUTIONAL_MEMORY_ASPECT_NAME)); } if (datasetUpdateInput.getTags() != null || datasetUpdateInput.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); if (datasetUpdateInput.getGlobalTags() != null) { - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getGlobalTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } else { // Tags field overrides deprecated globalTags field - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } @@ -89,28 +92,32 @@ public Collection<MetadataChangeProposal> apply( final EditableSchemaMetadata editableSchemaMetadata = new EditableSchemaMetadata(); editableSchemaMetadata.setEditableSchemaFieldInfo( new EditableSchemaFieldInfoArray( - datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream().map( - element -> mapSchemaFieldInfo(element) - ).collect(Collectors.toList()))); + datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream() + .map(element -> mapSchemaFieldInfo(element)) + .collect(Collectors.toList()))); editableSchemaMetadata.setLastModified(auditStamp); editableSchemaMetadata.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); } if (datasetUpdateInput.getEditableProperties() != null) { final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(); - editableDatasetProperties.setDescription(datasetUpdateInput.getEditableProperties().getDescription()); + editableDatasetProperties.setDescription( + datasetUpdateInput.getEditableProperties().getDescription()); editableDatasetProperties.setLastModified(auditStamp); editableDatasetProperties.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); } return proposals; } private EditableSchemaFieldInfo mapSchemaFieldInfo( - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo - ) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo) { final EditableSchemaFieldInfo output = new EditableSchemaFieldInfo(); if (schemaFieldInfo.getDescription() != null) { @@ -120,11 +127,14 @@ private EditableSchemaFieldInfo mapSchemaFieldInfo( if (schemaFieldInfo.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); - 
globalTags.setTags(new TagAssociationArray(schemaFieldInfo.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element)).collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + schemaFieldInfo.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); output.setGlobalTags(globalTags); } return output; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java index 922574d5051d3..f54adbe8ba26c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java @@ -4,39 +4,34 @@ import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.schema.EditableSchemaFieldInfo; - import javax.annotation.Nonnull; - public class EditableSchemaFieldInfoMapper { - public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); + public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); - public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( - @Nonnull final EditableSchemaFieldInfo fieldInfo, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(fieldInfo, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( + @Nonnull final EditableSchemaFieldInfo fieldInfo, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(fieldInfo, entityUrn); + } - public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( - @Nonnull final EditableSchemaFieldInfo input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); - if (input.hasDescription()) { - result.setDescription((input.getDescription())); - } - if (input.hasFieldPath()) { - result.setFieldPath((input.getFieldPath())); - } - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - return result; + public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( + @Nonnull final EditableSchemaFieldInfo input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = + new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); + if (input.hasDescription()) { + result.setDescription((input.getDescription())); + } + if (input.hasFieldPath()) { + result.setFieldPath((input.getFieldPath())); + } + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + 
result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java index 376558d2fd18c..3cf012a523d54 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java @@ -1,28 +1,27 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; -import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.common.urn.Urn; - -import javax.annotation.Nonnull; +import com.linkedin.schema.EditableSchemaMetadata; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class EditableSchemaMetadataMapper { - public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - - public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( - @Nonnull final EditableSchemaMetadata metadata, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(metadata, entityUrn); - } + public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply(@Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); - result.setEditableSchemaFieldInfo(input.getEditableSchemaFieldInfo().stream().map(schemaField -> - EditableSchemaFieldInfoMapper.map(schemaField, entityUrn) - ).collect(Collectors.toList())); - return result; - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( + @Nonnull final EditableSchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } + public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply( + @Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = + new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); + result.setEditableSchemaFieldInfo( + input.getEditableSchemaFieldInfo().stream() + .map(schemaField -> EditableSchemaFieldInfoMapper.map(schemaField, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java index b76767fa5d045..b99b243da5b94 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java @@ -5,14 +5,12 @@ import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; -import lombok.extern.slf4j.Slf4j; - import java.util.stream.Collectors; - 
+import lombok.extern.slf4j.Slf4j; @Slf4j public class ForeignKeyConstraintMapper { - private ForeignKeyConstraintMapper() { } + private ForeignKeyConstraintMapper() {} public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint constraint) { ForeignKeyConstraint result = new ForeignKeyConstraint(); @@ -22,15 +20,15 @@ public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint } if (constraint.hasSourceFields()) { result.setSourceFields( - constraint.getSourceFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + constraint.getSourceFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } if (constraint.hasForeignFields()) { result.setForeignFields( - constraint.getForeignFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + constraint.getForeignFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java index 515cba5e99c74..dd345bebf657f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java @@ -5,63 +5,66 @@ import com.linkedin.datahub.graphql.generated.TableSchema; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.schema.SchemaMetadata; - import javax.annotation.Nonnull; -public class PlatformSchemaMapper implements ModelMapper<SchemaMetadata.PlatformSchema, PlatformSchema> { +public class PlatformSchemaMapper + implements ModelMapper<SchemaMetadata.PlatformSchema, PlatformSchema> { - public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); + public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); - public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { - return INSTANCE.apply(metadata); - } + public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { + return INSTANCE.apply(metadata); + } - @Override - public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { - Object result; - if (input.isSchemaless()) { - return null; - } else if (input.isPrestoDDL()) { - final TableSchema prestoSchema = new TableSchema(); - prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); - result = prestoSchema; - } else if (input.isOracleDDL()) { - final TableSchema oracleSchema = new TableSchema(); - oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); - result = oracleSchema; - } else if (input.isMySqlDDL()) { - final TableSchema mySqlSchema = new TableSchema(); - mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); - result = mySqlSchema; - } else if (input.isKafkaSchema()) { - final TableSchema kafkaSchema = new TableSchema(); - kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); - result = kafkaSchema; - } else if (input.isOrcSchema()) { - final TableSchema orcSchema = new TableSchema(); - orcSchema.setSchema(input.getOrcSchema().getSchema()); - result = orcSchema; - } else if 
(input.isBinaryJsonSchema()) { - final TableSchema binaryJsonSchema = new TableSchema(); - binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); - result = binaryJsonSchema; - } else if (input.isEspressoSchema()) { - final KeyValueSchema espressoSchema = new KeyValueSchema(); - espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); - espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); - result = espressoSchema; - } else if (input.isKeyValueSchema()) { - final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); - otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); - otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); - result = otherKeyValueSchema; - } else if (input.isOtherSchema()) { - final TableSchema otherTableSchema = new TableSchema(); - otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); - result = otherTableSchema; - } else { - throw new RuntimeException(String.format("Unrecognized platform schema type %s provided", input.memberType().getType().name())); - } - return (PlatformSchema) result; + @Override + public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { + Object result; + if (input.isSchemaless()) { + return null; + } else if (input.isPrestoDDL()) { + final TableSchema prestoSchema = new TableSchema(); + prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); + result = prestoSchema; + } else if (input.isOracleDDL()) { + final TableSchema oracleSchema = new TableSchema(); + oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); + result = oracleSchema; + } else if (input.isMySqlDDL()) { + final TableSchema mySqlSchema = new TableSchema(); + mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); + result = mySqlSchema; + } else if (input.isKafkaSchema()) { + final TableSchema kafkaSchema = new TableSchema(); + kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); + result = kafkaSchema; + } else if (input.isOrcSchema()) { + final TableSchema orcSchema = new TableSchema(); + orcSchema.setSchema(input.getOrcSchema().getSchema()); + result = orcSchema; + } else if (input.isBinaryJsonSchema()) { + final TableSchema binaryJsonSchema = new TableSchema(); + binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); + result = binaryJsonSchema; + } else if (input.isEspressoSchema()) { + final KeyValueSchema espressoSchema = new KeyValueSchema(); + espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); + espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); + result = espressoSchema; + } else if (input.isKeyValueSchema()) { + final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); + otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); + otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); + result = otherKeyValueSchema; + } else if (input.isOtherSchema()) { + final TableSchema otherTableSchema = new TableSchema(); + otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); + result = otherTableSchema; + } else { + throw new RuntimeException( + String.format( + "Unrecognized platform schema type %s provided", + input.memberType().getType().name())); } + return (PlatformSchema) result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index f05a1adb6b443..f53803ce5be85 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -3,72 +3,75 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; - +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import javax.annotation.Nonnull; public class SchemaFieldMapper { - public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); - public static SchemaField map(@Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } + public static SchemaField map( + @Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } - public SchemaField apply(@Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { - final SchemaField result = new SchemaField(); - result.setDescription(input.getDescription()); - result.setFieldPath(input.getFieldPath()); - result.setJsonPath(input.getJsonPath()); - result.setRecursive(input.isRecursive()); - result.setNullable(input.isNullable()); - result.setNativeDataType(input.getNativeDataType()); - result.setType(mapSchemaFieldDataType(input.getType())); - result.setLabel(input.getLabel()); - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - result.setIsPartOfKey(input.isIsPartOfKey()); - result.setIsPartitioningKey(input.isIsPartitioningKey()); - return result; + public SchemaField apply( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + final SchemaField result = new SchemaField(); + result.setDescription(input.getDescription()); + result.setFieldPath(input.getFieldPath()); + result.setJsonPath(input.getJsonPath()); + result.setRecursive(input.isRecursive()); + result.setNullable(input.isNullable()); + result.setNativeDataType(input.getNativeDataType()); + result.setType(mapSchemaFieldDataType(input.getType())); + result.setLabel(input.getLabel()); + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + result.setIsPartOfKey(input.isIsPartOfKey()); + result.setIsPartitioningKey(input.isIsPartitioningKey()); + return result; + } - private SchemaFieldDataType mapSchemaFieldDataType(@Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { - final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); - if (type.isBytesType()) { - return 
SchemaFieldDataType.BYTES; - } else if (type.isFixedType()) { - return SchemaFieldDataType.FIXED; - } else if (type.isBooleanType()) { - return SchemaFieldDataType.BOOLEAN; - } else if (type.isStringType()) { - return SchemaFieldDataType.STRING; - } else if (type.isNumberType()) { - return SchemaFieldDataType.NUMBER; - } else if (type.isDateType()) { - return SchemaFieldDataType.DATE; - } else if (type.isTimeType()) { - return SchemaFieldDataType.TIME; - } else if (type.isEnumType()) { - return SchemaFieldDataType.ENUM; - } else if (type.isNullType()) { - return SchemaFieldDataType.NULL; - } else if (type.isArrayType()) { - return SchemaFieldDataType.ARRAY; - } else if (type.isMapType()) { - return SchemaFieldDataType.MAP; - } else if (type.isRecordType()) { - return SchemaFieldDataType.STRUCT; - } else if (type.isUnionType()) { - return SchemaFieldDataType.UNION; - } else { - throw new RuntimeException(String.format("Unrecognized SchemaFieldDataType provided %s", - type.memberType().toString())); - } + private SchemaFieldDataType mapSchemaFieldDataType( + @Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { + final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); + if (type.isBytesType()) { + return SchemaFieldDataType.BYTES; + } else if (type.isFixedType()) { + return SchemaFieldDataType.FIXED; + } else if (type.isBooleanType()) { + return SchemaFieldDataType.BOOLEAN; + } else if (type.isStringType()) { + return SchemaFieldDataType.STRING; + } else if (type.isNumberType()) { + return SchemaFieldDataType.NUMBER; + } else if (type.isDateType()) { + return SchemaFieldDataType.DATE; + } else if (type.isTimeType()) { + return SchemaFieldDataType.TIME; + } else if (type.isEnumType()) { + return SchemaFieldDataType.ENUM; + } else if (type.isNullType()) { + return SchemaFieldDataType.NULL; + } else if (type.isArrayType()) { + return SchemaFieldDataType.ARRAY; + } else if (type.isMapType()) { + return SchemaFieldDataType.MAP; + } else if (type.isRecordType()) { + return SchemaFieldDataType.STRUCT; + } else if (type.isUnionType()) { + return SchemaFieldDataType.UNION; + } else { + throw new RuntimeException( + String.format( + "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java index eb793cc17efb6..d0424ba89eca1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java @@ -4,44 +4,53 @@ import com.linkedin.datahub.graphql.generated.Schema; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaMetadata; - +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.stream.Collectors; public class SchemaMapper { - public static final SchemaMapper INSTANCE = new SchemaMapper(); + public static final SchemaMapper INSTANCE = new SchemaMapper(); - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, null, entityUrn); - } + public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, null, entityUrn); + } - public 
static Schema map(@Nonnull final SchemaMetadata metadata, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, systemMetadata, entityUrn); - } + public static Schema map( + @Nonnull final SchemaMetadata metadata, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, systemMetadata, entityUrn); + } - public Schema apply(@Nonnull final com.linkedin.schema.SchemaMetadata input, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - final Schema result = new Schema(); - if (input.getDataset() != null) { - result.setDatasetUrn(input.getDataset().toString()); - } - if (systemMetadata != null) { - result.setLastObserved(systemMetadata.getLastObserved()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - if (input.getForeignKeys() != null) { - result.setForeignKeys(input.getForeignKeys().stream() - .map(ForeignKeyConstraintMapper::map) - .collect(Collectors.toList())); - } - return result; + public Schema apply( + @Nonnull final com.linkedin.schema.SchemaMetadata input, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + final Schema result = new Schema(); + if (input.getDataset() != null) { + result.setDatasetUrn(input.getDataset().toString()); + } + if (systemMetadata != null) { + result.setLastObserved(systemMetadata.getLastObserved()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + if (input.getForeignKeys() != null) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(ForeignKeyConstraintMapper::map) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 00cb91bed8abb..31381073a16dd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -6,43 +6,42 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class SchemaMetadataMapper { - public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); + public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); - public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(aspect, 
entityUrn); - } + public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(aspect, entityUrn); + } - public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); - final com.linkedin.datahub.graphql.generated.SchemaMetadata result = - new com.linkedin.datahub.graphql.generated.SchemaMetadata(); + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + final com.linkedin.datahub.graphql.generated.SchemaMetadata result = + new com.linkedin.datahub.graphql.generated.SchemaMetadata(); - if (input.hasDataset()) { - result.setDatasetUrn(input.getDataset().toString()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); - if (input.hasForeignKeys()) { - result.setForeignKeys(input.getForeignKeys().stream().map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map( - foreignKeyConstraint - )).collect(Collectors.toList())); - } - return result; + if (input.hasDataset()) { + result.setDatasetUrn(input.getDataset().toString()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + result.setAspectVersion(aspect.getVersion()); + if (input.hasForeignKeys()) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map(foreignKeyConstraint)) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java index 241c4872b1caa..727e8629f74b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -14,11 +16,11 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import 
com.linkedin.datahub.graphql.generated.VersionedDataset; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -38,13 +40,10 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ @Slf4j public class VersionedDatasetMapper implements ModelMapper<EntityResponse, VersionedDataset> { @@ -67,28 +66,52 @@ public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { SystemMetadata schemaSystemMetadata = getSystemMetadata(aspectMap, SCHEMA_METADATA_ASPECT_NAME); mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setSchema( + SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new 
EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } @@ -104,12 +127,15 @@ private void mapDatasetKey(@Nonnull VersionedDataset dataset, @Nonnull DataMap d final DatasetKey gmsKey = new DatasetKey(dataMap); dataset.setName(gmsKey.getName()); dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + dataset.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); } - private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { + private void mapDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { final DatasetProperties gmsProperties = new DatasetProperties(dataMap); final com.linkedin.datahub.graphql.generated.DatasetProperties properties = new com.linkedin.datahub.graphql.generated.DatasetProperties(); @@ -118,7 +144,8 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da if (gmsProperties.getExternalUrl() != null) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); if (gmsProperties.getName() != null) { properties.setName(gmsProperties.getName()); } else { @@ -128,8 +155,10 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da dataset.setProperties(properties); } - private void mapEditableDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new 
EditableDatasetProperties(dataMap); + private void mapEditableDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); editableProperties.setDescription(editableDatasetProperties.getDescription()); dataset.setEditableProperties(editableProperties); @@ -145,18 +174,21 @@ private void mapViewProperties(@Nonnull VersionedDataset dataset, @Nonnull DataM dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); dataset.setTags(globalTags); } private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } private void mapDomains(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java index df8de87ff69ff..51ef254f52225 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java @@ -5,32 +5,32 @@ import com.linkedin.datahub.graphql.generated.EntityType; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class DomainAssociationMapper { - public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); + public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); - public static DomainAssociation map( - @Nonnull final com.linkedin.domain.Domains domains, - @Nonnull final String entityUrn - ) { - return INSTANCE.apply(domains, entityUrn); - } + public static DomainAssociation map( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + return INSTANCE.apply(domains, entityUrn); + } - public DomainAssociation apply(@Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - if (domains.getDomains().size() > 0) { - DomainAssociation association = new DomainAssociation(); - association.setDomain(Domain.builder() - .setType(EntityType.DOMAIN) - .setUrn(domains.getDomains().get(0).toString()).build()); - association.setAssociatedUrn(entityUrn); - return association; - } - return null; + public DomainAssociation apply( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + if (domains.getDomains().size() > 0) { + DomainAssociation association = new DomainAssociation(); + association.setDomain( + Domain.builder() + .setType(EntityType.DOMAIN) + .setUrn(domains.getDomains().get(0).toString()) + .build()); + association.setAssociatedUrn(entityUrn); + return association; } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index fe52b5eff718f..7ff1f70311b22 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; - public class DomainMapper { public static Domain map(final EntityResponse entityResponse) { @@ -33,30 +32,38 @@ public static Domain map(final EntityResponse entityResponse) { return null; } - final EnvelopedAspect envelopedDomainProperties = aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedDomainProperties = + aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (envelopedDomainProperties != null) { - result.setProperties(mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); + result.setProperties( + mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new 
InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } return result; } - private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties(final DomainProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = new com.linkedin.datahub.graphql.generated.DomainProperties(); + private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties( + final DomainProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.DomainProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); return propertiesResult; } - private DomainMapper() { } + private DomainMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index 4879c339d99fa..06d5df9354380 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -17,8 +17,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashSet; @@ -29,19 +27,21 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; +public class DomainType + implements SearchableEntityType<Domain, String>, + com.linkedin.datahub.graphql.types.EntityType<Domain, String> { -public class DomainType implements SearchableEntityType<Domain, String>, com.linkedin.datahub.graphql.types.EntityType<Domain, String> { - - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; - public DomainType(final EntityClient entityClient) { + public DomainType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -61,28 +61,30 @@ public Class<Domain> objectClass() { } @Override - public List<DataFetcherResult<Domain>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> domainUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Domain>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> domainUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.DOMAIN_ENTITY_NAME, - new HashSet<>(domainUrns), - 
ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.DOMAIN_ENTITY_NAME, + new HashSet<>(domainUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); for (Urn urn : domainUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Domain>newResult() - .data(DomainMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<Domain>newResult() + .data(DomainMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Domains", e); @@ -90,25 +92,31 @@ public List<DataFetcherResult<Domain>> batchLoad(@Nonnull List<String> urns, @No } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List<FacetFilterInput> filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Domain entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Domain entity type"); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } - private Urn getUrn(final String urnStr) { try { return Urn.createFromString(urnStr); @@ -116,4 +124,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java index f2c9e962811b9..9a27a1fba853f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java @@ -1,17 +1,21 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.datahub.graphql.generated.GlossaryNode; import 
com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryNodeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -20,18 +24,12 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - -public class GlossaryNodeType implements com.linkedin.datahub.graphql.types.EntityType<GlossaryNode, String> { +public class GlossaryNodeType + implements com.linkedin.datahub.graphql.types.EntityType<GlossaryNode, String> { - static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_NODE_KEY_ASPECT_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_NODE_KEY_ASPECT_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); private final EntityClient _entityClient; @@ -55,25 +53,31 @@ public Function<Entity, String> getKeyProvider() { } @Override - public List<DataFetcherResult<GlossaryNode>> batchLoad(final List<String> urns, final QueryContext context) { - final List<Urn> glossaryNodeUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<GlossaryNode>> batchLoad( + final List<String> urns, final QueryContext context) { + final List<Urn> glossaryNodeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> glossaryNodeMap = _entityClient.batchGetV2(GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(glossaryNodeUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map<Urn, EntityResponse> glossaryNodeMap = + _entityClient.batchGetV2( + GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(glossaryNodeUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); for (Urn urn : glossaryNodeUrns) { gmsResults.add(glossaryNodeMap.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsGlossaryNode -> - gmsGlossaryNode == null ? null - : DataFetcherResult.<GlossaryNode>newResult() - .data(GlossaryNodeMapper.map(gmsGlossaryNode)) - .build()) + .map( + gmsGlossaryNode -> + gmsGlossaryNode == null + ? 
null + : DataFetcherResult.<GlossaryNode>newResult() + .data(GlossaryNodeMapper.map(gmsGlossaryNode)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load GlossaryNodes", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index 3574c17a50923..c40740238f61e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.ArrayList; @@ -39,118 +42,135 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class GlossaryTermType implements SearchableEntityType<GlossaryTerm, String>, - BrowsableEntityType<GlossaryTerm, String> { - - private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); - - private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_TERM_KEY_ASPECT_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, - GLOSSARY_RELATED_TERM_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - BROWSE_PATHS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME - ); - - private final EntityClient _entityClient; - - public GlossaryTermType(final EntityClient entityClient) { - _entityClient = entityClient; +public class GlossaryTermType + implements SearchableEntityType<GlossaryTerm, String>, + BrowsableEntityType<GlossaryTerm, String> { + + private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); + + private static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_TERM_KEY_ASPECT_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + GLOSSARY_RELATED_TERM_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + BROWSE_PATHS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME); + + private final EntityClient _entityClient; + + public GlossaryTermType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<GlossaryTerm> objectClass() { + return GlossaryTerm.class; + } + + @Override + public EntityType type() { + return EntityType.GLOSSARY_TERM; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List<DataFetcherResult<GlossaryTerm>> batchLoad( + final List<String> urns, final QueryContext context) { + final List<Urn> glossaryTermUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + 
final Map<Urn, EntityResponse> glossaryTermMap = + _entityClient.batchGetV2( + GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(glossaryTermUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : glossaryTermUrns) { + gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsGlossaryTerm -> + gmsGlossaryTerm == null + ? null + : DataFetcherResult.<GlossaryTerm>newResult() + .data(GlossaryTermMapper.map(gmsGlossaryTerm)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load GlossaryTerms", e); } - - @Override - public Class<GlossaryTerm> objectClass() { - return GlossaryTerm.class; - } - - @Override - public EntityType type() { - return EntityType.GLOSSARY_TERM; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public List<DataFetcherResult<GlossaryTerm>> batchLoad(final List<String> urns, final QueryContext context) { - final List<Urn> glossaryTermUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> glossaryTermMap = _entityClient.batchGetV2(GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(glossaryTermUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : glossaryTermUrns) { - gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsGlossaryTerm -> - gmsGlossaryTerm == null ? null - : DataFetcherResult.<GlossaryTerm>newResult() - .data(GlossaryTermMapper.map(gmsGlossaryTerm)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load GlossaryTerms", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "glossaryTerm", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "glossaryTerm", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( "glossaryTerm", query, filters, limit, 
context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "glossaryTerm", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } - + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "glossaryTerm", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java index 93b6ab53d5a3a..59f7cc8a9c828 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java @@ -1,27 +1,27 @@ package com.linkedin.datahub.graphql.types.glossary; import com.linkedin.common.urn.GlossaryTermUrn; - import java.net.URISyntaxException; import java.util.regex.Pattern; public class GlossaryTermUtils { - private GlossaryTermUtils() { } + private GlossaryTermUtils() {} - static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { - try { - return GlossaryTermUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); - } + static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { + try { + return GlossaryTermUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); } + } - public static String getGlossaryTermName(String hierarchicalName) { - if (hierarchicalName.contains(".")) { - String[] nodes = hierarchicalName.split(Pattern.quote(".")); - return nodes[nodes.length - 1]; - } - return hierarchicalName; + public static String 
getGlossaryTermName(String hierarchicalName) { + if (hierarchicalName.contains(".")) { + String[] nodes = hierarchicalName.split(Pattern.quote(".")); + return nodes[nodes.length - 1]; } + return hierarchicalName; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 6a1d849dd23bf..901361eb0b2be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -13,11 +15,8 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.key.GlossaryNodeKey; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class GlossaryNodeMapper implements ModelMapper<EntityResponse, GlossaryNode> { public static final GlossaryNodeMapper INSTANCE = new GlossaryNodeMapper(); @@ -35,11 +34,14 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper<GlossaryNode> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_NODE_INFO_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + mappingHelper.mapToResult( + GLOSSARY_NODE_INFO_ASPECT_NAME, + (glossaryNode, dataMap) -> glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, this::mapGlossaryNodeKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryNode, dataMap) -> + glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java index 2f99700bc30a1..12ba8c1e088f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java @@ -1,41 +1,44 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.GlossaryTermInfo; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class GlossaryTermInfoMapper { - public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); + public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); - public static GlossaryTermInfo map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - return INSTANCE.apply(glossaryTermInfo, entityUrn); - } + public static GlossaryTermInfo map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + return INSTANCE.apply(glossaryTermInfo, entityUrn); + } - public GlossaryTermInfo apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); - glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); - if (glossaryTermInfo.hasName()) { - glossaryTermInfoResult.setName(glossaryTermInfo.getName()); - } - if (glossaryTermInfo.hasSourceRef()) { - glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); - } - if (glossaryTermInfo.hasSourceUrl()) { - glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); - } - if (glossaryTermInfo.hasCustomProperties()) { - glossaryTermInfoResult.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); - } - return glossaryTermInfoResult; + public GlossaryTermInfo apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = + new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); + glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); + if (glossaryTermInfo.hasName()) { + glossaryTermInfoResult.setName(glossaryTermInfo.getName()); + } + if (glossaryTermInfo.hasSourceRef()) { + glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); + } + if (glossaryTermInfo.hasSourceUrl()) { + glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); + } + if (glossaryTermInfo.hasCustomProperties()) { + glossaryTermInfoResult.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } + return glossaryTermInfoResult; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java index c98177b458dea..a02f79535399f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.InstitutionalMemory; import 
com.linkedin.common.Ownership; @@ -15,71 +17,82 @@ import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.key.GlossaryTermKey; -import com.linkedin.domain.Domains; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class GlossaryTermMapper implements ModelMapper<EntityResponse, GlossaryTerm> { - public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); + public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); - public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { - GlossaryTerm result = new GlossaryTerm(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { + GlossaryTerm result = new GlossaryTerm(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.GLOSSARY_TERM); - final String legacyName = GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.GLOSSARY_TERM); + final String legacyName = + GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper<GlossaryTerm> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setGlossaryTermInfo(GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setProperties(GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<GlossaryTerm> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); + mappingHelper.mapToResult( + 
GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setGlossaryTermInfo( + GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setProperties( + GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - // If there's no name property, resort to the legacy name computation. - if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { - result.getGlossaryTermInfo().setName(legacyName); - } - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } - return mappingHelper.getResult(); + // If there's no name property, resort to the legacy name computation. + if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { + result.getGlossaryTermInfo().setName(legacyName); } - - private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); - glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); - glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } + return mappingHelper.getResult(); + } - private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); - } + private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); + glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); + glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); + } + + private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java index 6b35833183393..94edfcbd31455 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java @@ -2,25 +2,27 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GlossaryTermProperties; 
-import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class GlossaryTermPropertiesMapper { public static final GlossaryTermPropertiesMapper INSTANCE = new GlossaryTermPropertiesMapper(); - public static GlossaryTermProperties map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + public static GlossaryTermProperties map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { return INSTANCE.apply(glossaryTermInfo, entityUrn); } - public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); + public GlossaryTermProperties apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = + new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); result.setDefinition(glossaryTermInfo.getDefinition()); result.setDescription(glossaryTermInfo.getDefinition()); result.setTermSource(glossaryTermInfo.getTermSource()); @@ -34,7 +36,8 @@ public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.Glossar result.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); } if (glossaryTermInfo.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index a64b0f7dc64fb..8494eace22244 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -1,51 +1,52 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.GlossaryTerms; -import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.datahub.graphql.generated.GlossaryTerm; +import com.linkedin.datahub.graphql.generated.GlossaryTerms; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class GlossaryTermsMapper { - public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); - - public static GlossaryTerms map( - @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(glossaryTerms, entityUrn); - } - - public GlossaryTerms apply(@Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTerms result = new com.linkedin.datahub.graphql.generated.GlossaryTerms(); - result.setTerms(glossaryTerms.getTerms().stream().map( - association -> this.mapGlossaryTermAssociation(association, entityUrn) - ).collect(Collectors.toList())); - return result; - } - - private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( - @Nonnull final GlossaryTermAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); - final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); - resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); - resultGlossaryTerm.setUrn(input.getUrn().toString()); - resultGlossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); - result.setTerm(resultGlossaryTerm); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } - + public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); + + public static GlossaryTerms map( + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(glossaryTerms, entityUrn); + } + + public GlossaryTerms apply( + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTerms result = + new com.linkedin.datahub.graphql.generated.GlossaryTerms(); + result.setTerms( + glossaryTerms.getTerms().stream() + .map(association -> this.mapGlossaryTermAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( + @Nonnull final GlossaryTermAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = + new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); + final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); + resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); + resultGlossaryTerm.setUrn(input.getUrn().toString()); + resultGlossaryTerm.setName( + GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); + result.setTerm(resultGlossaryTerm); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java index d575a81f4ae03..621fcf5f04140 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java @@ 
-3,26 +3,27 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.query.AutoCompleteResult; - import java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AutoCompleteResultsMapper + implements ModelMapper<AutoCompleteResult, AutoCompleteResults> { -public class AutoCompleteResultsMapper implements ModelMapper<AutoCompleteResult, AutoCompleteResults> { - - public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); + public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); - public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { - return INSTANCE.apply(results); - } + public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { + return INSTANCE.apply(results); + } - @Override - public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { - final AutoCompleteResults result = new AutoCompleteResults(); - result.setQuery(input.getQuery()); - result.setSuggestions(input.getSuggestions()); - result.setEntities(input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect( - Collectors.toList())); - return result; - } + @Override + public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { + final AutoCompleteResults result = new AutoCompleteResults(); + result.setQuery(input.getQuery()); + result.setSuggestions(input.getSuggestions()); + result.setEntities( + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java index ea44c4409b709..689ff82147e15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java @@ -2,27 +2,27 @@ import com.linkedin.datahub.graphql.Constants; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class BrowsePathMapper implements ModelMapper<String, BrowsePath> { - public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); + public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); - public static BrowsePath map(@Nonnull final String input) { - return INSTANCE.apply(input); - } + public static BrowsePath map(@Nonnull final String input) { + return INSTANCE.apply(input); + } - @Override - public BrowsePath apply(@Nonnull final String input) { - final BrowsePath browsePath = new BrowsePath(); - final List<String> path = Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()); - browsePath.setPath(path); - return browsePath; - } + @Override + public BrowsePath apply(@Nonnull final String input) { + final BrowsePath browsePath = new BrowsePath(); + final List<String> path = + Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList()); + 
browsePath.setPath(path); + return browsePath; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java index 4dac4468a80d5..ae70823d675d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java @@ -1,25 +1,24 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; public class BrowsePathsMapper implements ModelMapper<List<String>, List<BrowsePath>> { - public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); + public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); - public static List<BrowsePath> map(@Nonnull final List<String> input) { - return INSTANCE.apply(input); - } + public static List<BrowsePath> map(@Nonnull final List<String> input) { + return INSTANCE.apply(input); + } - @Override - public List<BrowsePath> apply(@Nonnull final List<String> input) { - List<BrowsePath> results = new ArrayList<>(); - for (String pathStr : input) { - results.add(BrowsePathMapper.map(pathStr)); - } - return results; + @Override + public List<BrowsePath> apply(@Nonnull final List<String> input) { + List<BrowsePath> results = new ArrayList<>(); + for (String pathStr : input) { + results.add(BrowsePathMapper.map(pathStr)); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java index c3e74c28fe59d..5cac03b19a74c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java @@ -9,10 +9,8 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class BrowseResultMapper { - private BrowseResultMapper() { - } + private BrowseResultMapper() {} public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) { final BrowseResults result = new BrowseResults(); @@ -31,7 +29,9 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) result.setMetadata(browseResultMetadata); List<Entity> entities = - input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect(Collectors.toList()); + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .collect(Collectors.toList()); result.setEntities(entities); List<BrowseResultGroup> groups = @@ -41,7 +41,8 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) return result; } - private static BrowseResultGroup mapGroup(@Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { + private static BrowseResultGroup mapGroup( + @Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { final BrowseResultGroup result = new BrowseResultGroup(); result.setName(group.getName()); result.setCount(group.getCount()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java index e6172debb439e..c58341f994d4f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java @@ -1,8 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Maps an input of type I to an output of type O with actor context. - */ +/** Maps an input of type I to an output of type O with actor context. */ public interface InputModelMapper<I, O, A> { - O apply(final I input, final A actor); -} \ No newline at end of file + O apply(final I input, final A actor); +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 2a615b24eaac2..7c7dab2e02472 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.AggregationMetadata; import com.linkedin.datahub.graphql.generated.FacetMetadata; @@ -10,75 +13,87 @@ import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; -import static com.linkedin.metadata.utils.SearchUtil.*; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class MapperUtils { - private MapperUtils() { - - } + private MapperUtils() {} public static SearchResult mapResult(SearchEntity searchEntity) { - return new SearchResult(UrnToEntityMapper.map(searchEntity.getEntity()), + return new SearchResult( + UrnToEntityMapper.map(searchEntity.getEntity()), getInsightsFromFeatures(searchEntity.getFeatures()), getMatchedFieldEntry(searchEntity.getMatchedFields())); } - public static FacetMetadata mapFacet(com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { + public static FacetMetadata mapFacet( + com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { final FacetMetadata facetMetadata = new FacetMetadata(); - List<String> aggregationFacets = List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); - List<Boolean> isEntityTypeFilter = aggregationFacets.stream().map( - facet -> facet.equals("entity") || facet.contains("_entityType")).collect(Collectors.toList()); + List<String> aggregationFacets = + List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); + List<Boolean> isEntityTypeFilter = + aggregationFacets.stream() + .map(facet -> facet.equals("entity") || facet.contains("_entityType")) + .collect(Collectors.toList()); facetMetadata.setField(aggregationMetadata.getName()); facetMetadata.setDisplayName( - 
Optional.ofNullable(aggregationMetadata.getDisplayName()).orElse(aggregationMetadata.getName())); - facetMetadata.setAggregations(aggregationMetadata.getFilterValues() - .stream() - .map(filterValue -> new AggregationMetadata(convertFilterValue(filterValue.getValue(), isEntityTypeFilter), - filterValue.getFacetCount(), - filterValue.getEntity() == null ? null : UrnToEntityMapper.map(filterValue.getEntity()))) - .collect(Collectors.toList())); + Optional.ofNullable(aggregationMetadata.getDisplayName()) + .orElse(aggregationMetadata.getName())); + facetMetadata.setAggregations( + aggregationMetadata.getFilterValues().stream() + .map( + filterValue -> + new AggregationMetadata( + convertFilterValue(filterValue.getValue(), isEntityTypeFilter), + filterValue.getFacetCount(), + filterValue.getEntity() == null + ? null + : UrnToEntityMapper.map(filterValue.getEntity()))) + .collect(Collectors.toList())); return facetMetadata; } public static String convertFilterValue(String filterValue, List<Boolean> isEntityTypeFilter) { String[] aggregations = filterValue.split(AGGREGATION_SEPARATOR_CHAR); - return IntStream.range(0, aggregations.length).mapToObj( - idx -> idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) ? EntityTypeMapper.getType(aggregations[idx]).toString() : aggregations[idx]) + return IntStream.range(0, aggregations.length) + .mapToObj( + idx -> + idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) + ? EntityTypeMapper.getType(aggregations[idx]).toString() + : aggregations[idx]) .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } - public static List<MatchedField> getMatchedFieldEntry(List<com.linkedin.metadata.search.MatchedField> highlightMetadata) { + public static List<MatchedField> getMatchedFieldEntry( + List<com.linkedin.metadata.search.MatchedField> highlightMetadata) { return highlightMetadata.stream() - .map(field -> { - MatchedField matchedField = new MatchedField(); - matchedField.setName(field.getName()); - matchedField.setValue(field.getValue()); - if (SearchUtils.isUrn(field.getValue())) { - try { + .map( + field -> { + MatchedField matchedField = new MatchedField(); + matchedField.setName(field.getName()); + matchedField.setValue(field.getValue()); + if (SearchUtils.isUrn(field.getValue())) { + try { Urn urn = Urn.createFromString(field.getValue()); matchedField.setEntity(UrnToEntityMapper.map(urn)); - } catch (URISyntaxException e) { + } catch (URISyntaxException e) { log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); + } } - } - return matchedField; - }) + return matchedField; + }) .collect(Collectors.toList()); } - public static SearchSuggestion mapSearchSuggestion(com.linkedin.metadata.search.SearchSuggestion suggestion) { - return new SearchSuggestion(suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); + public static SearchSuggestion mapSearchSuggestion( + com.linkedin.metadata.search.SearchSuggestion suggestion) { + return new SearchSuggestion( + suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java index 08afbd510b98f..2167be9f27ca8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java @@ -1,9 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Simple interface for classes capable of mapping an input of type I to - * an output of type O. - */ +/** Simple interface for classes capable of mapping an input of type I to an output of type O. */ public interface ModelMapper<I, O> { - O apply(final I input); + O apply(final I input); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java index 903e962524734..e0ac0336c8715 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java @@ -3,7 +3,5 @@ import com.linkedin.datahub.graphql.generated.TimeSeriesAspect; import com.linkedin.metadata.aspect.EnvelopedAspect; - -public interface TimeSeriesAspectMapper<T extends TimeSeriesAspect> extends ModelMapper<EnvelopedAspect, T> { - -} +public interface TimeSeriesAspectMapper<T extends TimeSeriesAspect> + extends ModelMapper<EnvelopedAspect, T> {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java index dd00727fc2845..baf632ae8bdf4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.Entity; @@ -12,10 +15,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - - public class UrnScrollAcrossLineageResultsMapper<T extends RecordTemplate, E extends Entity> { public static <T extends RecordTemplate, E extends Entity> ScrollAcrossLineageResults map( LineageScrollResult searchResult) { @@ -30,8 +29,12 @@ public ScrollAcrossLineageResults apply(LineageScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java index fd774d73f3df7..72eb71cd095bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - public class UrnScrollResultsMapper<T extends RecordTemplate, E extends Entity> { public static <T extends RecordTemplate, E extends Entity> ScrollResults map( com.linkedin.metadata.search.ScrollResult scrollResult) { @@ -25,8 +24,12 @@ public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java index ae87d0269c188..642fe90cf2aed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java @@ -1,23 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.FreshnessStats; -import com.linkedin.datahub.graphql.generated.SystemFreshness; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; +import com.linkedin.datahub.graphql.generated.SystemFreshness; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - - public class UrnSearchAcrossLineageResultsMapper<T extends RecordTemplate, E extends Entity> { public static <T extends RecordTemplate, E extends Entity> SearchAcrossLineageResults map( LineageSearchResult searchResult) { @@ -32,17 +31,25 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = 
input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); if (input.hasFreshness()) { FreshnessStats outputFreshness = new FreshnessStats(); outputFreshness.setCached(input.getFreshness().isCached()); - outputFreshness.setSystemFreshness(input.getFreshness().getSystemFreshness().entrySet().stream().map(x -> - SystemFreshness.builder() - .setSystemName(x.getKey()) - .setFreshnessMillis(x.getValue()) - .build()).collect(Collectors.toList())); + outputFreshness.setSystemFreshness( + input.getFreshness().getSystemFreshness().entrySet().stream() + .map( + x -> + SystemFreshness.builder() + .setSystemName(x.getKey()) + .setFreshnessMillis(x.getValue()) + .build()) + .collect(Collectors.toList())); result.setFreshness(outputFreshness); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java index b16e2f10d1df7..d814c44e469bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - public class UrnSearchResultsMapper<T extends RecordTemplate, E extends Entity> { public static <T extends RecordTemplate, E extends Entity> SearchResults map( com.linkedin.metadata.search.SearchResult searchResult) { @@ -25,9 +24,16 @@ public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); - result.setSuggestions(searchResultMetadata.getSuggestions().stream().map(MapperUtils::mapSearchSuggestion).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); + result.setSuggestions( + searchResultMetadata.getSuggestions().stream() + .map(MapperUtils::mapSearchSuggestion) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java index f5594afc1a5b5..da3ddd1115437 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java @@ -1,5 +1,8 @@ package 
com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,103 +41,122 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLFeatureTableType implements SearchableEntityType<MLFeatureTable, String>, - BrowsableEntityType<MLFeatureTable, String> { - - private static final Set<String> FACET_FIELDS = ImmutableSet.of("platform", "name"); - private final EntityClient _entityClient; - - public MLFeatureTableType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLFEATURE_TABLE; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<MLFeatureTable> objectClass() { - return MLFeatureTable.class; - } - - @Override - public List<DataFetcherResult<MLFeatureTable>> batchLoad(final List<String> urns, final QueryContext context) throws Exception { - final List<Urn> mlFeatureTableUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> mlFeatureTableMap = _entityClient.batchGetV2(ML_FEATURE_TABLE_ENTITY_NAME, - new HashSet<>(mlFeatureTableUrns), null, context.getAuthentication()); - - final List<EntityResponse> gmsResults = mlFeatureTableUrns.stream() - .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlFeatureTable -> gmsMlFeatureTable == null ? 
null - : DataFetcherResult.<MLFeatureTable>newResult() - .data(MLFeatureTableMapper.map(gmsMlFeatureTable)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLFeatureTables", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlFeatureTable", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlFeatureTable", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlFeatureTable", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLFeatureTableType + implements SearchableEntityType<MLFeatureTable, String>, + BrowsableEntityType<MLFeatureTable, String> { + + private static final Set<String> FACET_FIELDS = ImmutableSet.of("platform", "name"); + private final EntityClient _entityClient; + + public MLFeatureTableType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLFEATURE_TABLE; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<MLFeatureTable> objectClass() { + return MLFeatureTable.class; + } + + @Override + public List<DataFetcherResult<MLFeatureTable>> batchLoad( + final List<String> urns, final QueryContext context) throws Exception { + final List<Urn> mlFeatureTableUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> mlFeatureTableMap = + _entityClient.batchGetV2( + ML_FEATURE_TABLE_ENTITY_NAME, + new HashSet<>(mlFeatureTableUrns), + null, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = + mlFeatureTableUrns.stream() + .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlFeatureTable -> + gmsMlFeatureTable == null + ? 
null + : DataFetcherResult.<MLFeatureTable>newResult() + .data(MLFeatureTableMapper.map(gmsMlFeatureTable)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLFeatureTables", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlFeatureTable", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + "mlFeatureTable", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "mlFeatureTable", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java index f5e0d80948bcc..6f94ea44cd476 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,8 +20,8 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -31,78 +33,94 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class MLFeatureType implements SearchableEntityType<MLFeature, String> { - private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); - private final 
EntityClient _entityClient; + private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; - public MLFeatureType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public MLFeatureType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.MLFEATURE; - } + @Override + public EntityType type() { + return EntityType.MLFEATURE; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<MLFeature> objectClass() { - return MLFeature.class; - } + @Override + public Class<MLFeature> objectClass() { + return MLFeature.class; + } - @Override - public List<DataFetcherResult<MLFeature>> batchLoad(final List<String> urns, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> mlFeatureUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List<DataFetcherResult<MLFeature>> batchLoad( + final List<String> urns, @Nonnull final QueryContext context) throws Exception { + final List<Urn> mlFeatureUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> mlFeatureMap = _entityClient.batchGetV2(ML_FEATURE_ENTITY_NAME, - new HashSet<>(mlFeatureUrns), null, context.getAuthentication()); + try { + final Map<Urn, EntityResponse> mlFeatureMap = + _entityClient.batchGetV2( + ML_FEATURE_ENTITY_NAME, + new HashSet<>(mlFeatureUrns), + null, + context.getAuthentication()); - final List<EntityResponse> gmsResults = mlFeatureUrns.stream() - .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null)) - .collect(Collectors.toList()); + final List<EntityResponse> gmsResults = + mlFeatureUrns.stream() + .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null)) + .collect(Collectors.toList()); - return gmsResults.stream() - .map(gmsMlFeature -> gmsMlFeature == null ? null - : DataFetcherResult.<MLFeature>newResult() - .data(MLFeatureMapper.map(gmsMlFeature)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLFeatures", e); - } + return gmsResults.stream() + .map( + gmsMlFeature -> + gmsMlFeature == null + ? 
null + : DataFetcherResult.<MLFeature>newResult() + .data(MLFeatureMapper.map(gmsMlFeature)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLFeatures", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlFeature", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlFeature", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java index 05b70c15bafc6..d505b70effdd4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,104 +41,123 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLModelGroupType implements 
SearchableEntityType<MLModelGroup, String>, - BrowsableEntityType<MLModelGroup, String> { - - private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private final EntityClient _entityClient; - - public MLModelGroupType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLMODEL_GROUP; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<MLModelGroup> objectClass() { - return MLModelGroup.class; - } - - @Override - public List<DataFetcherResult<MLModelGroup>> batchLoad(final List<String> urns, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> mlModelGroupUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> mlModelMap = _entityClient.batchGetV2(ML_MODEL_GROUP_ENTITY_NAME, - new HashSet<>(mlModelGroupUrns), null, context.getAuthentication()); - - final List<EntityResponse> gmsResults = mlModelGroupUrns.stream() - .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlModelGroup -> gmsMlModelGroup == null ? null - : DataFetcherResult.<MLModelGroup>newResult() - .data(MLModelGroupMapper.map(gmsMlModelGroup)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLModelGroups", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlModelGroup", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlModelGroup", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlModelGroup", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLModelGroupType + implements SearchableEntityType<MLModelGroup, String>, + BrowsableEntityType<MLModelGroup, String> { + + private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private final EntityClient _entityClient; + + public MLModelGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLMODEL_GROUP; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<MLModelGroup> objectClass() { + return MLModelGroup.class; + } + + @Override + public List<DataFetcherResult<MLModelGroup>> batchLoad( + final List<String> urns, @Nonnull final QueryContext context) throws Exception { + final List<Urn> mlModelGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> mlModelMap = + _entityClient.batchGetV2( + ML_MODEL_GROUP_ENTITY_NAME, + new HashSet<>(mlModelGroupUrns), + null, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = + mlModelGroupUrns.stream() + .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlModelGroup -> + gmsMlModelGroup == null + ? null + : DataFetcherResult.<MLModelGroup>newResult() + .data(MLModelGroupMapper.map(gmsMlModelGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLModelGroups", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlModelGroup", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + "mlModelGroup", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "mlModelGroup", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java index ef4be247a246b..27b791d78e78e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,102 +41,116 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLModelType implements SearchableEntityType<MLModel, String>, BrowsableEntityType<MLModel, String> { - - private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private final EntityClient _entityClient; - - public MLModelType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLMODEL; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<MLModel> objectClass() { - return MLModel.class; - } - - @Override - public List<DataFetcherResult<MLModel>> batchLoad(final List<String> urns, final QueryContext context) throws Exception { - final List<Urn> mlModelUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> mlModelMap = _entityClient.batchGetV2(ML_MODEL_ENTITY_NAME, - new HashSet<>(mlModelUrns), null, context.getAuthentication()); - - final List<EntityResponse> gmsResults = mlModelUrns.stream() - .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlModel -> gmsMlModel == null ? 
null - : DataFetcherResult.<MLModel>newResult() - .data(MLModelMapper.map(gmsMlModel)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLModels", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlModel", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List<String> path, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlModel", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLModelType + implements SearchableEntityType<MLModel, String>, BrowsableEntityType<MLModel, String> { + + private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private final EntityClient _entityClient; + + public MLModelType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLMODEL; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<MLModel> objectClass() { + return MLModel.class; + } + + @Override + public List<DataFetcherResult<MLModel>> batchLoad( + final List<String> urns, final QueryContext context) throws Exception { + final List<Urn> mlModelUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> mlModelMap = + _entityClient.batchGetV2( + ML_MODEL_ENTITY_NAME, new HashSet<>(mlModelUrns), null, context.getAuthentication()); + + final List<EntityResponse> gmsResults = + mlModelUrns.stream() + .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlModel -> + gmsMlModel == null + ? 
null + : DataFetcherResult.<MLModel>newResult() + .data(MLModelMapper.map(gmsMlModel)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLModels", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlModel", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "mlModel", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java index ff51bab6c114e..ccecb0ae6406f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java @@ -1,44 +1,47 @@ package com.linkedin.datahub.graphql.types.mlmodel; -import java.net.URISyntaxException; - import com.linkedin.common.urn.MLFeatureUrn; import com.linkedin.common.urn.MLModelUrn; import com.linkedin.common.urn.Urn; +import java.net.URISyntaxException; public class MLModelUtils { - private MLModelUtils() { } + private MLModelUtils() {} - static MLModelUrn getMLModelUrn(String modelUrn) { - try { - return MLModelUrn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn)); - } + static MLModelUrn getMLModelUrn(String modelUrn) { + try { + return MLModelUrn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn)); } - - static Urn getMLModelGroupUrn(String modelUrn) { - try { - return Urn.createFromString(modelUrn); - } catch (URISyntaxException 
uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getMLModelGroupUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); } - - static MLFeatureUrn getMLFeatureUrn(String modelUrn) { - try { - return MLFeatureUrn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); - } + } + + static MLFeatureUrn getMLFeatureUrn(String modelUrn) { + try { + return MLFeatureUrn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); } - - static Urn getUrn(String modelUrn) { - try { - return Urn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java index a6963e6b20abd..10cfe181dd292 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,8 +20,8 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -31,78 +33,95 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class MLPrimaryKeyType implements SearchableEntityType<MLPrimaryKey, String> { - private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); - private final EntityClient _entityClient; + private static final Set<String> FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; - public MLPrimaryKeyType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public MLPrimaryKeyType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.MLPRIMARY_KEY; - } + @Override + public EntityType type() { + return EntityType.MLPRIMARY_KEY; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + 
@Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<MLPrimaryKey> objectClass() { - return MLPrimaryKey.class; - } + @Override + public Class<MLPrimaryKey> objectClass() { + return MLPrimaryKey.class; + } - @Override - public List<DataFetcherResult<MLPrimaryKey>> batchLoad(final List<String> urns, @Nonnull final QueryContext context) - throws Exception { - final List<Urn> mlPrimaryKeyUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List<DataFetcherResult<MLPrimaryKey>> batchLoad( + final List<String> urns, @Nonnull final QueryContext context) throws Exception { + final List<Urn> mlPrimaryKeyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> mlPrimaryKeyMap = _entityClient.batchGetV2(ML_PRIMARY_KEY_ENTITY_NAME, - new HashSet<>(mlPrimaryKeyUrns), null, context.getAuthentication()); + try { + final Map<Urn, EntityResponse> mlPrimaryKeyMap = + _entityClient.batchGetV2( + ML_PRIMARY_KEY_ENTITY_NAME, + new HashSet<>(mlPrimaryKeyUrns), + null, + context.getAuthentication()); - final List<EntityResponse> gmsResults = mlPrimaryKeyUrns.stream() - .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) - .collect(Collectors.toList()); + final List<EntityResponse> gmsResults = + mlPrimaryKeyUrns.stream() + .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) + .collect(Collectors.toList()); - return gmsResults.stream() - .map(gmsMlPrimaryKey -> gmsMlPrimaryKey == null ? null - : DataFetcherResult.<MLPrimaryKey>newResult() - .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); - } + return gmsResults.stream() + .map( + gmsMlPrimaryKey -> + gmsMlPrimaryKey == null + ? 
null + : DataFetcherResult.<MLPrimaryKey>newResult() + .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlPrimaryKey", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlPrimaryKey", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlPrimaryKey", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + "mlPrimaryKey", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java index c82909d49acbf..7db1216e1390d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java @@ -2,22 +2,21 @@ import com.linkedin.datahub.graphql.generated.BaseData; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class BaseDataMapper implements ModelMapper<com.linkedin.ml.metadata.BaseData, BaseData> { - public static final BaseDataMapper INSTANCE = new BaseDataMapper(); + public static final BaseDataMapper INSTANCE = new BaseDataMapper(); - public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { - return INSTANCE.apply(input); - } + public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { + return INSTANCE.apply(input); + } - @Override - public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { - final BaseData result = new BaseData(); - result.setDataset(input.getDataset().toString()); - result.setMotivation(input.getMotivation()); - result.setPreProcessing(input.getPreProcessing()); - return result; - } + 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java
index c82909d49acbf..7db1216e1390d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java
@@ -2,22 +2,21 @@
 import com.linkedin.datahub.graphql.generated.BaseData;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
 public class BaseDataMapper implements ModelMapper<com.linkedin.ml.metadata.BaseData, BaseData> {
-    public static final BaseDataMapper INSTANCE = new BaseDataMapper();
+  public static final BaseDataMapper INSTANCE = new BaseDataMapper();
 
-    public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) {
-        return INSTANCE.apply(input);
-    }
+  public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) {
-        final BaseData result = new BaseData();
-        result.setDataset(input.getDataset().toString());
-        result.setMotivation(input.getMotivation());
-        result.setPreProcessing(input.getPreProcessing());
-        return result;
-    }
+  @Override
+  public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) {
+    final BaseData result = new BaseData();
+    result.setDataset(input.getDataset().toString());
+    result.setMotivation(input.getMotivation());
+    result.setPreProcessing(input.getPreProcessing());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java
index c19cb7bae2aff..108717f325f68 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java
@@ -2,29 +2,34 @@
 import com.linkedin.datahub.graphql.generated.CaveatsAndRecommendations;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class CaveatsAndRecommendationsMapper implements ModelMapper<com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> {
+public class CaveatsAndRecommendationsMapper
+    implements ModelMapper<
+        com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> {
 
-    public static final CaveatsAndRecommendationsMapper INSTANCE = new CaveatsAndRecommendationsMapper();
+  public static final CaveatsAndRecommendationsMapper INSTANCE =
+      new CaveatsAndRecommendationsMapper();
 
-    public static CaveatsAndRecommendations map(@NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
-        return INSTANCE.apply(caveatsAndRecommendations);
-    }
+  public static CaveatsAndRecommendations map(
+      @NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
+    return INSTANCE.apply(caveatsAndRecommendations);
+  }
 
-    @Override
-    public CaveatsAndRecommendations apply(com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
-        final CaveatsAndRecommendations result = new CaveatsAndRecommendations();
-        if (caveatsAndRecommendations.getCaveats() != null) {
-            result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats()));
-        }
-        if (caveatsAndRecommendations.getRecommendations() != null) {
-            result.setRecommendations(caveatsAndRecommendations.getRecommendations());
-        }
-        if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) {
-            result.setIdealDatasetCharacteristics(caveatsAndRecommendations.getIdealDatasetCharacteristics());
-        }
-        return result;
+  @Override
+  public CaveatsAndRecommendations apply(
+      com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
+    final CaveatsAndRecommendations result = new CaveatsAndRecommendations();
+    if (caveatsAndRecommendations.getCaveats() != null) {
+      result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats()));
+    }
+    if (caveatsAndRecommendations.getRecommendations() != null) {
+      result.setRecommendations(caveatsAndRecommendations.getRecommendations());
+    }
+    if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) {
+      result.setIdealDatasetCharacteristics(
+          caveatsAndRecommendations.getIdealDatasetCharacteristics());
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java
index 22617a8bc03e7..2226197e673f5 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java
@@ -2,24 +2,24 @@
 import com.linkedin.datahub.graphql.generated.CaveatDetails;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class CaveatsDetailsMapper implements ModelMapper<com.linkedin.ml.metadata.CaveatDetails, CaveatDetails> {
+public class CaveatsDetailsMapper
+    implements ModelMapper<com.linkedin.ml.metadata.CaveatDetails, CaveatDetails> {
 
-    public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper();
+  public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper();
 
-    public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
-        return INSTANCE.apply(input);
-    }
+  public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
-        final CaveatDetails result = new CaveatDetails();
+  @Override
+  public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
+    final CaveatDetails result = new CaveatDetails();
 
-        result.setCaveatDescription(input.getCaveatDescription());
-        result.setGroupsNotRepresented(input.getGroupsNotRepresented());
-        result.setNeedsFurtherTesting(input.isNeedsFurtherTesting());
-        return result;
-    }
+    result.setCaveatDescription(input.getCaveatDescription());
+    result.setGroupsNotRepresented(input.getGroupsNotRepresented());
+    result.setNeedsFurtherTesting(input.isNeedsFurtherTesting());
+    return result;
+  }
 }
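For readers skimming the sweep: every file in this stretch repeats the same stateless-singleton ModelMapper shape that the formatter is rewrapping. A minimal sketch of the pattern, with the interface shape assumed from usage and all names hypothetical:

// Interface shape assumed from usage; all names here are hypothetical.
interface ModelMapperSketch<I, O> {
  O apply(I input);
}

final class ExampleMapper implements ModelMapperSketch<String, Integer> {
  // Stateless, so one shared instance plus a static map() convenience.
  static final ExampleMapper INSTANCE = new ExampleMapper();

  static Integer map(String input) {
    return INSTANCE.apply(input);
  }

  @Override
  public Integer apply(String input) {
    return input.length();
  }
}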
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java
index 1d967619d43cb..8959e59265e14 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java
@@ -2,25 +2,27 @@
 import com.linkedin.datahub.graphql.generated.EthicalConsiderations;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class EthicalConsiderationsMapper implements ModelMapper<com.linkedin.ml.metadata.EthicalConsiderations, EthicalConsiderations> {
+public class EthicalConsiderationsMapper
+    implements ModelMapper<com.linkedin.ml.metadata.EthicalConsiderations, EthicalConsiderations> {
 
-    public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper();
+  public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper();
 
-    public static EthicalConsiderations map(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
-        return INSTANCE.apply(ethicalConsiderations);
-    }
+  public static EthicalConsiderations map(
+      @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
+    return INSTANCE.apply(ethicalConsiderations);
+  }
 
-    @Override
-    public EthicalConsiderations apply(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
-        final EthicalConsiderations result = new EthicalConsiderations();
-        result.setData(ethicalConsiderations.getData());
-        result.setHumanLife(ethicalConsiderations.getHumanLife());
-        result.setMitigations(ethicalConsiderations.getMitigations());
-        result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms());
-        result.setUseCases(ethicalConsiderations.getUseCases());
-        return result;
-    }
+  @Override
+  public EthicalConsiderations apply(
+      @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
+    final EthicalConsiderations result = new EthicalConsiderations();
+    result.setData(ethicalConsiderations.getData());
+    result.setHumanLife(ethicalConsiderations.getHumanLife());
+    result.setMitigations(ethicalConsiderations.getMitigations());
+    result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms());
+    result.setUseCases(ethicalConsiderations.getUseCases());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java
index 73aa8db362a54..212db94081371 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java
@@ -3,26 +3,26 @@
 import com.linkedin.datahub.graphql.generated.HyperParameterMap;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import com.linkedin.ml.metadata.HyperParameterValueTypeMap;
-
 import lombok.NonNull;
 
-public class HyperParameterMapMapper implements ModelMapper<HyperParameterValueTypeMap, HyperParameterMap> {
-
-    public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper();
+public class HyperParameterMapMapper
+    implements ModelMapper<HyperParameterValueTypeMap, HyperParameterMap> {
 
-    public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) {
-        return INSTANCE.apply(input);
-    }
+  public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper();
 
-    @Override
-    public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) {
-        final HyperParameterMap result = new HyperParameterMap();
+  public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) {
+    return INSTANCE.apply(input);
+  }
 
-        for (String key: input.keySet()) {
-            result.setKey(key);
-            result.setValue(HyperParameterValueTypeMapper.map(input.get(key)));
-        }
+  @Override
+  public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) {
+    final HyperParameterMap result = new HyperParameterMap();
 
-        return result;
+    for (String key : input.keySet()) {
+      result.setKey(key);
+      result.setValue(HyperParameterValueTypeMapper.map(input.get(key)));
     }
+
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
index 6509b0e6cfa84..f60f34dd7a085 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
@@ -6,34 +6,37 @@
 import com.linkedin.datahub.graphql.generated.IntBox;
 import com.linkedin.datahub.graphql.generated.StringBox;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class HyperParameterValueTypeMapper implements ModelMapper<com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> {
+public class HyperParameterValueTypeMapper
+    implements ModelMapper<
+        com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> {
 
-    public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper();
+  public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper();
 
-    public static HyperParameterValueType map(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
-        return INSTANCE.apply(input);
-    }
+  public static HyperParameterValueType map(
+      @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public HyperParameterValueType apply(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
-        HyperParameterValueType result = null;
+  @Override
+  public HyperParameterValueType apply(
+      @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
+    HyperParameterValueType result = null;
 
-        if (input.isString()) {
-            result = new StringBox(input.getString());
-        } else if (input.isBoolean()) {
-            result = new BooleanBox(input.getBoolean());
-        } else if (input.isInt()) {
-            result = new IntBox(input.getInt());
-        } else if (input.isDouble()) {
-            result = new FloatBox(input.getDouble());
-        } else if (input.isFloat()) {
-            result = new FloatBox(new Double(input.getFloat()));
-        } else {
-            throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString());
-        }
-        return result;
+    if (input.isString()) {
+      result = new StringBox(input.getString());
+    } else if (input.isBoolean()) {
+      result = new BooleanBox(input.getBoolean());
+    } else if (input.isInt()) {
+      result = new IntBox(input.getInt());
+    } else if (input.isDouble()) {
+      result = new FloatBox(input.getDouble());
+    } else if (input.isFloat()) {
+      result = new FloatBox(new Double(input.getFloat()));
+    } else {
+      throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString());
     }
+    return result;
+  }
 }
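The union handling above probes each is&lt;Member&gt;() accessor in order and fails fast on anything unrecognized. A self-contained sketch of the idiom, with a hypothetical two-member union standing in for HyperParameterValueType. (Aside: the retained new Double(float) call is a deprecated boxing form; Double.valueOf(...) would be the modern spelling, which the formatting-only patch deliberately leaves alone.)

final class UnionSketch {
  // Hypothetical two-member union standing in for HyperParameterValueType.
  static final class Union {
    final String s; // non-null when the union holds a string
    final Integer i; // non-null when the union holds an int

    Union(String s, Integer i) {
      this.s = s;
      this.i = i;
    }

    boolean isString() {
      return s != null;
    }

    boolean isInt() {
      return i != null;
    }
  }

  // Probe members in order; the first hit wins, anything else fails fast.
  static Object unbox(Union u) {
    if (u.isString()) {
      return u.s;
    } else if (u.isInt()) {
      return u.i;
    }
    throw new RuntimeException("Unhandled union member: " + u);
  }
}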
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java
index 47598bc2a3e4c..9f724ae71a55e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java
@@ -1,29 +1,31 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
-import java.util.stream.Collectors;
-
 import com.linkedin.datahub.graphql.generated.IntendedUse;
 import com.linkedin.datahub.graphql.generated.IntendedUserType;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
+import java.util.stream.Collectors;
 import lombok.NonNull;
 
-public class IntendedUseMapper implements ModelMapper<com.linkedin.ml.metadata.IntendedUse, IntendedUse> {
+public class IntendedUseMapper
+    implements ModelMapper<com.linkedin.ml.metadata.IntendedUse, IntendedUse> {
 
-    public static final IntendedUseMapper INSTANCE = new IntendedUseMapper();
+  public static final IntendedUseMapper INSTANCE = new IntendedUseMapper();
 
-    public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
-        return INSTANCE.apply(intendedUse);
-    }
+  public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
+    return INSTANCE.apply(intendedUse);
+  }
 
-    @Override
-    public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
-        final IntendedUse result = new IntendedUse();
-        result.setOutOfScopeUses(intendedUse.getOutOfScopeUses());
-        result.setPrimaryUses(intendedUse.getPrimaryUses());
-        if (intendedUse.getPrimaryUsers() != null) {
-            result.setPrimaryUsers(intendedUse.getPrimaryUsers().stream().map(v -> IntendedUserType.valueOf(v.toString())).collect(Collectors.toList()));
-        }
-        return result;
+  @Override
+  public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
+    final IntendedUse result = new IntendedUse();
+    result.setOutOfScopeUses(intendedUse.getOutOfScopeUses());
+    result.setPrimaryUses(intendedUse.getPrimaryUses());
+    if (intendedUse.getPrimaryUsers() != null) {
+      result.setPrimaryUsers(
+          intendedUse.getPrimaryUsers().stream()
+              .map(v -> IntendedUserType.valueOf(v.toString()))
+              .collect(Collectors.toList()));
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java
index 010ae477251f3..58e59edfa2e38 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java
@@ -1,10 +1,10 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.BrowsePathsV2;
 import com.linkedin.common.DataPlatformInstance;
 import com.linkedin.common.Deprecation;
-
-
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.GlossaryTerms;
 import com.linkedin.common.InstitutionalMemory;
@@ -37,91 +37,105 @@
 import com.linkedin.ml.metadata.MLFeatureProperties;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
- *
- */
+/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */
 public class MLFeatureMapper implements ModelMapper<EntityResponse, MLFeature> {
 
-    public static final MLFeatureMapper INSTANCE = new MLFeatureMapper();
+  public static final MLFeatureMapper INSTANCE = new MLFeatureMapper();
 
-    public static MLFeature map(@Nonnull final EntityResponse entityResponse) {
-        return INSTANCE.apply(entityResponse);
-    }
+  public static MLFeature map(@Nonnull final EntityResponse entityResponse) {
+    return INSTANCE.apply(entityResponse);
+  }
 
-    @Override
-    public MLFeature apply(@Nonnull final EntityResponse entityResponse) {
-        final MLFeature result = new MLFeature();
-        Urn entityUrn = entityResponse.getUrn();
+  @Override
+  public MLFeature apply(@Nonnull final EntityResponse entityResponse) {
+    final MLFeature result = new MLFeature();
+    Urn entityUrn = entityResponse.getUrn();
 
-        result.setUrn(entityResponse.getUrn().toString());
-        result.setType(EntityType.MLFEATURE);
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
-        result.setLastIngested(lastIngested);
+    result.setUrn(entityResponse.getUrn().toString());
+    result.setType(EntityType.MLFEATURE);
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
+    result.setLastIngested(lastIngested);
 
-        MappingHelper<MLFeature> mappingHelper = new MappingHelper<>(aspectMap, result);
-        mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey);
-        mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeature, dataMap) ->
+    MappingHelper<MLFeature> mappingHelper = new MappingHelper<>(aspectMap, result);
+    mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey);
+    mappingHelper.mapToResult(
+        OWNERSHIP_ASPECT_NAME,
+        (mlFeature, dataMap) ->
             mlFeature.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)));
-        mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties);
-        mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) ->
-            mlFeature.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)));
-        mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeature, dataMap) ->
-            mlFeature.setStatus(StatusMapper.map(new Status(dataMap))));
-        mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeature, dataMap) ->
+    mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties);
+    mappingHelper.mapToResult(
+        INSTITUTIONAL_MEMORY_ASPECT_NAME,
+        (mlFeature, dataMap) ->
+            mlFeature.setInstitutionalMemory(
+                InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)));
+    mappingHelper.mapToResult(
+        STATUS_ASPECT_NAME,
+        (mlFeature, dataMap) -> mlFeature.setStatus(StatusMapper.map(new Status(dataMap))));
+    mappingHelper.mapToResult(
+        DEPRECATION_ASPECT_NAME,
+        (mlFeature, dataMap) ->
             mlFeature.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))));
-        mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn));
-        mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) ->
-            entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn)));
-        mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
-        mappingHelper.mapToResult(ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
-        mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) ->
-            dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
-        mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) ->
+    mappingHelper.mapToResult(
+        GLOBAL_TAGS_ASPECT_NAME,
+        (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn));
+    mappingHelper.mapToResult(
+        GLOSSARY_TERMS_ASPECT_NAME,
+        (entity, dataMap) ->
+            entity.setGlossaryTerms(
+                GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn)));
+    mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
+    mappingHelper.mapToResult(
+        ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
+    mappingHelper.mapToResult(
+        DATA_PLATFORM_INSTANCE_ASPECT_NAME,
+        (dataset, dataMap) ->
+            dataset.setDataPlatformInstance(
+                DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
+    mappingHelper.mapToResult(
+        BROWSE_PATHS_V2_ASPECT_NAME,
+        (entity, dataMap) ->
            entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap))));
-        return mappingHelper.getResult();
+    return mappingHelper.getResult();
+  }
+
+  private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) {
+    MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap);
+    mlFeature.setName(mlFeatureKey.getName());
+    mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace());
+  }
+
+  private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) {
+    MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap);
+    mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties));
+    mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties));
+    mlFeature.setDescription(featureProperties.getDescription());
+    if (featureProperties.getDataType() != null) {
+      mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString()));
    }
-
-    private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) {
-        MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap);
-        mlFeature.setName(mlFeatureKey.getName());
-        mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace());
-    }
-
-    private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) {
-        MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap);
-        mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties));
-        mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties));
-        mlFeature.setDescription(featureProperties.getDescription());
-        if (featureProperties.getDataType() != null) {
-            mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString()));
-        }
-    }
-
-    private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) {
-        GlobalTags globalTags = new GlobalTags(dataMap);
-        com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn);
-        entity.setTags(graphQlGlobalTags);
-    }
-
-    private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) {
-        final Domains domains = new Domains(dataMap);
-        // Currently we only take the first domain if it exists.
-        entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn()));
-    }
-
-    private void mapEditableProperties(MLFeature entity, DataMap dataMap) {
-        EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap);
-        MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties();
-        if (input.hasDescription()) {
-            editableProperties.setDescription(input.getDescription());
-        }
-        entity.setEditableProperties(editableProperties);
+  }
+
+  private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) {
+    GlobalTags globalTags = new GlobalTags(dataMap);
+    com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags =
+        GlobalTagsMapper.map(globalTags, entityUrn);
+    entity.setTags(graphQlGlobalTags);
+  }
+
+  private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) {
+    final Domains domains = new Domains(dataMap);
+    // Currently we only take the first domain if it exists.
+    entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn()));
+  }
+
+  private void mapEditableProperties(MLFeature entity, DataMap dataMap) {
+    EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap);
+    MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties();
+    if (input.hasDescription()) {
+      editableProperties.setDescription(input.getDescription());
    }
+    entity.setEditableProperties(editableProperties);
+  }
 }
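The MappingHelper calls above appear to dispatch on aspect name and run a setter only when that aspect is present in the envelope, so sparsely-ingested entities still map cleanly. A rough sketch of that assumed behavior, with hypothetical names (not the real class):

import java.util.Map;
import java.util.function.BiConsumer;

// Rough sketch of the assumed MappingHelper behavior; not the real class.
final class MappingHelperSketch<T> {
  private final Map<String, Object> aspects; // aspect name -> raw aspect data
  private final T result;

  MappingHelperSketch(Map<String, Object> aspects, T result) {
    this.aspects = aspects;
    this.result = result;
  }

  // Runs the mapper only when the named aspect was actually ingested.
  void mapToResult(String aspectName, BiConsumer<T, Object> mapper) {
    Object data = aspects.get(aspectName);
    if (data != null) {
      mapper.accept(result, data);
    }
  }

  T getResult() {
    return result;
  }
}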
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java
index 9d647a38d2153..7bcefbc305192 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java
@@ -1,44 +1,46 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
+import com.linkedin.datahub.graphql.generated.Dataset;
 import com.linkedin.datahub.graphql.generated.MLFeatureDataType;
 import com.linkedin.datahub.graphql.generated.MLFeatureProperties;
-import com.linkedin.datahub.graphql.generated.Dataset;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-import lombok.NonNull;
-
 import java.util.stream.Collectors;
+import lombok.NonNull;
 
-public class MLFeaturePropertiesMapper implements ModelMapper<com.linkedin.ml.metadata.MLFeatureProperties, MLFeatureProperties> {
+public class MLFeaturePropertiesMapper
+    implements ModelMapper<com.linkedin.ml.metadata.MLFeatureProperties, MLFeatureProperties> {
 
-    public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper();
+  public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper();
 
-    public static MLFeatureProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) {
-        return INSTANCE.apply(mlFeatureProperties);
-    }
+  public static MLFeatureProperties map(
+      @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) {
+    return INSTANCE.apply(mlFeatureProperties);
+  }
 
-    @Override
-    public MLFeatureProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) {
-        final MLFeatureProperties result = new MLFeatureProperties();
+  @Override
+  public MLFeatureProperties apply(
+      @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) {
+    final MLFeatureProperties result = new MLFeatureProperties();
 
-        result.setDescription(mlFeatureProperties.getDescription());
-        if (mlFeatureProperties.getDataType() != null) {
-            result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString()));
-        }
-        if (mlFeatureProperties.getVersion() != null) {
-            result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion()));
-        }
-        if (mlFeatureProperties.getSources() != null) {
-            result.setSources(mlFeatureProperties
-                .getSources()
-                .stream()
-                .map(urn -> {
+    result.setDescription(mlFeatureProperties.getDescription());
+    if (mlFeatureProperties.getDataType() != null) {
+      result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString()));
+    }
+    if (mlFeatureProperties.getVersion() != null) {
+      result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion()));
+    }
+    if (mlFeatureProperties.getSources() != null) {
+      result.setSources(
+          mlFeatureProperties.getSources().stream()
+              .map(
+                  urn -> {
                     final Dataset dataset = new Dataset();
                     dataset.setUrn(urn.toString());
                     return dataset;
-                })
-                .collect(Collectors.toList()));
-        }
-
-        return result;
+                  })
+              .collect(Collectors.toList()));
    }
+
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java
index 3ba9a76c4bdde..d074e14f95c82 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java
@@ -1,9 +1,10 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.BrowsePathsV2;
 import com.linkedin.common.DataPlatformInstance;
 import com.linkedin.common.Deprecation;
-
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.GlossaryTerms;
 import com.linkedin.common.InstitutionalMemory;
@@ -36,90 +37,111 @@
 import com.linkedin.ml.metadata.MLFeatureTableProperties;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
- *
- */
+/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */
 public class MLFeatureTableMapper implements ModelMapper<EntityResponse, MLFeatureTable> {
 
-    public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper();
+  public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper();
 
-    public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) {
-        return INSTANCE.apply(entityResponse);
-    }
+  public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) {
+    return INSTANCE.apply(entityResponse);
+  }
 
-    @Override
-    public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) {
-        final MLFeatureTable result = new MLFeatureTable();
-        Urn entityUrn = entityResponse.getUrn();
+  @Override
+  public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) {
+    final MLFeatureTable result = new MLFeatureTable();
+    Urn entityUrn = entityResponse.getUrn();
 
-        result.setUrn(entityResponse.getUrn().toString());
-        result.setType(EntityType.MLFEATURE_TABLE);
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
-        result.setLastIngested(lastIngested);
+    result.setUrn(entityResponse.getUrn().toString());
+    result.setType(EntityType.MLFEATURE_TABLE);
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
+    result.setLastIngested(lastIngested);
 
-        MappingHelper<MLFeatureTable> mappingHelper = new MappingHelper<>(aspectMap, result);
-        mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) ->
+    MappingHelper<MLFeatureTable> mappingHelper = new MappingHelper<>(aspectMap, result);
+    mappingHelper.mapToResult(
+        OWNERSHIP_ASPECT_NAME,
+        (mlFeatureTable, dataMap) ->
            mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)));
-        mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey);
-        mappingHelper.mapToResult(ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn));
-        mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) ->
-            mlFeatureTable.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)));
-        mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) ->
+    mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey);
+    mappingHelper.mapToResult(
+        ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME,
+        (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn));
+    mappingHelper.mapToResult(
+        INSTITUTIONAL_MEMORY_ASPECT_NAME,
+        (mlFeatureTable, dataMap) ->
+            mlFeatureTable.setInstitutionalMemory(
+                InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)));
+    mappingHelper.mapToResult(
+        STATUS_ASPECT_NAME,
+        (mlFeatureTable, dataMap) ->
            mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap))));
-        mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) ->
+    mappingHelper.mapToResult(
+        DEPRECATION_ASPECT_NAME,
+        (mlFeatureTable, dataMap) ->
            mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))));
-        mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn));
-        mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) ->
-            entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn)));
-        mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
-        mappingHelper.mapToResult(ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
-        mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) ->
-            dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
-        mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) ->
+    mappingHelper.mapToResult(
+        GLOBAL_TAGS_ASPECT_NAME,
+        (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn));
+    mappingHelper.mapToResult(
+        GLOSSARY_TERMS_ASPECT_NAME,
+        (entity, dataMap) ->
+            entity.setGlossaryTerms(
+                GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn)));
+    mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
+    mappingHelper.mapToResult(
+        ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
+    mappingHelper.mapToResult(
+        DATA_PLATFORM_INSTANCE_ASPECT_NAME,
+        (dataset, dataMap) ->
+            dataset.setDataPlatformInstance(
+                DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
+    mappingHelper.mapToResult(
+        BROWSE_PATHS_V2_ASPECT_NAME,
+        (entity, dataMap) ->
            entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap))));
-        return mappingHelper.getResult();
-    }
-
-    private void mapMLFeatureTableKey(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) {
-        MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap);
-        mlFeatureTable.setName(mlFeatureTableKey.getName());
-        DataPlatform partialPlatform = new DataPlatform();
-        partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString());
-        mlFeatureTable.setPlatform(partialPlatform);
-    }
-
-    private void mapMLFeatureTableProperties(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) {
-        MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap);
-        mlFeatureTable.setFeatureTableProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn));
-        mlFeatureTable.setProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn));
-        mlFeatureTable.setDescription(featureTableProperties.getDescription());
-    }
-
-    private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) {
-        GlobalTags globalTags = new GlobalTags(dataMap);
-        com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn);
-        entity.setTags(graphQlGlobalTags);
-    }
-
-    private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) {
-        final Domains domains = new Domains(dataMap);
-        // Currently we only take the first domain if it exists.
-        entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn()));
-    }
-
-    private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) {
-        EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap);
-        MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties();
-        if (input.hasDescription()) {
-            editableProperties.setDescription(input.getDescription());
-        }
-        entity.setEditableProperties(editableProperties);
+    return mappingHelper.getResult();
+  }
+
+  private void mapMLFeatureTableKey(
+      @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) {
+    MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap);
+    mlFeatureTable.setName(mlFeatureTableKey.getName());
+    DataPlatform partialPlatform = new DataPlatform();
+    partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString());
+    mlFeatureTable.setPlatform(partialPlatform);
+  }
+
+  private void mapMLFeatureTableProperties(
+      @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) {
+    MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap);
+    mlFeatureTable.setFeatureTableProperties(
+        MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn));
+    mlFeatureTable.setProperties(
+        MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn));
+    mlFeatureTable.setDescription(featureTableProperties.getDescription());
+  }
+
+  private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) {
+    GlobalTags globalTags = new GlobalTags(dataMap);
+    com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags =
+        GlobalTagsMapper.map(globalTags, entityUrn);
+    entity.setTags(graphQlGlobalTags);
+  }
+
+  private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) {
+    final Domains domains = new Domains(dataMap);
+    // Currently we only take the first domain if it exists.
+    entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn()));
+  }
+
+  private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) {
+    EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap);
+    MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties();
+    if (input.hasDescription()) {
+      editableProperties.setDescription(input.getDescription());
    }
+    entity.setEditableProperties(editableProperties);
+  }
 }
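Note the partial-entity convention visible in mapMLFeatureTableKey above: the mapper emits a DataPlatform stub carrying only its URN, leaving hydration of the remaining fields to downstream GraphQL resolvers. A minimal sketch, with hypothetical stand-in types:

// Types are hypothetical stand-ins for the generated GraphQL classes.
final class PartialEntitySketch {
  static final class DataPlatformStub {
    String urn; // the only field the mapper fills in; resolvers hydrate the rest
  }

  static DataPlatformStub stubFor(String platformUrn) {
    DataPlatformStub platform = new DataPlatformStub();
    platform.urn = platformUrn;
    return platform;
  }
}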
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java
index 13e3c79599725..fff504d43c81a 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java
@@ -5,46 +5,55 @@
 import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties;
 import com.linkedin.datahub.graphql.generated.MLPrimaryKey;
 import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
-import lombok.NonNull;
-
 import java.util.stream.Collectors;
+import lombok.NonNull;
 
 public class MLFeatureTablePropertiesMapper {
 
-    public static final MLFeatureTablePropertiesMapper INSTANCE = new MLFeatureTablePropertiesMapper();
-
-    public static MLFeatureTableProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) {
-        return INSTANCE.apply(mlFeatureTableProperties, entityUrn);
+  public static final MLFeatureTablePropertiesMapper INSTANCE =
+      new MLFeatureTablePropertiesMapper();
+
+  public static MLFeatureTableProperties map(
+      @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties,
+      Urn entityUrn) {
+    return INSTANCE.apply(mlFeatureTableProperties, entityUrn);
+  }
+
+  public MLFeatureTableProperties apply(
+      @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties,
+      Urn entityUrn) {
+    final MLFeatureTableProperties result = new MLFeatureTableProperties();
+
+    result.setDescription(mlFeatureTableProperties.getDescription());
+    if (mlFeatureTableProperties.getMlFeatures() != null) {
+      result.setMlFeatures(
+          mlFeatureTableProperties.getMlFeatures().stream()
+              .map(
+                  urn -> {
+                    final MLFeature mlFeature = new MLFeature();
+                    mlFeature.setUrn(urn.toString());
+                    return mlFeature;
+                  })
+              .collect(Collectors.toList()));
    }
 
-    public MLFeatureTableProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) {
-        final MLFeatureTableProperties result = new MLFeatureTableProperties();
-
-        result.setDescription(mlFeatureTableProperties.getDescription());
-        if (mlFeatureTableProperties.getMlFeatures() != null) {
-            result.setMlFeatures(mlFeatureTableProperties.getMlFeatures().stream().map(urn -> {
-                final MLFeature mlFeature = new MLFeature();
-                mlFeature.setUrn(urn.toString());
-                return mlFeature;
-            }).collect(Collectors.toList()));
-        }
-
-        if (mlFeatureTableProperties.getMlPrimaryKeys() != null) {
-            result.setMlPrimaryKeys(mlFeatureTableProperties
-                .getMlPrimaryKeys()
-                .stream()
-                .map(urn -> {
+    if (mlFeatureTableProperties.getMlPrimaryKeys() != null) {
+      result.setMlPrimaryKeys(
+          mlFeatureTableProperties.getMlPrimaryKeys().stream()
+              .map(
+                  urn -> {
                     final MLPrimaryKey mlPrimaryKey = new MLPrimaryKey();
                     mlPrimaryKey.setUrn(urn.toString());
                     return mlPrimaryKey;
-                })
-                .collect(Collectors.toList()));
-        }
-
-        if (mlFeatureTableProperties.hasCustomProperties()) {
-            result.setCustomProperties(CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn));
-        }
+                  })
+              .collect(Collectors.toList()));
+    }
 
-        return result;
+    if (mlFeatureTableProperties.hasCustomProperties()) {
+      result.setCustomProperties(
+          CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn));
    }
+
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java
index 5cc242d0b19f2..bb3c85e411e71 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java
@@ -2,25 +2,25 @@
 import com.linkedin.datahub.graphql.generated.MLHyperParam;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class MLHyperParamMapper implements ModelMapper<com.linkedin.ml.metadata.MLHyperParam, MLHyperParam> {
+public class MLHyperParamMapper
+    implements ModelMapper<com.linkedin.ml.metadata.MLHyperParam, MLHyperParam> {
 
-    public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper();
+  public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper();
 
-    public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) {
-        return INSTANCE.apply(input);
-    }
+  public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) {
-        final MLHyperParam result = new MLHyperParam();
+  @Override
+  public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) {
+    final MLHyperParam result = new MLHyperParam();
 
-        result.setDescription(input.getDescription());
-        result.setValue(input.getValue());
-        result.setCreatedAt(input.getCreatedAt());
-        result.setName(input.getName());
-        return result;
-    }
+    result.setDescription(input.getDescription());
+    result.setValue(input.getValue());
+    result.setCreatedAt(input.getCreatedAt());
+    result.setName(input.getName());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java
index 2545bd5f8a848..765a44d218567 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java
@@ -4,22 +4,21 @@
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import lombok.NonNull;
 
-
 public class MLMetricMapper implements ModelMapper<com.linkedin.ml.metadata.MLMetric, MLMetric> {
 
-    public static final MLMetricMapper INSTANCE = new MLMetricMapper();
+  public static final MLMetricMapper INSTANCE = new MLMetricMapper();
 
-    public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) {
-        return INSTANCE.apply(metric);
-    }
+  public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) {
+    return INSTANCE.apply(metric);
+  }
 
-    @Override
-    public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) {
-        final MLMetric result = new MLMetric();
-        result.setDescription(metric.getDescription());
-        result.setValue(metric.getValue());
-        result.setCreatedAt(metric.getCreatedAt());
-        result.setName(metric.getName());
-        return result;
-    }
+  @Override
+  public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) {
+    final MLMetric result = new MLMetric();
+    result.setDescription(metric.getDescription());
+    result.setValue(metric.getValue());
+    result.setCreatedAt(metric.getCreatedAt());
+    result.setName(metric.getName());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java
index 0d32f7275e5fe..e86072ce3848e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java
@@ -1,29 +1,36 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
-import java.util.stream.Collectors;
-
 import com.linkedin.datahub.graphql.generated.MLModelFactorPrompts;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
+import java.util.stream.Collectors;
 import lombok.NonNull;
 
-public class MLModelFactorPromptsMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelFactorPrompts, MLModelFactorPrompts> {
+public class MLModelFactorPromptsMapper
+    implements ModelMapper<com.linkedin.ml.metadata.MLModelFactorPrompts, MLModelFactorPrompts> {
 
-    public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper();
+  public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper();
 
-    public static MLModelFactorPrompts map(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) {
-        return INSTANCE.apply(input);
-    }
+  public static MLModelFactorPrompts map(
+      @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public MLModelFactorPrompts apply(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) {
-        final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts();
-        if (input.getEvaluationFactors() != null) {
-            mlModelFactorPrompts.setEvaluationFactors(input.getEvaluationFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList()));
-        }
-        if (input.getRelevantFactors() != null) {
-            mlModelFactorPrompts.setRelevantFactors(input.getRelevantFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList()));
-        }
-        return mlModelFactorPrompts;
+  @Override
+  public MLModelFactorPrompts apply(
+      @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) {
+    final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts();
+    if (input.getEvaluationFactors() != null) {
+      mlModelFactorPrompts.setEvaluationFactors(
+          input.getEvaluationFactors().stream()
+              .map(MLModelFactorsMapper::map)
+              .collect(Collectors.toList()));
+    }
+    if (input.getRelevantFactors() != null) {
+      mlModelFactorPrompts.setRelevantFactors(
+          input.getRelevantFactors().stream()
+              .map(MLModelFactorsMapper::map)
+              .collect(Collectors.toList()));
    }
+    return mlModelFactorPrompts;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java
index aa4737dfd229c..3b212eca52801 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java
@@ -1,32 +1,33 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
-import java.util.ArrayList;
-
 import com.linkedin.datahub.graphql.generated.MLModelFactors;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
+import java.util.ArrayList;
 import lombok.NonNull;
 
-public class MLModelFactorsMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelFactors, MLModelFactors> {
+public class MLModelFactorsMapper
+    implements ModelMapper<com.linkedin.ml.metadata.MLModelFactors, MLModelFactors> {
 
-    public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper();
+  public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper();
 
-    public static MLModelFactors map(@NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) {
-        return INSTANCE.apply(modelFactors);
-    }
+  public static MLModelFactors map(
+      @NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) {
+    return INSTANCE.apply(modelFactors);
+  }
 
-    @Override
-    public MLModelFactors apply(@NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) {
-        final MLModelFactors result = new MLModelFactors();
-        if (mlModelFactors.getEnvironment() != null) {
-            result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment()));
-        }
-        if (mlModelFactors.getGroups() != null) {
-            result.setGroups(new ArrayList<>(mlModelFactors.getGroups()));
-        }
-        if (mlModelFactors.getInstrumentation() != null) {
-            result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation()));
-        }
-        return result;
+  @Override
+  public MLModelFactors apply(
+      @NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) {
+    final MLModelFactors result = new MLModelFactors();
+    if (mlModelFactors.getEnvironment() != null) {
+      result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment()));
+    }
+    if (mlModelFactors.getGroups() != null) {
+      result.setGroups(new ArrayList<>(mlModelFactors.getGroups()));
+    }
+    if (mlModelFactors.getInstrumentation() != null) {
+      result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation()));
    }
+    return result;
+  }
 }
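The null-guarded new ArrayList&lt;&gt;(...) copies above are a recurring defensive idiom in these mappers: copy when the optional field is set, pass the null through otherwise. A generic form, as a sketch:

import java.util.ArrayList;
import java.util.List;

final class CopySketch {
  // Independent copy when the field is set, null passthrough when it is not.
  static <T> List<T> copyOrNull(List<T> source) {
    return source == null ? null : new ArrayList<>(source);
  }
}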
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java
index 311ee121bcaf9..cc9baaa33a660 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.BrowsePathsV2;
 import com.linkedin.common.DataPlatformInstance;
 import com.linkedin.common.Deprecation;
@@ -34,90 +36,102 @@
 import com.linkedin.ml.metadata.MLModelGroupProperties;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
- *
- */
+/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */
 public class MLModelGroupMapper implements ModelMapper<EntityResponse, MLModelGroup> {
 
-    public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper();
+  public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper();
 
-    public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) {
-        return INSTANCE.apply(entityResponse);
-    }
+  public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) {
+    return INSTANCE.apply(entityResponse);
+  }
 
-    @Override
-    public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) {
-        final MLModelGroup result = new MLModelGroup();
-        Urn entityUrn = entityResponse.getUrn();
+  @Override
+  public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) {
+    final MLModelGroup result = new MLModelGroup();
+    Urn entityUrn = entityResponse.getUrn();
 
-        result.setUrn(entityResponse.getUrn().toString());
-        result.setType(EntityType.MLMODEL_GROUP);
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
-        result.setLastIngested(lastIngested);
+    result.setUrn(entityResponse.getUrn().toString());
+    result.setType(EntityType.MLMODEL_GROUP);
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
+    result.setLastIngested(lastIngested);
 
-        MappingHelper<MLModelGroup> mappingHelper = new MappingHelper<>(aspectMap, result);
-        mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModelGroup, dataMap) ->
+    MappingHelper<MLModelGroup> mappingHelper = new MappingHelper<>(aspectMap, result);
+    mappingHelper.mapToResult(
+        OWNERSHIP_ASPECT_NAME,
+        (mlModelGroup, dataMap) ->
            mlModelGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)));
-        mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey);
-        mappingHelper.mapToResult(ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties);
-        mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModelGroup, dataMap) ->
-            mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap))));
-        mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModelGroup, dataMap) ->
+    mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey);
+    mappingHelper.mapToResult(
+        ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties);
+    mappingHelper.mapToResult(
+        STATUS_ASPECT_NAME,
+        (mlModelGroup, dataMap) -> mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap))));
+    mappingHelper.mapToResult(
+        DEPRECATION_ASPECT_NAME,
+        (mlModelGroup, dataMap) ->
            mlModelGroup.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))));
-        mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn));
-        mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) ->
-            entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn)));
-        mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
-        mappingHelper.mapToResult(ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
-        mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) ->
-            dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
-        mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModelGroup, dataMap) ->
+    mappingHelper.mapToResult(
+        GLOBAL_TAGS_ASPECT_NAME,
+        (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn));
+    mappingHelper.mapToResult(
+        GLOSSARY_TERMS_ASPECT_NAME,
+        (entity, dataMap) ->
+            entity.setGlossaryTerms(
+                GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn)));
+    mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
+    mappingHelper.mapToResult(
+        ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
+    mappingHelper.mapToResult(
+        DATA_PLATFORM_INSTANCE_ASPECT_NAME,
+        (dataset, dataMap) ->
+            dataset.setDataPlatformInstance(
+                DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
+    mappingHelper.mapToResult(
+        BROWSE_PATHS_V2_ASPECT_NAME,
+        (mlModelGroup, dataMap) ->
            mlModelGroup.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap))));
-        return mappingHelper.getResult();
-    }
-
-    private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) {
-        MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap);
-        mlModelGroup.setName(mlModelGroupKey.getName());
-        mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString()));
-        DataPlatform partialPlatform = new DataPlatform();
-        partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString());
-        mlModelGroup.setPlatform(partialPlatform);
+    return mappingHelper.getResult();
+  }
+
+  private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) {
+    MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap);
+    mlModelGroup.setName(mlModelGroupKey.getName());
+    mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString()));
+    DataPlatform partialPlatform = new DataPlatform();
+    partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString());
+    mlModelGroup.setPlatform(partialPlatform);
+  }
+
+  private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) {
+    MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap);
+    mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties));
+    if (modelGroupProperties.getDescription() != null) {
+      mlModelGroup.setDescription(modelGroupProperties.getDescription());
    }
-
-    private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) {
-        MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap);
-        mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties));
-        if (modelGroupProperties.getDescription() != null) {
-            mlModelGroup.setDescription(modelGroupProperties.getDescription());
-        }
-    }
-
-    private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) {
-        GlobalTags globalTags = new GlobalTags(dataMap);
-        com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn);
-        entity.setTags(graphQlGlobalTags);
-    }
-
-    private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) {
-        final Domains domains = new Domains(dataMap);
-        // Currently we only take the first domain if it exists.
-        entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn()));
-    }
-
-    private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) {
-        EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap);
-        MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties();
-        if (input.hasDescription()) {
-            editableProperties.setDescription(input.getDescription());
-        }
-        entity.setEditableProperties(editableProperties);
+  }
+
+  private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) {
+    GlobalTags globalTags = new GlobalTags(dataMap);
+    com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags =
+        GlobalTagsMapper.map(globalTags, entityUrn);
+    entity.setTags(graphQlGlobalTags);
+  }
+
+  private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) {
+    final Domains domains = new Domains(dataMap);
+    // Currently we only take the first domain if it exists.
+    entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn()));
+  }
+
+  private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) {
+    EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap);
+    MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties();
+    if (input.hasDescription()) {
+      editableProperties.setDescription(input.getDescription());
    }
+    entity.setEditableProperties(editableProperties);
+  }
 }
result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + @Override + public MLModelGroupProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + final MLModelGroupProperties result = new MLModelGroupProperties(); - return result; + result.setDescription(mlModelGroupProperties.getDescription()); + if (mlModelGroupProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); } + result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index 0c2eeabe5701d..827b35c282237 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.Cost; import com.linkedin.common.DataPlatformInstance; @@ -48,124 +50,165 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelMapper implements ModelMapper<EntityResponse, MLModel> { - public static final MLModelMapper INSTANCE = new MLModelMapper(); + public static final MLModelMapper INSTANCE = new MLModelMapper(); - public static MLModel map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLModel map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLModel apply(@Nonnull final EntityResponse entityResponse) { - final MLModel result = new MLModel(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLModel apply(@Nonnull final EntityResponse entityResponse) { + final MLModel result = new MLModel(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLModel> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModel, dataMap) -> + MappingHelper<MLModel> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - 
mappingHelper.mapToResult(ML_MODEL_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); - mappingHelper.mapToResult(INTENDED_USE_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); + mappingHelper.mapToResult( + INTENDED_USE_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setIntendedUse(IntendedUseMapper.map(new IntendedUse(dataMap)))); - mappingHelper.mapToResult(ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setFactorPrompts(MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); - mappingHelper.mapToResult(METRICS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); - mappingHelper.mapToResult(EVALUATION_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEvaluationData(new EvaluationData(dataMap).getEvaluationData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(TRAINING_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setTrainingData(new TrainingData(dataMap).getTrainingData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(QUANTITATIVE_ANALYSES_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setQuantitativeAnalyses(QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); - mappingHelper.mapToResult(ETHICAL_CONSIDERATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEthicalConsiderations(EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); - mappingHelper.mapToResult(CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCaveatsAndRecommendations(CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(COST_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCost(CostMapper.map(new Cost(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setFactorPrompts( + MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); + mappingHelper.mapToResult( + METRICS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); + mappingHelper.mapToResult( + EVALUATION_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEvaluationData( + new EvaluationData(dataMap) + .getEvaluationData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + TRAINING_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setTrainingData( + new TrainingData(dataMap) + .getTrainingData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + 
QUANTITATIVE_ANALYSES_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setQuantitativeAnalyses( + QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); + mappingHelper.mapToResult( + ETHICAL_CONSIDERATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEthicalConsiderations( + EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); + mappingHelper.mapToResult( + CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setCaveatsAndRecommendations( + CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + COST_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(new Cost(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { - MLModelKey mlModelKey = new MLModelKey(dataMap); - mlModel.setName(mlModelKey.getName()); - mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelKey.getPlatform().toString()); - mlModel.setPlatform(partialPlatform); + return mappingHelper.getResult(); + } + + private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { + MLModelKey mlModelKey = new MLModelKey(dataMap); + mlModel.setName(mlModelKey.getName()); + mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelKey.getPlatform().toString()); + mlModel.setPlatform(partialPlatform); + } + + private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + 
MLModelProperties modelProperties = new MLModelProperties(dataMap); + mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); + if (modelProperties.getDescription() != null) { + mlModel.setDescription(modelProperties.getDescription()); } - - private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - MLModelProperties modelProperties = new MLModelProperties(dataMap); - mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); - if (modelProperties.getDescription() != null) { - mlModel.setDescription(modelProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - mlModel.setGlobalTags(graphQlGlobalTags); - mlModel.setTags(graphQlGlobalTags); - } - - private void mapSourceCode(MLModel mlModel, DataMap dataMap) { - SourceCode sourceCode = new SourceCode(dataMap); - com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = - new com.linkedin.datahub.graphql.generated.SourceCode(); - graphQlSourceCode.setSourceCode(sourceCode.getSourceCode().stream() - .map(SourceCodeUrlMapper::map).collect(Collectors.toList())); - mlModel.setSourceCode(graphQlSourceCode); - } - - private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModel entity, DataMap dataMap) { - EditableMLModelProperties input = new EditableMLModelProperties(dataMap); - MLModelEditableProperties editableProperties = new MLModelEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + mlModel.setGlobalTags(graphQlGlobalTags); + mlModel.setTags(graphQlGlobalTags); + } + + private void mapSourceCode(MLModel mlModel, DataMap dataMap) { + SourceCode sourceCode = new SourceCode(dataMap); + com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = + new com.linkedin.datahub.graphql.generated.SourceCode(); + graphQlSourceCode.setSourceCode( + sourceCode.getSourceCode().stream() + .map(SourceCodeUrlMapper::map) + .collect(Collectors.toList())); + mlModel.setSourceCode(graphQlSourceCode); + } + + private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLModel entity, DataMap dataMap) { + EditableMLModelProperties input = new EditableMLModelProperties(dataMap); + MLModelEditableProperties editableProperties = new MLModelEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index 554c14e9a4a56..f2781f5bca5c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -1,65 +1,71 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.MLModelGroup; +import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; - -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.MLModelProperties; - import lombok.NonNull; public class MLModelPropertiesMapper { - public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); + public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); - public static MLModelProperties map(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - return INSTANCE.apply(mlModelProperties, entityUrn); - } + public static MLModelProperties map( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + return INSTANCE.apply(mlModelProperties, entityUrn); + } - public MLModelProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - final MLModelProperties result = new MLModelProperties(); + public MLModelProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + final MLModelProperties result = new MLModelProperties(); - result.setDate(mlModelProperties.getDate()); - result.setDescription(mlModelProperties.getDescription()); - if (mlModelProperties.getExternalUrl() != null) { - result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); - } - if (mlModelProperties.getVersion() != null) { - result.setVersion(mlModelProperties.getVersion().getVersionTag()); - } - result.setType(mlModelProperties.getType()); - if (mlModelProperties.getHyperParams() != null) { - result.setHyperParams(mlModelProperties.getHyperParams().stream().map( - param -> MLHyperParamMapper.map(param)).collect(Collectors.toList())); - } - - result.setCustomProperties(CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); + result.setDate(mlModelProperties.getDate()); + result.setDescription(mlModelProperties.getDescription()); + if (mlModelProperties.getExternalUrl() != null) { + result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); + } + if (mlModelProperties.getVersion() != null) { + result.setVersion(mlModelProperties.getVersion().getVersionTag()); + } + 
result.setType(mlModelProperties.getType()); + if (mlModelProperties.getHyperParams() != null) { + result.setHyperParams( + mlModelProperties.getHyperParams().stream() + .map(param -> MLHyperParamMapper.map(param)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getTrainingMetrics() != null) { - result.setTrainingMetrics(mlModelProperties.getTrainingMetrics().stream().map(metric -> - MLMetricMapper.map(metric) - ).collect(Collectors.toList())); - } + result.setCustomProperties( + CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); - if (mlModelProperties.getGroups() != null) { - result.setGroups(mlModelProperties.getGroups().stream().map(group -> { - final MLModelGroup subgroup = new MLModelGroup(); - subgroup.setUrn(group.toString()); - return subgroup; - }).collect(Collectors.toList())); - } + if (mlModelProperties.getTrainingMetrics() != null) { + result.setTrainingMetrics( + mlModelProperties.getTrainingMetrics().stream() + .map(metric -> MLMetricMapper.map(metric)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getMlFeatures() != null) { - result.setMlFeatures(mlModelProperties - .getMlFeatures() - .stream() - .map(Urn::toString) - .collect(Collectors.toList())); - } - result.setTags(mlModelProperties.getTags()); + if (mlModelProperties.getGroups() != null) { + result.setGroups( + mlModelProperties.getGroups().stream() + .map( + group -> { + final MLModelGroup subgroup = new MLModelGroup(); + subgroup.setUrn(group.toString()); + return subgroup; + }) + .collect(Collectors.toList())); + } - return result; + if (mlModelProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlModelProperties.getMlFeatures().stream() + .map(Urn::toString) + .collect(Collectors.toList())); } + result.setTags(mlModelProperties.getTags()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index 0bd5db4d884ae..a8efd748401f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -33,88 +35,102 @@ import com.linkedin.ml.metadata.MLPrimaryKeyProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLPrimaryKeyMapper implements ModelMapper<EntityResponse, MLPrimaryKey> { - public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); + public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); - public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { - final MLPrimaryKey result = new MLPrimaryKey(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { + final MLPrimaryKey result = new MLPrimaryKey(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLPRIMARY_KEY); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLPRIMARY_KEY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLPrimaryKey> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + MappingHelper<MLPrimaryKey> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); - mappingHelper.mapToResult(ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - 
mappingHelper.mapToResult(ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + return mappingHelper.getResult(); + } - private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); - mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); - mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); - } + private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); + mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); + mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); + } - private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); - if (primaryKeyProperties.getDataType() != null) { - mlPrimaryKey.setDataType(MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); - } + private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); + mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); + if (primaryKeyProperties.getDataType() != null) { + mlPrimaryKey.setDataType( + MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); } + } - private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } + private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } - private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if 
it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } + private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } - private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { - EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); - MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { + EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); + MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 39ecd96af182f..16d6120cd9dff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -4,39 +4,43 @@ import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; -public class MLPrimaryKeyPropertiesMapper implements ModelMapper<com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { +public class MLPrimaryKeyPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { - public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); + public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); - public static MLPrimaryKeyProperties map(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(mlPrimaryKeyProperties); - } + public static MLPrimaryKeyProperties map( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + return INSTANCE.apply(mlPrimaryKeyProperties); + } - @Override - public MLPrimaryKeyProperties apply(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); + @Override + public MLPrimaryKeyProperties apply( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); - result.setDescription(mlPrimaryKeyProperties.getDescription()); - if (mlPrimaryKeyProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); - } - if 
(mlPrimaryKeyProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); - } - result.setSources(mlPrimaryKeyProperties - .getSources() - .stream() - .map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }) + result.setDescription(mlPrimaryKeyProperties.getDescription()); + if (mlPrimaryKeyProperties.getDataType() != null) { + result.setDataType( + MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); + } + if (mlPrimaryKeyProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); + } + result.setSources( + mlPrimaryKeyProperties.getSources().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) .collect(Collectors.toList())); - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java index 05b34ba3acb9c..76fa8c84e9571 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java @@ -2,22 +2,21 @@ import com.linkedin.datahub.graphql.generated.Metrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class MetricsMapper implements ModelMapper<com.linkedin.ml.metadata.Metrics, Metrics> { - public static final MetricsMapper INSTANCE = new MetricsMapper(); + public static final MetricsMapper INSTANCE = new MetricsMapper(); - public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - return INSTANCE.apply(metrics); - } + public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + return INSTANCE.apply(metrics); + } - @Override - public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - final Metrics result = new Metrics(); - result.setDecisionThreshold(metrics.getDecisionThreshold()); - result.setPerformanceMeasures(metrics.getPerformanceMeasures()); - return result; - } + @Override + public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + final Metrics result = new Metrics(); + result.setDecisionThreshold(metrics.getDecisionThreshold()); + result.setPerformanceMeasures(metrics.getPerformanceMeasures()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java index 8bd25a4474579..e46cb0a074bd7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java @@ -2,22 +2,25 @@ import com.linkedin.datahub.graphql.generated.QuantitativeAnalyses; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class QuantitativeAnalysesMapper implements ModelMapper<com.linkedin.ml.metadata.QuantitativeAnalyses, QuantitativeAnalyses> { +public class QuantitativeAnalysesMapper + implements 
ModelMapper<com.linkedin.ml.metadata.QuantitativeAnalyses, QuantitativeAnalyses> { - public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); + public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); - public static QuantitativeAnalyses map(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - return INSTANCE.apply(quantitativeAnalyses); - } + public static QuantitativeAnalyses map( + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + return INSTANCE.apply(quantitativeAnalyses); + } - @Override - public QuantitativeAnalyses apply(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - final QuantitativeAnalyses result = new QuantitativeAnalyses(); - result.setIntersectionalResults(ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); - result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); - return result; - } + @Override + public QuantitativeAnalyses apply( + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + final QuantitativeAnalyses result = new QuantitativeAnalyses(); + result.setIntersectionalResults( + ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); + result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java index 78292f08f8cad..4b6529c59db3e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java @@ -3,25 +3,25 @@ import com.linkedin.datahub.graphql.generated.ResultsType; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class ResultsTypeMapper implements ModelMapper<com.linkedin.ml.metadata.ResultsType, ResultsType> { +public class ResultsTypeMapper + implements ModelMapper<com.linkedin.ml.metadata.ResultsType, ResultsType> { - public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); + public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); - public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - return INSTANCE.apply(input); - } + public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + return INSTANCE.apply(input); + } - @Override - public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - final ResultsType result; - if (input.isString()) { - result = new StringBox(input.getString()); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); - } - return result; + @Override + public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + final ResultsType result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); } + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java index 79dbd2cded4c2..b6bd5efdc4217 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java @@ -1,23 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.SourceCodeUrl; import com.linkedin.datahub.graphql.generated.SourceCodeUrlType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class SourceCodeUrlMapper implements ModelMapper<com.linkedin.ml.metadata.SourceCodeUrl, SourceCodeUrl> { - public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); +public class SourceCodeUrlMapper + implements ModelMapper<com.linkedin.ml.metadata.SourceCodeUrl, SourceCodeUrl> { + public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); - public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - return INSTANCE.apply(input); - } + public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + return INSTANCE.apply(input); + } - @Override - public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - final SourceCodeUrl results = new SourceCodeUrl(); - results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); - results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); - return results; - } + @Override + public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + final SourceCodeUrl results = new SourceCodeUrl(); + results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); + results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java index 6ad0945b0621f..5758a52538c1e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java @@ -2,20 +2,22 @@ import com.linkedin.common.VersionTag; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class VersionTagMapper implements ModelMapper<VersionTag, com.linkedin.datahub.graphql.generated.VersionTag> { - public static final VersionTagMapper INSTANCE = new VersionTagMapper(); +public class VersionTagMapper + implements ModelMapper<VersionTag, com.linkedin.datahub.graphql.generated.VersionTag> { + public static final VersionTagMapper INSTANCE = new VersionTagMapper(); - public static com.linkedin.datahub.graphql.generated.VersionTag map(@Nonnull final VersionTag versionTag) { - return INSTANCE.apply(versionTag); - } + public static com.linkedin.datahub.graphql.generated.VersionTag map( + @Nonnull final VersionTag versionTag) { + return INSTANCE.apply(versionTag); + } - @Override - public com.linkedin.datahub.graphql.generated.VersionTag 
apply(@Nonnull final VersionTag input) { - final com.linkedin.datahub.graphql.generated.VersionTag result = new com.linkedin.datahub.graphql.generated.VersionTag(); - result.setVersionTag(input.getVersionTag()); - return result; - } + @Override + public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { + final com.linkedin.datahub.graphql.generated.VersionTag result = + new com.linkedin.datahub.graphql.generated.VersionTag(); + result.setVersionTag(input.getVersionTag()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index 080cdeba09f19..b6990c3816b53 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.notebook; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -18,25 +21,25 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; +import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.BrowsableEntityType; import com.linkedin.datahub.graphql.types.MutableType; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowsePathsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowseResultMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookUpdateInputMapper; -import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; 
import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -53,25 +56,25 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class NotebookType implements SearchableEntityType<Notebook, String>, BrowsableEntityType<Notebook, String>, - MutableType<NotebookUpdateInput, Notebook> { - static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of( - NOTEBOOK_KEY_ASPECT_NAME, - NOTEBOOK_INFO_ASPECT_NAME, - NOTEBOOK_CONTENT_ASPECT_NAME, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME); +public class NotebookType + implements SearchableEntityType<Notebook, String>, + BrowsableEntityType<Notebook, String>, + MutableType<NotebookUpdateInput, Notebook> { + static final Set<String> ASPECTS_TO_RESOLVE = + ImmutableSet.of( + NOTEBOOK_KEY_ASPECT_NAME, + NOTEBOOK_INFO_ASPECT_NAME, + NOTEBOOK_CONTENT_ASPECT_NAME, + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); private final EntityClient _entityClient; @@ -80,44 +83,68 @@ public NotebookType(EntityClient entityClient) { } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List<FacetFilterInput> filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { + @Nonnull final QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map<String, String> facetFilters = Collections.emptyMap(); - final SearchResult searchResult = _entityClient.search(NOTEBOOK_ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + NOTEBOOK_ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public BrowseResults browse(@Nonnull List<String> path, @Nullable List<FacetFilterInput> filters, int start, - int count, @Nonnull QueryContext context) throws Exception { + public BrowseResults browse( + @Nonnull List<String> path, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext 
context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map<String, String> facetFilters = Collections.emptyMap(); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse(NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); return BrowseResultMapper.map(result); } @Override - public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(NotebookUrn.createFromString(urn), context.getAuthentication()); + public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + NotebookUrn.createFromString(urn), context.getAuthentication()); return BrowsePathsMapper.map(result); } @@ -137,22 +164,26 @@ public Class<Notebook> objectClass() { } @Override - public List<DataFetcherResult<Notebook>> batchLoad(@Nonnull List<String> urnStrs, @Nonnull QueryContext context) - throws Exception { - final List<Urn> urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Notebook>> batchLoad( + @Nonnull List<String> urnStrs, @Nonnull QueryContext context) throws Exception { + final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> notebookMap = _entityClient.batchGetV2(NOTEBOOK_ENTITY_NAME, new HashSet<>(urns), - ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map<Urn, EntityResponse> notebookMap = + _entityClient.batchGetV2( + NOTEBOOK_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); return urns.stream() .map(urn -> notebookMap.getOrDefault(urn, null)) - .map(entityResponse -> entityResponse == null - ? null - : DataFetcherResult.<Notebook>newResult() - .data(NotebookMapper.map(entityResponse)) - .build()) + .map( + entityResponse -> + entityResponse == null + ? null + : DataFetcherResult.<Notebook>newResult() + .data(NotebookMapper.map(entityResponse)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Notebook", e); @@ -165,13 +196,16 @@ public Class<NotebookUpdateInput> inputClass() { } @Override - public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) + public Notebook update( + @Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) throws Exception { if (!isAuthorized(urn, input, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); Collection<MetadataChangeProposal> proposals = NotebookUpdateInputMapper.map(input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); @@ -184,7 +218,8 @@ public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, return load(urn, context).getData(); } - private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { + private boolean isAuthorized( + @Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { // Decide whether the current principal should be allowed to update the Dataset. final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( @@ -197,9 +232,9 @@ private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput u private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); List<String> specificPrivileges = new ArrayList<>(); if (updateInput.getOwnership() != null) { @@ -211,12 +246,12 @@ private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateIn if (updateInput.getTags() != null) { specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java index 2b937c86c9779..a263e31b26faf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.GlobalTags; @@ -26,11 +28,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; import com.linkedin.datahub.graphql.types.common.mappers.ChangeAuditStampsMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -45,8 +47,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class NotebookMapper implements ModelMapper<EntityResponse, Notebook> { public static final NotebookMapper INSTANCE = new NotebookMapper(); @@ -64,41 +64,59 @@ public Notebook apply(EntityResponse response) { EnvelopedAspectMap aspectMap = response.getAspects(); MappingHelper<Notebook> mappingHelper = new MappingHelper<>(aspectMap, convertedNotebook); mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, this::mapNotebookKey); - mappingHelper.mapToResult(NOTEBOOK_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + NOTEBOOK_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); mappingHelper.mapToResult(NOTEBOOK_CONTENT_ASPECT_NAME, this::mapNotebookContent); - mappingHelper.mapToResult(EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (notebook, dataMap) -> notebook.setOwnership( - OwnershipMapper.map(new Ownership(dataMap), entityUrn) - )); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (notebook, dataMap) -> - notebook.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + 
EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapDataPlatformInstance); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (notebook, dataMap) -> - notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); return mappingHelper.getResult(); } private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); - notebook.setPlatform(DataPlatform - .builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(dataPlatformInstance.getPlatform().toString()) - .build()); - notebook.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); + notebook.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(dataPlatformInstance.getPlatform().toString()) + .build()); + notebook.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); } private void mapSubTypes(Notebook notebook, DataMap dataMap) { SubTypes pegasusSubTypes = new SubTypes(dataMap); if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); + com.linkedin.datahub.graphql.generated.SubTypes subTypes = + new com.linkedin.datahub.graphql.generated.SubTypes(); subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); notebook.setSubTypes(subTypes); } @@ -110,11 +128,14 @@ private void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap notebook.setTool(notebookKey.getNotebookTool()); } - private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = new com.linkedin.notebook.NotebookInfo(dataMap); + private void mapNotebookInfo( + @Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = + new com.linkedin.notebook.NotebookInfo(dataMap); final NotebookInfo notebookInfo = new NotebookInfo(); 
notebookInfo.setTitle(gmsNotebookInfo.getTitle()); - notebookInfo.setChangeAuditStamps(ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); + notebookInfo.setChangeAuditStamps( + ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); notebookInfo.setDescription(gmsNotebookInfo.getDescription()); if (gmsNotebookInfo.hasExternalUrl()) { @@ -122,40 +143,46 @@ private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMa } if (gmsNotebookInfo.hasCustomProperties()) { - notebookInfo.setCustomProperties(CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); + notebookInfo.setCustomProperties( + CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); } notebook.setInfo(notebookInfo); } private void mapNotebookContent(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - com.linkedin.notebook.NotebookContent pegasusNotebookContent = new com.linkedin.notebook.NotebookContent(dataMap); + com.linkedin.notebook.NotebookContent pegasusNotebookContent = + new com.linkedin.notebook.NotebookContent(dataMap); NotebookContent notebookContent = new NotebookContent(); notebookContent.setCells(mapNotebookCells(pegasusNotebookContent.getCells())); notebook.setContent(notebookContent); } - private List<NotebookCell> mapNotebookCells(com.linkedin.notebook.NotebookCellArray pegasusCells) { + private List<NotebookCell> mapNotebookCells( + com.linkedin.notebook.NotebookCellArray pegasusCells) { return pegasusCells.stream() - .map(pegasusCell -> { - NotebookCell notebookCell = new NotebookCell(); - NotebookCellType cellType = NotebookCellType.valueOf(pegasusCell.getType().toString()); - notebookCell.setType(cellType); - switch (cellType) { - case CHART_CELL: - notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); - break; - case TEXT_CELL: - notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); - break; - case QUERY_CELL: - notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); - break; - default: - throw new DataHubGraphQLException(String.format("Un-supported NotebookCellType: %s", cellType), - DataHubGraphQLErrorCode.SERVER_ERROR); - } - return notebookCell; - }) + .map( + pegasusCell -> { + NotebookCell notebookCell = new NotebookCell(); + NotebookCellType cellType = + NotebookCellType.valueOf(pegasusCell.getType().toString()); + notebookCell.setType(cellType); + switch (cellType) { + case CHART_CELL: + notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); + break; + case TEXT_CELL: + notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); + break; + case QUERY_CELL: + notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); + break; + default: + throw new DataHubGraphQLException( + String.format("Un-supported NotebookCellType: %s", cellType), + DataHubGraphQLErrorCode.SERVER_ERROR); + } + return notebookCell; + }) .collect(Collectors.toList()); } @@ -163,7 +190,8 @@ private ChartCell mapChartCell(com.linkedin.notebook.ChartCell pegasusChartCell) ChartCell chartCell = new ChartCell(); chartCell.setCellId(pegasusChartCell.getCellId()); chartCell.setCellTitle(pegasusChartCell.getCellTitle()); - chartCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); + chartCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); return chartCell; } @@ -171,7 +199,8 @@ private TextCell mapTextCell(com.linkedin.notebook.TextCell pegasusTextCell) { TextCell textCell = 
new TextCell(); textCell.setCellId(pegasusTextCell.getCellId()); textCell.setCellTitle(pegasusTextCell.getCellTitle()); - textCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); + textCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); textCell.setText(pegasusTextCell.getText()); return textCell; } @@ -180,7 +209,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) QueryCell queryCell = new QueryCell(); queryCell.setCellId(pegasusQueryCell.getCellId()); queryCell.setCellTitle(pegasusQueryCell.getCellTitle()); - queryCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); + queryCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); queryCell.setRawQuery(pegasusQueryCell.getRawQuery()); if (pegasusQueryCell.hasLastExecuted()) { queryCell.setLastExecuted(AuditStampMapper.map(pegasusQueryCell.getLastExecuted())); @@ -189,7 +219,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) } private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - final EditableNotebookProperties editableNotebookProperties = new EditableNotebookProperties(dataMap); + final EditableNotebookProperties editableNotebookProperties = + new EditableNotebookProperties(dataMap); final NotebookEditableProperties notebookEditableProperties = new NotebookEditableProperties(); notebookEditableProperties.setDescription(editableNotebookProperties.getDescription()); notebook.setEditableProperties(notebookEditableProperties); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java index 0c3787d630500..0d6c70e07053f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,16 +19,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class NotebookUpdateInputMapper implements InputModelMapper<NotebookUpdateInput, Collection<MetadataChangeProposal>, - Urn> { +public class NotebookUpdateInputMapper + implements InputModelMapper<NotebookUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final NotebookUpdateInputMapper INSTANCE = new NotebookUpdateInputMapper(); - public static Collection<MetadataChangeProposal> map(@Nonnull final NotebookUpdateInput notebookUpdateInput, - @Nonnull final Urn actor) { + public static Collection<MetadataChangeProposal> map( + @Nonnull final NotebookUpdateInput notebookUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(notebookUpdateInput, actor); } @@ -39,27 +38,32 @@ public Collection<MetadataChangeProposal> apply(NotebookUpdateInput input, Urn a auditStamp.setTime(System.currentTimeMillis()); if (input.getOwnership() != null) { - 
proposals.add(updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(input.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(input.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } if (input.getTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(input.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + input.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (input.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); editableDashboardProperties.setDescription(input.getEditableProperties().getDescription()); if (!editableDashboardProperties.hasCreated()) { editableDashboardProperties.setCreated(auditStamp); } editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java index 79f95ac8439a5..f7ed4c59a805a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; @@ -20,14 +22,12 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class OwnershipType implements com.linkedin.datahub.graphql.types.EntityType<OwnershipTypeEntity, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +46,17 @@ public Class<OwnershipTypeEntity> objectClass() { } @Override - public List<DataFetcherResult<OwnershipTypeEntity>> batchLoad(@Nonnull List<String> urns, - @Nonnull QueryContext context) throws Exception { - final List<Urn> ownershipTypeUrns = 
urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List<DataFetcherResult<OwnershipTypeEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> ownershipTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(OWNERSHIP_TYPE_ENTITY_NAME, new HashSet<>(ownershipTypeUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + OWNERSHIP_TYPE_ENTITY_NAME, + new HashSet<>(ownershipTypeUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -60,12 +64,16 @@ public List<DataFetcherResult<OwnershipTypeEntity>> batchLoad(@Nonnull List<Stri gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null : DataFetcherResult.<OwnershipTypeEntity>newResult() - .data(OwnershipTypeMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<OwnershipTypeEntity>newResult() + .data(OwnershipTypeMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Custom Ownership Types", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java index 37b59b679e3ac..9eebe95df8d8c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Status; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -14,9 +16,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipTypeMapper implements ModelMapper<EntityResponse, OwnershipTypeEntity> { public static final OwnershipTypeMapper INSTANCE = new OwnershipTypeMapper(); @@ -34,12 +33,14 @@ public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { EnvelopedAspectMap aspectMap = input.getAspects(); MappingHelper<OwnershipTypeEntity> mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(OWNERSHIP_TYPE_INFO_ASPECT_NAME, this::mapOwnershipTypeInfo); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); return mappingHelper.getResult(); } - private void mapOwnershipTypeInfo(@Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap 
dataMap) { + private void mapOwnershipTypeInfo( + @Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { final com.linkedin.ownership.OwnershipTypeInfo gmsOwnershipTypeInfo = new com.linkedin.ownership.OwnershipTypeInfo(dataMap); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java index 167e1615fc4cc..318818b8a2140 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -25,9 +27,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubPolicyMapper implements ModelMapper<EntityResponse, DataHubPolicy> { public static final DataHubPolicyMapper INSTANCE = new DataHubPolicyMapper(); @@ -71,16 +70,20 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { // Change here is not executed at the moment - leaving it for the future UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } @@ -102,14 +105,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(this::mapValue) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } @@ -117,7 +126,10 @@ private PolicyMatchCriterionValue mapValue(final String value) { try { // If value is urn, 
set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java index 4cec59009af3f..3dea9046dcf36 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubPolicyType implements com.linkedin.datahub.graphql.types.EntityType<DataHubPolicy, String> { +public class DataHubPolicyType + implements com.linkedin.datahub.graphql.types.EntityType<DataHubPolicy, String> { static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_POLICY_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,13 +44,16 @@ public Class<DataHubPolicy> objectClass() { } @Override - public List<DataFetcherResult<DataHubPolicy>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { + public List<DataFetcherResult<DataHubPolicy>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { final List<Urn> roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + POLICY_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -58,8 +61,13 @@ public List<DataFetcherResult<DataHubPolicy>> batchLoad(@Nonnull List<String> ur gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.<DataHubPolicy>newResult().data(DataHubPolicyMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<DataHubPolicy>newResult() + .data(DataHubPolicyMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java index 791197c7d47e4..f35111f78a694 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.post; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,9 +18,6 @@ import com.linkedin.post.PostInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class PostMapper implements ModelMapper<EntityResponse, Post> { public static final PostMapper INSTANCE = new PostMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index cf77821b1a280..2bdcda3592608 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; @@ -21,9 +23,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class QueryMapper implements ModelMapper<EntityResponse, QueryEntity> { public static final QueryMapper INSTANCE = new QueryMapper(); @@ -47,13 +46,15 @@ public QueryEntity apply(@Nonnull final EntityResponse entityResponse) { private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QueryProperties queryProperties = new QueryProperties(dataMap); - com.linkedin.datahub.graphql.generated.QueryProperties res = new com.linkedin.datahub.graphql.generated.QueryProperties(); + com.linkedin.datahub.graphql.generated.QueryProperties res = + new com.linkedin.datahub.graphql.generated.QueryProperties(); // Query Source must be kept in sync. 
res.setSource(QuerySource.valueOf(queryProperties.getSource().toString())); - res.setStatement(new QueryStatement( - queryProperties.getStatement().getValue(), - QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); + res.setStatement( + new QueryStatement( + queryProperties.getStatement().getValue(), + QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); res.setName(queryProperties.getName(GetMode.NULL)); res.setDescription(queryProperties.getDescription(GetMode.NULL)); @@ -73,10 +74,10 @@ private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dat @Nonnull private void mapQuerySubjects(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QuerySubjects querySubjects = new QuerySubjects(dataMap); - List<QuerySubject> res = querySubjects.getSubjects() - .stream() - .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) - .collect(Collectors.toList()); + List<QuerySubject> res = + querySubjects.getSubjects().stream() + .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) + .collect(Collectors.toList()); query.setSubjects(res); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index c138cd56f20b3..0c1fd33e38110 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -20,14 +22,11 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class QueryType implements com.linkedin.datahub.graphql.types.EntityType<QueryEntity, String> { - public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - QUERY_PROPERTIES_ASPECT_NAME, - QUERY_SUBJECTS_ASPECT_NAME); +public class QueryType + implements com.linkedin.datahub.graphql.types.EntityType<QueryEntity, String> { + public static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +45,16 @@ public Class<QueryEntity> objectClass() { } @Override - public List<DataFetcherResult<QueryEntity>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { + public List<DataFetcherResult<QueryEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { final List<Urn> viewUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(QUERY_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + QUERY_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -60,11 +62,16 @@ public List<DataFetcherResult<QueryEntity>> batchLoad(@Nonnull List<String> urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? 
null - : DataFetcherResult.<QueryEntity>newResult().data(QueryMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<QueryEntity>newResult() + .data(QueryMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java index e1762022f4bcb..db086e682d57c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DataFlowDataJobsRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DataFlowDataJobsRelationshipsMapper implements - ModelMapper<com.linkedin.common.EntityRelationships, DataFlowDataJobsRelationships> { +public class DataFlowDataJobsRelationshipsMapper + implements ModelMapper<com.linkedin.common.EntityRelationships, DataFlowDataJobsRelationships> { - public static final DataFlowDataJobsRelationshipsMapper INSTANCE = new DataFlowDataJobsRelationshipsMapper(); + public static final DataFlowDataJobsRelationshipsMapper INSTANCE = + new DataFlowDataJobsRelationshipsMapper(); - public static DataFlowDataJobsRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DataFlowDataJobsRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DataFlowDataJobsRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DataFlowDataJobsRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java index 824e1181c5871..4df64c7ecb85e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DownstreamEntityRelationships; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DownstreamEntityRelationshipsMapper implements - ModelMapper<com.linkedin.common.EntityRelationships, DownstreamEntityRelationships> { +public class DownstreamEntityRelationshipsMapper + implements ModelMapper<com.linkedin.common.EntityRelationships, DownstreamEntityRelationships> { - public static final DownstreamEntityRelationshipsMapper INSTANCE = new DownstreamEntityRelationshipsMapper(); + public static final DownstreamEntityRelationshipsMapper INSTANCE = + new DownstreamEntityRelationshipsMapper(); - public static DownstreamEntityRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DownstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DownstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DownstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java index 58f4f477bc7e6..e3743804b4908 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java @@ -5,28 +5,32 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class EntityRelationshipLegacyMapper implements ModelMapper<com.linkedin.common.EntityRelationship, EntityRelationshipLegacy> { +public class EntityRelationshipLegacyMapper + implements ModelMapper<com.linkedin.common.EntityRelationship, EntityRelationshipLegacy> { - public static final EntityRelationshipLegacyMapper INSTANCE = new EntityRelationshipLegacyMapper(); + public static final EntityRelationshipLegacyMapper INSTANCE = + new EntityRelationshipLegacyMapper(); - public static EntityRelationshipLegacy map(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - return INSTANCE.apply(relationship); - } + public static EntityRelationshipLegacy map( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + return INSTANCE.apply(relationship); + } - @Override - public EntityRelationshipLegacy apply(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - final EntityRelationshipLegacy 
result = new EntityRelationshipLegacy(); + @Override + public EntityRelationshipLegacy apply( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); - EntityWithRelationships partialLineageEntity = (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); - if (partialLineageEntity != null) { - result.setEntity(partialLineageEntity); - } - if (relationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(relationship.getCreated())); - } - return result; + EntityWithRelationships partialLineageEntity = + (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); + if (partialLineageEntity != null) { + result.setEntity(partialLineageEntity); + } + if (relationship.hasCreated()) { + result.setCreated(AuditStampMapper.map(relationship.getCreated())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java index 7db5e08c73fc6..832e1bb396b3b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java @@ -2,24 +2,28 @@ import com.linkedin.datahub.graphql.generated.UpstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class UpstreamEntityRelationshipsMapper implements ModelMapper<com.linkedin.common.EntityRelationships, UpstreamEntityRelationships> { +public class UpstreamEntityRelationshipsMapper + implements ModelMapper<com.linkedin.common.EntityRelationships, UpstreamEntityRelationships> { - public static final UpstreamEntityRelationshipsMapper INSTANCE = new UpstreamEntityRelationshipsMapper(); + public static final UpstreamEntityRelationshipsMapper INSTANCE = + new UpstreamEntityRelationshipsMapper(); - public static UpstreamEntityRelationships map(@Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static UpstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public UpstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public UpstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java index 
8c6496390943b..9521945770195 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -21,11 +23,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubRoleType implements com.linkedin.datahub.graphql.types.EntityType<DataHubRole, String> { +public class DataHubRoleType + implements com.linkedin.datahub.graphql.types.EntityType<DataHubRole, String> { static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_ROLE_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -45,13 +45,16 @@ public Class<DataHubRole> objectClass() { } @Override - public List<DataFetcherResult<DataHubRole>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { + public List<DataFetcherResult<DataHubRole>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { final List<Urn> roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -59,8 +62,13 @@ public List<DataFetcherResult<DataHubRole>> batchLoad(@Nonnull List<String> urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.<DataHubRole>newResult().data(DataHubRoleMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<DataHubRole>newResult() + .data(DataHubRoleMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java index 5ba31a1602780..7a467886fc084 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataHubRole; import com.linkedin.datahub.graphql.generated.EntityType; @@ -10,9 +12,6 @@ import com.linkedin.policy.DataHubRoleInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubRoleMapper implements ModelMapper<EntityResponse, DataHubRole> { public static final DataHubRoleMapper INSTANCE = new DataHubRoleMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index 084c4d5033ad0..d51e0d06c0fda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -11,9 +11,9 @@ import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.SearchableEntityType; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -22,9 +22,6 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -33,88 +30,101 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class RoleType implements SearchableEntityType<Role, String>, +public class RoleType + implements SearchableEntityType<Role, String>, com.linkedin.datahub.graphql.types.EntityType<Role, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ROLE_KEY, - Constants.ROLE_PROPERTIES_ASPECT_NAME, - Constants.ROLE_ACTORS_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ROLE_KEY, + Constants.ROLE_PROPERTIES_ASPECT_NAME, + Constants.ROLE_ACTORS_ASPECT_NAME); - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public RoleType(final EntityClient 
entityClient) { - _entityClient = entityClient; - } + public RoleType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.ROLE; - } + @Override + public EntityType type() { + return EntityType.ROLE; + } - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class<Role> objectClass() { - return Role.class; - } + @Override + public Class<Role> objectClass() { + return Role.class; + } - @Override - public List<DataFetcherResult<Role>> batchLoad(@Nonnull List<String> urns, - @Nonnull QueryContext context) throws Exception { - final List<Urn> externalRolesUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List<DataFetcherResult<Role>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> externalRolesUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.ROLE_ENTITY_NAME, - new HashSet<>(externalRolesUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.ROLE_ENTITY_NAME, + new HashSet<>(externalRolesUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : externalRolesUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Role>newResult() - .data(RoleMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Role", e); - } + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : externalRolesUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<Role>newResult().data(RoleMapper.map(gmsResult)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Role", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search(Constants.ROLE_ENTITY_NAME, - query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.ROLE_ENTITY_NAME, - query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + Constants.ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.ROLE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java index cabace1a52441..3eb090e452439 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java @@ -1,41 +1,39 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.RoleAssociation; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class AccessMapper { - public static final AccessMapper INSTANCE = new AccessMapper(); - - public static com.linkedin.datahub.graphql.generated.Access map( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - return INSTANCE.apply(access, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.Access apply( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.Access result = new com.linkedin.datahub.graphql.generated.Access(); - result.setRoles(access.getRoles().stream().map( - association -> this.mapRoleAssociation(association, entityUrn) - ).collect(Collectors.toList())); - 
return result; - } - - private RoleAssociation mapRoleAssociation(com.linkedin.common.RoleAssociation association, Urn entityUrn) { - RoleAssociation roleAssociation = new RoleAssociation(); - Role role = new Role(); - role.setType(EntityType.ROLE); - role.setUrn(association.getUrn().toString()); - roleAssociation.setRole(role); - roleAssociation.setAssociatedUrn(entityUrn.toString()); - return roleAssociation; - } - + public static final AccessMapper INSTANCE = new AccessMapper(); + + public static com.linkedin.datahub.graphql.generated.Access map( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(access, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Access apply( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.Access result = + new com.linkedin.datahub.graphql.generated.Access(); + result.setRoles( + access.getRoles().stream() + .map(association -> this.mapRoleAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private RoleAssociation mapRoleAssociation( + com.linkedin.common.RoleAssociation association, Urn entityUrn) { + RoleAssociation roleAssociation = new RoleAssociation(); + Role role = new Role(); + role.setType(EntityType.ROLE); + role.setUrn(association.getUrn().toString()); + roleAssociation.setRole(role); + roleAssociation.setAssociatedUrn(entityUrn.toString()); + return roleAssociation; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java index 3cb0ec942a457..df18b7c89fafc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java @@ -15,79 +15,77 @@ import com.linkedin.metadata.key.RoleKey; import com.linkedin.role.Actors; import com.linkedin.role.RoleUserArray; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class RoleMapper implements ModelMapper<EntityResponse, Role> { - public static final RoleMapper INSTANCE = new RoleMapper(); - - public static Role map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final RoleMapper INSTANCE = new RoleMapper(); + + public static Role map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { + final RoleProperties propertiesResult = new RoleProperties(); + propertiesResult.setName(e.getName()); + propertiesResult.setDescription(e.getDescription()); + propertiesResult.setType(e.getType()); + propertiesResult.setRequestUrl(e.getRequestUrl()); + + return propertiesResult; + } + + private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { + RoleUser result = new RoleUser(); + CorpUser corpUser = new CorpUser(); + corpUser.setUrn(provisionedUser.getUser().toString()); + result.setUser(corpUser); + return result; + } + + private static Actor mapActor(Actors actors) { + Actor actor = new Actor(); + actor.setUsers(mapRoleUsers(actors.getUsers())); + return actor; + } + + private static 
List<RoleUser> mapRoleUsers(RoleUserArray users) { + if (users == null) { + return null; } + return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + } - private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { - final RoleProperties propertiesResult = new RoleProperties(); - propertiesResult.setName(e.getName()); - propertiesResult.setDescription(e.getDescription()); - propertiesResult.setType(e.getType()); - propertiesResult.setRequestUrl(e.getRequestUrl()); + @Override + public Role apply(EntityResponse input) { - return propertiesResult; - } + final Role result = new Role(); + final Urn entityUrn = input.getUrn(); - private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { - RoleUser result = new RoleUser(); - CorpUser corpUser = new CorpUser(); - corpUser.setUrn(provisionedUser.getUser().toString()); - result.setUser(corpUser); - return result; - } + result.setUrn(entityUrn.toString()); + result.setType(EntityType.ROLE); - private static Actor mapActor(Actors actors) { - Actor actor = new Actor(); - actor.setUsers(mapRoleUsers(actors.getUsers())); - return actor; - } + final EnvelopedAspectMap aspects = input.getAspects(); - private static List<RoleUser> mapRoleUsers(RoleUserArray users) { - if (users == null) { - return null; - } - return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); + if (roleKeyAspect != null) { + result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); + } + final EnvelopedAspect envelopedPropertiesAspect = + aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); + if (envelopedPropertiesAspect != null) { + result.setProperties( + mapRoleProperties( + new com.linkedin.role.RoleProperties(envelopedPropertiesAspect.getValue().data()))); } - @Override - public Role apply(EntityResponse input) { - - - final Role result = new Role(); - final Urn entityUrn = input.getUrn(); - - result.setUrn(entityUrn.toString()); - result.setType(EntityType.ROLE); - - final EnvelopedAspectMap aspects = input.getAspects(); - - final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); - if (roleKeyAspect != null) { - result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); - } - final EnvelopedAspect envelopedPropertiesAspect = aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); - if (envelopedPropertiesAspect != null) { - result.setProperties(mapRoleProperties( - new com.linkedin.role.RoleProperties( - envelopedPropertiesAspect.getValue().data())) - ); - } - - final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); - if (envelopedUsers != null) { - result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); - } - - return result; + final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); + if (envelopedUsers != null) { + result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index 748753c4e22b1..b543a40cbac41 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ 
-8,15 +8,15 @@ import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class SchemaFieldType implements com.linkedin.datahub.graphql.types.EntityType<SchemaFieldEntity, String> { +public class SchemaFieldType + implements com.linkedin.datahub.graphql.types.EntityType<SchemaFieldEntity, String> { - public SchemaFieldType() { } + public SchemaFieldType() {} @Override public EntityType type() { @@ -34,18 +34,17 @@ public Class<SchemaFieldEntity> objectClass() { } @Override - public List<DataFetcherResult<SchemaFieldEntity>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> schemaFieldUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<SchemaFieldEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> schemaFieldUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { return schemaFieldUrns.stream() .map(this::mapSchemaFieldUrn) - .map(schemaFieldEntity -> DataFetcherResult.<SchemaFieldEntity>newResult() - .data(schemaFieldEntity) - .build() - ) + .map( + schemaFieldEntity -> + DataFetcherResult.<SchemaFieldEntity>newResult().data(schemaFieldEntity).build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -66,6 +65,4 @@ private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { throw new RuntimeException("Failed to load schemaField entity", e); } } - } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index f79b23033c995..c56833cc817eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.types.tag; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.Entity; @@ -26,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -44,136 +46,150 @@ import 
javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class TagType implements com.linkedin.datahub.graphql.types.SearchableEntityType<Tag, String>, - MutableType<TagUpdateInput, Tag> { - - private static final Set<String> FACET_FIELDS = Collections.emptySet(); - - private final EntityClient _entityClient; - - public TagType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class<Tag> objectClass() { - return Tag.class; +public class TagType + implements com.linkedin.datahub.graphql.types.SearchableEntityType<Tag, String>, + MutableType<TagUpdateInput, Tag> { + + private static final Set<String> FACET_FIELDS = Collections.emptySet(); + + private final EntityClient _entityClient; + + public TagType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class<Tag> objectClass() { + return Tag.class; + } + + @Override + public EntityType type() { + return EntityType.TAG; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<TagUpdateInput> inputClass() { + return TagUpdateInput.class; + } + + @Override + public List<DataFetcherResult<Tag>> batchLoad( + final List<String> urns, final QueryContext context) { + + final List<Urn> tagUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> tagMap = + _entityClient.batchGetV2( + TAG_ENTITY_NAME, new HashSet<>(tagUrns), null, context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : tagUrns) { + gmsResults.add(tagMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsTag -> + gmsTag == null + ? null + : DataFetcherResult.<Tag>newResult().data(TagMapper.map(gmsTag)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Tags", e); } - - @Override - public EntityType type() { - return EntityType.TAG; - } - - @Override - public Function<Entity, String> getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class<TagUpdateInput> inputClass() { - return TagUpdateInput.class; - } - - @Override - public List<DataFetcherResult<Tag>> batchLoad(final List<String> urns, final QueryContext context) { - - final List<Urn> tagUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map<Urn, EntityResponse> tagMap = _entityClient.batchGetV2(TAG_ENTITY_NAME, new HashSet<>(tagUrns), - null, context.getAuthentication()); - - final List<EntityResponse> gmsResults = new ArrayList<>(); - for (Urn urn : tagUrns) { - gmsResults.add(tagMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsTag -> gmsTag == null ? 
null - : DataFetcherResult.<Tag>newResult() - .data(TagMapper.map(gmsTag)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Tags", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List<FacetFilterInput> filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("tag", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - - @Override - public Tag update(@Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection<MetadataChangeProposal> proposals = TagUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List<FacetFilterInput> filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "tag", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public Tag update( + @Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection<MetadataChangeProposal> proposals = TagUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.TAG_PRIVILEGES.getResourceType(), - update.getUrn(), - orPrivilegeGroups); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.TAG_PRIVILEGES.getResourceType(), + update.getUrn(), + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List<String> specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List<String> specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDescription() != null || updateInput.getName() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (updateInput.getDescription() != null || updateInput.getName() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java index f4d5f0a549a0e..72665535e5980 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java @@ -4,35 +4,36 @@ import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Tag; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class GlobalTagsMapper { - public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); + public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); - public static com.linkedin.datahub.graphql.generated.GlobalTags map( - @Nonnull final GlobalTags standardTags, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(standardTags, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.GlobalTags map( + @Nonnull final GlobalTags standardTags, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(standardTags, entityUrn); + } - public com.linkedin.datahub.graphql.generated.GlobalTags apply(@Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.GlobalTags result = new com.linkedin.datahub.graphql.generated.GlobalTags(); - result.setTags(input.getTags().stream().map(tag -> this.mapTagAssociation(tag, entityUrn)).collect(Collectors.toList())); - return result; - } + public com.linkedin.datahub.graphql.generated.GlobalTags apply( + @Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlobalTags result = + new com.linkedin.datahub.graphql.generated.GlobalTags(); + result.setTags( + input.getTags().stream() + .map(tag -> this.mapTagAssociation(tag, entityUrn)) + .collect(Collectors.toList())); + return result; + } - private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( - @Nonnull final TagAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.TagAssociation result = new com.linkedin.datahub.graphql.generated.TagAssociation(); - final Tag resultTag = new Tag(); - resultTag.setUrn(input.getTag().toString()); - result.setTag(resultTag); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( + @Nonnull final TagAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.TagAssociation result = + new com.linkedin.datahub.graphql.generated.TagAssociation(); + final Tag resultTag = new Tag(); + resultTag.setUrn(input.getTag().toString()); + result.setTag(resultTag); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java index 775c123070a80..3792a42376004 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java @@ -4,27 +4,28 @@ import com.linkedin.common.urn.TagUrn; import com.linkedin.datahub.graphql.generated.TagAssociationUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.net.URISyntaxException; +import javax.annotation.Nonnull; -public class TagAssociationUpdateMapper implements ModelMapper<TagAssociationUpdate, TagAssociation> { +public class TagAssociationUpdateMapper + implements ModelMapper<TagAssociationUpdate, TagAssociation> { - public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); + public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); - public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { - return INSTANCE.apply(tagAssociationUpdate); - } + public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { + return INSTANCE.apply(tagAssociationUpdate); + } - public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { - final TagAssociation output = new TagAssociation(); - try { - output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to update tag with urn %s, invalid urn", - tagAssociationUpdate.getTag().getUrn())); - } - return output; + public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { + final TagAssociation output = new TagAssociation(); + try { + output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to update tag with urn %s, invalid urn", + tagAssociationUpdate.getTag().getUrn())); } - + return output; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java index 43736b412b004..d6ce24582678d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -16,63 +18,61 @@ import com.linkedin.tag.TagProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
* - * To be replaced by auto-generated mappers implementations + * <p>To be replaced by auto-generated mappers implementations */ public class TagMapper implements ModelMapper<EntityResponse, Tag> { - public static final TagMapper INSTANCE = new TagMapper(); + public static final TagMapper INSTANCE = new TagMapper(); - public static Tag map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Tag apply(@Nonnull final EntityResponse entityResponse) { - final Tag result = new Tag(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.TAG); + public static Tag map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - final String legacyName = entityResponse.getUrn().getId(); - result.setName(legacyName); + @Override + public Tag apply(@Nonnull final EntityResponse entityResponse) { + final Tag result = new Tag(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.TAG); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper<Tag> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); - mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (tag, dataMap) -> - tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + final String legacyName = entityResponse.getUrn().getId(); + result.setName(legacyName); - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<Tag> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); + mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (tag, dataMap) -> tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - return mappingHelper.getResult(); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } - private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - TagKey tagKey = new TagKey(dataMap); - tag.setName(tagKey.getName()); - } + return mappingHelper.getResult(); + } + + private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + TagKey tagKey = new TagKey(dataMap); + tag.setName(tagKey.getName()); + } - private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - final TagProperties properties = new TagProperties(dataMap); - final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = - new com.linkedin.datahub.graphql.generated.TagProperties.Builder() - .setColorHex(properties.getColorHex(GetMode.DEFAULT)) - .setName(properties.getName(GetMode.DEFAULT)) - .setDescription(properties.getDescription(GetMode.DEFAULT)) - .build(); - tag.setProperties(graphQlProperties); - // Set deprecated top-level description field. 
- if (properties.hasDescription()) { - tag.setDescription(properties.getDescription()); - } + private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + final TagProperties properties = new TagProperties(dataMap); + final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = + new com.linkedin.datahub.graphql.generated.TagProperties.Builder() + .setColorHex(properties.getColorHex(GetMode.DEFAULT)) + .setName(properties.getName(GetMode.DEFAULT)) + .setDescription(properties.getDescription(GetMode.DEFAULT)) + .build(); + tag.setProperties(graphQlProperties); + // Set deprecated top-level description field. + if (properties.hasDescription()) { + tag.setDescription(properties.getDescription()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java index 505dd0d36954b..316994881ccfe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; @@ -19,24 +22,19 @@ import java.util.Collection; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class TagUpdateInputMapper implements InputModelMapper<TagUpdateInput, Collection<MetadataChangeProposal>, Urn> { +public class TagUpdateInputMapper + implements InputModelMapper<TagUpdateInput, Collection<MetadataChangeProposal>, Urn> { public static final TagUpdateInputMapper INSTANCE = new TagUpdateInputMapper(); public static Collection<MetadataChangeProposal> map( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { return INSTANCE.apply(tagUpdate, actor); } @Override public Collection<MetadataChangeProposal> apply( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { final Collection<MetadataChangeProposal> proposals = new ArrayList<>(2); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(TAG_ENTITY_NAME); @@ -59,9 +57,10 @@ public Collection<MetadataChangeProposal> apply( TagProperties tagProperties = new TagProperties(); tagProperties.setName(tagUpdate.getName()); tagProperties.setDescription(tagUpdate.getDescription()); - proposals.add(updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); } return proposals; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java index ddc9f33b25516..be67d17421917 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java @@ -1,15 +1,14 @@ package com.linkedin.datahub.graphql.types.test; -import com.linkedin.datahub.graphql.generated.TestDefinition; -import com.linkedin.test.TestInfo; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; +import com.linkedin.datahub.graphql.generated.TestDefinition; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; - +import com.linkedin.test.TestInfo; public class TestMapper { @@ -29,12 +28,11 @@ public static Test map(final EntityResponse entityResponse) { result.setName(testInfo.getName()); result.setDescription(testInfo.getDescription()); result.setDefinition(new TestDefinition(testInfo.getDefinition().getJson())); - } else { + } else { return null; } return result; } - private TestMapper() { - } -} \ No newline at end of file + private TestMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java index 4b7df8a0d23d3..eefcc356c22a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java @@ -3,9 +3,9 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -20,15 +20,12 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class TestType implements com.linkedin.datahub.graphql.types.EntityType<Test, String> { - static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.TEST_INFO_ASPECT_NAME - ); + static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(Constants.TEST_INFO_ASPECT_NAME); private final EntityClient _entityClient; - public TestType(final EntityClient entityClient) { + public TestType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -48,28 +45,28 @@ public Class<Test> objectClass() { } @Override - public List<DataFetcherResult<Test>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { - final List<Urn> testUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List<DataFetcherResult<Test>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> testUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2( - Constants.TEST_ENTITY_NAME, - new HashSet<>(testUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + Constants.TEST_ENTITY_NAME, + new HashSet<>(testUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List<EntityResponse> gmsResults = 
new ArrayList<>(); for (Urn urn : testUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.<Test>newResult() - .data(TestMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.<Test>newResult().data(TestMapper.map(gmsResult)).build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Tests", e); @@ -83,4 +80,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java index 7812282d0c1e5..02de39ffc644c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ChangeOperationType; import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult; @@ -25,15 +27,14 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame structs for every schema +// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame +// structs for every schema // at every semantic version. @Slf4j public class SchemaBlameMapper { - public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> changeTransactions, + public static GetSchemaBlameResult map( + @Nonnull final List<ChangeTransaction> changeTransactions, @Nullable final String versionCutoff) { final GetSchemaBlameResult result = new GetSchemaBlameResult(); if (changeTransactions.isEmpty()) { @@ -46,7 +47,8 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch final String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - final String semanticVersionFilterString = versionCutoff == null ? latestSemanticVersionString : versionCutoff; + final String semanticVersionFilterString = + versionCutoff == null ? 
latestSemanticVersionString : versionCutoff; final Optional<ComparableVersion> semanticVersionFilterOptional = createSemanticVersion(semanticVersionFilterString); if (semanticVersionFilterOptional.isEmpty()) { @@ -55,25 +57,30 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch final ComparableVersion semanticVersionFilter = semanticVersionFilterOptional.get(); - final List<ChangeTransaction> reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .filter(semanticVersionChangeTransactionPair -> - semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) <= 0) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + final List<ChangeTransaction> reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .filter( + semanticVersionChangeTransactionPair -> + semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) + <= 0) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); if (reversedChangeTransactions.isEmpty()) { return result; } - final String selectedSemanticVersion = truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); + final String selectedSemanticVersion = + truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); final long selectedSemanticVersionTimestamp = reversedChangeTransactions.get(0).getTimestamp(); final String selectedVersionStamp = reversedChangeTransactions.get(0).getVersionStamp(); result.setVersion( - new SemanticVersionStruct(selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); + new SemanticVersionStruct( + selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); for (ChangeTransaction changeTransaction : reversedChangeTransactions) { for (ChangeEvent changeEvent : changeTransaction.getChangeEvents()) { @@ -90,8 +97,10 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch SchemaFieldKey schemaFieldKey; try { - schemaFieldKey = (SchemaFieldKey) EntityKeyUtils.convertUrnToEntityKeyInternal(Urn.createFromString(schemaUrn), - new SchemaFieldKey().schema()); + schemaFieldKey = + (SchemaFieldKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + Urn.createFromString(schemaUrn), new SchemaFieldKey().schema()); } catch (Exception e) { log.debug(String.format("Could not generate schema urn for %s", schemaUrn)); continue; @@ -101,7 +110,10 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch schemaFieldBlame.setFieldPath(fieldPath); final SchemaFieldChange schemaFieldChange = - getLastSchemaFieldChange(changeEvent, changeTransaction.getTimestamp(), changeTransaction.getSemVer(), + getLastSchemaFieldChange( + changeEvent, + changeTransaction.getTimestamp(), + changeTransaction.getSemVer(), changeTransaction.getVersionStamp()); schemaFieldBlame.setSchemaFieldChange(schemaFieldChange); @@ -109,15 +121,17 @@ public static GetSchemaBlameResult map(@Nonnull final List<ChangeTransaction> ch } } - result.setSchemaFieldBlameList(schemaBlameMap.values() - .stream() - .filter(schemaFieldBlame -> !schemaFieldBlame.getSchemaFieldChange() - .getChangeType() - 
.equals(ChangeOperationType.REMOVE)) - .collect(Collectors.toList())); + result.setSchemaFieldBlameList( + schemaBlameMap.values().stream() + .filter( + schemaFieldBlame -> + !schemaFieldBlame + .getSchemaFieldChange() + .getChangeType() + .equals(ChangeOperationType.REMOVE)) + .collect(Collectors.toList())); return result; } - private SchemaBlameMapper() { - } -} \ No newline at end of file + private SchemaBlameMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java index 249957b1a1262..295ca0856821c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; import com.linkedin.datahub.graphql.generated.SemanticVersionStruct; import com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils; @@ -12,10 +14,8 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to list of schema versions. +// Class for converting ChangeTransactions received from the Timeline API to list of schema +// versions. @Slf4j public class SchemaVersionListMapper { @@ -29,28 +29,36 @@ public static GetSchemaVersionListResult map(List<ChangeTransaction> changeTrans String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - long latestSemanticVersionTimestamp = changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); - String latestVersionStamp = changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); + long latestSemanticVersionTimestamp = + changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); + String latestVersionStamp = + changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); result.setLatestVersion( - new SemanticVersionStruct(latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); + new SemanticVersionStruct( + latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); - List<ChangeTransaction> reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + List<ChangeTransaction> reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); - List<SemanticVersionStruct> semanticVersionStructList = reversedChangeTransactions.stream() - .map(changeTransaction -> new SemanticVersionStruct(truncateSemanticVersion(changeTransaction.getSemVer()), - 
changeTransaction.getTimestamp(), changeTransaction.getVersionStamp())) - .collect(Collectors.toList()); + List<SemanticVersionStruct> semanticVersionStructList = + reversedChangeTransactions.stream() + .map( + changeTransaction -> + new SemanticVersionStruct( + truncateSemanticVersion(changeTransaction.getSemVer()), + changeTransaction.getTimestamp(), + changeTransaction.getVersionStamp())) + .collect(Collectors.toList()); result.setSemanticVersionList(semanticVersionStructList); return result; } - private SchemaVersionListMapper() { - } -} \ No newline at end of file + private SchemaVersionListMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java index 175cf678117f0..37acfe3da0f9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java @@ -9,13 +9,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; - @Slf4j public class TimelineUtils { - public static Optional<Pair<ComparableVersion, ChangeTransaction>> semanticVersionChangeTransactionPair( - ChangeTransaction changeTransaction) { - Optional<ComparableVersion> semanticVersion = createSemanticVersion(changeTransaction.getSemVer()); + public static Optional<Pair<ComparableVersion, ChangeTransaction>> + semanticVersionChangeTransactionPair(ChangeTransaction changeTransaction) { + Optional<ComparableVersion> semanticVersion = + createSemanticVersion(changeTransaction.getSemVer()); return semanticVersion.map(version -> Pair.of(version, changeTransaction)); } @@ -29,21 +29,24 @@ public static Optional<ComparableVersion> createSemanticVersion(String semanticV } } - // The SemanticVersion is currently returned from the ChangeTransactions in the format "x.y.z-computed". This function + // The SemanticVersion is currently returned from the ChangeTransactions in the format + // "x.y.z-computed". This function // removes the suffix "computed". public static String truncateSemanticVersion(String semanticVersion) { String suffix = "-computed"; - return semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + return semanticVersion.endsWith(suffix) + ? 
semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; } - public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent, long timestamp, - String semanticVersion, String versionStamp) { + public static SchemaFieldChange getLastSchemaFieldChange( + ChangeEvent changeEvent, long timestamp, String semanticVersion, String versionStamp) { SchemaFieldChange schemaFieldChange = new SchemaFieldChange(); schemaFieldChange.setTimestampMillis(timestamp); schemaFieldChange.setLastSemanticVersion(truncateSemanticVersion(semanticVersion)); schemaFieldChange.setChangeType( - ChangeOperationType.valueOf(ChangeOperationType.class, changeEvent.getOperation().toString())); + ChangeOperationType.valueOf( + ChangeOperationType.class, changeEvent.getOperation().toString())); schemaFieldChange.setVersionStamp(versionStamp); String translatedChangeOperationType; @@ -65,15 +68,16 @@ public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent String suffix = "-computed"; String translatedSemanticVersion = - semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + semanticVersion.endsWith(suffix) + ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; - String lastSchemaFieldChange = String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); + String lastSchemaFieldChange = + String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); schemaFieldChange.setLastSchemaFieldChange(lastSchemaFieldChange); return schemaFieldChange; } - private TimelineUtils() { - } + private TimelineUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java index 3bf84d21a3215..e4e67c86f1ae6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java @@ -4,12 +4,13 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class FieldUsageCountsMapper implements ModelMapper<com.linkedin.usage.FieldUsageCounts, FieldUsageCounts> { +public class FieldUsageCountsMapper + implements ModelMapper<com.linkedin.usage.FieldUsageCounts, FieldUsageCounts> { public static final FieldUsageCountsMapper INSTANCE = new FieldUsageCountsMapper(); - public static FieldUsageCounts map(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { + public static FieldUsageCounts map( + @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { return INSTANCE.apply(usageCounts); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java index 453ae97d40306..3449c6782a46b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java @@ -5,18 +5,19 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class UsageAggregationMapper implements - 
ModelMapper<com.linkedin.usage.UsageAggregation, UsageAggregation> { +public class UsageAggregationMapper + implements ModelMapper<com.linkedin.usage.UsageAggregation, UsageAggregation> { public static final UsageAggregationMapper INSTANCE = new UsageAggregationMapper(); - public static UsageAggregation map(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + public static UsageAggregation map( + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { return INSTANCE.apply(pdlUsageAggregation); } @Override - public UsageAggregation apply(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + public UsageAggregation apply( + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { UsageAggregation result = new UsageAggregation(); result.setBucket(pdlUsageAggregation.getBucket()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java index 697b15d57e4e4..ff9f6fd5c4855 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java @@ -5,31 +5,34 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class UsageAggregationMetricsMapper implements - ModelMapper<com.linkedin.usage.UsageAggregationMetrics, UsageAggregationMetrics> { +public class UsageAggregationMetricsMapper + implements ModelMapper<com.linkedin.usage.UsageAggregationMetrics, UsageAggregationMetrics> { public static final UsageAggregationMetricsMapper INSTANCE = new UsageAggregationMetricsMapper(); - public static UsageAggregationMetrics map(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { + public static UsageAggregationMetrics map( + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { return INSTANCE.apply(usageAggregationMetrics); } @Override - public UsageAggregationMetrics apply(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { + public UsageAggregationMetrics apply( + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { UsageAggregationMetrics result = new UsageAggregationMetrics(); result.setTotalSqlQueries(usageAggregationMetrics.getTotalSqlQueries()); result.setUniqueUserCount(usageAggregationMetrics.getUniqueUserCount()); result.setTopSqlQueries(usageAggregationMetrics.getTopSqlQueries()); if (usageAggregationMetrics.hasFields()) { result.setFields( - usageAggregationMetrics.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + usageAggregationMetrics.getFields().stream() + .map(FieldUsageCountsMapper::map) + .collect(Collectors.toList())); } if (usageAggregationMetrics.hasUsers()) { - result.setUsers(usageAggregationMetrics.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + usageAggregationMetrics.getUsers().stream() + .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java index ba3b86b72af8b..63fe051b7ede9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java @@ -5,30 +5,35 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; +public class UsageQueryResultAggregationMapper + implements ModelMapper< + com.linkedin.usage.UsageQueryResultAggregations, UsageQueryResultAggregations> { -public class UsageQueryResultAggregationMapper implements - ModelMapper<com.linkedin.usage.UsageQueryResultAggregations, UsageQueryResultAggregations> { + public static final UsageQueryResultAggregationMapper INSTANCE = + new UsageQueryResultAggregationMapper(); - public static final UsageQueryResultAggregationMapper INSTANCE = new UsageQueryResultAggregationMapper(); - - public static UsageQueryResultAggregations map(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + public static UsageQueryResultAggregations map( + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { return INSTANCE.apply(pdlUsageResultAggregations); } @Override - public UsageQueryResultAggregations apply(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + public UsageQueryResultAggregations apply( + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { UsageQueryResultAggregations result = new UsageQueryResultAggregations(); result.setTotalSqlQueries(pdlUsageResultAggregations.getTotalSqlQueries()); result.setUniqueUserCount(pdlUsageResultAggregations.getUniqueUserCount()); if (pdlUsageResultAggregations.hasFields()) { result.setFields( - pdlUsageResultAggregations.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + pdlUsageResultAggregations.getFields().stream() + .map(FieldUsageCountsMapper::map) + .collect(Collectors.toList())); } if (pdlUsageResultAggregations.hasUsers()) { - result.setUsers(pdlUsageResultAggregations.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + pdlUsageResultAggregations.getUsers().stream() + .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java index f54259180c739..444605cd99377 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java @@ -1,17 +1,17 @@ package com.linkedin.datahub.graphql.types.usage; import com.linkedin.datahub.graphql.generated.UsageQueryResult; - import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class UsageQueryResultMapper implements ModelMapper<com.linkedin.usage.UsageQueryResult, UsageQueryResult> { +public class UsageQueryResultMapper + implements ModelMapper<com.linkedin.usage.UsageQueryResult, UsageQueryResult> { 
public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper(); - public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { + public static UsageQueryResult map( + @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { return INSTANCE.apply(pdlUsageResult); } @@ -19,11 +19,14 @@ public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryR public UsageQueryResult apply(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { UsageQueryResult result = new UsageQueryResult(); if (pdlUsageResult.hasAggregations()) { - result.setAggregations(UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); + result.setAggregations( + UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); } if (pdlUsageResult.hasBuckets()) { - result.setBuckets(pdlUsageResult.getBuckets().stream().map( - bucket -> UsageAggregationMapper.map(bucket)).collect(Collectors.toList())); + result.setBuckets( + pdlUsageResult.getBuckets().stream() + .map(bucket -> UsageAggregationMapper.map(bucket)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java index b525a761841e3..014003dd86554 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java @@ -1,23 +1,23 @@ package com.linkedin.datahub.graphql.types.usage; import com.linkedin.datahub.graphql.generated.CorpUser; - import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class UserUsageCountsMapper implements - ModelMapper<com.linkedin.usage.UserUsageCounts, UserUsageCounts> { +public class UserUsageCountsMapper + implements ModelMapper<com.linkedin.usage.UserUsageCounts, UserUsageCounts> { public static final UserUsageCountsMapper INSTANCE = new UserUsageCountsMapper(); - public static UserUsageCounts map(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + public static UserUsageCounts map( + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { return INSTANCE.apply(pdlUsageResultAggregations); } @Override - public UserUsageCounts apply(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + public UserUsageCounts apply( + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { UserUsageCounts result = new UserUsageCounts(); if (pdlUsageResultAggregations.hasUser()) { CorpUser partialUser = new CorpUser(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index f6c348937c7a5..8ea06f46d5133 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import 
com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataHubViewDefinition; @@ -24,8 +26,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - @Slf4j public class DataHubViewMapper implements ModelMapper<EntityResponse, DataHubView> { @@ -57,20 +57,26 @@ private void mapDataHubViewInfo(@Nonnull final DataHubView view, @Nonnull final } @Nonnull - private DataHubViewDefinition mapViewDefinition(@Nonnull final com.linkedin.view.DataHubViewDefinition definition) { + private DataHubViewDefinition mapViewDefinition( + @Nonnull final com.linkedin.view.DataHubViewDefinition definition) { final DataHubViewDefinition result = new DataHubViewDefinition(); result.setFilter(mapFilter(definition.getFilter())); - result.setEntityTypes(definition.getEntityTypes().stream().map(EntityTypeMapper::getType).collect( - Collectors.toList())); + result.setEntityTypes( + definition.getEntityTypes().stream() + .map(EntityTypeMapper::getType) + .collect(Collectors.toList())); return result; } @Nullable - private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.filter.Filter filter) { - // This assumes that people DO NOT emit Views on their own, since we expect that the Filter structure is within + private DataHubViewFilter mapFilter( + @Nonnull final com.linkedin.metadata.query.filter.Filter filter) { + // This assumes that people DO NOT emit Views on their own, since we expect that the Filter + // structure is within // a finite set of possibilities. // - // If we find a View that was ingested manually and malformed, then we log that and return a default. + // If we find a View that was ingested manually and malformed, then we log that and return a + // default. final DataHubViewFilter result = new DataHubViewFilter(); if (filter.hasOr() && filter.getOr().size() == 1) { // Then we are looking at an AND with multiple sub conditions. @@ -84,9 +90,7 @@ private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.f return result; } - /** - * This simply converts a List of leaf criterion into the FacetFilter equivalent. - */ + /** This simply converts a List of leaf criterion into the FacetFilter equivalent. */ @Nonnull private List<FacetFilter> mapAndFilters(@Nullable final List<Criterion> ands) { // If the array is missing, return empty array. @@ -98,9 +102,9 @@ private List<FacetFilter> mapAndFilters(@Nullable final List<Criterion> ands) { } /** - * This converts a list of Conjunctive Criterion into a flattened list - * of FacetFilters. This method makes the assumption that WE (our GraphQL API) - * has minted the View and that each or criterion contains at maximum one nested condition. + * This converts a list of Conjunctive Criterion into a flattened list of FacetFilters. This + * method makes the assumption that WE (our GraphQL API) has minted the View and that each or + * criterion contains at maximum one nested condition. */ @Nonnull private List<FacetFilter> mapOrFilters(@Nullable final List<ConjunctiveCriterion> ors) { @@ -109,8 +113,10 @@ private List<FacetFilter> mapOrFilters(@Nullable final List<ConjunctiveCriterion return Collections.emptyList(); } if (ors.stream().anyMatch(or -> or.hasAnd() && or.getAnd().size() > 1)) { - log.warn(String.format( - "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. 
%s", ors)); + log.warn( + String.format( + "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s", + ors)); return Collections.emptyList(); } // It is assumed that in this case, the view is a flat list of ORs. Thus, we filter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java index 21a80e3f900d4..9b3680bde9b2b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubViewType implements com.linkedin.datahub.graphql.types.EntityType<DataHubView, String> { +public class DataHubViewType + implements com.linkedin.datahub.graphql.types.EntityType<DataHubView, String> { public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,13 +44,16 @@ public Class<DataHubView> objectClass() { } @Override - public List<DataFetcherResult<DataHubView>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) - throws Exception { + public List<DataFetcherResult<DataHubView>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { final List<Urn> viewUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map<Urn, EntityResponse> entities = - _entityClient.batchGetV2(DATAHUB_VIEW_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATAHUB_VIEW_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List<EntityResponse> gmsResults = new ArrayList<>(); @@ -58,8 +61,13 @@ public List<DataFetcherResult<DataHubView>> batchLoad(@Nonnull List<String> urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.<DataHubView>newResult().data(DataHubViewMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.<DataHubView>newResult() + .data(DataHubViewMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Views", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java index bb9de5fb96802..4b837605d4e31 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java @@ -5,39 +5,30 @@ import org.joda.time.DateTimeConstants; public class DateUtil { - public DateTime getNow() { - return DateTime.now(); - } + public DateTime getNow() { + return DateTime.now(); + } - public DateTime getStartOfNextWeek() { - return setTimeToZero(getNow() - .withDayOfWeek(DateTimeConstants.SUNDAY) - .plusDays(1)); - } + public DateTime getStartOfNextWeek() { + return setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1)); + } - public DateTime getStartOfNextMonth() { - return setTimeToZero(getNow() - .withDayOfMonth(1) - .plusMonths(1)); - } + public DateTime getStartOfNextMonth() { + return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1)); + } - public DateTime setTimeToZero(DateTime input) { - return input.withHourOfDay(0) - .withMinuteOfHour(0) - .withSecondOfMinute(0) - .withMillisOfDay(0); - } + public DateTime setTimeToZero(DateTime input) { + return input.withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0).withMillisOfDay(0); + } - public DateTime getTomorrowStart() { - return setTimeToZero(getNow().plusDays(1)); - } + public DateTime getTomorrowStart() { + return setTimeToZero(getNow().plusDays(1)); + } - public DateRange getTrailingWeekDateRange() { - final DateTime todayEnd = getTomorrowStart().minusMillis(1); - final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); - return new DateRange( - String.valueOf(aWeekAgoStart.getMillis()), - String.valueOf(todayEnd.getMillis()) - ); - } + public DateRange getTrailingWeekDateRange() { + final DateTime todayEnd = getTomorrowStart().minusMillis(1); + final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); + return new DateRange( + String.valueOf(aWeekAgoStart.getMillis()), String.valueOf(todayEnd.getMillis())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java index 7f90071c6770c..904db311d34d0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java @@ -6,7 +6,6 @@ import java.util.List; import javax.annotation.Nullable; - public class SearchInsightsUtil { public static List<SearchInsight> getInsightsFromFeatures(@Nullable final DoubleMap features) { @@ -18,5 +17,5 @@ public static List<SearchInsight> getInsightsFromFeatures(@Nullable final Double return Collections.emptyList(); } - private SearchInsightsUtil() { } + private SearchInsightsUtil() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index 606123cac926d..69cd73ecd7d68 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -14,17 +14,17 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; -import org.mockito.Mockito; - import java.util.List; - +import org.mockito.Mockito; public class TestUtils { public static EntityService getMockEntityService() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - EntityRegistry registry = new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); + EntityRegistry registry = + new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); EntityService mockEntityService = Mockito.mock(EntityService.class); Mockito.when(mockEntityService.getEntityRegistry()).thenReturn(registry); return mockEntityService; @@ -44,9 +44,10 @@ public static QueryContext getMockAllowContext(String actorUrn) { Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -60,9 +61,10 @@ public static QueryContext getMockAllowContext(String actorUrn, AuthorizationReq Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -80,9 +82,10 @@ public static QueryContext getMockDenyContext(String actorUrn) { Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -96,55 +99,54 @@ public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequ Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + public static void verifyIngestProposal( + 
EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { verifyIngestProposal(mockService, numberOfInvocations, List.of(proposal)); } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, List<MetadataChangeProposal> proposals) { - AspectsBatchImpl batch = AspectsBatchImpl.builder() - .mcps(proposals, mockService.getEntityRegistry()) - .build(); - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(batch), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifyIngestProposal( + EntityService mockService, int numberOfInvocations, List<MetadataChangeProposal> proposals) { + AspectsBatchImpl batch = + AspectsBatchImpl.builder().mcps(proposals, mockService.getEntityRegistry()).build(); + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(Mockito.eq(batch), Mockito.any(AuditStamp.class), Mockito.eq(false)); } - public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifySingleIngestProposal( + EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(AuditStamp.class), Mockito.eq(false)); } public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); } - public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( + public static void verifySingleIngestProposal( + EntityService mockService, int numberOfInvocations) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal( Mockito.any(MetadataChangeProposal.class), Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.eq(false)); } public static void verifyNoIngestProposal(EntityService mockService) { - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } - private TestUtils() { } + private TestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java index 7cd548a4790ba..57d85e5b204c2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static org.testng.AssertJUnit.assertEquals; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,16 +18,11 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetchingEnvironment; -import org.testng.annotations.Test; -import org.mockito.Mockito; - import java.util.HashSet; import java.util.Set; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static org.testng.AssertJUnit.assertEquals; - +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ResolverUtilsTest { @@ -35,46 +33,48 @@ public void testCriterionFromFilter() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockAllowContext = TestUtils.getMockAllowContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockAllowContext); // this is the expected path - Criterion valuesCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - null, - ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"), - false, - FilterOperator.EQUAL - ) - ); - assertEquals(valuesCriterion, new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword")); + Criterion valuesCriterion = + criterionFromFilter( + new FacetFilterInput( + "tags", + null, + ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"), + false, + FilterOperator.EQUAL)); + assertEquals( + valuesCriterion, + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); // this is the legacy pathway - Criterion valueCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - "urn:li:tag:abc", - null, - true, - FilterOperator.EQUAL - ) - ); - assertEquals(valueCriterion, new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc")) - ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword")); + Criterion valueCriterion = + criterionFromFilter( + new FacetFilterInput("tags", "urn:li:tag:abc", null, true, FilterOperator.EQUAL)); + assertEquals( + valueCriterion, + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc"))) + .setNegated(true) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); - // check that both being null doesn't cause an NPE. this should never happen except via API interaction - Criterion doubleNullCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - null, - null, - true, - FilterOperator.EQUAL - ) - ); - assertEquals(doubleNullCriterion, new Criterion().setValue("").setValues( - new StringArray(ImmutableList.of()) - ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword")); + // check that both being null doesn't cause an NPE. 
this should never happen except via API + // interaction + Criterion doubleNullCriterion = + criterionFromFilter(new FacetFilterInput("tags", null, null, true, FilterOperator.EQUAL)); + assertEquals( + doubleNullCriterion, + new Criterion() + .setValue("") + .setValues(new StringArray(ImmutableList.of())) + .setNegated(true) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); } @Test @@ -85,21 +85,25 @@ public void testBuildFilterWithUrns() throws Exception { urns.add(urn1); urns.add(urn2); - Criterion ownersCriterion = new Criterion() - .setField("owners") - .setValues(new StringArray("urn:li:corpuser:chris")) - .setCondition(Condition.EQUAL); + Criterion ownersCriterion = + new Criterion() + .setField("owners") + .setValues(new StringArray("urn:li:corpuser:chris")) + .setCondition(Condition.EQUAL); CriterionArray andCriterionArray = new CriterionArray(ImmutableList.of(ownersCriterion)); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(andCriterionArray) - ))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(andCriterionArray)))); Filter finalFilter = buildFilterWithUrns(urns, filter); - Criterion urnsCriterion = new Criterion().setField("urn") - .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + Criterion urnsCriterion = + new Criterion() + .setField("urn") + .setValue("") + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); for (ConjunctiveCriterion conjunctiveCriterion : finalFilter.getOr()) { assertEquals(conjunctiveCriterion.getAnd().contains(ownersCriterion), true); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java index c7424174255ce..0d87ce4b2e2ad 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -9,35 +14,35 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetchingEnvironment; -import org.joda.time.DateTimeUtils; -import org.mockito.Mockito; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.joda.time.DateTimeUtils; +import org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import 
org.testng.annotations.Test; public class UpdateLineageResolverTest { private static EntityService _mockService = Mockito.mock(EntityService.class); private static LineageService _lineageService; private static DataFetchingEnvironment _mockEnv; - private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; - private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; - private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; - private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; + private static final String DATASET_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; + private static final String DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; + private static final String DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; + private static final String DATASET_URN_4 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; - private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; - private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; + private static final String DATAJOB_URN_1 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; + private static final String DATAJOB_URN_2 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; @BeforeMethod public void setupTest() { @@ -50,8 +55,12 @@ public void setupTest() { // Adds upstream for dataset1 to dataset2 and removes edge to dataset3 @Test public void testUpdateDatasetLineage() throws Exception { - List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4)); - List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); + List<LineageEdge> edgesToAdd = + Arrays.asList( + createLineageEdge(DATASET_URN_1, DATASET_URN_2), + createLineageEdge(DATASET_URN_3, DATASET_URN_4)); + List<LineageEdge> edgesToRemove = + Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -65,7 +74,8 @@ public void testUpdateDatasetLineage() throws Exception { @Test public void testFailUpdateWithMissingDownstream() throws Exception { - List<LineageEdge> edgesToAdd = Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2)); + List<LineageEdge> edgesToAdd = + Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2)); mockInputAndContext(edgesToAdd, new ArrayList<>()); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -93,8 +103,12 @@ public void testUpdateChartLineage() throws Exception { // Adds upstream for dashboard to dataset2 and chart1 and removes edge to dataset1 @Test public void testUpdateDashboardLineage() throws Exception { - List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_2), createLineageEdge(DASHBOARD_URN, CHART_URN)); - List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1)); + List<LineageEdge> edgesToAdd = + 
Arrays.asList( + createLineageEdge(DASHBOARD_URN, DATASET_URN_2), + createLineageEdge(DASHBOARD_URN, CHART_URN)); + List<LineageEdge> edgesToRemove = + Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -109,12 +123,13 @@ public void testUpdateDashboardLineage() throws Exception { // Adds upstream datajob and dataset and one downstream dataset @Test public void testUpdateDataJobLineage() throws Exception { - List<LineageEdge> edgesToAdd = Arrays.asList( - createLineageEdge(DATAJOB_URN_1, DATASET_URN_2), - createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2), - createLineageEdge(DATASET_URN_3, DATAJOB_URN_1) - ); - List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1)); + List<LineageEdge> edgesToAdd = + Arrays.asList( + createLineageEdge(DATAJOB_URN_1, DATASET_URN_2), + createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2), + createLineageEdge(DATASET_URN_3, DATAJOB_URN_1)); + List<LineageEdge> edgesToRemove = + Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -129,8 +144,12 @@ public void testUpdateDataJobLineage() throws Exception { @Test public void testFailUpdateLineageNoPermissions() throws Exception { - List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4)); - List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); + List<LineageEdge> edgesToAdd = + Arrays.asList( + createLineageEdge(DATASET_URN_1, DATASET_URN_2), + createLineageEdge(DATASET_URN_3, DATASET_URN_4)); + List<LineageEdge> edgesToRemove = + Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); QueryContext mockContext = getMockDenyContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); @@ -147,7 +166,6 @@ public void testFailUpdateLineageNoPermissions() throws Exception { assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join()); } - private void mockInputAndContext(List<LineageEdge> edgesToAdd, List<LineageEdge> edgesToRemove) { QueryContext mockContext = getMockAllowContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java index 6fdb1f2b70ce4..f590e71146eb4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionResult; @@ -19,9 +21,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class AssertionRunEventResolverTest { @Test public void testGetSuccess() throws Exception { @@ -29,35 +28,36 @@ public void 
testGetSuccess() throws Exception { final Urn assertionUrn = Urn.createFromString("urn:li:assertion:guid-1"); final Urn asserteeUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); - final AssertionRunEvent gmsRunEvent = new AssertionRunEvent() - .setTimestampMillis(12L) - .setAssertionUrn(assertionUrn) - .setRunId("test-id") - .setAsserteeUrn(asserteeUrn) - .setStatus(AssertionRunStatus.COMPLETE) - .setResult(new AssertionResult() - .setActualAggValue(10) - .setMissingCount(0L) - .setRowCount(1L) - .setType(AssertionResultType.SUCCESS) - .setUnexpectedCount(2L) - ); + final AssertionRunEvent gmsRunEvent = + new AssertionRunEvent() + .setTimestampMillis(12L) + .setAssertionUrn(assertionUrn) + .setRunId("test-id") + .setAsserteeUrn(asserteeUrn) + .setStatus(AssertionRunStatus.COMPLETE) + .setResult( + new AssertionResult() + .setActualAggValue(10) + .setMissingCount(0L) + .setRowCount(1L) + .setType(AssertionResultType.SUCCESS) + .setUnexpectedCount(2L)); - Mockito.when(mockClient.getTimeseriesAspectValues( - Mockito.eq(assertionUrn.toString()), - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.eq(0L), - Mockito.eq(10L), - Mockito.eq(5), - Mockito.eq(AssertionRunEventResolver.buildFilter(null, AssertionRunStatus.COMPLETE.toString())), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableList.of( - new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent)) - ) - ); + Mockito.when( + mockClient.getTimeseriesAspectValues( + Mockito.eq(assertionUrn.toString()), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.eq(0L), + Mockito.eq(10L), + Mockito.eq(5), + Mockito.eq( + AssertionRunEventResolver.buildFilter( + null, AssertionRunStatus.COMPLETE.toString())), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableList.of( + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent)))); AssertionRunEventResolver resolver = new AssertionRunEventResolver(mockClient); @@ -66,9 +66,12 @@ public void testGetSuccess() throws Exception { Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))).thenReturn("COMPLETE"); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))).thenReturn(0L); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))).thenReturn(10L); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))) + .thenReturn("COMPLETE"); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))) + .thenReturn(0L); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))) + .thenReturn(10L); Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("limit"), Mockito.eq(null))).thenReturn(5); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -78,32 +81,37 @@ public void testGetSuccess() throws Exception { AssertionRunEventsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).getTimeseriesAspectValues( - Mockito.eq(assertionUrn.toString()), - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.eq(0L), - Mockito.eq(10L), - 
Mockito.eq(5), - Mockito.any(Filter.class), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .getTimeseriesAspectValues( + Mockito.eq(assertionUrn.toString()), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.eq(0L), + Mockito.eq(10L), + Mockito.eq(5), + Mockito.any(Filter.class), + Mockito.any(Authentication.class)); // Assert that GraphQL assertion run event matches expectations assertEquals(result.getTotal(), 1); assertEquals(result.getFailed(), 0); assertEquals(result.getSucceeded(), 1); - com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = resolver.get(mockEnv).get().getRunEvents().get(0); + com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = + resolver.get(mockEnv).get().getRunEvents().get(0); assertEquals(graphqlRunEvent.getAssertionUrn(), assertionUrn.toString()); assertEquals(graphqlRunEvent.getAsserteeUrn(), asserteeUrn.toString()); assertEquals(graphqlRunEvent.getRunId(), "test-id"); - assertEquals(graphqlRunEvent.getStatus(), com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE); + assertEquals( + graphqlRunEvent.getStatus(), + com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE); assertEquals((float) graphqlRunEvent.getTimestampMillis(), 12L); assertEquals((float) graphqlRunEvent.getResult().getActualAggValue(), 10); assertEquals((long) graphqlRunEvent.getResult().getMissingCount(), 0L); assertEquals((long) graphqlRunEvent.getResult().getRowCount(), 1L); assertEquals((long) graphqlRunEvent.getResult().getUnexpectedCount(), 2L); - assertEquals(graphqlRunEvent.getResult().getType(), com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS); + assertEquals( + graphqlRunEvent.getResult().getType(), + com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java index 8afec0a889577..019d254ffdaac 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.assertion.AssertionInfo; @@ -18,10 +21,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteAssertionResolverTest { private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; @@ -33,20 +32,17 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion( - new DatasetAssertionInfo() - 
.setDataset(Urn.createFromString(TEST_DATASET_URN)) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setOperator(AssertionStdOperator.BETWEEN) - ) - ); + Mockito.when( + mockService.getAspect( + Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) + .thenReturn( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(Urn.createFromString(TEST_DATASET_URN)) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + .setOperator(AssertionStdOperator.BETWEEN))); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); @@ -58,20 +54,19 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.any(Authentication.class)); - Mockito.verify(mockService, Mockito.times(1)).getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getAspect( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), + Mockito.eq(0L)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); } @Test @@ -80,11 +75,10 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn(null); + Mockito.when( + mockService.getAspect( + Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) + .thenReturn(null); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); @@ -96,20 +90,19 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.any(Authentication.class)); - Mockito.verify(mockService, Mockito.times(1)).getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getAspect( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), + Mockito.eq(0L)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); } @Test @@ -130,21 +123,20 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, 
Mockito.times(0)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); - - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), - Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.any(Authentication.class)); + + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), + Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); } @Test @@ -153,20 +145,17 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion( - new DatasetAssertionInfo() - .setDataset(Urn.createFromString(TEST_DATASET_URN)) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setOperator(AssertionStdOperator.BETWEEN) - ) - ); + Mockito.when( + mockService.getAspect( + Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) + .thenReturn( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(Urn.createFromString(TEST_DATASET_URN)) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + .setOperator(AssertionStdOperator.BETWEEN))); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); @@ -177,17 +166,16 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); @@ -202,4 +190,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java index c5b5725f23b7a..19152a7a11877 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -37,9 +39,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityAssertionsResolverTest { @Test public void testGetSuccess() throws Exception { @@ -49,73 +48,76 @@ public void testGetSuccess() throws Exception { Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); Urn assertionUrn = Urn.createFromString("urn:li:assertion:test-guid"); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(datasetUrn.toString()), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(assertionUrn) - .setType("Asserts")) - )) - ); - + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(datasetUrn.toString()), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(10), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship().setEntity(assertionUrn).setType("Asserts"))))); Map<String, com.linkedin.entity.EnvelopedAspect> assertionAspects = new HashMap<>(); assertionAspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionKey().setAssertionId("test-guid").data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new AssertionKey().setAssertionId("test-guid").data()))); assertionAspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(new DatasetAssertionInfo() - .setDataset(datasetUrn) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setAggregation(AssertionStdAggregation.MAX) - .setOperator(AssertionStdOperator.EQUAL_TO) - .setFields(new UrnArray(ImmutableList.of( - Urn.createFromString("urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)") - ))) - .setParameters(new AssertionStdParameters().setValue(new AssertionStdParameter() - .setValue("10") - .setType( - AssertionStdParameterType.NUMBER))) - ).data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(datasetUrn) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + 
.setAggregation(AssertionStdAggregation.MAX) + .setOperator(AssertionStdOperator.EQUAL_TO) + .setFields( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)")))) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setValue("10") + .setType(AssertionStdParameterType.NUMBER)))) + .data()))); assertionAspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) - .data() - )) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn)), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(ImmutableMap.of( - assertionUrn, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn) - .setAspects(new EnvelopedAspectMap(assertionAspects)))); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new DataPlatformInstance() + .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) + .data()))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn)), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + assertionUrn, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn) + .setAspects(new EnvelopedAspectMap(assertionAspects)))); EntityAssertionsResolver resolver = new EntityAssertionsResolver(mockClient, graphClient); @@ -134,38 +136,45 @@ public void testGetSuccess() throws Exception { EntityAssertionsResult result = resolver.get(mockEnv).get(); - Mockito.verify(graphClient, Mockito.times(1)).getRelatedEntities( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(graphClient, Mockito.times(1)) + .getRelatedEntities( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); // Assert that GraphQL assertion run event matches expectations assertEquals(result.getStart(), 0); assertEquals(result.getCount(), 1); assertEquals(result.getTotal(), 1); - com.linkedin.datahub.graphql.generated.Assertion assertion = resolver.get(mockEnv).get().getAssertions().get(0); + com.linkedin.datahub.graphql.generated.Assertion assertion = + resolver.get(mockEnv).get().getAssertions().get(0); assertEquals(assertion.getUrn(), assertionUrn.toString()); assertEquals(assertion.getType(), EntityType.ASSERTION); assertEquals(assertion.getPlatform().getUrn(), "urn:li:dataPlatform:hive"); - assertEquals(assertion.getInfo().getType(), com.linkedin.datahub.graphql.generated.AssertionType.DATASET); + assertEquals( + assertion.getInfo().getType(), + com.linkedin.datahub.graphql.generated.AssertionType.DATASET); assertEquals(assertion.getInfo().getDatasetAssertion().getDatasetUrn(), datasetUrn.toString()); - assertEquals(assertion.getInfo().getDatasetAssertion().getScope(), com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); - 
assertEquals(assertion.getInfo().getDatasetAssertion().getAggregation(), com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); - assertEquals(assertion.getInfo().getDatasetAssertion().getOperator(), com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), + assertEquals( + assertion.getInfo().getDatasetAssertion().getScope(), + com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); + assertEquals( + assertion.getInfo().getDatasetAssertion().getAggregation(), + com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); + assertEquals( + assertion.getInfo().getDatasetAssertion().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 52d06f73dcfab..419eb71d5e143 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class ListAccessTokensResolverTest { @Test @@ -42,16 +41,22 @@ public void testGetSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); final Authentication testAuth = getAuthentication(mockEnv); - Mockito.when(mockClient.search( - Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(buildFilter(filters, Collections.emptyList())), - Mockito.any(SortCriterion.class), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.eq(testAuth), - Mockito.any(SearchFlags.class))) - .thenReturn(new SearchResult().setFrom(0).setNumEntities(0).setPageSize(0).setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(buildFilter(filters, Collections.emptyList())), + Mockito.any(SortCriterion.class), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()), + Mockito.eq(testAuth), + Mockito.any(SearchFlags.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setNumEntities(0) + .setPageSize(0) + .setEntities(new SearchEntityArray())); final ListAccessTokensResolver resolver = new ListAccessTokensResolver(mockClient); final ListAccessTokenResult listAccessTokenResult = resolver.get(mockEnv).get(); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index 4a948537ab4fe..bffc2b31af2b9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.browse; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -29,15 +31,12 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.List; import org.mockito.Mockito; import org.testng.Assert; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.List; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class BrowseV2ResolverTest { private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @@ -46,23 +45,30 @@ public class BrowseV2ResolverTest { @Test public static void testBrowseV2Success() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - null, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + null, + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); @@ -92,23 +98,30 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { orFilters.add(andFilterInput); Filter filter = ResolverUtils.buildFilter(null, orFilters); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "test", - filter, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "test", + filter, + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + 
.setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); @@ -132,23 +145,30 @@ public static void testBrowseV2SuccessWithView() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - viewInfo.getDefinition().getFilter(), - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + viewInfo.getDefinition().getFilter(), + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService); @@ -166,16 +186,25 @@ public static void testBrowseV2SuccessWithView() throws Exception { compareResultToExpectedData(result, getExpectedResult()); } - private static void compareResultToExpectedData(BrowseResultsV2 result, BrowseResultsV2 expected) { + private static void compareResultToExpectedData( + BrowseResultsV2 result, BrowseResultsV2 expected) { Assert.assertEquals(result.getCount(), expected.getCount()); Assert.assertEquals(result.getStart(), expected.getStart()); Assert.assertEquals(result.getTotal(), expected.getTotal()); Assert.assertEquals(result.getGroups().size(), expected.getGroups().size()); - result.getGroups().forEach(group -> { - Assert.assertTrue(expected.getGroups().stream().filter(g -> g.getName().equals(group.getName())).count() > 0); - }); + result + .getGroups() + .forEach( + group -> { + Assert.assertTrue( + expected.getGroups().stream() + .filter(g -> g.getName().equals(group.getName())) + .count() + > 0); + }); Assert.assertEquals(result.getMetadata().getPath(), expected.getMetadata().getPath()); - Assert.assertEquals(result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); + Assert.assertEquals( + result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); } private static BrowseResultsV2 getExpectedResult() { @@ -185,19 +214,22 @@ private static BrowseResultsV2 getExpectedResult() { results.setCount(10); List<com.linkedin.datahub.graphql.generated.BrowseResultGroupV2> groups = new ArrayList<>(); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup1.setName("first group"); 
browseGroup1.setCount(5L); browseGroup1.setHasSubGroups(true); groups.add(browseGroup1); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup2.setName("second group"); browseGroup2.setCount(4L); browseGroup2.setHasSubGroups(false); groups.add(browseGroup2); results.setGroups(groups); - com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); + com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = + new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); resultMetadata.setPath(ImmutableList.of("test", "path")); resultMetadata.setTotalNumEntities(100L); results.setMetadata(resultMetadata); @@ -212,60 +244,52 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - BrowseResultV2 result - ) throws Exception { + BrowseResultV2 result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.browseV2( - Mockito.eq(entityName), - Mockito.eq(path), - Mockito.eq(filter), - Mockito.eq(query), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.browseV2( + Mockito.eq(entityName), + Mockito.eq(path), + Mockito.eq(filter), + Mockito.eq(query), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } + private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } - private BrowseV2ResolverTest() { } - + private BrowseV2ResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java index 659e6aea740ec..75abf1d48a15c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.browse; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -16,12 +18,10 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityBrowsePathsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { @@ -30,9 +30,7 @@ public void testGetSuccess() throws Exception { List<String> path = ImmutableList.of("prod", "mysql"); Mockito.when(mockType.browsePaths(Mockito.eq(TEST_ENTITY_URN), Mockito.any())) - .thenReturn(ImmutableList.of( - new BrowsePath(path)) - ); + .thenReturn(ImmutableList.of(new BrowsePath(path))); // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -55,9 +53,9 @@ public void testGetSuccess() throws Exception { @Test public void testGetBrowsePathsException() throws Exception { BrowsableEntityType mockType = Mockito.mock(BrowsableEntityType.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockType).browsePaths( - Mockito.any(), - Mockito.any()); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockType) + .browsePaths(Mockito.any(), Mockito.any()); EntityBrowsePathsResolver resolver = new EntityBrowsePathsResolver(mockType); @@ -75,4 +73,4 @@ public void testGetBrowsePathsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java index 39a08ca26167d..1203f4e22bdc2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -24,17 +26,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class ContainerEntitiesResolverTest { - private static final ContainerEntitiesInput TEST_INPUT = new ContainerEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final ContainerEntitiesInput TEST_INPUT = + new ContainerEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { @@ -44,35 +39,39 @@ public void testGetSuccess() throws Exception { final String 
childUrn = "urn:li:dataset:(test,test,test)"; final String containerUrn = "urn:li:container:test-container"; - final Criterion filterCriterion = new Criterion() - .setField("container.keyword") - .setCondition(Condition.EQUAL) - .setValue(containerUrn); + final Criterion filterCriterion = + new Criterion() + .setField("container.keyword") + .setCondition(Condition.EQUAL) + .setValue(containerUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), - Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); ContainerEntitiesResolver resolver = new ContainerEntitiesResolver(mockClient); @@ -92,6 +91,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java index 92f8dfc4e1d67..b4c58ca182b2f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.container.Container; @@ -14,18 +19,11 @@ import com.linkedin.entity.EnvelopedAspectMap; import 
com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentContainersResolverTest { @Test @@ -42,77 +40,88 @@ public void testGetSuccess() throws Exception { datasetEntity.setType(EntityType.DATASET); Mockito.when(mockEnv.getSource()).thenReturn(datasetEntity); - final Container parentContainer1 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); - final Container parentContainer2 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); + final Container parentContainer1 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); + final Container parentContainer2 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); Map<String, EnvelopedAspect> datasetAspects = new HashMap<>(); - datasetAspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); + datasetAspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); Map<String, EnvelopedAspect> parentContainer1Aspects = new HashMap<>(); - parentContainer1Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new ContainerProperties().setName("test_schema").data() - ))); - parentContainer1Aspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - parentContainer2.data() - ))); + parentContainer1Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_schema").data()))); + parentContainer1Aspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer2.data()))); Map<String, EnvelopedAspect> parentContainer2Aspects = new HashMap<>(); - parentContainer2Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new ContainerProperties().setName("test_database").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(datasetUrn.getEntityType()), - Mockito.eq(datasetUrn), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer1.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( - 
Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer2.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); + parentContainer2Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_database").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(datasetUrn.getEntityType()), + Mockito.eq(datasetUrn), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer1.getContainer()) + .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer2.getContainer()) + .setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); ParentContainersResolver resolver = new ParentContainersResolver(mockClient); ParentContainersResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); - assertEquals(result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); - assertEquals(result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); + assertEquals( + result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); + assertEquals( + result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); } } diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java index 6a9617ea41b44..2abfa39b35149 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; @@ -28,9 +30,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - public class DashboardStatsSummaryTest { private static final Dashboard TEST_SOURCE = new Dashboard(); @@ -65,31 +64,35 @@ public void testGetSuccess() throws Exception { Assert.assertEquals((int) result.getUniqueUserCountLast30Days(), 2); // Validate the cache. -- First return a new result. - DashboardUsageStatistics newUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(40); - EnvelopedAspect newResult = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); + DashboardUsageStatistics newUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(40); + EnvelopedAspect newResult = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn(ImmutableList.of(newResult)); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(newResult)); // Then verify that the new result is _not_ returned (cache hit) DashboardStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getViewCount(), 20); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 2); } @@ -97,28 +100,27 @@ public void testGetSuccess() throws Exception { public void 
testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DASHBOARD_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DASHBOARD_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -140,48 +142,46 @@ private TimeseriesAspectService initTestAspectService() { TimeseriesAspectService mockClient = Mockito.mock(TimeseriesAspectService.class); // Mock fetching the latest absolute (snapshot) statistics - DashboardUsageStatistics latestUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(20); - EnvelopedAspect envelopedLatestStats = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); + DashboardUsageStatistics latestUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(20); + EnvelopedAspect envelopedLatestStats = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn( - ImmutableList.of(envelopedLatestStats) - ); - - Mockito.when(mockClient.getAggregatedStats( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.any(), - Mockito.any(Filter.class), - Mockito.any() - )).thenReturn( - new GenericTable().setRows(new StringArrayArray( - new StringArray(ImmutableList.of( - TEST_USER_URN_1, "10", "20", "30", "1", "1", "1" - )), - new StringArray(ImmutableList.of( - TEST_USER_URN_2, "20", "30", "40", "1", "1", "1" - )) - )) - .setColumnNames(new StringArray()) - .setColumnTypes(new StringArray()) - ); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + 
Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(envelopedLatestStats)); + + Mockito.when( + mockClient.getAggregatedStats( + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.any(), + Mockito.any(Filter.class), + Mockito.any())) + .thenReturn( + new GenericTable() + .setRows( + new StringArrayArray( + new StringArray( + ImmutableList.of(TEST_USER_URN_1, "10", "20", "30", "1", "1", "1")), + new StringArray( + ImmutableList.of(TEST_USER_URN_2, "20", "30", "40", "1", "1", "1")))) + .setColumnNames(new StringArray()) + .setColumnTypes(new StringArray())); return mockClient; } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java index ea9ab2a1b768b..3ff0120448e54 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataset; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; @@ -23,60 +25,54 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class DatasetHealthResolverTest { private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:test-guid-2"; - @Test public void testGetSuccessHealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts")) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new 
EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -103,20 +99,20 @@ public void testGetSuccessNullHealth() throws Exception { TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); // 0 associated assertions, meaning we don't report any health. - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(0) - .setRelationships(new EntityRelationshipArray(Collections.emptyList())) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(0) + .setRelationships(new EntityRelationshipArray(Collections.emptyList()))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -134,13 +130,9 @@ public void testGetSuccessNullHealth() throws Exception { List<Health> result = resolver.get(mockEnv).get(); assertEquals(result.size(), 0); - Mockito.verify(mockAspectService, Mockito.times(0)).getAggregatedStats( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockAspectService, Mockito.times(0)) + .getAggregatedStats( + Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); } @Test @@ -148,52 +140,47 @@ public void testGetSuccessUnhealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(2) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts"), - new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) - .setType("Asserts") - ) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", 
"string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )), - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN_2, "FAILURE", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(2) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"), + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray(ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")), + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN_2, "FAILURE", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java index 013e23b779c51..52516295f97ad 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java @@ -19,11 +19,11 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class DatasetStatsSummaryResolverTest { private static final Dataset TEST_SOURCE = new Dataset(); - private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; private static final String TEST_USER_URN_1 = "urn:li:corpuser:test1"; private static final String TEST_USER_URN_2 = "urn:li:corpuser:test2"; @@ -35,28 +35,27 @@ public class DatasetStatsSummaryResolverTest { public void testGetSuccess() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + 
.setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(testResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(testResult); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -84,17 +83,19 @@ public void testGetSuccess() throws Exception { // Validate the cache. -- First return a new result. UsageQueryResult newResult = new UsageQueryResult(); newResult.setAggregations(new UsageQueryResultAggregations()); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(newResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(newResult); // Then verify that the new result is _not_ returned (cache hit) DatasetStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getQueryCountLast30Days(), 10); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 5); } @@ -102,28 +103,27 @@ public void testGetSuccess() throws Exception { public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index bae6f27a854bc..49ccc751d35f6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.delete; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; @@ -14,39 +18,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateSoftDeletedResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -55,17 +56,21 @@ public void testGetSuccessNoExistingStatus() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(true); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - 
STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -76,16 +81,18 @@ public void testGetSuccessExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -96,17 +103,21 @@ public void testGetSuccessExistingStatus() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(false); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -115,15 +126,17 @@ public void testGetSuccessExistingStatus() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -134,7 +147,9 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,7 +166,9 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); @@ -165,20 +182,25 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index ce5a02bb573e1..8c3620fa978a9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static 
com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.Deprecation;
@@ -15,39 +19,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.List;
 import java.util.concurrent.CompletionException;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class BatchUpdateDeprecationResolverTest {
 
-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
 
   @Test
   public void testGetSuccessNoExistingDeprecation() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
 
@@ -56,46 +57,57 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 0L, "test", ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchUpdateDeprecationInput input =
+        new BatchUpdateDeprecationInput(
+            true,
+            0L,
+            "test",
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final Deprecation newDeprecation = new Deprecation()
-        .setDeprecated(true)
-        .setNote("test")
-        .setDecommissionTime(0L)
-        .setActor(UrnUtils.getUrn("urn:li:corpuser:test"));
+    final Deprecation newDeprecation =
+        new Deprecation()
+            .setDeprecated(true)
+            .setNote("test")
+            .setDecommissionTime(0L)
+            .setActor(UrnUtils.getUrn("urn:li:corpuser:test"));
 
-    final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1),
-        DEPRECATION_ASPECT_NAME, newDeprecation);
-    final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2),
-        DEPRECATION_ASPECT_NAME, newDeprecation);
+    final MetadataChangeProposal proposal1 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation);
+    final MetadataChangeProposal proposal2 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation);
 
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
   }
 
   @Test
   public void testGetSuccessExistingDeprecation() throws Exception {
-    final Deprecation originalDeprecation = new Deprecation()
-        .setDeprecated(false)
-        .setNote("")
-        .setActor(UrnUtils.getUrn("urn:li:corpuser:test"));
+    final Deprecation originalDeprecation =
+        new Deprecation()
+            .setDeprecated(false)
+            .setNote("")
+            .setActor(UrnUtils.getUrn("urn:li:corpuser:test"));
 
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalDeprecation);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(originalDeprecation);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -106,23 +118,31 @@ public void testGetSuccessExistingDeprecation() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchUpdateDeprecationInput input =
+        new BatchUpdateDeprecationInput(
+            true,
+            1L,
+            "test",
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final Deprecation newDeprecation = new Deprecation()
-        .setDeprecated(true)
-        .setNote("test")
-        .setDecommissionTime(1L)
-        .setActor(UrnUtils.getUrn("urn:li:corpuser:test"));
+    final Deprecation newDeprecation =
+        new Deprecation()
+            .setDeprecated(true)
+            .setNote("test")
+            .setDecommissionTime(1L)
+            .setActor(UrnUtils.getUrn("urn:li:corpuser:test"));
 
-    final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1),
-        DEPRECATION_ASPECT_NAME, newDeprecation);
-    final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2),
-        DEPRECATION_ASPECT_NAME, newDeprecation);
+    final MetadataChangeProposal proposal1 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation);
+    final MetadataChangeProposal proposal2 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation);
 
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
   }
 
@@ -131,15 +151,17 @@ public void testGetSuccessExistingDeprecation() throws Exception {
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.DEPRECATION_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
@@ -150,9 +172,14 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchUpdateDeprecationInput input =
+        new BatchUpdateDeprecationInput(
+            true,
+            1L,
+            "test",
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -168,9 +195,14 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchUpdateDeprecationInput input =
+        new BatchUpdateDeprecationInput(
+            true,
+            1L,
+            "test",
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
@@ -183,21 +215,29 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
     BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchUpdateDeprecationInput input =
+        new BatchUpdateDeprecationInput(
+            true,
+            1L,
+            "test",
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java
index 5d30ae08d6dea..e4be330f5ba2a 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.deprecation;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -26,20 +30,12 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class UpdateDeprecationResolverTest {
 
-  private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = new UpdateDeprecationInput(
-      TEST_ENTITY_URN,
-      true,
-      0L,
-      "Test note"
-  );
+  private static final String TEST_ENTITY_URN =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT =
+      new UpdateDeprecationInput(TEST_ENTITY_URN, true, 0L, "Test note");
   private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test");
 
   @Test
@@ -47,16 +43,19 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-        Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN),
-            new EntityResponse()
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setUrn(Urn.createFromString(TEST_ENTITY_URN))
-                .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                Urn.createFromString(TEST_ENTITY_URN),
+                new EntityResponse()
+                    .setEntityName(Constants.DATASET_ENTITY_NAME)
+                    .setUrn(Urn.createFromString(TEST_ENTITY_URN))
+                    .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
 
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -71,41 +70,53 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     resolver.get(mockEnv).get();
 
-    final Deprecation newDeprecation = new Deprecation().setDeprecated(true).setDecommissionTime(0L).setNote("Test note").setActor(TEST_ACTOR_URN);
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN),
-        DEPRECATION_ASPECT_NAME, newDeprecation);
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))
-    );
+    final Deprecation newDeprecation =
+        new Deprecation()
+            .setDeprecated(true)
+            .setDecommissionTime(0L)
+            .setNote("Test note")
+            .setActor(TEST_ACTOR_URN);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
   }
 
   @Test
   public void testGetSuccessExistingDeprecation() throws Exception {
-    Deprecation originalDeprecation = new Deprecation().setDeprecated(false).setDecommissionTime(1L).setActor(TEST_ACTOR_URN).setNote("");
+    Deprecation originalDeprecation =
+        new Deprecation()
+            .setDeprecated(false)
+            .setDecommissionTime(1L)
+            .setActor(TEST_ACTOR_URN)
+            .setNote("");
 
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-        Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN),
-            new EntityResponse()
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setUrn(Urn.createFromString(TEST_ENTITY_URN))
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.DEPRECATION_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(originalDeprecation.data()))
-                )))));
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                Urn.createFromString(TEST_ENTITY_URN),
+                new EntityResponse()
+                    .setEntityName(Constants.DATASET_ENTITY_NAME)
+                    .setUrn(Urn.createFromString(TEST_ENTITY_URN))
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.DEPRECATION_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(originalDeprecation.data())))))));
 
     EntityService mockService = Mockito.mock(EntityService.class);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -120,24 +131,21 @@ public void testGetSuccessExistingDeprecation() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     resolver.get(mockEnv).get();
 
-    final Deprecation newDeprecation = new Deprecation()
-        .setDeprecated(true)
-        .setDecommissionTime(0L)
-        .setNote("Test note")
-        .setActor(TEST_ACTOR_URN);
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN),
-        DEPRECATION_ASPECT_NAME, newDeprecation);
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))
-    );
-
+    final Deprecation newDeprecation =
+        new Deprecation()
+            .setDeprecated(true)
+            .setDecommissionTime(0L)
+            .setNote("Test note")
+            .setActor(TEST_ACTOR_URN);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
   }
 
   @Test
@@ -145,16 +153,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-        Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN),
-            new EntityResponse()
-                .setEntityName(Constants.DEPRECATION_ASPECT_NAME)
-                .setUrn(Urn.createFromString(TEST_ENTITY_URN))
-                .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                Urn.createFromString(TEST_ENTITY_URN),
+                new EntityResponse()
+                    .setEntityName(Constants.DEPRECATION_ASPECT_NAME)
+                    .setUrn(Urn.createFromString(TEST_ENTITY_URN))
+                    .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
 
     EntityService mockService = Mockito.mock(EntityService.class);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
@@ -169,9 +180,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -188,18 +198,17 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = Mockito.mock(EntityService.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
     UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService);
 
     // Execute resolver
@@ -210,4 +219,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
index 8cd3c71a21555..d5ba88066e846 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.UrnArray;
@@ -18,21 +22,17 @@ import com.linkedin.metadata.utils.GenericRecordUtils;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.List;
 import java.util.concurrent.CompletionException;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class BatchSetDomainResolverTest {
 
-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
   private static final String TEST_DOMAIN_1_URN = "urn:li:domain:test-id-1";
   private static final String TEST_DOMAIN_2_URN = "urn:li:domain:test-id-2";
 
@@ -40,19 +40,20 @@ public class BatchSetDomainResolverTest {
   public void testGetSuccessNoExistingDomains() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
 
@@ -64,46 +65,53 @@ public void testGetSuccessNoExistingDomains() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchSetDomainInput input =
+        new BatchSetDomainInput(
+            TEST_DOMAIN_2_URN,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(
-        Urn.createFromString(TEST_DOMAIN_2_URN)
-    )));
+    final Domains newDomains =
+        new Domains()
+            .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN))));
 
-    final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1),
-        DOMAINS_ASPECT_NAME, newDomains);
-    final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2),
-        DOMAINS_ASPECT_NAME, newDomains);
+    final MetadataChangeProposal proposal1 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains);
+    final MetadataChangeProposal proposal2 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains);
 
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)));
   }
 
   @Test
   public void testGetSuccessExistingDomains() throws Exception {
-    final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of(
-        Urn.createFromString(TEST_DOMAIN_1_URN))));
+    final Domains originalDomain =
+        new Domains()
+            .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN))));
 
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalDomain);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalDomain);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -117,51 +125,58 @@ public void testGetSuccessExistingDomains() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchSetDomainInput input =
+        new BatchSetDomainInput(
+            TEST_DOMAIN_2_URN,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(
-        Urn.createFromString(TEST_DOMAIN_2_URN)
-    )));
+    final Domains newDomains =
+        new Domains()
+            .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN))));
 
-    final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1),
-        DOMAINS_ASPECT_NAME, newDomains);
+    final MetadataChangeProposal proposal1 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains);
     proposal1.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_1));
     proposal1.setEntityType(Constants.DATASET_ENTITY_NAME);
     proposal1.setAspectName(Constants.DOMAINS_ASPECT_NAME);
     proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains));
     proposal1.setChangeType(ChangeType.UPSERT);
-    final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2),
-        DOMAINS_ASPECT_NAME, newDomains);
+    final MetadataChangeProposal proposal2 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains);
 
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)));
   }
 
   @Test
   public void testGetSuccessUnsetDomains() throws Exception {
-    final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of(
-        Urn.createFromString(TEST_DOMAIN_1_URN))));
+    final Domains originalDomain =
+        new Domains()
+            .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN))));
 
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalDomain);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalDomain);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -175,19 +190,24 @@ public void testGetSuccessUnsetDomains() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchSetDomainInput input =
+        new BatchSetDomainInput(
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
     final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of()));
 
-    final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1),
-        DOMAINS_ASPECT_NAME, newDomains);
-    final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2),
-        DOMAINS_ASPECT_NAME, newDomains);
+    final MetadataChangeProposal proposal1 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains);
+    final MetadataChangeProposal proposal2 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains);
 
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
   }
 
@@ -196,10 +216,11 @@ public void testGetSuccessUnsetDomains() throws Exception {
   public void testGetFailureDomainDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -210,9 +231,12 @@ public void testGetFailureDomainDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchSetDomainInput input =
+        new BatchSetDomainInput(
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -224,15 +248,17 @@ public void testGetFailureDomainDoesNotExist() throws Exception {
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.DOMAINS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
@@ -244,9 +270,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchSetDomainInput input =
+        new BatchSetDomainInput(
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -262,9 +291,12 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchSetDomainInput input =
+        new BatchSetDomainInput(
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -277,21 +309,27 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
 
     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchSetDomainInput input =
+        new BatchSetDomainInput(
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java
index 1ea84b99cfec3..8f86e33158ad5 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java
@@ -6,13 +6,12 @@
 import com.linkedin.mxe.MetadataChangeProposal;
 import org.mockito.ArgumentMatcher;
 
-
 public class CreateDomainProposalMatcher implements ArgumentMatcher<MetadataChangeProposal> {
 
   private MetadataChangeProposal left;
 
   public CreateDomainProposalMatcher(MetadataChangeProposal left) {
-      this.left = left;
+    this.left = left;
   }
 
   @Override
@@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) {
   }
 
   private boolean domainPropertiesMatch(GenericAspect left, GenericAspect right) {
-    DomainProperties leftProps = GenericRecordUtils.deserializeAspect(
-        left.getValue(),
-        "application/json",
-        DomainProperties.class
-    );
-
-    DomainProperties rightProps = GenericRecordUtils.deserializeAspect(
-        right.getValue(),
-        "application/json",
-        DomainProperties.class
-    );
+    DomainProperties leftProps =
+        GenericRecordUtils.deserializeAspect(
+            left.getValue(), "application/json", DomainProperties.class);
+
+    DomainProperties rightProps =
+        GenericRecordUtils.deserializeAspect(
+            right.getValue(), "application/json", DomainProperties.class);
 
     // Omit timestamp comparison.
     return leftProps.getName().equals(rightProps.getName())
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java
index 560a3865ce9e1..6184760abfabd 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
@@ -15,49 +19,35 @@
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.Constants;
+import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.key.DomainKey;
 import com.linkedin.metadata.search.SearchEntity;
 import com.linkedin.metadata.search.SearchEntityArray;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.metadata.utils.GenericRecordUtils;
-import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.r2.RemoteInvocationException;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletionException;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME;
-import static org.testng.Assert.*;
-
-
 public class CreateDomainResolverTest {
 
   private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id");
   private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id");
 
-  private static final CreateDomainInput TEST_INPUT = new CreateDomainInput(
-      "test-id",
-      "test-name",
-      "test-description",
-      TEST_PARENT_DOMAIN_URN.toString()
-  );
+  private static final CreateDomainInput TEST_INPUT =
+      new CreateDomainInput(
+          "test-id", "test-name", "test-description", TEST_PARENT_DOMAIN_URN.toString());
 
-  private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = new CreateDomainInput(
-      "test-id",
-      "test-name",
-      "test-description",
-      null
-  );
+  private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN =
+      new CreateDomainInput("test-id", "test-name", "test-description", null);
 
   private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test");
 
-
   @Test
   public void testGetSuccess() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
     CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService);
 
-    Mockito.when(mockClient.exists(
-        Mockito.eq(TEST_DOMAIN_URN),
-        Mockito.any(Authentication.class)
-    )).thenReturn(false);
+    Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class)))
+        .thenReturn(false);
 
-    Mockito.when(mockClient.exists(
-        Mockito.eq(TEST_PARENT_DOMAIN_URN),
-        Mockito.any(Authentication.class)
-    )).thenReturn(true);
+    Mockito.when(
+            mockClient.exists(
+                Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class)))
+        .thenReturn(true);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
@@ -81,14 +69,17 @@ public void testGetSuccess() throws Exception {
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
-    Mockito.when(mockClient.filter(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)),
-        Mockito.eq(null),
-        Mockito.any(Integer.class),
-        Mockito.any(Integer.class),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new SearchResult().setEntities(new SearchEntityArray()));
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.eq(
+                    DomainUtils.buildNameAndParentDomainFilter(
+                        TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)),
+                Mockito.eq(null),
+                Mockito.any(Integer.class),
+                Mockito.any(Integer.class),
+                Mockito.any(Authentication.class)))
+        .thenReturn(new SearchResult().setEntities(new SearchEntityArray()));
 
     resolver.get(mockEnv).get();
 
@@ -107,11 +98,11 @@ public void testGetSuccess() throws Exception {
     proposal.setChangeType(ChangeType.UPSERT);
 
     // Not ideal to match against "any", but we don't know the auto-generated execution request id
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.argThat(new CreateDomainProposalMatcher(proposal)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.argThat(new CreateDomainProposalMatcher(proposal)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
@@ -120,24 +111,23 @@ public void testGetSuccessNoParentDomain() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService);
 
-    Mockito.when(mockClient.exists(
-        Mockito.eq(TEST_DOMAIN_URN),
-        Mockito.any(Authentication.class)
-    )).thenReturn(false);
+    Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class)))
+        .thenReturn(false);
 
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_NO_PARENT_DOMAIN);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
-    Mockito.when(mockClient.filter(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)),
-        Mockito.eq(null),
-        Mockito.any(Integer.class),
-        Mockito.any(Integer.class),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new SearchResult().setEntities(new SearchEntityArray()));
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)),
+                Mockito.eq(null),
+                Mockito.any(Integer.class),
+                Mockito.any(Integer.class),
+                Mockito.any(Authentication.class)))
+        .thenReturn(new SearchResult().setEntities(new SearchEntityArray()));
 
     resolver.get(mockEnv).get();
 
@@ -154,11 +144,11 @@ public void testGetSuccessNoParentDomain() throws Exception {
     proposal.setAspect(GenericRecordUtils.serializeAspect(props));
     proposal.setChangeType(ChangeType.UPSERT);
 
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.argThat(new CreateDomainProposalMatcher(proposal)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.argThat(new CreateDomainProposalMatcher(proposal)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
@@ -167,15 +157,13 @@ public void testGetInvalidParent() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService);
 
-    Mockito.when(mockClient.exists(
-        Mockito.eq(TEST_DOMAIN_URN),
-        Mockito.any(Authentication.class)
-    )).thenReturn(false);
+    Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class)))
+        .thenReturn(false);
 
-    Mockito.when(mockClient.exists(
-        Mockito.eq(TEST_PARENT_DOMAIN_URN),
-        Mockito.any(Authentication.class)
-    )).thenReturn(false);
+    Mockito.when(
+            mockClient.exists(
+                Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class)))
+        .thenReturn(false);
 
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -191,31 +179,32 @@ public void testGetNameConflict() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService);
 
-    Mockito.when(mockClient.exists(
-        Mockito.eq(TEST_DOMAIN_URN),
-        Mockito.any(Authentication.class)
-    )).thenReturn(false);
+    Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class)))
+        .thenReturn(false);
 
-    Mockito.when(mockClient.exists(
-        Mockito.eq(TEST_PARENT_DOMAIN_URN),
-        Mockito.any(Authentication.class)
-    )).thenReturn(true);
+    Mockito.when(
+            mockClient.exists(
+                Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class)))
+        .thenReturn(true);
 
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
-    Mockito.when(mockClient.filter(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)),
-        Mockito.eq(null),
-        Mockito.any(Integer.class),
-        Mockito.any(Integer.class),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new SearchResult().setEntities(
-        new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN))
-    ));
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.eq(
+                    DomainUtils.buildNameAndParentDomainFilter(
+                        TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)),
+                Mockito.eq(null),
+                Mockito.any(Integer.class),
+                Mockito.any(Integer.class),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new SearchResult()
+                .setEntities(new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN))));
 
     DomainProperties domainProperties = new DomainProperties();
     domainProperties.setDescription(TEST_INPUT.getDescription());
@@ -225,18 +214,21 @@ public void testGetNameConflict() throws Exception {
 
     EntityResponse entityResponse = new EntityResponse();
     EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap();
-    envelopedAspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(domainProperties.data())));
+    envelopedAspectMap.put(
+        DOMAIN_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(domainProperties.data())));
     entityResponse.setAspects(envelopedAspectMap);
 
     Map<Urn, EntityResponse> entityResponseMap = new HashMap<>();
     entityResponseMap.put(TEST_DOMAIN_URN, entityResponse);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(Authentication.class)
-    )).thenReturn(entityResponseMap);
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.any(),
+                Mockito.any(),
+                Mockito.any(Authentication.class)))
+        .thenReturn(entityResponseMap);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
@@ -255,9 +247,8 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -265,9 +256,9 @@ public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class), Mockito.eq(false));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false));
     CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService);
 
     // Execute resolver
@@ -278,4 +269,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java
index 9bcdbe6d2a0e0..5632654a26ad9 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -10,10 +13,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class DeleteDomainResolverTest {
 
   private static final String TEST_URN = "urn:li:domain:test-id";
@@ -30,15 +29,21 @@ public void testGetSuccess() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     // Domain has 0 child domains
-    Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any()))
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq("domain"),
+                Mockito.any(),
+                Mockito.any(),
+                Mockito.eq(0),
+                Mockito.eq(1),
+                Mockito.any()))
         .thenReturn(new SearchResult().setNumEntities(0));
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_URN)),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -53,14 +58,20 @@ public void testDeleteWithChildDomains() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     // Domain has child domains
-    Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any()))
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq("domain"),
+                Mockito.any(),
+                Mockito.any(),
+                Mockito.eq(0),
+                Mockito.eq(1),
+                Mockito.any()))
         .thenReturn(new SearchResult().setNumEntities(1));
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
 
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
  }
 
   @Test
@@ -76,8 +87,7 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java
index 93fe3d0017160..9596abf55d04f 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
@@ -26,18 +29,10 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
-import static org.testng.Assert.*;
-
-
 public class DomainEntitiesResolverTest {
 
-  private static final DomainEntitiesInput TEST_INPUT = new DomainEntitiesInput(
-      null,
-      0,
-      20,
-      Collections.emptyList()
-  );
+  private static final DomainEntitiesInput TEST_INPUT =
+      new DomainEntitiesInput(null, 0, 20, Collections.emptyList());
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -47,35 +42,42 @@ public void testGetSuccess() throws Exception {
     final String childUrn = "urn:li:dataset:(test,test,test)";
     final String domainUrn = "urn:li:domain:test-domain";
 
-    final Criterion filterCriterion = new Criterion()
-        .setField("domains.keyword")
-        .setCondition(Condition.EQUAL)
-        .setValue(domainUrn);
+    final Criterion filterCriterion =
+        new Criterion()
+            .setField("domains.keyword")
+            .setCondition(Condition.EQUAL)
+            .setValue(domainUrn);
 
-    Mockito.when(mockClient.searchAcrossEntities(
-        Mockito.eq(SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList())),
-        Mockito.eq("*"),
-        Mockito.eq(
-            new Filter().setOr(new ConjunctiveCriterionArray(
-                new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion)))
-            ))
-        ),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.eq(null),
-        Mockito.eq(null),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(
-                new SearchEntity()
-                    .setEntity(Urn.createFromString(childUrn))
-            )))
-            .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))
-    );
+    Mockito.when(
+            mockClient.searchAcrossEntities(
+                Mockito.eq(
+                    SEARCHABLE_ENTITY_TYPES.stream()
+                        .map(EntityTypeMapper::getName)
+                        .collect(Collectors.toList())),
+                Mockito.eq("*"),
+                Mockito.eq(
+                    new Filter()
+                        .setOr(
+                            new ConjunctiveCriterionArray(
+                                new ConjunctiveCriterion()
+                                    .setAnd(
+                                        new CriterionArray(ImmutableList.of(filterCriterion)))))),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.eq(null),
+                Mockito.eq(null),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(
+                            new SearchEntity().setEntity(Urn.createFromString(childUrn)))))
+                .setMetadata(
+                    new SearchResultMetadata().setAggregations(new AggregationMetadataArray())));
 
     DomainEntitiesResolver resolver = new DomainEntitiesResolver(mockClient);
 
@@ -95,6 +97,7 @@ public void testGetSuccess() throws Exception {
     assertEquals((int) resolver.get(mockEnv).get().getCount(), 1);
     assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1);
     assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1);
-    assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn);
+    assertEquals(
+        resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn);
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java
index bd8a8f98de497..ffc3e823d8351 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertThrows;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
@@ -20,46 +25,43 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertThrows;
-
-
 public class ListDomainsResolverTest {
 
   private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id");
   private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id");
 
-  private static final ListDomainsInput TEST_INPUT = new ListDomainsInput(
-      0, 20, null, TEST_PARENT_DOMAIN_URN.toString()
-  );
+  private static final ListDomainsInput TEST_INPUT =
+      new ListDomainsInput(0, 20, null, TEST_PARENT_DOMAIN_URN.toString());
 
-  private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = new ListDomainsInput(
-      0, 20, null, null
-  );
+  private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN =
+      new ListDomainsInput(0, 20, null, null);
 
   @Test
   public void testGetSuccess() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.search(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.eq(""),
-        Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)),
-        Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true))
-    )).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))
-    );
+    Mockito.when(
+            mockClient.search(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.eq(""),
+                Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)),
+                Mockito.eq(
+                    new SortCriterion()
+                        .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME)
+                        .setOrder(SortOrder.DESCENDING)),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.any(Authentication.class),
+                Mockito.eq(new SearchFlags().setFulltext(true))))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))));
 
     ListDomainsResolver resolver = new ListDomainsResolver(mockClient);
 
@@ -74,7 +76,8 @@ public void testGetSuccess() throws Exception {
     assertEquals((int) resolver.get(mockEnv).get().getCount(), 1);
     assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1);
     assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1);
-    assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString());
+    assertEquals(
+        resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString());
   }
 
   @Test
@@ -82,22 +85,27 @@ public void testGetSuccessNoParentDomain() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.search(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.eq(""),
-        Mockito.eq(DomainUtils.buildParentDomainFilter(null)),
-        Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true))
-    )).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))
-    );
+    Mockito.when(
+            mockClient.search(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.eq(""),
+                Mockito.eq(DomainUtils.buildParentDomainFilter(null)),
+                Mockito.eq(
+                    new SortCriterion()
+                        .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME)
+                        .setOrder(SortOrder.DESCENDING)),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.any(Authentication.class),
+                Mockito.eq(new SearchFlags().setFulltext(true))))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))));
 
     ListDomainsResolver resolver = new ListDomainsResolver(mockClient);
 
@@ -112,7 +120,8 @@ public void testGetSuccessNoParentDomain() throws Exception {
     assertEquals((int) resolver.get(mockEnv).get().getCount(), 1);
     assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1);
     assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1);
-    assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString());
+    assertEquals(
+        resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString());
   }
 
   @Test
@@ -124,33 +133,35 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockDenyContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(
-        TEST_INPUT);
+    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).search(
-        Mockito.any(),
-        Mockito.eq("*"),
-        Mockito.anyMap(),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .search(
+            Mockito.any(),
+            Mockito.eq("*"),
+            Mockito.anyMap(),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class),
+            Mockito.eq(new SearchFlags().setFulltext(true)));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search(
-        Mockito.any(),
-        Mockito.eq(""),
-        Mockito.anyMap(),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .search(
+            Mockito.any(),
+            Mockito.eq(""),
+            Mockito.anyMap(),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class),
+            Mockito.eq(new SearchFlags().setFulltext(true)));
     ListDomainsResolver resolver = new ListDomainsResolver(mockClient);
 
     // Execute resolver
@@ -161,4 +172,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
index 4059c180b0eb0..a0eff5d0574db 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.assertThrows;
+import static org.testng.Assert.assertTrue;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.CorpuserUrn;
@@ -17,52 +22,51 @@
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetchingEnvironment;
+import java.util.concurrent.CompletionException;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import java.util.concurrent.CompletionException;
-
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.assertThrows;
-import static org.testng.Assert.assertTrue;
-
 public class MoveDomainResolverTest {
 
   private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451";
   private static final String PARENT_DOMAIN_URN = "urn:li:domain:00005397daf94708a8822b8106cfd451";
   private static final String DOMAIN_URN = "urn:li:domain:11115397daf94708a8822b8106cfd451";
   private static final MoveDomainInput INPUT = new MoveDomainInput(PARENT_DOMAIN_URN, DOMAIN_URN);
-  private static final MoveDomainInput INVALID_INPUT = new MoveDomainInput(CONTAINER_URN, DOMAIN_URN);
+  private static final MoveDomainInput INVALID_INPUT =
+      new MoveDomainInput(CONTAINER_URN, DOMAIN_URN);
   private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test");
 
-  private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) throws Exception {
+  private MetadataChangeProposal setupTests(
+      DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient)
+      throws Exception {
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
     Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     final String name = "test name";
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(DOMAIN_URN),
-        Constants.DOMAIN_PROPERTIES_ASPECT_NAME,
-        0))
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0))
        .thenReturn(new DomainProperties().setName(name));
 
-    Mockito.when(mockClient.filter(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(name, Urn.createFromString(PARENT_DOMAIN_URN))),
-        Mockito.eq(null),
-        Mockito.any(Integer.class),
-        Mockito.any(Integer.class),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new SearchResult().setEntities(new SearchEntityArray()));
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.eq(
+                    DomainUtils.buildNameAndParentDomainFilter(
+                        name, Urn.createFromString(PARENT_DOMAIN_URN))),
+                Mockito.eq(null),
+                Mockito.any(Integer.class),
+                Mockito.any(Integer.class),
+                Mockito.any(Authentication.class)))
+        .thenReturn(new SearchResult().setEntities(new SearchEntityArray()));
 
     DomainProperties properties = new DomainProperties();
     properties.setName(name);
     properties.setParentDomain(Urn.createFromString(PARENT_DOMAIN_URN));
-    return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN),
-        DOMAIN_PROPERTIES_ASPECT_NAME, properties);
+    return MutationUtils.buildMetadataChangeProposalWithUrn(
+        Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties);
   }
 
   @Test
@@ -77,11 +81,11 @@ public void testGetSuccess() throws Exception {
     setupTests(mockEnv, mockService, mockClient);
 
     assertTrue(resolver.get(mockEnv).get());
-    Mockito.verify(mockService, Mockito.times(1)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.eq(false));
   }
 
   @Test
@@ -97,10 +101,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(DOMAIN_URN),
-        DOMAIN_PROPERTIES_ASPECT_NAME,
-        0))
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, 0))
        .thenReturn(null);
 
     MoveDomainResolver resolver = new MoveDomainResolver(mockService, mockClient);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java
index 7bd7c3afac001..4c8ceff9c4f80 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.assertEquals;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -13,15 +16,11 @@
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.entity.client.EntityClient;
 import graphql.schema.DataFetchingEnvironment;
-import org.mockito.Mockito;
-import org.testng.annotations.Test;
-
 import
java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentDomainsResolverTest { @Test @@ -38,58 +37,68 @@ public void testGetSuccessForDomain() throws Exception { domainEntity.setType(EntityType.DOMAIN); Mockito.when(mockEnv.getSource()).thenReturn(domainEntity); - final DomainProperties parentDomain1 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:11115397daf94708a8822b8106cfd451") - ).setName("test def"); - final DomainProperties parentDomain2 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:22225397daf94708a8822b8106cfd451") - ).setName("test def 2"); + final DomainProperties parentDomain1 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:11115397daf94708a8822b8106cfd451")) + .setName("test def"); + final DomainProperties parentDomain2 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:22225397daf94708a8822b8106cfd451")) + .setName("test def 2"); Map<String, EnvelopedAspect> domainAspects = new HashMap<>(); - domainAspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); + domainAspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); Map<String, EnvelopedAspect> parentDomain1Aspects = new HashMap<>(); - parentDomain1Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 1").setParentDomain(parentDomain2.getParentDomain()).data() - ))); + parentDomain1Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new DomainProperties() + .setName("domain parent 1") + .setParentDomain(parentDomain2.getParentDomain()) + .data()))); Map<String, EnvelopedAspect> parentDomain2Aspects = new HashMap<>(); - parentDomain2Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 2").data() - ))); + parentDomain2Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new DomainProperties().setName("domain parent 2").data()))); - Mockito.when(mockClient.getV2( - Mockito.eq(domainUrn.getEntityType()), - Mockito.eq(domainUrn), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(domainUrn.getEntityType()), + Mockito.eq(domainUrn), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain1.getParentDomain().getEntityType()), - Mockito.eq(parentDomain1.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(parentDomain1.getParentDomain().getEntityType()), + Mockito.eq(parentDomain1.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), 
+ Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain2.getParentDomain().getEntityType()), - Mockito.eq(parentDomain2.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(parentDomain2.getParentDomain().getEntityType()), + Mockito.eq(parentDomain2.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); ParentDomainsResolver resolver = new ParentDomainsResolver(mockClient); ParentDomainsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(3)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(3)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getDomains().get(0).getUrn(), parentDomain1.getParentDomain().toString()); assertEquals(result.getDomains().get(1).getUrn(), parentDomain2.getParentDomain().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java index 92fb26288aa1d..ad5ad2315ce43 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; private static final String TEST_NEW_DOMAIN_URN = "urn:li:domain:test-id-2"; @@ -43,16 +43,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - 
.setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -68,47 +71,52 @@ public void testGetSuccessNoExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + 
Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -124,23 +132,21 @@ public void testGetSuccessExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test @@ -149,16 +155,19 @@ public void testGetFailureDomainDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -174,9 +183,8 @@ public void 
testGetFailureDomainDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -185,16 +193,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -210,9 +221,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -230,18 +240,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetDomainResolver resolver = new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + SetDomainResolver resolver = + new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -252,4 +262,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index decda39943dde..7e6e258168898 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UnsetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; @Test @@ -42,16 +42,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -66,43 +69,46 @@ public void testGetSuccessNoExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - 
); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -117,18 +123,15 @@ public void testGetSuccessExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -137,16 +140,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - 
Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -160,9 +166,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -179,18 +184,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + UnsetDomainResolver resolver = + new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -200,4 +205,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index f1d44fcb47255..45a17744a2697 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -25,29 +29,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpdateEmbedResolverTest { private static final String TEST_ENTITY_URN = "urn:li:dashboard:(looker,1)"; private static final String TEST_RENDER_URL = "https://www.google.com"; - private static final UpdateEmbedInput TEST_EMBED_INPUT = new UpdateEmbedInput( - TEST_ENTITY_URN, - TEST_RENDER_URL - ); + private static final UpdateEmbedInput TEST_EMBED_INPUT = + new UpdateEmbedInput(TEST_ENTITY_URN, TEST_RENDER_URL); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); @Test public void testGetSuccessNoExistingEmbed() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(EMBED_ASPECT_NAME), - Mockito.eq(0L))).thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(EMBED_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -62,14 +61,15 @@ public void testGetSuccessNoExistingEmbed() throws Exception { resolver.get(mockEnv).get(); final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - EMBED_ASPECT_NAME, newEmbed); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); - verifySingleIngestProposal(mockService, 1, proposal);; + verifySingleIngestProposal(mockService, 1, proposal); + ; - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -79,10 +79,12 @@ public void testGetSuccessExistingEmbed() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(EMBED_ASPECT_NAME), - Mockito.eq(0L))).thenReturn(originalEmbed); + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(EMBED_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(originalEmbed); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -97,14 +99,14 @@ public void testGetSuccessExistingEmbed() throws Exception { resolver.get(mockEnv).get(); final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - EMBED_ASPECT_NAME, newEmbed); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); verifySingleIngestProposal(mockService, 1, proposal); - 
Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -112,16 +114,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DASHBOARD_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DASHBOARD_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -136,11 +141,10 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - );; + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + ; } @Test @@ -156,20 +160,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); // Execute resolver @@ -180,4 +182,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java index cde2739b2bcc6..fa8b1d6a747ca 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java @@ -1,14 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EntityExistsResolverTest { private static final String ENTITY_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java index 913ea4602faf0..d9d5e643057ce 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Chart; @@ -14,14 +17,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class EntityPrivilegesResolverTest { final String glossaryTermUrn = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; @@ -29,7 +28,8 @@ public class EntityPrivilegesResolverTest { final String datasetUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)"; final String chartUrn = "urn:li:chart:(looker,baz1)"; final String dashboardUrn = "urn:li:dashboard:(looker,dashboards.1)"; - final String dataJobUrn = "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; + final String dataJobUrn = + "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; private DataFetchingEnvironment setUpTestWithPermissions(Entity entity) { QueryContext mockContext = getMockAllowContext(); @@ -115,11 +115,13 @@ public void testGetFailure() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); DataFetchingEnvironment mockEnv = setUpTestWithoutPermissions(glossaryNode); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.any(), + Mockito.any(), + 
Mockito.any(Authentication.class)); EntityPrivilegesResolver resolver = new EntityPrivilegesResolver(mockClient); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 26c13186c4a81..287d270ab569c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,15 +14,9 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.ExecutionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.*; - - public class AddRelatedTermsResolverTest { private static final String TEST_ENTITY_URN = "urn:li:glossaryTerm:test-id-0"; @@ -28,10 +26,11 @@ public class AddRelatedTermsResolverTest { private EntityService setUpService() { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); return mockService; } @@ -48,24 +47,22 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); 
} @Test @@ -80,24 +77,22 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test @@ -110,9 +105,9 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_ENTITY_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -130,9 +125,9 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - DATASET_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,9 +146,9 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -172,9 +167,9 @@ public void testGetFailAddToNonExistentUrn() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new 
RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -193,9 +188,9 @@ public void testGetFailAddToNonTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(DATASET_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -215,15 +210,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java index 3b47514d87181..2a36d77716ab7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,39 +11,27 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; -import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static com.linkedin.metadata.Constants.*; - - public class CreateGlossaryNodeResolverTest { - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - 
"test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -47,8 +39,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -63,8 +55,8 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_NODE_ENTITY_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, props); } @Test @@ -72,16 +64,14 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -89,16 +79,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - 
Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,15 +94,13 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index 2dbe637d16057..6653b19d6ef2b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -1,61 +1,53 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import 
static org.testng.Assert.assertThrows; -import static com.linkedin.metadata.Constants.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class CreateGlossaryTermResolverTest { private static final String EXISTING_TERM_URN = "urn:li:glossaryTerm:testing12345"; - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -63,8 +55,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -80,8 +72,8 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_TERM_ENTITY_NAME, GLOSSARY_TERM_INFO_ASPECT_NAME, props); } @Test @@ -89,16 +81,14 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,16 +96,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -123,16 +111,14 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -140,73 +126,71 @@ public void testGetFailureExistingTermSameName() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))) - )); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))))); Map<Urn, EntityResponse> result = new HashMap<>(); EnvelopedAspectMap map = new EnvelopedAspectMap(); GlossaryTermInfo termInfo = new GlossaryTermInfo().setName("Duplicated Name"); - map.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); + map.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); result.put(UrnUtils.getUrn(EXISTING_TERM_URN), new EntityResponse().setAspects(map)); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(result); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(result); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - CreateGlossaryEntityInput input = new CreateGlossaryEntityInput( - "test-id", - "Duplicated Name", - "test-description", - 
"urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); + CreateGlossaryEntityInput input = + new CreateGlossaryEntityInput( + "test-id", + "Duplicated Name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); setupTest(mockEnv, input, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private EntityClient initMockClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(new HashMap<>()); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(new HashMap<>()); return mockClient; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java index 94f0d0b7a1143..7229d2acf763d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,19 +12,14 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class DeleteGlossaryEntityResolverTest { - private static final String TEST_TERM_URN = "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; + private static final String TEST_TERM_URN = + 
"urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; @Test public void testGetSuccess() throws Exception { @@ -33,26 +33,27 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_TERM_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_TERM_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_TERM_URN)), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java index 677516e9404e8..b879baf1e65dc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,16 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryNodesResolverTest { - final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput( - 0, 100 - ); + final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryNodeUrn1 = "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451"; final String glossaryNodeUrn2 = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -42,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - 
Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryNodesResolver resolver = new GetRootGlossaryNodesResolver(mockClient); GetRootGlossaryNodesResult result = resolver.get(mockEnv).get(); @@ -64,24 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); - assertEquals(result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); + assertEquals( + result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); + assertEquals( + result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java index 5aba32108b7db..201bea752d53f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,14 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryTermsResolverTest { final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryTermUrn1 = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; final String glossaryTermUrn2 = "urn:li:glossaryTerm:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() 
throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -40,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryTermsResolver resolver = new GetRootGlossaryTermsResolver(mockClient); GetRootGlossaryTermsResult result = resolver.get(mockEnv).get(); @@ -62,23 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); - assertEquals(result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); + assertEquals( + result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); + assertEquals( + result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java index 8bfc32e1999ae..969fda541d6a6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java @@ -1,33 +1,32 @@ package com.linkedin.datahub.graphql.resolvers.glossary; -import com.google.common.collect.ImmutableSet; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationRequest; import 
com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.Aspect; -import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.Constants; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.Optional; -import java.util.Map; -import java.util.HashMap; - -import static org.testng.Assert.*; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; - public class GlossaryUtilsTest { private final String userUrn = "urn:li:corpuser:authorized"; @@ -44,67 +43,87 @@ private void setUpTests() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(userUrn); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node2") - ); - GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node3") - ); - + GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node2")); + GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node3")); + GlossaryNodeInfo parentNode3 = new GlossaryNodeInfo(); - + Map<String, EnvelopedAspect> parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode1.getParentNode()).data() - ))); - + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode1.getParentNode()) + .data()))); + Map<String, EnvelopedAspect> parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 2") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map<String, EnvelopedAspect> parentNode3Aspects = new HashMap<>(); - parentNode3Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 3").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn1), - 
Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn2), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn3), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); - - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + parentNode3Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 3").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn1), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn2), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn3), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); + + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); } - private void mockAuthRequest(String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { - final AuthorizationRequest authorizationRequest = new AuthorizationRequest( - userUrn, - privilege, - resourceSpec != null ? Optional.of(resourceSpec) : Optional.empty() - ); + private void mockAuthRequest( + String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { + final AuthorizationRequest authorizationRequest = + new AuthorizationRequest( + userUrn, + privilege, + resourceSpec != null ? 
Optional.of(resourceSpec) : Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(allowOrDeny); Mockito.when(mockAuthorizer.authorize(Mockito.eq(authorizationRequest))).thenReturn(result); @@ -150,7 +169,8 @@ public void testCanManageChildrenEntitiesAuthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn, mockClient)); @@ -162,7 +182,8 @@ public void testCanManageChildrenEntitiesUnauthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); @@ -175,13 +196,16 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorized() throws Exceptio // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -193,13 +217,16 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorized() throws Except // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), 
parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -211,10 +238,12 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorizedLevel2() throws Ex // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -226,10 +255,12 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorizedLevel2() throws // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn2, mockClient)); @@ -241,7 +272,8 @@ public void testCanManageChildrenRecursivelyEntitiesNoLevel2() throws Exception // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn3, mockClient)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java index 06dff7611fac8..446f58bec73aa 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -16,17 +21,11 @@ import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentNodesResolverTest { @Test @@ -43,76 +42,94 @@ public void testGetSuccessForTerm() throws Exception { termEntity.setType(EntityType.GLOSSARY_TERM); Mockito.when(mockEnv.getSource()).thenReturn(termEntity); - final GlossaryTermInfo parentNode1 = new GlossaryTermInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryTermInfo parentNode1 = + new GlossaryTermInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map<String, EnvelopedAspect> glossaryTermAspects = new HashMap<>(); - glossaryTermAspects.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryTermAspects.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map<String, EnvelopedAspect> parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map<String, EnvelopedAspect> parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(termUrn.getEntityType()), - Mockito.eq(termUrn), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - 
)).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(termUrn.getEntityType()), + Mockito.eq(termUrn), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - 
Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); @@ -132,78 +149,96 @@ public void testGetSuccessForNode() throws Exception { nodeEntity.setType(EntityType.GLOSSARY_NODE); Mockito.when(mockEnv.getSource()).thenReturn(nodeEntity); - final GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map<String, EnvelopedAspect> glossaryNodeAspects = new HashMap<>(); - glossaryNodeAspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryNodeAspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map<String, EnvelopedAspect> parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map<String, EnvelopedAspect> parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(nodeUrn.getEntityType()), - Mockito.eq(nodeUrn), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - 
Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(nodeUrn.getEntityType()), + Mockito.eq(nodeUrn), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 3906d1188cb17..47de668b2c9dc 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; @@ -12,15 +16,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Arrays; import java.util.concurrent.ExecutionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RemoveRelatedTermsResolverTest { @@ -35,10 +34,11 @@ public void testGetSuccessIsA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -47,17 +47,16 @@ public void testGetSuccessIsA() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -67,10 +66,11 @@ public void testGetSuccessHasA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -79,26 +79,26 @@ public void testGetSuccessHasA() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testFailAspectDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -107,9 +107,9 @@ public void testFailAspectDoesNotExist() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -124,10 +124,11 @@ public void testFailNoPermissions() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -136,16 +137,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); - Mockito.verify(mockService, Mockito.times(0)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(0)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index eee9cfbae8fcb..3972715fcefb1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; @@ -19,16 +23,10 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateNameResolverTest { private static final String NEW_NAME = "New Name"; @@ -40,23 +38,23 @@ public class UpdateNameResolverTest { private static final UpdateNameInput INPUT_FOR_DOMAIN = new UpdateNameInput(NEW_NAME, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(NEW_NAME); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -88,16 +86,16 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - 
Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); @@ -118,25 +116,27 @@ public void testGetSuccessForDomain() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index a78c28890fecf..74a59b10a40b0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; @@ -15,45 +20,43 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import 
graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.net.URISyntaxException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateParentNodeResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; - private static final String PARENT_NODE_URN = "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; + private static final String PARENT_NODE_URN = + "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; private static final String TERM_URN = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; private static final String NODE_URN = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - private static final UpdateParentNodeInput INPUT = new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); - private static final UpdateParentNodeInput INPUT_WITH_NODE = new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); - private static final UpdateParentNodeInput INVALID_INPUT = new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT = + new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT_WITH_NODE = + new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); + private static final UpdateParentNodeInput INVALID_INPUT = + new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -61,7 +64,8 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); 
Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -77,7 +81,8 @@ public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE); @@ -87,17 +92,17 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateParentNodeResolver resolver = new UpdateParentNodeResolver(mockService, mockClient); @@ -110,7 +115,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -126,7 +132,8 @@ public void testGetFailureNodeDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(false); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -142,7 +149,8 @@ public void testGetFailureParentIsNotNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); 
Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java index a20c84d11ba9f..19d9dd20d3f80 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AddGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java index 876de633bd656..a29680a6de52d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateGroupResolverTest { private static final String GROUP_ID = "id"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java index 73b0be96fce17..601d5e08a4233 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static 
com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class RemoveGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java index dae0758f6a2f6..e5cb43c4dab61 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -21,15 +23,14 @@ import com.linkedin.secret.DataHubSecretValue; import org.mockito.Mockito; -import static org.testng.Assert.*; - - public class IngestTestUtils { - public static final Urn TEST_INGESTION_SOURCE_URN = Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); - public static final Urn TEST_SECRET_URN = Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); - public static final Urn TEST_EXECUTION_REQUEST_URN = Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); - + public static final Urn TEST_INGESTION_SOURCE_URN = + Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); + public static final Urn TEST_SECRET_URN = + Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); + public static final Urn TEST_EXECUTION_REQUEST_URN = + Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); public static QueryContext getMockAllowContext() { QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -63,8 +64,13 @@ public static DataHubIngestionSourceInfo getTestIngestionSourceInfo() { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setName("My Test Source"); info.setType("mysql"); - info.setSchedule(new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); - info.setConfig(new DataHubIngestionSourceConfig().setVersion("0.8.18").setRecipe("{}").setExecutorId("executor id")); + info.setSchedule( + new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); + info.setConfig( + new DataHubIngestionSourceConfig() + .setVersion("0.8.18") + .setRecipe("{}") + .setExecutorId("executor id")); return info; } @@ -78,15 +84,18 @@ public static DataHubSecretValue getTestSecretValue() { public static ExecutionRequestInput getTestExecutionRequestInput() { ExecutionRequestInput input = new ExecutionRequestInput(); - input.setArgs(new StringMap( - ImmutableMap.of( - "recipe", "my-custom-recipe", - "version", "0.8.18") - )); + input.setArgs( + new StringMap( + ImmutableMap.of( + "recipe", "my-custom-recipe", + "version", "0.8.18"))); input.setTask("RUN_INGEST"); input.setExecutorId("default"); input.setRequestedAt(0L); - input.setSource(new ExecutionRequestSource().setIngestionSource(TEST_INGESTION_SOURCE_URN).setType("SCHEDULED_INGESTION")); + input.setSource( + new ExecutionRequestSource() + .setIngestionSource(TEST_INGESTION_SOURCE_URN) + .setType("SCHEDULED_INGESTION")); return input; } @@ -99,7 
+108,8 @@ public static ExecutionRequestResult getTestExecutionRequestResult() { return result; } - public static void verifyTestIngestionSourceGraphQL(IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { + public static void verifyTestIngestionSourceGraphQL( + IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { assertEquals(ingestionSource.getUrn(), TEST_INGESTION_SOURCE_URN.toString()); assertEquals(ingestionSource.getName(), info.getName()); assertEquals(ingestionSource.getType(), info.getType()); @@ -134,5 +144,5 @@ public static void verifyTestExecutionRequest( assertEquals(executionRequest.getResult().getStartTimeMs(), result.getStartTimeMs()); } - private IngestTestUtils() { } + private IngestTestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java index 12045b9361469..3de88333b959d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -7,7 +9,6 @@ import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; public class IngestionAuthUtilsTest { @@ -16,11 +17,9 @@ public void testCanManageIngestionAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:authorized", "MANAGE_INGESTION", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -37,11 +36,9 @@ public void testCanManageIngestionUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_INGESTION", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); @@ -58,11 +55,8 @@ public void testCanManageSecretsAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest("urn:li:corpuser:authorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); 
Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -79,11 +73,9 @@ public void testCanManageSecretsUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java index e7226c6e4db08..3d0c24b9aa022 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -18,35 +21,36 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CancelIngestionExecutionRequestResolverTest { - private static final CancelIngestionExecutionRequestInput TEST_INPUT = new CancelIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString(), - TEST_EXECUTION_REQUEST_URN.toString() - ); + private static final CancelIngestionExecutionRequestInput TEST_INPUT = + new CancelIngestionExecutionRequestInput( + TEST_INGESTION_SOURCE_URN.toString(), TEST_EXECUTION_REQUEST_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new 
EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -57,18 +61,19 @@ Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( resolver.get(mockEnv).get(); // Verify ingest proposal has been called to create a Signal request. - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -77,19 +82,19 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java index 7973e49c6efdf..18ce1d8c27955 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; 
-import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -12,6 +14,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -19,35 +22,37 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateIngestionExecutionRequestResolverTest { - private static final CreateIngestionExecutionRequestInput TEST_INPUT = new CreateIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString() - ); + private static final CreateIngestionExecutionRequestInput TEST_INPUT = + new CreateIngestionExecutionRequestInput(TEST_INGESTION_SOURCE_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_INGESTION_SOURCE_URN, - new EntityResponse().setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -58,11 +63,11 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); 
} @Test @@ -71,7 +76,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -80,21 +86,21 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -105,4 +111,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java index 75df240441965..eaf3186524721 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java @@ -1,25 +1,22 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestConnectionRequestInput; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; -import static 
com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestConnectionRequestResolverTest { - private static final CreateTestConnectionRequestInput TEST_INPUT = new CreateTestConnectionRequestInput( - "{}", - "0.8.44" - ); + private static final CreateTestConnectionRequestInput TEST_INPUT = + new CreateTestConnectionRequestInput("{}", "0.8.44"); @Test public void testGetSuccess() throws Exception { @@ -27,7 +24,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + CreateTestConnectionRequestResolver resolver = + new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -37,11 +35,11 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -50,7 +48,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + CreateTestConnectionRequestResolver resolver = + new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -59,9 +58,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java index 532b9b89f3a99..268f8b8927b67 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -20,9 +23,6 @@ import 
org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetIngestionExecutionRequestResolverTest { @Test @@ -33,32 +33,48 @@ public void testGetSuccess() throws Exception { ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInput.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -69,7 +85,8 @@ public void testGetSuccess() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + GetIngestionExecutionRequestResolver resolver = + new 
GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -78,7 +95,9 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); } @Test @@ -87,13 +106,16 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) - .batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java index 25f3ccbd47cd6..fdb150e692441 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,10 +31,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class IngestionSourceExecutionRequestsResolverTest { @Test @@ -40,49 +39,65 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Mock filter response - Mockito.when(mockClient.filter( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.any(Filter.class), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any(Authentication.class))) - .thenReturn(new SearchResult() - .setFrom(0) - .setPageSize(10) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableList.of( - new 
SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN)))) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.any(Filter.class), + Mockito.any(SortCriterion.class), + Mockito.eq(0), + Mockito.eq(10), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(10) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN))))); // Mock batch get response ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInput.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -99,14 +114,16 @@ public void testGetSuccess() throws Exception { assertEquals((int) executionRequests.getStart(), 0); assertEquals((int) executionRequests.getCount(), 10); assertEquals((int) executionRequests.getTotal(), 1); - verifyTestExecutionRequest(executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); + verifyTestExecutionRequest( + executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); } @Test public void testGetUnauthorized() throws 
Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -119,29 +136,28 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getSource()).thenReturn(parentSource); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).list( - Mockito.any(), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .list( + Mockito.any(), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java index c7a72e475f7ab..bec141bddf260 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; @@ -8,10 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class RollbackIngestionResolverTest { private static final String RUN_ID = "testRunId"; @@ -46,9 +45,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - 
Mockito.verify(mockClient, Mockito.times(0)).rollbackIngestion( - Mockito.eq(RUN_ID), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); } @Test @@ -59,24 +57,22 @@ public void testRollbackIngestionMethod() throws Exception { QueryContext mockContext = getMockAllowContext(); resolver.rollbackIngestion(RUN_ID, mockContext).get(); - Mockito.verify(mockClient, Mockito.times(1)).rollbackIngestion( - Mockito.eq(RUN_ID), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).rollbackIngestion( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .rollbackIngestion(Mockito.any(), Mockito.any(Authentication.class)); RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient); QueryContext mockContext = getMockAllowContext(); - assertThrows(RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); + assertThrows( + RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java index 2d64d4ec56ba1..85ef304d28533 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java @@ -6,7 +6,6 @@ import com.linkedin.secret.DataHubSecretValue; import org.mockito.ArgumentMatcher; - public class CreateSecretResolverMatcherTest implements ArgumentMatcher<MetadataChangeProposal> { private MetadataChangeProposal left; @@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) { } private boolean secretPropertiesMatch(GenericAspect left, GenericAspect right) { - DataHubSecretValue leftProps = GenericRecordUtils.deserializeAspect( - left.getValue(), - "application/json", - DataHubSecretValue.class - ); - - DataHubSecretValue rightProps = GenericRecordUtils.deserializeAspect( - right.getValue(), - "application/json", - DataHubSecretValue.class - ); + DataHubSecretValue leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", DataHubSecretValue.class); + + DataHubSecretValue rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", DataHubSecretValue.class); // Omit timestamp comparison. 
return leftProps.getName().equals(rightProps.getName()) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java index 18ae71661318e..eafdfde364947 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; @@ -20,24 +22,18 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateSecretResolverTest { - private static final CreateSecretInput TEST_INPUT = new CreateSecretInput( - "MY_SECRET", - "mysecretvalue", - "none" - ); + private static final CreateSecretInput TEST_INPUT = + new CreateSecretInput("MY_SECRET", "mysecretvalue", "none"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))).thenReturn("encryptedvalue"); + Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))) + .thenReturn("encryptedvalue"); CreateSecretResolver resolver = new CreateSecretResolver(mockClient, mockSecretService); // Execute resolver @@ -57,18 +53,21 @@ public void testGetSuccess() throws Exception { value.setValue("encryptedvalue"); value.setName(TEST_INPUT.getName()); value.setDescription(TEST_INPUT.getDescription()); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateSecretResolverMatcherTest(new MetadataChangeProposal() - .setChangeType(ChangeType.UPSERT) - .setEntityType(Constants.SECRETS_ENTITY_NAME) - .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) - .setAspect(GenericRecordUtils.serializeAspect(value)) - .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + value.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat( + new CreateSecretResolverMatcherTest( + new MetadataChangeProposal() + .setChangeType(ChangeType.UPSERT) + .setEntityType(Constants.SECRETS_ENTITY_NAME) + .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) + .setAspect(GenericRecordUtils.serializeAspect(value)) + .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -80,23 +79,21 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver @@ -108,4 +105,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java index 679425afbf2e7..7cfe33feb58fc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -8,9 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteSecretResolverTest { @Test @@ -26,7 +26,8 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_SECRET_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); } @Test @@ -42,14 +43,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_SECRET_URN), Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_SECRET_URN), 
Mockito.any(Authentication.class)); DeleteSecretResolver resolver = new DeleteSecretResolver(mockClient); // Execute Resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java index 0042d34e602cc..495adb27dbd5d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -22,14 +25,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetSecretValuesResolverTest { - private static final GetSecretValuesInput TEST_INPUT = new GetSecretValuesInput( - ImmutableList.of(getTestSecretValue().getName()) - ); + private static final GetSecretValuesInput TEST_INPUT = + new GetSecretValuesInput(ImmutableList.of(getTestSecretValue().getName())); @Test public void testGetSuccess() throws Exception { @@ -39,27 +38,29 @@ public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))).thenReturn(decryptedSecretValue); + Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))) + .thenReturn(decryptedSecretValue); DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); @@ -90,22 +91,19 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> 
resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); SecretService mockSecretService = Mockito.mock(SecretService.class); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java index ad91c214db28f..7d89f4aafa01a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,15 +26,9 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListSecretsResolverTest { - private static final ListSecretsInput TEST_INPUT = new ListSecretsInput( - 0, 20, null - ); + private static final ListSecretsInput TEST_INPUT = new ListSecretsInput(0, 20, null); @Test public void testGetSuccess() throws Exception { @@ -40,40 +37,43 @@ public void testGetSuccess() throws Exception { DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.search( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + 
mockClient.search( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(SortCriterion.class), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver @@ -99,36 +99,33 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(SortCriterion.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java index c898ae7280710..5172ef01c25eb 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -8,9 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteIngestionSourceResolverTest { @Test @@ -22,11 +22,13 @@ public void testGetSuccess() throws Exception { // execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_INGESTION_SOURCE_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); } @Test @@ -38,24 +40,29 @@ public void testGetUnauthorized() throws Exception { // Execute resolver QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class)); // Execute Resolver QueryContext mockContext = getMockAllowContext(); DeleteIngestionSourceResolver resolver = new DeleteIngestionSourceResolver(mockClient); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java index ebafd1782e000..bda18961d3890 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -14,13 +17,9 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; import java.util.HashSet; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.assertThrows; - public class GetIngestionSourceResolverTest { @Test @@ -30,29 +29,31 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo returnedInfo = getTestIngestionSourceInfo(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInfo.data()))))))); GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -72,28 +73,26 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), 
Mockito.anySet(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java index 8e2453ce06a39..a86d67fcd15c1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,13 +26,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListIngestionSourceResolverTest { - private static final ListIngestionSourcesInput TEST_INPUT = new ListIngestionSourcesInput(0, 20, null, null); + private static final ListIngestionSourcesInput TEST_INPUT = + new ListIngestionSourcesInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { @@ -40,41 +40,47 @@ public void testGetSuccess() throws Exception { final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); key.setId("test"); - Mockito.when(mockClient.search( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(""), - Mockito.any(), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - 
TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), - Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(key.data())) - ))) - ) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(key.data()))))))); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver @@ -88,7 +94,8 @@ public void testGetSuccess() throws Exception { assertEquals(resolver.get(mockEnv).get().getCount(), 1); assertEquals(resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getIngestionSources().size(), 1); - verifyTestIngestionSourceGraphQL(resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); + verifyTestIngestionSourceGraphQL( + resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); } @Test @@ -100,35 +107,32 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new 
SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java index 16d8da9169a8f..8213a5fb61a55 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateIngestionSourceConfigInput; @@ -15,19 +19,16 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpsertIngestionSourceResolverTest { - private static final UpdateIngestionSourceInput TEST_INPUT = new UpdateIngestionSourceInput( - "Test source", - "mysql", "Test source description", - new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), - new UpdateIngestionSourceConfigInput("my test recipe", "0.8.18", "executor id", false, null) - ); + private static final UpdateIngestionSourceInput TEST_INPUT = + new UpdateIngestionSourceInput( + "Test source", + "mysql", + "Test source description", + new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), + new UpdateIngestionSourceConfigInput( + "my test recipe", "0.8.18", "executor id", false, null)); @Test public void testGetSuccess() throws Exception { @@ -38,7 +39,8 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -48,24 +50,24 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setType(TEST_INPUT.getType()); info.setName(TEST_INPUT.getName()); - info.setSchedule(new DataHubIngestionSourceSchedule() - .setInterval(TEST_INPUT.getSchedule().getInterval()) - 
.setTimezone(TEST_INPUT.getSchedule().getTimezone()) - ); - info.setConfig(new DataHubIngestionSourceConfig() - .setRecipe(TEST_INPUT.getConfig().getRecipe()) - .setVersion(TEST_INPUT.getConfig().getVersion()) - .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) - .setDebugMode(TEST_INPUT.getConfig().getDebugMode()) - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(MutationUtils.buildMetadataChangeProposalWithUrn(TEST_INGESTION_SOURCE_URN, - INGESTION_INFO_ASPECT_NAME, info) - ), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + info.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval(TEST_INPUT.getSchedule().getInterval()) + .setTimezone(TEST_INPUT.getSchedule().getTimezone())); + info.setConfig( + new DataHubIngestionSourceConfig() + .setRecipe(TEST_INPUT.getConfig().getRecipe()) + .setVersion(TEST_INPUT.getConfig().getVersion()) + .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) + .setDebugMode(TEST_INPUT.getConfig().getDebugMode())); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + MutationUtils.buildMetadataChangeProposalWithUrn( + TEST_INGESTION_SOURCE_URN, INGESTION_INFO_ASPECT_NAME, info)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -77,24 +79,21 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 61dd6c678e6e0..8fc5ab6ebb828 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -14,6 +17,7 @@ import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import 
com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.dataset.DatasetType; +import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; @@ -22,153 +26,158 @@ import com.linkedin.metadata.Constants; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import org.testng.annotations.Test; -import com.linkedin.entity.Aspect; - import java.net.URISyntaxException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class MutableTypeBatchResolverTest { - private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1"; - private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2"; - private static final boolean TEST_DATASET_1_IS_DEPRECATED = true; - private static final boolean TEST_DATASET_2_IS_DEPRECATED = false; - private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note"; - private static final String TEST_DATASET_2_DEPRECATION_NOTE = ""; - private static final Deprecation TEST_DATASET_1_DEPRECATION; - - static { - try { - TEST_DATASET_1_DEPRECATION = new Deprecation() - .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) - .setNote(TEST_DATASET_1_DEPRECATION_NOTE) - .setActor(Urn.createFromString("urn:li:corpuser:datahub")); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } + private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1"; + private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2"; + private static final boolean TEST_DATASET_1_IS_DEPRECATED = true; + private static final boolean TEST_DATASET_2_IS_DEPRECATED = false; + private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note"; + private static final String TEST_DATASET_2_DEPRECATION_NOTE = ""; + private static final Deprecation TEST_DATASET_1_DEPRECATION; + + static { + try { + TEST_DATASET_1_DEPRECATION = + new Deprecation() + .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) + .setNote(TEST_DATASET_1_DEPRECATION_NOTE) + .setActor(Urn.createFromString("urn:li:corpuser:datahub")); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } - - private static final Deprecation TEST_DATASET_2_DEPRECATION; - - static { - try { - TEST_DATASET_2_DEPRECATION = new Deprecation() - .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) - .setNote(TEST_DATASET_2_DEPRECATION_NOTE) - .setActor(Urn.createFromString("urn:li:corpuser:datahub")); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } + } + + private static final Deprecation TEST_DATASET_2_DEPRECATION; + + static { + try { + TEST_DATASET_2_DEPRECATION = + new Deprecation() + .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) + .setNote(TEST_DATASET_2_DEPRECATION_NOTE) + .setActor(Urn.createFromString("urn:li:corpuser:datahub")); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } + } - @Test - public void testGetSuccess() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); - BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> 
batchMutableType = new DatasetType(mockClient); + @Test + public void testGetSuccess() throws Exception { + EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType = + new DatasetType(mockClient); - MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = new MutableTypeBatchResolver<>(batchMutableType); + MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = + new MutableTypeBatchResolver<>(batchMutableType); - List<BatchDatasetUpdateInput> mockInputs = Arrays.asList( + List<BatchDatasetUpdateInput> mockInputs = + Arrays.asList( new BatchDatasetUpdateInput.Builder() - .setUrn(TEST_DATASET_1_URN) - .setUpdate( - new DatasetUpdateInput.Builder() - .setDeprecation( - new DatasetDeprecationUpdate.Builder() - .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) - .setNote(TEST_DATASET_1_DEPRECATION_NOTE) - .build() - ) - .build() - ) - .build(), + .setUrn(TEST_DATASET_1_URN) + .setUpdate( + new DatasetUpdateInput.Builder() + .setDeprecation( + new DatasetDeprecationUpdate.Builder() + .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) + .setNote(TEST_DATASET_1_DEPRECATION_NOTE) + .build()) + .build()) + .build(), new BatchDatasetUpdateInput.Builder() - .setUrn(TEST_DATASET_2_URN) - .setUpdate( - new DatasetUpdateInput.Builder() - .setDeprecation( - new DatasetDeprecationUpdate.Builder() - .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) - .setNote(TEST_DATASET_2_DEPRECATION_NOTE) - .build() - ) - .build() - ) - .build() - ); - - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Authentication mockAuth = Mockito.mock(Authentication.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth); - Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - - Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN); - Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN); - - Mockito.when(mockClient.batchGetV2(Mockito.eq(Constants.DATASET_ENTITY_NAME), + .setUrn(TEST_DATASET_2_URN) + .setUpdate( + new DatasetUpdateInput.Builder() + .setDeprecation( + new DatasetDeprecationUpdate.Builder() + .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) + .setNote(TEST_DATASET_2_DEPRECATION_NOTE) + .build()) + .build()) + .build()); + + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Authentication mockAuth = Mockito.mock(Authentication.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth); + Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); + + Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN); + Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq(new HashSet<>(ImmutableSet.of(datasetUrn1, datasetUrn2))), Mockito.any(), Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - datasetUrn1, - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(datasetUrn1) - .setAspects(new 
EnvelopedAspectMap(ImmutableMap.of( - Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data())) - ))), - datasetUrn2, - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(datasetUrn2) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data())) - ))) - )); - - List<Dataset> result = resolver.get(mockEnv).join(); - - ArgumentCaptor<Collection<MetadataChangeProposal>> changeProposalCaptor = ArgumentCaptor.forClass((Class) Collection.class); - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), - // Dataset aspects to fetch are private, but aren't important for this test - Mockito.any(), - Mockito.any(Authentication.class) - ); - Collection<MetadataChangeProposal> changeProposals = changeProposalCaptor.getValue(); - - assertEquals(changeProposals.size(), 2); - assertEquals(result.size(), 2); - } - - @Test - public void testGetFailureUnauthorized() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); - BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType = new DatasetType(mockClient); - - MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = new MutableTypeBatchResolver<>(batchMutableType); - - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - } + .thenReturn( + ImmutableMap.of( + datasetUrn1, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data()))))), + datasetUrn2, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn2) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data()))))))); + + List<Dataset> result = resolver.get(mockEnv).join(); + + ArgumentCaptor<Collection<MetadataChangeProposal>> changeProposalCaptor = + ArgumentCaptor.forClass((Class) Collection.class); + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), + // Dataset aspects to fetch are private, but aren't important for this test + Mockito.any(), + Mockito.any(Authentication.class)); + Collection<MetadataChangeProposal> changeProposals = changeProposalCaptor.getValue(); + + assertEquals(changeProposals.size(), 2); + assertEquals(result.size(), 2); + } + + @Test + public void testGetFailureUnauthorized() throws Exception { + EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + BatchMutableType<DatasetUpdateInput, 
BatchDatasetUpdateInput, Dataset> batchMutableType = + new DatasetType(mockClient); + + MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = + new MutableTypeBatchResolver<>(batchMutableType); + + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockDenyContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java index 1adf7b1200574..bdadfc98f6d85 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java @@ -1,60 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.common.Siblings; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.SiblingsUtils; import com.linkedin.metadata.entity.EntityService; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.HashSet; import java.util.Optional; - -import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; -import static org.testng.AssertJUnit.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class SiblingsUtilsTest { - private static final String TEST_DATASET_URN1 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; - private static final String TEST_DATASET_URN2 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; - private static final String TEST_DATASET_URN3 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; + private static final String TEST_DATASET_URN1 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; + private static final String TEST_DATASET_URN2 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; + private static final String TEST_DATASET_URN3 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; @Test public void testGetSiblingUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings().setSiblings(siblingUrns) - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings().setSiblings(siblingUrns)); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); } @Test public void testGetSiblingUrnsWithoutSiblings() { EntityService mockService = 
Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings() - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings()); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetSiblingUrnsWithSiblingsAspect() { EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - null - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(null); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetNextSiblingUrn() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); Optional<Urn> nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, new HashSet<>()); assertEquals(nextUrn, Optional.of(UrnUtils.getUrn(TEST_DATASET_URN2))); @@ -62,7 +71,8 @@ public void testGetNextSiblingUrn() { @Test public void testGetNextSiblingUrnWithUsedUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); HashSet<Urn> usedUrns = new HashSet<>(); usedUrns.add(UrnUtils.getUrn(TEST_DATASET_URN2)); Optional<Urn> nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, usedUrns); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index 9bd44e9ab0906..3fee28bc31725 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateUserSettingInput; @@ -12,13 +15,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class UpdateUserSettingResolverTest { private static final String TEST_USER_URN = "urn:li:corpuser:test"; + @Test public void testWriteCorpUserSettings() throws Exception { EntityService mockService = getMockEntityService(); @@ -36,9 +36,12 @@ public void testWriteCorpUserSettings() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - CorpUserSettings 
newSettings = new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_USER_URN), - CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + CorpUserSettings newSettings = + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_USER_URN), CORP_USER_SETTINGS_ASPECT_NAME, newSettings); verifySingleIngestProposal(mockService, 1, proposal); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java index e2661841fe8f7..abc1a5786f363 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -16,37 +20,35 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class ReportOperationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Operation expectedOperation = new Operation() - .setTimestampMillis(0L) - .setLastUpdatedTimestamp(0L) - .setOperationType(OperationType.INSERT) - .setSourceType(OperationSourceType.DATA_PLATFORM) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) - .setCustomOperationType(null, SetMode.IGNORE_NULL) - .setNumAffectedRows(1L); + Operation expectedOperation = + new Operation() + .setTimestampMillis(0L) + .setLastUpdatedTimestamp(0L) + .setOperationType(OperationType.INSERT) + .setSourceType(OperationSourceType.DATA_PLATFORM) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setCustomOperationType(null, SetMode.IGNORE_NULL) + .setNumAffectedRows(1L); - MetadataChangeProposal expectedProposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - OPERATION_ASPECT_NAME, expectedOperation); + MetadataChangeProposal expectedProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), OPERATION_ASPECT_NAME, expectedOperation); // Test setting the domain - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class))) - .thenReturn(TEST_ENTITY_URN); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class))) + .thenReturn(TEST_ENTITY_URN); ReportOperationResolver resolver = new 
ReportOperationResolver(mockClient); @@ -57,11 +59,9 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -77,9 +77,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private ReportOperationInput getTestInput() { @@ -91,4 +90,4 @@ private ReportOperationInput getTestInput() { input.setSourceType(com.linkedin.datahub.graphql.generated.OperationSourceType.DATA_PLATFORM); return input; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 329d71ec125db..74f88f95fc171 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -24,13 +27,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddOwnersResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_OWNER_1_URN = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_2_URN = "urn:li:corpuser:test-id-2"; private static final String TEST_OWNER_3_URN = "urn:li:corpGroup:test-id-3"; @@ -39,18 +39,23 @@ public class AddOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - 
OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -58,12 +63,20 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), - new OwnerInput(TEST_OWNER_2_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) - ), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), + new OwnerInput( + TEST_OWNER_2_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -71,38 +84,45 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); } @Test public void testGetSuccessExistingOwnerNewType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -110,13 +130,16 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -124,34 +147,42 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); } @Test public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + 
com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -159,12 +190,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -172,39 +207,51 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); } @Test public void testGetSuccessMultipleOwnerTypes() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + 
.name())))) + .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -212,22 +259,28 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_2_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_3_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_GROUP) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -235,27 +288,25 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN))); } @Test public void testGetFailureOwnerDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -266,9 +317,15 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = 
new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -280,10 +337,11 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -294,9 +352,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -312,9 +376,15 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -327,21 +397,30 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); 
AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index 79fc62742f444..92a789530d6e4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -23,14 +26,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @@ -38,16 +39,18 @@ public class BatchAddOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); 
@@ -56,8 +59,12 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -65,52 +72,64 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); } @Test public void testGetSuccessExistingOwners() throws Exception { - final Ownership originalOwnership = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); + final Ownership originalOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + 
.setType(OwnershipType.TECHNICAL_OWNER)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -119,12 +138,20 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -132,44 +159,49 @@ public void testGetSuccessExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of( - new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + 
new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); } @Test public void testGetFailureOwnerDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -180,20 +212,27 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -205,15 +244,17 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -225,20 +266,27 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -254,20 +302,27 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + 
com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -280,32 +335,42 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index 9dc2ec8127806..7cef90ffee512 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @@ -35,15 +36,17 @@ public class BatchRemoveOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -57,12 +60,13 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of( - TEST_OWNER_URN_1, - TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -74,24 +78,36 @@ public void testGetSuccessNoExistingOwners() throws Exception { public void testGetSuccessExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - final Ownership oldOwners1 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership oldOwners1 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldOwners1); - final Ownership oldOwners2 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_2)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership oldOwners2 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_2)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldOwners2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -105,10 +121,13 @@ public void testGetSuccessExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -120,15 +139,17 @@ public void testGetSuccessExistingOwners() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -140,10 +161,13 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new 
BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -159,10 +183,13 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -175,22 +202,28 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java index 0643ead444c94..ff11d971b52e8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java @@ -1,12 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.service.OwnershipTypeService; @@ -15,15 +18,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateOwnershipTypeResolverTest { - private static final CreateOwnershipTypeInput TEST_INPUT = new CreateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final CreateOwnershipTypeInput TEST_INPUT = + new CreateOwnershipTypeInput( + "Custom ownership", "A custom ownership description for testing purposes"); private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); @@ -45,10 +44,12 @@ public void testCreateSuccess() throws Exception { assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); assertEquals(ownershipType.getType(), EntityType.CUSTOM_OWNERSHIP_TYPE); - Mockito.verify(mockService, Mockito.times(1)).createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createOwnershipType( + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -65,20 +66,18 @@ public void testCreateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testCreateOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createOwnershipType( - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createOwnershipType( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class), Mockito.anyLong()); CreateOwnershipTypeResolver resolver = new CreateOwnershipTypeResolver(mockService); @@ -93,12 +92,13 @@ public void testCreateOwnershipTypeServiceException() throws Exception { private OwnershipTypeService initMockService() { OwnershipTypeService service = Mockito.mock(OwnershipTypeService.class); - 
Mockito.when(service.createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_OWNERSHIP_TYPE_URN); + Mockito.when( + service.createOwnershipType( + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_OWNERSHIP_TYPE_URN); return service; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java index 9f526e4008236..ae97164a2787e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -14,11 +18,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.ArgumentMatchers.*; -import static org.testng.Assert.*; - - public class DeleteOwnershipTypeResolverTest { private static final Urn TEST_URN = @@ -41,11 +40,8 @@ public void testGetSuccessOwnershipTypeCanManage() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteOwnershipType( - Mockito.eq(TEST_URN), - anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class)); } @Test @@ -62,21 +58,17 @@ public void testGetFailureOwnershipTypeCanNotManager() throws Exception { assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteOwnershipType( - Mockito.eq(TEST_URN), - anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class)); } @Test public void testGetOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteOwnershipType( - Mockito.any(), - anyBoolean(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteOwnershipType(Mockito.any(), anyBoolean(), Mockito.any(Authentication.class)); DeleteOwnershipTypeResolver resolver = new DeleteOwnershipTypeResolver(mockService); @@ -93,17 +85,18 @@ public void testGetOwnershipTypeServiceException() throws Exception { private static OwnershipTypeService initOwnershipTypeService() { OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - 
.setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getOwnershipTypeInfo( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java index ceab13167246c..fd7baf6af7469 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,16 +21,13 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListOwnershipTypesResolverTest { private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); - private static final ListOwnershipTypesInput TEST_INPUT = new ListOwnershipTypesInput(0, 20, "", null); + private static final ListOwnershipTypesInput TEST_INPUT = + new ListOwnershipTypesInput(0, 20, "", null); @Test public void testGetSuccess() throws Exception { @@ -38,21 +38,24 @@ public void testGetSuccess() throws Exception { final OwnershipTypeKey key = new OwnershipTypeKey(); key.setId("test"); - Mockito.when(mockClient.search( - Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN))))); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); @@ -78,35 +81,32 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); // Execute resolver @@ -124,4 +124,4 @@ public static OwnershipTypeInfo getOwnershipTypeInfo() { info.setDescription("some description"); return info; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java index f35b8f98cc1ac..6e428842201d5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.linkedin.common.AuditStamp; @@ -7,8 +10,8 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.UpdateOwnershipTypeInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; @@ -24,19 +27,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateOwnershipTypeResolverTest { private static final Urn TEST_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, 
"test"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateOwnershipTypeInput TEST_INPUT = new UpdateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final UpdateOwnershipTypeInput TEST_INPUT = + new UpdateOwnershipTypeInput( + "Custom ownership", "A custom ownership description for testing purposes"); @Test public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { @@ -55,23 +54,27 @@ public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { assertEquals(ownershipType.getInfo().getName(), TEST_INPUT.getName()); assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); - Mockito.verify(mockService, Mockito.times(1)).updateOwnershipType( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateOwnershipType( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test public void testUpdateOwnershipTypeServiceException() throws Exception { // Update resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateOwnershipType( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateOwnershipType( + Mockito.any(Urn.class), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateOwnershipTypeResolver resolver = new UpdateOwnershipTypeResolver(mockService); @@ -100,39 +103,41 @@ public void testUpdateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private static OwnershipTypeService initOwnershipTypeService() { OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new 
AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when( + mockService.getOwnershipTypeInfo( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); - Mockito.when(mockService.getOwnershipTypeEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getOwnershipTypeEntityResponse( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testEntityResponse); return mockService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java index b56d897a468ba..2827e3602e379 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; @@ -15,11 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreatePostResolverTest { private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; private static final String POST_MEDIA_LOCATION = @@ -59,9 +58,12 @@ public void testCreatePost() throws Exception { UpdateMediaInput media = new UpdateMediaInput(); media.setType(POST_MEDIA_TYPE); media.setLocation(POST_MEDIA_LOCATION); - Media mediaObj = new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new Url(POST_MEDIA_LOCATION)); - when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))).thenReturn(mediaObj); + Media mediaObj = + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION)); + when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))) + .thenReturn(mediaObj); UpdatePostContentInput content = new UpdatePostContentInput(); content.setTitle(POST_TITLE); @@ -69,22 +71,33 @@ public void testCreatePost() throws Exception { content.setLink(POST_LINK); content.setContentType(POST_CONTENT_TYPE); content.setMedia(media); - com.linkedin.post.PostContent postContentObj = new com.linkedin.post.PostContent().setType( - com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new 
Url(POST_MEDIA_LOCATION))); - when(_postService.mapPostContent(eq(POST_CONTENT_TYPE.toString()), eq(POST_TITLE), eq(POST_DESCRIPTION), - eq(POST_LINK), any(Media.class))).thenReturn(postContentObj); + com.linkedin.post.PostContent postContentObj = + new com.linkedin.post.PostContent() + .setType(com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia( + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION))); + when(_postService.mapPostContent( + eq(POST_CONTENT_TYPE.toString()), + eq(POST_TITLE), + eq(POST_DESCRIPTION), + eq(POST_LINK), + any(Media.class))) + .thenReturn(postContentObj); CreatePostInput input = new CreatePostInput(); input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT); input.setContent(content); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - when(_postService.createPost(eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), eq(postContentObj), - eq(_authentication))).thenReturn(true); + when(_postService.createPost( + eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), + eq(postContentObj), + eq(_authentication))) + .thenReturn(true); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java index b8a7488a824fd..085cfd0569781 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.urn.Urn; @@ -9,11 +13,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class DeletePostResolverTest { private static final String POST_URN_STRING = "urn:li:post:123"; private PostService _postService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java index c22d6bf39640d..6c475cdc7f5a8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,17 +33,10 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; 
import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListPostsResolverTest { private static Map<Urn, EntityResponse> _entityResponseMap; private static final String POST_URN_STRING = "urn:li:post:examplePost"; @@ -49,12 +47,15 @@ public class ListPostsResolverTest { private static final String POST_TITLE = "title"; private static final String POST_DESCRIPTION = "description"; private static final String POST_LINK = "https://datahubproject.io"; - private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); - private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(MEDIA); + private static final Media MEDIA = + new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); + private static final PostContent POST_CONTENT = + new PostContent() + .setType(POST_CONTENT_TYPE) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(MEDIA); private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT; private EntityClient _entityClient; @@ -72,8 +73,11 @@ private Map<Urn, EntityResponse> getMockPostsEntityResponse() throws URISyntaxEx DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(postUrn.toString()); dataHubRoleInfo.setName(postUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return ImmutableMap.of(postUrn, entityResponse); } @@ -106,13 +110,27 @@ public void testListPosts() throws Exception { ListPostsInput input = new ListPostsInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(1); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(1); roleSearchResult.setEntities( - new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); - - when(_entityClient.search(eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt(), - eq(_authentication), Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); + + when(_entityClient.search( + eq(POST_ENTITY_NAME), + any(), + eq(null), + any(), + anyInt(), + anyInt(), + eq(_authentication), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())) + .thenReturn(_entityResponseMap); ListPostsResult result = 
_resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java index 9c04c67dd3a3b..eebe0034fce61 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -24,10 +26,10 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.service.QueryService; -import com.linkedin.entity.client.EntityClient; import com.linkedin.query.QueryProperties; import com.linkedin.query.QuerySource; import com.linkedin.query.QueryStatement; @@ -40,21 +42,19 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class CreateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final CreateQueryInput TEST_INPUT = new CreateQueryInput( - new CreateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString())) - ); + private static final CreateQueryInput TEST_INPUT = + new CreateQueryInput( + new CreateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString()))); @Test public void testGetSuccess() throws Exception { @@ -70,25 +70,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + 
TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -105,23 +115,24 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); CreateQueryResolver resolver = new CreateQueryResolver(mockService); @@ -136,58 +147,68 @@ public void testGetQueryServiceException() throws Exception { private QueryService initMockService() { QueryService service = Mockito.mock(QueryService.class); - Mockito.when(service.createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_QUERY_URN); - - final QueryProperties 
queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects querySubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(querySubjects.data())) - ))) - ); + Mockito.when( + service.createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT + .getProperties() + .getStatement() + .getLanguage() + .toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_QUERY_URN); + + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects querySubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(querySubjects.data())))))); return 
service; } @@ -197,36 +218,40 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java index 78c894f27cbc3..96ddc632562ee 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -22,14 +25,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteQueryResolverTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @Test @@ -45,10 +45,8 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -62,10 +60,8 @@ public void testGetSuccessCanEditQueries() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -79,19 +75,17 @@ public void testGetFailureActorUnauthorized() { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteQuery( - 
Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteQuery(Mockito.any(), Mockito.any(Authentication.class)); DeleteQueryResolver resolver = new DeleteQueryResolver(mockService); @@ -108,14 +102,13 @@ private static QueryService initMockService() { QueryService mockService = Mockito.mock(QueryService.class); QuerySubjects existingQuerySubjects = new QuerySubjects(); - existingQuerySubjects.setSubjects(new QuerySubjectArray( - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)) - )); + existingQuerySubjects.setSubjects( + new QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(mockService.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) - .thenReturn(existingQuerySubjects); + Mockito.when( + mockService.getQuerySubjects( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn(existingQuerySubjects); return mockService; } @@ -126,40 +119,47 @@ private QueryContext getMockAllowEditQueriesOnQueryContext() { private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getActorUrn()) + .thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), - DeleteQueryResolverTest.TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec( + DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), + DeleteQueryResolverTest.TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? 
AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 877a4d2b27f6a..8a56b142e5b5e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -32,28 +35,24 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListQueriesResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = Urn.createFromTuple("query", "test-id"); - private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = new ListQueriesInput( - 0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString() - ); - private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = new ListQueriesInput( - 0, 30, null, QuerySource.MANUAL, null - ); - private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = new ListQueriesInput( - 0, 40, null, null, TEST_DATASET_URN.toString() - ); + private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = + new ListQueriesInput(0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString()); + private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = + new ListQueriesInput(0, 30, null, QuerySource.MANUAL, null); + private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = + new ListQueriesInput(0, 40, null, null, TEST_DATASET_URN.toString()); @DataProvider(name = "inputs") public static Object[][] inputs() { - return new Object[][] {{ TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER}}; + return new Object[][] { + {TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER} + }; } @Test(dataProvider = "inputs") @@ -61,22 +60,30 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(input.getQuery() == null ? 
ListQueriesResolver.DEFAULT_QUERY : input.getQuery()), - Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), - Mockito.eq(new SortCriterion().setField(ListQueriesResolver.CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING)), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq( + input.getQuery() == null + ? ListQueriesResolver.DEFAULT_QUERY + : input.getQuery()), + Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), + Mockito.eq( + new SortCriterion() + .setField(ListQueriesResolver.CREATED_AT_FIELD) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN))))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); @@ -90,7 +97,8 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getQueries().size(), 1); - assertEquals(resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); } @Test @@ -102,33 +110,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT_FULL_FILTERS); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_FULL_FILTERS); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + 
Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); // Execute resolver @@ -146,7 +156,8 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity if (source != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_SOURCE_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_SOURCE_FIELD, null, ImmutableList.of(source.toString()), false, @@ -154,14 +165,14 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity } if (entityUrn != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_ENTITIES_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_ENTITIES_FIELD, null, ImmutableList.of(entityUrn), false, FilterOperator.EQUAL)); - } criteria.setAnd(andConditions); return ResolverUtils.buildFilter(Collections.emptyList(), ImmutableList.of(criteria)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java index 9b500b5fb3936..766d8a2ccb136 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -13,12 +15,12 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateQueryInput; -import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; -import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.QueryLanguage; import com.linkedin.datahub.graphql.generated.QueryStatementInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; +import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; import com.linkedin.entity.EntityResponse; @@ -40,22 +42,21 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class UpdateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); - private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = 
UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateQueryInput TEST_INPUT = new UpdateQueryInput( - new UpdateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString())) - ); + private static final UpdateQueryInput TEST_INPUT = + new UpdateQueryInput( + new UpdateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString()))); @Test public void testGetSuccess() throws Exception { @@ -72,25 +73,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).updateQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN_2) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateQuery( + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -108,23 +119,24 @@ public void testGetUnauthorizedNoEditQueriesRights() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, 
Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Update resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateQueryResolver resolver = new UpdateQueryResolver(mockService); @@ -143,56 +155,59 @@ private QueryService initMockService() { // Pre-Update QueryService service = Mockito.mock(QueryService.class); - final QuerySubjects existingSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); + final QuerySubjects existingSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(service.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) + Mockito.when( + service.getQuerySubjects(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) .thenReturn(existingSubjects); // Post-Update - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects newSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN_2) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(newSubjects.data())) - ))) - ); + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + 
.setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects newSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(newSubjects.data())))))); return service; } @@ -202,62 +217,71 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editQueriesRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); - - AuthorizationRequest editAllRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); + AuthorizationRequest editQueriesRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editQueriesRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); + + AuthorizationRequest editAllRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); AuthorizationResult editQueriesResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult1.getType()).thenReturn(allowEditEntityQueries ? 
AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))).thenReturn(editQueriesResult1); + Mockito.when(editQueriesResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))) + .thenReturn(editQueriesResult1); AuthorizationResult editAllResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest1))).thenReturn(editAllResult1); AuthorizationResult editQueriesResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))).thenReturn(editQueriesResult2); + Mockito.when(editQueriesResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))) + .thenReturn(editQueriesResult2); AuthorizationResult editAllResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest2))).thenReturn(editAllResult2); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java index 3cde81d7a7f31..fe032d0bf4859 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AcceptRoleResolverTest { private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token"; private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; @@ -54,7 +53,8 @@ public void testInvalidInviteToken() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(false); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(false); AcceptRoleInput input = new AcceptRoleInput(); input.setInviteToken(INVITE_TOKEN_STRING); @@ -69,8 +69,10 @@ public void testNoRoleUrn() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(null); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(null); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); @@ -89,8 +91,10 @@ public void testAssignRolePasses() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); 
when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(roleUrn); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(roleUrn); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java index 85891dbd96fb0..6411728552a1e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.google.common.collect.ImmutableList; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchAssignRoleResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java index 8d8faf5c3f12e..9197d1b18c0c9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - 
when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); CreateInviteTokenInput input = new CreateInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java index ef426979953d0..8e761454cb06c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class GetInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); GetInviteTokenInput input = new GetInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java index 4a0b062c67ffd..d956295faa180 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static 
org.mockito.Mockito.anyInt; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -24,14 +31,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListRolesResolverTest { private static final String ADMIN_ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String EDITOR_ROLE_URN_STRING = "urn:li:dataHubRole:Editor"; @@ -47,8 +46,11 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(roleUrn.toString()); dataHubRoleInfo.setName(roleUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return entityResponse; } @@ -57,8 +59,12 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { public void setupTest() throws Exception { Urn adminRoleUrn = Urn.createFromString(ADMIN_ROLE_URN_STRING); Urn editorRoleUrn = Urn.createFromString(EDITOR_ROLE_URN_STRING); - _entityResponseMap = ImmutableMap.of(adminRoleUrn, getMockRoleEntityResponse(adminRoleUrn), editorRoleUrn, - getMockRoleEntityResponse(editorRoleUrn)); + _entityResponseMap = + ImmutableMap.of( + adminRoleUrn, + getMockRoleEntityResponse(adminRoleUrn), + editorRoleUrn, + getMockRoleEntityResponse(editorRoleUrn)); _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); @@ -84,14 +90,28 @@ public void testListRoles() throws Exception { ListRolesInput input = new ListRolesInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(2); - roleSearchResult.setEntities(new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), - new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); - - when(_entityClient.search(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt(), any(), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(2); + roleSearchResult.setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), + new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); + + when(_entityClient.search( + eq(DATAHUB_ROLE_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + any(), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), 
any(), any())) + .thenReturn(_entityResponseMap); ListRolesResult result = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c161a66d3ee93..c7d397c5a4a73 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -30,61 +33,49 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class AggregateAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); List<String> facets = ImmutableList.of("platform", "domains"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), "", facets, null, TEST_VIEW_URN.toString(), 
null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -94,18 +85,16 @@ public static void testApplyViewNullBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", viewFilter, // Verify that view filter was used. 0, 0, facets // Verify called with facets we provide - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -113,42 +102,44 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -158,18 +149,15 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 
0, 0, - null - ); + null); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -178,36 +166,28 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List<String> facets = ImmutableList.of("platform"); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - null, - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -218,18 +198,17 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 
0, 0, facets // Verify facets passed in were used - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -238,36 +217,29 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List<String> facets = ImmutableList.of(); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -278,55 +250,50 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, 0, null // Verify that an empty list for facets in input sends null - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List<String> searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List<String> searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -334,45 +301,31 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 0, - null - ); + verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 0, null); } @Test public static void testErrorFetchingResults() throws Exception { - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -383,17 +336,18 @@ public static void testErrorFetchingResults() throws Exception { private static Filter createFilter(String field, String value) { return new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField(field) - .setValue(value) - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of(value))) - )) - ))); + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(field) + .setValue(value) + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues(new StringArray(ImmutableList.of(value)))))))); } private static DataHubViewInfo getViewInfo(Filter viewFilter) { @@ -402,24 +356,20 @@ private static DataHubViewInfo getViewInfo(Filter viewFilter) { info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); return info; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -430,22 +380,21 @@ private static EntityClient initMockEntityClient( int start, int limit, List<String> facets, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), - Mockito.eq(facets) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class), + Mockito.eq(facets))) + .thenReturn(result); return client; } @@ -456,8 +405,8 @@ private static void verifyMockEntityClient( Filter filter, int start, int limit, - List<String> facets - ) throws Exception { + List<String> facets) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -468,21 +417,13 @@ private static void verifyMockEntityClient( Mockito.eq(null), Mockito.eq(null), Mockito.any(Authentication.class), - Mockito.eq(facets) - ); + Mockito.eq(facets)); } - private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void 
verifyMockViewService(ViewService mockService, Urn viewUrn) { Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)); } - private AggregateAcrossEntitiesResolverTest() { } - + private AggregateAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java index 7397ea8fa21cf..3b69337acfbd0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -32,14 +34,12 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class AutoCompleteForMultipleResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private AutoCompleteForMultipleResolverTest() { } + private AutoCompleteForMultipleResolverTest() {} public static void testAutoCompleteResolverSuccess( EntityClient mockClient, @@ -48,9 +48,10 @@ public static void testAutoCompleteResolverSuccess( EntityType entityType, SearchableEntityType<?, ?> entity, Urn viewUrn, - Filter filter - ) throws Exception { - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); + Filter filter) + throws Exception { + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -65,13 +66,7 @@ public static void testAutoCompleteResolverSuccess( Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - entityName, - "test", - filter, - 10 - ); + verifyMockEntityClient(mockClient, entityName, "test", filter, 10); } // test our main entity types @@ -79,43 +74,64 @@ public static void testAutoCompleteResolverSuccess( public static void testAutoCompleteResolverSuccessForDifferentEntities() throws Exception { ViewService viewService = initMockViewService(null, null); // Daatasets - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATASET_ENTITY_NAME, EntityType.DATASET, new DatasetType(mockClient), null, null); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + 
.setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATASET_ENTITY_NAME, + EntityType.DATASET, + new DatasetType(mockClient), + null, + null); // Dashboards - mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DASHBOARD_ENTITY_NAME, EntityType.DASHBOARD, new DashboardType(mockClient), null, null); + mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DASHBOARD_ENTITY_NAME, + EntityType.DASHBOARD, + new DashboardType(mockClient), + null, + null); - //DataFlows - mockClient = initMockEntityClient( - Constants.DATA_FLOW_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATA_FLOW_ENTITY_NAME, EntityType.DATA_FLOW, new DataFlowType(mockClient), null, null); + // DataFlows + mockClient = + initMockEntityClient( + Constants.DATA_FLOW_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATA_FLOW_ENTITY_NAME, + EntityType.DATA_FLOW, + new DataFlowType(mockClient), + null, + null); } // test filters with a given view @@ -123,16 +139,16 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverWithViewFilter() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); testAutoCompleteResolverSuccess( mockClient, viewService, @@ -140,8 +156,7 @@ public static void testAutoCompleteResolverWithViewFilter() throws Exception { EntityType.DATASET, new DatasetType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); } // test entity type filters with a given view @@ -152,16 +167,16 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti entityNames.add(Constants.DASHBOARD_ENTITY_NAME); DataHubViewInfo viewInfo = createViewInfo(entityNames); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + 
EntityClient mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); // ensure we do hit the entity client for dashboards since dashboards are in our view testAutoCompleteResolverSuccess( @@ -171,25 +186,26 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti EntityType.DASHBOARD, new DashboardType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); - // if the view has only dashboards, we should not make an auto-complete request on other entity types + // if the view has only dashboards, we should not make an auto-complete request on other entity + // types Mockito.verify(mockClient, Mockito.times(0)) .autoComplete( Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq("test"), Mockito.eq(viewInfo.getDefinition().getFilter()), Mockito.eq(10), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } @Test public static void testAutoCompleteResolverFailNoQuery() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); ViewService viewService = initMockViewService(null, null); - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(new DatasetType(mockClient)), viewService); + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver( + ImmutableList.of(new DatasetType(mockClient)), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -204,75 +220,60 @@ public static void testAutoCompleteResolverFailNoQuery() throws Exception { } private static EntityClient initMockEntityClient( - String entityName, - String query, - Filter filters, - int limit, - AutoCompleteResult result - ) throws Exception { + String entityName, String query, Filter filters, int limit, AutoCompleteResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.autoComplete( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn(result); + Mockito.when( + client.autoComplete( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filters), + Mockito.eq(limit), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } - + private static void verifyMockEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filters, - int limit - ) throws Exception { + EntityClient mockClient, String entityName, String query, Filter filters, int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .autoComplete( Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), Mockito.eq(limit), - Mockito.any(Authentication.class) - ); + 
Mockito.any(Authentication.class)); } private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index a599117c3e165..29a2b3081aefe 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,20 +23,16 @@ import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class GetQuickFiltersResolverTest { @@ -41,19 +40,21 @@ public class GetQuickFiltersResolverTest { public static void testGetQuickFiltersHappyPathSuccess() throws Exception { SearchResultMetadata mockData = getHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + 
SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -72,19 +73,21 @@ public static void testGetQuickFiltersHappyPathSuccess() throws Exception { public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { SearchResultMetadata mockData = getUnHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -103,16 +106,17 @@ public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { public static void testGetQuickFiltersFailure() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -124,26 +128,36 @@ public static void testGetQuickFiltersFailure() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static void compareResultToExpectedData(GetQuickFiltersResult result, GetQuickFiltersResult expected) { - IntStream.range(0, result.getQuickFilters().size()).forEach(index -> { - QuickFilter resultFilter = result.getQuickFilters().get(index); - QuickFilter expectedFilter = expected.getQuickFilters().get(index); - Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); - Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); - if (resultFilter.getEntity() != null) { - Assert.assertEquals(resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); - } - }); + private static void compareResultToExpectedData( + GetQuickFiltersResult result, GetQuickFiltersResult expected) { + IntStream.range(0, result.getQuickFilters().size()) + .forEach( + index -> { + QuickFilter resultFilter = result.getQuickFilters().get(index); + QuickFilter expectedFilter = expected.getQuickFilters().get(index); + Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); + 
Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); + if (resultFilter.getEntity() != null) { + Assert.assertEquals( + resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); + } + }); } private static SearchResultMetadata getHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); FilterValueArray entityTypeFilters = new FilterValueArray(); entityTypeFilters.add(createFilterValue("dataset", 100, null)); @@ -168,11 +182,18 @@ private static GetQuickFiltersResult getHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List<QuickFilter> quickFilters = new ArrayList<>(); // platforms should be in alphabetical order - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DASHBOARD", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_FLOW", null)); @@ -186,9 +207,12 @@ private static GetQuickFiltersResult getHappyPathResultData() { private static SearchResultMetadata 
getUnHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); // only 3 platforms available - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); FilterValueArray entityTypeFilters = new FilterValueArray(); // no dashboard, data flows, or glossary terms @@ -210,10 +234,15 @@ private static SearchResultMetadata getUnHappyPathTestData() { private static GetQuickFiltersResult getUnHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List<QuickFilter> quickFilters = new ArrayList<>(); - // in correct order by count for platforms (alphabetical). In correct order by priority for entity types - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + // in correct order by count for platforms (alphabetical). In correct order by priority for + // entity types + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_JOB", null)); quickFilters.add(createQuickFilter("_entityType", "CHART", null)); @@ -224,7 +253,8 @@ private static GetQuickFiltersResult getUnHappyPathResultData() { return result; } - private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { + private static QuickFilter createQuickFilter( + @Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); quickFilter.setValue(value); @@ -234,7 +264,8 @@ private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnu return quickFilter; } - private static FilterValue createFilterValue(@Nonnull final String value, final int count, @Nullable final String entity) { + private static FilterValue createFilterValue( + @Nonnull final String value, final int count, @Nullable final String entity) { FilterValue filterValue = new FilterValue(); filterValue.setValue(value); filterValue.setFacetCount(count); @@ -244,7 +275,8 @@ private static FilterValue createFilterValue(@Nonnull final String value, final return filterValue; } - private static AggregationMetadata createAggregationMetadata(@Nonnull final String name, @Nonnull final FilterValueArray filterValues) { + private static AggregationMetadata 
createAggregationMetadata( + @Nonnull final String name, @Nonnull final FilterValueArray filterValues) { AggregationMetadata aggregationMetadata = new AggregationMetadata(); aggregationMetadata.setName(name); aggregationMetadata.setFilterValues(filterValues); @@ -257,24 +289,22 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private GetQuickFiltersResolverTest() { } - + private GetQuickFiltersResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index b0a681c9b2342..d0bbfd126b9b9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -38,167 +41,172 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - public class SearchAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); 
- - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockEntityClient( + mockClient, + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", - viewFilter, + viewFilter, // Verify that view filter was used. 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockEntityClient( - mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. - "", - viewFilter, // Verify that view filter was used. 
- 0, - 10 - ); - - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + 10); + + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("baseField.keyword") - .setValue("baseTest") - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("baseTest"))) - )) - ))); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("baseField.keyword") + .setValue("baseTest") + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues( + new StringArray(ImmutableList.of("baseTest")))))))); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); 
+ + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -208,74 +216,66 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewNullBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - null, - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver 
resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + null, "", 0, 10, null, null, TEST_VIEW_URN.toString(), null, null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -285,74 +285,75 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewEmptyBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); 
+ + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -362,56 +363,55 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List<String> searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List<String> searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -419,49 +419,41 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 10 - ); + verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 10); } @Test public static void testApplyViewErrorFetchingView() throws Exception { // When a view cannot be successfully resolved, the endpoint show THROW. 
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -470,17 +462,10 @@ public static void testApplyViewErrorFetchingView() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -490,21 +475,20 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } @@ -514,8 +498,8 @@ private static void verifyMockEntityClient( String query, Filter filter, int start, - int limit - ) throws Exception { + int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -525,21 +509,13 @@ private static void verifyMockEntityClient( Mockito.eq(limit), Mockito.eq(null), Mockito.eq(null), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } - 
private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)); } - private SearchAcrossEntitiesResolverTest() { } - + private SearchAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index c68b621e6921f..273f7156c12a8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.UrnUtils; @@ -23,15 +27,12 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - // Initialize this class in the style of SearchAcrossEntitiesResolverTest.java public class SearchAcrossLineageResolverTest { - private static final String SOURCE_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; - private static final String TARGET_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; + private static final String SOURCE_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; + private static final String TARGET_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; private static final String QUERY = ""; private static final int START = 0; private static final int COUNT = 10; @@ -87,19 +88,20 @@ public void testSearchAcrossLineage() throws Exception { lineageSearchResult.setEntities(new LineageSearchEntityArray(lineageSearchEntity)); when(_entityClient.searchAcrossLineage( - eq(UrnUtils.getUrn(SOURCE_URN_STRING)), - eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), - anyList(), - eq(QUERY), - eq(null), - any(), - eq(null), - eq(START), - eq(COUNT), - eq(START_TIMESTAMP_MILLIS), - eq(END_TIMESTAMP_MILLIS), - eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), - eq(_authentication))).thenReturn(lineageSearchResult); + eq(UrnUtils.getUrn(SOURCE_URN_STRING)), + eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), + anyList(), + eq(QUERY), + eq(null), + any(), + eq(null), + eq(START), + eq(COUNT), + eq(START_TIMESTAMP_MILLIS), + eq(END_TIMESTAMP_MILLIS), + eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), + eq(_authentication))) + .thenReturn(lineageSearchResult); final SearchAcrossLineageResults results = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(results.getCount(), 10); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 
6ba8b3cefe504..24724cb8e23ad 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,134 +18,107 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - - public class SearchResolverTest { - @Test - public void testDefaultSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "", - null, - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(true) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - @Test - public void testOverrideSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchFlags inputSearchFlags = new SearchFlags(); - inputSearchFlags.setFulltext(false); - inputSearchFlags.setSkipAggregates(true); - inputSearchFlags.setSkipHighlighting(true); - inputSearchFlags.setMaxAggValues(10); - inputSearchFlags.setSkipCache(true); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 1, - 11, - null, - null, - inputSearchFlags - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
- "", - null, - null, - 1, - 11, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setMaxAggValues(10) - .setSkipCache(true) - ); - } - - @Test - public void testNonWildCardSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "not a wildcard", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "not a wildcard", - null, // Verify that view filter was used. - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(false) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - private EntityClient initMockSearchEntityClient() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.search( + @Test + public void testDefaultSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = new SearchInput(EntityType.DATASET, "", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "", + null, + null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(true) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + @Test + public void testOverrideSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchFlags inputSearchFlags = new SearchFlags(); + inputSearchFlags.setFulltext(false); + inputSearchFlags.setSkipAggregates(true); + inputSearchFlags.setSkipHighlighting(true); + inputSearchFlags.setMaxAggValues(10); + inputSearchFlags.setSkipCache(true); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "", 1, 11, null, null, inputSearchFlags); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
+ "", + null, + null, + 1, + 11, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setMaxAggValues(10) + .setSkipCache(true)); + } + + @Test + public void testNonWildCardSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "not a wildcard", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "not a wildcard", + null, // Verify that view filter was used. + null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(false) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + private EntityClient initMockSearchEntityClient() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + Mockito.when( + client.search( Mockito.anyString(), Mockito.anyString(), Mockito.any(), @@ -151,40 +126,38 @@ private EntityClient initMockSearchEntityClient() throws Exception { Mockito.anyInt(), Mockito.anyInt(), Mockito.any(Authentication.class), - Mockito.any() - )).thenReturn( - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - return client; - } - - private void verifyMockSearchEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filter, - SortCriterion sortCriterion, - int start, - int limit, - com.linkedin.metadata.query.SearchFlags searchFlags - ) throws Exception { - Mockito.verify(mockClient, Mockito.times(1)).search( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(sortCriterion), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.eq(searchFlags) - ); - } - - private SearchResolverTest() { - } + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + return client; + } + + private void verifyMockSearchEntityClient( + EntityClient mockClient, + String entityName, + String query, + Filter filter, + SortCriterion sortCriterion, + int start, + int limit, + com.linkedin.metadata.query.SearchFlags searchFlags) + throws Exception { + Mockito.verify(mockClient, Mockito.times(1)) + .search( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(sortCriterion), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.any(Authentication.class), + Mockito.eq(searchFlags)); + } + + private SearchResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java index b35f7a77b209c..8f23f0a624576 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java 
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java @@ -17,16 +17,18 @@ public class SearchUtilsTest { @Test public static void testApplyViewToFilterNullBaseFilter() { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(null, viewFilter); Assert.assertEquals(viewFilter, result); @@ -34,275 +36,272 @@ public static void testApplyViewToFilterNullBaseFilter() { @Test public static void testApplyViewToFilterComplexBaseFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new 
StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterComplexViewFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - 
.setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - 
.setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray(ImmutableList.of("viewTest4")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterV1Filter() { - Filter baseFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ); - - Filter viewFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ); - - Filter result 
= SearchUtils.combineFilters(baseFilter, viewFilter); - - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( + Filter baseFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("field1") .setValue("test1") @@ -310,7 +309,13 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("field2") .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), + .setValues(new StringArray(ImmutableList.of("test2")))))); + + Filter viewFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("viewField1") .setValue("viewTest1") @@ -318,10 +323,38 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("viewField2") .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ) - ))); + .setValues(new StringArray(ImmutableList.of("viewTest2")))))); + + Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); + + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray( + ImmutableList.of("viewTest2"))))))))); Assert.assertEquals(expectedResult, result); } @@ -329,24 +362,17 @@ public static void testApplyViewToFilterV1Filter() { @Test public static void testApplyViewToEntityTypes() { - List<String> baseEntityTypes = ImmutableList.of( - Constants.CHART_ENTITY_NAME, - Constants.DATASET_ENTITY_NAME - ); + List<String> baseEntityTypes = + ImmutableList.of(Constants.CHART_ENTITY_NAME, Constants.DATASET_ENTITY_NAME); - List<String> viewEntityTypes = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME - ); + List<String> viewEntityTypes = + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME); final List<String> result = SearchUtils.intersectEntityTypes(baseEntityTypes, viewEntityTypes); - final List<String> expectedResult = ImmutableList.of( - Constants.DATASET_ENTITY_NAME - ); + final List<String> expectedResult = ImmutableList.of(Constants.DATASET_ENTITY_NAME); Assert.assertEquals(expectedResult, result); } - private SearchUtilsTest() { } - + private SearchUtilsTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java index 905e913fba909..553a2c85a7ae2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package 
com.linkedin.datahub.graphql.resolvers.settings.user; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,29 +18,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateCorpUserViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = new UpdateCorpUserViewsSettingsInput( - TEST_URN.toString() - ); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = new UpdateCorpUserViewsSettingsInput( - null - ); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = + new UpdateCorpUserViewsSettingsInput(TEST_URN.toString()); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = + new UpdateCorpUserViewsSettingsInput(null); @Test public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -46,25 +44,28 @@ public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessViewSettingsExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + 
.setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -73,26 +74,28 @@ public void testGetSuccessViewSettingsExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } - @Test public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -101,22 +104,26 @@ public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), + Mockito.any(Authentication.class)); } @Test public void testGetCorpUserSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getCorpUserSettings(Mockito.eq(TEST_USER_URN), Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + 
UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -126,19 +133,18 @@ public void testGetCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateCorpUserSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - null - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(CorpUserSettings.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(TEST_USER_URN, null); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.any(CorpUserSettings.class), + Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -148,17 +154,13 @@ public void testUpdateCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - Urn user, - CorpUserSettings existingSettings - ) { + private static SettingsService initSettingsService(Urn user, CorpUserSettings existingSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getCorpUserSettings( - Mockito.eq(user), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getCorpUserSettings(Mockito.eq(user), Mockito.any(Authentication.class))) .thenReturn(existingSettings); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java index 4e2283735b8c9..8f96eae9480f8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,10 +17,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class GlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -25,9 +24,7 @@ public class GlobalViewsSettingsResolverTest { @Test public void testGetSuccessNullSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); 
     GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService);
 
     QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString());
@@ -42,9 +39,7 @@ public void testGetSuccessNullSettings() throws Exception {
 
   @Test
   public void testGetSuccessEmptySettings() throws Exception {
-    SettingsService mockService = initSettingsService(
-        new GlobalViewsSettings()
-    );
+    SettingsService mockService = initSettingsService(new GlobalViewsSettings());
     GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService);
 
     QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString());
@@ -53,16 +48,13 @@ public void testGetSuccessEmptySettings() throws Exception {
     com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get();
 
-    Assert.assertNull(
-        result.getDefaultView()
-    );
+    Assert.assertNull(result.getDefaultView());
   }
 
   @Test
   public void testGetSuccessExistingSettings() throws Exception {
-    SettingsService mockService = initSettingsService(
-        new GlobalViewsSettings().setDefaultView(TEST_URN)
-    );
+    SettingsService mockService =
+        initSettingsService(new GlobalViewsSettings().setDefaultView(TEST_URN));
     GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService);
 
     QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString());
@@ -71,17 +63,15 @@ public void testGetSuccessExistingSettings() throws Exception {
     com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get();
 
-    Assert.assertEquals(
-        result.getDefaultView(),
-        TEST_URN.toString()
-    );
+    Assert.assertEquals(result.getDefaultView(), TEST_URN.toString());
   }
 
   @Test
   public void testGetException() throws Exception {
     SettingsService mockService = Mockito.mock(SettingsService.class);
-    Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings(
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .getGlobalSettings(Mockito.any(Authentication.class));
 
     GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService);
 
@@ -94,9 +84,7 @@ public void testGetException() throws Exception {
 
   @Test
   public void testGetUnauthorized() throws Exception {
-    SettingsService mockService = initSettingsService(
-        new GlobalViewsSettings()
-    );
+    SettingsService mockService = initSettingsService(new GlobalViewsSettings());
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
     // Execute resolver
@@ -107,15 +95,12 @@ public void testGetUnauthorized() throws Exception {
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
 
-  private static SettingsService initSettingsService(
-      GlobalViewsSettings existingViewSettings
-  ) {
+  private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) {
     SettingsService mockService = Mockito.mock(SettingsService.class);
 
-    Mockito.when(mockService.getGlobalSettings(
-        Mockito.any(Authentication.class)))
+    Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class)))
         .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL));
 
     return mockService;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java
index 9ea3c223559cd..c0cc09052176d 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.settings.view;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -14,22 +17,15 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class UpdateGlobalViewsSettingsResolverTest {
 
   private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id");
-  private static final UpdateGlobalViewsSettingsInput TEST_INPUT = new UpdateGlobalViewsSettingsInput(
-      TEST_URN.toString()
-  );
+  private static final UpdateGlobalViewsSettingsInput TEST_INPUT =
+      new UpdateGlobalViewsSettingsInput(TEST_URN.toString());
 
   @Test
   public void testGetSuccessNoExistingSettings() throws Exception {
-    SettingsService mockService = initSettingsService(
-        null
-    );
+    SettingsService mockService = initSettingsService(null);
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
     QueryContext mockContext = getMockAllowContext();
@@ -39,16 +35,17 @@ public void testGetSuccessNoExistingSettings() throws Exception {
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings(
-        Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockService, Mockito.times(1))
+        .updateGlobalSettings(
+            Mockito.eq(
+                new GlobalSettingsInfo()
+                    .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))),
+            Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetSuccessNoDefaultView() throws Exception {
-    SettingsService mockService = initSettingsService(
-        new GlobalViewsSettings()
-    );
+    SettingsService mockService = initSettingsService(new GlobalViewsSettings());
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
     QueryContext mockContext = getMockAllowContext();
@@ -58,18 +55,20 @@ public void testGetSuccessNoDefaultView() throws Exception {
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings(
-        Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockService, Mockito.times(1))
+        .updateGlobalSettings(
+            Mockito.eq(
+                new GlobalSettingsInfo()
+                    .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))),
+            Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetSuccessExistingDefaultView() throws Exception {
-    SettingsService mockService = initSettingsService(
-        new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn(
-            "urn:li:dataHubView:otherView"
-        ))
-    );
+    SettingsService mockService =
+        initSettingsService(
+            new GlobalViewsSettings()
+                .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")));
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
     QueryContext mockContext = getMockAllowContext();
@@ -79,16 +78,20 @@ public void testGetSuccessExistingDefaultView() throws Exception {
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings(
-        Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockService, Mockito.times(1))
+        .updateGlobalSettings(
+            Mockito.eq(
+                new GlobalSettingsInfo()
+                    .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))),
+            Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetGlobalViewsSettingsException() throws Exception {
     SettingsService mockService = Mockito.mock(SettingsService.class);
-    Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings(
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .getGlobalSettings(Mockito.any(Authentication.class));
 
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
@@ -100,15 +103,13 @@ public void testGetGlobalViewsSettingsException() throws Exception {
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
 
-
   @Test
   public void testUpdateGlobalViewsSettingsException() throws Exception {
-    SettingsService mockService = initSettingsService(
-        new GlobalViewsSettings()
-    );
-    Mockito.doThrow(RuntimeException.class).when(mockService).updateGlobalSettings(
-        Mockito.any(GlobalSettingsInfo.class),
-        Mockito.any(Authentication.class));
+    SettingsService mockService = initSettingsService(new GlobalViewsSettings());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .updateGlobalSettings(
+            Mockito.any(GlobalSettingsInfo.class), Mockito.any(Authentication.class));
 
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
@@ -122,11 +123,13 @@ public void testUpdateGlobalViewsSettingsException() throws Exception {
 
   @Test
   public void testGetGlobalViewsSettingsNoSettingsException() throws Exception {
-    SettingsService mockService = initSettingsService(
-        null // Should never be null.
-    );
-    Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings(
-        Mockito.any(Authentication.class));
+    SettingsService mockService =
+        initSettingsService(
+            null // Should never be null.
+            );
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .getGlobalSettings(Mockito.any(Authentication.class));
 
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
@@ -140,9 +143,7 @@ public void testGetGlobalViewsSettingsNoSettingsException() throws Exception {
 
   @Test
   public void testGetUnauthorized() throws Exception {
-    SettingsService mockService = initSettingsService(
-        new GlobalViewsSettings()
-    );
+    SettingsService mockService = initSettingsService(new GlobalViewsSettings());
     UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService);
 
     // Execute resolver
@@ -154,15 +155,12 @@ public void testGetUnauthorized() throws Exception {
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
 
-  private static SettingsService initSettingsService(
-      GlobalViewsSettings existingViewSettings
-  ) {
+  private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) {
     SettingsService mockService = Mockito.mock(SettingsService.class);
 
-    Mockito.when(mockService.getGlobalSettings(
-        Mockito.any(Authentication.class)))
-        .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL));
+    Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class)))
+        .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL));
 
     return mockService;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java
index 8c4445452c564..db3e9afab7249 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.step;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -21,12 +26,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class BatchGetStepStatesResolverTest {
   private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test");
   private static final long TIME = 123L;
@@ -35,7 +34,8 @@ public class BatchGetStepStatesResolverTest {
   private static final String SECOND_STEP_STATE_ID = "2";
   private static final Urn FIRST_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:1");
   private static final Urn SECOND_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:2");
-  private static final Set<String> ASPECTS = ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME);
+  private static final Set<String> ASPECTS =
+      ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME);
   private EntityClient _entityClient;
   private BatchGetStepStatesResolver _resolver;
   private DataFetchingEnvironment _dataFetchingEnvironment;
@@ -68,15 +68,17 @@ public void testBatchGetStepStatesFirstStepCompleted() throws Exception {
         new DataHubStepStateProperties().setLastModified(AUDIT_STAMP);
 
     final Set<Urn> urns = ImmutableSet.of(FIRST_STEP_STATE_URN);
-    final Map<String, RecordTemplate> firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME,
-        firstStepStateProperties);
-    final Map<Urn, EntityResponse> entityResponseMap = ImmutableMap.of(FIRST_STEP_STATE_URN,
-        TestUtils.buildEntityResponse(firstAspectMap));
+    final Map<String, RecordTemplate> firstAspectMap =
+        ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties);
+    final Map<Urn, EntityResponse> entityResponseMap =
+        ImmutableMap.of(FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap));
 
-    when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
+    when(_entityClient.batchGetV2(
+            eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
         .thenReturn(entityResponseMap);
 
-    final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join();
+    final BatchGetStepStatesResult actualBatchResult =
+        _resolver.get(_dataFetchingEnvironment).join();
     assertNotNull(actualBatchResult);
     assertEquals(1, actualBatchResult.getResults().size());
   }
@@ -100,18 +102,21 @@ public void testBatchGetStepStatesBothStepsCompleted() throws Exception {
         new DataHubStepStateProperties().setLastModified(AUDIT_STAMP);
 
     final Set<Urn> urns = ImmutableSet.of(FIRST_STEP_STATE_URN, SECOND_STEP_STATE_URN);
-    final Map<String, RecordTemplate> firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME,
-        firstStepStateProperties);
-    final Map<String, RecordTemplate> secondAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME,
-        secondStepStateProperties);
-    final Map<Urn, EntityResponse> entityResponseMap = ImmutableMap.of(
-        FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap),
-        SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap));
-
-    when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
+    final Map<String, RecordTemplate> firstAspectMap =
+        ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties);
+    final Map<String, RecordTemplate> secondAspectMap =
+        ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, secondStepStateProperties);
+    final Map<Urn, EntityResponse> entityResponseMap =
+        ImmutableMap.of(
+            FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap),
+            SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap));
+
+    when(_entityClient.batchGetV2(
+            eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
         .thenReturn(entityResponseMap);
 
-    final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join();
+    final BatchGetStepStatesResult actualBatchResult =
+        _resolver.get(_dataFetchingEnvironment).join();
     assertNotNull(actualBatchResult);
     assertEquals(2, actualBatchResult.getResults().size());
   }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java
index 5f20a11f15ac6..b457498cc547a 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.step;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -16,11 +20,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class BatchUpdateStepStatesResolverTest {
   private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test");
   private static final String FIRST_STEP_STATE_ID = "1";
@@ -52,7 +51,8 @@ public void testBatchUpdateStepStatesFirstStepCompleted() throws Exception {
     input.setStates(ImmutableList.of(firstInput));
     when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input);
 
-    final BatchUpdateStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join();
+    final BatchUpdateStepStatesResult actualBatchResult =
+        _resolver.get(_dataFetchingEnvironment).join();
     assertNotNull(actualBatchResult);
     assertEquals(1, actualBatchResult.getResults().size());
     verify(_entityClient, times(1)).ingestProposal(any(), eq(_authentication), eq(false));
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java
index 268d6a6bc4268..340802cde467b 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.tag;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.GlobalTags;
@@ -20,14 +24,10 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class AddTagsResolverTest {
 
-  private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
   private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1";
   private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2";
 
@@ -35,11 +35,12 @@ public class AddTagsResolverTest {
   public void testGetSuccessNoExistingTags() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
-        .thenReturn(null);
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true);
@@ -50,46 +51,51 @@ public void testGetSuccessNoExistingTags() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTagsInput input = new AddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTagsInput input =
+        new AddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))
-    )));
+    final GlobalTags newTags =
+        new GlobalTags()
+            .setTags(
+                new TagAssociationArray(
+                    ImmutableList.of(
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))));
 
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN),
-        GLOBAL_TAGS_ASPECT_NAME, newTags);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags);
 
     verifyIngestProposal(mockService, 1, proposal);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)));
   }
 
   @Test
   public void testGetSuccessExistingTags() throws Exception {
-    GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))
-    );
+    GlobalTags originalTags =
+        new GlobalTags()
+            .setTags(
+                new TagAssociationArray(
+                    ImmutableList.of(
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))));
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
-        .thenReturn(originalTags);
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(originalTags);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true);
@@ -100,41 +106,43 @@ public void testGetSuccessExistingTags() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTagsInput input = new AddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTagsInput input =
+        new AddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))
-    )));
+    final GlobalTags newTags =
+        new GlobalTags()
+            .setTags(
+                new TagAssociationArray(
+                    ImmutableList.of(
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))));
 
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN),
-        GLOBAL_TAGS_ASPECT_NAME, newTags);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags);
 
     verifyIngestProposal(mockService, 1, proposal);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)));
   }
 
   @Test
   public void testGetFailureTagDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -145,9 +153,8 @@ public void testGetFailureTagDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTagsInput input = new AddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTagsInput input =
+        new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -159,10 +166,11 @@ public void testGetFailureTagDoesNotExist() throws Exception {
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
@@ -173,9 +181,8 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTagsInput input = new AddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTagsInput input =
+        new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -191,9 +198,8 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTagsInput input = new AddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTagsInput input =
+        new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -206,21 +212,21 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.eq(false));
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
 
     AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class));
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    AddTagsInput input = new AddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTagsInput input =
+        new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java
index 651b89359c83f..71354627b1145 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.tag;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.GlobalTags;
@@ -18,21 +22,17 @@
 import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.List;
 import java.util.concurrent.CompletionException;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class BatchAddTagsResolverTest {
 
-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
   private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1";
   private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2";
 
@@ -40,19 +40,20 @@ public class BatchAddTagsResolverTest {
   public void testGetSuccessNoExistingTags() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
-
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
 
@@ -64,55 +65,63 @@ public void testGetSuccessNoExistingTags() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTagsInput input =
+        new BatchAddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))
-    )));
-
-    final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1),
-        GLOBAL_TAGS_ASPECT_NAME, newTags);
-    final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2),
-        GLOBAL_TAGS_ASPECT_NAME, newTags);
+    final GlobalTags newTags =
+        new GlobalTags()
+            .setTags(
+                new TagAssociationArray(
+                    ImmutableList.of(
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))));
+
+    final MetadataChangeProposal proposal1 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags);
+    final MetadataChangeProposal proposal2 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags);
 
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)));
   }
 
   @Test
   public void testGetSuccessExistingTags() throws Exception {
-    GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))
-    );
+    GlobalTags originalTags =
+        new GlobalTags()
+            .setTags(
+                new TagAssociationArray(
+                    ImmutableList.of(
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))));
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalTags);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalTags);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -126,45 +135,49 @@ public void testGetSuccessExistingTags() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTagsInput input =
+        new BatchAddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
-    final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
-        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))
-    )));
-
-    final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1),
-        GLOBAL_TAGS_ASPECT_NAME, newTags);
-    final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2),
-        GLOBAL_TAGS_ASPECT_NAME, newTags);
+    final GlobalTags newTags =
+        new GlobalTags()
+            .setTags(
+                new TagAssociationArray(
+                    ImmutableList.of(
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)),
+                        new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))));
+
+    final MetadataChangeProposal proposal1 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags);
+    final MetadataChangeProposal proposal2 =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags);
 
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)));
   }
 
   @Test
   public void testGetFailureTagDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -175,33 +188,36 @@ public void testGetFailureTagDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
+    BatchAddTagsInput input =
+        new BatchAddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN),
+            ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
 
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
   }
 
   @Test
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
@@ -213,19 +229,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTagsInput input =
+        new BatchAddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
 
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
   }
 
   @Test
@@ -236,42 +254,47 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTagsInput input =
+        new BatchAddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
 
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.verify(mockService, Mockito.times(0))
        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
 
     BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
+    BatchAddTagsInput input =
+        new BatchAddTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN),
+            ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java
index f302540eba904..8cd10afee293e 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.tag;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.GlobalTags;
@@ -26,15 +30,12 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class BatchRemoveTagsResolverTest {
 
-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
   private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1";
   private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2";
 
@@ -42,15 +43,17 @@ public class BatchRemoveTagsResolverTest {
   public void testGetSuccessNoExistingTags() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -64,22 +67,25 @@ public void testGetSuccessNoExistingTags() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of(
-        TEST_TAG_1_URN,
-        TEST_TAG_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveTagsInput input =
+        new BatchRemoveTagsInput(
+            ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); proposal2.setEntityType(Constants.DATASET_ENTITY_NAME); proposal2.setAspectName(Constants.GLOBAL_TAGS_ASPECT_NAME); @@ -93,25 +99,33 @@ public void testGetSuccessNoExistingTags() throws Exception { public void testGetSuccessExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - final GlobalTags oldTags1 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags1 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags1); - final GlobalTags oldTags2 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags2 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -125,22 +139,25 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new 
BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -149,15 +166,17 @@ public void testGetSuccessExistingTags() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -169,19 +188,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + 
Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -192,44 +213,49 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java index f801daf4f2a3f..dac7104ca2930 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java @@ -1,39 +1,36 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.CreateTagInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.tag.TagProperties; import com.linkedin.metadata.key.TagKey; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.tag.TagProperties; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class CreateTagResolverTest { - private static final CreateTagInput TEST_INPUT = new CreateTagInput( - "test-id", - "test-name", - "test-description" - ); + private static final CreateTagInput TEST_INPUT = + new CreateTagInput("test-id", "test-name", "test-description"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal(Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) .thenReturn(String.format("urn:li:tag:%s", TEST_INPUT.getId())); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); @@ -50,15 +47,13 @@ public void testGetSuccess() throws Exception { TagProperties props = new TagProperties(); props.setDescription("test-description"); props.setName("test-name"); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, props); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, props); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -75,9 +70,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -85,10 +79,9 @@ public void testGetEntityClientException() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); // Execute resolver @@ -99,4 +92,4 @@ public void testGetEntityClientException() 
throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java index b01ac1a9b14ae..11dfad43d5731 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTagResolverTest { private static final String TEST_URN = "urn:li:tag:test-id"; @@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index b5bbf0775a8ba..6ae72fcbb7268 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,11 +27,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetTagColorResolverTest { private static final String TEST_ENTITY_URN = "urn:li:tag:test-tag"; @@ -41,10 +40,11 @@ public void testGetSuccessExistingProperties() throws Exception { // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - Mockito.when(mockService.getAspect( - 
Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTagProperties); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -59,19 +59,17 @@ public void testGetSuccessExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final TagProperties newTagProperties = new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - TAG_PROPERTIES_ASPECT_NAME, newTagProperties); + final TagProperties newTagProperties = + new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), TAG_PROPERTIES_ASPECT_NAME, newTagProperties); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -81,10 +79,11 @@ public void testGetFailureNoExistingProperties() throws Exception { EntityService mockService = getMockEntityService(); // Test setting the domain - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0))) + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -99,9 +98,8 @@ public void testGetFailureNoExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -111,21 +109,26 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - final EnvelopedAspect oldTagPropertiesAspect = new EnvelopedAspect() - .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) - .setValue(new Aspect(oldTagProperties.data())); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.TAG_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.TAG_ENTITY_NAME) - 
.setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.TAG_PROPERTIES_ASPECT_NAME, - oldTagPropertiesAspect))))); + final EnvelopedAspect oldTagPropertiesAspect = + new EnvelopedAspect() + .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) + .setValue(new Aspect(oldTagProperties.data())); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.TAG_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.TAG_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -139,9 +142,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("colorHex"))).thenReturn(TEST_COLOR_HEX); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -159,18 +161,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetTagColorResolver resolver = new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + SetTagColorResolver resolver = + new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -181,4 +183,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index 213d21fd35dc1..cb827a42333b2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; 
+import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -19,13 +22,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddTermsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -33,11 +33,12 @@ public class AddTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); @@ -48,41 +49,42 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)))) - ); + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); 
EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -94,37 +96,34 @@ public void testGetSuccessExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test public void testGetFailureTermDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -135,26 +134,28 @@ public void testGetFailureTermDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public 
void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -165,16 +166,17 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -185,38 +187,41 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 8887bb452b478..7df19fad52689 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_GLOSSARY_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_GLOSSARY_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -35,123 +36,134 @@ public class BatchAddTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new 
BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN)))) - ); + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of( - TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new 
BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(false); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -163,31 +175,35 @@ public void testGetFailureTagDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -203,11 +219,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -220,21 +237,25 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index 
995a4acb8a467..659ce40542a9c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -35,15 +36,17 @@ public class BatchRemoveTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -57,12 +60,12 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -74,25 +77,36 @@ public void testGetSuccessNoExistingTerms() throws Exception { public void testGetSuccessExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - final GlossaryTerms oldTerms1 = new GlossaryTerms().setTerms(new 
GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + final GlossaryTerms oldTerms1 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTerms1); - final GlossaryTerms oldTerms2 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + final GlossaryTerms oldTerms2 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTerms2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -106,12 +120,12 @@ public void testGetSuccessExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -123,15 +137,17 @@ public void testGetSuccessExistingTerms() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) 
.thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -143,12 +159,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -164,12 +180,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -182,24 +198,27 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java index 911152d8c97c1..adf4b1c29ad0d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestInput; @@ -19,19 +22,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestResolverTest { - private static final CreateTestInput TEST_INPUT = new CreateTestInput( - "test-id", - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final CreateTestInput TEST_INPUT = + new CreateTestInput( + "test-id", + "test-name", + "test-category", + "test-description", + new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -50,16 +49,21 @@ public void testGetSuccess() throws Exception { final TestKey key = new TestKey(); key.setId("test-id"); - ArgumentCaptor<MetadataChangeProposal> proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor<MetadataChangeProposal> proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal( + proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityKeyAspect(), GenericRecordUtils.serializeAspect(key)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -80,19 +84,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - 
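// Editorial sketch (not part of the patch): the ArgumentCaptor idiom used in
// testGetSuccess above, reduced to a self-contained Mockito + TestNG example.
// The Sink type and its accept method are hypothetical; only the
// capture-then-assert mechanics mirror the resolver test.
import static org.testng.Assert.assertEquals;

import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.testng.annotations.Test;

public class ArgumentCaptorSketch {
  interface Sink {
    void accept(String value);
  }

  @Test
  public void capturesTheArgumentTheMockReceived() {
    Sink mockSink = Mockito.mock(Sink.class);
    mockSink.accept("hello");

    // Capture what the mock was actually called with, then assert on it,
    // exactly as the test above does with MetadataChangeProposal.
    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
    Mockito.verify(mockSink, Mockito.times(1)).accept(captor.capture());
    assertEquals(captor.getValue(), "hello");
  }
}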
Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateTestResolver resolver = new CreateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java index 6a449e3c4c4c4..1c4973871af09 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; @@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java index 5026e015039e1..6075425d09c05 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,37 +21,34 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListTestsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("test", "test-id"); - private static final ListTestsInput TEST_INPUT = new ListTestsInput( - 0, 20, null - ); + private static final ListTestsInput TEST_INPUT = new ListTestsInput(0, 20, null); @Test public void testGetSuccess() throws Exception { // Create 
resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.TEST_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(Collections.emptyMap()), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.TEST_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(Collections.emptyMap()), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListTestsResolver resolver = new ListTestsResolver(mockClient); @@ -75,33 +75,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); ListTestsResolver resolver = new ListTestsResolver(mockClient); // Execute resolver @@ -112,4 +114,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java index ae24232bce17c..45e0126367578 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.test; 
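// Editorial sketch (not part of the patch): the search stubs above hand-build the
// same one-entity SearchResult each time. A tiny helper like this would name that
// shared shape; the SearchStubs class is hypothetical, and the imports are assumed
// to match the ones already used by ListTestsResolverTest.
import com.google.common.collect.ImmutableSet;
import com.linkedin.common.urn.Urn;
import com.linkedin.metadata.search.SearchEntity;
import com.linkedin.metadata.search.SearchEntityArray;
import com.linkedin.metadata.search.SearchResult;

final class SearchStubs {
  private SearchStubs() {}

  /** Builds the single-entity page returned by the mocked EntityClient.search calls. */
  static SearchResult singleEntityResult(Urn urn) {
    return new SearchResult()
        .setFrom(0)
        .setPageSize(1)
        .setNumEntities(1)
        .setEntities(
            new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(urn))));
  }
}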
+import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.datahub.graphql.generated.TestDefinitionInput; +import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; @@ -19,19 +22,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; - private static final UpdateTestInput TEST_INPUT = new UpdateTestInput( - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final UpdateTestInput TEST_INPUT = + new UpdateTestInput( + "test-name", "test-category", "test-description", new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -48,16 +44,21 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - ArgumentCaptor<MetadataChangeProposal> proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor<MetadataChangeProposal> proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal( + proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityUrn(), UrnUtils.getUrn(TEST_URN)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -79,18 +80,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Update resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateTestResolver 
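// Editorial sketch (not part of the patch): the testGetSuccess assertions above
// depend on the GenericRecordUtils round trip. Schematically, for a
// RecordTemplate aspect such as TestInfo (the GenericAspect return/field types
// are assumed from the calls visible in the test):
//
//   com.linkedin.mxe.GenericAspect serialized = GenericRecordUtils.serializeAspect(testInfo);
//   TestInfo roundTripped =
//       GenericRecordUtils.deserializeAspect(
//           serialized.getValue(), serialized.getContentType(), TestInfo.class);
//   assertEquals(roundTripped, testInfo);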
resolver = new UpdateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java index 2164d4160634c..742e162963ea3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateNativeUserResetTokenResolverTest { private static final String RESET_TOKEN = "resetToken"; @@ -47,7 +46,8 @@ public void testFailsNullUserUrn() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(null); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); } @@ -59,7 +59,8 @@ public void testPasses() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(USER_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertEquals(RESET_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getResetToken()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java index 0957acf0cbbb3..15864dc3ac925 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -15,6 +18,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import 
com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -23,34 +28,35 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateViewResolverTest { - private static final CreateViewInput TEST_INPUT = new CreateViewInput( - DataHubViewType.PERSONAL, - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final CreateViewInput TEST_INPUT = + new CreateViewInput( + DataHubViewType.PERSONAL, + "test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); @@ -71,37 +77,59 @@ public void testGetSuccess() throws Exception { assertEquals(view.getDescription(), TEST_INPUT.getDescription()); assertEquals(view.getViewType(), TEST_INPUT.getViewType()); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - assertEquals(view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); - assertEquals(view.getDefinition().getFilter().getOperator(), TEST_INPUT.getDefinition().getFilter().getOperator()); - assertEquals(view.getDefinition().getFilter().getFilters().size(), TEST_INPUT.getDefinition().getFilter().getFilters().size()); - - Mockito.verify(mockService, Mockito.times(1)).createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. 
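// Editorial sketch (not part of the patch): the expected Filter verified above
// encodes one rule per FacetFilterInput -- the field name gains a ".keyword"
// suffix, all values go into setValues(), and setValue() repeats the first value
// purely for backwards compatibility (the "Unfortunate" comments in the test).
// A minimal sketch of that rule; the CriterionSketch/toCriterion names are
// hypothetical:
import com.linkedin.data.template.StringArray;
import com.linkedin.metadata.query.filter.Condition;
import com.linkedin.metadata.query.filter.Criterion;
import java.util.List;

final class CriterionSketch {
  private CriterionSketch() {}

  static Criterion toCriterion(
      String field, List<String> values, boolean negated, Condition condition) {
    return new Criterion()
        .setField(field + ".keyword") // keyword mapping, as in the expected Filter
        .setValue(values.get(0)) // first value only, kept for backwards compat
        .setValues(new StringArray(values))
        .setNegated(negated)
        .setCondition(condition);
  }
}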
- .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + assertEquals( + view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); + assertEquals( + view.getDefinition().getFilter().getOperator(), + TEST_INPUT.getDefinition().getFilter().getOperator()); + assertEquals( + view.getDefinition().getFilter().getFilters().size(), + TEST_INPUT.getDefinition().getFilter().getFilters().size()); + + Mockito.verify(mockService, Mockito.times(1)) + .createView( + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -118,22 +146,23 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createView( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createView( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); CreateViewResolver resolver = new CreateViewResolver(mockService); @@ -148,14 +177,15 @@ public void testGetViewServiceException() throws Exception { private ViewService initMockService() { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_VIEW_URN); + Mockito.when( + service.createView( + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_VIEW_URN); return service; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java index afb4c16767f47..357f2119187d6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -17,10 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -40,10 +39,8 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -60,10 +57,8 @@ public void testGetSuccessGlobalViewCanManager() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -79,13 +74,10 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } - @Test public void testGetSuccessPersonalViewIsCreator() throws Exception { ViewService mockService = initViewService(DataHubViewType.PERSONAL); @@ -99,10 +91,8 @@ public void testGetSuccessPersonalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -118,19 +108,17 @@ public void testGetFailurePersonalViewIsNotCreator() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteView( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + 
.deleteView(Mockito.any(), Mockito.any(Authentication.class)); DeleteViewResolver resolver = new DeleteViewResolver(mockService); @@ -146,19 +134,21 @@ public void testGetViewServiceException() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java index 9a25c9eb1d25c..8c30c17201bc6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -28,53 +31,53 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListGlobalViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput( - 0, 20, "" - ); + private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput(0, 20, ""); @Test public void testGetSuccessInput() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new 
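// Editorial sketch (not part of the patch): the doThrow(...).when(mock).method(...)
// form used above is the Mockito style that also works when the stubbed method is
// void, where when(mock.method(...)).thenThrow(...) would not compile. A minimal
// illustration; the Repo type and its delete method are hypothetical:
import org.mockito.Mockito;

public class DoThrowSketch {
  interface Repo {
    void delete(String id);
  }

  public void demo() {
    Repo repo = Mockito.mock(Repo.class);
    // The exception is configured first, then the method is invoked on the
    // when(...) proxy rather than inside a when(...) call.
    Mockito.doThrow(RuntimeException.class).when(repo).delete("id-1");
  }
}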
StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListGlobalViewsResolver resolver = new ListGlobalViewsResolver(mockClient); @@ -107,7 +110,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -121,7 +125,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -139,4 +145,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java index 4c43584144825..85e20cd656fcd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -27,63 +30,65 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListMyViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListMyViewsInput TEST_INPUT_1 = new ListMyViewsInput( - 0, 20, "", DataHubViewType.GLOBAL - ); + private static final ListMyViewsInput TEST_INPUT_1 = + new ListMyViewsInput(0, 20, "", DataHubViewType.GLOBAL); - private static final 
ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput( - 0, 20, "", null - ); + private static final ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput(0, 20, "", null); @Test public void testGetSuccessInput1() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false), + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false), new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -106,35 +111,41 @@ public void testGetSuccessInput2() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new 
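// Editorial sketch (not part of the patch): the nested Filter stubbed above reads
// as OR-of-ANDs -- Filter.setOr(...) holds ConjunctiveCriterion entries, and each
// entry ANDs the Criterion objects in its CriterionArray. Schematically, for the
// createdBy + type case (createdByCriterion and typeCriterion are hypothetical
// local names for Criterion instances built as in the stub):
//
//   Filter filter =
//       new Filter()
//           .setOr(
//               new ConjunctiveCriterionArray(
//                   ImmutableList.of(
//                       new ConjunctiveCriterion()
//                           .setAnd(
//                               new CriterionArray(
//                                   ImmutableList.of(createdByCriterion, typeCriterion))))));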
SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -165,7 +176,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -179,7 +191,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -197,4 +211,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java index b4895982ae780..1917e55705828 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -38,30 +41,33 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateViewInput TEST_INPUT = new UpdateViewInput( - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final UpdateViewInput TEST_INPUT = + new UpdateViewInput( + 
"test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); @Test public void testGetSuccessGlobalViewIsCreator() throws Exception { @@ -81,33 +87,50 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -128,46 +151,65 @@ public void testGetSuccessGlobalViewManageGlobalViews() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test public void testGetViewServiceException() throws Exception { // Update resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateView( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateView( + Mockito.any(Urn.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateViewResolver resolver = new UpdateViewResolver(mockService); @@ -196,43 +238,46 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); - Mockito.when(mockService.getViewEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getViewEntityResponse( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testEntityResponse); return mockService; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java index 9578ff201ca19..3ad3f0786e987 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -25,12 +28,8 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.Assert; -import org.testng.annotations.Test; import org.mockito.Mockito; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - +import org.testng.annotations.Test; public class ViewUtilsTest { @@ -39,10 +38,10 @@ public class ViewUtilsTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); - @Test public static void testCanCreatePersonalViewAllowed() { - boolean res = ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); + boolean res = + ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); Assert.assertTrue(res); } @@ -67,10 +66,8 @@ public void testCanUpdateViewSuccessGlobalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -80,10 +77,8 @@ public void testCanUpdateViewSuccessGlobalViewCanManageGlobalViews() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -93,10 +88,8 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -106,10 +99,8 @@ public void testGetSuccessPersonalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -119,50 +110,69 @@ public void testGetFailurePersonalViewIsNotCreator() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, 
Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test public void testMapDefinition() throws Exception { - DataHubViewDefinitionInput input = new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.IN), - new FacetFilterInput("test2", null, ImmutableList.of("value3", "value4"), true, FilterOperator.CONTAIN) - ) - ) - ); - - DataHubViewDefinition expectedResult = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion() - .setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setValue("value1") // Disgraceful - .setField("test1.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.IN), - new Criterion() - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("value3", "value4"))) - .setValue("value3") // Disgraceful - .setField("test2.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.CONTAIN) - )) - ) - ) - )) - ); + DataHubViewDefinitionInput input = + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.IN), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value3", "value4"), + true, + FilterOperator.CONTAIN)))); + + DataHubViewDefinition expectedResult = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setNegated(false) + .setValues( + new StringArray( + ImmutableList.of("value1", "value2"))) + .setValue("value1") // Disgraceful + .setField( + "test1.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. + .setCondition(Condition.IN), + new Criterion() + .setNegated(true) + .setValues( + new StringArray( + ImmutableList.of("value3", "value4"))) + .setValue("value3") // Disgraceful + .setField( + "test2.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. 
+                                                    .setCondition(Condition.CONTAIN))))))));
 
     assertEquals(ViewUtils.mapDefinition(input), expectedResult);
   }
@@ -170,17 +180,20 @@ public void testMapDefinition() throws Exception {
 
   private static ViewService initViewService(DataHubViewType viewType) {
     ViewService mockService = Mockito.mock(ViewService.class);
 
-    DataHubViewInfo testInfo = new DataHubViewInfo()
-        .setType(viewType)
-        .setName("test-name")
-        .setDescription("test-description")
-        .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
-        .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
-        .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter()));
-
-    Mockito.when(mockService.getViewInfo(
-        Mockito.eq(TEST_VIEW_URN),
-        Mockito.any(Authentication.class)))
+    DataHubViewInfo testInfo =
+        new DataHubViewInfo()
+            .setType(viewType)
+            .setName("test-name")
+            .setDescription("test-description")
+            .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
+            .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
+            .setDefinition(
+                new DataHubViewDefinition()
+                    .setEntityTypes(new StringArray())
+                    .setFilter(new Filter()));
+
+    Mockito.when(
+            mockService.getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)))
         .thenReturn(testInfo);
 
     return mockService;
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java
index c4465c7d3cb65..c975c7ebb0507 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.assertion;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -28,27 +30,25 @@
 import java.util.List;
 import java.util.Map;
 import org.mockito.Mockito;
-
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
 public class AssertionTypeTest {
 
   private static final String TEST_ASSERTION_URN = "urn:li:assertion:guid-1";
-  private static final AssertionKey TEST_ASSERTION_KEY = new AssertionKey()
-      .setAssertionId("guid-1");
-  private static final AssertionInfo TEST_ASSERTION_INFO = new AssertionInfo()
-      .setType(AssertionType.DATASET)
-      .setDatasetAssertion(null, SetMode.IGNORE_NULL)
-      .setCustomProperties(new StringMap());
-  private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = new DataPlatformInstance()
-      .setPlatform(new DataPlatformUrn("snowflake"))
-      .setInstance(null, SetMode.IGNORE_NULL);
+  private static final AssertionKey TEST_ASSERTION_KEY =
+      new AssertionKey().setAssertionId("guid-1");
+  private static final AssertionInfo TEST_ASSERTION_INFO =
+      new AssertionInfo()
+          .setType(AssertionType.DATASET)
+          .setDatasetAssertion(null, SetMode.IGNORE_NULL)
+          .setCustomProperties(new StringMap());
+  private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE =
+      new DataPlatformInstance()
+          .setPlatform(new DataPlatformUrn("snowflake"))
+          .setInstance(null, SetMode.IGNORE_NULL);
 
   private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:guid-2";
 
-
   @Test
   public void testBatchLoad() throws Exception {
@@ -60,41 +60,43 @@ public void testBatchLoad() throws Exception {
     Map<String, EnvelopedAspect> assertion1Aspects = new HashMap<>();
     assertion1Aspects.put(
         Constants.ASSERTION_KEY_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data())));
     assertion1Aspects.put(
         Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data())));
     assertion1Aspects.put(
         Constants.ASSERTION_INFO_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data()))
-    );
-    Mockito.when(client.batchGetV2(
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))),
-        Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(
-            assertionUrn1,
-            new EntityResponse()
-                .setEntityName(Constants.ASSERTION_ENTITY_NAME)
-                .setUrn(assertionUrn1)
-                .setAspects(new EnvelopedAspectMap(assertion1Aspects))));
-
-    com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(client);
+        new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data())));
+    Mockito.when(
+            client.batchGetV2(
+                Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))),
+                Mockito.eq(
+                    com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                assertionUrn1,
+                new EntityResponse()
+                    .setEntityName(Constants.ASSERTION_ENTITY_NAME)
+                    .setUrn(assertionUrn1)
+                    .setAspects(new EnvelopedAspectMap(assertion1Aspects))));
+
+    com.linkedin.datahub.graphql.types.assertion.AssertionType type =
+        new com.linkedin.datahub.graphql.types.assertion.AssertionType(client);
 
     QueryContext mockContext = Mockito.mock(QueryContext.class);
     Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
-    List<DataFetcherResult<Assertion>> result = type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext);
+    List<DataFetcherResult<Assertion>> result =
+        type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext);
 
     // Verify response
-    Mockito.verify(client, Mockito.times(1)).batchGetV2(
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)),
-        Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(client, Mockito.times(1))
+        .batchGetV2(
+            Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)),
+            Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH),
+            Mockito.any(Authentication.class));
 
     assertEquals(result.size(), 2);
 
@@ -112,17 +114,21 @@ public void testBatchLoad() throws Exception {
   @Test
   public void testBatchLoadClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.anyString(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
-    com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient);
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.anyString(),
+            Mockito.anySet(),
+            Mockito.anySet(),
+            Mockito.any(Authentication.class));
+    com.linkedin.datahub.graphql.types.assertion.AssertionType type =
+        new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient);
 
     // Execute Batch load
     QueryContext context = Mockito.mock(QueryContext.class);
     Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
-    assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2),
-        context));
+    assertThrows(
+        RuntimeException.class,
+        () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), context));
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java
index 3ff4e43ca112c..1e2acd0db455c 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java
@@ -1,6 +1,7 @@
-
 package com.linkedin.datahub.graphql.types.container;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -26,12 +27,12 @@
 import com.linkedin.common.urn.GlossaryTermUrn;
 import com.linkedin.common.urn.TagUrn;
 import com.linkedin.common.urn.Urn;
+import com.linkedin.container.ContainerProperties;
 import com.linkedin.container.EditableContainerProperties;
 import com.linkedin.data.template.StringArray;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.Container;
 import com.linkedin.datahub.graphql.generated.EntityType;
-import com.linkedin.container.ContainerProperties;
 import com.linkedin.entity.Aspect;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspect;
@@ -46,46 +47,55 @@
 import java.util.List;
 import java.util.Map;
 import org.mockito.Mockito;
-
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
 public class ContainerTypeTest {
 
   private static final String TEST_CONTAINER_1_URN = "urn:li:container:guid-1";
-  private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey()
-      .setGuid("guid-1");
-  private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = new ContainerProperties()
-      .setDescription("test description")
-      .setName("Test Container");
-  private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = new EditableContainerProperties()
-      .setDescription("test editable description");
-  private static final Ownership TEST_CONTAINER_1_OWNERSHIP = new Ownership()
-      .setOwners(
-          new OwnerArray(ImmutableList.of(
-              new Owner()
-                  .setType(OwnershipType.DATAOWNER)
-                  .setOwner(Urn.createFromTuple("corpuser", "test")))));
-  private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory()
-      .setElements(
-          new InstitutionalMemoryMetadataArray(ImmutableList.of(
-              new InstitutionalMemoryMetadata()
-                  .setUrl(new Url("https://www.test.com"))
-                  .setDescription("test description")
-                  .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test"))))));
-  private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = new DataPlatformInstance()
-      .setPlatform(Urn.createFromTuple("dataPlatform", "mysql"));
-  private static final Status TEST_CONTAINER_1_STATUS = new Status()
-      .setRemoved(false);
-  private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = new SubTypes()
-      .setTypeNames(new StringArray(ImmutableList.of("Database")));
-  private static final GlobalTags TEST_CONTAINER_1_TAGS = new GlobalTags()
-      .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test")))));
-  private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = new GlossaryTerms()
-      .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term")))));
-  private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = new com.linkedin.container.Container()
-      .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container"));
+  private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey().setGuid("guid-1");
+  private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES =
+      new ContainerProperties().setDescription("test description").setName("Test Container");
+  private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES =
+      new EditableContainerProperties().setDescription("test editable description");
+  private static final Ownership TEST_CONTAINER_1_OWNERSHIP =
+      new Ownership()
+          .setOwners(
+              new OwnerArray(
+                  ImmutableList.of(
+                      new Owner()
+                          .setType(OwnershipType.DATAOWNER)
+                          .setOwner(Urn.createFromTuple("corpuser", "test")))));
+  private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY =
+      new InstitutionalMemory()
+          .setElements(
+              new InstitutionalMemoryMetadataArray(
+                  ImmutableList.of(
+                      new InstitutionalMemoryMetadata()
+                          .setUrl(new Url("https://www.test.com"))
+                          .setDescription("test description")
+                          .setCreateStamp(
+                              new AuditStamp()
+                                  .setTime(0L)
+                                  .setActor(Urn.createFromTuple("corpuser", "test"))))));
+  private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE =
+      new DataPlatformInstance().setPlatform(Urn.createFromTuple("dataPlatform", "mysql"));
+  private static final Status TEST_CONTAINER_1_STATUS = new Status().setRemoved(false);
+  private static final SubTypes TEST_CONTAINER_1_SUB_TYPES =
+      new SubTypes().setTypeNames(new StringArray(ImmutableList.of("Database")));
+  private static final GlobalTags TEST_CONTAINER_1_TAGS =
+      new GlobalTags()
+          .setTags(
+              new TagAssociationArray(
+                  ImmutableList.of(new TagAssociation().setTag(new TagUrn("test")))));
+  private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS =
+      new GlossaryTerms()
+          .setTerms(
+              new GlossaryTermAssociationArray(
+                  ImmutableList.of(
+                      new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term")))));
+  private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER =
+      new com.linkedin.container.Container()
+          .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container"));
 
   private static final String TEST_CONTAINER_2_URN = "urn:li:container:guid-2";
 
@@ -100,73 +110,65 @@ public void testBatchLoad() throws Exception {
     Map<String, EnvelopedAspect> container1Aspects = new HashMap<>();
     container1Aspects.put(
         Constants.CONTAINER_KEY_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data())));
     container1Aspects.put(
         Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data())));
     container1Aspects.put(
         Constants.CONTAINER_PROPERTIES_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data())));
     container1Aspects.put(
         Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data())));
     container1Aspects.put(
         Constants.OWNERSHIP_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data())));
     container1Aspects.put(
         Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data())));
     container1Aspects.put(
         Constants.SUB_TYPES_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data())));
     container1Aspects.put(
         Constants.STATUS_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data())));
     container1Aspects.put(
         Constants.GLOBAL_TAGS_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data())));
     container1Aspects.put(
         Constants.GLOSSARY_TERMS_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data()))
-    );
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data())));
     container1Aspects.put(
         Constants.CONTAINER_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data()))
-    );
-    Mockito.when(client.batchGetV2(
-        Mockito.eq(Constants.CONTAINER_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))),
-        Mockito.eq(ContainerType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(
-            containerUrn1,
-            new EntityResponse()
-                .setEntityName(Constants.CONTAINER_ENTITY_NAME)
-                .setUrn(containerUrn1)
-                .setAspects(new EnvelopedAspectMap(container1Aspects))));
+        new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data())));
+    Mockito.when(
+            client.batchGetV2(
+                Mockito.eq(Constants.CONTAINER_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))),
+                Mockito.eq(ContainerType.ASPECTS_TO_FETCH),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                containerUrn1,
+                new EntityResponse()
+                    .setEntityName(Constants.CONTAINER_ENTITY_NAME)
+                    .setUrn(containerUrn1)
+                    .setAspects(new EnvelopedAspectMap(container1Aspects))));
 
     ContainerType type = new ContainerType(client);
 
     QueryContext mockContext = Mockito.mock(QueryContext.class);
     Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
-    List<DataFetcherResult<Container>> result = type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext);
+    List<DataFetcherResult<Container>> result =
+        type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext);
 
     // Verify response
-    Mockito.verify(client, Mockito.times(1)).batchGetV2(
-        Mockito.eq(Constants.CONTAINER_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)),
-        Mockito.eq(ContainerType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(client, Mockito.times(1))
+        .batchGetV2(
+            Mockito.eq(Constants.CONTAINER_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)),
+            Mockito.eq(ContainerType.ASPECTS_TO_FETCH),
+            Mockito.any(Authentication.class));
 
     assertEquals(result.size(), 2);
 
@@ -177,8 +179,12 @@ public void testBatchLoad() throws Exception {
     assertEquals(container1.getProperties().getDescription(), "test description");
     assertEquals(container1.getProperties().getName(), "Test Container");
     assertEquals(container1.getInstitutionalMemory().getElements().size(), 1);
-    assertEquals(container1.getSubTypes().getTypeNames().get(0), TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0));
-    assertEquals(container1.getEditableProperties().getDescription(), TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription());
+    assertEquals(
+        container1.getSubTypes().getTypeNames().get(0),
+        TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0));
+    assertEquals(
+        container1.getEditableProperties().getDescription(),
+        TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription());
     assertEquals(
         container1.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(),
         TEST_CONTAINER_1_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString());
@@ -186,8 +192,7 @@ public void testBatchLoad() throws Exception {
         container1.getTags().getTags().get(0).getTag().getUrn(),
         TEST_CONTAINER_1_TAGS.getTags().get(0).getTag().toString());
     assertEquals(
-        container1.getContainer().getUrn(),
-        TEST_CONTAINER_1_CONTAINER.getContainer().toString());
+        container1.getContainer().getUrn(), TEST_CONTAINER_1_CONTAINER.getContainer().toString());
 
     // Assert second element is null.
     assertNull(result.get(1));
@@ -196,17 +201,21 @@ public void testBatchLoad() throws Exception {
   @Test
   public void testBatchLoadClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.anyString(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.anyString(),
+            Mockito.anySet(),
+            Mockito.anySet(),
+            Mockito.any(Authentication.class));
     ContainerType type = new ContainerType(mockClient);
 
     // Execute Batch load
     QueryContext context = Mockito.mock(QueryContext.class);
     Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
-    assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN),
-        context));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), context));
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java
index 9b6e11fd0b3a4..667d943b1095d 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java
@@ -1,22 +1,24 @@
 package com.linkedin.datahub.graphql.types.dataplatforminstance;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
+import com.linkedin.common.AuditStamp;
 import com.linkedin.common.Deprecation;
-import com.linkedin.common.Ownership;
-import com.linkedin.common.OwnerArray;
-import com.linkedin.common.Owner;
-import com.linkedin.common.OwnershipType;
+import com.linkedin.common.GlobalTags;
 import com.linkedin.common.InstitutionalMemory;
 import com.linkedin.common.InstitutionalMemoryMetadata;
 import com.linkedin.common.InstitutionalMemoryMetadataArray;
-import com.linkedin.common.AuditStamp;
-import com.linkedin.common.GlobalTags;
+import com.linkedin.common.Owner;
+import com.linkedin.common.OwnerArray;
+import com.linkedin.common.Ownership;
+import com.linkedin.common.OwnershipType;
+import com.linkedin.common.Status;
 import com.linkedin.common.TagAssociation;
 import com.linkedin.common.TagAssociationArray;
-import com.linkedin.common.Status;
 import com.linkedin.common.url.Url;
 import com.linkedin.common.urn.TagUrn;
 import com.linkedin.common.urn.Urn;
@@ -33,185 +35,181 @@
 import com.linkedin.metadata.key.DataPlatformInstanceKey;
 import com.linkedin.r2.RemoteInvocationException;
 import graphql.execution.DataFetcherResult;
-import org.mockito.Mockito;
-import org.testng.annotations.Test;
-
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-
-import static org.testng.Assert.*;
+import org.mockito.Mockito;
+import org.testng.annotations.Test;
 
 public class DataPlatformInstanceTest {
 
-    private static final Urn TEST_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test");
-
-    private static final String TEST_DATAPLATFORMINSTANCE_1_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)";
-
-    private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY
-        = new DataPlatformInstanceKey()
-        .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P"))
-        .setInstance("I1");
-
-    private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES
-        = new DataPlatformInstanceProperties()
-        .setDescription("test description")
-        .setName("Test Data Platform Instance");
-
-    private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = new Deprecation()
-        .setDeprecated(true)
-        .setActor(TEST_ACTOR_URN)
-        .setNote("legacy");
-
-    private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = new Ownership()
-        .setOwners(
-            new OwnerArray(ImmutableList.of(
-                new Owner()
-                    .setType(OwnershipType.DATAOWNER)
-                    .setOwner(TEST_ACTOR_URN))));
-
-    private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory()
-        .setElements(
-            new InstitutionalMemoryMetadataArray(ImmutableList.of(
-                new InstitutionalMemoryMetadata()
-                    .setUrl(new Url("https://www.test.com"))
-                    .setDescription("test description")
-                    .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)))));
-
-    private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = new GlobalTags()
-        .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test")))));
-
-    private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status()
-        .setRemoved(false);
-
-    private static final String TEST_DATAPLATFORMINSTANCE_2_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)";
-
-    @Test
-    public void testBatchLoad() throws Exception {
-        EntityClient client = Mockito.mock(EntityClient.class);
-
-        Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN);
-        Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN);
-
-        Map<String, EnvelopedAspect> dataPlatformInstance1Aspects = new HashMap<>();
-        dataPlatformInstance1Aspects.put(
-            Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data()))
-        );
-        dataPlatformInstance1Aspects.put(
-            Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data()))
-        );
-        dataPlatformInstance1Aspects.put(
-            Constants.DEPRECATION_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data()))
-        );
-        dataPlatformInstance1Aspects.put(
-            Constants.OWNERSHIP_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data()))
-        );
-        dataPlatformInstance1Aspects.put(
-            Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data()))
-        );
-        dataPlatformInstance1Aspects.put(
-            Constants.GLOBAL_TAGS_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data()))
-        );
-        dataPlatformInstance1Aspects.put(
-            Constants.STATUS_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data()))
-        );
-        Mockito.when(client.batchGetV2(
-            Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME),
-            Mockito.eq(new HashSet<>(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))),
-            Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH),
-            Mockito.any(Authentication.class)))
-            .thenReturn(ImmutableMap.of(
-                dataPlatformInstance1Urn,
-                new EntityResponse()
-                    .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME)
-                    .setUrn(dataPlatformInstance1Urn)
-                    .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects))));
-
-        DataPlatformInstanceType type = new DataPlatformInstanceType(client);
-
-        QueryContext mockContext = Mockito.mock(QueryContext.class);
-        Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
-        Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
-        List<DataFetcherResult<DataPlatformInstance>> result = type.batchLoad(
-            ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), mockContext);
-
-        // Verify response
-        Mockito.verify(client, Mockito.times(1)).batchGetV2(
+  private static final Urn TEST_ACTOR_URN =
+      Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test");
+
+  private static final String TEST_DATAPLATFORMINSTANCE_1_URN =
+      "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)";
+
+  private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY =
+      new DataPlatformInstanceKey()
+          .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P"))
+          .setInstance("I1");
+
+  private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES =
+      new DataPlatformInstanceProperties()
+          .setDescription("test description")
+          .setName("Test Data Platform Instance");
+
+  private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION =
+      new Deprecation().setDeprecated(true).setActor(TEST_ACTOR_URN).setNote("legacy");
+
+  private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP =
+      new Ownership()
+          .setOwners(
+              new OwnerArray(
+                  ImmutableList.of(
+                      new Owner().setType(OwnershipType.DATAOWNER).setOwner(TEST_ACTOR_URN))));
+
+  private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY =
+      new InstitutionalMemory()
+          .setElements(
+              new InstitutionalMemoryMetadataArray(
+                  ImmutableList.of(
+                      new InstitutionalMemoryMetadata()
+                          .setUrl(new Url("https://www.test.com"))
+                          .setDescription("test description")
+                          .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))));
+
+  private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS =
+      new GlobalTags()
+          .setTags(
+              new TagAssociationArray(
+                  ImmutableList.of(new TagAssociation().setTag(new TagUrn("test")))));
+
+  private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status().setRemoved(false);
+
+  private static final String TEST_DATAPLATFORMINSTANCE_2_URN =
+      "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)";
+
+  @Test
+  public void testBatchLoad() throws Exception {
+    EntityClient client = Mockito.mock(EntityClient.class);
+
+    Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN);
+    Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN);
+
+    Map<String, EnvelopedAspect> dataPlatformInstance1Aspects = new HashMap<>();
+    dataPlatformInstance1Aspects.put(
+        Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data())));
+    dataPlatformInstance1Aspects.put(
+        Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data())));
+    dataPlatformInstance1Aspects.put(
+        Constants.DEPRECATION_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data())));
+    dataPlatformInstance1Aspects.put(
+        Constants.OWNERSHIP_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data())));
+    dataPlatformInstance1Aspects.put(
+        Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME,
+        new EnvelopedAspect()
+            .setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data())));
+    dataPlatformInstance1Aspects.put(
+        Constants.GLOBAL_TAGS_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data())));
+    dataPlatformInstance1Aspects.put(
+        Constants.STATUS_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data())));
+    Mockito.when(
+            client.batchGetV2(
                Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME),
-                Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)),
+                Mockito.eq(
+                    new HashSet<>(
+                        ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))),
                 Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH),
-                Mockito.any(Authentication.class)
-        );
-
-        assertEquals(result.size(), 2);
-
-        DataPlatformInstance dataPlatformInstance1 = result.get(0).getData();
-        assertEquals(
-            dataPlatformInstance1.getUrn(),
-            TEST_DATAPLATFORMINSTANCE_1_URN
-        );
-        assertEquals(
-            dataPlatformInstance1.getType(),
-            EntityType.DATA_PLATFORM_INSTANCE
-        );
-        assertEquals(
-            dataPlatformInstance1.getProperties().getDescription(),
-            TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription()
-        );
-        assertEquals(
-            dataPlatformInstance1.getProperties().getName(),
-            TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName()
-        );
-        assertEquals(
-            dataPlatformInstance1.getDeprecation().getDeprecated(),
-            TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue()
-        );
-        assertEquals(
-            dataPlatformInstance1.getDeprecation().getNote(),
-            TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote()
-        );
-        assertEquals(
-            dataPlatformInstance1.getDeprecation().getActor(),
-            TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString()
-        );
-        assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1);
-        assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1);
-        assertEquals(
-            dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(),
-            TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString()
-        );
-        assertEquals(
-            dataPlatformInstance1.getStatus().getRemoved(),
-            TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue()
-        );
-
-        // Assert second element is null.
-        assertNull(result.get(1));
-    }
-
-    @Test
-    public void testBatchLoadClientException() throws Exception {
-        EntityClient mockClient = Mockito.mock(EntityClient.class);
-        Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-            Mockito.anyString(),
-            Mockito.anySet(),
-            Mockito.anySet(),
-            Mockito.any(Authentication.class));
-        com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type
-            = new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType(mockClient);
-
-        // Execute Batch load
-        QueryContext context = Mockito.mock(QueryContext.class);
-        Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
-        assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(
-            TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), context));
-    }
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                dataPlatformInstance1Urn,
+                new EntityResponse()
+                    .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME)
+                    .setUrn(dataPlatformInstance1Urn)
+                    .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects))));
+
+    DataPlatformInstanceType type = new DataPlatformInstanceType(client);
+
+    QueryContext mockContext = Mockito.mock(QueryContext.class);
+    Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
+    Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
+    List<DataFetcherResult<DataPlatformInstance>> result =
+        type.batchLoad(
+            ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN),
+            mockContext);
+
+    // Verify response
+    Mockito.verify(client, Mockito.times(1))
+        .batchGetV2(
+            Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)),
+            Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH),
+            Mockito.any(Authentication.class));
+
+    assertEquals(result.size(), 2);
+
+    DataPlatformInstance dataPlatformInstance1 = result.get(0).getData();
+    assertEquals(dataPlatformInstance1.getUrn(), TEST_DATAPLATFORMINSTANCE_1_URN);
+    assertEquals(dataPlatformInstance1.getType(), EntityType.DATA_PLATFORM_INSTANCE);
+    assertEquals(
+        dataPlatformInstance1.getProperties().getDescription(),
+        TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription());
+    assertEquals(
+        dataPlatformInstance1.getProperties().getName(),
+        TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName());
+    assertEquals(
+        dataPlatformInstance1.getDeprecation().getDeprecated(),
+        TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue());
+    assertEquals(
+        dataPlatformInstance1.getDeprecation().getNote(),
+        TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote());
+    assertEquals(
+        dataPlatformInstance1.getDeprecation().getActor(),
+        TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString());
+    assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1);
+    assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1);
+    assertEquals(
+        dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(),
+        TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString());
+    assertEquals(
+        dataPlatformInstance1.getStatus().getRemoved(),
+        TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue());
+
+    // Assert second element is null.
+    assertNull(result.get(1));
+  }
+
+  @Test
+  public void testBatchLoadClientException() throws Exception {
+    EntityClient mockClient = Mockito.mock(EntityClient.class);
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.anyString(),
+            Mockito.anySet(),
+            Mockito.anySet(),
+            Mockito.any(Authentication.class));
+    com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type =
+        new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType(
+            mockClient);
+
+    // Execute Batch load
+    QueryContext context = Mockito.mock(QueryContext.class);
+    Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            type.batchLoad(
+                ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN),
+                context));
+  }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java
index 3d22f1c429fd6..1959ae6d43208 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java
@@ -8,146 +8,165 @@
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.metadata.Constants;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
 import java.util.HashMap;
 import java.util.Map;
+import org.testng.Assert;
+import org.testng.annotations.Test;
 
 public class DatasetMapperTest {
 
-    private static final Urn TEST_DATASET_URN = Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test");
-    private static final Urn TEST_CREATED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created");
-    private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified");
-
-    @Test
-    public void testDatasetPropertiesMapperWithCreatedAndLastModified() {
-        final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties();
-        input.setName("Test");
-        input.setQualifiedName("Test QualifiedName");
-
-        final TimeStamp createdTimestamp = new TimeStamp();
-        createdTimestamp.setActor(TEST_CREATED_ACTOR_URN);
-        createdTimestamp.setTime(10L);
-        input.setCreated(createdTimestamp);
-
-        final TimeStamp lastModifiedTimestamp = new TimeStamp();
-        lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN);
-        lastModifiedTimestamp.setTime(20L);
-        input.setLastModified(lastModifiedTimestamp);
-
-        final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = new HashMap<>();
-        dataSetPropertiesAspects.put(
-            Constants.DATASET_PROPERTIES_ASPECT_NAME,
-            new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
-        final EntityResponse response = new EntityResponse()
-            .setEntityName(Constants.DATASET_ENTITY_NAME)
-            .setUrn(TEST_DATASET_URN)
-            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
-        final Dataset actual = DatasetMapper.map(response);
-
-
-        final Dataset expected = new Dataset();
-        expected.setUrn(TEST_DATASET_URN.toString());
-        final DatasetProperties expectedDatasetProperties = new DatasetProperties();
-        expectedDatasetProperties.setName("Test");
-        expectedDatasetProperties.setQualifiedName("Test QualifiedName");
-        expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString());
-        expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString());
-        expectedDatasetProperties.setLastModified(20L);
-        expectedDatasetProperties.setCreated(10L);
-        expected.setProperties(expectedDatasetProperties);
-
-        Assert.assertEquals(actual.getUrn(), expected.getUrn());
-        Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
-        Assert.assertEquals(actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName());
-
-        Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified());
-        Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated());
-
-        Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor());
-        Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor());
-
-    }
-
-    @Test
-    public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() {
-        final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties();
-        input.setName("Test");
-
-        final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = new HashMap<>();
-        dataSetPropertiesAspects.put(
-            Constants.DATASET_PROPERTIES_ASPECT_NAME,
-            new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
-        final EntityResponse response = new EntityResponse()
-            .setEntityName(Constants.DATASET_ENTITY_NAME)
-            .setUrn(TEST_DATASET_URN)
-            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
-        final Dataset actual = DatasetMapper.map(response);
-
-        final Dataset expected = new Dataset();
-        expected.setUrn(TEST_DATASET_URN.toString());
-        final DatasetProperties expectedDatasetProperties = new DatasetProperties();
-        expectedDatasetProperties.setName("Test");
-        expectedDatasetProperties.setLastModifiedActor(null);
-        expectedDatasetProperties.setCreatedActor(null);
-        expectedDatasetProperties.setLastModified(null);
-        expectedDatasetProperties.setCreated(null);
-        expected.setProperties(expectedDatasetProperties);
-
-        Assert.assertEquals(actual.getUrn(), expected.getUrn());
-        Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
-
-        Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified());
-        Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated());
-
-        Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor());
-        Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor());
-
-    }
-
-    @Test
-    public void testDatasetPropertiesMapperWithoutTimestampActors() {
-        final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties();
-        input.setName("Test");
-
-        TimeStamp createdTimestamp = new TimeStamp();
-        createdTimestamp.setTime(10L);
-        input.setCreated(createdTimestamp);
-
-        TimeStamp lastModifiedTimestamp = new TimeStamp();
-        lastModifiedTimestamp.setTime(20L);
-        input.setLastModified(lastModifiedTimestamp);
-
-        final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects = new HashMap<>();
-        dataSetPropertiesAspects.put(
-            Constants.DATASET_PROPERTIES_ASPECT_NAME,
-            new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
-        final EntityResponse response = new EntityResponse()
-            .setEntityName(Constants.DATASET_ENTITY_NAME)
-            .setUrn(TEST_DATASET_URN)
-            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
-        final Dataset actual = DatasetMapper.map(response);
-
-
-        final Dataset expected = new Dataset();
-        expected.setUrn(TEST_DATASET_URN.toString());
-        final DatasetProperties expectedDatasetProperties = new DatasetProperties();
-        expectedDatasetProperties.setName("Test");
-        expectedDatasetProperties.setLastModifiedActor(null);
-        expectedDatasetProperties.setCreatedActor(null);
-        expectedDatasetProperties.setLastModified(20L);
-        expectedDatasetProperties.setCreated(10L);
-        expected.setProperties(expectedDatasetProperties);
-
-        Assert.assertEquals(actual.getUrn(), expected.getUrn());
-        Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
-
-        Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified());
-        Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated());
-
-        Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor());
-        Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor());
-
-    }
+  private static final Urn TEST_DATASET_URN =
+      Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test");
+  private static final Urn TEST_CREATED_ACTOR_URN =
+      Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created");
+  private static final Urn TEST_LAST_MODIFIED_ACTOR_URN =
+      Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified");
+
+  @Test
+  public void testDatasetPropertiesMapperWithCreatedAndLastModified() {
+    final com.linkedin.dataset.DatasetProperties input =
+        new com.linkedin.dataset.DatasetProperties();
+    input.setName("Test");
+    input.setQualifiedName("Test QualifiedName");
+
+    final TimeStamp createdTimestamp = new TimeStamp();
+    createdTimestamp.setActor(TEST_CREATED_ACTOR_URN);
+    createdTimestamp.setTime(10L);
+    input.setCreated(createdTimestamp);
+
+    final TimeStamp lastModifiedTimestamp = new TimeStamp();
+    lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN);
+    lastModifiedTimestamp.setTime(20L);
+    input.setLastModified(lastModifiedTimestamp);
+
+    final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects =
+        new HashMap<>();
+    dataSetPropertiesAspects.put(
+        Constants.DATASET_PROPERTIES_ASPECT_NAME,
+        new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
+    final EntityResponse response =
+        new EntityResponse()
+            .setEntityName(Constants.DATASET_ENTITY_NAME)
+            .setUrn(TEST_DATASET_URN)
+            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
+    final Dataset actual = DatasetMapper.map(response);
+
+    final Dataset expected = new Dataset();
+    expected.setUrn(TEST_DATASET_URN.toString());
+    final DatasetProperties expectedDatasetProperties = new DatasetProperties();
+    expectedDatasetProperties.setName("Test");
+    expectedDatasetProperties.setQualifiedName("Test QualifiedName");
+    expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString());
+    expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString());
+    expectedDatasetProperties.setLastModified(20L);
+    expectedDatasetProperties.setCreated(10L);
+    expected.setProperties(expectedDatasetProperties);
+
+    Assert.assertEquals(actual.getUrn(), expected.getUrn());
+    Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
+    Assert.assertEquals(
+        actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName());
+
+    Assert.assertEquals(
+        actual.getProperties().getLastModified(), expected.getProperties().getLastModified());
+    Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated());
+
+    Assert.assertEquals(
+        actual.getProperties().getLastModifiedActor(),
+        expected.getProperties().getLastModifiedActor());
+    Assert.assertEquals(
+        actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor());
+  }
+
+  @Test
+  public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() {
+    final com.linkedin.dataset.DatasetProperties input =
+        new com.linkedin.dataset.DatasetProperties();
+    input.setName("Test");
+
+    final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects =
+        new HashMap<>();
+    dataSetPropertiesAspects.put(
+        Constants.DATASET_PROPERTIES_ASPECT_NAME,
+        new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
+    final EntityResponse response =
+        new EntityResponse()
+            .setEntityName(Constants.DATASET_ENTITY_NAME)
+            .setUrn(TEST_DATASET_URN)
+            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
+    final Dataset actual = DatasetMapper.map(response);
+
+    final Dataset expected = new Dataset();
+    expected.setUrn(TEST_DATASET_URN.toString());
+    final DatasetProperties expectedDatasetProperties = new DatasetProperties();
+    expectedDatasetProperties.setName("Test");
+    expectedDatasetProperties.setLastModifiedActor(null);
+    expectedDatasetProperties.setCreatedActor(null);
+    expectedDatasetProperties.setLastModified(null);
+    expectedDatasetProperties.setCreated(null);
+    expected.setProperties(expectedDatasetProperties);
+
+    Assert.assertEquals(actual.getUrn(), expected.getUrn());
+    Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
+
+    Assert.assertEquals(
+        actual.getProperties().getLastModified(), expected.getProperties().getLastModified());
+    Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated());
+
+    Assert.assertEquals(
+        actual.getProperties().getLastModifiedActor(),
+        expected.getProperties().getLastModifiedActor());
+    Assert.assertEquals(
+        actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor());
+  }
+
+  @Test
+  public void testDatasetPropertiesMapperWithoutTimestampActors() {
+    final com.linkedin.dataset.DatasetProperties input =
+        new com.linkedin.dataset.DatasetProperties();
+    input.setName("Test");
+
+    TimeStamp createdTimestamp = new TimeStamp();
+    createdTimestamp.setTime(10L);
+    input.setCreated(createdTimestamp);
+
+    TimeStamp lastModifiedTimestamp = new TimeStamp();
+    lastModifiedTimestamp.setTime(20L);
+    input.setLastModified(lastModifiedTimestamp);
+
+    final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects =
+        new HashMap<>();
+    dataSetPropertiesAspects.put(
+        Constants.DATASET_PROPERTIES_ASPECT_NAME,
+        new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
+    final EntityResponse response =
+        new EntityResponse()
+            .setEntityName(Constants.DATASET_ENTITY_NAME)
+            .setUrn(TEST_DATASET_URN)
+            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
+    final Dataset actual = DatasetMapper.map(response);
+
+    final Dataset expected = new Dataset();
+    expected.setUrn(TEST_DATASET_URN.toString());
+    final DatasetProperties expectedDatasetProperties = new DatasetProperties();
+    expectedDatasetProperties.setName("Test");
+    expectedDatasetProperties.setLastModifiedActor(null);
+    expectedDatasetProperties.setCreatedActor(null);
+    expectedDatasetProperties.setLastModified(20L);
+    expectedDatasetProperties.setCreated(10L);
+    expected.setProperties(expectedDatasetProperties);
+
+    Assert.assertEquals(actual.getUrn(), expected.getUrn());
+    Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
+
+    Assert.assertEquals(
+        actual.getProperties().getLastModified(), expected.getProperties().getLastModified());
+    Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated());
+
+    Assert.assertEquals(
+        actual.getProperties().getLastModifiedActor(),
+        expected.getProperties().getLastModifiedActor());
+    Assert.assertEquals(
+        actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor());
+  }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java
index 78cdaa0a276da..612136d1f9164 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java
@@ -19,90 +19,128 @@ public void testMapperFullProfile() {
     input.setRowCount(10L);
     input.setColumnCount(45L);
     input.setSizeInBytes(15L);
-    input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of(
-        new DatasetFieldProfile().setFieldPath("/field1")
-            .setMax("1")
-            .setMean("2")
-            .setStdev("3")
-            .setMedian("4")
-            .setMin("5")
-            .setNullCount(20L)
-            .setNullProportion(20.5f)
-            .setUniqueCount(30L)
-            .setUniqueProportion(30.5f)
-            .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))),
-        new DatasetFieldProfile().setFieldPath("/field2")
-            .setMax("2")
-            .setMean("3")
-            .setStdev("4")
-            .setMedian("5")
-            .setMin("6")
-            .setNullCount(30L)
-            .setNullProportion(30.5f)
-            .setUniqueCount(40L)
-            .setUniqueProportion(40.5f)
-            .setSampleValues(new StringArray(ImmutableList.of("val3", "val4")))
-    )));
-    final EnvelopedAspect inputAspect = new EnvelopedAspect()
-        .setAspect(GenericRecordUtils.serializeAspect(input));
+    input.setFieldProfiles(
+        new DatasetFieldProfileArray(
+            ImmutableList.of(
+                new DatasetFieldProfile()
+                    .setFieldPath("/field1")
+                    .setMax("1")
+                    .setMean("2")
+                    .setStdev("3")
+                    .setMedian("4")
+                    .setMin("5")
+                    .setNullCount(20L)
+                    .setNullProportion(20.5f)
+                    .setUniqueCount(30L)
+                    .setUniqueProportion(30.5f)
+                    .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))),
+                new DatasetFieldProfile()
+                    .setFieldPath("/field2")
+                    .setMax("2")
+                    .setMean("3")
+                    .setStdev("4")
+                    .setMedian("5")
+                    .setMin("6")
+                    .setNullCount(30L)
+                    .setNullProportion(30.5f)
+                    .setUniqueCount(40L)
+                    .setUniqueProportion(40.5f)
+                    .setSampleValues(new StringArray(ImmutableList.of("val3", "val4"))))));
+    final EnvelopedAspect inputAspect =
+        new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input));
     final DatasetProfile actual = DatasetProfileMapper.map(inputAspect);
     final DatasetProfile expected = new DatasetProfile();
     expected.setTimestampMillis(1L);
     expected.setRowCount(10L);
     expected.setColumnCount(45L);
     expected.setSizeInBytes(15L);
-    expected.setFieldProfiles(new ArrayList<>(
-        ImmutableList.of(
-            new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1",
-                30L,
-                30.5f,
-                20L,
-                20.5f,
-                "5",
-                "1",
-                "2",
-                "4",
-                "3",
-                new ArrayList<>(ImmutableList.of("val1", "val2"))),
-            new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2",
-                40L,
-                40.5f,
-                30L,
-                30.5f,
-                "6",
-                "2",
-                "3",
-                "5",
-                "4",
-                new ArrayList<>(ImmutableList.of("val3", "val4")))
-        )
-    ));
+    expected.setFieldProfiles(
+        new ArrayList<>(
+            ImmutableList.of(
+                new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(
+                    "/field1",
+                    30L,
+                    30.5f,
+                    20L,
+                    20.5f,
+                    "5",
+                    "1",
+                    "2",
+                    "4",
+                    "3",
+                    new ArrayList<>(ImmutableList.of("val1", "val2"))),
+                new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(
+                    "/field2",
+                    40L,
+                    40.5f,
+                    30L,
+                    30.5f,
+                    "6",
+                    "2",
+                    "3",
+                    "5",
+                    "4",
+                    new ArrayList<>(ImmutableList.of("val3", "val4"))))));
     Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis());
     Assert.assertEquals(actual.getRowCount(), expected.getRowCount());
     Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount());
     Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getFieldPath(),
+        expected.getFieldProfiles().get(0).getFieldPath());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getMedian(),
+        expected.getFieldProfiles().get(0).getMedian());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getNullCount(),
+        expected.getFieldProfiles().get(0).getNullCount());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getNullProportion(),
+        expected.getFieldProfiles().get(0).getNullProportion());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getUniqueCount(),
+        expected.getFieldProfiles().get(0).getUniqueCount());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getUniqueProportion(),
+        expected.getFieldProfiles().get(0).getUniqueProportion());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getSampleValues(),
+        expected.getFieldProfiles().get(0).getSampleValues());
 
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion());
-    Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getFieldPath(),
+        expected.getFieldProfiles().get(1).getFieldPath());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getMedian(),
+        expected.getFieldProfiles().get(1).getMedian());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getNullCount(),
+        expected.getFieldProfiles().get(1).getNullCount());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getNullProportion(),
+        expected.getFieldProfiles().get(1).getNullProportion());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getUniqueCount(),
+        expected.getFieldProfiles().get(1).getUniqueCount());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getUniqueProportion(),
+        expected.getFieldProfiles().get(1).getUniqueProportion());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(1).getSampleValues(),
+        expected.getFieldProfiles().get(1).getSampleValues());
   }
 
   @Test
@@ -111,77 +149,95 @@ public void testMapperPartialProfile() {
     input.setTimestampMillis(1L);
     input.setRowCount(10L);
     input.setColumnCount(45L);
-    input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of(
-        new DatasetFieldProfile().setFieldPath("/field1")
-            .setUniqueCount(30L)
-            .setUniqueProportion(30.5f),
-        new DatasetFieldProfile().setFieldPath("/field2")
-            .setMax("2")
-            .setMean("3")
-            .setStdev("4")
-            .setMedian("5")
-            .setMin("6")
-            .setUniqueCount(40L)
-            .setUniqueProportion(40.5f)
-    )));
-    final EnvelopedAspect inputAspect = new EnvelopedAspect()
-        .setAspect(GenericRecordUtils.serializeAspect(input));
+    input.setFieldProfiles(
+        new DatasetFieldProfileArray(
+            ImmutableList.of(
+                new DatasetFieldProfile()
+                    .setFieldPath("/field1")
+                    .setUniqueCount(30L)
+                    .setUniqueProportion(30.5f),
+                new DatasetFieldProfile()
+                    .setFieldPath("/field2")
+                    .setMax("2")
+                    .setMean("3")
+                    .setStdev("4")
+                    .setMedian("5")
+                    .setMin("6")
+                    .setUniqueCount(40L)
+                    .setUniqueProportion(40.5f))));
+    final EnvelopedAspect inputAspect =
+        new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input));
     final DatasetProfile actual = DatasetProfileMapper.map(inputAspect);
     final DatasetProfile expected = new DatasetProfile();
     expected.setTimestampMillis(1L);
     expected.setRowCount(10L);
     expected.setColumnCount(45L);
-    expected.setFieldProfiles(new ArrayList<>(
-        ImmutableList.of(
-            new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1",
-                30L,
-                30.5f,
-                null,
-                null,
-                null,
-                null,
-                null,
-                null,
-                null,
-                null),
-            new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2",
-                40L,
-                40.5f,
-                null,
-                null,
-                "6",
-                "2",
-                "3",
-                "5",
-                "4",
-                null)
-        )
-    ));
+    expected.setFieldProfiles(
+        new ArrayList<>(
+            ImmutableList.of(
+                new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(
+                    "/field1", 30L, 30.5f, null, null, null, null, null, null, null, null),
+                new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(
+                    "/field2", 40L, 40.5f, null, null, "6", "2", "3", "5", "4", null))));
     Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis());
     Assert.assertEquals(actual.getRowCount(), expected.getRowCount());
     Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount());
     Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion());
-    Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getFieldPath(),
+        expected.getFieldProfiles().get(0).getFieldPath());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getMedian(),
+        expected.getFieldProfiles().get(0).getMedian());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getNullCount(),
+        expected.getFieldProfiles().get(0).getNullCount());
+    Assert.assertEquals(
+        actual.getFieldProfiles().get(0).getNullProportion(),
+        expected.getFieldProfiles().get(0).getNullProportion());
+    Assert.assertEquals(
+
actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java index 48c23f436f875..32735ad7874a0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.domain; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -30,33 +33,34 @@ import java.util.HashSet; import java.util.List; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class DomainTypeTest { private static final String TEST_DOMAIN_1_URN = "urn:li:domain:id-1"; - private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey() - .setId("id-1"); - private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = new DomainProperties() - .setDescription("test description") - .setName("Test Domain"); - private static final Ownership TEST_DOMAIN_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey().setId("id-1"); + private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = + new DomainProperties().setDescription("test description").setName("Test Domain"); + private static final Ownership TEST_DOMAIN_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); private static final String TEST_DOMAIN_2_URN = "urn:li:domain:id-2"; @@ -68,39 +72,48 @@ public void testBatchLoad() throws Exception { Urn domainUrn1 = Urn.createFromString(TEST_DOMAIN_1_URN); Urn domainUrn2 = Urn.createFromString(TEST_DOMAIN_2_URN); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - domainUrn1, - new EntityResponse() - .setEntityName(Constants.DOMAIN_ENTITY_NAME) - .setUrn(domainUrn1) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data())) - ))))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + 
Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), + Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + domainUrn1, + new EntityResponse() + .setEntityName(Constants.DOMAIN_ENTITY_NAME) + .setUrn(domainUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data()))))))); DomainType type = new DomainType(client); QueryContext mockContext = getMockAllowContext(); - List<DataFetcherResult<Domain>> result = type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); + List<DataFetcherResult<Domain>> result = + type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), + Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -120,17 +133,20 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); DomainType type = new DomainType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java index 918616a2705b7..f88c8285e20df 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -31,16 +33,9 @@ import com.linkedin.common.urn.Urn; 
import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; -import com.linkedin.notebook.NotebookCell; -import com.linkedin.notebook.NotebookCellArray; -import com.linkedin.notebook.NotebookCellType; -import com.linkedin.notebook.NotebookContent; -import com.linkedin.notebook.NotebookInfo; -import com.linkedin.notebook.EditableNotebookProperties; -import com.linkedin.notebook.TextCell; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.types.container.ContainerType; import com.linkedin.domain.Domains; import com.linkedin.entity.Aspect; @@ -50,6 +45,13 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.NotebookKey; +import com.linkedin.notebook.EditableNotebookProperties; +import com.linkedin.notebook.NotebookCell; +import com.linkedin.notebook.NotebookCellArray; +import com.linkedin.notebook.NotebookCellType; +import com.linkedin.notebook.NotebookContent; +import com.linkedin.notebook.NotebookInfo; +import com.linkedin.notebook.TextCell; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -60,58 +62,75 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class NotebookTypeTest { private static final String TEST_NOTEBOOK = "urn:li:notebook:(querybook,123)"; - private static final NotebookKey NOTEBOOK_KEY = new NotebookKey() - .setNotebookId("123") - .setNotebookTool("querybook"); - private static final NotebookContent NOTEBOOK_CONTENT = new NotebookContent() - .setCells(new NotebookCellArray(ImmutableList.of(new NotebookCell() - .setType(NotebookCellType.TEXT_CELL) - .setTextCell(new TextCell() - .setCellId("1234") - .setCellTitle("test cell") - .setText("test text") - .setChangeAuditStamps(new ChangeAuditStamps()))))); - private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = new EditableNotebookProperties() - .setDescription("test editable description"); - private static final Ownership OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - - private static final SubTypes SUB_TYPES = new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); - - private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("test_platform")); - - private static final NotebookInfo NOTEBOOK_INFO = new NotebookInfo() - .setTitle("title") - .setExternalUrl(new Url("https://querybook.com/notebook/123")) - .setChangeAuditStamps(new ChangeAuditStamps()) - .setDescription("test doc"); - - private static final Status STATUS = new Status() - .setRemoved(false); - - private static final Domains DOMAINS = new Domains() - .setDomains(new 
UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); - private static final GlobalTags GLOBAL_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_GLOSSARY_TERMS = new GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final NotebookKey NOTEBOOK_KEY = + new NotebookKey().setNotebookId("123").setNotebookTool("querybook"); + private static final NotebookContent NOTEBOOK_CONTENT = + new NotebookContent() + .setCells( + new NotebookCellArray( + ImmutableList.of( + new NotebookCell() + .setType(NotebookCellType.TEXT_CELL) + .setTextCell( + new TextCell() + .setCellId("1234") + .setCellTitle("test cell") + .setText("test text") + .setChangeAuditStamps(new ChangeAuditStamps()))))); + private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = + new EditableNotebookProperties().setDescription("test editable description"); + private static final Ownership OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + + private static final SubTypes SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); + + private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(new DataPlatformUrn("test_platform")); + + private static final NotebookInfo NOTEBOOK_INFO = + new NotebookInfo() + .setTitle("title") + .setExternalUrl(new Url("https://querybook.com/notebook/123")) + .setChangeAuditStamps(new ChangeAuditStamps()) + .setDescription("test doc"); + + private static final Status STATUS = new Status().setRemoved(false); + + private static final Domains DOMAINS = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); + private static final GlobalTags GLOBAL_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); @Test public void testBatchLoad() throws Exception { @@ -121,79 +140,69 @@ public void testBatchLoad() throws Exception { Map<String, EnvelopedAspect> notebookAspects = new HashMap<>(); notebookAspects.put( Constants.NOTEBOOK_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data()))); notebookAspects.put( Constants.NOTEBOOK_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data()))); notebookAspects.put( Constants.NOTEBOOK_CONTENT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data())) - ); 
+ new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data()))); notebookAspects.put( Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data()))); notebookAspects.put( Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data()))); notebookAspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data()))); notebookAspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(STATUS.data())) - ); + Constants.STATUS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(STATUS.data()))); notebookAspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data()))); notebookAspects.put( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(DOMAINS.data())) - ); + Constants.DOMAINS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DOMAINS.data()))); notebookAspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data()))); notebookAspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data())) - ); - notebookAspects.put(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data()))); + notebookAspects.put( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DATA_PLATFORM_INSTANCE.data()))); Urn notebookUrn = new NotebookUrn("querybook", "123"); Urn dummyNotebookUrn = new NotebookUrn("querybook", "dummy"); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - notebookUrn, - new EntityResponse() - .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) - .setUrn(notebookUrn) - .setAspects(new EnvelopedAspectMap(notebookAspects)))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + notebookUrn, + new EntityResponse() + .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) + .setUrn(notebookUrn) + .setAspects(new EnvelopedAspectMap(notebookAspects)))); NotebookType type = new NotebookType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<Notebook>> - result = type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); + List<DataFetcherResult<Notebook>> result = + type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - 
Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -201,13 +210,17 @@ public void testBatchLoad() throws Exception { Notebook notebook = result.get(0).getData(); assertEquals(notebook.getContent().getCells().size(), NOTEBOOK_CONTENT.getCells().size()); - assertEquals(notebook.getContent().getCells().get(0).getType().toString(), + assertEquals( + notebook.getContent().getCells().get(0).getType().toString(), NOTEBOOK_CONTENT.getCells().get(0).getType().toString()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellId(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellId(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellId()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellTitle()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getText(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getText(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getText()); assertEquals(notebook.getInfo().getDescription(), NOTEBOOK_INFO.getDescription()); assertEquals(notebook.getInfo().getExternalUrl(), NOTEBOOK_INFO.getExternalUrl().toString()); @@ -217,11 +230,17 @@ public void testBatchLoad() throws Exception { assertEquals(notebook.getType(), EntityType.NOTEBOOK); assertEquals(notebook.getOwnership().getOwners().size(), 1); assertEquals(notebook.getInstitutionalMemory().getElements().size(), 1); - assertEquals(notebook.getEditableProperties().getDescription(), TEST_EDITABLE_DESCRIPTION.getDescription()); - assertEquals(notebook.getTags().getTags().get(0).getTag().getUrn(), + assertEquals( + notebook.getEditableProperties().getDescription(), + TEST_EDITABLE_DESCRIPTION.getDescription()); + assertEquals( + notebook.getTags().getTags().get(0).getTag().getUrn(), GLOBAL_TAGS.getTags().get(0).getTag().toString()); - assertEquals(notebook.getSubTypes().getTypeNames(), SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); - assertEquals(notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), + assertEquals( + notebook.getSubTypes().getTypeNames(), + SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); + assertEquals( + notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); assertEquals(notebook.getPlatform().getUrn(), DATA_PLATFORM_INSTANCE.getPlatform().toString()); @@ -232,17 +251,19 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); ContainerType type = new 
ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), - context)); + assertThrows( + RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java index a3c089b91de87..c8f694320d88a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java @@ -1,28 +1,30 @@ package com.linkedin.datahub.graphql.types.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; -import com.linkedin.common.urn.UrnUtils; -import com.linkedin.datahub.graphql.generated.QueryEntity; -import com.linkedin.query.QueryLanguage; -import com.linkedin.query.QueryProperties; -import com.linkedin.query.QuerySource; -import com.linkedin.query.QueryStatement; -import com.linkedin.query.QuerySubject; -import com.linkedin.query.QuerySubjectArray; -import com.linkedin.query.QuerySubjects; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.query.QueryLanguage; +import com.linkedin.query.QueryProperties; +import com.linkedin.query.QuerySource; +import com.linkedin.query.QueryStatement; +import com.linkedin.query.QuerySubject; +import com.linkedin.query.QuerySubjectArray; +import com.linkedin.query.QuerySubjects; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -30,53 +32,50 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class QueryTypeTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test"); private static final Urn TEST_QUERY_2_URN = UrnUtils.getUrn("urn:li:query:test-2"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); - private static final Urn TEST_DATASET_2_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_2_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final QueryProperties TEST_QUERY_PROPERTIES_1 = new QueryProperties() - .setName("Query 
Name") - .setDescription("Query Description") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ) - )); - private static final QueryProperties TEST_QUERY_PROPERTIES_2 = new QueryProperties() - .setName("Query Name 2") - .setDescription("Query Description 2") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable2") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_2_URN) - ) - )); + private static final QueryProperties TEST_QUERY_PROPERTIES_1 = + new QueryProperties() + .setName("Query Name") + .setDescription("Query Description") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + private static final QueryProperties TEST_QUERY_PROPERTIES_2 = + new QueryProperties() + .setName("Query Name 2") + .setDescription("Query Description 2") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable2")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_2_URN)))); @Test public void testBatchLoad() throws Exception { @@ -87,38 +86,54 @@ public void testBatchLoad() throws Exception { Urn queryUrn2 = TEST_QUERY_2_URN; Map<String, EnvelopedAspect> query1Aspects = new HashMap<>(); - query1Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); - query1Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); Map<String, EnvelopedAspect> query2Aspects = new HashMap<>(); - query2Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_2.data()))); - query2Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_2.data()))); - 
Mockito.when(client.batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(queryUrn1, new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)), queryUrn2, - new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn2) - .setAspects(new EnvelopedAspectMap(query2Aspects)))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new EnvelopedAspectMap(query1Aspects)), + queryUrn2, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn2) + .setAspects(new EnvelopedAspectMap(query2Aspects)))); QueryType type = new QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); List<DataFetcherResult<QueryEntity>> result = - type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response Mockito.verify(client, Mockito.times(1)) - .batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), Mockito.eq(QueryType.ASPECTS_TO_FETCH), + .batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH), Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -141,40 +156,39 @@ public void testBatchLoadNullEntity() throws Exception { Map<String, EnvelopedAspect> query1Aspects = new HashMap<>(); query1Aspects.put( Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); query1Aspects.put( Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - queryUrn1, - new EntityResponse() - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)))); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new 
EnvelopedAspectMap(query1Aspects)))); QueryType type = new QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<QueryEntity>> result = type.batchLoad(ImmutableList.of( - TEST_QUERY_URN.toString(), - TEST_QUERY_2_URN.toString()), - mockContext); + List<DataFetcherResult<QueryEntity>> result = + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), - Mockito.eq(QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -188,18 +202,23 @@ public void testBatchLoadNullEntity() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); QueryType type = new QueryType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), - context)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), context)); } private void verifyQuery1(QueryEntity query) { @@ -207,14 +226,30 @@ private void verifyQuery1(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_1.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_1.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_1.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_1.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_1.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), 
+ TEST_QUERY_PROPERTIES_1.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_1.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_1.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); } private void verifyQuery2(QueryEntity query) { @@ -222,13 +257,29 @@ private void verifyQuery2(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_2.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_2.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_2.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_2.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_2.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_2.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_2.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_2.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java index 7f3c8f99f6593..f02fd38e2ca7c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java @@ -1,114 +1,149 @@ package com.linkedin.datahub.graphql.types.view; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataHubView; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.DataHubView; -import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.entity.Aspect; -import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; -import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class DataHubViewTypeTest { private static final String TEST_VIEW_URN = "urn:li:dataHubView:test"; private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); + /** * A Valid View is one which is minted by the createView or updateView GraphQL resolvers. * - * View Definitions currently support a limited Filter structure, which includes a single Logical filter set. - * Either a set of OR criteria with 1 value in each nested "and", or a single OR criteria with a set of nested ANDs. + * <p>View Definitions currently support a limited Filter structure, which includes a single + * Logical filter set. Either a set of OR criteria with 1 value in each nested "and", or a single + * OR criteria with a set of nested ANDs. 
 *
- * This enables us to easily support merging more complex View predicates in the future without a data migration,
- * should the need arise.
+ * <p>This enables us to easily support merging more complex View predicates in the future without
+ * a data migration, should the need arise.
 */
- private static final DataHubViewInfo TEST_VALID_VIEW_INFO = new DataHubViewInfo()
- .setType(DataHubViewType.PERSONAL)
- .setName("test")
- .setDescription("test description")
- .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
- .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
- .setDefinition(new DataHubViewDefinition()
- .setFilter(new Filter()
- .setOr(new ConjunctiveCriterionArray(ImmutableList.of(
- new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(
- new Criterion()
- .setValues(new StringArray(ImmutableList.of("value1", "value2")))
- .setField("test")
- .setCondition(Condition.EQUAL)
- )))
- )))
- )
- .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));
+ private static final DataHubViewInfo TEST_VALID_VIEW_INFO =
+ new DataHubViewInfo()
+ .setType(DataHubViewType.PERSONAL)
+ .setName("test")
+ .setDescription("test description")
+ .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+ .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+ .setDefinition(
+ new DataHubViewDefinition()
+ .setFilter(
+ new Filter()
+ .setOr(
+ new ConjunctiveCriterionArray(
+ ImmutableList.of(
+ new ConjunctiveCriterion()
+ .setAnd(
+ new CriterionArray(
+ ImmutableList.of(
+ new Criterion()
+ .setValues(
+ new StringArray(
+ ImmutableList.of(
+ "value1", "value2")))
+ .setField("test")
+ .setCondition(Condition.EQUAL))))))))
+ .setEntityTypes(
+ new StringArray(
+ ImmutableList.of(
+ Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));

 /**
- * An Invalid View is one which has been ingested manually, which should not occur under normal operation of DataHub.
+ * An Invalid View is one which has been ingested manually, which should not occur under normal
+ * operation of DataHub.
 *
- * This would be a complex view with multiple OR and nested AND predicates.
+ * <p>This would be a complex view with multiple OR and nested AND predicates.
*/ - private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = new DataHubViewInfo() - .setType(DataHubViewType.PERSONAL) - .setName("test") - .setDescription("test description") - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setDefinition(new DataHubViewDefinition() - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))), - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))) - ))) - ) - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); + private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = + new DataHubViewInfo() + .setType(DataHubViewType.PERSONAL) + .setName("test") + .setDescription("test description") + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setDefinition( + new DataHubViewDefinition() + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))))))) + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); private static final String TEST_VIEW_URN_2 = "urn:li:dataHubView:test2"; @@ -123,33 +158,37 @@ public void testBatchLoadValidView() throws Exception { Map<String, EnvelopedAspect> view1Aspects = new HashMap<>(); view1Aspects.put( Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - viewUrn1, - new EntityResponse() - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setUrn(viewUrn1) - .setAspects(new EnvelopedAspectMap(view1Aspects)))); - - com.linkedin.datahub.graphql.types.view.DataHubViewType type = 
new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + viewUrn1, + new EntityResponse() + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setUrn(viewUrn1) + .setAspects(new EnvelopedAspectMap(view1Aspects)))); + + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<DataHubView>> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); + List<DataFetcherResult<DataHubView>> result = + type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -164,9 +203,12 @@ public void testBatchLoadValidView() throws Exception { assertEquals(view.getDefinition().getEntityTypes().get(1), EntityType.DASHBOARD); assertEquals(view.getDefinition().getFilter().getOperator(), LogicalOperator.AND); assertEquals(view.getDefinition().getFilter().getFilters().size(), 1); - assertEquals(view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL); + assertEquals( + view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL); assertEquals(view.getDefinition().getFilter().getFilters().get(0).getField(), "test"); - assertEquals(view.getDefinition().getFilter().getFilters().get(0).getValues(), ImmutableList.of("value1", "value2")); + assertEquals( + view.getDefinition().getFilter().getFilters().get(0).getValues(), + ImmutableList.of("value1", "value2")); // Assert second element is null. assertNull(result.get(1)); @@ -174,40 +216,45 @@ public void testBatchLoadValidView() throws Exception { @Test public void testBatchLoadInvalidView() throws Exception { - // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log a warning). + // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log + // a warning). 
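For orientation, the view fixtures above encode a Filter as a disjunction of conjunctions: an OR across ConjunctiveCriterion entries, each of which ANDs a CriterionArray. The valid fixture carries exactly one AND clause; the invalid fixture ORs two AND clauses together, which this test expects batchLoad to survive by returning an empty no-op view, per the comment above. A minimal single-clause sketch using only the model calls already shown in the fixtures (illustrative only, not part of the patch):

    Filter singleClause =
        new Filter()
            .setOr(
                new ConjunctiveCriterionArray(
                    ImmutableList.of(
                        new ConjunctiveCriterion()
                            .setAnd(
                                new CriterionArray(
                                    ImmutableList.of(
                                        new Criterion()
                                            .setField("test")
                                            .setValues(new StringArray(ImmutableList.of("value1")))
                                            .setCondition(Condition.EQUAL)))))));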
EntityClient client = Mockito.mock(EntityClient.class); Urn invalidViewUrn = Urn.createFromString(TEST_VIEW_URN); Map<String, EnvelopedAspect> view1Aspects = new HashMap<>(); view1Aspects.put( Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - invalidViewUrn, - new EntityResponse() - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setUrn(invalidViewUrn) - .setAspects(new EnvelopedAspectMap(view1Aspects)))); - - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))), + Mockito.eq( + com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + invalidViewUrn, + new EntityResponse() + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setUrn(invalidViewUrn) + .setAspects(new EnvelopedAspectMap(view1Aspects)))); + + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<DataHubView>> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); + List<DataFetcherResult<DataHubView>> result = + type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(invalidViewUrn)), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(invalidViewUrn)), + Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 1); @@ -227,17 +274,21 @@ public void testBatchLoadInvalidView() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); 
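The exception test below pins the error contract: a checked RemoteInvocationException thrown by the entity client's batchGetV2 must surface from batchLoad as an unchecked RuntimeException. A caller-side sketch, assuming only the behavior the assertThrows below verifies (illustrative only, not part of the patch):

    try {
      type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), context);
    } catch (RuntimeException e) {
      // Client and network failures land here already wrapped; there is no
      // checked exception for callers to handle.
    }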
Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java index 0a58ff88586c6..6ecbc8d015b29 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java @@ -1,57 +1,50 @@ package com.linkedin.datahub.graphql.utils; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.datahub.graphql.util.DateUtil; import org.joda.time.DateTime; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertEquals; - public class DateUtilTest { - private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { - DateTime result = new DateTime() - .withDate(2023, 1, dayOfMonth); - if (zeroTime) { - return new DateUtil().setTimeToZero(result); - } - return result - .withHourOfDay(1) - .withMinuteOfHour(2) - .withSecondOfMinute(3) - .withMillisOfSecond(4); + private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { + DateTime result = new DateTime().withDate(2023, 1, dayOfMonth); + if (zeroTime) { + return new DateUtil().setTimeToZero(result); } + return result.withHourOfDay(1).withMinuteOfHour(2).withSecondOfMinute(3).withMillisOfSecond(4); + } - private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { - assertEquals( - setTimeParts(dayOfMonth, true).getMillis(), - dateUtil.getStartOfNextWeek().getMillis() - ); - } + private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { + assertEquals( + setTimeParts(dayOfMonth, true).getMillis(), dateUtil.getStartOfNextWeek().getMillis()); + } - @Test - public void testStartOfNextWeek() { - DateUtil dateUtil = Mockito.spy(DateUtil.class); + @Test + public void testStartOfNextWeek() { + DateUtil dateUtil = Mockito.spy(DateUtil.class); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); - 
assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); - assertEqualStartOfNextWeek(dateUtil, 9); - } + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); + assertEqualStartOfNextWeek(dateUtil, 9); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java index 48ce2ddb6dde4..0419fe0b5254d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static com.linkedin.metadata.Constants.*; +import static org.testng.AssertJUnit.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.identity.CorpUserInfo; @@ -7,19 +10,24 @@ import com.linkedin.mxe.MetadataChangeProposal; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.AssertJUnit.*; - - public class MutationsUtilsTest { @Test public void testBuildMetadataChangeProposal() { - MetadataChangeProposal metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn("urn:li:corpuser:datahub"), CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); - metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithKey(new CorpUserKey().setUsername("datahub"), - CORP_USER_ENTITY_NAME, CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + MetadataChangeProposal metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn("urn:li:corpuser:datahub"), + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithKey( + new CorpUserKey().setUsername("datahub"), + CORP_USER_ENTITY_NAME, + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java index adbc6808b5ab9..005b47df56982 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.types.common.mappers.util.RunInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.entity.EnvelopedAspect; @@ -8,10 +11,6 @@ import 
java.util.List; import org.testng.annotations.Test; -import static org.testng.Assert.*; - -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtilsTest { private final Long recentLastObserved = 1660056070640L; @@ -21,15 +20,21 @@ public class SystemMetadataUtilsTest { @Test public void testGetLastIngestedTime() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap); assertEquals(lastObserved, mediumLastObserved); @@ -38,15 +43,21 @@ public void testGetLastIngestedTime() { @Test public void testGetLastIngestedRunId() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); String lastRunId = SystemMetadataUtils.getLastIngestedRunId(aspectMap); assertEquals(lastRunId, "real-id-1"); @@ -55,15 +66,21 @@ public void testGetLastIngestedRunId() { @Test public void testGetLastIngestedRuns() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new 
SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); List<RunInfo> runs = SystemMetadataUtils.getLastIngestionRuns(aspectMap); @@ -75,15 +92,23 @@ public void testGetLastIngestedRuns() { @Test public void testGetLastIngestedTimeAllDefaultRunIds() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("default-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved) - )); - aspectMap.put("default-run-id3", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "default-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved))); + aspectMap.put( + "default-run-id3", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata() + .setRunId(DEFAULT_RUN_ID) + .setLastObserved(distantLastObserved))); Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap); assertNull(lastObserved, null); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java index c42e1bb7f92e0..d3aea2a3dac12 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java @@ -1,30 +1,19 @@ package com.linkedin.datahub.upgrade; import com.google.common.collect.ImmutableList; - import java.util.List; - -/** - * Specification of an upgrade to be performed to the DataHub platform. - */ +/** Specification of an upgrade to be performed to the DataHub platform. */ public interface Upgrade { - /** - * String identifier for the upgrade. - */ + /** String identifier for the upgrade. */ String id(); - /** - * Returns a set of steps to perform during the upgrade. - */ + /** Returns a set of steps to perform during the upgrade. */ List<UpgradeStep> steps(); - /** - * Returns a set of steps to perform on upgrade success, failure, or abort. - */ + /** Returns a set of steps to perform on upgrade success, failure, or abort. */ default List<UpgradeCleanupStep> cleanupSteps() { return ImmutableList.of(); } - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java index bf356c60a21a4..6da656020edf8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java @@ -2,21 +2,15 @@ import java.util.function.BiConsumer; - /** * Step executed on finish of an {@link Upgrade}. 
* - * Note that this step is not retried, even in case of failures. + * <p>Note that this step is not retried, even in case of failures. */ public interface UpgradeCleanupStep { - /** - * Returns an identifier for the upgrade step. - */ + /** Returns an identifier for the upgrade step. */ String id(); - /** - * Returns a function representing the cleanup step's logic. - */ + /** Returns a function representing the cleanup step's logic. */ BiConsumer<UpgradeContext, UpgradeResult> executable(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java index e6be6905accee..eee27096e2238 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java @@ -1,14 +1,14 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade; import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.datahub.upgrade.removeunknownaspects.RemoveUnknownAspects; import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.SystemUpdate; +import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import java.util.List; import javax.inject.Inject; import javax.inject.Named; @@ -17,7 +17,6 @@ import org.springframework.stereotype.Component; import picocli.CommandLine; - @Slf4j @Component public class UpgradeCli implements CommandLineRunner { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java index 53a5c0758f318..909ceeb8f3bab 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java @@ -8,18 +8,23 @@ import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.FilterType; - @SuppressWarnings("checkstyle:HideUtilityClassConstructor") @SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class}) -@ComponentScan(basePackages = { - "com.linkedin.gms.factory", - "com.linkedin.datahub.upgrade.config", - "com.linkedin.metadata.dao.producer" -}, excludeFilters = { - @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class) -}) +@ComponentScan( + basePackages = { + "com.linkedin.gms.factory", + "com.linkedin.datahub.upgrade.config", + "com.linkedin.metadata.dao.producer" + }, + excludeFilters = { + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = ScheduledAnalyticsFactory.class) + }) public class UpgradeCliApplication { public static void main(String[] args) { - new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class).web(WebApplicationType.NONE).run(args); + new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class) + .web(WebApplicationType.NONE) + 
.run(args); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java index 76cfc6321adfd..25a3d44b6e9da 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java @@ -4,35 +4,21 @@ import java.util.Map; import java.util.Optional; - -/** - * Context about a currently running upgrade. - */ +/** Context about a currently running upgrade. */ public interface UpgradeContext { - /** - * Returns the currently running upgrade. - */ + /** Returns the currently running upgrade. */ Upgrade upgrade(); - /** - * Returns the results from steps that have been completed. - */ + /** Returns the results from steps that have been completed. */ List<UpgradeStepResult> stepResults(); - /** - * Returns a report object where human-readable messages can be logged. - */ + /** Returns a report object where human-readable messages can be logged. */ UpgradeReport report(); - /** - * Returns a list of raw arguments that have been provided as input to the upgrade. - */ + /** Returns a list of raw arguments that have been provided as input to the upgrade. */ List<String> args(); - /** - * Returns a map of argument to <>optional</> value, as delimited by an '=' character. - */ + /** Returns a map of argument to <>optional</> value, as delimited by an '=' character. */ Map<String, Optional<String>> parsedArgs(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java index 927ccc0578308..c01aca12254a3 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java @@ -2,20 +2,12 @@ import java.util.List; - -/** - * Responsible for managing the execution of an {@link Upgrade}. - */ +/** Responsible for managing the execution of an {@link Upgrade}. */ public interface UpgradeManager { - /** - * Register an {@link Upgrade} with the manager. - */ + /** Register an {@link Upgrade} with the manager. */ void register(Upgrade upgrade); - /** - * Kick off an {@link Upgrade} by identifier. - */ + /** Kick off an {@link Upgrade} by identifier. */ UpgradeResult execute(String upgradeId, List<String> args); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java index 2ed3f105a4eda..1c677f6fe8578 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java @@ -2,25 +2,15 @@ import java.util.List; - -/** - * A human-readable record of upgrade progress + status. - */ +/** A human-readable record of upgrade progress + status. */ public interface UpgradeReport { - /** - * Adds a new line to the upgrade report. - */ + /** Adds a new line to the upgrade report. */ void addLine(String line); - /** - * Adds a new line to the upgrade report with exception - */ + /** Adds a new line to the upgrade report with exception */ void addLine(String line, Exception e); - /** - * Retrieves the lines in the report. - */ + /** Retrieves the lines in the report. 
*/ List<String> lines(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java index cdb94f0c0bba1..25dc758575fd1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java @@ -1,36 +1,21 @@ package com.linkedin.datahub.upgrade; -/** - * Represents the result of executing an {@link Upgrade} - */ +/** Represents the result of executing an {@link Upgrade} */ public interface UpgradeResult { - /** - * The execution result. - */ + /** The execution result. */ enum Result { - /** - * Upgrade succeeded. - */ + /** Upgrade succeeded. */ SUCCEEDED, - /** - * Upgrade failed. - */ + /** Upgrade failed. */ FAILED, - /** - * Upgrade was aborted. - */ + /** Upgrade was aborted. */ ABORTED } - /** - * Returns the {@link Result} of executing an {@link Upgrade} - */ + /** Returns the {@link Result} of executing an {@link Upgrade} */ Result result(); - /** - * Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. - */ + /** Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. */ UpgradeReport report(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java index b85bd7a51e3dd..3f90dcb33a005 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java @@ -2,39 +2,29 @@ import java.util.function.Function; - -/** - * Represents a single executable step in an {@link Upgrade}. - */ +/** Represents a single executable step in an {@link Upgrade}. */ public interface UpgradeStep { - /** - * Returns an identifier for the upgrade step. - */ + /** Returns an identifier for the upgrade step. */ String id(); - /** - * Returns a function representing the step's execution logic. - */ + /** Returns a function representing the step's execution logic. */ Function<UpgradeContext, UpgradeStepResult> executable(); - /** - * Returns the number of times the step should be retried. - */ + /** Returns the number of times the step should be retried. */ default int retryCount() { return 0; } /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. */ default boolean isOptional() { return false; } - /** - * Returns whether or not to skip the step based on the UpgradeContext - */ + /** Returns whether or not to skip the step based on the UpgradeContext */ default boolean skip(UpgradeContext context) { return false; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java index 60d51f9ba476c..04b3d4b8559e6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java @@ -2,52 +2,33 @@ public interface UpgradeStepResult { - /** - * Returns a string identifier associated with the step. - */ + /** Returns a string identifier associated with the step. */ String stepId(); - /** - * The outcome of the step execution. 
- */ + /** The outcome of the step execution. */ enum Result { - /** - * The step succeeded. - */ + /** The step succeeded. */ SUCCEEDED, - /** - * The step failed. - */ + /** The step failed. */ FAILED } - /** - * A control-flow action to perform as a result of the step execution. - */ + /** A control-flow action to perform as a result of the step execution. */ enum Action { - /** - * Continue attempting the upgrade. - */ + /** Continue attempting the upgrade. */ CONTINUE, - /** - * Immediately fail the upgrade, without retry. - */ + /** Immediately fail the upgrade, without retry. */ FAIL, - /** - * Immediately abort the upgrade, without retry. - */ + /** Immediately abort the upgrade, without retry. */ ABORT } - /** - * Returns the result of executing the step, either success or failure. - */ + /** Returns the result of executing the step, either success or failure. */ Result result(); - /** - * Returns the action to perform after executing the step, either continue or abort. - */ + /** Returns the action to perform after executing the step, either continue or abort. */ default Action action() { return Action.CONTINUE; - }; + } + ; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java index a6f3ef5560442..8d5f1118433fc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java @@ -19,10 +19,12 @@ public static Map<String, Optional<String>> parseArgs(final List<String> args) { for (final String arg : args) { List<String> parsedArg = Arrays.asList(arg.split(KEY_VALUE_DELIMITER, 2)); - parsedArgs.put(parsedArg.get(0), parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty()); + parsedArgs.put( + parsedArg.get(0), + parsedArg.size() > 1 ? 
Optional.of(parsedArg.get(1)) : Optional.empty()); } return parsedArgs; } - private UpgradeUtils() { } + private UpgradeUtils() {} } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java index 4f980b11b888a..393b5411599ad 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java @@ -8,7 +8,6 @@ import com.linkedin.metadata.graph.GraphService; import java.util.function.Function; - public class ClearGraphServiceStep implements UpgradeStep { private final String deletePattern = ".*"; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java index fca8f60aefd95..230f5a60cb9ff 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java @@ -8,13 +8,13 @@ import com.linkedin.metadata.search.EntitySearchService; import java.util.function.Function; - public class ClearSearchServiceStep implements UpgradeStep { private final EntitySearchService _entitySearchService; private final boolean _alwaysRun; - public ClearSearchServiceStep(final EntitySearchService entitySearchService, final boolean alwaysRun) { + public ClearSearchServiceStep( + final EntitySearchService entitySearchService, final boolean alwaysRun) { _entitySearchService = entitySearchService; _alwaysRun = alwaysRun; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java index 270aa11c7b070..dd6c3fd1e44aa 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java @@ -8,7 +8,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor public class GMSDisableWriteModeStep implements UpgradeStep { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java index 8df02123983e8..8a0d374d6ee3e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java @@ -8,7 +8,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor public class GMSEnableWriteModeStep implements UpgradeStep { private final SystemRestliEntityClient _entityClient; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java index 1391ef685c335..4e7447cb1e2cb 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.common.steps; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -18,9 +20,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class GMSQualificationStep implements UpgradeStep { @@ -70,9 +69,16 @@ private boolean isEligible(ObjectNode configJson) { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - String gmsHost = System.getenv("DATAHUB_GMS_HOST") == null ? "localhost" : System.getenv("DATAHUB_GMS_HOST"); - String gmsPort = System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT"); - String gmsProtocol = System.getenv("DATAHUB_GMS_PROTOCOL") == null ? "http" : System.getenv("DATAHUB_GMS_PROTOCOL"); + String gmsHost = + System.getenv("DATAHUB_GMS_HOST") == null + ? "localhost" + : System.getenv("DATAHUB_GMS_HOST"); + String gmsPort = + System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT"); + String gmsProtocol = + System.getenv("DATAHUB_GMS_PROTOCOL") == null + ? "http" + : System.getenv("DATAHUB_GMS_PROTOCOL"); try { String spec = String.format("%s://%s:%s/config", gmsProtocol, gmsHost, gmsPort); @@ -81,33 +87,37 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { String responseString = convertStreamToString(response); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, - MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints( + StreamReadConstraints.builder().maxStringLength(maxSize).build()); JsonNode configJson = mapper.readTree(responseString); if (isEligible((ObjectNode) configJson)) { - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } else { - context.report().addLine(String.format("Failed to qualify GMS. It is not running on the latest version." - + "Re-run GMS on the latest datahub release")); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "Failed to qualify GMS. It is not running on the latest version." + + "Re-run GMS on the latest datahub release")); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } catch (Exception e) { e.printStackTrace(); - context.report().addLine(String.format("ERROR: Cannot connect to GMS" - + "at %s://host %s port %s. Make sure GMS is on the latest version " - + "and is running at that host before starting the migration.", - gmsProtocol, - gmsHost, - gmsPort)); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "ERROR: Cannot connect to GMS" + + "at %s://host %s port %s. 
Make sure GMS is on the latest version " + + "and is running at that host before starting the migration.", + gmsProtocol, gmsHost, gmsPort)); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index 16e5e4247267f..abd144bf453ed 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -6,12 +6,12 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class BackfillBrowsePathsV2Config { @Bean - public BackfillBrowsePathsV2 backfillBrowsePathsV2(EntityService entityService, SearchService searchService) { + public BackfillBrowsePathsV2 backfillBrowsePathsV2( + EntityService entityService, SearchService searchService) { return new BackfillBrowsePathsV2(entityService, searchService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index e98f0dc2093f6..1e9298bc60612 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -10,16 +10,24 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class BuildIndicesConfig { @Bean(name = "buildIndices") - public BuildIndices buildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public BuildIndices buildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - return new BuildIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new BuildIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java index 558c9780911ac..5bd7244a92e45 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java @@ -10,16 +10,24 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class CleanIndicesConfig { 
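CleanIndicesConfig follows the same Spring idiom as BuildIndicesConfig above and most of the upgrade configs in this patch: a @Configuration class exposing a single named @Bean, with collaborators passed as factory-method parameters (or looked up from an injected ApplicationContext, as in the no-code and restore configs below). A minimal sketch of the pattern, with hypothetical names (illustrative only, not part of the patch):

    @Configuration
    public class ExampleUpgradeConfig {
      // Dependencies are declared as method parameters, so Spring resolves them
      // per bean; "ExampleUpgrade" and "exampleUpgrade" are placeholder names.
      @Bean(name = "exampleUpgrade")
      public ExampleUpgrade exampleUpgrade(final EntitySearchService searchService) {
        return new ExampleUpgrade(searchService);
      }
    }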
@Bean(name = "cleanIndices") - public CleanIndices cleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public CleanIndices cleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - return new CleanIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new CleanIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 23ea81009fa1d..24bcec5852b4f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.config; +import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; + import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -12,17 +14,18 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; - - @Configuration public class NoCodeCleanupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeCleanup") - @DependsOn({"ebeanServer", "graphService", "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN}) + @DependsOn({ + "ebeanServer", + "graphService", + "elasticSearchRestHighLevelClient", + INDEX_CONVENTION_BEAN + }) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index cd264e529e9a5..68009d7ed1718 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -12,12 +12,10 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class NoCodeUpgradeConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeUpgrade") @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) @@ -25,7 +23,8 @@ 
public class NoCodeUpgradeConfig { public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java index cdc739efc416d..0b46133209382 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java @@ -5,7 +5,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class RemoveUnknownAspectsConfig { @Bean(name = "removeUnknownAspects") diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 97a08800534de..743e4ffe84b0e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -14,25 +14,30 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class RestoreBackupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreBackup") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "graphService", - "searchService", "entityRegistry"}) + @DependsOn({ + "ebeanServer", + "entityService", + "systemRestliEntityClient", + "graphService", + "searchService", + "entityRegistry" + }) @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final GraphService graphClient = applicationContext.getBean(GraphService.class); final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreBackup(ebeanServer, entityService, entityRegistry, entityClient, - graphClient, searchClient); + return new RestoreBackup( + ebeanServer, entityService, entityRegistry, entityClient, graphClient, searchClient); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index 663cad4a4bff6..d258c4a4d1a52 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -13,11 +13,9 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class RestoreIndicesConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) @@ -25,11 +23,12 @@ public class RestoreIndicesConfig { public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); + final EntitySearchService entitySearchService = + applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreIndices(ebeanServer, entityService, entityRegistry, entitySearchService, - graphService); + return new RestoreIndices( + ebeanServer, entityService, entityRegistry, entitySearchService, graphService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 9848fc7a0008f..3b63d81486eb4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -24,18 +24,21 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Slf4j @Configuration public class SystemUpdateConfig { @Bean(name = "systemUpdate") - public SystemUpdate systemUpdate(final BuildIndices buildIndices, final CleanIndices cleanIndices, - @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, - final GitVersion gitVersion, @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + public SystemUpdate systemUpdate( + final BuildIndices buildIndices, + final CleanIndices cleanIndices, + @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, + final GitVersion gitVersion, + @Qualifier("revision") String revision, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { String version = String.format("%s-%s", gitVersion.getVersion(), revision); - return new SystemUpdate(buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); + return new SystemUpdate( + buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") @@ -50,16 +53,18 @@ public String getRevision() { @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; - @Autowired - private KafkaHealthChecker kafkaHealthChecker; + @Autowired private KafkaHealthChecker kafkaHealthChecker; @Bean(name = "duheKafkaEventProducer") - protected KafkaEventProducer duheKafkaEventProducer(@Qualifier("configurationProvider") ConfigurationProvider provider, - 
KafkaProperties properties, - @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { + protected KafkaEventProducer duheKafkaEventProducer( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties properties, + @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { KafkaConfiguration kafkaConfiguration = provider.getKafka(); - Producer<String, IndexedRecord> producer = new KafkaProducer<>( - DataHubKafkaProducerFactory.buildProducerProperties(duheSchemaRegistryConfig, kafkaConfiguration, properties)); + Producer<String, IndexedRecord> producer = + new KafkaProducer<>( + DataHubKafkaProducerFactory.buildProducerProperties( + duheSchemaRegistryConfig, kafkaConfiguration, properties)); return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java index 972b55f2001f1..6cc94fbed5bf3 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java @@ -9,7 +9,6 @@ import java.util.Map; import java.util.Optional; - public class DefaultUpgradeContext implements UpgradeContext { private final Upgrade _upgrade; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java index a642ee3fb0a90..623c8a71e861d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java @@ -17,7 +17,6 @@ import java.util.Map; import javax.annotation.Nonnull; - public class DefaultUpgradeManager implements UpgradeManager { private final Map<String, Upgrade> _upgrades = new HashMap<>(); @@ -32,16 +31,19 @@ public UpgradeResult execute(String upgradeId, List<String> args) { if (_upgrades.containsKey(upgradeId)) { return executeInternal(_upgrades.get(upgradeId), args); } - throw new IllegalArgumentException(String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); + throw new IllegalArgumentException( + String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); } private UpgradeResult executeInternal(Upgrade upgrade, List<String> args) { final UpgradeReport upgradeReport = new DefaultUpgradeReport(); - final UpgradeContext context = new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args); + final UpgradeContext context = + new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args); upgradeReport.addLine(String.format("Starting upgrade with id %s...", upgrade.id())); UpgradeResult result = executeInternal(context); upgradeReport.addLine( - String.format("Upgrade %s completed with result %s. Exiting...", upgrade.id(), result.result())); + String.format( + "Upgrade %s completed with result %s. 
Exiting...", upgrade.id(), result.result())); executeCleanupInternal(context, result); return result; } @@ -58,12 +60,16 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (step.skip(context)) { upgradeReport.addLine( - String.format(String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); continue; } upgradeReport.addLine( - String.format(String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); final UpgradeStepResult stepResult = executeStepInternal(context, step); stepResults.add(stepResult); @@ -71,7 +77,8 @@ private UpgradeResult executeInternal(UpgradeContext context) { // Apply Actions if (UpgradeStepResult.Action.ABORT.equals(stepResult.action())) { upgradeReport.addLine( - String.format("Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", + String.format( + "Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", step.id())); return new DefaultUpgradeResult(UpgradeResult.Result.ABORTED, upgradeReport); } @@ -80,23 +87,27 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (UpgradeStepResult.Result.FAILED.equals(stepResult.result())) { if (step.isOptional()) { upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", i + 1, - steps.size(), step.id())); + String.format( + "Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", + i + 1, steps.size(), step.id())); continue; } // Required step failed. Fail the entire upgrade process. upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Failed after %s retries.", i + 1, steps.size(), step.id(), - step.retryCount())); + String.format( + "Failed Step %s/%s: %s. Failed after %s retries.", + i + 1, steps.size(), step.id(), step.retryCount())); upgradeReport.addLine(String.format("Exiting upgrade %s with failure.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.FAILED, upgradeReport); } - upgradeReport.addLine(String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); + upgradeReport.addLine( + String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); } - upgradeReport.addLine(String.format("Success! Completed upgrade with id %s successfully.", upgrade.id())); + upgradeReport.addLine( + String.format("Success! 
Completed upgrade with id %s successfully.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.SUCCEEDED, upgradeReport); } @@ -105,15 +116,19 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte UpgradeStepResult result = null; int maxAttempts = retryCount + 1; for (int i = 0; i < maxAttempts; i++) { - try (Timer.Context completionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { - try (Timer.Context executionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { + try (Timer.Context completionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { + try (Timer.Context executionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { result = step.executable().apply(context); } if (result == null) { // Failed to even retrieve a result. Create a default failure result. result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); - context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); + context + .report() + .addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); MetricUtils.counter(MetricRegistry.name(step.id(), "retry")).inc(); } @@ -122,9 +137,11 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte break; } } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); + String.format( + "Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); MetricUtils.counter(MetricRegistry.name(step.id(), "failed")).inc(); result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); @@ -139,7 +156,11 @@ private void executeCleanupInternal(UpgradeContext context, UpgradeResult result try { step.executable().accept(context, result); } catch (Exception e) { - context.report().addLine(String.format("Caught exception while executing cleanup step with id %s", step.id())); + context + .report() + .addLine( + String.format( + "Caught exception while executing cleanup step with id %s", step.id())); } } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java index 19706937e20ca..913b0ff20e6ff 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java @@ -1,10 +1,9 @@ package com.linkedin.datahub.upgrade.impl; import com.linkedin.datahub.upgrade.UpgradeReport; -import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; import java.util.List; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class DefaultUpgradeReport implements UpgradeReport { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java index 6ecb522848291..cf0e7221b406b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java @@ -3,7 +3,6 @@ import 
com.linkedin.datahub.upgrade.UpgradeReport; import com.linkedin.datahub.upgrade.UpgradeResult; - public class DefaultUpgradeResult implements UpgradeResult { private final Result _result; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java index d0c086f607edd..e11eaf89bfc8d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; - public class DefaultUpgradeStepResult implements UpgradeStepResult { private final String _stepId; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java index 7ed7169bf20bc..3b3098f43c473 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import io.ebean.Database; import java.util.function.Function; @@ -36,40 +36,42 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - - DbType targetDbType = context.parsedArgs().containsKey(DB_TYPE_ARG) - ? DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) - : DbType.MYSQL; + DbType targetDbType = + context.parsedArgs().containsKey(DB_TYPE_ARG) + ? 
DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) + : DbType.MYSQL; String sqlUpdateStr; switch (targetDbType) { case POSTGRES: - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint not null,\n" - + " metadata text not null,\n" - + " systemmetadata text,\n" - + " createdon timestamp not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint not null,\n" + + " metadata text not null,\n" + + " systemmetadata text,\n" + + " createdon timestamp not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; default: // both mysql and maria - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint(20) not null,\n" - + " metadata longtext not null,\n" - + " systemmetadata longtext,\n" - + " createdon datetime(6) not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint(20) not null,\n" + + " metadata longtext not null,\n" + + " systemmetadata longtext,\n" + + " createdon datetime(6) not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; } @@ -77,9 +79,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { _server.execute(_server.createSqlUpdate(sqlUpdateStr)); } catch (Exception e) { context.report().addLine("Failed to create table metadata_aspect_v2", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index 1b5770a11ff62..ac56e5e91c72b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -1,22 +1,22 @@ package com.linkedin.datahub.upgrade.nocode; +import com.datahub.util.RecordUtils; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePaths; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import 
com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.utils.PegasusUtils; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; +import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.util.Pair; import io.ebean.Database; import io.ebean.PagedList; @@ -29,13 +29,13 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; - public class DataMigrationStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; private static final long DEFAULT_BATCH_DELAY_MS = 250; - private static final String BROWSE_PATHS_ASPECT_NAME = PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); + private static final String BROWSE_PATHS_ASPECT_NAME = + PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); private final Database _server; private final EntityService _entityService; @@ -64,7 +64,6 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - context.report().addLine("Starting data migration..."); final int rowCount = _server.find(EbeanAspectV1.class).findCount(); context.report().addLine(String.format("Found %s rows in legacy aspects table", rowCount)); @@ -74,7 +73,11 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { int count = getBatchSize(context.parsedArgs()); while (start < rowCount) { - context.report().addLine(String.format("Reading rows %s through %s from legacy aspects table.", start, start + count)); + context + .report() + .addLine( + String.format( + "Reading rows %s through %s from legacy aspects table.", start, start + count)); PagedList<EbeanAspectV1> rows = getPagedAspects(start, count); for (EbeanAspectV1 oldAspect : rows.getList()) { @@ -84,11 +87,18 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { // 1. Instantiate the RecordTemplate class associated with the aspect. final RecordTemplate aspectRecord; try { - aspectRecord = RecordUtils.toRecordTemplate( - Class.forName(oldAspectName).asSubclass(RecordTemplate.class), - oldAspect.getMetadata()); + aspectRecord = + RecordUtils.toRecordTemplate( + Class.forName(oldAspectName).asSubclass(RecordTemplate.class), + oldAspect.getMetadata()); } catch (Exception e) { - context.report().addLine(String.format("Failed to convert aspect with name %s into a RecordTemplate class", oldAspectName), e); + context + .report() + .addLine( + String.format( + "Failed to convert aspect with name %s into a RecordTemplate class", + oldAspectName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -97,7 +107,11 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { urn = Urn.createFromString(oldAspect.getKey().getUrn()); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to bind Urn with value %s into Urn object", oldAspect.getKey().getUrn()), e); + throw new RuntimeException( + String.format( + "Failed to bind Urn with value %s into Urn object", + oldAspect.getKey().getUrn()), + e); } // 3. Verify that the entity associated with the aspect is found in the registry. 
@@ -106,7 +120,12 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find Entity with name %s in Entity Registry", entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -115,9 +134,13 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { newAspectName = PegasusUtils.getAspectNameFromSchema(aspectRecord.schema()); } catch (Exception e) { - context.report().addLine(String.format("Failed to retrieve @Aspect name from schema %s, urn %s", - aspectRecord.schema().getFullName(), - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to retrieve @Aspect name from schema %s, urn %s", + aspectRecord.schema().getFullName(), entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -126,23 +149,24 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { aspectSpec = entitySpec.getAspectSpec(newAspectName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - newAspectName, - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + newAspectName, entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } // 6. Write the row back using the EntityService boolean emitMae = oldAspect.getKey().getVersion() == 0L; _entityService.ingestAspects( - urn, - List.of(Pair.of(newAspectName, aspectRecord)), - toAuditStamp(oldAspect), - null - ); + urn, List.of(Pair.of(newAspectName, aspectRecord)), toAuditStamp(oldAspect), null); // 7. If necessary, emit a browse path aspect. - if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) && !urnsWithBrowsePath.contains(urn)) { + if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) + && !urnsWithBrowsePath.contains(urn)) { // Emit a browse path aspect. 
final BrowsePaths browsePaths; try { @@ -152,7 +176,11 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); browsePathsStamp.setTime(System.currentTimeMillis()); - _entityService.ingestAspects(urn, List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), browsePathsStamp, null); + _entityService.ingestAspects( + urn, + List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), + browsePathsStamp, + null); urnsWithBrowsePath.add(urn); } catch (URISyntaxException e) { @@ -167,13 +195,17 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { TimeUnit.MILLISECONDS.sleep(getBatchDelayMs(context.parsedArgs())); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } } if (totalRowsMigrated != rowCount) { - context.report().addLine(String.format("Number of rows migrated %s does not equal the number of input rows %s...", - totalRowsMigrated, - rowCount)); + context + .report() + .addLine( + String.format( + "Number of rows migrated %s does not equal the number of input rows %s...", + totalRowsMigrated, rowCount)); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -195,9 +227,9 @@ private AuditStamp toAuditStamp(final EbeanAspectV1 aspect) { return auditStamp; } - private PagedList<EbeanAspectV1> getPagedAspects(final int start, final int pageSize) { - return _server.find(EbeanAspectV1.class) + return _server + .find(EbeanAspectV1.class) .select(EbeanAspectV1.ALL_COLUMNS) .setFirstRow(start) .setMaxRows(pageSize) @@ -219,7 +251,8 @@ private long getBatchDelayMs(final Map<String, Optional<String>> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (parsedArgs.containsKey(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME) && parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).isPresent()) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index a299deb874721..6753d309b9f50 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -30,10 +30,7 @@ public NoCodeUpgrade( final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps( - server, entityService, - entityRegistry, - entityClient); + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); _cleanupSteps = buildCleanupSteps(); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java index cf8e848762f14..6180573d902d2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java @@ -7,10 +7,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. */ public class RemoveAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java index 0fe9afa8cc6f8..d22af9d292400 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.entity.ebean.AspectStorageValidationUtil; import io.ebean.Database; import java.util.function.Function; @@ -29,7 +29,6 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - if (context.parsedArgs().containsKey(NoCodeUpgrade.FORCE_UPGRADE_ARG_NAME)) { context.report().addLine("Forced upgrade detected. Proceeding with upgrade..."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -43,7 +42,8 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { } // Unqualified (Table already exists) context.report().addLine("Failed to qualify upgrade candidate. Aborting the upgrade..."); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); + return new DefaultUpgradeStepResult( + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); @@ -67,8 +67,13 @@ private boolean isQualified(Database server, UpgradeContext context) { return true; } context.report().addLine(String.format("-- V2 table has %d rows", v2TableRowCount)); - context.report().addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); - context.report().addLine("-- If V2 table has significantly less rows, consider running the forced upgrade. "); + context + .report() + .addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); + context + .report() + .addLine( + "-- If V2 table has significantly less rows, consider running the forced upgrade. 
"); return false; } context.report().addLine("-- V2 table does not exist"); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java index 8005e31e01c67..ba0a0124545e9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java @@ -7,7 +7,6 @@ import io.ebean.Database; import java.util.function.Function; - // Do we need SQL-tech specific migration paths? public class DeleteAspectTableStep implements UpgradeStep { @@ -34,9 +33,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { _server.execute(_server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect;")); } catch (Exception e) { context.report().addLine("Failed to delete data from legacy table metadata_aspect", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java index 12ff125a05127..5066e05f8bf5a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java @@ -6,10 +6,8 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; -import lombok.extern.slf4j.Slf4j; - import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; // Do we need SQL-tech specific migration paths? @Slf4j @@ -44,9 +42,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { } } catch (Exception e) { context.report().addLine("Failed to delete legacy data from graph", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java index 9a64d5fe1810c..05656373377b9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java @@ -11,7 +11,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; - // Do we need SQL-tech specific migration paths? 
@RequiredArgsConstructor public class DeleteLegacySearchIndicesStep implements UpgradeStep { @@ -20,7 +19,8 @@ public class DeleteLegacySearchIndicesStep implements UpgradeStep { private final RestHighLevelClient _searchClient; - public DeleteLegacySearchIndicesStep(final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public DeleteLegacySearchIndicesStep( + final RestHighLevelClient searchClient, final IndexConvention indexConvention) { _searchClient = searchClient; deletePattern = indexConvention.getPrefix().map(p -> p + "_").orElse("") + "*document*"; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index a5d8d6ce9b666..8a267be6ad808 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -11,15 +11,17 @@ import java.util.List; import org.opensearch.client.RestHighLevelClient; - public class NoCodeCleanupUpgrade implements Upgrade { private final List<UpgradeStep> _steps; private final List<UpgradeCleanupStep> _cleanupSteps; // Upgrade requires the Database. - public NoCodeCleanupUpgrade(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public NoCodeCleanupUpgrade( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); _cleanupSteps = buildCleanupSteps(); } @@ -43,8 +45,11 @@ private List<UpgradeCleanupStep> buildCleanupSteps() { return Collections.emptyList(); } - private List<UpgradeStep> buildUpgradeSteps(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + private List<UpgradeStep> buildUpgradeSteps( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { final List<UpgradeStep> steps = new ArrayList<>(); steps.add(new NoCodeUpgradeQualificationStep(server)); steps.add(new DeleteAspectTableStep(server)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java index 67a226f8f0676..15c7584532e2c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java @@ -8,7 +8,6 @@ import io.ebean.Database; import java.util.function.Function; - public class NoCodeUpgradeQualificationStep implements UpgradeStep { private final Database _server; @@ -33,23 +32,19 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { try { if (!AspectStorageValidationUtil.checkV2TableExists(_server)) { // Unqualified (V2 Table does not exist) - context.report().addLine("You have not successfully migrated yet. Aborting the cleanup..."); + context + .report() + .addLine("You have not successfully migrated yet. 
Aborting the cleanup..."); return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED, - UpgradeStepResult.Action.ABORT); + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } else { // Qualified. context.report().addLine("Found qualified upgrade candidate. Proceeding with upgrade..."); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists: %s", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java index b55d439745e69..7e55dcddc639f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java @@ -11,7 +11,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements UpgradeStep { @@ -33,9 +32,10 @@ public boolean skip(UpgradeContext context) { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return upgradeContext -> { - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, - new HashMap<>(), true); - return (UpgradeStepResult) new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + _entityService.deleteAspect( + TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, new HashMap<>(), true); + return (UpgradeStepResult) + new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java index f8af69dba0865..dc95b7605ef88 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java @@ -8,7 +8,6 @@ import java.util.ArrayList; import java.util.List; - public class RemoveUnknownAspects implements Upgrade { private final List<UpgradeStep> _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java index 0303739e62afe..addf6dcb89c1a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java @@ -8,10 +8,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. 
*/ public class ClearAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index 9175ad606e3c8..b11abb2d6bc23 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -17,7 +17,6 @@ import java.util.ArrayList; import java.util.List; - public class RestoreBackup implements Upgrade { private final List<UpgradeStep> _steps; @@ -29,7 +28,8 @@ public RestoreBackup( final SystemRestliEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { - _steps = buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); + _steps = + buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 42f7f0073e59b..5c4567c856d0e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -21,7 +21,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.util.ArrayList; @@ -35,7 +34,6 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class RestoreStorageStep implements UpgradeStep { private static final int REPORT_BATCH_SIZE = 1000; @@ -43,11 +41,13 @@ public class RestoreStorageStep implements UpgradeStep { private final EntityService _entityService; private final EntityRegistry _entityRegistry; - private final Map<String, Class<? extends BackupReader<? extends ReaderWrapper<?>>>> _backupReaders; + private final Map<String, Class<? extends BackupReader<? extends ReaderWrapper<?>>>> + _backupReaders; private final ExecutorService _fileReaderThreadPool; private final ExecutorService _gmsThreadPool; - public RestoreStorageStep(final EntityService entityService, final EntityRegistry entityRegistry) { + public RestoreStorageStep( + final EntityService entityService, final EntityRegistry entityRegistry) { _entityService = entityService; _entityRegistry = entityRegistry; _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class); @@ -82,7 +82,6 @@ public int retryCount() { @Override public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { - context.report().addLine("Starting backup restore..."); int numRows = 0; Optional<String> backupReaderName = context.parsedArgs().get("BACKUP_READER"); @@ -93,19 +92,32 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } - Class<? extends BackupReader<? extends ReaderWrapper>> clazz = _backupReaders.get(backupReaderName.get()); + Class<? extends BackupReader<? 
extends ReaderWrapper>> clazz = + _backupReaders.get(backupReaderName.get()); List<String> argNames = BackupReaderArgs.getArgNames(clazz); - List<Optional<String>> args = argNames.stream().map(argName -> context.parsedArgs().get(argName)).collect( - Collectors.toList()); + List<Optional<String>> args = + argNames.stream() + .map(argName -> context.parsedArgs().get(argName)) + .collect(Collectors.toList()); BackupReader<? extends ReaderWrapper> backupReader; try { backupReader = clazz.getConstructor(List.class).newInstance(args); - } catch (InstantiationException | InvocationTargetException | IllegalAccessException | NoSuchMethodException e) { + } catch (InstantiationException + | InvocationTargetException + | IllegalAccessException + | NoSuchMethodException e) { e.printStackTrace(); - context.report().addLine("Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); - throw new IllegalArgumentException("Invalid BackupReader: " + clazz.getSimpleName() + ", need to implement proper constructor."); + context + .report() + .addLine( + "Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); + throw new IllegalArgumentException( + "Invalid BackupReader: " + + clazz.getSimpleName() + + ", need to implement proper constructor."); } - EbeanAspectBackupIterator<? extends ReaderWrapper> iterator = backupReader.getBackupIterator(context); + EbeanAspectBackupIterator<? extends ReaderWrapper> iterator = + backupReader.getBackupIterator(context); ReaderWrapper reader; List<Future<?>> futureList = new ArrayList<>(); while ((reader = iterator.getNextReader()) != null) { @@ -138,9 +150,12 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), e); + String.format( + "Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), + e); continue; } @@ -150,8 +165,11 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format("Failed to find Entity with name %s in Entity Registry", entityName), + e); continue; } final String aspectName = aspect.getKey().getAspect(); @@ -160,11 +178,16 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { final RecordTemplate aspectRecord; try { aspectRecord = - EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + EntityUtils.toAspectRecord( + entityName, aspectName, aspect.getMetadata(), _entityRegistry); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to create aspect record with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to create aspect record with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } @@ -173,17 +196,27 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { aspectSpec = entitySpec.getAspectSpec(aspectName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find aspect spec with name %s 
associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } // 5. Write the row back using the EntityService final long version = aspect.getKey().getVersion(); final AuditStamp auditStamp = toAuditStamp(aspect); - futureList.add(_gmsThreadPool.submit(() -> - _entityService.ingestAspects(urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null).get(0).getNewValue())); + futureList.add( + _gmsThreadPool.submit( + () -> + _entityService + .ingestAspects( + urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null) + .get(0) + .getNewValue())); if (numRows % REPORT_BATCH_SIZE == 0) { for (Future<?> future : futureList) { try { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java index 7ea1811adfdd8..212f0da9f592d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java @@ -3,10 +3,10 @@ import com.linkedin.datahub.upgrade.UpgradeContext; import javax.annotation.Nonnull; - /** - * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 object to be - * ingested back into GMS. Must have a constructor that takes a List of Optional Strings + * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 + * objects to be ingested back into GMS. Must have a constructor that takes a List of Optional + * Strings. */ public interface BackupReader<T extends ReaderWrapper> { String getName(); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java index 20f43b5414ddd..6176d56fbec95 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java @@ -4,14 +4,9 @@ import java.util.List; import java.util.Map; - -/** - * Retains a map of what arguments are passed in to a backup reader - */ +/** Retains a map of what arguments are passed in to a backup reader */ public final class BackupReaderArgs { - private BackupReaderArgs() { - - } + private BackupReaderArgs() {} private static final Map<Class<?
extends BackupReader>, List<String>> ARGS_MAP; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java index 3a2505311e245..cce5928277a20 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java @@ -7,10 +7,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * Base interface for iterators that retrieves EbeanAspectV2 objects - * This allows us to restore from backups of various format + * Base interface for iterators that retrieve EbeanAspectV2 objects. This allows us to restore from + * backups of various formats. */ @Slf4j @RequiredArgsConstructor @@ -35,12 +34,13 @@ public T getNextReader() { @Override public void close() { - _readers.forEach(reader -> { - try { - reader.close(); - } catch (IOException e) { - log.error("Error while closing parquet reader", e); - } - }); + _readers.forEach( + reader -> { + try { + reader.close(); + } catch (IOException e) { + log.error("Error while closing parquet reader", e); + } + }); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java index 9b8a3133ac04c..9f0f81f466cfa 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java @@ -14,10 +14,7 @@ import org.apache.parquet.avro.AvroParquetReader; import org.apache.parquet.hadoop.ParquetReader; - -/** - * BackupReader for retrieving EbeanAspectV2 objects from a local parquet file - */ +/** BackupReader for retrieving EbeanAspectV2 objects from a local parquet file */ @Slf4j public class LocalParquetReader implements BackupReader<ParquetReaderWrapper> { @@ -46,16 +43,20 @@ public String getName() { public EbeanAspectBackupIterator<ParquetReaderWrapper> getBackupIterator(UpgradeContext context) { Optional<String> path = context.parsedArgs().get("BACKUP_FILE_PATH"); if (!path.isPresent()) { - context.report().addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); + context + .report() + .addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); throw new IllegalArgumentException( "BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); } try { - ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(new Path(path.get())).build(); - return new EbeanAspectBackupIterator<>(ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); + ParquetReader<GenericRecord> reader = + AvroParquetReader.<GenericRecord>builder(new Path(path.get())).build(); + return new EbeanAspectBackupIterator<>( + ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); } catch (IOException e) { throw new RuntimeException(String.format("Failed to build ParquetReader: %s", e)); } } -} \ No newline at end of file +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java index 2b7cacff65249..01c502221f77f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java @@ -15,9 +15,9 @@ @Slf4j public class ParquetReaderWrapper extends ReaderWrapper<GenericRecord> { - private final static long NANOS_PER_MILLISECOND = 1000000; - private final static long MILLIS_IN_DAY = 86400000; - private final static long JULIAN_EPOCH_OFFSET_DAYS = 2440588; + private static final long NANOS_PER_MILLISECOND = 1000000; + private static final long MILLIS_IN_DAY = 86400000; + private static final long JULIAN_EPOCH_OFFSET_DAYS = 2440588; private final ParquetReader<GenericRecord> _parquetReader; @@ -45,22 +45,30 @@ EbeanAspectV2 convertRecord(GenericRecord record) { ts = (Long) record.get("createdon"); } - return new EbeanAspectV2(record.get("urn").toString(), record.get("aspect").toString(), - (Long) record.get("version"), record.get("metadata").toString(), - Timestamp.from(Instant.ofEpochMilli(ts / 1000)), record.get("createdby").toString(), + return new EbeanAspectV2( + record.get("urn").toString(), + record.get("aspect").toString(), + (Long) record.get("version"), + record.get("metadata").toString(), + Timestamp.from(Instant.ofEpochMilli(ts / 1000)), + record.get("createdby").toString(), Optional.ofNullable(record.get("createdfor")).map(Object::toString).orElse(null), Optional.ofNullable(record.get("systemmetadata")).map(Object::toString).orElse(null)); } private long convertFixed96IntToTs(GenericFixed createdon) { // From https://github.com/apache/parquet-format/pull/49/filesParquetTimestampUtils.java - // and ParquetTimestampUtils.java from https://github.com/kube-reporting/presto/blob/master/presto-parquet/ + // and ParquetTimestampUtils.java from + // https://github.com/kube-reporting/presto/blob/master/presto-parquet/ // src/main/java/io/prestosql/parquet/ParquetTimestampUtils.java byte[] bytes = createdon.bytes(); // little endian encoding - need to invert byte order - long timeOfDayNanos = Longs.fromBytes(bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); + long timeOfDayNanos = + Longs.fromBytes( + bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); int julianDay = Ints.fromBytes(bytes[11], bytes[10], bytes[9], bytes[8]); - return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + (timeOfDayNanos / NANOS_PER_MILLISECOND); + return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + + (timeOfDayNanos / NANOS_PER_MILLISECOND); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java index d0db42e678eea..48d0fa2fda04c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java @@ -5,9 +5,10 @@ import java.io.IOException; import lombok.extern.slf4j.Slf4j; - /** - * Abstract class that reads entries from a given source and transforms then into {@link EbeanAspectV2} instances. 
+ * Abstract class that reads entries from a given source and transforms them into {@link + * EbeanAspectV2} instances. + * * @param <T> The object type to read from a reader source. */ @Slf4j @@ -69,9 +70,15 @@ record = read(); abstract EbeanAspectV2 convertRecord(T record); private void printStat(String prefix) { - log.info("{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," - + " records failed: {}, Total millis in convert: {}", prefix, _fileName, - recordsProcessed, totalTimeSpentInRead / 1000 / 1000, recordsSkipped, recordsFailed, + log.info( + "{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," + + " records failed: {}, Total millis in convert: {}", + prefix, + _fileName, + recordsProcessed, + totalTimeSpentInRead / 1000 / 1000, + recordsSkipped, + recordsFailed, totalTimeSpentInConvert / 1000 / 1000); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 3c0a9762a28c9..8bb3b0073710a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -14,7 +14,6 @@ import java.util.ArrayList; import java.util.List; - public class RestoreIndices implements Upgrade { public static final String BATCH_SIZE_ARG_NAME = "batchSize"; public static final String BATCH_DELAY_MS_ARG_NAME = "batchDelayMs"; @@ -29,8 +28,11 @@ public class RestoreIndices implements Upgrade { private final List<UpgradeStep> _steps; - public RestoreIndices(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + public RestoreIndices( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); } @@ -45,8 +47,11 @@ public List<UpgradeStep> steps() { return _steps; } - private List<UpgradeStep> buildSteps(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + private List<UpgradeStep> buildSteps( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { final List<UpgradeStep> steps = new ArrayList<>(); steps.add(new ClearSearchServiceStep(entitySearchService, false)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index 2ac4fea2e653a..ce59cf2edb84e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.restoreindices; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -11,7 +13,6 @@ import
com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import io.ebean.ExpressionList; - import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -23,9 +24,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.function.Function; -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; @@ -38,19 +36,24 @@ public class SendMAEStep implements UpgradeStep { private final EntityService _entityService; public class KafkaJob implements Callable<RestoreIndicesResult> { - UpgradeContext context; - RestoreIndicesArgs args; - public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { - this.context = context; - this.args = args; - } - @Override - public RestoreIndicesResult call() { - return _entityService.restoreIndices(args, context.report()::addLine); - } + UpgradeContext context; + RestoreIndicesArgs args; + + public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { + this.context = context; + this.args = args; + } + + @Override + public RestoreIndicesResult call() { + return _entityService.restoreIndices(args, context.report()::addLine); + } } - public SendMAEStep(final Database server, final EntityService entityService, final EntityRegistry entityRegistry) { + public SendMAEStep( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry) { _server = server; _entityService = entityService; } @@ -67,7 +70,7 @@ public int retryCount() { private List<RestoreIndicesResult> iterateFutures(List<Future<RestoreIndicesResult>> futures) { List<RestoreIndicesResult> result = new ArrayList<>(); - for (Future<RestoreIndicesResult> future: new ArrayList<>(futures)) { + for (Future<RestoreIndicesResult> future : new ArrayList<>(futures)) { if (future.isDone()) { try { result.add(future.get()); @@ -100,9 +103,10 @@ private RestoreIndicesArgs getArgs(UpgradeContext context) { private int getRowCount(RestoreIndicesArgs args) { ExpressionList<EbeanAspectV2> countExp = - _server.find(EbeanAspectV2.class) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); + _server + .find(EbeanAspectV2.class) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); if (args.aspectName != null) { countExp = countExp.eq(EbeanAspectV2.ASPECT_COLUMN, args.aspectName); } @@ -120,13 +124,18 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { RestoreIndicesResult finalJobResult = new RestoreIndicesResult(); RestoreIndicesArgs args = getArgs(context); - ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); + ThreadPoolExecutor executor = + (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); context.report().addLine("Sending MAE from local DB"); long startTime = System.currentTimeMillis(); final int rowCount = getRowCount(args); - context.report().addLine(String.format("Found %s latest aspects in aspects table in %.2f minutes.", - rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); + context + .report() + .addLine( + String.format( + "Found %s latest aspects in aspects table in %.2f minutes.", + rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); int start = args.start; List<Future<RestoreIndicesResult>> futures = new ArrayList<>(); @@ -139,7 +148,7 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { } while 
(futures.size() > 0) { List<RestoreIndicesResult> tmpResults = iterateFutures(futures); - for (RestoreIndicesResult tmpResult: tmpResults) { + for (RestoreIndicesResult tmpResult : tmpResults) { reportStats(context, finalJobResult, tmpResult, rowCount, startTime); } } @@ -149,16 +158,23 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { if (rowCount > 0) { percentFailed = (float) (rowCount - finalJobResult.rowsMigrated) * 100 / rowCount; } - context.report().addLine(String.format( - "Failed to send MAEs for %d rows (%.2f%% of total).", - rowCount - finalJobResult.rowsMigrated, percentFailed)); + context + .report() + .addLine( + String.format( + "Failed to send MAEs for %d rows (%.2f%% of total).", + rowCount - finalJobResult.rowsMigrated, percentFailed)); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } - private static void reportStats(UpgradeContext context, RestoreIndicesResult finalResult, RestoreIndicesResult tmpResult, - int rowCount, long startTime) { + private static void reportStats( + UpgradeContext context, + RestoreIndicesResult finalResult, + RestoreIndicesResult tmpResult, + int rowCount, + long startTime) { finalResult.ignored += tmpResult.ignored; finalResult.rowsMigrated += tmpResult.rowsMigrated; finalResult.timeSqlQueryMs += tmpResult.timeSqlQueryMs; @@ -178,11 +194,22 @@ private static void reportStats(UpgradeContext context, RestoreIndicesResult fin estimatedTimeMinutesComplete = timeSoFarMinutes * (100 - percentSent) / percentSent; } float totalTimeComplete = timeSoFarMinutes + estimatedTimeMinutesComplete; - context.report().addLine(String.format( - "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", - finalResult.rowsMigrated, rowCount, percentSent, finalResult.ignored, percentIgnored)); - context.report().addLine(String.format("%.2f mins taken. %.2f est. mins to completion. Total mins est. = %.2f.", - timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); + context + .report() + .addLine( + String.format( + "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", + finalResult.rowsMigrated, + rowCount, + percentSent, + finalResult.ignored, + percentIgnored)); + context + .report() + .addLine( + String.format( + "%.2f mins taken. %.2f est. mins to completion. Total mins est. 
= %.2f.", + timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); } private int getBatchSize(final Map<String, Optional<String>> parsedArgs) { @@ -196,7 +223,8 @@ private int getStartingOffset(final Map<String, Optional<String>> parsedArgs) { private long getBatchDelayMs(final Map<String, Optional<String>> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (containsKey(parsedArgs, RestoreIndices.BATCH_DELAY_MS_ARG_NAME)) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } @@ -205,7 +233,8 @@ private int getThreadCount(final Map<String, Optional<String>> parsedArgs) { return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME); } - private int getInt(final Map<String, Optional<String>> parsedArgs, int defaultVal, String argKey) { + private int getInt( + final Map<String, Optional<String>> parsedArgs, int defaultVal, String argKey) { int result = defaultVal; if (containsKey(parsedArgs, argKey)) { result = Integer.parseInt(parsedArgs.get(argKey).get()); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index 4a8211f2cd4ac..aba751bff8177 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -8,47 +8,48 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; import com.linkedin.metadata.dao.producer.KafkaEventProducer; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class SystemUpdate implements Upgrade { - private final List<Upgrade> _preStartupUpgrades; - private final List<Upgrade> _postStartupUpgrades; - private final List<UpgradeStep> _steps; - - public SystemUpdate(final BuildIndices buildIndicesJob, final CleanIndices cleanIndicesJob, - final KafkaEventProducer kafkaEventProducer, final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { - - _preStartupUpgrades = List.of(buildIndicesJob); - _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); - } - - @Override - public String id() { - return "SystemUpdate"; - } - - @Override - public List<UpgradeStep> steps() { - return Stream.concat(Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), - _steps.stream()), - _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) - .collect(Collectors.toList()); - } - - @Override - public List<UpgradeCleanupStep> cleanupSteps() { - return Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), - _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) - .collect(Collectors.toList()); - } + private final List<Upgrade> _preStartupUpgrades; + private final List<Upgrade> _postStartupUpgrades; + private final List<UpgradeStep> _steps; + + public SystemUpdate( + final BuildIndices buildIndicesJob, + final CleanIndices cleanIndicesJob, + final KafkaEventProducer 
kafkaEventProducer, + final String version, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + + _preStartupUpgrades = List.of(buildIndicesJob); + _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); + _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); + } + + @Override + public String id() { + return "SystemUpdate"; + } + + @Override + public List<UpgradeStep> steps() { + return Stream.concat( + Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), _steps.stream()), + _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) + .collect(Collectors.toList()); + } + + @Override + public List<UpgradeCleanupStep> cleanupSteps() { + return Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), + _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) + .collect(Collectors.toList()); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java index 1da5b6d6a25ce..eb76a72fba71a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java @@ -2,9 +2,9 @@ import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPostStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPreStep; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; import com.linkedin.metadata.graph.GraphService; @@ -17,49 +17,54 @@ import java.util.stream.Collectors; import java.util.stream.Stream; - public class BuildIndices implements Upgrade { - private final List<UpgradeStep> _steps; - - public BuildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - - final ConfigurationProvider configurationProvider) { - + private final List<UpgradeStep> _steps; - List<ElasticSearchIndexed> indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); + public BuildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); - } + List<ElasticSearchIndexed> indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof 
ElasticSearchIndexed)
+ .map(service -> (ElasticSearchIndexed) service)
+ .collect(Collectors.toList());
- @Override
- public String id() {
- return "BuildIndices";
- }
+ _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider);
+ }
- @Override
- public List<UpgradeStep> steps() {
- return _steps;
- }
+ @Override
+ public String id() {
+ return "BuildIndices";
+ }
- private List<UpgradeStep> buildSteps(final List<ElasticSearchIndexed> indexedServices,
- final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents,
- final ConfigurationProvider configurationProvider) {
+ @Override
+ public List<UpgradeStep> steps() {
+ return _steps;
+ }
- final List<UpgradeStep> steps = new ArrayList<>();
- // Disable ES write mode/change refresh rate and clone indices
- steps.add(new BuildIndicesPreStep(baseElasticSearchComponents, indexedServices, configurationProvider));
- // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService
- steps.add(new BuildIndicesStep(indexedServices));
- // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in pre-configure step if it already exists?
- steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices));
- return steps;
- }
+ private List<UpgradeStep> buildSteps(
+ final List<ElasticSearchIndexed> indexedServices,
+ final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents
+ baseElasticSearchComponents,
+ final ConfigurationProvider configurationProvider) {
+ final List<UpgradeStep> steps = new ArrayList<>();
+ // Disable ES write mode/change refresh rate and clone indices
+ steps.add(
+ new BuildIndicesPreStep(
+ baseElasticSearchComponents, indexedServices, configurationProvider));
+ // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService
+ steps.add(new BuildIndicesStep(indexedServices));
+ // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in
+ // pre-configure step if it already exists?) 
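+ // The post step re-enables writes by clearing index.blocks.write on each reindexed index
+ // and verifies the setting took effect; see BuildIndicesPostStep later in this patch.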
+ steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); + return steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java index 1fb9c8526ad3b..ad68386622b21 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java @@ -10,41 +10,45 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class CleanIndices implements Upgrade { - private final List<UpgradeStep> _steps; - - public CleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { - - List<ElasticSearchIndexed> indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); - - _steps = List.of(new CleanIndicesStep( + private final List<UpgradeStep> _steps; + + public CleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { + + List<ElasticSearchIndexed> indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); + + _steps = + List.of( + new CleanIndicesStep( baseElasticSearchComponents.getSearchClient(), configurationProvider.getElasticSearch(), indexedServices)); - } + } - @Override - public String id() { - return "CleanIndices"; - } + @Override + public String id() { + return "CleanIndices"; + } - @Override - public List<UpgradeStep> steps() { - return _steps; - } + @Override + public List<UpgradeStep> steps() { + return _steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java index 2feca1f27e625..a44f6d6487067 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static 
com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -13,16 +16,11 @@ import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.client.RequestOptions; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPostStep implements UpgradeStep { @@ -45,8 +43,9 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { try { - List<ReindexConfig> indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List<ReindexConfig> indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); // Reset write blocking @@ -56,12 +55,26 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { request.settings(indexSettings); boolean ack = - _esComponents.getSearchClient().indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); if (ack) { - ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexConfig.name(), false); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + ack = + IndexUtils.validateWriteBlock( + _esComponents.getSearchClient(), indexConfig.name(), false); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); } if (!ack) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java index 82b9428c89fb8..c25888be07f89 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -8,15 +11,13 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; - +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.shared.ElasticSearchIndexed; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - -import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.OpenSearchStatusException; @@ -24,10 +25,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.indices.ResizeRequest; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPreStep implements UpgradeStep { @@ -50,16 +47,19 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { try { // Get indices to update - List<ReindexConfig> indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List<ReindexConfig> indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); for (ReindexConfig indexConfig : indexConfigs) { - String indexName = IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); + String indexName = + IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); boolean ack = blockWrites(indexName); if (!ack) { - log.error("Partial index settings update, some indices may still be blocking writes." + log.error( + "Partial index settings update, some indices may still be blocking writes." 
+ " Please fix the error and re-run the BuildIndices upgrade job."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -69,10 +69,16 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { String clonedName = indexConfig.name() + "_clone_" + System.currentTimeMillis(); ResizeRequest resizeRequest = new ResizeRequest(clonedName, indexName); boolean cloneAck = - _esComponents.getSearchClient().indices().clone(resizeRequest, RequestOptions.DEFAULT).isAcknowledged(); + _esComponents + .getSearchClient() + .indices() + .clone(resizeRequest, RequestOptions.DEFAULT) + .isAcknowledged(); log.info("Cloned index {} into {}, Acknowledged: {}", indexName, clonedName, cloneAck); if (!cloneAck) { - log.error("Partial index settings update, cloned indices may need to be cleaned up: {}", clonedName); + log.error( + "Partial index settings update, cloned indices may need to be cleaned up: {}", + clonedName); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } @@ -85,8 +91,6 @@ public Function<UpgradeContext, UpgradeStepResult> executable() { }; } - - private boolean blockWrites(String indexName) throws InterruptedException, IOException { UpdateSettingsRequest request = new UpdateSettingsRequest(indexName); Map<String, Object> indexSettings = ImmutableMap.of(INDEX_BLOCKS_WRITE_SETTING, "true"); @@ -94,13 +98,23 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc request.settings(indexSettings); boolean ack; try { - ack = _esComponents.getSearchClient().indices() - .putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + ack = + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } catch (OpenSearchStatusException | IOException ese) { - // Cover first run case, indices won't exist so settings updates won't work nor will the rest of the preConfigure steps. + // Cover first run case, indices won't exist so settings updates won't work nor will the rest + // of the preConfigure steps. // Since no data are in there they are skippable. - // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way to extract it :( + // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way + // to extract it :( if (ese.getMessage().contains("index_not_found")) { return true; } else { @@ -110,7 +124,11 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc if (ack) { ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexName, true); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } return ack; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java index ef59f2998929e..d37ee173bd9af 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java @@ -5,13 +5,11 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.shared.ElasticSearchIndexed; - import java.util.List; import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class BuildIndicesStep implements UpgradeStep { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java index bb042bac6df95..c3a4d8ab89c07 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java @@ -1,54 +1,55 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.shared.ElasticSearchIndexed; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RestHighLevelClient; - import java.util.List; import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; +import org.opensearch.client.RestHighLevelClient; @Slf4j public class CleanIndicesStep implements UpgradeStep { - private final RestHighLevelClient searchClient; - private final ElasticSearchConfiguration esConfig; - private final List<ElasticSearchIndexed> indexedServices; - - public CleanIndicesStep(final RestHighLevelClient searchClient, final ElasticSearchConfiguration esConfig, - final List<ElasticSearchIndexed> indexedServices) { - this.searchClient = searchClient; - this.esConfig = esConfig; - this.indexedServices = indexedServices; - } - - @Override - public String id() { - return "CleanUpIndicesStep"; - } - - @Override - public int retryCount() { - return 0; - } - - @Override - public Function<UpgradeContext, UpgradeStepResult> executable() { - return (context) -> { - try { - IndexUtils.getAllReindexConfigs(indexedServices) - .forEach(reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); - } catch (Exception e) { - log.error("CleanUpIndicesStep failed.", e); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); - } - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); - }; - } + private final 
RestHighLevelClient searchClient; + private final ElasticSearchConfiguration esConfig; + private final List<ElasticSearchIndexed> indexedServices; + + public CleanIndicesStep( + final RestHighLevelClient searchClient, + final ElasticSearchConfiguration esConfig, + final List<ElasticSearchIndexed> indexedServices) { + this.searchClient = searchClient; + this.esConfig = esConfig; + this.indexedServices = indexedServices; + } + + @Override + public String id() { + return "CleanUpIndicesStep"; + } + + @Override + public int retryCount() { + return 0; + } + + @Override + public Function<UpgradeContext, UpgradeStepResult> executable() { + return (context) -> { + try { + IndexUtils.getAllReindexConfigs(indexedServices) + .forEach( + reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); + } catch (Exception e) { + log.error("CleanUpIndicesStep failed.", e); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java index 1e568f1e9a9fe..b4a506c3f5c63 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java @@ -6,12 +6,10 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; +import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.function.Function; - - @RequiredArgsConstructor @Slf4j public class DataHubStartupStep implements UpgradeStep { @@ -32,8 +30,8 @@ public int retryCount() { public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { try { - DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = new DataHubUpgradeHistoryEvent() - .setVersion(_version); + DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = + new DataHubUpgradeHistoryEvent().setVersion(_version); _kafkaEventProducer.produceDataHubUpgradeHistoryEvent(dataHubUpgradeHistoryEvent); log.info("Initiating startup for version: {}", _version); } catch (Exception e) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java index d9788448444ed..b3de7c503fb3e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java @@ -2,6 +2,10 @@ import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -11,22 +15,18 @@ import org.opensearch.client.RequestOptions; import 
org.opensearch.client.RestHighLevelClient; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - - @Slf4j public class IndexUtils { public static final String INDEX_BLOCKS_WRITE_SETTING = "index.blocks.write"; public static final int INDEX_BLOCKS_WRITE_RETRY = 4; public static final int INDEX_BLOCKS_WRITE_WAIT_SECONDS = 10; - private IndexUtils() { } + + private IndexUtils() {} private static List<ReindexConfig> _reindexConfigs = new ArrayList<>(); - public static List<ReindexConfig> getAllReindexConfigs(List<ElasticSearchIndexed> elasticSearchIndexedList) throws IOException { + public static List<ReindexConfig> getAllReindexConfigs( + List<ElasticSearchIndexed> elasticSearchIndexedList) throws IOException { // Avoid locking & reprocessing List<ReindexConfig> reindexConfigs = new ArrayList<>(_reindexConfigs); if (reindexConfigs.isEmpty()) { @@ -39,19 +39,24 @@ public static List<ReindexConfig> getAllReindexConfigs(List<ElasticSearchIndexed return reindexConfigs; } - public static boolean validateWriteBlock(RestHighLevelClient esClient, String indexName, boolean expectedState) - throws IOException, InterruptedException { + public static boolean validateWriteBlock( + RestHighLevelClient esClient, String indexName, boolean expectedState) + throws IOException, InterruptedException { final String finalIndexName = resolveAlias(esClient, indexName); - GetSettingsRequest request = new GetSettingsRequest() + GetSettingsRequest request = + new GetSettingsRequest() .indices(finalIndexName) .names(INDEX_BLOCKS_WRITE_SETTING) .includeDefaults(true); int count = INDEX_BLOCKS_WRITE_RETRY; while (count > 0) { - GetSettingsResponse response = esClient.indices().getSettings(request, RequestOptions.DEFAULT); - if (response.getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING).equals(String.valueOf(expectedState))) { + GetSettingsResponse response = + esClient.indices().getSettings(request, RequestOptions.DEFAULT); + if (response + .getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING) + .equals(String.valueOf(expectedState))) { return true; } count = count - 1; @@ -64,20 +69,20 @@ public static boolean validateWriteBlock(RestHighLevelClient esClient, String in return false; } - public static String resolveAlias(RestHighLevelClient esClient, String indexName) throws IOException { + public static String resolveAlias(RestHighLevelClient esClient, String indexName) + throws IOException { String finalIndexName = indexName; - GetAliasesResponse aliasResponse = esClient.indices() - .getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); + GetAliasesResponse aliasResponse = + esClient.indices().getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); if (!aliasResponse.getAliases().isEmpty()) { Set<String> indices = aliasResponse.getAliases().keySet(); if (indices.size() != 1) { throw new NotImplementedException( - String.format("Clone not supported for %s indices in alias %s. Indices: %s", - indices.size(), - indexName, - String.join(",", indices))); + String.format( + "Clone not supported for %s indices in alias %s. 
Indices: %s", + indices.size(), indexName, String.join(",", indices))); } finalIndexName = indices.stream().findFirst().get(); log.info("Alias {} resolved to index {}", indexName, finalIndexName); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java index e213c0b2fd4de..03f0b0b7f2ec2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.search.SearchService; import java.util.List; - public class BackfillBrowsePathsV2 implements Upgrade { private final List<UpgradeStep> _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java index 08a752d9597f4..610d9069337a5 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.system.entity.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -27,32 +29,29 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.util.Set; import java.util.function.Function; import lombok.extern.slf4j.Slf4j; -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class BackfillBrowsePathsV2Step implements UpgradeStep { public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; - public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; + public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = + "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; - private static final Set<String> ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); + private static final Set<String> ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); private static final Integer BATCH_SIZE = 5000; private final EntityService _entityService; @@ -67,14 +66,18 @@ public BackfillBrowsePathsV2Step(EntityService entityService, SearchService sear public Function<UpgradeContext, UpgradeStepResult> executable() { return (context) -> { final AuditStamp auditStamp = - new 
AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); String scrollId = null; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", migratedCount, - migratedCount + BATCH_SIZE, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); migratedCount += BATCH_SIZE; } while (scrollId != null); @@ -88,22 +91,26 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S final Filter filter; if (System.getenv().containsKey(REPROCESS_DEFAULT_BROWSE_PATHS_V2) - && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { + && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { filter = backfillDefaultBrowsePathsV2Filter(); - } else { + } else { filter = backfillBrowsePathsV2Filter(); } - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - null, - BATCH_SIZE, - new SearchFlags().setFulltext(true).setSkipCache(true).setSkipHighlighting(true).setSkipAggregates(true) - ); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + ImmutableList.of(entityType), + "*", + filter, + null, + scrollId, + null, + BATCH_SIZE, + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -113,7 +120,11 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); } catch (Exception e) { // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); } } @@ -177,13 +188,10 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - true - ); + _entityService.ingestProposal(proposal, auditStamp, true); } @Override @@ -192,7 +200,8 @@ public String id() { } /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. 
*/ @Override public boolean isOptional() { @@ -204,4 +213,3 @@ public boolean skip(UpgradeContext context) { return !Boolean.parseBoolean(System.getenv(BACKFILL_BROWSE_PATHS_V2)); } } - diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java index db697a40d0c6c..83b8e028727ce 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java @@ -1,70 +1,73 @@ package com.linkedin.datahub.upgrade; +import static org.testng.AssertJUnit.assertEquals; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.datahub.upgrade.system.SystemUpdate; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import static org.testng.AssertJUnit.assertEquals; -import static org.testng.AssertJUnit.assertNotNull; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, - properties = { - "kafka.schemaRegistry.type=INTERNAL", - "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" - }) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, + properties = { + "kafka.schemaRegistry.type=INTERNAL", + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" + }) public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("systemUpdate") - private SystemUpdate systemUpdate; - - @Test - public void testSystemUpdateInit() { - assertNotNull(systemUpdate); - } + @Autowired + @Named("systemUpdate") + private SystemUpdate systemUpdate; - @Test - public void testSystemUpdateSend() { - UpgradeStepResult.Result result = systemUpdate.steps().stream() - .filter(s -> s.id().equals("DataHubStartupStep")) - .findFirst().get() - .executable().apply(new UpgradeContext() { - @Override - public Upgrade upgrade() { - return null; - } + @Test + public void testSystemUpdateInit() { + assertNotNull(systemUpdate); + } - @Override - public List<UpgradeStepResult> stepResults() { - return null; - } + @Test + public void testSystemUpdateSend() { + UpgradeStepResult.Result result = + systemUpdate.steps().stream() + .filter(s -> s.id().equals("DataHubStartupStep")) + .findFirst() + .get() + .executable() + .apply( + new UpgradeContext() { + @Override + public Upgrade upgrade() { + return null; + } - @Override - public UpgradeReport report() { - return null; - } + @Override + public List<UpgradeStepResult> stepResults() { + return null; + } - @Override - public List<String> args() { - return null; - } + @Override + public UpgradeReport report() { + return null; + } - @Override - public Map<String, Optional<String>> parsedArgs() { - return null; - } - }).result(); - assertEquals("SUCCEEDED", result.toString()); - } + @Override + public List<String> args() { + return null; + } + @Override + public Map<String, 
Optional<String>> parsedArgs() { + return null; + } + }) + .result(); + assertEquals("SUCCEEDED", result.toString()); + } } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java index 74cde414adc2f..3e655be900bf2 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java @@ -1,49 +1,48 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import static org.testng.AssertJUnit.*; + import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import static org.testng.AssertJUnit.*; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("restoreIndices") - private RestoreIndices restoreIndices; - - @Autowired - @Named("buildIndices") - private BuildIndices buildIndices; - - @Autowired - private ESIndexBuilder esIndexBuilder; - - @Test - public void testRestoreIndicesInit() { - /* - This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean - */ - assertTrue(restoreIndices.steps().size() >= 3); - } - - @Test - public void testBuildIndicesInit() { - assertEquals("BuildIndices", buildIndices.id()); - assertTrue(buildIndices.steps().size() >= 3); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); - assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); - assertFalse(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); - } - + @Autowired + @Named("restoreIndices") + private RestoreIndices restoreIndices; + + @Autowired + @Named("buildIndices") + private BuildIndices buildIndices; + + @Autowired private ESIndexBuilder esIndexBuilder; + + @Test + public void testRestoreIndicesInit() { + /* + This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean + */ + assertTrue(restoreIndices.steps().size() >= 3); + } + + @Test + public void testBuildIndicesInit() { + assertEquals("BuildIndices", buildIndices.id()); + assertTrue(buildIndices.steps().size() >= 3); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); + assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); + assertFalse( + esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); + 
} } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index 6cc853b2c7c4d..0e7bf5ddd5250 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java @@ -16,27 +16,19 @@ @Import(value = {SystemAuthenticationFactory.class}) public class UpgradeCliApplicationTestConfiguration { - @MockBean - private UpgradeCli upgradeCli; + @MockBean private UpgradeCli upgradeCli; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityService _entityService; + @MockBean private EntityService _entityService; - @MockBean - private SearchService searchService; + @MockBean private SearchService searchService; - @MockBean - private GraphService graphService; + @MockBean private GraphService graphService; - @MockBean - private EntityRegistry entityRegistry; + @MockBean private EntityRegistry entityRegistry; - @MockBean - ConfigEntityRegistry configEntityRegistry; + @MockBean ConfigEntityRegistry configEntityRegistry; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/docker/build.gradle b/docker/build.gradle index c7f783af6c997..bc79be501b395 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -158,4 +158,4 @@ task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { ] + pg_compose_args commandLine 'bash', '-c', cmd.join(" ") -} +} \ No newline at end of file diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java index 015a0a9a0f14a..9cf8b4174ecfb 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java @@ -27,12 +27,11 @@ public class AspectSpec { // Classpath & Pegasus-specific: Temporary. 
private final RecordDataSchema _schema; private final Class<RecordTemplate> _aspectClass; - @Setter @Getter - private String registryName = "unknownRegistry"; - @Setter @Getter - private ComparableVersion registryVersion = new ComparableVersion("0.0.0.0-dev"); + @Setter @Getter private String registryName = "unknownRegistry"; + @Setter @Getter private ComparableVersion registryVersion = new ComparableVersion("0.0.0.0-dev"); - public AspectSpec(@Nonnull final AspectAnnotation aspectAnnotation, + public AspectSpec( + @Nonnull final AspectAnnotation aspectAnnotation, @Nonnull final List<SearchableFieldSpec> searchableFieldSpecs, @Nonnull final List<SearchScoreFieldSpec> searchScoreFieldSpecs, @Nonnull final List<RelationshipFieldSpec> relationshipFieldSpecs, @@ -41,18 +40,35 @@ public AspectSpec(@Nonnull final AspectAnnotation aspectAnnotation, final RecordDataSchema schema, final Class<RecordTemplate> aspectClass) { _aspectAnnotation = aspectAnnotation; - _searchableFieldSpecs = searchableFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _searchScoreFieldSpecs = searchScoreFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _relationshipFieldSpecs = relationshipFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _timeseriesFieldSpecs = timeseriesFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getTimeseriesFieldAnnotation().getStatName(), spec -> spec, - (val1, val2) -> val1)); - _timeseriesFieldCollectionSpecs = timeseriesFieldCollectionSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getTimeseriesFieldCollectionAnnotation().getCollectionName(), spec -> spec, - (val1, val2) -> val1)); + _searchableFieldSpecs = + searchableFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _searchScoreFieldSpecs = + searchScoreFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _relationshipFieldSpecs = + relationshipFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _timeseriesFieldSpecs = + timeseriesFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getTimeseriesFieldAnnotation().getStatName(), + spec -> spec, + (val1, val2) -> val1)); + _timeseriesFieldCollectionSpecs = + timeseriesFieldCollectionSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getTimeseriesFieldCollectionAnnotation().getCollectionName(), + spec -> spec, + (val1, val2) -> val1)); _schema = schema; _aspectClass = aspectClass; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java index 766944e150390..b235e2adcae11 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java @@ -10,10 +10,8 @@ import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; - import lombok.ToString; - @ToString public class ConfigEntitySpec implements EntitySpec { @@ -26,7 +24,8 @@ public ConfigEntitySpec( @Nonnull final String entityName, @Nonnull final String keyAspect, @Nonnull final 
Collection<AspectSpec> aspectSpecs) { - _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = new EntityAnnotation(entityName, keyAspect); } @@ -72,12 +71,14 @@ public AspectSpec getAspectSpec(final String name) { @Override public RecordDataSchema getSnapshotSchema() { - throw new UnsupportedOperationException("Failed to find Snapshot associated with Config-based Entity"); + throw new UnsupportedOperationException( + "Failed to find Snapshot associated with Config-based Entity"); } @Override public TyperefDataSchema getAspectTyperefSchema() { - throw new UnsupportedOperationException("Failed to find Typeref schema associated with Config-based Entity"); + throw new UnsupportedOperationException( + "Failed to find Typeref schema associated with Config-based Entity"); } @Override @@ -89,4 +90,3 @@ public List<SearchableFieldSpec> getSearchableFieldSpecs() { return _searchableFieldSpecs; } } - diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java index ddc87aacc72cf..b9766d0ca8640 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java @@ -23,7 +23,6 @@ import lombok.extern.slf4j.Slf4j; import org.reflections.Reflections; - /** * Factory class to get a map of all entity schemas and aspect schemas under com.linkedin package * This lets us fetch the PDL data schema of an arbitrary entity or aspect based on their names @@ -39,22 +38,25 @@ public class DataSchemaFactory { private static final String NAME_FIELD = "name"; private static final DataSchemaFactory INSTANCE = new DataSchemaFactory(); - private static final String[] DEFAULT_TOP_LEVEL_NAMESPACES = new String[]{"com", "org", "io", "datahub"}; + private static final String[] DEFAULT_TOP_LEVEL_NAMESPACES = + new String[] {"com", "org", "io", "datahub"}; public DataSchemaFactory() { - this(new String[]{"com.linkedin", "com.datahub"}); + this(new String[] {"com.linkedin", "com.datahub"}); } public DataSchemaFactory(String classPath) { - this(new String[]{classPath}); + this(new String[] {classPath}); } + public DataSchemaFactory(String[] classPaths) { this(classPaths, null); } /** - * Construct a DataSchemaFactory with classes and schemas found under a specific folder. - * This will only look for classes under the `com`, `org` or `datahub` top level namespaces. + * Construct a DataSchemaFactory with classes and schemas found under a specific folder. This will + * only look for classes under the `com`, `org` or `datahub` top level namespaces. + * * @param pluginLocation The location of the classes and schema files. */ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws IOException { @@ -66,15 +68,18 @@ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws File pluginDir = pluginLocation.toFile(); if (!pluginDir.exists()) { throw new RuntimeException( - "Failed to find plugin directory " + pluginDir.getAbsolutePath() + ". Current directory is " + new File( - ".").getAbsolutePath()); + "Failed to find plugin directory " + + pluginDir.getAbsolutePath() + + ". 
Current directory is " + + new File(".").getAbsolutePath()); } List<URL> urls = new ArrayList<URL>(); if (pluginDir.isDirectory()) { - List<Path> jarFiles = Files.walk(pluginLocation) - .filter(Files::isRegularFile) - .filter(p -> p.toString().endsWith(".jar")) - .collect(Collectors.toList()); + List<Path> jarFiles = + Files.walk(pluginLocation) + .filter(Files::isRegularFile) + .filter(p -> p.toString().endsWith(".jar")) + .collect(Collectors.toList()); for (Path f : jarFiles) { URL url = f.toUri().toURL(); if (url != null) { @@ -87,12 +92,14 @@ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws } URL[] urlsArray = new URL[urls.size()]; urls.toArray(urlsArray); - URLClassLoader classLoader = new URLClassLoader(urlsArray, Thread.currentThread().getContextClassLoader()); + URLClassLoader classLoader = + new URLClassLoader(urlsArray, Thread.currentThread().getContextClassLoader()); return new DataSchemaFactory(DEFAULT_TOP_LEVEL_NAMESPACES, classLoader); } /** - * Construct a DataSchemaFactory with a custom class loader and a list of class namespaces to look for entities and aspects. + * Construct a DataSchemaFactory with a custom class loader and a list of class namespaces to look + * for entities and aspects. */ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader) { entitySchemas = new HashMap<>(); @@ -120,7 +127,8 @@ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader Reflections reflections = new Reflections(namespace, standardClassLoader); stdClasses.addAll(reflections.getSubTypesOf(RecordTemplate.class)); } - log.debug("Standard ClassLoader found a total of {} RecordTemplate classes", stdClasses.size()); + log.debug( + "Standard ClassLoader found a total of {} RecordTemplate classes", stdClasses.size()); classes.removeAll(stdClasses); log.debug("Finally found a total of {} RecordTemplate classes to inspect", classes.size()); } @@ -135,15 +143,19 @@ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader if (schema != null) { DataSchema finalSchema = schema; - getName(schema, EntityAnnotation.ANNOTATION_NAME).ifPresent( - entityName -> entitySchemas.put(entityName, finalSchema)); - getName(schema, AspectAnnotation.ANNOTATION_NAME).ifPresent(aspectName -> { - aspectSchemas.put(aspectName, finalSchema); - aspectClasses.put(aspectName, recordClass); - }); - getName(schema, EventAnnotation.ANNOTATION_NAME).ifPresent(eventName -> { - eventSchemas.put(eventName, finalSchema); - }); + getName(schema, EntityAnnotation.ANNOTATION_NAME) + .ifPresent(entityName -> entitySchemas.put(entityName, finalSchema)); + getName(schema, AspectAnnotation.ANNOTATION_NAME) + .ifPresent( + aspectName -> { + aspectSchemas.put(aspectName, finalSchema); + aspectClasses.put(aspectName, recordClass); + }); + getName(schema, EventAnnotation.ANNOTATION_NAME) + .ifPresent( + eventName -> { + eventSchemas.put(eventName, finalSchema); + }); } } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java index 9a083660d1023..5db8ca264f69d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java @@ -11,7 +11,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.ToString; @ToString @@ -31,7 +30,8 @@ 
public DefaultEntitySpec( @Nonnull final EntityAnnotation entityAnnotation, @Nonnull final RecordDataSchema snapshotSchema, @Nullable final TyperefDataSchema aspectTyperefSchema) { - _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = entityAnnotation; _snapshotSchema = snapshotSchema; _aspectTyperefSchema = aspectTyperefSchema; @@ -102,5 +102,4 @@ public List<SearchableFieldSpec> getSearchableFieldSpecs() { return _searchableFieldSpecs; } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java index 7f7c1004aeddb..31b73e6cc9e5e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java @@ -5,7 +5,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor @Getter public class DefaultEventSpec implements EventSpec { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java index 4bdb8e37d565f..e4c9dd55a3b4a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java @@ -7,10 +7,7 @@ import java.util.Map; import java.util.stream.Collectors; - -/** - * A specification of a DataHub Entity - */ +/** A specification of a DataHub Entity */ public interface EntitySpec { String getName(); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java index 37cb9eabc09da..580134f566871 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java @@ -28,7 +28,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EntitySpecBuilder { @@ -61,7 +60,8 @@ public EntitySpecBuilder(final AnnotationExtractionMode extractionMode) { public List<EntitySpec> buildEntitySpecs(@Nonnull final DataSchema snapshotSchema) { - final UnionDataSchema snapshotUnionSchema = (UnionDataSchema) snapshotSchema.getDereferencedDataSchema(); + final UnionDataSchema snapshotUnionSchema = + (UnionDataSchema) snapshotSchema.getDereferencedDataSchema(); final List<UnionDataSchema.Member> unionMembers = snapshotUnionSchema.getMembers(); final List<EntitySpec> entitySpecs = new ArrayList<>(); @@ -73,16 +73,19 @@ public List<EntitySpec> buildEntitySpecs(@Nonnull final DataSchema snapshotSchem } // Now validate that all relationships point to valid entities. - // TODO: Fix this so that aspects that are just in the entity registry don't fail because they aren't in the + // TODO: Fix this so that aspects that are just in the entity registry don't fail because they + // aren't in the // snapshot registry. -// for (final RelationshipFieldSpec spec : _relationshipFieldSpecs) { -// if (!_entityNames.containsAll( -// spec.getValidDestinationTypes().stream().map(String::toLowerCase).collect(Collectors.toList()))) { -// failValidation( -// String.format("Found invalid relationship with name %s at path %s. 
Invalid entityType(s) provided.", -// spec.getRelationshipName(), spec.getPath())); -// } -// } + // for (final RelationshipFieldSpec spec : _relationshipFieldSpecs) { + // if (!_entityNames.containsAll( + // + // spec.getValidDestinationTypes().stream().map(String::toLowerCase).collect(Collectors.toList()))) { + // failValidation( + // String.format("Found invalid relationship with name %s at path %s. Invalid + // entityType(s) provided.", + // spec.getRelationshipName(), spec.getPath())); + // } + // } return entitySpecs; } @@ -93,17 +96,21 @@ public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema final RecordDataSchema entitySnapshotRecordSchema = validateSnapshot(entitySnapshotSchema); // 1. Parse information about the entity from the "entity" annotation. - final Object entityAnnotationObj = entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { EntityAnnotation entityAnnotation = - EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); + EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); final ArrayDataSchema aspectArraySchema = - (ArrayDataSchema) entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME) - .getType() - .getDereferencedDataSchema(); + (ArrayDataSchema) + entitySnapshotRecordSchema + .getField(ASPECTS_FIELD_NAME) + .getType() + .getDereferencedDataSchema(); final UnionDataSchema aspectUnionSchema = (UnionDataSchema) aspectArraySchema.getItems().getDereferencedDataSchema(); @@ -113,111 +120,147 @@ public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema for (final UnionDataSchema.Member member : unionMembers) { NamedDataSchema namedDataSchema = (NamedDataSchema) member.getType(); try { - final AspectSpec spec = buildAspectSpec(member.getType(), - (Class<RecordTemplate>) Class.forName(namedDataSchema.getFullName()).asSubclass(RecordTemplate.class)); + final AspectSpec spec = + buildAspectSpec( + member.getType(), + (Class<RecordTemplate>) + Class.forName(namedDataSchema.getFullName()) + .asSubclass(RecordTemplate.class)); aspectSpecs.add(spec); } catch (ClassNotFoundException ce) { log.warn("Failed to find class for {}", member.getType(), ce); } } - final EntitySpec entitySpec = new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema, - (TyperefDataSchema) aspectArraySchema.getItems()); + final EntitySpec entitySpec = + new DefaultEntitySpec( + aspectSpecs, + entityAnnotation, + entitySnapshotRecordSchema, + (TyperefDataSchema) aspectArraySchema.getItems()); validateEntitySpec(entitySpec); return entitySpec; } - failValidation(String.format("Could not build entity spec for entity with name %s. Missing @%s annotation.", - entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); + failValidation( + String.format( + "Could not build entity spec for entity with name %s. Missing @%s annotation.", + entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); return null; } - public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema, - @Nonnull final List<AspectSpec> aspectSpecs) { + public EntitySpec buildEntitySpec( + @Nonnull final DataSchema entitySnapshotSchema, @Nonnull final List<AspectSpec> aspectSpecs) { // 0. 
Validate the Snapshot definition final RecordDataSchema entitySnapshotRecordSchema = validateSnapshot(entitySnapshotSchema); // 1. Parse information about the entity from the "entity" annotation. - final Object entityAnnotationObj = entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { EntityAnnotation entityAnnotation = - EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); + EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); - final EntitySpec entitySpec = new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema); + final EntitySpec entitySpec = + new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema); validateEntitySpec(entitySpec); return entitySpec; } - failValidation(String.format("Could not build entity spec for entity with name %s. Missing @%s annotation.", - entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); + failValidation( + String.format( + "Could not build entity spec for entity with name %s. Missing @%s annotation.", + entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); return null; } - /** - * Build a config-based {@link EntitySpec}, as opposed to a Snapshot-based {@link EntitySpec} - */ - public EntitySpec buildConfigEntitySpec(@Nonnull final String entityName, @Nonnull final String keyAspect, + /** Build a config-based {@link EntitySpec}, as opposed to a Snapshot-based {@link EntitySpec} */ + public EntitySpec buildConfigEntitySpec( + @Nonnull final String entityName, + @Nonnull final String keyAspect, @Nonnull final List<AspectSpec> aspectSpecs) { return new ConfigEntitySpec(entityName, keyAspect, aspectSpecs); } - public EntitySpec buildPartialEntitySpec(@Nonnull final String entityName, @Nullable final String keyAspectName, + public EntitySpec buildPartialEntitySpec( + @Nonnull final String entityName, + @Nullable final String keyAspectName, @Nonnull final List<AspectSpec> aspectSpecs) { - EntitySpec entitySpec = new PartialEntitySpec(aspectSpecs, new EntityAnnotation(entityName, keyAspectName)); + EntitySpec entitySpec = + new PartialEntitySpec(aspectSpecs, new EntityAnnotation(entityName, keyAspectName)); return entitySpec; } - public AspectSpec buildAspectSpec(@Nonnull final DataSchema aspectDataSchema, - final Class<RecordTemplate> aspectClass) { + public AspectSpec buildAspectSpec( + @Nonnull final DataSchema aspectDataSchema, final Class<RecordTemplate> aspectClass) { final RecordDataSchema aspectRecordSchema = validateAspect(aspectDataSchema); - final Object aspectAnnotationObj = aspectRecordSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); + final Object aspectAnnotationObj = + aspectRecordSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); if (aspectAnnotationObj != null) { final AspectAnnotation aspectAnnotation = - AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectRecordSchema.getFullName()); + AspectAnnotation.fromSchemaProperty( + aspectAnnotationObj, aspectRecordSchema.getFullName()); if (AnnotationExtractionMode.IGNORE_ASPECT_FIELDS.equals(_extractionMode)) { // Short Circuit. 
- return new AspectSpec(aspectAnnotation, Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), aspectRecordSchema, aspectClass); + return new AspectSpec( + aspectAnnotation, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + aspectRecordSchema, + aspectClass); } final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedSearchResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_searchHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_searchHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract Searchable Field Specs - final SearchableFieldSpecExtractor searchableFieldSpecExtractor = new SearchableFieldSpecExtractor(); + final SearchableFieldSpecExtractor searchableFieldSpecExtractor = + new SearchableFieldSpecExtractor(); final DataSchemaRichContextTraverser searchableFieldSpecTraverser = new DataSchemaRichContextTraverser(searchableFieldSpecExtractor); searchableFieldSpecTraverser.traverse(processedSearchResult.getResultSchema()); final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedSearchScoreResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_searchScoreHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_searchScoreHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract SearchScore Field Specs - final SearchScoreFieldSpecExtractor searchScoreFieldSpecExtractor = new SearchScoreFieldSpecExtractor(); + final SearchScoreFieldSpecExtractor searchScoreFieldSpecExtractor = + new SearchScoreFieldSpecExtractor(); final DataSchemaRichContextTraverser searcScoreFieldSpecTraverser = new DataSchemaRichContextTraverser(searchScoreFieldSpecExtractor); searcScoreFieldSpecTraverser.traverse(processedSearchScoreResult.getResultSchema()); final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedRelationshipResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_relationshipHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_relationshipHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract Relationship Field Specs - final RelationshipFieldSpecExtractor relationshipFieldSpecExtractor = new RelationshipFieldSpecExtractor(); + final RelationshipFieldSpecExtractor relationshipFieldSpecExtractor = + new RelationshipFieldSpecExtractor(); final DataSchemaRichContextTraverser relationshipFieldSpecTraverser = new DataSchemaRichContextTraverser(relationshipFieldSpecExtractor); relationshipFieldSpecTraverser.traverse(processedRelationshipResult.getResultSchema()); @@ -227,23 +270,33 @@ public AspectSpec buildAspectSpec(@Nonnull final DataSchema aspectDataSchema, final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedTimeseriesFieldResult = SchemaAnnotationProcessor.process( - ImmutableList.of(_timeseriesFiledAnnotationHandler, _timeseriesFieldCollectionHandler), - aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); + ImmutableList.of( + _timeseriesFiledAnnotationHandler, _timeseriesFieldCollectionHandler), + aspectRecordSchema, + new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract TimeseriesField/ TimeseriesFieldCollection Specs - final 
TimeseriesFieldSpecExtractor timeseriesFieldSpecExtractor = new TimeseriesFieldSpecExtractor(); + final TimeseriesFieldSpecExtractor timeseriesFieldSpecExtractor = + new TimeseriesFieldSpecExtractor(); final DataSchemaRichContextTraverser timeseriesFieldSpecTraverser = new DataSchemaRichContextTraverser(timeseriesFieldSpecExtractor); timeseriesFieldSpecTraverser.traverse(processedTimeseriesFieldResult.getResultSchema()); - return new AspectSpec(aspectAnnotation, searchableFieldSpecExtractor.getSpecs(), - searchScoreFieldSpecExtractor.getSpecs(), relationshipFieldSpecExtractor.getSpecs(), + return new AspectSpec( + aspectAnnotation, + searchableFieldSpecExtractor.getSpecs(), + searchScoreFieldSpecExtractor.getSpecs(), + relationshipFieldSpecExtractor.getSpecs(), timeseriesFieldSpecExtractor.getTimeseriesFieldSpecs(), - timeseriesFieldSpecExtractor.getTimeseriesFieldCollectionSpecs(), aspectRecordSchema, aspectClass); + timeseriesFieldSpecExtractor.getTimeseriesFieldCollectionSpecs(), + aspectRecordSchema, + aspectClass); } - failValidation(String.format("Could not build aspect spec for aspect with name %s. Missing @Aspect annotation.", - aspectRecordSchema.getName())); + failValidation( + String.format( + "Could not build aspect spec for aspect with name %s. Missing @Aspect annotation.", + aspectRecordSchema.getName())); return null; } @@ -252,7 +305,8 @@ private void validateEntitySpec(EntitySpec entitySpec) { if (entitySpec.getKeyAspectSpec() == null) { failValidation( - String.format("Did not find required Key Aspect with name %s in aspects for Entity %s in list of aspects.", + String.format( + "Did not find required Key Aspect with name %s in aspects for Entity %s in list of aspects.", entitySpec.getKeyAspectName(), entitySpec.getName())); } @@ -263,9 +317,11 @@ private void validateEntitySpec(EntitySpec entitySpec) { for (final AspectSpec aspectSpec : entitySpec.getAspectSpecs()) { validateAspect(aspectSpec); if (aspectNames.contains(aspectSpec.getName())) { - failValidation(String.format( - "Could not build entity spec for entity with name %s." + " Found multiple Aspects with the same name %s", - entitySpec.getName(), aspectSpec.getName())); + failValidation( + String.format( + "Could not build entity spec for entity with name %s." + + " Found multiple Aspects with the same name %s", + entitySpec.getName(), aspectSpec.getName())); } aspectNames.add(aspectSpec.getName()); } @@ -273,8 +329,11 @@ private void validateEntitySpec(EntitySpec entitySpec) { // Validate entity name if (_entityNames.contains(entitySpec.getName().toLowerCase())) { // Duplicate entity found. - failValidation(String.format("Could not build entity spec for entity with name %s." - + " Found multiple Entity Snapshots with the same name.", entitySpec.getName())); + failValidation( + String.format( + "Could not build entity spec for entity with name %s." 
+ + " Found multiple Entity Snapshots with the same name.", + entitySpec.getName())); } _entityNames.add(entitySpec.getName().toLowerCase()); @@ -283,13 +342,16 @@ private void validateEntitySpec(EntitySpec entitySpec) { private void validateAspect(final AspectSpec aspectSpec) { if (aspectSpec.isTimeseries()) { if (aspectSpec.getPegasusSchema().contains(TIMESTAMP_FIELD_NAME)) { - DataSchema timestamp = aspectSpec.getPegasusSchema().getField(TIMESTAMP_FIELD_NAME).getType(); + DataSchema timestamp = + aspectSpec.getPegasusSchema().getField(TIMESTAMP_FIELD_NAME).getType(); if (timestamp.getType() == DataSchema.Type.LONG) { return; } } - failValidation(String.format("Aspect %s is of type timeseries but does not include TimeseriesAspectBase", - aspectSpec.getName())); + failValidation( + String.format( + "Aspect %s is of type timeseries but does not include TimeseriesAspectBase", + aspectSpec.getName())); } } @@ -297,7 +359,8 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh // 0. Validate that schema is a Record if (entitySnapshotSchema.getType() != DataSchema.Type.RECORD) { failValidation( - String.format("Failed to validate entity snapshot schema of type %s. Schema must be of record type.", + String.format( + "Failed to validate entity snapshot schema of type %s. Schema must be of record type.", entitySnapshotSchema.getType().toString())); } @@ -306,30 +369,40 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh // 1. Validate Urn field if (entitySnapshotRecordSchema.getField(URN_FIELD_NAME) == null || entitySnapshotRecordSchema.getField(URN_FIELD_NAME).getType().getDereferencedType() - != DataSchema.Type.STRING) { - failValidation(String.format("Failed to validate entity snapshot schema with name %s. Invalid urn field.", - entitySnapshotRecordSchema.getName())); + != DataSchema.Type.STRING) { + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid urn field.", + entitySnapshotRecordSchema.getName())); } // 2. Validate Aspect Array if (entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME) == null || entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME).getType().getDereferencedType() - != DataSchema.Type.ARRAY) { + != DataSchema.Type.ARRAY) { - failValidation(String.format( - "Failed to validate entity snapshot schema with name %s. Invalid aspects field found. " - + "'aspects' should be an array of union type.", entitySnapshotRecordSchema.getName())); + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid aspects field found. " + + "'aspects' should be an array of union type.", + entitySnapshotRecordSchema.getName())); } // 3. Validate Aspect Union final ArrayDataSchema aspectArray = - (ArrayDataSchema) entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME).getType().getDereferencedDataSchema(); + (ArrayDataSchema) + entitySnapshotRecordSchema + .getField(ASPECTS_FIELD_NAME) + .getType() + .getDereferencedDataSchema(); if (aspectArray.getItems().getType() != DataSchema.Type.TYPEREF || aspectArray.getItems().getDereferencedType() != DataSchema.Type.UNION) { - failValidation(String.format( - "Failed to validate entity snapshot schema with name %s. Invalid aspects field field. " - + "'aspects' should be an array of union type.", entitySnapshotRecordSchema.getName())); + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid aspects field field. 
" + + "'aspects' should be an array of union type.", + entitySnapshotRecordSchema.getName())); } return entitySnapshotRecordSchema; @@ -338,8 +411,10 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh private RecordDataSchema validateAspect(@Nonnull final DataSchema aspectSchema) { // Validate that schema is a Record if (aspectSchema.getType() != DataSchema.Type.RECORD) { - failValidation(String.format("Failed to validate aspect schema of type %s. Schema must be of record type.", - aspectSchema.getType().toString())); + failValidation( + String.format( + "Failed to validate aspect schema of type %s. Schema must be of record type.", + aspectSchema.getType().toString())); } return (RecordDataSchema) aspectSchema; } @@ -349,11 +424,13 @@ private void validateKeyAspect(@Nonnull final AspectSpec keyAspect) { RecordDataSchema schema = keyAspect.getPegasusSchema(); // Validate that each field is a string or enum. for (RecordDataSchema.Field field : schema.getFields()) { - if (!DataSchema.Type.STRING.equals(field.getType().getDereferencedType()) && !DataSchema.Type.ENUM.equals( - field.getType().getDereferencedType())) { - failValidation(String.format("Failed to validate key aspect nameed %s. Key " - + "aspects must only contain fields of STRING or ENUM type. Found %s.", keyAspect.getName(), - field.getType().toString())); + if (!DataSchema.Type.STRING.equals(field.getType().getDereferencedType()) + && !DataSchema.Type.ENUM.equals(field.getType().getDereferencedType())) { + failValidation( + String.format( + "Failed to validate key aspect nameed %s. Key " + + "aspects must only contain fields of STRING or ENUM type. Found %s.", + keyAspect.getName(), field.getType().toString())); } } } @@ -363,14 +440,9 @@ private void failValidation(@Nonnull final String message) { } public enum AnnotationExtractionMode { - /** - * Extract all annotations types, the default. - */ + /** Extract all annotations types, the default. */ DEFAULT, - /** - * Skip annotations on aspect record fields, only - * parse entity + aspect annotations. - */ + /** Skip annotations on aspect record fields, only parse entity + aspect annotations. 
*/ IGNORE_ASPECT_FIELDS } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java index a25bf1c2dea62..0a265c46a5164 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java @@ -5,19 +5,17 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class EntitySpecUtils { - private EntitySpecUtils() { - } + private EntitySpecUtils() {} - public static List<String> getEntityTimeseriesAspectNames(@Nonnull EntityRegistry entityRegistry, - @Nonnull String entityName) { + public static List<String> getEntityTimeseriesAspectNames( + @Nonnull EntityRegistry entityRegistry, @Nonnull String entityName) { final EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - final List<String> timeseriesAspectNames = entitySpec.getAspectSpecs() - .stream() - .filter(x -> x.isTimeseries()) - .map(x -> x.getName()) - .collect(Collectors.toList()); + final List<String> timeseriesAspectNames = + entitySpec.getAspectSpecs().stream() + .filter(x -> x.isTimeseries()) + .map(x -> x.getName()) + .collect(Collectors.toList()); return timeseriesAspectNames; } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java index 20f0dfc70d465..09ec6641777f9 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java @@ -3,23 +3,14 @@ import com.linkedin.data.schema.RecordDataSchema; import com.linkedin.metadata.models.annotation.EventAnnotation; - -/** - * A specification of a DataHub Platform Event - */ +/** A specification of a DataHub Platform Event */ public interface EventSpec { - /** - * Returns the name of an event - */ + /** Returns the name of an event */ String getName(); - /** - * Returns the raw event annotation - */ + /** Returns the raw event annotation */ EventAnnotation getEventAnnotation(); - /** - * Returns the PDL schema object for the Event - */ + /** Returns the PDL schema object for the Event */ RecordDataSchema getPegasusSchema(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java index 04322b3b550cb..ceb984cdbc5b4 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java @@ -9,33 +9,32 @@ @Slf4j public class EventSpecBuilder { - public EventSpecBuilder() { - } + public EventSpecBuilder() {} public EventSpec buildEventSpec( - @Nonnull final String eventName, - @Nonnull final DataSchema eventDataSchema) { + @Nonnull final String eventName, @Nonnull final DataSchema eventDataSchema) { final RecordDataSchema eventRecordSchema = validateEvent(eventDataSchema); - final Object eventAnnotationObj = eventDataSchema.getProperties().get(EventAnnotation.ANNOTATION_NAME); + final Object eventAnnotationObj = + eventDataSchema.getProperties().get(EventAnnotation.ANNOTATION_NAME); if (eventAnnotationObj != null) { final EventAnnotation eventAnnotation = - EventAnnotation.fromPegasusAnnotationObject(eventAnnotationObj, eventRecordSchema.getFullName()); + 
EventAnnotation.fromPegasusAnnotationObject( + eventAnnotationObj, eventRecordSchema.getFullName()); - return new DefaultEventSpec( - eventName, - eventAnnotation, - eventRecordSchema); + return new DefaultEventSpec(eventName, eventAnnotation, eventRecordSchema); } return null; } private RecordDataSchema validateEvent(@Nonnull final DataSchema eventSchema) { if (eventSchema.getType() != DataSchema.Type.RECORD) { - failValidation(String.format("Failed to validate event schema of type %s. Schema must be of record type.", - eventSchema.getType().toString())); + failValidation( + String.format( + "Failed to validate event schema of type %s. Schema must be of record type.", + eventSchema.getType().toString())); } return (RecordDataSchema) eventSchema; } @@ -43,4 +42,4 @@ private RecordDataSchema validateEvent(@Nonnull final DataSchema eventSchema) { private void failValidation(@Nonnull final String message) { throw new ModelValidationException(message); } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java index 303fd06299356..b109f9498cba6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java @@ -3,19 +3,15 @@ import com.linkedin.data.schema.DataSchema; import com.linkedin.data.schema.PathSpec; - /** - * Base interface for aspect field specs. Contains a) the path to the field and b) the schema of the field + * Base interface for aspect field specs. Contains a) the path to the field and b) the schema of the + * field */ public interface FieldSpec { - /** - * Returns the {@link PathSpec} corresponding to the field, relative to its parent aspect. - */ + /** Returns the {@link PathSpec} corresponding to the field, relative to its parent aspect. */ PathSpec getPath(); - /** - * Returns the {@link DataSchema} associated with the aspect field. - */ + /** Returns the {@link DataSchema} associated with the aspect field. */ DataSchema getPegasusSchema(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java index ac1e1dfc21590..53a689602f27c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java @@ -9,11 +9,9 @@ import java.util.Map; import java.util.Optional; - public class FieldSpecUtils { - private FieldSpecUtils() { - } + private FieldSpecUtils() {} public static String getSchemaFieldName(PathSpec pathSpec) { List<String> components = pathSpec.getPathComponents(); @@ -25,16 +23,25 @@ public static String getSchemaFieldName(PathSpec pathSpec) { } public static Map<String, Object> getResolvedProperties(final DataSchema schema) { - return !schema.getResolvedProperties().isEmpty() ? schema.getResolvedProperties() : schema.getProperties(); + return !schema.getResolvedProperties().isEmpty() + ? 
schema.getResolvedProperties() + : schema.getProperties(); } public static Optional<PathSpec> getPathSpecWithAspectName(TraverserContext context) { - Object aspectAnnotationObj = context.getTopLevelSchema().getProperties().get(AspectAnnotation.ANNOTATION_NAME); - if (aspectAnnotationObj == null || !Map.class.isAssignableFrom(aspectAnnotationObj.getClass()) + Object aspectAnnotationObj = + context.getTopLevelSchema().getProperties().get(AspectAnnotation.ANNOTATION_NAME); + if (aspectAnnotationObj == null + || !Map.class.isAssignableFrom(aspectAnnotationObj.getClass()) || !((Map) aspectAnnotationObj).containsKey(AspectAnnotation.NAME_FIELD)) { return Optional.empty(); } String aspectName = (((Map) aspectAnnotationObj).get(AspectAnnotation.NAME_FIELD)).toString(); - return Optional.of(new PathSpec(ImmutableList.<String>builder().add(aspectName).addAll(context.getSchemaPathSpec()).build())); + return Optional.of( + new PathSpec( + ImmutableList.<String>builder() + .add(aspectName) + .addAll(context.getSchemaPathSpec()) + .build())); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java b/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java index 7dfe596c8de4c..549c0a9ef7916 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.models; -/** - * Exception thrown when Entity, Aspect models fail to be validated. - */ +/** Exception thrown when Entity, Aspect models fail to be validated. */ public class ModelValidationException extends RuntimeException { public ModelValidationException(String message) { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java index 0124fc8ce7bb1..13678d29da730 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java @@ -12,13 +12,11 @@ import javax.annotation.Nonnull; import lombok.ToString; - /** - * A partially specified entity spec that can be used with a {@link com.linkedin.metadata.models.registry.PatchEntityRegistry}. - * Specifically, it does not require the following things compared to a {@link DefaultEntitySpec} - * - a key aspect - * - snapshot schemas for the entity - * - typeref schemas for aspect + * A partially specified entity spec that can be used with a {@link + * com.linkedin.metadata.models.registry.PatchEntityRegistry}. 
Specifically, it does not require the + * following things compared to a {@link DefaultEntitySpec} - a key aspect - snapshot schemas for + * the entity - typeref schemas for aspect */ @ToString public class PartialEntitySpec implements EntitySpec { @@ -26,8 +24,10 @@ public class PartialEntitySpec implements EntitySpec { private final EntityAnnotation _entityAnnotation; private final Map<String, AspectSpec> _aspectSpecs; - public PartialEntitySpec(@Nonnull final Collection<AspectSpec> aspectSpecs, final EntityAnnotation entityAnnotation) { - _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + public PartialEntitySpec( + @Nonnull final Collection<AspectSpec> aspectSpecs, final EntityAnnotation entityAnnotation) { + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = entityAnnotation; } @@ -82,7 +82,7 @@ public RecordDataSchema getSnapshotSchema() { @Override public TyperefDataSchema getAspectTyperefSchema() { - throw new UnsupportedOperationException("Partial entity specs do not contain aspect typeref schemas"); + throw new UnsupportedOperationException( + "Partial entity specs do not contain aspect typeref schemas"); } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java b/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java index 1a262731a48af..a4dabea0a3345 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java @@ -3,7 +3,6 @@ import java.util.Comparator; import org.apache.commons.lang3.tuple.Pair; - public class PropertyOverrideComparator implements Comparator<Pair<String, Object>> { public int compare(Pair<String, Object> o1, Pair<String, Object> o2) { return Integer.compare(o2.getKey().split("/").length, o1.getKey().split("/").length); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java index 76454850aa2f8..06d6994e7dc45 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java @@ -8,7 +8,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class RelationshipFieldSpec implements FieldSpec { @@ -16,17 +15,13 @@ public class RelationshipFieldSpec implements FieldSpec { @NonNull RelationshipAnnotation relationshipAnnotation; @NonNull DataSchema pegasusSchema; - /** - * Returns the name of the outbound relationship extending from the field. - */ + /** Returns the name of the outbound relationship extending from the field. */ @Nonnull public String getRelationshipName() { return relationshipAnnotation.getName(); } - /** - * Returns a list of entity names representing the destination node type of the relationship. - */ + /** Returns a list of entity names representing the destination node type of the relationship. 
*/ @Nonnull public List<String> getValidDestinationTypes() { return relationshipAnnotation.getValidDestinationTypes(); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java index 99c0908abbd02..ad32b315f6b1a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java @@ -12,7 +12,6 @@ import java.util.List; import java.util.Map; - /** * Implementation of {@link SchemaVisitor} responsible for extracting {@link RelationshipFieldSpec}s * from an aspect schema. @@ -41,25 +40,31 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order final Object primaryAnnotationObj = properties.get(RelationshipAnnotation.ANNOTATION_NAME); if (primaryAnnotationObj != null) { - validatePropertiesAnnotation(currentSchema, primaryAnnotationObj, context.getTraversePath().toString()); + validatePropertiesAnnotation( + currentSchema, primaryAnnotationObj, context.getTraversePath().toString()); } // Next, check resolved properties for annotations on primitives. - final Map<String, Object> resolvedProperties = FieldSpecUtils.getResolvedProperties(currentSchema); - final Object resolvedAnnotationObj = resolvedProperties.get(RelationshipAnnotation.ANNOTATION_NAME); + final Map<String, Object> resolvedProperties = + FieldSpecUtils.getResolvedProperties(currentSchema); + final Object resolvedAnnotationObj = + resolvedProperties.get(RelationshipAnnotation.ANNOTATION_NAME); if (resolvedAnnotationObj != null) { - if (currentSchema.isPrimitive() && isValidPrimitiveType((PrimitiveDataSchema) currentSchema)) { + if (currentSchema.isPrimitive() + && isValidPrimitiveType((PrimitiveDataSchema) currentSchema)) { final PathSpec path = new PathSpec(context.getSchemaPathSpec()); - final RelationshipAnnotation annotation = RelationshipAnnotation.fromPegasusAnnotationObject( - resolvedAnnotationObj, - path.toString() - ); - final RelationshipFieldSpec fieldSpec = new RelationshipFieldSpec(path, annotation, currentSchema); + final RelationshipAnnotation annotation = + RelationshipAnnotation.fromPegasusAnnotationObject( + resolvedAnnotationObj, path.toString()); + final RelationshipFieldSpec fieldSpec = + new RelationshipFieldSpec(path, annotation, currentSchema); _specs.add(fieldSpec); return; } - throw new ModelValidationException(String.format("Invalid @Relationship Annotation at %s", context.getSchemaPathSpec().toString())); + throw new ModelValidationException( + String.format( + "Invalid @Relationship Annotation at %s", context.getSchemaPathSpec().toString())); } } } @@ -78,7 +83,8 @@ private Boolean isValidPrimitiveType(final PrimitiveDataSchema schema) { return DataSchema.Type.STRING.equals(schema.getDereferencedDataSchema().getDereferencedType()); } - private void validatePropertiesAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) { + private void validatePropertiesAnnotation( + DataSchema currentSchema, Object annotationObj, String pathStr) { // If primitive, assume the annotation is well formed until resolvedProperties reflects it. if (currentSchema.isPrimitive()) { @@ -87,20 +93,19 @@ private void validatePropertiesAnnotation(DataSchema currentSchema, Object annot // Required override case. If the annotation keys are not overrides, they are incorrect. 
if (!Map.class.isAssignableFrom(annotationObj.getClass())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", - RelationshipAnnotation.ANNOTATION_NAME, - pathStr - )); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", + RelationshipAnnotation.ANNOTATION_NAME, pathStr)); } Map<String, Object> annotationMap = (Map<String, Object>) annotationObj; for (String key : annotationMap.keySet()) { if (!key.startsWith(Character.toString(PathSpec.SEPARATOR))) { throw new ModelValidationException( - String.format("Invalid @Relationship Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", - pathStr, - currentSchema.getType())); + String.format( + "Invalid @Relationship Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", + pathStr, currentSchema.getType())); } } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java index 2346923d70a48..bdd3546b75857 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java @@ -6,10 +6,9 @@ import lombok.NonNull; import lombok.Value; - @Value public class SearchScoreFieldSpec implements FieldSpec { @NonNull PathSpec path; @NonNull SearchScoreAnnotation searchScoreAnnotation; @NonNull DataSchema pegasusSchema; -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java index c4f767c4a24bc..776d5ee7a20b7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java @@ -15,7 +15,6 @@ import java.util.Optional; import java.util.Set; - /** * Implementation of {@link SchemaVisitor} responsible for extracting {@link SearchScoreFieldSpec}s * from an aspect schema. @@ -24,7 +23,8 @@ public class SearchScoreFieldSpecExtractor implements SchemaVisitor { private final List<SearchScoreFieldSpec> _specs = new ArrayList<>(); private static final Set<DataSchema.Type> NUMERIC_TYPES = - ImmutableSet.of(DataSchema.Type.INT, DataSchema.Type.LONG, DataSchema.Type.FLOAT, DataSchema.Type.DOUBLE); + ImmutableSet.of( + DataSchema.Type.INT, DataSchema.Type.LONG, DataSchema.Type.FLOAT, DataSchema.Type.DOUBLE); public List<SearchScoreFieldSpec> getSpecs() { return _specs; @@ -46,9 +46,10 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order if (currentSchema.isPrimitive() && isNumericType((PrimitiveDataSchema) currentSchema)) { extractAnnotation(annotationObj, currentSchema, context); } else { - throw new ModelValidationException(String.format( - "Invalid @SearchScore Annotation at %s. This annotation can only be put in on a numeric singular (non-array) field", - context.getSchemaPathSpec().toString())); + throw new ModelValidationException( + String.format( + "Invalid @SearchScore Annotation at %s. 
This annotation can only be put on a numeric singular (non-array) field", + context.getSchemaPathSpec().toString())); } } } @@ -59,19 +60,21 @@ private Object getAnnotationObj(TraverserContext context) { return properties.get(SearchScoreAnnotation.ANNOTATION_NAME); } - private void extractAnnotation(final Object annotationObj, final DataSchema currentSchema, - final TraverserContext context) { + private void extractAnnotation( + final Object annotationObj, final DataSchema currentSchema, final TraverserContext context) { final PathSpec path = new PathSpec(context.getSchemaPathSpec()); final Optional<PathSpec> fullPath = FieldSpecUtils.getPathSpecWithAspectName(context); if (context.getSchemaPathSpec().contains(PathSpec.WILDCARD)) { throw new ModelValidationException( - String.format("SearchScore annotation can only be put on singular fields (non-arrays): path %s", + String.format( + "SearchScore annotation can only be put on singular fields (non-arrays): path %s", fullPath.orElse(path))); } final SearchScoreAnnotation annotation = - SearchScoreAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path), - path.toString()); - final SearchScoreFieldSpec fieldSpec = new SearchScoreFieldSpec(path, annotation, currentSchema); + SearchScoreAnnotation.fromPegasusAnnotationObject( + annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString()); + final SearchScoreFieldSpec fieldSpec = + new SearchScoreFieldSpec(path, annotation, currentSchema); _specs.add(fieldSpec); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java index 9ebd7e991df48..217bd8e58340a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java @@ -6,7 +6,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class SearchableFieldSpec implements FieldSpec { @@ -17,4 +16,4 @@ public class SearchableFieldSpec implements FieldSpec { public boolean isArray() { return path.getPathComponents().contains("*"); } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java index 8f2f42cd69cae..add6a88369b13 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java @@ -11,14 +11,13 @@ import com.linkedin.data.schema.annotation.SchemaVisitorTraversalResult; import com.linkedin.data.schema.annotation.TraverserContext; import com.linkedin.metadata.models.annotation.SearchableAnnotation; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import lombok.extern.slf4j.Slf4j; /** * Implementation of {@link SchemaVisitor} responsible for extracting {@link SearchableFieldSpec}s @@ -33,6 +32,7 @@ public class SearchableFieldSpecExtractor implements SchemaVisitor { private static final String MAP = "map"; public static final Map<String, Object> PRIMARY_URN_SEARCH_PROPERTIES; + static { PRIMARY_URN_SEARCH_PROPERTIES = new DataMap(); PRIMARY_URN_SEARCH_PROPERTIES.put("enableAutocomplete", 
"true"); @@ -41,10 +41,8 @@ public class SearchableFieldSpecExtractor implements SchemaVisitor { } private static final float SECONDARY_URN_FACTOR = 0.1f; - private static final Set<String> SECONDARY_URN_FIELD_TYPES = ImmutableSet.<String>builder() - .add("URN") - .add("URN_PARTIAL") - .build(); + private static final Set<String> SECONDARY_URN_FIELD_TYPES = + ImmutableSet.<String>builder().add("URN").add("URN_PARTIAL").build(); public List<SearchableFieldSpec> getSpecs() { return _specs; @@ -72,7 +70,8 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order extractSearchableAnnotation(annotationObj, currentSchema, context); } else { throw new ModelValidationException( - String.format("Invalid @Searchable Annotation at %s", context.getSchemaPathSpec().toString())); + String.format( + "Invalid @Searchable Annotation at %s", context.getSchemaPathSpec().toString())); } } } @@ -86,34 +85,45 @@ private Object getAnnotationObj(TraverserContext context) { final Object primaryAnnotationObj = properties.get(SearchableAnnotation.ANNOTATION_NAME); if (primaryAnnotationObj != null) { - validatePropertiesAnnotation(currentSchema, primaryAnnotationObj, context.getTraversePath().toString()); - // Unfortunately, annotations on collections always need to be a nested map (byproduct of making overrides work) + validatePropertiesAnnotation( + currentSchema, primaryAnnotationObj, context.getTraversePath().toString()); + // Unfortunately, annotations on collections always need to be a nested map (byproduct of + // making overrides work) // As such, for annotation maps, we make it a single entry map, where the key has no meaning - if (currentSchema.getDereferencedType() == DataSchema.Type.MAP && primaryAnnotationObj instanceof Map + if (currentSchema.getDereferencedType() == DataSchema.Type.MAP + && primaryAnnotationObj instanceof Map && !((Map) primaryAnnotationObj).isEmpty()) { return ((Map<?, ?>) primaryAnnotationObj).entrySet().stream().findFirst().get().getValue(); } } - // Check if the path has map in it. Individual values of the maps (actual maps are caught above) can be ignored + // Check if the path has map in it. 
Individual values of the maps (actual maps are caught above) + // can be ignored if (context.getTraversePath().contains(MAP)) { return null; } - final boolean isUrn = ((DataMap) context.getParentSchema().getProperties() - .getOrDefault("java", new DataMap())) - .getOrDefault("class", "").equals("com.linkedin.common.urn.Urn"); + final boolean isUrn = + ((DataMap) context.getParentSchema().getProperties().getOrDefault("java", new DataMap())) + .getOrDefault("class", "") + .equals("com.linkedin.common.urn.Urn"); - final Map<String, Object> resolvedProperties = FieldSpecUtils.getResolvedProperties(currentSchema); + final Map<String, Object> resolvedProperties = + FieldSpecUtils.getResolvedProperties(currentSchema); // if primary doesn't have an annotation, then ignore secondary urns if (isUrn && primaryAnnotationObj != null) { - DataMap annotationMap = (DataMap) resolvedProperties.get(SearchableAnnotation.ANNOTATION_NAME); + DataMap annotationMap = + (DataMap) resolvedProperties.get(SearchableAnnotation.ANNOTATION_NAME); Map<String, Object> result = new HashMap<>(annotationMap); // Override boostScore for secondary urn - if (SECONDARY_URN_FIELD_TYPES.contains(annotationMap.getOrDefault("fieldType", "URN").toString())) { - result.put("boostScore", Float.parseFloat(String.valueOf(annotationMap.getOrDefault("boostScore", "1.0"))) * SECONDARY_URN_FACTOR); + if (SECONDARY_URN_FIELD_TYPES.contains( + annotationMap.getOrDefault("fieldType", "URN").toString())) { + result.put( + "boostScore", + Float.parseFloat(String.valueOf(annotationMap.getOrDefault("boostScore", "1.0"))) + * SECONDARY_URN_FACTOR); } return result; @@ -123,40 +133,47 @@ private Object getAnnotationObj(TraverserContext context) { } } - private void extractSearchableAnnotation(final Object annotationObj, final DataSchema currentSchema, - final TraverserContext context) { + private void extractSearchableAnnotation( + final Object annotationObj, final DataSchema currentSchema, final TraverserContext context) { final PathSpec path = new PathSpec(context.getSchemaPathSpec()); final Optional<PathSpec> fullPath = FieldSpecUtils.getPathSpecWithAspectName(context); SearchableAnnotation annotation = - SearchableAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path), - currentSchema.getDereferencedType(), path.toString()); + SearchableAnnotation.fromPegasusAnnotationObject( + annotationObj, + FieldSpecUtils.getSchemaFieldName(path), + currentSchema.getDereferencedType(), + path.toString()); String schemaPathSpec = context.getSchemaPathSpec().toString(); - if (_searchFieldNamesToPatch.containsKey(annotation.getFieldName()) && !_searchFieldNamesToPatch.get( - annotation.getFieldName()).equals(schemaPathSpec)) { + if (_searchFieldNamesToPatch.containsKey(annotation.getFieldName()) + && !_searchFieldNamesToPatch.get(annotation.getFieldName()).equals(schemaPathSpec)) { // Try to use path String pathName = path.toString().replace('/', '_').replace("*", ""); if (pathName.startsWith("_")) { pathName = pathName.replaceFirst("_", ""); } - if (_searchFieldNamesToPatch.containsKey(pathName) && !_searchFieldNamesToPatch.get(pathName).equals(schemaPathSpec)) { + if (_searchFieldNamesToPatch.containsKey(pathName) + && !_searchFieldNamesToPatch.get(pathName).equals(schemaPathSpec)) { throw new ModelValidationException( - String.format("Entity has multiple searchable fields with the same field name %s, path: %s", annotation.getFieldName(), fullPath.orElse(path))); + String.format( + "Entity has multiple searchable fields 
with the same field name %s, path: %s", + annotation.getFieldName(), fullPath.orElse(path))); } else { - annotation = new SearchableAnnotation( - pathName, - annotation.getFieldType(), - annotation.isQueryByDefault(), - annotation.isEnableAutocomplete(), - annotation.isAddToFilters(), - annotation.isAddHasValuesToFilters(), - annotation.getFilterNameOverride(), - annotation.getHasValuesFilterNameOverride(), - annotation.getBoostScore(), - annotation.getHasValuesFieldName(), - annotation.getNumValuesFieldName(), - annotation.getWeightsPerFieldValue(), - annotation.getFieldNameAliases()); + annotation = + new SearchableAnnotation( + pathName, + annotation.getFieldType(), + annotation.isQueryByDefault(), + annotation.isEnableAutocomplete(), + annotation.isAddToFilters(), + annotation.isAddHasValuesToFilters(), + annotation.getFilterNameOverride(), + annotation.getHasValuesFilterNameOverride(), + annotation.getBoostScore(), + annotation.getHasValuesFieldName(), + annotation.getNumValuesFieldName(), + annotation.getWeightsPerFieldValue(), + annotation.getFieldNameAliases()); } } log.debug("Searchable annotation for field: {} : {}", schemaPathSpec, annotation); @@ -184,35 +201,39 @@ private Boolean isValidPrimitiveType(final PrimitiveDataSchema schema) { return true; } - private void validatePropertiesAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) { + private void validatePropertiesAnnotation( + DataSchema currentSchema, Object annotationObj, String pathStr) { // If primitive, assume the annotation is well formed until resolvedProperties reflects it. - if (currentSchema.isPrimitive() || currentSchema.getDereferencedType().equals(DataSchema.Type.ENUM) || currentSchema - .getDereferencedType() - .equals(DataSchema.Type.MAP)) { + if (currentSchema.isPrimitive() + || currentSchema.getDereferencedType().equals(DataSchema.Type.ENUM) + || currentSchema.getDereferencedType().equals(DataSchema.Type.MAP)) { return; } // Required override case. If the annotation keys are not overrides, they are incorrect. if (!Map.class.isAssignableFrom(annotationObj.getClass())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", - SearchableAnnotation.ANNOTATION_NAME, pathStr)); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", + SearchableAnnotation.ANNOTATION_NAME, pathStr)); } Map<String, Object> annotationMap = (Map<String, Object>) annotationObj; if (annotationMap.size() == 0) { - throw new ModelValidationException(String.format( - "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", - pathStr, currentSchema.getType())); + throw new ModelValidationException( + String.format( + "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", + pathStr, currentSchema.getType())); } for (String key : annotationMap.keySet()) { if (!key.startsWith(Character.toString(PathSpec.SEPARATOR))) { - throw new ModelValidationException(String.format( - "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", - pathStr, currentSchema.getType())); + throw new ModelValidationException( + String.format( + "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. 
Must be placed on primitive field.", + pathStr, currentSchema.getType())); } } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java index 5771144fd33c2..efdb8b876cbda 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java @@ -7,7 +7,6 @@ import lombok.Data; import lombok.NonNull; - @Data public class TimeseriesFieldCollectionSpec implements FieldSpec { @NonNull PathSpec path; @@ -24,4 +23,4 @@ public String getName() { public String getKeyPathFromAnnotation() { return path + "/" + timeseriesFieldCollectionAnnotation.getKey(); } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java index cbd0c0581600e..6ad7c1c9d34ca 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java @@ -6,7 +6,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class TimeseriesFieldSpec implements FieldSpec { @NonNull PathSpec path; @@ -16,4 +15,4 @@ public class TimeseriesFieldSpec implements FieldSpec { public String getName() { return timeseriesFieldAnnotation.getStatName(); } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java index 4391bd1497741..e29b1a88afca4 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java @@ -15,16 +15,16 @@ import java.util.Optional; import lombok.Getter; - /** - * Implementation of {@link SchemaVisitor} responsible for extracting {@link TimeseriesFieldSpec} and - * {@link TimeseriesFieldCollectionSpec} from an aspect schema. + * Implementation of {@link SchemaVisitor} responsible for extracting {@link TimeseriesFieldSpec} + * and {@link TimeseriesFieldCollectionSpec} from an aspect schema. 
*/ @Getter public class TimeseriesFieldSpecExtractor implements SchemaVisitor { private final List<TimeseriesFieldSpec> timeseriesFieldSpecs = new ArrayList<>(); - private final List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs = new ArrayList<>(); + private final List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs = + new ArrayList<>(); private final Map<String, String> namesToPath = new HashMap<>(); @Override @@ -40,16 +40,22 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order // First, check for collection in primary properties final Map<String, Object> primaryProperties = context.getEnclosingField().getProperties(); - final Object timeseriesFieldAnnotationObj = primaryProperties.get(TimeseriesFieldAnnotation.ANNOTATION_NAME); + final Object timeseriesFieldAnnotationObj = + primaryProperties.get(TimeseriesFieldAnnotation.ANNOTATION_NAME); final Object timeseriesFieldCollectionAnnotationObj = primaryProperties.get(TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME); - if (currentSchema.getType() == DataSchema.Type.RECORD && timeseriesFieldCollectionAnnotationObj != null) { - validateCollectionAnnotation(currentSchema, timeseriesFieldCollectionAnnotationObj, + if (currentSchema.getType() == DataSchema.Type.RECORD + && timeseriesFieldCollectionAnnotationObj != null) { + validateCollectionAnnotation( + currentSchema, + timeseriesFieldCollectionAnnotationObj, context.getTraversePath().toString()); - addTimeseriesFieldCollectionSpec(currentSchema, path, timeseriesFieldCollectionAnnotationObj); - } else if (timeseriesFieldAnnotationObj != null && !path.getPathComponents() - .get(path.getPathComponents().size() - 1) - .equals("*")) { // For arrays make sure to add just the array form + addTimeseriesFieldCollectionSpec( + currentSchema, path, timeseriesFieldCollectionAnnotationObj); + } else if (timeseriesFieldAnnotationObj != null + && !path.getPathComponents() + .get(path.getPathComponents().size() - 1) + .equals("*")) { // For arrays make sure to add just the array form addTimeseriesFieldSpec(currentSchema, path, timeseriesFieldAnnotationObj); } else { addTimeseriesFieldCollectionKey(path); @@ -57,7 +63,8 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order } } - private void validateCollectionAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) { + private void validateCollectionAnnotation( + DataSchema currentSchema, Object annotationObj, String pathStr) { // If primitive, assume the annotation is well formed until resolvedProperties reflects it. if (currentSchema.isPrimitive()) { @@ -66,21 +73,25 @@ private void validateCollectionAnnotation(DataSchema currentSchema, Object annot // Required override case. If the annotation keys are not overrides, they are incorrect. 
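// Editor's note (illustrative sketch, not part of this patch): the check that follows is
// the recurring pattern across all of these annotation classes -- the raw annotation value
// read from the Pegasus schema properties is an untyped Object, so each parser verifies it
// is a Map before casting and fails fast otherwise. A minimal standalone version of that
// pattern; the class and method names (AnnotationMapExample, requireMap) are hypothetical
// and exist only for this example.

import java.util.Map;

public class AnnotationMapExample {

  // Rejects non-Map annotation values with a descriptive message, mirroring the
  // ModelValidationException checks in the surrounding code.
  static Map<?, ?> requireMap(Object annotationObj, String context) {
    if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
      throw new IllegalArgumentException(
          String.format(
              "Annotation declared at %s: invalid value type %s (expected Map)",
              context, annotationObj.getClass().getName()));
    }
    return (Map<?, ?>) annotationObj;
  }

  public static void main(String[] args) {
    System.out.println(requireMap(Map.of("key", "timestampMillis"), "pdl://example"));
    try {
      requireMap("not-a-map", "pdl://example");
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}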
if (!Map.class.isAssignableFrom(annotationObj.getClass())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", - TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME, pathStr)); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", + TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME, pathStr)); } } - private void addTimeseriesFieldCollectionSpec(DataSchema currentSchema, PathSpec path, Object annotationObj) { + private void addTimeseriesFieldCollectionSpec( + DataSchema currentSchema, PathSpec path, Object annotationObj) { if (currentSchema.getType() == DataSchema.Type.RECORD) { TimeseriesFieldCollectionAnnotation annotation = - TimeseriesFieldCollectionAnnotation.fromPegasusAnnotationObject(annotationObj, - FieldSpecUtils.getSchemaFieldName(path), path.toString()); - if (namesToPath.containsKey(annotation.getCollectionName()) && !namesToPath.get(annotation.getCollectionName()) - .equals(path.toString())) { + TimeseriesFieldCollectionAnnotation.fromPegasusAnnotationObject( + annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString()); + if (namesToPath.containsKey(annotation.getCollectionName()) + && !namesToPath.get(annotation.getCollectionName()).equals(path.toString())) { throw new ModelValidationException( - String.format("There are multiple fields with the same name: %s", annotation.getCollectionName())); + String.format( + "There are multiple fields with the same name: %s", + annotation.getCollectionName())); } namesToPath.put(annotation.getCollectionName(), path.toString()); timeseriesFieldCollectionSpecs.add( @@ -88,25 +99,32 @@ private void addTimeseriesFieldCollectionSpec(DataSchema currentSchema, PathSpec } } - private void addTimeseriesFieldSpec(DataSchema currentSchema, PathSpec path, Object annotationObj) { + private void addTimeseriesFieldSpec( + DataSchema currentSchema, PathSpec path, Object annotationObj) { // First check whether the stat is part of a collection String pathStr = path.toString(); - Optional<TimeseriesFieldCollectionSpec> fieldCollectionSpec = timeseriesFieldCollectionSpecs.stream() - .filter(spec -> pathStr.startsWith(spec.getPath().toString())) - .findFirst(); + Optional<TimeseriesFieldCollectionSpec> fieldCollectionSpec = + timeseriesFieldCollectionSpecs.stream() + .filter(spec -> pathStr.startsWith(spec.getPath().toString())) + .findFirst(); TimeseriesFieldAnnotation annotation = - TimeseriesFieldAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path), - path.toString()); + TimeseriesFieldAnnotation.fromPegasusAnnotationObject( + annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString()); if (fieldCollectionSpec.isPresent()) { - fieldCollectionSpec.get() + fieldCollectionSpec + .get() .getTimeseriesFieldSpecMap() - .put(annotation.getStatName(), - new TimeseriesFieldSpec(getRelativePath(path, fieldCollectionSpec.get().getPath()), annotation, + .put( + annotation.getStatName(), + new TimeseriesFieldSpec( + getRelativePath(path, fieldCollectionSpec.get().getPath()), + annotation, currentSchema)); } else { if (path.getPathComponents().contains("*")) { throw new ModelValidationException( - String.format("No matching collection found for the given timeseries field %s", pathStr)); + String.format( + "No matching collection found for the given timeseries field %s", pathStr)); } 
timeseriesFieldSpecs.add(new TimeseriesFieldSpec(path, annotation, currentSchema)); } @@ -123,7 +141,9 @@ private void addTimeseriesFieldCollectionKey(PathSpec path) { private PathSpec getRelativePath(PathSpec child, PathSpec parent) { return new PathSpec( - child.getPathComponents().subList(parent.getPathComponents().size(), child.getPathComponents().size())); + child + .getPathComponents() + .subList(parent.getPathComponents().size(), child.getPathComponents().size())); } @Override diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java index 3d9e1cf04cd36..7aa5be69a0541 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java @@ -4,11 +4,11 @@ import java.util.Optional; import lombok.experimental.UtilityClass; - @UtilityClass public class AnnotationUtils { <T> Optional<T> getField(final Map fieldMap, final String fieldName, final Class<T> fieldType) { - if (fieldMap.containsKey(fieldName) && fieldType.isAssignableFrom(fieldMap.get(fieldName).getClass())) { + if (fieldMap.containsKey(fieldName) + && fieldType.isAssignableFrom(fieldMap.get(fieldName).getClass())) { return Optional.of(fieldType.cast(fieldMap.get(fieldName))); } return Optional.empty(); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java index d116170e10d22..56dca9ab3eaf9 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java @@ -7,10 +7,7 @@ import javax.annotation.Nonnull; import lombok.Value; - -/** - * Simple object representation of the @Aspect annotation metadata. - */ +/** Simple object representation of the @Aspect annotation metadata. 
*/ @Value public class AspectAnnotation { @@ -29,15 +26,12 @@ public class AspectAnnotation { @Nonnull public static AspectAnnotation fromSchemaProperty( - @Nonnull final Object annotationObj, - @Nonnull final String context) { + @Nonnull final Object annotationObj, @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { throw new ModelValidationException( String.format( "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", - ANNOTATION_NAME, - context - )); + ANNOTATION_NAME, context)); } final Map map = (Map) annotationObj; final Optional<String> name = AnnotationUtils.getField(map, NAME_FIELD, String.class); @@ -45,10 +39,7 @@ public static AspectAnnotation fromSchemaProperty( throw new ModelValidationException( String.format( "Failed to validated @%s annotation declared at %s: missing '%s' property", - ANNOTATION_NAME, - context, - NAME_FIELD - )); + ANNOTATION_NAME, context, NAME_FIELD)); } final Optional<String> type = AnnotationUtils.getField(map, TYPE_FIELD, String.class); @@ -56,6 +47,10 @@ public static AspectAnnotation fromSchemaProperty( Optional<Boolean> autoRender = AnnotationUtils.getField(map, AUTO_RENDER_FIELD, Boolean.class); Optional<DataMap> renderSpec = AnnotationUtils.getField(map, RENDER_SPEC_FIELD, DataMap.class); - return new AspectAnnotation(name.get(), isTimeseries, autoRender.orElseGet(() -> false), renderSpec.orElseGet(() -> null)); + return new AspectAnnotation( + name.get(), + isTimeseries, + autoRender.orElseGet(() -> false), + renderSpec.orElseGet(() -> null)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java index e7174dcc9b176..94cdf130d1e88 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java @@ -6,10 +6,7 @@ import javax.annotation.Nonnull; import lombok.Value; - -/** - * Simple object representation of the @Entity annotation metadata. - */ +/** Simple object representation of the @Entity annotation metadata. */ @Value public class EntityAnnotation { @@ -22,39 +19,31 @@ public class EntityAnnotation { @Nonnull public static EntityAnnotation fromSchemaProperty( - @Nonnull final Object annotationObj, - @Nonnull final String context) { + @Nonnull final Object annotationObj, @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { throw new ModelValidationException( String.format( "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", - ANNOTATION_NAME, - context - )); + ANNOTATION_NAME, context)); } Map map = (Map) annotationObj; final Optional<String> name = AnnotationUtils.getField(map, NAME_FIELD, String.class); - final Optional<String> keyAspect = AnnotationUtils.getField(map, KEY_ASPECT_FIELD, String.class); + final Optional<String> keyAspect = + AnnotationUtils.getField(map, KEY_ASPECT_FIELD, String.class); if (!name.isPresent()) { throw new ModelValidationException( String.format( "Failed to validate @%s annotation declared at %s: Invalid field '%s'. 
Expected type String", - ANNOTATION_NAME, - context, - NAME_FIELD - )); + ANNOTATION_NAME, context, NAME_FIELD)); } if (!keyAspect.isPresent()) { throw new ModelValidationException( String.format( "Failed to validate @%s annotation declared at %s: Invalid field '%s'. Expected type String", - ANNOTATION_NAME, - context, - KEY_ASPECT_FIELD - )); + ANNOTATION_NAME, context, KEY_ASPECT_FIELD)); } return new EntityAnnotation(name.get(), keyAspect.get()); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java index ee0229dabfc37..ddfa23412955d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java @@ -6,10 +6,7 @@ import javax.annotation.Nonnull; import lombok.Value; - -/** - * An annotation associated with a DataHub Event. - */ +/** An annotation associated with a DataHub Event. */ @Value public class EventAnnotation { @@ -20,15 +17,12 @@ public class EventAnnotation { @Nonnull public static EventAnnotation fromPegasusAnnotationObject( - @Nonnull final Object annotationObj, - @Nonnull final String context - ) { + @Nonnull final Object annotationObj, @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", - ANNOTATION_NAME, - context - )); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", + ANNOTATION_NAME, context)); } Map map = (Map) annotationObj; @@ -37,10 +31,7 @@ public static EventAnnotation fromPegasusAnnotationObject( throw new ModelValidationException( String.format( "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type String", - ANNOTATION_NAME, - context, - NAME_FIELD - )); + ANNOTATION_NAME, context, NAME_FIELD)); } return new EventAnnotation(name.get()); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java index 7631f95c3a5ff..a22ef56d60006 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java @@ -8,10 +8,7 @@ import javax.annotation.Nonnull; import lombok.Value; - -/** - * Simple object representation of the @Relationship annotation metadata. - */ +/** Simple object representation of the @Relationship annotation metadata. 
*/ @Value public class RelationshipAnnotation { @@ -38,15 +35,12 @@ public class RelationshipAnnotation { @Nonnull public static RelationshipAnnotation fromPegasusAnnotationObject( - @Nonnull final Object annotationObj, - @Nonnull final String context - ) { + @Nonnull final Object annotationObj, @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", - ANNOTATION_NAME, - context - )); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", + ANNOTATION_NAME, context)); } Map map = (Map) annotationObj; @@ -55,13 +49,11 @@ public static RelationshipAnnotation fromPegasusAnnotationObject( throw new ModelValidationException( String.format( "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type String", - ANNOTATION_NAME, - context, - NAME_FIELD - )); + ANNOTATION_NAME, context, NAME_FIELD)); } - final Optional<List> entityTypesList = AnnotationUtils.getField(map, ENTITY_TYPES_FIELD, List.class); + final Optional<List> entityTypesList = + AnnotationUtils.getField(map, ENTITY_TYPES_FIELD, List.class); final List<String> entityTypes = new ArrayList<>(); if (entityTypesList.isPresent()) { for (Object entityTypeObj : entityTypesList.get()) { @@ -69,21 +61,22 @@ public static RelationshipAnnotation fromPegasusAnnotationObject( throw new ModelValidationException( String.format( "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type List<String>", - ANNOTATION_NAME, - context, - ENTITY_TYPES_FIELD - )); + ANNOTATION_NAME, context, ENTITY_TYPES_FIELD)); } entityTypes.add((String) entityTypeObj); } } - final Optional<Boolean> isUpstream = AnnotationUtils.getField(map, IS_UPSTREAM_FIELD, Boolean.class); - final Optional<Boolean> isLineage = AnnotationUtils.getField(map, IS_LINEAGE_FIELD, Boolean.class); + final Optional<Boolean> isUpstream = + AnnotationUtils.getField(map, IS_UPSTREAM_FIELD, Boolean.class); + final Optional<Boolean> isLineage = + AnnotationUtils.getField(map, IS_LINEAGE_FIELD, Boolean.class); final Optional<String> createdOn = AnnotationUtils.getField(map, CREATED_ON, String.class); - final Optional<String> createdActor = AnnotationUtils.getField(map, CREATED_ACTOR, String.class); + final Optional<String> createdActor = + AnnotationUtils.getField(map, CREATED_ACTOR, String.class); final Optional<String> updatedOn = AnnotationUtils.getField(map, UPDATED_ON, String.class); - final Optional<String> updatedActor = AnnotationUtils.getField(map, UPDATED_ACTOR, String.class); + final Optional<String> updatedActor = + AnnotationUtils.getField(map, UPDATED_ACTOR, String.class); final Optional<String> properties = AnnotationUtils.getField(map, PROPERTIES, String.class); return new RelationshipAnnotation( @@ -95,6 +88,6 @@ public static RelationshipAnnotation fromPegasusAnnotationObject( createdActor.orElse(null), updatedOn.orElse(null), updatedActor.orElse(null), - properties.orElse(null) - ); } -} \ No newline at end of file + properties.orElse(null)); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java index 77c5920ca9ba8..2221650eac1c9 100644 --- 
a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java @@ -8,7 +8,6 @@ import lombok.Value; import org.apache.commons.lang3.EnumUtils; - /** * Annotation indicating how the search results should be ranked by the underlying search service */ @@ -35,26 +34,31 @@ public enum Modifier { } @Nonnull - public static SearchScoreAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj, - @Nonnull final String schemaFieldName, @Nonnull final String context) { + public static SearchScoreAnnotation fromPegasusAnnotationObject( + @Nonnull final Object annotationObj, + @Nonnull final String schemaFieldName, + @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { throw new ModelValidationException( - String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", + String.format( + "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", ANNOTATION_NAME, context)); } Map map = (Map) annotationObj; final Optional<String> fieldName = AnnotationUtils.getField(map, "fieldName", String.class); final Optional<Double> weight = AnnotationUtils.getField(map, "weight", Double.class); - final Optional<Double> defaultValue = AnnotationUtils.getField(map, "defaultValue", Double.class); + final Optional<Double> defaultValue = + AnnotationUtils.getField(map, "defaultValue", Double.class); final Optional<String> modifierStr = AnnotationUtils.getField(map, "modifier", String.class); if (modifierStr.isPresent() && !EnumUtils.isValidEnum(Modifier.class, modifierStr.get())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared at %s: Invalid field 'modifier'. Invalid modifier provided. Valid modifiers are %s", - ANNOTATION_NAME, context, Arrays.toString(Modifier.values()))); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared at %s: Invalid field 'modifier'. Invalid modifier provided. Valid modifiers are %s", + ANNOTATION_NAME, context, Arrays.toString(Modifier.values()))); } final Optional<Modifier> modifier = modifierStr.map(Modifier::valueOf); - return new SearchScoreAnnotation(fieldName.orElse(schemaFieldName), weight.orElse(1.0), defaultValue.orElse(0.0), - modifier); + return new SearchScoreAnnotation( + fieldName.orElse(schemaFieldName), weight.orElse(1.0), defaultValue.orElse(0.0), modifier); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java index efa30a948e237..d5eae2a2315fa 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java @@ -4,7 +4,6 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.data.schema.DataSchema; import com.linkedin.metadata.models.ModelValidationException; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -15,17 +14,19 @@ import lombok.Value; import org.apache.commons.lang3.EnumUtils; - -/** - * Simple object representation of the @Searchable annotation metadata. - */ +/** Simple object representation of the @Searchable annotation metadata. 
*/ @Value public class SearchableAnnotation { public static final String FIELD_NAME_ALIASES = "fieldNameAliases"; public static final String ANNOTATION_NAME = "Searchable"; private static final Set<FieldType> DEFAULT_QUERY_FIELD_TYPES = - ImmutableSet.of(FieldType.TEXT, FieldType.TEXT_PARTIAL, FieldType.WORD_GRAM, FieldType.URN, FieldType.URN_PARTIAL); + ImmutableSet.of( + FieldType.TEXT, + FieldType.TEXT_PARTIAL, + FieldType.WORD_GRAM, + FieldType.URN, + FieldType.URN_PARTIAL); // Name of the field in the search index. Defaults to the field name in the schema String fieldName; @@ -71,12 +72,15 @@ public enum FieldType { } @Nonnull - public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj, - @Nonnull final String schemaFieldName, @Nonnull final DataSchema.Type schemaDataType, + public static SearchableAnnotation fromPegasusAnnotationObject( + @Nonnull final Object annotationObj, + @Nonnull final String schemaFieldName, + @Nonnull final DataSchema.Type schemaDataType, @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { throw new ModelValidationException( - String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", + String.format( + "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", ANNOTATION_NAME, context)); } @@ -84,23 +88,32 @@ public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Ob final Optional<String> fieldName = AnnotationUtils.getField(map, "fieldName", String.class); final Optional<String> fieldType = AnnotationUtils.getField(map, "fieldType", String.class); if (fieldType.isPresent() && !EnumUtils.isValidEnum(FieldType.class, fieldType.get())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared at %s: Invalid field 'fieldType'. Invalid fieldType provided. Valid types are %s", - ANNOTATION_NAME, context, Arrays.toString(FieldType.values()))); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared at %s: Invalid field 'fieldType'. Invalid fieldType provided. 
Valid types are %s", + ANNOTATION_NAME, context, Arrays.toString(FieldType.values()))); } - final Optional<Boolean> queryByDefault = AnnotationUtils.getField(map, "queryByDefault", Boolean.class); - final Optional<Boolean> enableAutocomplete = AnnotationUtils.getField(map, "enableAutocomplete", Boolean.class); - final Optional<Boolean> addToFilters = AnnotationUtils.getField(map, "addToFilters", Boolean.class); - final Optional<Boolean> addHasValuesToFilters = AnnotationUtils.getField(map, "addHasValuesToFilters", Boolean.class); - final Optional<String> filterNameOverride = AnnotationUtils.getField(map, "filterNameOverride", String.class); + final Optional<Boolean> queryByDefault = + AnnotationUtils.getField(map, "queryByDefault", Boolean.class); + final Optional<Boolean> enableAutocomplete = + AnnotationUtils.getField(map, "enableAutocomplete", Boolean.class); + final Optional<Boolean> addToFilters = + AnnotationUtils.getField(map, "addToFilters", Boolean.class); + final Optional<Boolean> addHasValuesToFilters = + AnnotationUtils.getField(map, "addHasValuesToFilters", Boolean.class); + final Optional<String> filterNameOverride = + AnnotationUtils.getField(map, "filterNameOverride", String.class); final Optional<String> hasValuesFilterNameOverride = AnnotationUtils.getField(map, "hasValuesFilterNameOverride", String.class); final Optional<Double> boostScore = AnnotationUtils.getField(map, "boostScore", Double.class); - final Optional<String> hasValuesFieldName = AnnotationUtils.getField(map, "hasValuesFieldName", String.class); - final Optional<String> numValuesFieldName = AnnotationUtils.getField(map, "numValuesFieldName", String.class); + final Optional<String> hasValuesFieldName = + AnnotationUtils.getField(map, "hasValuesFieldName", String.class); + final Optional<String> numValuesFieldName = + AnnotationUtils.getField(map, "numValuesFieldName", String.class); final Optional<Map> weightsPerFieldValueMap = - AnnotationUtils.getField(map, "weightsPerFieldValue", Map.class).map(m -> (Map<Object, Double>) m); + AnnotationUtils.getField(map, "weightsPerFieldValue", Map.class) + .map(m -> (Map<Object, Double>) m); final List<String> fieldNameAliases = getFieldNameAliases(map); final FieldType resolvedFieldType = getFieldType(fieldType, schemaDataType); @@ -120,7 +133,8 @@ public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Ob fieldNameAliases); } - private static FieldType getFieldType(Optional<String> maybeFieldType, DataSchema.Type schemaDataType) { + private static FieldType getFieldType( + Optional<String> maybeFieldType, DataSchema.Type schemaDataType) { if (!maybeFieldType.isPresent()) { return getDefaultFieldType(schemaDataType); } @@ -139,7 +153,8 @@ private static FieldType getDefaultFieldType(DataSchema.Type schemaDataType) { } } - private static Boolean getQueryByDefault(Optional<Boolean> maybeQueryByDefault, FieldType fieldType) { + private static Boolean getQueryByDefault( + Optional<Boolean> maybeQueryByDefault, FieldType fieldType) { if (!maybeQueryByDefault.isPresent()) { if (DEFAULT_QUERY_FIELD_TYPES.contains(fieldType)) { return Boolean.TRUE; @@ -168,7 +183,8 @@ private static String capitalizeFirstLetter(String str) { private static List<String> getFieldNameAliases(Map map) { final List<String> aliases = new ArrayList<>(); - final Optional<List> fieldNameAliases = AnnotationUtils.getField(map, FIELD_NAME_ALIASES, List.class); + final Optional<List> fieldNameAliases = + AnnotationUtils.getField(map, FIELD_NAME_ALIASES, List.class); if 
(fieldNameAliases.isPresent()) { for (Object alias : fieldNameAliases.get()) { aliases.add((String) alias); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java index ca74c2df385f1..62ab073e41acd 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java @@ -6,7 +6,6 @@ import javax.annotation.Nonnull; import lombok.Value; - @Value public class TimeseriesFieldAnnotation { @@ -16,23 +15,29 @@ public class TimeseriesFieldAnnotation { AggregationType aggregationType; @Nonnull - public static TimeseriesFieldAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj, - @Nonnull final String schemaFieldName, @Nonnull final String context) { + public static TimeseriesFieldAnnotation fromPegasusAnnotationObject( + @Nonnull final Object annotationObj, + @Nonnull final String schemaFieldName, + @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { throw new ModelValidationException( - String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", + String.format( + "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", ANNOTATION_NAME, context)); } Map map = (Map) annotationObj; final Optional<String> statName = AnnotationUtils.getField(map, "name", String.class); - final Optional<String> aggregationType = AnnotationUtils.getField(map, "aggregationType", String.class); + final Optional<String> aggregationType = + AnnotationUtils.getField(map, "aggregationType", String.class); - return new TimeseriesFieldAnnotation(statName.orElse(schemaFieldName), + return new TimeseriesFieldAnnotation( + statName.orElse(schemaFieldName), aggregationType.map(AggregationType::valueOf).orElse(AggregationType.LATEST)); } public enum AggregationType { - LATEST, SUM + LATEST, + SUM } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java index c507d88445cdf..d8816e0667316 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java @@ -6,7 +6,6 @@ import javax.annotation.Nonnull; import lombok.Value; - @Value public class TimeseriesFieldCollectionAnnotation { public static final String ANNOTATION_NAME = "TimeseriesFieldCollection"; @@ -15,11 +14,14 @@ public class TimeseriesFieldCollectionAnnotation { String key; @Nonnull - public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj, - @Nonnull final String schemaFieldName, @Nonnull final String context) { + public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject( + @Nonnull final Object annotationObj, + @Nonnull final String schemaFieldName, + @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { throw new ModelValidationException( - String.format("Failed to validate @%s annotation declared at %s: Invalid 
value type provided (Expected Map)", + String.format( + "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", ANNOTATION_NAME, context)); } @@ -28,10 +30,12 @@ public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(@N final Optional<String> key = AnnotationUtils.getField(map, "key", String.class); if (!key.isPresent()) { throw new ModelValidationException( - String.format("Failed to validate @%s annotation declared at %s: 'key' field is required", ANNOTATION_NAME, - context)); + String.format( + "Failed to validate @%s annotation declared at %s: 'key' field is required", + ANNOTATION_NAME, context)); } - return new TimeseriesFieldCollectionAnnotation(collectionName.orElse(schemaFieldName), key.get()); + return new TimeseriesFieldCollectionAnnotation( + collectionName.orElse(schemaFieldName), key.get()); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java index 720eb87ec5c0e..b0ff6459ffbee 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java @@ -10,29 +10,31 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - -/** - * Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. - */ +/** Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. */ @Slf4j public class AspectExtractor { - private AspectExtractor() { - } + private AspectExtractor() {} public static Map<String, RecordTemplate> extractAspectRecords(RecordTemplate snapshot) { - return ModelUtils.getAspectsFromSnapshot(snapshot) - .stream() - .collect(Collectors.toMap(record -> getAspectNameFromSchema(record.schema()), Function.identity())); + return ModelUtils.getAspectsFromSnapshot(snapshot).stream() + .collect( + Collectors.toMap( + record -> getAspectNameFromSchema(record.schema()), Function.identity())); } private static String getAspectNameFromSchema(final RecordDataSchema aspectSchema) { - final Object aspectAnnotationObj = aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); + final Object aspectAnnotationObj = + aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); if (aspectAnnotationObj != null) { - return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()).getName(); + return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()) + .getName(); } - log.error(String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + log.error( + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); throw new IllegalArgumentException( - String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java index 6cc4fa4cd362d..899f66e66ea5a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.models.extractor; +import com.datahub.util.RecordUtils; import com.linkedin.data.schema.PathSpec; import com.linkedin.data.template.RecordTemplate; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.FieldSpec; @@ -16,28 +16,26 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -/** - * Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. - */ +/** Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. */ public class FieldExtractor { private static final String ARRAY_WILDCARD = "*"; private static final int MAX_VALUE_LENGTH = 200; - private FieldExtractor() { - } + private FieldExtractor() {} private static long getNumArrayWildcards(PathSpec pathSpec) { return pathSpec.getPathComponents().stream().filter(ARRAY_WILDCARD::equals).count(); } // Extract the value of each field in the field specs from the input record - public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull RecordTemplate record, List<T> fieldSpecs) { + public static <T extends FieldSpec> Map<T, List<Object>> extractFields( + @Nonnull RecordTemplate record, List<T> fieldSpecs) { return extractFields(record, fieldSpecs, MAX_VALUE_LENGTH); } - public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull RecordTemplate record, List<T> fieldSpecs, int maxValueLength) { + public static <T extends FieldSpec> Map<T, List<Object>> extractFields( + @Nonnull RecordTemplate record, List<T> fieldSpecs, int maxValueLength) { final Map<T, List<Object>> extractedFields = new HashMap<>(); for (T fieldSpec : fieldSpecs) { Optional<Object> value = RecordUtils.getFieldValue(record, fieldSpec.getPath()); @@ -49,12 +47,16 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull if (numArrayWildcards == 0) { // For maps, convert it into a list of the form key=value (Filter out long values) if (value.get() instanceof Map) { - extractedFields.put(fieldSpec, ((Map<?, ?>) value.get()).entrySet() - .stream() - .map(entry -> new Pair<>(entry.getKey().toString(), entry.getValue().toString())) - .filter(entry -> entry.getValue().length() < maxValueLength) - .map(entry -> entry.getKey() + "=" + entry.getValue()) - .collect(Collectors.toList())); + extractedFields.put( + fieldSpec, + ((Map<?, ?>) value.get()) + .entrySet().stream() + .map( + entry -> + new Pair<>(entry.getKey().toString(), entry.getValue().toString())) + .filter(entry -> entry.getValue().length() < maxValueLength) + .map(entry -> entry.getKey() + "=" + entry.getValue()) + .collect(Collectors.toList())); } else { extractedFields.put(fieldSpec, Collections.singletonList(value.get())); } @@ -62,7 +64,10 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull List<Object> valueList = (List<Object>) value.get(); // If the field is a nested list of values, flatten it for (int i = 0; i < numArrayWildcards - 1; i++) { - valueList = valueList.stream().flatMap(v -> ((List<Object>) v).stream()).collect(Collectors.toList()); + valueList = + valueList.stream() + .flatMap(v -> ((List<Object>) v).stream()) + .collect(Collectors.toList()); } extractedFields.put(fieldSpec, valueList); } @@ -71,14 +76,20 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull 
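// Editor's note (illustrative sketch, not part of this patch): when a field path contains
// N array wildcards ("*"), the extracted value is a list nested N levels deep, and the
// loop above flattens it N-1 times so that one flat list of values remains per field.
// A standalone version of that flattening step; the class name (FlattenExample) is
// hypothetical.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class FlattenExample {

  @SuppressWarnings("unchecked")
  static List<Object> flatten(List<Object> values, long numWildcards) {
    List<Object> result = values;
    // Each pass removes one level of nesting, just like the wildcard loop above.
    for (int i = 0; i < numWildcards - 1; i++) {
      result =
          result.stream()
              .flatMap(v -> ((List<Object>) v).stream())
              .collect(Collectors.toList());
    }
    return result;
  }

  public static void main(String[] args) {
    // Two wildcards -> doubly nested input, flattened once into a single list.
    List<Object> nested = Arrays.asList(Arrays.asList("a", "b"), Arrays.asList("c"));
    System.out.println(flatten(nested, 2)); // prints [a, b, c]
  }
}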
return extractedFields; } - public static <T extends FieldSpec> Map<T, List<Object>> extractFieldsFromSnapshot(RecordTemplate snapshot, - EntitySpec entitySpec, Function<AspectSpec, List<T>> getFieldSpecsFunc, int maxValueLength) { + public static <T extends FieldSpec> Map<T, List<Object>> extractFieldsFromSnapshot( + RecordTemplate snapshot, + EntitySpec entitySpec, + Function<AspectSpec, List<T>> getFieldSpecsFunc, + int maxValueLength) { final Map<String, RecordTemplate> aspects = AspectExtractor.extractAspectRecords(snapshot); final Map<T, List<Object>> extractedFields = new HashMap<>(); - aspects.keySet() - .stream() - .map(aspectName -> FieldExtractor.extractFields(aspects.get(aspectName), - getFieldSpecsFunc.apply(entitySpec.getAspectSpec(aspectName)), maxValueLength)) + aspects.keySet().stream() + .map( + aspectName -> + FieldExtractor.extractFields( + aspects.get(aspectName), + getFieldSpecsFunc.apply(entitySpec.getAspectSpec(aspectName)), + maxValueLength)) .forEach(extractedFields::putAll); return extractedFields; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index 95195620cf85a..fba916abd2430 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @@ -32,13 +35,9 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; - - /** - * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects - * from an entity registry config yaml file + * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects from an + * entity registry config yaml file */ @Slf4j public class ConfigEntityRegistry implements EntityRegistry { @@ -51,37 +50,55 @@ public class ConfigEntityRegistry implements EntityRegistry { private final Map<String, AspectSpec> _aspectNameToSpec; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(new YAMLFactory()); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public ConfigEntityRegistry(Pair<Path, Path> configFileClassPathPair) throws IOException { - this(DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), configFileClassPathPair.getFirst()); + this( + DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), + configFileClassPathPair.getFirst()); } - public ConfigEntityRegistry(String 
entityRegistryRoot) throws EntityRegistryException, IOException { + public ConfigEntityRegistry(String entityRegistryRoot) + throws EntityRegistryException, IOException { this(getFileAndClassPath(entityRegistryRoot)); } - private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot) throws IOException, EntityRegistryException { + private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot) + throws IOException, EntityRegistryException { Path entityRegistryRootLoc = Paths.get(entityRegistryRoot); if (Files.isDirectory(entityRegistryRootLoc)) { // Look for entity_registry.yml or entity_registry.yaml in the root folder - List<Path> yamlFiles = Files.walk(entityRegistryRootLoc, 1) - .filter(Files::isRegularFile) - .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) - .collect(Collectors.toList()); + List<Path> yamlFiles = + Files.walk(entityRegistryRootLoc, 1) + .filter(Files::isRegularFile) + .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) + .collect(Collectors.toList()); if (yamlFiles.size() == 0) { throw new EntityRegistryException( - String.format("Did not find an entity registry (entity_registry.yaml/yml) under %s", entityRegistryRootLoc)); + String.format( + "Did not find an entity registry (entity_registry.yaml/yml) under %s", + entityRegistryRootLoc)); } if (yamlFiles.size() > 1) { - log.warn("Found more than one yaml file in the directory {}. Will pick the first {}", - entityRegistryRootLoc, yamlFiles.get(0)); + log.warn( + "Found more than one yaml file in the directory {}. Will pick the first {}", + entityRegistryRootLoc, + yamlFiles.get(0)); } Path entityRegistryFile = yamlFiles.get(0); - log.info("Loading custom config entity file: {}, dir: {}", entityRegistryFile, entityRegistryRootLoc); + log.info( + "Loading custom config entity file: {}, dir: {}", + entityRegistryFile, + entityRegistryRootLoc); return new Pair<>(entityRegistryFile, entityRegistryRootLoc); } else { // We assume that the file being passed in is a bare entity registry yaml file @@ -94,7 +111,8 @@ public ConfigEntityRegistry(InputStream configFileInputStream) { this(DataSchemaFactory.getInstance(), configFileInputStream); } - public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath) throws FileNotFoundException { + public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath) + throws FileNotFoundException { this(dataSchemaFactory, new FileInputStream(configFilePath.toString())); } @@ -106,7 +124,8 @@ public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con } catch (IOException e) { e.printStackTrace(); throw new IllegalArgumentException( - String.format("Error while reading config file in path %s: %s", configFileStream, e.getMessage())); + String.format( + "Error while reading config file in path %s: %s", configFileStream, e.getMessage())); } if (entities.getId() != null) { identifier = entities.getId(); @@ -120,12 +139,16 @@ public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con for (Entity entity : entities.getEntities()) { List<AspectSpec> aspectSpecs = new ArrayList<>(); aspectSpecs.add(buildAspectSpec(entity.getKeyAspect(), entitySpecBuilder)); - entity.getAspects().forEach(aspect -> aspectSpecs.add(buildAspectSpec(aspect, entitySpecBuilder))); + entity + .getAspects() + .forEach(aspect -> aspectSpecs.add(buildAspectSpec(aspect, entitySpecBuilder))); EntitySpec entitySpec; Optional<DataSchema> 
entitySchema = dataSchemaFactory.getEntitySchema(entity.getName()); if (!entitySchema.isPresent()) { - entitySpec = entitySpecBuilder.buildConfigEntitySpec(entity.getName(), entity.getKeyAspect(), aspectSpecs); + entitySpec = + entitySpecBuilder.buildConfigEntitySpec( + entity.getName(), entity.getKeyAspect(), aspectSpecs); } else { entitySpec = entitySpecBuilder.buildEntitySpec(entitySchema.get(), aspectSpecs); } @@ -210,7 +233,7 @@ public Map<String, EventSpec> getEventSpecs() { @Override public AspectTemplateEngine getAspectTemplateEngine() { - //TODO: add support for config based aspect templates + // TODO: add support for config based aspect templates return new AspectTemplateEngine(); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java index cf9ca68d0ee4f..8c415d56f0d5f 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java @@ -9,7 +9,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** * The Entity Registry provides a mechanism to retrieve metadata about entities modeled in GMA. * Metadata includes the entity's common name, the aspects that comprise it, and search index + @@ -23,8 +22,10 @@ default String getIdentifier() { /** * Given an entity name, returns an instance of {@link DefaultEntitySpec} + * * @param entityName the name of the entity to be retrieved - * @return an {@link DefaultEntitySpec} corresponding to the entity name provided, null if none exists. + * @return an {@link DefaultEntitySpec} corresponding to the entity name provided, null if none + * exists. */ @Nonnull EntitySpec getEntitySpec(@Nonnull final String entityName); @@ -33,34 +34,36 @@ default String getIdentifier() { * Given an event name, returns an instance of {@link DefaultEventSpec}. * * @param eventName the name of the event to be retrieved - * @return an {@link DefaultEventSpec} corresponding to the entity name provided, null if none exists. + * @return an {@link DefaultEventSpec} corresponding to the entity name provided, null if none + * exists. */ @Nullable EventSpec getEventSpec(@Nonnull final String eventName); /** * Returns all {@link DefaultEntitySpec}s that the registry is aware of. + * * @return a map of String to {@link DefaultEntitySpec}s, empty map if none exists. */ @Nonnull Map<String, EntitySpec> getEntitySpecs(); - /** * Returns all {@link AspectSpec}s that the registry is aware of. + * * @return a map of String to {@link AspectSpec}s, empty map if none exists. */ @Nonnull Map<String, AspectSpec> getAspectSpecs(); - /** - * Returns all {@link EventSpec}s that the registry is aware of. - */ + /** Returns all {@link EventSpec}s that the registry is aware of. 
*/ @Nonnull Map<String, EventSpec> getEventSpecs(); /** - * Returns an {@link AspectTemplateEngine} that is used for generating templates from {@link com.linkedin.metadata.models.AspectSpec}s + * Returns an {@link AspectTemplateEngine} that is used for generating templates from {@link + * com.linkedin.metadata.models.AspectSpec}s + * * @return a template engine instance associated with this registry */ @Nonnull diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java index d43782ce0f07f..8d108445e67be 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java @@ -8,17 +8,17 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class EntityRegistryUtils { - private EntityRegistryUtils() { - - } + private EntityRegistryUtils() {} public static Map<String, AspectSpec> populateAspectMap(List<EntitySpec> entitySpecs) { return entitySpecs.stream() .map(EntitySpec::getAspectSpecs) .flatMap(Collection::stream) - .collect(Collectors.toMap(AspectSpec::getName, Function.identity(), (aspectSpec1, aspectSpec2) -> aspectSpec1)); + .collect( + Collectors.toMap( + AspectSpec::getName, + Function.identity(), + (aspectSpec1, aspectSpec2) -> aspectSpec1)); } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java index 345d5aa02f398..2a5d09db00396 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java @@ -19,10 +19,10 @@ import lombok.Value; import org.apache.commons.lang3.tuple.Triple; - /** - * The Lineage Registry provides a mechanism to retrieve metadata about the lineage relationships between different entities - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * The Lineage Registry provides a mechanism to retrieve metadata about the lineage relationships + * between different entities Lineage relationship denotes whether an entity is directly upstream or + * downstream of another entity */ public class LineageRegistry { @@ -35,55 +35,73 @@ public LineageRegistry(EntityRegistry entityRegistry) { } private Map<String, LineageSpec> buildLineageSpecs(EntityRegistry entityRegistry) { - // 1. Flatten relationship annotations into a list of lineage edges (source, dest, type, isUpstream) - Collection<LineageEdge> lineageEdges = entityRegistry.getEntitySpecs() - .entrySet() - .stream() - .flatMap(entry -> entry.getValue() - .getRelationshipFieldSpecs() - .stream() + // 1. 
Flatten relationship annotations into a list of lineage edges (source, dest, type,
+    // isUpstream)
+    Collection<LineageEdge> lineageEdges =
+        entityRegistry.getEntitySpecs().entrySet().stream()
             .flatMap(
-                spec -> getLineageEdgesFromRelationshipAnnotation(entry.getKey(), spec.getRelationshipAnnotation())))
-        // If there are multiple edges with the same source, dest, edge type, get one of them
-        .collect(Collectors.toMap(edge -> Triple.of(edge.getSourceEntity(), edge.getDestEntity(), edge.getType()),
-            Function.identity(), (x1, x2) -> x1))
-        .values();
+                entry ->
+                    entry.getValue().getRelationshipFieldSpecs().stream()
+                        .flatMap(
+                            spec ->
+                                getLineageEdgesFromRelationshipAnnotation(
+                                    entry.getKey(), spec.getRelationshipAnnotation())))
+            // If there are multiple edges with the same source, dest, edge type, get one of them
+            .collect(
+                Collectors.toMap(
+                    edge -> Triple.of(edge.getSourceEntity(), edge.getDestEntity(), edge.getType()),
+                    Function.identity(),
+                    (x1, x2) -> x1))
+            .values();
 
     // 2. Figure out the upstream and downstream edges of each entity type
     Map<String, Set<EdgeInfo>> upstreamPerEntity = new HashMap<>();
     Map<String, Set<EdgeInfo>> downstreamPerEntity = new HashMap<>();
-    // A downstreamOf B : A -> upstream (downstreamOf, OUTGOING), B -> downstream (downstreamOf, INCOMING)
+    // A downstreamOf B : A -> upstream (downstreamOf, OUTGOING), B -> downstream (downstreamOf,
+    // INCOMING)
     // A produces B : A -> downstream (produces, OUTGOING), B -> upstream (produces, INCOMING)
     for (LineageEdge edge : lineageEdges) {
       if (edge.isUpstream()) {
-        upstreamPerEntity.computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>())
+        upstreamPerEntity
+            .computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>())
             .add(new EdgeInfo(edge.type, RelationshipDirection.OUTGOING, edge.destEntity));
-        downstreamPerEntity.computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>())
+        downstreamPerEntity
+            .computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>())
             .add(new EdgeInfo(edge.type, RelationshipDirection.INCOMING, edge.sourceEntity));
       } else {
-        downstreamPerEntity.computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>())
+        downstreamPerEntity
+            .computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>())
             .add(new EdgeInfo(edge.type, RelationshipDirection.OUTGOING, edge.destEntity));
-        upstreamPerEntity.computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>())
+        upstreamPerEntity
+            .computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>())
             .add(new EdgeInfo(edge.type, RelationshipDirection.INCOMING, edge.sourceEntity));
       }
     }
 
-    return entityRegistry.getEntitySpecs()
-        .keySet()
-        .stream()
-        .collect(Collectors.toMap(String::toLowerCase, entityName -> new LineageSpec(
-            new ArrayList<>(upstreamPerEntity.getOrDefault(entityName.toLowerCase(), Collections.emptySet())),
-            new ArrayList<>(downstreamPerEntity.getOrDefault(entityName.toLowerCase(), Collections.emptySet())))));
+    return entityRegistry.getEntitySpecs().keySet().stream()
+        .collect(
+            Collectors.toMap(
+                String::toLowerCase,
+                entityName ->
+                    new LineageSpec(
+                        new ArrayList<>(
+                            upstreamPerEntity.getOrDefault(
+                                entityName.toLowerCase(), Collections.emptySet())),
+                        new ArrayList<>(
+                            downstreamPerEntity.getOrDefault(
+                                entityName.toLowerCase(), Collections.emptySet())))));
   }
 
-  private Stream<LineageEdge> getLineageEdgesFromRelationshipAnnotation(String sourceEntity,
-      RelationshipAnnotation annotation) {
+  private Stream<LineageEdge> getLineageEdgesFromRelationshipAnnotation(
+      String sourceEntity, RelationshipAnnotation annotation) {
     if (!annotation.isLineage()) {
       return Stream.empty();
     }
-    return annotation.getValidDestinationTypes()
-        .stream()
-        .map(destEntity -> new LineageEdge(sourceEntity, destEntity, annotation.getName(), annotation.isUpstream()));
+    return annotation.getValidDestinationTypes().stream()
+        .map(
+            destEntity ->
+                new LineageEdge(
+                    sourceEntity, destEntity, annotation.getName(), annotation.isUpstream()));
   }
 
   public LineageSpec getLineageSpec(String entityName) {
@@ -92,11 +110,13 @@ public LineageSpec getLineageSpec(String entityName) {
 
   public Set<String> getEntitiesWithLineageToEntityType(String entityType) {
     Map<String, EntitySpec> specs = _entityRegistry.getEntitySpecs();
-    Set<String> result = Streams.concat(_lineageSpecMap.get(entityType.toLowerCase()).getDownstreamEdges().stream(),
-        _lineageSpecMap.get(entityType.toLowerCase()).getUpstreamEdges().stream())
-        .map(EdgeInfo::getOpposingEntityType)
-        .map(entity -> specs.get(entity.toLowerCase()).getName())
-        .collect(Collectors.toSet());
+    Set<String> result =
+        Streams.concat(
+                _lineageSpecMap.get(entityType.toLowerCase()).getDownstreamEdges().stream(),
+                _lineageSpecMap.get(entityType.toLowerCase()).getUpstreamEdges().stream())
+            .map(EdgeInfo::getOpposingEntityType)
+            .map(entity -> specs.get(entity.toLowerCase()).getName())
+            .collect(Collectors.toSet());
     result.add(entityType);
     return result;
   }
@@ -120,9 +140,11 @@ public List<EdgeInfo> getLineageRelationships(String entityName, LineageDirectio
 
   private List<EdgeInfo> getSchemaFieldRelationships(LineageDirection direction) {
     List<EdgeInfo> schemaFieldEdges = new ArrayList<>();
     if (direction == LineageDirection.UPSTREAM) {
-      schemaFieldEdges.add(new EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "schemafield"));
+      schemaFieldEdges.add(
+          new EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "schemafield"));
     } else {
-      schemaFieldEdges.add(new EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "schemafield"));
+      schemaFieldEdges.add(
+          new EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "schemafield"));
     }
     return schemaFieldEdges;
   }
@@ -165,8 +187,9 @@ public boolean equals(Object o) {
     public int hashCode() {
       return ((this.type == null ? 0 : this.type.toLowerCase().hashCode())
           ^ (this.direction == null ? 0 : this.direction.hashCode())
-          ^ (this.opposingEntityType == null ? 0 : this.opposingEntityType.toLowerCase().hashCode()));
+          ^ (this.opposingEntityType == null
+              ? 0
+              : this.opposingEntityType.toLowerCase().hashCode()));
     }
   }
-
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java
index f0ec57b8d81c3..06aeefc2e5aa0 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java
@@ -19,10 +19,7 @@
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Combines results from two entity registries, where the second takes precedence
- */
+/** Combines results from two entity registries, where the second takes precedence */
 @Slf4j
 public class MergedEntityRegistry implements EntityRegistry {
 
@@ -34,8 +31,14 @@ public class MergedEntityRegistry implements EntityRegistry {
   public MergedEntityRegistry(EntityRegistry baseEntityRegistry) {
     // baseEntityRegistry.get*Specs() can return immutable Collections.emptyMap() which fails
     // when this class attempts .put* operations on it.
-    entityNameToSpec = baseEntityRegistry.getEntitySpecs() != null ? new HashMap<>(baseEntityRegistry.getEntitySpecs()) : new HashMap<>();
-    eventNameToSpec = baseEntityRegistry.getEventSpecs() != null ? new HashMap<>(baseEntityRegistry.getEventSpecs()) : new HashMap<>();
+    entityNameToSpec =
+        baseEntityRegistry.getEntitySpecs() != null
+            ? new HashMap<>(baseEntityRegistry.getEntitySpecs())
+            : new HashMap<>();
+    eventNameToSpec =
+        baseEntityRegistry.getEventSpecs() != null
+            ? new HashMap<>(baseEntityRegistry.getEventSpecs())
+            : new HashMap<>();
     baseEntityRegistry.getAspectTemplateEngine();
     _aspectTemplateEngine = baseEntityRegistry.getAspectTemplateEngine();
     _aspectNameToSpec = baseEntityRegistry.getAspectSpecs();
@@ -44,22 +47,28 @@ public MergedEntityRegistry(EntityRegistry baseEntityRegistry) {
 
   private void validateEntitySpec(EntitySpec entitySpec, final ValidationResult validationResult) {
     if (entitySpec.getKeyAspectSpec() == null) {
       validationResult.setValid(false);
-      validationResult.getValidationFailures().add(String.format("Key aspect is missing in entity {}", entitySpec.getName()));
+      validationResult
+          .getValidationFailures()
+          .add(String.format("Key aspect is missing in entity {}", entitySpec.getName()));
     }
   }
 
-  public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) throws EntityRegistryException {
+  public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry)
+      throws EntityRegistryException {
     ValidationResult validationResult = validatePatch(patchEntityRegistry);
     if (!validationResult.isValid()) {
-      throw new EntityRegistryException(String.format("Failed to validate new registry with %s", validationResult.validationFailures.stream().collect(
-          Collectors.joining("\n"))));
+      throw new EntityRegistryException(
+          String.format(
+              "Failed to validate new registry with %s",
+              validationResult.validationFailures.stream().collect(Collectors.joining("\n"))));
     }
 
     // Merge Entity Specs
     for (Map.Entry<String, EntitySpec> e2Entry : patchEntityRegistry.getEntitySpecs().entrySet()) {
       if (entityNameToSpec.containsKey(e2Entry.getKey())) {
-        EntitySpec mergeEntitySpec = mergeEntitySpecs(entityNameToSpec.get(e2Entry.getKey()), e2Entry.getValue());
+        EntitySpec mergeEntitySpec =
+            mergeEntitySpecs(entityNameToSpec.get(e2Entry.getKey()), e2Entry.getValue());
         entityNameToSpec.put(e2Entry.getKey(), mergeEntitySpec);
       } else {
         // We are inserting a new entity into the registry
@@ -71,41 +80,63 @@ public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) throws Ent
     if (patchEntityRegistry.getEventSpecs().size() > 0) {
       eventNameToSpec.putAll(patchEntityRegistry.getEventSpecs());
     }
-    //TODO: Validate that the entity registries don't have conflicts among each other
+    // TODO: Validate that the entity registries don't have conflicts among each other
     return this;
   }
 
   private ValidationResult validatePatch(EntityRegistry patchEntityRegistry) {
     ValidationResult validationResult = new ValidationResult();
     for (Map.Entry<String, EntitySpec> e2Entry : patchEntityRegistry.getEntitySpecs().entrySet()) {
-      checkMergeable(entityNameToSpec.getOrDefault(e2Entry.getKey(), null), e2Entry.getValue(), validationResult);
+      checkMergeable(
+          entityNameToSpec.getOrDefault(e2Entry.getKey(), null),
+          e2Entry.getValue(),
+          validationResult);
     }
     return validationResult;
   }
 
-  private void checkMergeable(EntitySpec existingEntitySpec, EntitySpec newEntitySpec, final ValidationResult validationResult) {
+  private void checkMergeable(
+      EntitySpec existingEntitySpec,
+      EntitySpec newEntitySpec,
+      final ValidationResult validationResult) {
     if (existingEntitySpec != null) {
-      existingEntitySpec.getAspectSpecMap().entrySet().forEach(aspectSpecEntry -> {
-        if (newEntitySpec.hasAspect(aspectSpecEntry.getKey())) {
-          CompatibilityResult result = CompatibilityChecker.checkCompatibility(aspectSpecEntry.getValue().getPegasusSchema(), newEntitySpec.getAspectSpec(
-              aspectSpecEntry.getKey()).getPegasusSchema(), new CompatibilityOptions());
-          if (result.isError()) {
-            log.error("{} schema is not compatible with previous schema due to {}", aspectSpecEntry.getKey(), result.getMessages());
-            // we want to continue processing all aspects to collect all failures
-            validationResult.setValid(false);
-            validationResult.getValidationFailures().add(
-                String.format("%s schema is not compatible with previous schema due to %s", aspectSpecEntry.getKey(), result.getMessages()));
-          } else {
-            log.info("{} schema is compatible with previous schema due to {}", aspectSpecEntry.getKey(), result.getMessages());
-          }
-        }
-      });
+      existingEntitySpec
+          .getAspectSpecMap()
+          .entrySet()
+          .forEach(
+              aspectSpecEntry -> {
+                if (newEntitySpec.hasAspect(aspectSpecEntry.getKey())) {
+                  CompatibilityResult result =
+                      CompatibilityChecker.checkCompatibility(
+                          aspectSpecEntry.getValue().getPegasusSchema(),
+                          newEntitySpec.getAspectSpec(aspectSpecEntry.getKey()).getPegasusSchema(),
+                          new CompatibilityOptions());
+                  if (result.isError()) {
+                    log.error(
+                        "{} schema is not compatible with previous schema due to {}",
+                        aspectSpecEntry.getKey(),
+                        result.getMessages());
+                    // we want to continue processing all aspects to collect all failures
+                    validationResult.setValid(false);
+                    validationResult
+                        .getValidationFailures()
+                        .add(
+                            String.format(
+                                "%s schema is not compatible with previous schema due to %s",
+                                aspectSpecEntry.getKey(), result.getMessages()));
+                  } else {
+                    log.info(
+                        "{} schema is compatible with previous schema due to {}",
+                        aspectSpecEntry.getKey(),
+                        result.getMessages());
+                  }
+                }
+              });
     } else {
       validateEntitySpec(newEntitySpec, validationResult);
     }
   }
 
-
   private EntitySpec mergeEntitySpecs(EntitySpec existingEntitySpec, EntitySpec newEntitySpec) {
     Map<String, AspectSpec> aspectSpecMap = new HashMap<>(existingEntitySpec.getAspectSpecMap());
     aspectSpecMap.putAll(newEntitySpec.getAspectSpecMap());
@@ -116,8 +147,11 @@ private EntitySpec mergeEntitySpecs(EntitySpec existingEntitySpec, EntitySpec ne
           existingEntitySpec.getEntityAnnotation().getKeyAspect(), aspectSpecMap.values());
     }
 
-    return new DefaultEntitySpec(aspectSpecMap.values(), existingEntitySpec.getEntityAnnotation(),
-        existingEntitySpec.getSnapshotSchema(), existingEntitySpec.getAspectTyperefSchema());
+    return new DefaultEntitySpec(
+        aspectSpecMap.values(),
+        existingEntitySpec.getEntityAnnotation(),
+        existingEntitySpec.getSnapshotSchema(),
+        existingEntitySpec.getAspectTyperefSchema());
   }
 
   @Nonnull
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java
index 76d9c8ceb089c..9eafbe05a4fc6 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.models.registry;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*;
+
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
@@ -32,13 +35,10 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.maven.artifact.versioning.ComparableVersion;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*;
-
-
 /**
- * Implementation of {@link EntityRegistry} that is similar to {@link ConfigEntityRegistry} but different in one important way.
- * It builds potentially partially specified {@link com.linkedin.metadata.models.PartialEntitySpec} objects from an entity registry config yaml file
+ * Implementation of {@link EntityRegistry} that is similar to {@link ConfigEntityRegistry} but
+ * different in one important way. It builds potentially partially specified {@link
+ * com.linkedin.metadata.models.PartialEntitySpec} objects from an entity registry config yaml file
  */
 @Slf4j
 public class PatchEntityRegistry implements EntityRegistry {
@@ -53,37 +53,50 @@ public class PatchEntityRegistry implements EntityRegistry {
   private final String identifier;
 
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(new YAMLFactory());
+
   static {
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    OBJECT_MAPPER
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
   }
 
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder("PatchEntityRegistry[" + "identifier=" + identifier + ';');
-    entityNameToSpec.entrySet()
-        .stream()
-        .forEach(entry -> sb.append("[entityName=")
-            .append(entry.getKey())
-            .append(";aspects=[")
-            .append(
-                entry.getValue().getAspectSpecs().stream().map(spec -> spec.getName()).collect(Collectors.joining(",")))
-            .append("]]"));
-    eventNameToSpec.entrySet()
-        .stream()
-        .forEach(entry -> sb.append("[eventName=")
-            .append(entry.getKey())
-            .append("]"));
+    entityNameToSpec.entrySet().stream()
+        .forEach(
+            entry ->
+                sb.append("[entityName=")
+                    .append(entry.getKey())
+                    .append(";aspects=[")
+                    .append(
+                        entry.getValue().getAspectSpecs().stream()
+                            .map(spec -> spec.getName())
+                            .collect(Collectors.joining(",")))
+                    .append("]]"));
+    eventNameToSpec.entrySet().stream()
+        .forEach(entry -> sb.append("[eventName=").append(entry.getKey()).append("]"));
     return sb.toString();
   }
 
-  public PatchEntityRegistry(Pair<Path, Path> configFileClassPathPair, String registryName,
-      ComparableVersion registryVersion) throws IOException, EntityRegistryException {
-    this(DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), configFileClassPathPair.getFirst(),
-        registryName, registryVersion);
+  public PatchEntityRegistry(
+      Pair<Path, Path> configFileClassPathPair,
+      String registryName,
+      ComparableVersion registryVersion)
+      throws IOException, EntityRegistryException {
+    this(
+        DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()),
+        configFileClassPathPair.getFirst(),
+        registryName,
+        registryVersion);
   }
 
-  public PatchEntityRegistry(String entityRegistryRoot, String registryName, ComparableVersion registryVersion)
+  public PatchEntityRegistry(
+      String entityRegistryRoot, String registryName, ComparableVersion registryVersion)
       throws EntityRegistryException, IOException {
     this(getFileAndClassPath(entityRegistryRoot), registryName, registryVersion);
   }
@@ -93,21 +106,28 @@ private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot)
     Path entityRegistryRootLoc = Paths.get(entityRegistryRoot);
     if (Files.isDirectory(entityRegistryRootLoc)) {
       // Look for entity-registry.yml or entity-registry.yaml in the root folder
-      List<Path> yamlFiles = Files.walk(entityRegistryRootLoc, 1)
-          .filter(Files::isRegularFile)
-          .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml"))
-          .collect(Collectors.toList());
+      List<Path> yamlFiles =
+          Files.walk(entityRegistryRootLoc, 1)
+              .filter(Files::isRegularFile)
+              .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml"))
+              .collect(Collectors.toList());
       if (yamlFiles.size() == 0) {
         throw new EntityRegistryException(
-            String.format("Did not find an entity registry (entity-registry.yaml/yml) under %s",
+            String.format(
+                "Did not find an entity registry (entity-registry.yaml/yml) under %s",
                 entityRegistryRootLoc));
       }
       if (yamlFiles.size() > 1) {
-        log.warn("Found more than one yaml file in the directory {}. Will pick the first {}", entityRegistryRootLoc,
+        log.warn(
+            "Found more than one yaml file in the directory {}. Will pick the first {}",
+            entityRegistryRootLoc,
             yamlFiles.get(0));
       }
       Path entityRegistryFile = yamlFiles.get(0);
-      log.info("Loading custom config entity file: {}, dir: {}", entityRegistryFile, entityRegistryRootLoc);
+      log.info(
+          "Loading custom config entity file: {}, dir: {}",
+          entityRegistryFile,
+          entityRegistryRootLoc);
       return new Pair<>(entityRegistryFile, entityRegistryRootLoc);
     } else {
       // We assume that the file being passed in is a bare entity registry yaml file
@@ -116,13 +136,25 @@ private static Pair<Path, Path> getFileAndClassPath(String entityRegistryRoot)
     }
   }
 
-  public PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath, String registryName,
-      ComparableVersion registryVersion) throws FileNotFoundException, EntityRegistryException {
-    this(dataSchemaFactory, new FileInputStream(configFilePath.toString()), registryName, registryVersion);
+  public PatchEntityRegistry(
+      DataSchemaFactory dataSchemaFactory,
+      Path configFilePath,
+      String registryName,
+      ComparableVersion registryVersion)
+      throws FileNotFoundException, EntityRegistryException {
+    this(
+        dataSchemaFactory,
+        new FileInputStream(configFilePath.toString()),
+        registryName,
+        registryVersion);
   }
 
-  private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream configFileStream, String registryName,
-      ComparableVersion registryVersion) throws EntityRegistryException {
+  private PatchEntityRegistry(
+      DataSchemaFactory dataSchemaFactory,
+      InputStream configFileStream,
+      String registryName,
+      ComparableVersion registryVersion)
+      throws EntityRegistryException {
     this.dataSchemaFactory = dataSchemaFactory;
     this.registryName = registryName;
     this.registryVersion = registryVersion;
@@ -133,7 +165,8 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con
     } catch (IOException e) {
       e.printStackTrace();
       throw new IllegalArgumentException(
-          String.format("Error while reading config file in path %s: %s", configFileStream, e.getMessage()));
+          String.format(
+              "Error while reading config file in path %s: %s", configFileStream, e.getMessage()));
     }
     if (entities.getId() != null) {
       identifier = entities.getId();
@@ -144,7 +177,9 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con
     // Build Entity Specs
     EntitySpecBuilder entitySpecBuilder = new EntitySpecBuilder();
     for (Entity entity : entities.getEntities()) {
-      log.info("Discovered entity {} with aspects {}", entity.getName(),
+      log.info(
+          "Discovered entity {} with aspects {}",
+          entity.getName(),
           entity.getAspects().stream().collect(Collectors.joining()));
       List<AspectSpec> aspectSpecs = new ArrayList<>();
       if (entity.getKeyAspect() != null) {
@@ -152,16 +187,20 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con
         log.info("Adding key aspect {} with spec {}", entity.getKeyAspect(), keyAspectSpec);
         aspectSpecs.add(keyAspectSpec);
       }
-      entity.getAspects().forEach(aspect -> {
-        if (!aspect.equals(entity.getKeyAspect())) {
-          AspectSpec aspectSpec = buildAspectSpec(aspect, entitySpecBuilder);
-          log.info("Adding aspect {} with spec {}", aspect, aspectSpec);
-          aspectSpecs.add(aspectSpec);
-        }
-      });
+      entity
+          .getAspects()
+          .forEach(
+              aspect -> {
+                if (!aspect.equals(entity.getKeyAspect())) {
+                  AspectSpec aspectSpec = buildAspectSpec(aspect, entitySpecBuilder);
+                  log.info("Adding aspect {} with spec {}", aspect, aspectSpec);
+                  aspectSpecs.add(aspectSpec);
+                }
+              });
 
       EntitySpec entitySpec =
-          entitySpecBuilder.buildPartialEntitySpec(entity.getName(), entity.getKeyAspect(), aspectSpecs);
+          entitySpecBuilder.buildPartialEntitySpec(
+              entity.getName(), entity.getKeyAspect(), aspectSpecs);
       entityNameToSpec.put(entity.getName().toLowerCase(), entitySpec);
     }
 
@@ -225,7 +264,7 @@ public Map<String, EventSpec> getEventSpecs() {
   @Nonnull
   @Override
   public AspectTemplateEngine getAspectTemplateEngine() {
-    //TODO: support patch based templates
+    // TODO: support patch based templates
    return new AspectTemplateEngine();
   }
 
@@ -236,7 +275,8 @@ private AspectSpec buildAspectSpec(String aspectName, EntitySpecBuilder entitySp
     if (!aspectSchema.isPresent()) {
       throw new IllegalArgumentException(String.format("Aspect %s does not exist", aspectName));
     }
-    AspectSpec aspectSpec = entitySpecBuilder.buildAspectSpec(aspectSchema.get(), aspectClass.get());
+    AspectSpec aspectSpec =
+        entitySpecBuilder.buildAspectSpec(aspectSchema.get(), aspectClass.get());
     aspectSpec.setRegistryName(this.registryName);
     aspectSpec.setRegistryVersion(this.registryVersion);
     return aspectSpec;
@@ -249,5 +289,4 @@ private EventSpec buildEventSpec(String eventName) {
     }
     return new EventSpecBuilder().buildEventSpec(eventName, eventSchema.get());
   }
-
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java
index 4809b1f4d2f21..05c752a5c1575 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java
@@ -23,14 +23,14 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.maven.artifact.versioning.ComparableVersion;
 
-
 @Slf4j
 public class PluginEntityRegistryLoader {
   private static int _MAXLOADFAILURES = 5;
   private final Boolean scanningEnabled;
   private final String pluginDirectory;
   // Registry Name -> Registry Version -> (Registry, LoadResult)
-  private final Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> patchRegistries;
+  private final Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>>
+      patchRegistries;
   private MergedEntityRegistry mergedEntityRegistry;
   private boolean started = false;
   private final Lock lock = new ReentrantLock();
@@ -41,7 +41,9 @@ public class PluginEntityRegistryLoader {
   public PluginEntityRegistryLoader(String pluginDirectory) {
     File directory = new File(pluginDirectory);
     if (!directory.exists() || !directory.isDirectory()) {
-      log.warn("{} directory does not exist or is not a directory. Plugin scanning will be disabled.", directory);
+      log.warn(
+          "{} directory does not exist or is not a directory. Plugin scanning will be disabled.",
+          directory);
       scanningEnabled = false;
     } else {
       scanningEnabled = true;
@@ -50,7 +52,8 @@ public PluginEntityRegistryLoader(String pluginDirectory) {
     this.patchRegistries = new HashMap<>();
   }
 
-  public Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> getPatchRegistries() {
+  public Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>>
+      getPatchRegistries() {
     return patchRegistries;
   }
 
@@ -59,7 +62,8 @@ public PluginEntityRegistryLoader withBaseRegistry(MergedEntityRegistry baseEnti
     return this;
   }
 
-  public PluginEntityRegistryLoader start(boolean waitForInitialization) throws InterruptedException {
+  public PluginEntityRegistryLoader start(boolean waitForInitialization)
+      throws InterruptedException {
     if (started) {
       log.warn("Already started!. Skipping");
       return this;
@@ -68,45 +72,69 @@ public PluginEntityRegistryLoader start(boolean waitForInitialization) throws In
       return this;
     }
 
-    executorService.scheduleAtFixedRate(() -> {
-      lock.lock();
-      try {
-        Path rootPath = Paths.get(this.pluginDirectory);
-        int rootDepth = rootPath.getNameCount();
-        List<Path> paths =
-            Files.walk(rootPath, 2).filter(x -> x.getNameCount() - rootDepth == 2).collect(Collectors.toList());
-        log.debug("Size of list {}", paths.size());
-        log.debug("Paths : {}", paths.stream().map(x -> x.toString() + ";").collect(Collectors.joining()));
-        List<Path> versionedPaths = paths.stream().filter(path -> {
+    executorService.scheduleAtFixedRate(
+        () -> {
+          lock.lock();
           try {
-            ComparableVersion comparableVersion = new ComparableVersion(path.getName(rootDepth + 1).toString());
-            return true;
+            Path rootPath = Paths.get(this.pluginDirectory);
+            int rootDepth = rootPath.getNameCount();
+            List<Path> paths =
+                Files.walk(rootPath, 2)
+                    .filter(x -> x.getNameCount() - rootDepth == 2)
+                    .collect(Collectors.toList());
+            log.debug("Size of list {}", paths.size());
+            log.debug(
+                "Paths : {}",
+                paths.stream().map(x -> x.toString() + ";").collect(Collectors.joining()));
+            List<Path> versionedPaths =
+                paths.stream()
+                    .filter(
+                        path -> {
+                          try {
+                            ComparableVersion comparableVersion =
+                                new ComparableVersion(path.getName(rootDepth + 1).toString());
+                            return true;
+                          } catch (Exception e) {
+                            log.warn(
+                                String.format(
+                                    "Will skip %s since we weren't able to parse a legal version from it",
+                                    path.toString()));
+                            return false;
+                          }
+                        })
+                    .sorted(
+                        (path1, path2) -> {
+                          if (path1.getName(rootDepth).equals(path2.getName(rootDepth))) {
+                            return new ComparableVersion(path1.getName(rootDepth + 1).toString())
+                                .compareTo(
+                                    new ComparableVersion(path2.getName(rootDepth + 1).toString()));
+                          } else {
+                            return path1.getName(rootDepth).compareTo(path2.getName(rootDepth));
+                          }
+                        })
+                    .collect(Collectors.toList());
+            log.debug(
+                "Will be loading paths in this order {}",
+                versionedPaths.stream().map(p -> p.toString()).collect(Collectors.joining(";")));
+
+            versionedPaths.forEach(
+                x ->
+                    loadOneRegistry(
+                        this.mergedEntityRegistry,
+                        x.getName(rootDepth).toString(),
+                        x.getName(rootDepth + 1).toString(),
+                        x.toString()));
           } catch (Exception e) {
-            log.warn(
-                String.format("Will skip %s since we weren't able to parse a legal version from it", path.toString()));
-            return false;
-          }
-        }).sorted((path1, path2) -> {
-          if (path1.getName(rootDepth).equals(path2.getName(rootDepth))) {
-            return new ComparableVersion(path1.getName(rootDepth + 1).toString()).compareTo(
-                new ComparableVersion(path2.getName(rootDepth + 1).toString()));
-          } else {
-            return path1.getName(rootDepth).compareTo(path2.getName(rootDepth));
+            log.warn("Failed to walk directory with exception", e);
+          } finally {
+            booted = true;
+            initialized.signal();
+            lock.unlock();
           }
-        }).collect(Collectors.toList());
-        log.debug("Will be loading paths in this order {}",
-            versionedPaths.stream().map(p -> p.toString()).collect(Collectors.joining(";")));
-
-        versionedPaths.forEach(x -> loadOneRegistry(this.mergedEntityRegistry, x.getName(rootDepth).toString(),
-            x.getName(rootDepth + 1).toString(), x.toString()));
-      } catch (Exception e) {
-        log.warn("Failed to walk directory with exception", e);
-      } finally {
-        booted = true;
-        initialized.signal();
-        lock.unlock();
-      }
-    }, 0, 5, TimeUnit.SECONDS);
+        },
+        0,
+        5,
+        TimeUnit.SECONDS);
     started = true;
     if (waitForInitialization) {
       lock.lock();
@@ -121,7 +149,10 @@ public PluginEntityRegistryLoader start(boolean waitForInitialization) throws In
     return this;
   }
 
-  private void loadOneRegistry(MergedEntityRegistry parentRegistry, String registryName, String registryVersionStr,
+  private void loadOneRegistry(
+      MergedEntityRegistry parentRegistry,
+      String registryName,
+      String registryVersionStr,
       String patchDirectory) {
     ComparableVersion registryVersion = new ComparableVersion("0.0.0-dev");
     try {
@@ -129,11 +160,15 @@ private void loadOneRegistry(MergedEntityRegistry parentRegistr
       log.debug("{}: Found registry version {}", this, maybeVersion);
       registryVersion = maybeVersion;
     } catch (IllegalArgumentException ie) {
-      log.warn("Found un-parseable registry version {}, will default to {}", registryVersionStr, registryVersion);
+      log.warn(
+          "Found un-parseable registry version {}, will default to {}",
+          registryVersionStr,
+          registryVersion);
     }
 
     if (registryExists(registryName, registryVersion)) {
-      log.debug("Registry {}:{} already exists. Skipping loading...", registryName, registryVersion);
+      log.debug(
+          "Registry {}:{} already exists. Skipping loading...", registryName, registryVersion);
       return;
     } else {
       log.info("{}: Registry {}:{} discovered. Loading...", this, registryName, registryVersion);
@@ -160,31 +195,39 @@ private void loadOneRegistry(MergedEntityRegistry parentRegistr
 
   private boolean registryExists(String registryName, ComparableVersion registryVersion) {
     Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>> nameTree =
         patchRegistries.getOrDefault(registryName, new HashMap<>());
-    if (nameTree.containsKey(registryVersion) && (
-        (nameTree.get(registryVersion).getSecond().getLoadResult() == LoadStatus.SUCCESS) || (
-            nameTree.get(registryVersion).getSecond().getFailureCount() == _MAXLOADFAILURES))) {
+    if (nameTree.containsKey(registryVersion)
+        && ((nameTree.get(registryVersion).getSecond().getLoadResult() == LoadStatus.SUCCESS)
+            || (nameTree.get(registryVersion).getSecond().getFailureCount() == _MAXLOADFAILURES))) {
      return true;
     }
     return false;
   }
 
-  private void addLoadResult(String registryName, ComparableVersion semanticVersion,
-      EntityRegistryLoadResult loadResult, EntityRegistry e) {
+  private void addLoadResult(
+      String registryName,
+      ComparableVersion semanticVersion,
+      EntityRegistryLoadResult loadResult,
+      EntityRegistry e) {
     Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>> nameTree =
         patchRegistries.getOrDefault(registryName, new HashMap<>());
     if (nameTree.containsKey(semanticVersion)) {
-      if ((loadResult.getLoadResult() == LoadStatus.FAILURE) && (
-          nameTree.get(semanticVersion).getSecond().getLoadResult() == LoadStatus.FAILURE)) {
+      if ((loadResult.getLoadResult() == LoadStatus.FAILURE)
+          && (nameTree.get(semanticVersion).getSecond().getLoadResult() == LoadStatus.FAILURE)) {
         // previous load and current loads are both failures
         loadResult.setFailureCount(nameTree.get(semanticVersion).getSecond().getFailureCount() + 1);
         if (loadResult.getFailureCount() == _MAXLOADFAILURES) {
           // Abandoning this registry version forever
-          log.error("Tried {} times. Failed to load registry {} with {}", loadResult.getFailureCount(), registryName, loadResult.getFailureReason());
+          log.error(
+              "Tried {} times. Failed to load registry {} with {}",
+              loadResult.getFailureCount(),
+              registryName,
+              loadResult.getFailureReason());
         }
       }
       log.warn(
-          String.format("Attempt %d to re-load registry %s: %s", loadResult.getFailureCount(),
-              registryName, semanticVersion));
+          String.format(
+              "Attempt %d to re-load registry %s: %s",
+              loadResult.getFailureCount(), registryName, semanticVersion));
     }
     nameTree.put(semanticVersion, new Pair<>(e, loadResult));
     patchRegistries.put(registryName, nameTree);
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java
index 32738d65573fd..cfc2c0901ce0d 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.models.registry;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*;
+
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.data.template.UnionTemplate;
 import com.linkedin.metadata.models.AspectSpec;
@@ -27,13 +30,9 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*;
-
-
 /**
- * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects
- * from the a {@link Snapshot} Record Template present on the classpath
+ * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects from the a
+ * {@link Snapshot} Record Template present on the classpath
  */
 public class SnapshotEntityRegistry implements EntityRegistry {
 
@@ -45,36 +44,41 @@ public class SnapshotEntityRegistry implements EntityRegistry {
   private static final SnapshotEntityRegistry INSTANCE = new SnapshotEntityRegistry();
 
   public SnapshotEntityRegistry() {
-    entityNameToSpec = new EntitySpecBuilder().buildEntitySpecs(new Snapshot().schema())
-        .stream()
-        .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec));
+    entityNameToSpec =
+        new EntitySpecBuilder()
+            .buildEntitySpecs(new Snapshot().schema()).stream()
+                .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec));
     entitySpecs = new ArrayList<>(entityNameToSpec.values());
     _aspectNameToSpec = populateAspectMap(entitySpecs);
     _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec);
   }
 
   public SnapshotEntityRegistry(UnionTemplate snapshot) {
-    entityNameToSpec = new EntitySpecBuilder().buildEntitySpecs(snapshot.schema())
-        .stream()
-        .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec));
+    entityNameToSpec =
+        new EntitySpecBuilder()
+            .buildEntitySpecs(snapshot.schema()).stream()
+                .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec));
     entitySpecs = new ArrayList<>(entityNameToSpec.values());
     _aspectNameToSpec = populateAspectMap(entitySpecs);
     _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec);
   }
 
   private AspectTemplateEngine populateTemplateEngine(Map<String, AspectSpec> aspectSpecs) {
-    // TODO: This should be more dynamic ideally, "hardcoding" for now, passing in aspect spec map preemptively
+    // TODO: This should be more dynamic ideally, "hardcoding" for now, passing in aspect spec map
+    // preemptively
     Map<String, Template<? extends RecordTemplate>> aspectSpecTemplateMap = new HashMap<>();
     aspectSpecTemplateMap.put(OWNERSHIP_ASPECT_NAME, new OwnershipTemplate());
     aspectSpecTemplateMap.put(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesTemplate());
     aspectSpecTemplateMap.put(UPSTREAM_LINEAGE_ASPECT_NAME, new UpstreamLineageTemplate());
     aspectSpecTemplateMap.put(GLOBAL_TAGS_ASPECT_NAME, new GlobalTagsTemplate());
-    aspectSpecTemplateMap.put(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataTemplate());
+    aspectSpecTemplateMap.put(
+        EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataTemplate());
     aspectSpecTemplateMap.put(GLOSSARY_TERMS_ASPECT_NAME, new GlossaryTermsTemplate());
     aspectSpecTemplateMap.put(DATA_FLOW_INFO_ASPECT_NAME, new DataFlowInfoTemplate());
     aspectSpecTemplateMap.put(DATA_JOB_INFO_ASPECT_NAME, new DataJobInfoTemplate());
-    aspectSpecTemplateMap.put(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate());
+    aspectSpecTemplateMap.put(
+        DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate());
     aspectSpecTemplateMap.put(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, new DataJobInputOutputTemplate());
     return new AspectTemplateEngine(aspectSpecTemplateMap);
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java
index f32aa1aa8bd47..e5d048d6ef647 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java
@@ -1,15 +1,12 @@
 package com.linkedin.metadata.models.registry.config;
 
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import java.util.List;
-
+import javax.annotation.Nullable;
 import lombok.AccessLevel;
 import lombok.AllArgsConstructor;
 import lombok.NoArgsConstructor;
 import lombok.Value;
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-
-import javax.annotation.Nullable;
-
 
 @Value
 @NoArgsConstructor(force = true, access = AccessLevel.PRIVATE)
@@ -21,6 +18,5 @@ public class Entity {
   String keyAspect;
   List<String> aspects;
 
-  @Nullable
-  String category;
+  @Nullable String category;
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java
index caec5fc69c148..f08fa5ba0a477 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java
@@ -4,13 +4,11 @@
 import lombok.Getter;
 import lombok.Setter;
 
-
 @Builder
 @Getter
 public class EntityRegistryLoadResult {
   private LoadStatus loadResult;
   private String registryLocation;
   private String failureReason;
-  @Setter
-  private int failureCount;
+  @Setter private int failureCount;
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java
index 12c9f5ab36a09..4a868ed92e4a7 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java
@@ -1,9 +1,9 @@
 package com.linkedin.metadata.models.registry.config;
 
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import lombok.AccessLevel;
 import lombok.NoArgsConstructor;
 import lombok.Value;
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 
 @Value
 @NoArgsConstructor(force = true, access = AccessLevel.PRIVATE)
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java
index cf63e87abf7f9..9cd8e74d952d6 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template;
 
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -8,61 +10,68 @@
 import java.util.Collections;
 import java.util.List;
 
-import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
-
-
 public interface ArrayMergingTemplate<T extends RecordTemplate> extends Template<T> {
 
   /**
-   * Takes an Array field on the {@link RecordTemplate} subtype along with a set of key fields to transform into a map
-   * Avoids producing side effects by copying nodes, use resulting node and not the original
+   * Takes an Array field on the {@link RecordTemplate} subtype along with a set of key fields to
+   * transform into a map Avoids producing side effects by copying nodes, use resulting node and not
+   * the original
+   *
    * @param baseNode the base unmodified node
    * @param arrayFieldName name of the array field to be transformed
-   * @param keyFields subfields of the array object to be used as keys, empty implies the list is just strings to be merged
+   * @param keyFields subfields of the array object to be used as keys, empty implies the list is
+   *     just strings to be merged
    * @return the modified {@link JsonNode} with array fields transformed to maps
    */
-  default JsonNode arrayFieldToMap(JsonNode baseNode, String arrayFieldName, List<String> keyFields) {
+  default JsonNode arrayFieldToMap(
+      JsonNode baseNode, String arrayFieldName, List<String> keyFields) {
     JsonNode transformedNode = baseNode.deepCopy();
     JsonNode arrayNode = baseNode.get(arrayFieldName);
     ObjectNode mapNode = instance.objectNode();
     if (arrayNode instanceof ArrayNode) {
-      ((ArrayNode) arrayNode).elements()
-          .forEachRemaining(node -> {
-            ObjectNode keyValue = mapNode;
-            // Creates nested object of keys with final value being the full value of the node
-            JsonNode nodeClone = node.deepCopy();
-            if (!keyFields.isEmpty()) {
-              for (String keyField : keyFields) {
-                String key = node.get(keyField).asText();
-                keyValue = keyValue.get(key) == null ? (ObjectNode) keyValue.set(key, instance.objectNode()).get(key)
-                    : (ObjectNode) keyValue.get(key);
+      ((ArrayNode) arrayNode)
+          .elements()
+          .forEachRemaining(
+              node -> {
+                ObjectNode keyValue = mapNode;
+                // Creates nested object of keys with final value being the full value of the node
+                JsonNode nodeClone = node.deepCopy();
+                if (!keyFields.isEmpty()) {
+                  for (String keyField : keyFields) {
+                    String key = node.get(keyField).asText();
+                    keyValue =
+                        keyValue.get(key) == null
+                            ? (ObjectNode) keyValue.set(key, instance.objectNode()).get(key)
+                            : (ObjectNode) keyValue.get(key);
+                  }
+                } else {
+                  // No key fields, assume String array
+                  nodeClone = instance.objectNode().set(((TextNode) node).asText(), node);
                 }
-            } else {
-              // No key fields, assume String array
-              nodeClone = instance.objectNode().set(((TextNode) node).asText(), node);
-            }
-            keyValue.setAll((ObjectNode) nodeClone);
-          }
-      );
-
+                keyValue.setAll((ObjectNode) nodeClone);
+              });
     }
     return ((ObjectNode) transformedNode).set(arrayFieldName, mapNode);
   }
 
   /**
-   * Takes a transformed map field on the {@link JsonNode} representation along with a set of key fields used to transform into a map
-   * and rebases it to the original defined format
-   * Avoids producing side effects by copying nodes, use resulting node and not the original
+   * Takes a transformed map field on the {@link JsonNode} representation along with a set of key
+   * fields used to transform into a map and rebases it to the original defined format Avoids
+   * producing side effects by copying nodes, use resulting node and not the original
+   *
    * @param transformedNode the transformed node
   * @param arrayFieldName name of the array field to be transformed
-   * @param keyFields subfields of the array object to be used as keys, empty implies the list is just strings to be merged
+   * @param keyFields subfields of the array object to be used as keys, empty implies the list is
+   *     just strings to be merged
    * @return the modified {@link JsonNode} formatted consistent with the original schema
    */
-  default JsonNode transformedMapToArray(JsonNode transformedNode, String arrayFieldName, List<String> keyFields) {
+  default JsonNode transformedMapToArray(
+      JsonNode transformedNode, String arrayFieldName, List<String> keyFields) {
     JsonNode fieldNode = transformedNode.get(arrayFieldName);
     if (fieldNode instanceof ArrayNode) {
-      // We already have an ArrayNode, no need to transform. This happens during `replace` operations
+      // We already have an ArrayNode, no need to transform. This happens during `replace`
+      // operations
      return transformedNode;
     }
     ObjectNode rebasedNode = transformedNode.deepCopy();
@@ -74,9 +83,7 @@ default JsonNode transformedMapToArray(JsonNode transformedNode, String arrayFie
     } else {
       // No keys, assume pure Strings
       arrayNode = instance.arrayNode();
-      mapNode.fields().forEachRemaining(entry ->
-          arrayNode.add(entry.getValue())
-      );
+      mapNode.fields().forEachRemaining(entry -> arrayNode.add(entry.getValue()));
     }
     return rebasedNode.set(arrayFieldName, arrayNode);
   }
@@ -86,9 +93,16 @@ default ArrayNode mergeToArray(JsonNode mapNode, List<String> keyFields) {
       return instance.arrayNode().add(mapNode);
     } else {
       ArrayNode mergingArray = instance.arrayNode();
-      mapNode.elements().forEachRemaining(node ->
-          mergingArray.addAll(mergeToArray(node, keyFields.size() > 1 ? keyFields.subList(1, keyFields.size()) : Collections.emptyList()))
-      );
+      mapNode
+          .elements()
+          .forEachRemaining(
+              node ->
+                  mergingArray.addAll(
+                      mergeToArray(
+                          node,
+                          keyFields.size() > 1
+                              ? keyFields.subList(1, keyFields.size())
+                              : Collections.emptyList())));
       return mergingArray;
     }
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
index 742dbd70d4503..95849a94bae29 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.github.fge.jsonpatch.JsonPatchException;
 import com.github.fge.jsonpatch.Patch;
@@ -13,25 +15,25 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Holds connection between aspect specs and their templates and drives the generation from templates
+ * Holds connection between aspect specs and their templates and drives the generation from
+ * templates
  */
 public class AspectTemplateEngine {
 
-  public static final Set<String> SUPPORTED_TEMPLATES = Stream.of(
-      DATASET_PROPERTIES_ASPECT_NAME,
-      EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
-      GLOBAL_TAGS_ASPECT_NAME,
-      GLOSSARY_TERMS_ASPECT_NAME,
-      OWNERSHIP_ASPECT_NAME,
-      UPSTREAM_LINEAGE_ASPECT_NAME,
-      DATA_FLOW_INFO_ASPECT_NAME,
-      DATA_JOB_INFO_ASPECT_NAME,
-      DATA_PRODUCT_PROPERTIES_ASPECT_NAME,
-      DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).collect(Collectors.toSet());
+  public static final Set<String> SUPPORTED_TEMPLATES =
+      Stream.of(
+              DATASET_PROPERTIES_ASPECT_NAME,
+              EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
+              GLOBAL_TAGS_ASPECT_NAME,
+              GLOSSARY_TERMS_ASPECT_NAME,
+              OWNERSHIP_ASPECT_NAME,
+              UPSTREAM_LINEAGE_ASPECT_NAME,
+              DATA_FLOW_INFO_ASPECT_NAME,
+              DATA_JOB_INFO_ASPECT_NAME,
+              DATA_PRODUCT_PROPERTIES_ASPECT_NAME,
+              DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)
+          .collect(Collectors.toSet());
 
   private final Map<String, Template<? extends RecordTemplate>> _aspectTemplateMap;
 
@@ -45,11 +47,14 @@ public AspectTemplateEngine(Map<String, Template<? extends RecordTemplate>> aspe
 
   @Nullable
   public RecordTemplate getDefaultTemplate(String aspectSpecName) {
-    return _aspectTemplateMap.containsKey(aspectSpecName) ? _aspectTemplateMap.get(aspectSpecName).getDefault() : null;
+    return _aspectTemplateMap.containsKey(aspectSpecName)
+        ? _aspectTemplateMap.get(aspectSpecName).getDefault()
+        : null;
   }
 
   /**
    * Applies a json patch to a record, optionally merging array fields as necessary
+   *
    * @param recordTemplate original template to be updated
    * @param jsonPatch patch to apply
    * @param aspectSpec aspectSpec of the template
    * @return
    * @throws JsonPatchException if there is an issue with applying the json patch
    */
   @Nonnull
-  public <T extends RecordTemplate> RecordTemplate applyPatch(RecordTemplate recordTemplate, Patch jsonPatch, AspectSpec aspectSpec)
+  public <T extends RecordTemplate> RecordTemplate applyPatch(
+      RecordTemplate recordTemplate, Patch jsonPatch, AspectSpec aspectSpec)
       throws JsonProcessingException, JsonPatchException {
     Template<T> template = getTemplate(aspectSpec);
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
index cf2f5552fbb73..44090b3a6d05b 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.models.registry.template;
 
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -9,14 +12,13 @@
 import com.linkedin.data.template.RecordTemplate;
 import java.util.List;
 
-import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
-import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
-
-
-public abstract class CompoundKeyTemplate<T extends RecordTemplate> implements ArrayMergingTemplate<T> {
+public abstract class CompoundKeyTemplate<T extends RecordTemplate>
+    implements ArrayMergingTemplate<T> {
 
   /**
-   * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent paths to be specified
+   * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent
+   * paths to be specified
+   *
    * @param transformedNode transformed node to have keys populated
    * @return transformed node that has top level keys populated
    */
@@ -25,7 +27,8 @@ public JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch)
     List<String> paths = getPaths(jsonPatch);
     for (String path : paths) {
       String[] keys = path.split("/");
-      // Skip first as it will always be blank due to path starting with /, skip last key as we only need to populate top level
+      // Skip first as it will always be blank due to path starting with /, skip last key as we only
+      // need to populate top level
       JsonNode parent = transformedNodeClone;
       for (int i = 1; i < keys.length - 1; i++) {
         if (parent.get(keys[i]) == null) {
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java
index 4310c84ded0e2..0793cacce780f 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template;
 
+import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -8,26 +10,23 @@
 import com.linkedin.data.template.RecordTemplate;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
-
-
 public interface Template<T extends RecordTemplate> {
 
   /**
    * Cast method to get subtype of {@link RecordTemplate} for applying templating methods
+   *
    * @param recordTemplate generic record
    * @return specific type for this template
    * @throws {@link ClassCastException} when recordTemplate is not the correct type for the template
    */
   T getSubtype(RecordTemplate recordTemplate) throws ClassCastException;
 
-  /**
-   * Get the template clas type
-   */
+  /** Get the template clas type */
   Class<T> getTemplateType();
 
   /**
    * Get a template aspect with defaults set
+   *
    * @return subtype of {@link RecordTemplate} that lines up with a predefined AspectSpec
    */
   @Nonnull
@@ -35,6 +34,7 @@ public interface Template<T extends RecordTemplate> {
 
   /**
    * Applies a specified {@link Patch} to an aspect
+   *
    * @param recordTemplate original {@link RecordTemplate} to be patched
    * @param jsonPatch patch to apply
    * @return patched value
@@ -50,20 +50,24 @@ default T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch)
   }
 
   /**
-   * Returns a json representation of the template, modified for template based operations to be compatible with patch
-   * semantics.
+   * Returns a json representation of the template, modified for template based operations to be
+   * compatible with patch semantics.
+   *
    * @param recordTemplate template to be transformed into json
    * @return a {@link JsonNode} representation of the template
    * @throws JsonProcessingException if there is an issue converting the input to JSON
    */
-  default JsonNode preprocessTemplate(RecordTemplate recordTemplate) throws JsonProcessingException {
+  default JsonNode preprocessTemplate(RecordTemplate recordTemplate)
+      throws JsonProcessingException {
     T subtype = getSubtype(recordTemplate);
     JsonNode baseNode = OBJECT_MAPPER.readTree(RecordUtils.toJsonString(subtype));
     return transformFields(baseNode);
   }
 
   /**
-   * Transforms fields from base json representation of RecordTemplate to definition specific to aspect per patch semantics
+   * Transforms fields from base json representation of RecordTemplate to definition specific to
+   * aspect per patch semantics
+   *
    * @param baseNode the base node to be transformed
    * @return transformed {@link JsonNode}
    */
@@ -72,12 +76,10 @@ default JsonNode preprocessTemplate(RecordTemplate recordTemplate) throws JsonPr
 
   /**
    * Reserializes the patched {@link JsonNode} to the base {@link RecordTemplate} definition
+   *
    * @param patched the deserialized patched json in custom format per aspect spec
    * @return A {@link JsonNode} that has been retranslated from patch semantics
    */
   @Nonnull
   JsonNode rebaseFields(JsonNode patched);
-
-
-
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java
index 0cd9a52c8fe60..a98e60c739749 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java
@@ -8,7 +8,6 @@
 import java.util.Collections;
 import javax.annotation.Nonnull;
 
-
 public class GlobalTagsTemplate implements ArrayMergingTemplate<GlobalTags> {
 
   private static final String TAGS_FIELD_NAME = "tags";
@@ -45,6 +44,7 @@ public JsonNode transformFields(JsonNode baseNode) {
   @Nonnull
   @Override
   public JsonNode rebaseFields(JsonNode patched) {
-    return transformedMapToArray(patched, TAGS_FIELD_NAME, Collections.singletonList(TAG_FIELD_NAME));
+    return transformedMapToArray(
+        patched, TAGS_FIELD_NAME, Collections.singletonList(TAG_FIELD_NAME));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java
index e905404824022..7ce59916f2073 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.models.registry.template.common;
 
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.common.AuditStamp;
@@ -11,10 +14,6 @@
 import java.util.Collections;
 import javax.annotation.Nonnull;
 
-import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 public class GlossaryTermsTemplate implements ArrayMergingTemplate<GlossaryTerms> {
 
   private static final String TERMS_FIELD_NAME = "terms";
@@ -40,8 +39,12 @@ public Class<GlossaryTerms> getTemplateType() {
   @Override
   public GlossaryTerms getDefault() {
     GlossaryTerms glossaryTerms = new GlossaryTerms();
-    glossaryTerms.setTerms(new GlossaryTermAssociationArray())
-        .setAuditStamp(new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()));
+    glossaryTerms
+        .setTerms(new GlossaryTermAssociationArray())
+        .setAuditStamp(
+            new AuditStamp()
+                .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))
+                .setTime(System.currentTimeMillis()));
     return glossaryTerms;
   }
 
@@ -52,8 +55,7 @@ public JsonNode transformFields(JsonNode baseNode) {
     // Set required deprecated field
     if (baseNode.get(AUDIT_STAMP_FIELD) == null) {
       ObjectNode auditStampNode = instance.objectNode();
-      auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR)
-          .put(TIME_FIELD, System.currentTimeMillis());
+      auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR).put(TIME_FIELD, System.currentTimeMillis());
       ((ObjectNode) baseNode).set(AUDIT_STAMP_FIELD, auditStampNode);
     }
     return arrayFieldToMap(baseNode, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME));
@@ -65,10 +67,10 @@ public JsonNode rebaseFields(JsonNode patched) {
     // Set required deprecated field
     if (patched.get(AUDIT_STAMP_FIELD) == null) {
       ObjectNode auditStampNode = instance.objectNode();
-      auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR)
-          .put(TIME_FIELD, System.currentTimeMillis());
+      auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR).put(TIME_FIELD, System.currentTimeMillis());
       ((ObjectNode) patched).set(AUDIT_STAMP_FIELD, auditStampNode);
     }
-    return transformedMapToArray(patched, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME));
+    return transformedMapToArray(
+        patched, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java
index 0a2cff4395b54..b850ae830b98c 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template.common;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.OwnerArray;
@@ -10,9 +12,6 @@
 import java.util.Arrays;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class OwnershipTemplate extends CompoundKeyTemplate<Ownership> {
 
   private static final String OWNERS_FIELD_NAME = "owners";
@@ -37,9 +36,10 @@ public Class<Ownership> getTemplateType() {
   public Ownership getDefault() {
     Ownership ownership = new Ownership();
     ownership.setOwners(new OwnerArray());
-    ownership.setLastModified(new AuditStamp()
-        .setTime(System.currentTimeMillis())
-        .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)));
+    ownership.setLastModified(
+        new AuditStamp()
+            .setTime(System.currentTimeMillis())
+            .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)));
 
     return ownership;
   }
@@ -47,12 +47,14 @@ public Ownership getDefault() {
   @Nonnull
   @Override
   public JsonNode transformFields(JsonNode baseNode) {
-    return arrayFieldToMap(baseNode, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME));
+    return arrayFieldToMap(
+        baseNode, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME));
   }
 
   @Nonnull
   @Override
   public JsonNode rebaseFields(JsonNode patched) {
-    return transformedMapToArray(patched, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME));
+    return transformedMapToArray(
+        patched, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java
index 5997bd8e7910d..73e837f368f0b 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java
@@ -7,7 +7,6 @@
 import com.linkedin.metadata.models.registry.template.Template;
 import javax.annotation.Nonnull;
 
-
 public class DataFlowInfoTemplate implements Template<DataFlowInfo> {
 
   @Override
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java
index 9d25fa71286d3..bdb306c2d32e4 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java
@@ -7,7 +7,6 @@
 import com.linkedin.metadata.models.registry.template.Template;
 import javax.annotation.Nonnull;
 
-
 public class DataJobInfoTemplate implements Template<DataJobInfo> {
 
   @Override
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java
index b4ddb4523c9a5..889297734e977 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java
@@ -12,7 +12,6 @@
 import java.util.Collections;
 import javax.annotation.Nonnull;
 
-
 public class DataJobInputOutputTemplate implements ArrayMergingTemplate<DataJobInputOutput> {
 
   private static final String INPUT_DATA_JOB_EDGES_FIELD_NAME = "inputDatajobEdges";
@@ -23,6 +22,7 @@ public class DataJobInputOutputTemplate implements ArrayMergingTemplate<DataJobI
 
   private static final String INPUT_DATASET_FIELDS_FIELD_NAME = "inputDatasetFields";
   private static final String OUTPUT_DATASET_FIELDS_FIELD_NAME = "outputDatasetFields";
+
   // TODO: Fine Grained Lineages not patchable at this time, they don't have a well established key
 
   @Override
@@ -60,17 +60,28 @@ public DataJobInputOutput getDefault() {
   @Nonnull
   @Override
   public JsonNode transformFields(JsonNode baseNode) {
-    JsonNode transformedNode = arrayFieldToMap(baseNode, INPUT_DATA_JOB_EDGES_FIELD_NAME,
-        Collections.singletonList(DESTINATION_URN_FIELD_NAME));
-
-    transformedNode = arrayFieldToMap(transformedNode, INPUT_DATASET_EDGES_FIELD_NAME,
-        Collections.singletonList(DESTINATION_URN_FIELD_NAME));
-
-    transformedNode = arrayFieldToMap(transformedNode, OUTPUT_DATASET_EDGES_FIELD_NAME,
-        Collections.singletonList(DESTINATION_URN_FIELD_NAME));
-
-    transformedNode = arrayFieldToMap(transformedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
-    transformedNode = arrayFieldToMap(transformedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
+    JsonNode transformedNode =
+        arrayFieldToMap(
+            baseNode,
+            INPUT_DATA_JOB_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    transformedNode =
+        arrayFieldToMap(
+            transformedNode,
+            INPUT_DATASET_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    transformedNode =
+        arrayFieldToMap(
+            transformedNode,
+            OUTPUT_DATASET_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    transformedNode =
+        arrayFieldToMap(transformedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
+    transformedNode =
+        arrayFieldToMap(transformedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
 
     return transformedNode;
   }
@@ -78,17 +89,30 @@ public JsonNode transformFields(JsonNode baseNode) {
   @Nonnull
   @Override
   public JsonNode rebaseFields(JsonNode patched) {
-    JsonNode rebasedNode = transformedMapToArray(patched, INPUT_DATA_JOB_EDGES_FIELD_NAME,
-        Collections.singletonList(DESTINATION_URN_FIELD_NAME));
-
-    rebasedNode = transformedMapToArray(rebasedNode, INPUT_DATASET_EDGES_FIELD_NAME,
-        Collections.singletonList(DESTINATION_URN_FIELD_NAME));
-
-    rebasedNode = transformedMapToArray(rebasedNode, OUTPUT_DATASET_EDGES_FIELD_NAME,
-        Collections.singletonList(DESTINATION_URN_FIELD_NAME));
-
-    rebasedNode = transformedMapToArray(rebasedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
-    rebasedNode = transformedMapToArray(rebasedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
+    JsonNode rebasedNode =
+        transformedMapToArray(
+            patched,
+            INPUT_DATA_JOB_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    rebasedNode =
+        transformedMapToArray(
+            rebasedNode,
+            INPUT_DATASET_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    rebasedNode =
+        transformedMapToArray(
+            rebasedNode,
+            OUTPUT_DATASET_EDGES_FIELD_NAME,
+            Collections.singletonList(DESTINATION_URN_FIELD_NAME));
+
+    rebasedNode =
+        transformedMapToArray(
+            rebasedNode, INPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
+    rebasedNode =
+        transformedMapToArray(
+            rebasedNode, OUTPUT_DATASET_FIELDS_FIELD_NAME, Collections.emptyList());
 
     return rebasedNode;
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java
index d835d5ae939ae..899c51a7c3d7e 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java
@@ -8,7 +8,6 @@
 import java.util.Collections;
 import javax.annotation.Nonnull;
 
-
 public class DataProductPropertiesTemplate implements ArrayMergingTemplate<DataProductProperties> {
 
   private static final String ASSETS_FIELD_NAME = "assets";
@@ -44,6 +43,7 @@ public JsonNode transformFields(JsonNode baseNode) {
   @Nonnull
   @Override
   public JsonNode rebaseFields(JsonNode patched) {
-    return transformedMapToArray(patched, ASSETS_FIELD_NAME, Collections.singletonList(KEY_FIELD_NAME));
+    return transformedMapToArray(
+        patched, ASSETS_FIELD_NAME, Collections.singletonList(KEY_FIELD_NAME));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java
index 3c1be1f7ecaad..991f7f3d4053a 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java
@@ -9,7 +9,6 @@
 import java.util.Collections;
 import javax.annotation.Nonnull;
 
-
 public class DatasetPropertiesTemplate implements ArrayMergingTemplate<DatasetProperties> {
 
   private static final String TAGS_FIELD_NAME = "tags";
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java
index 62888d117b3de..9712a9081d33a 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template.dataset;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.common.AuditStamp;
@@ -13,9 +15,6 @@
 import java.util.Collections;
 import javax.annotation.Nonnull;
 
-import static
com.linkedin.metadata.Constants.*; - - public class EditableSchemaMetadataTemplate extends CompoundKeyTemplate<EditableSchemaMetadata> { private static final String EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME = "editableSchemaFieldInfo"; @@ -24,7 +23,8 @@ public class EditableSchemaMetadataTemplate extends CompoundKeyTemplate<Editable private static final String GLOSSARY_TERMS_FIELD_NAME = "glossaryTerms"; @Override - public EditableSchemaMetadata getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + public EditableSchemaMetadata getSubtype(RecordTemplate recordTemplate) + throws ClassCastException { if (recordTemplate instanceof EditableSchemaMetadata) { return (EditableSchemaMetadata) recordTemplate; } @@ -39,7 +39,10 @@ public Class<EditableSchemaMetadata> getTemplateType() { @Nonnull @Override public EditableSchemaMetadata getDefault() { - AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); return new EditableSchemaMetadata() .setCreated(auditStamp) .setLastModified(auditStamp) @@ -49,47 +52,70 @@ public EditableSchemaMetadata getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - JsonNode transformedNode = arrayFieldToMap(baseNode, EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, - Collections.singletonList(FIELDPATH_FIELD_NAME)); + JsonNode transformedNode = + arrayFieldToMap( + baseNode, + EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, + Collections.singletonList(FIELDPATH_FIELD_NAME)); // Create temporary templates for array subfields GlobalTagsTemplate globalTagsTemplate = new GlobalTagsTemplate(); GlossaryTermsTemplate glossaryTermsTemplate = new GlossaryTermsTemplate(); // Apply template transforms to array subfields - transformedNode.get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME).elements().forEachRemaining(node -> { - JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); - JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); - if (globalTags != null) { - ((ObjectNode) node).set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.transformFields(node.get(GLOBAL_TAGS_FIELD_NAME))); - } - if (glossaryTerms != null) { - ((ObjectNode) node).set(GLOSSARY_TERMS_FIELD_NAME, glossaryTermsTemplate.transformFields(node.get(GLOSSARY_TERMS_FIELD_NAME))); - } - }); + transformedNode + .get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME) + .elements() + .forEachRemaining( + node -> { + JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); + JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); + if (globalTags != null) { + ((ObjectNode) node) + .set( + GLOBAL_TAGS_FIELD_NAME, + globalTagsTemplate.transformFields(node.get(GLOBAL_TAGS_FIELD_NAME))); + } + if (glossaryTerms != null) { + ((ObjectNode) node) + .set( + GLOSSARY_TERMS_FIELD_NAME, + glossaryTermsTemplate.transformFields(node.get(GLOSSARY_TERMS_FIELD_NAME))); + } + }); return transformedNode; } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - JsonNode rebasedNode = transformedMapToArray(patched, EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, - Collections.singletonList(FIELDPATH_FIELD_NAME)); + JsonNode rebasedNode = + transformedMapToArray( + patched, + EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, + Collections.singletonList(FIELDPATH_FIELD_NAME)); // Create temporary templates for array subfields GlobalTagsTemplate globalTagsTemplate = new GlobalTagsTemplate(); GlossaryTermsTemplate glossaryTermsTemplate = 
new GlossaryTermsTemplate(); // Apply template rebases to array subfields - rebasedNode.get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME).elements().forEachRemaining(node -> { - JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); - JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); - if (globalTags != null) { - ((ObjectNode) node).set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.rebaseFields(globalTags)); - } - if (glossaryTerms != null) { - ((ObjectNode) node).set(GLOSSARY_TERMS_FIELD_NAME, glossaryTermsTemplate.rebaseFields(glossaryTerms)); - } - }); - + rebasedNode + .get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME) + .elements() + .forEachRemaining( + node -> { + JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); + JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); + if (globalTags != null) { + ((ObjectNode) node) + .set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.rebaseFields(globalTags)); + } + if (glossaryTerms != null) { + ((ObjectNode) node) + .set( + GLOSSARY_TERMS_FIELD_NAME, + glossaryTermsTemplate.rebaseFields(glossaryTerms)); + } + }); return rebasedNode; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java index 9e87b8a385328..35816895669be 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java @@ -9,11 +9,11 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class UpstreamLineageTemplate implements ArrayMergingTemplate<UpstreamLineage> { private static final String UPSTREAMS_FIELD_NAME = "upstreams"; private static final String DATASET_FIELD_NAME = "dataset"; + // TODO: Fine Grained Lineages not patchable at this time, they don't have a well established key @Override @@ -42,12 +42,14 @@ public UpstreamLineage getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - return arrayFieldToMap(baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + return arrayFieldToMap( + baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + return transformedMapToArray( + patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java index 6496ac125d867..18d070ec3da45 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template.util; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -7,27 +9,31 @@ import java.util.ArrayList; import java.util.List; -import static com.linkedin.metadata.Constants.*; - - 
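A note before the TemplateUtil hunk: every ArrayMergingTemplate in the files above leans on the same round-trip. arrayFieldToMap re-keys an array-valued aspect field into a JSON object so a JSON Patch can address individual entries by key, and transformedMapToArray undoes that once the patch has been applied. The following is a minimal, self-contained sketch of the idea in plain Jackson, with hypothetical class and helper names; it is not the actual ArrayMergingTemplate code, and the real templates also support compound keys.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public final class KeyedArraySketch {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // {"tags":[{"tag":"urn:li:tag:pii"}]} -> {"tags":{"urn:li:tag:pii":{"tag":"urn:li:tag:pii"}}}
  static JsonNode arrayToMap(JsonNode base, String arrayField, String keyField) {
    ObjectNode keyed = MAPPER.createObjectNode();
    base.get(arrayField).forEach(element -> keyed.set(element.get(keyField).asText(), element));
    ObjectNode copy = base.deepCopy();
    copy.set(arrayField, keyed);
    return copy;
  }

  // Inverse step: after the patch is applied, flatten the keyed map back into the array form.
  static JsonNode mapToArray(JsonNode patched, String arrayField) {
    ArrayNode array = MAPPER.createArrayNode();
    patched.get(arrayField).elements().forEachRemaining(array::add);
    ObjectNode copy = patched.deepCopy();
    copy.set(arrayField, array);
    return copy;
  }
}

Re-keying by urn (or by owner plus type, in OwnershipTemplate's case) is what makes single-entry add/remove patches cheap: the patch path names the key directly instead of an array index that can shift.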
 public class TemplateUtil {
 
-  private TemplateUtil() {
-
-  }
+  private TemplateUtil() {}
 
   public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
   static {
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    OBJECT_MAPPER
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
   }
 
   public static List<String> getPaths(Patch jsonPatch) {
     JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch);
     List<String> paths = new ArrayList<>();
-    patchNode.elements().forEachRemaining(node -> {
-      paths.add(node.get("path").asText());
-    });
+    patchNode
+        .elements()
+        .forEachRemaining(
+            node -> {
+              paths.add(node.get("path").asText());
+            });
     return paths;
   }
 }
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java
index 0ce066b7a3433..ad16aec7f66d2 100644
--- a/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java
+++ b/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java
@@ -1,20 +1,24 @@
 package com.linkedin.metadata.models;
 
+import static org.testng.Assert.*;
+
 import com.linkedin.data.schema.DataSchema;
 import com.linkedin.metadata.models.registry.TestConstants;
 import java.nio.file.Paths;
 import java.util.Optional;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-
 public class DataSchemaFactoryTest {
 
   @Test
   public void testCustomClassLoading() throws Exception {
-    DataSchemaFactory dsf = DataSchemaFactory.withCustomClasspath(Paths.get(
-        TestConstants.BASE_DIRECTORY + "/" + TestConstants.TEST_REGISTRY + "/"
-            + TestConstants.TEST_VERSION.toString()));
+    DataSchemaFactory dsf =
+        DataSchemaFactory.withCustomClasspath(
+            Paths.get(
+                TestConstants.BASE_DIRECTORY
+                    + "/"
+                    + TestConstants.TEST_REGISTRY
+                    + "/"
+                    + TestConstants.TEST_VERSION.toString()));
     // Assert that normally found aspects from the core model are missing
     Optional<DataSchema> dataSchema = dsf.getAspectSchema("datasetProfile");
     assertFalse(dataSchema.isPresent(), "datasetProfile");
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java
index b95cb1085283f..e1ea80e2bcad2 100644
--- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java
+++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java
@@ -1,8 +1,12 @@
 package com.linkedin.metadata.models;
 
-import com.datahub.test.TestBrowsePaths;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertThrows;
+import static org.testng.Assert.assertTrue;
+
 import com.datahub.test.SearchFeatures;
 import com.datahub.test.Snapshot;
+import com.datahub.test.TestBrowsePaths;
 import com.datahub.test.TestEntityInfo;
 import com.datahub.test.TestEntityKey;
 import com.datahub.test.invalid.DuplicateSearchableFields;
@@ -18,67 +22,76 @@
 import java.util.Map;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.assertEquals;
-import static
org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - - -/** - * Tests the capabilities of {@link EntitySpecBuilder} - */ +/** Tests the capabilities of {@link EntitySpecBuilder} */ public class EntitySpecBuilderTest { @Test public void testBuildAspectSpecValidationAspectMissingAnnotation() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new MissingAspectAnnotation().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new MissingAspectAnnotation().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationInvalidSearchableFieldType() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new InvalidSearchableFieldType().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new InvalidSearchableFieldType().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationDuplicateSearchableFields() { - AspectSpec aspectSpec = new EntitySpecBuilder() - .buildAspectSpec(new DuplicateSearchableFields().schema(), RecordTemplate.class); + AspectSpec aspectSpec = + new EntitySpecBuilder() + .buildAspectSpec(new DuplicateSearchableFields().schema(), RecordTemplate.class); - aspectSpec.getSearchableFieldSpecs().forEach(searchableFieldSpec -> { - String name = searchableFieldSpec.getSearchableAnnotation().getFieldName(); - assertTrue("textField".equals(name) || "textField2".equals(name)); - }); + aspectSpec + .getSearchableFieldSpecs() + .forEach( + searchableFieldSpec -> { + String name = searchableFieldSpec.getSearchableAnnotation().getFieldName(); + assertTrue("textField".equals(name) || "textField2".equals(name)); + }); } @Test public void testBuildAspectSpecValidationMissingRelationshipName() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new MissingRelationshipName().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new MissingRelationshipName().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationNonNumericSearchScoreField() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new NonNumericSearchScoreField().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new NonNumericSearchScoreField().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationNonSingularSearchScoreField() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new NonSingularSearchScoreField().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new NonSingularSearchScoreField().schema(), RecordTemplate.class)); } + @Test public void testBuildEntitySpecs() { // Instantiate the test Snapshot final Snapshot snapshot = new Snapshot(); - final List<EntitySpec> validEntitySpecs = new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()); + final List<EntitySpec> validEntitySpecs = + new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()); // Assert single entity. 
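    // (The test Snapshot aggregates a single test entity, so the builder should emit exactly
    // one EntitySpec; the size assertion below guards that invariant.)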
assertEquals(1, validEntitySpecs.size()); @@ -110,116 +123,265 @@ public void testBuildEntitySpecs() { private void validateTestEntityKey(final AspectSpec keyAspectSpec) { assertEquals("testEntityKey", keyAspectSpec.getName()); - assertEquals(new TestEntityKey().schema().getFullName(), keyAspectSpec.getPegasusSchema().getFullName()); + assertEquals( + new TestEntityKey().schema().getFullName(), keyAspectSpec.getPegasusSchema().getFullName()); // Assert on Searchable Fields assertEquals(2, keyAspectSpec.getSearchableFieldSpecs().size()); // keyPart1, keyPart3 - assertEquals("keyPart1", keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart1").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart1").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("keyPart3", keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart3").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.KEYWORD, keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart3").toString()) - .getSearchableAnnotation().getFieldType()); + assertEquals( + "keyPart1", + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart1").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart1").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "keyPart3", + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart3").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.KEYWORD, + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart3").toString()) + .getSearchableAnnotation() + .getFieldType()); // Assert on Relationship Field assertEquals(1, keyAspectSpec.getRelationshipFieldSpecs().size()); - assertEquals("keyForeignKey", keyAspectSpec.getRelationshipFieldSpecMap().get(new PathSpec("keyPart2").toString()).getRelationshipName()); + assertEquals( + "keyForeignKey", + keyAspectSpec + .getRelationshipFieldSpecMap() + .get(new PathSpec("keyPart2").toString()) + .getRelationshipName()); } - private void validateBrowsePaths(final AspectSpec browsePathAspectSpec) { assertEquals("testBrowsePaths", browsePathAspectSpec.getName()); - assertEquals(new TestBrowsePaths().schema().getFullName(), browsePathAspectSpec.getPegasusSchema().getFullName()); + assertEquals( + new TestBrowsePaths().schema().getFullName(), + browsePathAspectSpec.getPegasusSchema().getFullName()); assertEquals(1, browsePathAspectSpec.getSearchableFieldSpecs().size()); - assertEquals(SearchableAnnotation.FieldType.BROWSE_PATH, browsePathAspectSpec.getSearchableFieldSpecs().get(0) - .getSearchableAnnotation().getFieldType()); + assertEquals( + SearchableAnnotation.FieldType.BROWSE_PATH, + browsePathAspectSpec + .getSearchableFieldSpecs() + .get(0) + .getSearchableAnnotation() + .getFieldType()); } private void validateTestEntityInfo(final AspectSpec testEntityInfo) { assertEquals("testEntityInfo", testEntityInfo.getName()); - assertEquals(new TestEntityInfo().schema().getFullName(), testEntityInfo.getPegasusSchema().getFullName()); + assertEquals( + new TestEntityInfo().schema().getFullName(), + testEntityInfo.getPegasusSchema().getFullName()); // Assert on Searchable Fields 
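    // (Each assertion pair below resolves a searchable field by its PathSpec key in the
    // searchable field map, then checks the annotation's fieldName and fieldType respectively.)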
assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 11); - assertEquals("customProperties", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("customProperties").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.KEYWORD, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("customProperties").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("textFieldOverride", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("textArrayField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textArrayField", "*").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT_PARTIAL, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textArrayField", "*").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("wordGramField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("wordGramField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.WORD_GRAM, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("wordGramField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedIntegerField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedIntegerField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.COUNT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedIntegerField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedArrayStringField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedArrayArrayField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("esObjectField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("esObjectField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.OBJECT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("esObjectField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("foreignKey", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(true, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getSearchableAnnotation().isQueryByDefault()); - 
assertEquals("doubleField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.DOUBLE, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldType()); - + assertEquals( + "customProperties", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("customProperties").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.KEYWORD, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("customProperties").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "textFieldOverride", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "textArrayField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textArrayField", "*").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT_PARTIAL, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textArrayField", "*").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "wordGramField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("wordGramField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.WORD_GRAM, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("wordGramField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedIntegerField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedIntegerField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.COUNT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedIntegerField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedArrayStringField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedArrayArrayField", + testEntityInfo + .getSearchableFieldSpecMap() + .get( + new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*") + .toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get( + new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*") + .toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "esObjectField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("esObjectField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.OBJECT, + 
testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("esObjectField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "foreignKey", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + true, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getSearchableAnnotation() + .isQueryByDefault()); + assertEquals( + "doubleField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("doubleField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.DOUBLE, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("doubleField").toString()) + .getSearchableAnnotation() + .getFieldType()); // Assert on Relationship Fields assertEquals(4, testEntityInfo.getRelationshipFieldSpecs().size()); - assertEquals("foreignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getRelationshipName()); - assertEquals("foreignKeyArray", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("foreignKeyArray", "*").toString()).getRelationshipName()); - assertEquals("nestedForeignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedForeignKey").toString()).getRelationshipName()); - assertEquals("nestedArrayForeignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayForeignKey").toString()).getRelationshipName()); + assertEquals( + "foreignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getRelationshipName()); + assertEquals( + "foreignKeyArray", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("foreignKeyArray", "*").toString()) + .getRelationshipName()); + assertEquals( + "nestedForeignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedForeignKey").toString()) + .getRelationshipName()); + assertEquals( + "nestedArrayForeignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayForeignKey").toString()) + .getRelationshipName()); } private void validateSearchFeatures(final AspectSpec searchFeaturesAspectSpec) { assertEquals("searchFeatures", searchFeaturesAspectSpec.getName()); - assertEquals(new SearchFeatures().schema().getFullName(), + assertEquals( + new SearchFeatures().schema().getFullName(), searchFeaturesAspectSpec.getPegasusSchema().getFullName()); assertEquals(2, searchFeaturesAspectSpec.getSearchScoreFieldSpecs().size()); - assertEquals("feature1", searchFeaturesAspectSpec.getSearchScoreFieldSpecMap() - .get(new PathSpec("feature1").toString()) - .getSearchScoreAnnotation() - .getFieldName()); - assertEquals("feature2", searchFeaturesAspectSpec.getSearchScoreFieldSpecMap() - .get(new PathSpec("feature2").toString()) - .getSearchScoreAnnotation() - .getFieldName()); + assertEquals( + "feature1", + searchFeaturesAspectSpec + .getSearchScoreFieldSpecMap() + .get(new PathSpec("feature1").toString()) + .getSearchScoreAnnotation() + .getFieldName()); + assertEquals( + "feature2", + searchFeaturesAspectSpec + .getSearchScoreFieldSpecMap() + .get(new PathSpec("feature2").toString()) + .getSearchScoreAnnotation() + .getFieldName()); } - } diff --git 
a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java
index 320dfc47f21e4..852e4f19bac12 100644
--- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java
+++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry;
 
+import static org.testng.Assert.*;
+
 import com.datahub.test.TestEntityProfile;
 import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor;
 import com.linkedin.metadata.models.EntitySpec;
@@ -9,21 +11,22 @@
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-
 public class ConfigEntityRegistryTest {
 
   @BeforeTest
   public void disableAssert() {
-    PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader()
+    PathSpecBasedSchemaAnnotationVisitor.class
+        .getClassLoader()
         .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false);
   }
 
   @Test
   public void testEntityRegistry() throws FileNotFoundException {
-    ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry(
-        TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml"));
+    ConfigEntityRegistry configEntityRegistry =
+        new ConfigEntityRegistry(
+            TestEntityProfile.class
+                .getClassLoader()
+                .getResourceAsStream("test-entity-registry.yml"));
 
     Map<String, EntitySpec> entitySpecs = configEntityRegistry.getEntitySpecs();
     Map<String, EventSpec> eventSpecs = configEntityRegistry.getEventSpecs();
@@ -54,9 +57,11 @@ public void testEntityRegistry() throws FileNotFoundException {
 
   @Test
   public void testEntityRegistryIdentifier() throws FileNotFoundException {
-    ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry(
-        TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml"));
+    ConfigEntityRegistry configEntityRegistry =
+        new ConfigEntityRegistry(
+            TestEntityProfile.class
+                .getClassLoader()
+                .getResourceAsStream("test-entity-registry.yml"));
     assertEquals(configEntityRegistry.getIdentifier(), "test-registry");
   }
 }
-
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java
index 05d23eb4b455f..20a64f9af25c0 100644
--- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java
+++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java
@@ -1,5 +1,11 @@
 package com.linkedin.metadata.models.registry;
 
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNull;
+import static org.testng.Assert.assertTrue;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.RelationshipFieldSpec;
@@ -11,13 +17,6 @@
 import java.util.Map;
 import org.testng.annotations.Test;
 
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
-
-
 public class LineageRegistryTest {
   @Test
   public void testRegistryWhenEmpty() {
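Between these hunks, the pattern the test exercises is worth spelling out: LineageRegistry folds each entity's RelationshipAnnotations into upstream and downstream EdgeInfo sets. A condensed usage sketch, mirroring the mocks in the surrounding test (illustrative only; mockEntitySpecs stands in for the map the test builds below):

// Derive lineage edges from a mocked entity registry, as the test does.
EntityRegistry entityRegistry = mock(EntityRegistry.class);
when(entityRegistry.getEntitySpecs()).thenReturn(mockEntitySpecs);

LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry);
LineageRegistry.LineageSpec lineageSpec = lineageRegistry.getLineageSpec("dataset");
// "DownstreamOf" is declared on dataset itself, so it surfaces as an OUTGOING upstream edge;
// "Produces", declared on dataJob and pointing at dataset, surfaces as INCOMING.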
@@ -33,14 +32,16 @@ public void testRegistry() { Map<String, EntitySpec> mockEntitySpecs = new HashMap<>(); EntitySpec mockDatasetSpec = mock(EntitySpec.class); List<RelationshipFieldSpec> datasetRelations = - ImmutableList.of(buildSpec("DownstreamOf", ImmutableList.of("dataset"), true, true), + ImmutableList.of( + buildSpec("DownstreamOf", ImmutableList.of("dataset"), true, true), buildSpec("AssociatedWith", ImmutableList.of("tag"), true, false), buildSpec("AssociatedWith", ImmutableList.of("glossaryTerm"), true, false)); when(mockDatasetSpec.getRelationshipFieldSpecs()).thenReturn(datasetRelations); mockEntitySpecs.put("dataset", mockDatasetSpec); EntitySpec mockJobSpec = mock(EntitySpec.class); List<RelationshipFieldSpec> jobRelations = - ImmutableList.of(buildSpec("Produces", ImmutableList.of("dataset"), false, true), + ImmutableList.of( + buildSpec("Produces", ImmutableList.of("dataset"), false, true), buildSpec("Consumes", ImmutableList.of("dataset"), true, true)); when(mockJobSpec.getRelationshipFieldSpecs()).thenReturn(jobRelations); mockEntitySpecs.put("dataJob", mockJobSpec); @@ -50,22 +51,51 @@ public void testRegistry() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); LineageRegistry.LineageSpec lineageSpec = lineageRegistry.getLineageSpec("dataset"); assertEquals(lineageSpec.getUpstreamEdges().size(), 2); - assertTrue(lineageSpec.getUpstreamEdges() - .contains(new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "dataset"))); - assertTrue(lineageSpec.getUpstreamEdges() - .contains(new LineageRegistry.EdgeInfo("Produces", RelationshipDirection.INCOMING, "dataJob"))); + assertTrue( + lineageSpec + .getUpstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.OUTGOING, "dataset"))); + assertTrue( + lineageSpec + .getUpstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "Produces", RelationshipDirection.INCOMING, "dataJob"))); assertEquals(lineageSpec.getDownstreamEdges().size(), 2); - assertTrue(lineageSpec.getDownstreamEdges() - .contains(new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "dataset"))); - assertTrue(lineageSpec.getDownstreamEdges() - .contains(new LineageRegistry.EdgeInfo("Consumes", RelationshipDirection.INCOMING, "dataJob"))); + assertTrue( + lineageSpec + .getDownstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.INCOMING, "dataset"))); + assertTrue( + lineageSpec + .getDownstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "Consumes", RelationshipDirection.INCOMING, "dataJob"))); } - private RelationshipFieldSpec buildSpec(String relationshipType, List<String> destinationEntityTypes, - boolean isUpstream, boolean isLineage) { + private RelationshipFieldSpec buildSpec( + String relationshipType, + List<String> destinationEntityTypes, + boolean isUpstream, + boolean isLineage) { RelationshipFieldSpec spec = mock(RelationshipFieldSpec.class); - when(spec.getRelationshipAnnotation()).thenReturn( - new RelationshipAnnotation(relationshipType, destinationEntityTypes, isUpstream, isLineage, null, null, null, null, null)); + when(spec.getRelationshipAnnotation()) + .thenReturn( + new RelationshipAnnotation( + relationshipType, + destinationEntityTypes, + isUpstream, + isLineage, + null, + null, + null, + null, + null)); return spec; } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java 
b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java index 38664fedb1570..1652a51290597 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry; +import static org.testng.Assert.*; + import com.linkedin.metadata.models.DataSchemaFactory; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; @@ -7,20 +9,19 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class PatchEntityRegistryTest { @Test public void testEntityRegistryLoad() throws Exception, EntityRegistryException { - PatchEntityRegistry patchEntityRegistry = new PatchEntityRegistry( - TestConstants.BASE_DIRECTORY - + "/" - + TestConstants.TEST_REGISTRY - + "/" - + TestConstants.TEST_VERSION.toString(), - TestConstants.TEST_REGISTRY, TestConstants.TEST_VERSION); + PatchEntityRegistry patchEntityRegistry = + new PatchEntityRegistry( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString(), + TestConstants.TEST_REGISTRY, + TestConstants.TEST_VERSION); Map<String, EntitySpec> entitySpecs = patchEntityRegistry.getEntitySpecs(); assertEquals(entitySpecs.values().size(), 1); @@ -40,21 +41,27 @@ public void testEntityRegistryLoad() throws Exception, EntityRegistryException { /** * Validate that patch entity registries can have key aspects + * * @throws Exception * @throws EntityRegistryException */ @Test public void testEntityRegistryWithKeyLoad() throws Exception, EntityRegistryException { - DataSchemaFactory dataSchemaFactory = DataSchemaFactory.withCustomClasspath( - Paths.get(TestConstants.BASE_DIRECTORY - + "/" - + TestConstants.TEST_REGISTRY - + "/" - + TestConstants.TEST_VERSION.toString())); + DataSchemaFactory dataSchemaFactory = + DataSchemaFactory.withCustomClasspath( + Paths.get( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString())); - PatchEntityRegistry patchEntityRegistry = new PatchEntityRegistry( - dataSchemaFactory, Paths.get("src/test_plugins/mycompany-full-model/0.0.1/entity-registry.yaml"), - TestConstants.TEST_REGISTRY, TestConstants.TEST_VERSION); + PatchEntityRegistry patchEntityRegistry = + new PatchEntityRegistry( + dataSchemaFactory, + Paths.get("src/test_plugins/mycompany-full-model/0.0.1/entity-registry.yaml"), + TestConstants.TEST_REGISTRY, + TestConstants.TEST_VERSION); Map<String, EntitySpec> entitySpecs = patchEntityRegistry.getEntitySpecs(); assertEquals(entitySpecs.values().size(), 1); diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java index 06ed794ecc684..b3eb2af72708c 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.models.registry.TestConstants.*; +import static org.testng.Assert.*; + import com.linkedin.data.schema.ArrayDataSchema; import com.linkedin.data.schema.DataSchema; 
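For context on the constructor calls being re-wrapped in the test above: a PatchEntityRegistry is loaded from a plugin directory plus a registry name and version. A minimal usage sketch based only on the signatures visible in this test (the fixture paths are test constants, not a recommended layout):

// Load a patch registry from the test plugin directory and inspect its specs.
PatchEntityRegistry patchEntityRegistry =
    new PatchEntityRegistry(
        TestConstants.BASE_DIRECTORY
            + "/"
            + TestConstants.TEST_REGISTRY
            + "/"
            + TestConstants.TEST_VERSION.toString(),
        TestConstants.TEST_REGISTRY,
        TestConstants.TEST_VERSION);
Map<String, EntitySpec> entitySpecs = patchEntityRegistry.getEntitySpecs();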
import com.linkedin.data.schema.RecordDataSchema; @@ -28,122 +31,137 @@ import org.apache.maven.artifact.versioning.ComparableVersion; import org.testng.annotations.Test; -import static com.linkedin.metadata.models.registry.TestConstants.*; -import static org.testng.Assert.*; - - public class PluginEntityRegistryLoaderTest { @Test public void testEntityRegistry() throws FileNotFoundException, InterruptedException { - EntityRegistry baseEntityRegistry = new EntityRegistry() { - @Nonnull - @Override - public EntitySpec getEntitySpec(@Nonnull String entityName) { - return null; - } - - @Nonnull - @Override - public EventSpec getEventSpec(@Nonnull String eventName) { - return null; - } - - @Nonnull - @Override - public Map<String, EntitySpec> getEntitySpecs() { - return null; - } - - @Nonnull - @Override - public Map<String, AspectSpec> getAspectSpecs() { - return new HashMap<>(); - } - - @Nonnull - @Override - public Map<String, EventSpec> getEventSpecs() { - return null; - } - - @Nonnull - @Override - public AspectTemplateEngine getAspectTemplateEngine() { - return new AspectTemplateEngine(); - } - }; + EntityRegistry baseEntityRegistry = + new EntityRegistry() { + @Nonnull + @Override + public EntitySpec getEntitySpec(@Nonnull String entityName) { + return null; + } + + @Nonnull + @Override + public EventSpec getEventSpec(@Nonnull String eventName) { + return null; + } + + @Nonnull + @Override + public Map<String, EntitySpec> getEntitySpecs() { + return null; + } + + @Nonnull + @Override + public Map<String, AspectSpec> getAspectSpecs() { + return new HashMap<>(); + } + + @Nonnull + @Override + public Map<String, EventSpec> getEventSpecs() { + return null; + } + + @Nonnull + @Override + public AspectTemplateEngine getAspectTemplateEngine() { + return new AspectTemplateEngine(); + } + }; MergedEntityRegistry configEntityRegistry = new MergedEntityRegistry(baseEntityRegistry); PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(TestConstants.BASE_DIRECTORY).withBaseRegistry(configEntityRegistry).start(true); + new PluginEntityRegistryLoader(TestConstants.BASE_DIRECTORY) + .withBaseRegistry(configEntityRegistry) + .start(true); assertEquals(pluginEntityRegistryLoader.getPatchRegistries().size(), 1); EntityRegistryLoadResult loadResult = - pluginEntityRegistryLoader.getPatchRegistries().get(TestConstants.TEST_REGISTRY).get(TEST_VERSION).getSecond(); + pluginEntityRegistryLoader + .getPatchRegistries() + .get(TestConstants.TEST_REGISTRY) + .get(TEST_VERSION) + .getSecond(); assertNotNull(loadResult); assertEquals(loadResult.getLoadResult(), LoadStatus.FAILURE); } private EntityRegistry getBaseEntityRegistry() { final AspectSpec keyAspectSpec = - new AspectSpec(new AspectAnnotation("datasetKey", false, false, null), Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), + new AspectSpec( + new AspectAnnotation("datasetKey", false, false, null), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), (RecordDataSchema) DataSchemaFactory.getInstance().getAspectSchema("datasetKey").get(), DataSchemaFactory.getInstance().getAspectClass("datasetKey").get()); final Map<String, EntitySpec> entitySpecMap = new HashMap<>(1); List<AspectSpec> aspectSpecList = new ArrayList<>(1); aspectSpecList.add(keyAspectSpec); - EntitySpec baseEntitySpec = new DefaultEntitySpec(aspectSpecList, new EntityAnnotation("dataset", 
"datasetKey"), - (RecordDataSchema) DataSchemaFactory.getInstance().getEntitySchema("dataset").get()); + EntitySpec baseEntitySpec = + new DefaultEntitySpec( + aspectSpecList, + new EntityAnnotation("dataset", "datasetKey"), + (RecordDataSchema) DataSchemaFactory.getInstance().getEntitySchema("dataset").get()); entitySpecMap.put("dataset", baseEntitySpec); final Map<String, EventSpec> eventSpecMap = new HashMap<>(1); - EventSpec baseEventSpec = new DefaultEventSpec("testEvent", new EventAnnotation("testEvent"), - (RecordDataSchema) DataSchemaFactory.getInstance().getEventSchema("testEvent").get()); + EventSpec baseEventSpec = + new DefaultEventSpec( + "testEvent", + new EventAnnotation("testEvent"), + (RecordDataSchema) DataSchemaFactory.getInstance().getEventSchema("testEvent").get()); eventSpecMap.put("testevent", baseEventSpec); - EntityRegistry baseEntityRegistry = new EntityRegistry() { - - @Nonnull - @Override - public EntitySpec getEntitySpec(@Nonnull String entityName) { - assertEquals(entityName, "dataset"); - return baseEntitySpec; - } - - @Nullable - @Override - public EventSpec getEventSpec(@Nonnull String eventName) { - assertEquals(eventName, "testEvent"); - return baseEventSpec; - } - - @Nonnull - @Override - public Map<String, EntitySpec> getEntitySpecs() { - return entitySpecMap; - } - - @Nonnull - @Override - public Map<String, AspectSpec> getAspectSpecs() { - return new HashMap<>(); - } - - @Nonnull - @Override - public Map<String, EventSpec> getEventSpecs() { - return eventSpecMap; - } - - @Nonnull - @Override - public AspectTemplateEngine getAspectTemplateEngine() { - return new AspectTemplateEngine(); - } - }; + EntityRegistry baseEntityRegistry = + new EntityRegistry() { + + @Nonnull + @Override + public EntitySpec getEntitySpec(@Nonnull String entityName) { + assertEquals(entityName, "dataset"); + return baseEntitySpec; + } + + @Nullable + @Override + public EventSpec getEventSpec(@Nonnull String eventName) { + assertEquals(eventName, "testEvent"); + return baseEventSpec; + } + + @Nonnull + @Override + public Map<String, EntitySpec> getEntitySpecs() { + return entitySpecMap; + } + + @Nonnull + @Override + public Map<String, AspectSpec> getAspectSpecs() { + return new HashMap<>(); + } + + @Nonnull + @Override + public Map<String, EventSpec> getEventSpecs() { + return eventSpecMap; + } + + @Nonnull + @Override + public AspectTemplateEngine getAspectTemplateEngine() { + return new AspectTemplateEngine(); + } + }; return baseEntityRegistry; } @@ -152,12 +170,21 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter MergedEntityRegistry mergedEntityRegistry = new MergedEntityRegistry(getBaseEntityRegistry()); PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(BASE_DIRECTORY).withBaseRegistry(mergedEntityRegistry).start(true); + new PluginEntityRegistryLoader(BASE_DIRECTORY) + .withBaseRegistry(mergedEntityRegistry) + .start(true); assertEquals(pluginEntityRegistryLoader.getPatchRegistries().size(), 1); EntityRegistryLoadResult loadResult = - pluginEntityRegistryLoader.getPatchRegistries().get(TEST_REGISTRY).get(TEST_VERSION).getSecond(); + pluginEntityRegistryLoader + .getPatchRegistries() + .get(TEST_REGISTRY) + .get(TEST_VERSION) + .getSecond(); assertNotNull(loadResult); - assertEquals(loadResult.getLoadResult(), LoadStatus.SUCCESS, "load failed with " + loadResult.getFailureReason()); + assertEquals( + loadResult.getLoadResult(), + LoadStatus.SUCCESS, + "load failed with " + 
loadResult.getFailureReason()); Map<String, EntitySpec> entitySpecs = mergedEntityRegistry.getEntitySpecs(); @@ -165,7 +192,8 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter assertEquals(entitySpec.getName(), "dataset"); assertEquals(entitySpec.getKeyAspectSpec().getName(), "datasetKey"); Optional<DataSchema> dataSchema = - Optional.ofNullable(entitySpecs.get("dataset").getAspectSpec("datasetKey").getPegasusSchema()); + Optional.ofNullable( + entitySpecs.get("dataset").getAspectSpec("datasetKey").getPegasusSchema()); assertTrue(dataSchema.isPresent(), "datasetKey"); assertNotNull(entitySpec.getAspectSpec("testDataQualityRules")); assertEquals(entitySpecs.values().size(), 1); @@ -179,37 +207,65 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter @Test /** - * Tests that we can load up entity registries that represent safe evolutions as well as decline to load registries that represent unsafe evolutions. - * - */ public void testEntityRegistryVersioning() throws InterruptedException { + * Tests that we can load up entity registries that represent safe evolutions as well as decline + * to load registries that represent unsafe evolutions. + */ + public void testEntityRegistryVersioning() throws InterruptedException { MergedEntityRegistry mergedEntityRegistry = new MergedEntityRegistry(getBaseEntityRegistry()); String multiversionPluginDir = "src/test_plugins/"; PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(multiversionPluginDir).withBaseRegistry(mergedEntityRegistry).start(true); - Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> loadedRegistries = - pluginEntityRegistryLoader.getPatchRegistries(); + new PluginEntityRegistryLoader(multiversionPluginDir) + .withBaseRegistry(mergedEntityRegistry) + .start(true); + Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> + loadedRegistries = pluginEntityRegistryLoader.getPatchRegistries(); String registryName = "mycompany-dq-model"; assertTrue(loadedRegistries.containsKey(registryName)); assertTrue(loadedRegistries.get(registryName).containsKey(new ComparableVersion("0.0.1"))); - System.out.println(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.1")).getSecond().getFailureReason()); - - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.1")).getSecond().getLoadResult(), + System.out.println( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.1")) + .getSecond() + .getFailureReason()); + + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.1")) + .getSecond() + .getLoadResult(), LoadStatus.SUCCESS); - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.2")).getSecond().getLoadResult(), + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.2")) + .getSecond() + .getLoadResult(), LoadStatus.SUCCESS); - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.3")).getSecond().getLoadResult(), + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.3")) + .getSecond() + .getLoadResult(), LoadStatus.FAILURE); - assertTrue(loadedRegistries.get(registryName) - .get(new ComparableVersion("0.0.3")) - .getSecond() - .getFailureReason() - .contains("new record removed required fields type")); + assertTrue( + loadedRegistries + .get(registryName) + .get(new 
ComparableVersion("0.0.3")) + .getSecond() + .getFailureReason() + .contains("new record removed required fields type")); assertTrue(mergedEntityRegistry.getEntitySpec("dataset").hasAspect("dataQualityRules")); RecordDataSchema dataSchema = - mergedEntityRegistry.getEntitySpec("dataset").getAspectSpec("dataQualityRules").getPegasusSchema(); + mergedEntityRegistry + .getEntitySpec("dataset") + .getAspectSpec("dataQualityRules") + .getPegasusSchema(); ArrayDataSchema arrayDataSchema = (ArrayDataSchema) dataSchema.getField("rules").getType().getDereferencedDataSchema(); // Aspect Schema should be the same as version 0.0.2, checking to see that all fields exist diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java index ae46f3796aa73..43ae86076ae8c 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java @@ -2,7 +2,6 @@ import org.apache.maven.artifact.versioning.ComparableVersion; - public class TestConstants { public static final String TEST_REGISTRY = "mycompany-dq-model"; public static final String BASE_DIRECTORY = "custom-test-model/build/plugins/models"; @@ -10,6 +9,5 @@ public class TestConstants { public static final String TEST_ASPECT_NAME = "testDataQualityRules"; public static final String TEST_EVENT_NAME = "dataQualityEvent"; - private TestConstants() { - } + private TestConstants() {} } diff --git a/gradle/checkstyle/checkstyle.xml b/gradle/checkstyle/checkstyle.xml deleted file mode 100644 index a9bffe839edad..0000000000000 --- a/gradle/checkstyle/checkstyle.xml +++ /dev/null @@ -1,198 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN" "http://www.puppycrawl.com/dtds/configuration_1_3.dtd"> - -<!-- - Checkstyle-Configuration: LinkedIn Style - Description: -LinkedIn Java style. ---> -<module name="Checker"> - <property name="severity" value="warning"/> - <property name="fileExtensions" value="java"/> - - <module name="TreeWalker"> - <property name="tabWidth" value="2"/> - <module name="SuppressWarningsHolder"/> - <module name="FileContentsHolder"/> - - <!-- ANNOTATIONS --> - - <!-- No trailing empty parenthesis or commas --> - <module name="AnnotationUseStyle"> - <property name="elementStyle" value="ignore"/> - </module> - <!-- Ensure @Override is present when {@inheritDoc} Javadoc tag is present --> - <module name="MissingOverride"/> - <!-- Package level annotations belong in package-info.java --> - <module name="PackageAnnotation"/> - - <!-- BLOCKS --> - - <!-- Block opening brace on same line --> - <module name="LeftCurly"> - <property name="option" value="eol"/> - </module> - <!-- Block closing brace for else, catch, finally on same line --> - <module name="RightCurly"> - <property name="option" value="same"/> - </module> - <!-- Always use braces even if optional --> - <module name="NeedBraces"/> - - <!-- CLASS DESIGN --> - - <!-- Classes containing only static methods should not have a public constructor --> - <module name="HideUtilityClassConstructor"/> - - <!-- CODING --> - - <!-- Use Java style array declarations (e.g. String[] names), not C style (e.g. 
String names[]) --> - <module name="ArrayTypeStyle"/> - <!-- If covariant equals defined, standard equals must also be defined --> - <module name="CovariantEquals"/> - <!-- Switch 'default' case must appear last --> - <module name="DefaultComesLast"/> - <!-- Override equals and hashCode together --> - <module name="EqualsHashCode"/> - <!-- No fall through in switch cases, even the last one --> - <module name="FallThrough"> - <property name="checkLastCaseGroup" value="true"/> - </module> - <!-- Do not perform assignments embedded within expressions --> - <module name="InnerAssignment"/> - <!-- Switch statements must have a 'default' case --> - <module name="MissingSwitchDefault"/> - <!-- Do not modify the 'for' loop control variable --> - <module name="ModifiedControlVariable"/> - <!-- Each variable delcaration must be on a separate line --> - <module name="MultipleVariableDeclarations"/> - <!-- Each statement (i.e. code terminated by a semicolon) must be on a separate line --> - <module name="OneStatementPerLine"/> - <!-- Classes must have an explicit package declaration --> - <module name="PackageDeclaration"/> - <!-- Do not test boolean expressions against the values true or false --> - <module name="SimplifyBooleanExpression"/> - <!-- Do not test for boolean conditions and return the values true or false --> - <module name="SimplifyBooleanReturn"/> - <!-- Do not use '==' to compare string against a literal; use 'equals' --> - <module name="StringLiteralEquality"/> - <!-- Use 'L' with long literals --> - <module name="UpperEll"/> - - <!-- IMPORTS --> - - <!-- No imports statements using '*' notation except static imports --> - <module name="AvoidStarImport"> - <property name="allowStaticMemberImports" value="true"/> - </module> - <!-- Do not import 'sun' packages --> - <module name="IllegalImport"/> - <!-- Do not duplicate import statements --> - <module name="RedundantImport"/> - <!-- Eliminate unused imports --> - <module name="UnusedImports"/> - - <!-- JAVADOC COMMENTS --> - - <!-- If you have a Javadoc comment, make sure it is properly formed --> - <module name="JavadocStyle"> - <property name="checkFirstSentence" value="false"/> - </module> - - <!-- NAMING CONVENTIONS --> - - <!-- Generic parameters for a class must be uppercase letters separated by underscores (e.g. <V>, <NEW>, <KEY_T>) --> - <module name="ClassTypeParameterName"> - <property name="format" value="^[A-Z]+(_[A-Z]+)*$"/> - </module> - <!-- Constants must be all uppercase letters separated by underscores --> - <module name="ConstantName"> - <property name="format" value="^(_?log)|([A-Z][A-Z0-9]*(_[A-Z0-9]+)*)$"/> - </module> - <!-- Local variables must be camel case starting with lowercase letter --> - <module name="LocalFinalVariableName"/> - <module name="LocalVariableName"/> - <!-- Member variables must be camel case starting with an underscore or lowercase letter --> - <module name="MemberName"> - <property name="format" value="^[_a-z][a-zA-Z0-9]*$"/> - </module> - <!-- Method name must be camel case starting with a lowercase letter --> - <module name="MethodName"/> - <!-- Generic parameters for a method must be uppercase letters separated by underscores (e.g. 
<V>, <NEW>, <KEY_T>) --> - <module name="MethodTypeParameterName"> - <property name="format" value="^[A-Z]+(_[A-Z]+)*$"/> - </module> - <!-- Package name must be all lowercase letters separated by periods --> - <module name="PackageName"> - <property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/> - </module> - <!-- Parameters must be camel case starting with a lowercase letter --> - <module name="ParameterName"/> - <!-- Static variables must be camel case starting with an underscore or lowercase letter --> - <module name="StaticVariableName"> - <property name="format" value="^[_a-z][a-zA-Z0-9]*$"/> - </module> - <!-- Type names must be camel case starting with an uppercase letter --> - <module name="TypeName"/> - - <!-- LENGTHS --> - - <!-- Desired line length is 120 but allow some overrun beyond that --> - <module name="LineLength"> - <property name="max" value="160"/> - <message key="maxLineLen" value="Line is longer than {0,number,integer} characters (found {1,number,integer}). Try to keep lines under 120 characters."/> - </module> - - <!-- WHITESPACE --> - - <module name="GenericWhitespace"/> - <module name="MethodParamPad"/> - <module name="NoWhitespaceAfter"> - <property name="tokens" value="BNOT,DEC,DOT,INC,LNOT,UNARY_MINUS,UNARY_PLUS"/> - </module> - <module name="NoWhitespaceBefore"/> - <module name="OperatorWrap"/> - <module name="ParenPad"/> - <module name="TypecastParenPad"> - <property name="tokens" value="RPAREN,TYPECAST"/> - </module> - <module name="WhitespaceAfter"/> - <module name="WhitespaceAround"/> - - <!-- Do not allow meaningless, IDE generated parameter names --> - <module name="RegexpSinglelineJava"> - <property name="format" value="[\s]+arg[\d]+[,\)]"/> - <property name="message" value="Replace argN with a meaningful parameter name"/> - </module> - </module> - - <!-- Do not allow tab characters in source files --> - <module name="FileTabCharacter"/> - - <!-- Ensure parameter and exception names are present on @param and @throws tags --> - <module name="RegexpSingleline"> - <property name="format" value="\*[\s]*@(throws|param)[\s]*$"/> - <property name="message" value="Missing parameter or exception name"/> - </module> - <!-- IDE generated code must be reviewed by developer --> - <module name="RegexpSingleline"> - <property name="format" value="\/\/[\s]*TODO[\s]+Auto-generated"/> - <property name="message" value="Replace IDE generated code with real implementation"/> - </module> - <!-- Detect commonly misspelled Javadoc tags --> - <module name="RegexpSingleline"> - <property name="format" value="\*[\s]*@(params|throw|returns)[\s]+"/> - <property name="message" value="Correct misspelled Javadoc tag"/> - </module> - - <!-- Read checker suppressions from a file --> - <module name="SuppressionFilter"> - <property name="file" value="${config_loc}/suppressions.xml"/> - </module> - <!-- Allow Checkstyle warnings to be suppressed using trailing comments --> - <module name="SuppressWithNearbyCommentFilter"/> - <!-- Allow Checkstyle warnings to be suppressed using block comments --> - <module name="SuppressionCommentFilter"/> - <!-- Allow SuppressWarnings annotation to suppress Checkstyle issues --> - <module name="SuppressWarningsFilter"/> -</module> diff --git a/gradle/checkstyle/suppressions.xml b/gradle/checkstyle/suppressions.xml deleted file mode 100644 index 829689ba35611..0000000000000 --- a/gradle/checkstyle/suppressions.xml +++ /dev/null @@ -1,7 +0,0 @@ -<?xml version="1.0"?> -<!DOCTYPE suppressions PUBLIC - "-//Puppy Crawl//DTD Suppressions 1.1//EN" - 
"http://www.puppycrawl.com/dtds/suppressions_1_1.dtd"> -<suppressions> - <suppress checks=".*" files="src/mainGeneratedDataTemplate"/> -</suppressions> diff --git a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java index e71fe6266b955..02aeb047a4d3e 100644 --- a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java +++ b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java @@ -1,7 +1,6 @@ package com.datahub.metadata.ingestion; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,6 +17,7 @@ import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -45,29 +45,31 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.scheduling.support.CronSequenceGenerator; - /** - * This class serves as a stateful scheduler of Ingestion Runs for Ingestion Sources defined - * within DataHub. It manages storing and triggering ingestion sources on a pre-defined schedule - * based on the information present in the {@link DataHubIngestionSourceInfo} aspect. As such, this class + * This class serves as a stateful scheduler of Ingestion Runs for Ingestion Sources defined within + * DataHub. It manages storing and triggering ingestion sources on a pre-defined schedule based on + * the information present in the {@link DataHubIngestionSourceInfo} aspect. As such, this class * should never be instantiated more than once - it's a singleton. * - * When the scheduler is created, it will first batch load all "info" aspects associated with the DataHubIngestionSource entity. - * It then iterates through all the aspects and attempts to extract a Quartz-cron (* * * * *) formatted schedule string & timezone from each. - * Upon finding a schedule and timezone, the "next execution time" as a relative timestamp is computed and a task - * is scheduled at that time in the future. + * <p>When the scheduler is created, it will first batch load all "info" aspects associated with the + * DataHubIngestionSource entity. It then iterates through all the aspects and attempts to extract a + * Quartz-cron (* * * * *) formatted schedule string & timezone from each. Upon finding a schedule + * and timezone, the "next execution time" as a relative timestamp is computed and a task is + * scheduled at that time in the future. * - * The child task is scheduled on another thread via {@link ScheduledExecutorService} and is responsible for creating a - * new DataHubExecutionRequest entity instance using an {@link EntityClient}. The execution request includes the inputs required - * to execute an ingestion source: an Ingestion Recipe encoded as JSON. This in turn triggers the execution of a downstream - * "action" which actually executes the ingestion process and reports the status back. 
+ * <p>The child task is scheduled on another thread via {@link ScheduledExecutorService} and is + * responsible for creating a new DataHubExecutionRequest entity instance using an {@link + * EntityClient}. The execution request includes the inputs required to execute an ingestion source: + * an Ingestion Recipe encoded as JSON. This in turn triggers the execution of a downstream "action" + * which actually executes the ingestion process and reports the status back. * - * After initial load, this class will continuously listen to the MetadataChangeProposal stream and update its local cache based - * on changes performed against Ingestion Source entities. Specifically, if the schedule of an Ingestion Source is changed in any way, - * the next execution time of that source will be recomputed, with previously scheduled execution clear if necessary. + * <p>After initial load, this class will continuously listen to the MetadataChangeProposal stream + * and update its local cache based on changes performed against Ingestion Source entities. + * Specifically, if the schedule of an Ingestion Source is changed in any way, the next execution + * time of that source will be recomputed, with previously scheduled execution clear if necessary. * - * On top of that, the component can also refresh its entire cache periodically. By default, it batch loads all the latest - * schedules on a once-per-day cadence. + * <p>On top of that, the component can also refresh its entire cache periodically. By default, it + * batch loads all the latest schedules on a once-per-day cadence. */ @Slf4j @RequiredArgsConstructor @@ -76,32 +78,35 @@ public class IngestionScheduler { private final Authentication _systemAuthentication; private final EntityClient _entityClient; - // Maps a DataHubIngestionSource to a future representing the "next" scheduled execution of the source + // Maps a DataHubIngestionSource to a future representing the "next" scheduled execution of the + // source // Visible for testing final Map<Urn, ScheduledFuture<?>> _nextIngestionSourceExecutionCache = new HashMap<>(); // Shared executor service used for executing an ingestion source on a schedule - private final ScheduledExecutorService _sharedExecutorService = Executors.newScheduledThreadPool(1); + private final ScheduledExecutorService _sharedExecutorService = + Executors.newScheduledThreadPool(1); private final IngestionConfiguration _ingestionConfiguration; private final int _batchGetDelayIntervalSeconds; private final int _batchGetRefreshIntervalSeconds; public void init() { - final BatchRefreshSchedulesRunnable batchRefreshSchedulesRunnable = new BatchRefreshSchedulesRunnable( - _systemAuthentication, - _entityClient, - this::scheduleNextIngestionSourceExecution, - this::unscheduleAll); + final BatchRefreshSchedulesRunnable batchRefreshSchedulesRunnable = + new BatchRefreshSchedulesRunnable( + _systemAuthentication, + _entityClient, + this::scheduleNextIngestionSourceExecution, + this::unscheduleAll); // Schedule a recurring batch-reload task. _sharedExecutorService.scheduleAtFixedRate( - batchRefreshSchedulesRunnable, _batchGetDelayIntervalSeconds, _batchGetRefreshIntervalSeconds, + batchRefreshSchedulesRunnable, + _batchGetDelayIntervalSeconds, + _batchGetRefreshIntervalSeconds, TimeUnit.SECONDS); } - /** - * Removes the next scheduled execution of a particular ingestion source, if it exists. - */ + /** Removes the next scheduled execution of a particular ingestion source, if it exists. 
*/ public void unscheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn) { log.info("Unscheduling ingestion source with urn {}", ingestionSourceUrn); // Deleting an ingestion source schedule. Un-schedule the next execution. @@ -113,20 +118,25 @@ public void unscheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn) } /** - * Un-schedule all ingestion sources that are scheduled for execution. This is performed on refresh of ingestion sources. + * Un-schedule all ingestion sources that are scheduled for execution. This is performed on + * refresh of ingestion sources. */ public void unscheduleAll() { // Deleting an ingestion source schedule. Un-schedule the next execution. - Set<Urn> scheduledSources = new HashSet<>(_nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. + Set<Urn> scheduledSources = + new HashSet<>( + _nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. for (Urn urn : scheduledSources) { unscheduleNextIngestionSourceExecution(urn); } } /** - * Computes and schedules the next execution time for a particular Ingestion Source, if it has not already been scheduled. + * Computes and schedules the next execution time for a particular Ingestion Source, if it has not + * already been scheduled. */ - public void scheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn, final DataHubIngestionSourceInfo newInfo) { + public void scheduleNextIngestionSourceExecution( + final Urn ingestionSourceUrn, final DataHubIngestionSourceInfo newInfo) { // 1. Attempt to un-schedule any previous executions unscheduleNextIngestionSourceExecution(ingestionSourceUrn); @@ -137,50 +147,63 @@ public void scheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn, f // 2. Schedule the next run of the ingestion source log.info( - String.format("Scheduling next execution of Ingestion Source with urn %s. Schedule: %s", - ingestionSourceUrn, - schedule.getInterval(GetMode.NULL))); + String.format( + "Scheduling next execution of Ingestion Source with urn %s. Schedule: %s", + ingestionSourceUrn, schedule.getInterval(GetMode.NULL))); // Construct the new cron expression final String modifiedCronInterval = adjustCronInterval(schedule.getInterval()); if (CronSequenceGenerator.isValidExpression(modifiedCronInterval)) { final String timezone = schedule.hasTimezone() ? schedule.getTimezone() : "UTC"; - final CronSequenceGenerator generator = new CronSequenceGenerator(modifiedCronInterval, TimeZone.getTimeZone(timezone)); + final CronSequenceGenerator generator = + new CronSequenceGenerator(modifiedCronInterval, TimeZone.getTimeZone(timezone)); final Date currentDate = new Date(); final Date nextExecDate = generator.next(currentDate); final long scheduleTime = nextExecDate.getTime() - currentDate.getTime(); // Schedule the ingestion source to run some time in the future. 
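      // Put differently: the delay handed to the executor below is just
      // nextExecDate - currentDate in milliseconds. The scheduled runnable is
      // one-shot; it re-arms itself by invoking scheduleNextIngestionSourceExecution
      // again after creating the execution request (step "2. Re-Schedule" in its
      // run() method), which is what keeps the source firing on every cron
      // occurrence rather than only the next one.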
- final ExecutionRequestRunnable executionRequestRunnable = new ExecutionRequestRunnable( - _systemAuthentication, - _entityClient, - _ingestionConfiguration, - ingestionSourceUrn, - newInfo, - () -> _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), - this::scheduleNextIngestionSourceExecution); + final ExecutionRequestRunnable executionRequestRunnable = + new ExecutionRequestRunnable( + _systemAuthentication, + _entityClient, + _ingestionConfiguration, + ingestionSourceUrn, + newInfo, + () -> _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), + this::scheduleNextIngestionSourceExecution); // Schedule the next ingestion run - final ScheduledFuture<?> scheduledFuture = _sharedExecutorService.schedule(executionRequestRunnable, scheduleTime, TimeUnit.MILLISECONDS); + final ScheduledFuture<?> scheduledFuture = + _sharedExecutorService.schedule( + executionRequestRunnable, scheduleTime, TimeUnit.MILLISECONDS); _nextIngestionSourceExecutionCache.put(ingestionSourceUrn, scheduledFuture); - log.info(String.format("Scheduled next execution of Ingestion Source with urn %s in %sms.", ingestionSourceUrn, scheduleTime)); + log.info( + String.format( + "Scheduled next execution of Ingestion Source with urn %s in %sms.", + ingestionSourceUrn, scheduleTime)); } else { - log.error(String.format("Found malformed Ingestion Source schedule: %s for urn: %s. Skipping scheduling.", schedule.getInterval(), ingestionSourceUrn)); + log.error( + String.format( + "Found malformed Ingestion Source schedule: %s for urn: %s. Skipping scheduling.", + schedule.getInterval(), ingestionSourceUrn)); } } else { - log.info(String.format("Ingestion source with urn %s has no configured schedule. Not scheduling.", ingestionSourceUrn)); + log.info( + String.format( + "Ingestion source with urn %s has no configured schedule. Not scheduling.", + ingestionSourceUrn)); } } /** * A {@link Runnable} used to periodically re-populate the schedules cache. * - * Currently, the refresh logic is not very smart. When the cache is invalidated, we simply re-fetch the - * entire cache using schedules stored in the backend. + * <p>Currently, the refresh logic is not very smart. When the cache is invalidated, we simply + * re-fetch the entire cache using schedules stored in the backend. */ @VisibleForTesting static class BatchRefreshSchedulesRunnable implements Runnable { @@ -193,11 +216,13 @@ static class BatchRefreshSchedulesRunnable implements Runnable { public BatchRefreshSchedulesRunnable( @Nonnull final Authentication systemAuthentication, @Nonnull final EntityClient entityClient, - @Nonnull final BiConsumer<Urn, DataHubIngestionSourceInfo> scheduleNextIngestionSourceExecution, + @Nonnull + final BiConsumer<Urn, DataHubIngestionSourceInfo> scheduleNextIngestionSourceExecution, @Nonnull final Runnable unscheduleAll) { _systemAuthentication = Objects.requireNonNull(systemAuthentication); _entityClient = Objects.requireNonNull(entityClient); - _scheduleNextIngestionSourceExecution = Objects.requireNonNull(scheduleNextIngestionSourceExecution); + _scheduleNextIngestionSourceExecution = + Objects.requireNonNull(scheduleNextIngestionSourceExecution); _unscheduleAll = unscheduleAll; } @@ -214,25 +239,31 @@ public void run() { while (start < total) { try { - log.debug(String.format("Batch fetching ingestion source schedules. start: %s, count: %s ", start, count)); + log.debug( + String.format( + "Batch fetching ingestion source schedules. start: %s, count: %s ", + start, count)); // 1. List all ingestion source urns. 
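          // This while-loop is a plain offset/count pager: step 1 lists one page
          // of ingestion-source urns together with the total count, step 2
          // (batchGetV2) hydrates that page with only the "info" aspect, and
          // step 3 reschedules executions from the fetched info; the loop
          // advances until start >= total.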
- final ListResult ingestionSourceUrns = _entityClient.list( - Constants.INGESTION_SOURCE_ENTITY_NAME, - Collections.emptyMap(), - start, - count, - _systemAuthentication); + final ListResult ingestionSourceUrns = + _entityClient.list( + Constants.INGESTION_SOURCE_ENTITY_NAME, + Collections.emptyMap(), + start, + count, + _systemAuthentication); // 2. Fetch all ingestion sources, specifically the "info" aspect. - final Map<Urn, EntityResponse> ingestionSources = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ingestionSourceUrns.getEntities()), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - _systemAuthentication); + final Map<Urn, EntityResponse> ingestionSources = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ingestionSourceUrns.getEntities()), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), + _systemAuthentication); // 3. Reschedule ingestion sources based on the fetched schedules (inside "info") - log.debug("Received batch of Ingestion Source Info aspects. Attempting to re-schedule execution requests."); + log.debug( + "Received batch of Ingestion Source Info aspects. Attempting to re-schedule execution requests."); // Then schedule the next ingestion runs scheduleNextIngestionRuns(new ArrayList<>(ingestionSources.values())); @@ -242,29 +273,33 @@ public void run() { } catch (RemoteInvocationException e) { log.error( - String.format("Failed to retrieve ingestion sources! Skipping updating schedule cache until next refresh. start: %s, count: %s", - start, - count), + String.format( + "Failed to retrieve ingestion sources! Skipping updating schedule cache until next refresh. start: %s, count: %s", + start, count), e); return; } } log.info(String.format("Successfully fetched %s ingestion sources.", total)); } catch (Exception e) { - log.error("Caught exception while loading Ingestion Sources. Will retry on next scheduled attempt.", e); + log.error( + "Caught exception while loading Ingestion Sources. Will retry on next scheduled attempt.", + e); } } /** - * Attempts to reschedule the next ingestion source run based on a batch of {@link EntityResponse} objects - * received from the Metadata Service. + * Attempts to reschedule the next ingestion source run based on a batch of {@link + * EntityResponse} objects received from the Metadata Service. */ - private void scheduleNextIngestionRuns(@Nonnull final List<EntityResponse> ingestionSourceEntities) { + private void scheduleNextIngestionRuns( + @Nonnull final List<EntityResponse> ingestionSourceEntities) { for (final EntityResponse response : ingestionSourceEntities) { final Urn entityUrn = response.getUrn(); final EnvelopedAspectMap aspects = response.getAspects(); final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); // Invoke the "scheduleNextIngestionSourceExecution" (passed from parent) _scheduleNextIngestionSourceExecution.accept(entityUrn, ingestionSourceInfo); @@ -275,7 +310,8 @@ private void scheduleNextIngestionRuns(@Nonnull final List<EntityResponse> inges /** * A {@link Runnable} used to create Ingestion Execution Requests. * - * The expectation is that there's a downstream action which is listening and executing new Execution Requests. 
+ * <p>The expectation is that there's a downstream action which is listening and executing new + * Execution Requests. */ @VisibleForTesting static class ExecutionRequestRunnable implements Runnable { @@ -294,7 +330,8 @@ static class ExecutionRequestRunnable implements Runnable { private final Urn _ingestionSourceUrn; private final DataHubIngestionSourceInfo _ingestionSourceInfo; - // Used for clearing the "next execution" cache once a corresponding execution request has been created. + // Used for clearing the "next execution" cache once a corresponding execution request has been + // created. private final Runnable _deleteNextIngestionSourceExecution; // Used for re-scheduling the ingestion source once it has executed! @@ -307,27 +344,33 @@ public ExecutionRequestRunnable( @Nonnull final Urn ingestionSourceUrn, @Nonnull final DataHubIngestionSourceInfo ingestionSourceInfo, @Nonnull final Runnable deleteNextIngestionSourceExecution, - @Nonnull final BiConsumer<Urn, DataHubIngestionSourceInfo> scheduleNextIngestionSourceExecution) { + @Nonnull + final BiConsumer<Urn, DataHubIngestionSourceInfo> + scheduleNextIngestionSourceExecution) { _systemAuthentication = Objects.requireNonNull(systemAuthentication); _entityClient = Objects.requireNonNull(entityClient); _ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration); _ingestionSourceUrn = Objects.requireNonNull(ingestionSourceUrn); _ingestionSourceInfo = Objects.requireNonNull(ingestionSourceInfo); - _deleteNextIngestionSourceExecution = Objects.requireNonNull(deleteNextIngestionSourceExecution); - _scheduleNextIngestionSourceExecution = Objects.requireNonNull(scheduleNextIngestionSourceExecution); + _deleteNextIngestionSourceExecution = + Objects.requireNonNull(deleteNextIngestionSourceExecution); + _scheduleNextIngestionSourceExecution = + Objects.requireNonNull(scheduleNextIngestionSourceExecution); } @Override public void run() { - // Remove the next ingestion execution as we are going to execute it now. (no retry logic currently) + // Remove the next ingestion execution as we are going to execute it now. 
(no retry logic + // currently) _deleteNextIngestionSourceExecution.run(); try { - log.info(String.format( - "Creating Execution Request for scheduled Ingestion Source with urn %s", - _ingestionSourceUrn)); + log.info( + String.format( + "Creating Execution Request for scheduled Ingestion Source with urn %s", + _ingestionSourceUrn)); // Create a new Execution Request Proposal final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -341,18 +384,23 @@ public void run() { // Construct arguments (arguments) of the Execution Request final ExecutionRequestInput input = new ExecutionRequestInput(); input.setTask(RUN_INGEST_TASK_NAME); - input.setSource(new ExecutionRequestSource() - .setType(EXECUTION_REQUEST_SOURCE_NAME) - .setIngestionSource(_ingestionSourceUrn)); + input.setSource( + new ExecutionRequestSource() + .setType(EXECUTION_REQUEST_SOURCE_NAME) + .setIngestionSource(_ingestionSourceUrn)); input.setExecutorId(_ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); input.setRequestedAt(System.currentTimeMillis()); Map<String, String> arguments = new HashMap<>(); - String recipe = IngestionUtils.injectPipelineName(_ingestionSourceInfo.getConfig().getRecipe(), _ingestionSourceUrn.toString()); + String recipe = + IngestionUtils.injectPipelineName( + _ingestionSourceInfo.getConfig().getRecipe(), _ingestionSourceUrn.toString()); arguments.put(RECIPE_ARGUMENT_NAME, recipe); - arguments.put(VERSION_ARGUMENT_NAME, _ingestionSourceInfo.getConfig().hasVersion() - ? _ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion()); + arguments.put( + VERSION_ARGUMENT_NAME, + _ingestionSourceInfo.getConfig().hasVersion() + ? _ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); String debugMode = "false"; if (_ingestionSourceInfo.getConfig().hasDebugMode()) { debugMode = _ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; @@ -368,9 +416,11 @@ public void run() { _entityClient.ingestProposal(proposal, _systemAuthentication); } catch (Exception e) { // TODO: This type of thing should likely be proactively reported. - log.error(String.format( - "Caught exception while attempting to create Execution Request for Ingestion Source with urn %s. Will retry on next scheduled attempt.", - _ingestionSourceUrn), e); + log.error( + String.format( + "Caught exception while attempting to create Execution Request for Ingestion Source with urn %s. Will retry on next scheduled attempt.", + _ingestionSourceUrn), + e); } // 2. Re-Schedule the next execution request. @@ -380,11 +430,12 @@ public void run() { private String adjustCronInterval(final String origCronInterval) { Objects.requireNonNull(origCronInterval, "origCronInterval must not be null"); - // Typically we support 5-character cron. Spring's lib only supports 6 character cron so we make an adjustment here. + // Typically we support 5-character cron. Spring's lib only supports 6 character cron so we make + // an adjustment here. 
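    // Concretely: a 5-field expression such as "0 0 * * 1" (midnight every
    // Monday) becomes the 6-field "0 0 0 * * 1" after a seconds field of 0 is
    // prepended, which is the format Spring's CronSequenceGenerator expects;
    // expressions that already have six fields pass through unchanged.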
final String[] originalCronParts = origCronInterval.split(" "); if (originalCronParts.length == 5) { return String.format("0 %s", origCronInterval); } return origCronInterval; } -} \ No newline at end of file +} diff --git a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java index 51b7fe85f4922..4366ff64ae384 100644 --- a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java +++ b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java @@ -1,7 +1,8 @@ package com.datahub.metadata.ingestion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.UrnArray; @@ -10,11 +11,12 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.ingestion.DataHubIngestionSourceConfig; import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.query.ListResult; import java.util.Collections; import java.util.concurrent.Future; @@ -24,8 +26,6 @@ import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class IngestionSchedulerTest { private IngestionScheduler _ingestionScheduler; @@ -36,14 +36,17 @@ public void setupTest() throws Exception { // Init mocks. 
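    // The fixture below fakes two stored ingestion sources: urn ...:0 carries a
    // valid 5-field weekly cron ("0 0 * * 1"), while urn ...:1 carries a
    // deliberately malformed interval ("0 0 * * 1 BLUE GREEN"). Once init()
    // batch-loads both, only the valid source should land in the next-execution
    // cache, which is what the size == 1 assertions in the tests verify.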
final Urn ingestionSourceUrn1 = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); final DataHubIngestionSourceInfo info1 = new DataHubIngestionSourceInfo(); - info1.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 0 * * 1").setTimezone("America/Los Angeles")); // Run every monday + info1.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 0 * * 1") + .setTimezone("America/Los Angeles")); // Run every monday info1.setType("mysql"); info1.setName("My Test Source"); - info1.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + info1.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); final EnvelopedAspect envelopedAspect1 = new EnvelopedAspect(); envelopedAspect1.setName(Constants.INGESTION_INFO_ASPECT_NAME); @@ -54,19 +57,23 @@ public void setupTest() throws Exception { final EntityResponse entityResponse1 = Mockito.mock(EntityResponse.class); Mockito.when(entityResponse1.getUrn()).thenReturn(ingestionSourceUrn1); - Mockito.when(entityResponse1.getEntityName()).thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); + Mockito.when(entityResponse1.getEntityName()) + .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse1.getAspects()).thenReturn(map1); final Urn ingestionSourceUrn2 = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:1"); final DataHubIngestionSourceInfo info2 = new DataHubIngestionSourceInfo(); - info2.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 0 * * 1 BLUE GREEN").setTimezone("America/Los Angeles")); // Run every monday + info2.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 0 * * 1 BLUE GREEN") + .setTimezone("America/Los Angeles")); // Run every monday info2.setType("invalid"); info2.setName("My Invalid Source"); - info2.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + info2.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); final EnvelopedAspect envelopedAspect2 = new EnvelopedAspect(); envelopedAspect2.setName(Constants.INGESTION_INFO_ASPECT_NAME); @@ -77,35 +84,44 @@ public void setupTest() throws Exception { final EntityResponse entityResponse2 = Mockito.mock(EntityResponse.class); Mockito.when(entityResponse2.getUrn()).thenReturn(ingestionSourceUrn2); - Mockito.when(entityResponse2.getEntityName()).thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); + Mockito.when(entityResponse2.getEntityName()) + .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse2.getAspects()).thenReturn(map2); JavaEntityClient mockClient = Mockito.mock(JavaEntityClient.class); // Set up mocks for ingestion source batch fetching - Mockito.when(mockClient.list( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(Collections.emptyMap()), - Mockito.eq(0), - Mockito.eq(30), - Mockito.any() - )).thenReturn(new ListResult().setCount(30).setTotal(2).setStart(0).setEntities( - new UrnArray(ingestionSourceUrn1, ingestionSourceUrn2))); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(ingestionSourceUrn1, ingestionSourceUrn2)), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any() - )).thenReturn(ImmutableMap.of( - ingestionSourceUrn1, entityResponse1, 
- ingestionSourceUrn2, entityResponse2)); - - _ingestionScheduler = new IngestionScheduler( - Mockito.mock(Authentication.class), - mockClient, - Mockito.mock(IngestionConfiguration.class), - 1, - 1200); + Mockito.when( + mockClient.list( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(Collections.emptyMap()), + Mockito.eq(0), + Mockito.eq(30), + Mockito.any())) + .thenReturn( + new ListResult() + .setCount(30) + .setTotal(2) + .setStart(0) + .setEntities(new UrnArray(ingestionSourceUrn1, ingestionSourceUrn2))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(ingestionSourceUrn1, ingestionSourceUrn2)), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn( + ImmutableMap.of( + ingestionSourceUrn1, entityResponse1, + ingestionSourceUrn2, entityResponse2)); + + _ingestionScheduler = + new IngestionScheduler( + Mockito.mock(Authentication.class), + mockClient, + Mockito.mock(IngestionConfiguration.class), + 1, + 1200); _ingestionScheduler.init(); Thread.sleep(2000); // Sleep so the runnable can execute. (not ideal) } @@ -115,22 +131,27 @@ public void testInvokeUpdateExistingSchedule() throws Exception { assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); Urn ingestionSourceUrn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); - Future<?> beforeFuture = _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + Future<?> beforeFuture = + _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(ingestionSourceUrn, newInfo); assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); - Future<?> newFuture = _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + Future<?> newFuture = + _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); // Ensure that there is an overwritten future. 
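    // Re-scheduling an urn that is already cached must not add a second entry:
    // the previous entry is un-scheduled and replaced, so the cache size stays
    // at 1 while the Future instance itself changes, as asserted next.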
Assert.assertNotSame(beforeFuture, newFuture); @@ -142,14 +163,17 @@ public void testInvokeNewSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -163,14 +187,17 @@ public void testInvokeInvalidSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); // Invalid schedule set. - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("NOT A SCHEDULE").setTimezone("America/Los Angeles")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("NOT A SCHEDULE") + .setTimezone("America/Los Angeles")); // Run every monday newInfo.setType("snowflake"); newInfo.setName("My Snowflake Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that no changes have been made to next execution cache. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -186,11 +213,11 @@ public void testInvokeMissingSchedule() throws Exception { // No schedule set. newInfo.setType("mysql"); newInfo.setName("My Test Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the schedule has been removed. 
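    // Because scheduleNextIngestionSourceExecution() always un-schedules any
    // previous future first and only re-arms when the info actually carries a
    // schedule, invoking it with schedule-less info behaves like a cancellation:
    // the urn's entry disappears from the next-execution cache entirely.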
_ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -218,19 +245,24 @@ public void testSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("* * * * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("* * * * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); ScheduledFuture<?> future = _ingestionScheduler._nextIngestionSourceExecutionCache.get(urn); - Assert.assertTrue(future.getDelay(TimeUnit.SECONDS) < 60); // Next execution must always be less than a minute away. + Assert.assertTrue( + future.getDelay(TimeUnit.SECONDS) + < 60); // Next execution must always be less than a minute away. } @Test @@ -239,14 +271,17 @@ public void testUnscheduleAll() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:3"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("* * * * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("* * * * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source 2"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 2); diff --git a/li-utils/src/main/java/com/datahub/util/ModelUtils.java b/li-utils/src/main/java/com/datahub/util/ModelUtils.java index 65379d353de86..538a0d2dfdeb0 100644 --- a/li-utils/src/main/java/com/datahub/util/ModelUtils.java +++ b/li-utils/src/main/java/com/datahub/util/ModelUtils.java @@ -29,7 +29,6 @@ import javax.annotation.Nonnull; import org.reflections.Reflections; - public class ModelUtils { private static final ClassLoader CLASS_LOADER = DummySnapshot.class.getClassLoader(); @@ -69,13 +68,15 @@ public static Class<? extends RecordTemplate> getAspectClass(@Nonnull String asp * @return a set of supported aspects */ @Nonnull - public static <ASPECT_UNION extends UnionTemplate> Set<Class<? extends RecordTemplate>> getValidAspectTypes( - @Nonnull Class<ASPECT_UNION> aspectUnionClass) { + public static <ASPECT_UNION extends UnionTemplate> + Set<Class<? extends RecordTemplate>> getValidAspectTypes( + @Nonnull Class<ASPECT_UNION> aspectUnionClass) { AspectValidator.validateAspectUnionSchema(aspectUnionClass); Set<Class<? 
extends RecordTemplate>> validTypes = new HashSet<>(); - for (UnionDataSchema.Member member : ValidationUtils.getUnionSchema(aspectUnionClass).getMembers()) { + for (UnionDataSchema.Member member : + ValidationUtils.getUnionSchema(aspectUnionClass).getMembers()) { if (member.getType().getType() == DataSchema.Type.RECORD) { String fqcn = ((RecordDataSchema) member.getType()).getBindingName(); try { @@ -89,11 +90,10 @@ public static <ASPECT_UNION extends UnionTemplate> Set<Class<? extends RecordTem return validTypes; } - /** - * Gets a {@link Class} from its FQCN. - */ + /** Gets a {@link Class} from its FQCN. */ @Nonnull - public static <T> Class<? extends T> getClassFromName(@Nonnull String className, @Nonnull Class<T> parentClass) { + public static <T> Class<? extends T> getClassFromName( + @Nonnull String className, @Nonnull Class<T> parentClass) { try { return CLASS_LOADER.loadClass(className).asSubclass(parentClass); } catch (ClassNotFoundException e) { @@ -108,8 +108,10 @@ public static <T> Class<? extends T> getClassFromName(@Nonnull String className, * @return snapshot class that extends {@link RecordTemplate}, associated with className */ @Nonnull - public static Class<? extends RecordTemplate> getMetadataSnapshotClassFromName(@Nonnull String className) { - Class<? extends RecordTemplate> snapshotClass = getClassFromName(className, RecordTemplate.class); + public static Class<? extends RecordTemplate> getMetadataSnapshotClassFromName( + @Nonnull String className) { + Class<? extends RecordTemplate> snapshotClass = + getClassFromName(className, RecordTemplate.class); SnapshotValidator.validateSnapshotSchema(snapshotClass); return snapshotClass; } @@ -122,13 +124,16 @@ public static Class<? extends RecordTemplate> getMetadataSnapshotClassFromName(@ * @return the extracted {@link Urn} */ @Nonnull - public static <SNAPSHOT extends RecordTemplate> Urn getUrnFromSnapshot(@Nonnull SNAPSHOT snapshot) { + public static <SNAPSHOT extends RecordTemplate> Urn getUrnFromSnapshot( + @Nonnull SNAPSHOT snapshot) { SnapshotValidator.validateSnapshotSchema(snapshot.getClass()); - return RecordUtils.getRecordTemplateField(snapshot, "urn", urnClassForSnapshot(snapshot.getClass())); + return RecordUtils.getRecordTemplateField( + snapshot, "urn", urnClassForSnapshot(snapshot.getClass())); } /** - * Similar to {@link #getUrnFromSnapshot(RecordTemplate)} but extracts from a Snapshot union instead. + * Similar to {@link #getUrnFromSnapshot(RecordTemplate)} but extracts from a Snapshot union + * instead. 
*/ @Nonnull public static Urn getUrnFromSnapshotUnion(@Nonnull UnionTemplate snapshotUnion) { @@ -164,9 +169,11 @@ public static Urn getUrnFromDeltaUnion(@Nonnull UnionTemplate deltaUnion) { * @return the extracted {@link Urn} */ @Nonnull - public static <DOCUMENT extends RecordTemplate> Urn getUrnFromDocument(@Nonnull DOCUMENT document) { + public static <DOCUMENT extends RecordTemplate> Urn getUrnFromDocument( + @Nonnull DOCUMENT document) { DocumentValidator.validateDocumentSchema(document.getClass()); - return RecordUtils.getRecordTemplateField(document, "urn", urnClassForDocument(document.getClass())); + return RecordUtils.getRecordTemplateField( + document, "urn", urnClassForDocument(document.getClass())); } /** @@ -179,37 +186,35 @@ public static <DOCUMENT extends RecordTemplate> Urn getUrnFromDocument(@Nonnull @Nonnull public static <ENTITY extends RecordTemplate> Urn getUrnFromEntity(@Nonnull ENTITY entity) { EntityValidator.validateEntitySchema(entity.getClass()); - return RecordUtils.getRecordTemplateField(entity, "urn", urnClassForDocument(entity.getClass())); + return RecordUtils.getRecordTemplateField( + entity, "urn", urnClassForDocument(entity.getClass())); } /** * Extracts the fields with type urn from a relationship. * * @param relationship the relationship to extract urn from - * @param <RELATIONSHIP> must be a valid relationship model defined in com.linkedin.metadata.relationship + * @param <RELATIONSHIP> must be a valid relationship model defined in + * com.linkedin.metadata.relationship * @param fieldName name of the field with type urn * @return the extracted {@link Urn} */ @Nonnull - private static <RELATIONSHIP extends RecordTemplate> Urn getUrnFromRelationship(@Nonnull RELATIONSHIP relationship, - @Nonnull String fieldName) { + private static <RELATIONSHIP extends RecordTemplate> Urn getUrnFromRelationship( + @Nonnull RELATIONSHIP relationship, @Nonnull String fieldName) { RelationshipValidator.validateRelationshipSchema(relationship.getClass()); - return RecordUtils.getRecordTemplateField(relationship, fieldName, - urnClassForRelationship(relationship.getClass(), fieldName)); + return RecordUtils.getRecordTemplateField( + relationship, fieldName, urnClassForRelationship(relationship.getClass(), fieldName)); } - /** - * Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. - */ + /** Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. */ @Nonnull public static <RELATIONSHIP extends RecordTemplate> Urn getSourceUrnFromRelationship( @Nonnull RELATIONSHIP relationship) { return getUrnFromRelationship(relationship, "source"); } - /** - * Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. - */ + /** Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. 
*/ @Nonnull public static <RELATIONSHIP extends RecordTemplate> Urn getDestinationUrnFromRelationship( @Nonnull RELATIONSHIP relationship) { @@ -240,8 +245,9 @@ public static <SNAPSHOT extends RecordTemplate> List<RecordTemplate> getAspectsF * @return the extracted aspect */ @Nonnull - public static <SNAPSHOT extends RecordTemplate, ASPECT extends DataTemplate> Optional<ASPECT> getAspectFromSnapshot( - @Nonnull SNAPSHOT snapshot, @Nonnull Class<ASPECT> aspectClass) { + public static <SNAPSHOT extends RecordTemplate, ASPECT extends DataTemplate> + Optional<ASPECT> getAspectFromSnapshot( + @Nonnull SNAPSHOT snapshot, @Nonnull Class<ASPECT> aspectClass) { return getAspectsFromSnapshot(snapshot).stream() .filter(aspect -> aspect.getClass().equals(aspectClass)) @@ -250,10 +256,12 @@ public static <SNAPSHOT extends RecordTemplate, ASPECT extends DataTemplate> Opt } /** - * Similar to {@link #getAspectsFromSnapshot(RecordTemplate)} but extracts from a snapshot union instead. + * Similar to {@link #getAspectsFromSnapshot(RecordTemplate)} but extracts from a snapshot union + * instead. */ @Nonnull - public static List<RecordTemplate> getAspectsFromSnapshotUnion(@Nonnull UnionTemplate snapshotUnion) { + public static List<RecordTemplate> getAspectsFromSnapshotUnion( + @Nonnull UnionTemplate snapshotUnion) { return getAspects(RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion)); } @@ -261,10 +269,12 @@ public static List<RecordTemplate> getAspectsFromSnapshotUnion(@Nonnull UnionTem private static List<RecordTemplate> getAspects(@Nonnull RecordTemplate snapshot) { final Class<? extends WrappingArrayTemplate> clazz = getAspectsArrayClass(snapshot.getClass()); - WrappingArrayTemplate aspectArray = RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); + WrappingArrayTemplate aspectArray = + RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); final List<RecordTemplate> aspects = new ArrayList<>(); - aspectArray.forEach(item -> aspects.add(RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item))); + aspectArray.forEach( + item -> aspects.add(RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item))); return aspects; } @@ -280,12 +290,17 @@ private static List<RecordTemplate> getAspects(@Nonnull RecordTemplate snapshot) * @return the created snapshot */ @Nonnull - public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> SNAPSHOT newSnapshot( - @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull URN urn, @Nonnull List<ASPECT_UNION> aspects) { + public static < + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> + SNAPSHOT newSnapshot( + @Nonnull Class<SNAPSHOT> snapshotClass, + @Nonnull URN urn, + @Nonnull List<ASPECT_UNION> aspects) { SnapshotValidator.validateSnapshotSchema(snapshotClass); - final Class<? extends WrappingArrayTemplate> aspectArrayClass = getAspectsArrayClass(snapshotClass); + final Class<? extends WrappingArrayTemplate> aspectArrayClass = + getAspectsArrayClass(snapshotClass); try { final SNAPSHOT snapshot = snapshotClass.newInstance(); @@ -300,11 +315,15 @@ public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTempla } @Nonnull - private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTemplate> getAspectsArrayClass( - @Nonnull Class<SNAPSHOT> snapshotClass) { + private static <SNAPSHOT extends RecordTemplate> + Class<? 
extends WrappingArrayTemplate> getAspectsArrayClass( + @Nonnull Class<SNAPSHOT> snapshotClass) { try { - return snapshotClass.getMethod("getAspects").getReturnType().asSubclass(WrappingArrayTemplate.class); + return snapshotClass + .getMethod("getAspects") + .getReturnType() + .asSubclass(WrappingArrayTemplate.class); } catch (NoSuchMethodException | ClassCastException e) { throw new RuntimeException((e)); } @@ -320,8 +339,9 @@ private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTe * @return the created aspect union */ @Nonnull - public static <ASPECT_UNION extends UnionTemplate, ASPECT extends RecordTemplate> ASPECT_UNION newAspectUnion( - @Nonnull Class<ASPECT_UNION> aspectUnionClass, @Nonnull ASPECT aspect) { + public static <ASPECT_UNION extends UnionTemplate, ASPECT extends RecordTemplate> + ASPECT_UNION newAspectUnion( + @Nonnull Class<ASPECT_UNION> aspectUnionClass, @Nonnull ASPECT aspect) { AspectValidator.validateAspectUnionSchema(aspectUnionClass); @@ -334,60 +354,57 @@ public static <ASPECT_UNION extends UnionTemplate, ASPECT extends RecordTemplate } } - /** - * Gets the expected aspect class for a specific kind of snapshot. - */ + /** Gets the expected aspect class for a specific kind of snapshot. */ @Nonnull public static Class<? extends UnionTemplate> aspectClassForSnapshot( @Nonnull Class<? extends RecordTemplate> snapshotClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); - String aspectClassName = ((TyperefDataSchema) ((ArrayDataSchema) ValidationUtils.getRecordSchema(snapshotClass) - .getField("aspects") - .getType()).getItems()).getBindingName(); + String aspectClassName = + ((TyperefDataSchema) + ((ArrayDataSchema) + ValidationUtils.getRecordSchema(snapshotClass) + .getField("aspects") + .getType()) + .getItems()) + .getBindingName(); return getClassFromName(aspectClassName, UnionTemplate.class); } - /** - * Gets the expected {@link Urn} class for a specific kind of entity. - */ + /** Gets the expected {@link Urn} class for a specific kind of entity. */ @Nonnull - public static Class<? extends Urn> urnClassForEntity(@Nonnull Class<? extends RecordTemplate> entityClass) { + public static Class<? extends Urn> urnClassForEntity( + @Nonnull Class<? extends RecordTemplate> entityClass) { EntityValidator.validateEntitySchema(entityClass); return urnClassForField(entityClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of snapshot. - */ + /** Gets the expected {@link Urn} class for a specific kind of snapshot. */ @Nonnull - public static Class<? extends Urn> urnClassForSnapshot(@Nonnull Class<? extends RecordTemplate> snapshotClass) { + public static Class<? extends Urn> urnClassForSnapshot( + @Nonnull Class<? extends RecordTemplate> snapshotClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); return urnClassForField(snapshotClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of delta. - */ + /** Gets the expected {@link Urn} class for a specific kind of delta. */ @Nonnull - public static Class<? extends Urn> urnClassForDelta(@Nonnull Class<? extends RecordTemplate> deltaClass) { + public static Class<? extends Urn> urnClassForDelta( + @Nonnull Class<? extends RecordTemplate> deltaClass) { DeltaValidator.validateDeltaSchema(deltaClass); return urnClassForField(deltaClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of search document. - */ + /** Gets the expected {@link Urn} class for a specific kind of search document. 
*/ @Nonnull - public static Class<? extends Urn> urnClassForDocument(@Nonnull Class<? extends RecordTemplate> documentClass) { + public static Class<? extends Urn> urnClassForDocument( + @Nonnull Class<? extends RecordTemplate> documentClass) { DocumentValidator.validateDocumentSchema(documentClass); return urnClassForField(documentClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of relationship. - */ + /** Gets the expected {@link Urn} class for a specific kind of relationship. */ @Nonnull private static Class<? extends Urn> urnClassForRelationship( @Nonnull Class<? extends RecordTemplate> relationshipClass, @Nonnull String fieldName) { @@ -405,7 +422,8 @@ public static Class<? extends Urn> sourceUrnClassForRelationship( } /** - * Gets the expected {@link Urn} class for the destination field of a specific kind of relationship. + * Gets the expected {@link Urn} class for the destination field of a specific kind of + * relationship. */ @Nonnull public static Class<? extends Urn> destinationUrnClassForRelationship( @@ -414,35 +432,37 @@ public static Class<? extends Urn> destinationUrnClassForRelationship( } @Nonnull - private static Class<? extends Urn> urnClassForField(@Nonnull Class<? extends RecordTemplate> recordClass, - @Nonnull String fieldName) { - String urnClassName = ((DataMap) ValidationUtils.getRecordSchema(recordClass) - .getField(fieldName) - .getType() - .getProperties() - .get("java")).getString("class"); + private static Class<? extends Urn> urnClassForField( + @Nonnull Class<? extends RecordTemplate> recordClass, @Nonnull String fieldName) { + String urnClassName = + ((DataMap) + ValidationUtils.getRecordSchema(recordClass) + .getField(fieldName) + .getType() + .getProperties() + .get("java")) + .getString("class"); return getClassFromName(urnClassName, Urn.class); } - /** - * Validates a specific snapshot-aspect combination. - */ - public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate> void validateSnapshotAspect( - @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<ASPECT_UNION> aspectUnionClass) { + /** Validates a specific snapshot-aspect combination. */ + public static <SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate> + void validateSnapshotAspect( + @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<ASPECT_UNION> aspectUnionClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); AspectValidator.validateAspectUnionSchema(aspectUnionClass); // Make sure that SNAPSHOT's "aspects" array field contains ASPECT_UNION type. if (!aspectClassForSnapshot(snapshotClass).equals(aspectUnionClass)) { - throw new InvalidSchemaException(aspectUnionClass.getCanonicalName() + " is not a supported aspect class of " - + snapshotClass.getCanonicalName()); + throw new InvalidSchemaException( + aspectUnionClass.getCanonicalName() + + " is not a supported aspect class of " + + snapshotClass.getCanonicalName()); } } - /** - * Validates a specific snapshot-URN combination. - */ + /** Validates a specific snapshot-URN combination. 
*/ public static <SNAPSHOT extends RecordTemplate, URN extends Urn> void validateSnapshotUrn( @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<URN> urnClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); @@ -450,7 +470,9 @@ public static <SNAPSHOT extends RecordTemplate, URN extends Urn> void validateSn // Make sure that SNAPSHOT's "urn" field uses the correct class or subclasses if (!urnClassForSnapshot(snapshotClass).isAssignableFrom(urnClass)) { throw new InvalidSchemaException( - urnClass.getCanonicalName() + " is not a supported URN class of " + snapshotClass.getCanonicalName()); + urnClass.getCanonicalName() + + " is not a supported URN class of " + + snapshotClass.getCanonicalName()); } } @@ -459,13 +481,16 @@ public static <SNAPSHOT extends RecordTemplate, URN extends Urn> void validateSn * * @param relationshipUnionClass the type of relationship union to create * @param relationship the relationship to set - * @param <RELATIONSHIP_UNION> must be a valid relationship union defined in com.linkedin.metadata.relationship + * @param <RELATIONSHIP_UNION> must be a valid relationship union defined in + * com.linkedin.metadata.relationship * @param <RELATIONSHIP> must be a supported relationship type in ASPECT_UNION * @return the created relationship union */ @Nonnull - public static <RELATIONSHIP_UNION extends UnionTemplate, RELATIONSHIP extends RecordTemplate> RELATIONSHIP_UNION newRelationshipUnion( - @Nonnull Class<RELATIONSHIP_UNION> relationshipUnionClass, @Nonnull RELATIONSHIP relationship) { + public static <RELATIONSHIP_UNION extends UnionTemplate, RELATIONSHIP extends RecordTemplate> + RELATIONSHIP_UNION newRelationshipUnion( + @Nonnull Class<RELATIONSHIP_UNION> relationshipUnionClass, + @Nonnull RELATIONSHIP relationship) { RelationshipValidator.validateRelationshipUnionSchema(relationshipUnionClass); @@ -478,20 +503,16 @@ public static <RELATIONSHIP_UNION extends UnionTemplate, RELATIONSHIP extends Re } } - /** - * Returns all entity classes. - */ + /** Returns all entity classes. */ @Nonnull public static Set<Class<? extends RecordTemplate>> getAllEntities() { - return new Reflections("com.linkedin.metadata.entity").getSubTypesOf(RecordTemplate.class) - .stream() - .filter(EntityValidator::isValidEntitySchema) - .collect(Collectors.toSet()); + return new Reflections("com.linkedin.metadata.entity") + .getSubTypesOf(RecordTemplate.class).stream() + .filter(EntityValidator::isValidEntitySchema) + .collect(Collectors.toSet()); } - /** - * Get entity type from urn class. - */ + /** Get entity type from urn class. */ @Nonnull public static String getEntityTypeFromUrnClass(@Nonnull Class<? extends Urn> urnClass) { try { @@ -501,13 +522,14 @@ public static String getEntityTypeFromUrnClass(@Nonnull Class<? extends Urn> urn } } - /** - * Get aspect specific kafka topic name from urn and aspect classes. - */ + /** Get aspect specific kafka topic name from urn and aspect classes. 
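For context, the topic-name helper below derives an aspect-specific MAE topic purely from the URN's entity type and the aspect's simple class name, joined under METADATA_AUDIT_EVENT_PREFIX. A minimal sketch of the resulting convention, assuming a hypothetical dataset URN and an Ownership aspect (the prefix constant's concrete value is not shown in this patch):

    import com.datahub.util.ModelUtils;
    import com.linkedin.common.FabricType;
    import com.linkedin.common.Ownership;
    import com.linkedin.common.urn.DataPlatformUrn;
    import com.linkedin.common.urn.DatasetUrn;

    // Inside any method:
    DatasetUrn urn = new DatasetUrn(new DataPlatformUrn("mysql"), "demo.table", FabricType.PROD);
    Ownership aspect = new Ownership();
    // Entity type "dataset" and class name "Ownership" are upper-cased, so this
    // yields "<METADATA_AUDIT_EVENT_PREFIX>_DATASET_OWNERSHIP".
    String topic = ModelUtils.getAspectSpecificMAETopicName(urn, aspect);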
*/ @Nonnull - public static <URN extends Urn, ASPECT extends RecordTemplate> String getAspectSpecificMAETopicName(@Nonnull URN urn, - @Nonnull ASPECT newValue) { - return String.format("%s_%s_%s", METADATA_AUDIT_EVENT_PREFIX, urn.getEntityType().toUpperCase(), + public static <URN extends Urn, ASPECT extends RecordTemplate> + String getAspectSpecificMAETopicName(@Nonnull URN urn, @Nonnull ASPECT newValue) { + return String.format( + "%s_%s_%s", + METADATA_AUDIT_EVENT_PREFIX, + urn.getEntityType().toUpperCase(), newValue.getClass().getSimpleName().toUpperCase()); } @@ -521,8 +543,9 @@ public static <URN extends Urn, ASPECT extends RecordTemplate> String getAspectS * @return the created entity union */ @Nonnull - public static <ENTITY_UNION extends UnionTemplate, ENTITY extends RecordTemplate> ENTITY_UNION newEntityUnion( - @Nonnull Class<ENTITY_UNION> entityUnionClass, @Nonnull ENTITY entity) { + public static <ENTITY_UNION extends UnionTemplate, ENTITY extends RecordTemplate> + ENTITY_UNION newEntityUnion( + @Nonnull Class<ENTITY_UNION> entityUnionClass, @Nonnull ENTITY entity) { EntityValidator.validateEntityUnionSchema(entityUnionClass); diff --git a/li-utils/src/main/java/com/datahub/util/RecordUtils.java b/li-utils/src/main/java/com/datahub/util/RecordUtils.java index a9f8a07742491..d57875f79de61 100644 --- a/li-utils/src/main/java/com/datahub/util/RecordUtils.java +++ b/li-utils/src/main/java/com/datahub/util/RecordUtils.java @@ -33,10 +33,10 @@ import javax.annotation.Nullable; import org.apache.commons.lang.StringUtils; - public class RecordUtils { - private static final JacksonDataTemplateCodec DATA_TEMPLATE_CODEC = new JacksonDataTemplateCodec(); + private static final JacksonDataTemplateCodec DATA_TEMPLATE_CODEC = + new JacksonDataTemplateCodec(); private static final String ARRAY_WILDCARD = "*"; private static final Pattern LEADING_SPACESLASH_PATTERN = Pattern.compile("^[/ ]+"); private static final Pattern TRAILING_SPACESLASH_PATTERN = Pattern.compile("[/ ]+$"); @@ -44,10 +44,11 @@ public class RecordUtils { /** * Using in-memory hash map to store the get/is methods of the schema fields of RecordTemplate. - * Here map has RecordTemplate class as key, value being another map of field name with the associated get/is method + * Here map has RecordTemplate class as key, value being another map of field name with the + * associated get/is method */ - private static final ConcurrentHashMap<Class<? extends RecordTemplate>, Map<String, Method>> METHOD_CACHE = - new ConcurrentHashMap<>(); + private static final ConcurrentHashMap<Class<? 
extends RecordTemplate>, Map<String, Method>> + METHOD_CACHE = new ConcurrentHashMap<>(); private RecordUtils() { // Util class @@ -72,7 +73,8 @@ public static String toJsonString(@Nonnull RecordTemplate recordTemplate) { try { return DATA_TEMPLATE_CODEC.mapToString(recordTemplate.data()); } catch (IOException e) { - throw new ModelConversionException("Failed to serialize RecordTemplate: " + recordTemplate.toString()); + throw new ModelConversionException( + "Failed to serialize RecordTemplate: " + recordTemplate.toString()); } } @@ -85,7 +87,8 @@ public static String toJsonString(@Nonnull RecordTemplate recordTemplate) { * @return the created {@link RecordTemplate} */ @Nonnull - public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> type, @Nonnull String jsonString) { + public static <T extends RecordTemplate> T toRecordTemplate( + @Nonnull Class<T> type, @Nonnull String jsonString) { DataMap dataMap; try { dataMap = DATA_TEMPLATE_CODEC.stringToMap(jsonString); @@ -105,18 +108,21 @@ public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> ty * @return the created {@link RecordTemplate} */ @Nonnull - public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> type, @Nonnull DataMap dataMap) { + public static <T extends RecordTemplate> T toRecordTemplate( + @Nonnull Class<T> type, @Nonnull DataMap dataMap) { Constructor<T> constructor; try { constructor = type.getConstructor(DataMap.class); } catch (NoSuchMethodException e) { - throw new ModelConversionException("Unable to find constructor for " + type.getCanonicalName(), e); + throw new ModelConversionException( + "Unable to find constructor for " + type.getCanonicalName(), e); } try { return constructor.newInstance(dataMap); } catch (Exception e) { - throw new ModelConversionException("Failed to invoke constructor for " + type.getCanonicalName(), e); + throw new ModelConversionException( + "Failed to invoke constructor for " + type.getCanonicalName(), e); } } @@ -128,7 +134,8 @@ public static <T extends RecordTemplate> T toRecordTemplate(@Nonnull Class<T> ty * @return the created {@link RecordTemplate} */ @Nonnull - public static RecordTemplate toRecordTemplate(@Nonnull String className, @Nonnull DataMap dataMap) { + public static RecordTemplate toRecordTemplate( + @Nonnull String className, @Nonnull DataMap dataMap) { Class<? extends RecordTemplate> clazz; try { clazz = Class.forName(className).asSubclass(RecordTemplate.class); @@ -145,34 +152,41 @@ public static RecordTemplate toRecordTemplate(@Nonnull String className, @Nonnul * @param entity the entity value. * @param aspectClass the aspect class. * @return the aspect which is included in the entity. 
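The toJsonString and toRecordTemplate helpers above are inverses: one serializes the RecordTemplate's backing DataMap to JSON, the other rebuilds a typed instance by reflecting on the single-DataMap constructor every generated template exposes. A minimal round-trip sketch (Ownership is just an illustrative aspect):

    import com.datahub.util.RecordUtils;
    import com.linkedin.common.Ownership;

    // Inside any method:
    Ownership original = new Ownership();
    String json = RecordUtils.toJsonString(original);
    Ownership copy = RecordUtils.toRecordTemplate(Ownership.class, json);
    // copy.equals(original) holds, since both wrap equal DataMaps.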
- * */ + */ @Nonnull - public static <ASPECT extends RecordTemplate, ENTITY extends RecordTemplate> ASPECT extractAspectFromSingleAspectEntity( - @Nonnull ENTITY entity, @Nonnull Class<ASPECT> aspectClass) { + public static <ASPECT extends RecordTemplate, ENTITY extends RecordTemplate> + ASPECT extractAspectFromSingleAspectEntity( + @Nonnull ENTITY entity, @Nonnull Class<ASPECT> aspectClass) { // Create an empty aspect to extract it's field names final Constructor<ASPECT> constructor; try { @SuppressWarnings("rawtypes") - final Class[] constructorParamArray = new Class[]{}; + final Class[] constructorParamArray = new Class[] {}; constructor = aspectClass.getConstructor(constructorParamArray); } catch (NoSuchMethodException e) { - throw new RuntimeException("Exception occurred while trying to get the default constructor for the aspect. ", e); + throw new RuntimeException( + "Exception occurred while trying to get the default constructor for the aspect. ", e); } final ASPECT aspect; try { aspect = constructor.newInstance(); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException("Exception occurred while creating an instance of the aspect. ", e); + throw new RuntimeException( + "Exception occurred while creating an instance of the aspect. ", e); } final Set<String> aspectFields = - aspect.schema().getFields().stream().map(RecordDataSchema.Field::getName).collect(Collectors.toSet()); + aspect.schema().getFields().stream() + .map(RecordDataSchema.Field::getName) + .collect(Collectors.toSet()); // Get entity's field names and only keep fields which occur in the entity and not in the aspect final Set<String> entityFields = - entity.schema().getFields().stream().map(RecordDataSchema.Field::getName).collect(Collectors.toSet()); + entity.schema().getFields().stream() + .map(RecordDataSchema.Field::getName) + .collect(Collectors.toSet()); entityFields.removeAll(aspectFields); // remove non aspect fields from entity's cloned datamap and use it to create an aspect @@ -194,13 +208,15 @@ public static <ASPECT extends RecordTemplate, ENTITY extends RecordTemplate> ASP * @return the field */ @Nonnull - public static <T extends RecordTemplate> RecordDataSchema.Field getRecordDataSchemaField(@Nonnull T recordTemplate, - @Nonnull String fieldName) { + public static <T extends RecordTemplate> RecordDataSchema.Field getRecordDataSchemaField( + @Nonnull T recordTemplate, @Nonnull String fieldName) { RecordDataSchema.Field field = recordTemplate.schema().getField(fieldName); if (field == null) { throw new InvalidSchemaException( - String.format("Missing expected field '%s' in %s", fieldName, recordTemplate.getClass().getCanonicalName())); + String.format( + "Missing expected field '%s' in %s", + fieldName, recordTemplate.getClass().getCanonicalName())); } return field; } @@ -212,14 +228,20 @@ public static <T extends RecordTemplate> RecordDataSchema.Field getRecordDataSch * @param fieldName the name of the field to update * @param value the value to set */ - public static <T extends RecordTemplate, V> void setRecordTemplatePrimitiveField(@Nonnull T recordTemplate, - @Nonnull String fieldName, @Nonnull V value) { + public static <T extends RecordTemplate, V> void setRecordTemplatePrimitiveField( + @Nonnull T recordTemplate, @Nonnull String fieldName, @Nonnull V value) { final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method putDirect = - getProtectedMethod(RecordTemplate.class, "putDirect", 
RecordDataSchema.Field.class, Class.class, Object.class, + getProtectedMethod( + RecordTemplate.class, + "putDirect", + RecordDataSchema.Field.class, + Class.class, + Object.class, SetMode.class); - invokeProtectedMethod(recordTemplate, putDirect, field, value.getClass(), value, SetMode.DISALLOW_NULL); + invokeProtectedMethod( + recordTemplate, putDirect, field, value.getClass(), value, SetMode.DISALLOW_NULL); } /** @@ -234,9 +256,15 @@ public static <T extends RecordTemplate, V> void setRecordTemplateComplexField( final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method putWrapped = - getProtectedMethod(RecordTemplate.class, "putWrapped", RecordDataSchema.Field.class, Class.class, - DataTemplate.class, SetMode.class); - invokeProtectedMethod(recordTemplate, putWrapped, field, value.getClass(), value, SetMode.DISALLOW_NULL); + getProtectedMethod( + RecordTemplate.class, + "putWrapped", + RecordDataSchema.Field.class, + Class.class, + DataTemplate.class, + SetMode.class); + invokeProtectedMethod( + recordTemplate, putWrapped, field, value.getClass(), value, SetMode.DISALLOW_NULL); } /** @@ -248,14 +276,19 @@ public static <T extends RecordTemplate, V> void setRecordTemplateComplexField( * @return the value for the field */ @Nonnull - public static <T extends RecordTemplate, V> V getRecordTemplateField(@Nonnull T recordTemplate, - @Nonnull String fieldName, @Nonnull Class<V> valueClass) { + public static <T extends RecordTemplate, V> V getRecordTemplateField( + @Nonnull T recordTemplate, @Nonnull String fieldName, @Nonnull Class<V> valueClass) { final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method obtainCustomType = - getProtectedMethod(RecordTemplate.class, "obtainCustomType", RecordDataSchema.Field.class, Class.class, + getProtectedMethod( + RecordTemplate.class, + "obtainCustomType", + RecordDataSchema.Field.class, + Class.class, GetMode.class); - return (V) invokeProtectedMethod(recordTemplate, obtainCustomType, field, valueClass, GetMode.STRICT); + return (V) + invokeProtectedMethod(recordTemplate, obtainCustomType, field, valueClass, GetMode.STRICT); } /** @@ -272,9 +305,14 @@ public static <T extends RecordTemplate, V extends DataTemplate> V getRecordTemp final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method obtainWrapped = - getProtectedMethod(RecordTemplate.class, "obtainWrapped", RecordDataSchema.Field.class, Class.class, + getProtectedMethod( + RecordTemplate.class, + "obtainWrapped", + RecordDataSchema.Field.class, + Class.class, GetMode.class); - return (V) invokeProtectedMethod(recordTemplate, obtainWrapped, field, valueClass, GetMode.STRICT); + return (V) + invokeProtectedMethod(recordTemplate, obtainWrapped, field, valueClass, GetMode.STRICT); } /** @@ -290,22 +328,33 @@ public static <V extends RecordTemplate> RecordTemplate getSelectedRecordTemplat final DataSchema dataSchema = unionTemplate.memberType(); if (!(dataSchema instanceof RecordDataSchema)) { throw new InvalidSchemaException( - "The currently selected member isn't a RecordTemplate in " + unionTemplate.getClass().getCanonicalName()); + "The currently selected member isn't a RecordTemplate in " + + unionTemplate.getClass().getCanonicalName()); } final Class<? 
extends RecordTemplate> clazz = - ModelUtils.getClassFromName(((RecordDataSchema) dataSchema).getBindingName(), RecordTemplate.class); + ModelUtils.getClassFromName( + ((RecordDataSchema) dataSchema).getBindingName(), RecordTemplate.class); final Method obtainWrapped = - getProtectedMethod(UnionTemplate.class, "obtainWrapped", DataSchema.class, Class.class, String.class); - final List<UnionDataSchema.Member> members = ((UnionDataSchema) unionTemplate.schema()).getMembers(); + getProtectedMethod( + UnionTemplate.class, "obtainWrapped", DataSchema.class, Class.class, String.class); + final List<UnionDataSchema.Member> members = + ((UnionDataSchema) unionTemplate.schema()).getMembers(); for (UnionDataSchema.Member m : members) { - if (m.hasAlias() && m.getType().getDereferencedDataSchema().getUnionMemberKey().equals(clazz.getName())) { - return (V) invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, m.getAlias()); + if (m.hasAlias() + && m.getType().getDereferencedDataSchema().getUnionMemberKey().equals(clazz.getName())) { + return (V) + invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, m.getAlias()); } } - return (V) invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, - ((RecordDataSchema) dataSchema).getFullName()); + return (V) + invokeProtectedMethod( + unionTemplate, + obtainWrapped, + dataSchema, + clazz, + ((RecordDataSchema) dataSchema).getFullName()); } /** @@ -320,25 +369,44 @@ public static <V extends RecordTemplate> RecordTemplate setSelectedRecordTemplat @Nonnull UnionTemplate unionTemplate, @Nonnull RecordTemplate selectedMember) { final Method selectWrapped = - getProtectedMethod(UnionTemplate.class, "selectWrapped", DataSchema.class, Class.class, String.class, + getProtectedMethod( + UnionTemplate.class, + "selectWrapped", + DataSchema.class, + Class.class, + String.class, DataTemplate.class); - final List<UnionDataSchema.Member> members = ((UnionDataSchema) unionTemplate.schema()).getMembers(); + final List<UnionDataSchema.Member> members = + ((UnionDataSchema) unionTemplate.schema()).getMembers(); for (UnionDataSchema.Member m : members) { - if (m.hasAlias() && m.getType() - .getDereferencedDataSchema() - .getUnionMemberKey() - .equals(selectedMember.getClass().getName())) { - return (V) invokeProtectedMethod(unionTemplate, selectWrapped, selectedMember.schema(), - selectedMember.getClass(), m.getAlias(), selectedMember); + if (m.hasAlias() + && m.getType() + .getDereferencedDataSchema() + .getUnionMemberKey() + .equals(selectedMember.getClass().getName())) { + return (V) + invokeProtectedMethod( + unionTemplate, + selectWrapped, + selectedMember.schema(), + selectedMember.getClass(), + m.getAlias(), + selectedMember); } } - return (V) invokeProtectedMethod(unionTemplate, selectWrapped, selectedMember.schema(), selectedMember.getClass(), - selectedMember.schema().getUnionMemberKey(), selectedMember); + return (V) + invokeProtectedMethod( + unionTemplate, + selectWrapped, + selectedMember.schema(), + selectedMember.getClass(), + selectedMember.schema().getUnionMemberKey(), + selectedMember); } @Nonnull - private static Method getProtectedMethod(@Nonnull Class clazz, @Nonnull String methodName, - @Nonnull Class<?>... parameterTypes) { + private static Method getProtectedMethod( + @Nonnull Class clazz, @Nonnull String methodName, @Nonnull Class<?>... 
parameterTypes) { try { return clazz.getDeclaredMethod(methodName, parameterTypes); } catch (NoSuchMethodException e) { @@ -359,26 +427,32 @@ private static <T> T invokeProtectedMethod(Object object, Method method, Object. } @Nonnull - private static Map<String, Method> getMethodsFromRecordTemplate(@Nonnull RecordTemplate recordTemplate) { + private static Map<String, Method> getMethodsFromRecordTemplate( + @Nonnull RecordTemplate recordTemplate) { final HashMap<String, Method> methodMap = new HashMap<>(); for (RecordDataSchema.Field field : recordTemplate.schema().getFields()) { final String capitalizedName = capitalizeFirst(field.getName()); final String getMethodName = - (field.getType().getType().equals(RecordDataSchema.Type.BOOLEAN) ? "is" : "get") + capitalizedName; + (field.getType().getType().equals(RecordDataSchema.Type.BOOLEAN) ? "is" : "get") + + capitalizedName; try { methodMap.put(field.getName(), recordTemplate.getClass().getMethod(getMethodName)); } catch (NoSuchMethodException e) { - throw new RuntimeException(String.format("Failed to get method [%s], for class [%s], field [%s]", getMethodName, - recordTemplate.getClass().getCanonicalName(), field.getName()), e); + throw new RuntimeException( + String.format( + "Failed to get method [%s], for class [%s], field [%s]", + getMethodName, recordTemplate.getClass().getCanonicalName(), field.getName()), + e); } } return Collections.unmodifiableMap(methodMap); } /** - * Given a {@link RecordTemplate} and field name, this will find and execute getFieldName/isFieldName and return the result - * If neither getFieldName/isFieldName has been called for any of the fields of the RecordTemplate, then the get/is method - * for all schema fields of the record will be found and subsequently cached. + * Given a {@link RecordTemplate} and field name, this will find and execute + * getFieldName/isFieldName and return the result If neither getFieldName/isFieldName has been + * called for any of the fields of the RecordTemplate, then the get/is method for all schema + * fields of the record will be found and subsequently cached. * * @param record {@link RecordTemplate} whose field has to be referenced * @param fieldName field name of the record that has to be referenced @@ -391,8 +465,10 @@ private static Object invokeMethod(@Nonnull RecordTemplate record, @Nonnull Stri return METHOD_CACHE.get(record.getClass()).get(fieldName).invoke(record); } catch (IllegalAccessException | InvocationTargetException e) { throw new RuntimeException( - String.format("Failed to execute method for class [%s], field [%s]", record.getClass().getCanonicalName(), - fieldName), e); + String.format( + "Failed to execute method for class [%s], field [%s]", + record.getClass().getCanonicalName(), fieldName), + e); } } @@ -402,21 +478,24 @@ private static Object getUnionMember(@Nonnull UnionTemplate union, @Nonnull Stri return ((DataMap) union.data()).get(memberName); } throw new RuntimeException( - String.format("Failed to extract member from union [%s], member [%s]", union.getClass().getCanonicalName(), - memberName)); + String.format( + "Failed to extract member from union [%s], member [%s]", + union.getClass().getCanonicalName(), memberName)); } /** - * Helper method for referencing array of RecordTemplate objects. Referencing a particular index or range of indices of an array is not supported. + * Helper method for referencing array of RecordTemplate objects. Referencing a particular index + * or range of indices of an array is not supported. 
* - * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} which needs to be referenced + * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} + * which needs to be referenced * @param ps {@link PathSpec} for the entire path inside the array that needs to be referenced * @return {@link List} of objects from the array, referenced using the PathSpec */ @Nonnull @SuppressWarnings("rawtypes") - private static List<Object> getReferenceForAbstractArray(@Nonnull AbstractArrayTemplate<Object> reference, - @Nonnull PathSpec ps) { + private static List<Object> getReferenceForAbstractArray( + @Nonnull AbstractArrayTemplate<Object> reference, @Nonnull PathSpec ps) { if (!reference.isEmpty()) { return Arrays.stream((reference).toArray()) .map(x -> getFieldValue(x, ps)) @@ -427,17 +506,19 @@ private static List<Object> getReferenceForAbstractArray(@Nonnull AbstractArrayT } /** - * Nullable version of the method above. Allows us to get null values in a list in the correct oder. - * Helper method for referencing array of RecordTemplate objects. Referencing a particular index or range of indices of an array is not supported. + * Nullable version of the method above. Allows us to get null values in a list in the correct + * oder. Helper method for referencing array of RecordTemplate objects. Referencing a particular + * index or range of indices of an array is not supported. * - * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} which needs to be referenced + * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} + * which needs to be referenced * @param ps {@link PathSpec} for the entire path inside the array that needs to be referenced * @return {@link List} of objects from the array, referenced using the PathSpec */ @Nullable @SuppressWarnings("rawtypes") - private static List<Object> getNullableReferenceForAbstractArray(@Nonnull AbstractArrayTemplate<Object> reference, - @Nonnull PathSpec ps) { + private static List<Object> getNullableReferenceForAbstractArray( + @Nonnull AbstractArrayTemplate<Object> reference, @Nonnull PathSpec ps) { if (!reference.isEmpty()) { return Arrays.stream((reference).toArray()) .map(x -> getNullableFieldValue(x, ps)) @@ -447,11 +528,12 @@ private static List<Object> getNullableReferenceForAbstractArray(@Nonnull Abstra } /** - * Similar to {@link #getFieldValue(Object, PathSpec)} but takes string representation of Pegasus PathSpec as - * input. + * Similar to {@link #getFieldValue(Object, PathSpec)} but takes string representation of Pegasus + * PathSpec as input. */ @Nonnull - public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull String pathSpecAsString) { + public static Optional<Object> getFieldValue( + @Nonnull Object record, @Nonnull String pathSpecAsString) { pathSpecAsString = LEADING_SPACESLASH_PATTERN.matcher(pathSpecAsString).replaceAll(""); pathSpecAsString = TRAILING_SPACESLASH_PATTERN.matcher(pathSpecAsString).replaceAll(""); @@ -462,13 +544,16 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull St } /** - * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of the path from the record. - * This handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of primitive types or array of records. 
- * Fetching of values in a RecordTemplate where the field has a default value will return the field default value. - * Referencing field corresponding to a particular index or range of indices of an array is not supported. - * Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported. + * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of + * the path from the record. This handles only RecordTemplate, fields of which can be primitive + * types, typeRefs, arrays of primitive types or array of records. Fetching of values in a + * RecordTemplate where the field has a default value will return the field default value. + * Referencing field corresponding to a particular index or range of indices of an array is not + * supported. Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) + * FixedTemplate are currently not supported. * - * @param record {@link Object} Object to traverse the path. If record is of primitive type, and path is not empty, it will fail to traverse. + * @param record {@link Object} Object to traverse the path. If record is of primitive type, and + * path is not empty, it will fail to traverse. * @param ps {@link PathSpec} representing the path whose value needs to be returned * @return Referenced object of the RecordTemplate corresponding to the PathSpec */ @@ -484,7 +569,8 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull Pa } if (StringUtils.isNumeric(part)) { throw new UnsupportedOperationException( - String.format("Array indexing is not supported for %s (%s from %s)", part, ps, reference)); + String.format( + "Array indexing is not supported for %s (%s from %s)", part, ps, reference)); } if (reference instanceof RecordTemplate) { reference = invokeMethod((RecordTemplate) reference, part); @@ -497,8 +583,10 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull Pa return Optional.empty(); } } else if (reference instanceof AbstractArrayTemplate) { - return Optional.of(getReferenceForAbstractArray((AbstractArrayTemplate<Object>) reference, - new PathSpec(ps.getPathComponents().subList(i, pathSize)))); + return Optional.of( + getReferenceForAbstractArray( + (AbstractArrayTemplate<Object>) reference, + new PathSpec(ps.getPathComponents().subList(i, pathSize)))); } else { throw new UnsupportedOperationException( String.format("Failed at extracting %s (%s from %s)", part, ps, record)); @@ -508,16 +596,20 @@ public static Optional<Object> getFieldValue(@Nonnull Object record, @Nonnull Pa } /** - * A nullable version of the getFieldValue method above. This is used when grabbing values from aspects based on field specs - * on Relationship annotations. This allows us to get null values for fields that don't have a value for a given path spec. - * Then we can map values correctly based on list indices creating graph edges. - * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of the path from the record. - * This handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of primitive types or array of records. - * Fetching of values in a RecordTemplate where the field has a default value will return the field default value. - * Referencing field corresponding to a particular index or range of indices of an array is not supported. 
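A usage sketch for the PathSpec traversal above: string paths are trimmed of leading and trailing slashes or spaces, each component is resolved through the cached get/is accessor methods, and the "*" wildcard fans the remaining path out across array items. The aspect and path here are illustrative only:

    import com.datahub.util.RecordUtils;
    import com.linkedin.common.Ownership;
    import java.util.Optional;

    // Inside any method:
    Ownership ownership = new Ownership();
    // Resolves the "owner" field of every entry in the "owners" array;
    // an unset field along the way short-circuits to Optional.empty().
    Optional<Object> owners = RecordUtils.getFieldValue(ownership, "/owners/*/owner");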
- * Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported, return null. + * A nullable version of the getFieldValue method above. This is used when grabbing values from + * aspects based on field specs on Relationship annotations. This allows us to get null values for + * fields that don't have a value for a given path spec. Then we can map values correctly based on + * list indices creating graph edges. Given a {@link Object} and {@link + * com.linkedin.data.schema.PathSpec} this will return value of the path from the record. This + * handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of + * primitive types or array of records. Fetching of values in a RecordTemplate where the field has + * a default value will return the field default value. Referencing field corresponding to a + * particular index or range of indices of an array is not supported. Fields corresponding to 1) + * multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported, + * return null. * - * @param record {@link Object} Object to traverse the path. If record is of primitive type, and path is not empty, it will fail to traverse. + * @param record {@link Object} Object to traverse the path. If record is of primitive type, and + * path is not empty, it will fail to traverse. * @param ps {@link PathSpec} representing the path whose value needs to be returned * @return Referenced object of the RecordTemplate corresponding to the PathSpec */ @@ -533,14 +625,16 @@ public static Object getNullableFieldValue(@Nonnull Object record, @Nonnull Path } if (StringUtils.isNumeric(part)) { throw new UnsupportedOperationException( - String.format("Array indexing is not supported for %s (%s from %s)", part, ps, reference)); + String.format( + "Array indexing is not supported for %s (%s from %s)", part, ps, reference)); } if (reference instanceof RecordTemplate) { reference = invokeMethod((RecordTemplate) reference, part); } else if (reference instanceof UnionTemplate) { reference = getUnionMember((UnionTemplate) reference, part); } else if (reference instanceof AbstractArrayTemplate) { - return getNullableReferenceForAbstractArray((AbstractArrayTemplate<Object>) reference, + return getNullableReferenceForAbstractArray( + (AbstractArrayTemplate<Object>) reference, new PathSpec(ps.getPathComponents().subList(i, pathSize))); } else { return null; @@ -548,5 +642,4 @@ public static Object getNullableFieldValue(@Nonnull Object record, @Nonnull Path } return reference; } - } diff --git a/li-utils/src/main/java/com/datahub/util/Statement.java b/li-utils/src/main/java/com/datahub/util/Statement.java index c30a5e9b70c76..f2c56a409312c 100644 --- a/li-utils/src/main/java/com/datahub/util/Statement.java +++ b/li-utils/src/main/java/com/datahub/util/Statement.java @@ -5,7 +5,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class Statement { diff --git a/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java b/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java index 3df693e59adf7..56b97a3fb8233 100644 --- a/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java +++ b/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java @@ -1,8 +1,6 @@ package com.datahub.util.exception; -/** - * An exception to be thrown when elastic search query fails. - */ +/** An exception to be thrown when elastic search query fails. 
*/ public class ESQueryException extends RuntimeException { public ESQueryException(String message) { diff --git a/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java b/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java index 019e6896eb006..b7e182df527bf 100644 --- a/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java +++ b/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java @@ -1,8 +1,6 @@ package com.datahub.util.exception; -/** - * Thrown when a schema didn't match the expectation. - */ +/** Thrown when a schema didn't match the expectation. */ public class InvalidSchemaException extends RuntimeException { public InvalidSchemaException(String message) { diff --git a/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java b/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java index bab319812bed9..2a1784f6d7197 100644 --- a/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java +++ b/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java @@ -1,8 +1,6 @@ package com.datahub.util.exception; -/** - * An exception to be thrown when Model Conversion fails. - */ +/** An exception to be thrown when Model Conversion fails. */ public class ModelConversionException extends RuntimeException { public ModelConversionException(String message) { diff --git a/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java b/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java index e0533cb2d2502..fc082abf22771 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java @@ -8,14 +8,12 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - -/** - * Utility class to validate aspects are part of the union schemas. - */ +/** Utility class to validate aspects are part of the union schemas. */ public final class AspectValidator { // A cache of validated classes - private static final Set<Class<? extends UnionTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends UnionTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); private AspectValidator() { // Util class @@ -26,15 +24,18 @@ private AspectValidator() { * * @param schema schema for the model */ - public static void validateAspectUnionSchema(@Nonnull UnionDataSchema schema, @Nonnull String aspectClassName) { + public static void validateAspectUnionSchema( + @Nonnull UnionDataSchema schema, @Nonnull String aspectClassName) { if (!ValidationUtils.isUnionWithOnlyComplexMembers(schema)) { - ValidationUtils.invalidSchema("Aspect '%s' must be a union containing only record type members", aspectClassName); + ValidationUtils.invalidSchema( + "Aspect '%s' must be a union containing only record type members", aspectClassName); } } /** - * Similar to {@link #validateAspectUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateAspectUnionSchema(UnionDataSchema, String)} but take a {@link Class} + * instead and caches results. */ public static void validateAspectUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -46,8 +47,9 @@ public static void validateAspectUnionSchema(@Nonnull Class<? 
extends UnionTempl } private static boolean isValidMetadataField(RecordDataSchema.Field field) { - return field.getName().equals("metadata") && !field.getOptional() - && field.getType().getType() == DataSchema.Type.UNION && ValidationUtils.isUnionWithOnlyComplexMembers( - (UnionDataSchema) field.getType()); + return field.getName().equals("metadata") + && !field.getOptional() + && field.getType().getType() == DataSchema.Type.UNION + && ValidationUtils.isUnionWithOnlyComplexMembers((UnionDataSchema) field.getType()); } } diff --git a/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java b/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java index 3fbf348c5cb1c..034e3008b26ab 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/DeltaValidator.java @@ -7,14 +7,12 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - -/** - * Utility class to validate delta event schemas. - */ +/** Utility class to validate delta event schemas. */ public final class DeltaValidator { // A cache of validated classes - private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends RecordTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); private DeltaValidator() { // Util class @@ -30,17 +28,19 @@ public static void validateDeltaSchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Delta '%s' must contain an non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Delta '%s' must contain an non-optional 'urn' field of URN type", className); } if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, DeltaValidator::isValidDeltaField)) { - ValidationUtils.invalidSchema("Delta '%s' must contain an non-optional 'delta' field of UNION type", - className); + ValidationUtils.invalidSchema( + "Delta '%s' must contain an non-optional 'delta' field of UNION type", className); } } /** - * Similar to {@link #validateDeltaSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateDeltaSchema(RecordDataSchema)} but take a {@link Class} instead and + * caches results. */ public static void validateDeltaSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -52,7 +52,8 @@ public static void validateDeltaSchema(@Nonnull Class<? extends RecordTemplate> } private static boolean isValidDeltaField(@Nonnull RecordDataSchema.Field field) { - return field.getName().equals("delta") && !field.getOptional() + return field.getName().equals("delta") + && !field.getOptional() && field.getType().getType() == DataSchema.Type.UNION; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java b/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java index 31898c01f4233..c8741d2ccea83 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java @@ -8,21 +8,21 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - -/** - * Utility class to validate search document schemas. 
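Each validator in this patch follows the same shape: schema checks built from ValidationUtils predicates, guarded by a ConcurrentHashMap-backed set so any given class is validated only once per JVM. A sketch of a caller, where MyAspect stands in for a real generated union (a hypothetical name):

    import com.datahub.util.exception.InvalidSchemaException;
    import com.datahub.util.validator.AspectValidator;

    try {
      // MyAspect is a hypothetical generated UnionTemplate subclass.
      AspectValidator.validateAspectUnionSchema(MyAspect.class);
    } catch (InvalidSchemaException e) {
      // Thrown when the union contains a non-record member.
    }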
- */ +/** Utility class to validate search document schemas. */ public final class DocumentValidator { // Allowed non-optional fields. All other fields must be optional. - private static final Set<String> NON_OPTIONAL_FIELDS = Collections.unmodifiableSet(new HashSet<String>() { - { - add("urn"); - } - }); + private static final Set<String> NON_OPTIONAL_FIELDS = + Collections.unmodifiableSet( + new HashSet<String>() { + { + add("urn"); + } + }); // A cache of validated classes - private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends RecordTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); private DocumentValidator() { // Util class @@ -38,21 +38,29 @@ public static void validateDocumentSchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Document '%s' must contain an non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Document '%s' must contain an non-optional 'urn' field of URN type", className); } - ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> { - ValidationUtils.invalidSchema("Document '%s' contains a field '%s' that makes use of a disallowed type '%s'.", - className, field.getName(), field.getType().getType()); - }); + ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Document '%s' contains a field '%s' that makes use of a disallowed type '%s'.", + className, field.getName(), field.getType().getType()); + }); - ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS).forEach(field -> { - ValidationUtils.invalidSchema("Document '%s' must contain an optional '%s' field", className, field.getName()); - }); + ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Document '%s' must contain an optional '%s' field", className, field.getName()); + }); } /** - * Similar to {@link #validateDocumentSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateDocumentSchema(RecordDataSchema)} but take a {@link Class} instead + * and caches results. */ public static void validateDocumentSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -62,4 +70,4 @@ public static void validateDocumentSchema(@Nonnull Class<? extends RecordTemplat validateDocumentSchema(ValidationUtils.getRecordSchema(clazz)); VALIDATED.add(clazz); } -} \ No newline at end of file +} diff --git a/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java b/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java index ccb1c2751a802..726283a40f830 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/EntityValidator.java @@ -11,25 +11,25 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - -/** - * Utility class to validate entity schemas. - */ +/** Utility class to validate entity schemas. */ public final class EntityValidator { // Allowed non-optional fields. All other fields must be optional. 
- private static final Set<String> NON_OPTIONAL_FIELDS = Collections.unmodifiableSet(new HashSet<String>() { - { - add("urn"); - } - }); + private static final Set<String> NON_OPTIONAL_FIELDS = + Collections.unmodifiableSet( + new HashSet<String>() { + { + add("urn"); + } + }); // A cache of validated classes - private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends RecordTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); // A cache of validated classes - private static final Set<Class<? extends UnionTemplate>> UNION_VALIDATED = ConcurrentHashMap.newKeySet(); - + private static final Set<Class<? extends UnionTemplate>> UNION_VALIDATED = + ConcurrentHashMap.newKeySet(); private EntityValidator() { // Util class @@ -45,21 +45,29 @@ public static void validateEntitySchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Entity '%s' must contain a non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Entity '%s' must contain a non-optional 'urn' field of URN type", className); } - ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> { - ValidationUtils.invalidSchema("Entity '%s' contains a field '%s' that makes use of a disallowed type '%s'.", - className, field.getName(), field.getType().getType()); - }); - - ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS).forEach(field -> { - ValidationUtils.invalidSchema("Entity '%s' must contain an optional '%s' field", className, field.getName()); - }); + ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Entity '%s' contains a field '%s' that makes use of a disallowed type '%s'.", + className, field.getName(), field.getType().getType()); + }); + + ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Entity '%s' must contain an optional '%s' field", className, field.getName()); + }); } /** - * Similar to {@link #validateEntitySchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateEntitySchema(RecordDataSchema)} but take a {@link Class} instead and + * caches results. */ public static void validateEntitySchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -71,8 +79,8 @@ public static void validateEntitySchema(@Nonnull Class<? extends RecordTemplate> } /** - * Similar to {@link #validateEntityUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches - * results. + * Similar to {@link #validateEntityUnionSchema(UnionDataSchema, String)} but take a {@link Class} + * instead and caches results. */ public static void validateEntityUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) { if (UNION_VALIDATED.contains(clazz)) { @@ -88,16 +96,16 @@ public static void validateEntityUnionSchema(@Nonnull Class<? 
extends UnionTempl * * @param schema schema for the model */ - public static void validateEntityUnionSchema(@Nonnull UnionDataSchema schema, @Nonnull String entityClassName) { + public static void validateEntityUnionSchema( + @Nonnull UnionDataSchema schema, @Nonnull String entityClassName) { if (!ValidationUtils.isUnionWithOnlyComplexMembers(schema)) { - ValidationUtils.invalidSchema("Entity '%s' must be a union containing only record type members", entityClassName); + ValidationUtils.invalidSchema( + "Entity '%s' must be a union containing only record type members", entityClassName); } } - /** - * Checks if an entity schema is valid. - */ + /** Checks if an entity schema is valid. */ public static boolean isValidEntitySchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (!VALIDATED.contains(clazz)) { try { @@ -109,4 +117,4 @@ public static boolean isValidEntitySchema(@Nonnull Class<? extends RecordTemplat return true; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java b/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java index f4627087149fc..c96f1c4fb7313 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/RelationshipValidator.java @@ -14,14 +14,15 @@ import javax.annotation.Nonnull; import lombok.Value; - public class RelationshipValidator { // A cache of validated classes - private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends RecordTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); // A cache of validated classes - private static final Set<Class<? extends UnionTemplate>> UNION_VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? 
extends UnionTemplate>> UNION_VALIDATED = + ConcurrentHashMap.newKeySet(); @Value private static class Pair { @@ -42,29 +43,33 @@ public static void validateRelationshipSchema(@Nonnull RecordDataSchema schema) final String className = schema.getBindingName(); - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, - field -> ValidationUtils.isValidUrnField(field, "source"))) { - ValidationUtils.invalidSchema("Relationship '%s' must contain an non-optional 'source' field of URN type", - className); + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, field -> ValidationUtils.isValidUrnField(field, "source"))) { + ValidationUtils.invalidSchema( + "Relationship '%s' must contain an non-optional 'source' field of URN type", className); } - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, - field -> ValidationUtils.isValidUrnField(field, "destination"))) { - ValidationUtils.invalidSchema("Relationship '%s' must contain an non-optional 'destination' field of URN type", + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, field -> ValidationUtils.isValidUrnField(field, "destination"))) { + ValidationUtils.invalidSchema( + "Relationship '%s' must contain an non-optional 'destination' field of URN type", className); } - ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> { - ValidationUtils.invalidSchema("Relationship '%s' contains a field '%s' that makes use of a disallowed type '%s'.", - className, field.getName(), field.getType().getType()); - }); + ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Relationship '%s' contains a field '%s' that makes use of a disallowed type '%s'.", + className, field.getName(), field.getType().getType()); + }); validatePairings(schema); } - /** - * Similar to {@link #validateRelationshipSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateRelationshipSchema(RecordDataSchema)} but take a {@link Class} + * instead and caches results. */ public static void validateRelationshipSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -76,14 +81,17 @@ public static void validateRelationshipSchema(@Nonnull Class<? extends RecordTem } /** - * Similar to {@link #validateRelationshipUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateRelationshipUnionSchema(UnionDataSchema, String)} but take a {@link + * Class} instead and caches results. */ - public static void validateRelationshipUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) { + public static void validateRelationshipUnionSchema( + @Nonnull Class<? extends UnionTemplate> clazz) { if (UNION_VALIDATED.contains(clazz)) { return; } - validateRelationshipUnionSchema(ValidationUtils.getUnionSchema(clazz), clazz.getCanonicalName()); + validateRelationshipUnionSchema( + ValidationUtils.getUnionSchema(clazz), clazz.getCanonicalName()); UNION_VALIDATED.add(clazz); } @@ -92,10 +100,13 @@ public static void validateRelationshipUnionSchema(@Nonnull Class<? 
extends Unio * * @param schema schema for the model */ - public static void validateRelationshipUnionSchema(@Nonnull UnionDataSchema schema, @Nonnull String relationshipClassName) { + public static void validateRelationshipUnionSchema( + @Nonnull UnionDataSchema schema, @Nonnull String relationshipClassName) { if (!ValidationUtils.isUnionWithOnlyComplexMembers(schema)) { - ValidationUtils.invalidSchema("Relationship '%s' must be a union containing only record type members", relationshipClassName); + ValidationUtils.invalidSchema( + "Relationship '%s' must be a union containing only record type members", + relationshipClassName); } } @@ -105,39 +116,45 @@ private static void validatePairings(@Nonnull RecordDataSchema schema) { Map<String, Object> properties = schema.getProperties(); if (!properties.containsKey("pairings")) { - ValidationUtils.invalidSchema("Relationship '%s' must contain a 'pairings' property", className); + ValidationUtils.invalidSchema( + "Relationship '%s' must contain a 'pairings' property", className); } DataList pairings = (DataList) properties.get("pairings"); Set<Pair> registeredPairs = new HashSet<>(); - pairings.stream().forEach(obj -> { - DataMap map = (DataMap) obj; - if (!map.containsKey("source") || !map.containsKey("destination")) { - ValidationUtils.invalidSchema("Relationship '%s' contains an invalid 'pairings' item. " - + "Each item must contain a 'source' and 'destination' properties.", className); - } - - String sourceUrn = map.getString("source"); - if (!isValidUrnClass(sourceUrn)) { - ValidationUtils.invalidSchema( - "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", className, - sourceUrn); - } - - String destinationUrn = map.getString("destination"); - if (!isValidUrnClass(destinationUrn)) { - ValidationUtils.invalidSchema( - "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", className, - destinationUrn); - } - - Pair pair = new Pair(sourceUrn, destinationUrn); - if (registeredPairs.contains(pair)) { - ValidationUtils.invalidSchema("Relationship '%s' contains a repeated 'pairings' item (%s, %s)", className, - sourceUrn, destinationUrn); - } - registeredPairs.add(pair); - }); + pairings.stream() + .forEach( + obj -> { + DataMap map = (DataMap) obj; + if (!map.containsKey("source") || !map.containsKey("destination")) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains an invalid 'pairings' item. " + + "Each item must contain a 'source' and 'destination' properties.", + className); + } + + String sourceUrn = map.getString("source"); + if (!isValidUrnClass(sourceUrn)) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", + className, sourceUrn); + } + + String destinationUrn = map.getString("destination"); + if (!isValidUrnClass(destinationUrn)) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains an invalid item in 'pairings'. 
%s is not a valid URN class name.", + className, destinationUrn); + } + + Pair pair = new Pair(sourceUrn, destinationUrn); + if (registeredPairs.contains(pair)) { + ValidationUtils.invalidSchema( + "Relationship '%s' contains a repeated 'pairings' item (%s, %s)", + className, sourceUrn, destinationUrn); + } + registeredPairs.add(pair); + }); } private static boolean isValidUrnClass(String className) { @@ -147,4 +164,4 @@ private static boolean isValidUrnClass(String className) { throw new RuntimeException(e); } } -} \ No newline at end of file +} diff --git a/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java b/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java index 988fabe0411c8..08f349b146db6 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java @@ -13,11 +13,11 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - public class SnapshotValidator { // A cache of validated classes - private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set<Class<? extends RecordTemplate>> VALIDATED = + ConcurrentHashMap.newKeySet(); private SnapshotValidator() { // Util class @@ -33,19 +33,22 @@ public static void validateSnapshotSchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Snapshot '%s' must contain an non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Snapshot '%s' must contain an non-optional 'urn' field of URN type", className); } - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, SnapshotValidator::isValidAspectsField)) { - ValidationUtils.invalidSchema("Snapshot '%s' must contain an non-optional 'aspects' field of ARRAY type", - className); + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, SnapshotValidator::isValidAspectsField)) { + ValidationUtils.invalidSchema( + "Snapshot '%s' must contain an non-optional 'aspects' field of ARRAY type", className); } validateAspectsItemType(schema.getField("aspects"), className); } /** - * Similar to {@link #validateSnapshotSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateSnapshotSchema(RecordDataSchema)} but take a {@link Class} instead + * and caches results. */ public static void validateSnapshotSchema(@Nonnull Class<? extends RecordTemplate> clazz) { if (VALIDATED.contains(clazz)) { @@ -61,38 +64,47 @@ public static void validateSnapshotSchema(@Nonnull Class<? extends RecordTemplat * * @param snapshotClasses a collection of snapshot classes. */ - public static void validateUniqueUrn(@Nonnull Collection<Class<? extends RecordTemplate>> snapshotClasses) { + public static void validateUniqueUrn( + @Nonnull Collection<Class<? 
extends RecordTemplate>> snapshotClasses) { final Set<Class<Urn>> urnClasses = new HashSet<>(); - snapshotClasses.forEach(snapshotClass -> { - final Class<Urn> urnClass = - ValidationUtils.getUrnClass(ValidationUtils.getRecordSchema(snapshotClass).getField("urn")); - if (urnClasses.contains(urnClass)) { - ValidationUtils.invalidSchema("URN class %s in %s has already been claimed by another snapshot.", urnClass, - snapshotClass); - } - urnClasses.add(urnClass); - }); + snapshotClasses.forEach( + snapshotClass -> { + final Class<Urn> urnClass = + ValidationUtils.getUrnClass( + ValidationUtils.getRecordSchema(snapshotClass).getField("urn")); + if (urnClasses.contains(urnClass)) { + ValidationUtils.invalidSchema( + "URN class %s in %s has already been claimed by another snapshot.", + urnClass, snapshotClass); + } + urnClasses.add(urnClass); + }); } private static boolean isValidAspectsField(@Nonnull RecordDataSchema.Field field) { - return field.getName().equals("aspects") && !field.getOptional() + return field.getName().equals("aspects") + && !field.getOptional() && field.getType().getType() == DataSchema.Type.ARRAY; } - private static void validateAspectsItemType(@Nonnull RecordDataSchema.Field aspectsField, @Nonnull String className) { + private static void validateAspectsItemType( + @Nonnull RecordDataSchema.Field aspectsField, @Nonnull String className) { DataSchema itemSchema = ((ArrayDataSchema) aspectsField.getType()).getItems(); if (itemSchema.getType() != DataSchema.Type.TYPEREF) { - ValidationUtils.invalidSchema("Snapshot %s' 'aspects' field must be an array of aspect typeref", className); + ValidationUtils.invalidSchema( + "Snapshot '%s' 'aspects' field must be an array of aspect typeref", className); } TyperefDataSchema typerefSchema = (TyperefDataSchema) itemSchema; DataSchema unionSchema = typerefSchema.getDereferencedDataSchema(); if (unionSchema.getType() != DataSchema.Type.UNION) { - ValidationUtils.invalidSchema("Snapshot '%s' 'aspects' field must be an array of union typeref", className); + ValidationUtils.invalidSchema( + "Snapshot '%s' 'aspects' field must be an array of union typeref", className); } - AspectValidator.validateAspectUnionSchema((UnionDataSchema) unionSchema, typerefSchema.getBindingName()); + AspectValidator.validateAspectUnionSchema( + (UnionDataSchema) unionSchema, typerefSchema.getBindingName()); } } diff --git a/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java b/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java index 1af6de8ff3940..5b38ff21e4b81 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java +++ b/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java @@ -1,7 +1,7 @@ package com.datahub.util.validator; -import com.linkedin.common.urn.Urn; import com.datahub.util.exception.InvalidSchemaException; +import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.schema.ArrayDataSchema; import com.linkedin.data.schema.DataSchema; @@ -18,24 +18,22 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -/** - * Utility class for schema validation classes. - */ +/** Utility class for schema validation classes. 
*/ public final class ValidationUtils { public static final Set<DataSchema.Type> PRIMITIVE_TYPES = - Collections.unmodifiableSet(new HashSet<DataSchema.Type>() { - { - add(DataSchema.Type.BOOLEAN); - add(DataSchema.Type.INT); - add(DataSchema.Type.LONG); - add(DataSchema.Type.FLOAT); - add(DataSchema.Type.DOUBLE); - add(DataSchema.Type.STRING); - add(DataSchema.Type.ENUM); - } - }); + Collections.unmodifiableSet( + new HashSet<DataSchema.Type>() { + { + add(DataSchema.Type.BOOLEAN); + add(DataSchema.Type.INT); + add(DataSchema.Type.LONG); + add(DataSchema.Type.FLOAT); + add(DataSchema.Type.DOUBLE); + add(DataSchema.Type.STRING); + add(DataSchema.Type.ENUM); + } + }); private ValidationUtils() { // Util class @@ -45,9 +43,7 @@ public static void invalidSchema(@Nonnull String format, Object... args) { throw new InvalidSchemaException(String.format(format, args)); } - /** - * Gets the {@link RecordDataSchema} of a {@link RecordTemplate} via reflection. - */ + /** Gets the {@link RecordDataSchema} of a {@link RecordTemplate} via reflection. */ @Nonnull public static RecordDataSchema getRecordSchema(@Nonnull Class<? extends RecordTemplate> clazz) { try { @@ -61,9 +57,7 @@ public static RecordDataSchema getRecordSchema(@Nonnull Class<? extends RecordTe } } - /** - * Gets the {@link UnionDataSchema} of a {@link UnionTemplate} via reflection. - */ + /** Gets the {@link UnionDataSchema} of a {@link UnionTemplate} via reflection. */ @Nonnull public static UnionDataSchema getUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) { try { @@ -77,30 +71,29 @@ public static UnionDataSchema getUnionSchema(@Nonnull Class<? extends UnionTempl } } - /** - * Returns true if the supply schema has exactly one field matching the predicate. - */ - public static boolean schemaHasExactlyOneSuchField(@Nonnull RecordDataSchema schema, - @Nonnull Predicate<RecordDataSchema.Field> predicate) { + /** Returns true if the supplied schema has exactly one field matching the predicate. */ + public static boolean schemaHasExactlyOneSuchField( + @Nonnull RecordDataSchema schema, @Nonnull Predicate<RecordDataSchema.Field> predicate) { return schema.getFields().stream().filter(predicate).count() == 1; } - /** - * Returns true if the non-optional field matches the field name and has a URN type. - */ - public static boolean isValidUrnField(@Nonnull RecordDataSchema.Field field, @Nonnull String fieldName) { - return field.getName().equals(fieldName) && !field.getOptional() - && field.getType().getType() == DataSchema.Type.TYPEREF && Urn.class.isAssignableFrom(getUrnClass(field)); + /** Returns true if the non-optional field matches the field name and has a URN type. */ + public static boolean isValidUrnField( + @Nonnull RecordDataSchema.Field field, @Nonnull String fieldName) { + return field.getName().equals(fieldName) + && !field.getOptional() + && field.getType().getType() == DataSchema.Type.TYPEREF + && Urn.class.isAssignableFrom(getUrnClass(field)); } - /** - * Returns the Java class for an URN typeref field. - */ + /** Returns the Java class for a URN typeref field. 
*/ public static Class<Urn> getUrnClass(@Nonnull RecordDataSchema.Field field) { try { @SuppressWarnings("unchecked") final Class<Urn> clazz = - (Class<Urn>) Class.forName(((DataMap) field.getType().getProperties().get("java")).getString("class")); + (Class<Urn>) + Class.forName( + ((DataMap) field.getType().getProperties().get("java")).getString("class")); return clazz; } catch (ClassNotFoundException e) { throw new RuntimeException(e); @@ -108,52 +101,55 @@ public static Class<Urn> getUrnClass(@Nonnull RecordDataSchema.Field field) { } /** - * Similar to {@link #isValidUrnField(RecordDataSchema.Field, String)} but with a fixed field "urn". + * Similar to {@link #isValidUrnField(RecordDataSchema.Field, String)} but with a fixed field + * "urn". */ public static boolean isValidUrnField(@Nonnull RecordDataSchema.Field field) { return isValidUrnField(field, "urn"); } - /** - * Returns all the non-whitelisted, non-optional fields in a {@link RecordDataSchema}. - */ + /** Returns all the non-whitelisted, non-optional fields in a {@link RecordDataSchema}. */ @Nonnull - public static List<RecordDataSchema.Field> nonOptionalFields(@Nonnull RecordDataSchema schema, - @Nonnull Set<String> whitelistedFields) { - return schema.getFields().stream().filter(field -> { - if (!whitelistedFields.contains(field.getName())) { - if (!field.getOptional()) { - return true; - } - } - return false; - }).collect(Collectors.toList()); + public static List<RecordDataSchema.Field> nonOptionalFields( + @Nonnull RecordDataSchema schema, @Nonnull Set<String> whitelistedFields) { + return schema.getFields().stream() + .filter( + field -> { + if (!whitelistedFields.contains(field.getName())) { + if (!field.getOptional()) { + return true; + } + } + return false; + }) + .collect(Collectors.toList()); } - /** - * Returns all the non-whitelisted, optional fields in a {@link RecordDataSchema}. - */ + /** Returns all the non-whitelisted, optional fields in a {@link RecordDataSchema}. */ @Nonnull - public static List<RecordDataSchema.Field> optionalFields(@Nonnull RecordDataSchema schema, - @Nonnull Set<String> whitelistedFields) { - return schema.getFields().stream().filter(field -> { - if (!whitelistedFields.contains(field.getName())) { - if (field.getOptional()) { - return true; - } - } - return false; - }).collect(Collectors.toList()); + public static List<RecordDataSchema.Field> optionalFields( + @Nonnull RecordDataSchema schema, @Nonnull Set<String> whitelistedFields) { + return schema.getFields().stream() + .filter( + field -> { + if (!whitelistedFields.contains(field.getName())) { + if (field.getOptional()) { + return true; + } + } + return false; + }) + .collect(Collectors.toList()); } /** - * Return all the fields in a {@link RecordDataSchema} that are not using one of the allowed types. + * Return all the fields in a {@link RecordDataSchema} that are not using one of the allowed + * types. 
*/ @Nonnull - public static List<RecordDataSchema.Field> fieldsUsingInvalidType(@Nonnull RecordDataSchema schema, - @Nonnull Set<DataSchema.Type> allowedTypes) { - return schema.getFields() - .stream() + public static List<RecordDataSchema.Field> fieldsUsingInvalidType( + @Nonnull RecordDataSchema schema, @Nonnull Set<DataSchema.Type> allowedTypes) { + return schema.getFields().stream() .filter(field -> !allowedTypes.contains(getFieldOrArrayItemType(field))) .collect(Collectors.toList()); } @@ -164,8 +160,10 @@ public static boolean isUnionWithOnlyComplexMembers(UnionDataSchema unionDataSch @Nonnull private static DataSchema.Type getFieldOrArrayItemType(@Nonnull RecordDataSchema.Field field) { - DataSchema type = field.getType().getType() == DataSchema.Type.ARRAY - ? ((ArrayDataSchema) field.getType()).getItems() : field.getType(); + DataSchema type = + field.getType().getType() == DataSchema.Type.ARRAY + ? ((ArrayDataSchema) field.getType()).getItems() + : field.getType(); if (type.getType() == DataSchema.Type.TYPEREF) { return type.getDereferencedType(); } diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index 972f52b8824ce..f5a3c9c12ff70 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -2,42 +2,42 @@ import com.linkedin.common.urn.Urn; - -/** - * Static class containing commonly-used constants across DataHub services. - */ +/** Static class containing commonly-used constants across DataHub services. */ public class Constants { public static final String INTERNAL_DELEGATED_FOR_ACTOR_HEADER_NAME = "X-DataHub-Delegated-For"; public static final String INTERNAL_DELEGATED_FOR_ACTOR_TYPE = "X-DataHub-Delegated-For-"; public static final String DATAHUB_ACTOR = "urn:li:corpuser:datahub"; // Super user. - public static final String SYSTEM_ACTOR = "urn:li:corpuser:__datahub_system"; // DataHub internal service principal. + public static final String SYSTEM_ACTOR = + "urn:li:corpuser:__datahub_system"; // DataHub internal service principal. public static final String UNKNOWN_ACTOR = "urn:li:corpuser:UNKNOWN"; // Unknown principal. public static final Long ASPECT_LATEST_VERSION = 0L; public static final String UNKNOWN_DATA_PLATFORM = "urn:li:dataPlatform:unknown"; // !!!!!!! IMPORTANT !!!!!!! - // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. Without this the limit is + // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. + // Without this the limit is // whatever Jackson is defaulting to (5 MB currently). public static final String MAX_JACKSON_STRING_SIZE = "16000000"; - public static final String INGESTION_MAX_SERIALIZED_STRING_LENGTH = "INGESTION_MAX_SERIALIZED_STRING_LENGTH"; + public static final String INGESTION_MAX_SERIALIZED_STRING_LENGTH = + "INGESTION_MAX_SERIALIZED_STRING_LENGTH"; - /** - * System Metadata - */ + /** System Metadata */ public static final String DEFAULT_RUN_ID = "no-run-id-provided"; - // Forces indexing for no-ops, enabled for restore indices calls. Only considered in the no-op case + + // Forces indexing for no-ops, enabled for restore indices calls. 
Only considered in the no-op + // case public static final String FORCE_INDEXING_KEY = "forceIndexing"; - // Indicates an event source from an application with hooks that have already been processed and should not be reprocessed + // Indicates an event source from an application with hooks that have already been processed and + // should not be reprocessed public static final String APP_SOURCE = "appSource"; // App sources public static final String UI_SOURCE = "ui"; - /** - * Entities - */ + /** Entities */ public static final String CORP_USER_ENTITY_NAME = "corpuser"; + public static final String CORP_GROUP_ENTITY_NAME = "corpGroup"; public static final String DATASET_ENTITY_NAME = "dataset"; public static final String CHART_ENTITY_NAME = "chart"; @@ -74,11 +74,10 @@ public class Constants { public static final String DATA_PRODUCT_ENTITY_NAME = "dataProduct"; public static final String OWNERSHIP_TYPE_ENTITY_NAME = "ownershipType"; - /** - * Aspects - */ + /** Aspects */ // Common public static final String OWNERSHIP_ASPECT_NAME = "ownership"; + public static final String INSTITUTIONAL_MEMORY_ASPECT_NAME = "institutionalMemory"; public static final String DATA_PLATFORM_INSTANCE_ASPECT_NAME = "dataPlatformInstance"; public static final String BROWSE_PATHS_ASPECT_NAME = "browsePaths"; @@ -136,19 +135,22 @@ public class Constants { // Dashboard public static final String DASHBOARD_KEY_ASPECT_NAME = "dashboardKey"; public static final String DASHBOARD_INFO_ASPECT_NAME = "dashboardInfo"; - public static final String EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME = "editableDashboardProperties"; + public static final String EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME = + "editableDashboardProperties"; public static final String DASHBOARD_USAGE_STATISTICS_ASPECT_NAME = "dashboardUsageStatistics"; // Notebook public static final String NOTEBOOK_KEY_ASPECT_NAME = "notebookKey"; public static final String NOTEBOOK_INFO_ASPECT_NAME = "notebookInfo"; public static final String NOTEBOOK_CONTENT_ASPECT_NAME = "notebookContent"; - public static final String EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME = "editableNotebookProperties"; + public static final String EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME = + "editableNotebookProperties"; // DataFlow public static final String DATA_FLOW_KEY_ASPECT_NAME = "dataFlowKey"; public static final String DATA_FLOW_INFO_ASPECT_NAME = "dataFlowInfo"; - public static final String EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME = "editableDataFlowProperties"; + public static final String EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME = + "editableDataFlowProperties"; // DataJob public static final String DATA_JOB_KEY_ASPECT_NAME = "dataJobKey"; @@ -162,19 +164,22 @@ public class Constants { // DataPlatformInstance public static final String DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME = "dataPlatformInstanceKey"; - public static final String DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME = "dataPlatformInstanceProperties"; + public static final String DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME = + "dataPlatformInstanceProperties"; // ML Feature public static final String ML_FEATURE_KEY_ASPECT_NAME = "mlFeatureKey"; public static final String ML_FEATURE_PROPERTIES_ASPECT_NAME = "mlFeatureProperties"; - public static final String ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlFeatureProperties"; + public static final String ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlFeatureProperties"; // ML Feature Table public static final String ML_FEATURE_TABLE_KEY_ASPECT_NAME = 
"mlFeatureTableKey"; public static final String ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME = "mlFeatureTableProperties"; - public static final String ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlFeatureTableProperties"; + public static final String ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlFeatureTableProperties"; - //ML Model + // ML Model public static final String ML_MODEL_KEY_ASPECT_NAME = "mlModelKey"; public static final String ML_MODEL_PROPERTIES_ASPECT_NAME = "mlModelProperties"; public static final String ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlModelProperties"; @@ -192,12 +197,14 @@ public class Constants { // ML Model Group public static final String ML_MODEL_GROUP_KEY_ASPECT_NAME = "mlModelGroupKey"; public static final String ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME = "mlModelGroupProperties"; - public static final String ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlModelGroupProperties"; + public static final String ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlModelGroupProperties"; // ML Primary Key public static final String ML_PRIMARY_KEY_KEY_ASPECT_NAME = "mlPrimaryKeyKey"; public static final String ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME = "mlPrimaryKeyProperties"; - public static final String ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlPrimaryKeyProperties"; + public static final String ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlPrimaryKeyProperties"; // Policy public static final String DATAHUB_POLICY_INFO_ASPECT_NAME = "dataHubPolicyInfo"; @@ -212,15 +219,16 @@ public class Constants { // Container public static final String CONTAINER_KEY_ASPECT_NAME = "containerKey"; public static final String CONTAINER_PROPERTIES_ASPECT_NAME = "containerProperties"; - public static final String CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME = "editableContainerProperties"; + public static final String CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableContainerProperties"; public static final String CONTAINER_ASPECT_NAME = "container"; // parent container - // Glossary term + // Glossary term public static final String GLOSSARY_TERM_KEY_ASPECT_NAME = "glossaryTermKey"; public static final String GLOSSARY_TERM_INFO_ASPECT_NAME = "glossaryTermInfo"; public static final String GLOSSARY_RELATED_TERM_ASPECT_NAME = "glossaryRelatedTerms"; - // Glossary node + // Glossary node public static final String GLOSSARY_NODE_KEY_ASPECT_NAME = "glossaryNodeKey"; public static final String GLOSSARY_NODE_INFO_ASPECT_NAME = "glossaryNodeInfo"; @@ -304,24 +312,24 @@ public class Constants { public static final String CHANGE_EVENT_PLATFORM_EVENT_NAME = "entityChangeEvent"; - /** - * Retention - */ + /** Retention */ public static final String DATAHUB_RETENTION_ENTITY = "dataHubRetention"; + public static final String DATAHUB_RETENTION_ASPECT = "dataHubRetentionConfig"; public static final String DATAHUB_RETENTION_KEY_ASPECT = "dataHubRetentionKey"; - /** - * User Status - */ + + /** User Status */ public static final String CORP_USER_STATUS_ACTIVE = "ACTIVE"; - /** - * Task Runs - */ + /** Task Runs */ public static final String DATA_PROCESS_INSTANCE_ENTITY_NAME = "dataProcessInstance"; - public static final String DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME = "dataProcessInstanceProperties"; - public static final String DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME = "dataProcessInstanceRunEvent"; - public static final String DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME = 
"dataProcessInstanceRelationships"; + + public static final String DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME = + "dataProcessInstanceProperties"; + public static final String DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME = + "dataProcessInstanceRunEvent"; + public static final String DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME = + "dataProcessInstanceRelationships"; // Posts public static final String POST_INFO_ASPECT_NAME = "postInfo"; @@ -332,8 +340,8 @@ public class Constants { public static final String CLIENT_ID_ASPECT = "telemetryClientId"; // Step - public static final String DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME = "dataHubStepStateProperties"; - + public static final String DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME = + "dataHubStepStateProperties"; // Authorization public static final String REST_API_AUTHORIZATION_ENABLED_ENV = "REST_API_AUTHORIZATION_ENABLED"; @@ -357,6 +365,5 @@ public class Constants { // DAO public static final long LATEST_VERSION = 0; - private Constants() { - } + private Constants() {} } diff --git a/li-utils/src/main/java/com/linkedin/util/Configuration.java b/li-utils/src/main/java/com/linkedin/util/Configuration.java index cf2085839aefa..e0a1f181b48aa 100644 --- a/li-utils/src/main/java/com/linkedin/util/Configuration.java +++ b/li-utils/src/main/java/com/linkedin/util/Configuration.java @@ -1,34 +1,34 @@ package com.linkedin.util; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.util.Optional; import java.util.Properties; +import javax.annotation.Nonnull; public class Configuration { - private Configuration() { - } + private Configuration() {} - @Nonnull - public static Properties loadProperties(@Nonnull String configFile) { - Properties configuration = new Properties(); - try (InputStream inputStream = Configuration.class.getClassLoader().getResourceAsStream(configFile)) { - configuration.load(inputStream); - } catch (IOException e) { - throw new RuntimeException("Can't read file: " + configFile); - } - return configuration; + @Nonnull + public static Properties loadProperties(@Nonnull String configFile) { + Properties configuration = new Properties(); + try (InputStream inputStream = + Configuration.class.getClassLoader().getResourceAsStream(configFile)) { + configuration.load(inputStream); + } catch (IOException e) { + throw new RuntimeException("Can't read file: " + configFile); } + return configuration; + } - @Nonnull - public static String getEnvironmentVariable(@Nonnull String envVar) { - return System.getenv(envVar); - } + @Nonnull + public static String getEnvironmentVariable(@Nonnull String envVar) { + return System.getenv(envVar); + } - @Nonnull - public static String getEnvironmentVariable(@Nonnull String envVar, @Nonnull String defaultVal) { - return Optional.ofNullable(System.getenv(envVar)).orElse(defaultVal); - } + @Nonnull + public static String getEnvironmentVariable(@Nonnull String envVar, @Nonnull String defaultVal) { + return Optional.ofNullable(System.getenv(envVar)).orElse(defaultVal); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java index ab90b3e054a3b..22d8065844a8c 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java @@ -4,35 +4,35 @@ import java.net.URISyntaxException; public class Uri { - private final String _uri; + private final String _uri; - public Uri(String url) { - if (url 
== null) { - throw new NullPointerException("URL must be non-null"); - } - _uri = url; + public Uri(String url) { + if (url == null) { + throw new NullPointerException("URL must be non-null"); } + _uri = url; + } - @Override - public String toString() { - return _uri; - } + @Override + public String toString() { + return _uri; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Uri)) { - return false; - } else { - return _uri.equals(((Uri) obj)._uri); - } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Uri)) { + return false; + } else { + return _uri.equals(((Uri) obj)._uri); } + } - @Override - public int hashCode() { - return _uri.hashCode(); - } + @Override + public int hashCode() { + return _uri.hashCode(); + } - public URI toURI() throws URISyntaxException { - return new URI(_uri); - } + public URI toURI() throws URISyntaxException { + return new URI(_uri); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java index a23d2b08752d1..6a30bb22a73a3 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java @@ -5,15 +5,16 @@ import com.linkedin.data.template.TemplateOutputCastException; public class UriCoercer implements DirectCoercer<Uri> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new UriCoercer(), Uri.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new UriCoercer(), Uri.class); - @Override - public Object coerceInput(Uri object) throws ClassCastException { - return object.toString(); - } + @Override + public Object coerceInput(Uri object) throws ClassCastException { + return object.toString(); + } - @Override - public Uri coerceOutput(Object object) throws TemplateOutputCastException { - return new Uri((String) object); - } + @Override + public Uri coerceOutput(Object object) throws TemplateOutputCastException { + return new Uri((String) object); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java b/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java index 3e1950160cca2..17abf09361e36 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java @@ -4,35 +4,35 @@ import java.net.URISyntaxException; public class Url { - private final String _url; + private final String _url; - public Url(String url) { - if (url == null) { - throw new NullPointerException("URL must be non-null"); - } - _url = url; + public Url(String url) { + if (url == null) { + throw new NullPointerException("URL must be non-null"); } + _url = url; + } - @Override - public String toString() { - return _url; - } + @Override + public String toString() { + return _url; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Url)) { - return false; - } else { - return _url.equals(((Url) obj)._url); - } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Url)) { + return false; + } else { + return _url.equals(((Url) obj)._url); } + } - @Override - public int hashCode() { - return _url.hashCode(); - } + @Override + public int hashCode() { + return _url.hashCode(); + } - public URI toURI() throws URISyntaxException { - return new URI(_url); - } + public URI toURI() throws URISyntaxException { + return new URI(_url); + } } diff --git 
a/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java index 9424fffdd2f68..3bae43ee0ca6a 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java @@ -5,15 +5,16 @@ import com.linkedin.data.template.TemplateOutputCastException; public class UrlCoercer implements DirectCoercer<Url> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new UrlCoercer(), Url.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new UrlCoercer(), Url.class); - @Override - public Object coerceInput(Url object) throws ClassCastException { - return object.toString(); - } + @Override + public Object coerceInput(Url object) throws ClassCastException { + return object.toString(); + } - @Override - public Url coerceOutput(Object object) throws TemplateOutputCastException { - return new Url((String) object); - } + @Override + public Url coerceOutput(Object object) throws TemplateOutputCastException { + return new Url((String) object); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java index feb7cacd7a48a..0110471c9cdfd 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class AzkabanFlowUrn extends Urn { public static final String ENTITY_TYPE = "azkabanFlow"; @@ -48,7 +47,9 @@ public static AzkabanFlowUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new AzkabanFlowUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new AzkabanFlowUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (String) key.getAs(2, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -62,18 +63,20 @@ public static AzkabanFlowUrn deserialize(String rawUrn) throws URISyntaxExceptio } static { - Custom.registerCoercer(new DirectCoercer<AzkabanFlowUrn>() { - public Object coerceInput(AzkabanFlowUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<AzkabanFlowUrn>() { + public Object coerceInput(AzkabanFlowUrn object) throws ClassCastException { + return object.toString(); + } - public AzkabanFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return AzkabanFlowUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, AzkabanFlowUrn.class); + public AzkabanFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return AzkabanFlowUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + AzkabanFlowUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java 
index 662c89b12139f..f264bccbc5056 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class AzkabanJobUrn extends Urn { public static final String ENTITY_TYPE = "azkabanJob"; @@ -42,7 +41,8 @@ public static AzkabanJobUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new AzkabanJobUrn((AzkabanFlowUrn) key.getAs(0, AzkabanFlowUrn.class), + return new AzkabanJobUrn( + (AzkabanFlowUrn) key.getAs(0, AzkabanFlowUrn.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -57,18 +57,20 @@ public static AzkabanJobUrn deserialize(String rawUrn) throws URISyntaxException static { Custom.initializeCustomClass(AzkabanFlowUrn.class); - Custom.registerCoercer(new DirectCoercer<AzkabanJobUrn>() { - public Object coerceInput(AzkabanJobUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<AzkabanJobUrn>() { + public Object coerceInput(AzkabanJobUrn object) throws ClassCastException { + return object.toString(); + } - public AzkabanJobUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return AzkabanJobUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, AzkabanJobUrn.class); + public AzkabanJobUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return AzkabanJobUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + AzkabanJobUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java index 4840a2bf7b1e8..8193bd05b527a 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class ChartUrn extends Urn { public static final String ENTITY_TYPE = "chart"; @@ -42,7 +41,8 @@ public static ChartUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new ChartUrn((String)key.getAs(0, String.class), (String)key.getAs(1, String.class)); + return new ChartUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -55,18 +55,20 @@ public static ChartUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<ChartUrn>() { - public Object coerceInput(ChartUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<ChartUrn>() { + public Object coerceInput(ChartUrn object) throws ClassCastException { + return object.toString(); + } - public ChartUrn 
coerceOutput(Object object) throws TemplateOutputCastException { - try { - return ChartUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, ChartUrn.class); + public ChartUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return ChartUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + ChartUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java index da33ed2a625f1..0ed5b3514e786 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class CorpGroupUrn extends Urn { public static final String ENTITY_TYPE = "corpGroup"; @@ -31,7 +30,7 @@ public static CorpGroupUrn createFromString(String rawUrn) throws URISyntaxExcep } private static CorpGroupUrn decodeUrn(String groupName) throws Exception { - return new CorpGroupUrn(TupleKey.create(new Object[]{groupName}), groupName); + return new CorpGroupUrn(TupleKey.create(new Object[] {groupName}), groupName); } public static CorpGroupUrn createFromUrn(Urn urn) throws URISyntaxException { @@ -45,9 +44,10 @@ public static CorpGroupUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return decodeUrn((String)key.getAs(0, String.class)); + return decodeUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -58,18 +58,20 @@ public static CorpGroupUrn deserialize(String rawUrn) throws URISyntaxException } static { - Custom.registerCoercer(new DirectCoercer<CorpGroupUrn>() { - public Object coerceInput(CorpGroupUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<CorpGroupUrn>() { + public Object coerceInput(CorpGroupUrn object) throws ClassCastException { + return object.toString(); + } - public CorpGroupUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return CorpGroupUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, CorpGroupUrn.class); + public CorpGroupUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return CorpGroupUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + CorpGroupUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java index da527254bbe2c..701e18a015753 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java @@ -1,12 +1,9 @@ package 
com.linkedin.common.urn; -import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import java.util.regex.Pattern; - public final class CorpuserUrn extends Urn { @@ -40,7 +37,8 @@ public static CorpuserUrn createFromUrn(Urn urn) throws URISyntaxException { try { return new CorpuserUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -51,19 +49,20 @@ public static CorpuserUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<CorpuserUrn>() { - public Object coerceInput(CorpuserUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<CorpuserUrn>() { + public Object coerceInput(CorpuserUrn object) throws ClassCastException { + return object.toString(); + } - public CorpuserUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return CorpuserUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, CorpuserUrn.class); + public CorpuserUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return CorpuserUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + CorpuserUrn.class); } - } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java index ed4b38fe2f2be..ceb06986989b5 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DashboardUrn extends Urn { public static final String ENTITY_TYPE = "dashboard"; @@ -42,7 +41,8 @@ public static DashboardUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DashboardUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new DashboardUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -55,18 +55,20 @@ public static DashboardUrn deserialize(String rawUrn) throws URISyntaxException } static { - Custom.registerCoercer(new DirectCoercer<DashboardUrn>() { - public Object coerceInput(DashboardUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DashboardUrn>() { + public Object coerceInput(DashboardUrn object) throws ClassCastException { + return object.toString(); + } - public DashboardUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DashboardUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new 
TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DashboardUrn.class); + public DashboardUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DashboardUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DashboardUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java index 40e6d796d1882..2df70eed13343 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DataFlowUrn extends Urn { public static final String ENTITY_TYPE = "dataFlow"; @@ -48,7 +47,9 @@ public static DataFlowUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataFlowUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new DataFlowUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (String) key.getAs(2, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -62,18 +63,20 @@ public static DataFlowUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<DataFlowUrn>() { - public Object coerceInput(DataFlowUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataFlowUrn>() { + public Object coerceInput(DataFlowUrn object) throws ClassCastException { + return object.toString(); + } - public DataFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataFlowUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataFlowUrn.class); + public DataFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataFlowUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataFlowUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java index 46579a40897a3..6d0f37d1796b8 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DataJobUrn extends Urn { public static final String ENTITY_TYPE = "dataJob"; @@ -42,8 +41,8 @@ public static DataJobUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataJobUrn((DataFlowUrn) key.getAs(0, DataFlowUrn.class), - (String) key.getAs(1, String.class)); + return new DataJobUrn( + (DataFlowUrn) key.getAs(0, DataFlowUrn.class), (String) key.getAs(1, 
String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -57,18 +56,20 @@ public static DataJobUrn deserialize(String rawUrn) throws URISyntaxException { static { Custom.initializeCustomClass(DataFlowUrn.class); - Custom.registerCoercer(new DirectCoercer<DataJobUrn>() { - public Object coerceInput(DataJobUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataJobUrn>() { + public Object coerceInput(DataJobUrn object) throws ClassCastException { + return object.toString(); + } - public DataJobUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataJobUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataJobUrn.class); + public DataJobUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataJobUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataJobUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java index 25d219ef2c39e..910e6b9c98e96 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java @@ -3,10 +3,8 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class DataPlatformUrn extends Urn { public static final String ENTITY_TYPE = "dataPlatform"; @@ -50,18 +48,20 @@ public static DataPlatformUrn deserialize(String rawUrn) throws URISyntaxExcepti } static { - Custom.registerCoercer(new DirectCoercer<DataPlatformUrn>() { - public Object coerceInput(DataPlatformUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataPlatformUrn>() { + public Object coerceInput(DataPlatformUrn object) throws ClassCastException { + return object.toString(); + } - public DataPlatformUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataPlatformUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataPlatformUrn.class); + public DataPlatformUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataPlatformUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataPlatformUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java index 2edfdae251b01..513ffa6d8cf44 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java @@ -4,12 +4,8 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import 
com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; -import static com.linkedin.common.urn.UrnUtils.toFabricType; - - public class DataProcessUrn extends Urn { public static final String ENTITY_TYPE = "dataProcess"; @@ -55,10 +51,13 @@ public static DataProcessUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataProcessUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new DataProcessUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -67,18 +66,20 @@ public static DataProcessUrn createFromUrn(Urn urn) throws URISyntaxException { static { Custom.initializeCustomClass(DataProcessUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer<DataProcessUrn>() { - public Object coerceInput(DataProcessUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DataProcessUrn>() { + public Object coerceInput(DataProcessUrn object) throws ClassCastException { + return object.toString(); + } - public DataProcessUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataProcessUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataProcessUrn.class); + public DataProcessUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataProcessUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataProcessUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java index 3d4b7d71566be..14cbfaf02fbae 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java @@ -5,53 +5,49 @@ import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Standardized dataset field information identifier - */ +/** Standardized dataset field information identifier */ public class DatasetFieldUrn extends Urn { // uniquely identifies urn's key type public static final String ENTITY_TYPE = "datasetField"; - /** - * Dataset urn of the datasetFieldUrn - */ + /** Dataset urn of the datasetFieldUrn */ private final DatasetUrn _dataset; - /** - * Field of datasetFieldUrn - */ + /** Field of datasetFieldUrn */ private final String _fieldPath; static { Custom.initializeCustomClass(DatasetUrn.class); - Custom.registerCoercer(new DirectCoercer<DatasetFieldUrn>() { + Custom.registerCoercer( + new DirectCoercer<DatasetFieldUrn>() { - @Override - public String coerceInput(DatasetFieldUrn object) throws ClassCastException { - return object.toString(); - } + @Override + public 
String coerceInput(DatasetFieldUrn object) throws ClassCastException { + return object.toString(); + } - @Override - public DatasetFieldUrn coerceOutput(Object object) throws TemplateOutputCastException { - if (object instanceof String) { - try { - return DatasetFieldUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + @Override + public DatasetFieldUrn coerceOutput(Object object) throws TemplateOutputCastException { + if (object instanceof String) { + try { + return DatasetFieldUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + throw new TemplateOutputCastException( + (("Output '" + object) + + ("' is not a String, and cannot be coerced to " + + DatasetFieldUrn.class.getName()))); } - } - throw new TemplateOutputCastException((("Output '" + object) + ("' is not a String, and cannot be coerced to " - + DatasetFieldUrn.class.getName()))); - } - }, DatasetFieldUrn.class); + }, + DatasetFieldUrn.class); } - public DatasetFieldUrn(String dataPlatform, String datasetName, FabricType fabricType, String fieldPath) { + public DatasetFieldUrn( + String dataPlatform, String datasetName, FabricType fabricType, String fieldPath) { this(new DatasetUrn(new DataPlatformUrn(dataPlatform), datasetName, fabricType), fieldPath); } @@ -86,9 +82,11 @@ public static DatasetFieldUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DatasetFieldUrn((DatasetUrn) key.getAs(0, DatasetUrn.class), (String) key.getAs(1, String.class)); + return new DatasetFieldUrn( + (DatasetUrn) key.getAs(0, DatasetUrn.class), (String) key.getAs(1, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java index 3be084d1daff9..5f18ce5f1abe7 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java @@ -6,7 +6,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DatasetUrn extends Urn { public static final String ENTITY_TYPE = "dataset"; @@ -49,10 +48,13 @@ public static DatasetUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DatasetUrn((DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), - (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); + return new DatasetUrn( + (DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), + (String) key.getAs(1, String.class), + (FabricType) key.getAs(2, FabricType.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -66,18 +68,20 @@ public static DatasetUrn deserialize(String rawUrn) throws URISyntaxException { 
Custom.initializeCustomClass(DataPlatformUrn.class); Custom.initializeCustomClass(DatasetUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer<DatasetUrn>() { - public Object coerceInput(DatasetUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<DatasetUrn>() { + public Object coerceInput(DatasetUrn object) throws ClassCastException { + return object.toString(); + } - public DatasetUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DatasetUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DatasetUrn.class); + public DatasetUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DatasetUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DatasetUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java index 597ae3386fec1..24fd7f26bf977 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class FabricUrn extends Urn { public static final String ENTITY_TYPE = "fabric"; @@ -45,18 +44,20 @@ public static FabricUrn createFromUrn(Urn urn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<FabricUrn>() { - public Object coerceInput(FabricUrn object) throws ClassCastException { - return object.toString(); - } - - public FabricUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return FabricUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, FabricUrn.class); + Custom.registerCoercer( + new DirectCoercer<FabricUrn>() { + public Object coerceInput(FabricUrn object) throws ClassCastException { + return object.toString(); + } + + public FabricUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return FabricUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + FabricUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java index 29ff1aa5fcdb3..7820eac21755d 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java @@ -3,66 +3,66 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class GlossaryNodeUrn extends Urn { - public static final String ENTITY_TYPE = "glossaryNode"; + public static final String ENTITY_TYPE = "glossaryNode"; - private final String _name; + private final String _name; - public GlossaryNodeUrn(String name) { - 
super(ENTITY_TYPE, TupleKey.create(name)); - this._name = name; - } + public GlossaryNodeUrn(String name) { + super(ENTITY_TYPE, TupleKey.create(name)); + this._name = name; + } - public String getNameEntity() { - return _name; - } + public String getNameEntity() { + return _name; + } - public static GlossaryNodeUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static GlossaryNodeUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static GlossaryNodeUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new URISyntaxException(urn.toString(), "Urn entity type should be 'glossaryNode'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 1) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys."); - } else { - try { - return new GlossaryNodeUrn((String) key.getAs(0, String.class)); - } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); - } - } + public static GlossaryNodeUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException(urn.toString(), "Urn entity type should be 'glossaryNode'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 1) { + throw new URISyntaxException(urn.toString(), "Invalid number of keys."); + } else { + try { + return new GlossaryNodeUrn((String) key.getAs(0, String.class)); + } catch (Exception var3) { + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } + } } + } - public static GlossaryNodeUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static GlossaryNodeUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer<GlossaryNodeUrn>() { - public Object coerceInput(GlossaryNodeUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer<GlossaryNodeUrn>() { + public Object coerceInput(GlossaryNodeUrn object) throws ClassCastException { + return object.toString(); + } - public GlossaryNodeUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return GlossaryNodeUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public GlossaryNodeUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return GlossaryNodeUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, GlossaryNodeUrn.class); - } - + } + }, + GlossaryNodeUrn.class); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java index bf8ec131d410e..f7e3496fbc582 100644 --- 
a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java @@ -1,14 +1,9 @@ package com.linkedin.common.urn; -import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - -import java.lang.reflect.Array; import java.net.URISyntaxException; -import java.util.regex.Pattern; - public final class GlossaryTermUrn extends Urn { @@ -42,7 +37,8 @@ public static GlossaryTermUrn createFromUrn(Urn urn) throws URISyntaxException { try { return new GlossaryTermUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -53,19 +49,20 @@ public static GlossaryTermUrn deserialize(String rawUrn) throws URISyntaxExcepti } static { - Custom.registerCoercer(new DirectCoercer<GlossaryTermUrn>() { - public Object coerceInput(GlossaryTermUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<GlossaryTermUrn>() { + public Object coerceInput(GlossaryTermUrn object) throws ClassCastException { + return object.toString(); + } - public GlossaryTermUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return GlossaryTermUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, GlossaryTermUrn.class); + public GlossaryTermUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return GlossaryTermUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + GlossaryTermUrn.class); } - } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java index 8774ba36d07b2..5c05b74cb0038 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class MLFeatureUrn extends Urn { public static final String ENTITY_TYPE = "mlFeature"; @@ -43,7 +42,8 @@ public static MLFeatureUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new MLFeatureUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new MLFeatureUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -52,18 +52,20 @@ public static MLFeatureUrn createFromUrn(Urn urn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<MLFeatureUrn>() { - public Object coerceInput(MLFeatureUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<MLFeatureUrn>() { + public Object coerceInput(MLFeatureUrn object) throws 
ClassCastException { + return object.toString(); + } - public MLFeatureUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return MLFeatureUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, MLFeatureUrn.class); + public MLFeatureUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return MLFeatureUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + MLFeatureUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java index ded7f90dcc112..85680f5a3922f 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java @@ -1,15 +1,11 @@ package com.linkedin.common.urn; +import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import com.linkedin.common.FabricType; - -import static com.linkedin.common.urn.UrnUtils.toFabricType; - - public final class MLModelUrn extends Urn { public static final String ENTITY_TYPE = "mlModel"; @@ -52,8 +48,10 @@ public static MLModelUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new MLModelUrn((DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), - (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); + return new MLModelUrn( + (DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), + (String) key.getAs(1, String.class), + (FabricType) key.getAs(2, FabricType.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -68,18 +66,20 @@ public static MLModelUrn deserialize(String rawUrn) throws URISyntaxException { static { Custom.initializeCustomClass(DataPlatformUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer<MLModelUrn>() { - public Object coerceInput(MLModelUrn object) throws ClassCastException { - return object.toString(); - } - - public MLModelUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return MLModelUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, MLModelUrn.class); + Custom.registerCoercer( + new DirectCoercer<MLModelUrn>() { + public Object coerceInput(MLModelUrn object) throws ClassCastException { + return object.toString(); + } + + public MLModelUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return MLModelUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + MLModelUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java index c9d6c203d2ed8..00a0660bbf49d 100644 --- 
a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public class NotebookUrn extends Urn { public static final String ENTITY_TYPE = "notebook"; @@ -41,7 +40,8 @@ public static NotebookUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new NotebookUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new NotebookUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -54,18 +54,20 @@ public static NotebookUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer<NotebookUrn>() { - public Object coerceInput(NotebookUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer<NotebookUrn>() { + public Object coerceInput(NotebookUrn object) throws ClassCastException { + return object.toString(); + } - public NotebookUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return NotebookUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, NotebookUrn.class); + public NotebookUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return NotebookUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + NotebookUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java index 1375cf345b084..60cf2d4e16819 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java @@ -3,65 +3,67 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class TagUrn extends Urn { - public static final String ENTITY_TYPE = "tag"; + public static final String ENTITY_TYPE = "tag"; - private final String _name; + private final String _name; - public TagUrn(String name) { - super(ENTITY_TYPE, TupleKey.create(name)); - this._name = name; - } + public TagUrn(String name) { + super(ENTITY_TYPE, TupleKey.create(name)); + this._name = name; + } - public String getName() { - return _name; - } + public String getName() { + return _name; + } - public static TagUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static TagUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static TagUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new 
URISyntaxException(urn.toString(), "Urn entity type should be '" + urn.getEntityType() + "'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 1) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys: found " + key.size() + " expected 1."); - } else { - try { - return new TagUrn((String) key.getAs(0, String.class)); - } catch (Exception e) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); - } - } + public static TagUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException( + urn.toString(), "Urn entity type should be '" + urn.getEntityType() + "'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 1) { + throw new URISyntaxException( + urn.toString(), "Invalid number of keys: found " + key.size() + " expected 1."); + } else { + try { + return new TagUrn((String) key.getAs(0, String.class)); + } catch (Exception e) { + throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } + } } + } - public static TagUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static TagUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer<TagUrn>() { - public Object coerceInput(TagUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer<TagUrn>() { + public Object coerceInput(TagUrn object) throws ClassCastException { + return object.toString(); + } - public TagUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return TagUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public TagUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return TagUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, TagUrn.class); - } + } + }, + TagUrn.class); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java index 5b348b7d9b1a9..ecdd4f754c4ea 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java @@ -3,68 +3,69 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class TestEntityUrn extends Urn { - public static final String ENTITY_TYPE = "testEntity"; + public static final String ENTITY_TYPE = "testEntity"; - private final String _keyPart1; - private final String _keyPart2; - private final String _keyPart3; + private final String _keyPart1; + private final String _keyPart2; + private final String _keyPart3; - public TestEntityUrn(String keyPart1, String keyPart2, String keyPart3) { - super(ENTITY_TYPE, TupleKey.create(keyPart1, keyPart2, keyPart3)); - this._keyPart1 = keyPart1; - this._keyPart2 = keyPart2; - 
this._keyPart3 = keyPart3; - } + public TestEntityUrn(String keyPart1, String keyPart2, String keyPart3) { + super(ENTITY_TYPE, TupleKey.create(keyPart1, keyPart2, keyPart3)); + this._keyPart1 = keyPart1; + this._keyPart2 = keyPart2; + this._keyPart3 = keyPart3; + } - public static TestEntityUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static TestEntityUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static TestEntityUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new URISyntaxException(urn.toString(), "Urn entity type should be '" + ENTITY_TYPE + " got " + urn.getEntityType() + "'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 3) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys: found " + key.size() + " expected 3."); - } else { - try { - return new TestEntityUrn( - key.getAs(0, String.class), - key.getAs(1, String.class), - key.getAs(2, String.class)); - } catch (Exception e) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); - } - } + public static TestEntityUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException( + urn.toString(), + "Urn entity type should be '" + ENTITY_TYPE + " got " + urn.getEntityType() + "'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 3) { + throw new URISyntaxException( + urn.toString(), "Invalid number of keys: found " + key.size() + " expected 3."); + } else { + try { + return new TestEntityUrn( + key.getAs(0, String.class), key.getAs(1, String.class), key.getAs(2, String.class)); + } catch (Exception e) { + throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } + } } + } - public static TestEntityUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static TestEntityUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer<TestEntityUrn>() { - public Object coerceInput(TestEntityUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer<TestEntityUrn>() { + public Object coerceInput(TestEntityUrn object) throws ClassCastException { + return object.toString(); + } - public TestEntityUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return TestEntityUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public TestEntityUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return TestEntityUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, TestEntityUrn.class); - } + } + }, + TestEntityUrn.class); + } } diff --git 
a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java index c26e0d2571b33..f847252e28836 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java @@ -8,11 +8,10 @@ import java.util.Collections; import java.util.List; - /** - * Represents the entity key portion of a Urn, encoded as a tuple of Strings. - * A single-element tuple is encoded simply as the value of that element. A tuple with multiple - * elements is encoded as a parenthesized list of strings, comma-delimited. + * Represents the entity key portion of a Urn, encoded as a tuple of Strings. A single-element tuple + * is encoded simply as the value of that element. A tuple with multiple elements is encoded as a + * parenthesized list of strings, comma-delimited. */ public class TupleKey { public static final char START_TUPLE = '('; @@ -31,27 +30,26 @@ public TupleKey(List<String> tuple) { /** * Constructs a {@code TupleKey} given a list of tuple parts. - * <p> - * When {@code calledFromExternal} is {@code false}, it means the constructor - * was called from within this class, where we can ensure our implementation - * satisfies some constraints and skip some work. - * <p> - * The work we skip is checking that no tuple parts are null and wrapping the - * list with an unmodifiable view. - * <p> - * For context, an earlier performance optimization introduced from Guava the - * {@code ImmutableList}, which gives both of that for free. Since then, we - * have encountered complications with Guava (specifically, Hadoop at the time - * of this writing requires using Guava 11 -- see LIHADOOP-44200). In order to - * resolve that with minimal effect, we copy this behavior here. - * <p> - * Whether this optimization is meaningful can be examined later, if time is - * permitting, or {@code List#copyOf} from JDK 10 can be used to recover the - * benefits more elegantly when it is available for us to use. + * + * <p>When {@code calledFromExternal} is {@code false}, it means the constructor was called from + * within this class, where we can ensure our implementation satisfies some constraints and skip + * some work. + * + * <p>The work we skip is checking that no tuple parts are null and wrapping the list with an + * unmodifiable view. + * + * <p>For context, an earlier performance optimization introduced from Guava the {@code + * ImmutableList}, which gives both of those for free. Since then, we have encountered + * complications with Guava (specifically, Hadoop at the time of this writing requires using Guava + * 11 -- see LIHADOOP-44200). In order to resolve that with minimal effect, we copy this behavior + * here. + * + * <p>Whether this optimization is meaningful can be examined later, if time is permitting, or + * {@code List#copyOf} from JDK 10 can be used to recover the benefits more elegantly when it is + * available for us to use. * * @param tuple tuple parts - * @param calledFromExternal whether the constructions is invoked from outside - of this class + * @param calledFromExternal whether the construction is invoked from outside of this class */ private TupleKey(List<String> tuple, boolean calledFromExternal) { _tuple = calledFromExternal ? Collections.unmodifiableList(checkStringsNotNull(tuple)) : tuple; @@ -74,9 +72,8 @@ public static TupleKey createWithOneKeyPart(String input) { } /** - * Create a tuple key from a sequence of Objects. 
The resulting tuple - * consists of the sequence of String values resulting from calling .toString() on each - * object in the input sequence + * Create a tuple key from a sequence of Objects. The resulting tuple consists of the sequence of + * String values resulting from calling .toString() on each object in the input sequence * * @param tuple - a sequence of Objects to be represented in the tuple * @return - a TupleKey representation of the object sequence @@ -99,9 +96,8 @@ public static TupleKey create(Object... tuple) { } /** - * Create a tuple key from a sequence of Objects. The resulting tuple - * consists of the sequence of String values resulting from calling .toString() on each - * object in the input sequence + * Create a tuple key from a sequence of Objects. The resulting tuple consists of the sequence of + * String values resulting from calling .toString() on each object in the input sequence * * @param tuple - a sequence of Objects to be represented in the tuple * @return - a TupleKey representation of the object sequence @@ -130,7 +126,8 @@ public String get(int index) { * Return a tuple element coerced to a specific type * * @param index - the index of the tuple element to be returned - * @param clazz - the Class object for the return type. Must be String, Short, Boolean, Integer, Long, or an Enum subclass + * @param clazz - the Class object for the return type. Must be String, Short, Boolean, Integer, + * Long, or an Enum subclass * @param <T> - the desired type for the returned object. * @return The specified element of the tuple, coerced to the specified type T. */ @@ -166,9 +163,7 @@ public <T> T getAs(int index, Class<T> clazz) { return rv; } - /** - * Helper method to capture E. - */ + /** Helper method to capture E. */ private <E extends Enum<E>> Enum<E> getEnumValue(Class<?> clazz, String value) { @SuppressWarnings("unchecked") final Class<E> enumClazz = (Class<E>) clazz.asSubclass(Enum.class); @@ -228,6 +223,7 @@ public static TupleKey fromString(String s) throws URISyntaxException { /** * Create a tuple key from a string starting at the given index. + * * @param s raw urn string or urn type specific string. * @param startIndex index where urn type specific string starts. * @return entity tuple key. @@ -237,7 +233,8 @@ public static TupleKey fromString(String s, int startIndex) throws URISyntaxExce return new TupleKey(parseKeyParts(s, startIndex), false); } - private static List<String> parseKeyParts(String input, int startIndex) throws URISyntaxException { + private static List<String> parseKeyParts(String input, int startIndex) + throws URISyntaxException { if (startIndex >= input.length()) { return Collections.emptyList(); } @@ -270,7 +267,7 @@ private static List<String> parseKeyParts(String input, int startIndex) throws U List<String> parts = new ArrayList<>(3); int numStartedParenPairs = 1; // We know we have at least one starting paren - int partStart = startIndex + 1; // +1 to skip opening paren + int partStart = startIndex + 1; // +1 to skip opening paren for (int i = startIndex + 1; i < input.length(); i++) { char c = input.charAt(i); if (c == START_TUPLE) { @@ -302,7 +299,8 @@ private static List<String> parseKeyParts(String input, int startIndex) throws U throw new URISyntaxException(input, "mismatched paren nesting"); } - int lastPartEnd = input.charAt(input.length() - 1) == END_TUPLE ? input.length() - 1 : input.length(); + int lastPartEnd = + input.charAt(input.length() - 1) == END_TUPLE ? 
input.length() - 1 : input.length(); if (lastPartEnd - partStart <= 0) { throw new URISyntaxException(input, "empty part disallowed"); @@ -347,4 +345,4 @@ private static List<String> checkStringsNotNull(List<String> list) { } return list; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java index 84231fdf3be4a..e7ae51b57671f 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java @@ -8,34 +8,30 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nullable; - /** - * Represents a URN (Uniform Resource Name) for a Linkedin entity, in the spirit of RFC 2141. - * Our default URN format uses the non-standard namespace identifier "li", and hence default URNs - * begin with "urn:li:". Note that the namespace according to - * <a href="https://www.ietf.org/rfc/rfc2141.txt">RFC 2141</a> [Section 2.1] is case-insensitive and + * Represents a URN (Uniform Resource Name) for a Linkedin entity, in the spirit of RFC 2141. Our + * default URN format uses the non-standard namespace identifier "li", and hence default URNs begin + * with "urn:li:". Note that the namespace according to <a + * href="https://www.ietf.org/rfc/rfc2141.txt">RFC 2141</a> [Section 2.1] is case-insensitive and * for safety we only allow lower-case letters in our implementation. * - * <p>Our URNs all consist of an "entity type", which denotes an internal namespace for the resource, - * as well as an entity key, formatted as a tuple of parts. The full format of a URN is: + * <p>Our URNs all consist of an "entity type", which denotes an internal namespace for the + * resource, as well as an entity key, formatted as a tuple of parts. The full format of a URN is: * * <p><URN> ::= urn:<namespace>:<entityType>:<entityKey> * - * <p>The entity key is represented as a tuple of strings. If the tuple is of length 1, the - * key is encoded directly. If the tuple has multiple parts, the parts are enclosed in - * parenthesizes and comma-delimited, e.g., a URN whose key is the tuple [1, 2, 3] would be - * encoded as: + * <p>The entity key is represented as a tuple of strings. If the tuple is of length 1, the key is + * encoded directly. If the tuple has multiple parts, the parts are enclosed in parentheses and + * comma-delimited, e.g., a URN whose key is the tuple [1, 2, 3] would be encoded as: + * + * <p>urn:li:example:(1,2,3) */ public class Urn { /** - * - * @deprecated Don't create the Urn string manually, use Typed Urns or {@link #create(String entityType, Object... - * tupleParts)} + * @deprecated Don't create the Urn string manually, use Typed Urns or {@link #create(String + * entityType, Object... tupleParts)} */ - @Deprecated - public static final String URN_PREFIX = "urn:li:"; + @Deprecated public static final String URN_PREFIX = "urn:li:"; private static final String URN_START = "urn:"; private static final String DEFAULT_NAMESPACE = "li"; @@ -46,29 +42,28 @@ public class Urn { // Used to speed up toString() in the common case where the Urn is built up // from parsing an input string. - @Nullable - private String _cachedStringUrn; + @Nullable private String _cachedStringUrn; static { Custom.registerCoercer(new UrnCoercer(), Urn.class); } /** - * Customized interner for all strings that may be used for _entityType. 
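// A short sketch of the encoding rules described in the Urn class Javadoc in
// this diff, using only the factory methods the diff shows
// (createFromTuple/createFromString/getEntityType); the "example" entity type
// and key values are illustrative.
import com.linkedin.common.urn.Urn;

class UrnEncodingSketch {
  public static void main(String[] args) throws Exception {
    // Multi-part keys are parenthesized and comma-delimited.
    Urn multi = Urn.createFromTuple("example", 1, 2, 3);
    assert multi.toString().equals("urn:li:example:(1,2,3)");

    // Single-part keys are encoded bare, with no parentheses.
    Urn single = Urn.createFromTuple("example", "only");
    assert single.toString().equals("urn:li:example:only");

    // Parsing validates the "urn:" prefix, the namespace, and the entity type.
    Urn parsed = Urn.createFromString("urn:li:example:(1,2,3)");
    assert parsed.getEntityType().equals("example");
  }
}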
- * Urn._entityType is by nature a pretty small set of values, such as "member", - * "company" etc. Due to this fact, when an app creates and keeps in memory a - * large number of Urn's, it may end up with a very big number of identical strings. - * Thus it's worth saving memory by interning _entityType when an Urn is instantiated. - * String.intern() would be a natural choice, but it takes a few microseconds, and - * thus may become too expensive when many (temporary) Urns are generated in very - * quick succession. Thus we use a faster CHM below. Compared to the internal table - * used by String.intern() it has a bigger memory overhead per each interned string, - * but for a small set of canonical strings it doesn't matter. + * Customized interner for all strings that may be used for _entityType. Urn._entityType is by + * nature a pretty small set of values, such as "member", "company" etc. Due to this fact, when an + * app creates and keeps in memory a large number of Urn's, it may end up with a very big number + * of identical strings. Thus it's worth saving memory by interning _entityType when an Urn is + * instantiated. String.intern() would be a natural choice, but it takes a few microseconds, and + * thus may become too expensive when many (temporary) Urns are generated in very quick + * succession. Thus we use a faster CHM below. Compared to the internal table used by + * String.intern() it has a bigger memory overhead per each interned string, but for a small set + * of canonical strings it doesn't matter. */ private static final Map<String, String> ENTITY_TYPE_INTERNER = new ConcurrentHashMap<>(); /** * Create a Urn given its raw String representation. + * * @param rawUrn - the String representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -77,10 +72,7 @@ public Urn(String rawUrn) throws URISyntaxException { _cachedStringUrn = rawUrn; if (!rawUrn.startsWith(URN_START)) { - throw new URISyntaxException( - rawUrn, - "Urn doesn't start with 'urn:'. Urn: " + rawUrn, - 0); + throw new URISyntaxException(rawUrn, "Urn doesn't start with 'urn:'. Urn: " + rawUrn, 0); } int secondColonIndex = rawUrn.indexOf(':', URN_START.length() + 1); @@ -89,9 +81,7 @@ public Urn(String rawUrn) throws URISyntaxException { // First char of entityType must be [a-z] if (!charIsLowerCaseAlphabet(rawUrn, secondColonIndex + 1)) { throw new URISyntaxException( - rawUrn, - "First char of entityType must be [a-z]! Urn: " + rawUrn, - secondColonIndex + 1); + rawUrn, "First char of entityType must be [a-z]! Urn: " + rawUrn, secondColonIndex + 1); } int thirdColonIndex = rawUrn.indexOf(':', secondColonIndex + 2); @@ -101,8 +91,7 @@ public Urn(String rawUrn) throws URISyntaxException { _entityType = rawUrn.substring(secondColonIndex + 1); if (!charsAreWordClass(_entityType)) { throw new URISyntaxException( - rawUrn, - "entityType must have only [a-zA-Z0-9] chars. Urn: " + rawUrn); + rawUrn, "entityType must have only [a-zA-Z0-9] chars. Urn: " + rawUrn); } _entityKey = new TupleKey(); return; @@ -111,15 +100,13 @@ public Urn(String rawUrn) throws URISyntaxException { String entityType = rawUrn.substring(secondColonIndex + 1, thirdColonIndex); if (!charsAreWordClass(entityType)) { throw new URISyntaxException( - rawUrn, - "entityType must have only [a-zA-Z_0-9] chars. Urn: " + rawUrn); + rawUrn, "entityType must have only [a-zA-Z_0-9] chars. 
Urn: " + rawUrn); } int numEntityKeyChars = rawUrn.length() - (thirdColonIndex + 1); if (numEntityKeyChars <= 0) { throw new URISyntaxException( - rawUrn, - "Urns with empty entityKey are not allowed. Urn: " + rawUrn); + rawUrn, "Urns with empty entityKey are not allowed. Urn: " + rawUrn); } _entityType = internEntityType(entityType); @@ -135,8 +122,8 @@ public Urn(String rawUrn) throws URISyntaxException { } /** - * Create a Urn from an entity type and an encoded String key. The key is converted to a - * Tuple by parsing using @see TupleKey#fromString + * Create a Urn from an entity type and an encoded String key. The key is converted to a Tuple by + * parsing using @see TupleKey#fromString * * @param entityType - the entity type for the Urn * @param typeSpecificString - the encoded string representation of a TupleKey @@ -158,9 +145,8 @@ public Urn(String namespace, String entityType, TupleKey entityKey) { } /** - * DEPRECATED - use {@link #createFromTuple(String, Object...)} - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * DEPRECATED - use {@link #createFromTuple(String, Object...)} Create a Urn from an entity type + * and a sequence of key parts. The key parts are converted to a tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -172,9 +158,9 @@ public static Urn create(String entityType, Object... tupleParts) { } /** - * DEPRECATED - use {@link #createFromTuple(String, java.util.Collection)} - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * DEPRECATED - use {@link #createFromTuple(String, java.util.Collection)} Create a Urn from an + * entity type and a sequence of key parts. The key parts are converted to a tuple using @see + * TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -186,8 +172,8 @@ public static Urn create(String entityType, Collection<?> tupleParts) { } /** - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an entity type and a sequence of key parts. The key parts are converted to a + * tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -198,21 +184,22 @@ public static Urn createFromTuple(String entityType, Object... tupleParts) { } /** - * Create a Urn from an namespace, entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from a namespace, entity type and a sequence of key parts. The key parts are + * converted to a tuple using @see TupleKey#create * * @param namespace - The namespace of this urn. * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn * @return - a new Urn object */ - public static Urn createFromTupleWithNamespace(String namespace, String entityType, Object... tupleParts) { + public static Urn createFromTupleWithNamespace( + String namespace, String entityType, Object... 
tupleParts) { return new Urn(namespace, entityType, TupleKey.create(tupleParts)); } /** - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an entity type and a sequence of key parts. The key parts are converted to a + * tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -224,6 +211,7 @@ public static Urn createFromTuple(String entityType, Collection<?> tupleParts) { /** * Create a Urn given its raw String representation. + * * @param rawUrn - the String representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -233,6 +221,7 @@ public static Urn createFromString(String rawUrn) throws URISyntaxException { /** * Create a Urn given its raw CharSequence representation. + * * @param rawUrn - the Char Sequence representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -242,8 +231,8 @@ public static Urn createFromCharSequence(CharSequence rawUrn) throws URISyntaxEx } /** - * Create a Urn from an entity type and an encoded String key. The key is converted to a - * Tuple by parsing using @see TupleKey#fromString + * Create a Urn from an entity type and an encoded String key. The key is converted to a Tuple by + * parsing using @see TupleKey#fromString * * @param entityType - the entity type for the Urn * @param typeSpecificString - the encoded string representation of a TupleKey @@ -298,8 +287,8 @@ public Urn getIdAsUrn() { } /** - * Return the namespace-specific string portion of this URN, i.e., - * everything following the "urn:<namespace>:" prefix. + * Return the namespace-specific string portion of this URN, i.e., everything following the + * "urn:<namespace>:" prefix. * * @return The namespace-specific string portion of this URN */ @@ -344,28 +333,21 @@ public int hashCode() { return result; } - private static String validateAndExtractNamespace(String rawUrn, - int secondColonIndex) + private static String validateAndExtractNamespace(String rawUrn, int secondColonIndex) throws URISyntaxException { if (!charIsLowerCaseAlphabet(rawUrn, URN_START.length())) { throw new URISyntaxException( - rawUrn, - "First char of Urn namespace must be [a-z]! Urn: " + rawUrn, - URN_START.length()); + rawUrn, "First char of Urn namespace must be [a-z]! Urn: " + rawUrn, URN_START.length()); } if (secondColonIndex == -1) { - throw new URISyntaxException( - rawUrn, - "Missing second ':' char. Urn: " + rawUrn); + throw new URISyntaxException(rawUrn, "Missing second ':' char. Urn: " + rawUrn); } int namespaceLen = secondColonIndex - URN_START.length(); if (namespaceLen > 32) { throw new URISyntaxException( - rawUrn, - "Namespace length > 32 chars. Urn: " + rawUrn, - secondColonIndex); + rawUrn, "Namespace length > 32 chars. Urn: " + rawUrn, secondColonIndex); } if (namespaceLen == 2 @@ -377,9 +359,7 @@ private static String validateAndExtractNamespace(String rawUrn, String namespace = rawUrn.substring(URN_START.length(), secondColonIndex); if (!charsAreValidNamespace(namespace)) { - throw new URISyntaxException( - rawUrn, - "Chars in namespace must be [a-z0-9-]!. Urn: " + rawUrn); + throw new URISyntaxException(rawUrn, "Chars in namespace must be [a-z0-9-]!. 
Urn: " + rawUrn); } return namespace; } @@ -414,17 +394,17 @@ private static boolean charsAreWordClass(String input) { char c = input.charAt(index); // Not using Character.isLowerCase etc on purpose because that is // unicode-aware and we only need ASCII. Handling only ASCII is faster. - if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') - || (c >= '0' && c <= '9') || c == '_')) { + if (!((c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z') + || (c >= '0' && c <= '9') + || c == '_')) { return false; } } return true; } - /** - * Intern a string to be assigned to the _entityType field. - */ + /** Intern a string to be assigned to the _entityType field. */ private static String internEntityType(String et) { // Most of the times this method is called, the canonical string is already // in the table, so let's do a quick get() first. @@ -436,4 +416,4 @@ private static String internEntityType(String et) { canonicalET = ENTITY_TYPE_INTERNER.putIfAbsent(et, et); return canonicalET != null ? canonicalET : et; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java index a1bd54a995d65..e04796690db77 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java @@ -10,22 +10,21 @@ import java.net.URISyntaxException; public class UrnCoercer implements DirectCoercer<Urn> { - public UrnCoercer() { - } + public UrnCoercer() {} - public Object coerceInput(Urn object) throws ClassCastException { - return object.toString(); - } + public Object coerceInput(Urn object) throws ClassCastException { + return object.toString(); + } - public Urn coerceOutput(Object object) throws TemplateOutputCastException { - if (object.getClass() != String.class) { - throw new TemplateOutputCastException("Urn not backed by String"); - } else { - try { - return Urn.createFromString((String)object); - } catch (URISyntaxException use) { - throw new TemplateOutputCastException("Invalid URN syntax: " + use.getMessage(), use); - } - } + public Urn coerceOutput(Object object) throws TemplateOutputCastException { + if (object.getClass() != String.class) { + throw new TemplateOutputCastException("Urn not backed by String"); + } else { + try { + return Urn.createFromString((String) object); + } catch (URISyntaxException use) { + throw new TemplateOutputCastException("Invalid URN syntax: " + use.getMessage(), use); + } } + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java index b68e429a5202c..25cb5475d7299 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java @@ -2,75 +2,75 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.FabricType; - import java.net.URISyntaxException; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class UrnUtils { - private static final CorpuserUrn UNKNOWN_ACTOR_URN = new CorpuserUrn("unknown"); + private static final CorpuserUrn UNKNOWN_ACTOR_URN = new CorpuserUrn("unknown"); - private UrnUtils() { - } + private UrnUtils() {} - /** - * Convert platform + dataset + origin into DatasetUrn - * @param platformName String, e.g. hdfs, oracle - * @param datasetName String, e.g. 
/jobs/xxx, ABOOK.ADDRESS - * @param origin PROD, CORP, EI, DEV - * @return DatasetUrn - */ - @Nonnull - public static DatasetUrn toDatasetUrn(@Nonnull String platformName, @Nonnull String datasetName, - @Nonnull String origin) { - return new DatasetUrn(new DataPlatformUrn(platformName), datasetName, toFabricType(origin)); - } + /** + * Convert platform + dataset + origin into DatasetUrn + * + * @param platformName String, e.g. hdfs, oracle + * @param datasetName String, e.g. /jobs/xxx, ABOOK.ADDRESS + * @param origin PROD, CORP, EI, DEV + * @return DatasetUrn + */ + @Nonnull + public static DatasetUrn toDatasetUrn( + @Nonnull String platformName, @Nonnull String datasetName, @Nonnull String origin) { + return new DatasetUrn(new DataPlatformUrn(platformName), datasetName, toFabricType(origin)); + } - /** - * Convert fabric String to FabricType - * @param fabric PROD, CORP, EI, DEV, LIT, PRIME - * @return FabricType - */ - @Nonnull - public static FabricType toFabricType(@Nonnull String fabric) { - switch (fabric.toUpperCase()) { - case "PROD": - return FabricType.PROD; - case "CORP": - return FabricType.CORP; - case "EI": - return FabricType.EI; - case "DEV": - return FabricType.DEV; - default: - throw new IllegalArgumentException("Unsupported Fabric Type: " + fabric); - } + /** + * Convert fabric String to FabricType + * + * @param fabric PROD, CORP, EI, DEV, LIT, PRIME + * @return FabricType + */ + @Nonnull + public static FabricType toFabricType(@Nonnull String fabric) { + switch (fabric.toUpperCase()) { + case "PROD": + return FabricType.PROD; + case "CORP": + return FabricType.CORP; + case "EI": + return FabricType.EI; + case "DEV": + return FabricType.DEV; + default: + throw new IllegalArgumentException("Unsupported Fabric Type: " + fabric); } + } - public static Urn getUrn(String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve entity with urn %s, invalid urn", urnStr)); - } + public static Urn getUrn(String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve entity with urn %s, invalid urn", urnStr)); } + } - /** - * Get audit stamp without time. If actor is null, set as Unknown Application URN. - * @param actor Urn - * @return AuditStamp - */ - @Nonnull - public static AuditStamp getAuditStamp(@Nullable Urn actor) { - return new AuditStamp().setActor(getActorOrDefault(actor)); - } + /** + * Get audit stamp without time. If actor is null, set as Unknown Application URN. + * + * @param actor Urn + * @return AuditStamp + */ + @Nonnull + public static AuditStamp getAuditStamp(@Nullable Urn actor) { + return new AuditStamp().setActor(getActorOrDefault(actor)); + } - /** - * Return actor URN, if input actor is null, return Unknown Application URN. - */ - @Nonnull - public static Urn getActorOrDefault(@Nullable Urn actor) { - return actor != null ? actor : UNKNOWN_ACTOR_URN; - } + /** Return actor URN, if input actor is null, return Unknown Application URN. */ + @Nonnull + public static Urn getActorOrDefault(@Nullable Urn actor) { + return actor != null ? 
actor : UNKNOWN_ACTOR_URN; + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java index fb3d79964f71d..24026f0287b22 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java @@ -7,12 +7,11 @@ import com.linkedin.data.schema.validator.ValidatorContext; import java.net.URISyntaxException; - /** * Rest.li Validator responsible for ensuring that {@link Urn} objects are well-formed. * - * Note that this validator does not validate the integrity of strongly typed urns, - * or validate Urn objects against their associated key aspect. + * <p>Note that this validator does not validate the integrity of strongly typed urns, or validate + * Urn objects against their associated key aspect. */ public class UrnValidator implements Validator { @Override @@ -22,9 +21,13 @@ public void validate(ValidatorContext context) { try { Urn.createFromString((String) context.dataElement().getValue()); } catch (URISyntaxException e) { - context.addResult(new Message(context.dataElement().path(), "\"Provided urn %s\" is invalid", context.dataElement().getValue())); + context.addResult( + new Message( + context.dataElement().path(), + "\"Provided urn %s\" is invalid", + context.dataElement().getValue())); context.setHasFix(false); } } } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java index d5b7a7da456a9..2742d13fb4dba 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java @@ -2,54 +2,53 @@ public class VersionedUrn { - private final String _urn; - private final String _versionStamp; - - public VersionedUrn(String urn, String versionStamp) { - _urn = urn; - _versionStamp = versionStamp; - } - - public String getUrn() { - return _urn; - } - - public String getVersionStamp() { - return _versionStamp; - } - - @SuppressWarnings("unchecked") - @Override - public boolean equals(Object obj) { - if (obj instanceof VersionedUrn) { - VersionedUrn other = (VersionedUrn) obj; - return equals(_urn, other._urn) && equals(_versionStamp, other._versionStamp); - } - return false; - } - - @Override - public int hashCode() { - int h1 = _urn != null ? _urn.hashCode() : 0; - int h2 = _versionStamp != null ? 
_versionStamp.hashCode() : 0; - return 31 * h1 + h2; - } - - @Override - public String toString() { - return "(" + _urn + " , " + _versionStamp + ")"; - } - - private static boolean equals(Object o1, Object o2) { - if (o1 != null) { - return o1.equals(o2); - } - return o2 == null; - } - - /*convenient method*/ - public static VersionedUrn of(String urn, String versionStamp) { - return new VersionedUrn(urn, versionStamp); - } - + private final String _urn; + private final String _versionStamp; + + public VersionedUrn(String urn, String versionStamp) { + _urn = urn; + _versionStamp = versionStamp; + } + + public String getUrn() { + return _urn; + } + + public String getVersionStamp() { + return _versionStamp; + } + + @SuppressWarnings("unchecked") + @Override + public boolean equals(Object obj) { + if (obj instanceof VersionedUrn) { + VersionedUrn other = (VersionedUrn) obj; + return equals(_urn, other._urn) && equals(_versionStamp, other._versionStamp); + } + return false; + } + + @Override + public int hashCode() { + int h1 = _urn != null ? _urn.hashCode() : 0; + int h2 = _versionStamp != null ? _versionStamp.hashCode() : 0; + return 31 * h1 + h2; + } + + @Override + public String toString() { + return "(" + _urn + " , " + _versionStamp + ")"; + } + + private static boolean equals(Object o1, Object o2) { + if (o1 != null) { + return o1.equals(o2); + } + return o2 == null; + } + + /*convenient method*/ + public static VersionedUrn of(String urn, String versionStamp) { + return new VersionedUrn(urn, versionStamp); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java index f7e0b6c99e334..2bae15bd19354 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java @@ -5,11 +5,9 @@ import java.util.SortedMap; import org.apache.commons.lang3.StringUtils; - public class VersionedUrnUtils { - private VersionedUrnUtils() { - } + private VersionedUrnUtils() {} public static Map<String, Long> convertVersionStamp(String versionStamp) { Map<String, Long> aspectVersionMap = new HashMap<>(); @@ -20,7 +18,8 @@ public static Map<String, Long> convertVersionStamp(String versionStamp) { for (String pair : aspectNameVersionPairs) { String[] tokens = pair.split(":"); if (tokens.length != 2) { - throw new IllegalArgumentException("Invalid version stamp cannot be parsed: " + versionStamp); + throw new IllegalArgumentException( + "Invalid version stamp cannot be parsed: " + versionStamp); } try { aspectVersionMap.put(tokens[0], Long.valueOf(tokens[1])); @@ -33,10 +32,13 @@ public static Map<String, Long> convertVersionStamp(String versionStamp) { } public static String constructVersionStamp(SortedMap<String, Long> versionStampMap) { - StringBuilder versionStamp = versionStampMap.entrySet().stream() - .collect(StringBuilder::new, (builder, entry) -> builder.append(entry.getKey()) - .append(":") - .append(entry.getValue()).append(";"), StringBuilder::append); + StringBuilder versionStamp = + versionStampMap.entrySet().stream() + .collect( + StringBuilder::new, + (builder, entry) -> + builder.append(entry.getKey()).append(":").append(entry.getValue()).append(";"), + StringBuilder::append); // trim off last ; return versionStamp.substring(0, versionStamp.length() - 1); } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java 
b/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java index 14949d9c946d9..880fcc2843333 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - public class VersionedUrnCoercer implements DirectCoercer<VersionedUrn> { static { Custom.registerCoercer(new VersionedUrnCoercer(), VersionedUrn.class); diff --git a/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java b/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java index c0ddbb710e2ee..ea878c41936ae 100644 --- a/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java +++ b/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java @@ -5,7 +5,6 @@ import org.assertj.core.api.Assertions; import org.testng.annotations.Test; - public class DatasetFieldUrnTest { private static final String PLATFORM = "fooPlatform"; @@ -16,39 +15,45 @@ public class DatasetFieldUrnTest { @Test public void testSerialization() throws URISyntaxException { final String datasetFieldString = - String.format("urn:li:datasetField:(urn:li:dataset:(urn:li:dataPlatform:%s,%s,%s),%s)", PLATFORM, DATASET_NAME, - FABRIC_TYPE, FIELD_NAME); + String.format( + "urn:li:datasetField:(urn:li:dataset:(urn:li:dataPlatform:%s,%s,%s),%s)", + PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetFieldUrn datasetFieldUrn = DatasetFieldUrn.deserialize(datasetFieldString); final DatasetUrn datasetUrn = datasetFieldUrn.getDatasetEntity(); Assertions.assertThat(datasetFieldUrn.getFieldPathEntity()).isEqualTo(FIELD_NAME); Assertions.assertThat(datasetUrn.getDatasetNameEntity()).isEqualTo(DATASET_NAME); - Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()).isEqualTo(PLATFORM); + Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()) + .isEqualTo(PLATFORM); Assertions.assertThat(datasetUrn.getOriginEntity()).isEqualTo(FabricType.PROD); Assertions.assertThat(datasetFieldUrn.toString()) .isEqualTo(datasetFieldString) - .describedAs("serialization followed by deserialization should produce the same urn string"); + .describedAs( + "serialization followed by deserialization should produce the same urn string"); } @Test public void testCreateUrn() { - final DatasetFieldUrn datasetFieldUrn = new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); + final DatasetFieldUrn datasetFieldUrn = + new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetUrn datasetUrn = datasetFieldUrn.getDatasetEntity(); Assertions.assertThat(datasetFieldUrn.getFieldPathEntity()).isEqualTo(FIELD_NAME); Assertions.assertThat(datasetUrn.getDatasetNameEntity()).isEqualTo(DATASET_NAME); - Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()).isEqualTo(PLATFORM); + Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()) + .isEqualTo(PLATFORM); Assertions.assertThat(datasetUrn.getOriginEntity()).isEqualTo(FabricType.PROD); } @Test public void testUrnConstructors() { - final DatasetFieldUrn datasetFieldUrn1 = new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); + final DatasetFieldUrn datasetFieldUrn1 = + new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetUrn datasetUrn = datasetFieldUrn1.getDatasetEntity(); final 
DatasetFieldUrn datasetFieldUrn2 = new DatasetFieldUrn(datasetUrn, FIELD_NAME); Assertions.assertThat(datasetFieldUrn1).isEqualTo(datasetFieldUrn2); } -} \ No newline at end of file +} diff --git a/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java index 76668abf4e5ce..f2d58c80177fb 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.common.util; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.test.testing.AspectBar; import com.datahub.test.testing.AspectFoo; import com.datahub.test.testing.DeltaUnion; @@ -39,10 +42,6 @@ import org.testng.annotations.Test; import org.testng.collections.Lists; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class ModelUtilsTest { class ChildUrn extends Urn { @@ -71,7 +70,8 @@ public void testGetInvalidAspectClass() { @Test public void testGetValidAspectTypes() { - Set<Class<? extends RecordTemplate>> validTypes = ModelUtils.getValidAspectTypes(EntityAspectUnion.class); + Set<Class<? extends RecordTemplate>> validTypes = + ModelUtils.getValidAspectTypes(EntityAspectUnion.class); assertEquals(validTypes, ImmutableSet.of(AspectFoo.class, AspectBar.class)); } @@ -172,7 +172,8 @@ public void testGetUrnFromEntity() { public void testGetUrnFromRelationship() { FooUrn expectedSource = makeFooUrn(1); BarUrn expectedDestination = makeBarUrn(1); - RelationshipFoo relationship = new RelationshipFoo().setSource(expectedSource).setDestination(expectedDestination); + RelationshipFoo relationship = + new RelationshipFoo().setSource(expectedSource).setDestination(expectedDestination); Urn sourceUrn = ModelUtils.getSourceUrnFromRelationship(relationship); Urn destinationUrn = ModelUtils.getDestinationUrnFromRelationship(relationship); @@ -269,7 +270,8 @@ public void testNewSnapshot() { EntityAspectUnion aspectUnion = new EntityAspectUnion(); aspectUnion.setAspectFoo(foo); - EntitySnapshot snapshot = ModelUtils.newSnapshot(EntitySnapshot.class, urn, Lists.newArrayList(aspectUnion)); + EntitySnapshot snapshot = + ModelUtils.newSnapshot(EntitySnapshot.class, urn, Lists.newArrayList(aspectUnion)); assertEquals(snapshot.getUrn(), urn); assertEquals(snapshot.getAspects().size(), 1); @@ -289,7 +291,8 @@ public void testNewAspect() { public void testNewAspectAlias() { AspectFoo foo = new AspectFoo().setValue("foo"); - EntityAspectUnionAlias aspectUnion = ModelUtils.newAspectUnion(EntityAspectUnionAlias.class, foo); + EntityAspectUnionAlias aspectUnion = + ModelUtils.newAspectUnion(EntityAspectUnionAlias.class, foo); assertEquals(aspectUnion.getFoo(), foo); } @@ -337,18 +340,22 @@ public void testValidateCorrectUrnForSnapshot() { @Test public void testNewRelatioshipUnion() { - RelationshipFoo foo = new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); + RelationshipFoo foo = + new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); - RelationshipUnion relationshipUnion = ModelUtils.newRelationshipUnion(RelationshipUnion.class, foo); + RelationshipUnion relationshipUnion = + ModelUtils.newRelationshipUnion(RelationshipUnion.class, foo); assertEquals(relationshipUnion.getRelationshipFoo(), foo); } @Test public void testNewRelatioshipUnionAlias() { - RelationshipFoo foo = new 
RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); + RelationshipFoo foo = + new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); - RelationshipUnionAlias relationshipUnion = ModelUtils.newRelationshipUnion(RelationshipUnionAlias.class, foo); + RelationshipUnionAlias relationshipUnion = + ModelUtils.newRelationshipUnion(RelationshipUnionAlias.class, foo); assertEquals(relationshipUnion.getFoo(), foo); } @@ -358,11 +365,14 @@ public void testGetMAETopicName() throws URISyntaxException { FooUrn urn = new FooUrn(1); AspectFoo foo = new AspectFoo().setValue("foo"); - assertEquals(ModelUtils.getAspectSpecificMAETopicName(urn, foo), "METADATA_AUDIT_EVENT_FOO_ASPECTFOO"); + assertEquals( + ModelUtils.getAspectSpecificMAETopicName(urn, foo), "METADATA_AUDIT_EVENT_FOO_ASPECTFOO"); PizzaUrn pizza = new PizzaUrn(1); AspectBar bar = new AspectBar().setValue("bar"); - assertEquals(ModelUtils.getAspectSpecificMAETopicName(pizza, bar), "METADATA_AUDIT_EVENT_PIZZA_ASPECTBAR"); + assertEquals( + ModelUtils.getAspectSpecificMAETopicName(pizza, bar), + "METADATA_AUDIT_EVENT_PIZZA_ASPECTBAR"); } @Test diff --git a/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java index 90514a498c67a..145ab2322adb0 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.common.util; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.test.testing.AspectBar; import com.datahub.test.testing.AspectBaz; import com.datahub.test.testing.AspectFoo; @@ -29,17 +32,16 @@ import org.apache.commons.io.IOUtils; import org.testng.annotations.Test; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class RecordUtilsTest { @Test public void testToJsonString() throws IOException { AspectFoo foo = new AspectFoo().setValue("foo"); String expected = - loadJsonFromResource("foo.json").replaceAll("\\s+", "").replaceAll("\\n", "").replaceAll("\\r", ""); + loadJsonFromResource("foo.json") + .replaceAll("\\s+", "") + .replaceAll("\\n", "") + .replaceAll("\\r", ""); String actual = RecordUtils.toJsonString(foo); @@ -55,7 +57,8 @@ public void testToRecordTemplate() throws IOException { assertEquals(actual, expected); - RecordTemplate actual2 = RecordUtils.toRecordTemplate(AspectFoo.class.getCanonicalName(), expected.data()); + RecordTemplate actual2 = + RecordUtils.toRecordTemplate(AspectFoo.class.getCanonicalName(), expected.data()); assertEquals(actual2.getClass(), AspectFoo.class); assertEquals(actual2, expected); @@ -71,7 +74,8 @@ public void testGetValidRecordDataSchemaField() { RecordDataSchema schema = ValidationUtils.getRecordSchema(AspectFoo.class); RecordDataSchema.Field expected = schema.getField("value"); - assertEquals(RecordUtils.getRecordDataSchemaField(new AspectFoo().setValue("foo"), "value"), expected); + assertEquals( + RecordUtils.getRecordDataSchemaField(new AspectFoo().setValue("foo"), "value"), expected); } @Test(expectedExceptions = InvalidSchemaException.class) @@ -112,7 +116,8 @@ public void testGetRecordTemplatePrimitiveField() throws IOException { assertTrue(RecordUtils.getRecordTemplateField(baz, "boolField", Boolean.class)); assertEquals(RecordUtils.getRecordTemplateField(baz, "stringField", String.class), "baz"); - 
assertEquals(RecordUtils.getRecordTemplateField(baz, "longField", Long.class), Long.valueOf(1234L)); + assertEquals( + RecordUtils.getRecordTemplateField(baz, "longField", Long.class), Long.valueOf(1234L)); } @Test @@ -127,9 +132,10 @@ public void testGetRecordTemplateUrnField() { public void testGetRecordTemplateWrappedField() throws IOException { AspectBaz baz = loadAspectBaz("baz.json"); - StringArray stringArray = RecordUtils.getRecordTemplateWrappedField(baz, "arrayField", StringArray.class); + StringArray stringArray = + RecordUtils.getRecordTemplateWrappedField(baz, "arrayField", StringArray.class); - assertEquals(stringArray.toArray(), new String[]{"1", "2", "3"}); + assertEquals(stringArray.toArray(), new String[] {"1", "2", "3"}); } @Test @@ -241,7 +247,10 @@ public void testGetFieldValueRecordType() { MixedRecord mixedRecord1 = new MixedRecord().setRecordField(foo1); PathSpec ps1f1 = MixedRecord.fields().recordField().value(); PathSpec ps1f2 = - MixedRecord.fields().nestedRecordField().foo().value(); // referencing a nullable record template field + MixedRecord.fields() + .nestedRecordField() + .foo() + .value(); // referencing a nullable record template field Optional<Object> o1f1 = RecordUtils.getFieldValue(mixedRecord1, ps1f1); Optional<Object> o1f2 = RecordUtils.getFieldValue(mixedRecord1, ps1f2); @@ -253,7 +262,8 @@ public void testGetFieldValueRecordType() { // case 2: referencing a field inside a RecordTemplate, two levels deep i.e. nested field AspectFoo foo2 = new AspectFoo().setValue("fooVal2"); - com.datahub.test.testing.EntityValue entityValue = new com.datahub.test.testing.EntityValue().setFoo(foo2); + com.datahub.test.testing.EntityValue entityValue = + new com.datahub.test.testing.EntityValue().setFoo(foo2); MixedRecord mixedRecord2 = new MixedRecord().setNestedRecordField(entityValue); PathSpec ps2 = MixedRecord.fields().nestedRecordField().foo().value(); @@ -268,7 +278,8 @@ public void testGetFieldValueArray() { // case 1: array of strings final MixedRecord mixedRecord1 = - new MixedRecord().setStringArray(new StringArray(Arrays.asList("val1", "val2", "val3", "val4"))); + new MixedRecord() + .setStringArray(new StringArray(Arrays.asList("val1", "val2", "val3", "val4"))); PathSpec ps1 = MixedRecord.fields().stringArray(); Object o1 = RecordUtils.getFieldValue(mixedRecord1, ps1).get(); @@ -293,20 +304,25 @@ public void testGetFieldValueArray() { // case 3: array of records is empty final MixedRecord mixedRecord3 = new MixedRecord().setRecordArray(new AspectFooArray()); - Object o3 = RecordUtils.getFieldValue(mixedRecord3, MixedRecord.fields().recordArray().items().value()).get(); + Object o3 = + RecordUtils.getFieldValue(mixedRecord3, MixedRecord.fields().recordArray().items().value()) + .get(); assertEquals(o3, new StringArray()); // case 4: referencing an index of array is not supported final MixedRecord mixedRecord4 = new MixedRecord().setRecordArray(aspectFooArray); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> RecordUtils.getFieldValue(mixedRecord4, "/recordArray/0/value")); // case 5: referencing nested field inside array of records, field being 2 levels deep AspectFoo f1 = new AspectFoo().setValue("val1"); AspectFoo f2 = new AspectFoo().setValue("val2"); - com.datahub.test.testing.EntityValue val1 = new com.datahub.test.testing.EntityValue().setFoo(f1); - com.datahub.test.testing.EntityValue val2 = new com.datahub.test.testing.EntityValue().setFoo(f2); + com.datahub.test.testing.EntityValue 
val1 = + new com.datahub.test.testing.EntityValue().setFoo(f1); + com.datahub.test.testing.EntityValue val2 = + new com.datahub.test.testing.EntityValue().setFoo(f2); EntityValueArray entityValues = new EntityValueArray(Arrays.asList(val1, val2)); final MixedRecord mixedRecord5 = new MixedRecord().setNestedRecordArray(entityValues); @@ -333,17 +349,21 @@ public void testGetFieldValueArray() { assertFalse(o7.isPresent()); } - @Test(description = "Test getFieldValue() when RecordTemplate has field of type array of primitive unions") + @Test( + description = + "Test getFieldValue() when RecordTemplate has field of type array of primitive unions") public void testGetFieldValueArrayOfPrimitiveUnions() { // case 1: array of unions of strings final MixedRecord mixedRecord1 = - new MixedRecord().setUnionArray(new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + new MixedRecord() + .setUnionArray( + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); PathSpec ps1 = MixedRecord.fields().unionArray(); Object o1 = RecordUtils.getFieldValue(mixedRecord1, ps1).get(); @@ -351,20 +371,24 @@ public void testGetFieldValueArrayOfPrimitiveUnions() { PathSpec ps2 = MixedRecord.fields().unionArray().items(); Object o2 = RecordUtils.getFieldValue(mixedRecord1, ps2).get(); - assertEquals(o1, new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + assertEquals( + o1, + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); assertEquals(ps1.toString(), "/unionArray"); - assertEquals(o2, new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + assertEquals( + o2, + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); assertEquals(ps2.toString(), "/unionArray/*"); } @@ -381,8 +405,9 @@ public void testCapitalizeFirst() { } private AspectBaz loadAspectBaz(String resourceName) throws IOException { - return RecordUtils.toRecordTemplate(AspectBaz.class, - IOUtils.toString(ClassLoader.getSystemResourceAsStream(resourceName), StandardCharsets.UTF_8)); + return RecordUtils.toRecordTemplate( + AspectBaz.class, + IOUtils.toString( + ClassLoader.getSystemResourceAsStream(resourceName), StandardCharsets.UTF_8)); } - } diff --git a/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java index cb5ac62d71a1d..93875b0f06706 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.common.util; +import static org.testng.AssertJUnit.*; + import com.linkedin.common.urn.VersionedUrnUtils; import java.util.Comparator; import java.util.Map; @@ -7,9 +9,6 @@ import java.util.TreeMap; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - public class VersionedUrnUtilsTest { private static final String SCHEMA_METADATA = 
"schemaMetadata"; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java index b8b62782309b8..2ae9ee8ab14ea 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java @@ -5,28 +5,24 @@ import lombok.AllArgsConstructor; import lombok.Getter; - /** - * Represents a unique DataHub actor (i.e. principal). Defining characteristics of all DataHub Actors includes a + * Represents a unique DataHub actor (i.e. principal). Defining characteristics of all DataHub + * Actors includes a * - * a) Actor Type: A specific type of actor, e.g. CORP_USER or SERVICE_USER. - * b) Actor Id: A unique id for the actor. + * <p>a) Actor Type: A specific type of actor, e.g. CORP_USER or SERVICE_USER. b) Actor Id: A unique + * id for the actor. * - * These pieces of information are in turn used to construct an Entity Urn, which can be used as a primary key to fetch and update specific information - * about the actor. + * <p>These pieces of information are in turn used to construct an Entity Urn, which can be used as + * a primary key to fetch and update specific information about the actor. */ @Getter @AllArgsConstructor public class Actor { - /** - * The {@link ActorType} associated with a DataHub actor. - */ + /** The {@link ActorType} associated with a DataHub actor. */ private final ActorType type; - /** - * The unique id associated with a DataHub actor. - */ + /** The unique id associated with a DataHub actor. */ private final String id; /** @@ -37,6 +33,7 @@ public String toUrnStr() { if (Objects.requireNonNull(getType()) == ActorType.USER) { return String.format("urn:li:corpuser:%s", getId()); } - throw new IllegalArgumentException(String.format("Unrecognized ActorType %s provided", getType())); + throw new IllegalArgumentException( + String.format("Unrecognized ActorType %s provided", getType())); } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java index c41a30e57b2d6..4fc175cd4815e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java @@ -3,12 +3,10 @@ /** * A specific type of Actor on DataHub's platform. * - * Currently the only actor type officially supported, though in the future this may evolve - * to include service users. + * <p>Currently the only actor type officially supported, though in the future this may evolve to + * include service users. */ public enum ActorType { - /** - * A user actor, e.g. john smith - */ + /** A user actor, e.g. john smith */ USER, } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java index 71efedda56e5c..b53d868e6e878 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java @@ -5,21 +5,21 @@ import java.util.Objects; import javax.annotation.Nonnull; - -/** - * Class representing an authenticated actor accessing DataHub. - */ +/** Class representing an authenticated actor accessing DataHub. 
*/ public class Authentication { private final Actor authenticatedActor; private final String credentials; private final Map<String, Object> claims; - public Authentication(@Nonnull final Actor authenticatedActor, @Nonnull final String credentials) { + public Authentication( + @Nonnull final Actor authenticatedActor, @Nonnull final String credentials) { this(authenticatedActor, credentials, Collections.emptyMap()); } - public Authentication(@Nonnull final Actor authenticatedActor, @Nonnull final String credentials, + public Authentication( + @Nonnull final Actor authenticatedActor, + @Nonnull final String credentials, @Nonnull final Map<String, Object> claims) { this.authenticatedActor = Objects.requireNonNull(authenticatedActor); this.credentials = Objects.requireNonNull(credentials); @@ -34,7 +34,8 @@ public Actor getActor() { } /** - * @return Returns the credentials associated with the current request (e.g. the value of the "Authorization" header) + * @return Returns the credentials associated with the current request (e.g. the value of the + * "Authorization" header) */ public String getCredentials() { return this.credentials; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java index e95f891b853a5..3a59b23122e25 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java @@ -19,6 +19,5 @@ public static void remove() { AUTHENTICATION.remove(); } - private AuthenticationContext() { - } + private AuthenticationContext() {} } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java index 9fbac00d3aeb5..5928b258c5f80 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java @@ -2,7 +2,6 @@ import com.datahub.plugins.auth.authentication.Authenticator; - /** * An {@link Exception} thrown when an {@link Authenticator} is unable to resolve an instance of * {@link Authentication} for the current request. diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java index 0a8e3cba3d07b..500248d6c7b4e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java @@ -2,7 +2,6 @@ import com.datahub.plugins.auth.authentication.Authenticator; - /** * An {@link Exception} thrown when an {@link Authenticator} is unable to resolve an instance of * {@link Authentication} for the current request.
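// Illustrative sketch, not part of the patch above: a minimal, hypothetical use of the
// Actor/Authentication API whose formatting the preceding hunks change. Only the
// constructors and methods visible in this patch are assumed; the "jdoe" id and the
// credential string are placeholder values.
Actor actor = new Actor(ActorType.USER, "jdoe");
// Per Actor.toUrnStr() above, USER actors render as corpuser urns.
String urn = actor.toUrnStr(); // "urn:li:corpuser:jdoe"
// The two-argument constructor defaults the claims map to Collections.emptyMap().
Authentication auth = new Authentication(actor, "Basic dXNlcjpwYXNz");
assert auth.getActor().equals(actor);
assert auth.getCredentials().equals("Basic dXNlcjpwYXNz");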
diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java index 5673bac5442b2..9aaf40df5a0f6 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java @@ -1,18 +1,16 @@ package com.datahub.authentication; import com.datahub.plugins.auth.authentication.Authenticator; -import lombok.Getter; - import java.util.Map; import java.util.Objects; import java.util.TreeMap; import javax.annotation.Nonnull; - +import lombok.Getter; /** * Request context provided to each {@link Authenticator} to perform Authentication. * - * Currently, this class only holds the inbound request's headers, but could certainly be extended + * <p>Currently, this class only holds the inbound request's headers, but could certainly be extended * to contain additional information like the request parameters, body, ip, etc as needed. */ @Getter @@ -27,7 +25,10 @@ public AuthenticationRequest(@Nonnull final Map<String, String> requestHeaders) this("", "", requestHeaders); } - public AuthenticationRequest(@Nonnull String servletInfo, @Nonnull String pathInfo, @Nonnull final Map<String, String> requestHeaders) { + public AuthenticationRequest( + @Nonnull String servletInfo, + @Nonnull String pathInfo, + @Nonnull final Map<String, String> requestHeaders) { Objects.requireNonNull(requestHeaders); caseInsensitiveHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); caseInsensitiveHeaders.putAll(requestHeaders); diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java index 4c6ee071e5ca1..06a70d55c0802 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java @@ -6,10 +6,10 @@ import java.util.Objects; import javax.annotation.Nonnull; - /** - * Context class to provide Authenticator implementations with concrete objects necessary for their correct workings. - * DataHub creates {@link AuthenticatorContext} instance and provides it as an argument to init method of {@link Authenticator} + * Context class to provide Authenticator implementations with concrete objects necessary for their + * correct workings.
DataHub creates {@link AuthenticatorContext} instance and provides it as an + * argument to init method of {@link Authenticator} */ public class AuthenticatorContext { private final Map<String, Object> contextMap; @@ -21,10 +21,9 @@ public AuthenticatorContext(@Nonnull final Map<String, Object> context) { } /** - * - * @return contextMap The contextMap contains below key and value - * {@link com.datahub.plugins.PluginConstant#PLUGIN_HOME PLUGIN_HOME}: Directory path where plugin is installed - * + * @return contextMap The contextMap contains below key and value {@link + * com.datahub.plugins.PluginConstant#PLUGIN_HOME PLUGIN_HOME}: Directory path where plugin is + * installed */ @Nonnull public Map<String, Object> data() { diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java index e159993a8a243..f8d08c6adbd3a 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java @@ -5,22 +5,22 @@ import java.util.Optional; import javax.annotation.Nonnull; - public class AuthUtil { public static boolean isAuthorized( @Nonnull Authorizer authorizer, @Nonnull String actor, @Nonnull Optional<EntitySpec> maybeResourceSpec, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : privilegeGroup.getAuthorizedPrivilegeGroups()) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { + for (ConjunctivePrivilegeGroup andPrivilegeGroup : + privilegeGroup.getAuthorizedPrivilegeGroups()) { // If any conjunctive privilege group is authorized, then the entire request is authorized. if (isAuthorized(authorizer, actor, andPrivilegeGroup, maybeResourceSpec)) { return true; } } - // If none of the disjunctive privilege groups were authorized, then the entire request is not authorized. + // If none of the disjunctive privilege groups were authorized, then the entire request is not + // authorized. return false; } @@ -28,15 +28,16 @@ public static boolean isAuthorizedForResources( @Nonnull Authorizer authorizer, @Nonnull String actor, @Nonnull List<Optional<EntitySpec>> resourceSpecs, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : privilegeGroup.getAuthorizedPrivilegeGroups()) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { + for (ConjunctivePrivilegeGroup andPrivilegeGroup : + privilegeGroup.getAuthorizedPrivilegeGroups()) { // If any conjunctive privilege group is authorized, then the entire request is authorized. if (isAuthorizedForResources(authorizer, actor, andPrivilegeGroup, resourceSpecs)) { return true; } } - // If none of the disjunctive privilege groups were authorized, then the entire request is not authorized. + // If none of the disjunctive privilege groups were authorized, then the entire request is not + // authorized. return false; } @@ -67,7 +68,8 @@ private static boolean isAuthorizedForResources( for (final String privilege : requiredPrivileges.getRequiredPrivileges()) { // Create and evaluate an Authorization request. 
for (Optional<EntitySpec> resourceSpec : resourceSpecs) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege, resourceSpec); + final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege, resourceSpec); final AuthorizationResult result = authorizer.authorize(request); if (AuthorizationResult.Type.DENY.equals(result.getType())) { // Short circuit. @@ -78,5 +80,5 @@ private static boolean isAuthorizedForResources( return true; } - private AuthUtil() { } -} \ No newline at end of file + private AuthUtil() {} +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java index 9e75de3cbf44d..62889a50d2d96 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java @@ -3,23 +3,18 @@ import java.util.Optional; import lombok.Value; - -/** - * A request to authorize a user for a specific privilege. - */ +/** A request to authorize a user for a specific privilege. */ @Value public class AuthorizationRequest { - /** - * The urn of the actor (corpuser) making the request. - */ + /** The urn of the actor (corpuser) making the request. */ String actorUrn; - /** - * The privilege that the user is requesting - */ + + /** The privilege that the user is requesting */ String privilege; + /** - * The resource that the user is requesting for, if applicable. If the privilege is a platform privilege - * this optional will be empty. + * The resource that the user is requesting for, if applicable. If the privilege is a platform + * privilege this optional will be empty. */ Optional<EntitySpec> resourceSpec; } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java index 17d199be583e3..a8eea06dfab27 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java @@ -3,39 +3,24 @@ import lombok.AllArgsConstructor; import lombok.Data; - -/** - * A result returned after requesting authorization for a particular privilege. - */ +/** A result returned after requesting authorization for a particular privilege. */ @Data @AllArgsConstructor public class AuthorizationResult { - /** - * The original authorization request - */ + /** The original authorization request */ AuthorizationRequest request; - /** - * The result type. Allow or deny the authorization request for the actor. - */ + /** The result type. Allow or deny the authorization request for the actor. */ public enum Type { - /** - * Allow the request - the requested actor is privileged. - */ + /** Allow the request - the requested actor is privileged. */ ALLOW, - /** - * Deny the request - the requested actor is not privileged. - */ + /** Deny the request - the requested actor is not privileged. */ DENY } - /** - * The decision - whether to allow or deny the request. - */ + /** The decision - whether to allow or deny the request. */ public Type type; - /** - * Optional message associated with the decision. Useful for debugging. - */ + /** Optional message associated with the decision. Useful for debugging. 
*/ String message; } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java index 5a9990552bb34..0155c49fd9da7 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java @@ -7,7 +7,6 @@ import lombok.Builder; import lombok.Value; - @Value @AllArgsConstructor(access = AccessLevel.PUBLIC) @Builder diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java index b79a4fa20c7ea..50bc749cd9921 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java @@ -7,25 +7,21 @@ import lombok.AllArgsConstructor; import lombok.Data; - /** - * Context provided to an Authorizer on initialization. - * DataHub creates {@link AuthenticatorContext} instance and provides it as an argument to init method of {@link Authenticator} + * Context provided to an Authorizer on initialization. DataHub creates {@link AuthenticatorContext} + * instance and provides it as an argument to init method of {@link Authenticator} */ @Data @AllArgsConstructor public class AuthorizerContext { private final Map<String, Object> contextMap; - /** - * A utility for resolving an {@link EntitySpec} to resolved entity field values. - */ + /** A utility for resolving an {@link EntitySpec} to resolved entity field values. */ private EntitySpecResolver entitySpecResolver; /** - * - * @return contextMap The contextMap contains below key and value - * PLUGIN_DIRECTORY: Directory path where plugin is installed i.e. PLUGIN_HOME + * @return contextMap The contextMap contains below key and value PLUGIN_DIRECTORY: Directory path + * where plugin is installed i.e. PLUGIN_HOME */ @Nonnull public Map<String, Object> data() { diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java index d47783268f70d..bc3a3c9f385a6 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java @@ -2,12 +2,10 @@ import java.util.List; - /** - * Represents a group of privileges that must <b>ALL</b> be required to - * authorize a request. + * Represents a group of privileges that must <b>ALL</b> be required to authorize a request. * - * That is, an AND of privileges. + * <p>That is, an AND of privileges. */ public class ConjunctivePrivilegeGroup { private final List<String> _requiredPrivileges; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java index 40bb22d036f0a..350476326da9f 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java @@ -5,7 +5,7 @@ /** * Represents a group of privilege groups, any of which must be authorized to authorize a request. 
* - * That is, an OR of privilege groups. + * <p>That is, an OR of privilege groups. */ public class DisjunctivePrivilegeGroup { private final List<ConjunctivePrivilegeGroup> _authorizedPrivilegeGroups; @@ -17,4 +17,4 @@ public DisjunctivePrivilegeGroup(List<ConjunctivePrivilegeGroup> authorizedPrivi public List<ConjunctivePrivilegeGroup> getAuthorizedPrivilegeGroups() { return _authorizedPrivilegeGroups; } -} \ No newline at end of file +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java index 1258d958f2092..6b08cdb00e9ab 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java @@ -1,44 +1,32 @@ package com.datahub.authorization; -/** - * List of entity field types to fetch for a given entity - */ +/** List of entity field types to fetch for a given entity */ public enum EntityFieldType { /** * Type of the entity (e.g. dataset, chart) + * * @deprecated */ @Deprecated RESOURCE_URN, /** * Urn of the entity + * * @deprecated */ @Deprecated RESOURCE_TYPE, - /** - * Type of the entity (e.g. dataset, chart) - */ + /** Type of the entity (e.g. dataset, chart) */ TYPE, - /** - * Urn of the entity - */ + /** Urn of the entity */ URN, - /** - * Owners of the entity - */ + /** Owners of the entity */ OWNER, - /** - * Domains of the entity - */ + /** Domains of the entity */ DOMAIN, - /** - * Groups of which the entity (only applies to corpUser) is a member - */ + /** Groups of which the entity (only applies to corpUser) is a member */ GROUP_MEMBERSHIP, - /** - * Data platform instance of resource - */ + /** Data platform instance of resource */ DATA_PLATFORM_INSTANCE } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java index 656bec0f44fc2..eb412cdeff14e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java @@ -3,21 +3,19 @@ import javax.annotation.Nonnull; import lombok.Value; - /** - * Details about the entities involved in the authorization process. It models the actor and the resource being acted - * upon. Resource types currently supported can be found inside of {@link com.linkedin.metadata.authorization.PoliciesConfig} + * Details about the entities involved in the authorization process. It models the actor and the + * resource being acted upon. Resource types currently supported can be found inside of {@link + * com.linkedin.metadata.authorization.PoliciesConfig} */ @Value public class EntitySpec { + /** The entity type. (dataset, chart, dashboard, corpGroup, etc). */ + @Nonnull String type; + /** - * The entity type. (dataset, chart, dashboard, corpGroup, etc). - */ - @Nonnull - String type; - /** - * The entity identity. Most often, this corresponds to the raw entity urn. (urn:li:corpGroup:groupId) + * The entity identity. Most often, this corresponds to the raw entity urn. 
+ * (urn:li:corpGroup:groupId) */ - @Nonnull - String entity; -} \ No newline at end of file + @Nonnull String entity; +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java index 67347fbf87a87..0d482f3816e28 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java @@ -1,11 +1,10 @@ package com.datahub.authorization; /** - * An Entity Spec Resolver is responsible for resolving a {@link EntitySpec} to a {@link ResolvedEntitySpec}. + * An Entity Spec Resolver is responsible for resolving a {@link EntitySpec} to a {@link + * ResolvedEntitySpec}. */ public interface EntitySpecResolver { - /** - Resolve a {@link EntitySpec} to a resolved entity spec. - **/ + /** Resolve a {@link EntitySpec} to a resolved entity spec. */ ResolvedEntitySpec resolve(EntitySpec entitySpec); } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java index 955a06fd54cb9..3e6287c335c97 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java @@ -10,32 +10,30 @@ import lombok.RequiredArgsConstructor; import lombok.Value; - /** - * Helper class for lazy resolution of fields - * Input resolveField function that is given as input will only be called when getFieldValuesFuture is called + * Helper class for lazy resolution of fields Input resolveField function that is given as input + * will only be called when getFieldValuesFuture is called */ @RequiredArgsConstructor public class FieldResolver { private final Supplier<CompletableFuture<FieldValue>> resolveField; + @Getter(lazy = true) private final CompletableFuture<FieldValue> fieldValuesFuture = resolveField.get(); private static final FieldValue EMPTY = new FieldValue(Collections.emptySet()); - /** - * Helper function that returns FieldResolver for precomputed values - */ + /** Helper function that returns FieldResolver for precomputed values */ public static FieldResolver getResolverFromValues(Set<String> values) { - return new FieldResolver(() -> CompletableFuture.completedFuture(FieldValue.builder().values(values).build())); + return new FieldResolver( + () -> CompletableFuture.completedFuture(FieldValue.builder().values(values).build())); } - /** - * Helper function that returns FieldResolver given a fetchFieldValue function - */ - public static FieldResolver getResolverFromFunction(EntitySpec entitySpec, - Function<EntitySpec, FieldValue> fetchFieldValue) { - return new FieldResolver(() -> CompletableFuture.supplyAsync(() -> fetchFieldValue.apply(entitySpec))); + /** Helper function that returns FieldResolver given a fetchFieldValue function */ + public static FieldResolver getResolverFromFunction( + EntitySpec entitySpec, Function<EntitySpec, FieldValue> fetchFieldValue) { + return new FieldResolver( + () -> CompletableFuture.supplyAsync(() -> fetchFieldValue.apply(entitySpec))); } public static FieldValue emptyFieldValue() { @@ -43,7 +41,8 @@ public static FieldValue emptyFieldValue() { } /** - * Container for storing the field value, in case we need to extend this to have more types of field values + * Container for storing the field value, in case 
we need to extend this to have more types of + * field values */ @Value @Builder diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java index 7948766df5715..0a639bed1082b 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java @@ -8,15 +8,14 @@ import lombok.RequiredArgsConstructor; import lombok.ToString; - /** - * Wrapper around authorization request with field resolvers for lazily fetching the field values for each field type + * Wrapper around authorization request with field resolvers for lazily fetching the field values + * for each field type */ @RequiredArgsConstructor @ToString public class ResolvedEntitySpec { - @Getter - private final EntitySpec spec; + @Getter private final EntitySpec spec; private final Map<EntityFieldType, FieldResolver> fieldResolvers; public Set<String> getFieldValues(EntityFieldType entityFieldType) { @@ -28,6 +27,7 @@ public Set<String> getFieldValues(EntityFieldType entityFieldType) { /** * Fetch the owners for an entity. + * * @return a set of owner urns, or empty set if none exist. */ public Set<String> getOwners() { @@ -39,6 +39,7 @@ public Set<String> getOwners() { /** * Fetch the platform instance for a Resolved Resource Spec + * * @return a Platform Instance or null if one does not exist. */ @Nullable @@ -46,7 +47,12 @@ public String getDataPlatformInstance() { if (!fieldResolvers.containsKey(EntityFieldType.DATA_PLATFORM_INSTANCE)) { return null; } - Set<String> dataPlatformInstance = fieldResolvers.get(EntityFieldType.DATA_PLATFORM_INSTANCE).getFieldValuesFuture().join().getValues(); + Set<String> dataPlatformInstance = + fieldResolvers + .get(EntityFieldType.DATA_PLATFORM_INSTANCE) + .getFieldValuesFuture() + .join() + .getValues(); if (dataPlatformInstance.size() > 0) { return dataPlatformInstance.stream().findFirst().get(); } @@ -55,12 +61,17 @@ public String getDataPlatformInstance() { /** * Fetch the group membership for an entity. + * * @return a set of groups urns, or empty set if none exist. 
*/ public Set<String> getGroupMembership() { if (!fieldResolvers.containsKey(EntityFieldType.GROUP_MEMBERSHIP)) { return Collections.emptySet(); } - return fieldResolvers.get(EntityFieldType.GROUP_MEMBERSHIP).getFieldValuesFuture().join().getValues(); + return fieldResolvers + .get(EntityFieldType.GROUP_MEMBERSHIP) + .getFieldValuesFuture() + .join() + .getValues(); } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java index 474dd7363e495..3b8406ad5ed5a 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java @@ -1,7 +1,4 @@ package com.datahub.plugins; -/** - * A tag interface for plugin - */ -public interface Plugin { -} +/** A tag interface for plugin */ +public interface Plugin {} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java index 03afc06af7f3c..ac72fc9b8f816 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java @@ -3,6 +3,5 @@ public class PluginConstant { public static final String PLUGIN_HOME = "PLUGIN_HOME"; - private PluginConstant() { - } + private PluginConstant() {} } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java index b7cf80384564b..6485495608773 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java @@ -10,30 +10,36 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** - * An {@link Authenticator}'s job is to authenticate an inbound request by resolving the provided {@link AuthenticationRequest} - * to an instance of {@link Authentication}, which includes an authenticated {@link Actor} within. + * An {@link Authenticator}'s job is to authenticate an inbound request by resolving the provided + * {@link AuthenticationRequest} to an instance of {@link Authentication}, which includes an + * authenticated {@link Actor} within. * - * In the case that {@link Authentication} cannot be resolved, for example because the request is missing the required - * authentication information, an {@link AuthenticationException} may be thrown. + * <p>In the case that {@link Authentication} cannot be resolved, for example because the request is + * missing the required authentication information, an {@link AuthenticationException} may be + * thrown. */ public interface Authenticator extends Plugin { /** * Initialize the Authenticator. Invoked once at boot time. * - * @param authenticatorConfig config provided to the authenticator derived from the Metadata Service YAML config. This - * config comes from the "plugins[].params.configs" configuration. - * @param context nullable configuration objects that are potentially required by an Authenticator instance. + * @param authenticatorConfig config provided to the authenticator derived from the Metadata + * Service YAML config. This config comes from the "plugins[].params.configs" configuration. 
+ * @param context nullable configuration objects that are potentially required by an Authenticator + * instance. */ - void init(@Nonnull final Map<String, Object> authenticatorConfig, @Nullable final AuthenticatorContext context); + void init( + @Nonnull final Map<String, Object> authenticatorConfig, + @Nullable final AuthenticatorContext context); /** * Authenticates an inbound request given an instance of the {@link AuthenticationRequest}. - * @param authenticationRequest authentication request {@link AuthenticationRequest} that needs to be authenticated - * If the request is authenticated successfully, an instance of {@link Authentication} is returned. - * If the request cannot be authenticated, returns "null" or throws an {@link AuthenticationException}. + * + * @param authenticationRequest authentication request {@link AuthenticationRequest} that needs to + * be authenticated. If the request is authenticated successfully, an instance of {@link + * Authentication} is returned. If the request cannot be authenticated, returns "null" or + * throws an {@link AuthenticationException}. */ @Nullable Authentication authenticate(@Nonnull final AuthenticationRequest authenticationRequest) diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java index c731a3ec987c1..a6baf0b5b282c 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java @@ -10,27 +10,27 @@ import java.util.Optional; import javax.annotation.Nonnull; - /** - * An Authorizer is responsible for determining whether an actor should be granted a specific privilege. + * An Authorizer is responsible for determining whether an actor should be granted a specific + * privilege. */ public interface Authorizer extends Plugin { /** * Initialize the Authorizer. Invoked once at boot time. * - * @param authorizerConfig config provided to the authorizer derived from the Metadata Service YAML config. This - * config comes from the "authorization.authorizers.config" configuration. + * @param authorizerConfig config provided to the authorizer derived from the Metadata Service + * YAML config. This config comes from the "authorization.authorizers.config" configuration. */ - void init(@Nonnull final Map<String, Object> authorizerConfig, @Nonnull final AuthorizerContext ctx); + void init( + @Nonnull final Map<String, Object> authorizerConfig, @Nonnull final AuthorizerContext ctx); - /** - * Authorizes an action based on the actor, the resource, and required privileges. - */ + /** Authorizes an action based on the actor, the resource, and required privileges. 
*/ AuthorizationResult authorize(@Nonnull final AuthorizationRequest request); /** - * Retrieves the current list of actors authorized for a particular privilege against - * an optional resource + * Retrieves the current list of actors authorized for a particular privilege against an + * optional resource */ - AuthorizedActors authorizedActors(final String privilege, final Optional<EntitySpec> resourceSpec); + AuthorizedActors authorizedActors( + final String privilege, final Optional<EntitySpec> resourceSpec); } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java index d8d66ddeeb648..2ac16091128a2 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java @@ -1,25 +1,25 @@ package com.datahub.metadata.dao.producer; -import com.linkedin.common.urn.Urn; import com.datahub.util.ModelUtils; +import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.UnionTemplate; import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** * A base class for all metadata event producers. * - *<p>See http://go/gma for more details. + * <p>See http://go/gma for more details. */ -public abstract class BaseMetadataEventProducer<SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> { +public abstract class BaseMetadataEventProducer< + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> { protected final Class<SNAPSHOT> _snapshotClass; protected final Class<ASPECT_UNION> _aspectUnionClass; - public BaseMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, - @Nonnull Class<ASPECT_UNION> aspectUnionClass) { + public BaseMetadataEventProducer( + @Nonnull Class<SNAPSHOT> snapshotClass, @Nonnull Class<ASPECT_UNION> aspectUnionClass) { ModelUtils.validateSnapshotAspect(snapshotClass, aspectUnionClass); _snapshotClass = snapshotClass; _aspectUnionClass = aspectUnionClass; @@ -32,8 +32,8 @@ public BaseMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, * @param newValue the proposed new value for the metadata * @param <ASPECT> must be a supported aspect type in {@code ASPECT_UNION} */ - public abstract <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent(@Nonnull URN urn, - @Nonnull ASPECT newValue); + public abstract <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent( + @Nonnull URN urn, @Nonnull ASPECT newValue); /** * Produces a Metadata Audit Event (MAE) after a metadata aspect is updated for an entity. @@ -43,17 +43,17 @@ public abstract <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadat * @param newValue the value after the update * @param <ASPECT> must be a supported aspect type in {@code ASPECT_UNION} */ - public abstract <ASPECT extends RecordTemplate> void produceMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); + public abstract <ASPECT extends RecordTemplate> void produceMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); /** - * Produces an aspect specific Metadata Audit Event (MAE) after a metadata aspect is updated for an entity.
+ * Produces an aspect specific Metadata Audit Event (MAE) after a metadata aspect is updated for + * an entity. * * @param urn {@link Urn} of the entity * @param oldValue the value prior to the update, or null if there's none. * @param newValue the value after the update */ - public abstract <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); + public abstract <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); } - diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java index 00b5bb75d901b..26b48449c1c2f 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java @@ -24,8 +24,9 @@ import org.apache.kafka.clients.producer.ProducerRecord; /** - * <p>The topic names that this emits to can be controlled by constructing this with a {@link TopicConvention}. - * If none is given, defaults to a {@link TopicConventionImpl} with the default delimiter of an underscore (_). + * The topic names that this emits to can be controlled by constructing this with a {@link + * TopicConvention}. If none is given, defaults to a {@link TopicConventionImpl} with the default + * delimiter of an underscore (_). */ @Slf4j public class KafkaEventProducer implements EventProducer { @@ -41,8 +42,10 @@ public class KafkaEventProducer implements EventProducer { * @param topicConvention the convention to use to get kafka topic names * @param kafkaHealthChecker The {@link Callback} to invoke when the request is completed */ - public KafkaEventProducer(@Nonnull final Producer<String, ? extends IndexedRecord> producer, - @Nonnull final TopicConvention topicConvention, @Nonnull final KafkaHealthChecker kafkaHealthChecker) { + public KafkaEventProducer( + @Nonnull final Producer<String, ? extends IndexedRecord> producer, + @Nonnull final TopicConvention topicConvention, + @Nonnull final KafkaHealthChecker kafkaHealthChecker) { _producer = producer; _topicConvention = topicConvention; _kafkaHealthChecker = kafkaHealthChecker; @@ -50,13 +53,16 @@ public KafkaEventProducer(@Nonnull final Producer<String, ? 
extends IndexedRecor @Override @WithSpan - public Future<?> produceMetadataChangeLog(@Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, + public Future<?> produceMetadataChangeLog( + @Nonnull final Urn urn, + @Nonnull AspectSpec aspectSpec, @Nonnull final MetadataChangeLog metadataChangeLog) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeLog: %s", - urn, - metadataChangeLog)); + log.debug( + String.format( + "Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeLog: %s", + urn, metadataChangeLog)); record = EventUtils.pegasusToAvroMCL(metadataChangeLog); } catch (IOException e) { log.error(String.format("Failed to convert Pegasus MAE to Avro: %s", metadataChangeLog), e); @@ -67,38 +73,42 @@ record = EventUtils.pegasusToAvroMCL(metadataChangeLog); if (aspectSpec.isTimeseries()) { topic = _topicConvention.getMetadataChangeLogTimeseriesTopicName(); } - return _producer.send(new ProducerRecord(topic, urn.toString(), record), - _kafkaHealthChecker.getKafkaCallBack("MCL", urn.toString())); + return _producer.send( + new ProducerRecord(topic, urn.toString(), record), + _kafkaHealthChecker.getKafkaCallBack("MCL", urn.toString())); } @Override @WithSpan - public Future<?> produceMetadataChangeProposal(@Nonnull final Urn urn, - @Nonnull final MetadataChangeProposal metadataChangeProposal) { + public Future<?> produceMetadataChangeProposal( + @Nonnull final Urn urn, @Nonnull final MetadataChangeProposal metadataChangeProposal) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeProposal: %s", - urn, - metadataChangeProposal)); + log.debug( + String.format( + "Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeProposal: %s", + urn, metadataChangeProposal)); record = EventUtils.pegasusToAvroMCP(metadataChangeProposal); } catch (IOException e) { - log.error(String.format("Failed to convert Pegasus MCP to Avro: %s", metadataChangeProposal), e); + log.error( + String.format("Failed to convert Pegasus MCP to Avro: %s", metadataChangeProposal), e); throw new ModelConversionException("Failed to convert Pegasus MCP to Avro", e); } String topic = _topicConvention.getMetadataChangeProposalTopicName(); - return _producer.send(new ProducerRecord(topic, urn.toString(), record), - _kafkaHealthChecker.getKafkaCallBack("MCP", urn.toString())); + return _producer.send( + new ProducerRecord(topic, urn.toString(), record), + _kafkaHealthChecker.getKafkaCallBack("MCP", urn.toString())); } @Override - public Future<?> producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) { + public Future<?> producePlatformEvent( + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus Event to Avro Event urn %s\nEvent: %s", - name, - event)); + log.debug( + String.format("Converting Pegasus Event to Avro Event urn %s\nEvent: %s", name, event)); record = EventUtils.pegasusToAvroPE(event); } catch (IOException e) { log.error(String.format("Failed to convert Pegasus Platform Event to Avro: %s", event), e); @@ -106,8 +116,9 @@ record = EventUtils.pegasusToAvroPE(event); } final String topic = _topicConvention.getPlatformEventTopicName(); - return _producer.send(new ProducerRecord(topic, key == null ? 
name : key, record), - _kafkaHealthChecker.getKafkaCallBack("Platform Event", name)); + return _producer.send( + new ProducerRecord(topic, key == null ? name : key, record), + _kafkaHealthChecker.getKafkaCallBack("Platform Event", name)); } @Override @@ -117,12 +128,17 @@ public void produceDataHubUpgradeHistoryEvent(@Nonnull DataHubUpgradeHistoryEven log.debug(String.format("Converting Pegasus Event to Avro Event\nEvent: %s", event)); record = EventUtils.pegasusToAvroDUHE(event); } catch (IOException e) { - log.error(String.format("Failed to convert Pegasus DataHub Upgrade History Event to Avro: %s", event), e); + log.error( + String.format( + "Failed to convert Pegasus DataHub Upgrade History Event to Avro: %s", event), + e); throw new ModelConversionException("Failed to convert Pegasus Platform Event to Avro", e); } final String topic = _topicConvention.getDataHubUpgradeHistoryTopicName(); - _producer.send(new ProducerRecord(topic, event.getVersion(), record), _kafkaHealthChecker - .getKafkaCallBack("History Event", "Event Version: " + event.getVersion())); + _producer.send( + new ProducerRecord(topic, event.getVersion(), record), + _kafkaHealthChecker.getKafkaCallBack( + "History Event", "Event Version: " + event.getVersion())); } } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java index 8fc89a8ddd5ed..1bfd829617e09 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java @@ -2,6 +2,10 @@ import com.codahale.metrics.MetricRegistry; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.producer.Callback; @@ -10,107 +14,108 @@ import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; - @Slf4j @EnableScheduling @Component public class KafkaHealthChecker { - @Value("${kafka.producer.deliveryTimeout}") - private long kafkaProducerDeliveryTimeout; - - private final Set<MessageLog> messagesInProgress = ConcurrentHashMap.newKeySet(); - - public Callback getKafkaCallBack(String eventType, String entityDesc) { - final MessageLog tracking = MessageLog.track(entityDesc, kafkaProducerDeliveryTimeout); - sendMessageStarted(tracking); - return (metadata, e) -> { - sendMessageEnded(tracking); - if (e != null) { - log.error(String.format("Failed to emit %s for entity %s", eventType, entityDesc), e); - MetricUtils.counter(this.getClass(), - MetricRegistry.name("producer_failed_count", eventType.replaceAll(" ", "_"))).inc(); - } else { - log.debug(String.format( - "Successfully emitted %s for entity %s at offset %s, partition %s, topic %s", - eventType, entityDesc, metadata.offset(), metadata.partition(), metadata.topic())); - } - }; + @Value("${kafka.producer.deliveryTimeout}") + private long kafkaProducerDeliveryTimeout; + + private final Set<MessageLog> messagesInProgress = ConcurrentHashMap.newKeySet(); + + public Callback getKafkaCallBack(String eventType, 
String entityDesc) { + final MessageLog tracking = MessageLog.track(entityDesc, kafkaProducerDeliveryTimeout); + sendMessageStarted(tracking); + return (metadata, e) -> { + sendMessageEnded(tracking); + if (e != null) { + log.error(String.format("Failed to emit %s for entity %s", eventType, entityDesc), e); + MetricUtils.counter( + this.getClass(), + MetricRegistry.name("producer_failed_count", eventType.replaceAll(" ", "_"))) + .inc(); + } else { + log.debug( + String.format( + "Successfully emitted %s for entity %s at offset %s, partition %s, topic %s", + eventType, entityDesc, metadata.offset(), metadata.partition(), metadata.topic())); + } + }; + } + + private void sendMessageStarted(MessageLog messageLog) { + messagesInProgress.add(messageLog); + } + + private void sendMessageEnded(MessageLog messageLog) { + messagesInProgress.remove(messageLog); + } + + @Scheduled(cron = "0/60 * * * * ?") + private synchronized void periodicKafkaHealthChecker() { + long moment = System.currentTimeMillis(); + Set<MessageLog> oldItems = + messagesInProgress.stream() + .filter(item -> item.expectedMilli < moment) + .collect(Collectors.toSet()); + + if (oldItems.size() > 0) { + Map<String, Long> itemCounts = + oldItems.stream() + .collect(Collectors.groupingBy(MessageLog::getEntityDesc, Collectors.counting())); + log.error( + String.format( + "Kafka Health Check Failed. Old message(s) were waiting to be sent: %s", itemCounts)); + messagesInProgress.removeAll(oldItems); } + } - private void sendMessageStarted(MessageLog messageLog) { - messagesInProgress.add(messageLog); + @Getter + static class MessageLog { + private final String entityDesc; + private final long uniqueMessageId; + private final long expectedMilli; + private static long lastMoment = 0L; + + public static MessageLog track(String entityDesc, long maxDelayMilli) { + return new MessageLog(entityDesc, maxDelayMilli); } - private void sendMessageEnded(MessageLog messageLog) { - messagesInProgress.remove(messageLog); + private MessageLog(String entityDesc, long maxDelayMilli) { + this.entityDesc = entityDesc; + this.uniqueMessageId = getNextUniqueMoment(); + this.expectedMilli = this.uniqueMessageId + maxDelayMilli; } - @Scheduled(cron = "0/60 * * * * ?") - private synchronized void periodicKafkaHealthChecker() { - long moment = System.currentTimeMillis(); - Set<MessageLog> oldItems = messagesInProgress.stream() - .filter(item -> item.expectedMilli < moment) - .collect(Collectors.toSet()); - - if (oldItems.size() > 0) { - Map<String, Long> itemCounts = oldItems.stream() - .collect(Collectors.groupingBy(MessageLog::getEntityDesc, Collectors.counting())); - log.error(String.format("Kafka Health Check Failed. Old message(s) were waiting to be sent: %s", itemCounts)); - messagesInProgress.removeAll(oldItems); - } + private synchronized long getNextUniqueMoment() { + long moment = System.currentTimeMillis(); + lastMoment = moment != lastMoment ? 
moment : ++lastMoment; + return lastMoment; } - @Getter - static class MessageLog { - private final String entityDesc; - private final long uniqueMessageId; - private final long expectedMilli; - private static long lastMoment = 0L; - - - public static MessageLog track(String entityDesc, long maxDelayMilli) { - return new MessageLog(entityDesc, maxDelayMilli); - } - private MessageLog(String entityDesc, long maxDelayMilli) { - this.entityDesc = entityDesc; - this.uniqueMessageId = getNextUniqueMoment(); - this.expectedMilli = this.uniqueMessageId + maxDelayMilli; - } - - private synchronized long getNextUniqueMoment() { - long moment = System.currentTimeMillis(); - lastMoment = moment != lastMoment ? moment : ++lastMoment; - return lastMoment; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - MessageLog that = (MessageLog) o; - - if (uniqueMessageId != that.uniqueMessageId) { - return false; - } - return entityDesc.equals(that.entityDesc); - } - - @Override - public int hashCode() { - int result = entityDesc.hashCode(); - result = 31 * result + (int) (uniqueMessageId ^ (uniqueMessageId >>> 32)); - return result; - } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + MessageLog that = (MessageLog) o; + + if (uniqueMessageId != that.uniqueMessageId) { + return false; + } + return entityDesc.equals(that.entityDesc); } + @Override + public int hashCode() { + int result = entityDesc.hashCode(); + result = 31 * result + (int) (uniqueMessageId ^ (uniqueMessageId >>> 32)); + return result; + } + } } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java index 8b4db36ba27ff..765ee8c0736f2 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java @@ -29,15 +29,16 @@ import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; - /** * A Kafka implementation of {@link BaseMetadataEventProducer}. * - * <p>The topic names that this emits to can be controlled by constructing this with a {@link TopicConvention}. If - * none is given, defaults to a {@link TopicConventionImpl} with the default delimiter of an underscore (_). + * <p>The topic names that this emits to can be controlled by constructing this with a {@link + * TopicConvention}. If none is given, defaults to a {@link TopicConventionImpl} with the default + * delimiter of an underscore (_). */ @Slf4j -public class KafkaMetadataEventProducer<SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> +public class KafkaMetadataEventProducer< + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> extends BaseMetadataEventProducer<SNAPSHOT, ASPECT_UNION, URN> { private final Producer<String, ? 
extends IndexedRecord> _producer; @@ -52,10 +53,11 @@ public class KafkaMetadataEventProducer<SNAPSHOT extends RecordTemplate, ASPECT_ * @param producer The Kafka {@link Producer} to use * @param topicConvention the convention to use to get kafka topic names */ - public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, - @Nonnull Class<ASPECT_UNION> aspectUnionClass, - @Nonnull Producer<String, ? extends IndexedRecord> producer, - @Nonnull TopicConvention topicConvention) { + public KafkaMetadataEventProducer( + @Nonnull Class<SNAPSHOT> snapshotClass, + @Nonnull Class<ASPECT_UNION> aspectUnionClass, + @Nonnull Producer<String, ? extends IndexedRecord> producer, + @Nonnull TopicConvention topicConvention) { this(snapshotClass, aspectUnionClass, producer, topicConvention, null); } @@ -68,11 +70,12 @@ public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, * @param topicConvention the convention to use to get kafka topic names * @param callback The {@link Callback} to invoke when the request is completed */ - public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, - @Nonnull Class<ASPECT_UNION> aspectUnionClass, - @Nonnull Producer<String, ? extends IndexedRecord> producer, - @Nonnull TopicConvention topicConvention, - @Nullable Callback callback) { + public KafkaMetadataEventProducer( + @Nonnull Class<SNAPSHOT> snapshotClass, + @Nonnull Class<ASPECT_UNION> aspectUnionClass, + @Nonnull Producer<String, ? extends IndexedRecord> producer, + @Nonnull TopicConvention topicConvention, + @Nullable Callback callback) { super(snapshotClass, aspectUnionClass); _producer = producer; _callback = Optional.ofNullable(callback); @@ -80,8 +83,8 @@ public KafkaMetadataEventProducer(@Nonnull Class<SNAPSHOT> snapshotClass, } @Override - public <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent(@Nonnull URN urn, - @Nonnull ASPECT newValue) { + public <ASPECT extends RecordTemplate> void produceSnapshotBasedMetadataChangeEvent( + @Nonnull URN urn, @Nonnull ASPECT newValue) { MetadataChangeEvent metadataChangeEvent = new MetadataChangeEvent(); metadataChangeEvent.setProposedSnapshot(makeSnapshot(urn, newValue)); @@ -93,16 +96,20 @@ record = EventUtils.pegasusToAvroMCE(metadataChangeEvent); } if (_callback.isPresent()) { - _producer.send(new ProducerRecord(_topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record), + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record), _callback.get()); } else { - _producer.send(new ProducerRecord(_topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record)); + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record)); } } @Override - public <ASPECT extends RecordTemplate> void produceMetadataAuditEvent(@Nonnull URN urn, @Nullable ASPECT oldValue, - @Nonnull ASPECT newValue) { + public <ASPECT extends RecordTemplate> void produceMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { MetadataAuditEvent metadataAuditEvent = new MetadataAuditEvent(); metadataAuditEvent.setNewSnapshot(makeSnapshot(urn, newValue)); @@ -118,16 +125,20 @@ record = EventUtils.pegasusToAvroMAE(metadataAuditEvent); } if (_callback.isPresent()) { - _producer.send(new ProducerRecord(_topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record), + _producer.send( + new ProducerRecord( + 
_topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record), _callback.get()); } else { - _producer.send(new ProducerRecord(_topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record)); + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record)); } } @Override - public <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { + public <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { // Aspect Specific MAE not supported. // TODO: Remove references to this class. throw new UnsupportedOperationException(); @@ -139,7 +150,8 @@ private Snapshot makeSnapshot(@Nonnull URN urn, @Nonnull RecordTemplate value) { List<ASPECT_UNION> aspects = new ArrayList<>(); aspects.add(ModelUtils.newAspectUnion(_aspectUnionClass, value)); - RecordUtils.setSelectedRecordTemplateInUnion(snapshot, ModelUtils.newSnapshot(_snapshotClass, urn, aspects)); + RecordUtils.setSelectedRecordTemplateInUnion( + snapshot, ModelUtils.newSnapshot(_snapshotClass, urn, aspects)); return snapshot; } @@ -147,4 +159,4 @@ private Snapshot makeSnapshot(@Nonnull URN urn, @Nonnull RecordTemplate value) { static boolean isValidAspectSpecificTopic(@Nonnull String topic) { return Arrays.stream(Topics.class.getFields()).anyMatch(field -> field.getName().equals(topic)); } -} \ No newline at end of file +} diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java index 2622404d03939..ca17ed4aa12d0 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java @@ -4,7 +4,6 @@ import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.RecordMetadata; - @Slf4j public class KafkaProducerCallback implements Callback { @Override diff --git a/metadata-events/mxe-avro/build.gradle b/metadata-events/mxe-avro/build.gradle index 9d11eeb160ff0..3aebc6bb1004d 100644 --- a/metadata-events/mxe-avro/build.gradle +++ b/metadata-events/mxe-avro/build.gradle @@ -47,4 +47,4 @@ jar { clean { delete 'src' -} \ No newline at end of file +} diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java index df06d1bae28e0..5611e4356bb64 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java @@ -8,30 +8,32 @@ import java.util.Map; import org.apache.avro.Schema; - public class Configs { public static final Map<String, String> FABRIC_SCHEMA_REGISTRY_MAP = - Collections.unmodifiableMap(new HashMap<String, String>() { - { - put("ei", "http://1.schemaregistry.ei4.atd.int.linkedin.com:10252"); - put("corp", "http://1.schemaregistry.corp-lca1.atd.corp.linkedin.com:10252"); - } - }); + Collections.unmodifiableMap( + new HashMap<String, String>() { + { + put("ei", "http://1.schemaregistry.ei4.atd.int.linkedin.com:10252"); + put("corp", "http://1.schemaregistry.corp-lca1.atd.corp.linkedin.com:10252"); + } 
+ }); - public static final Map<String, Schema> TOPIC_SCHEMA_MAP = Collections.unmodifiableMap(new HashMap<String, Schema>() { - { - put(Topics.METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); - put(Topics.METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); - put(Topics.FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); + public static final Map<String, Schema> TOPIC_SCHEMA_MAP = + Collections.unmodifiableMap( + new HashMap<String, Schema>() { + { + put(Topics.METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); + put(Topics.METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); + put(Topics.FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); - put(Topics.DEV_METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); - put(Topics.DEV_METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); - put(Topics.DEV_FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); - } - }); + put(Topics.DEV_METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); + put(Topics.DEV_METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); + put(Topics.DEV_FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); + } + }); private Configs() { // Util class } -} \ No newline at end of file +} diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java index 463abfdeca845..c61330565bcbf 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java @@ -5,65 +5,55 @@ import javax.annotation.Nonnull; import org.apache.avro.specific.SpecificRecord; - /** * The convention for naming kafka topics. * - * <p>Different companies may have different naming conventions or styles for their kafka topics. Namely, companies - * should pick _ or . as a delimiter, but not both, as they collide in metric names. + * <p>Different companies may have different naming conventions or styles for their kafka topics. + * Namely, companies should pick _ or . as a delimiter, but not both, as they collide in metric + * names. */ public interface TopicConvention { /** - * The name of the metadata change event (v4) kafka topic. - * Note that MetadataChangeEvents are deprecated, replaced by {@link MetadataChangeProposal}. + * The name of the metadata change event (v4) kafka topic. Note that MetadataChangeEvents are + * deprecated, replaced by {@link MetadataChangeProposal}. */ @Nonnull @Deprecated String getMetadataChangeEventTopicName(); /** - * The name of the metadata audit event (v4) kafka topic. - * Note that MetadataAuditEvents are deprecated, replaced by {@link MetadataChangeLog}. + * The name of the metadata audit event (v4) kafka topic. Note that MetadataAuditEvents are + * deprecated, replaced by {@link MetadataChangeLog}. */ @Nonnull @Deprecated String getMetadataAuditEventTopicName(); /** - * The name of the failed metadata change event (v4) kafka topic. - * Note that FailedMetadataChangeEvents are deprecated, replaced by {@link FailedMetadataChangeProposal}. + * The name of the failed metadata change event (v4) kafka topic. Note that + * FailedMetadataChangeEvents are deprecated, replaced by {@link FailedMetadataChangeProposal}. */ @Nonnull @Deprecated String getFailedMetadataChangeEventTopicName(); - /** - * The name of the metadata change proposal kafka topic. - */ + /** The name of the metadata change proposal kafka topic. 
*/ @Nonnull String getMetadataChangeProposalTopicName(); - /** - * The name of the metadata change log kafka topic. - */ + /** The name of the metadata change log kafka topic. */ @Nonnull String getMetadataChangeLogVersionedTopicName(); - /** - * The name of the metadata change log kafka topic with limited retention. - */ + /** The name of the metadata change log kafka topic with limited retention. */ @Nonnull String getMetadataChangeLogTimeseriesTopicName(); - /** - * The name of the failed metadata change proposal kafka topic. - */ + /** The name of the failed metadata change proposal kafka topic. */ @Nonnull String getFailedMetadataChangeProposalTopicName(); - /** - * The name of the platform event topic. - */ + /** The name of the platform event topic. */ @Nonnull String getPlatformEventTopicName(); @@ -77,9 +67,7 @@ public interface TopicConvention { @Deprecated String getMetadataChangeEventTopicName(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); - /** - * The name of the DataHub Upgrade history topic. - */ + /** The name of the DataHub Upgrade history topic. */ String getDataHubUpgradeHistoryTopicName(); /** @@ -89,7 +77,8 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class<? extends SpecificRecord> getMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); + Class<? extends SpecificRecord> getMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); /** * Returns the name of the metadata audit event (v5) kafka topic. @@ -108,8 +97,8 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class<? extends SpecificRecord> getMetadataAuditEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); - + Class<? extends SpecificRecord> getMetadataAuditEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); /** * Returns the name of the failed metadata change event (v5) kafka topic. @@ -128,5 +117,6 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class<? extends SpecificRecord> getFailedMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); + Class<? extends SpecificRecord> getFailedMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); } diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java index 3143584bbdcaf..282a015319781 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java @@ -5,14 +5,14 @@ import javax.annotation.Nonnull; import org.apache.avro.specific.SpecificRecord; - /** * Default implementation of a {@link TopicConvention}, which is fully customizable for event names. * - * <p>The newer aspect-entity specific event names are based on a pattern that can also be configured. The pattern is a - * string, which can use {@link #EVENT_TYPE_PLACEHOLDER}, {@link #VERSION_PLACEHOLDER}, {@link #ENTITY_PLACEHOLDER}, and - * {@link #ASPECT_PLACEHOLDER} as placeholders for the event type (MCE, MAE, FMCE, etc), event version, entity name, - * and aspect name, respectively. + * <p>The newer aspect-entity specific event names are based on a pattern that can also be + * configured. 
The pattern is a string, which can use {@link #EVENT_TYPE_PLACEHOLDER}, {@link + * #VERSION_PLACEHOLDER}, {@link #ENTITY_PLACEHOLDER}, and {@link #ASPECT_PLACEHOLDER} as + * placeholders for the event type (MCE, MAE, FMCE, etc), event version, entity name, and aspect + * name, respectively. */ public final class TopicConventionImpl implements TopicConvention { // Placeholders @@ -45,11 +45,17 @@ public final class TopicConventionImpl implements TopicConvention { // v5 patterns private final String _eventPattern; - public TopicConventionImpl(@Nonnull String metadataChangeEventTopicName, @Nonnull String metadataAuditEventTopicName, - @Nonnull String failedMetadataChangeEventTopicName, @Nonnull String metadataChangeProposalTopicName, - @Nonnull String metadataChangeLogVersionedTopicName, @Nonnull String metadataChangeLogTimeseriesTopicName, - @Nonnull String failedMetadataChangeProposalTopicName, @Nonnull String platformEventTopicName, - @Nonnull String eventPattern, @Nonnull String dataHubUpgradeHistoryTopicName) { + public TopicConventionImpl( + @Nonnull String metadataChangeEventTopicName, + @Nonnull String metadataAuditEventTopicName, + @Nonnull String failedMetadataChangeEventTopicName, + @Nonnull String metadataChangeProposalTopicName, + @Nonnull String metadataChangeLogVersionedTopicName, + @Nonnull String metadataChangeLogTimeseriesTopicName, + @Nonnull String failedMetadataChangeProposalTopicName, + @Nonnull String platformEventTopicName, + @Nonnull String eventPattern, + @Nonnull String dataHubUpgradeHistoryTopicName) { _metadataChangeEventTopicName = metadataChangeEventTopicName; _metadataAuditEventTopicName = metadataAuditEventTopicName; _failedMetadataChangeEventTopicName = failedMetadataChangeEventTopicName; @@ -63,9 +69,17 @@ public TopicConventionImpl(@Nonnull String metadataChangeEventTopicName, @Nonnul } public TopicConventionImpl() { - this(Topics.METADATA_CHANGE_EVENT, Topics.METADATA_AUDIT_EVENT, Topics.FAILED_METADATA_CHANGE_EVENT, - Topics.METADATA_CHANGE_PROPOSAL, Topics.METADATA_CHANGE_LOG_VERSIONED, Topics.METADATA_CHANGE_LOG_TIMESERIES, - Topics.FAILED_METADATA_CHANGE_PROPOSAL, Topics.PLATFORM_EVENT, DEFAULT_EVENT_PATTERN, Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME); + this( + Topics.METADATA_CHANGE_EVENT, + Topics.METADATA_AUDIT_EVENT, + Topics.FAILED_METADATA_CHANGE_EVENT, + Topics.METADATA_CHANGE_PROPOSAL, + Topics.METADATA_CHANGE_LOG_VERSIONED, + Topics.METADATA_CHANGE_LOG_TIMESERIES, + Topics.FAILED_METADATA_CHANGE_PROPOSAL, + Topics.PLATFORM_EVENT, + DEFAULT_EVENT_PATTERN, + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME); } @Nonnull @@ -117,15 +131,20 @@ public String getPlatformEventTopicName() { } @Nonnull - private String buildEventName(@Nonnull String eventType, @Nonnull String entityName, @Nonnull String aspectName, + private String buildEventName( + @Nonnull String eventType, + @Nonnull String entityName, + @Nonnull String aspectName, int version) { - return _eventPattern.replace(EVENT_TYPE_PLACEHOLDER, eventType) + return _eventPattern + .replace(EVENT_TYPE_PLACEHOLDER, eventType) .replace(ENTITY_PLACEHOLDER, entityName) .replace(ASPECT_PLACEHOLDER, aspectName) .replace(VERSION_PLACEHOLDER, Integer.toString(version)); } - private String buildEventName(@Nonnull String eventType, @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + private String buildEventName( + @Nonnull String eventType, @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { final String urnName = urn.getClass().getSimpleName(); // Expect URN name to relate to the entity name. 
(EntityName) + "Urn" == (UrnName) final String entityType = urnName.substring(0, urnName.length() - "Urn".length()); @@ -147,7 +166,8 @@ public String getDataHubUpgradeHistoryTopicName() { } @Override - public Class<? extends SpecificRecord> getMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public Class<? extends SpecificRecord> getMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } @@ -159,20 +179,22 @@ public String getMetadataAuditEventTopicName(@Nonnull Urn urn, @Nonnull RecordTe } @Override - public Class<? extends SpecificRecord> getMetadataAuditEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public Class<? extends SpecificRecord> getMetadataAuditEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } @Nonnull @Override - public String getFailedMetadataChangeEventTopicName(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public String getFailedMetadataChangeEventTopicName( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { return buildEventName(FAILED_METADATA_CHANGE_EVENT_TYPE, urn, aspect); } @Override - public Class<? extends SpecificRecord> getFailedMetadataChangeEventType(@Nonnull Urn urn, - @Nonnull RecordTemplate aspect) { + public Class<? extends SpecificRecord> getFailedMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java index 3a9a0812e1031..45bc2364aaa42 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java @@ -19,10 +19,7 @@ public class Topics { public static final String DEV_METADATA_CHANGE_EVENT = "MetadataChangeEvent_v4_dev"; public static final String DEV_FAILED_METADATA_CHANGE_EVENT = "FailedMetadataChangeEvent_v4_dev"; - /** - * aspect-specific MAE topics. - * format : METADATA_AUDIT_EVENT_<URN>_<ASPECT> - */ + /** aspect-specific MAE topics. format : METADATA_AUDIT_EVENT_<URN>_<ASPECT> */ // MAE topics for CorpGroup entity. public static final String METADATA_AUDIT_EVENT_CORPGROUP_CORPGROUPINFO = "MetadataAuditEvent_CorpGroup_CorpGroupInfo_v1"; @@ -30,12 +27,10 @@ public class Topics { // MAE topics for CorpUser entity. public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSEREDITABLEINFO = "MetadataAuditEvent_CorpUser_CorpUserEditableInfo_v2"; - public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSERINFO = "MetadataAuditEvent_CorpUser_CorpUserInfo_v2"; + public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSERINFO = + "MetadataAuditEvent_CorpUser_CorpUserInfo_v2"; - /** - * aspect-specific MCE topics. - * format : METADATA_CHANGE_EVENT_<URN>_<ASPECT> - */ + /** aspect-specific MCE topics. format : METADATA_CHANGE_EVENT_<URN>_<ASPECT> */ // MCE topics for CorpGroup entity. 
public static final String METADATA_CHANGE_EVENT_CORPGROUP_CORPGROUPINFO = "MetadataChangeEvent_CorpGroup_CorpGroupInfo_v1"; @@ -46,10 +41,7 @@ public class Topics { public static final String METADATA_CHANGE_EVENT_CORPUSER_CORPUSERINFO = "MetadataChangeEvent_CorpUser_CorpUserInfo_v1"; - /** - * aspect-specific FMCE topics. - * format : FAILED_METADATA_CHANGE_EVENT_<URN>_<ASPECT> - */ + /** aspect-specific FMCE topics. format : FAILED_METADATA_CHANGE_EVENT_<URN>_<ASPECT> */ // FMCE topics for CorpGroup entity. public static final String FAILED_METADATA_CHANGE_EVENT_CORPGROUP_CORPGROUPINFO = "FailedMetadataChangeEvent_CorpGroup_CorpGroupInfo_v1"; @@ -63,4 +55,4 @@ public class Topics { private Topics() { // Util class } -} \ No newline at end of file +} diff --git a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java index 9c95d9f4aabdc..645c2fe210e09 100644 --- a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java +++ b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java @@ -8,10 +8,10 @@ import com.linkedin.mxe.DataHubUpgradeHistoryEvent; import com.linkedin.mxe.FailedMetadataChangeEvent; import com.linkedin.mxe.FailedMetadataChangeProposal; -import com.linkedin.mxe.MetadataChangeLog; -import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.MetadataAuditEvent; import com.linkedin.mxe.MetadataChangeEvent; +import com.linkedin.mxe.MetadataChangeLog; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -30,7 +30,6 @@ import org.apache.avro.io.EncoderFactory; import org.apache.avro.specific.SpecificRecord; - public class EventUtils { private static final RecordDataSchema MCE_PEGASUS_SCHEMA = new MetadataChangeEvent().schema(); @@ -43,7 +42,8 @@ public class EventUtils { private static final RecordDataSchema PE_PEGASUS_SCHEMA = new PlatformEvent().schema(); - private static final RecordDataSchema DUHE_PEGASUS_SCHEMA = new DataHubUpgradeHistoryEvent().schema(); + private static final RecordDataSchema DUHE_PEGASUS_SCHEMA = + new DataHubUpgradeHistoryEvent().schema(); private static final Schema ORIGINAL_MCE_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/MetadataChangeEvent.avsc"); @@ -69,14 +69,17 @@ public class EventUtils { public static final Schema ORIGINAL_DUHE_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/DataHubUpgradeHistoryEvent.avsc"); - private static final Schema RENAMED_MCE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$; + private static final Schema RENAMED_MCE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$; - private static final Schema RENAMED_MAE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$; + private static final Schema RENAMED_MAE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$; private static final Schema RENAMED_FAILED_MCE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$; - private static final Schema RENAMED_PE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.PlatformEvent.SCHEMA$; + private static final Schema RENAMED_PE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.PlatformEvent.SCHEMA$; private static final Schema RENAMED_MCP_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataChangeProposal.SCHEMA$; @@ -107,79 
+110,102 @@ private static Schema getAvroSchemaFromResource(@Nonnull String resourcePath) { /** * Converts a {@link GenericRecord} MAE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MAE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MAE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataAuditEvent} model */ @Nonnull - public static MetadataAuditEvent avroToPegasusMAE(@Nonnull GenericRecord record) throws IOException { - return new MetadataAuditEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MAE_AVRO_SCHEMA, ORIGINAL_MAE_AVRO_SCHEMA), MAE_PEGASUS_SCHEMA, - ORIGINAL_MAE_AVRO_SCHEMA)); + public static MetadataAuditEvent avroToPegasusMAE(@Nonnull GenericRecord record) + throws IOException { + return new MetadataAuditEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MAE_AVRO_SCHEMA, ORIGINAL_MAE_AVRO_SCHEMA), + MAE_PEGASUS_SCHEMA, + ORIGINAL_MAE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MCE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeEvent} model */ @Nonnull - public static MetadataChangeEvent avroToPegasusMCE(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCE_AVRO_SCHEMA, ORIGINAL_MCE_AVRO_SCHEMA), MCE_PEGASUS_SCHEMA, - ORIGINAL_MCE_AVRO_SCHEMA)); + public static MetadataChangeEvent avroToPegasusMCE(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCE_AVRO_SCHEMA, ORIGINAL_MCE_AVRO_SCHEMA), + MCE_PEGASUS_SCHEMA, + ORIGINAL_MCE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCL into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MCL in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCL in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeLog} model */ @Nonnull - public static MetadataChangeLog avroToPegasusMCL(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeLog(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCL_AVRO_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA), - MCL_PEGASUS_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA)); + public static MetadataChangeLog avroToPegasusMCL(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeLog( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCL_AVRO_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA), + MCL_PEGASUS_SCHEMA, + ORIGINAL_MCL_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCP into the equivalent Pegasus model. 
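The avroToPegasus*/pegasusToAvro* helpers in this file are intended to be inverses of each other. A minimal round-trip sketch for the MCL pair, assuming an event populated enough to pass schema translation (the setters are elided rather than guessed):

// Round-trip sketch for the MetadataChangeLog helpers; not part of the patch.
import com.linkedin.metadata.EventUtils;
import com.linkedin.mxe.MetadataChangeLog;
import org.apache.avro.generic.GenericRecord;

public class MclRoundTrip {
  public static void main(String[] args) throws Exception {
    MetadataChangeLog mcl = new MetadataChangeLog();
    // ... populate entityType, changeType, aspect, etc. before converting ...
    GenericRecord avro = EventUtils.pegasusToAvroMCL(mcl); // Pegasus -> Avro
    MetadataChangeLog back = EventUtils.avroToPegasusMCL(avro); // Avro -> Pegasus
    System.out.println(mcl.equals(back)); // expected: true for a populated event
  }
}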
   *
-   * @param record the {@link GenericRecord} that contains the MCP in com.linkedin.pegasus2avro namespace
+   * @param record the {@link GenericRecord} that contains the MCP in com.linkedin.pegasus2avro
+   *     namespace
    * @return the Pegasus {@link MetadataChangeProposal} model
    */
   @Nonnull
-  public static MetadataChangeProposal avroToPegasusMCP(@Nonnull GenericRecord record) throws IOException {
-    return new MetadataChangeProposal(DataTranslator.genericRecordToDataMap(
-        renameSchemaNamespace(record, RENAMED_MCP_AVRO_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA),
-        MCP_PEGASUS_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA));
+  public static MetadataChangeProposal avroToPegasusMCP(@Nonnull GenericRecord record)
+      throws IOException {
+    return new MetadataChangeProposal(
+        DataTranslator.genericRecordToDataMap(
+            renameSchemaNamespace(record, RENAMED_MCP_AVRO_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA),
+            MCP_PEGASUS_SCHEMA,
+            ORIGINAL_MCP_AVRO_SCHEMA));
   }
 
   /**
    * Converts a {@link GenericRecord} PE into the equivalent Pegasus model.
    *
-   * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro namespace
+   * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro
+   *     namespace
    * @return the Pegasus {@link PlatformEvent} model
    */
   @Nonnull
   public static PlatformEvent avroToPegasusPE(@Nonnull GenericRecord record) throws IOException {
-    return new PlatformEvent(DataTranslator.genericRecordToDataMap(
-        renameSchemaNamespace(record, RENAMED_PE_AVRO_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA),
-        PE_PEGASUS_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA));
+    return new PlatformEvent(
+        DataTranslator.genericRecordToDataMap(
+            renameSchemaNamespace(record, RENAMED_PE_AVRO_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA),
+            PE_PEGASUS_SCHEMA,
+            ORIGINAL_PE_AVRO_SCHEMA));
   }
 
   /**
    * Converts a {@link GenericRecord} DUHE into the equivalent Pegasus model.
    *
-   * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro namespace
+   * @param record the {@link GenericRecord} that contains the DUHE in com.linkedin.pegasus2avro
+   *     namespace
    * @return the Pegasus {@link DataHubUpgradeHistoryEvent} model
    */
   @Nonnull
-  public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecord record) throws IOException {
-    return new DataHubUpgradeHistoryEvent(DataTranslator.genericRecordToDataMap(
-        renameSchemaNamespace(record, RENAMED_DUHE_AVRO_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA),
-        DUHE_PEGASUS_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA));
+  public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecord record)
+      throws IOException {
+    return new DataHubUpgradeHistoryEvent(
+        DataTranslator.genericRecordToDataMap(
+            renameSchemaNamespace(record, RENAMED_DUHE_AVRO_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA),
+            DUHE_PEGASUS_SCHEMA,
+            ORIGINAL_DUHE_AVRO_SCHEMA));
   }
 
   /**
@@ -190,9 +216,11 @@ public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecor
    * @throws IOException if the conversion fails
    */
   @Nonnull
-  public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) throws IOException {
+  public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event)
+      throws IOException {
     GenericRecord original =
-        DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MAE_AVRO_SCHEMA);
+        DataTranslator.dataMapToGenericRecord(
+            event.data(), event.schema(), ORIGINAL_MAE_AVRO_SCHEMA);
     return renameSchemaNamespace(original, RENAMED_MAE_AVRO_SCHEMA);
   }
 
@@ -204,9 +232,11 @@ public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event)
    * @throws IOException if the conversion fails
    */
   @Nonnull
-  public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) throws IOException {
+  public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event)
+      throws IOException {
     GenericRecord original =
-        DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCL_AVRO_SCHEMA);
+        DataTranslator.dataMapToGenericRecord(
+            event.data(), event.schema(), ORIGINAL_MCL_AVRO_SCHEMA);
     return renameSchemaNamespace(original, RENAMED_MCL_AVRO_SCHEMA);
   }
 
@@ -218,9 +248,11 @@ public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) t
    * @throws IOException if the conversion fails
    */
   @Nonnull
-  public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal event) throws IOException {
+  public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal event)
+      throws IOException {
     GenericRecord original =
-        DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCP_AVRO_SCHEMA);
+        DataTranslator.dataMapToGenericRecord(
+            event.data(), event.schema(), ORIGINAL_MCP_AVRO_SCHEMA);
     return renameSchemaNamespace(original, RENAMED_MCP_AVRO_SCHEMA);
   }
 
@@ -232,26 +264,30 @@ public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal eve
    * @throws IOException if the conversion fails
    */
   @Nonnull
-  public static GenericRecord pegasusToAvroMCE(@Nonnull MetadataChangeEvent event) throws IOException {
+  public static GenericRecord pegasusToAvroMCE(@Nonnull MetadataChangeEvent event)
+      throws IOException {
     GenericRecord original =
-        DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCE_AVRO_SCHEMA);
+        DataTranslator.dataMapToGenericRecord(
+            event.data(), event.schema(), ORIGINAL_MCE_AVRO_SCHEMA);
     return
renameSchemaNamespace(original, RENAMED_MCE_AVRO_SCHEMA); } /** - * Converts a Pegasus aspect specific MXE into the equivalent Avro model as a {@link GenericRecord}. + * Converts a Pegasus aspect specific MXE into the equivalent Avro model as a {@link + * GenericRecord}. * * @param event the Pegasus aspect specific MXE model * @return the Avro model with com.linkedin.pegasus2avro.mxe namespace * @throws IOException if the conversion fails */ @Nonnull - public static <MXE extends GenericRecord, T extends SpecificRecord> MXE pegasusToAvroAspectSpecificMXE( - @Nonnull Class<T> clazz, @Nonnull RecordTemplate event) - throws NoSuchFieldException, IOException, IllegalAccessException { + public static <MXE extends GenericRecord, T extends SpecificRecord> + MXE pegasusToAvroAspectSpecificMXE(@Nonnull Class<T> clazz, @Nonnull RecordTemplate event) + throws NoSuchFieldException, IOException, IllegalAccessException { final Schema newSchema = (Schema) clazz.getField("SCHEMA$").get(null); final Schema originalSchema = getAvroSchemaFromResource(getAvroResourcePath(clazz)); - final GenericRecord original = DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), originalSchema); + final GenericRecord original = + DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), originalSchema); return (MXE) renameSchemaNamespace(original, originalSchema, newSchema); } @@ -263,10 +299,12 @@ public static <MXE extends GenericRecord, T extends SpecificRecord> MXE pegasusT * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroFailedMCE(@Nonnull FailedMetadataChangeEvent failedMetadataChangeEvent) - throws IOException { + public static GenericRecord pegasusToAvroFailedMCE( + @Nonnull FailedMetadataChangeEvent failedMetadataChangeEvent) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(failedMetadataChangeEvent.data(), failedMetadataChangeEvent.schema(), + DataTranslator.dataMapToGenericRecord( + failedMetadataChangeEvent.data(), + failedMetadataChangeEvent.schema(), ORIGINAL_FAILED_MCE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_FAILED_MCE_AVRO_SCHEMA); } @@ -282,7 +320,9 @@ public static GenericRecord pegasusToAvroFailedMCE(@Nonnull FailedMetadataChange public static GenericRecord pegasusToAvroFailedMCP( @Nonnull FailedMetadataChangeProposal failedMetadataChangeProposal) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(failedMetadataChangeProposal.data(), failedMetadataChangeProposal.schema(), + DataTranslator.dataMapToGenericRecord( + failedMetadataChangeProposal.data(), + failedMetadataChangeProposal.schema(), ORIGINAL_FMCL_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_FMCP_AVRO_SCHEMA); } @@ -297,33 +337,37 @@ public static GenericRecord pegasusToAvroFailedMCP( @Nonnull public static GenericRecord pegasusToAvroPE(@Nonnull PlatformEvent event) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_PE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_PE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_PE_AVRO_SCHEMA); } /** - * Converts a Pegasus DataHub Upgrade History Event into the equivalent Avro model as a {@link GenericRecord}. + * Converts a Pegasus DataHub Upgrade History Event into the equivalent Avro model as a {@link + * GenericRecord}. 
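The SCHEMA$ lookup in pegasusToAvroAspectSpecificMXE above pairs with a purely mechanical resource-path convention (getAvroResourcePath, later in this file). A worked example of that derivation, using a class name already referenced in this file:

// Mirrors getAvroResourcePath: strip ".pegasus2avro" from the canonical class
// name and turn the remaining dots into slashes. Standalone demo, not patch code.
public class AvroResourcePathDemo {
  public static void main(String[] args) {
    String canonical = "com.linkedin.pegasus2avro.mxe.MetadataChangeEvent";
    String path =
        String.format("avro/%s.avsc", canonical.replace(".pegasus2avro", "").replace(".", "/"));
    System.out.println(path); // avro/com/linkedin/mxe/MetadataChangeEvent.avsc
  }
}

The printed path matches the resource string loaded for ORIGINAL_MCE_AVRO_SCHEMA earlier in this file.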
   *
   * @param event the Pegasus {@link com.linkedin.mxe.DataHubUpgradeHistoryEvent} model
   * @return the Avro model with com.linkedin.pegasus2avro.event namespace
   * @throws IOException if the conversion fails
   */
  @Nonnull
-  public static GenericRecord pegasusToAvroDUHE(@Nonnull DataHubUpgradeHistoryEvent event) throws IOException {
+  public static GenericRecord pegasusToAvroDUHE(@Nonnull DataHubUpgradeHistoryEvent event)
+      throws IOException {
     GenericRecord original =
-        DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_DUHE_AVRO_SCHEMA);
+        DataTranslator.dataMapToGenericRecord(
+            event.data(), event.schema(), ORIGINAL_DUHE_AVRO_SCHEMA);
     return renameSchemaNamespace(original, RENAMED_DUHE_AVRO_SCHEMA);
   }
 
   /**
-   * Converts original MXE into a renamed namespace
-   * Does a double convert that should not be necessary since we're already converting prior to calling this method
-   * in most spots
+   * Converts original MXE into a renamed namespace. Does a double convert that should not be
+   * necessary, since we're already converting prior to calling this method in most spots.
    */
   @Nonnull
   @Deprecated
-  private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema originalSchema,
-      @Nonnull Schema newSchema) throws IOException {
+  private static GenericRecord renameSchemaNamespace(
+      @Nonnull GenericRecord original, @Nonnull Schema originalSchema, @Nonnull Schema newSchema)
+      throws IOException {
     // Step 1: Updates to the latest original schema
     final GenericRecord record = changeSchema(original, original.getSchema(), originalSchema);
 
@@ -332,12 +376,10 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin
     return changeSchema(record, newSchema, newSchema);
   }
 
-  /**
-   * Converts original MXE into a renamed namespace
-   */
+  /** Converts original MXE into a renamed namespace */
   @Nonnull
-  private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema newSchema)
-      throws IOException {
+  private static GenericRecord renameSchemaNamespace(
+      @Nonnull GenericRecord original, @Nonnull Schema newSchema) throws IOException {
     return changeSchema(original, newSchema, newSchema);
   }
 
@@ -345,7 +387,8 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin
   /**
    * Changes the schema of a {@link GenericRecord} to a compatible schema
    *
-   * Achieved by serializing the record using its embedded schema and deserializing it using the new compatible schema.
+   * <p>Achieved by serializing the record using its embedded schema and deserializing it using the
+   * new compatible schema.
* * @param record the record to update schema for * @param writerSchema the writer schema to use when deserializing @@ -354,8 +397,9 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin * @throws IOException */ @Nonnull - private static GenericRecord changeSchema(@Nonnull GenericRecord record, @Nonnull Schema writerSchema, - @Nonnull Schema readerSchema) throws IOException { + private static GenericRecord changeSchema( + @Nonnull GenericRecord record, @Nonnull Schema writerSchema, @Nonnull Schema readerSchema) + throws IOException { try (ByteArrayOutputStream os = new ByteArrayOutputStream()) { BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(os, null); DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema()); @@ -374,6 +418,7 @@ private static GenericRecord changeSchema(@Nonnull GenericRecord record, @Nonnul /** * Get Pegasus class from Avro class. + * * @param clazz the aspect specific MXE avro class * @return the Pegasus aspect specific MXE class * @throws Exception @@ -383,6 +428,7 @@ public static Class<?> getPegasusClass(@Nonnull Class<?> clazz) throws ClassNotF } private static String getAvroResourcePath(@Nonnull Class<?> clazz) { - return String.format("avro/%s.avsc", clazz.getCanonicalName().replace(".pegasus2avro", "").replace(".", "/")); + return String.format( + "avro/%s.avsc", clazz.getCanonicalName().replace(".pegasus2avro", "").replace(".", "/")); } } diff --git a/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java b/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java index 66759d4637c18..1318109d476d7 100644 --- a/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java +++ b/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java @@ -1,5 +1,8 @@ package com.linkedin.metadata; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.util.RecordUtils; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.data.template.RecordTemplate; @@ -16,39 +19,53 @@ import org.apache.avro.io.JsonDecoder; import org.testng.annotations.Test; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class EventUtilsTests { @Test public void testAvroToPegasusMAE() throws IOException { - GenericRecord record = genericRecordFromResource("test-avro2pegasus-mae.json", - com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$); + GenericRecord record = + genericRecordFromResource( + "test-avro2pegasus-mae.json", com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$); MetadataAuditEvent mae = EventUtils.avroToPegasusMAE(record); assertEquals( - mae.getNewSnapshot().getDatasetSnapshot().getAspects().get(0).getOwnership().getOwners().get(0).getOwner(), + mae.getNewSnapshot() + .getDatasetSnapshot() + .getAspects() + .get(0) + .getOwnership() + .getOwners() + .get(0) + .getOwner(), new CorpuserUrn("foobar")); } @Test public void testAvroToPegasusMCE() throws IOException { - GenericRecord record = genericRecordFromResource("test-avro2pegasus-mce.json", - com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$); + GenericRecord record = + genericRecordFromResource( + "test-avro2pegasus-mce.json", + com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$); MetadataChangeEvent mce = EventUtils.avroToPegasusMCE(record); assertEquals( - 
mce.getProposedSnapshot().getDatasetSnapshot().getAspects().get(0).getOwnership().getOwners().get(0).getOwner(), + mce.getProposedSnapshot() + .getDatasetSnapshot() + .getAspects() + .get(0) + .getOwnership() + .getOwners() + .get(0) + .getOwner(), new CorpuserUrn("foobar")); } @Test public void testPegasusToAvroMAE() throws IOException { - MetadataAuditEvent event = recordTemplateFromResource("test-pegasus2avro-mae.json", MetadataAuditEvent.class); + MetadataAuditEvent event = + recordTemplateFromResource("test-pegasus2avro-mae.json", MetadataAuditEvent.class); GenericRecord record = EventUtils.pegasusToAvroMAE(event); @@ -58,7 +75,8 @@ public void testPegasusToAvroMAE() throws IOException { @Test public void testPegasusToAvroMCE() throws IOException { - MetadataChangeEvent event = recordTemplateFromResource("test-pegasus2avro-mce.json", MetadataChangeEvent.class); + MetadataChangeEvent event = + recordTemplateFromResource("test-pegasus2avro-mce.json", MetadataChangeEvent.class); GenericRecord record = EventUtils.pegasusToAvroMCE(event); @@ -68,24 +86,27 @@ public void testPegasusToAvroMCE() throws IOException { @Test public void testPegasusToAvroFailedMCE() throws IOException { - FailedMetadataChangeEvent event = recordTemplateFromResource("test-pegasus2avro-fmce.json", FailedMetadataChangeEvent.class); + FailedMetadataChangeEvent event = + recordTemplateFromResource("test-pegasus2avro-fmce.json", FailedMetadataChangeEvent.class); GenericRecord record = EventUtils.pegasusToAvroFailedMCE(event); - assertEquals(record.getSchema(), com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$); + assertEquals( + record.getSchema(), com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$); assertNotNull(record.get("error")); assertNotNull(record.get("metadataChangeEvent")); } - private GenericRecord genericRecordFromResource(String resourcePath, Schema schema) throws IOException { + private GenericRecord genericRecordFromResource(String resourcePath, Schema schema) + throws IOException { InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath); JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, is); DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema); return reader.read(null, decoder); } - private <T extends RecordTemplate> T recordTemplateFromResource(String resourcePath, - Class<? extends RecordTemplate> clazz) throws IOException { + private <T extends RecordTemplate> T recordTemplateFromResource( + String resourcePath, Class<? 
extends RecordTemplate> clazz) throws IOException { String json = loadJsonFromResource(resourcePath); return (T) RecordUtils.toRecordTemplate(clazz, json); } diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index 0bf6b18fa5073..7ae01faaaabdd 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -239,8 +239,6 @@ processResources.dependsOn generateOpenApiPojos sourceSets.main.java.srcDir "${generateOpenApiPojos.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" -checkstyleMain.exclude '**/generated/**' - clean { project.delete("$projectDir/generated") } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java index 84fe9cef0817c..a899f27a0cb2c 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java @@ -2,21 +2,20 @@ import javax.annotation.Nullable; - public interface Callback { /** - * Called when the client request has completed. - * Completion does not imply success. Inspect the response object to understand if - * this was a successfully processed request or not. + * Called when the client request has completed. Completion does not imply success. Inspect the + * response object to understand if this was a successfully processed request or not. + * * @param response */ void onCompletion(@Nullable MetadataWriteResponse response); /** * Called when the client request has thrown an exception before completion. + * * @param exception */ void onFailure(Throwable exception); - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java index 25bcba5f7d4c6..97c4558933b69 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java @@ -11,61 +11,74 @@ import javax.annotation.Nonnull; import javax.annotation.concurrent.ThreadSafe; - /** - * An interface implemented by all metadata emitters to DataHub. - * Typical usage: - * 1. Construct the emitter using the native constructor or builder for the Emitter. - * 2. Call `emitter.emit(mcpw, callback)` for each event you want to send - * 3. Wait for all events to be sent by inspecting the futures returned by each call or using callbacks - * 4. Call `emitter.close()` to finalize. + * An interface implemented by all metadata emitters to DataHub. Typical usage: 1. Construct the + * emitter using the native constructor or builder for the Emitter. 2. Call `emitter.emit(mcpw, + * callback)` for each event you want to send 3. Wait for all events to be sent by inspecting the + * futures returned by each call or using callbacks 4. Call `emitter.close()` to finalize. */ @ThreadSafe public interface Emitter extends Closeable { /** * Asynchronously emit a {@link MetadataChangeProposalWrapper} event. + * * @param mcpw * @param callback if not null, is called from the IO thread. Should be a quick operation. 
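The lifecycle the interface javadoc above describes (construct, emit, await, close) looks roughly like this in client code. A hedged sketch, not a definitive implementation: RestEmitter is one concrete Emitter from this module, createWithDefaults() is understood to target a local GMS endpoint, and building the MetadataChangeProposalWrapper itself is deliberately elided rather than guessed.

// Hedged usage sketch of the Emitter contract; the MCPW builder API is omitted.
import datahub.client.MetadataWriteResponse;
import datahub.client.rest.RestEmitter;
import datahub.event.MetadataChangeProposalWrapper;
import java.util.concurrent.Future;

public class EmitterUsageSketch {
  static void emitOnce(MetadataChangeProposalWrapper mcpw) throws Exception {
    // 1. Construct the emitter.
    try (RestEmitter emitter = RestEmitter.createWithDefaults()) {
      // 2. Send the event; the emit(mcpw, callback) variant also exists.
      Future<MetadataWriteResponse> future = emitter.emit(mcpw);
      // 3. Block until the send completes; completion does not imply success.
      MetadataWriteResponse response = future.get();
      System.out.println("success = " + response.isSuccess());
    } // 4. close() happens via try-with-resources, since Emitter extends Closeable.
  }
}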
- * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw, Callback callback) throws IOException; + Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException; /** * Asynchronously emit a {@link MetadataChangeProposalWrapper} event. + * * @param mcpw - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw) throws IOException { + default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposalWrapper mcpw) + throws IOException { return emit(mcpw, null); } /** * Asynchronously emit a {@link MetadataChangeProposal} event. Prefer using the sibling method - * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to construct. + * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to + * construct. + * * @param mcp * @param callback if not null, is called from the IO thread. Should be a quick operation. - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp, Callback callback) throws IOException; + Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp, Callback callback) + throws IOException; /** * Asynchronously emit a {@link MetadataChangeProposal} event. Prefer using the sibling method - * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to construct. + * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to + * construct. + * * @param mcp - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) throws IOException { + default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) + throws IOException { return emit(mcp, null); } /** * Test that the emitter can establish a valid connection to the DataHub platform - * @return true if a valid connection can be established, false or throws one of the exceptions otherwise + * + * @return true if a valid connection can be established, false or throws one of the exceptions + * otherwise * @throws IOException * @throws ExecutionException * @throws InterruptedException @@ -74,11 +87,12 @@ default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) /** * Asynchronously emit a {@link UpsertAspectRequest}. 
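// Illustrative sketch, not from this commit: constructing the MetadataChangeProposalWrapper
// that the emit() overloads above accept. The builder shape follows the datahub-client
// usage examples and may differ across versions; the urn and description are invented.
import com.linkedin.dataset.DatasetProperties;
import datahub.event.MetadataChangeProposalWrapper;

public class McpwExample {
  static MetadataChangeProposalWrapper buildExampleMcpw() {
    return MetadataChangeProposalWrapper.builder()
        .entityType("dataset")
        .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)")
        .upsert()
        .aspect(new DatasetProperties().setDescription("Example description"))
        .build();
  }
}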
+ * * @param request request with with metadata aspect to upsert into DataHub - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException; - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java index 51126a1cdcbea..89db9738efda6 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java @@ -9,15 +9,16 @@ import lombok.SneakyThrows; import org.apache.http.HttpResponse; - public class MetadataResponseFuture implements Future<MetadataWriteResponse> { private final Future<HttpResponse> requestFuture; private final AtomicReference<MetadataWriteResponse> responseReference; private final CountDownLatch responseLatch; private final ResponseMapper mapper; - public MetadataResponseFuture(Future<HttpResponse> underlyingFuture, - AtomicReference<MetadataWriteResponse> responseAtomicReference, CountDownLatch responseLatch) { + public MetadataResponseFuture( + Future<HttpResponse> underlyingFuture, + AtomicReference<MetadataWriteResponse> responseAtomicReference, + CountDownLatch responseLatch) { this.requestFuture = underlyingFuture; this.responseReference = responseAtomicReference; this.responseLatch = responseLatch; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java index 969ef10c41a24..b6e77556980c1 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java @@ -3,27 +3,19 @@ import lombok.Builder; import lombok.Value; - @Value @Builder public class MetadataWriteResponse { - /** - * True if the client send succeeded and we got a successful response from the server - */ - @Builder.Default - boolean success = true; + /** True if the client send succeeded and we got a successful response from the server */ + @Builder.Default boolean success = true; /** - * If the write failed due to an exception thrown by the server - * and we have access to it, then we store the stack trace here + * If the write failed due to an exception thrown by the server and we have access to it, then we + * store the stack trace here */ String responseContent; - /** - * The underlying response object - * (typically an HTTPResponse or a kafka.ResponseMetadata) - */ + /** The underlying response object (typically an HTTPResponse or a kafka.ResponseMetadata) */ Object underlyingResponse; - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java index aae0e51b6736e..ab866f060b354 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java 
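// Illustrative sketch, not from this commit: driving the FileEmitter whose reformatted
// source follows. FileEmitterConfig's fileName builder field appears later in this diff;
// the output path is invented, and buildExampleMcpw() refers to the hypothetical helper
// from the previous sketch.
import datahub.client.file.FileEmitter;
import datahub.client.file.FileEmitterConfig;

public class FileEmitterExample {
  public static void main(String[] args) throws Exception {
    FileEmitter emitter =
        new FileEmitter(FileEmitterConfig.builder().fileName("/tmp/mcps.json").build());
    // emit() resolves to a cached success future once the event has been written out.
    emitter.emit(McpwExample.buildExampleMcpw()).get();
    emitter.close(); // finalizes the output file
  }
}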
@@ -1,34 +1,31 @@ package datahub.client.file; -import com.fasterxml.jackson.core.StreamReadConstraints; -import java.io.BufferedWriter; -import java.io.FileWriter; -import java.io.IOException; -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; +import static com.linkedin.metadata.Constants.*; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.util.DefaultIndenter; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.data.template.JacksonDataTemplateCodec; import com.linkedin.mxe.MetadataChangeProposal; - import datahub.client.Callback; import datahub.client.Emitter; import datahub.client.MetadataWriteResponse; import datahub.event.EventFormatter; import datahub.event.MetadataChangeProposalWrapper; import datahub.event.UpsertAspectRequest; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class FileEmitter implements Emitter { @@ -45,22 +42,27 @@ public class FileEmitter implements Emitter { /** * The default constructor - * + * * @param config */ public FileEmitter(FileEmitterConfig config) { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); this.config = config; this.eventFormatter = this.config.getEventFormatter(); - DefaultPrettyPrinter pp = new DefaultPrettyPrinter() - .withObjectIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)) - .withArrayIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)); + DefaultPrettyPrinter pp = + new DefaultPrettyPrinter() + .withObjectIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)) + .withArrayIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)); this.dataTemplateCodec.setPrettyPrinter(pp); try { @@ -75,33 +77,37 @@ public FileEmitter(FileEmitterConfig config) { this.wroteSomething = false; log.debug("Emitter created successfully for " + this.config.getFileName()); - this.cachedSuccessFuture = new Future<MetadataWriteResponse>() { - @Override - public boolean cancel(boolean mayInterruptIfRunning) { - return false; - } - - @Override - public MetadataWriteResponse get() throws InterruptedException, ExecutionException { - return MetadataWriteResponse.builder().success(true).responseContent("MCP witten to File").build(); - } - - @Override - public 
MetadataWriteResponse get(long timeout, TimeUnit unit) - throws InterruptedException, ExecutionException, TimeoutException { - return this.get(); - } - - @Override - public boolean isCancelled() { - return false; - } - - @Override - public boolean isDone() { - return true; - } - }; + this.cachedSuccessFuture = + new Future<MetadataWriteResponse>() { + @Override + public boolean cancel(boolean mayInterruptIfRunning) { + return false; + } + + @Override + public MetadataWriteResponse get() throws InterruptedException, ExecutionException { + return MetadataWriteResponse.builder() + .success(true) + .responseContent("MCP written to File") + .build(); + } + + @Override + public MetadataWriteResponse get(long timeout, TimeUnit unit) + throws InterruptedException, ExecutionException, TimeoutException { + return this.get(); + } + + @Override + public boolean isCancelled() { + return false; + } + + @Override + public boolean isDone() { + return true; + } + }; } @Override @@ -114,13 +120,15 @@ public void close() throws IOException { } @Override - public Future<MetadataWriteResponse> emit(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, - Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit( + @SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException { return emit(this.eventFormatter.convert(mcpw), callback); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback callback) + throws IOException { if (this.closed.get()) { String errorMsg = "File Emitter is already closed."; log.error(errorMsg); @@ -167,7 +175,8 @@ public boolean testConnection() throws IOException, ExecutionException, Interrup } @Override - public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) + throws IOException { throw new UnsupportedOperationException("UpsertAspectRequest not relevant for File Emitter"); } @@ -185,8 +194,8 @@ public MetadataWriteResponse get() throws InterruptedException, ExecutionExcepti } @Override - public MetadataWriteResponse get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, - TimeoutException { + public MetadataWriteResponse get(long timeout, TimeUnit unit) + throws InterruptedException, ExecutionException, TimeoutException { return this.get(); } @@ -199,8 +208,6 @@ public boolean isCancelled() { public boolean isDone() { return true; } - }; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java index c89edef81ef5e..61ee12d88824d 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java @@ -7,10 +7,9 @@ @Value @Builder public class FileEmitterConfig { + @Builder.Default @lombok.NonNull private final String fileName = null; + @Builder.Default - @lombok.NonNull - private final String fileName = null; - @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); - + private final EventFormatter eventFormatter
= + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java index 6212e57470be4..0d0341562e7dd 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java @@ -1,16 +1,13 @@ package datahub.client.kafka; -import java.io.IOException; - -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericRecord; - import com.google.common.annotations.VisibleForTesting; import com.linkedin.mxe.MetadataChangeProposal; - import datahub.event.EventFormatter; import datahub.event.MetadataChangeProposalWrapper; +import java.io.IOException; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; class AvroSerializer { @@ -20,8 +17,12 @@ class AvroSerializer { private final EventFormatter _eventFormatter; public AvroSerializer() throws IOException { - _recordSchema = new Schema.Parser() - .parse(this.getClass().getClassLoader().getResourceAsStream("MetadataChangeProposal.avsc")); + _recordSchema = + new Schema.Parser() + .parse( + this.getClass() + .getClassLoader() + .getResourceAsStream("MetadataChangeProposal.avsc")); _genericAspectSchema = this._recordSchema.getField("aspect").schema().getTypes().get(1); _changeTypeEnumSchema = this._recordSchema.getField("changeType").schema(); _eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); @@ -32,7 +33,8 @@ Schema getRecordSchema() { return _recordSchema; } - public GenericRecord serialize(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw) throws IOException { + public GenericRecord serialize(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw) + throws IOException { return serialize(_eventFormatter.convert(mcpw)); } @@ -45,7 +47,8 @@ public GenericRecord serialize(MetadataChangeProposal mcp) throws IOException { genericRecord.put("aspect", genericAspect); genericRecord.put("aspectName", mcp.getAspectName()); genericRecord.put("entityType", mcp.getEntityType()); - genericRecord.put("changeType", new GenericData.EnumSymbol(_changeTypeEnumSchema, mcp.getChangeType())); + genericRecord.put( + "changeType", new GenericData.EnumSymbol(_changeTypeEnumSchema, mcp.getChangeType())); return genericRecord; } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java index 45528f79fad19..ba310de14813e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java @@ -1,5 +1,11 @@ package datahub.client.kafka; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.Emitter; +import datahub.client.MetadataWriteResponse; +import datahub.event.MetadataChangeProposalWrapper; +import datahub.event.UpsertAspectRequest; import java.io.IOException; import java.util.List; import java.util.Properties; @@ -7,7 +13,7 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import 
java.util.concurrent.TimeoutException; - +import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.ListTopicsOptions; @@ -16,15 +22,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.Emitter; -import datahub.client.MetadataWriteResponse; -import datahub.event.MetadataChangeProposalWrapper; -import datahub.event.UpsertAspectRequest; -import lombok.extern.slf4j.Slf4j; - @Slf4j public class KafkaEmitter implements Emitter { @@ -45,9 +42,11 @@ public KafkaEmitter(KafkaEmitterConfig config) throws IOException { this.config = config; kafkaConfigProperties = new Properties(); kafkaConfigProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.config.getBootstrap()); - kafkaConfigProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + kafkaConfigProperties.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.class); - kafkaConfigProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + kafkaConfigProperties.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class); kafkaConfigProperties.put("schema.registry.url", this.config.getSchemaRegistryUrl()); kafkaConfigProperties.putAll(config.getSchemaRegistryConfig()); @@ -59,28 +58,31 @@ public KafkaEmitter(KafkaEmitterConfig config) throws IOException { @Override public void close() throws IOException { producer.close(); - } @Override - public Future<MetadataWriteResponse> emit(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, - Callback datahubCallback) throws IOException { + public Future<MetadataWriteResponse> emit( + @SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, Callback datahubCallback) + throws IOException { return emit(this.config.getEventFormatter().convert(mcpw), datahubCallback); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback datahubCallback) throws IOException { + public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback datahubCallback) + throws IOException { GenericRecord genricRecord = _avroSerializer.serialize(mcp); - ProducerRecord<Object, Object> record = new ProducerRecord<>(KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, - mcp.getEntityUrn().toString(), genricRecord); - org.apache.kafka.clients.producer.Callback callback = new org.apache.kafka.clients.producer.Callback() { - - @Override - public void onCompletion(RecordMetadata metadata, Exception exception) { - MetadataWriteResponse response = mapResponse(metadata, exception); - datahubCallback.onCompletion(response); - } - }; + ProducerRecord<Object, Object> record = + new ProducerRecord<>( + KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, mcp.getEntityUrn().toString(), genricRecord); + org.apache.kafka.clients.producer.Callback callback = + new org.apache.kafka.clients.producer.Callback() { + + @Override + public void onCompletion(RecordMetadata metadata, Exception exception) { + MetadataWriteResponse response = mapResponse(metadata, exception); + datahubCallback.onCompletion(response); + } + }; log.debug("Emit: topic: {} \n record: {}", KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, record); Future<RecordMetadata> future = this.producer.send(record, callback); return mapFuture(future); @@ -117,14 +119,17 @@ 
public boolean isDone() { return future.isDone(); } }; - } @Override public boolean testConnection() throws IOException, ExecutionException, InterruptedException { try (AdminClient client = AdminClient.create(this.kafkaConfigProperties)) { - log.info("Available topics:" - + client.listTopics(new ListTopicsOptions().timeoutMs(ADMIN_CLIENT_TIMEOUT_MS)).listings().get()); + log.info( + "Available topics:" + + client + .listTopics(new ListTopicsOptions().timeoutMs(ADMIN_CLIENT_TIMEOUT_MS)) + .listings() + .get()); } catch (ExecutionException ex) { log.error("Kafka is not available, timed out after {} ms", ADMIN_CLIENT_TIMEOUT_MS); return false; @@ -133,7 +138,8 @@ public boolean testConnection() throws IOException, ExecutionException, Interrup } @Override - public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) + throws IOException { throw new UnsupportedOperationException("UpsertAspectRequest cannot be sent over Kafka"); } @@ -156,5 +162,4 @@ private static MetadataWriteResponse mapResponse(RecordMetadata metadata, Except public Properties getKafkaConfgiProperties() { return kafkaConfigProperties; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java index 9452dd5686ac7..c0a5df3bddf37 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java @@ -1,12 +1,11 @@ package datahub.client.kafka; +import datahub.event.EventFormatter; import java.io.InputStream; import java.util.Collections; import java.util.Map; import java.util.Properties; import java.util.function.Consumer; - -import datahub.event.EventFormatter; import lombok.Builder; import lombok.Value; import lombok.extern.slf4j.Slf4j; @@ -18,24 +17,22 @@ public class KafkaEmitterConfig { public static final String CLIENT_VERSION_PROPERTY = "clientVersion"; + @Builder.Default private final String bootstrap = "localhost:9092"; + @Builder.Default private final String schemaRegistryUrl = "http://localhost:8081"; + + @Builder.Default private final Map<String, String> schemaRegistryConfig = Collections.emptyMap(); + @Builder.Default private final Map<String, String> producerConfig = Collections.emptyMap(); + @Builder.Default - private final String bootstrap = "localhost:9092"; - @Builder.Default - private final String schemaRegistryUrl = "http://localhost:8081"; - - @Builder.Default - private final Map<String, String> schemaRegistryConfig = Collections.emptyMap(); - @Builder.Default - private final Map<String, String> producerConfig = Collections.emptyMap(); - - @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); - + private final EventFormatter eventFormatter = + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); + public static class KafkaEmitterConfigBuilder { @SuppressWarnings("unused") private String getVersion() { - try (InputStream foo = this.getClass().getClassLoader().getResourceAsStream("client.properties")) { + try (InputStream foo = + this.getClass().getClassLoader().getResourceAsStream("client.properties")) { Properties properties = new Properties(); properties.load(foo); return 
properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); @@ -49,7 +46,5 @@ public KafkaEmitterConfigBuilder with(Consumer<KafkaEmitterConfigBuilder> builde builderFunction.accept(this); return this; } - } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java index bf40addef6505..943aaefec469b 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java @@ -1,5 +1,7 @@ package datahub.client.patch; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.linkedin.common.urn.Urn; @@ -13,9 +15,6 @@ import org.apache.commons.lang3.tuple.ImmutableTriple; import org.apache.http.entity.ContentType; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; - - public abstract class AbstractMultiFieldPatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> { public static final String OP_KEY = "op"; @@ -27,6 +26,7 @@ public abstract class AbstractMultiFieldPatchBuilder<T extends AbstractMultiFiel /** * Builder method + * * @return a {@link MetadataChangeProposal} constructed from the builder's properties */ public MetadataChangeProposal build() { @@ -41,6 +41,7 @@ public MetadataChangeProposal build() { /** * Sets the target entity urn to be updated by this patch + * * @param urn The target entity whose aspect is to be patched by this update * @return this PatchBuilder subtype's instance */ @@ -52,18 +53,21 @@ public T urn(Urn urn) { /** * The aspect name associated with this builder + * * @return aspect name */ protected abstract String getAspectName(); /** * Returns the String representation of the Entity type associated with this aspect + * * @return entity type name */ protected abstract String getEntityType(); /** * Overrides basic behavior to construct multiple patches based on properties + * * @return a JsonPatch wrapped by GenericAspect */ protected GenericAspect buildPatch() { @@ -73,9 +77,14 @@ protected GenericAspect buildPatch() { ArrayNode patches = instance.arrayNode(); List<ImmutableTriple<String, String, JsonNode>> triples = getPathValues(); - triples.forEach(triple -> patches.add(instance.objectNode().put(OP_KEY, triple.left) - .put(PATH_KEY, triple.middle) - .set(VALUE_KEY, triple.right))); + triples.forEach( + triple -> + patches.add( + instance + .objectNode() + .put(OP_KEY, triple.left) + .put(PATH_KEY, triple.middle) + .set(VALUE_KEY, triple.right))); GenericAspect genericAspect = new GenericAspect(); genericAspect.setContentType(ContentType.APPLICATION_JSON.getMimeType()); @@ -85,7 +94,9 @@ protected GenericAspect buildPatch() { } /** - * Constructs a list of Op, Path, Value triples to create as patches. Not idempotent and should not be called more than once + * Constructs a list of Op, Path, Value triples to create as patches. 
Not idempotent and should + * not be called more than once + * * @return list of patch precursor triples */ protected List<ImmutableTriple<String, String, JsonNode>> getPathValues() { diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java index 8e8b5e324586f..ac93fd24fee02 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java @@ -2,16 +2,13 @@ import lombok.Getter; - public enum PatchOperationType { ADD("add"), REMOVE("remove"); - @Getter - private final String value; + @Getter private final String value; PatchOperationType(String value) { this.value = value; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java index 34618ddba7c5e..e621aaf57ff97 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java @@ -1,5 +1,7 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -10,10 +12,8 @@ import java.util.Map; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; - - -public class CustomPropertiesPatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> implements IntermediatePatchBuilder<T> { +public class CustomPropertiesPatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> + implements IntermediatePatchBuilder<T> { public static final String CUSTOM_PROPERTIES_BASE_PATH = "/customProperties"; @@ -26,35 +26,46 @@ public CustomPropertiesPatchBuilder(T parentBuilder) { /** * Add a property to a custom properties field + * * @param key * @param value * @return */ public CustomPropertiesPatchBuilder<T> addProperty(String key, String value) { - operations.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, - instance.textNode(value))); + operations.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + CUSTOM_PROPERTIES_BASE_PATH + "/" + key, + instance.textNode(value))); return this; } /** - * Remove a property from a custom properties field. If the property doesn't exist, this is a no-op. + * Remove a property from a custom properties field. If the property doesn't exist, this is a + * no-op. 
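// Illustrative sketch, not from this commit: how the CustomPropertiesPatchBuilder above
// is reached through a parent builder. urn() and build() come from
// AbstractMultiFieldPatchBuilder, and setDescription()/addCustomProperty() from
// DatasetPropertiesPatchBuilder, all reformatted in this diff; UrnUtils.getUrn and the
// literal values are assumptions.
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.mxe.MetadataChangeProposal;
import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder;

public class PatchBuilderExample {
  static MetadataChangeProposal buildPropertiesPatch() {
    return new DatasetPropertiesPatchBuilder()
        .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)"))
        .setDescription("Patched description")
        .addCustomProperty("retention", "30d")
        .build();
  }
}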
+ * * @param key * @return */ public CustomPropertiesPatchBuilder<T> removeProperty(String key) { - operations.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, null)); + operations.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, null)); return this; } /** * Fully replace the properties of the target aspect + * * @param properties * @return */ public CustomPropertiesPatchBuilder<T> setProperties(Map<String, String> properties) { ObjectNode propertiesNode = instance.objectNode(); properties.forEach((key, value) -> propertiesNode.set(key, instance.textNode(value))); - operations.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH, propertiesNode)); + operations.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH, propertiesNode)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java index 6cebee0ac1265..84db0ba307cf2 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TagUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -8,10 +11,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class GlobalTagsPatchBuilder extends AbstractMultiFieldPatchBuilder<GlobalTagsPatchBuilder> { private static final String BASE_PATH = "/tags/"; @@ -20,6 +19,7 @@ public class GlobalTagsPatchBuilder extends AbstractMultiFieldPatchBuilder<Globa /** * Adds a tag with an optional context string + * * @param urn required * @param context optional * @return @@ -49,7 +49,8 @@ protected String getAspectName() { @Override protected String getEntityType() { if (this.targetEntityUrn == null) { - throw new IllegalStateException("Target Entity Urn must be set to determine entity type before building Patch."); + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); } return this.targetEntityUrn.getEntityType(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java index 9f937503384fc..6f31025406b1b 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import 
com.linkedin.common.urn.GlossaryTermUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -8,11 +11,8 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class GlossaryTermsPatchBuilder extends AbstractMultiFieldPatchBuilder<GlossaryTermsPatchBuilder> { +public class GlossaryTermsPatchBuilder + extends AbstractMultiFieldPatchBuilder<GlossaryTermsPatchBuilder> { private static final String BASE_PATH = "/glossaryTerms/"; private static final String URN_KEY = "urn"; @@ -20,6 +20,7 @@ public class GlossaryTermsPatchBuilder extends AbstractMultiFieldPatchBuilder<Gl /** * Adds a term with an optional context string + * * @param urn required * @param context optional * @return @@ -49,7 +50,8 @@ protected String getAspectName() { @Override protected String getEntityType() { if (this.targetEntityUrn == null) { - throw new IllegalStateException("Target Entity Urn must be set to determine entity type before building Patch."); + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); } return this.targetEntityUrn.getEntityType(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java index 33fc8b68d9c26..20e0c930a8c95 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; @@ -8,10 +11,6 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipPatchBuilder extends AbstractMultiFieldPatchBuilder<OwnershipPatchBuilder> { private static final String BASE_PATH = "/owners/"; @@ -23,33 +22,39 @@ public OwnershipPatchBuilder addOwner(@Nonnull Urn owner, @Nonnull OwnershipType value.put(OWNER_KEY, owner.toString()); value.put(TYPE_KEY, type.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + owner + "/" + type, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + owner + "/" + type, value)); return this; } /** * Remove all ownership types for an owner + * * @param owner * @return */ public OwnershipPatchBuilder removeOwner(@Nonnull Urn owner) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + owner, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + owner, null)); return this; } /** - * Removes a specific ownership type for a particular owner, a single owner may have multiple ownership types + * Removes a specific ownership type for a particular owner, a single owner may have multiple + * ownership types + * * @param owner * @param type * @return */ - public 
OwnershipPatchBuilder removeOwnershipType(@Nonnull Urn owner, @Nonnull OwnershipType type) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + owner + "/" + type, null)); + public OwnershipPatchBuilder removeOwnershipType( + @Nonnull Urn owner, @Nonnull OwnershipType type) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + owner + "/" + type, null)); return this; } @@ -61,7 +66,8 @@ protected String getAspectName() { @Override protected String getEntityType() { if (this.targetEntityUrn == null) { - throw new IllegalStateException("Target Entity Urn must be set to determine entity type before building Patch."); + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); } return this.targetEntityUrn.getEntityType(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java index 3161eb492dff5..9e55ab4fc6db4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java @@ -1,23 +1,23 @@ package datahub.client.patch.dataflow; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; import datahub.client.patch.common.CustomPropertiesPatchBuilder; +import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DataFlowInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<DataFlowInfoPatchBuilder> +public class DataFlowInfoPatchBuilder + extends AbstractMultiFieldPatchBuilder<DataFlowInfoPatchBuilder> implements CustomPropertiesPatchBuilderSupport<DataFlowInfoPatchBuilder> { public static final String BASE_PATH = "/"; @@ -30,28 +30,41 @@ public class DataFlowInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<Dat public static final String TIME_KEY = "time"; public static final String ACTOR_KEY = "actor"; - private CustomPropertiesPatchBuilder<DataFlowInfoPatchBuilder> customPropertiesPatchBuilder = new CustomPropertiesPatchBuilder<>(this); + private CustomPropertiesPatchBuilder<DataFlowInfoPatchBuilder> customPropertiesPatchBuilder = + new CustomPropertiesPatchBuilder<>(this); public DataFlowInfoPatchBuilder setName(@Nonnull String name) { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); return this; } public DataFlowInfoPatchBuilder setDescription(@Nullable String description) { if 
(description == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } public DataFlowInfoPatchBuilder setProject(@Nullable String project) { if (project == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + PROJECT_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + PROJECT_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + PROJECT_KEY, instance.textNode(project))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + PROJECT_KEY, + instance.textNode(project))); } return this; } @@ -59,28 +72,35 @@ public DataFlowInfoPatchBuilder setProject(@Nullable String project) { public DataFlowInfoPatchBuilder setCreated(@Nullable TimeStamp created) { if (created == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); } else { ObjectNode createdNode = instance.objectNode(); createdNode.put(TIME_KEY, created.getTime()); if (created.getActor() != null) { createdNode.put(ACTOR_KEY, created.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); } return this; } public DataFlowInfoPatchBuilder setLastModified(@Nullable TimeStamp lastModified) { if (lastModified == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); } ObjectNode lastModifiedNode = instance.objectNode(); lastModifiedNode.put(TIME_KEY, lastModified.getTime()); if (lastModified.getActor() != null) { lastModifiedNode.put(ACTOR_KEY, lastModified.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java index 96e9c31288966..581616f54e9b9 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.datajob; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import 
com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; @@ -14,10 +17,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class DataJobInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<DataJobInfoPatchBuilder> implements CustomPropertiesPatchBuilderSupport<DataJobInfoPatchBuilder> { @@ -37,62 +36,80 @@ public class DataJobInfoPatchBuilder extends AbstractMultiFieldPatchBuilder<Data new CustomPropertiesPatchBuilder<>(this); public DataJobInfoPatchBuilder setName(@Nonnull String name) { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); return this; } public DataJobInfoPatchBuilder setDescription(@Nullable String description) { if (description == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } - public DataJobInfoPatchBuilder setType(@Nonnull String type) { ObjectNode union = instance.objectNode(); union.set("string", instance.textNode(type)); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + TYPE_KEY, union)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + TYPE_KEY, union)); return this; } public DataJobInfoPatchBuilder setFlowUrn(@Nullable DataFlowUrn flowUrn) { if (flowUrn == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + FLOW_URN_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + FLOW_URN_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + FLOW_URN_KEY, - instance.textNode(flowUrn.toString()))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + FLOW_URN_KEY, + instance.textNode(flowUrn.toString()))); } return this; } public DataJobInfoPatchBuilder setCreated(@Nullable TimeStamp created) { if (created == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); } else { ObjectNode createdNode = instance.objectNode(); createdNode.put(TIME_KEY, created.getTime()); if (created.getActor() != null) { createdNode.put(ACTOR_KEY, created.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); } return this; } public DataJobInfoPatchBuilder setLastModified(@Nullable TimeStamp lastModified) { if (lastModified == null) { - 
pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); } else { ObjectNode lastModifiedNode = instance.objectNode(); lastModifiedNode.put(TIME_KEY, lastModified.getTime()); if (lastModified.getActor() != null) { lastModifiedNode.put(ACTOR_KEY, lastModified.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); } return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java index 1ff6e817e40cf..0fb0454533fc0 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.datajob; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; import com.linkedin.common.Edge; @@ -12,11 +15,8 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DataJobInputOutputPatchBuilder extends AbstractMultiFieldPatchBuilder<DataJobInputOutputPatchBuilder> { +public class DataJobInputOutputPatchBuilder + extends AbstractMultiFieldPatchBuilder<DataJobInputOutputPatchBuilder> { private static final String INPUT_DATA_JOB_EDGES_PATH_START = "/inputDatajobEdges/"; private static final String INPUT_DATASET_EDGES_PATH_START = "/inputDatasetEdges/"; private static final String OUTPUT_DATASET_EDGES_PATH_START = "/outputDatasetEdges/"; @@ -39,65 +39,96 @@ public class DataJobInputOutputPatchBuilder extends AbstractMultiFieldPatchBuild public DataJobInputOutputPatchBuilder addInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { ObjectNode value = createEdgeValue(dataJobUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, + value)); return this; } public DataJobInputOutputPatchBuilder removeInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addInputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { ObjectNode value = createEdgeValue(datasetUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), 
INPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); return this; } public DataJobInputOutputPatchBuilder removeInputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATASET_EDGES_PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + INPUT_DATASET_EDGES_PATH_START + datasetUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addOutputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { ObjectNode value = createEdgeValue(datasetUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, + value)); return this; } public DataJobInputOutputPatchBuilder removeOutputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addInputDatasetField(@Nonnull Urn urn) { TextNode textNode = instance.textNode(urn.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, textNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, textNode)); return this; } public DataJobInputOutputPatchBuilder removeInputDatasetField(@Nonnull Urn urn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, null)); return this; } public DataJobInputOutputPatchBuilder addOutputDatasetField(@Nonnull Urn urn) { TextNode textNode = instance.textNode(urn.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, textNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, textNode)); return this; } public DataJobInputOutputPatchBuilder removeOutputDatasetField(@Nonnull Urn urn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, null)); return this; } // Full Edge modification - public DataJobInputOutputPatchBuilder addEdge(@Nonnull Edge edge, @Nonnull LineageDirection direction) { + public DataJobInputOutputPatchBuilder addEdge( + @Nonnull Edge edge, @Nonnull LineageDirection direction) { ObjectNode value = createEdgeValue(edge); String path = getEdgePath(edge, direction); @@ -105,7 +136,8 @@ public DataJobInputOutputPatchBuilder addEdge(@Nonnull Edge edge, @Nonnull Linea return this; } - public DataJobInputOutputPatchBuilder removeEdge(@Nonnull Edge edge, @Nonnull LineageDirection direction) { + public DataJobInputOutputPatchBuilder removeEdge( + @Nonnull Edge edge, @Nonnull LineageDirection direction) { String path = getEdgePath(edge, direction); pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), path, null)); @@ -115,11 +147,9 @@ 
public DataJobInputOutputPatchBuilder removeEdge(@Nonnull Edge edge, @Nonnull Li private ObjectNode createEdgeValue(@Nonnull Urn urn) { ObjectNode value = instance.objectNode(); ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()) - .put(ACTOR_KEY, UNKNOWN_ACTOR); + auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - value.put(DESTINATION_URN_KEY, urn.toString()) - .set(LAST_MODIFIED_KEY, auditStamp); + value.put(DESTINATION_URN_KEY, urn.toString()).set(LAST_MODIFIED_KEY, auditStamp); value.set(CREATED_KEY, auditStamp); return value; @@ -151,11 +181,11 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { lastModified .put(TIME_KEY, edge.getLastModified().getTime()) .put(ACTOR_KEY, edge.getLastModified().getActor().toString()); - if (edge.getLastModified() .getImpersonator() != null) { + if (edge.getLastModified().getImpersonator() != null) { lastModified.put(IMPERSONATOR_KEY, edge.getLastModified().getImpersonator().toString()); } - if (edge.getLastModified() .getMessage() != null) { - lastModified.put(MESSAGE_KEY, edge.getLastModified() .getMessage()); + if (edge.getLastModified().getMessage() != null) { + lastModified.put(MESSAGE_KEY, edge.getLastModified().getMessage()); } } value.set(LAST_MODIFIED_KEY, lastModified); @@ -171,12 +201,13 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { value.put(SOURCE_URN_KEY, edge.getSourceUrn().toString()); } - return value; } /** - * Determines Edge path based on supplied Urn, if not a valid entity type throws IllegalArgumentException + * Determines Edge path based on supplied Urn, if not a valid entity type throws + * IllegalArgumentException + * * @param edge * @return * @throws IllegalArgumentException if destinationUrn is an invalid entity type @@ -184,21 +215,25 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { private String getEdgePath(@Nonnull Edge edge, LineageDirection direction) { Urn destinationUrn = edge.getDestinationUrn(); - if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.UPSTREAM.equals(direction)) { + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.UPSTREAM.equals(direction)) { return INPUT_DATASET_EDGES_PATH_START + destinationUrn; } - if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.DOWNSTREAM.equals(direction)) { + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.DOWNSTREAM.equals(direction)) { return INPUT_DATASET_EDGES_PATH_START + destinationUrn; } - if (DATA_JOB_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.UPSTREAM.equals(direction)) { + if (DATA_JOB_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.UPSTREAM.equals(direction)) { return INPUT_DATA_JOB_EDGES_PATH_START + destinationUrn; } // TODO: Output Data Jobs not supported by aspect, add here if this changes - throw new IllegalArgumentException(String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); + throw new IllegalArgumentException( + String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); } @Override @@ -210,5 +245,4 @@ protected String getAspectName() { protected String getEntityType() { return DATA_JOB_ENTITY_NAME; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java 
b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java index d8c9b9308ae57..f4329c84f33ff 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; @@ -11,11 +14,8 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetPropertiesPatchBuilder extends AbstractMultiFieldPatchBuilder<DatasetPropertiesPatchBuilder> +public class DatasetPropertiesPatchBuilder + extends AbstractMultiFieldPatchBuilder<DatasetPropertiesPatchBuilder> implements CustomPropertiesPatchBuilderSupport<DatasetPropertiesPatchBuilder> { public static final String BASE_PATH = "/"; @@ -29,62 +29,78 @@ public class DatasetPropertiesPatchBuilder extends AbstractMultiFieldPatchBuilde private CustomPropertiesPatchBuilder<DatasetPropertiesPatchBuilder> customPropertiesPatchBuilder = new CustomPropertiesPatchBuilder<>(this); - public DatasetPropertiesPatchBuilder setExternalUrl(@Nullable String externalUrl) { if (externalUrl == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + EXTERNAL_URL_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + EXTERNAL_URL_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + EXTERNAL_URL_KEY, - instance.textNode(externalUrl))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + EXTERNAL_URL_KEY, + instance.textNode(externalUrl))); } return this; } public DatasetPropertiesPatchBuilder setName(@Nullable String name) { if (name == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + NAME_KEY, null)); + this.pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + NAME_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); } return this; } public DatasetPropertiesPatchBuilder setQualifiedName(@Nullable String qualifiedName) { if (qualifiedName == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + QUALIFIED_NAME_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + QUALIFIED_NAME_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + QUALIFIED_NAME_KEY, - instance.textNode(qualifiedName))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + QUALIFIED_NAME_KEY, + instance.textNode(qualifiedName))); } return this; } public 
DatasetPropertiesPatchBuilder setDescription(@Nullable String description) { if (description == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + DESCRIPTION_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } public DatasetPropertiesPatchBuilder setUri(@Nullable String uri) { if (uri == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + URI_KEY, null)); + this.pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + URI_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + URI_KEY, instance.textNode(uri))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + URI_KEY, instance.textNode(uri))); } return this; } @Override - public DatasetPropertiesPatchBuilder addCustomProperty(@Nonnull String key, @Nonnull String value) { + public DatasetPropertiesPatchBuilder addCustomProperty( + @Nonnull String key, @Nonnull String value) { this.customPropertiesPatchBuilder.addProperty(key, value); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java index 8b8dea275a3f4..6478b31d27ef0 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java @@ -1,21 +1,20 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.GlossaryTermAssociation; -import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.TagAssociation; +import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class EditableSchemaMetadataPatchBuilder extends - AbstractMultiFieldPatchBuilder<EditableSchemaMetadataPatchBuilder> { +public class EditableSchemaMetadataPatchBuilder + extends AbstractMultiFieldPatchBuilder<EditableSchemaMetadataPatchBuilder> { private static final String BASE_PATH = "/editableSchemaFieldInfo/"; private static final String TAGS_PATH_EXTENSION = "/globalTags/tags/"; @@ -24,39 +23,55 @@ public class EditableSchemaMetadataPatchBuilder extends private static final String URN_KEY = "urn"; private static final String CONTEXT_KEY = "context"; - public EditableSchemaMetadataPatchBuilder addTag(@Nonnull TagAssociation tag, @Nonnull String 
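DatasetPropertiesPatchBuilder above treats a null argument as a REMOVE op and a non-null one as an ADD, and its CustomPropertiesPatchBuilderSupport hooks ride along in the same proposal. A minimal sketch under the same assumptions as before (inherited urn()/build(), illustrative urn):

    import com.linkedin.common.urn.DatasetUrn;
    import com.linkedin.mxe.MetadataChangeProposal;
    import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder;

    public class DatasetPropsPatchSketch {
      public static void main(String[] args) throws Exception {
        DatasetUrn ds = DatasetUrn.createFromString(
            "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users,PROD)");

        MetadataChangeProposal mcp =
            new DatasetPropertiesPatchBuilder()
                .urn(ds)
                .setDescription("Fact table of user signups") // non-null -> ADD op
                .setExternalUrl(null)                         // null -> REMOVE op
                .addCustomProperty("retention", "90d")
                .build();
      }
    }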
fieldPath) { + public EditableSchemaMetadataPatchBuilder addTag( + @Nonnull TagAssociation tag, @Nonnull String fieldPath) { ObjectNode value = instance.objectNode(); value.put(TAG_KEY, tag.getTag().toString()); if (tag.getContext() != null) { value.put(CONTEXT_KEY, tag.getContext()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag.getTag(), value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag.getTag(), + value)); return this; } - public EditableSchemaMetadataPatchBuilder removeTag(@Nonnull TagUrn tag, @Nonnull String fieldPath) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag, null)); + public EditableSchemaMetadataPatchBuilder removeTag( + @Nonnull TagUrn tag, @Nonnull String fieldPath) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag, + null)); return this; } - public EditableSchemaMetadataPatchBuilder addGlossaryTerm(@Nonnull GlossaryTermAssociation term, @Nonnull String fieldPath) { + public EditableSchemaMetadataPatchBuilder addGlossaryTerm( + @Nonnull GlossaryTermAssociation term, @Nonnull String fieldPath) { ObjectNode value = instance.objectNode(); value.put(URN_KEY, term.getUrn().toString()); if (term.getContext() != null) { value.put(CONTEXT_KEY, term.getContext()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term.getUrn(), value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term.getUrn(), + value)); return this; } - public EditableSchemaMetadataPatchBuilder removeGlossaryTerm(@Nonnull GlossaryTermUrn term, @Nonnull String fieldPath) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term, null)); + public EditableSchemaMetadataPatchBuilder removeGlossaryTerm( + @Nonnull GlossaryTermUrn term, @Nonnull String fieldPath) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term, + null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java index 29330bee01ef3..6ded8a25b4e22 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.dataset.DatasetLineageType; @@ -9,12 +12,9 @@ import lombok.ToString; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - @ToString -public class UpstreamLineagePatchBuilder extends 
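The field-level paths assembled above ("/editableSchemaFieldInfo/<fieldPath>/globalTags/tags/<tag>" and the terms analogue) can be exercised as follows, continuing the previous sketch's main method with made-up field names, tag, and term:

    import com.linkedin.common.TagAssociation;
    import com.linkedin.common.urn.GlossaryTermUrn;
    import com.linkedin.common.urn.TagUrn;
    import datahub.client.patch.dataset.EditableSchemaMetadataPatchBuilder;

    // Tag one column and drop a glossary term from another on the same dataset.
    MetadataChangeProposal schemaPatch =
        new EditableSchemaMetadataPatchBuilder()
            .urn(ds) // the illustrative DatasetUrn from the sketch above
            .addTag(new TagAssociation().setTag(new TagUrn("pii")), "user_id")
            .removeGlossaryTerm(new GlossaryTermUrn("Classification.Sensitive"), "email")
            .build();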
AbstractMultiFieldPatchBuilder<UpstreamLineagePatchBuilder> { +public class UpstreamLineagePatchBuilder + extends AbstractMultiFieldPatchBuilder<UpstreamLineagePatchBuilder> { private static final String PATH_START = "/upstreams/"; private static final String DATASET_KEY = "dataset"; @@ -23,21 +23,24 @@ public class UpstreamLineagePatchBuilder extends AbstractMultiFieldPatchBuilder< private static final String ACTOR_KEY = "actor"; private static final String TYPE_KEY = "type"; - public UpstreamLineagePatchBuilder addUpstream(@Nonnull DatasetUrn datasetUrn, @Nonnull DatasetLineageType lineageType) { + public UpstreamLineagePatchBuilder addUpstream( + @Nonnull DatasetUrn datasetUrn, @Nonnull DatasetLineageType lineageType) { ObjectNode value = instance.objectNode(); ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()) - .put(ACTOR_KEY, UNKNOWN_ACTOR); - value.put(DATASET_KEY, datasetUrn.toString()) + auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); + value + .put(DATASET_KEY, datasetUrn.toString()) .put(TYPE_KEY, lineageType.toString()) .set(AUDIT_STAMP_KEY, auditStamp); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); return this; } public UpstreamLineagePatchBuilder removeUpstream(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java index 562ab715848b1..9f221bac15be4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java @@ -4,14 +4,12 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * Interface to implement if an aspect supports custom properties changes - */ +/** Interface to implement if an aspect supports custom properties changes */ public interface CustomPropertiesPatchBuilderSupport<T extends AbstractMultiFieldPatchBuilder<T>> { /** * Adds a custom property + * * @param key * @param value * @return @@ -20,6 +18,7 @@ public interface CustomPropertiesPatchBuilderSupport<T extends AbstractMultiFiel /** * Removes a custom property + * * @param key * @return */ @@ -27,6 +26,7 @@ public interface CustomPropertiesPatchBuilderSupport<T extends AbstractMultiFiel /** * Fully replace the custom properties + * * @param properties * @return */ diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java index 660b6ff8fb84a..e3b14c0838ad6 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java +++ 
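UpstreamLineagePatchBuilder keys each op on the upstream's urn, so the generated path is "/upstreams/<upstreamUrn>". Continuing the same illustrative sketch:

    import com.linkedin.dataset.DatasetLineageType;
    import datahub.client.patch.dataset.UpstreamLineagePatchBuilder;

    // Hypothetical upstream of the illustrative dataset `ds` used above.
    DatasetUrn upstream = DatasetUrn.createFromString(
        "urn:li:dataset:(urn:li:dataPlatform:kafka,user_signup_events,PROD)");

    MetadataChangeProposal lineagePatch =
        new UpstreamLineagePatchBuilder()
            .urn(ds) // the downstream dataset being patched
            .addUpstream(upstream, DatasetLineageType.TRANSFORMED)
            .build();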
b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java @@ -5,21 +5,20 @@ import java.util.List; import org.apache.commons.lang3.tuple.ImmutableTriple; - /** - * Used for supporting intermediate subtypes when constructing a patch for an aspect that includes complex objects. + * Used for supporting intermediate subtypes when constructing a patch for an aspect that includes + * complex objects. + * * @param <T> The parent patch builder type */ public interface IntermediatePatchBuilder<T extends AbstractMultiFieldPatchBuilder<T>> { - /** - * Convenience method to return parent patch builder in functional callstack - */ + /** Convenience method to return parent patch builder in functional callstack */ T getParent(); /** - * Exposes subpath values to parent patch builder in Op, Path, Value triples. Should - * usually only be called by the parent patch builder class when constructing the path values. + * Exposes subpath values to parent patch builder in Op, Path, Value triples. Should usually only + * be called by the parent patch builder class when constructing the path values. */ List<ImmutableTriple<String, String, JsonNode>> getSubPaths(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java index 7396fa2d926d3..a2692c432513e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java @@ -1,77 +1,64 @@ package datahub.client.rest; +import static com.linkedin.metadata.Constants.*; + +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.JacksonDataTemplateCodec; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.Emitter; +import datahub.client.MetadataResponseFuture; +import datahub.client.MetadataWriteResponse; +import datahub.event.EventFormatter; +import datahub.event.MetadataChangeProposalWrapper; +import datahub.event.UpsertAspectRequest; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; - import javax.annotation.concurrent.ThreadSafe; - +import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.concurrent.FutureCallback; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.TrustAllStrategy; import org.apache.http.entity.StringEntity; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; - -import 
com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.data.DataMap; -import com.linkedin.data.template.JacksonDataTemplateCodec; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.Emitter; -import datahub.client.MetadataResponseFuture; -import datahub.client.MetadataWriteResponse; -import datahub.event.EventFormatter; -import datahub.event.MetadataChangeProposalWrapper; -import datahub.event.UpsertAspectRequest; -import lombok.extern.slf4j.Slf4j; - -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.conn.ssl.TrustAllStrategy; import org.apache.http.nio.client.HttpAsyncClient; import org.apache.http.ssl.SSLContextBuilder; -import java.security.KeyManagementException; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; - -import static com.linkedin.metadata.Constants.*; - - @ThreadSafe @Slf4j /** * The REST emitter is a thin wrapper on top of the Apache HttpClient - * (https://hc.apache.org/httpcomponents-client-4.5.x/index.html) library. It supports non-blocking emission of - * metadata and handles the details of JSON serialization of metadata aspects over the wire. + * (https://hc.apache.org/httpcomponents-client-4.5.x/index.html) library. It supports non-blocking + * emission of metadata and handles the details of JSON serialization of metadata aspects over the + * wire. * - * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` method. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * ); - * You can also customize the underlying - * http client by calling the `customizeHttpAsyncClient` method on the builder. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) - * ); + * <p>Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` + * method. e.g. RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") ); You can also customize + * the underlying http client by calling the `customizeHttpAsyncClient` method on the builder. e.g. + * RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") .customizeHttpAsyncClient(c + * :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) ); */ public class RestEmitter implements Emitter { @@ -87,29 +74,36 @@ public class RestEmitter implements Emitter { /** * The default constructor, prefer using the `create` factory method. 
+ * * @param config */ public RestEmitter(RestEmitterConfig config) { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); this.config = config; // Override httpClient settings with RestEmitter configs if present if (config.getTimeoutSec() != null) { HttpAsyncClientBuilder httpClientBuilder = this.config.getAsyncHttpClientBuilder(); - httpClientBuilder.setDefaultRequestConfig(RequestConfig.custom() - .setConnectTimeout(config.getTimeoutSec() * 1000) - .setSocketTimeout(config.getTimeoutSec() * 1000) - .build()); + httpClientBuilder.setDefaultRequestConfig( + RequestConfig.custom() + .setConnectTimeout(config.getTimeoutSec() * 1000) + .setSocketTimeout(config.getTimeoutSec() * 1000) + .build()); } if (config.isDisableSslVerification()) { HttpAsyncClientBuilder httpClientBuilder = this.config.getAsyncHttpClientBuilder(); try { httpClientBuilder - .setSSLContext(new SSLContextBuilder().loadTrustMaterial(null, TrustAllStrategy.INSTANCE).build()) + .setSSLContext( + new SSLContextBuilder().loadTrustMaterial(null, TrustAllStrategy.INSTANCE).build()) .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE); } catch (KeyManagementException | NoSuchAlgorithmException | KeyStoreException e) { throw new RuntimeException("Error while creating insecure http client", e); @@ -127,8 +121,10 @@ public RestEmitter(RestEmitterConfig config) { private static MetadataWriteResponse mapResponse(HttpResponse response) { MetadataWriteResponse.MetadataWriteResponseBuilder builder = MetadataWriteResponse.builder().underlyingResponse(response); - if ((response != null) && (response.getStatusLine() != null) && (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK - || response.getStatusLine().getStatusCode() == HttpStatus.SC_CREATED)) { + if ((response != null) + && (response.getStatusLine() != null) + && (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK + || response.getStatusLine().getStatusCode() == HttpStatus.SC_CREATED)) { builder.success(true); } else { builder.success(false); @@ -144,51 +140,49 @@ private static MetadataWriteResponse mapResponse(HttpResponse response) { length = contentStream.read(buffer); } builder.responseContent(result.toString("UTF-8")); - } catch (Exception e) { - // Catch all exceptions and still return a valid response object - log.warn("Wasn't able to convert response into a string", e); - } + } catch (Exception e) { + // Catch all exceptions and still return a valid response object + log.warn("Wasn't able to convert response into a string", e); + } return builder.build(); } - /** - * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` method. - * e.g. 
- * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") // coordinates of gms server - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * ); - * You can also customize the underlying http client by calling the `customizeHttpAsyncClient` method on the builder. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) - * ); + * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` + * method. e.g. RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * // coordinates of gms server .extraHeaders(Collections.singletonMap("Custom-Header", + * "custom-val") ); You can also customize the underlying http client by calling the + * `customizeHttpAsyncClient` method on the builder. e.g. RestEmitter emitter = + * RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") + * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) ); + * * @param builderSupplier - * @return a constructed RestEmitter. Call #testConnection to make sure this emitter has a valid connection to the server + * @return a constructed RestEmitter. Call #testConnection to make sure this emitter has a valid + * connection to the server */ - public static RestEmitter create(Consumer<RestEmitterConfig.RestEmitterConfigBuilder> builderSupplier) { - RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().with(builderSupplier).build()); + public static RestEmitter create( + Consumer<RestEmitterConfig.RestEmitterConfigBuilder> builderSupplier) { + RestEmitter restEmitter = + new RestEmitter(RestEmitterConfig.builder().with(builderSupplier).build()); return restEmitter; } /** * Creates a RestEmitter with default settings. - * @return a constructed RestEmitter. - * Call #test_connection to validate that this emitter can communicate with the server. + * + * @return a constructed RestEmitter. Call #test_connection to validate that this emitter can + * communicate with the server. 
*/ public static RestEmitter createWithDefaults() { // No-op creator -> creates RestEmitter using default settings - return create(b -> { - }); + return create(b -> {}); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposalWrapper mcpw, - Callback callback) throws IOException { - return emit(this.eventFormatter.convert(mcpw), callback); + public Future<MetadataWriteResponse> emit(MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException { + return emit(this.eventFormatter.convert(mcpw), callback); } @Override @@ -201,8 +195,9 @@ public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback c return this.postGeneric(this.ingestProposalUrl, serializedMCP, mcp, callback); } - private Future<MetadataWriteResponse> postGeneric(String urlStr, String payloadJson, Object originalRequest, - Callback callback) throws IOException { + private Future<MetadataWriteResponse> postGeneric( + String urlStr, String payloadJson, Object originalRequest, Callback callback) + throws IOException { HttpPost httpPost = new HttpPost(urlStr); httpPost.setHeader("Content-Type", "application/json"); httpPost.setHeader("X-RestLi-Protocol-Version", "2.0.0"); @@ -214,48 +209,49 @@ private Future<MetadataWriteResponse> postGeneric(String urlStr, String payloadJ httpPost.setEntity(new StringEntity(payloadJson)); AtomicReference<MetadataWriteResponse> responseAtomicReference = new AtomicReference<>(); CountDownLatch responseLatch = new CountDownLatch(1); - FutureCallback<HttpResponse> httpCallback = new FutureCallback<HttpResponse>() { - @Override - public void completed(HttpResponse response) { - MetadataWriteResponse writeResponse = null; - try { - writeResponse = mapResponse(response); - responseAtomicReference.set(writeResponse); - } catch (Exception e) { - // do nothing - } - responseLatch.countDown(); - if (callback != null) { - try { - callback.onCompletion(writeResponse); - } catch (Exception e) { - log.error("Error executing user callback on completion.", e); + FutureCallback<HttpResponse> httpCallback = + new FutureCallback<HttpResponse>() { + @Override + public void completed(HttpResponse response) { + MetadataWriteResponse writeResponse = null; + try { + writeResponse = mapResponse(response); + responseAtomicReference.set(writeResponse); + } catch (Exception e) { + // do nothing + } + responseLatch.countDown(); + if (callback != null) { + try { + callback.onCompletion(writeResponse); + } catch (Exception e) { + log.error("Error executing user callback on completion.", e); + } + } } - } - } - @Override - public void failed(Exception ex) { - if (callback != null) { - try { - callback.onFailure(ex); - } catch (Exception e) { - log.error("Error executing user callback on failure.", e); + @Override + public void failed(Exception ex) { + if (callback != null) { + try { + callback.onFailure(ex); + } catch (Exception e) { + log.error("Error executing user callback on failure.", e); + } + } } - } - } - @Override - public void cancelled() { - if (callback != null) { - try { - callback.onFailure(new RuntimeException("Cancelled")); - } catch (Exception e) { - log.error("Error executing user callback on failure due to cancellation.", e); + @Override + public void cancelled() { + if (callback != null) { + try { + callback.onFailure(new RuntimeException("Cancelled")); + } catch (Exception e) { + log.error("Error executing user callback on failure due to cancellation.", e); + } + } } - } - } - }; + }; Future<HttpResponse> requestFuture = httpClient.execute(httpPost, 
httpCallback); return new MetadataResponseFuture(requestFuture, responseAtomicReference, responseLatch); } @@ -286,8 +282,8 @@ public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Cal return this.postOpenAPI(request, callback); } - private Future<MetadataWriteResponse> postOpenAPI(List<UpsertAspectRequest> payload, Callback callback) - throws IOException { + private Future<MetadataWriteResponse> postOpenAPI( + List<UpsertAspectRequest> payload, Callback callback) throws IOException { HttpPost httpPost = new HttpPost(ingestOpenApiUrl); httpPost.setHeader("Content-Type", "application/json"); httpPost.setHeader("Accept", "application/json"); @@ -298,48 +294,49 @@ private Future<MetadataWriteResponse> postOpenAPI(List<UpsertAspectRequest> payl httpPost.setEntity(new StringEntity(objectMapper.writeValueAsString(payload))); AtomicReference<MetadataWriteResponse> responseAtomicReference = new AtomicReference<>(); CountDownLatch responseLatch = new CountDownLatch(1); - FutureCallback<HttpResponse> httpCallback = new FutureCallback<HttpResponse>() { - @Override - public void completed(HttpResponse response) { - MetadataWriteResponse writeResponse = null; - try { - writeResponse = mapResponse(response); - responseAtomicReference.set(writeResponse); - } catch (Exception e) { - // do nothing - } - responseLatch.countDown(); - if (callback != null) { - try { - callback.onCompletion(writeResponse); - } catch (Exception e) { - log.error("Error executing user callback on completion.", e); + FutureCallback<HttpResponse> httpCallback = + new FutureCallback<HttpResponse>() { + @Override + public void completed(HttpResponse response) { + MetadataWriteResponse writeResponse = null; + try { + writeResponse = mapResponse(response); + responseAtomicReference.set(writeResponse); + } catch (Exception e) { + // do nothing + } + responseLatch.countDown(); + if (callback != null) { + try { + callback.onCompletion(writeResponse); + } catch (Exception e) { + log.error("Error executing user callback on completion.", e); + } + } } - } - } - @Override - public void failed(Exception ex) { - if (callback != null) { - try { - callback.onFailure(ex); - } catch (Exception e) { - log.error("Error executing user callback on failure.", e); + @Override + public void failed(Exception ex) { + if (callback != null) { + try { + callback.onFailure(ex); + } catch (Exception e) { + log.error("Error executing user callback on failure.", e); + } + } } - } - } - @Override - public void cancelled() { - if (callback != null) { - try { - callback.onFailure(new RuntimeException("Cancelled")); - } catch (Exception e) { - log.error("Error executing user callback on failure due to cancellation.", e); + @Override + public void cancelled() { + if (callback != null) { + try { + callback.onFailure(new RuntimeException("Cancelled")); + } catch (Exception e) { + log.error("Error executing user callback on failure due to cancellation.", e); + } + } } - } - } - }; + }; Future<HttpResponse> requestFuture = httpClient.execute(httpPost, httpCallback); return new MetadataResponseFuture(requestFuture, responseAtomicReference, responseLatch); } @@ -348,5 +345,4 @@ public void cancelled() { HttpAsyncClient getHttpClient() { return this.httpClient; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java index f615c3ccb3e4f..7e24429213246 100644 --- 
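One working rendering of the builder pattern the RestEmitter javadoc above gestures at (its "b :: b" is shorthand; compilable Java lambda syntax is "b -> b"). The server address and header are placeholders, and mcpw is a MetadataChangeProposalWrapper like the one sketched further below:

    import java.util.Collections;
    import datahub.client.rest.RestEmitter;

    RestEmitter emitter =
        RestEmitter.create(
            b ->
                b.server("http://localhost:8080")
                    .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val")));

    // emit(...) is non-blocking and returns a Future<MetadataWriteResponse>;
    // call get() only where ordering or delivery confirmation matters.
    boolean ok = emitter.emit(mcpw).get().isSuccess();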
a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java @@ -13,7 +13,6 @@ import org.apache.http.client.config.RequestConfig; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; - @Value @Builder @Slf4j @@ -24,46 +23,43 @@ public class RestEmitterConfig { public static final String DEFAULT_AUTH_TOKEN = null; public static final String CLIENT_VERSION_PROPERTY = "clientVersion"; - @Builder.Default - private final String server = "http://localhost:8080"; + @Builder.Default private final String server = "http://localhost:8080"; private final Integer timeoutSec; - @Builder.Default - private final boolean disableSslVerification = false; - - @Builder.Default - private final String token = DEFAULT_AUTH_TOKEN; + @Builder.Default private final boolean disableSslVerification = false; - @Builder.Default - @NonNull - private final Map<String, String> extraHeaders = Collections.EMPTY_MAP; + @Builder.Default private final String token = DEFAULT_AUTH_TOKEN; + + @Builder.Default @NonNull private final Map<String, String> extraHeaders = Collections.EMPTY_MAP; private final HttpAsyncClientBuilder asyncHttpClientBuilder; @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); + private final EventFormatter eventFormatter = + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); public static class RestEmitterConfigBuilder { private String getVersion() { - try ( - InputStream foo = this.getClass().getClassLoader().getResourceAsStream("client.properties")) { - Properties properties = new Properties(); - properties.load(foo); - return properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); + try (InputStream foo = + this.getClass().getClassLoader().getResourceAsStream("client.properties")) { + Properties properties = new Properties(); + properties.load(foo); + return properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); } catch (Exception e) { log.warn("Unable to find a version for datahub-client. 
Will set to unknown", e); return "unknown"; } } - private HttpAsyncClientBuilder asyncHttpClientBuilder = HttpAsyncClientBuilder - .create() - .setDefaultRequestConfig(RequestConfig.custom() - .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_SEC * 1000) - .setSocketTimeout(DEFAULT_READ_TIMEOUT_SEC * 1000) - .build()) - .setUserAgent("DataHub-RestClient/" + getVersion()); + private HttpAsyncClientBuilder asyncHttpClientBuilder = + HttpAsyncClientBuilder.create() + .setDefaultRequestConfig( + RequestConfig.custom() + .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_SEC * 1000) + .setSocketTimeout(DEFAULT_READ_TIMEOUT_SEC * 1000) + .build()) + .setUserAgent("DataHub-RestClient/" + getVersion()); public RestEmitterConfigBuilder with(Consumer<RestEmitterConfigBuilder> builderFunction) { builderFunction.accept(this); @@ -76,4 +72,4 @@ public RestEmitterConfigBuilder customizeHttpAsyncClient( return this; } } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java index 5d42f814e1fe0..5238c19610601 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java @@ -1,5 +1,7 @@ package datahub.event; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -8,18 +10,12 @@ import com.linkedin.data.template.JacksonDataTemplateCodec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; - import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import lombok.SneakyThrows; -import static com.linkedin.metadata.Constants.*; - - -/** - * A class that helps to format Metadata events for transport - */ +/** A class that helps to format Metadata events for transport */ public class EventFormatter { private final ObjectMapper objectMapper; @@ -30,9 +26,13 @@ public class EventFormatter { public EventFormatter(Format serializationFormat) { this.serializationFormat = serializationFormat; objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); } @@ -42,21 +42,29 @@ public EventFormatter() { @SneakyThrows(URISyntaxException.class) public MetadataChangeProposal convert(MetadataChangeProposalWrapper mcpw) throws IOException { - - String serializedAspect = StringEscapeUtils.escapeJava(dataTemplateCodec.dataTemplateToString(mcpw.getAspect())); - MetadataChangeProposal mcp = new MetadataChangeProposal().setEntityType(mcpw.getEntityType()) - .setAspectName(mcpw.getAspectName()) - .setEntityUrn(Urn.createFromString(mcpw.getEntityUrn())) - .setChangeType(mcpw.getChangeType()); + + 
String serializedAspect = + StringEscapeUtils.escapeJava(dataTemplateCodec.dataTemplateToString(mcpw.getAspect())); + MetadataChangeProposal mcp = + new MetadataChangeProposal() + .setEntityType(mcpw.getEntityType()) + .setAspectName(mcpw.getAspectName()) + .setEntityUrn(Urn.createFromString(mcpw.getEntityUrn())) + .setChangeType(mcpw.getChangeType()); switch (this.serializationFormat) { - case PEGASUS_JSON: { - mcp.setAspect(new GenericAspect().setContentType("application/json") - .setValue(ByteString.unsafeWrap(serializedAspect.getBytes(StandardCharsets.UTF_8)))); - } - break; + case PEGASUS_JSON: + { + mcp.setAspect( + new GenericAspect() + .setContentType("application/json") + .setValue( + ByteString.unsafeWrap(serializedAspect.getBytes(StandardCharsets.UTF_8)))); + } + break; default: - throw new EventValidationException("Cannot handle serialization format " + this.serializationFormat); + throw new EventValidationException( + "Cannot handle serialization format " + this.serializationFormat); } return mcp; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java index 43778cb325971..dff3791a64ec9 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java @@ -4,6 +4,7 @@ public class EventValidationException extends RuntimeException { public EventValidationException(String message) { super(message); } + public EventValidationException(String message, Throwable t) { super(message, t); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java index 083a4cb40471b..4eb33015e33f4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java @@ -12,9 +12,9 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; - /** * A class that makes it easy to create new {@link MetadataChangeProposal} events + * * @param <T> */ @Value @@ -53,7 +53,11 @@ public interface Build { } public static class MetadataChangeProposalWrapperBuilder - implements EntityUrnStepBuilder, EntityTypeStepBuilder, ChangeStepBuilder, AspectStepBuilder, Build { + implements EntityUrnStepBuilder, + EntityTypeStepBuilder, + ChangeStepBuilder, + AspectStepBuilder, + Build { private String entityUrn; private String entityType; @@ -116,9 +120,11 @@ public Build aspect(DataTemplate aspect) { @Override public MetadataChangeProposalWrapper build() { try { - Objects.requireNonNull(this.aspectName, + Objects.requireNonNull( + this.aspectName, "aspectName could not be inferred from provided aspect and was not explicitly provided as an override"); - return new MetadataChangeProposalWrapper(entityType, entityUrn, changeType, aspect, aspectName); + return new MetadataChangeProposalWrapper( + entityType, entityUrn, changeType, aspect, aspectName); } catch (Exception e) { throw new EventValidationException("Failed to create a metadata change proposal event", e); } @@ -131,7 +137,8 @@ public Build aspectName(String aspectName) { } } - public static MetadataChangeProposalWrapper 
create(Consumer<EntityTypeStepBuilder> builderConsumer) { + public static MetadataChangeProposalWrapper create( + Consumer<EntityTypeStepBuilder> builderConsumer) { MetadataChangeProposalWrapperBuilder builder = new MetadataChangeProposalWrapperBuilder(); builderConsumer.accept(builder); return builder.build(); diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java index 6fe07ac448a80..fa7c21fd41d9a 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java @@ -1,22 +1,17 @@ /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * <p>http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + * <p>Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing permissions and + * limitations under the License. */ - package datahub.event; import java.io.IOException; @@ -26,21 +21,20 @@ public class StringEscapeUtils { - private StringEscapeUtils() { + private StringEscapeUtils() {} - } - /** * Worker method for the {@link #escapeJavaScript(String)} method. 
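The staged builder enforced above (entityType, then entityUrn, then change type, then aspect) mirrors what FileEmitterTest below does, and EventFormatter turns the wrapper into a wire-ready MetadataChangeProposal. A sketch with an illustrative urn and description:

    import com.linkedin.dataset.DatasetProperties;
    import com.linkedin.mxe.MetadataChangeProposal;
    import datahub.event.EventFormatter;
    import datahub.event.MetadataChangeProposalWrapper;

    MetadataChangeProposalWrapper mcpw =
        MetadataChangeProposalWrapper.builder()
            .entityType("dataset")
            .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,fct_users,PROD)")
            .upsert()
            .aspect(new DatasetProperties().setDescription("Fact table of user signups"))
            .build();

    // PEGASUS_JSON is the default format; convert() escapes the serialized aspect
    // and wraps it in a GenericAspect with content type application/json.
    MetadataChangeProposal mcp = new EventFormatter().convert(mcpw); // throws IOException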
- * + * * @param out write to receieve the escaped string * @param str String to escape values in, may be null * @param escapeSingleQuote escapes single quotes if <code>true</code> * @param escapeForwardSlash TODO * @throws IOException if an IOException occurs */ - private static void escapeJavaStyleString(Writer out, String str, boolean escapeSingleQuote, - boolean escapeForwardSlash) throws IOException { + private static void escapeJavaStyleString( + Writer out, String str, boolean escapeSingleQuote, boolean escapeForwardSlash) + throws IOException { if (out == null) { throw new IllegalArgumentException("The Writer must not be null"); } else if (str != null) { @@ -56,35 +50,35 @@ private static void escapeJavaStyleString(Writer out, String str, boolean escape out.write("\\u00" + hex(ch)); } else if (ch < ' ') { switch (ch) { - case '\b': - out.write(92); - out.write(98); - break; - case '\t': - out.write(92); - out.write(116); - break; - case '\n': - out.write(92); - out.write(110); - break; - case '\u000b': + case '\b': + out.write(92); + out.write(98); + break; + case '\t': + out.write(92); + out.write(116); + break; + case '\n': + out.write(92); + out.write(110); + break; + case '\u000b': - case '\f': - out.write(92); - out.write(102); - break; - case '\r': - out.write(92); - out.write(114); - break; - default: - if (ch > 15) { - out.write("\\u00" + hex(ch)); - } else { - out.write("\\u000" + hex(ch)); - } - break; + case '\f': + out.write(92); + out.write(102); + break; + case '\r': + out.write(92); + out.write(114); + break; + default: + if (ch > 15) { + out.write("\\u00" + hex(ch)); + } else { + out.write("\\u000" + hex(ch)); + } + break; } } else { @@ -95,8 +89,7 @@ private static void escapeJavaStyleString(Writer out, String str, boolean escape } /** - * Returns an upper case hexadecimal <code>String</code> for the given - * character. + * Returns an upper case hexadecimal <code>String</code> for the given character. * * @param ch The character to convert. * @return An upper case hexadecimal <code>String</code> @@ -113,34 +106,35 @@ private static String hex(char ch) { * @param escapeForwardSlash TODO * @return the escaped string */ - private static String escapeJavaStyleString(String str, boolean escapeSingleQuotes, boolean escapeForwardSlash) throws IOException { + private static String escapeJavaStyleString( + String str, boolean escapeSingleQuotes, boolean escapeForwardSlash) throws IOException { if (str == null) { return null; } else { StringWriter writer = new StringWriter(str.length() * 2); escapeJavaStyleString(writer, str, escapeSingleQuotes, escapeForwardSlash); return writer.toString(); - } } - + /** * Escapes the characters in a <code>String</code> using Java String rules. - * <p> - * Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) - * <p> - * So a tab becomes the characters <code>'\\'</code> and <code>'t'</code>. - * <p> - * The only difference between Java strings and JavaScript strings - * is that in JavaScript, a single quote must be escaped. - * <p> - * Example: + * + * <p>Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) + * + * <p>So a tab becomes the characters <code>'\\'</code> and <code>'t'</code>. + * + * <p>The only difference between Java strings and JavaScript strings is that in JavaScript, a + * single quote must be escaped. + * + * <p>Example: + * * <pre> * input string: He didn't say, "Stop!" 
* output string: He didn't say, \"Stop!\" * </pre> * - * @param str String to escape values in, may be null + * @param str String to escape values in, may be null * @return String with escaped values, <code>null</code> if null string input */ public static String escapeJava(String str) throws IOException { diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java index eb834ccea2b91..7dfb9d33f6948 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java @@ -9,7 +9,6 @@ import lombok.Builder; import lombok.Value; - @JsonInclude(JsonInclude.Include.NON_NULL) @Value @Builder @@ -17,15 +16,21 @@ public class UpsertAspectRequest { @JsonProperty("entityType") - @Schema(required = true, description = "The name of the entity matching with its definition in the entity registry") + @Schema( + required = true, + description = "The name of the entity matching with its definition in the entity registry") String entityType; @JsonProperty("entityUrn") - @Schema(description = "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") + @Schema( + description = + "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") String entityUrn; @JsonProperty("entityKeyAspect") - @Schema(description = "A key aspect referencing the entity to be updated, required if entityUrn is null") + @Schema( + description = + "A key aspect referencing the entity to be updated, required if entityUrn is null") OneOfGenericAspectValue entityKeyAspect; @JsonProperty("aspect") @@ -33,7 +38,5 @@ public class UpsertAspectRequest { OneOfGenericAspectValue aspect; @JsonPOJOBuilder(withPrefix = "") - public static class UpsertAspectRequestBuilder { - - } + public static class UpsertAspectRequestBuilder {} } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java index e591fee3f68a8..01b39f77913bc 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java @@ -1,7 +1,20 @@ package datahub.client.file; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DatabindException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.JacksonDataTemplateCodec; +import com.linkedin.dataset.DatasetProperties; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.MetadataWriteResponse; +import datahub.event.MetadataChangeProposalWrapper; import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -10,47 +23,34 @@ import java.util.Map; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; - import org.junit.Assert; import org.junit.Rule; import org.junit.Test; 
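The contract of escapeJava documented above, restated as a two-line example (quotes and control characters are escaped; unlike JavaScript escaping, single quotes are left alone):

    // escaped holds: He didn't say, \"Stop!\"  (with literal backslashes)
    String escaped = StringEscapeUtils.escapeJava("He didn't say, \"Stop!\"");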
import org.junit.rules.TemporaryFolder; -import com.fasterxml.jackson.core.exc.StreamReadException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.DatabindException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.data.DataMap; -import com.linkedin.data.template.JacksonDataTemplateCodec; -import com.linkedin.dataset.DatasetProperties; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.MetadataWriteResponse; -import datahub.event.MetadataChangeProposalWrapper; - -import static com.linkedin.metadata.Constants.*; - - public class FileEmitterTest { private final ObjectMapper objectMapper; private final JacksonDataTemplateCodec dataTemplateCodec; public FileEmitterTest() { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); } - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Test public void testFileEmitter() throws IOException { - InputStream goldenFileStream = ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); + InputStream goldenFileStream = + ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); String tempRoot = tempFolder.getRoot().toString(); String outputFile = tempRoot + "/test.json"; @@ -61,24 +61,22 @@ public void testFileEmitter() throws IOException { emitter.close(); goldenFileStream = ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); this.assertEqualJsonFile(goldenFileStream, outputFile); - } - private void assertEqualJsonFile(InputStream file1, String file2) throws StreamReadException, DatabindException, - IOException { - TypeReference<List<Map<String, Object>>> typeRef = new TypeReference<List<Map<String, Object>>>() { - }; + private void assertEqualJsonFile(InputStream file1, String file2) + throws StreamReadException, DatabindException, IOException { + TypeReference<List<Map<String, Object>>> typeRef = + new TypeReference<List<Map<String, Object>>>() {}; List<Map<String, Object>> map1 = this.objectMapper.readValue(file1, typeRef); File f2 = new File(file2); List<Map<String, Object>> map2 = this.objectMapper.readValue(f2, typeRef); Assert.assertEquals(map1, map2); } - private List<MetadataChangeProposal> getMCPs(InputStream fileStream) throws StreamReadException, DatabindException, - IOException { + private List<MetadataChangeProposal> getMCPs(InputStream fileStream) + throws StreamReadException, DatabindException, IOException { ArrayList<MetadataChangeProposal> mcps = new ArrayList<MetadataChangeProposal>(); - TypeReference<Map<String, Object>[]> typeRef = new TypeReference<Map<String, Object>[]>() { - }; + TypeReference<Map<String, Object>[]> typeRef = new TypeReference<Map<String, Object>[]>() {}; Map<String, Object>[] maps = this.objectMapper.readValue(fileStream, typeRef); for 
(Map<String, Object> map : maps) { String json = objectMapper.writeValueAsString(map); @@ -94,20 +92,24 @@ public void testSuccessCallback() throws Exception { String tempRoot = tempFolder.getRoot().toString(); String outputFile = tempRoot + "/testCallBack.json"; FileEmitter emitter = new FileEmitter(FileEmitterConfig.builder().fileName(outputFile).build()); - MetadataChangeProposalWrapper<?> mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + MetadataChangeProposalWrapper<?> mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); AtomicReference<MetadataWriteResponse> callbackResponse = new AtomicReference<>(); - Future<MetadataWriteResponse> future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - callbackResponse.set(response); - Assert.assertTrue(response.isSuccess()); - } - - @Override - public void onFailure(Throwable exception) { - Assert.fail("Should not be called"); - } - }); + Future<MetadataWriteResponse> future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + callbackResponse.set(response); + Assert.assertTrue(response.isSuccess()); + } + + @Override + public void onFailure(Throwable exception) { + Assert.fail("Should not be called"); + } + }); Assert.assertEquals(callbackResponse.get(), future.get()); } @@ -119,25 +121,27 @@ public void testFailCallback() throws Exception { String outputFile = tempRoot + "/testCallBack.json"; FileEmitter emitter = new FileEmitter(FileEmitterConfig.builder().fileName(outputFile).build()); emitter.close(); - MetadataChangeProposalWrapper<?> mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); - Future<MetadataWriteResponse> future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - - Assert.fail("Should not be called"); - } - - @Override - public void onFailure(Throwable exception) { - - } - }); + MetadataChangeProposalWrapper<?> mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + Future<MetadataWriteResponse> future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + + Assert.fail("Should not be called"); + } + + @Override + public void onFailure(Throwable exception) {} + }); Assert.assertFalse(future.get().isSuccess()); - } - private MetadataChangeProposalWrapper<?> getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper<?> getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -145,5 +149,4 @@ private MetadataChangeProposalWrapper<?> getMetadataChangeProposalWrapper(String .aspect(new DatasetProperties().setDescription(description)) .build(); } - } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java index 520594381426f..f61121adf1395 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java @@ -14,14 +14,12 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; - public class 
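The FileEmitter this test drives appends each proposal to a JSON file; the happy path is just construct, emit, close. The file name below is a placeholder:

    import datahub.client.file.FileEmitter;
    import datahub.client.file.FileEmitterConfig;

    FileEmitter fileEmitter =
        new FileEmitter(FileEmitterConfig.builder().fileName("/tmp/mcps.json").build());
    fileEmitter.emit(mcpw); // same wrapper shape as getMetadataChangeProposalWrapper above
    fileEmitter.close();    // flushes; emits after close() fail, as testFailCallback shows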
AvroSerializerTest { + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); - - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -35,12 +33,14 @@ public void avroFileWrite() throws Exception { AvroSerializer avroSerializer = new AvroSerializer(); File file = tempFolder.newFile("data.avro"); - DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(avroSerializer.getRecordSchema()); + DatumWriter<GenericRecord> writer = + new GenericDatumWriter<GenericRecord>(avroSerializer.getRecordSchema()); DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(writer); dataFileWriter.create(avroSerializer.getRecordSchema(), file); String entityUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,logging_events,PROD)"; for (int i = 0; i < 10; ++i) { - MetadataChangeProposalWrapper metadataChangeProposalWrapper = getMetadataChangeProposalWrapper("Test description - " + i, entityUrn); + MetadataChangeProposalWrapper metadataChangeProposalWrapper = + getMetadataChangeProposalWrapper("Test description - " + i, entityUrn); GenericRecord record = avroSerializer.serialize(metadataChangeProposalWrapper); dataFileWriter.append(record); } @@ -48,7 +48,8 @@ public void avroFileWrite() throws Exception { File readerFile = file; DatumReader<GenericRecord> reader = new GenericDatumReader<>(avroSerializer.getRecordSchema()); - DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(readerFile, reader); + DataFileReader<GenericRecord> dataFileReader = + new DataFileReader<GenericRecord>(readerFile, reader); while (dataFileReader.hasNext()) { GenericRecord record = dataFileReader.next(); System.out.println(record.get("entityUrn")); diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java index dff109cf1e455..5161e6460b8a1 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java @@ -3,13 +3,20 @@ import static datahub.client.kafka.KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC; import static java.util.Collections.singletonList; +import com.linkedin.dataset.DatasetProperties; +import datahub.client.MetadataWriteResponse; +import datahub.client.kafka.containers.KafkaContainer; +import datahub.client.kafka.containers.SchemaRegistryContainer; +import datahub.client.kafka.containers.ZookeeperContainer; +import datahub.event.MetadataChangeProposalWrapper; +import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import java.io.IOException; import java.util.Objects; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.stream.Stream; - import org.apache.avro.Schema; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.KafkaAdminClient; @@ -20,16 +27,6 @@ import org.testcontainers.containers.Network; import 
org.testng.Assert; -import com.linkedin.dataset.DatasetProperties; - -import datahub.client.MetadataWriteResponse; -import datahub.client.kafka.containers.KafkaContainer; -import datahub.client.kafka.containers.SchemaRegistryContainer; -import datahub.client.kafka.containers.ZookeeperContainer; -import datahub.event.MetadataChangeProposalWrapper; -import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; - public class KafkaEmitterTest { private static final String TOPIC = DEFAULT_MCP_KAFKA_TOPIC; @@ -47,11 +44,13 @@ public class KafkaEmitterTest { public static void confluentSetup() throws Exception { network = Network.newNetwork(); zookeeperContainer = new ZookeeperContainer().withNetwork(network); - kafkaContainer = new KafkaContainer(zookeeperContainer.getInternalUrl()) + kafkaContainer = + new KafkaContainer(zookeeperContainer.getInternalUrl()) .withNetwork(network) .dependsOn(zookeeperContainer); - schemaRegistryContainer = new SchemaRegistryContainer(zookeeperContainer.getInternalUrl(), - kafkaContainer.getInternalBootstrapServers()) + schemaRegistryContainer = + new SchemaRegistryContainer( + zookeeperContainer.getInternalUrl(), kafkaContainer.getInternalBootstrapServers()) .withNetwork(network) .dependsOn(zookeeperContainer, kafkaContainer); schemaRegistryContainer.start(); @@ -78,8 +77,9 @@ public void testConnection() throws IOException, ExecutionException, Interrupted public void testSend() throws IOException, InterruptedException, ExecutionException { @SuppressWarnings("rawtypes") - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", - "urn:li:dataset:(urn:li:dataPlatform:spark,foo.bar,PROD)"); + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper( + "Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:spark,foo.bar,PROD)"); Future<MetadataWriteResponse> future = emitter.emit(mcpw); MetadataWriteResponse response = future.get(); System.out.println("Response: " + response); @@ -95,26 +95,41 @@ private static AdminClient createAdminClient(String bootstrap) { private static void registerSchemaRegistryTypes() throws IOException, RestClientException { Schema mcpSchema = new AvroSerializer().getRecordSchema(); - CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryContainer.getUrl(), 1000); + CachedSchemaRegistryClient schemaRegistryClient = + new CachedSchemaRegistryClient(schemaRegistryContainer.getUrl(), 1000); schemaRegistryClient.register(mcpSchema.getFullName(), mcpSchema); } private static String createTopics(Stream<String> bootstraps) { short replicationFactor = 1; int partitions = 1; - return bootstraps.parallel().map(bootstrap -> { - try { - createAdminClient(bootstrap).createTopics(singletonList(new NewTopic(TOPIC, partitions, replicationFactor))).all().get(); - return bootstrap; - } catch (RuntimeException | InterruptedException | ExecutionException ex) { - return null; - } - }).filter(Objects::nonNull).findFirst().get(); + return bootstraps + .parallel() + .map( + bootstrap -> { + try { + createAdminClient(bootstrap) + .createTopics(singletonList(new NewTopic(TOPIC, partitions, replicationFactor))) + .all() + .get(); + return bootstrap; + } catch (RuntimeException | InterruptedException | ExecutionException ex) { + return null; + } + }) + .filter(Objects::nonNull) + .findFirst() + .get(); } @SuppressWarnings("rawtypes") - private MetadataChangeProposalWrapper 
getMetadataChangeProposalWrapper(String description, String entityUrn) { - return MetadataChangeProposalWrapper.builder().entityType("dataset").entityUrn(entityUrn).upsert() - .aspect(new DatasetProperties().setDescription(description)).build(); + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { + return MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn(entityUrn) + .upsert() + .aspect(new DatasetProperties().setDescription(description)) + .build(); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java index 5f26748cb677c..86ced5ce620cd 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java @@ -1,21 +1,17 @@ package datahub.client.kafka.containers; +import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; + import com.github.dockerjava.api.command.InspectContainerResponse; +import java.nio.charset.StandardCharsets; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; import org.testcontainers.images.builder.Transferable; import org.testcontainers.utility.TestcontainersConfiguration; -import java.nio.charset.StandardCharsets; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; - -/** - * This container wraps Confluent Kafka. - * - */ +/** This container wraps Confluent Kafka. 
*/ public class KafkaContainer extends GenericContainer<KafkaContainer> { private static final String STARTER_SCRIPT = "/testcontainers_start.sh"; @@ -47,11 +43,17 @@ public KafkaContainer(String confluentPlatformVersion, String zookeeperConnect) // with itself via internal // listener when KAFKA_INTER_BROKER_LISTENER_NAME is set, otherwise Kafka will // try to use the advertised listener - withEnv("KAFKA_LISTENERS", - "PLAINTEXT://0.0.0.0:" + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT - + ",BROKER://0.0.0.0:" + KAFKA_INTERNAL_PORT - + ",BROKER_LOCAL://0.0.0.0:" + KAFKA_LOCAL_PORT); - withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT,BROKER_LOCAL:PLAINTEXT"); + withEnv( + "KAFKA_LISTENERS", + "PLAINTEXT://0.0.0.0:" + + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT + + ",BROKER://0.0.0.0:" + + KAFKA_INTERNAL_PORT + + ",BROKER_LOCAL://0.0.0.0:" + + KAFKA_LOCAL_PORT); + withEnv( + "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", + "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT,BROKER_LOCAL:PLAINTEXT"); withEnv("KAFKA_INTER_BROKER_LISTENER_NAME", "BROKER"); withEnv("KAFKA_BROKER_ID", "1"); @@ -68,8 +70,9 @@ public Stream<String> getBootstrapServers() { if (port == PORT_NOT_ASSIGNED) { throw new IllegalStateException("You should start Kafka container first"); } - return Stream.of(String.format("PLAINTEXT://%s:%s", getHost(), port), - String.format("PLAINTEXT://localhost:%s", getMappedPort(KAFKA_LOCAL_PORT))); + return Stream.of( + String.format("PLAINTEXT://%s:%s", getHost(), port), + String.format("PLAINTEXT://localhost:%s", getMappedPort(KAFKA_LOCAL_PORT))); } public String getInternalBootstrapServers() { @@ -78,7 +81,10 @@ public String getInternalBootstrapServers() { @Override protected void doStart() { - withCommand("sh", "-c", "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + "sh " + STARTER_SCRIPT); + withCommand( + "sh", + "-c", + "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + "sh " + STARTER_SCRIPT); super.doStart(); } @@ -100,22 +106,33 @@ protected void containerIsStarting(InspectContainerResponse containerInfo, boole String command = "#!/bin/bash \n"; command += "export KAFKA_ZOOKEEPER_CONNECT='" + zookeeperConnect + "'\n"; - command += "export KAFKA_ADVERTISED_LISTENERS='" + Stream - .concat(Stream.of("PLAINTEXT://" + networkAlias + ":" + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT, + command += + "export KAFKA_ADVERTISED_LISTENERS='" + + Stream.concat( + Stream.of( + "PLAINTEXT://" + + networkAlias + + ":" + + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT, "BROKER_LOCAL://localhost:" + getMappedPort(KAFKA_LOCAL_PORT)), - containerInfo.getNetworkSettings().getNetworks().values().stream() - .map(it -> "BROKER://" + it.getIpAddress() + ":" + KAFKA_INTERNAL_PORT)) - .collect(Collectors.joining(",")) + "'\n"; + containerInfo.getNetworkSettings().getNetworks().values().stream() + .map(it -> "BROKER://" + it.getIpAddress() + ":" + KAFKA_INTERNAL_PORT)) + .collect(Collectors.joining(",")) + + "'\n"; command += ". 
/etc/confluent/docker/bash-config \n"; command += "/etc/confluent/docker/configure \n"; command += "/etc/confluent/docker/launch \n"; - copyFileToContainer(Transferable.of(command.getBytes(StandardCharsets.UTF_8), 700), STARTER_SCRIPT); + copyFileToContainer( + Transferable.of(command.getBytes(StandardCharsets.UTF_8), 700), STARTER_SCRIPT); } private static String getKafkaContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration.getInstance().getProperties().getOrDefault("kafka.container.image", - "confluentinc/cp-kafka:" + confluentPlatformVersion); + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "kafka.container.image", "confluentinc/cp-kafka:" + confluentPlatformVersion); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java index 907a4558b60d9..5c0223e580575 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java @@ -5,46 +5,48 @@ import java.io.IOException; import java.time.Duration; - import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; import org.testcontainers.utility.TestcontainersConfiguration; - public class SchemaRegistryContainer extends GenericContainer<SchemaRegistryContainer> { - private static final int SCHEMA_REGISTRY_INTERNAL_PORT = 8081; - - private final String networkAlias = "schema-registry"; - - public SchemaRegistryContainer(String zookeeperConnect, String kafkaBootstrap) throws IOException { - this(CONFLUENT_PLATFORM_VERSION, zookeeperConnect, kafkaBootstrap); - } - - public SchemaRegistryContainer(String confluentPlatformVersion, String zookeeperConnect, String kafkaBootstrap) throws IOException { - super(getSchemaRegistryContainerImage(confluentPlatformVersion)); - - addEnv("SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL", zookeeperConnect); - addEnv("SCHEMA_REGISTRY_HOST_NAME", "localhost"); - addEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", kafkaBootstrap); - - withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT); - withNetworkAliases(networkAlias); - - waitingFor(new HttpWaitStrategy().forPath("/subjects").withStartupTimeout(Duration.ofMinutes(2))); - } - - public String getUrl() { - return format("http://%s:%d", this.getContainerIpAddress(), this.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT)); - } - - - private static String getSchemaRegistryContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration - .getInstance().getProperties().getOrDefault( - "schemaregistry.container.image", - "confluentinc/cp-schema-registry:" + confluentPlatformVersion - ); - } + private static final int SCHEMA_REGISTRY_INTERNAL_PORT = 8081; + + private final String networkAlias = "schema-registry"; + + public SchemaRegistryContainer(String zookeeperConnect, String kafkaBootstrap) + throws IOException { + this(CONFLUENT_PLATFORM_VERSION, zookeeperConnect, kafkaBootstrap); + } + + public SchemaRegistryContainer( + String confluentPlatformVersion, String zookeeperConnect, String kafkaBootstrap) + throws IOException { + 
super(getSchemaRegistryContainerImage(confluentPlatformVersion)); + + addEnv("SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL", zookeeperConnect); + addEnv("SCHEMA_REGISTRY_HOST_NAME", "localhost"); + addEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", kafkaBootstrap); + + withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT); + withNetworkAliases(networkAlias); + + waitingFor( + new HttpWaitStrategy().forPath("/subjects").withStartupTimeout(Duration.ofMinutes(2))); + } + + public String getUrl() { + return format( + "http://%s:%d", + this.getContainerIpAddress(), this.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT)); + } + + private static String getSchemaRegistryContainerImage(String confluentPlatformVersion) { + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "schemaregistry.container.image", + "confluentinc/cp-schema-registry:" + confluentPlatformVersion); + } } - - diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java index efa79724f114b..93ba828096282 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java @@ -6,18 +6,17 @@ final class Utils { public static final String CONFLUENT_PLATFORM_VERSION = "7.4.0"; - private Utils() { - } + private Utils() {} /** - * Retrieves a random port that is currently not in use on this machine. - * - * @return a free port - * @throws IOException wraps the exceptions which may occur during this method call. - */ - static int getRandomFreePort() throws IOException { - @SuppressWarnings("resource") - ServerSocket serverSocket = new ServerSocket(0); - return serverSocket.getLocalPort(); - } -} \ No newline at end of file + * Retrieves a random port that is currently not in use on this machine. + * + * @return a free port + * @throws IOException wraps the exceptions which may occur during this method call. 
+ */ + static int getRandomFreePort() throws IOException { + @SuppressWarnings("resource") + ServerSocket serverSocket = new ServerSocket(0); + return serverSocket.getLocalPort(); + } +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java index 5bfc5055df68a..538092d810ce1 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java @@ -1,48 +1,48 @@ package datahub.client.kafka.containers; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; -import org.testcontainers.utility.TestcontainersConfiguration; +import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; +import static java.lang.String.format; import java.io.IOException; import java.util.HashMap; - -import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; -import static java.lang.String.format; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; +import org.testcontainers.utility.TestcontainersConfiguration; public class ZookeeperContainer extends GenericContainer<ZookeeperContainer> { - private static final int ZOOKEEPER_INTERNAL_PORT = 2181; - private static final int ZOOKEEPER_TICK_TIME = 2000; - - private final String networkAlias = "zookeeper"; - - public ZookeeperContainer() throws IOException { - this(CONFLUENT_PLATFORM_VERSION); - } - - public ZookeeperContainer(String confluentPlatformVersion) throws IOException { - super(getZookeeperContainerImage(confluentPlatformVersion)); - - HashMap<String, String> env = new HashMap<String, String>(); - env.put("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_INTERNAL_PORT)); - env.put("ZOOKEEPER_TICK_TIME", Integer.toString(ZOOKEEPER_TICK_TIME)); - withEnv(env); - - addExposedPort(ZOOKEEPER_INTERNAL_PORT); - withNetworkAliases(networkAlias); - waitingFor(new HostPortWaitStrategy()); - } - - public String getInternalUrl() { - return format("%s:%d", networkAlias, ZOOKEEPER_INTERNAL_PORT); - } - - private static String getZookeeperContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration - .getInstance().getProperties().getOrDefault( - "zookeeper.container.image", - "confluentinc/cp-zookeeper:" + confluentPlatformVersion - ); - } -} \ No newline at end of file + private static final int ZOOKEEPER_INTERNAL_PORT = 2181; + private static final int ZOOKEEPER_TICK_TIME = 2000; + + private final String networkAlias = "zookeeper"; + + public ZookeeperContainer() throws IOException { + this(CONFLUENT_PLATFORM_VERSION); + } + + public ZookeeperContainer(String confluentPlatformVersion) throws IOException { + super(getZookeeperContainerImage(confluentPlatformVersion)); + + HashMap<String, String> env = new HashMap<String, String>(); + env.put("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_INTERNAL_PORT)); + env.put("ZOOKEEPER_TICK_TIME", Integer.toString(ZOOKEEPER_TICK_TIME)); + withEnv(env); + + addExposedPort(ZOOKEEPER_INTERNAL_PORT); + withNetworkAliases(networkAlias); + waitingFor(new HostPortWaitStrategy()); + } + + public String getInternalUrl() { + return format("%s:%d", networkAlias, 
ZOOKEEPER_INTERNAL_PORT); + } + + private static String getZookeeperContainerImage(String confluentPlatformVersion) { + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "zookeeper.container.image", + "confluentinc/cp-zookeeper:" + confluentPlatformVersion); + } +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index c90d3f0d2179e..1d387acb0ce12 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -1,5 +1,7 @@ package datahub.client.patch; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Edge; import com.linkedin.common.FabricType; @@ -38,26 +40,25 @@ import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; -import static com.linkedin.metadata.Constants.*; - - @RunWith(MockitoJUnitRunner.class) public class PatchTest { - /** - * Examples for running patches, tests set to ignore as they target a GMS running on localhost - */ - + /** Examples for running patches, tests set to ignore as they target a GMS running on localhost */ @Test @Ignore public void testLocalUpstream() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addUpstream(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), - DatasetLineageType.TRANSFORMED) - .build(); + MetadataChangeProposal upstreamPatch = + new UpstreamLineagePatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addUpstream( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), + DatasetLineageType.TRANSFORMED) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(upstreamPatch); System.out.println(response.get().getResponseContent()); @@ -72,10 +73,15 @@ public void testLocalUpstream() { public void testLocalUpstreamRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeUpstream(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .build(); + MetadataChangeProposal upstreamPatch = + new UpstreamLineagePatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeUpstream( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(upstreamPatch); System.out.println(response.get().getResponseContent()); @@ -92,10 +98,13 @@ public void testLocalEditableSchemaMetadataTag() { try { TagAssociation tagAssociation = new TagAssociation(); tagAssociation.setTag(new TagUrn("Legacy")); - MetadataChangeProposal fieldTagPatch = new EditableSchemaMetadataPatchBuilder() - 
.urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addTag(tagAssociation, "field_foo") - .build(); + MetadataChangeProposal fieldTagPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addTag(tagAssociation, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTagPatch); System.out.println(response.get().getResponseContent()); @@ -111,10 +120,13 @@ public void testLocalEditableSchemaMetadataTagRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { TagUrn urn = new TagUrn("Legacy"); - MetadataChangeProposal fieldTagPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeTag(urn, "field_foo") - .build(); + MetadataChangeProposal fieldTagPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeTag(urn, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTagPatch); System.out.println(response.get().getResponseContent()); @@ -132,10 +144,13 @@ public void testLocalEditableSchemaMetadataTerm() { GlossaryTermAssociation termAssociation = new GlossaryTermAssociation(); termAssociation.setUrn(new GlossaryTermUrn("CustomerAccount")); - MetadataChangeProposal fieldTermPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addGlossaryTerm(termAssociation, "field_foo") - .build(); + MetadataChangeProposal fieldTermPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addGlossaryTerm(termAssociation, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTermPatch); System.out.println(response.get().getResponseContent()); @@ -152,10 +167,13 @@ public void testLocalEditableSchemaMetadataTermRemove() { try { GlossaryTermUrn urn = new GlossaryTermUrn("CustomerAccount"); - MetadataChangeProposal fieldTermPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeGlossaryTerm(urn, "field_foo") - .build(); + MetadataChangeProposal fieldTermPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeGlossaryTerm(urn, "field_foo") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(fieldTermPatch); System.out.println(response.get().getResponseContent()); @@ -168,16 +186,18 @@ public void testLocalEditableSchemaMetadataTermRemove() { @Test @Ignore public void testLocalOwnership() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .addOwner(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) - 
.build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder() + .urn(datasetUrn) + .addOwner(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) + .build(); System.out.println(ownershipPatch.toString()); Future<MetadataWriteResponse> response = fileEmitter.emit(ownershipPatch); response.get(); @@ -193,16 +213,15 @@ public void testLocalOwnership() { @Test @Ignore public void testLocalOwnershipRemove() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .removeOwner(new CorpuserUrn("gdoe")) - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder().urn(datasetUrn).removeOwner(new CorpuserUrn("gdoe")).build(); System.out.println(ownershipPatch.toString()); Future<MetadataWriteResponse> response = fileEmitter.emit(ownershipPatch); response.get(); @@ -218,16 +237,18 @@ public void testLocalOwnershipRemove() { @Test @Ignore public void testLocalOwnershipRemoveType() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .removeOwnershipType(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder() + .urn(datasetUrn) + .removeOwnershipType(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) + .build(); System.out.println(ownershipPatch.toString()); Future<MetadataWriteResponse> response = fileEmitter.emit(ownershipPatch); response.get(); @@ -245,14 +266,17 @@ public void testLocalOwnershipRemoveType() { public void testLocalDataJobInfo() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal jobInfoToPatch = new DataJobInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .setDescription("something") - .setName("name") - .setType("type") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + MetadataChangeProposal jobInfoToPatch = + new DataJobInfoPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .setDescription("something") + .setName("name") + .setType("type") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future<MetadataWriteResponse> response = 
restEmitter.emit(jobInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -267,12 +291,15 @@ public void testLocalDataJobInfo() { public void testLocalDataJobInfoRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal jobInfoToPatch = new DataJobInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .setDescription(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + MetadataChangeProposal jobInfoToPatch = + new DataJobInfoPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .setDescription(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(jobInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -288,14 +315,16 @@ public void testLocalDatasetProperties() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal datasetPropertiesToPatch = new DatasetPropertiesPatchBuilder() - .urn(datasetUrn) - .setDescription("something") - .setName("name") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal datasetPropertiesToPatch = + new DatasetPropertiesPatchBuilder() + .urn(datasetUrn) + .setDescription("something") + .setName("name") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(datasetPropertiesToPatch); System.out.println(response.get().getResponseContent()); @@ -311,14 +340,16 @@ public void testLocalDatasetPropertiesRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal datasetPropertiesToPatch = new DatasetPropertiesPatchBuilder() - .urn(datasetUrn) - .setDescription(null) - .setName(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal datasetPropertiesToPatch = + new DatasetPropertiesPatchBuilder() + .urn(datasetUrn) + .setDescription(null) + .setName(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(datasetPropertiesToPatch); System.out.println(response.get().getResponseContent()); @@ -333,14 +364,15 @@ public void testLocalDatasetPropertiesRemove() { public void testLocalDataFlowInfo() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal flowInfoToPatch = new DataFlowInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) - .setDescription("something") - .setName("name") - .setProject("project") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + MetadataChangeProposal flowInfoToPatch = + new DataFlowInfoPatchBuilder() + 
.urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) + .setDescription("something") + .setName("name") + .setProject("project") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(flowInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -355,13 +387,14 @@ public void testLocalDataFlowInfo() { public void testLocalDataFlowInfoRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal flowInfoToPatch = new DataFlowInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) - .setDescription(null) - .setProject(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + MetadataChangeProposal flowInfoToPatch = + new DataFlowInfoPatchBuilder() + .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) + .setDescription(null) + .setProject(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(flowInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -376,14 +409,27 @@ public void testLocalDataFlowInfoRemove() { public void testLocalDataJobInputAdd() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .addInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .addOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .addInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .addInputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .addOutputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .addInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .addOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .addInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .addInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .addOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -398,14 +444,27 @@ public void testLocalDataJobInputAdd() { public void testLocalDataJobInputRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - 
.urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .removeInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .removeOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .removeInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .removeInputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .removeOutputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .removeInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .removeOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .removeInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .removeInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .removeOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -420,24 +479,54 @@ public void testLocalDataJobInputRemove() { public void testLocalDataJobInputAddEdge() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - Edge inputDataset = new Edge() - .setDestinationUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - Edge outputDataset = new Edge() - .setDestinationUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - Edge inputDataJob = new Edge() - .setDestinationUrn(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .addEdge(inputDataset, LineageDirection.UPSTREAM) - .addEdge(outputDataset, LineageDirection.DOWNSTREAM) - .addEdge(inputDataJob, LineageDirection.UPSTREAM) - .build(); + Edge inputDataset = + new Edge() + .setDestinationUrn( + DatasetUrn.createFromString( + 
"urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + Edge outputDataset = + new Edge() + .setDestinationUrn( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + Edge inputDataJob = + new Edge() + .setDestinationUrn( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .addEdge(inputDataset, LineageDirection.UPSTREAM) + .addEdge(outputDataset, LineageDirection.DOWNSTREAM) + .addEdge(inputDataJob, LineageDirection.UPSTREAM) + .build(); Future<MetadataWriteResponse> response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -446,5 +535,4 @@ public void testLocalDataJobInputAddEdge() { System.out.println(Arrays.asList(e.getStackTrace())); } } - } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java index 190ca8a8313c2..657669d19439c 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java @@ -1,5 +1,8 @@ package datahub.client.rest; +import static com.linkedin.metadata.Constants.*; +import static org.mockserver.model.HttpRequest.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.dataset.DatasetProperties; @@ -28,9 +31,7 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; - import javax.net.ssl.SSLHandshakeException; - import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; @@ -50,24 +51,16 @@ import org.mockserver.model.HttpRequest; import org.mockserver.model.RequestDefinition; -import static com.linkedin.metadata.Constants.*; -import static org.mockserver.model.HttpRequest.*; - - @RunWith(MockitoJUnitRunner.class) public class RestEmitterTest { - @Mock - HttpAsyncClientBuilder mockHttpClientFactory; + @Mock HttpAsyncClientBuilder mockHttpClientFactory; - @Mock - CloseableHttpAsyncClient mockClient; + @Mock CloseableHttpAsyncClient mockClient; - @Captor - ArgumentCaptor<HttpPost> postArgumentCaptor; + @Captor ArgumentCaptor<HttpPost> postArgumentCaptor; - @Captor - ArgumentCaptor<FutureCallback> callbackCaptor; + @Captor ArgumentCaptor<FutureCallback> callbackCaptor; @Before public void 
setupMocks() { @@ -79,7 +72,8 @@ public void testPost() throws URISyntaxException, IOException { RestEmitter emitter = RestEmitter.create(b -> b.asyncHttpClientBuilder(mockHttpClientFactory)); MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)"); + getMetadataChangeProposalWrapper( + "Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)"); emitter.emit(mcp, null); Mockito.verify(mockClient).execute(postArgumentCaptor.capture(), callbackCaptor.capture()); FutureCallback callback = callbackCaptor.getValue(); @@ -90,26 +84,32 @@ public void testPost() throws URISyntaxException, IOException { byte[] contentBytes = new byte[(int) testPost.getEntity().getContentLength()]; is.read(contentBytes); String contentString = new String(contentBytes, StandardCharsets.UTF_8); - String expectedContent = "{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset\\\"}\"}}}"; + String expectedContent = + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset\\\"}\"}}}"; Assert.assertEquals(expectedContent, contentString); } - + @Test - public void testExceptions() throws URISyntaxException, IOException, ExecutionException, InterruptedException { + public void testExceptions() + throws URISyntaxException, IOException, ExecutionException, InterruptedException { RestEmitter emitter = RestEmitter.create($ -> $.asyncHttpClientBuilder(mockHttpClientFactory)); - MetadataChangeProposalWrapper mcp = MetadataChangeProposalWrapper.create(b -> b.entityType("dataset") - .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)") - .upsert() - .aspect(new DatasetProperties().setDescription("Test Dataset"))); + MetadataChangeProposalWrapper mcp = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)") + .upsert() + .aspect(new DatasetProperties().setDescription("Test Dataset"))); Future<HttpResponse> mockFuture = Mockito.mock(Future.class); Mockito.when(mockClient.execute(Mockito.any(), Mockito.any())).thenReturn(mockFuture); - Mockito.when(mockFuture.get()).thenThrow(new ExecutionException("Test execution exception", null)); + Mockito.when(mockFuture.get()) + .thenThrow(new ExecutionException("Test execution exception", null)); try { emitter.emit(mcp, null).get(); Assert.fail("should not be here"); @@ -120,10 +120,18 @@ public void testExceptions() throws URISyntaxException, IOException, ExecutionEx @Test public void testExtraHeaders() throws Exception { - RestEmitter emitter = RestEmitter.create(b -> b.asyncHttpClientBuilder(mockHttpClientFactory) - .extraHeaders(Collections.singletonMap("Test-Header", "Test-Value"))); - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create( - b -> b.entityType("dataset").entityUrn("urn:li:dataset:foo").upsert().aspect(new DatasetProperties())); + RestEmitter emitter = + RestEmitter.create( + b -> + b.asyncHttpClientBuilder(mockHttpClientFactory) + 
.extraHeaders(Collections.singletonMap("Test-Header", "Test-Value"))); + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:foo") + .upsert() + .aspect(new DatasetProperties())); Future<HttpResponse> mockFuture = Mockito.mock(Future.class); Mockito.when(mockClient.execute(Mockito.any(), Mockito.any())).thenReturn(mockFuture); emitter.emit(mcpw, null); @@ -151,11 +159,15 @@ public void multithreadedTestExecutors() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.unlimited()) + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) .respond(org.mockserver.model.HttpResponse.response().withStatusCode(200)); ExecutorService executor = Executors.newFixedThreadPool(10); ArrayList<Future> results = new ArrayList(); @@ -164,59 +176,82 @@ public void multithreadedTestExecutors() throws Exception { int numRequests = 100; for (int i = 0; i < numRequests; ++i) { int finalI = i; - results.add(executor.submit(() -> { - try { - Thread.sleep(random.nextInt(100)); - MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper(String.format("Test Dataset %d", testIteration), - String.format("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", finalI)); - Future<MetadataWriteResponse> future = emitter.emit(mcp, null); - MetadataWriteResponse response = future.get(); - Assert.assertTrue(response.isSuccess()); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - })); + results.add( + executor.submit( + () -> { + try { + Thread.sleep(random.nextInt(100)); + MetadataChangeProposalWrapper mcp = + getMetadataChangeProposalWrapper( + String.format("Test Dataset %d", testIteration), + String.format( + "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", finalI)); + Future<MetadataWriteResponse> future = emitter.emit(mcp, null); + MetadataWriteResponse response = future.get(); + Assert.assertTrue(response.isSuccess()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + })); } - results.forEach(x -> { - try { - x.get(); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - }); + results.forEach( + x -> { + try { + x.get(); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + }); RequestDefinition[] recordedRequests = - testDataHubServer.getMockServer().retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); + testDataHubServer + .getMockServer() + .retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); Assert.assertEquals(100, recordedRequests.length); - List<HttpRequest> requests = Arrays.stream(recordedRequests) - .sequential() - .filter(x -> x instanceof HttpRequest) - .map(x -> (HttpRequest) x) - .collect(Collectors.toList()); + List<HttpRequest> requests = + Arrays.stream(recordedRequests) + .sequential() + .filter(x -> x instanceof HttpRequest) + .map(x -> (HttpRequest) x) + .collect(Collectors.toList()); ObjectMapper mapper = new ObjectMapper(); - int maxSize = 
Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); for (int i = 0; i < numRequests; ++i) { - String expectedContent = String.format("{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", i, testIteration); - - Assert.assertEquals(requests.stream().filter(x -> { - String bodyString = ""; - try { - bodyString = mapper.writeValueAsString( - mapper.readValue(x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); - } catch (IOException ioException) { - return false; - } - return bodyString.equals(expectedContent); - }).count(), 1); + String expectedContent = + String.format( + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", + i, testIteration); + + Assert.assertEquals( + requests.stream() + .filter( + x -> { + String bodyString = ""; + try { + bodyString = + mapper.writeValueAsString( + mapper.readValue( + x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); + } catch (IOException ioException) { + return false; + } + return bodyString.equals(expectedContent); + }) + .count(), + 1); } } - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -231,11 +266,15 @@ public void multithreadedTestSingleThreadCaller() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.unlimited()) + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) .respond(org.mockserver.model.HttpResponse.response().withStatusCode(200)); ArrayList<Future> results = new ArrayList(); Random random = new Random(); @@ -243,46 +282,65 @@ public void multithreadedTestSingleThreadCaller() throws Exception { int numRequests = 100; for (int i = 0; i < numRequests; ++i) { MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper(String.format("Test Dataset %d", testIteration), + getMetadataChangeProposalWrapper( + String.format("Test Dataset %d", testIteration), 
String.format("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", i)); Future<MetadataWriteResponse> future = emitter.emit(mcp, null); results.add(future); } - results.forEach(x -> { - try { - x.get(); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - }); + results.forEach( + x -> { + try { + x.get(); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + }); RequestDefinition[] recordedRequests = - testDataHubServer.getMockServer().retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); + testDataHubServer + .getMockServer() + .retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); Assert.assertEquals(numRequests, recordedRequests.length); - List<HttpRequest> requests = Arrays.stream(recordedRequests) - .sequential() - .filter(x -> x instanceof HttpRequest) - .map(x -> (HttpRequest) x) - .collect(Collectors.toList()); + List<HttpRequest> requests = + Arrays.stream(recordedRequests) + .sequential() + .filter(x -> x instanceof HttpRequest) + .map(x -> (HttpRequest) x) + .collect(Collectors.toList()); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); for (int i = 0; i < numRequests; ++i) { - String expectedContent = String.format("{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", i, testIteration); - - Assert.assertEquals(requests.stream().filter(x -> { - String bodyString = ""; - try { - bodyString = mapper.writeValueAsString( - mapper.readValue(x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); - } catch (IOException ioException) { - return false; - } - return bodyString.equals(expectedContent); - }).count(), 1); + String expectedContent = + String.format( + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", + i, testIteration); + + Assert.assertEquals( + requests.stream() + .filter( + x -> { + String bodyString = ""; + try { + bodyString = + mapper.writeValueAsString( + mapper.readValue( + x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); + } catch (IOException ioException) { + return false; + } + return bodyString.equals(expectedContent); + }) + .count(), + 1); } } @@ -292,30 +350,39 @@ public void testCallback() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), 
Times.unlimited()) - .respond(org.mockserver.model.HttpResponse.response().withStatusCode(500).withBody("exception")); - - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond( + org.mockserver.model.HttpResponse.response().withStatusCode(500).withBody("exception")); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); AtomicReference<MetadataWriteResponse> callbackResponse = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - Future<MetadataWriteResponse> future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - callbackResponse.set(response); - Assert.assertFalse(response.isSuccess()); - latch.countDown(); - } - - @Override - public void onFailure(Throwable exception) { - Assert.fail("Should not be called"); - latch.countDown(); - } - }); + Future<MetadataWriteResponse> future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + callbackResponse.set(response); + Assert.assertFalse(response.isSuccess()); + latch.countDown(); + } + + @Override + public void onFailure(Throwable exception) { + Assert.fail("Should not be called"); + latch.countDown(); + } + }); latch.await(); Assert.assertEquals(callbackResponse.get(), future.get()); @@ -328,16 +395,22 @@ public void testTimeoutOnGet() { RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); testDataHubServer.getMockServer().reset(); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.once()) - .respond(org.mockserver.model.HttpResponse.response() - .withStatusCode(200) - .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); - - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.once()) + .respond( + org.mockserver.model.HttpResponse.response() + .withStatusCode(200) + .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); try { long startTime = System.currentTimeMillis(); MetadataWriteResponse response = emitter.emit(mcpw, null).get(); @@ -356,20 +429,28 @@ public void testTimeoutOnGetWithTimeout() { RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); testDataHubServer.getMockServer().reset(); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.once()) - .respond(org.mockserver.model.HttpResponse.response() - .withStatusCode(200) - .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); - - 
MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.once()) + .respond( + org.mockserver.model.HttpResponse.response() + .withStatusCode(200) + .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); try { long startTime = System.currentTimeMillis(); MetadataWriteResponse response = - emitter.emit(mcpw, null).get(RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC - 3, TimeUnit.SECONDS); + emitter + .emit(mcpw, null) + .get(RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC - 3, TimeUnit.SECONDS); long duration = (long) ((System.currentTimeMillis() - startTime) / 1000.0); Assert.fail("Should not succeed with duration " + duration); } catch (Exception ioe) { @@ -388,14 +469,16 @@ public void testUserAgentHeader() throws IOException, ExecutionException, Interr properties.load(emitter.getClass().getClassLoader().getResourceAsStream("client.properties")); Assert.assertNotNull(properties.getProperty("clientVersion")); String version = properties.getProperty("clientVersion"); - testDataHubServer.getMockServer().verify( - request("/config") - .withHeader("User-Agent", "DataHub-RestClient/" + version)); + testDataHubServer + .getMockServer() + .verify(request("/config").withHeader("User-Agent", "DataHub-RestClient/" + version)); } - + @Test - public void testDisableSslVerification() throws IOException, InterruptedException, ExecutionException { - RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().disableSslVerification(true).build()); + public void testDisableSslVerification() + throws IOException, InterruptedException, ExecutionException { + RestEmitter restEmitter = + new RestEmitter(RestEmitterConfig.builder().disableSslVerification(true).build()); final String hostWithSsl = "https://self-signed.badssl.com"; final HttpGet request = new HttpGet(hostWithSsl); @@ -403,10 +486,12 @@ public void testDisableSslVerification() throws IOException, InterruptedExceptio restEmitter.close(); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } - + @Test - public void testSslVerificationException() throws IOException, InterruptedException, ExecutionException { - RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().disableSslVerification(false).build()); + public void testSslVerificationException() + throws IOException, InterruptedException, ExecutionException { + RestEmitter restEmitter = + new RestEmitter(RestEmitterConfig.builder().disableSslVerification(false).build()); final String hostWithSsl = "https://self-signed.badssl.com"; final HttpGet request = new HttpGet(hostWithSsl); try { @@ -418,4 +503,4 @@ public void testSslVerificationException() throws IOException, InterruptedExcept } restEmitter.close(); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java index 70efcd240a0ef..0b2a4500e019d 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java +++ 
b/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java @@ -1,49 +1,53 @@ package datahub.event; +import com.linkedin.dataset.DatasetProperties; +import com.linkedin.mxe.MetadataChangeProposal; import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; - import org.junit.Test; import org.testng.Assert; -import com.linkedin.dataset.DatasetProperties; -import com.linkedin.mxe.MetadataChangeProposal; - - public class EventFormatterTest { @Test public void testPartialMCPW() throws URISyntaxException, IOException, EventValidationException { - MetadataChangeProposalWrapper metadataChangeProposalWrapper = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn("urn:li:foo") + MetadataChangeProposalWrapper metadataChangeProposalWrapper = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn("urn:li:foo") .upsert() - .aspect(new DatasetProperties().setDescription("A test dataset")) - .build(); + .aspect(new DatasetProperties().setDescription("A test dataset")) + .build(); EventFormatter eventFormatter = new EventFormatter(); MetadataChangeProposal mcp = eventFormatter.convert(metadataChangeProposalWrapper); Assert.assertEquals(mcp.getAspect().getContentType(), "application/json"); String content = mcp.getAspect().getValue().asString(StandardCharsets.UTF_8); Assert.assertEquals(content, "{\"description\":\"A test dataset\"}"); } - + @Test public void testUtf8Encoding() throws URISyntaxException, IOException { - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-project.my-dataset.user-table,PROD)") - .upsert() - .aspect(new DatasetProperties().setDescription("This is the canonical User profile dataset œ∑´´†¥¨ˆˆπ“‘åß∂ƒ©˙∆˚¬…æΩ≈ç√∫˜˜≤≥ç")) - .build(); + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-project.my-dataset.user-table,PROD)") + .upsert() + .aspect( + new DatasetProperties() + .setDescription( + "This is the canonical User profile dataset œ∑´´†¥¨ˆˆπ“‘åß∂ƒ©˙∆˚¬…æΩ≈ç√∫˜˜≤≥ç")) + .build(); EventFormatter eventFormatter = new EventFormatter(); MetadataChangeProposal mcp = eventFormatter.convert(mcpw); Assert.assertEquals(mcp.getAspect().getContentType(), "application/json"); String content = mcp.getAspect().getValue().asString(StandardCharsets.UTF_8); - String expectedContent = "{\"description\":\"This is the canonical User profile dataset \\u0153\\u2211\\u00B4\\u00B4" - + "\\u2020\\u00A5\\u00A8\\u02C6\\u02C6\\u03C0\\u201C\\u2018\\u00E5\\u00DF\\u2202\\u0192\\u00A9\\u02D9\\u2206" - + "\\u02DA\\u00AC\\u2026\\u00E6\\u03A9\\u2248\\u00E7\\u221A\\u222B\\u02DC\\u02DC\\u2264\\u2265\\u00E7\"}"; + String expectedContent = + "{\"description\":\"This is the canonical User profile dataset \\u0153\\u2211\\u00B4\\u00B4" + + "\\u2020\\u00A5\\u00A8\\u02C6\\u02C6\\u03C0\\u201C\\u2018\\u00E5\\u00DF\\u2202\\u0192\\u00A9\\u02D9\\u2206" + + "\\u02DA\\u00AC\\u2026\\u00E6\\u03A9\\u2248\\u00E7\\u221A\\u222B\\u02DC\\u02DC\\u2264\\u2265\\u00E7\"}"; Assert.assertEquals(content, expectedContent); } } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java index 3d371954c0f37..3a333abc5cb10 
100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java @@ -3,75 +3,74 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.dataset.DatasetProperties; - import java.net.URISyntaxException; - import org.junit.Assert; import org.junit.Test; - public class MetadataChangeProposalWrapperTest { - /** - * We should throw errors on validation as exceptions - */ - @Test - public void testBuilderExceptions() { - try { - MetadataChangeProposalWrapper.create(b -> b - .entityType("dataset") - .entityUrn("foo") // bad urn should throw exception - ); - Assert.fail("Should throw an exception"); - } catch (EventValidationException e) { - Assert.assertTrue("Underlying exception should be a URI syntax issue", e.getCause() instanceof URISyntaxException); - } catch (Exception e) { - Assert.fail("Should not throw any other exception"); - } - } - - @Test - public void testAspectInferenceSuccess() throws EventValidationException { - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create(b -> b - .entityType("dataset") - .entityUrn("urn:li:dataset:(foo,bar,PROD)") - .upsert() - .aspect(new DatasetProperties())); - Assert.assertEquals(mcpw.getAspectName(), "datasetProperties"); - } - - /** - * We throw exceptions on using the regular builder pattern - * - * @throws URISyntaxException - * @throws EventValidationException - */ - @Test(expected = EventValidationException.class) - public void testAspectInferenceFailure() throws URISyntaxException, EventValidationException { - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn("urn:li:dataset:(foo,bar,PROD)") - .upsert() - .aspect(new AuditStamp().setActor(Urn.createFromString("urn:li:corpUser:jdoe"))) - .build(); + /** We should throw errors on validation as exceptions */ + @Test + public void testBuilderExceptions() { + try { + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataset").entityUrn("foo") // bad urn should throw exception + ); + Assert.fail("Should throw an exception"); + } catch (EventValidationException e) { + Assert.assertTrue( + "Underlying exception should be a URI syntax issue", + e.getCause() instanceof URISyntaxException); + } catch (Exception e) { + Assert.fail("Should not throw any other exception"); } + } - /** - * We throw exceptions on using the lambda builder pattern - * - * @throws URISyntaxException - * @throws EventValidationException - */ - @Test(expected = EventValidationException.class) - public void testAspectInferenceFailureLambda() throws URISyntaxException, EventValidationException { - Urn actorUrn = Urn.createFromString("urn:li:corpUser:jdoe"); - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create(b -> b - .entityType("dataset") - .entityUrn("urn:li:dataset:(foo,bar,PROD)") - .upsert() - .aspect(new AuditStamp().setActor(actorUrn)) - ); - } + @Test + public void testAspectInferenceSuccess() throws EventValidationException { + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:(foo,bar,PROD)") + .upsert() + .aspect(new DatasetProperties())); + Assert.assertEquals(mcpw.getAspectName(), "datasetProperties"); + } + /** + * We throw exceptions on using the regular builder pattern + * + * @throws URISyntaxException + 
* @throws EventValidationException + */ + @Test(expected = EventValidationException.class) + public void testAspectInferenceFailure() throws URISyntaxException, EventValidationException { + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn("urn:li:dataset:(foo,bar,PROD)") + .upsert() + .aspect(new AuditStamp().setActor(Urn.createFromString("urn:li:corpUser:jdoe"))) + .build(); + } + /** + * We throw exceptions on using the lambda builder pattern + * + * @throws URISyntaxException + * @throws EventValidationException + */ + @Test(expected = EventValidationException.class) + public void testAspectInferenceFailureLambda() + throws URISyntaxException, EventValidationException { + Urn actorUrn = Urn.createFromString("urn:li:corpUser:jdoe"); + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:(foo,bar,PROD)") + .upsert() + .aspect(new AuditStamp().setActor(actorUrn))); + } } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java index e7cdee3f369e1..44e60a4bde783 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java @@ -1,11 +1,10 @@ package datahub.server; -import org.mockserver.integration.ClientAndServer; -import org.mockserver.matchers.Times; - import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.*; +import org.mockserver.integration.ClientAndServer; +import org.mockserver.matchers.Times; public class TestDataHubServer { @@ -26,17 +25,12 @@ public TestDataHubServer() { public void init() { mockServer - .when( - request() - .withMethod("GET") - .withPath("/config") - .withHeader("Content-type", "application/json"), - Times.unlimited() - ).respond( - org.mockserver.model.HttpResponse.response() - .withBody("{\"noCode\": true }") - ); + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); } - - } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java b/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java index e6f93eb1a4f0c..12bbb9e59ab95 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java @@ -32,7 +32,6 @@ import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.Descriptors.FieldDescriptor; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -89,300 +88,296 @@ * @author kenton@google.com Kenton Varda */ public class ExtensionRegistry extends ExtensionRegistryLite { - /** Construct a new, empty instance. */ - public static ExtensionRegistry newInstance() { - return new ExtensionRegistry(); - } - - /** Get the unmodifiable singleton empty instance. */ - public static ExtensionRegistry getEmptyRegistry() { - return EMPTY_REGISTRY; - } - - - /** Returns an unmodifiable view of the registry. 
*/ - @Override - public ExtensionRegistry getUnmodifiable() { - return new ExtensionRegistry(this); - } - - /** A (Descriptor, Message) pair, returned by lookup methods. */ - public static final class ExtensionInfo { - /** The extension's descriptor. */ - public final FieldDescriptor descriptor; - - /** - * A default instance of the extension's type, if it has a message type. Otherwise, {@code - * null}. - */ - public final Message defaultInstance; - - private ExtensionInfo(final FieldDescriptor descriptor) { - this.descriptor = descriptor; - defaultInstance = null; - } - - private ExtensionInfo(final FieldDescriptor descriptor, final Message defaultInstance) { - this.descriptor = descriptor; - this.defaultInstance = defaultInstance; - } - } - - /** Deprecated. Use {@link #findImmutableExtensionByName(String)} instead. */ - @Deprecated - public ExtensionInfo findExtensionByName(final String fullName) { - return findImmutableExtensionByName(fullName); - } + /** Construct a new, empty instance. */ + public static ExtensionRegistry newInstance() { + return new ExtensionRegistry(); + } + + /** Get the unmodifiable singleton empty instance. */ + public static ExtensionRegistry getEmptyRegistry() { + return EMPTY_REGISTRY; + } + + /** Returns an unmodifiable view of the registry. */ + @Override + public ExtensionRegistry getUnmodifiable() { + return new ExtensionRegistry(this); + } + + /** A (Descriptor, Message) pair, returned by lookup methods. */ + public static final class ExtensionInfo { + /** The extension's descriptor. */ + public final FieldDescriptor descriptor; /** - * Find an extension for immutable APIs by fully-qualified field name, in the proto namespace. - * i.e. {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. - * - * @return Information about the extension if found, or {@code null} otherwise. + * A default instance of the extension's type, if it has a message type. Otherwise, {@code + * null}. */ - public ExtensionInfo findImmutableExtensionByName(final String fullName) { - return immutableExtensionsByName.get(fullName); - } + public final Message defaultInstance; - /** - * Find an extension for mutable APIs by fully-qualified field name, in the proto namespace. i.e. - * {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. - * - * @return Information about the extension if found, or {@code null} otherwise. - */ - public ExtensionInfo findMutableExtensionByName(final String fullName) { - return mutableExtensionsByName.get(fullName); + private ExtensionInfo(final FieldDescriptor descriptor) { + this.descriptor = descriptor; + defaultInstance = null; } - /** Deprecated. Use {@link #findImmutableExtensionByNumber( Descriptors.Descriptor, int)} */ - @Deprecated - public ExtensionInfo findExtensionByNumber( - final Descriptor containingType, final int fieldNumber) { - return findImmutableExtensionByNumber(containingType, fieldNumber); + private ExtensionInfo(final FieldDescriptor descriptor, final Message defaultInstance) { + this.descriptor = descriptor; + this.defaultInstance = defaultInstance; } - - /** - * Find an extension by containing type and field number for immutable APIs. - * - * @return Information about the extension if found, or {@code null} otherwise. - */ - public ExtensionInfo findImmutableExtensionByNumber( - final Descriptor containingType, final int fieldNumber) { - return immutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + } + + /** Deprecated. 
Use {@link #findImmutableExtensionByName(String)} instead. */ + @Deprecated + public ExtensionInfo findExtensionByName(final String fullName) { + return findImmutableExtensionByName(fullName); + } + + /** + * Find an extension for immutable APIs by fully-qualified field name, in the proto namespace. + * i.e. {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findImmutableExtensionByName(final String fullName) { + return immutableExtensionsByName.get(fullName); + } + + /** + * Find an extension for mutable APIs by fully-qualified field name, in the proto namespace. i.e. + * {@code result.descriptor.fullName()} will match {@code fullName} if a match is found. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findMutableExtensionByName(final String fullName) { + return mutableExtensionsByName.get(fullName); + } + + /** Deprecated. Use {@link #findImmutableExtensionByNumber( Descriptors.Descriptor, int)} */ + @Deprecated + public ExtensionInfo findExtensionByNumber( + final Descriptor containingType, final int fieldNumber) { + return findImmutableExtensionByNumber(containingType, fieldNumber); + } + + /** + * Find an extension by containing type and field number for immutable APIs. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findImmutableExtensionByNumber( + final Descriptor containingType, final int fieldNumber) { + return immutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + } + + /** + * Find an extension by containing type and field number for mutable APIs. + * + * @return Information about the extension if found, or {@code null} otherwise. + */ + public ExtensionInfo findMutableExtensionByNumber( + final Descriptor containingType, final int fieldNumber) { + return mutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + } + + /** + * Find all extensions for mutable APIs by fully-qualified name of extended class. Note that this + * method is more computationally expensive than getting a single extension by name or number. + * + * @return Information about the extensions found, or {@code null} if there are none. + */ + public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) { + HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); + for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) { + if (pair.descriptor.getFullName().equals(fullName)) { + extensions.add(mutableExtensionsByNumber.get(pair)); + } } - - /** - * Find an extension by containing type and field number for mutable APIs. - * - * @return Information about the extension if found, or {@code null} otherwise. - */ - public ExtensionInfo findMutableExtensionByNumber( - final Descriptor containingType, final int fieldNumber) { - return mutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber)); + return extensions; + } + + /** + * Find all extensions for immutable APIs by fully-qualified name of extended class. Note that + * this method is more computationally expensive than getting a single extension by name or + * number. + * + * @return Information about the extensions found, or {@code null} if there are none. 
+ */ + public Set<ExtensionInfo> getAllImmutableExtensionsByExtendedType(final String fullName) { + HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); + for (DescriptorIntPair pair : immutableExtensionsByNumber.keySet()) { + if (pair.descriptor.getFullName().equals(fullName)) { + extensions.add(immutableExtensionsByNumber.get(pair)); + } } - - /** - * Find all extensions for mutable APIs by fully-qualified name of extended class. Note that this - * method is more computationally expensive than getting a single extension by name or number. - * - * @return Information about the extensions found, or {@code null} if there are none. - */ - public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) { - HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); - for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) { - if (pair.descriptor.getFullName().equals(fullName)) { - extensions.add(mutableExtensionsByNumber.get(pair)); - } - } - return extensions; + return extensions; + } + + /** Add an extension from a generated file to the registry. */ + public void add(final Extension<?, ?> extension) { + if (extension.getExtensionType() != Extension.ExtensionType.IMMUTABLE + && extension.getExtensionType() != Extension.ExtensionType.MUTABLE) { + // do not support other extension types. ignore + return; } - - /** - * Find all extensions for immutable APIs by fully-qualified name of extended class. Note that - * this method is more computationally expensive than getting a single extension by name or - * number. - * - * @return Information about the extensions found, or {@code null} if there are none. - */ - public Set<ExtensionInfo> getAllImmutableExtensionsByExtendedType(final String fullName) { - HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>(); - for (DescriptorIntPair pair : immutableExtensionsByNumber.keySet()) { - if (pair.descriptor.getFullName().equals(fullName)) { - extensions.add(immutableExtensionsByNumber.get(pair)); - } - } - return extensions; + add(newExtensionInfo(extension), extension.getExtensionType()); + } + + /** Add an extension from a generated file to the registry. */ + public void add(final GeneratedMessage.GeneratedExtension<?, ?> extension) { + add((Extension<?, ?>) extension); + } + + static ExtensionInfo newExtensionInfo(final Extension<?, ?> extension) { + if (extension.getDescriptor().getJavaType() == FieldDescriptor.JavaType.MESSAGE) { + if (extension.getMessageDefaultInstance() == null) { + throw new IllegalStateException( + "Registered message-type extension had null default instance: " + + extension.getDescriptor().getFullName()); + } + return new ExtensionInfo( + extension.getDescriptor(), (Message) extension.getMessageDefaultInstance()); + } else { + return new ExtensionInfo(extension.getDescriptor(), null); } - - /** Add an extension from a generated file to the registry. */ - public void add(final Extension<?, ?> extension) { - if (extension.getExtensionType() != Extension.ExtensionType.IMMUTABLE - && extension.getExtensionType() != Extension.ExtensionType.MUTABLE) { - // do not support other extension types. ignore - return; - } - add(newExtensionInfo(extension), extension.getExtensionType()); + } + + /** Add a non-message-type extension to the registry by descriptor. 
*/ + public void add(final FieldDescriptor type) { + if (type.getJavaType() == FieldDescriptor.JavaType.MESSAGE) { + throw new IllegalArgumentException( + "ExtensionRegistry.add() must be provided a default instance when " + + "adding an embedded message extension."); } - - /** Add an extension from a generated file to the registry. */ - public void add(final GeneratedMessage.GeneratedExtension<?, ?> extension) { - add((Extension<?, ?>) extension); + ExtensionInfo info = new ExtensionInfo(type, null); + add(info, Extension.ExtensionType.IMMUTABLE); + add(info, Extension.ExtensionType.MUTABLE); + } + + /** Add a message-type extension to the registry by descriptor. */ + public void add(final FieldDescriptor type, final Message defaultInstance) { + if (type.getJavaType() != FieldDescriptor.JavaType.MESSAGE) { + throw new IllegalArgumentException( + "ExtensionRegistry.add() provided a default instance for a non-message extension."); } - - static ExtensionInfo newExtensionInfo(final Extension<?, ?> extension) { - if (extension.getDescriptor().getJavaType() == FieldDescriptor.JavaType.MESSAGE) { - if (extension.getMessageDefaultInstance() == null) { - throw new IllegalStateException( - "Registered message-type extension had null default instance: " - + extension.getDescriptor().getFullName()); - } - return new ExtensionInfo( - extension.getDescriptor(), (Message) extension.getMessageDefaultInstance()); - } else { - return new ExtensionInfo(extension.getDescriptor(), null); - } + add(new ExtensionInfo(type, defaultInstance), Extension.ExtensionType.IMMUTABLE); + } + + // ================================================================= + // Private stuff. + + private ExtensionRegistry() { + this.immutableExtensionsByName = new HashMap<String, ExtensionInfo>(); + this.mutableExtensionsByName = new HashMap<String, ExtensionInfo>(); + this.immutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); + this.mutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); + } + + private ExtensionRegistry(ExtensionRegistry other) { + super(other); + this.immutableExtensionsByName = Collections.unmodifiableMap(other.immutableExtensionsByName); + this.mutableExtensionsByName = Collections.unmodifiableMap(other.mutableExtensionsByName); + this.immutableExtensionsByNumber = + Collections.unmodifiableMap(other.immutableExtensionsByNumber); + this.mutableExtensionsByNumber = Collections.unmodifiableMap(other.mutableExtensionsByNumber); + } + + private final Map<String, ExtensionInfo> immutableExtensionsByName; + private final Map<String, ExtensionInfo> mutableExtensionsByName; + private final Map<DescriptorIntPair, ExtensionInfo> immutableExtensionsByNumber; + private final Map<DescriptorIntPair, ExtensionInfo> mutableExtensionsByNumber; + + ExtensionRegistry(boolean empty) { + super(EMPTY_REGISTRY_LITE); + this.immutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); + this.mutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); + this.immutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); + this.mutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); + } + + static final ExtensionRegistry EMPTY_REGISTRY = new ExtensionRegistry(true); + + private void add(final ExtensionInfo extension, final Extension.ExtensionType extensionType) { + if (!extension.descriptor.isExtension()) { + throw new IllegalArgumentException( + "ExtensionRegistry.add() was given a FieldDescriptor for a 
regular " + + "(non-extension) field."); } - /** Add a non-message-type extension to the registry by descriptor. */ - public void add(final FieldDescriptor type) { - if (type.getJavaType() == FieldDescriptor.JavaType.MESSAGE) { - throw new IllegalArgumentException( - "ExtensionRegistry.add() must be provided a default instance when " - + "adding an embedded message extension."); - } - ExtensionInfo info = new ExtensionInfo(type, null); - add(info, Extension.ExtensionType.IMMUTABLE); - add(info, Extension.ExtensionType.MUTABLE); + Map<String, ExtensionInfo> extensionsByName; + Map<DescriptorIntPair, ExtensionInfo> extensionsByNumber; + switch (extensionType) { + case IMMUTABLE: + extensionsByName = immutableExtensionsByName; + extensionsByNumber = immutableExtensionsByNumber; + break; + case MUTABLE: + extensionsByName = mutableExtensionsByName; + extensionsByNumber = mutableExtensionsByNumber; + break; + default: + // Ignore the unknown supported type. + return; } - /** Add a message-type extension to the registry by descriptor. */ - public void add(final FieldDescriptor type, final Message defaultInstance) { - if (type.getJavaType() != FieldDescriptor.JavaType.MESSAGE) { - throw new IllegalArgumentException( - "ExtensionRegistry.add() provided a default instance for a non-message extension."); - } - add(new ExtensionInfo(type, defaultInstance), Extension.ExtensionType.IMMUTABLE); + extensionsByName.put(extension.descriptor.getFullName(), extension); + extensionsByNumber.put( + new DescriptorIntPair( + extension.descriptor.getContainingType(), extension.descriptor.getNumber()), + extension); + + final FieldDescriptor field = extension.descriptor; + if (field.getContainingType().getOptions().getMessageSetWireFormat() + && field.getType() == FieldDescriptor.Type.MESSAGE + && field.isOptional() + && field.getExtensionScope() == field.getMessageType()) { + // This is an extension of a MessageSet type defined within the extension + // type's own scope. For backwards-compatibility, allow it to be looked + // up by type name. + extensionsByName.put(field.getMessageType().getFullName(), extension); } - - // ================================================================= - // Private stuff. - - private ExtensionRegistry() { - this.immutableExtensionsByName = new HashMap<String, ExtensionInfo>(); - this.mutableExtensionsByName = new HashMap<String, ExtensionInfo>(); - this.immutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); - this.mutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>(); + } + + /** + * DataHub modification of hashcode/equals based on full name. The upstream project uses the + * descriptor and in our use of the registry results in objects that are practically identical + * except for the `jsonName` field. This is a difference generated by internal components and is + * not under our control. + * + * <p>A (GenericDescriptor, int) pair, used as a map key. 
+ */ + private static final class DescriptorIntPair { + private final String fullName; + private final Descriptor descriptor; + private final int number; + + DescriptorIntPair(final Descriptor descriptor, final int number) { + this.descriptor = descriptor; + this.fullName = descriptor.getFullName(); + this.number = number; } - private ExtensionRegistry(ExtensionRegistry other) { - super(other); - this.immutableExtensionsByName = Collections.unmodifiableMap(other.immutableExtensionsByName); - this.mutableExtensionsByName = Collections.unmodifiableMap(other.mutableExtensionsByName); - this.immutableExtensionsByNumber = - Collections.unmodifiableMap(other.immutableExtensionsByNumber); - this.mutableExtensionsByNumber = Collections.unmodifiableMap(other.mutableExtensionsByNumber); - } - - private final Map<String, ExtensionInfo> immutableExtensionsByName; - private final Map<String, ExtensionInfo> mutableExtensionsByName; - private final Map<DescriptorIntPair, ExtensionInfo> immutableExtensionsByNumber; - private final Map<DescriptorIntPair, ExtensionInfo> mutableExtensionsByNumber; - - ExtensionRegistry(boolean empty) { - super(EMPTY_REGISTRY_LITE); - this.immutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); - this.mutableExtensionsByName = Collections.<String, ExtensionInfo>emptyMap(); - this.immutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); - this.mutableExtensionsByNumber = Collections.<DescriptorIntPair, ExtensionInfo>emptyMap(); - } - - static final ExtensionRegistry EMPTY_REGISTRY = new ExtensionRegistry(true); - - private void add(final ExtensionInfo extension, final Extension.ExtensionType extensionType) { - if (!extension.descriptor.isExtension()) { - throw new IllegalArgumentException( - "ExtensionRegistry.add() was given a FieldDescriptor for a regular " - + "(non-extension) field."); - } - - Map<String, ExtensionInfo> extensionsByName; - Map<DescriptorIntPair, ExtensionInfo> extensionsByNumber; - switch (extensionType) { - case IMMUTABLE: - extensionsByName = immutableExtensionsByName; - extensionsByNumber = immutableExtensionsByNumber; - break; - case MUTABLE: - extensionsByName = mutableExtensionsByName; - extensionsByNumber = mutableExtensionsByNumber; - break; - default: - // Ignore the unknown supported type. - return; - } - - extensionsByName.put(extension.descriptor.getFullName(), extension); - extensionsByNumber.put( - new DescriptorIntPair( - extension.descriptor.getContainingType(), extension.descriptor.getNumber()), - extension); - - final FieldDescriptor field = extension.descriptor; - if (field.getContainingType().getOptions().getMessageSetWireFormat() - && field.getType() == FieldDescriptor.Type.MESSAGE - && field.isOptional() - && field.getExtensionScope() == field.getMessageType()) { - // This is an extension of a MessageSet type defined within the extension - // type's own scope. For backwards-compatibility, allow it to be looked - // up by type name. - extensionsByName.put(field.getMessageType().getFullName(), extension); - } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + DescriptorIntPair that = (DescriptorIntPair) o; + + if (number != that.number) { + return false; + } + return fullName.equals(that.fullName); } - /** - * - * DataHub modification of hashcode/equals based on full name. 
The upstream - * project uses the descriptor and in our use of the registry results - * in objects that are practically identical except for the `jsonName` field. - * This is a difference generated by internal components and is not under - * our control. - * - * A (GenericDescriptor, int) pair, used as a map key. - * - * */ - private static final class DescriptorIntPair { - private final String fullName; - private final Descriptor descriptor; - private final int number; - - DescriptorIntPair(final Descriptor descriptor, final int number) { - this.descriptor = descriptor; - this.fullName = descriptor.getFullName(); - this.number = number; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - DescriptorIntPair that = (DescriptorIntPair) o; - - if (number != that.number) { - return false; - } - return fullName.equals(that.fullName); - } - - @Override - public int hashCode() { - int result = fullName.hashCode(); - result = 31 * result + number; - return result; - } + @Override + public int hashCode() { + int result = fullName.hashCode(); + result = 31 * result + number; + return result; } + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java index c0a6a2eaa410c..e4030e12574f0 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java @@ -25,51 +25,52 @@ public DirectoryWalker(String directory, String[] excludePatterns) { this.excludeMatchers.add(FileSystems.getDefault().getPathMatcher("glob:" + excludePattern)); } } - } public Stream<Path> walkFiles() throws IOException { final Path baseDir = this.rootDirectory; final ArrayList<Path> files = new ArrayList<>(); - Files.walkFileTree(this.rootDirectory, new FileVisitor<Path>() { - @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { - return FileVisitResult.CONTINUE; - } + Files.walkFileTree( + this.rootDirectory, + new FileVisitor<Path>() { + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) + throws IOException { + return FileVisitResult.CONTINUE; + } - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - boolean excluded = false; - Path relativePath = baseDir.relativize(file); - if (!includeMatcher.matches(relativePath)) { - excluded = true; - } else { - for (PathMatcher matcher : excludeMatchers) { - if (matcher.matches(relativePath)) { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) + throws IOException { + boolean excluded = false; + Path relativePath = baseDir.relativize(file); + if (!includeMatcher.matches(relativePath)) { excluded = true; + } else { + for (PathMatcher matcher : excludeMatchers) { + if (matcher.matches(relativePath)) { + excluded = true; + } + } } - } - } - if (!excluded) { - files.add(file); - } - return FileVisitResult.CONTINUE; - } + if (!excluded) { + files.add(file); + } + return FileVisitResult.CONTINUE; + } - @Override - public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } + @Override + public FileVisitResult visitFileFailed(Path file, IOException 
exc) throws IOException { + return FileVisitResult.CONTINUE; + } - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } - }); + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + return FileVisitResult.CONTINUE; + } + }); return files.stream(); } - - } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java index dc49457e3e6e1..dcc95222fabf2 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java @@ -1,14 +1,13 @@ package datahub.protobuf; -import com.linkedin.common.FabricType; import com.linkedin.common.AuditStamp; +import com.linkedin.common.FabricType; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.DataPlatformUrn; import datahub.client.Emitter; import datahub.client.file.FileEmitter; import datahub.client.file.FileEmitterConfig; import datahub.client.rest.RestEmitter; - import java.io.FileInputStream; import java.io.InputStream; import java.nio.file.Files; @@ -25,350 +24,396 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; - -/** - * Rudimentary application - */ +/** Rudimentary application */ public class Proto2DataHub { - private static final Option OPTION_DATAHUB_PLATFORM = - Option.builder() - .longOpt("platform") - .hasArg() - .desc("[Optional] The data platform to produce schemas for. e.g. kafka, snowflake, etc. (defaults to kafka)") - .build(); - - private static final Option OPTION_DATAHUB_API = Option.builder() - .longOpt("datahub_api") - .hasArg() - .desc("[Optional] The API endpoint for DataHub GMS. (defaults to https://localhost:8080)") - .build(); - - private static final Option OPTION_DATAHUB_TOKEN = Option.builder() - .longOpt("datahub_token") - .hasArg() - .desc("[Optional] The authentication token for DataHub API access. (defaults to empty)") - .build(); - - private static final Option OPTION_DESCRIPTOR = Option.builder() - .longOpt("descriptor") - .hasArg() - .desc("[Required] The generated protobuf descriptor file. " - + "Typically a single .dsc file for the repo or a .protoc file (1:1 with each src file)") - .required() - .build(); - - private static final Option OPTION_FILE = Option.builder() - .longOpt("file") - .hasArg() - .desc("[Optional if using --directory] The protobuf source file. Typically a .proto file.") - .build(); - - private static final Option OPTION_DIR = Option.builder() - .longOpt("directory") - .hasArg() - .desc("[Optional if using --file] The root directory containing protobuf source files.") - .build(); - - private static final Option OPTION_EXCLUDE_PATTERN = Option.builder() - .longOpt("exclude") - .valueSeparator(',') - .hasArgs() - .desc("[Optional] Exclude patterns to avoid processing all source files, separated by ,. Typically used with --directory option. " - + "Follows glob patterns: e.g. --exclude \"build/**,generated/**\" will exclude all files in the build " - + "and generated directories under the rootDirectory given by the --directory option") - .build(); - - private static final Option OPTION_DATAHUB_USER = Option.builder() - .longOpt("datahub_user") - .hasArg() - .desc("[Optional] The datahub user to attribute this ingestion to. 
(defaults to ..)") - .build(); - - private static final Option OPTION_ENV = Option.builder() - .longOpt("env") - .hasArg() - .desc("[Optional] The environment to attach all entities to. Typically, DEV, PROD etc. (defaults to DEV)") - .build(); - - private static final Option OPTION_GITHUB_ORG = Option.builder() - .longOpt("github_org") - .hasArg() - .desc("[Optional] The GitHub organization that this schema repository belongs to. " - + "We will translate comments in your protoc files like @datahub-project/data-team " - + "to GitHub team urls like: https://github.com/orgs/datahub-project/teams/data-team") - .build(); - - private static final Option OPTION_SLACK_ID = Option.builder() - .longOpt("slack_id") - .hasArg() - .desc("[Optional] The Slack team id if your protobuf files contain comments with references to channel names. " - + "We will translate comments like #data-eng in your protobuf file to slack urls like: " - + "https://slack.com/app_redirect?channel=data-eng&team=T1234 following the " - + "documentation at (https://api.slack.com/reference/deep-linking#deep-linking-into-your-slack-app__opening-a-channel-by-name-or-id) " - + "The easiest way to find your Slack team id is to open your workspace in your browser. It should look " - + "something like: https://app.slack.com/client/TUMKD5EGJ/... In this case, the team-id is TUMKD5EGJ.") - .build(); - - private static final Option OPTION_TRANSPORT = Option.builder() - .longOpt("transport") - .hasArg() - .desc("[Optional] What transport to use to communicate with DataHub. Options are: rest (default), kafka and file.") - .build(); - - private static final Option OPTION_FILENAME = Option.builder() - .longOpt("filename") - .hasArg() - .desc("[Required if using transport file] Filename to write output to.") - .build(); - - private static final Option OPTION_HELP = Option.builder() - .longOpt("help") - .desc("Print this help message") - .build(); - - private static final Option OPTION_SUBTYPE = Option.builder() - .longOpt("subtype") - .desc("[Optional] A custom subtype to attach to all entities produced. e.g. event, schema, topic etc." - + "(Default is schema)") - .build(); - - enum TransportOptions { - REST, - KAFKA, - FILE + private static final Option OPTION_DATAHUB_PLATFORM = + Option.builder() + .longOpt("platform") + .hasArg() + .desc( + "[Optional] The data platform to produce schemas for. e.g. kafka, snowflake, etc. (defaults to kafka)") + .build(); + + private static final Option OPTION_DATAHUB_API = + Option.builder() + .longOpt("datahub_api") + .hasArg() + .desc("[Optional] The API endpoint for DataHub GMS. (defaults to https://localhost:8080)") + .build(); + + private static final Option OPTION_DATAHUB_TOKEN = + Option.builder() + .longOpt("datahub_token") + .hasArg() + .desc("[Optional] The authentication token for DataHub API access. (defaults to empty)") + .build(); + + private static final Option OPTION_DESCRIPTOR = + Option.builder() + .longOpt("descriptor") + .hasArg() + .desc( + "[Required] The generated protobuf descriptor file. " + + "Typically a single .dsc file for the repo or a .protoc file (1:1 with each src file)") + .required() + .build(); + + private static final Option OPTION_FILE = + Option.builder() + .longOpt("file") + .hasArg() + .desc( + "[Optional if using --directory] The protobuf source file. 
Typically a .proto file.") + .build(); + + private static final Option OPTION_DIR = + Option.builder() + .longOpt("directory") + .hasArg() + .desc("[Optional if using --file] The root directory containing protobuf source files.") + .build(); + + private static final Option OPTION_EXCLUDE_PATTERN = + Option.builder() + .longOpt("exclude") + .valueSeparator(',') + .hasArgs() + .desc( + "[Optional] Exclude patterns to avoid processing all source files, separated by ,. Typically used with --directory option. " + + "Follows glob patterns: e.g. --exclude \"build/**,generated/**\" will exclude all files in the build " + + "and generated directories under the rootDirectory given by the --directory option") + .build(); + + private static final Option OPTION_DATAHUB_USER = + Option.builder() + .longOpt("datahub_user") + .hasArg() + .desc("[Optional] The datahub user to attribute this ingestion to. (defaults to ..)") + .build(); + + private static final Option OPTION_ENV = + Option.builder() + .longOpt("env") + .hasArg() + .desc( + "[Optional] The environment to attach all entities to. Typically, DEV, PROD etc. (defaults to DEV)") + .build(); + + private static final Option OPTION_GITHUB_ORG = + Option.builder() + .longOpt("github_org") + .hasArg() + .desc( + "[Optional] The GitHub organization that this schema repository belongs to. " + + "We will translate comments in your protoc files like @datahub-project/data-team " + + "to GitHub team urls like: https://github.com/orgs/datahub-project/teams/data-team") + .build(); + + private static final Option OPTION_SLACK_ID = + Option.builder() + .longOpt("slack_id") + .hasArg() + .desc( + "[Optional] The Slack team id if your protobuf files contain comments with references to channel names. " + + "We will translate comments like #data-eng in your protobuf file to slack urls like: " + + "https://slack.com/app_redirect?channel=data-eng&team=T1234 following the " + + "documentation at (https://api.slack.com/reference/deep-linking#deep-linking-into-your-slack-app__opening-a-channel-by-name-or-id) " + + "The easiest way to find your Slack team id is to open your workspace in your browser. It should look " + + "something like: https://app.slack.com/client/TUMKD5EGJ/... In this case, the team-id is TUMKD5EGJ.") + .build(); + + private static final Option OPTION_TRANSPORT = + Option.builder() + .longOpt("transport") + .hasArg() + .desc( + "[Optional] What transport to use to communicate with DataHub. Options are: rest (default), kafka and file.") + .build(); + + private static final Option OPTION_FILENAME = + Option.builder() + .longOpt("filename") + .hasArg() + .desc("[Required if using transport file] Filename to write output to.") + .build(); + + private static final Option OPTION_HELP = + Option.builder().longOpt("help").desc("Print this help message").build(); + + private static final Option OPTION_SUBTYPE = + Option.builder() + .longOpt("subtype") + .desc( + "[Optional] A custom subtype to attach to all entities produced. e.g. event, schema, topic etc." 
+ + "(Default is schema)") + .build(); + + enum TransportOptions { + REST, + KAFKA, + FILE + } + + static class AppConfig { + + private final String datahubUser; + private final FabricType fabricType; + private final String datahubAPI; + private final String datahubToken; + private final String githubOrg; + private final String slackId; + private final String dataPlatform; + private final String protoc; + private final String inputFile; + private final String inputDir; + private final TransportOptions transport; + private final String filename; + private final String subType; + private final String[] excludePatterns; + + AppConfig(CommandLine cli) { + Map<String, String> env = System.getenv(); + datahubAPI = + cli.getOptionValue( + OPTION_DATAHUB_API, env.getOrDefault("DATAHUB_API", "http://localhost:8080")); + datahubToken = + cli.getOptionValue(OPTION_DATAHUB_TOKEN, env.getOrDefault("DATAHUB_TOKEN", "")); + datahubUser = + cli.getOptionValue(OPTION_DATAHUB_USER, env.getOrDefault("DATAHUB_USER", "datahub")); + fabricType = + FabricType.valueOf( + cli.getOptionValue(OPTION_ENV, env.getOrDefault("DATAHUB_ENV", "DEV")) + .toUpperCase(Locale.ROOT)); + githubOrg = + cli.getOptionValue(OPTION_GITHUB_ORG, env.getOrDefault("DATAHUB_GITHUBORG", null)); + slackId = cli.getOptionValue(OPTION_SLACK_ID, env.getOrDefault("DATAHUB_SLACKID", null)); + dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT); + protoc = cli.getOptionValue(OPTION_DESCRIPTOR); + inputFile = cli.getOptionValue(OPTION_FILE, null); + transport = + TransportOptions.valueOf( + cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT)); + filename = cli.getOptionValue(OPTION_FILENAME, null); + subType = cli.getOptionValue(OPTION_SUBTYPE, "schema").toLowerCase(Locale.ROOT); + inputDir = cli.getOptionValue(OPTION_DIR, null); + excludePatterns = cli.getOptionValues(OPTION_EXCLUDE_PATTERN); } - static class AppConfig { - - private final String datahubUser; - private final FabricType fabricType; - private final String datahubAPI; - private final String datahubToken; - private final String githubOrg; - private final String slackId; - private final String dataPlatform; - private final String protoc; - private final String inputFile; - private final String inputDir; - private final TransportOptions transport; - private final String filename; - private final String subType; - private final String[] excludePatterns; - - - AppConfig(CommandLine cli) { - Map<String, String> env = System.getenv(); - datahubAPI = cli.getOptionValue(OPTION_DATAHUB_API, env.getOrDefault("DATAHUB_API", "http://localhost:8080")); - datahubToken = cli.getOptionValue(OPTION_DATAHUB_TOKEN, env.getOrDefault("DATAHUB_TOKEN", "")); - datahubUser = cli.getOptionValue(OPTION_DATAHUB_USER, env.getOrDefault("DATAHUB_USER", "datahub")); - fabricType = FabricType.valueOf( - cli.getOptionValue(OPTION_ENV, env.getOrDefault("DATAHUB_ENV", "DEV")).toUpperCase(Locale.ROOT)); - githubOrg = cli.getOptionValue(OPTION_GITHUB_ORG, env.getOrDefault("DATAHUB_GITHUBORG", null)); - slackId = cli.getOptionValue(OPTION_SLACK_ID, env.getOrDefault("DATAHUB_SLACKID", null)); - dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT); - protoc = cli.getOptionValue(OPTION_DESCRIPTOR); - inputFile = cli.getOptionValue(OPTION_FILE, null); - transport = TransportOptions.valueOf(cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT)); - filename = cli.getOptionValue(OPTION_FILENAME, null); - subType = 
cli.getOptionValue(OPTION_SUBTYPE, "schema").toLowerCase(Locale.ROOT); - inputDir = cli.getOptionValue(OPTION_DIR, null); - excludePatterns = cli.getOptionValues(OPTION_EXCLUDE_PATTERN); + private AppConfig validate() throws Exception { + switch (transport) { + case FILE: + if (filename == null) { + throw new Exception("Transport file is being used, but a filename was not provided"); + } + break; + default: + // do nothing + } + if (this.protoc != null) { + Path path = Path.of(this.protoc); + if (!Files.exists(path)) { + throw new Exception( + String.format("Proto-descriptor file %s does not exist", this.protoc)); } - - private AppConfig validate() throws Exception { - switch (transport) { - case FILE: - if (filename == null) { - throw new Exception("Transport file is being used, but a filename was not provided"); - } - break; - default: - // do nothing - } - if (this.protoc != null) { - Path path = Path.of(this.protoc); - if (!Files.exists(path)) { - throw new Exception(String.format("Proto-descriptor file %s does not exist", this.protoc)); - } - if (!Files.isRegularFile(path)) { - throw new Exception(String.format("Proto-descriptor file %s is not a regular file", this.protoc)); - } - } - if ((this.inputFile == null) && (this.inputDir == null)) { - throw new Exception("Must provide either an input file or an input directory to read from"); - } - if (this.slackId != null) { - if (!this.slackId.startsWith("T")) { - throw new Exception(String.format("Slack team id %s should start with the letter T. " - + "The easiest way to find your Slack team id is to open your workspace in your browser. " - + "It should look something like: https://app.slack.com/client/TUMKD5EGJ/... " - + "In this case, the team-id is TUMKD5EGJ.", this.slackId)); - } - } - return this; + if (!Files.isRegularFile(path)) { + throw new Exception( + String.format("Proto-descriptor file %s is not a regular file", this.protoc)); } - + } + if ((this.inputFile == null) && (this.inputDir == null)) { + throw new Exception("Must provide either an input file or an input directory to read from"); + } + if (this.slackId != null) { + if (!this.slackId.startsWith("T")) { + throw new Exception( + String.format( + "Slack team id %s should start with the letter T. " + + "The easiest way to find your Slack team id is to open your workspace in your browser. " + + "It should look something like: https://app.slack.com/client/TUMKD5EGJ/... 
" + + "In this case, the team-id is TUMKD5EGJ.", + this.slackId)); + } + } + return this; } - - private Proto2DataHub() { - + } + + private Proto2DataHub() {} + + public static void main(String[] args) throws Exception { + Options options = new Options(); + + options + .addOption(OPTION_DATAHUB_PLATFORM) + .addOption(OPTION_DATAHUB_API) + .addOption(OPTION_DATAHUB_TOKEN) + .addOption(OPTION_DESCRIPTOR) + .addOption(OPTION_FILE) + .addOption(OPTION_DIR) + .addOption(OPTION_EXCLUDE_PATTERN) + .addOption(OPTION_DATAHUB_USER) + .addOption(OPTION_GITHUB_ORG) + .addOption(OPTION_ENV) + .addOption(OPTION_SLACK_ID) + .addOption(OPTION_TRANSPORT) + .addOption(OPTION_FILENAME) + .addOption(OPTION_SUBTYPE) + .addOption(OPTION_HELP); + + Options firstPassOptions = new Options().addOption(OPTION_HELP); + + // create the parser + CommandLineParser parser = new DefaultParser(); + CommandLine cli = null; + cli = parser.parse(firstPassOptions, args, true); + if (cli.hasOption(OPTION_HELP)) { + printUsageAndExit(options, 0); } - public static void main(String[] args) throws Exception { - Options options = new Options(); - - options.addOption(OPTION_DATAHUB_PLATFORM) - .addOption(OPTION_DATAHUB_API) - .addOption(OPTION_DATAHUB_TOKEN) - .addOption(OPTION_DESCRIPTOR) - .addOption(OPTION_FILE) - .addOption(OPTION_DIR) - .addOption(OPTION_EXCLUDE_PATTERN) - .addOption(OPTION_DATAHUB_USER) - .addOption(OPTION_GITHUB_ORG) - .addOption(OPTION_ENV) - .addOption(OPTION_SLACK_ID) - .addOption(OPTION_TRANSPORT) - .addOption(OPTION_FILENAME) - .addOption(OPTION_SUBTYPE) - .addOption(OPTION_HELP); - - Options firstPassOptions = new Options() - .addOption(OPTION_HELP); - - // create the parser - CommandLineParser parser = new DefaultParser(); - CommandLine cli = null; - cli = parser.parse(firstPassOptions, args, true); - if (cli.hasOption(OPTION_HELP)) { - printUsageAndExit(options, 0); + try { + // parse the real command line arguments + cli = parser.parse(options, args); + } catch (Exception exp) { + // oops, something went wrong + // we try old-style format before giving up + try { + String[] translatedArgs = convertOldStyleArgsIfPossible(args); + if (translatedArgs != null) { + cli = parser.parse(options, translatedArgs); + } else { + System.err.println("Parsing failed. Reason: " + exp.getMessage()); + printUsageAndExit(options, 1); } + } catch (Exception secondExp) { + System.err.println("Parsing failed. Reason: " + secondExp.getMessage()); + printUsageAndExit(options, 1); + } + } - try { - // parse the real command line arguments - cli = parser.parse(options, args); - } catch (Exception exp) { - // oops, something went wrong - // we try old-style format before giving up - try { - String[] translatedArgs = convertOldStyleArgsIfPossible(args); - if (translatedArgs != null) { - cli = parser.parse(options, translatedArgs); - } else { - System.err.println("Parsing failed. Reason: " + exp.getMessage()); - printUsageAndExit(options, 1); - } - } catch (Exception secondExp) { - System.err.println("Parsing failed. 
Reason: " + secondExp.getMessage()); - printUsageAndExit(options, 1); - } - } + AppConfig config = new AppConfig(cli).validate(); + Emitter emitter = null; + AtomicInteger totalEvents = new AtomicInteger(); - AppConfig config = new AppConfig(cli).validate(); - Emitter emitter = null; - AtomicInteger totalEvents = new AtomicInteger(); - - switch (config.transport) { - case REST: { - emitter = RestEmitter - .create(b -> b.server(config.datahubAPI).token(config.datahubToken)); - } break; - case KAFKA: { - throw new UnsupportedOperationException("Kafka transport is not supported yet."); - } - case FILE: { - emitter = new FileEmitter(FileEmitterConfig.builder().fileName(config.filename).build()); - } - break; - default: { - throw new UnsupportedOperationException(String - .format("%s transport is not supported yet.", config.transport)); - } + switch (config.transport) { + case REST: + { + emitter = RestEmitter.create(b -> b.server(config.datahubAPI).token(config.datahubToken)); + } + break; + case KAFKA: + { + throw new UnsupportedOperationException("Kafka transport is not supported yet."); + } + case FILE: + { + emitter = new FileEmitter(FileEmitterConfig.builder().fileName(config.filename).build()); + } + break; + default: + { + throw new UnsupportedOperationException( + String.format("%s transport is not supported yet.", config.transport)); } + } - AuditStamp auditStamp = new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(new CorpuserUrn(config.datahubUser)); + AuditStamp auditStamp = + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(new CorpuserUrn(config.datahubUser)); - InputStream protocStream = new FileInputStream(config.protoc); + InputStream protocStream = new FileInputStream(config.protoc); - Stream<Path> filePathStream = Stream.empty(); - if (config.inputFile != null) { - filePathStream = Stream.of(Path.of(config.inputFile)); - } else { - DirectoryWalker walker = new DirectoryWalker(config.inputDir, config.excludePatterns); - filePathStream = walker.walkFiles(); - } + Stream<Path> filePathStream = Stream.empty(); + if (config.inputFile != null) { + filePathStream = Stream.of(Path.of(config.inputFile)); + } else { + DirectoryWalker walker = new DirectoryWalker(config.inputDir, config.excludePatterns); + filePathStream = walker.walkFiles(); + } - Emitter finalEmitter = emitter; - AtomicInteger exitCode = new AtomicInteger(0); - AtomicInteger totalFiles = new AtomicInteger(0); - - try { - filePathStream.forEach(filePath -> { - totalFiles.incrementAndGet(); - try { - String textSchema = Files.readString(filePath); - - ProtobufDataset dataset = ProtobufDataset.builder() - .setDataPlatformUrn(new DataPlatformUrn(config.dataPlatform)) - .setProtocIn(new FileInputStream(config.protoc)) - .setFilename(filePath.toString()) - .setSchema(textSchema) - .setAuditStamp(auditStamp) - .setFabricType(config.fabricType) - .setGithubOrganization(config.githubOrg) - .setSlackTeamId(config.slackId) - .setSubType(config.subType) - .build(); - - dataset.getAllMetadataChangeProposals().flatMap(Collection::stream).forEach(mcpw -> { + Emitter finalEmitter = emitter; + AtomicInteger exitCode = new AtomicInteger(0); + AtomicInteger totalFiles = new AtomicInteger(0); + + try { + filePathStream.forEach( + filePath -> { + totalFiles.incrementAndGet(); + try { + String textSchema = Files.readString(filePath); + + ProtobufDataset dataset = + ProtobufDataset.builder() + .setDataPlatformUrn(new DataPlatformUrn(config.dataPlatform)) + .setProtocIn(new FileInputStream(config.protoc)) 
+ .setFilename(filePath.toString()) + .setSchema(textSchema) + .setAuditStamp(auditStamp) + .setFabricType(config.fabricType) + .setGithubOrganization(config.githubOrg) + .setSlackTeamId(config.slackId) + .setSubType(config.subType) + .build(); + + dataset + .getAllMetadataChangeProposals() + .flatMap(Collection::stream) + .forEach( + mcpw -> { try { - finalEmitter.emit(mcpw, null).get(); - totalEvents.getAndIncrement(); + finalEmitter.emit(mcpw, null).get(); + totalEvents.getAndIncrement(); } catch (Exception e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); - } catch (Exception e) { - if (e.getMessage() != null && e.getMessage().equals("Cannot autodetect protobuf Message.")) { - System.err.printf("WARN: Top-level schema not found in %s, no dataset emitted%n", args[1]); - } else { - e.printStackTrace(); - System.err.println(String.format("‼️ Failed to emit to DataHub over %s. Num events emitted so far %d", - config.transport, totalEvents.get())); - exitCode.set(1); - } - } - }); - } finally { - if (emitter != null) { - emitter.close(); - } + }); + } catch (Exception e) { + if (e.getMessage() != null + && e.getMessage().equals("Cannot autodetect protobuf Message.")) { + System.err.printf( + "WARN: Top-level schema not found in %s, no dataset emitted%n", args[1]); + } else { + e.printStackTrace(); + System.err.println( + String.format( + "‼️ Failed to emit to DataHub over %s. Num events emitted so far %d", + config.transport, totalEvents.get())); + exitCode.set(1); + } + } + }); + } finally { + if (emitter != null) { + emitter.close(); + } } if (exitCode.get() == 0) { - System.out.println( - String.format("✅ Successfully emitted %d events for %d files to DataHub %s", totalEvents.get(), totalFiles.get(), config.transport)); + System.out.println( + String.format( + "✅ Successfully emitted %d events for %d files to DataHub %s", + totalEvents.get(), totalFiles.get(), config.transport)); } else { - System.out.println( - String.format("‼️ Emitted %d events for %d files to DataHub %s", totalEvents.get(), totalFiles.get(), config.transport)); + System.out.println( + String.format( + "‼️ Emitted %d events for %d files to DataHub %s", + totalEvents.get(), totalFiles.get(), config.transport)); } System.exit(exitCode.get()); -} + } - private static String[] convertOldStyleArgsIfPossible(String[] args) { - if (args.length == 2) { - String[] translatedArgs = {"--descriptor", args[0], "--file", args[1]}; - return translatedArgs; - } else { - return null; - } + private static String[] convertOldStyleArgsIfPossible(String[] args) { + if (args.length == 2) { + String[] translatedArgs = {"--descriptor", args[0], "--file", args[1]}; + return translatedArgs; + } else { + return null; } + } - private static void printUsageAndExit(Options options, int exitCode) { - HelpFormatter helpFormatter = new HelpFormatter(); - helpFormatter.printHelp(Proto2DataHub.class.getSimpleName(), options); - System.exit(exitCode); - } + private static void printUsageAndExit(Options options, int exitCode) { + HelpFormatter helpFormatter = new HelpFormatter(); + helpFormatter.printHelp(Proto2DataHub.class.getSimpleName(), options); + System.exit(exitCode); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java index 312b3785ac791..e0c27ebea18bc 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java 
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java @@ -15,6 +15,7 @@ import com.linkedin.schema.SchemaFieldArray; import com.linkedin.schema.SchemaMetadata; import com.linkedin.util.Pair; +import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; @@ -26,270 +27,282 @@ import datahub.protobuf.visitors.dataset.PropertyVisitor; import datahub.protobuf.visitors.dataset.TagAssociationVisitor; import datahub.protobuf.visitors.dataset.TermAssociationVisitor; -import datahub.protobuf.visitors.field.SchemaFieldVisitor; -import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.visitors.field.ProtobufExtensionFieldVisitor; +import datahub.protobuf.visitors.field.SchemaFieldVisitor; import datahub.protobuf.visitors.tags.TagVisitor; - -import javax.annotation.Nullable; import java.io.IOException; import java.io.InputStream; import java.util.Base64; import java.util.Collection; import java.util.Comparator; -import java.util.Optional; import java.util.List; +import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - +import javax.annotation.Nullable; public class ProtobufDataset { - public static ProtobufDataset.Builder builder() { - return new Builder(); - } - - public static class Builder { - private DataPlatformUrn dataPlatformUrn; - private DatasetUrn datasetUrn; - private FabricType fabricType; - private AuditStamp auditStamp; - private byte[] protocBytes; - private String messageName; - private String filename; - private String schema; - private String githubOrganization; - private String slackTeamId; - private String subType; - - public Builder setGithubOrganization(@Nullable String githubOrganization) { - this.githubOrganization = githubOrganization; - return this; - } - - public Builder setSlackTeamId(@Nullable String slackTeamId) { - this.slackTeamId = slackTeamId; - return this; - } - - public Builder setProtocIn(InputStream protocIn) throws IOException { - return setProtocBytes(protocIn.readAllBytes()); - } - - public Builder setDataPlatformUrn(@Nullable DataPlatformUrn dataPlatformUrn) { - this.dataPlatformUrn = dataPlatformUrn; - return this; - } - - public Builder setDatasetUrn(@Nullable DatasetUrn datasetUrn) { - this.datasetUrn = datasetUrn; - return this; - } - - public Builder setProtocBytes(byte[] protocBytes) { - this.protocBytes = protocBytes; - return this; - } - - public Builder setFabricType(FabricType fabricType) { - this.fabricType = fabricType; - return this; - } - - public Builder setAuditStamp(AuditStamp auditStamp) { - this.auditStamp = auditStamp; - return this; - } - - public Builder setMessageName(@Nullable String messageName) { - this.messageName = messageName; - return this; - } - public Builder setFilename(@Nullable String filename) { - this.filename = filename; - return this; - } - - public Builder setSchema(@Nullable String schema) { - this.schema = schema; - return this; - } - - public Builder setSubType(@Nullable String subType) { - this.subType = subType; - return this; - } - - public ProtobufDataset build() throws IOException { - FileDescriptorSet fileSet = FileDescriptorSet.parseFrom(protocBytes); - - return new ProtobufDataset( - this, - Optional.ofNullable(dataPlatformUrn).orElse(new DataPlatformUrn("kafka")), - datasetUrn, - new ProtobufGraph(fileSet, messageName, filename), schema, auditStamp, fabricType) - 
.setMetadataChangeProposalVisitors( - List.of( - new TagVisitor() - ) - ) - .setFieldVisitor(new ProtobufExtensionFieldVisitor()) - .setDatasetVisitor(DatasetVisitor.builder() - .protocBase64(Base64.getEncoder().encodeToString(protocBytes)) - .datasetPropertyVisitors( - List.of( - new KafkaTopicPropertyVisitor(), - new PropertyVisitor() - ) - ) - .institutionalMemoryMetadataVisitors( - List.of( - new InstitutionalMemoryVisitor(slackTeamId, githubOrganization) - ) - ) - .tagAssociationVisitors( - List.of( - new TagAssociationVisitor() - ) - ) - .termAssociationVisitors( - List.of( - new TermAssociationVisitor() - ) - ) - .ownershipVisitors( - List.of( - new OwnershipVisitor() - ) - ) - .domainVisitors( - List.of( - new DomainVisitor() - ) - ) - .build() - ) - .setSubType(subType); - } + public static ProtobufDataset.Builder builder() { + return new Builder(); + } + + public static class Builder { + private DataPlatformUrn dataPlatformUrn; + private DatasetUrn datasetUrn; + private FabricType fabricType; + private AuditStamp auditStamp; + private byte[] protocBytes; + private String messageName; + private String filename; + private String schema; + private String githubOrganization; + private String slackTeamId; + private String subType; + + public Builder setGithubOrganization(@Nullable String githubOrganization) { + this.githubOrganization = githubOrganization; + return this; } - private final DatasetUrn datasetUrn; - private final Optional<String> schemaSource; - private final ProtobufGraph graph; - private final AuditStamp auditStamp; - private Optional<String> subType; - private final VisitContext.VisitContextBuilder contextBuilder; - private final ProtobufDataset.Builder builder; - - private DatasetVisitor datasetVisitor; - private ProtobufModelVisitor<Pair<SchemaField, Double>> fieldVisitor; - private List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>>> mcpwVisitors; - - public ProtobufDataset(DataPlatformUrn dataPlatformUrn, DatasetUrn datasetUrn, ProtobufGraph graph, String schema, - AuditStamp auditStamp, FabricType fabricType) { - this(null, dataPlatformUrn, datasetUrn, graph, schema, auditStamp, fabricType); + public Builder setSlackTeamId(@Nullable String slackTeamId) { + this.slackTeamId = slackTeamId; + return this; } - public ProtobufDataset(ProtobufDataset.Builder builder, DataPlatformUrn dataPlatformUrn, DatasetUrn datasetUrn, ProtobufGraph graph, - String schema, AuditStamp auditStamp, FabricType fabricType) { - this.builder = builder; - this.schemaSource = Optional.ofNullable(schema); - this.auditStamp = auditStamp; - this.graph = graph; - this.subType = Optional.empty(); - - // Default - non-protobuf extension - fieldVisitor = new SchemaFieldVisitor(); - mcpwVisitors = List.of(); - - this.datasetUrn = datasetUrn != null ? datasetUrn : new DatasetUrn(dataPlatformUrn, this.graph.getFullName(), fabricType); - this.contextBuilder = VisitContext.builder().datasetUrn(this.datasetUrn).auditStamp(this.auditStamp); + public Builder setProtocIn(InputStream protocIn) throws IOException { + return setProtocBytes(protocIn.readAllBytes()); } - public ProtobufDataset setMetadataChangeProposalVisitors(List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? 
extends RecordTemplate>>> visitors) { - this.mcpwVisitors = visitors; - return this; + public Builder setDataPlatformUrn(@Nullable DataPlatformUrn dataPlatformUrn) { + this.dataPlatformUrn = dataPlatformUrn; + return this; } - public ProtobufDataset setDatasetVisitor(DatasetVisitor datasetVisitor) { - this.datasetVisitor = datasetVisitor; - return this; + public Builder setDatasetUrn(@Nullable DatasetUrn datasetUrn) { + this.datasetUrn = datasetUrn; + return this; } - public ProtobufDataset setFieldVisitor(ProtobufModelVisitor<Pair<SchemaField, Double>> visitor) { - this.fieldVisitor = visitor; - return this; + public Builder setProtocBytes(byte[] protocBytes) { + this.protocBytes = protocBytes; + return this; } - public ProtobufDataset setSubType(String subType) { - this.subType = Optional.ofNullable(subType); - return this; + public Builder setFabricType(FabricType fabricType) { + this.fabricType = fabricType; + return this; } - public ProtobufDataset.Builder toBuilder() { - return builder; + public Builder setAuditStamp(AuditStamp auditStamp) { + this.auditStamp = auditStamp; + return this; } - public ProtobufGraph getGraph() { - return graph; + public Builder setMessageName(@Nullable String messageName) { + this.messageName = messageName; + return this; } - public AuditStamp getAuditStamp() { - return auditStamp; + public Builder setFilename(@Nullable String filename) { + this.filename = filename; + return this; } - public DatasetUrn getDatasetUrn() { - return datasetUrn; + public Builder setSchema(@Nullable String schema) { + this.schema = schema; + return this; } - public Stream<Collection<MetadataChangeProposalWrapper<? extends RecordTemplate>>> getAllMetadataChangeProposals() { - return Stream.of(getVisitorMCPs(), getDatasetMCPs()); + public Builder setSubType(@Nullable String subType) { + this.subType = subType; + return this; } - public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getVisitorMCPs() { - return graph.accept(contextBuilder, mcpwVisitors).collect(Collectors.toList()); + public ProtobufDataset build() throws IOException { + FileDescriptorSet fileSet = FileDescriptorSet.parseFrom(protocBytes); + + return new ProtobufDataset( + this, + Optional.ofNullable(dataPlatformUrn).orElse(new DataPlatformUrn("kafka")), + datasetUrn, + new ProtobufGraph(fileSet, messageName, filename), + schema, + auditStamp, + fabricType) + .setMetadataChangeProposalVisitors(List.of(new TagVisitor())) + .setFieldVisitor(new ProtobufExtensionFieldVisitor()) + .setDatasetVisitor( + DatasetVisitor.builder() + .protocBase64(Base64.getEncoder().encodeToString(protocBytes)) + .datasetPropertyVisitors( + List.of(new KafkaTopicPropertyVisitor(), new PropertyVisitor())) + .institutionalMemoryMetadataVisitors( + List.of(new InstitutionalMemoryVisitor(slackTeamId, githubOrganization))) + .tagAssociationVisitors(List.of(new TagAssociationVisitor())) + .termAssociationVisitors(List.of(new TermAssociationVisitor())) + .ownershipVisitors(List.of(new OwnershipVisitor())) + .domainVisitors(List.of(new DomainVisitor())) + .build()) + .setSubType(subType); } - - public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getDatasetMCPs() { - Stream<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> mcpStream = - Stream.concat(this.graph.accept(contextBuilder, List.of(datasetVisitor)), + } + + private final DatasetUrn datasetUrn; + private final Optional<String> schemaSource; + private final ProtobufGraph graph; + private final AuditStamp auditStamp; + private Optional<String> subType; + private final VisitContext.VisitContextBuilder contextBuilder; + private final ProtobufDataset.Builder builder; + + private DatasetVisitor datasetVisitor; + private ProtobufModelVisitor<Pair<SchemaField, Double>> fieldVisitor; + private List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>>> + mcpwVisitors; + + public ProtobufDataset( + DataPlatformUrn dataPlatformUrn, + DatasetUrn datasetUrn, + ProtobufGraph graph, + String schema, + AuditStamp auditStamp, + FabricType fabricType) { + this(null, dataPlatformUrn, datasetUrn, graph, schema, auditStamp, fabricType); + } + + public ProtobufDataset( + ProtobufDataset.Builder builder, + DataPlatformUrn dataPlatformUrn, + DatasetUrn datasetUrn, + ProtobufGraph graph, + String schema, + AuditStamp auditStamp, + FabricType fabricType) { + this.builder = builder; + this.schemaSource = Optional.ofNullable(schema); + this.auditStamp = auditStamp; + this.graph = graph; + this.subType = Optional.empty(); + + // Default - non-protobuf extension + fieldVisitor = new SchemaFieldVisitor(); + mcpwVisitors = List.of(); + + this.datasetUrn = + datasetUrn != null + ? datasetUrn + : new DatasetUrn(dataPlatformUrn, this.graph.getFullName(), fabricType); + this.contextBuilder = + VisitContext.builder().datasetUrn(this.datasetUrn).auditStamp(this.auditStamp); + } + + public ProtobufDataset setMetadataChangeProposalVisitors( + List<ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>>> + visitors) { + this.mcpwVisitors = visitors; + return this; + } + + public ProtobufDataset setDatasetVisitor(DatasetVisitor datasetVisitor) { + this.datasetVisitor = datasetVisitor; + return this; + } + + public ProtobufDataset setFieldVisitor(ProtobufModelVisitor<Pair<SchemaField, Double>> visitor) { + this.fieldVisitor = visitor; + return this; + } + + public ProtobufDataset setSubType(String subType) { + this.subType = Optional.ofNullable(subType); + return this; + } + + public ProtobufDataset.Builder toBuilder() { + return builder; + } + + public ProtobufGraph getGraph() { + return graph; + } + + public AuditStamp getAuditStamp() { + return auditStamp; + } + + public DatasetUrn getDatasetUrn() { + return datasetUrn; + } + + public Stream<Collection<MetadataChangeProposalWrapper<? extends RecordTemplate>>> + getAllMetadataChangeProposals() { + return Stream.of(getVisitorMCPs(), getDatasetMCPs()); + } + + public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getVisitorMCPs() { + return graph.accept(contextBuilder, mcpwVisitors).collect(Collectors.toList()); + } + + public List<MetadataChangeProposalWrapper<? extends RecordTemplate>> getDatasetMCPs() { + Stream<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> mcpStream = + Stream.concat( + this.graph.accept(contextBuilder, List.of(datasetVisitor)), Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - getSchemaMetadata(), "schemaMetadata"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - new Status().setRemoved(false), "status"))); - - if (this.subType.isPresent()) { - SubTypes subTypes = new SubTypes().setTypeNames(new StringArray(this.subType.get())); - mcpStream = Stream.concat(mcpStream, - Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - subTypes, "subTypes"))); - } - return mcpStream.collect(Collectors.toList()); - } - - public SchemaMetadata getSchemaMetadata() { - SchemaMetadata.PlatformSchema platformSchema = new SchemaMetadata.PlatformSchema(); - schemaSource.ifPresent(schemaStr -> platformSchema.setKafkaSchema(new KafkaSchema().setDocumentSchema(schemaStr))); - - List<SchemaField> schemaFields = graph.accept(contextBuilder, List.of(fieldVisitor)) - .sorted(COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList()); - - return new SchemaMetadata() - .setSchemaName(graph.getFullName()) - .setPlatform(datasetUrn.getPlatformEntity()) - .setCreated(auditStamp) - .setLastModified(auditStamp) - .setVersion(graph.getMajorVersion()) - .setHash(graph.getHash()) - .setPlatformSchema(platformSchema) - .setFields(new SchemaFieldArray(schemaFields)); + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + getSchemaMetadata(), + "schemaMetadata"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + new Status().setRemoved(false), + "status"))); + + if (this.subType.isPresent()) { + SubTypes subTypes = new SubTypes().setTypeNames(new StringArray(this.subType.get())); + mcpStream = + Stream.concat( + mcpStream, + Stream.of( + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + subTypes, + "subTypes"))); } - - public static final Comparator<Pair<SchemaField, Double>> COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT = Comparator.comparing(Pair::getSecond); - public static final Comparator<Pair<SchemaField, Double>> COMPARE_BY_FIELD_PATH = Comparator - .comparing(p -> p.getFirst().getFieldPath()); + return mcpStream.collect(Collectors.toList()); + } + + public SchemaMetadata getSchemaMetadata() { + SchemaMetadata.PlatformSchema platformSchema = new SchemaMetadata.PlatformSchema(); + schemaSource.ifPresent( + schemaStr -> platformSchema.setKafkaSchema(new KafkaSchema().setDocumentSchema(schemaStr))); + + List<SchemaField> schemaFields = + graph + .accept(contextBuilder, List.of(fieldVisitor)) + .sorted(COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); + + return new SchemaMetadata() + .setSchemaName(graph.getFullName()) + .setPlatform(datasetUrn.getPlatformEntity()) + .setCreated(auditStamp) + .setLastModified(auditStamp) + .setVersion(graph.getMajorVersion()) + .setHash(graph.getHash()) + .setPlatformSchema(platformSchema) + .setFields(new SchemaFieldArray(schemaFields)); + } + + public static final Comparator<Pair<SchemaField, Double>> COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT = + Comparator.comparing(Pair::getSecond); + public static 
final Comparator<Pair<SchemaField, Double>> COMPARE_BY_FIELD_PATH = + Comparator.comparing(p -> p.getFirst().getFieldPath()); } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java index 5f5cfaa15cf41..ef5bc52aaee7a 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java @@ -5,7 +5,6 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.ExtensionRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; @@ -20,176 +19,211 @@ import java.util.stream.Stream; public class ProtobufUtils { - private ProtobufUtils() { } - - public static String collapseLocationComments(DescriptorProtos.SourceCodeInfo.Location location) { - String orig = Stream.concat(location.getLeadingDetachedCommentsList().stream(), - Stream.of(location.getLeadingComments(), location.getTrailingComments())) - .filter(Objects::nonNull) - .flatMap(line -> Arrays.stream(line.split("\n"))) - .map(line -> line.replaceFirst("^[*/ ]+", "")) - .collect(Collectors.joining("\n")) - .trim(); - - /* - * Sometimes DataHub doesn't like these strings. Not sure if its DataHub - * or protobuf issue: https://github.com/protocolbuffers/protobuf/issues/4691 - * - * We essentially smash utf8 chars to ascii here - */ - return new String(orig.getBytes(StandardCharsets.ISO_8859_1)); - } + private ProtobufUtils() {} + + public static String collapseLocationComments(DescriptorProtos.SourceCodeInfo.Location location) { + String orig = + Stream.concat( + location.getLeadingDetachedCommentsList().stream(), + Stream.of(location.getLeadingComments(), location.getTrailingComments())) + .filter(Objects::nonNull) + .flatMap(line -> Arrays.stream(line.split("\n"))) + .map(line -> line.replaceFirst("^[*/ ]+", "")) + .collect(Collectors.joining("\n")) + .trim(); /* - * Reflection used to prevent an exception deep inside the protobuf library due to a getter method - * mutating the json name field and causing an equality check to fail between an instance that has and has not - * had the getter called. - * - * https://github.com/protocolbuffers/protobuf/blob/main/java/core/src/main/java/com/google/protobuf/Descriptors.java#L1105 - * - * java.lang.IllegalArgumentException: FieldDescriptors can only be compared to other FieldDescriptors for fields of the same message type. - * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1344) - * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1057) - * at java.base/java.util.TreeMap.put(TreeMap.java:566) - * at java.base/java.util.AbstractMap.putAll(AbstractMap.java:281) - * at java.base/java.util.TreeMap.putAll(TreeMap.java:325) - * at com.google.protobuf.GeneratedMessageV3$ExtendableMessage.getAllFields(GeneratedMessageV3.java:1240) + * Sometimes DataHub doesn't like these strings. 
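The ISO-8859-1 round-trip in collapseLocationComments is easy to misread, so here is a minimal standalone sketch of the same trick. Only the final line mirrors the patch; the sample string and class wrapper are assumptions for illustration:

    import java.nio.charset.StandardCharsets;

    public class CharsetSmashSketch {
      public static void main(String[] args) {
        String orig = "naïve ☃ comment"; // assumed sample input
        // getBytes(ISO_8859_1) encodes each char to a single byte and
        // substitutes '?' for anything outside Latin-1 (the snowman here);
        // new String(byte[]) then decodes with the platform default charset.
        String smashed = new String(orig.getBytes(StandardCharsets.ISO_8859_1));
        System.out.println(smashed); // exact output depends on the default charset
      }
    }
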
Not sure if its DataHub + * or protobuf issue: https://github.com/protocolbuffers/protobuf/issues/4691 * + * We essentially smash utf8 chars to ascii here */ - private static final Method FIELD_OPT_EXT_FIELDS_METHOD; - private static final Method FIELD_OPT_ALL_FIELD_METHOD; - private static final Method MSG_OPT_EXT_FIELDS_METHOD; - private static final Method MSG_OPT_ALL_FIELD_METHOD; - static { - try { - FIELD_OPT_EXT_FIELDS_METHOD = DescriptorProtos.FieldOptions.class.getSuperclass() - .getDeclaredMethod("getExtensionFields"); - FIELD_OPT_EXT_FIELDS_METHOD.setAccessible(true); - - FIELD_OPT_ALL_FIELD_METHOD = DescriptorProtos.FieldOptions.class.getSuperclass().getSuperclass() - .getDeclaredMethod("getAllFieldsMutable", boolean.class); - FIELD_OPT_ALL_FIELD_METHOD.setAccessible(true); - - MSG_OPT_EXT_FIELDS_METHOD = DescriptorProtos.MessageOptions.class.getSuperclass() - .getDeclaredMethod("getExtensionFields"); - MSG_OPT_EXT_FIELDS_METHOD.setAccessible(true); - - MSG_OPT_ALL_FIELD_METHOD = DescriptorProtos.MessageOptions.class.getSuperclass().getSuperclass() - .getDeclaredMethod("getAllFieldsMutable", boolean.class); - MSG_OPT_ALL_FIELD_METHOD.setAccessible(true); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } + return new String(orig.getBytes(StandardCharsets.ISO_8859_1)); + } + + /* + * Reflection used to prevent an exception deep inside the protobuf library due to a getter method + * mutating the json name field and causing an equality check to fail between an instance that has and has not + * had the getter called. + * + * https://github.com/protocolbuffers/protobuf/blob/main/java/core/src/main/java/com/google/protobuf/Descriptors.java#L1105 + * + * java.lang.IllegalArgumentException: FieldDescriptors can only be compared to other FieldDescriptors for fields of the same message type. 
+ * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1344) + * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1057) + * at java.base/java.util.TreeMap.put(TreeMap.java:566) + * at java.base/java.util.AbstractMap.putAll(AbstractMap.java:281) + * at java.base/java.util.TreeMap.putAll(TreeMap.java:325) + * at com.google.protobuf.GeneratedMessageV3$ExtendableMessage.getAllFields(GeneratedMessageV3.java:1240) + * + */ + private static final Method FIELD_OPT_EXT_FIELDS_METHOD; + private static final Method FIELD_OPT_ALL_FIELD_METHOD; + private static final Method MSG_OPT_EXT_FIELDS_METHOD; + private static final Method MSG_OPT_ALL_FIELD_METHOD; + + static { + try { + FIELD_OPT_EXT_FIELDS_METHOD = + DescriptorProtos.FieldOptions.class + .getSuperclass() + .getDeclaredMethod("getExtensionFields"); + FIELD_OPT_EXT_FIELDS_METHOD.setAccessible(true); + + FIELD_OPT_ALL_FIELD_METHOD = + DescriptorProtos.FieldOptions.class + .getSuperclass() + .getSuperclass() + .getDeclaredMethod("getAllFieldsMutable", boolean.class); + FIELD_OPT_ALL_FIELD_METHOD.setAccessible(true); + + MSG_OPT_EXT_FIELDS_METHOD = + DescriptorProtos.MessageOptions.class + .getSuperclass() + .getDeclaredMethod("getExtensionFields"); + MSG_OPT_EXT_FIELDS_METHOD.setAccessible(true); + + MSG_OPT_ALL_FIELD_METHOD = + DescriptorProtos.MessageOptions.class + .getSuperclass() + .getSuperclass() + .getDeclaredMethod("getAllFieldsMutable", boolean.class); + MSG_OPT_ALL_FIELD_METHOD.setAccessible(true); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); } - - public static List<Pair<Descriptors.FieldDescriptor, Object>> getFieldOptions(DescriptorProtos.FieldDescriptorProto fieldProto) { - try { - LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) FIELD_OPT_EXT_FIELDS_METHOD.invoke(fieldProto.getOptions())) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) FIELD_OPT_ALL_FIELD_METHOD.invoke(fieldProto.getOptions(), false)) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - return options; - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } + } + + public static List<Pair<Descriptors.FieldDescriptor, Object>> getFieldOptions( + DescriptorProtos.FieldDescriptorProto fieldProto) { + try { + LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + FIELD_OPT_EXT_FIELDS_METHOD.invoke(fieldProto.getOptions())) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + FIELD_OPT_ALL_FIELD_METHOD.invoke(fieldProto.getOptions(), false)) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + return options; + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - - public static List<Pair<Descriptors.FieldDescriptor, Object>> getMessageOptions(DescriptorProtos.DescriptorProto messageProto) { - try { - LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) 
MSG_OPT_EXT_FIELDS_METHOD.invoke(messageProto.getOptions())) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - options.addAll(((Map<Descriptors.FieldDescriptor, Object>) MSG_OPT_ALL_FIELD_METHOD.invoke(messageProto.getOptions(), - false)) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - return options; - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } + } + + public static List<Pair<Descriptors.FieldDescriptor, Object>> getMessageOptions( + DescriptorProtos.DescriptorProto messageProto) { + try { + LinkedList<Pair<Descriptors.FieldDescriptor, Object>> options = new LinkedList<>(); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + MSG_OPT_EXT_FIELDS_METHOD.invoke(messageProto.getOptions())) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + options.addAll( + ((Map<Descriptors.FieldDescriptor, Object>) + MSG_OPT_ALL_FIELD_METHOD.invoke(messageProto.getOptions(), false)) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + return options; + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - - public static ExtensionRegistry buildRegistry(DescriptorProtos.FileDescriptorSet fileSet) { - ExtensionRegistry registry = ExtensionRegistry.newInstance(); - Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoMap = fileSet.getFileList().stream() - .collect(Collectors.toMap(DescriptorProtos.FileDescriptorProto::getName, Function.identity())); - Map<String, Descriptors.FileDescriptor> descriptorCache = new HashMap<>(); - - fileSet.getFileList().forEach(fdp -> { - try { - Descriptors.FileDescriptor file = descriptorFromProto(fdp, descriptorProtoMap, descriptorCache); - Stream.concat(file.getExtensions().stream(), file.getMessageTypes().stream().flatMap(msg -> msg.getExtensions().stream())) - .forEach(ext -> addToRegistry(fdp, ext, registry)); - } catch (Descriptors.DescriptorValidationException e) { + } + + public static ExtensionRegistry buildRegistry(DescriptorProtos.FileDescriptorSet fileSet) { + ExtensionRegistry registry = ExtensionRegistry.newInstance(); + Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoMap = + fileSet.getFileList().stream() + .collect( + Collectors.toMap( + DescriptorProtos.FileDescriptorProto::getName, Function.identity())); + Map<String, Descriptors.FileDescriptor> descriptorCache = new HashMap<>(); + + fileSet + .getFileList() + .forEach( + fdp -> { + try { + Descriptors.FileDescriptor file = + descriptorFromProto(fdp, descriptorProtoMap, descriptorCache); + Stream.concat( + file.getExtensions().stream(), + file.getMessageTypes().stream() + .flatMap(msg -> msg.getExtensions().stream())) + .forEach(ext -> addToRegistry(fdp, ext, registry)); + } catch (Descriptors.DescriptorValidationException e) { e.printStackTrace(); - } - }); - return registry; + } + }); + return registry; + } + + private static void addToRegistry( + DescriptorProtos.FileDescriptorProto fileDescriptorProto, + Descriptors.FieldDescriptor fieldDescriptor, + ExtensionRegistry registry) { + if (fieldDescriptor.getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { + registry.add(fieldDescriptor); + } else { + fileDescriptorProto.getMessageTypeList().stream() + .filter(typ -> 
typ.getName().equals(fieldDescriptor.getMessageType().getName())) + .findFirst() + .ifPresent( + messageType -> + registry.add(fieldDescriptor, messageType.getDefaultInstanceForType())); + fieldDescriptor.getMessageType().getFields().stream() + .filter(Descriptors.FieldDescriptor::isExtension) + .forEach(f -> addToRegistry(fileDescriptorProto, f, registry)); } - - private static void addToRegistry(DescriptorProtos.FileDescriptorProto fileDescriptorProto, - Descriptors.FieldDescriptor fieldDescriptor, ExtensionRegistry registry) { - if (fieldDescriptor.getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { - registry.add(fieldDescriptor); - } else { - fileDescriptorProto.getMessageTypeList().stream() - .filter(typ -> typ.getName().equals(fieldDescriptor.getMessageType().getName())) - .findFirst().ifPresent(messageType -> registry.add(fieldDescriptor, messageType.getDefaultInstanceForType())); - fieldDescriptor.getMessageType().getFields() - .stream().filter(Descriptors.FieldDescriptor::isExtension) - .forEach(f -> addToRegistry(fileDescriptorProto, f, registry)); - } + } + + /** + * Recursively constructs file descriptors for all dependencies of the supplied proto and returns + * a {@link Descriptors.FileDescriptor} for the supplied proto itself. For maximal efficiency, + * reuse the descriptorCache argument across calls. + */ + private static Descriptors.FileDescriptor descriptorFromProto( + DescriptorProtos.FileDescriptorProto descriptorProto, + Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoIndex, + Map<String, Descriptors.FileDescriptor> descriptorCache) + throws Descriptors.DescriptorValidationException { + // First, check the cache. + String descriptorName = descriptorProto.getName(); + if (descriptorCache.containsKey(descriptorName)) { + return descriptorCache.get(descriptorName); } - /** - * Recursively constructs file descriptors for all dependencies of the supplied proto and returns - * a {@link Descriptors.FileDescriptor} for the supplied proto itself. For maximal efficiency, reuse the - * descriptorCache argument across calls. - */ - private static Descriptors.FileDescriptor descriptorFromProto( - DescriptorProtos.FileDescriptorProto descriptorProto, - Map<String, DescriptorProtos.FileDescriptorProto> descriptorProtoIndex, - Map<String, Descriptors.FileDescriptor> descriptorCache) throws Descriptors.DescriptorValidationException { - // First, check the cache. - String descriptorName = descriptorProto.getName(); - if (descriptorCache.containsKey(descriptorName)) { - return descriptorCache.get(descriptorName); - } - - // Then, fetch all the required dependencies recursively. - ImmutableList.Builder<Descriptors.FileDescriptor> dependencies = ImmutableList.builder(); - for (String dependencyName : descriptorProto.getDependencyList()) { - if (!descriptorProtoIndex.containsKey(dependencyName)) { - throw new IllegalArgumentException("Could not find dependency: " + dependencyName); - } - DescriptorProtos.FileDescriptorProto dependencyProto = descriptorProtoIndex.get(dependencyName); - dependencies.add(descriptorFromProto(dependencyProto, descriptorProtoIndex, descriptorCache)); - } - - // Finally, construct the actual descriptor. 
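buildRegistry exists so that custom options encoded as protobuf extensions can be read as typed fields instead of surfacing as unknown fields. A sketch of the usual two-pass pattern, assuming a descriptor file path; the re-parse step is standard protobuf practice and an assumption here, not something this file shows:

    import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
    import com.google.protobuf.ExtensionRegistry;
    import datahub.protobuf.ProtobufUtils;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class RegistrySketch {
      public static void main(String[] args) throws Exception {
        byte[] protocBytes = Files.readAllBytes(Path.of("build/descriptor.dsc")); // assumed path
        // First parse discovers the extension declarations...
        FileDescriptorSet raw = FileDescriptorSet.parseFrom(protocBytes);
        ExtensionRegistry registry = ProtobufUtils.buildRegistry(raw);
        // ...second parse materializes extension values as typed fields.
        FileDescriptorSet resolved = FileDescriptorSet.parseFrom(protocBytes, registry);
        System.out.println(resolved.getFileCount() + " files resolved");
      }
    }
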
- Descriptors.FileDescriptor[] empty = new Descriptors.FileDescriptor[0]; - Descriptors.FileDescriptor descript = Descriptors.FileDescriptor.buildFrom(descriptorProto, dependencies.build().toArray(empty), false); - descriptorCache.put(descript.getName(), descript); - return descript; + // Then, fetch all the required dependencies recursively. + ImmutableList.Builder<Descriptors.FileDescriptor> dependencies = ImmutableList.builder(); + for (String dependencyName : descriptorProto.getDependencyList()) { + if (!descriptorProtoIndex.containsKey(dependencyName)) { + throw new IllegalArgumentException("Could not find dependency: " + dependencyName); + } + DescriptorProtos.FileDescriptorProto dependencyProto = + descriptorProtoIndex.get(dependencyName); + dependencies.add(descriptorFromProto(dependencyProto, descriptorProtoIndex, descriptorCache)); } + // Finally, construct the actual descriptor. + Descriptors.FileDescriptor[] empty = new Descriptors.FileDescriptor[0]; + Descriptors.FileDescriptor descript = + Descriptors.FileDescriptor.buildFrom( + descriptorProto, dependencies.build().toArray(empty), false); + descriptorCache.put(descript.getName(), descript); + return descript; + } } - diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java index 7926ba0702762..49ecb7ec2aedf 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java @@ -5,51 +5,48 @@ import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; - @Builder @Getter public class FieldTypeEdge extends DefaultEdge { - @Builder.Default - private final String type = ""; - @Builder.Default - private final boolean isMessageType = false; - private final transient ProtobufElement edgeSource; - private final transient ProtobufElement edgeTarget; - - public FieldTypeEdge inGraph(DefaultDirectedGraph<ProtobufElement, FieldTypeEdge> g) { - g.addEdge(edgeSource, edgeTarget, this); - return this; + @Builder.Default private final String type = ""; + @Builder.Default private final boolean isMessageType = false; + private final transient ProtobufElement edgeSource; + private final transient ProtobufElement edgeTarget; + + public FieldTypeEdge inGraph(DefaultDirectedGraph<ProtobufElement, FieldTypeEdge> g) { + g.addEdge(edgeSource, edgeTarget, this); + return this; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - FieldTypeEdge that = (FieldTypeEdge) o; + FieldTypeEdge that = (FieldTypeEdge) o; - if (isMessageType() != that.isMessageType()) { - return false; - } - if (!getType().equals(that.getType())) { - return false; - } - if (!getEdgeSource().equals(that.getEdgeSource())) { - return false; - } - return getEdgeTarget().equals(that.getEdgeTarget()); + if (isMessageType() != that.isMessageType()) { + return false; } - - @Override - public int hashCode() { - int result = getType().hashCode(); - result = 31 * result + (isMessageType() ? 
1 : 0); - result = 31 * result + getEdgeSource().hashCode(); - result = 31 * result + getEdgeTarget().hashCode(); - return result; + if (!getType().equals(that.getType())) { + return false; + } + if (!getEdgeSource().equals(that.getEdgeSource())) { + return false; } + return getEdgeTarget().equals(that.getEdgeTarget()); + } + + @Override + public int hashCode() { + int result = getType().hashCode(); + result = 31 * result + (isMessageType() ? 1 : 0); + result = 31 * result + getEdgeSource().hashCode(); + result = 31 * result + getEdgeTarget().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java index 91c76fe16b73f..e47e804763ecf 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java @@ -5,28 +5,33 @@ import com.google.protobuf.DescriptorProtos.SourceCodeInfo; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.List; import java.util.stream.Stream; - public interface ProtobufElement { - String name(); - String fullName(); - String nativeType(); - String comment(); - String fieldPathType(); - - FileDescriptorProto fileProto(); - DescriptorProto messageProto(); - - default Stream<SourceCodeInfo.Location> messageLocations() { - List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); - return fileLocations.stream() - .filter(loc -> loc.getPathCount() > 1 - && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER - && messageProto() == fileProto().getMessageType(loc.getPath(1))); - } - - <T> Stream<T> accept(ProtobufModelVisitor<T> v, VisitContext context); + String name(); + + String fullName(); + + String nativeType(); + + String comment(); + + String fieldPathType(); + + FileDescriptorProto fileProto(); + + DescriptorProto messageProto(); + + default Stream<SourceCodeInfo.Location> messageLocations() { + List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); + return fileLocations.stream() + .filter( + loc -> + loc.getPathCount() > 1 + && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER + && messageProto() == fileProto().getMessageType(loc.getPath(1))); + } + + <T> Stream<T> accept(ProtobufModelVisitor<T> v, VisitContext context); } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java index ff894112d0d51..3d4e170939455 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java @@ -6,82 +6,81 @@ import com.linkedin.schema.EnumType; import com.linkedin.schema.SchemaFieldDataType; import datahub.protobuf.ProtobufUtils; +import java.util.stream.Collectors; import lombok.Builder; import lombok.Getter; -import java.util.stream.Collectors; - - @Getter public class ProtobufEnum extends ProtobufMessage { - private final EnumDescriptorProto enumProto; - - @Builder(builderMethodName = "enumBuilder") - public ProtobufEnum(FileDescriptorProto fileProto, - 
DescriptorProto messageProto, - EnumDescriptorProto enumProto) { - super(messageProto, null, fileProto); - this.enumProto = enumProto; - } - - @Override - public String name() { - return enumProto.getName(); - } - - @Override - public String fieldPathType() { - return "[type=enum]"; + private final EnumDescriptorProto enumProto; + + @Builder(builderMethodName = "enumBuilder") + public ProtobufEnum( + FileDescriptorProto fileProto, DescriptorProto messageProto, EnumDescriptorProto enumProto) { + super(messageProto, null, fileProto); + this.enumProto = enumProto; + } + + @Override + public String name() { + return enumProto.getName(); + } + + @Override + public String fieldPathType() { + return "[type=enum]"; + } + + @Override + public String nativeType() { + return "enum"; + } + + @Override + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())); + } + + @Override + public String comment() { + return messageLocations() + .filter( + loc -> + loc.getPathCount() > 3 + && loc.getPath(2) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER + && enumProto == messageProto().getEnumType(loc.getPath(3))) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } + + @Override + public String toString() { + return String.format("ProtobufEnum[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String nativeType() { - return "enum"; + if (o == null || getClass() != o.getClass()) { + return false; } - - @Override - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())); + if (!super.equals(o)) { + return false; } - @Override - public String comment() { - return messageLocations() - .filter(loc -> loc.getPathCount() > 3 - && loc.getPath(2) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER - && enumProto == messageProto().getEnumType(loc.getPath(3))) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); - } - - @Override - public String toString() { - return String.format("ProtobufEnum[%s]", fullName()); - } + ProtobufEnum that = (ProtobufEnum) o; - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - if (!super.equals(o)) { - return false; - } + return getEnumProto().equals(that.getEnumProto()); + } - ProtobufEnum that = (ProtobufEnum) o; - - return getEnumProto().equals(that.getEnumProto()); - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + getEnumProto().hashCode(); - return result; - } + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + getEnumProto().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java index d890c373f1299..5bb41017488f3 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java @@ -18,152 +18,159 @@ import datahub.protobuf.ProtobufUtils; import 
datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Getter; - import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; @Builder(toBuilder = true) @Getter @AllArgsConstructor public class ProtobufField implements ProtobufElement { - private final ProtobufMessage protobufMessage; - private final FieldDescriptorProto fieldProto; - private final String nativeType; - private final String fieldPathType; - private final Boolean isMessageType; - private final SchemaFieldDataType schemaFieldDataType; - private final Boolean isNestedType; - - public OneofDescriptorProto oneOfProto() { - if (fieldProto.hasOneofIndex()) { - return protobufMessage.messageProto().getOneofDecl(fieldProto.getOneofIndex()); - } - return null; - } - - @Override - public FileDescriptorProto fileProto() { - return protobufMessage.fileProto(); - } - - @Override - public DescriptorProto messageProto() { - return protobufMessage.messageProto(); - } - - public String parentMessageName() { - return protobufMessage.fullName(); - } - - @Override - public String name() { - return fieldProto.getName(); - } - - @Override - public String fullName() { - return String.join(".", parentMessageName(), name()); - } - - public String getNativeType() { - return nativeType(); - } - - public int getNumber() { - return fieldProto.getNumber(); + private final ProtobufMessage protobufMessage; + private final FieldDescriptorProto fieldProto; + private final String nativeType; + private final String fieldPathType; + private final Boolean isMessageType; + private final SchemaFieldDataType schemaFieldDataType; + private final Boolean isNestedType; + + public OneofDescriptorProto oneOfProto() { + if (fieldProto.hasOneofIndex()) { + return protobufMessage.messageProto().getOneofDecl(fieldProto.getOneofIndex()); } - - @Override - public String nativeType() { - return Optional.ofNullable(nativeType).orElseGet(() -> { - if (fieldProto.getTypeName().isEmpty()) { + return null; + } + + @Override + public FileDescriptorProto fileProto() { + return protobufMessage.fileProto(); + } + + @Override + public DescriptorProto messageProto() { + return protobufMessage.messageProto(); + } + + public String parentMessageName() { + return protobufMessage.fullName(); + } + + @Override + public String name() { + return fieldProto.getName(); + } + + @Override + public String fullName() { + return String.join(".", parentMessageName(), name()); + } + + public String getNativeType() { + return nativeType(); + } + + public int getNumber() { + return fieldProto.getNumber(); + } + + @Override + public String nativeType() { + return Optional.ofNullable(nativeType) + .orElseGet( + () -> { + if (fieldProto.getTypeName().isEmpty()) { return fieldProto.getType().name().split("_")[1].toLowerCase(); - } else { + } else { return fieldProto.getTypeName().replaceFirst("^[.]", ""); - } - }); - } - - @Override - public String fieldPathType() { - return Optional.ofNullable(fieldPathType).orElseGet(() -> { - final String pathType; - - switch (fieldProto.getType()) { + } + }); + } + + @Override + public String fieldPathType() { + return Optional.ofNullable(fieldPathType) + .orElseGet( + () -> { + final String pathType; + + switch (fieldProto.getType()) { case TYPE_DOUBLE: - pathType = "double"; - break; + 
pathType = "double"; + break; case TYPE_FLOAT: - pathType = "float"; - break; + pathType = "float"; + break; case TYPE_SFIXED64: case TYPE_FIXED64: case TYPE_UINT64: case TYPE_INT64: case TYPE_SINT64: - pathType = "long"; - break; + pathType = "long"; + break; case TYPE_FIXED32: case TYPE_SFIXED32: case TYPE_INT32: case TYPE_UINT32: case TYPE_SINT32: - pathType = "int"; - break; + pathType = "int"; + break; case TYPE_BYTES: - pathType = "bytes"; - break; + pathType = "bytes"; + break; case TYPE_ENUM: - pathType = "enum"; - break; + pathType = "enum"; + break; case TYPE_BOOL: - pathType = "boolean"; - break; + pathType = "boolean"; + break; case TYPE_STRING: - pathType = "string"; - break; + pathType = "string"; + break; case TYPE_GROUP: case TYPE_MESSAGE: - pathType = nativeType().replace(".", "_"); - break; + pathType = nativeType().replace(".", "_"); + break; default: - throw new IllegalStateException(String.format("Unexpected FieldDescriptorProto => FieldPathType %s", fieldProto.getType())); - } + throw new IllegalStateException( + String.format( + "Unexpected FieldDescriptorProto => FieldPathType %s", + fieldProto.getType())); + } - StringArray fieldPath = new StringArray(); + StringArray fieldPath = new StringArray(); - if (schemaFieldDataType().getType().isArrayType()) { + if (schemaFieldDataType().getType().isArrayType()) { fieldPath.add("[type=array]"); - } + } - fieldPath.add(String.format("[type=%s]", pathType)); + fieldPath.add(String.format("[type=%s]", pathType)); - return String.join(".", fieldPath); - }); - } + return String.join(".", fieldPath); + }); + } - public boolean isMessage() { - return Optional.ofNullable(isMessageType).orElseGet(() -> - fieldProto.getType().equals(FieldDescriptorProto.Type.TYPE_MESSAGE)); - } + public boolean isMessage() { + return Optional.ofNullable(isMessageType) + .orElseGet(() -> fieldProto.getType().equals(FieldDescriptorProto.Type.TYPE_MESSAGE)); + } - public int sortWeight() { - return messageProto().getFieldList().indexOf(fieldProto) + 1; - } + public int sortWeight() { + return messageProto().getFieldList().indexOf(fieldProto) + 1; + } - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return Optional.ofNullable(schemaFieldDataType).orElseGet(() -> { - final SchemaFieldDataType.Type fieldType; + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return Optional.ofNullable(schemaFieldDataType) + .orElseGet( + () -> { + final SchemaFieldDataType.Type fieldType; - switch (fieldProto.getType()) { + switch (fieldProto.getType()) { case TYPE_DOUBLE: case TYPE_FLOAT: case TYPE_INT64: @@ -172,139 +179,150 @@ public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { case TYPE_UINT32: case TYPE_SINT32: case TYPE_SINT64: - fieldType = SchemaFieldDataType.Type.create(new NumberType()); - break; + fieldType = SchemaFieldDataType.Type.create(new NumberType()); + break; case TYPE_GROUP: case TYPE_MESSAGE: - fieldType = SchemaFieldDataType.Type.create(new RecordType()); - break; + fieldType = SchemaFieldDataType.Type.create(new RecordType()); + break; case TYPE_BYTES: - fieldType = SchemaFieldDataType.Type.create(new BytesType()); - break; + fieldType = SchemaFieldDataType.Type.create(new BytesType()); + break; case TYPE_ENUM: - fieldType = SchemaFieldDataType.Type.create(new EnumType()); - break; + fieldType = SchemaFieldDataType.Type.create(new EnumType()); + break; case TYPE_BOOL: - fieldType = SchemaFieldDataType.Type.create(new BooleanType()); - break; + fieldType 
= SchemaFieldDataType.Type.create(new BooleanType()); + break; case TYPE_STRING: - fieldType = SchemaFieldDataType.Type.create(new StringType()); - break; + fieldType = SchemaFieldDataType.Type.create(new StringType()); + break; case TYPE_FIXED64: case TYPE_FIXED32: case TYPE_SFIXED32: case TYPE_SFIXED64: - fieldType = SchemaFieldDataType.Type.create(new FixedType()); - break; + fieldType = SchemaFieldDataType.Type.create(new FixedType()); + break; default: - throw new IllegalStateException(String.format("Unexpected FieldDescriptorProto => SchemaFieldDataType: %s", fieldProto.getType())); - } - - if (fieldProto.getLabel().equals(FieldDescriptorProto.Label.LABEL_REPEATED)) { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType() - .setNestedType(new StringArray()))); - } - - return new SchemaFieldDataType().setType(fieldType); - }); - } - - @Override - public Stream<SourceCodeInfo.Location> messageLocations() { - List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); - return fileLocations.stream() - .filter(loc -> loc.getPathCount() > 1 - && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER); + throw new IllegalStateException( + String.format( + "Unexpected FieldDescriptorProto => SchemaFieldDataType: %s", + fieldProto.getType())); + } + + if (fieldProto.getLabel().equals(FieldDescriptorProto.Label.LABEL_REPEATED)) { + return new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray()))); + } + + return new SchemaFieldDataType().setType(fieldType); + }); + } + + @Override + public Stream<SourceCodeInfo.Location> messageLocations() { + List<SourceCodeInfo.Location> fileLocations = fileProto().getSourceCodeInfo().getLocationList(); + return fileLocations.stream() + .filter( + loc -> + loc.getPathCount() > 1 + && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER); + } + + @Override + public String comment() { + return messageLocations() + .filter(location -> location.getPathCount() > 3) + .filter( + location -> + !ProtobufUtils.collapseLocationComments(location).isEmpty() + && !isEnumType(location.getPathList())) + .filter( + location -> { + List<Integer> pathList = location.getPathList(); + DescriptorProto messageType = fileProto().getMessageType(pathList.get(1)); + + if (!isNestedType + && location.getPath(2) == DescriptorProto.FIELD_FIELD_NUMBER + && fieldProto == messageType.getField(location.getPath(3))) { + return true; + } else if (isNestedType + && location.getPath(2) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER + && fieldProto == getNestedTypeFields(pathList, messageType)) { + return true; + } + return false; + }) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } + + private FieldDescriptorProto getNestedTypeFields( + List<Integer> pathList, DescriptorProto messageType) { + int pathSize = pathList.size(); + List<Integer> nestedValues = new ArrayList<>(pathSize); + + for (int index = 0; index < pathSize; index++) { + if (index > 1 + && index % 2 == 0 + && pathList.get(index) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER) { + nestedValues.add(pathList.get(index + 1)); + } } - @Override - public String comment() { - return messageLocations() - .filter(location -> location.getPathCount() > 3) - .filter(location -> !ProtobufUtils.collapseLocationComments(location).isEmpty() - && !isEnumType(location.getPathList())) - .filter(location -> { - List<Integer> pathList = 
location.getPathList(); - DescriptorProto messageType = fileProto().getMessageType(pathList.get(1)); - - if (!isNestedType - && location.getPath(2) == DescriptorProto.FIELD_FIELD_NUMBER - && fieldProto == messageType.getField(location.getPath(3))) { - return true; - } else if (isNestedType - && location.getPath(2) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER - && fieldProto == getNestedTypeFields(pathList, messageType)) { - return true; - } - return false; - }) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); + for (Integer value : nestedValues) { + messageType = messageType.getNestedType(value); } - private FieldDescriptorProto getNestedTypeFields(List<Integer> pathList, DescriptorProto messageType) { - int pathSize = pathList.size(); - List<Integer> nestedValues = new ArrayList<>(pathSize); - - for (int index = 0; index < pathSize; index++) { - if (index > 1 - && index % 2 == 0 - && pathList.get(index) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER) { - nestedValues.add(pathList.get(index + 1)); - } - } - - for (Integer value : nestedValues) { - messageType = messageType.getNestedType(value); - } - - if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER - && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER - && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) { - return messageType.getField(pathList.get(pathSize - 1)); - } else { - return null; - } + if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER + && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER + && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) { + return messageType.getField(pathList.get(pathSize - 1)); + } else { + return null; } - - private boolean isEnumType(List<Integer> pathList) { - for (int index = 0; index < pathList.size(); index++) { - if (index > 1 - && index % 2 == 0 - && pathList.get(index) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER) { - return true; - } - } - return false; + } + + private boolean isEnumType(List<Integer> pathList) { + for (int index = 0; index < pathList.size(); index++) { + if (index > 1 + && index % 2 == 0 + && pathList.get(index) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER) { + return true; + } } - - @Override - public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { - return visitor.visitField(this, context); + return false; + } + + @Override + public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { + return visitor.visitField(this, context); + } + + @Override + public String toString() { + return String.format("ProtobufField[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return String.format("ProtobufField[%s]", fullName()); + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } + ProtobufElement that = (ProtobufElement) o; - ProtobufElement that = (ProtobufElement) o; + return fullName().equals(that.fullName()); + } - return fullName().equals(that.fullName()); - } - - @Override - public int hashCode() { - return fullName().hashCode(); - } + @Override + public int hashCode() { + return fullName().hashCode(); + } } diff --git 
a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java index ae2319af85988..2f8c885de0e96 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java @@ -6,10 +6,6 @@ import datahub.protobuf.ProtobufUtils; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import org.jgrapht.GraphPath; -import org.jgrapht.alg.shortestpath.AllDirectedPaths; -import org.jgrapht.graph.DefaultDirectedGraph; - import java.util.Collection; import java.util.HashSet; import java.util.List; @@ -18,374 +14,476 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - +import org.jgrapht.GraphPath; +import org.jgrapht.alg.shortestpath.AllDirectedPaths; +import org.jgrapht.graph.DefaultDirectedGraph; public class ProtobufGraph extends DefaultDirectedGraph<ProtobufElement, FieldTypeEdge> { - private final transient ProtobufMessage rootProtobufMessage; - private final transient AllDirectedPaths<ProtobufElement, FieldTypeEdge> directedPaths; - private final transient ExtensionRegistry registry; - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) throws InvalidProtocolBufferException { - this(fileSet, null, null, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName) throws InvalidProtocolBufferException { - this(fileSet, messageName, null, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName, String relativeFilename) throws InvalidProtocolBufferException { - this(fileSet, messageName, relativeFilename, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName, String filename, - boolean flattenGoogleWrapped) throws InvalidProtocolBufferException { - super(FieldTypeEdge.class); - this.registry = ProtobufUtils.buildRegistry(fileSet); - DescriptorProtos.FileDescriptorSet fileSetExtended = DescriptorProtos.FileDescriptorSet - .parseFrom(fileSet.toByteArray(), this.registry); - buildProtobufGraph(fileSetExtended); - if (flattenGoogleWrapped) { - flattenGoogleWrapped(); - } - - if (messageName != null) { - this.rootProtobufMessage = findMessage(messageName); - } else { - DescriptorProtos.FileDescriptorProto lastFile = fileSetExtended.getFileList() - .stream().filter(f -> filename != null && filename.endsWith(f.getName())) - .findFirst().orElse(fileSetExtended.getFile(fileSetExtended.getFileCount() - 1)); - - if (filename != null) { - this.rootProtobufMessage = autodetectRootMessage(lastFile) - .orElse(autodetectSingleMessage(lastFile) - .orElse(autodetectLocalFileRootMessage(lastFile) - .orElseThrow(() -> new IllegalArgumentException("Cannot autodetect protobuf Message.")))); - } else { - this.rootProtobufMessage = autodetectRootMessage(lastFile) - .orElseThrow(() -> new IllegalArgumentException("Cannot autodetect root protobuf Message.")); - } - } - - this.directedPaths = new AllDirectedPaths<>(this); - } - - public List<GraphPath<ProtobufElement, FieldTypeEdge>> getAllPaths(ProtobufElement a, ProtobufElement b) { - return directedPaths.getAllPaths(a, b, true, null); - } - - public ExtensionRegistry getRegistry() { - return registry; - } - - public String getFullName() { - return 
rootProtobufMessage.fullName(); - } - - public int getMajorVersion() { - return rootProtobufMessage.majorVersion(); - } - - public String getComment() { - return rootProtobufMessage.comment(); - } - - public ProtobufMessage root() { - return rootProtobufMessage; - } - - - public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept(VisitContext.VisitContextBuilder contextBuilder, Collection<V> visitors) { - VisitContext context = Optional.ofNullable(contextBuilder).orElse(VisitContext.builder()).graph(this).build(); - return accept(context, visitors); - } - - public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept(VisitContext context, Collection<V> visitors) { - return Stream.concat( - visitors.stream().flatMap(visitor -> visitor.visitGraph(context)), - vertexSet().stream().flatMap(vertex -> visitors.stream().flatMap(visitor -> vertex.accept(visitor, context))) - ); - } - - protected Optional<ProtobufMessage> autodetectRootMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && incomingEdgesOf(v).isEmpty() - && outgoingEdgesOf(v).stream() - .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) - .allMatch(e -> e.getEdgeSource().equals(v))) // all the incoming edges on the child vertices should be self - .map(v -> (ProtobufMessage) v) - .findFirst(); + private final transient ProtobufMessage rootProtobufMessage; + private final transient AllDirectedPaths<ProtobufElement, FieldTypeEdge> directedPaths; + private final transient ExtensionRegistry registry; + + public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) + throws InvalidProtocolBufferException { + this(fileSet, null, null, true); + } + + public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName) + throws InvalidProtocolBufferException { + this(fileSet, messageName, null, true); + } + + public ProtobufGraph( + DescriptorProtos.FileDescriptorSet fileSet, String messageName, String relativeFilename) + throws InvalidProtocolBufferException { + this(fileSet, messageName, relativeFilename, true); + } + + public ProtobufGraph( + DescriptorProtos.FileDescriptorSet fileSet, + String messageName, + String filename, + boolean flattenGoogleWrapped) + throws InvalidProtocolBufferException { + super(FieldTypeEdge.class); + this.registry = ProtobufUtils.buildRegistry(fileSet); + DescriptorProtos.FileDescriptorSet fileSetExtended = + DescriptorProtos.FileDescriptorSet.parseFrom(fileSet.toByteArray(), this.registry); + buildProtobufGraph(fileSetExtended); + if (flattenGoogleWrapped) { + flattenGoogleWrapped(); } - protected Optional<ProtobufMessage> autodetectSingleMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && targetFile.getMessageTypeCount() == 1) - .map(v -> (ProtobufMessage) v) - .findFirst(); + if (messageName != null) { + this.rootProtobufMessage = findMessage(messageName); + } else { + DescriptorProtos.FileDescriptorProto lastFile = + fileSetExtended.getFileList().stream() + .filter(f -> filename != null && filename.endsWith(f.getName())) + .findFirst() + .orElse(fileSetExtended.getFile(fileSetExtended.getFileCount() - 1)); + + if (filename != null) { + this.rootProtobufMessage = + autodetectRootMessage(lastFile) + .orElse( + 
autodetectSingleMessage(lastFile) + .orElse( + autodetectLocalFileRootMessage(lastFile) + .orElseThrow( + () -> + new IllegalArgumentException( + "Cannot autodetect protobuf Message.")))); + } else { + this.rootProtobufMessage = + autodetectRootMessage(lastFile) + .orElseThrow( + () -> new IllegalArgumentException("Cannot autodetect root protobuf Message.")); + } } - protected Optional<ProtobufMessage> autodetectLocalFileRootMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && incomingEdgesOf(v).stream().noneMatch(e -> e.getEdgeSource().fileProto().equals(targetFile)) - && outgoingEdgesOf(v).stream() // all the incoming edges on the child vertices should be self within target file - .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) - .allMatch(e -> !e.getEdgeSource().fileProto().equals(targetFile) || e.getEdgeSource().equals(v))) - .map(v -> (ProtobufMessage) v) - .findFirst(); - } + this.directedPaths = new AllDirectedPaths<>(this); + } + + public List<GraphPath<ProtobufElement, FieldTypeEdge>> getAllPaths( + ProtobufElement a, ProtobufElement b) { + return directedPaths.getAllPaths(a, b, true, null); + } + + public ExtensionRegistry getRegistry() { + return registry; + } + + public String getFullName() { + return rootProtobufMessage.fullName(); + } + + public int getMajorVersion() { + return rootProtobufMessage.majorVersion(); + } + + public String getComment() { + return rootProtobufMessage.comment(); + } + + public ProtobufMessage root() { + return rootProtobufMessage; + } + + public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept( + VisitContext.VisitContextBuilder contextBuilder, Collection<V> visitors) { + VisitContext context = + Optional.ofNullable(contextBuilder).orElse(VisitContext.builder()).graph(this).build(); + return accept(context, visitors); + } + + public <T, V extends ProtobufModelVisitor<T>> Stream<T> accept( + VisitContext context, Collection<V> visitors) { + return Stream.concat( + visitors.stream().flatMap(visitor -> visitor.visitGraph(context)), + vertexSet().stream() + .flatMap( + vertex -> visitors.stream().flatMap(visitor -> vertex.accept(visitor, context)))); + } + + protected Optional<ProtobufMessage> autodetectRootMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && incomingEdgesOf(v).isEmpty() + && outgoingEdgesOf(v).stream() + .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) + .allMatch( + e -> + e.getEdgeSource() + .equals( + v))) // all the incoming edges on the child vertices should + // be self + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + protected Optional<ProtobufMessage> autodetectSingleMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && targetFile.getMessageTypeCount() == 1) + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + protected Optional<ProtobufMessage> autodetectLocalFileRootMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of 
fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && incomingEdgesOf(v).stream() + .noneMatch(e -> e.getEdgeSource().fileProto().equals(targetFile)) + && outgoingEdgesOf(v) + .stream() // all the incoming edges on the child vertices should be self + // within target file + .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) + .allMatch( + e -> + !e.getEdgeSource().fileProto().equals(targetFile) + || e.getEdgeSource().equals(v))) + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + public ProtobufMessage findMessage(String messageName) throws IllegalArgumentException { + return (ProtobufMessage) + vertexSet().stream() + .filter(v -> v instanceof ProtobufMessage && messageName.equals(v.fullName())) + .findFirst() + .orElseThrow( + () -> + new IllegalArgumentException( + String.format("Cannot find protobuf Message %s", messageName))); + } + + private void buildProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) { + // Attach non-nested fields to messages + fileSet + .getFileList() + .forEach( + fileProto -> + fileProto + .getMessageTypeList() + .forEach( + messageProto -> { + ProtobufMessage messageVertex = + ProtobufMessage.builder() + .fileProto(fileProto) + .messageProto(messageProto) + .build(); + addVertex(messageVertex); + + // Handle nested fields + addNestedMessage(fileProto, messageProto); + + // Add enum types + addEnum(fileProto, messageProto); + + // handle normal fields and oneofs + messageProto + .getFieldList() + .forEach( + fieldProto -> { + ProtobufField fieldVertex = + ProtobufField.builder() + .protobufMessage(messageVertex) + .fieldProto(fieldProto) + .isNestedType(false) + .build(); + + // Add field vertex + addVertex(fieldVertex); + + if (fieldVertex.oneOfProto() != null) { + // Handle oneOf + addOneOf(messageVertex, fieldVertex); + } else { + // Add schema to field edge + linkMessageToField(messageVertex, fieldVertex); + } + }); + })); + + // attach field paths to root message + Map<String, List<ProtobufField>> fieldMap = + vertexSet().stream() + .filter( + v -> + v instanceof ProtobufField + && incomingEdgesOf(v).stream() + .noneMatch(e -> e.getEdgeSource() instanceof ProtobufOneOfField)) + .map(v -> (ProtobufField) v) + .collect(Collectors.groupingBy(ProtobufField::parentMessageName)); + + edgeSet().stream().filter(FieldTypeEdge::isMessageType).collect(Collectors.toSet()).stream() + .map(e -> (ProtobufField) e.getEdgeTarget()) + .forEach(f -> attachNestedMessageFields(fieldMap, f)); + } + + private void addEnum( + DescriptorProtos.FileDescriptorProto fileProto, + DescriptorProtos.DescriptorProto messageProto) { + messageProto + .getEnumTypeList() + .forEach( + enumProto -> { + ProtobufEnum enumVertex = + ProtobufEnum.enumBuilder() + .fileProto(fileProto) + .messageProto(messageProto) + .enumProto(enumProto) + .build(); + addVertex(enumVertex); + }); + } - public ProtobufMessage findMessage(String messageName) throws IllegalArgumentException { - return (ProtobufMessage) vertexSet().stream() - .filter(v -> v instanceof ProtobufMessage && messageName.equals(v.fullName())) - .findFirst().orElseThrow(() -> new IllegalArgumentException(String.format("Cannot find protobuf Message %s", messageName))); + private void addNestedMessage( + DescriptorProtos.FileDescriptorProto fileProto, + DescriptorProtos.DescriptorProto messageProto) { + if (messageProto.getNestedTypeCount() < 1) { + return; } - private void buildProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) { - // Attach non-nested fields to messages - 
fileSet.getFileList().forEach(fileProto -> - fileProto.getMessageTypeList().forEach(messageProto -> { - - ProtobufMessage messageVertex = ProtobufMessage.builder() - .fileProto(fileProto) - .messageProto(messageProto) - .build(); - addVertex(messageVertex); - - // Handle nested fields - addNestedMessage(fileProto, messageProto); - - // Add enum types - addEnum(fileProto, messageProto); - - // handle normal fields and oneofs - messageProto.getFieldList().forEach(fieldProto -> { - ProtobufField fieldVertex = ProtobufField.builder() - .protobufMessage(messageVertex) - .fieldProto(fieldProto) - .isNestedType(false) + messageProto + .getNestedTypeList() + .forEach( + nestedMessageProto -> { + ProtobufMessage nestedMessageVertex = + ProtobufMessage.builder() + .fileProto(fileProto) + .parentMessageProto(messageProto) + .messageProto(nestedMessageProto) + .build(); + addVertex(nestedMessageVertex); + + nestedMessageProto + .getFieldList() + .forEach( + nestedFieldProto -> { + ProtobufField field = + ProtobufField.builder() + .protobufMessage(nestedMessageVertex) + .fieldProto(nestedFieldProto) + .isNestedType(true) .build(); // Add field vertex - addVertex(fieldVertex); - - if (fieldVertex.oneOfProto() != null) { - // Handle oneOf - addOneOf(messageVertex, fieldVertex); - } else { - // Add schema to field edge - linkMessageToField(messageVertex, fieldVertex); + addVertex(field); + + // Add schema to field edge + if (!field.isMessage()) { + FieldTypeEdge.builder() + .edgeSource(nestedMessageVertex) + .edgeTarget(field) + .type(field.fieldPathType()) + .build() + .inGraph(this); } - }); - }) - ); - - // attach field paths to root message - Map<String, List<ProtobufField>> fieldMap = vertexSet().stream() - .filter(v -> v instanceof ProtobufField && incomingEdgesOf(v).stream().noneMatch(e -> e.getEdgeSource() instanceof ProtobufOneOfField)) - .map(v -> (ProtobufField) v) - .collect(Collectors.groupingBy(ProtobufField::parentMessageName)); - - edgeSet().stream().filter(FieldTypeEdge::isMessageType).collect(Collectors.toSet()) - .stream().map(e -> (ProtobufField) e.getEdgeTarget()) - .forEach(f -> attachNestedMessageFields(fieldMap, f)); - } - + }); - private void addEnum(DescriptorProtos.FileDescriptorProto fileProto, DescriptorProtos.DescriptorProto messageProto) { - messageProto.getEnumTypeList().forEach(enumProto -> { - ProtobufEnum enumVertex = ProtobufEnum.enumBuilder() - .fileProto(fileProto) - .messageProto(messageProto) - .enumProto(enumProto) - .build(); - addVertex(enumVertex); - }); - } - - private void addNestedMessage(DescriptorProtos.FileDescriptorProto fileProto, DescriptorProtos.DescriptorProto messageProto) { - if (messageProto.getNestedTypeCount() < 1) { - return; - } - - messageProto.getNestedTypeList().forEach(nestedMessageProto -> { - ProtobufMessage nestedMessageVertex = ProtobufMessage.builder() - .fileProto(fileProto) - .parentMessageProto(messageProto) - .messageProto(nestedMessageProto) - .build(); - addVertex(nestedMessageVertex); - - nestedMessageProto.getFieldList().forEach(nestedFieldProto -> { - ProtobufField field = ProtobufField.builder() - .protobufMessage(nestedMessageVertex) - .fieldProto(nestedFieldProto) - .isNestedType(true) - .build(); - - // Add field vertex - addVertex(field); - - // Add schema to field edge - if (!field.isMessage()) { - FieldTypeEdge.builder() - .edgeSource(nestedMessageVertex) - .edgeTarget(field) - .type(field.fieldPathType()) - .build().inGraph(this); - } + addNestedMessage(fileProto, nestedMessageProto); + }); + } + + private 
Stream<ProtobufField> addOneOf(ProtobufMessage messageVertex, ProtobufField fieldVertex) { + // Handle oneOf + ProtobufField oneOfVertex = + ProtobufOneOfField.oneOfBuilder() + .protobufMessage(messageVertex) + .fieldProto(fieldVertex.getFieldProto()) + .build(); + addVertex(oneOfVertex); + + FieldTypeEdge.builder() + .edgeSource(messageVertex) + .edgeTarget(oneOfVertex) + .type(oneOfVertex.fieldPathType()) + .build() + .inGraph(this); + + // Add oneOf field to field edge + FieldTypeEdge.builder() + .edgeSource(oneOfVertex) + .edgeTarget(fieldVertex) + .type(fieldVertex.fieldPathType()) + .isMessageType(fieldVertex.isMessage()) + .build() + .inGraph(this); + + return Stream.of(oneOfVertex); + } + + private Stream<ProtobufField> linkMessageToField( + ProtobufMessage messageVertex, ProtobufField fieldVertex) { + FieldTypeEdge.builder() + .edgeSource(messageVertex) + .edgeTarget(fieldVertex) + .type(fieldVertex.fieldPathType()) + .isMessageType(fieldVertex.isMessage()) + .build() + .inGraph(this); + + return Stream.of(fieldVertex); + } + + private void attachNestedMessageFields( + Map<String, List<ProtobufField>> fieldMap, ProtobufField messageField) { + fieldMap + .getOrDefault(messageField.nativeType(), List.of()) + .forEach( + target -> { + FieldTypeEdge.builder() + .edgeSource(messageField) + .edgeTarget(target) + .type(target.fieldPathType()) + .isMessageType(target.isMessage()) + .build() + .inGraph(this); + }); + } + + private static final Set<String> GOOGLE_WRAPPERS = + Set.of("google/protobuf/wrappers.proto", "google/protobuf/timestamp.proto"); + + private void flattenGoogleWrapped() { + HashSet<ProtobufElement> removeVertices = new HashSet<>(); + HashSet<FieldTypeEdge> removeEdges = new HashSet<>(); + HashSet<ProtobufElement> addVertices = new HashSet<>(); + HashSet<FieldTypeEdge> addEdges = new HashSet<>(); + + Set<ProtobufElement> googleWrapped = + vertexSet().stream() + .filter( + v -> + v instanceof ProtobufMessage + && GOOGLE_WRAPPERS.contains(v.fileProto().getName())) + .collect(Collectors.toSet()); + removeVertices.addAll(googleWrapped); + + Set<ProtobufField> wrappedPrimitiveFields = + googleWrapped.stream() + .flatMap(wrapped -> outgoingEdgesOf(wrapped).stream()) + .map(FieldTypeEdge::getEdgeTarget) + .map(ProtobufField.class::cast) + .collect(Collectors.toSet()); + removeVertices.addAll(wrappedPrimitiveFields); + + wrappedPrimitiveFields.stream() + .filter(fld -> fld.getNumber() == 1) + .forEach( + primitiveField -> { + // remove incoming old edges to primitive + removeEdges.addAll(incomingEdgesOf(primitiveField)); + + Set<ProtobufField> originatingFields = + incomingEdgesOf(primitiveField).stream() + .map(FieldTypeEdge::getEdgeSource) + .filter(edgeSource -> !googleWrapped.contains(edgeSource)) + .map(ProtobufField.class::cast) + .collect(Collectors.toSet()); + removeVertices.addAll(originatingFields); + + originatingFields.forEach( + originatingField -> { + // Replacement Field + ProtobufElement fieldVertex = + originatingField.toBuilder() + .fieldPathType(primitiveField.fieldPathType()) + .schemaFieldDataType(primitiveField.schemaFieldDataType()) + .isMessageType(false) + .build(); + addVertices.add(fieldVertex); + + // link source field parent directly to primitive + Set<FieldTypeEdge> incomingEdges = incomingEdgesOf(originatingField); + removeEdges.addAll(incomingEdgesOf(originatingField)); + addEdges.addAll( + incomingEdges.stream() + .map( + oldEdge -> + // Replace old edge with new edge to primitive + FieldTypeEdge.builder() + .edgeSource(oldEdge.getEdgeSource()) + 
.edgeTarget(fieldVertex) + .type(primitiveField.fieldPathType()) + .isMessageType(false) // known primitive + .build()) + .collect(Collectors.toSet())); + }); + + // remove old fields + removeVertices.addAll(originatingFields); }); - addNestedMessage(fileProto, nestedMessageProto); - }); - } - - private Stream<ProtobufField> addOneOf(ProtobufMessage messageVertex, ProtobufField fieldVertex) { - // Handle oneOf - ProtobufField oneOfVertex = ProtobufOneOfField.oneOfBuilder() - .protobufMessage(messageVertex) - .fieldProto(fieldVertex.getFieldProto()) - .build(); - addVertex(oneOfVertex); - - FieldTypeEdge.builder() - .edgeSource(messageVertex) - .edgeTarget(oneOfVertex) - .type(oneOfVertex.fieldPathType()) - .build().inGraph(this); - - // Add oneOf field to field edge - FieldTypeEdge.builder() - .edgeSource(oneOfVertex) - .edgeTarget(fieldVertex) - .type(fieldVertex.fieldPathType()) - .isMessageType(fieldVertex.isMessage()) - .build().inGraph(this); - - return Stream.of(oneOfVertex); + // Remove edges + removeAllEdges(removeEdges); + // Remove vertices + removeAllVertices(removeVertices); + // Add vertices + addVertices.forEach(this::addVertex); + // Add edges + addEdges.forEach(e -> e.inGraph(this)); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - private Stream<ProtobufField> linkMessageToField(ProtobufMessage messageVertex, ProtobufField fieldVertex) { - FieldTypeEdge.builder() - .edgeSource(messageVertex) - .edgeTarget(fieldVertex) - .type(fieldVertex.fieldPathType()) - .isMessageType(fieldVertex.isMessage()) - .build().inGraph(this); - - return Stream.of(fieldVertex); + if (o == null || getClass() != o.getClass()) { + return false; } - - private void attachNestedMessageFields(Map<String, List<ProtobufField>> fieldMap, ProtobufField messageField) { - fieldMap.getOrDefault(messageField.nativeType(), List.of()).forEach(target -> { - FieldTypeEdge.builder() - .edgeSource(messageField) - .edgeTarget(target) - .type(target.fieldPathType()) - .isMessageType(target.isMessage()) - .build().inGraph(this); - }); + if (!super.equals(o)) { + return false; } - private static final Set<String> GOOGLE_WRAPPERS = Set.of("google/protobuf/wrappers.proto", "google/protobuf/timestamp.proto"); - private void flattenGoogleWrapped() { - HashSet<ProtobufElement> removeVertices = new HashSet<>(); - HashSet<FieldTypeEdge> removeEdges = new HashSet<>(); - HashSet<ProtobufElement> addVertices = new HashSet<>(); - HashSet<FieldTypeEdge> addEdges = new HashSet<>(); + ProtobufGraph that = (ProtobufGraph) o; - Set<ProtobufElement> googleWrapped = vertexSet().stream() - .filter(v -> v instanceof ProtobufMessage - && GOOGLE_WRAPPERS.contains(v.fileProto().getName())) - .collect(Collectors.toSet()); - removeVertices.addAll(googleWrapped); - - Set<ProtobufField> wrappedPrimitiveFields = googleWrapped.stream() - .flatMap(wrapped -> outgoingEdgesOf(wrapped).stream()) - .map(FieldTypeEdge::getEdgeTarget) - .map(ProtobufField.class::cast) - .collect(Collectors.toSet()); - removeVertices.addAll(wrappedPrimitiveFields); - - wrappedPrimitiveFields.stream().filter(fld -> fld.getNumber() == 1).forEach(primitiveField -> { - // remove incoming old edges to primitive - removeEdges.addAll(incomingEdgesOf(primitiveField)); - - Set<ProtobufField> originatingFields = incomingEdgesOf(primitiveField).stream() - .map(FieldTypeEdge::getEdgeSource) - .filter(edgeSource -> !googleWrapped.contains(edgeSource)) - .map(ProtobufField.class::cast) - .collect(Collectors.toSet()); - 
removeVertices.addAll(originatingFields); - - originatingFields.forEach(originatingField -> { - // Replacement Field - ProtobufElement fieldVertex = originatingField.toBuilder() - .fieldPathType(primitiveField.fieldPathType()) - .schemaFieldDataType(primitiveField.schemaFieldDataType()) - .isMessageType(false) - .build(); - addVertices.add(fieldVertex); - - // link source field parent directly to primitive - Set<FieldTypeEdge> incomingEdges = incomingEdgesOf(originatingField); - removeEdges.addAll(incomingEdgesOf(originatingField)); - addEdges.addAll(incomingEdges.stream().map(oldEdge -> - // Replace old edge with new edge to primitive - FieldTypeEdge.builder() - .edgeSource(oldEdge.getEdgeSource()) - .edgeTarget(fieldVertex) - .type(primitiveField.fieldPathType()) - .isMessageType(false) // known primitive - .build()).collect(Collectors.toSet())); - }); - - // remove old fields - removeVertices.addAll(originatingFields); - }); - - // Remove edges - removeAllEdges(removeEdges); - // Remove vertices - removeAllVertices(removeVertices); - // Add vertices - addVertices.forEach(this::addVertex); - // Add edges - addEdges.forEach(e -> e.inGraph(this)); - } + return rootProtobufMessage.equals(that.rootProtobufMessage); + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - if (!super.equals(o)) { - return false; - } - - ProtobufGraph that = (ProtobufGraph) o; - - return rootProtobufMessage.equals(that.rootProtobufMessage); - } + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + rootProtobufMessage.hashCode(); + return result; + } - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + rootProtobufMessage.hashCode(); - return result; - } - - public String getHash() { - return String.valueOf(super.hashCode()); - } + public String getHash() { + return String.valueOf(super.hashCode()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java index 6b46b11231623..62f02a47a6c86 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java @@ -4,119 +4,117 @@ import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.MapType; import com.linkedin.schema.RecordType; -import lombok.AllArgsConstructor; -import lombok.Builder; -import java.util.Arrays; -import java.util.stream.Stream; import com.linkedin.schema.SchemaFieldDataType; - import datahub.protobuf.ProtobufUtils; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - - +import java.util.Arrays; +import java.util.stream.Stream; +import lombok.AllArgsConstructor; +import lombok.Builder; @Builder @AllArgsConstructor public class ProtobufMessage implements ProtobufElement { - private final DescriptorProto messageProto; - private final DescriptorProto parentMessageProto; - private final FileDescriptorProto fileProto; - - @Override - public String name() { - return messageProto.getName(); - } - - @Override - public String fullName() { - if (parentMessageProto != null) { - return String.join(".", fileProto.getPackage(), parentMessageProto.getName(), name()); - } - 
return String.join(".", fileProto.getPackage(), name()); + private final DescriptorProto messageProto; + private final DescriptorProto parentMessageProto; + private final FileDescriptorProto fileProto; + + @Override + public String name() { + return messageProto.getName(); + } + + @Override + public String fullName() { + if (parentMessageProto != null) { + return String.join(".", fileProto.getPackage(), parentMessageProto.getName(), name()); } - - @Override - public String nativeType() { - return fullName(); - } - - @Override - public String fieldPathType() { - return String.format("[type=%s]", nativeType().replace(".", "_")); - } - - @Override - public FileDescriptorProto fileProto() { - return fileProto; - } - - @Override - public DescriptorProto messageProto() { - return messageProto; + return String.join(".", fileProto.getPackage(), name()); + } + + @Override + public String nativeType() { + return fullName(); + } + + @Override + public String fieldPathType() { + return String.format("[type=%s]", nativeType().replace(".", "_")); + } + + @Override + public FileDescriptorProto fileProto() { + return fileProto; + } + + @Override + public DescriptorProto messageProto() { + return messageProto; + } + + public SchemaFieldDataType schemaFieldDataType() { + if (parentMessageProto != null && messageProto.getName().equals("MapFieldEntry")) { + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())); } - - public SchemaFieldDataType schemaFieldDataType() { - if (parentMessageProto != null && messageProto.getName().equals("MapFieldEntry")) { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())); - } - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())); + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())); + } + + public int majorVersion() { + return Integer.parseInt( + Arrays.stream(fileProto.getName().split("/")) + .filter(p -> p.matches("^v[0-9]+$")) + .findFirst() + .map(p -> p.replace("v", "")) + .orElse("1")); + } + + @Override + public String comment() { + return messageLocations().map(ProtobufUtils::collapseLocationComments).findFirst().orElse(""); + } + + @Override + public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { + return visitor.visitMessage(this, context); + } + + @Override + public String toString() { + return String.format("ProtobufMessage[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - public int majorVersion() { - return Integer.parseInt(Arrays.stream(fileProto.getName().split("/")) - .filter(p -> p.matches("^v[0-9]+$")) - .findFirst() - .map(p -> p.replace("v", "")) - .orElse("1")); + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public String comment() { - return messageLocations() - .map(ProtobufUtils::collapseLocationComments) - .findFirst().orElse(""); - } + ProtobufMessage that = (ProtobufMessage) o; - @Override - public <T> Stream<T> accept(ProtobufModelVisitor<T> visitor, VisitContext context) { - return visitor.visitMessage(this, context); + if (!fullName().equals(that.fullName())) { + return false; } - - @Override - public String toString() { - return String.format("ProtobufMessage[%s]", fullName()); + if (!messageProto.equals(that.messageProto)) { + return false; } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != 
o.getClass()) { - return false; - } - - ProtobufMessage that = (ProtobufMessage) o; - - if (!fullName().equals(that.fullName())) { - return false; - } - if (!messageProto.equals(that.messageProto)) { - return false; - } - if (parentMessageProto != null ? !parentMessageProto.equals(that.parentMessageProto) : that.parentMessageProto != null) { - return false; - } - return fileProto.equals(that.fileProto); - } - - @Override - public int hashCode() { - int result = messageProto.hashCode(); - result = 31 * result + (parentMessageProto != null ? parentMessageProto.hashCode() : 0); - result = 31 * result + fileProto.hashCode(); - result = 31 * result + fullName().hashCode(); - return result; + if (parentMessageProto != null + ? !parentMessageProto.equals(that.parentMessageProto) + : that.parentMessageProto != null) { + return false; } + return fileProto.equals(that.fileProto); + } + + @Override + public int hashCode() { + int result = messageProto.hashCode(); + result = 31 * result + (parentMessageProto != null ? parentMessageProto.hashCode() : 0); + result = 31 * result + fileProto.hashCode(); + result = 31 * result + fullName().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java index 514d84b1cff2a..08c157f4c9c71 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java @@ -5,61 +5,60 @@ import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.UnionType; import datahub.protobuf.ProtobufUtils; +import java.util.stream.Collectors; import lombok.Builder; import lombok.Getter; -import java.util.stream.Collectors; - - @Getter public class ProtobufOneOfField extends ProtobufField { - public static final String NATIVE_TYPE = "oneof"; - public static final String FIELD_PATH_TYPE = "[type=union]"; + public static final String NATIVE_TYPE = "oneof"; + public static final String FIELD_PATH_TYPE = "[type=union]"; - @Builder(builderMethodName = "oneOfBuilder") - public ProtobufOneOfField(ProtobufMessage protobufMessage, - FieldDescriptorProto fieldProto) { - super(protobufMessage, fieldProto, null, null, null, null, null); - } + @Builder(builderMethodName = "oneOfBuilder") + public ProtobufOneOfField(ProtobufMessage protobufMessage, FieldDescriptorProto fieldProto) { + super(protobufMessage, fieldProto, null, null, null, null, null); + } - @Override - public String name() { - return oneOfProto().getName(); - } + @Override + public String name() { + return oneOfProto().getName(); + } - @Override - public String fieldPathType() { - return FIELD_PATH_TYPE; - } + @Override + public String fieldPathType() { + return FIELD_PATH_TYPE; + } - @Override - public String nativeType() { - return NATIVE_TYPE; - } + @Override + public String nativeType() { + return NATIVE_TYPE; + } - @Override - public boolean isMessage() { - return false; - } + @Override + public boolean isMessage() { + return false; + } - @Override - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())); - } + @Override + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return new 
SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())); + } - @Override - public String comment() { - return messageLocations() - .filter(loc -> loc.getPathCount() > 3 - && loc.getPath(2) == DescriptorProto.ONEOF_DECL_FIELD_NUMBER - && oneOfProto() == messageProto().getOneofDecl(loc.getPath(3))) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); - } + @Override + public String comment() { + return messageLocations() + .filter( + loc -> + loc.getPathCount() > 3 + && loc.getPath(2) == DescriptorProto.ONEOF_DECL_FIELD_NUMBER + && oneOfProto() == messageProto().getOneofDecl(loc.getPath(3))) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } - @Override - public String toString() { - return String.format("ProtobufOneOf[%s]", fullName()); - } + @Override + public String toString() { + return String.format("ProtobufOneOf[%s]", fullName()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java index c14217fb9add2..085516a025e0c 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java @@ -9,7 +9,6 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.tag.TagProperties; import com.linkedin.util.Pair; - import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -21,140 +20,200 @@ public class ProtobufExtensionUtil { - private ProtobufExtensionUtil() { } + private ProtobufExtensionUtil() {} - public static DescriptorProtos.FieldDescriptorProto extendProto(DescriptorProtos.FieldDescriptorProto proto, ExtensionRegistry registry) { - try { - return DescriptorProtos.FieldDescriptorProto.parseFrom(proto.toByteArray(), registry); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } + public static DescriptorProtos.FieldDescriptorProto extendProto( + DescriptorProtos.FieldDescriptorProto proto, ExtensionRegistry registry) { + try { + return DescriptorProtos.FieldDescriptorProto.parseFrom(proto.toByteArray(), registry); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); } + } - public enum DataHubMetadataType { - PROPERTY, TAG, TAG_LIST, TERM, OWNER, DOMAIN, DEPRECATION; + public enum DataHubMetadataType { + PROPERTY, + TAG, + TAG_LIST, + TERM, + OWNER, + DOMAIN, + DEPRECATION; - public static final String PROTOBUF_TYPE = "DataHubMetadataType"; - } + public static final String PROTOBUF_TYPE = "DataHubMetadataType"; + } - public static List<Pair<Descriptors.FieldDescriptor, Object>> filterByDataHubType(List<Pair<Descriptors.FieldDescriptor, Object>> options, - ExtensionRegistry registry, DataHubMetadataType filterType) { - return options.stream() - .filter(entry -> { - DescriptorProtos.FieldDescriptorProto extendedProtoOptions = extendProto(entry.getKey().toProto(), registry); - Optional<DataHubMetadataType> dataHubMetadataType = extendedProtoOptions.getOptions().getAllFields().entrySet().stream() - .filter(extEntry -> extEntry.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.ENUM) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<Descriptors.EnumValueDescriptor>) 
extEntry.getValue()).stream(); - } else { - return Stream.of((Descriptors.EnumValueDescriptor) extEntry.getValue()); - } - }) - .filter(enumDesc -> enumDesc.getType().getFullName().endsWith("." + DataHubMetadataType.PROTOBUF_TYPE)) - .map(enumDesc -> DataHubMetadataType.valueOf(enumDesc.getName())) - .filter(dhmt -> dhmt.equals(filterType)) - .findFirst(); + public static List<Pair<Descriptors.FieldDescriptor, Object>> filterByDataHubType( + List<Pair<Descriptors.FieldDescriptor, Object>> options, + ExtensionRegistry registry, + DataHubMetadataType filterType) { + return options.stream() + .filter( + entry -> { + DescriptorProtos.FieldDescriptorProto extendedProtoOptions = + extendProto(entry.getKey().toProto(), registry); + Optional<DataHubMetadataType> dataHubMetadataType = + extendedProtoOptions.getOptions().getAllFields().entrySet().stream() + .filter( + extEntry -> + extEntry.getKey().getJavaType() + == Descriptors.FieldDescriptor.JavaType.ENUM) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<Descriptors.EnumValueDescriptor>) + extEntry.getValue()) + .stream(); + } else { + return Stream.of( + (Descriptors.EnumValueDescriptor) extEntry.getValue()); + } + }) + .filter( + enumDesc -> + enumDesc + .getType() + .getFullName() + .endsWith("." + DataHubMetadataType.PROTOBUF_TYPE)) + .map(enumDesc -> DataHubMetadataType.valueOf(enumDesc.getName())) + .filter(dhmt -> dhmt.equals(filterType)) + .findFirst(); - return filterType.equals(dataHubMetadataType.orElse(DataHubMetadataType.PROPERTY)); - }).collect(Collectors.toList()); - } + return filterType.equals(dataHubMetadataType.orElse(DataHubMetadataType.PROPERTY)); + }) + .collect(Collectors.toList()); + } - public static Stream<Map.Entry<String, String>> getProperties(Descriptors.FieldDescriptor field, DescriptorProtos.DescriptorProto value) { - return value.getUnknownFields().asMap().entrySet().stream().map(unknown -> { - Descriptors.FieldDescriptor fieldDesc = field.getMessageType().findFieldByNumber(unknown.getKey()); - String fieldValue = unknown.getValue().getLengthDelimitedList().stream().map(ByteString::toStringUtf8).collect(Collectors.joining("")); - return Map.entry(String.join(".", field.getFullName(), fieldDesc.getName()), fieldValue); - }); - } + public static Stream<Map.Entry<String, String>> getProperties( + Descriptors.FieldDescriptor field, DescriptorProtos.DescriptorProto value) { + return value.getUnknownFields().asMap().entrySet().stream() + .map( + unknown -> { + Descriptors.FieldDescriptor fieldDesc = + field.getMessageType().findFieldByNumber(unknown.getKey()); + String fieldValue = + unknown.getValue().getLengthDelimitedList().stream() + .map(ByteString::toStringUtf8) + .collect(Collectors.joining("")); + return Map.entry( + String.join(".", field.getFullName(), fieldDesc.getName()), fieldValue); + }); + } - public static Stream<TagProperties> extractTagPropertiesFromOptions(List<Pair<Descriptors.FieldDescriptor, Object>> options, ExtensionRegistry registry) { - Stream<TagProperties> tags = filterByDataHubType(options, registry, DataHubMetadataType.TAG).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<?>) extEntry.getValue()).stream().map(v -> Pair.of(extEntry.getKey(), v)); - } else { - return Stream.of(extEntry); - } + public static Stream<TagProperties> extractTagPropertiesFromOptions( + List<Pair<Descriptors.FieldDescriptor, Object>> options, ExtensionRegistry registry) { + 
Stream<TagProperties> tags = + filterByDataHubType(options, registry, DataHubMetadataType.TAG).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<?>) extEntry.getValue()) + .stream().map(v -> Pair.of(extEntry.getKey(), v)); + } else { + return Stream.of(extEntry); + } }) - .map(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return new TagProperties() - .setName(String.format("%s.%s", entry.getKey().getName(), entry.getValue().toString())) - .setDescription(entry.getKey().getFullName()); - case BOOLEAN: - if ((boolean) entry.getValue()) { - return new TagProperties() - .setName(entry.getKey().getName()) - .setDescription(String.format("%s is true.", entry.getKey().getFullName())); - } - return null; - case ENUM: - Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) entry.getValue(); - String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); - String others = entry.getKey().getEnumType().getValues().stream() - .map(Descriptors.EnumValueDescriptor::getName).collect(Collectors.joining(", ")); - return new TagProperties() - .setName(name) - .setDescription(String.format("Enum %s of {%s}", name, others)); - default: - return null; - } - }).filter(Objects::nonNull); - - Stream<TagProperties> tagListTags = filterByDataHubType(options, registry, DataHubMetadataType.TAG_LIST).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return Arrays.stream(entry.getValue().toString().split(",")) - .map(t -> new TagProperties() - .setName(t.trim()) - .setDescription(entry.getKey().getFullName())); - default: - return Stream.empty(); - } - }).filter(Objects::nonNull); + .map( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return new TagProperties() + .setName( + String.format( + "%s.%s", entry.getKey().getName(), entry.getValue().toString())) + .setDescription(entry.getKey().getFullName()); + case BOOLEAN: + if ((boolean) entry.getValue()) { + return new TagProperties() + .setName(entry.getKey().getName()) + .setDescription( + String.format("%s is true.", entry.getKey().getFullName())); + } + return null; + case ENUM: + Descriptors.EnumValueDescriptor desc = + (Descriptors.EnumValueDescriptor) entry.getValue(); + String name = + String.format("%s.%s", desc.getType().getName(), desc.getName()); + String others = + entry.getKey().getEnumType().getValues().stream() + .map(Descriptors.EnumValueDescriptor::getName) + .collect(Collectors.joining(", ")); + return new TagProperties() + .setName(name) + .setDescription(String.format("Enum %s of {%s}", name, others)); + default: + return null; + } + }) + .filter(Objects::nonNull); - Stream<TagProperties> deprecationTag; - if (options.stream().anyMatch(opt -> opt.getKey().getFullName().endsWith(".deprecated") - && opt.getKey().getFullName().startsWith("google.protobuf.") - && opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.BOOLEAN - && (Boolean) opt.getValue())) { - deprecationTag = Stream.of(new TagProperties().setName("deprecated").setColorHex("#FF0000")); - } else { - deprecationTag = Stream.empty(); - } + Stream<TagProperties> tagListTags = + filterByDataHubType(options, registry, DataHubMetadataType.TAG_LIST).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return 
Arrays.stream(entry.getValue().toString().split(",")) + .map( + t -> + new TagProperties() + .setName(t.trim()) + .setDescription(entry.getKey().getFullName())); + default: + return Stream.empty(); + } + }) + .filter(Objects::nonNull); - return Stream.of(tags, tagListTags, deprecationTag).reduce(Stream::concat).orElse(Stream.empty()); + Stream<TagProperties> deprecationTag; + if (options.stream() + .anyMatch( + opt -> + opt.getKey().getFullName().endsWith(".deprecated") + && opt.getKey().getFullName().startsWith("google.protobuf.") + && opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.BOOLEAN + && (Boolean) opt.getValue())) { + deprecationTag = Stream.of(new TagProperties().setName("deprecated").setColorHex("#FF0000")); + } else { + deprecationTag = Stream.empty(); } - public static Stream<GlossaryTermAssociation> extractTermAssociationsFromOptions(List<Pair<Descriptors.FieldDescriptor, Object>> fieldOptions, - ExtensionRegistry registry) { - return filterByDataHubType(fieldOptions, registry, DataHubMetadataType.TERM).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<?>) extEntry.getValue()).stream().map(v -> Pair.of(extEntry.getKey(), v)); - } else { - return Stream.of(extEntry); - } - }) - .map(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return new GlossaryTermAssociation() - .setUrn(new GlossaryTermUrn(entry.getValue().toString())); - case ENUM: - Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) entry.getValue(); - String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); - return new GlossaryTermAssociation() - .setUrn(new GlossaryTermUrn(name)); - default: - return null; - } - }).filter(Objects::nonNull); - } + return Stream.of(tags, tagListTags, deprecationTag) + .reduce(Stream::concat) + .orElse(Stream.empty()); + } + + public static Stream<GlossaryTermAssociation> extractTermAssociationsFromOptions( + List<Pair<Descriptors.FieldDescriptor, Object>> fieldOptions, ExtensionRegistry registry) { + return filterByDataHubType(fieldOptions, registry, DataHubMetadataType.TERM).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<?>) extEntry.getValue()) + .stream().map(v -> Pair.of(extEntry.getKey(), v)); + } else { + return Stream.of(extEntry); + } + }) + .map( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return new GlossaryTermAssociation() + .setUrn(new GlossaryTermUrn(entry.getValue().toString())); + case ENUM: + Descriptors.EnumValueDescriptor desc = + (Descriptors.EnumValueDescriptor) entry.getValue(); + String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); + return new GlossaryTermAssociation().setUrn(new GlossaryTermUrn(name)); + default: + return null; + } + }) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java index b5c630302d946..336de520a96bd 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java @@ -1,24 +1,24 @@ package datahub.protobuf.visitors; - import 
datahub.protobuf.model.ProtobufElement; import datahub.protobuf.model.ProtobufField; import datahub.protobuf.model.ProtobufMessage; - import java.util.stream.Stream; public interface ProtobufModelVisitor<T> { - default Stream<T> visitField(ProtobufField field, VisitContext context) { - return visitElement(field, context); - } - default Stream<T> visitMessage(ProtobufMessage message, VisitContext context) { - return visitElement(message, context); - } + default Stream<T> visitField(ProtobufField field, VisitContext context) { + return visitElement(field, context); + } + + default Stream<T> visitMessage(ProtobufMessage message, VisitContext context) { + return visitElement(message, context); + } + + default Stream<T> visitElement(ProtobufElement element, VisitContext context) { + return Stream.of(); + } - default Stream<T> visitElement(ProtobufElement element, VisitContext context) { - return Stream.of(); - } - default Stream<T> visitGraph(VisitContext context) { - return Stream.of(); - } + default Stream<T> visitGraph(VisitContext context) { + return Stream.of(); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java index 51c92332d98a0..5718b0a8a2ae6 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java @@ -7,55 +7,56 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.model.ProtobufMessage; -import lombok.Builder; -import lombok.Getter; -import org.jgrapht.GraphPath; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; +import lombok.Builder; +import lombok.Getter; +import org.jgrapht.GraphPath; @Builder @Getter public class VisitContext { - public static final String FIELD_PATH_VERSION = "[version=2.0]"; - - private final DatasetUrn datasetUrn; - private final ProtobufGraph graph; - private final AuditStamp auditStamp; - - public ProtobufMessage root() { - return graph.root(); - } - - public Stream<GraphPath<ProtobufElement, FieldTypeEdge>> streamAllPaths(ProtobufField field) { - return graph.getAllPaths(root(), field).stream(); - } - - public String getFieldPath(GraphPath<ProtobufElement, FieldTypeEdge> path) { - String fieldPathString = path.getEdgeList().stream() - .flatMap(e -> Stream.of(e.getType(), e.getEdgeTarget().name())) - .collect(Collectors.joining(".")); - return String.join(".", FIELD_PATH_VERSION, root().fieldPathType(), fieldPathString); - } - - // This is because order matters for the frontend. 
Both for matching the protobuf field order - // and also the nested struct's fieldPaths - public Double calculateSortOrder(GraphPath<ProtobufElement, FieldTypeEdge> path, ProtobufField field) { - List<Integer> weights = path.getEdgeList().stream() - .map(FieldTypeEdge::getEdgeTarget) - .filter(f -> f instanceof ProtobufField) - .map(f -> ((ProtobufField) f).sortWeight()) - .collect(Collectors.toList()); - - return IntStream.range(0, weights.size()) - .mapToDouble(i -> weights.get(i) * (1.0 / (i + 1))) - .reduce(Double::sum) - .orElse(0); - } - - public static class VisitContextBuilder { - - }; + public static final String FIELD_PATH_VERSION = "[version=2.0]"; + + private final DatasetUrn datasetUrn; + private final ProtobufGraph graph; + private final AuditStamp auditStamp; + + public ProtobufMessage root() { + return graph.root(); + } + + public Stream<GraphPath<ProtobufElement, FieldTypeEdge>> streamAllPaths(ProtobufField field) { + return graph.getAllPaths(root(), field).stream(); + } + + public String getFieldPath(GraphPath<ProtobufElement, FieldTypeEdge> path) { + String fieldPathString = + path.getEdgeList().stream() + .flatMap(e -> Stream.of(e.getType(), e.getEdgeTarget().name())) + .collect(Collectors.joining(".")); + return String.join(".", FIELD_PATH_VERSION, root().fieldPathType(), fieldPathString); + } + + // This is because order matters for the frontend. Both for matching the protobuf field order + // and also the nested struct's fieldPaths + public Double calculateSortOrder( + GraphPath<ProtobufElement, FieldTypeEdge> path, ProtobufField field) { + List<Integer> weights = + path.getEdgeList().stream() + .map(FieldTypeEdge::getEdgeTarget) + .filter(f -> f instanceof ProtobufField) + .map(f -> ((ProtobufField) f).sortWeight()) + .collect(Collectors.toList()); + + return IntStream.range(0, weights.size()) + .mapToDouble(i -> weights.get(i) * (1.0 / (i + 1))) + .reduce(Double::sum) + .orElse(0); + } + + public static class VisitContextBuilder {} + ; } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java index 80dc05d33e17d..1b03e13705910 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java @@ -21,13 +21,10 @@ import com.linkedin.dataset.DatasetProperties; import com.linkedin.domain.Domains; import com.linkedin.events.metadata.ChangeType; +import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import datahub.event.MetadataChangeProposalWrapper; -import lombok.AllArgsConstructor; -import lombok.Builder; - import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -35,76 +32,145 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import lombok.AllArgsConstructor; +import lombok.Builder; @Builder @AllArgsConstructor -public class DatasetVisitor implements ProtobufModelVisitor<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> { - @Builder.Default - private final List<ProtobufModelVisitor<InstitutionalMemoryMetadata>> institutionalMemoryMetadataVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<DatasetProperties>> datasetPropertyVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<TagAssociation>> tagAssociationVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<GlossaryTermAssociation>> termAssociationVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<Owner>> ownershipVisitors = List.of(); - @Builder.Default - private final List<ProtobufModelVisitor<com.linkedin.common.urn.Urn>> domainVisitors = List.of(); - @Builder.Default - private final String protocBase64 = ""; - @Builder.Default - private final ProtobufModelVisitor<String> descriptionVisitor = new DescriptionVisitor(); - @Builder.Default - private final ProtobufModelVisitor<Deprecation> deprecationVisitor = new DeprecationVisitor(); +public class DatasetVisitor + implements ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>> { + @Builder.Default + private final List<ProtobufModelVisitor<InstitutionalMemoryMetadata>> + institutionalMemoryMetadataVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<DatasetProperties>> datasetPropertyVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<TagAssociation>> tagAssociationVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<GlossaryTermAssociation>> termAssociationVisitors = + List.of(); + + @Builder.Default private final List<ProtobufModelVisitor<Owner>> ownershipVisitors = List.of(); + + @Builder.Default + private final List<ProtobufModelVisitor<com.linkedin.common.urn.Urn>> domainVisitors = List.of(); + + @Builder.Default private final String protocBase64 = ""; + + @Builder.Default + private final ProtobufModelVisitor<String> descriptionVisitor = new DescriptionVisitor(); + + @Builder.Default + private final ProtobufModelVisitor<Deprecation> deprecationVisitor = new DeprecationVisitor(); - @Override - public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitGraph(VisitContext context) { - final String datasetUrn = context.getDatasetUrn().toString(); - final ProtobufGraph g = context.getGraph(); + @Override + public Stream<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> visitGraph( + VisitContext context) { + final String datasetUrn = context.getDatasetUrn().toString(); + final ProtobufGraph g = context.getGraph(); - return Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new DatasetProperties() - .setName(context.getDatasetUrn() + return Stream.of( + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new DatasetProperties() + .setName( + context + .getDatasetUrn() .getDatasetNameEntity() - .substring(context.getDatasetUrn() - .getDatasetNameEntity() - .lastIndexOf(".") + 1)) - .setQualifiedName(context.getDatasetUrn().getDatasetNameEntity()) - .setDescription(g.accept(context, List.of(descriptionVisitor)).collect(Collectors.joining("\n"))) - .setCustomProperties(new StringMap( - Stream.concat( - Stream.of(Map.entry("protoc", protocBase64)), - g.accept(context, datasetPropertyVisitors).flatMap(props -> props.getCustomProperties().entrySet().stream())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - )), "datasetProperties"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, new InstitutionalMemory().setElements( + .substring( + context.getDatasetUrn().getDatasetNameEntity().lastIndexOf(".") + + 1)) + .setQualifiedName(context.getDatasetUrn().getDatasetNameEntity()) + .setDescription( + g.accept(context, List.of(descriptionVisitor)) + .collect(Collectors.joining("\n"))) + .setCustomProperties( + new StringMap( + Stream.concat( + Stream.of(Map.entry("protoc", protocBase64)), + g.accept(context, datasetPropertyVisitors) + .flatMap( + props -> + props.getCustomProperties().entrySet().stream())) + .collect( + Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))), + "datasetProperties"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new InstitutionalMemory() + .setElements( new InstitutionalMemoryMetadataArray( - g.accept(context, institutionalMemoryMetadataVisitors) - .map(inst -> inst.setCreateStamp(context.getAuditStamp())) - .collect(Collectors.toMap(InstitutionalMemoryMetadata::getUrl, Function.identity(), - (a1, a2) -> a1, LinkedHashMap::new)) - .values() - )), "institutionalMemory"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new GlobalTags().setTags(new TagAssociationArray( - g.accept(context, tagAssociationVisitors).collect(Collectors.toList()) - )), "globalTags"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - g.accept(context, termAssociationVisitors).collect(Collectors.toList()) - )).setAuditStamp(context.getAuditStamp()), "glossaryTerms"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new Ownership().setOwners(new OwnerArray( - g.accept(context, ownershipVisitors).collect(Collectors.toList()) - )).setLastModified(context.getAuditStamp()), "ownership"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new Domains(new DataMap(Map.of("domains", - new UrnArray(g.accept(context, domainVisitors).collect(Collectors.toList())).data()))), "domains"), - g.accept(context, List.of(deprecationVisitor)).findFirst() - .map(dep -> new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - dep, "deprecation")).orElse(null) 
- ).filter(Objects::nonNull); - } + g.accept(context, institutionalMemoryMetadataVisitors) + .map(inst -> inst.setCreateStamp(context.getAuditStamp())) + .collect( + Collectors.toMap( + InstitutionalMemoryMetadata::getUrl, + Function.identity(), + (a1, a2) -> a1, + LinkedHashMap::new)) + .values())), + "institutionalMemory"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new GlobalTags() + .setTags( + new TagAssociationArray( + g.accept(context, tagAssociationVisitors) + .collect(Collectors.toList()))), + "globalTags"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + g.accept(context, termAssociationVisitors) + .collect(Collectors.toList()))) + .setAuditStamp(context.getAuditStamp()), + "glossaryTerms"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new Ownership() + .setOwners( + new OwnerArray( + g.accept(context, ownershipVisitors).collect(Collectors.toList()))) + .setLastModified(context.getAuditStamp()), + "ownership"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new Domains( + new DataMap( + Map.of( + "domains", + new UrnArray( + g.accept(context, domainVisitors).collect(Collectors.toList())) + .data()))), + "domains"), + g.accept(context, List.of(deprecationVisitor)) + .findFirst() + .map( + dep -> + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + dep, + "deprecation")) + .orElse(null)) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java index 612082e6a521b..46d17205e4219 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java @@ -1,53 +1,60 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.google.protobuf.Descriptors; import com.linkedin.common.Deprecation; import com.linkedin.util.Pair; import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class DeprecationVisitor implements ProtobufModelVisitor<Deprecation> { - @Override - public Stream<Deprecation> visitGraph(VisitContext context) { - if (context.root().messageProto().getOptions().getDeprecated()) { - List<Pair<Descriptors.FieldDescriptor, Object>> deprecationOptions = ProtobufExtensionUtil - .filterByDataHubType(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.DEPRECATION); - - String decommissionNote = deprecationOptions.stream() - .filter(opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.STRING) - .flatMap(opt -> { - if (opt.getKey().isRepeated()) { - return 
((Collection<String>) opt.getValue()).stream(); - } else { - return Stream.of(opt.getValue()); - } - }) - .map(Object::toString) - .collect(Collectors.joining("\n")); - - Optional<Long> decommissionTime = deprecationOptions.stream() - .filter(opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.LONG) - .map(opt -> (Long) opt.getValue()) - .findFirst(); - - return Stream.of(new Deprecation() - .setDeprecated(true) - .setNote(decommissionNote) - .setDecommissionTime(decommissionTime.orElse(0L)) - .setActor(context.getAuditStamp().getActor())); - } else { - return Stream.empty(); - } + @Override + public Stream<Deprecation> visitGraph(VisitContext context) { + if (context.root().messageProto().getOptions().getDeprecated()) { + List<Pair<Descriptors.FieldDescriptor, Object>> deprecationOptions = + ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.DEPRECATION); + + String decommissionNote = + deprecationOptions.stream() + .filter( + opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.STRING) + .flatMap( + opt -> { + if (opt.getKey().isRepeated()) { + return ((Collection<String>) opt.getValue()).stream(); + } else { + return Stream.of(opt.getValue()); + } + }) + .map(Object::toString) + .collect(Collectors.joining("\n")); + + Optional<Long> decommissionTime = + deprecationOptions.stream() + .filter( + opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.LONG) + .map(opt -> (Long) opt.getValue()) + .findFirst(); + + return Stream.of( + new Deprecation() + .setDeprecated(true) + .setNote(decommissionNote) + .setDecommissionTime(decommissionTime.orElse(0L)) + .setActor(context.getAuditStamp().getActor())); + } else { + return Stream.empty(); } + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java index 802c7e0c05408..4bd7dd96d0db9 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java @@ -2,13 +2,12 @@ import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; public class DescriptionVisitor implements ProtobufModelVisitor<String> { - @Override - public Stream<String> visitGraph(VisitContext context) { - return Stream.of(context.root().comment()); - } + @Override + public Stream<String> visitGraph(VisitContext context) { + return Stream.of(context.root().comment()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java index ac9d092c2392e..01908bb8c3b6d 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java @@ -1,23 +1,24 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.urn.Urn; import 
com.linkedin.util.Pair; import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class DomainVisitor implements ProtobufModelVisitor<Urn> { - @Override - public Stream<Urn> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.DOMAIN) - .stream().map(Pair::getValue).map(o -> - Urn.createFromTuple("domain", ((String) o).toLowerCase()) - ); - } + @Override + public Stream<Urn> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.DOMAIN) + .stream() + .map(Pair::getValue) + .map(o -> Urn.createFromTuple("domain", ((String) o).toLowerCase())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java index b6f52fe01c109..c4a29b1b70f61 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java @@ -5,8 +5,6 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - -import javax.annotation.Nullable; import java.util.LinkedList; import java.util.List; import java.util.Optional; @@ -18,123 +16,153 @@ import java.util.regex.Pattern; import java.util.stream.Stream; import java.util.stream.StreamSupport; +import javax.annotation.Nullable; -public class InstitutionalMemoryVisitor implements ProtobufModelVisitor<InstitutionalMemoryMetadata> { - public static final String TEAM_DESC = "Github Team"; - public static final String SLACK_CHAN_DESC = "Slack Channel"; - - private static final Pattern SLACK_CHANNEL_REGEX = Pattern.compile("(?si).*#([a-z0-9-]+).*"); - private static final Pattern LINK_REGEX = Pattern.compile("(?s)(\\b(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|])"); - private final String githubOrganization; - private final Pattern githubTeamRegex; - private final String slackTeamId; - - public InstitutionalMemoryVisitor(@Nullable String slackTeamId, @Nullable String githubOrganization) { - this.slackTeamId = slackTeamId; - this.githubOrganization = githubOrganization; - if (githubOrganization != null) { - this.githubTeamRegex = Pattern.compile(String.format("(?si).*@%s/([a-z-]+).*", githubOrganization)); - } else { - this.githubTeamRegex = null; - } +public class InstitutionalMemoryVisitor + implements ProtobufModelVisitor<InstitutionalMemoryMetadata> { + public static final String TEAM_DESC = "Github Team"; + public static final String SLACK_CHAN_DESC = "Slack Channel"; + + private static final Pattern SLACK_CHANNEL_REGEX = Pattern.compile("(?si).*#([a-z0-9-]+).*"); + private static final Pattern LINK_REGEX = + Pattern.compile( + "(?s)(\\b(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|])"); + private final String 
githubOrganization; + private final Pattern githubTeamRegex; + private final String slackTeamId; + + public InstitutionalMemoryVisitor( + @Nullable String slackTeamId, @Nullable String githubOrganization) { + this.slackTeamId = slackTeamId; + this.githubOrganization = githubOrganization; + if (githubOrganization != null) { + this.githubTeamRegex = + Pattern.compile(String.format("(?si).*@%s/([a-z-]+).*", githubOrganization)); + } else { + this.githubTeamRegex = null; } - - // https://slack.com/app_redirect?channel=fdn-analytics-data-catalog&team=T024F4EL1 - private Optional<Url> slackLink(String text) { - return Optional.ofNullable(slackTeamId).map(teamId -> { - Matcher m = SLACK_CHANNEL_REGEX.matcher(text); - if (m.matches()) { - return new Url(String.format("https://slack.com/app_redirect?channel=%s&team=%s", m.group(1), slackTeamId)); - } else { + } + + // https://slack.com/app_redirect?channel=fdn-analytics-data-catalog&team=T024F4EL1 + private Optional<Url> slackLink(String text) { + return Optional.ofNullable(slackTeamId) + .map( + teamId -> { + Matcher m = SLACK_CHANNEL_REGEX.matcher(text); + if (m.matches()) { + return new Url( + String.format( + "https://slack.com/app_redirect?channel=%s&team=%s", + m.group(1), slackTeamId)); + } else { return null; - } - }); - } - - private Optional<Url> teamLink(String text) { - return Optional.ofNullable(githubTeamRegex).map(regex -> { - Matcher m = regex.matcher(text); - if (m.matches()) { - return new Url(String.format("https://github.com/orgs/%s/teams/%s", githubOrganization, m.group(1))); - } else { + } + }); + } + + private Optional<Url> teamLink(String text) { + return Optional.ofNullable(githubTeamRegex) + .map( + regex -> { + Matcher m = regex.matcher(text); + if (m.matches()) { + return new Url( + String.format( + "https://github.com/orgs/%s/teams/%s", githubOrganization, m.group(1))); + } else { return null; - } - }); - } + } + }); + } - @Override - public Stream<InstitutionalMemoryMetadata> visitGraph(VisitContext context) { - List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); + @Override + public Stream<InstitutionalMemoryMetadata> visitGraph(VisitContext context) { + List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); - teamLink(context.root().comment()).ifPresent(url -> - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() + teamLink(context.root().comment()) + .ifPresent( + url -> + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() .setCreateStamp(context.getAuditStamp()) .setDescription(TEAM_DESC) .setUrl(url))); - - slackLink(context.root().comment()).ifPresent(url -> - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() + slackLink(context.root().comment()) + .ifPresent( + url -> + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() .setCreateStamp(context.getAuditStamp()) .setDescription(SLACK_CHAN_DESC) .setUrl(url))); - final int[] cnt = {0}; - MatcherStream.findMatches(LINK_REGEX, context.root().comment()).forEach(match -> { - cnt[0] += 1; - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() - .setCreateStamp(context.getAuditStamp()) - .setDescription(String.format("%s Reference %d", context.root().name(), cnt[0])) - .setUrl(new Url(match.group(1)))); - }); + final int[] cnt = {0}; + MatcherStream.findMatches(LINK_REGEX, context.root().comment()) + .forEach( + match -> { + cnt[0] += 1; + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() + 
.setCreateStamp(context.getAuditStamp()) + .setDescription( + String.format("%s Reference %d", context.root().name(), cnt[0])) + .setUrl(new Url(match.group(1)))); + }); - return institutionalMemoryMetadata.stream(); - } + return institutionalMemoryMetadata.stream(); + } - @Override - public Stream<InstitutionalMemoryMetadata> visitField(ProtobufField field, VisitContext context) { - List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); + @Override + public Stream<InstitutionalMemoryMetadata> visitField(ProtobufField field, VisitContext context) { + List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>(); - if (field.messageProto().equals(context.getGraph().root().messageProto())) { - final int[] cnt = {0}; - MatcherStream.findMatches(LINK_REGEX, field.comment()).forEach(match -> { + if (field.messageProto().equals(context.getGraph().root().messageProto())) { + final int[] cnt = {0}; + MatcherStream.findMatches(LINK_REGEX, field.comment()) + .forEach( + match -> { cnt[0] += 1; - institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata() + institutionalMemoryMetadata.add( + new InstitutionalMemoryMetadata() .setCreateStamp(context.getAuditStamp()) - .setDescription(String.format("%s.%s Reference %d", + .setDescription( + String.format( + "%s.%s Reference %d", field.getProtobufMessage().name(), field.getFieldProto().getName(), cnt[0])) .setUrl(new Url(match.group(1)))); - }); - } + }); + } + + return institutionalMemoryMetadata.stream(); + } + + private static class MatcherStream { + private MatcherStream() {} - return institutionalMemoryMetadata.stream(); + public static Stream<String> find(Pattern pattern, CharSequence input) { + return findMatches(pattern, input).map(MatchResult::group); } - private static class MatcherStream { - private MatcherStream() { } - - public static Stream<String> find(Pattern pattern, CharSequence input) { - return findMatches(pattern, input).map(MatchResult::group); - } - - public static Stream<MatchResult> findMatches( - Pattern pattern, CharSequence input) { - Matcher matcher = pattern.matcher(input); - - Spliterator<MatchResult> spliterator = new Spliterators.AbstractSpliterator<MatchResult>( - Long.MAX_VALUE, Spliterator.ORDERED | Spliterator.NONNULL) { - @Override - public boolean tryAdvance(Consumer<? super MatchResult> action) { - if (!matcher.find()) { - return false; - } - action.accept(matcher.toMatchResult()); - return true; - } }; - - return StreamSupport.stream(spliterator, false); - } + public static Stream<MatchResult> findMatches(Pattern pattern, CharSequence input) { + Matcher matcher = pattern.matcher(input); + + Spliterator<MatchResult> spliterator = + new Spliterators.AbstractSpliterator<MatchResult>( + Long.MAX_VALUE, Spliterator.ORDERED | Spliterator.NONNULL) { + @Override + public boolean tryAdvance(Consumer<? 
super MatchResult> action) { + if (!matcher.find()) { + return false; + } + action.accept(matcher.toMatchResult()); + return true; + } + }; + + return StreamSupport.stream(spliterator, false); } + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java index 57ec38611d47f..d2132316fdef3 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java @@ -4,28 +4,28 @@ import com.linkedin.dataset.DatasetProperties; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.Map; import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Stream; - public class KafkaTopicPropertyVisitor implements ProtobufModelVisitor<DatasetProperties> { - @Override - public Stream<DatasetProperties> visitGraph(VisitContext context) { - return getKafkaTopic(context.root().comment()).stream().map(kafkaTopic -> - new DatasetProperties() - .setCustomProperties(new StringMap(Map.of("kafka_topic", kafkaTopic))) - ); - } + @Override + public Stream<DatasetProperties> visitGraph(VisitContext context) { + return getKafkaTopic(context.root().comment()).stream() + .map( + kafkaTopic -> + new DatasetProperties() + .setCustomProperties(new StringMap(Map.of("kafka_topic", kafkaTopic)))); + } - private static final Pattern TOPIC_NAME_REGEX = Pattern.compile("(?si).*kafka.+topic.+[`]([a-z._-]+)[`].*"); + private static final Pattern TOPIC_NAME_REGEX = + Pattern.compile("(?si).*kafka.+topic.+[`]([a-z._-]+)[`].*"); - private static Optional<String> getKafkaTopic(String text) { - Matcher m = TOPIC_NAME_REGEX.matcher(text); - return m.matches() ? Optional.of(m.group(1)) : Optional.empty(); - } + private static Optional<String> getKafkaTopic(String text) { + Matcher m = TOPIC_NAME_REGEX.matcher(text); + return m.matches() ? 
Optional.of(m.group(1)) : Optional.empty(); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java index 7bb4d9860f72c..0a7081a35fa86 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java @@ -1,5 +1,7 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; @@ -8,47 +10,55 @@ import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.net.URISyntaxException; import java.util.Collection; import java.util.Map; import java.util.Objects; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class OwnershipVisitor implements ProtobufModelVisitor<Owner> { - @Override - public Stream<Owner> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry(), - ProtobufExtensionUtil.DataHubMetadataType.OWNER) - .stream() - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection<String>) extEntry.getValue()).stream().map(v -> Map.entry(extEntry.getKey(), v)); - } else { - return Stream.of(Map.entry(extEntry.getKey(), (String) extEntry.getValue())); - } - }) - .map(entry -> { - try { - OwnershipType ownershipType; - try { - ownershipType = OwnershipType.valueOf(entry.getKey().getName().toUpperCase()); - } catch (IllegalArgumentException e) { - ownershipType = OwnershipType.TECHNICAL_OWNER; - } + @Override + public Stream<Owner> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.OWNER) + .stream() + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection<String>) extEntry.getValue()) + .stream().map(v -> Map.entry(extEntry.getKey(), v)); + } else { + return Stream.of(Map.entry(extEntry.getKey(), (String) extEntry.getValue())); + } + }) + .map( + entry -> { + try { + OwnershipType ownershipType; + try { + ownershipType = OwnershipType.valueOf(entry.getKey().getName().toUpperCase()); + } catch (IllegalArgumentException e) { + ownershipType = OwnershipType.TECHNICAL_OWNER; + } - String[] id = entry.getValue().toLowerCase().split(":", 2); - return new Owner() - .setType(ownershipType) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(new Urn(id.length > 1 ? id[0].replaceFirst("corpgroup", "corpGroup") : "corpGroup", id[id.length - 1])); - } catch (URISyntaxException e) { - System.err.println(e.getMessage()); - return null; - } - }).filter(Objects::nonNull); - } + String[] id = entry.getValue().toLowerCase().split(":", 2); + return new Owner() + .setType(ownershipType) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner( + new Urn( + id.length > 1 + ? 
id[0].replaceFirst("corpgroup", "corpGroup") + : "corpGroup", + id[id.length - 1])); + } catch (URISyntaxException e) { + System.err.println(e.getMessage()); + return null; + } + }) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java index 9abd903f242aa..113cf6f1a548f 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java @@ -1,45 +1,56 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; +import static datahub.protobuf.visitors.ProtobufExtensionUtil.getProperties; + import com.google.gson.Gson; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.Descriptors; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.Collection; import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; -import static datahub.protobuf.visitors.ProtobufExtensionUtil.getProperties; - - public class PropertyVisitor implements ProtobufModelVisitor<DatasetProperties> { - private static final Gson GSON = new Gson(); + private static final Gson GSON = new Gson(); - @Override - public Stream<DatasetProperties> visitGraph(VisitContext context) { - Map<String, String> properties = ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.PROPERTY) - .stream().flatMap(fd -> { - if (fd.getKey().getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { - if (fd.getKey().isRepeated()) { - return Stream.of(Map.entry(fd.getKey().getName(), GSON.toJson( - ((Collection<?>) fd.getValue()).stream().map(Object::toString).collect(Collectors.toList())))); - } else { - return Stream.of(Map.entry(fd.getKey().getName(), fd.getValue().toString())); - } + @Override + public Stream<DatasetProperties> visitGraph(VisitContext context) { + Map<String, String> properties = + ProtobufExtensionUtil.filterByDataHubType( + getMessageOptions(context.root().messageProto()), + context.getGraph().getRegistry(), + ProtobufExtensionUtil.DataHubMetadataType.PROPERTY) + .stream() + .flatMap( + fd -> { + if (fd.getKey().getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { + if (fd.getKey().isRepeated()) { + return Stream.of( + Map.entry( + fd.getKey().getName(), + GSON.toJson( + ((Collection<?>) fd.getValue()) + .stream() + .map(Object::toString) + .collect(Collectors.toList())))); } else { - Descriptors.FieldDescriptor field = fd.getKey(); - DescriptorProtos.DescriptorProto value = (DescriptorProtos.DescriptorProto) fd.getValue(); - return getProperties(field, value); + return Stream.of(Map.entry(fd.getKey().getName(), fd.getValue().toString())); } - }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } else { + Descriptors.FieldDescriptor field = fd.getKey(); + 
DescriptorProtos.DescriptorProto value = + (DescriptorProtos.DescriptorProto) fd.getValue(); + return getProperties(field, value); + } + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - return Stream.of(new DatasetProperties().setCustomProperties(new StringMap(properties))); - } + return Stream.of(new DatasetProperties().setCustomProperties(new StringMap(properties))); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java index f0ca32fbbc2f8..6874044215241 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java @@ -1,22 +1,20 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.TagUrn; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - - public class TagAssociationVisitor implements ProtobufModelVisitor<TagAssociation> { - @Override - public Stream<TagAssociation> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry()) - .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))); - } + @Override + public Stream<TagAssociation> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry()) + .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java index 7656bb5236825..b13bc0eed1152 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java @@ -1,19 +1,18 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.GlossaryTermAssociation; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class TermAssociationVisitor implements ProtobufModelVisitor<GlossaryTermAssociation> { - @Override - public Stream<GlossaryTermAssociation> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.extractTermAssociationsFromOptions(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry()); - } + 
@Override + public Stream<GlossaryTermAssociation> visitGraph(VisitContext context) { + return ProtobufExtensionUtil.extractTermAssociationsFromOptions( + getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java index c67c7414e521b..240cf7b6d168b 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java @@ -1,5 +1,8 @@ package datahub.protobuf.visitors.field; +import static datahub.protobuf.ProtobufUtils.getFieldOptions; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -13,41 +16,45 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.VisitContext; - import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getFieldOptions; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class ProtobufExtensionFieldVisitor extends SchemaFieldVisitor { - @Override - public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { - boolean isPrimaryKey = getFieldOptions(field.getFieldProto()).stream().map(Pair::getKey) - .anyMatch(fieldDesc -> fieldDesc.getName().matches("(?i).*primary_?key")); + @Override + public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { + boolean isPrimaryKey = + getFieldOptions(field.getFieldProto()).stream() + .map(Pair::getKey) + .anyMatch(fieldDesc -> fieldDesc.getName().matches("(?i).*primary_?key")); - List<TagAssociation> tags = Stream.concat( + List<TagAssociation> tags = + Stream.concat( ProtobufExtensionUtil.extractTagPropertiesFromOptions( - getFieldOptions(field.getFieldProto()), - context.getGraph().getRegistry()), - promotedTags(field, context)) - .distinct().map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))) - .sorted(Comparator.comparing(t -> t.getTag().getName())) - .collect(Collectors.toList()); + getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()), + promotedTags(field, context)) + .distinct() + .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName()))) + .sorted(Comparator.comparing(t -> t.getTag().getName())) + .collect(Collectors.toList()); - List<GlossaryTermAssociation> terms = Stream.concat( + List<GlossaryTermAssociation> terms = + Stream.concat( ProtobufExtensionUtil.extractTermAssociationsFromOptions( - getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()), + getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()), promotedTerms(field, context)) - .distinct() - .sorted(Comparator.comparing(a -> a.getUrn().getNameEntity())) - .collect(Collectors.toList()); + .distinct() + .sorted(Comparator.comparing(a -> a.getUrn().getNameEntity())) + .collect(Collectors.toList()); - return context.streamAllPaths(field).map(path -> 
Pair.of( - new SchemaField() + return context + .streamAllPaths(field) + .map( + path -> + Pair.of( + new SchemaField() .setFieldPath(context.getFieldPath(path)) .setNullable(!isPrimaryKey) .setIsPartOfKey(isPrimaryKey) @@ -55,40 +62,48 @@ public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitCo .setNativeDataType(field.nativeType()) .setType(field.schemaFieldDataType()) .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray(tags))) - .setGlossaryTerms(new GlossaryTerms() + .setGlossaryTerms( + new GlossaryTerms() .setTerms(new GlossaryTermAssociationArray(terms)) .setAuditStamp(context.getAuditStamp())), - context.calculateSortOrder(path, field))); - } + context.calculateSortOrder(path, field))); + } - /** - * Promote tags from nested message to field. - * @return tags - */ - private Stream<TagProperties> promotedTags(ProtobufField field, VisitContext context) { - if (field.isMessage()) { - return context.getGraph().outgoingEdgesOf(field).stream().flatMap(e -> - ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(e.getEdgeTarget().messageProto()), - context.getGraph().getRegistry()) - ).distinct(); - } else { - return Stream.of(); - } + /** + * Promote tags from nested message to field. + * + * @return tags + */ + private Stream<TagProperties> promotedTags(ProtobufField field, VisitContext context) { + if (field.isMessage()) { + return context.getGraph().outgoingEdgesOf(field).stream() + .flatMap( + e -> + ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getMessageOptions(e.getEdgeTarget().messageProto()), + context.getGraph().getRegistry())) + .distinct(); + } else { + return Stream.of(); } + } - /** - * Promote terms from nested message to field. - * @return terms - */ - private Stream<GlossaryTermAssociation> promotedTerms(ProtobufField field, VisitContext context) { - if (field.isMessage()) { - return context.getGraph().outgoingEdgesOf(field).stream().flatMap(e -> - ProtobufExtensionUtil.extractTermAssociationsFromOptions(getMessageOptions(e.getEdgeTarget().messageProto()), - context.getGraph().getRegistry()) - ).distinct(); - } else { - return Stream.of(); - } + /** + * Promote terms from nested message to field. 
+ * + * @return terms + */ + private Stream<GlossaryTermAssociation> promotedTerms(ProtobufField field, VisitContext context) { + if (field.isMessage()) { + return context.getGraph().outgoingEdgesOf(field).stream() + .flatMap( + e -> + ProtobufExtensionUtil.extractTermAssociationsFromOptions( + getMessageOptions(e.getEdgeTarget().messageProto()), + context.getGraph().getRegistry())) + .distinct(); + } else { + return Stream.of(); } - + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java index 8f8da1970967d..46f9bc5f2f90c 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java @@ -5,21 +5,23 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.stream.Stream; public class SchemaFieldVisitor implements ProtobufModelVisitor<Pair<SchemaField, Double>> { - @Override - public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { - return context.streamAllPaths(field).map(path -> + @Override + public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { + return context + .streamAllPaths(field) + .map( + path -> Pair.of( - new SchemaField() - .setFieldPath(context.getFieldPath(path)) - .setNullable(true) - .setDescription(field.comment()) - .setNativeDataType(field.nativeType()) - .setType(field.schemaFieldDataType()), - context.calculateSortOrder(path, field))); - } + new SchemaField() + .setFieldPath(context.getFieldPath(path)) + .setNullable(true) + .setDescription(field.comment()) + .setNativeDataType(field.nativeType()) + .setType(field.schemaFieldDataType()), + context.calculateSortOrder(path, field))); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java index eb416653232a1..ad6a3344e5b1e 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java @@ -1,43 +1,46 @@ package datahub.protobuf.visitors.tags; +import static datahub.protobuf.ProtobufUtils.getFieldOptions; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.linkedin.common.urn.TagUrn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.tag.TagProperties; +import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufField; -import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.ProtobufExtensionUtil; +import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import datahub.event.MetadataChangeProposalWrapper; - -import static datahub.protobuf.ProtobufUtils.getFieldOptions; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - import java.util.stream.Stream; -public class TagVisitor implements 
ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>> { - private static final String TAG_PROPERTIES_ASPECT = "tagProperties"; +public class TagVisitor + implements ProtobufModelVisitor<MetadataChangeProposalWrapper<? extends RecordTemplate>> { + private static final String TAG_PROPERTIES_ASPECT = "tagProperties"; - @Override - public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitGraph(VisitContext context) { - return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry()) - .map(TagVisitor::wrapTagProperty); - } + @Override + public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitGraph( + VisitContext context) { + return ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry()) + .map(TagVisitor::wrapTagProperty); + } - @Override - public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitField(ProtobufField field, VisitContext context) { - return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getFieldOptions(field.getFieldProto()), - context.getGraph().getRegistry()) - .map(TagVisitor::wrapTagProperty); - } + @Override + public Stream<MetadataChangeProposalWrapper<? extends RecordTemplate>> visitField( + ProtobufField field, VisitContext context) { + return ProtobufExtensionUtil.extractTagPropertiesFromOptions( + getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()) + .map(TagVisitor::wrapTagProperty); + } - private static MetadataChangeProposalWrapper<TagProperties> wrapTagProperty(TagProperties tagProperty) { - return new MetadataChangeProposalWrapper<>( - "tag", - new TagUrn(tagProperty.getName()).toString(), - ChangeType.UPSERT, - tagProperty, - TAG_PROPERTIES_ASPECT); - } + private static MetadataChangeProposalWrapper<TagProperties> wrapTagProperty( + TagProperties tagProperty) { + return new MetadataChangeProposalWrapper<>( + "tag", + new TagUrn(tagProperty.getName()).toString(), + ChangeType.UPSERT, + tagProperty, + TAG_PROPERTIES_ASPECT); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java index bbb8e532f1033..e96bb63220b04 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java @@ -1,5 +1,10 @@ package datahub.protobuf; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import com.linkedin.common.FabricType; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -9,9 +14,8 @@ import com.linkedin.common.Status; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.url.Url; -import com.linkedin.data.template.StringArray; import com.linkedin.common.urn.DataPlatformUrn; -import com.linkedin.common.FabricType; +import com.linkedin.data.template.StringArray; import com.linkedin.schema.ArrayType; import com.linkedin.schema.BooleanType; import com.linkedin.schema.BytesType; @@ -26,430 +30,701 @@ import datahub.protobuf.model.ProtobufField; import 
datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; - +import org.junit.jupiter.api.Test; public class ProtobufDatasetTest { - @Test - public void noSchemaTest() throws IOException { - ProtobufDataset dataset = ProtobufDataset.builder() - .setDataPlatformUrn(new DataPlatformUrn("kafka")) - .setProtocIn(getTestProtoc("protobuf", "messageA")) - .setAuditStamp(TEST_AUDIT_STAMP) - .setFabricType(FabricType.DEV) - .build(); - - assertNotNull(dataset); - assertEquals(2, dataset.getAllMetadataChangeProposals().count()); - assertEquals(8, dataset.getDatasetMCPs().size()); - assertEquals(0, dataset.getVisitorMCPs().size()); - } - - @Test - public void platformSchemaTest() throws IOException { - assertEquals(getTestProtoSource("protobuf", "messageA"), - extractDocumentSchema(getTestProtobufDataset("protobuf", "messageA"))); - } - - @Test - public void messageA() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(9, testMetadata.getFields().size()); - - assertEquals("MessageA", extractAspect(test.getDatasetMCPs().get(0), "name")); - assertEquals("protobuf.MessageA", extractAspect(test.getDatasetMCPs().get(0), "qualifiedName")); - - assertEquals("platform.topic", extractCustomProperty(test.getDatasetMCPs().get(0), "kafka_topic")); - - assertEquals(new InstitutionalMemory().setElements(new InstitutionalMemoryMetadataArray( - new InstitutionalMemoryMetadata() - .setDescription("Github Team") - .setCreateStamp(TEST_AUDIT_STAMP) - .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), - new InstitutionalMemoryMetadata() - .setDescription("Slack Channel") - .setCreateStamp(TEST_AUDIT_STAMP) - .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 1") - .setUrl(new Url("https://some/link")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 2") - .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 3") - .setUrl(new Url("https://github.com/apache/kafka")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA.map_field Reference 1") - .setUrl(new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps")))).data(), - test.getDatasetMCPs().get(1).getAspect().data()); - - assertEquals(new Status().setRemoved(false).data(), test.getDatasetMCPs().get(test.getDatasetMCPs().size() - 1).getAspect().data()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))) - .setNativeDataType("bytes") - .setNullable(true) - 
.setIsPartOfKey(false) - .setDescription("Leading single line comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].position") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("uint32") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Leading multiline comment\nSecond line of leading multiline comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=int].position")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].total") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("uint32") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Detached comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=int].total")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("uint64") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Test repeated and trailing comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("string") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str")).findFirst().orElseThrow()); - - } - - @Test - public void messageB() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - 
assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(24, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].id") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("google.protobuf.Int64Value") - .setDescription("wrapped int64") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=long].id")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BooleanType()))) - .setNativeDataType("google.protobuf.BoolValue") - .setDescription("Indicator") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot")).findFirst().orElseThrow()); - - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=string].value") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("message value") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=string].value")).findFirst().orElseThrow()); - } - - @Test - public void messageC() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC"); - - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(4, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) - .setNativeDataType("oneof") - .setDescription("one of field comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - 
.setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("one of string comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("int32") - .setDescription("one of int comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")).findFirst().orElseThrow()); - } - - @Test - @SuppressWarnings("LineLength") - public void messageC2NestedOneOf() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC2"); - - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC1,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(6, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageC2") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("protobuf.MessageC3") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - 
.setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) - .setNativeDataType("oneof") - .setDescription("one of field comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("one of string comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("int32") - .setDescription("one of int comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int")).findFirst().orElseThrow()); - } - - @Test - public void customFieldVisitors() throws 
IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); - - test.setFieldVisitor(new ProtobufModelVisitor<Pair<SchemaField, Double>>() { - @Override - public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) { - if (field.fullName().equals("protobuf.MessageA.sequence_id")) { - return Stream.of(Pair.of( - new SchemaField() - .setDescription("my comment") - .setNativeDataType("my type") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))), - 0d)); - } else { - return Stream.of(); - } + @Test + public void noSchemaTest() throws IOException { + ProtobufDataset dataset = + ProtobufDataset.builder() + .setDataPlatformUrn(new DataPlatformUrn("kafka")) + .setProtocIn(getTestProtoc("protobuf", "messageA")) + .setAuditStamp(TEST_AUDIT_STAMP) + .setFabricType(FabricType.DEV) + .build(); + + assertNotNull(dataset); + assertEquals(2, dataset.getAllMetadataChangeProposals().count()); + assertEquals(8, dataset.getDatasetMCPs().size()); + assertEquals(0, dataset.getVisitorMCPs().size()); + } + + @Test + public void platformSchemaTest() throws IOException { + assertEquals( + getTestProtoSource("protobuf", "messageA"), + extractDocumentSchema(getTestProtobufDataset("protobuf", "messageA"))); + } + + @Test + public void messageA() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(9, testMetadata.getFields().size()); + + assertEquals("MessageA", extractAspect(test.getDatasetMCPs().get(0), "name")); + assertEquals("protobuf.MessageA", extractAspect(test.getDatasetMCPs().get(0), "qualifiedName")); + + assertEquals( + "platform.topic", extractCustomProperty(test.getDatasetMCPs().get(0), "kafka_topic")); + + assertEquals( + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + new InstitutionalMemoryMetadata() + .setDescription("Github Team") + .setCreateStamp(TEST_AUDIT_STAMP) + .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), + new InstitutionalMemoryMetadata() + .setDescription("Slack Channel") + .setCreateStamp(TEST_AUDIT_STAMP) + .setUrl( + new Url( + "https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 1") + .setUrl(new Url("https://some/link")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 2") + .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 3") + .setUrl(new Url("https://github.com/apache/kafka")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA.map_field Reference 1") + .setUrl( + new Url( + "https://developers.google.com/protocol-buffers/docs/proto3#maps")))) + .data(), + test.getDatasetMCPs().get(1).getAspect().data()); + + assertEquals( + new Status().setRemoved(false).data(), + test.getDatasetMCPs().get(test.getDatasetMCPs().size() - 1).getAspect().data()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id") + .setType( + 
new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))) + .setNativeDataType("bytes") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Leading single line comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].position") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("uint32") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Leading multiline comment\nSecond line of leading multiline comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=int].position")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].total") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("uint32") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Detached comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=int].total")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num") + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("uint64") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Test repeated and trailing comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str") + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("string") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + 
"[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str")) + .findFirst() + .orElseThrow()); + } + + @Test + public void messageB() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(24, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].id") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("google.protobuf.Int64Value") + .setDescription("wrapped int64") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=long].id")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BooleanType()))) + .setNativeDataType("google.protobuf.BoolValue") + .setDescription("Indicator") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=string].value") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("message value") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=string].value")) + .findFirst() + .orElseThrow()); + } + + @Test + public void messageC() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(4, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) + .setNativeDataType("oneof") + .setDescription("one of field comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new 
GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("one of string comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("int32") + .setDescription("one of int comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")) + .findFirst() + .orElseThrow()); + } + + @Test + @SuppressWarnings("LineLength") + public void messageC2NestedOneOf() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC2"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC1,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(6, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageC2") + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("protobuf.MessageC3") + .setDescription("") + .setGlobalTags(new 
GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) + .setNativeDataType("oneof") + .setDescription("one of field comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("one of string comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + 
.setNativeDataType("int32") + .setDescription("one of int comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int")) + .findFirst() + .orElseThrow()); + } + + @Test + public void customFieldVisitors() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); + + test.setFieldVisitor( + new ProtobufModelVisitor<Pair<SchemaField, Double>>() { + @Override + public Stream<Pair<SchemaField, Double>> visitField( + ProtobufField field, VisitContext context) { + if (field.fullName().equals("protobuf.MessageA.sequence_id")) { + return Stream.of( + Pair.of( + new SchemaField() + .setDescription("my comment") + .setNativeDataType("my type") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType()))), + 0d)); + } else { + return Stream.of(); } + } }); - assertEquals(1, test.getSchemaMetadata().getFields().size()); - assertEquals(new SchemaField() - .setDescription("my comment") - .setNativeDataType("my type") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))), - test.getSchemaMetadata().getFields().get(0)); - } - - @Test - public void duplicateNested() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageA") - .setDescription("nested message a") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageA") - .setDescription("nested message a second time") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested")).findFirst().orElseThrow()); - - Set<String> firstNested = testMetadata.getFields().stream().map(SchemaField::getFieldPath) - .filter(f -> f.contains(".nested")) - .collect(Collectors.toSet()); - 
Set<String> secondNested = testMetadata.getFields().stream().map(SchemaField::getFieldPath) - .filter(f -> f.contains(".secondary_nested")) - .collect(Collectors.toSet()); - - assertEquals(firstNested.size(), secondNested.size()); - assertEquals(firstNested.stream().map(s -> s.replace(".nested", ".secondary_nested")).collect(Collectors.toSet()), secondNested); - } - - @Test - public void googleTimestamp() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].time") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("google.protobuf.Timestamp") - .setDescription("google timestamp") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=long].time")).findFirst().orElseThrow()); - } + assertEquals(1, test.getSchemaMetadata().getFields().size()); + assertEquals( + new SchemaField() + .setDescription("my comment") + .setNativeDataType("my type") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType()))), + test.getSchemaMetadata().getFields().get(0)); + } + + @Test + public void duplicateNested() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageA") + .setDescription("nested message a") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageA") + .setDescription("nested message a second time") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + 
"[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested")) + .findFirst() + .orElseThrow()); + + Set<String> firstNested = + testMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .filter(f -> f.contains(".nested")) + .collect(Collectors.toSet()); + Set<String> secondNested = + testMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .filter(f -> f.contains(".secondary_nested")) + .collect(Collectors.toSet()); + + assertEquals(firstNested.size(), secondNested.size()); + assertEquals( + firstNested.stream() + .map(s -> s.replace(".nested", ".secondary_nested")) + .collect(Collectors.toSet()), + secondNested); + } + + @Test + public void googleTimestamp() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].time") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("google.protobuf.Timestamp") + .setDescription("google timestamp") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=long].time")) + .findFirst() + .orElseThrow()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java index 3a00edca8284a..e2599cb4c3f68 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java @@ -1,45 +1,47 @@ package datahub.protobuf; -import com.google.protobuf.DescriptorProtos; -import com.google.protobuf.ExtensionRegistry; -import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - -import java.io.IOException; - import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtoc; import static org.junit.jupiter.api.Assertions.*; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.ExtensionRegistry; +import datahub.protobuf.model.ProtobufGraph; +import java.io.IOException; +import org.junit.jupiter.api.Test; public class ProtobufUtilsTest { - @Test - public void registryTest() throws IOException, IllegalArgumentException { - byte[] protocBytes = getTestProtoc("extended_protobuf", "messageA").readAllBytes(); - DescriptorProtos.FileDescriptorSet fileSet = getTestProtobufFileSet("extended_protobuf", "messageA"); - ExtensionRegistry registry = ProtobufUtils.buildRegistry(fileSet); - DescriptorProtos.FileDescriptorSet fileSetWithRegistry = DescriptorProtos.FileDescriptorSet.parseFrom(protocBytes, registry); - - assertNotEquals(fileSet, fileSetWithRegistry); - - /* - * - * Without the ExtensionRegistry we get field numbers instead of the names. 
- */ - ProtobufGraph graph = new ProtobufGraph(fileSet, null); - assertEquals("[meta.msg.classification_enum]: HighlyConfidential\n" - + "[meta.msg.team]: \"corpGroup:TeamB\"\n" - + "[meta.msg.team]: \"corpUser:datahub\"\n" - + "[meta.msg.technical_owner]: \"corpGroup:TechnicalOwner\"\n" - + "[meta.msg.domain]: \"Engineering\"\n" - + "[meta.msg.type]: ENTITY\n" - + "[meta.msg.bool_feature]: true\n" - + "[meta.msg.alert_channel]: \"#alerts\"\n" - + "[meta.msg.tag_list]: \"a, b, c\"\n" - + "[meta.msg.repeat_string]: \"a\"\n" - + "[meta.msg.repeat_string]: \"b\"\n" - + "[meta.msg.repeat_enum]: ENTITY\n" - + "[meta.msg.repeat_enum]: EVENT\n", graph.root().messageProto().getOptions().toString()); - } + @Test + public void registryTest() throws IOException, IllegalArgumentException { + byte[] protocBytes = getTestProtoc("extended_protobuf", "messageA").readAllBytes(); + DescriptorProtos.FileDescriptorSet fileSet = + getTestProtobufFileSet("extended_protobuf", "messageA"); + ExtensionRegistry registry = ProtobufUtils.buildRegistry(fileSet); + DescriptorProtos.FileDescriptorSet fileSetWithRegistry = + DescriptorProtos.FileDescriptorSet.parseFrom(protocBytes, registry); + + assertNotEquals(fileSet, fileSetWithRegistry); + + /* + * + * Without the ExtensionRegistry we get field numbers instead of the names. + */ + ProtobufGraph graph = new ProtobufGraph(fileSet, null); + assertEquals( + "[meta.msg.classification_enum]: HighlyConfidential\n" + + "[meta.msg.team]: \"corpGroup:TeamB\"\n" + + "[meta.msg.team]: \"corpUser:datahub\"\n" + + "[meta.msg.technical_owner]: \"corpGroup:TechnicalOwner\"\n" + + "[meta.msg.domain]: \"Engineering\"\n" + + "[meta.msg.type]: ENTITY\n" + + "[meta.msg.bool_feature]: true\n" + + "[meta.msg.alert_channel]: \"#alerts\"\n" + + "[meta.msg.tag_list]: \"a, b, c\"\n" + + "[meta.msg.repeat_string]: \"a\"\n" + + "[meta.msg.repeat_string]: \"b\"\n" + + "[meta.msg.repeat_enum]: ENTITY\n" + + "[meta.msg.repeat_enum]: EVENT\n", + graph.root().messageProto().getOptions().toString()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java index 6859e7fee9a60..7ee69149cf9dd 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java @@ -11,72 +11,85 @@ import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.visitors.VisitContext; - import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.Objects; public class TestFixtures { - private TestFixtures() { } + private TestFixtures() {} - public static final DataPlatformUrn TEST_DATA_PLATFORM = new DataPlatformUrn("kafka"); - public static final AuditStamp TEST_AUDIT_STAMP = new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(new CorpuserUrn("datahub")); + public static final DataPlatformUrn TEST_DATA_PLATFORM = new DataPlatformUrn("kafka"); + public static final AuditStamp TEST_AUDIT_STAMP = + new AuditStamp().setTime(System.currentTimeMillis()).setActor(new CorpuserUrn("datahub")); - public static InputStream getTestProtoc(String protoPackage, String filename) { - return Objects.requireNonNull(TestFixtures.class.getClassLoader() - .getResourceAsStream(String.format("%s/%s.protoc", protoPackage, filename))); - } + 
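
// Illustrative sketch (not part of this patch, assuming the .protoc test resources
// are on the classpath): how these fixtures compose when a test needs custom option
// names rather than raw field numbers, mirroring registryTest above:
DescriptorProtos.FileDescriptorSet fileSet =
    getTestProtobufFileSet("extended_protobuf", "messageA");
ExtensionRegistry registry = ProtobufUtils.buildRegistry(fileSet);
DescriptorProtos.FileDescriptorSet withNames =
    DescriptorProtos.FileDescriptorSet.parseFrom(
        getTestProtoc("extended_protobuf", "messageA").readAllBytes(), registry);
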
public static InputStream getTestProtoc(String protoPackage, String filename) { + return Objects.requireNonNull( + TestFixtures.class + .getClassLoader() + .getResourceAsStream(String.format("%s/%s.protoc", protoPackage, filename))); + } - public static String getTestProtoSource(String protoPackage, String filename) throws IOException { - return new String(Objects.requireNonNull(TestFixtures.class.getClassLoader() - .getResourceAsStream(String.format("%s/%s.proto", protoPackage, filename))).readAllBytes(), - StandardCharsets.UTF_8); - } + public static String getTestProtoSource(String protoPackage, String filename) throws IOException { + return new String( + Objects.requireNonNull( + TestFixtures.class + .getClassLoader() + .getResourceAsStream(String.format("%s/%s.proto", protoPackage, filename))) + .readAllBytes(), + StandardCharsets.UTF_8); + } - public static ProtobufDataset getTestProtobufDataset(String protoPackage, String filename) throws IOException { - return ProtobufDataset.builder() - .setDataPlatformUrn(TEST_DATA_PLATFORM) - .setSchema(getTestProtoSource(protoPackage, filename)) - .setProtocIn(getTestProtoc(protoPackage, filename)) - .setAuditStamp(TEST_AUDIT_STAMP) - .setFabricType(FabricType.TEST) - .setGithubOrganization("myOrg") - .setSlackTeamId("SLACK123") - .build(); - } + public static ProtobufDataset getTestProtobufDataset(String protoPackage, String filename) + throws IOException { + return ProtobufDataset.builder() + .setDataPlatformUrn(TEST_DATA_PLATFORM) + .setSchema(getTestProtoSource(protoPackage, filename)) + .setProtocIn(getTestProtoc(protoPackage, filename)) + .setAuditStamp(TEST_AUDIT_STAMP) + .setFabricType(FabricType.TEST) + .setGithubOrganization("myOrg") + .setSlackTeamId("SLACK123") + .build(); + } - public static DescriptorProtos.FileDescriptorSet getTestProtobufFileSet(String protoPackage, String filename) throws IOException { - return DescriptorProtos.FileDescriptorSet - .parseFrom(getTestProtoc(protoPackage, filename).readAllBytes()); - } + public static DescriptorProtos.FileDescriptorSet getTestProtobufFileSet( + String protoPackage, String filename) throws IOException { + return DescriptorProtos.FileDescriptorSet.parseFrom( + getTestProtoc(protoPackage, filename).readAllBytes()); + } - public static VisitContext.VisitContextBuilder getVisitContextBuilder(String message) { - return VisitContext.builder() - .datasetUrn(new DatasetUrn(TEST_DATA_PLATFORM, message, FabricType.TEST)) - .auditStamp(TEST_AUDIT_STAMP); - } + public static VisitContext.VisitContextBuilder getVisitContextBuilder(String message) { + return VisitContext.builder() + .datasetUrn(new DatasetUrn(TEST_DATA_PLATFORM, message, FabricType.TEST)) + .auditStamp(TEST_AUDIT_STAMP); + } - public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename) throws IOException { - return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename)); - } + public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename) + throws IOException { + return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename)); + } - public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename, String messageName) throws IOException { - return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename), messageName); - } + public static ProtobufGraph getTestProtobufGraph( + String protoPackage, String filename, String messageName) throws IOException { + return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename), 
messageName); + } - public static Object extractAspect(MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String aspect) { - return mcp.getAspect().data().get(aspect); - } + public static Object extractAspect( + MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String aspect) { + return mcp.getAspect().data().get(aspect); + } - public static Object extractCustomProperty(MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String key) { - return ((DataMap) extractAspect(mcp, "customProperties")).get(key); - } + public static Object extractCustomProperty( + MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String key) { + return ((DataMap) extractAspect(mcp, "customProperties")).get(key); + } - public static String extractDocumentSchema(ProtobufDataset protobufDataset) { - return String.valueOf(((DataMap) ((DataMap) protobufDataset.getSchemaMetadata().getPlatformSchema().data()) - .get("com.linkedin.schema.KafkaSchema")).get("documentSchema")); - } + public static String extractDocumentSchema(ProtobufDataset protobufDataset) { + return String.valueOf( + ((DataMap) + ((DataMap) protobufDataset.getSchemaMetadata().getPlatformSchema().data()) + .get("com.linkedin.schema.KafkaSchema")) + .get("documentSchema")); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java index 7c98077690d66..fed9f250b359f 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java @@ -1,80 +1,87 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.EnumDescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.EnumType; import com.linkedin.schema.SchemaFieldDataType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufEnumTest { - @Test - public void enumTest() { - EnumDescriptorProto expectedEnum = EnumDescriptorProto.newBuilder() - .setName("enum1") - .build(); - DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .addEnumType(expectedEnum) - .build(); - - ProtobufEnum test = ProtobufEnum.enumBuilder() - .enumProto(expectedEnum) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); + @Test + public void enumTest() { + EnumDescriptorProto expectedEnum = EnumDescriptorProto.newBuilder().setName("enum1").build(); + DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .addEnumType(expectedEnum) + .build(); - assertEquals("enum1", test.name()); - assertEquals("protobuf.enum1", test.fullName()); - assertEquals("[type=enum]", test.fieldPathType()); - assertEquals("enum", test.nativeType()); - assertEquals(expectedMessage, test.messageProto()); - 
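
// Illustrative aside (not part of this patch): enumEqualityTest below pins down
// that two ProtobufEnum instances built from identical EnumDescriptorProtos
// compare equal, so duplicates collapse when collected into a Set:
Set<ProtobufEnum> unique = Stream.of(test1, test2, test1Dup).collect(Collectors.toSet());
// unique.equals(Set.of(test1, test2)) -> true
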
assertEquals(expectedFile, test.fileProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), test.schemaFieldDataType()); - assertEquals("ProtobufEnum[protobuf.enum1]", test.toString()); - assertEquals("", test.comment()); - } + ProtobufEnum test = + ProtobufEnum.enumBuilder() + .enumProto(expectedEnum) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); - @Test - public void enumEqualityTest() { - EnumDescriptorProto enum1 = EnumDescriptorProto.newBuilder().setName("enum1").build(); - EnumDescriptorProto enum2 = EnumDescriptorProto.newBuilder().setName("enum2").build(); - EnumDescriptorProto enum1Dup = EnumDescriptorProto.newBuilder().setName("enum1").build(); + assertEquals("enum1", test.name()); + assertEquals("protobuf.enum1", test.fullName()); + assertEquals("[type=enum]", test.fieldPathType()); + assertEquals("enum", test.nativeType()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), + test.schemaFieldDataType()); + assertEquals("ProtobufEnum[protobuf.enum1]", test.toString()); + assertEquals("", test.comment()); + } - DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .addAllEnumType(List.of(enum1, enum2, enum1Dup)) - .build(); + @Test + public void enumEqualityTest() { + EnumDescriptorProto enum1 = EnumDescriptorProto.newBuilder().setName("enum1").build(); + EnumDescriptorProto enum2 = EnumDescriptorProto.newBuilder().setName("enum2").build(); + EnumDescriptorProto enum1Dup = EnumDescriptorProto.newBuilder().setName("enum1").build(); - ProtobufEnum test1 = ProtobufEnum.enumBuilder().enumProto(enum1) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); - ProtobufEnum test2 = ProtobufEnum.enumBuilder().enumProto(enum2) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); - ProtobufEnum test1Dup = ProtobufEnum.enumBuilder().enumProto(enum1Dup) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); + DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .addAllEnumType(List.of(enum1, enum2, enum1Dup)) + .build(); - assertEquals(test1, test1Dup); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); - } + ProtobufEnum test1 = + ProtobufEnum.enumBuilder() + .enumProto(enum1) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + ProtobufEnum test2 = + ProtobufEnum.enumBuilder() + .enumProto(enum2) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + ProtobufEnum test1Dup = + ProtobufEnum.enumBuilder() + .enumProto(enum1Dup) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + assertEquals(test1, test1Dup); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java index 
543b815f7f72b..6d4dc8bc4d585 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java @@ -1,10 +1,12 @@ package datahub.protobuf.model; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.data.template.StringArray; - import com.linkedin.schema.ArrayType; import com.linkedin.schema.BooleanType; import com.linkedin.schema.BytesType; @@ -12,257 +14,313 @@ import com.linkedin.schema.FixedType; import com.linkedin.schema.NumberType; import com.linkedin.schema.RecordType; -import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.SchemaField; +import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.SchemaMetadata; import com.linkedin.schema.StringType; import datahub.protobuf.ProtobufDataset; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.Arrays; import java.util.Set; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufFieldTest { - private static final DescriptorProto EXPECTED_MESSAGE_PROTO = DescriptorProto.newBuilder() - .setName("message1") + private static final DescriptorProto EXPECTED_MESSAGE_PROTO = + DescriptorProto.newBuilder().setName("message1").build(); + private static final FileDescriptorProto EXPECTED_FILE_PROTO = + FileDescriptorProto.newBuilder() + .addMessageType(EXPECTED_MESSAGE_PROTO) + .setPackage("protobuf") + .build(); + private static final ProtobufMessage EXPECTED_MESSAGE = + ProtobufMessage.builder() + .messageProto(EXPECTED_MESSAGE_PROTO) + .fileProto(EXPECTED_FILE_PROTO) + .build(); + + @Test + public void fieldTest() { + FieldDescriptorProto expectedField = + FieldDescriptorProto.newBuilder() + .setName("field1") + .setNumber(1) + .setType(FieldDescriptorProto.Type.TYPE_BYTES) .build(); - private static final FileDescriptorProto EXPECTED_FILE_PROTO = FileDescriptorProto.newBuilder() - .addMessageType(EXPECTED_MESSAGE_PROTO) + DescriptorProto expectedMessage1 = + DescriptorProto.newBuilder().setName("message1").addField(expectedField).build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage1) .setPackage("protobuf") .build(); - private static final ProtobufMessage EXPECTED_MESSAGE = ProtobufMessage.builder() - .messageProto(EXPECTED_MESSAGE_PROTO) - .fileProto(EXPECTED_FILE_PROTO) - .build(); - - - @Test - public void fieldTest() { - FieldDescriptorProto expectedField = FieldDescriptorProto.newBuilder() - .setName("field1") - .setNumber(1) - .setType(FieldDescriptorProto.Type.TYPE_BYTES) - .build(); - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .addField(expectedField) - .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage1) - .setPackage("protobuf") - .build(); - ProtobufMessage expectedMessage = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile) - .build(); + ProtobufMessage expectedMessage = + 
ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile).build(); - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(expectedMessage) - .build(); + ProtobufField test = + ProtobufField.builder().fieldProto(expectedField).protobufMessage(expectedMessage).build(); - assertEquals("field1", test.name()); - assertEquals("protobuf.message1.field1", test.fullName()); - assertEquals("[type=bytes]", test.fieldPathType()); - assertEquals("protobuf.message1", test.parentMessageName()); - assertEquals(expectedMessage1, test.messageProto()); - assertEquals(expectedFile, test.fileProto()); - assertNull(test.oneOfProto()); - assertEquals("bytes", test.nativeType()); - assertFalse(test.isMessage()); - assertEquals(1, test.sortWeight()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), test.schemaFieldDataType()); - assertEquals("ProtobufField[protobuf.message1.field1]", test.toString()); - } + assertEquals("field1", test.name()); + assertEquals("protobuf.message1.field1", test.fullName()); + assertEquals("[type=bytes]", test.fieldPathType()); + assertEquals("protobuf.message1", test.parentMessageName()); + assertEquals(expectedMessage1, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertNull(test.oneOfProto()); + assertEquals("bytes", test.nativeType()); + assertFalse(test.isMessage()); + assertEquals(1, test.sortWeight()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), + test.schemaFieldDataType()); + assertEquals("ProtobufField[protobuf.message1.field1]", test.toString()); + } - @Test - public void fieldPathTypeTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void fieldPathTypeTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { + if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { assertEquals("[type=protobuf_message1]", test.fieldPathType()); - } else if (type.name().endsWith("64")) { + } else if (type.name().endsWith("64")) { assertEquals("[type=long]", test.fieldPathType()); - } else if (type.name().endsWith("32")) { + } else if (type.name().endsWith("32")) { assertEquals("[type=int]", test.fieldPathType()); - } else if (type.name().endsWith("BOOL")) { + } else if (type.name().endsWith("BOOL")) { assertEquals("[type=boolean]", test.fieldPathType()); - } else { - assertEquals(String.format("[type=%s]", type.name().split("_")[1].toLowerCase()), test.fieldPathType()); - } - }); - } + } else { + assertEquals( + 
String.format("[type=%s]", type.name().split("_")[1].toLowerCase()), + test.fieldPathType()); + } + }); + } - @Test - public void fieldPathTypeArrayTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; + @Test + public void fieldPathTypeArrayTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { + if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { assertEquals("[type=array].[type=protobuf_message1]", test.fieldPathType()); - } else if (type.name().endsWith("64")) { + } else if (type.name().endsWith("64")) { assertEquals("[type=array].[type=long]", test.fieldPathType()); - } else if (type.name().endsWith("32")) { + } else if (type.name().endsWith("32")) { assertEquals("[type=array].[type=int]", test.fieldPathType()); - } else if (type.name().endsWith("BOOL")) { + } else if (type.name().endsWith("BOOL")) { assertEquals("[type=array].[type=boolean]", test.fieldPathType()); - } else { - assertEquals(String.format("[type=array].[type=%s]", type.name().split("_")[1].toLowerCase()), test.fieldPathType()); - } - }); - } + } else { + assertEquals( + String.format( + "[type=array].[type=%s]", type.name().split("_")[1].toLowerCase()), + test.fieldPathType()); + } + }); + } - @Test - public void schemaFieldTypeTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void schemaFieldTypeTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (Set.of("TYPE_MESSAGE", "TYPE_GROUP").contains(type.name())) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), test.schemaFieldDataType()); - } 
else if (type.name().contains("FIXED")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new FixedType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("64") || type.name().endsWith("32") || Set.of("TYPE_DOUBLE", "TYPE_FLOAT").contains(type.name())) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("BOOL")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BooleanType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("STRING")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("ENUM")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("BYTES")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), test.schemaFieldDataType()); - } else { + if (Set.of("TYPE_MESSAGE", "TYPE_GROUP").contains(type.name())) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType())), + test.schemaFieldDataType()); + } else if (type.name().contains("FIXED")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new FixedType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("64") + || type.name().endsWith("32") + || Set.of("TYPE_DOUBLE", "TYPE_FLOAT").contains(type.name())) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("BOOL")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BooleanType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("STRING")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("ENUM")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new EnumType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("BYTES")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType())), + test.schemaFieldDataType()); + } else { fail(String.format("Add test case for %s", type)); - } - }); - } + } + }); + } - @Test - public void schemaFieldTypeArrayTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void schemaFieldTypeArrayTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) 
.setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType() - .setNestedType(new StringArray()))), test.schemaFieldDataType()); - }); - } + assertEquals( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray()))), + test.schemaFieldDataType()); + }); + } - @Test - public void nestedTypeFieldTest() throws IOException { - ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageC"); - SchemaMetadata testMetadata = test.getSchemaMetadata(); + @Test + public void nestedTypeFieldTest() throws IOException { + ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageC"); + SchemaMetadata testMetadata = test.getSchemaMetadata(); - SchemaField nicknameField = testMetadata.getFields() - .stream() - .filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].nickname")) - .findFirst() - .orElseThrow(); + SchemaField nicknameField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].nickname")) + .findFirst() + .orElseThrow(); - assertEquals("nickname info", nicknameField.getDescription()); + assertEquals("nickname info", nicknameField.getDescription()); - SchemaField profileUrlField = testMetadata.getFields() - .stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].profile_url")) - .findFirst() - .orElseThrow(); + SchemaField profileUrlField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].profile_url")) + .findFirst() + .orElseThrow(); - assertEquals("profile url info", profileUrlField.getDescription()); + assertEquals("profile url info", profileUrlField.getDescription()); - SchemaField addressField = testMetadata.getFields() - .stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg]." + SchemaField addressField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg]." 
+ "[type=extended_protobuf_UserMsg_AddressMsg].address.[type=google_protobuf_StringValue].zipcode")) - .findFirst() - .orElseThrow(); + .findFirst() + .orElseThrow(); - assertEquals("Zip code, alphanumeric", addressField.getDescription()); - } + assertEquals("Zip code, alphanumeric", addressField.getDescription()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java index 80ffafff3f451..488222b87766d 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java @@ -1,84 +1,99 @@ package datahub.protobuf.model; -import com.google.protobuf.DescriptorProtos.FileDescriptorSet; -import org.junit.jupiter.api.Test; +import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static org.junit.jupiter.api.Assertions.*; +import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; public class ProtobufGraphTest { - @Test - public void autodetectRootMessageTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - assertEquals("MessageB", test.autodetectRootMessage( - fileset.getFileList().stream().filter(f -> f.getName().equals("protobuf/messageB.proto")).findFirst().get()).get().messageProto().getName()); - - assertEquals("MessageA", test.autodetectRootMessage( - fileset.getFileList().stream().filter(f -> f.getName().equals("protobuf/messageA.proto")).findFirst().get()).get().messageProto().getName()); - } - - @Test - public void autodetectRootMessageFailureTest() throws IOException { - FileDescriptorSet empty = getTestProtobufFileSet("protobuf", "messageEmpty"); - assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(empty)); - } - - @Test - public void findMessageTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - assertEquals("MessageA", - test.findMessage("protobuf.MessageA").messageProto().getName()); - assertEquals("MessageB", - test.findMessage("protobuf.MessageB").messageProto().getName()); - - assertThrows(IllegalArgumentException.class, () -> test.findMessage("not found")); - assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(fileset, "not found")); - assertEquals(test, new ProtobufGraph(fileset, "protobuf.MessageB")); - } - - @Test - public void commentTest() throws IOException { - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageC"); - assertEquals("Test for one of", test.getComment()); - } - - @Test - public void equalityHashCodeTest() throws IOException { - ProtobufGraph testA = getTestProtobufGraph("protobuf", "messageA"); - ProtobufGraph testB = getTestProtobufGraph("protobuf", "messageB"); - FileDescriptorSet filesetB = 
getTestProtobufFileSet("protobuf", "messageB"); - - assertEquals(testB, new ProtobufGraph(filesetB)); - assertNotEquals(testA, new ProtobufGraph(filesetB)); - assertEquals(testA, testA); - assertNotEquals(testA, testB); - - HashSet<ProtobufGraph> graphs = new HashSet<>(); - graphs.add(testA); - graphs.add(testB); - graphs.add(new ProtobufGraph(filesetB)); - assertEquals(2, graphs.size()); - } - - @Test - public void duplicateNestedTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - List<ProtobufElement> nestedMessages = test.vertexSet().stream().filter(f -> f.name().endsWith("nested")) - .collect(Collectors.toList()); - - assertEquals(2, nestedMessages.size(), "Expected 2 nested fields"); - } + @Test + public void autodetectRootMessageTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); + + assertEquals( + "MessageB", + test.autodetectRootMessage( + fileset.getFileList().stream() + .filter(f -> f.getName().equals("protobuf/messageB.proto")) + .findFirst() + .get()) + .get() + .messageProto() + .getName()); + + assertEquals( + "MessageA", + test.autodetectRootMessage( + fileset.getFileList().stream() + .filter(f -> f.getName().equals("protobuf/messageA.proto")) + .findFirst() + .get()) + .get() + .messageProto() + .getName()); + } + + @Test + public void autodetectRootMessageFailureTest() throws IOException { + FileDescriptorSet empty = getTestProtobufFileSet("protobuf", "messageEmpty"); + assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(empty)); + } + + @Test + public void findMessageTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); + + assertEquals("MessageA", test.findMessage("protobuf.MessageA").messageProto().getName()); + assertEquals("MessageB", test.findMessage("protobuf.MessageB").messageProto().getName()); + + assertThrows(IllegalArgumentException.class, () -> test.findMessage("not found")); + assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(fileset, "not found")); + assertEquals(test, new ProtobufGraph(fileset, "protobuf.MessageB")); + } + + @Test + public void commentTest() throws IOException { + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageC"); + assertEquals("Test for one of", test.getComment()); + } + + @Test + public void equalityHashCodeTest() throws IOException { + ProtobufGraph testA = getTestProtobufGraph("protobuf", "messageA"); + ProtobufGraph testB = getTestProtobufGraph("protobuf", "messageB"); + FileDescriptorSet filesetB = getTestProtobufFileSet("protobuf", "messageB"); + + assertEquals(testB, new ProtobufGraph(filesetB)); + assertNotEquals(testA, new ProtobufGraph(filesetB)); + assertEquals(testA, testA); + assertNotEquals(testA, testB); + + HashSet<ProtobufGraph> graphs = new HashSet<>(); + graphs.add(testA); + graphs.add(testB); + graphs.add(new ProtobufGraph(filesetB)); + assertEquals(2, graphs.size()); + } + + @Test + public void duplicateNestedTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); + + List<ProtobufElement> nestedMessages = + test.vertexSet().stream() + .filter(f -> f.name().endsWith("nested")) + 
.collect(Collectors.toList()); + + assertEquals(2, nestedMessages.size(), "Expected 2 nested fields"); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java index e961b6ffd2d61..1d6b3907d76d9 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java @@ -1,180 +1,168 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.MapType; import com.linkedin.schema.RecordType; import com.linkedin.schema.SchemaFieldDataType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufMessageTest { - @Test - public void messageTest() { - DescriptorProto expectedMessage = DescriptorProto.newBuilder() - .setName("message1") - .build(); - DescriptorProto expectedParentMessage1 = DescriptorProto.newBuilder() - .setName("messageParent1") - .addNestedType(expectedMessage) - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .build(); - - ProtobufMessage testParent = ProtobufMessage.builder() - .messageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage test = ProtobufMessage.builder() - .messageProto(expectedMessage) - .parentMessageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - - assertEquals("messageParent1", testParent.name()); - assertEquals("protobuf.messageParent1", testParent.fullName()); - assertEquals("protobuf.messageParent1", testParent.nativeType()); - assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); - assertEquals(expectedFile, testParent.fileProto()); - assertEquals(expectedParentMessage1, testParent.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), testParent.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); - - assertEquals("message1", test.name()); - assertEquals("protobuf.messageParent1.message1", test.fullName()); - assertEquals("protobuf.messageParent1.message1", test.nativeType()); - assertEquals("[type=protobuf_messageParent1_message1]", test.fieldPathType()); - assertEquals(expectedFile, test.fileProto()); - assertEquals(expectedMessage, test.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), test.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1.message1]", test.toString()); - } - - @Test - public void mapTest() { - DescriptorProto expectedMap = DescriptorProto.newBuilder() - .setName("MapFieldEntry") - .build(); - DescriptorProto expectedParentMessage1 = DescriptorProto.newBuilder() - .setName("messageParent1") - .addNestedType(expectedMap) - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMap) - 
.setPackage("protobuf") - .build(); - - ProtobufMessage testParent = ProtobufMessage.builder() - .messageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage testMap = ProtobufMessage.builder() - .messageProto(expectedMap) - .parentMessageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - - assertEquals("messageParent1", testParent.name()); - assertEquals("protobuf.messageParent1", testParent.fullName()); - assertEquals("protobuf.messageParent1", testParent.nativeType()); - assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); - assertEquals(expectedFile, testParent.fileProto()); - assertEquals(expectedParentMessage1, testParent.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), testParent.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); - - assertEquals("MapFieldEntry", testMap.name()); - assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.fullName()); - assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.nativeType()); - assertEquals("[type=protobuf_messageParent1_MapFieldEntry]", testMap.fieldPathType()); - assertEquals(expectedFile, testMap.fileProto()); - assertEquals(expectedMap, testMap.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())), testMap.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1.MapFieldEntry]", testMap.toString()); - } - - @Test - public void messageEqualityTest() { - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .build(); - DescriptorProto expectedMessage2 = DescriptorProto.newBuilder() - .setName("message2") - .build(); - DescriptorProto expectedMessage1Dup = DescriptorProto.newBuilder() - .setName("message1") - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addAllMessageType(List.of(expectedMessage1, expectedMessage2, expectedMessage1Dup)) - .setPackage("protobuf") - .build(); - - - ProtobufMessage test1 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage test2 = ProtobufMessage.builder() - .messageProto(expectedMessage2) - .fileProto(expectedFile) - .build(); - ProtobufMessage test1Dup = ProtobufMessage.builder() - .messageProto(expectedMessage1Dup) - .fileProto(expectedFile) - .build(); - - assertEquals(test1, test1Dup); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); - } - - @Test - public void majorVersionTest() { - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .build(); - - FileDescriptorProto expectedFile1 = FileDescriptorProto.newBuilder() - .setName("zendesk/v1/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test1 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile1) - .build(); - assertEquals(1, test1.majorVersion()); - - FileDescriptorProto expectedFile2 = FileDescriptorProto.newBuilder() - .setName("zendesk/v2/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test2 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile2) - .build(); - assertEquals(2, test2.majorVersion()); - - FileDescriptorProto expectedFile3 = FileDescriptorProto.newBuilder() - 
.setName("zendesk/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test3 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile3) - .build(); - assertEquals(1, test3.majorVersion()); - } + @Test + public void messageTest() { + DescriptorProto expectedMessage = DescriptorProto.newBuilder().setName("message1").build(); + DescriptorProto expectedParentMessage1 = + DescriptorProto.newBuilder() + .setName("messageParent1") + .addNestedType(expectedMessage) + .build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .build(); + + ProtobufMessage testParent = + ProtobufMessage.builder() + .messageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + ProtobufMessage test = + ProtobufMessage.builder() + .messageProto(expectedMessage) + .parentMessageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + + assertEquals("messageParent1", testParent.name()); + assertEquals("protobuf.messageParent1", testParent.fullName()); + assertEquals("protobuf.messageParent1", testParent.nativeType()); + assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); + assertEquals(expectedFile, testParent.fileProto()); + assertEquals(expectedParentMessage1, testParent.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + testParent.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); + + assertEquals("message1", test.name()); + assertEquals("protobuf.messageParent1.message1", test.fullName()); + assertEquals("protobuf.messageParent1.message1", test.nativeType()); + assertEquals("[type=protobuf_messageParent1_message1]", test.fieldPathType()); + assertEquals(expectedFile, test.fileProto()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + test.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1.message1]", test.toString()); + } + + @Test + public void mapTest() { + DescriptorProto expectedMap = DescriptorProto.newBuilder().setName("MapFieldEntry").build(); + DescriptorProto expectedParentMessage1 = + DescriptorProto.newBuilder().setName("messageParent1").addNestedType(expectedMap).build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder().addMessageType(expectedMap).setPackage("protobuf").build(); + + ProtobufMessage testParent = + ProtobufMessage.builder() + .messageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + ProtobufMessage testMap = + ProtobufMessage.builder() + .messageProto(expectedMap) + .parentMessageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + + assertEquals("messageParent1", testParent.name()); + assertEquals("protobuf.messageParent1", testParent.fullName()); + assertEquals("protobuf.messageParent1", testParent.nativeType()); + assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); + assertEquals(expectedFile, testParent.fileProto()); + assertEquals(expectedParentMessage1, testParent.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + testParent.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); + + assertEquals("MapFieldEntry", 
testMap.name()); + assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.fullName()); + assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.nativeType()); + assertEquals("[type=protobuf_messageParent1_MapFieldEntry]", testMap.fieldPathType()); + assertEquals(expectedFile, testMap.fileProto()); + assertEquals(expectedMap, testMap.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())), + testMap.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1.MapFieldEntry]", testMap.toString()); + } + + @Test + public void messageEqualityTest() { + DescriptorProto expectedMessage1 = DescriptorProto.newBuilder().setName("message1").build(); + DescriptorProto expectedMessage2 = DescriptorProto.newBuilder().setName("message2").build(); + DescriptorProto expectedMessage1Dup = DescriptorProto.newBuilder().setName("message1").build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addAllMessageType(List.of(expectedMessage1, expectedMessage2, expectedMessage1Dup)) + .setPackage("protobuf") + .build(); + + ProtobufMessage test1 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile).build(); + ProtobufMessage test2 = + ProtobufMessage.builder().messageProto(expectedMessage2).fileProto(expectedFile).build(); + ProtobufMessage test1Dup = + ProtobufMessage.builder().messageProto(expectedMessage1Dup).fileProto(expectedFile).build(); + + assertEquals(test1, test1Dup); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); + } + + @Test + public void majorVersionTest() { + DescriptorProto expectedMessage1 = DescriptorProto.newBuilder().setName("message1").build(); + + FileDescriptorProto expectedFile1 = + FileDescriptorProto.newBuilder() + .setName("zendesk/v1/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test1 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile1).build(); + assertEquals(1, test1.majorVersion()); + + FileDescriptorProto expectedFile2 = + FileDescriptorProto.newBuilder() + .setName("zendesk/v2/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test2 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile2).build(); + assertEquals(2, test2.majorVersion()); + + FileDescriptorProto expectedFile3 = + FileDescriptorProto.newBuilder() + .setName("zendesk/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test3 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile3).build(); + assertEquals(1, test3.majorVersion()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java index 438e0a79206bd..c8bd8a322aad5 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java @@ -1,121 +1,146 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; import 
com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.google.protobuf.DescriptorProtos.OneofDescriptorProto; import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.UnionType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufOneOfFieldTest { - @Test - public void oneOfTest() { - OneofDescriptorProto expectedOneOf = OneofDescriptorProto.newBuilder() - .setName("oneof1") - .build(); - FieldDescriptorProto expectedField = FieldDescriptorProto.newBuilder() - .setName("field1") - .setOneofIndex(0) - .build(); - DescriptorProto expectedMessage = DescriptorProto.newBuilder() - .setName("message1") - .addOneofDecl(expectedOneOf) - .addField(expectedField) - .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .build(); + @Test + public void oneOfTest() { + OneofDescriptorProto expectedOneOf = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + FieldDescriptorProto expectedField = + FieldDescriptorProto.newBuilder().setName("field1").setOneofIndex(0).build(); + DescriptorProto expectedMessage = + DescriptorProto.newBuilder() + .setName("message1") + .addOneofDecl(expectedOneOf) + .addField(expectedField) + .build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .build(); - ProtobufOneOfField test = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage).build()) - .build(); + ProtobufOneOfField test = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage) + .build()) + .build(); - assertEquals("oneof1", test.name()); - assertEquals("protobuf.message1.oneof1", test.fullName()); - assertEquals("[type=union]", test.fieldPathType()); - assertEquals("oneof", test.nativeType()); - assertEquals(expectedOneOf, test.oneOfProto()); - assertEquals(expectedMessage, test.messageProto()); - assertEquals(expectedFile, test.fileProto()); - assertFalse(test.isMessage()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())), test.schemaFieldDataType()); - assertEquals("ProtobufOneOf[protobuf.message1.oneof1]", test.toString()); - } + assertEquals("oneof1", test.name()); + assertEquals("protobuf.message1.oneof1", test.fullName()); + assertEquals("[type=union]", test.fieldPathType()); + assertEquals("oneof", test.nativeType()); + assertEquals(expectedOneOf, test.oneOfProto()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertFalse(test.isMessage()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())), + test.schemaFieldDataType()); + assertEquals("ProtobufOneOf[protobuf.message1.oneof1]", test.toString()); + } - @Test - public void oneOfEqualityTest() { - OneofDescriptorProto oneof1Message1 = OneofDescriptorProto.newBuilder().setName("oneof1").build(); - OneofDescriptorProto oneof2Message1 = OneofDescriptorProto.newBuilder().setName("oneof2").build(); - OneofDescriptorProto oneof1Message2 = 
OneofDescriptorProto.newBuilder().setName("oneof1").build(); - OneofDescriptorProto oneof1Message1Dup = OneofDescriptorProto.newBuilder().setName("oneof1").build(); + @Test + public void oneOfEqualityTest() { + OneofDescriptorProto oneof1Message1 = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + OneofDescriptorProto oneof2Message1 = + OneofDescriptorProto.newBuilder().setName("oneof2").build(); + OneofDescriptorProto oneof1Message2 = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + OneofDescriptorProto oneof1Message1Dup = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); - FieldDescriptorProto expectedField1 = FieldDescriptorProto.newBuilder() - .setName("field1") - .setOneofIndex(0) - .build(); - FieldDescriptorProto expectedField2 = FieldDescriptorProto.newBuilder() - .setName("field2") - .setOneofIndex(1) - .build(); - FieldDescriptorProto expectedField1Dup = FieldDescriptorProto.newBuilder() - .setName("field3") - .setOneofIndex(3) - .build(); - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .addAllOneofDecl(List.of(oneof1Message1, oneof2Message1, oneof1Message1Dup)) - .addField(expectedField1) - .addField(expectedField2) - .addField(expectedField1Dup) - .build(); + FieldDescriptorProto expectedField1 = + FieldDescriptorProto.newBuilder().setName("field1").setOneofIndex(0).build(); + FieldDescriptorProto expectedField2 = + FieldDescriptorProto.newBuilder().setName("field2").setOneofIndex(1).build(); + FieldDescriptorProto expectedField1Dup = + FieldDescriptorProto.newBuilder().setName("field3").setOneofIndex(3).build(); + DescriptorProto expectedMessage1 = + DescriptorProto.newBuilder() + .setName("message1") + .addAllOneofDecl(List.of(oneof1Message1, oneof2Message1, oneof1Message1Dup)) + .addField(expectedField1) + .addField(expectedField2) + .addField(expectedField1Dup) + .build(); - FieldDescriptorProto expectedField3 = FieldDescriptorProto.newBuilder() - .setName("field3") - .setOneofIndex(0) - .build(); - DescriptorProto expectedMessage2 = DescriptorProto.newBuilder() - .setName("message2") - .addAllOneofDecl(List.of(oneof1Message2)) - .addField(expectedField3) - .build(); + FieldDescriptorProto expectedField3 = + FieldDescriptorProto.newBuilder().setName("field3").setOneofIndex(0).build(); + DescriptorProto expectedMessage2 = + DescriptorProto.newBuilder() + .setName("message2") + .addAllOneofDecl(List.of(oneof1Message2)) + .addField(expectedField3) + .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addAllMessageType(List.of(expectedMessage1, expectedMessage2)) - .setPackage("protobuf") - .build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addAllMessageType(List.of(expectedMessage1, expectedMessage2)) + .setPackage("protobuf") + .build(); - ProtobufOneOfField test1 = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField1) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test1Dup = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField1) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test2 = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField2) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test3 = 
ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField3) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage2).build()) - .build(); + ProtobufOneOfField test1 = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField1) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage1) + .build()) + .build(); + ProtobufOneOfField test1Dup = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField1) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage1) + .build()) + .build(); + ProtobufOneOfField test2 = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField2) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage1) + .build()) + .build(); + ProtobufOneOfField test3 = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField3) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage2) + .build()) + .build(); - assertEquals(test1, test1Dup); - assertNotEquals(test1, test3); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2, test3), Stream.of(test1, test2, test3, test1Dup).collect(Collectors.toSet())); - } + assertEquals(test1, test1Dup); + assertNotEquals(test1, test3); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2, test3), + Stream.of(test1, test2, test3, test1Dup).collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java index ceebefb3a207e..2fc5f3834a749 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java @@ -1,38 +1,43 @@ package datahub.protobuf.visitors; +import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import datahub.protobuf.model.FieldTypeEdge; import datahub.protobuf.model.ProtobufElement; import datahub.protobuf.model.ProtobufGraph; -import org.jgrapht.GraphPath; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.assertNotEquals; +import org.jgrapht.GraphPath; +import org.junit.jupiter.api.Test; public class VisitContextTest { - @Test - public void duplicateNestedTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageB"); - VisitContext test = VisitContext.builder().graph(graph).build(); - - List<ProtobufElement> nestedMessages = graph.vertexSet().stream().filter(f -> f.name().endsWith("nested")) - .collect(Collectors.toList()); - - List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsA = graph.getAllPaths(graph.root(), nestedMessages.get(0)); - List<GraphPath<ProtobufElement, 
FieldTypeEdge>> nestedPathsB = graph.getAllPaths(graph.root(), nestedMessages.get(1)); - assertNotEquals(nestedPathsA, nestedPathsB); - - Set<String> fieldPathsA = nestedPathsA.stream().map(test::getFieldPath).collect(Collectors.toSet()); - Set<String> fieldPathsB = nestedPathsB.stream().map(test::getFieldPath).collect(Collectors.toSet()); - assertNotEquals(fieldPathsA, fieldPathsB); - } + @Test + public void duplicateNestedTest() throws IOException { + FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); + ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageB"); + VisitContext test = VisitContext.builder().graph(graph).build(); + + List<ProtobufElement> nestedMessages = + graph.vertexSet().stream() + .filter(f -> f.name().endsWith("nested")) + .collect(Collectors.toList()); + + List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsA = + graph.getAllPaths(graph.root(), nestedMessages.get(0)); + List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsB = + graph.getAllPaths(graph.root(), nestedMessages.get(1)); + assertNotEquals(nestedPathsA, nestedPathsB); + + Set<String> fieldPathsA = + nestedPathsA.stream().map(test::getFieldPath).collect(Collectors.toSet()); + Set<String> fieldPathsB = + nestedPathsB.stream().map(test::getFieldPath).collect(Collectors.toSet()); + assertNotEquals(fieldPathsA, fieldPathsB); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java index fb51f42a6c759..de9a0f5ec4abe 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java @@ -1,56 +1,59 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.urn.DatasetUrn; import com.linkedin.data.template.RecordTemplate; -import org.junit.jupiter.api.Test; - +import datahub.event.MetadataChangeProposalWrapper; +import datahub.protobuf.ProtobufDataset; +import datahub.protobuf.visitors.ProtobufModelVisitor; +import datahub.protobuf.visitors.VisitContext; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - -import datahub.protobuf.ProtobufDataset; -import datahub.protobuf.visitors.ProtobufModelVisitor; -import datahub.protobuf.visitors.VisitContext; -import datahub.event.MetadataChangeProposalWrapper; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class DatasetVisitorTest { - @Test - public void protocBase64Test() throws URISyntaxException, IOException { - String expected = "23454345452345233455"; - DatasetVisitor test = DatasetVisitor.builder().protocBase64(expected).build(); - - List<MetadataChangeProposalWrapper<? 
extends RecordTemplate>> changes = - test.visitGraph( - VisitContext.builder() - .auditStamp(TEST_AUDIT_STAMP) - .datasetUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)")) - .graph(getTestProtobufGraph("protobuf", "messageA")).build() - ).collect(Collectors.toList()); - - assertEquals(expected, extractCustomProperty(changes.get(0), "protoc")); - } - - @Test - public void customDescriptionVisitors() throws IOException { - ProtobufDataset testDataset = getTestProtobufDataset("protobuf", "messageA"); - - DatasetVisitor test = DatasetVisitor.builder() - .descriptionVisitor(new ProtobufModelVisitor<String>() { - @Override - public Stream<String> visitGraph(VisitContext context) { - return Stream.of("Test Description"); - } + @Test + public void protocBase64Test() throws URISyntaxException, IOException { + String expected = "23454345452345233455"; + DatasetVisitor test = DatasetVisitor.builder().protocBase64(expected).build(); + + List<MetadataChangeProposalWrapper<? extends RecordTemplate>> changes = + test.visitGraph( + VisitContext.builder() + .auditStamp(TEST_AUDIT_STAMP) + .datasetUrn( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)")) + .graph(getTestProtobufGraph("protobuf", "messageA")) + .build()) + .collect(Collectors.toList()); + + assertEquals(expected, extractCustomProperty(changes.get(0), "protoc")); + } + + @Test + public void customDescriptionVisitors() throws IOException { + ProtobufDataset testDataset = getTestProtobufDataset("protobuf", "messageA"); + + DatasetVisitor test = + DatasetVisitor.builder() + .descriptionVisitor( + new ProtobufModelVisitor<String>() { + @Override + public Stream<String> visitGraph(VisitContext context) { + return Stream.of("Test Description"); + } }) - .build(); - testDataset.setDatasetVisitor(test); + .build(); + testDataset.setDatasetVisitor(test); - assertEquals("Test Description", extractAspect(testDataset.getDatasetMCPs().get(0), "description")); - } + assertEquals( + "Test Description", extractAspect(testDataset.getDatasetMCPs().get(0), "description")); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java index 4edc65b29d663..679048fb48a53 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java @@ -1,26 +1,27 @@ package datahub.protobuf.visitors.dataset; -import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import datahub.protobuf.model.ProtobufGraph; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class DescriptionVisitorTest { - @Test - public void visitorTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageC2", "protobuf.MessageC2"); + @Test + public void visitorTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("protobuf", 
"messageC2", "protobuf.MessageC2"); - DescriptionVisitor test = new DescriptionVisitor(); + DescriptionVisitor test = new DescriptionVisitor(); - assertEquals(Set.of("This contains nested type\n\nDescription for MessageC2"), - graph.accept(getVisitContextBuilder("protobuf.MessageC2"), List.of(test)).collect(Collectors.toSet())); - } + assertEquals( + Set.of("This contains nested type\n\nDescription for MessageC2"), + graph + .accept(getVisitContextBuilder("protobuf.MessageC2"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java index b3fa2c8fd081b..c24fc30766f0e 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java @@ -1,28 +1,29 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.urn.Urn; import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class DomainVisitorTest { - @Test - public void visitorTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); + @Test + public void visitorTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); - DomainVisitor test = new DomainVisitor(); + DomainVisitor test = new DomainVisitor(); - assertEquals(Set.of(Urn.createFromTuple("domain", "engineering")), - graph.accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)).collect(Collectors.toSet())); - } + assertEquals( + Set.of(Urn.createFromTuple("domain", "engineering")), + graph + .accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java index 09fc0a3765436..a57916441bfcb 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java @@ -1,68 +1,70 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import 
java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.*;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;
 
 public class InstitutionalMemoryVisitorTest {
-    @Test
-    public void messageATest() throws IOException {
-        InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg");
-        assertEquals(Set.of(new InstitutionalMemoryMetadata()
-                .setCreateStamp(TEST_AUDIT_STAMP)
-                .setDescription("Slack Channel")
-                .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")),
-            new InstitutionalMemoryMetadata()
-                .setCreateStamp(TEST_AUDIT_STAMP)
-                .setDescription("Github Team")
-                .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")),
-            new InstitutionalMemoryMetadata()
-                .setCreateStamp(TEST_AUDIT_STAMP)
-                .setDescription("MessageA Reference 1")
-                .setUrl(new Url("https://some/link")),
-            new InstitutionalMemoryMetadata()
-                .setCreateStamp(TEST_AUDIT_STAMP)
-                .setDescription("MessageA Reference 2")
-                .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")),
-            new InstitutionalMemoryMetadata()
-                .setCreateStamp(TEST_AUDIT_STAMP)
-                .setDescription("MessageA Reference 3")
-                .setUrl(new Url("https://github.com/apache/kafka")),
-            new InstitutionalMemoryMetadata()
-                .setCreateStamp(TEST_AUDIT_STAMP)
-                .setDescription("MessageA.map_field Reference 1")
-                .setUrl(new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps"))
-            ),
-
-            getTestProtobufGraph("protobuf", "messageA")
-                .accept(getVisitContextBuilder("protobuf.MessageA"),
-                    List.of(test)).collect(Collectors.toSet()));
-    }
+  @Test
+  public void messageATest() throws IOException {
+    InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg");
+    assertEquals(
+        Set.of(
+            new InstitutionalMemoryMetadata()
+                .setCreateStamp(TEST_AUDIT_STAMP)
+                .setDescription("Slack Channel")
+                .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")),
+            new InstitutionalMemoryMetadata()
+                .setCreateStamp(TEST_AUDIT_STAMP)
+                .setDescription("Github Team")
+                .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")),
+            new InstitutionalMemoryMetadata()
+                .setCreateStamp(TEST_AUDIT_STAMP)
+                .setDescription("MessageA Reference 1")
+                .setUrl(new Url("https://some/link")),
+            new InstitutionalMemoryMetadata()
+                .setCreateStamp(TEST_AUDIT_STAMP)
+                .setDescription("MessageA Reference 2")
+                .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")),
+            new InstitutionalMemoryMetadata()
+                .setCreateStamp(TEST_AUDIT_STAMP)
+                .setDescription("MessageA Reference 3")
+                .setUrl(new Url("https://github.com/apache/kafka")),
+            new InstitutionalMemoryMetadata()
+                .setCreateStamp(TEST_AUDIT_STAMP)
+                .setDescription("MessageA.map_field Reference 1")
+                .setUrl(
+                    new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps"))),
+        getTestProtobufGraph("protobuf", "messageA")
+            .accept(getVisitContextBuilder("protobuf.MessageA"), List.of(test))
+            .collect(Collectors.toSet()));
+  }
-    @Test
-    public void messageBTest() throws IOException {
-        InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg");
-        assertEquals(Set.of(),
-            getTestProtobufGraph("protobuf", "messageB")
-                .accept(getVisitContextBuilder("protobuf.MessageB"),
-                    List.of(test)).collect(Collectors.toSet()));
-    }
+  @Test
+  public void messageBTest() throws IOException {
+    InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg");
+    assertEquals(
+        Set.of(),
+        getTestProtobufGraph("protobuf", "messageB")
+            .accept(getVisitContextBuilder("protobuf.MessageB"), List.of(test))
+            .collect(Collectors.toSet()));
+  }
-    @Test
-    public void messageCTest() throws IOException {
-        InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg");
-        assertEquals(Set.of(), getTestProtobufGraph("protobuf", "messageC")
-            .accept(getVisitContextBuilder("protobuf.MessageC"),
-                List.of(test)).collect(Collectors.toSet()));
-    }
+  @Test
+  public void messageCTest() throws IOException {
+    InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg");
+    assertEquals(
+        Set.of(),
+        getTestProtobufGraph("protobuf", "messageC")
+            .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test))
+            .collect(Collectors.toSet()));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java
index 971500b5f43a2..5f8572cf6ddd8 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java
@@ -1,36 +1,39 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.linkedin.data.template.StringMap;
 import com.linkedin.dataset.DatasetProperties;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;
 
 public class KafkaTopicPropertyVisitorTest {
-    @Test
-    public void visitorTest() throws IOException {
-        KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor();
-        assertEquals(List.of(new DatasetProperties()
-                .setCustomProperties(new StringMap(Map.of("kafka_topic", "platform.topic")))),
-            getTestProtobufGraph("protobuf", "messageA")
-                .accept(getVisitContextBuilder("MessageB"),
-                    List.of(test)).collect(Collectors.toList()));
-    }
+  @Test
+  public void visitorTest() throws IOException {
+    KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor();
+    assertEquals(
+        List.of(
+            new DatasetProperties()
+                .setCustomProperties(new StringMap(Map.of("kafka_topic", "platform.topic")))),
+        getTestProtobufGraph("protobuf", "messageA")
+            .accept(getVisitContextBuilder("MessageB"), List.of(test))
+            .collect(Collectors.toList()));
+  }
-    @Test
-    public void visitorEmptyTest() throws IOException {
-        KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor();
-        assertEquals(Set.of(), getTestProtobufGraph("protobuf", "messageB")
-            .accept(getVisitContextBuilder("MessageB"), List.of(test)).collect(Collectors.toSet()));
-    }
+  @Test
+  public void visitorEmptyTest() throws IOException {
+    KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor();
+    assertEquals(
+        Set.of(),
+        getTestProtobufGraph("protobuf", "messageB")
+            .accept(getVisitContextBuilder("MessageB"), List.of(test))
+            .collect(Collectors.toSet()));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java
index b087c683f9ffe..1b0aff28eb517 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java
@@ -1,58 +1,62 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.linkedin.common.Owner;
 import com.linkedin.common.OwnershipSource;
 import com.linkedin.common.OwnershipSourceType;
 import com.linkedin.common.OwnershipType;
 import com.linkedin.common.urn.Urn;
 import datahub.protobuf.model.ProtobufGraph;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;
 
 public class OwnershipVisitorTest {
-    @Test
-    public void visitorTest() throws IOException {
-        ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA");
-
-        OwnershipVisitor test = new OwnershipVisitor();
-
-        assertEquals(Set.of(new Owner()
-                .setType(OwnershipType.TECHNICAL_OWNER)
-                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
-                .setOwner(Urn.createFromTuple("corpGroup", "teamb")),
-            new Owner()
-                .setType(OwnershipType.TECHNICAL_OWNER)
-                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
-                .setOwner(Urn.createFromTuple("corpuser", "datahub")),
-            new Owner()
-                .setType(OwnershipType.TECHNICAL_OWNER)
-                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
-                .setOwner(Urn.createFromTuple("corpGroup", "technicalowner"))
-            ),
-            graph.accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)).collect(Collectors.toSet()));
-    }
-
-    @Test
-    public void visitorSingleOwnerTest() throws IOException {
-        ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageB");
-
-        OwnershipVisitor test = new OwnershipVisitor();
-
-        assertEquals(Set.of(new Owner()
-                .setType(OwnershipType.DATA_STEWARD)
-                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
-                .setOwner(Urn.createFromTuple("corpuser", "datahub"))
-            ),
-            graph.accept(getVisitContextBuilder("extended_protobuf.MessageB"), List.of(test)).collect(Collectors.toSet()));
-    }
+  @Test
+  public void visitorTest() throws IOException {
+    ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA");
+
+    OwnershipVisitor test = new OwnershipVisitor();
+
+    assertEquals(
+        Set.of(
+            new Owner()
+                .setType(OwnershipType.TECHNICAL_OWNER)
+                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
+                .setOwner(Urn.createFromTuple("corpGroup", "teamb")),
+            new Owner()
+                .setType(OwnershipType.TECHNICAL_OWNER)
+                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
+                .setOwner(Urn.createFromTuple("corpuser", "datahub")),
+            new Owner()
+                .setType(OwnershipType.TECHNICAL_OWNER)
+                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
+                .setOwner(Urn.createFromTuple("corpGroup", "technicalowner"))),
+        graph
+            .accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test))
+            .collect(Collectors.toSet()));
+  }
+
+  @Test
+  public void visitorSingleOwnerTest() throws IOException {
+    ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageB");
+
+    OwnershipVisitor test = new OwnershipVisitor();
+
+    assertEquals(
+        Set.of(
+            new Owner()
+                .setType(OwnershipType.DATA_STEWARD)
+                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
+                .setOwner(Urn.createFromTuple("corpuser", "datahub"))),
+        graph
+            .accept(getVisitContextBuilder("extended_protobuf.MessageB"), List.of(test))
+            .collect(Collectors.toSet()));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java
index dc3647cdf34c8..13912100f28a5 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java
@@ -1,58 +1,68 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static java.util.Map.entry;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.linkedin.data.template.StringMap;
 import com.linkedin.dataset.DatasetProperties;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
+import org.junit.jupiter.api.Test;
 
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static java.util.Map.entry;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+public class PropertyVisitorTest {
 
+  @Test
+  public void extendedMessageTest() throws IOException {
+    PropertyVisitor test = new PropertyVisitor();
 
-public class PropertyVisitorTest {
+    List<DatasetProperties> actual =
+        getTestProtobufGraph("extended_protobuf", "messageA")
+            .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+            .collect(Collectors.toList());
+
+    assertEquals(
+        List.of(
+            new DatasetProperties()
+                .setCustomProperties(
+                    new StringMap(
+                        Map.ofEntries(
+                            entry("classification_enum", "HighlyConfidential"),
+                            entry("bool_feature", "true"),
+                            entry("alert_channel", "#alerts"),
+                            entry("repeat_enum", "[\"ENTITY\",\"EVENT\"]"),
+                            entry("team", "[\"corpGroup:TeamB\",\"corpUser:datahub\"]"),
+                            entry("technical_owner", "[\"corpGroup:TechnicalOwner\"]"),
+                            entry("tag_list", "a, b, c"),
+                            entry("domain", "Engineering"),
+                            entry("repeat_string", "[\"a\",\"b\"]"),
+                            entry("type", "ENTITY"))))),
+        actual);
+  }
+
+  @Test
+  public void extendedFieldTest() throws IOException {
+    PropertyVisitor test = new PropertyVisitor();
+    List<DatasetProperties> actual =
+        getTestProtobufGraph("extended_protobuf", "messageB")
+            .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+            .collect(Collectors.toList());
 
-    @Test
-    public void extendedMessageTest() throws IOException {
-        PropertyVisitor test = new PropertyVisitor();
-
-        List<DatasetProperties> actual = getTestProtobufGraph("extended_protobuf", "messageA")
-            .accept(getVisitContextBuilder("extended_protobuf.Person"),
-                List.of(test)).collect(Collectors.toList());
-
-        assertEquals(List.of(
-            new DatasetProperties().setCustomProperties(new StringMap(Map.ofEntries(
-                entry("classification_enum", "HighlyConfidential"),
-                entry("bool_feature", "true"),
-                entry("alert_channel", "#alerts"),
-                entry("repeat_enum", "[\"ENTITY\",\"EVENT\"]"),
-                entry("team", "[\"corpGroup:TeamB\",\"corpUser:datahub\"]"),
-                entry("technical_owner", "[\"corpGroup:TechnicalOwner\"]"),
-                entry("tag_list", "a, b, c"),
-                entry("domain", "Engineering"),
-                entry("repeat_string", "[\"a\",\"b\"]"),
-                entry("type", "ENTITY"))))),
-            actual);
-    }
-
-    @Test
-    public void extendedFieldTest() throws IOException {
-        PropertyVisitor test = new PropertyVisitor();
-        List<DatasetProperties> actual = getTestProtobufGraph("extended_protobuf", "messageB")
-            .accept(getVisitContextBuilder("extended_protobuf.Person"),
-                List.of(test)).collect(Collectors.toList());
-
-        assertEquals(List.of(new DatasetProperties()
-            .setCustomProperties(new StringMap(Map.ofEntries(
-                entry("data_steward", "corpUser:datahub"),
-                entry("deprecated", "true"),
-                entry("deprecation_note", "[\"Deprecated for this other message.\",\"Drop in replacement.\"]"),
-                entry("deprecation_time", "1649689387")
-            )))), actual);
-    }
+    assertEquals(
+        List.of(
+            new DatasetProperties()
+                .setCustomProperties(
+                    new StringMap(
+                        Map.ofEntries(
+                            entry("data_steward", "corpUser:datahub"),
+                            entry("deprecated", "true"),
+                            entry(
+                                "deprecation_note",
+                                "[\"Deprecated for this other message.\",\"Drop in replacement.\"]"),
+                            entry("deprecation_time", "1649689387"))))),
+        actual);
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java
index c140a798ef6e6..f734c00bb76e0 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java
@@ -1,42 +1,42 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.linkedin.common.GlossaryTermAssociation;
 import com.linkedin.common.urn.GlossaryTermUrn;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;
 
 public class TermAssociationVisitorTest {
-    @Test
-    public void extendedMessageTest() throws IOException {
-        TermAssociationVisitor test = new TermAssociationVisitor();
-        assertEquals(Set.of(
-            new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("a")),
-            new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("b")),
-            new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.ENTITY")),
-            new GlossaryTermAssociation().setUrn(new 
GlossaryTermUrn("MetaEnumExample.EVENT")), - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential")) - ), - getTestProtobufGraph("extended_protobuf", "messageA") - .accept(getVisitContextBuilder("extended_protobuf.Person"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void extendedMessageTest() throws IOException { + TermAssociationVisitor test = new TermAssociationVisitor(); + assertEquals( + Set.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("a")), + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("b")), + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.ENTITY")), + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.EVENT")), + new GlossaryTermAssociation() + .setUrn(new GlossaryTermUrn("Classification.HighlyConfidential"))), + getTestProtobufGraph("extended_protobuf", "messageA") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .collect(Collectors.toSet())); + } - @Test - public void extendedFieldTest() throws IOException { - TermAssociationVisitor test = new TermAssociationVisitor(); - assertEquals( - Set.of(), - getTestProtobufGraph("extended_protobuf", "messageB"). - accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)).collect(Collectors.toSet())); - } + @Test + public void extendedFieldTest() throws IOException { + TermAssociationVisitor test = new TermAssociationVisitor(); + assertEquals( + Set.of(), + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java index 57a8cf1d63cd2..eec397011a4ce 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java @@ -1,5 +1,8 @@ package datahub.protobuf.visitors.field; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -15,207 +18,303 @@ import com.linkedin.schema.StringType; import com.linkedin.util.Pair; import datahub.protobuf.ProtobufDataset; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class ProtobufExtensionFieldVisitorTest { - @Test - public void extendedMessageTest() throws IOException, URISyntaxException { - ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); - List<SchemaField> actual = getTestProtobufGraph("extended_protobuf", "messageA") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) - .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH)) - 
.map(Pair::getFirst) - .collect(Collectors.toList()); + @Test + public void extendedMessageTest() throws IOException, URISyntaxException { + ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); + List<SchemaField> actual = + getTestProtobufGraph("extended_protobuf", "messageA") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .sorted( + ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing( + ProtobufDataset.COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - List<SchemaField> expected = Stream.of( + List<SchemaField> expected = + Stream.of( Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 2), + new SchemaField() + .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].email") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 3), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].email") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + 
.setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 3), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("extended_protobuf.Department") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - new TagAssociation().setTag(new TagUrn("MetaEnumExample.ENTITY")) - ))) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.Sensitive")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("extended_protobuf.Department") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + new TagAssociation() + .setTag(new TagUrn("MetaEnumExample.ENTITY"))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn("Classification.Sensitive")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4) - ).map(Pair::getFirst).collect(Collectors.toList()); - + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + 
.setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - assertEquals(expected, actual); - } + assertEquals(expected, actual); + } - @Test - public void extendedFieldTest() throws IOException { - ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); - List<SchemaField> actual = getTestProtobufGraph("extended_protobuf", "messageB") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) - .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList()); + @Test + public void extendedFieldTest() throws IOException { + ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); + List<SchemaField> actual = + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .sorted( + ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing( + ProtobufDataset.COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - List<SchemaField> expected = Stream.of( + List<SchemaField> expected = + Stream.of( Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("person name") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("person name") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn( + "Classification.HighlyConfidential")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") - .setNullable(false) - .setIsPartOfKey(true) - .setDescription("unique identifier for a given person") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 2), + new SchemaField() + .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") + .setNullable(false) + .setIsPartOfKey(true) + .setDescription("unique identifier for a given person") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + 
.setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].email") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("official email address") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 3), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].email") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("official email address") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn( + "Classification.HighlyConfidential")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 3), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("department name of the person") - .setNativeDataType("extended_protobuf.Department") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("department name of the person") + .setNativeDataType("extended_protobuf.Department") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") - .setNullable(false) - .setIsPartOfKey(true) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") + .setNullable(false) + .setIsPartOfKey(true) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new 
GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].test_coverage") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - new TagAssociation().setTag(new TagUrn("MetaEnumExample.EVENT")), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].test_coverage") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + new TagAssociation() + .setTag(new TagUrn("MetaEnumExample.EVENT")), new TagAssociation().setTag(new TagUrn("d")), new TagAssociation().setTag(new TagUrn("deprecated")), new TagAssociation().setTag(new TagUrn("e")), new TagAssociation().setTag(new TagUrn("f")), - new TagAssociation().setTag(new TagUrn("product_type.my type")), - new TagAssociation().setTag(new TagUrn("product_type_bool")) - ))) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 5) - ).map(Pair::getFirst).collect(Collectors.toList()); + new TagAssociation() + .setTag(new TagUrn("product_type.my type")), + new TagAssociation() + .setTag(new TagUrn("product_type_bool"))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 5)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - assertEquals(expected, actual); - } + assertEquals(expected, actual); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java index 1da29b5320637..af31a80d3b53a 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java +++ 
b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java
@@ -1,5 +1,9 @@
 package datahub.protobuf.visitors.field;
 
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.linkedin.schema.NumberType;
 import com.linkedin.schema.SchemaField;
 import com.linkedin.schema.SchemaFieldDataType;
@@ -7,62 +11,73 @@
 import com.linkedin.schema.UnionType;
 import com.linkedin.util.Pair;
 import datahub.protobuf.ProtobufDataset;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;
 
 public class SchemaFieldVisitorTest {
-    @Test
-    public void visitorTest() throws IOException {
-        List<SchemaField> expected = Stream.of(
+  @Test
+  public void visitorTest() throws IOException {
+    List<SchemaField> expected =
+        Stream.of(
             Pair.of(
-                new SchemaField()
-                    .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")
-                    .setNullable(true)
-                    .setDescription("one of field comment")
-                    .setNativeDataType("oneof")
-                    .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))),
-                1),
+                new SchemaField()
+                    .setFieldPath(
+                        "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")
+                    .setNullable(true)
+                    .setDescription("one of field comment")
+                    .setNativeDataType("oneof")
+                    .setType(
+                        new SchemaFieldDataType()
+                            .setType(SchemaFieldDataType.Type.create(new UnionType()))),
+                1),
             Pair.of(
-                new SchemaField()
-                    .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")
-                    .setNullable(true)
-                    .setDescription("one of string comment")
-                    .setNativeDataType("string")
-                    .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))),
-                1),
+                new SchemaField()
+                    .setFieldPath(
+                        "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")
+                    .setNullable(true)
+                    .setDescription("one of string comment")
+                    .setNativeDataType("string")
+                    .setType(
+                        new SchemaFieldDataType()
+                            .setType(SchemaFieldDataType.Type.create(new StringType()))),
+                1),
             Pair.of(
-                new SchemaField()
-                    .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")
-                    .setNullable(true)
-                    .setDescription("one of int comment")
-                    .setNativeDataType("int32")
-                    .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))),
-                2),
+                new SchemaField()
+                    .setFieldPath(
+                        "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")
+                    .setNullable(true)
+                    .setDescription("one of int comment")
+                    .setNativeDataType("int32")
+                    .setType(
+                        new SchemaFieldDataType()
+                            .setType(SchemaFieldDataType.Type.create(new NumberType()))),
+                2),
             Pair.of(
-                new SchemaField()
-                    .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=string].normal")
-                    .setNullable(true)
-                    .setDescription("")
-                    .setNativeDataType("string")
-                    .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))),
-                4)
-            ).map(Pair::getFirst).collect(Collectors.toList());
+                new SchemaField()
+                    .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=string].normal")
+                    .setNullable(true)
+                    .setDescription("")
+                    .setNativeDataType("string")
+                    .setType(
+                        new SchemaFieldDataType()
+                            .setType(SchemaFieldDataType.Type.create(new StringType()))),
+                4))
+            .map(Pair::getFirst)
+            .collect(Collectors.toList());
 
-        SchemaFieldVisitor test = new SchemaFieldVisitor();
-        assertEquals(expected, getTestProtobufGraph("protobuf", "messageC")
-            .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test))
-            .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH))
-            .map(Pair::getFirst)
-            .collect(Collectors.toList()));
-    }
+    SchemaFieldVisitor test = new SchemaFieldVisitor();
+    assertEquals(
+        expected,
+        getTestProtobufGraph("protobuf", "messageC")
+            .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test))
+            .sorted(
+                ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(
                    ProtobufDataset.COMPARE_BY_FIELD_PATH))
+            .map(Pair::getFirst)
+            .collect(Collectors.toList()));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java
index 84ab1312a7d8a..258d816d9d1da 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java
@@ -1,89 +1,69 @@
 package datahub.protobuf.visitors.tag;
 
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import com.linkedin.tag.TagProperties;
-import datahub.protobuf.visitors.tags.TagVisitor;
 import datahub.event.MetadataChangeProposalWrapper;
-import org.junit.jupiter.api.Test;
-
+import datahub.protobuf.visitors.tags.TagVisitor;
 import java.io.IOException;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;
 
 public class TagVisitorTest {
-    @Test
-    public void extendedMessageTest() throws IOException {
-        TagVisitor test = new TagVisitor();
-        assertEquals(Set.of(
-            new TagProperties()
-                .setName("bool_feature")
-                .setDescription("meta.msg.bool_feature is true."),
-            new TagProperties()
-                .setName("MetaEnumExample.ENTITY")
-                .setDescription("Enum MetaEnumExample.ENTITY of {UNKNOWN, ENTITY, EVENT}"),
-            new TagProperties()
-                .setName("MetaEnumExample.EVENT")
-                .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"),
-            new TagProperties()
-                .setName("a")
-                .setDescription("meta.msg.tag_list"),
-            new TagProperties()
-                .setName("b")
-                .setDescription("meta.msg.tag_list"),
-            new TagProperties()
-                .setName("c")
-                .setDescription("meta.msg.tag_list"),
-            new TagProperties()
-                .setName("repeat_string.a")
-                .setDescription("meta.msg.repeat_string"),
-            new TagProperties()
-                .setName("repeat_string.b")
-                .setDescription("meta.msg.repeat_string"),
-            new TagProperties()
-                .setName("deprecated")
-                .setColorHex("#FF0000")
-            ), getTestProtobufGraph("extended_protobuf", "messageA")
-                .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
-                .map(MetadataChangeProposalWrapper::getAspect)
-                .collect(Collectors.toSet()));
-    }
+  @Test
+  public void extendedMessageTest() throws IOException {
+    TagVisitor test = new TagVisitor();
+    assertEquals(
+        Set.of(
+            new TagProperties()
+                .setName("bool_feature")
+                .setDescription("meta.msg.bool_feature is true."),
+            new TagProperties()
+                .setName("MetaEnumExample.ENTITY")
+                .setDescription("Enum MetaEnumExample.ENTITY of {UNKNOWN, ENTITY, EVENT}"),
+            new TagProperties()
+                .setName("MetaEnumExample.EVENT")
+                .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"),
+            new TagProperties().setName("a").setDescription("meta.msg.tag_list"),
+            new TagProperties().setName("b").setDescription("meta.msg.tag_list"),
+            new TagProperties().setName("c").setDescription("meta.msg.tag_list"),
+            new TagProperties().setName("repeat_string.a").setDescription("meta.msg.repeat_string"),
+            new TagProperties().setName("repeat_string.b").setDescription("meta.msg.repeat_string"),
+            new TagProperties().setName("deprecated").setColorHex("#FF0000")),
+        getTestProtobufGraph("extended_protobuf", "messageA")
+            .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+            .map(MetadataChangeProposalWrapper::getAspect)
+            .collect(Collectors.toSet()));
+  }
 
-    @Test
-    public void extendedFieldTest() throws IOException {
-        Set<TagProperties> expectedTagProperties = Set.of(
-            new TagProperties()
-                .setName("product_type_bool")
-                .setDescription("meta.fld.product_type_bool is true."),
-            new TagProperties()
-                .setName("product_type.my type")
-                .setDescription("meta.fld.product_type"),
-            new TagProperties()
-                .setName("MetaEnumExample.EVENT")
-                .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"),
-            new TagProperties()
-                .setName("d")
-                .setDescription("meta.fld.tag_list"),
-            new TagProperties()
-                .setName("e")
-                .setDescription("meta.fld.tag_list"),
-            new TagProperties()
-                .setName("f")
-                .setDescription("meta.fld.tag_list"),
-            new TagProperties()
-                .setName("deprecated")
-                .setColorHex("#FF0000")
-        );
+  @Test
+  public void extendedFieldTest() throws IOException {
+    Set<TagProperties> expectedTagProperties =
+        Set.of(
+            new TagProperties()
+                .setName("product_type_bool")
+                .setDescription("meta.fld.product_type_bool is true."),
+            new TagProperties()
+                .setName("product_type.my type")
+                .setDescription("meta.fld.product_type"),
+            new TagProperties()
+                .setName("MetaEnumExample.EVENT")
+                .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"),
+            new TagProperties().setName("d").setDescription("meta.fld.tag_list"),
+            new TagProperties().setName("e").setDescription("meta.fld.tag_list"),
+            new TagProperties().setName("f").setDescription("meta.fld.tag_list"),
+            new TagProperties().setName("deprecated").setColorHex("#FF0000"));
 
-        assertEquals(expectedTagProperties,
-            getTestProtobufGraph("extended_protobuf", "messageB")
-                .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(new TagVisitor()))
-                .map(MetadataChangeProposalWrapper::getAspect)
-                .collect(Collectors.toSet()));
-    }
-} \ No newline at end of file
+    assertEquals(
+        expectedTagProperties,
+        getTestProtobufGraph("extended_protobuf", "messageB")
+            .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(new TagVisitor()))
+            .map(MetadataChangeProposalWrapper::getAspect)
+            .collect(Collectors.toSet()));
+  }
+}
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java
index 4fd5c771caeba..4cff55afc92de 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java
@@ -3,46 +3,54 @@
 import com.linkedin.common.urn.DataJobUrn;
 import com.linkedin.common.urn.DatasetUrn;
 import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.mxe.MetadataChangeProposal;
 import datahub.client.MetadataWriteResponse;
 import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder;
 import datahub.client.rest.RestEmitter;
 import java.io.IOException;
-import com.linkedin.mxe.MetadataChangeProposal;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 class DataJobLineageAdd {
-  private DataJobLineageAdd() {
-
-  }
+  private DataJobLineageAdd() {}
 
   /**
    * Adds lineage to an existing DataJob without affecting any lineage
+   *
    * @param args
    * @throws IOException
    * @throws ExecutionException
    * @throws InterruptedException
    */
-  public static void main(String[] args) throws IOException, ExecutionException, InterruptedException {
+  public static void main(String[] args)
+      throws IOException, ExecutionException, InterruptedException {
     String token = "";
-    try (RestEmitter emitter = RestEmitter.create(
-        b -> b.server("http://localhost:8080")
-            .token(token)
-    )) {
-      MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder().urn(UrnUtils
-          .getUrn("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_456)"))
-          .addInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"))
-          .addOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
-          .addInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_123)"))
-          .addInputDatasetField(UrnUtils.getUrn(
-              "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)"))
-          .addOutputDatasetField(UrnUtils.getUrn(
-              "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)"))
-          .build();
+    try (RestEmitter emitter =
+        RestEmitter.create(b -> b.server("http://localhost:8080").token(token))) {
+      MetadataChangeProposal dataJobIOPatch =
+          new DataJobInputOutputPatchBuilder()
+              .urn(
+                  UrnUtils.getUrn(
+                      "urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_456)"))
+              .addInputDatasetEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"))
+              .addOutputDatasetEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .addInputDatajobEdge(
+                  DataJobUrn.createFromString(
+                      "urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_123)"))
+              .addInputDatasetField(
+                  UrnUtils.getUrn(
+                      "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)"))
+              .addOutputDatasetField(
+                  UrnUtils.getUrn(
+                      "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)"))
+              .build();
 
       Future<MetadataWriteResponse> response = emitter.emit(dataJobIOPatch);
 
@@ -51,9 +59,5 @@ public static void main(String[] args) throws IOException, ExecutionException, I
       log.error("Failed to emit metadata to DataHub", e);
       throw new RuntimeException(e);
     }
-
   }
-
 }
-
-
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java
index ac368972e8dc9..342fbddde8223 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java
@@ -15,70 +15,79 @@
 import datahub.client.MetadataWriteResponse;
 import datahub.client.rest.RestEmitter;
 import datahub.event.MetadataChangeProposalWrapper;
-
 import java.io.IOException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 
 public class DatasetAdd {
-
-  private DatasetAdd() {
-
-  }
 
-  public static void main(String[] args) throws IOException, ExecutionException, InterruptedException {
-    DatasetUrn datasetUrn = UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD");
-    CorpuserUrn userUrn = new CorpuserUrn("ingestion");
-    AuditStamp lastModified = new AuditStamp().setTime(1640692800000L).setActor(userUrn);
+  private DatasetAdd() {}
 
-    SchemaMetadata schemaMetadata = new SchemaMetadata()
-        .setSchemaName("customer")
-        .setPlatform(new DataPlatformUrn("hive"))
-        .setVersion(0L)
-        .setHash("")
-        .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new OtherSchema().setRawSchema("__insert raw schema here__")))
-        .setLastModified(lastModified);
+  public static void main(String[] args)
+      throws IOException, ExecutionException, InterruptedException {
+    DatasetUrn datasetUrn = UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD");
+    CorpuserUrn userUrn = new CorpuserUrn("ingestion");
+    AuditStamp lastModified = new AuditStamp().setTime(1640692800000L).setActor(userUrn);
 
-    SchemaFieldArray fields = new SchemaFieldArray();
+    SchemaMetadata schemaMetadata =
+        new SchemaMetadata()
+            .setSchemaName("customer")
+            .setPlatform(new DataPlatformUrn("hive"))
+            .setVersion(0L)
+            .setHash("")
+            .setPlatformSchema(
+                SchemaMetadata.PlatformSchema.create(
+                    new OtherSchema().setRawSchema("__insert raw schema here__")))
+            .setLastModified(lastModified);
 
-    SchemaField field1 = new SchemaField()
-        .setFieldPath("address.zipcode")
-        .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType())))
-        .setNativeDataType("VARCHAR(50)")
-        .setDescription("This is the zipcode of the address. Specified using extended form and limited to addresses in the United States")
-        .setLastModified(lastModified);
-    fields.add(field1);
+    SchemaFieldArray fields = new SchemaFieldArray();
 
-    SchemaField field2 = new SchemaField().setFieldPath("address.street")
-        .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType())))
-        .setNativeDataType("VARCHAR(100)")
-        .setDescription("Street corresponding to the address")
-        .setLastModified(lastModified);
-    fields.add(field2);
+    SchemaField field1 =
+        new SchemaField()
+            .setFieldPath("address.zipcode")
+            .setType(
+                new SchemaFieldDataType()
+                    .setType(SchemaFieldDataType.Type.create(new StringType())))
+            .setNativeDataType("VARCHAR(50)")
+            .setDescription(
+                "This is the zipcode of the address. Specified using extended form and limited to addresses in the United States")
+            .setLastModified(lastModified);
+    fields.add(field1);
 
-    SchemaField field3 = new SchemaField().setFieldPath("last_sold_date")
-        .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new DateType())))
-        .setNativeDataType("Date")
-        .setDescription("Date of the last sale date for this property")
-        .setLastModified(lastModified);
-    fields.add(field3);
+    SchemaField field2 =
+        new SchemaField()
+            .setFieldPath("address.street")
+            .setType(
+                new SchemaFieldDataType()
+                    .setType(SchemaFieldDataType.Type.create(new StringType())))
+            .setNativeDataType("VARCHAR(100)")
+            .setDescription("Street corresponding to the address")
+            .setLastModified(lastModified);
+    fields.add(field2);
 
-    schemaMetadata.setFields(fields);
+    SchemaField field3 =
+        new SchemaField()
+            .setFieldPath("last_sold_date")
+            .setType(
+                new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new DateType())))
+            .setNativeDataType("Date")
+            .setDescription("Date of the last sale date for this property")
+            .setLastModified(lastModified);
+    fields.add(field3);
 
-    MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder()
-        .entityType("dataset")
-        .entityUrn(datasetUrn)
-        .upsert()
-        .aspect(schemaMetadata)
-        .build();
+    schemaMetadata.setFields(fields);
 
-    String token = "";
-    RestEmitter emitter = RestEmitter.create(
-        b -> b.server("http://localhost:8080")
-            .token(token)
-    );
-    Future<MetadataWriteResponse> response = emitter.emit(mcpw, null);
-    System.out.println(response.get().getResponseContent());
-  }
+    MetadataChangeProposalWrapper mcpw =
+        MetadataChangeProposalWrapper.builder()
+            .entityType("dataset")
+            .entityUrn(datasetUrn)
+            .upsert()
+            .aspect(schemaMetadata)
+            .build();
 
-} \ No newline at end of file
+    String token = "";
+    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
+    Future<MetadataWriteResponse> response = emitter.emit(mcpw, null);
+    System.out.println(response.get().getResponseContent());
+  }
+}
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java
index 5d1698556cac5..b30cb5166df70 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java
@@ -1,55 +1,49 @@
 package io.datahubproject.examples;
 
 import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.mxe.MetadataChangeProposal;
 import datahub.client.MetadataWriteResponse;
 import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder;
 import datahub.client.rest.RestEmitter;
 import java.io.IOException;
-import com.linkedin.mxe.MetadataChangeProposal;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 class DatasetCustomPropertiesAdd {
-  private DatasetCustomPropertiesAdd() {
-
-  }
+  private DatasetCustomPropertiesAdd() {}
 
   /**
-   * Adds properties to an existing custom properties aspect without affecting any existing properties
+   * Adds properties to an existing custom properties aspect without affecting any existing
+   * properties
+   *
    * @param args
   * @throws IOException
   * @throws ExecutionException
   * @throws InterruptedException
   */
-  public static void main(String[] args) throws IOException, ExecutionException, InterruptedException {
-    MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder()
-        .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
-        .addCustomProperty("cluster_name", "datahubproject.acryl.io")
-        .addCustomProperty("retention_time", "2 years")
-        .build();
-
-    String token = "";
-    RestEmitter emitter = RestEmitter.create(
-        b -> b.server("http://localhost:8080")
-            .token(token)
-    );
-    try {
-      Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal);
-
-      System.out.println(response.get().getResponseContent());
-    } catch (Exception e) {
-      log.error("Failed to emit metadata to DataHub", e);
-      throw e;
-    } finally {
-      emitter.close();
-    }
-
+  public static void main(String[] args)
+      throws IOException, ExecutionException, InterruptedException {
+    MetadataChangeProposal datasetPropertiesProposal =
+        new DatasetPropertiesPatchBuilder()
+            .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
+            .addCustomProperty("cluster_name", "datahubproject.acryl.io")
+            .addCustomProperty("retention_time", "2 years")
+            .build();
+
+    String token = "";
+    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
+    try {
+      Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal);
+
+      System.out.println(response.get().getResponseContent());
+    } catch (Exception e) {
+      log.error("Failed to emit metadata to DataHub", e);
+      throw e;
+    } finally {
+      emitter.close();
     }
-
+  }
 }
-
-
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java
index 9a0ec2030be48..0a89e87060698 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java
@@ -10,47 +10,40 @@
 import java.util.concurrent.Future;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 class DatasetCustomPropertiesAddRemove {
-  private DatasetCustomPropertiesAddRemove() {
-
-  }
+  private DatasetCustomPropertiesAddRemove() {}
 
   /**
    * Applies Add and Remove property operations on an existing custom properties aspect without
    * affecting any other properties
+   *
   * @param args
   * @throws IOException
   * @throws ExecutionException
   * @throws InterruptedException
   */
-  public static void main(String[] args) throws IOException, ExecutionException, InterruptedException {
-    MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder()
-        .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
-        .addCustomProperty("cluster_name", "datahubproject.acryl.io")
-        .removeCustomProperty("retention_time")
-        .build();
-
-    String token = "";
-    RestEmitter emitter = RestEmitter.create(
-        b -> b.server("http://localhost:8080")
-            .token(token)
-    );
-    try {
-      Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal);
-
-      System.out.println(response.get().getResponseContent());
-    } catch (Exception e) {
-      log.error("Failed to emit metadata to DataHub", e);
-      throw e;
-    } finally {
-      emitter.close();
-    }
-
+  public static void main(String[] args)
+      throws IOException, ExecutionException, InterruptedException {
+    MetadataChangeProposal datasetPropertiesProposal =
+        new DatasetPropertiesPatchBuilder()
+            .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
+            .addCustomProperty("cluster_name", "datahubproject.acryl.io")
+            .removeCustomProperty("retention_time")
+            .build();
+
+    String token = "";
+    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
+    try {
+      Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal);
+
+      System.out.println(response.get().getResponseContent());
+    } catch (Exception e) {
+      log.error("Failed to emit metadata to DataHub", e);
+      throw e;
+    } finally {
+      emitter.close();
    }
-
+  }
 }
-
-
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java
index 1d4c937e2f6a0..053c1f068e048 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java
@@ -11,17 +11,15 @@
 import java.util.concurrent.Future;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 class DatasetCustomPropertiesReplace {
-  private DatasetCustomPropertiesReplace() {
-
-  }
+  private DatasetCustomPropertiesReplace() {}
 
   /**
-   * Replaces the existing custom properties map with a new map.
-   * Fields like dataset name, description etc remain unchanged.
+   * Replaces the existing custom properties map with a new map. Fields like dataset name,
+   * description etc remain unchanged.
+   *
   * @param args
   * @throws IOException
   */
@@ -29,16 +27,14 @@ public static void main(String[] args) throws IOException {
     Map<String, String> customPropsMap = new HashMap<>();
     customPropsMap.put("cluster_name", "datahubproject.acryl.io");
     customPropsMap.put("retention_time", "2 years");
-    MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder()
-        .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
-        .setCustomProperties(customPropsMap)
-        .build();
+    MetadataChangeProposal datasetPropertiesProposal =
+        new DatasetPropertiesPatchBuilder()
+            .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
+            .setCustomProperties(customPropsMap)
+            .build();
 
     String token = "";
-    RestEmitter emitter = RestEmitter.create(
-        b -> b.server("http://localhost:8080")
-            .token(token)
-    );
+    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
     try {
       Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal);
 
@@ -48,9 +44,5 @@ public static void main(String[] args) throws IOException {
     } finally {
       emitter.close();
     }
-
  }
-
 }
-
-
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java
index 077489a9e02d9..233434ccf7002 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java
@@ -4,37 +4,32 @@
 import datahub.client.MetadataWriteResponse;
 import datahub.client.rest.RestEmitter;
 import datahub.event.MetadataChangeProposalWrapper;
-
 import java.io.IOException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 
 public class TagCreate {
-
-  private TagCreate() {
-
-  }
-
-  public static void main(String[] args) throws IOException, ExecutionException, InterruptedException {
-    TagProperties tagProperties = new TagProperties()
-        .setName("Deprecated")
-        .setDescription("Having this tag means this column or table is deprecated.");
-
-    MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder()
-        .entityType("tag")
-        .entityUrn("urn:li:tag:deprecated")
-        .upsert()
-        .aspect(tagProperties)
-        .build();
-
-    String token = "";
-    RestEmitter emitter = RestEmitter.create(
-        b -> b.server("http://localhost:8080")
-            .token(token)
-    );
-    Future<MetadataWriteResponse> response = emitter.emit(mcpw, null);
-    System.out.println(response.get().getResponseContent());
-
-  }
+  private TagCreate() {}
+
+  public static void main(String[] args)
+      throws IOException, ExecutionException, InterruptedException {
+    TagProperties tagProperties =
+        new TagProperties()
+            .setName("Deprecated")
+            .setDescription("Having this tag means this column or table is deprecated.");
+
+    MetadataChangeProposalWrapper mcpw =
+        MetadataChangeProposalWrapper.builder()
+            .entityType("tag")
+            .entityUrn("urn:li:tag:deprecated")
+            .upsert()
+            .aspect(tagProperties)
+            .build();
+
+    String token = "";
+    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
+    Future<MetadataWriteResponse> response = emitter.emit(mcpw, null);
+    System.out.println(response.get().getResponseContent());
+  }
 }
diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java
index 2b9d20009eeb7..3dc5cfc919c16 100644
--- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java
+++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java
@@ -7,25 +7,27 @@
 public class HdfsIn2HdfsOut1 {
 
-    private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut1.class.getSimpleName();
-    private static final String DATA_DIR = "../resources/data";
+  private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut1.class.getSimpleName();
+  private static final String DATA_DIR = "../resources/data";
 
-    public static void main(String[] args) {
+  public static void main(String[] args) {
 
-        System.out.println("Inside main");
-        SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate();
+    System.out.println("Inside main");
+    SparkSession spark =
+        SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate();
 
-        Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv");
-        Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv");
-        df1.createOrReplaceTempView("v1");
-        df2.createOrReplaceTempView("v2");
+    Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv");
+    Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv");
+    df1.createOrReplaceTempView("v1");
+    df2.createOrReplaceTempView("v2");
 
-        Dataset<Row> df = spark
-            .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id");
+    Dataset<Row> df =
+        spark.sql(
+            "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id");
 
-        // InsertIntoHadoopFsRelationCommand
-        df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv");
+    // InsertIntoHadoopFsRelationCommand
+    df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv");
 
-        spark.stop();
-    }
+    spark.stop();
+  }
 }
diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java
index ed7dd95431a34..34a5e5dfaef97 100644
--- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java
+++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java
@@ -1,6 +1,5 @@
 package test.spark.lineage;
 
-
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SaveMode;
@@ -8,30 +7,31 @@
 
 public class HdfsIn2HdfsOut2 {
 
-    private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut2.class.getSimpleName();
-    private static final String DATA_DIR = "../resources/data";
-
-    public static void main(String[] args) {
-        SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate();
-
-        Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv");
-        Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv");
-        df1.createOrReplaceTempView("v1");
-        df2.createOrReplaceTempView("v2");
+  private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut2.class.getSimpleName();
+  private static final String DATA_DIR = "../resources/data";
 
-        Dataset<Row> df = spark
-            .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id");
+  public static void main(String[] args) {
+    SparkSession spark =
+        SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate();
 
-        // InsertIntoHadoopFsRelationCommand
-        df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv");
+    Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv");
+    Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv");
+    df1.createOrReplaceTempView("v1");
+    df2.createOrReplaceTempView("v2");
 
-        Dataset<Row> dfO = spark
-            .sql("select v1.c1 as a1, v1.c2 as b1, v2.c1 as c1, v2.c2 as d1 from v1 join v2 on v1.id = v2.id");
+    Dataset<Row> df =
+        spark.sql(
+            "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id");
 
-        // InsertIntoHadoopFsRelationCommand
-        dfO.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv");
-        spark.stop();
+    // InsertIntoHadoopFsRelationCommand
+    df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv");
 
-    }
+    Dataset<Row> dfO =
+        spark.sql(
+            "select v1.c1 as a1, v1.c2 as b1, v2.c1 as c1, v2.c2 as d1 from v1 join v2 on v1.id = v2.id");
+    // InsertIntoHadoopFsRelationCommand
+    dfO.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv");
+    spark.stop();
+  }
 }
diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java
index b2bafcfade35d..1fc6d0374d2ed 100644
--- 
a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java @@ -7,29 +7,44 @@ public class HdfsIn2HiveCreateInsertTable { - private static final String TEST_NAME = "Java" + HdfsIn2HiveCreateInsertTable.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME,"foo4")); - - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); - - Dataset<Row> df = df1.join(df2, "id").drop("id"); - - df.write().mode(SaveMode.Overwrite).saveAsTable(Utils.tbl(TEST_NAME,"foo4")); // CreateDataSourceTableAsSelectCommand - df.write().mode(SaveMode.Append).saveAsTable(Utils.tbl(TEST_NAME,"foo4")); // CreateDataSourceTableAsSelectCommand - df.write().insertInto(Utils.tbl(TEST_NAME,"foo4")); // InsertIntoHadoopFsRelationCommand - - spark.stop(); - } - + private static final String TEST_NAME = + "Java" + HdfsIn2HiveCreateInsertTable.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; + + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo4")); + + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); + + Dataset<Row> df = df1.join(df2, "id").drop("id"); + + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(Utils.tbl(TEST_NAME, "foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Append) + .saveAsTable(Utils.tbl(TEST_NAME, "foo4")); // CreateDataSourceTableAsSelectCommand + df.write().insertInto(Utils.tbl(TEST_NAME, "foo4")); // InsertIntoHadoopFsRelationCommand + + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java index ca15bfee111fe..6d9cc032f7e9d 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java @@ -7,27 +7,39 @@ public class HdfsIn2HiveCreateTable { - private static final String TEST_NAME = "Java" + 
HdfsIn2HiveCreateTable.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME,"foo3")); - - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); - - Dataset<Row> df = df1.join(df2, "id").drop("id"); - - df.write().mode(SaveMode.Overwrite).saveAsTable(Utils.tbl(TEST_NAME,"foo3")); // CreateDataSourceTableAsSelectCommand - - spark.stop(); - } - + private static final String TEST_NAME = "Java" + HdfsIn2HiveCreateTable.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; + + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo3")); + + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); + + Dataset<Row> df = df1.join(df2, "id").drop("id"); + + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(Utils.tbl(TEST_NAME, "foo3")); // CreateDataSourceTableAsSelectCommand + + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java index 6b8de329ba05a..7d71136e27f24 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java @@ -6,44 +6,66 @@ public class HiveInHiveOut { - private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); + spark.sql("CREATE DATABASE IF NOT EXISTS " + 
TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "foo5") + " as " - + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "hivetab") + " as " + "(select * from " - + Utils.tbl(TEST_NAME, "foo5") + ")"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "hivetab") + + " as " + + "(select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - // InsertIntoHiveTable - spark.sql( - "insert into " + Utils.tbl(TEST_NAME, "hivetab") + " (select * from " + Utils.tbl(TEST_NAME, "foo5") + ")"); + // InsertIntoHiveTable + spark.sql( + "insert into " + + Utils.tbl(TEST_NAME, "hivetab") + + " (select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); + Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); - // InsertIntoHiveTable - df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); - - spark.stop(); - } + // InsertIntoHiveTable + df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java index 2d31b72998637..598b347cd2064 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java @@ -6,44 +6,65 @@ public class HiveInHiveOut_test1 { - private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().enableHiveSupport().getOrCreate(); + public static void main(String[] args) { + SparkSession spark = SparkSession.builder().enableHiveSupport().getOrCreate(); - spark.sql("DROP DATABASE IF EXISTS " + 
TEST_NAME + " CASCADE"); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "foo5") + " as " - + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "hivetab") + " as " + "(select * from " - + Utils.tbl(TEST_NAME, "foo5") + ")"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "hivetab") + + " as " + + "(select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - // InsertIntoHiveTable - spark.sql( - "insert into " + Utils.tbl(TEST_NAME, "hivetab") + " (select * from " + Utils.tbl(TEST_NAME, "foo5") + ")"); + // InsertIntoHiveTable + spark.sql( + "insert into " + + Utils.tbl(TEST_NAME, "hivetab") + + " (select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); + Dataset<Row> df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); - // InsertIntoHiveTable - df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); - - spark.stop(); - } + // InsertIntoHiveTable + df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java index 22007a8d41e90..278d7068f20bc 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java @@ -1,7 +1,7 @@ package test.spark.lineage; public class Utils { - public static String tbl(String testDb ,String tbl) { + public static String tbl(String testDb, String tbl) { return testDb + "." 
+ tbl; } } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java index 90410332c3d7a..1dda979bfcefd 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java @@ -1,6 +1,17 @@ package datahub.spark; +import com.google.common.base.Splitter; +import com.typesafe.config.Config; import datahub.spark.consumer.impl.CoalesceJobsEmitter; +import datahub.spark.consumer.impl.McpEmitter; +import datahub.spark.model.AppEndEvent; +import datahub.spark.model.AppStartEvent; +import datahub.spark.model.DatasetLineage; +import datahub.spark.model.LineageConsumer; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.SQLQueryExecEndEvent; +import datahub.spark.model.SQLQueryExecStartEvent; +import datahub.spark.model.dataset.SparkDataset; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; @@ -15,7 +26,7 @@ import java.util.concurrent.ExecutorService; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - +import lombok.extern.slf4j.Slf4j; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.SparkEnv; @@ -30,27 +41,12 @@ import org.apache.spark.sql.execution.SQLExecution; import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionEnd; import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart; - -import com.google.common.base.Splitter; -import com.typesafe.config.Config; - -import datahub.spark.consumer.impl.McpEmitter; -import datahub.spark.model.AppEndEvent; -import datahub.spark.model.AppStartEvent; -import datahub.spark.model.DatasetLineage; -import datahub.spark.model.LineageConsumer; -import datahub.spark.model.LineageUtils; -import datahub.spark.model.SQLQueryExecEndEvent; -import datahub.spark.model.SQLQueryExecStartEvent; -import datahub.spark.model.dataset.SparkDataset; -import lombok.extern.slf4j.Slf4j; import org.apache.spark.util.JsonProtocol; import org.json4s.jackson.JsonMethods$; import scala.collection.JavaConversions; import scala.runtime.AbstractFunction1; import scala.runtime.AbstractPartialFunction; - @Slf4j public class DatahubSparkListener extends SparkListener { @@ -63,7 +59,8 @@ public class DatahubSparkListener extends SparkListener { public static final String COALESCE_KEY = "coalesce_jobs"; private final Map<String, AppStartEvent> appDetails = new ConcurrentHashMap<>(); - private final Map<String, Map<Long, SQLQueryExecStartEvent>> appSqlDetails = new ConcurrentHashMap<>(); + private final Map<String, Map<Long, SQLQueryExecStartEvent>> appSqlDetails = + new ConcurrentHashMap<>(); private final Map<String, McpEmitter> appEmitters = new ConcurrentHashMap<>(); private final Map<String, Config> appConfig = new ConcurrentHashMap<>(); @@ -77,15 +74,22 @@ private class SqlStartTask { private final SparkContext ctx; private final LogicalPlan plan; - public SqlStartTask(SparkListenerSQLExecutionStart sqlStart, LogicalPlan plan, SparkContext ctx) { + public SqlStartTask( + SparkListenerSQLExecutionStart sqlStart, LogicalPlan plan, SparkContext ctx) { this.sqlStart = sqlStart; this.plan = plan; this.ctx = ctx; String jsonPlan = (plan != null) ? plan.toJSON() : null; String sqlStartJson = - (sqlStart != null) ? 
JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(sqlStart)) : null; - log.debug("SqlStartTask with parameters: sqlStart: {}, plan: {}, ctx: {}", sqlStartJson, jsonPlan, ctx); + (sqlStart != null) + ? JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(sqlStart)) + : null; + log.debug( + "SqlStartTask with parameters: sqlStart: {}, plan: {}, ctx: {}", + sqlStartJson, + jsonPlan, + ctx); } public void run() { @@ -104,40 +108,55 @@ public void run() { return; } - appSqlDetails.get(ctx.applicationId()) - .put(sqlStart.executionId(), - new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(), - sqlStart.time(), sqlStart.executionId(), null)); - log.debug("PLAN for execution id: " + getPipelineName(ctx) + ":" + sqlStart.executionId() + "\n"); + appSqlDetails + .get(ctx.applicationId()) + .put( + sqlStart.executionId(), + new SQLQueryExecStartEvent( + ctx.conf().get("spark.master"), + getPipelineName(ctx), + ctx.applicationId(), + sqlStart.time(), + sqlStart.executionId(), + null)); + log.debug( + "PLAN for execution id: " + getPipelineName(ctx) + ":" + sqlStart.executionId() + "\n"); log.debug(plan.toString()); - Optional<? extends Collection<SparkDataset>> outputDS = DatasetExtractor.asDataset(plan, ctx, true); + Optional<? extends Collection<SparkDataset>> outputDS = + DatasetExtractor.asDataset(plan, ctx, true); if (!outputDS.isPresent() || outputDS.get().isEmpty()) { - log.debug("Skipping execution as no output dataset present for execution id: " + ctx.applicationId() + ":" - + sqlStart.executionId()); + log.debug( + "Skipping execution as no output dataset present for execution id: " + + ctx.applicationId() + + ":" + + sqlStart.executionId()); return; } // Here assumption is that there will be only single target for single sql query DatasetLineage lineage = - new DatasetLineage(sqlStart.description(), plan.toString(), outputDS.get().iterator().next()); + new DatasetLineage( + sqlStart.description(), plan.toString(), outputDS.get().iterator().next()); Collection<QueryPlan<?>> allInners = new ArrayList<>(); - plan.collect(new AbstractPartialFunction<LogicalPlan, Void>() { - - @Override - public Void apply(LogicalPlan plan) { - log.debug("CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); - Optional<? extends Collection<SparkDataset>> inputDS = DatasetExtractor.asDataset(plan, ctx, false); - inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); - allInners.addAll(JavaConversions.asJavaCollection(plan.innerChildren())); - return null; - } + plan.collect( + new AbstractPartialFunction<LogicalPlan, Void>() { + + @Override + public Void apply(LogicalPlan plan) { + log.debug("CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); + Optional<? 
extends Collection<SparkDataset>> inputDS = + DatasetExtractor.asDataset(plan, ctx, false); + inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); + allInners.addAll(JavaConversions.asJavaCollection(plan.innerChildren())); + return null; + } - @Override - public boolean isDefinedAt(LogicalPlan x) { - return true; - } - }); + @Override + public boolean isDefinedAt(LogicalPlan x) { + return true; + } + }); for (QueryPlan<?> qp : allInners) { if (!(qp instanceof LogicalPlan)) { @@ -145,28 +164,42 @@ public boolean isDefinedAt(LogicalPlan x) { } LogicalPlan nestedPlan = (LogicalPlan) qp; - nestedPlan.collect(new AbstractPartialFunction<LogicalPlan, Void>() { - - @Override - public Void apply(LogicalPlan plan) { - log.debug("INNER CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); - Optional<? extends Collection<SparkDataset>> inputDS = DatasetExtractor.asDataset(plan, ctx, false); - inputDS.ifPresent( - x -> log.debug("source added for " + ctx.appName() + "/" + sqlStart.executionId() + ": " + x)); - inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); - return null; - } - - @Override - public boolean isDefinedAt(LogicalPlan x) { - return true; - } - }); + nestedPlan.collect( + new AbstractPartialFunction<LogicalPlan, Void>() { + + @Override + public Void apply(LogicalPlan plan) { + log.debug("INNER CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); + Optional<? extends Collection<SparkDataset>> inputDS = + DatasetExtractor.asDataset(plan, ctx, false); + inputDS.ifPresent( + x -> + log.debug( + "source added for " + + ctx.appName() + + "/" + + sqlStart.executionId() + + ": " + + x)); + inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); + return null; + } + + @Override + public boolean isDefinedAt(LogicalPlan x) { + return true; + } + }); } SQLQueryExecStartEvent evt = - new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(), - sqlStart.time(), sqlStart.executionId(), lineage); + new SQLQueryExecStartEvent( + ctx.conf().get("spark.master"), + getPipelineName(ctx), + ctx.applicationId(), + sqlStart.time(), + sqlStart.executionId(), + lineage); appSqlDetails.get(ctx.applicationId()).put(sqlStart.executionId(), evt); @@ -185,14 +218,16 @@ public boolean isDefinedAt(LogicalPlan x) { public void onApplicationStart(SparkListenerApplicationStart applicationStart) { try { log.info("Application started: " + applicationStart); - LineageUtils.findSparkCtx().foreach(new AbstractFunction1<SparkContext, Void>() { - - @Override - public Void apply(SparkContext sc) { - checkOrCreateApplicationSetup(sc); - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1<SparkContext, Void>() { + + @Override + public Void apply(SparkContext sc) { + checkOrCreateApplicationSetup(sc); + return null; + } + }); super.onApplicationStart(applicationStart); } catch (Exception e) { // log error, but don't impact thread @@ -207,41 +242,52 @@ public Void apply(SparkContext sc) { @Override public void onApplicationEnd(SparkListenerApplicationEnd applicationEnd) { try { - LineageUtils.findSparkCtx().foreach(new AbstractFunction1<SparkContext, Void>() { - - @Override - public Void apply(SparkContext sc) { - log.info("Application ended : {} {}", sc.appName(), sc.applicationId()); - AppStartEvent start = appDetails.remove(sc.applicationId()); - appSqlDetails.remove(sc.applicationId()); - if (start == null) { - log.error("Application end event received, but start event missing for appId " + 
sc.applicationId()); - } else { - AppEndEvent evt = new AppEndEvent(LineageUtils.getMaster(sc), getPipelineName(sc), sc.applicationId(), - applicationEnd.time(), start); - - McpEmitter emitter = appEmitters.get(sc.applicationId()); - if (emitter != null) { - emitter.accept(evt); - try { - emitter.close(); - appEmitters.remove(sc.applicationId()); - } catch (Exception e) { - log.warn("Failed to close underlying emitter due to {}", e.getMessage()); - } - } - consumers().forEach(x -> { - x.accept(evt); - try { - x.close(); - } catch (IOException e) { - log.warn("Failed to close lineage consumer", e); - } - }); - } - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1<SparkContext, Void>() { + + @Override + public Void apply(SparkContext sc) { + log.info("Application ended : {} {}", sc.appName(), sc.applicationId()); + AppStartEvent start = appDetails.remove(sc.applicationId()); + appSqlDetails.remove(sc.applicationId()); + if (start == null) { + log.error( + "Application end event received, but start event missing for appId " + + sc.applicationId()); + } else { + AppEndEvent evt = + new AppEndEvent( + LineageUtils.getMaster(sc), + getPipelineName(sc), + sc.applicationId(), + applicationEnd.time(), + start); + + McpEmitter emitter = appEmitters.get(sc.applicationId()); + if (emitter != null) { + emitter.accept(evt); + try { + emitter.close(); + appEmitters.remove(sc.applicationId()); + } catch (Exception e) { + log.warn("Failed to close underlying emitter due to {}", e.getMessage()); + } + } + consumers() + .forEach( + x -> { + x.accept(evt); + try { + x.close(); + } catch (IOException e) { + log.warn("Failed to close lineage consumer", e); + } + }); + } + return null; + } + }); super.onApplicationEnd(applicationEnd); } catch (Exception e) { // log error, but don't impact thread @@ -276,27 +322,37 @@ public void onOtherEvent(SparkListenerEvent event) { } public void processExecutionEnd(SparkListenerSQLExecutionEnd sqlEnd) { - LineageUtils.findSparkCtx().foreach(new AbstractFunction1<SparkContext, Void>() { - - @Override - public Void apply(SparkContext sc) { - SQLQueryExecStartEvent start = appSqlDetails.get(sc.applicationId()).remove(sqlEnd.executionId()); - if (start == null) { - log.error( - "Execution end event received, but start event missing for appId/sql exec Id " + sc.applicationId() + ":" - + sqlEnd.executionId()); - } else if (start.getDatasetLineage() != null) { - SQLQueryExecEndEvent evt = - new SQLQueryExecEndEvent(LineageUtils.getMaster(sc), sc.appName(), sc.applicationId(), sqlEnd.time(), - sqlEnd.executionId(), start); - McpEmitter emitter = appEmitters.get(sc.applicationId()); - if (emitter != null) { - emitter.accept(evt); - } - } - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1<SparkContext, Void>() { + + @Override + public Void apply(SparkContext sc) { + SQLQueryExecStartEvent start = + appSqlDetails.get(sc.applicationId()).remove(sqlEnd.executionId()); + if (start == null) { + log.error( + "Execution end event received, but start event missing for appId/sql exec Id " + + sc.applicationId() + + ":" + + sqlEnd.executionId()); + } else if (start.getDatasetLineage() != null) { + SQLQueryExecEndEvent evt = + new SQLQueryExecEndEvent( + LineageUtils.getMaster(sc), + sc.appName(), + sc.applicationId(), + sqlEnd.time(), + sqlEnd.executionId(), + start); + McpEmitter emitter = appEmitters.get(sc.applicationId()); + if (emitter != null) { + emitter.accept(evt); + } + } + return null; + } + }); } private 
synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { @@ -306,15 +362,27 @@ private synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { if (datahubConfig == null) { Config datahubConf = LineageUtils.parseSparkConfig(); appConfig.put(appId, datahubConf); - Config pipelineConfig = datahubConf.hasPath(PIPELINE_KEY) ? datahubConf.getConfig(PIPELINE_KEY) - : com.typesafe.config.ConfigFactory.empty(); + Config pipelineConfig = + datahubConf.hasPath(PIPELINE_KEY) + ? datahubConf.getConfig(PIPELINE_KEY) + : com.typesafe.config.ConfigFactory.empty(); AppStartEvent evt = - new AppStartEvent(LineageUtils.getMaster(ctx), getPipelineName(ctx), appId, ctx.startTime(), ctx.sparkUser(), + new AppStartEvent( + LineageUtils.getMaster(ctx), + getPipelineName(ctx), + appId, + ctx.startTime(), + ctx.sparkUser(), pipelineConfig); - appEmitters.computeIfAbsent(appId, - s -> datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY) ? new CoalesceJobsEmitter( - datahubConf) : new McpEmitter(datahubConf)).accept(evt); + appEmitters + .computeIfAbsent( + appId, + s -> + datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY) + ? new CoalesceJobsEmitter(datahubConf) + : new McpEmitter(datahubConf)) + .accept(evt); consumers().forEach(c -> c.accept(evt)); appDetails.put(appId, evt); appSqlDetails.put(appId, new ConcurrentHashMap<>()); @@ -322,7 +390,8 @@ private synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { } private String getPipelineName(SparkContext cx) { - Config datahubConfig = appConfig.computeIfAbsent(cx.applicationId(), s -> LineageUtils.parseSparkConfig()); + Config datahubConfig = + appConfig.computeIfAbsent(cx.applicationId(), s -> LineageUtils.parseSparkConfig()); String name = ""; if (datahubConfig.hasPath(DATABRICKS_CLUSTER_KEY)) { name = datahubConfig.getString(DATABRICKS_CLUSTER_KEY) + "_" + cx.applicationId(); @@ -339,8 +408,10 @@ private String getPipelineName(SparkContext cx) { private void processExecution(SparkListenerSQLExecutionStart sqlStart) { QueryExecution queryExec = SQLExecution.getQueryExecution(sqlStart.executionId()); if (queryExec == null) { - log.error("Skipping processing for sql exec Id" + sqlStart.executionId() - + " as Query execution context could not be read from current spark state"); + log.error( + "Skipping processing for sql exec Id" + + sqlStart.executionId() + + " as Query execution context could not be read from current spark state"); return; } LogicalPlan plan = queryExec.optimizedPlan(); @@ -354,7 +425,8 @@ private List<LineageConsumer> consumers() { SparkConf conf = SparkEnv.get().conf(); if (conf.contains(CONSUMER_TYPE_KEY)) { String consumerTypes = conf.get(CONSUMER_TYPE_KEY); - return StreamSupport.stream(Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false) + return StreamSupport.stream( + Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false) .map(x -> LineageUtils.getConsumer(x)) .filter(Objects::nonNull) .collect(Collectors.toList()); diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java index 51f5d561b26ae..ec8177bbc0e5c 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java @@ -1,5 +1,13 @@ package datahub.spark; +import 
com.google.common.collect.ImmutableSet; +import com.linkedin.common.FabricType; +import com.typesafe.config.Config; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.dataset.CatalogTableDataset; +import datahub.spark.model.dataset.HdfsPathDataset; +import datahub.spark.model.dataset.JdbcDataset; +import datahub.spark.model.dataset.SparkDataset; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -10,7 +18,7 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - +import lombok.extern.slf4j.Slf4j; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.spark.SparkContext; @@ -32,17 +40,6 @@ import org.apache.spark.sql.hive.execution.HiveTableScanExec; import org.apache.spark.sql.hive.execution.InsertIntoHiveTable; import org.apache.spark.sql.sources.BaseRelation; - -import com.google.common.collect.ImmutableSet; -import com.linkedin.common.FabricType; -import com.typesafe.config.Config; - -import datahub.spark.model.LineageUtils; -import datahub.spark.model.dataset.CatalogTableDataset; -import datahub.spark.model.dataset.HdfsPathDataset; -import datahub.spark.model.dataset.JdbcDataset; -import datahub.spark.model.dataset.SparkDataset; -import lombok.extern.slf4j.Slf4j; import scala.Option; import scala.collection.JavaConversions; import scala.runtime.AbstractFunction1; @@ -50,196 +47,287 @@ @Slf4j public class DatasetExtractor { - private static final Map<Class<? extends LogicalPlan>, PlanToDataset> PLAN_TO_DATASET = new HashMap<>(); - private static final Map<Class<? extends SparkPlan>, SparkPlanToDataset> SPARKPLAN_TO_DATASET = new HashMap<>(); - private static final Map<Class<? extends BaseRelation>, RelationToDataset> REL_TO_DATASET = new HashMap<>(); - private static final Set<Class<? extends LogicalPlan>> OUTPUT_CMD = ImmutableSet.of( - InsertIntoHadoopFsRelationCommand.class, SaveIntoDataSourceCommand.class, - CreateDataSourceTableAsSelectCommand.class, CreateHiveTableAsSelectCommand.class, InsertIntoHiveTable.class); + private static final Map<Class<? extends LogicalPlan>, PlanToDataset> PLAN_TO_DATASET = + new HashMap<>(); + private static final Map<Class<? extends SparkPlan>, SparkPlanToDataset> SPARKPLAN_TO_DATASET = + new HashMap<>(); + private static final Map<Class<? extends BaseRelation>, RelationToDataset> REL_TO_DATASET = + new HashMap<>(); + private static final Set<Class<? extends LogicalPlan>> OUTPUT_CMD = + ImmutableSet.of( + InsertIntoHadoopFsRelationCommand.class, + SaveIntoDataSourceCommand.class, + CreateDataSourceTableAsSelectCommand.class, + CreateHiveTableAsSelectCommand.class, + InsertIntoHiveTable.class); private static final String DATASET_ENV_KEY = "metadata.dataset.env"; private static final String DATASET_PLATFORM_INSTANCE_KEY = "metadata.dataset.platformInstance"; private static final String TABLE_HIVE_PLATFORM_ALIAS = "metadata.table.hive_platform_alias"; private static final String INCLUDE_SCHEME_KEY = "metadata.include_scheme"; private static final String REMOVE_PARTITION_PATTERN = "metadata.remove_partition_pattern"; - // TODO InsertIntoHiveDirCommand, InsertIntoDataSourceDirCommand - private DatasetExtractor() { + // TODO InsertIntoHiveDirCommand, InsertIntoDataSourceDirCommand - } + private DatasetExtractor() {} private static interface PlanToDataset { - Optional<? extends Collection<SparkDataset>> fromPlanNode(LogicalPlan plan, SparkContext ctx, Config datahubConfig); + Optional<? 
extends Collection<SparkDataset>> fromPlanNode( + LogicalPlan plan, SparkContext ctx, Config datahubConfig); } private static interface RelationToDataset { - Optional<? extends Collection<SparkDataset>> fromRelation(BaseRelation rel, SparkContext ctx, Config datahubConfig); + Optional<? extends Collection<SparkDataset>> fromRelation( + BaseRelation rel, SparkContext ctx, Config datahubConfig); } private static interface SparkPlanToDataset { - Optional<? extends Collection<SparkDataset>> fromSparkPlanNode(SparkPlan plan, SparkContext ctx, - Config datahubConfig); + Optional<? extends Collection<SparkDataset>> fromSparkPlanNode( + SparkPlan plan, SparkContext ctx, Config datahubConfig); } static { - - SPARKPLAN_TO_DATASET.put(FileSourceScanExec.class, (p, ctx, datahubConfig) -> { - - BaseRelation baseRel = ((FileSourceScanExec) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - - }); - - SPARKPLAN_TO_DATASET.put(HiveTableScanExec.class, (p, ctx, datahubConfig) -> { - - HiveTableRelation baseRel = ((HiveTableScanExec) p).relation(); - if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); - - }); - - SPARKPLAN_TO_DATASET.put(RowDataSourceScanExec.class, (p, ctx, datahubConfig) -> { - BaseRelation baseRel = ((RowDataSourceScanExec) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - }); - - SPARKPLAN_TO_DATASET.put(InMemoryTableScanExec.class, (p, ctx, datahubConfig) -> { - InMemoryRelation baseRel = ((InMemoryTableScanExec) p).relation(); - if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); - - }); - - PLAN_TO_DATASET.put(InsertIntoHadoopFsRelationCommand.class, (p, ctx, datahubConfig) -> { - InsertIntoHadoopFsRelationCommand cmd = (InsertIntoHadoopFsRelationCommand) p; - if (cmd.catalogTable().isDefined()) { - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.catalogTable().get(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - } - return Optional.of(Collections.singletonList(new HdfsPathDataset(cmd.outputPath(), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(LogicalRelation.class, (p, ctx, datahubConfig) -> { - BaseRelation baseRel = ((LogicalRelation) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - }); - - PLAN_TO_DATASET.put(SaveIntoDataSourceCommand.class, (p, ctx, datahubConfig) -> { - - SaveIntoDataSourceCommand cmd = (SaveIntoDataSourceCommand) p; - - Map<String, String> options = JavaConversions.mapAsJavaMap(cmd.options()); - String url = options.getOrDefault("url", ""); // e.g. 
jdbc:postgresql://localhost:5432/sparktestdb - if (url.contains("jdbc")) { - String tbl = options.get("dbtable"); - return Optional.of(Collections.singletonList( - new JdbcDataset(url, tbl, getCommonPlatformInstance(datahubConfig), getCommonFabricType(datahubConfig)))); - } else if (options.containsKey("path")) { - return Optional.of(Collections.singletonList(new HdfsPathDataset(new Path(options.get("path")), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - } else { - return Optional.empty(); - } - }); - - PLAN_TO_DATASET.put(CreateDataSourceTableAsSelectCommand.class, (p, ctx, datahubConfig) -> { - CreateDataSourceTableAsSelectCommand cmd = (CreateDataSourceTableAsSelectCommand) p; - // TODO what of cmd.mode() - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.table(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - PLAN_TO_DATASET.put(CreateHiveTableAsSelectCommand.class, (p, ctx, datahubConfig) -> { - CreateHiveTableAsSelectCommand cmd = (CreateHiveTableAsSelectCommand) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.tableDesc(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - PLAN_TO_DATASET.put(InsertIntoHiveTable.class, (p, ctx, datahubConfig) -> { - InsertIntoHiveTable cmd = (InsertIntoHiveTable) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.table(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(HiveTableRelation.class, (p, ctx, datahubConfig) -> { - HiveTableRelation cmd = (HiveTableRelation) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.tableMeta(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - - REL_TO_DATASET.put(HadoopFsRelation.class, (r, ctx, datahubConfig) -> { - List<Path> res = JavaConversions.asJavaCollection(((HadoopFsRelation) r).location().rootPaths()).stream() - .map(p -> getDirectoryPath(p, ctx.hadoopConfiguration())).distinct().collect(Collectors.toList()); - - // TODO mapping to URN TBD - return Optional.of(Collections.singletonList(new HdfsPathDataset(res.get(0), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - }); - REL_TO_DATASET.put(JDBCRelation.class, (r, ctx, datahubConfig) -> { - JDBCRelation rel = (JDBCRelation) r; - Option<String> tbl = rel.jdbcOptions().parameters().get(JDBCOptions.JDBC_TABLE_NAME()); - if (tbl.isEmpty()) { - return Optional.empty(); - } - - return Optional.of(Collections.singletonList(new JdbcDataset(rel.jdbcOptions().url(), tbl.get(), - getCommonPlatformInstance(datahubConfig), getCommonFabricType(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(InMemoryRelation.class, (plan, ctx, datahubConfig) -> { - SparkPlan cachedPlan = ((InMemoryRelation) plan).cachedPlan(); - ArrayList<SparkDataset> datasets = new ArrayList<>(); - cachedPlan.collectLeaves().toList().foreach(new AbstractFunction1<SparkPlan, Void>() { - - @Override - public Void apply(SparkPlan leafPlan) { - - if (SPARKPLAN_TO_DATASET.containsKey(leafPlan.getClass())) { - 
Optional<? extends Collection<SparkDataset>> dataset = SPARKPLAN_TO_DATASET.get(leafPlan.getClass()) - .fromSparkPlanNode(leafPlan, ctx, datahubConfig); - dataset.ifPresent(x -> datasets.addAll(x)); + SPARKPLAN_TO_DATASET.put( + FileSourceScanExec.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((FileSourceScanExec) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + HiveTableScanExec.class, + (p, ctx, datahubConfig) -> { + HiveTableRelation baseRel = ((HiveTableScanExec) p).relation(); + if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + RowDataSourceScanExec.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((RowDataSourceScanExec) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + InMemoryTableScanExec.class, + (p, ctx, datahubConfig) -> { + InMemoryRelation baseRel = ((InMemoryTableScanExec) p).relation(); + if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); + }); + + PLAN_TO_DATASET.put( + InsertIntoHadoopFsRelationCommand.class, + (p, ctx, datahubConfig) -> { + InsertIntoHadoopFsRelationCommand cmd = (InsertIntoHadoopFsRelationCommand) p; + if (cmd.catalogTable().isDefined()) { + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.catalogTable().get(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + } + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + cmd.outputPath(), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + LogicalRelation.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((LogicalRelation) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + PLAN_TO_DATASET.put( + SaveIntoDataSourceCommand.class, + (p, ctx, datahubConfig) -> { + SaveIntoDataSourceCommand cmd = (SaveIntoDataSourceCommand) p; + + Map<String, String> options = JavaConversions.mapAsJavaMap(cmd.options()); + String url = + options.getOrDefault("url", ""); // e.g. 
jdbc:postgresql://localhost:5432/sparktestdb + if (url.contains("jdbc")) { + String tbl = options.get("dbtable"); + return Optional.of( + Collections.singletonList( + new JdbcDataset( + url, + tbl, + getCommonPlatformInstance(datahubConfig), + getCommonFabricType(datahubConfig)))); + } else if (options.containsKey("path")) { + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + new Path(options.get("path")), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); } else { - log.error(leafPlan.getClass() + " is not yet supported. Please contact datahub team for further support."); + return Optional.empty(); + } + }); + + PLAN_TO_DATASET.put( + CreateDataSourceTableAsSelectCommand.class, + (p, ctx, datahubConfig) -> { + CreateDataSourceTableAsSelectCommand cmd = (CreateDataSourceTableAsSelectCommand) p; + // TODO what of cmd.mode() + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.table(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + PLAN_TO_DATASET.put( + CreateHiveTableAsSelectCommand.class, + (p, ctx, datahubConfig) -> { + CreateHiveTableAsSelectCommand cmd = (CreateHiveTableAsSelectCommand) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.tableDesc(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + PLAN_TO_DATASET.put( + InsertIntoHiveTable.class, + (p, ctx, datahubConfig) -> { + InsertIntoHiveTable cmd = (InsertIntoHiveTable) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.table(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + HiveTableRelation.class, + (p, ctx, datahubConfig) -> { + HiveTableRelation cmd = (HiveTableRelation) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.tableMeta(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + REL_TO_DATASET.put( + HadoopFsRelation.class, + (r, ctx, datahubConfig) -> { + List<Path> res = + JavaConversions.asJavaCollection(((HadoopFsRelation) r).location().rootPaths()) + .stream() + .map(p -> getDirectoryPath(p, ctx.hadoopConfiguration())) + .distinct() + .collect(Collectors.toList()); + + // TODO mapping to URN TBD + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + res.get(0), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); + }); + REL_TO_DATASET.put( + JDBCRelation.class, + (r, ctx, datahubConfig) -> { + JDBCRelation rel = (JDBCRelation) r; + Option<String> tbl = rel.jdbcOptions().parameters().get(JDBCOptions.JDBC_TABLE_NAME()); + if (tbl.isEmpty()) { + return Optional.empty(); } - return null; - } - }); - return datasets.isEmpty() ? 
Optional.empty() : Optional.of(datasets); - }); + + return Optional.of( + Collections.singletonList( + new JdbcDataset( + rel.jdbcOptions().url(), + tbl.get(), + getCommonPlatformInstance(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + InMemoryRelation.class, + (plan, ctx, datahubConfig) -> { + SparkPlan cachedPlan = ((InMemoryRelation) plan).cachedPlan(); + ArrayList<SparkDataset> datasets = new ArrayList<>(); + cachedPlan + .collectLeaves() + .toList() + .foreach( + new AbstractFunction1<SparkPlan, Void>() { + + @Override + public Void apply(SparkPlan leafPlan) { + + if (SPARKPLAN_TO_DATASET.containsKey(leafPlan.getClass())) { + Optional<? extends Collection<SparkDataset>> dataset = + SPARKPLAN_TO_DATASET + .get(leafPlan.getClass()) + .fromSparkPlanNode(leafPlan, ctx, datahubConfig); + dataset.ifPresent(x -> datasets.addAll(x)); + } else { + log.error( + leafPlan.getClass() + + " is not yet supported. Please contact datahub team for further support."); + } + return null; + } + }); + return datasets.isEmpty() ? Optional.empty() : Optional.of(datasets); + }); } - static Optional<? extends Collection<SparkDataset>> asDataset(LogicalPlan logicalPlan, SparkContext ctx, - boolean outputNode) { + static Optional<? extends Collection<SparkDataset>> asDataset( + LogicalPlan logicalPlan, SparkContext ctx, boolean outputNode) { if (!outputNode && OUTPUT_CMD.contains(logicalPlan.getClass())) { return Optional.empty(); } if (!PLAN_TO_DATASET.containsKey(logicalPlan.getClass())) { - log.error(logicalPlan.getClass() + " is not supported yet. Please contact datahub team for further support. "); + log.error( + logicalPlan.getClass() + + " is not supported yet. Please contact datahub team for further support. "); return Optional.empty(); } Config datahubconfig = LineageUtils.parseSparkConfig(); - return PLAN_TO_DATASET.get(logicalPlan.getClass()).fromPlanNode(logicalPlan, ctx, datahubconfig); + return PLAN_TO_DATASET + .get(logicalPlan.getClass()) + .fromPlanNode(logicalPlan, ctx, datahubconfig); } private static Path getDirectoryPath(Path p, Configuration hadoopConf) { @@ -255,9 +343,10 @@ private static Path getDirectoryPath(Path p, Configuration hadoopConf) { } private static FabricType getCommonFabricType(Config datahubConfig) { - String fabricTypeString = datahubConfig.hasPath(DATASET_ENV_KEY) - ? datahubConfig.getString(DATASET_ENV_KEY).toUpperCase() - : "PROD"; + String fabricTypeString = + datahubConfig.hasPath(DATASET_ENV_KEY) + ? datahubConfig.getString(DATASET_ENV_KEY).toUpperCase() + : "PROD"; FabricType fabricType = null; try { fabricType = FabricType.valueOf(fabricTypeString); @@ -269,22 +358,26 @@ private static FabricType getCommonFabricType(Config datahubConfig) { } private static String getCommonPlatformInstance(Config datahubConfig) { - return datahubConfig.hasPath(DATASET_PLATFORM_INSTANCE_KEY) ? datahubConfig.getString(DATASET_PLATFORM_INSTANCE_KEY) + return datahubConfig.hasPath(DATASET_PLATFORM_INSTANCE_KEY) + ? datahubConfig.getString(DATASET_PLATFORM_INSTANCE_KEY) : null; } private static String getTableHivePlatformAlias(Config datahubConfig) { - return datahubConfig.hasPath(TABLE_HIVE_PLATFORM_ALIAS) ? datahubConfig.getString(TABLE_HIVE_PLATFORM_ALIAS) - : "hive"; + return datahubConfig.hasPath(TABLE_HIVE_PLATFORM_ALIAS) + ? datahubConfig.getString(TABLE_HIVE_PLATFORM_ALIAS) + : "hive"; } private static boolean getIncludeScheme(Config datahubConfig) { - return datahubConfig.hasPath(INCLUDE_SCHEME_KEY) ? 
datahubConfig.getBoolean(INCLUDE_SCHEME_KEY) + return datahubConfig.hasPath(INCLUDE_SCHEME_KEY) + ? datahubConfig.getBoolean(INCLUDE_SCHEME_KEY) : true; } private static String getRemovePartitionPattern(Config datahubConfig) { - return datahubConfig.hasPath(REMOVE_PARTITION_PATTERN) ? datahubConfig.getString(REMOVE_PARTITION_PATTERN) + return datahubConfig.hasPath(REMOVE_PARTITION_PATTERN) + ? datahubConfig.getString(REMOVE_PARTITION_PATTERN) : null; } } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java index 6ddc5729d88f6..4e6eadc61bae0 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java @@ -1,30 +1,27 @@ package datahub.spark.consumer.impl; -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - -import com.linkedin.data.template.StringMap; - import com.linkedin.common.DataJobUrnArray; import com.linkedin.common.DatasetUrnArray; import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataJobInfo; import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.datajob.JobStatus; import com.typesafe.config.Config; - import datahub.event.MetadataChangeProposalWrapper; import datahub.spark.model.AppEndEvent; import datahub.spark.model.AppStartEvent; import datahub.spark.model.LineageEvent; import datahub.spark.model.SQLQueryExecStartEvent; +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -37,7 +34,8 @@ public class CoalesceJobsEmitter extends McpEmitter { public CoalesceJobsEmitter(Config datahubConf) { super(datahubConf); - parentJobUrn = datahubConf.hasPath(PARENT_JOB_KEY) ? datahubConf.getString(PARENT_JOB_KEY) : null; + parentJobUrn = + datahubConf.hasPath(PARENT_JOB_KEY) ? datahubConf.getString(PARENT_JOB_KEY) : null; log.info("CoalesceJobsEmitter initialised with " + PARENT_JOB_KEY + ":" + parentJobUrn); } @@ -50,13 +48,21 @@ public void accept(LineageEvent evt) { } else if (evt instanceof SQLQueryExecStartEvent) { SQLQueryExecStartEvent sqlQueryExecStartEvent = (SQLQueryExecStartEvent) evt; sqlQueryExecStartEvents.add(sqlQueryExecStartEvent); - log.debug("SQLQueryExecStartEvent received for processing. for app: " + sqlQueryExecStartEvent.getAppId() + ":" - + sqlQueryExecStartEvent.getAppName() + "sqlID: " + sqlQueryExecStartEvent.getSqlQueryExecId()); + log.debug( + "SQLQueryExecStartEvent received for processing. 
for app: " + + sqlQueryExecStartEvent.getAppId() + + ":" + + sqlQueryExecStartEvent.getAppName() + + "sqlID: " + + sqlQueryExecStartEvent.getSqlQueryExecId()); } else if (evt instanceof AppEndEvent) { AppEndEvent appEndEvent = (AppEndEvent) evt; if (appStartEvent == null) { - log.error("Application End event received for processing but start event is not received for processing for " - + appEndEvent.getAppId() + "-" + appEndEvent.getAppName()); + log.error( + "Application End event received for processing but start event is not received for processing for " + + appEndEvent.getAppId() + + "-" + + appEndEvent.getAppName()); return; } log.debug("AppEndEvent received for processing. for app start :" + appEndEvent.getAppId()); @@ -65,7 +71,8 @@ public void accept(LineageEvent evt) { } } - private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents(AppEndEvent appEndEvent) { + private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents( + AppEndEvent appEndEvent) { DataJobUrn jobUrn = new DataJobUrn(appStartEvent.getFlowUrn(), appStartEvent.getAppName()); @@ -85,11 +92,15 @@ private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents(AppEnd log.warn(PARENT_JOB_KEY + " is not a valid Datajob URN. Skipping setting up upstream job."); } - DataJobInputOutput jobio = new DataJobInputOutput().setInputDatasets(new DatasetUrnArray(inSet)) - .setOutputDatasets(new DatasetUrnArray(outSet)).setInputDatajobs(upStreamjobs); + DataJobInputOutput jobio = + new DataJobInputOutput() + .setInputDatasets(new DatasetUrnArray(inSet)) + .setOutputDatasets(new DatasetUrnArray(outSet)) + .setInputDatajobs(upStreamjobs); - MetadataChangeProposalWrapper<?> mcpJobIO = MetadataChangeProposalWrapper - .create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobio)); + MetadataChangeProposalWrapper<?> mcpJobIO = + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobio)); StringMap customProps = new StringMap(); customProps.put("startedAt", appStartEvent.timeStr()); @@ -97,15 +108,17 @@ private List<MetadataChangeProposalWrapper> squashSQLQueryExecStartEvents(AppEnd customProps.put("appName", appStartEvent.getAppName()); customProps.put("completedAt", appEndEvent.timeStr()); - DataJobInfo jobInfo = new DataJobInfo().setName(appStartEvent.getAppName()) - .setType(DataJobInfo.Type.create("sparkJob")); + DataJobInfo jobInfo = + new DataJobInfo() + .setName(appStartEvent.getAppName()) + .setType(DataJobInfo.Type.create("sparkJob")); jobInfo.setCustomProperties(customProps); jobInfo.setStatus(JobStatus.COMPLETED); - MetadataChangeProposalWrapper<?> mcpJobInfo = MetadataChangeProposalWrapper - .create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); + MetadataChangeProposalWrapper<?> mcpJobInfo = + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); return Arrays.asList(mcpJobIO, mcpJobInfo); - } @Override @@ -120,5 +133,4 @@ class DataSetUrnComparator implements Comparator<DatasetUrn> { public int compare(DatasetUrn urn1, DatasetUrn urn2) { return urn1.toString().compareTo(urn2.toString()); } - -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java index 336246fa9d3e8..918ce48d1cf42 100644 --- 
a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java @@ -1,23 +1,20 @@ package datahub.spark.consumer.impl; -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; - import com.typesafe.config.Config; - import datahub.client.Emitter; import datahub.client.rest.RestEmitter; import datahub.client.rest.RestEmitterConfig; import datahub.event.MetadataChangeProposalWrapper; import datahub.spark.model.LineageConsumer; import datahub.spark.model.LineageEvent; +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class McpEmitter implements LineageConsumer { @@ -27,19 +24,21 @@ public class McpEmitter implements LineageConsumer { private static final String GMS_URL_KEY = "rest.server"; private static final String GMS_AUTH_TOKEN = "rest.token"; private static final String DISABLE_SSL_VERIFICATION_KEY = "rest.disable_ssl_verification"; + private Optional<Emitter> getEmitter() { Optional<Emitter> emitter = Optional.empty(); switch (emitterType) { - case "rest": - if (restEmitterConfig.isPresent()) { - emitter = Optional.of(new RestEmitter(restEmitterConfig.get())); - } - break; - - default: - log.error("DataHub Transport {} not recognized. DataHub Lineage emission will not work", emitterType); - break; - + case "rest": + if (restEmitterConfig.isPresent()) { + emitter = Optional.of(new RestEmitter(restEmitterConfig.get())); + } + break; + + default: + log.error( + "DataHub Transport {} not recognized. DataHub Lineage emission will not work", + emitterType); + break; } return emitter; } @@ -47,22 +46,28 @@ private Optional<Emitter> getEmitter() { protected void emit(List<MetadataChangeProposalWrapper> mcpws) { Optional<Emitter> emitter = getEmitter(); if (emitter.isPresent()) { - mcpws.stream().map(mcpw -> { - try { - log.debug("emitting mcpw: " + mcpw); - return emitter.get().emit(mcpw); - } catch (IOException ioException) { - log.error("Failed to emit metadata to DataHub", ioException); - return null; - } - }).filter(Objects::nonNull).collect(Collectors.toList()).forEach(future -> { - try { - log.info(future.get().toString()); - } catch (InterruptedException | ExecutionException e) { - // log error, but don't impact thread - log.error("Failed to emit metadata to DataHub", e); - } - }); + mcpws.stream() + .map( + mcpw -> { + try { + log.debug("emitting mcpw: " + mcpw); + return emitter.get().emit(mcpw); + } catch (IOException ioException) { + log.error("Failed to emit metadata to DataHub", ioException); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()) + .forEach( + future -> { + try { + log.info(future.get().toString()); + } catch (InterruptedException | ExecutionException e) { + // log error, but don't impact thread + log.error("Failed to emit metadata to DataHub", e); + } + }); try { emitter.get().close(); } catch (IOException e) { @@ -72,31 +77,45 @@ protected void emit(List<MetadataChangeProposalWrapper> mcpws) { } public McpEmitter(Config datahubConf) { - emitterType = datahubConf.hasPath(TRANSPORT_KEY) ? 
datahubConf.getString(TRANSPORT_KEY) : "rest"; - switch (emitterType) { + emitterType = + datahubConf.hasPath(TRANSPORT_KEY) ? datahubConf.getString(TRANSPORT_KEY) : "rest"; + switch (emitterType) { case "rest": - String gmsUrl = datahubConf.hasPath(GMS_URL_KEY) ? datahubConf.getString(GMS_URL_KEY) - : "http://localhost:8080"; - String token = datahubConf.hasPath(GMS_AUTH_TOKEN) ? datahubConf.getString(GMS_AUTH_TOKEN) : null; - boolean disableSslVerification = datahubConf.hasPath(DISABLE_SSL_VERIFICATION_KEY) ? datahubConf.getBoolean( - DISABLE_SSL_VERIFICATION_KEY) : false; - log.info("REST Emitter Configuration: GMS url {}{}", gmsUrl, - (datahubConf.hasPath(GMS_URL_KEY) ? "" : "(default)")); - if (token != null) { - log.info("REST Emitter Configuration: Token {}", (token != null) ? "XXXXX" : "(empty)"); - } - if (disableSslVerification) { - log.warn("REST Emitter Configuration: ssl verification will be disabled."); - } - restEmitterConfig = Optional.of(RestEmitterConfig.builder() - .server(gmsUrl).token(token) - .disableSslVerification(disableSslVerification).build()); - - break; + String gmsUrl = + datahubConf.hasPath(GMS_URL_KEY) + ? datahubConf.getString(GMS_URL_KEY) + : "http://localhost:8080"; + String token = + datahubConf.hasPath(GMS_AUTH_TOKEN) ? datahubConf.getString(GMS_AUTH_TOKEN) : null; + boolean disableSslVerification = + datahubConf.hasPath(DISABLE_SSL_VERIFICATION_KEY) + ? datahubConf.getBoolean(DISABLE_SSL_VERIFICATION_KEY) + : false; + log.info( + "REST Emitter Configuration: GMS url {}{}", + gmsUrl, + (datahubConf.hasPath(GMS_URL_KEY) ? "" : "(default)")); + if (token != null) { + log.info("REST Emitter Configuration: Token {}", (token != null) ? "XXXXX" : "(empty)"); + } + if (disableSslVerification) { + log.warn("REST Emitter Configuration: ssl verification will be disabled."); + } + restEmitterConfig = + Optional.of( + RestEmitterConfig.builder() + .server(gmsUrl) + .token(token) + .disableSslVerification(disableSslVerification) + .build()); + + break; default: - log.error("DataHub Transport {} not recognized. DataHub Lineage emission will not work", emitterType); - break; - } + log.error( + "DataHub Transport {} not recognized. 
DataHub Lineage emission will not work", + emitterType); + break; + } } @Override @@ -107,8 +126,6 @@ public void accept(LineageEvent evt) { @Override public void close() throws IOException { // Nothing to close at this point - - } - -} \ No newline at end of file + } +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java index 64aef77ddce2f..ac4d3a96308f3 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java @@ -9,7 +9,6 @@ import lombok.Getter; import lombok.ToString; - @ToString @Getter public class AppEndEvent extends LineageEvent { @@ -28,9 +27,11 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() { StringMap customProps = start.customProps(); customProps.put("completedAt", timeStr()); - DataFlowInfo flowInfo = new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps); + DataFlowInfo flowInfo = + new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps); - return Collections.singletonList(MetadataChangeProposalWrapper.create( - b -> b.entityType("dataFlow").entityUrn(flowUrn).upsert().aspect(flowInfo))); + return Collections.singletonList( + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataFlow").entityUrn(flowUrn).upsert().aspect(flowInfo))); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java index 393de44164ac2..b7f9b462c409f 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java @@ -1,19 +1,17 @@ package datahub.spark.model; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.List; - import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.DataFlowUrn; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataFlowInfo; import com.typesafe.config.Config; - import datahub.event.MetadataChangeProposalWrapper; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; import lombok.Getter; import lombok.ToString; import lombok.extern.slf4j.Slf4j; @@ -28,7 +26,12 @@ public class AppStartEvent extends LineageEvent { private final String sparkUser; private Config pipelineConfig; - public AppStartEvent(String master, String appName, String appId, long time, String sparkUser, + public AppStartEvent( + String master, + String appName, + String appId, + long time, + String sparkUser, Config pipelineConfig) { super(master, appName, appId, time); this.sparkUser = sparkUser; @@ -38,18 +41,22 @@ public AppStartEvent(String master, String appName, String appId, long time, Str public DataFlowUrn getFlowUrn() { return LineageUtils.flowUrn(getMaster(), getAppName()); } - + @Override public List<MetadataChangeProposalWrapper> asMetadataEvents() { ArrayList<MetadataChangeProposalWrapper> mcps = new ArrayList<MetadataChangeProposalWrapper>(); if 
(this.pipelineConfig.hasPath(PLATFORM_INSTANCE_KEY)) { try { - DataPlatformInstance dpi = new DataPlatformInstance().setPlatform(new DataPlatformUrn(PLATFORM_SPARK)) - .setInstance(LineageUtils.dataPlatformInstanceUrn(PLATFORM_SPARK, - this.pipelineConfig.getString(PLATFORM_INSTANCE_KEY))); - mcps.add(MetadataChangeProposalWrapper - .create(b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(dpi))); + DataPlatformInstance dpi = + new DataPlatformInstance() + .setPlatform(new DataPlatformUrn(PLATFORM_SPARK)) + .setInstance( + LineageUtils.dataPlatformInstanceUrn( + PLATFORM_SPARK, this.pipelineConfig.getString(PLATFORM_INSTANCE_KEY))); + mcps.add( + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(dpi))); } catch (URISyntaxException e) { // log error, but don't impact thread StringWriter s = new StringWriter(); @@ -59,9 +66,11 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() { p.close(); } } - DataFlowInfo flowInfo = new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps()); - mcps.add(MetadataChangeProposalWrapper - .create(b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(flowInfo))); + DataFlowInfo flowInfo = + new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps()); + mcps.add( + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(flowInfo))); return mcps; } @@ -73,4 +82,4 @@ StringMap customProps() { customProps.put("sparkUser", sparkUser); return customProps; } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java index 9583ab69a2d73..996a911ced9f9 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java @@ -1,11 +1,9 @@ package datahub.spark.model; +import datahub.spark.model.dataset.SparkDataset; import java.util.Collections; import java.util.HashSet; import java.util.Set; - -import datahub.spark.model.dataset.SparkDataset; - import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; @@ -16,14 +14,11 @@ public class DatasetLineage { private final Set<SparkDataset> sources = new HashSet<>(); - @Getter - private final String callSiteShort; - - @Getter - private final String plan; + @Getter private final String callSiteShort; + + @Getter private final String plan; - @Getter - private final SparkDataset sink; + @Getter private final SparkDataset sink; public void addSource(SparkDataset source) { sources.add(source); diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java index 890ed6329c47b..aa2d998ea5c99 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java @@ -3,5 +3,4 @@ import java.io.Closeable; import java.util.function.Consumer; -public interface LineageConsumer extends Consumer<LineageEvent>, Closeable { -} +public interface LineageConsumer extends Consumer<LineageEvent>, Closeable {} diff --git 
a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java index 37b949a454b0d..a88474650c510 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java @@ -1,9 +1,8 @@ package datahub.spark.model; +import datahub.event.MetadataChangeProposalWrapper; import java.util.Date; import java.util.List; - -import datahub.event.MetadataChangeProposalWrapper; import lombok.Data; @Data @@ -18,4 +17,4 @@ public abstract class LineageEvent { public String timeStr() { return new Date(getTime()).toInstant().toString(); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java index ad837f034ad64..ad628666a263d 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java @@ -8,15 +8,12 @@ import com.linkedin.common.urn.Urn; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; - import java.net.URISyntaxException; import java.util.Arrays; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; - import lombok.extern.slf4j.Slf4j; - import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.SparkContext$; @@ -35,33 +32,38 @@ public class LineageUtils { /* This is for generating urn from a hash of the plan */ // private static Function<String, String> PATH_REPLACER = (x -> x); - private LineageUtils() { - - } + private LineageUtils() {} - public static Urn dataPlatformInstanceUrn(String platform, String instance) throws URISyntaxException { - return new Urn("dataPlatformInstance", + public static Urn dataPlatformInstanceUrn(String platform, String instance) + throws URISyntaxException { + return new Urn( + "dataPlatformInstance", new TupleKey(Arrays.asList(new DataPlatformUrn(platform).toString(), instance))); } public static DataFlowUrn flowUrn(String master, String appName) { - return new DataFlowUrn("spark", appName, master.replaceAll(":", "_").replaceAll("/", "_").replaceAll("[_]+", "_")); + return new DataFlowUrn( + "spark", appName, master.replaceAll(":", "_").replaceAll("/", "_").replaceAll("[_]+", "_")); } public static Option<SparkContext> findSparkCtx() { - return SparkSession.getActiveSession().map(new AbstractFunction1<SparkSession, SparkContext>() { - - @Override - public SparkContext apply(SparkSession sess) { - return sess.sparkContext(); - } - }).orElse(new AbstractFunction0<Option<SparkContext>>() { - - @Override - public Option<SparkContext> apply() { - return SparkContext$.MODULE$.getActive(); - } - }); + return SparkSession.getActiveSession() + .map( + new AbstractFunction1<SparkSession, SparkContext>() { + + @Override + public SparkContext apply(SparkSession sess) { + return sess.sparkContext(); + } + }) + .orElse( + new AbstractFunction0<Option<SparkContext>>() { + + @Override + public Option<SparkContext> apply() { + return SparkContext$.MODULE$.getActive(); + } + }); } public static String getMaster(SparkContext ctx) { @@ -79,14 +81,16 @@ public static LineageConsumer getConsumer(String 
consumerType) { public static Config parseSparkConfig() { SparkConf conf = SparkEnv.get().conf(); - String propertiesString = Arrays.stream(conf.getAllWithPrefix("spark.datahub.")) - .map(tup -> tup._1 + "= \"" + tup._2 + "\"").collect(Collectors.joining("\n")); + String propertiesString = + Arrays.stream(conf.getAllWithPrefix("spark.datahub.")) + .map(tup -> tup._1 + "= \"" + tup._2 + "\"") + .collect(Collectors.joining("\n")); return ConfigFactory.parseString(propertiesString); } // TODO: URN creation with platform instance needs to be inside DatasetUrn class - public static DatasetUrn createDatasetUrn(String platform, String platformInstance, String name, - FabricType fabricType) { + public static DatasetUrn createDatasetUrn( + String platform, String platformInstance, String name, FabricType fabricType) { String datasteName = platformInstance == null ? name : platformInstance + "." + name; return new DatasetUrn(new DataPlatformUrn(platform), datasteName, fabricType); } @@ -103,10 +107,10 @@ public static DatasetUrn createDatasetUrn(String platform, String platformInstan * ""); s = s.replaceAll("Statistics:[^\n]+\n", ""); s = * s.replaceAll("Table Properties:[^\n]+\n", ""); // * System.out.println("CLEAN: " + s); return s; } - * + * * public static void setPathReplacer(Function<String, String> replacer) { * PATH_REPLACER = replacer; } - * + * * public static String hash(String s) { s = PATH_REPLACER.apply(s); * log.debug("PATH REPLACED " + s); return Hashing.md5().hashString(s, * Charset.forName("US-ASCII")).toString(); } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java index 6505cd586b2b5..17d5b941bced2 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java @@ -9,7 +9,6 @@ import lombok.Getter; import lombok.ToString; - @ToString @Getter public class SQLQueryExecEndEvent extends LineageEvent { @@ -17,7 +16,12 @@ public class SQLQueryExecEndEvent extends LineageEvent { private final long sqlQueryExecId; private final SQLQueryExecStartEvent start; - public SQLQueryExecEndEvent(String master, String appName, String appId, long time, long sqlQueryExecId, + public SQLQueryExecEndEvent( + String master, + String appName, + String appId, + long time, + long sqlQueryExecId, SQLQueryExecStartEvent start) { super(master, appName, appId, time); this.sqlQueryExecId = sqlQueryExecId; @@ -33,6 +37,7 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() { DataJobInfo jobInfo = start.jobInfo().setCustomProperties(customProps); return Collections.singletonList( - MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo))); + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo))); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java index 0919f40c7e1c9..dbd56a59838bc 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java +++ 
b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java @@ -4,11 +4,11 @@ import com.linkedin.common.urn.DataFlowUrn; import com.linkedin.common.urn.DataJobUrn; import com.linkedin.data.template.StringMap; -import datahub.spark.model.dataset.SparkDataset; import com.linkedin.datajob.DataJobInfo; import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.datajob.JobStatus; import datahub.event.MetadataChangeProposalWrapper; +import datahub.spark.model.dataset.SparkDataset; import java.util.Arrays; import java.util.Comparator; import java.util.List; @@ -17,14 +17,18 @@ import lombok.Getter; import lombok.ToString; - @ToString @Getter public class SQLQueryExecStartEvent extends LineageEvent { private final long sqlQueryExecId; private final DatasetLineage datasetLineage; - public SQLQueryExecStartEvent(String master, String appName, String appId, long time, long sqlQueryExecId, + public SQLQueryExecStartEvent( + String master, + String appName, + String appId, + long time, + long sqlQueryExecId, DatasetLineage datasetLineage) { super(master, appName, appId, time); this.sqlQueryExecId = sqlQueryExecId; @@ -35,20 +39,24 @@ public SQLQueryExecStartEvent(String master, String appName, String appId, long public List<MetadataChangeProposalWrapper> asMetadataEvents() { DataJobUrn jobUrn = jobUrn(); MetadataChangeProposalWrapper mcpJobIO = - MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobIO())); + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobIO())); DataJobInfo jobInfo = jobInfo(); jobInfo.setCustomProperties(customProps()); jobInfo.setStatus(JobStatus.IN_PROGRESS); MetadataChangeProposalWrapper mcpJobInfo = - MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); return Arrays.asList(mcpJobIO, mcpJobInfo); } DataJobInfo jobInfo() { - return new DataJobInfo().setName(datasetLineage.getCallSiteShort()).setType(DataJobInfo.Type.create("sparkJob")); + return new DataJobInfo() + .setName(datasetLineage.getCallSiteShort()) + .setType(DataJobInfo.Type.create("sparkJob")); } DataJobUrn jobUrn() { @@ -91,12 +99,14 @@ public DatasetUrnArray getOuputDatasets() { public DatasetUrnArray getInputDatasets() { DatasetUrnArray in = new DatasetUrnArray(); - Set<SparkDataset> sources = new TreeSet<>(new Comparator<SparkDataset>() { - @Override - public int compare(SparkDataset x, SparkDataset y) { - return x.urn().toString().compareTo(y.urn().toString()); - } - }); + Set<SparkDataset> sources = + new TreeSet<>( + new Comparator<SparkDataset>() { + @Override + public int compare(SparkDataset x, SparkDataset y) { + return x.urn().toString().compareTo(y.urn().toString()); + } + }); sources.addAll(datasetLineage.getSources()); // maintain ordering for (SparkDataset source : sources) { in.add(source.urn()); @@ -106,8 +116,10 @@ public int compare(SparkDataset x, SparkDataset y) { } private DataJobInputOutput jobIO() { - DataJobInputOutput io = new DataJobInputOutput().setInputDatasets(getInputDatasets()) - .setOutputDatasets(getOuputDatasets()); + DataJobInputOutput io = + new DataJobInputOutput() + .setInputDatasets(getInputDatasets()) + .setOutputDatasets(getOuputDatasets()); return io; } -} \ No newline at end of file +} diff --git 
a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java index 47552c69d78c4..13f70392f5bf5 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java @@ -1,20 +1,19 @@ package datahub.spark.model.dataset; -import org.apache.spark.sql.catalyst.catalog.CatalogTable; - import com.linkedin.common.FabricType; - import lombok.ToString; +import org.apache.spark.sql.catalyst.catalog.CatalogTable; @ToString public class CatalogTableDataset extends SparkDataset { - public CatalogTableDataset(CatalogTable table, String platformInstance, String platform, FabricType fabricType) { + public CatalogTableDataset( + CatalogTable table, String platformInstance, String platform, FabricType fabricType) { this(table.qualifiedName(), platformInstance, platform, fabricType); } - public CatalogTableDataset(String dsName, String platformInstance, String platform, FabricType fabricType) { + public CatalogTableDataset( + String dsName, String platformInstance, String platform, FabricType fabricType) { super(platform, platformInstance, dsName, fabricType); } - } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java index 700aef5d6b15a..c9b05f6a1d22f 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java @@ -1,12 +1,9 @@ package datahub.spark.model.dataset; -import org.apache.hadoop.fs.Path; - import com.linkedin.common.FabricType; - -import lombok.ToString; - import java.net.URI; +import lombok.ToString; +import org.apache.hadoop.fs.Path; @ToString public class HdfsPathDataset extends SparkDataset { @@ -30,18 +27,22 @@ private static String getPlatform(Path path) { } public HdfsPathDataset( - Path path, - String platformInstance, - boolean includeScheme, - FabricType fabricType, - String removePartitionPattern) { + Path path, + String platformInstance, + boolean includeScheme, + FabricType fabricType, + String removePartitionPattern) { // TODO check static partitions? - this(getPath(path, includeScheme, removePartitionPattern), platformInstance, getPlatform(path), fabricType); + this( + getPath(path, includeScheme, removePartitionPattern), + platformInstance, + getPlatform(path), + fabricType); } - public HdfsPathDataset(String pathUri, String platformInstance, String platform, FabricType fabricType) { + public HdfsPathDataset( + String pathUri, String platformInstance, String platform, FabricType fabricType) { // TODO check static partitions? 
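    // A sketch of the urn this resolves to through SparkDataset ->
    // LineageUtils.createDatasetUrn (values below are illustrative, not defaults):
    //   new HdfsPathDataset("file:/data/out.csv", null, "hdfs", FabricType.PROD)
    //     -> urn:li:dataset:(urn:li:dataPlatform:hdfs,file:/data/out.csv,PROD)
    // A non-null platformInstance is prefixed onto the name, e.g. "dev.file:/data/out.csv".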
super(platform, platformInstance, pathUri, fabricType); } - } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java index ea156b49fada7..1cdca6092bcb7 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java @@ -1,17 +1,16 @@ package datahub.spark.model.dataset; -import java.util.HashMap; -import java.util.Map; - import com.linkedin.common.FabricType; - import io.opentracing.contrib.jdbc.parser.URLParser; +import java.util.HashMap; +import java.util.Map; import lombok.ToString; @ToString public class JdbcDataset extends SparkDataset { - //TODO: Should map to the central location on datahub for platform names + // TODO: Should map to the central location on datahub for platform names private static final Map<String, String> PLATFORM_NAME_MAPPING = new HashMap<>(); + static { PLATFORM_NAME_MAPPING.put("postgresql", "postgres"); } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java index 546b737576e60..64e14f5e31542 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java @@ -2,16 +2,16 @@ import com.linkedin.common.FabricType; import com.linkedin.common.urn.DatasetUrn; - import datahub.spark.model.LineageUtils; import lombok.EqualsAndHashCode; @EqualsAndHashCode public abstract class SparkDataset { - + private DatasetUrn urn; - - public SparkDataset(String platform, String platformInstance, String name, FabricType fabricType) { + + public SparkDataset( + String platform, String platformInstance, String name, FabricType fabricType) { super(); this.urn = LineageUtils.createDatasetUrn(platform, platformInstance, name, fabricType); } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java index 447200d855a36..2df468fc03e74 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java @@ -3,13 +3,13 @@ import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.request; +import com.linkedin.common.FabricType; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.List; import java.util.Properties; - import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; @@ -29,8 +29,6 @@ import org.mockserver.socket.PortFactory; import org.mockserver.verify.VerificationTimes; -import com.linkedin.common.FabricType; - public class TestCoalesceJobLineage { private static final boolean MOCK_GMS = Boolean.valueOf("true"); // if false, MCPs get written to real GMS server (see GMS_PORT) @@ -59,29 +57,42 @@ public class TestCoalesceJobLineage { private static SparkSession spark; private static Properties 
jdbcConnnProperties; private static ClientAndServer mockServer; - @Rule - public TestRule mockServerWatcher = new TestWatcher() { - @Override - protected void finished(Description description) { - if (!VERIFY_EXPECTED) { - return; - } - verifyTestScenario(description.getMethodName()); - clear(); - super.finished(description); - } - }; + @Rule + public TestRule mockServerWatcher = + new TestWatcher() { + + @Override + protected void finished(Description description) { + if (!VERIFY_EXPECTED) { + return; + } + verifyTestScenario(description.getMethodName()); + clear(); + super.finished(description); + } + }; private static String addLocalPath(String s) { - return s.replaceAll("file:/" + RESOURCE_DIR, "file://" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); + return s.replaceAll( + "file:/" + RESOURCE_DIR, "file://" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); } public static void resetBaseExpectations() { - mockServer.when(request().withMethod("GET").withPath("/config").withHeader("Content-type", "application/json"), - Times.unlimited()).respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); mockServer - .when(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); + mockServer + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), Times.unlimited()) .respond(HttpResponse.response().withStatusCode(200)); } @@ -95,11 +106,16 @@ public static void initMockServer() { public static void verifyTestScenario(String testName) { String expectationFileName = testName + ".json"; try { - List<String> expected = Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName).toAbsolutePath()); + List<String> expected = + Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName).toAbsolutePath()); for (String content : expected) { String swappedContent = addLocalPath(content); - mockServer.verify(request().withMethod("POST").withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal").withBody(new JsonBody(swappedContent)), + mockServer.verify( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withBody(new JsonBody(swappedContent)), VerificationTimes.atLeast(1)); } } catch (IOException ioe) { @@ -112,23 +128,33 @@ public void setup() { resetBaseExpectations(); System.setProperty("user.dir", Paths.get("coalesce-test").toAbsolutePath().toString()); - spark = SparkSession.builder().appName(APP_NAME).config("spark.master", MASTER) - .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") - .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) - .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()).config("spark.datahub.coalesce_jobs", "true") - .config("spark.datahub.parent.datajob_urn", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") - .config("spark.sql.warehouse.dir", new 
File(WAREHOUSE_LOC).getAbsolutePath()).enableHiveSupport().getOrCreate(); + spark = + SparkSession.builder() + .appName(APP_NAME) + .config("spark.master", MASTER) + .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") + .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) + .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) + .config("spark.datahub.coalesce_jobs", "true") + .config( + "spark.datahub.parent.datajob_urn", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") + .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .enableHiveSupport() + .getOrCreate(); spark.sql("drop database if exists " + TEST_DB + " cascade"); spark.sql("create database " + TEST_DB); } private static void clear() { - mockServer - .clear(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal")); + mockServer.clear( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal")); } @After @@ -150,27 +176,44 @@ public static void verify(int numRequests) { return; } mockServer.verify( - request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), VerificationTimes.exactly(numRequests)); } @Test public void testHiveInHiveOutCoalesce() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(new File(DATA_DIR + "/in1.csv").getAbsolutePath()).withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(new File(DATA_DIR + "/in2.csv").getAbsolutePath()).withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(new File(DATA_DIR + "/in1.csv").getAbsolutePath()) + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(new File(DATA_DIR + "/in2.csv").getAbsolutePath()) + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); // CreateHiveTableAsSelectCommand spark.sql( - "create table " + tbl("foo_coalesce") + " as " + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + "create table " + + tbl("foo_coalesce") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); // CreateHiveTableAsSelectCommand - spark.sql("create table " + tbl("hivetab") + " as " + "(select * from " + tbl("foo_coalesce") + ")"); + spark.sql( + "create table " + tbl("hivetab") + " as " + "(select * from " + tbl("foo_coalesce") + ")"); // InsertIntoHiveTable spark.sql("insert into " + tbl("hivetab") + " (select * from " + tbl("foo_coalesce") + ")"); @@ -181,5 +224,4 @@ public void testHiveInHiveOutCoalesce() throws Exception { df.write().insertInto(tbl("hivetab")); Thread.sleep(5000); } - } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index 
b2280d171e378..3a70c10e0c1f9 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -5,6 +5,16 @@ import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.request; +import com.linkedin.common.FabricType; +import datahub.spark.model.DatasetLineage; +import datahub.spark.model.LineageConsumer; +import datahub.spark.model.LineageEvent; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.SQLQueryExecStartEvent; +import datahub.spark.model.dataset.CatalogTableDataset; +import datahub.spark.model.dataset.HdfsPathDataset; +import datahub.spark.model.dataset.JdbcDataset; +import datahub.spark.model.dataset.SparkDataset; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -18,7 +28,6 @@ import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; - import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -43,23 +52,11 @@ import org.mockserver.socket.PortFactory; import org.mockserver.verify.VerificationTimes; import org.testcontainers.containers.PostgreSQLContainer; - -import com.linkedin.common.FabricType; - -import datahub.spark.model.DatasetLineage; -import datahub.spark.model.LineageConsumer; -import datahub.spark.model.LineageEvent; -import datahub.spark.model.LineageUtils; -import datahub.spark.model.SQLQueryExecStartEvent; -import datahub.spark.model.dataset.CatalogTableDataset; -import datahub.spark.model.dataset.HdfsPathDataset; -import datahub.spark.model.dataset.JdbcDataset; -import datahub.spark.model.dataset.SparkDataset; import org.testcontainers.containers.wait.strategy.Wait; -//!!!! IMP !!!!!!!! -//Add the test number before naming the test. This will ensure that tests run in specified order. -//This is necessary to have fixed query execution numbers. Otherwise tests will fail. +// !!!! IMP !!!!!!!! +// Add the test number before naming the test. This will ensure that tests run in specified order. +// This is necessary to have fixed query execution numbers. Otherwise tests will fail. 
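// The suite wires the listener exactly as a user job would; a minimal sketch of
// that wiring (the GMS URL shown is the McpEmitter default, the app name is an
// illustrative placeholder):
//
//   SparkSession spark =
//       SparkSession.builder()
//           .appName("my_lineage_job")
//           .config("spark.extraListeners", "datahub.spark.DatahubSparkListener")
//           .config("spark.datahub.rest.server", "http://localhost:8080")
//           .getOrCreate();
//
// Every "spark.datahub."-prefixed key is handed to the listener through
// LineageUtils.parseSparkConfig(), which strips the prefix before parsing.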
@FixMethodOrder(MethodSorters.NAME_ASCENDING) public class TestSparkJobsLineage { private static final boolean MOCK_GMS = Boolean.valueOf("true"); @@ -88,40 +85,53 @@ public class TestSparkJobsLineage { private static final String DATASET_PLATFORM_INSTANCE = "test_dev_dataset"; private static final String TABLE_PLATFORM = "hive"; - @ClassRule - public static PostgreSQLContainer<?> db; + @ClassRule public static PostgreSQLContainer<?> db; + static { - db = new PostgreSQLContainer<>("postgres:9.6.12") - .withDatabaseName("sparktestdb"); + db = new PostgreSQLContainer<>("postgres:9.6.12").withDatabaseName("sparktestdb"); db.waitingFor(Wait.forListeningPort()).withStartupTimeout(Duration.ofMinutes(15)).start(); } + private static SparkSession spark; private static Properties jdbcConnnProperties; private static DatasetLineageAccumulator acc; private static ClientAndServer mockServer; - @Rule - public TestRule mockServerWatcher = new TestWatcher() { - @Override - protected void finished(Description description) { - if (!VERIFY_EXPECTED) { - return; - } - verifyTestScenario(description.getMethodName()); - clear(); - super.finished(description); - } - }; + @Rule + public TestRule mockServerWatcher = + new TestWatcher() { + + @Override + protected void finished(Description description) { + if (!VERIFY_EXPECTED) { + return; + } + verifyTestScenario(description.getMethodName()); + clear(); + super.finished(description); + } + }; private static String addLocalPath(String s) { - return s.replaceAll("file:/" + RESOURCE_DIR, "file:" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); + return s.replaceAll( + "file:/" + RESOURCE_DIR, "file:" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); } public static void resetBaseExpectations() { - mockServer.when(request().withMethod("GET").withPath("/config").withHeader("Content-type", "application/json"), - Times.unlimited()).respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); mockServer - .when(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); + mockServer + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), Times.unlimited()) .respond(HttpResponse.response().withStatusCode(200)); } @@ -134,11 +144,16 @@ public static void init() { public static void verifyTestScenario(String testName) { String expectationFileName = testName + ".json"; try { - List<String> expected = Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName)); + List<String> expected = + Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName)); for (String content : expected) { String swappedContent = addLocalPath(content); - mockServer.verify(request().withMethod("POST").withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal").withBody(new JsonBody(swappedContent)), + mockServer.verify( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withBody(new JsonBody(swappedContent)), VerificationTimes.atLeast(1)); } } catch (IOException ioe) { @@ -151,7 +166,10 @@ public static void verify(int numRequests) { return; } mockServer.verify( - 
request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), VerificationTimes.exactly(numRequests)); } @@ -162,14 +180,19 @@ public static void setup() { LineageUtils.registerConsumer("accumulator", acc); init(); - spark = SparkSession.builder().appName(APP_NAME).config("spark.master", MASTER) - .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") - .config("spark.datahub.lineage.consumerTypes", "accumulator") - .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) - .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) - .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()).enableHiveSupport().getOrCreate(); + spark = + SparkSession.builder() + .appName(APP_NAME) + .config("spark.master", MASTER) + .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") + .config("spark.datahub.lineage.consumerTypes", "accumulator") + .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) + .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) + .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .enableHiveSupport() + .getOrCreate(); spark.sql("drop database if exists " + TEST_DB + " cascade"); spark.sql("create database " + TEST_DB); @@ -184,8 +207,11 @@ public static void setup() { } private static void clear() { - mockServer - .clear(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal")); + mockServer.clear( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal")); } @AfterClass @@ -222,7 +248,8 @@ private static DatasetLineage dsl(String callSite, SparkDataset sink, SparkDatas } private static HdfsPathDataset hdfsDs(String fileName) { - return new HdfsPathDataset("file:" + abs(DATA_DIR + "/" + fileName), DATASET_PLATFORM_INSTANCE, "hdfs", DATASET_ENV); + return new HdfsPathDataset( + "file:" + abs(DATA_DIR + "/" + fileName), DATASET_PLATFORM_INSTANCE, "hdfs", DATASET_ENV); } private static JdbcDataset pgDs(String tbl) { @@ -230,7 +257,8 @@ private static JdbcDataset pgDs(String tbl) { } private static CatalogTableDataset catTblDs(String tbl) { - return new CatalogTableDataset(tbl(tbl), DATASET_PLATFORM_INSTANCE, TABLE_PLATFORM, DATASET_ENV); + return new CatalogTableDataset( + tbl(tbl), DATASET_PLATFORM_INSTANCE, TABLE_PLATFORM, DATASET_ENV); } private static String tbl(String tbl) { @@ -259,8 +287,9 @@ public void test1HdfsInOut() throws Exception { df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); - Dataset<Row> df = spark - .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); + Dataset<Row> df = + spark.sql( + "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); // InsertIntoHadoopFsRelationCommand df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out.csv"); @@ -274,11 +303,21 @@ public void test1HdfsInOut() throws Exception { 
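  // Naming convention: testN<Inputs>In<Output>Out, so test5 joins two HDFS CSV
  // inputs and writes the result out over JDBC. The expected lineage follows the
  // shared check(dsl(...)) pattern, sketched here with an assumed output table:
  //   check(dsl(pgDs("out_table"), hdfsDs("in1.csv"), hdfsDs("in2.csv")),
  //       acc.getLineages().get(0));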
@Test public void test5HdfsInJdbcOut() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); Dataset<Row> df = df1.join(df2, "id").drop("id"); @@ -300,8 +339,13 @@ public void test3HdfsJdbcInJdbcOut() throws Exception { c.createStatement().execute("insert into foo2 values('a', 4);"); c.close(); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo2", jdbcConnnProperties); @@ -320,16 +364,30 @@ public void test3HdfsJdbcInJdbcOut() throws Exception { @Test public void test2HdfsInHiveOut() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); Dataset<Row> df = df1.join(df2, "id").drop("id"); - df.write().mode(SaveMode.Overwrite).saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand - df.write().mode(SaveMode.Append).saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Append) + .saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand df.write().insertInto(tbl("foo4")); // InsertIntoHadoopFsRelationCommand Thread.sleep(5000); @@ -345,18 +403,31 @@ public void test2HdfsInHiveOut() throws Exception { @Test public void test4HiveInHiveOut() throws Exception { - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); // CreateHiveTableAsSelectCommand spark.sql( - "create table " + tbl("foo5") + " as " + "(select v1.a, v1.b, v2.c, v2.d 
from v1 join v2 on v1.id = v2.id)"); + "create table " + + tbl("foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); check(dsl(catTblDs("foo5"), hdfsDs("in1.csv"), hdfsDs("in2.csv")), acc.getLineages().get(0)); @@ -388,13 +459,23 @@ public void test6HdfsJdbcInJdbcOutTwoLevel() throws Exception { c.createStatement().execute("insert into foo6 values('a', 4);"); c.close(); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo6", jdbcConnnProperties); - Dataset<Row> df3 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b3"); + Dataset<Row> df3 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b3"); Dataset<Row> df = df1.join(df2, "a").drop("id").join(df3, "a"); @@ -402,7 +483,9 @@ public void test6HdfsJdbcInJdbcOutTwoLevel() throws Exception { // JDBCRelation input df.write().mode(SaveMode.Overwrite).jdbc(db.getJdbcUrl(), "foo7", jdbcConnnProperties); Thread.sleep(5000); - check(dsl(pgDs("foo7"), hdfsDs("in1.csv"), hdfsDs("in2.csv"), pgDs("foo6")), acc.getLineages().get(0)); + check( + dsl(pgDs("foo7"), hdfsDs("in1.csv"), hdfsDs("in2.csv"), pgDs("foo6")), + acc.getLineages().get(0)); if (VERIFY_EXPECTED) { verify(1 * N); } @@ -413,16 +496,26 @@ public void test7HdfsInPersistHdfsOut() throws Exception { Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in3.csv"); - Dataset<Row> df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in4.csv").withColumnRenamed("c2", "d") - .withColumnRenamed("c1", "c").withColumnRenamed("id", "id2"); - Dataset<Row> df = df1.join(df2, df1.col("id").equalTo(df2.col("id2")), "inner") - .filter(df1.col("id").equalTo("id_filter")).persist(StorageLevel.MEMORY_ONLY()); + Dataset<Row> df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in4.csv") + .withColumnRenamed("c2", "d") + .withColumnRenamed("c1", "c") + .withColumnRenamed("id", "id2"); + Dataset<Row> df = + df1.join(df2, df1.col("id").equalTo(df2.col("id2")), "inner") + .filter(df1.col("id").equalTo("id_filter")) + .persist(StorageLevel.MEMORY_ONLY()); df.show(); // InsertIntoHadoopFsRelationCommand df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out_persist.csv"); Thread.sleep(5000); - check(dsl(hdfsDs("out_persist.csv"), hdfsDs("in3.csv"), hdfsDs("in4.csv")), acc.getLineages().get(0)); + check( + dsl(hdfsDs("out_persist.csv"), hdfsDs("in3.csv"), hdfsDs("in4.csv")), + acc.getLineages().get(0)); if (VERIFY_EXPECTED) { verify(1 * N); } @@ -436,10 +529,19 @@ public void test8PersistHdfsJdbcInJdbcOut() throws Exception { c.createStatement().execute("insert into foo8 values('a', 4);"); c.close(); - Dataset<Row> df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset<Row> df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); - Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties).persist(StorageLevel.MEMORY_ONLY()); + Dataset<Row> df2 = + spark + 
.read() + .jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties) + .persist(StorageLevel.MEMORY_ONLY()); Dataset<Row> df = df1.join(df2, "a"); @@ -452,19 +554,24 @@ public void test8PersistHdfsJdbcInJdbcOut() throws Exception { verify(1 * N); } } - - // This test cannot be executed individually. It depends upon previous tests to create tables in the database. + + // This test cannot be executed individually. It depends upon previous tests to create tables in + // the database. @Test public void test9PersistJdbcInHdfsOut() throws Exception { Connection c = db.createConnection(""); - + Dataset<Row> df1 = spark.read().jdbc(db.getJdbcUrl(), "foo9", jdbcConnnProperties); df1 = df1.withColumnRenamed("b", "b1"); - Dataset<Row> df2 = spark.read().jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties).persist(StorageLevel.DISK_ONLY_2()); + Dataset<Row> df2 = + spark + .read() + .jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties) + .persist(StorageLevel.DISK_ONLY_2()); Dataset<Row> df = df1.join(df2, "a"); - + df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out_persist.csv"); Thread.sleep(5000); check(dsl(hdfsDs("out_persist.csv"), pgDs("foo2"), pgDs("foo3")), acc.getLineages().get(0)); @@ -472,7 +579,7 @@ public void test9PersistJdbcInHdfsOut() throws Exception { verify(1 * N); } } - + private static class DatasetLineageAccumulator implements LineageConsumer { boolean closed = false; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java index acd8bff8c8c47..d1c1110329ad8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java @@ -6,36 +6,33 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; - +import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Objects; public class DeprecationUtils { - private DeprecationUtils() { } + private DeprecationUtils() {} - @Nullable - public static Deprecation getDeprecation( - @Nonnull EntityService entityService, - @Nonnull String urn, - Urn actor, - @Nullable String note, - boolean deprecated, - @Nullable Long decommissionTime - ) { - Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity( - urn, - Constants.DEPRECATION_ASPECT_NAME, - entityService, - new Deprecation()); - if (deprecation == null) { - return null; - } - deprecation.setActor(actor); - deprecation.setDeprecated(deprecated); - deprecation.setDecommissionTime(decommissionTime, SetMode.REMOVE_IF_NULL); - deprecation.setNote(Objects.requireNonNullElse(note, "")); - return deprecation; + @Nullable + public static Deprecation getDeprecation( + @Nonnull EntityService entityService, + @Nonnull String urn, + Urn actor, + @Nullable String note, + boolean deprecated, + @Nullable Long decommissionTime) { + Deprecation deprecation = + (Deprecation) + EntityUtils.getAspectFromEntity( + urn, Constants.DEPRECATION_ASPECT_NAME, entityService, new Deprecation()); + if (deprecation == null) { + return null; } + deprecation.setActor(actor); + deprecation.setDeprecated(deprecated); + deprecation.setDecommissionTime(decommissionTime, SetMode.REMOVE_IF_NULL); + deprecation.setNote(Objects.requireNonNullElse(note, "")); + return deprecation; + } } diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index dff9a22de8efd..53b974b560e2a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.client; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static com.linkedin.metadata.search.utils.SearchUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; @@ -70,578 +73,741 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static com.linkedin.metadata.search.utils.SearchUtils.*; - - @Slf4j @RequiredArgsConstructor public class JavaEntityClient implements EntityClient { - private static final int DEFAULT_RETRY_INTERVAL = 2; - private static final int DEFAULT_RETRY_COUNT = 3; - - private final static Set<String> NON_RETRYABLE = Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); - - private final Clock _clock = Clock.systemUTC(); - - private final EntityService _entityService; - private final DeleteEntityService _deleteEntityService; - private final EntitySearchService _entitySearchService; - private final CachingEntitySearchService _cachingEntitySearchService; - private final SearchService _searchService; - private final LineageSearchService _lineageSearchService; - private final TimeseriesAspectService _timeseriesAspectService; - private final EventProducer _eventProducer; - private final RestliEntityClient _restliEntityClient; - - @Nullable - public EntityResponse getV2( - @Nonnull String entityName, - @Nonnull final Urn urn, - @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set<String> projectedAspects = aspectNames == null - ? _entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntityV2(entityName, urn, projectedAspects); - } - - @Nonnull - public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { - return _entityService.getEntity(urn, ImmutableSet.of()); - } - - @Nonnull - @Override - public Map<Urn, EntityResponse> batchGetV2( - @Nonnull String entityName, - @Nonnull Set<Urn> urns, - @Nullable Set<String> aspectNames, - @Nonnull Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set<String> projectedAspects = aspectNames == null - ? _entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntitiesV2(entityName, urns, projectedAspects); - } - - @Nonnull - public Map<Urn, EntityResponse> batchGetVersionedV2( - @Nonnull String entityName, - @Nonnull final Set<VersionedUrn> versionedUrns, - @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set<String> projectedAspects = aspectNames == null - ? 
_entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntitiesVersionedV2(versionedUrns, projectedAspects); - } - - @Nonnull - public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) { - return _entityService.getEntities(urns, ImmutableSet.of()); - } - - /** - * Gets autocomplete results - * - * @param entityType the type of entity to autocomplete against - * @param query search query - * @param field field of the dataset to autocomplete against - * @param requestFilters autocomplete filters - * @param limit max number of autocomplete results - * @throws RemoteInvocationException - */ - @Nonnull - public AutoCompleteResult autoComplete( - @Nonnull String entityType, - @Nonnull String query, - @Nullable Filter requestFilters, - @Nonnull int limit, - @Nullable String field, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _cachingEntitySearchService.autoComplete(entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit, null); - } - - /** - * Gets autocomplete results - * - * @param entityType the type of entity to autocomplete against - * @param query search query - * @param requestFilters autocomplete filters - * @param limit max number of autocomplete results - * @throws RemoteInvocationException - */ - @Nonnull - public AutoCompleteResult autoComplete( - @Nonnull String entityType, - @Nonnull String query, - @Nullable Filter requestFilters, - @Nonnull int limit, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _cachingEntitySearchService.autoComplete(entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit, null); - } - - /** - * Gets autocomplete results - * - * @param entityType entity type being browse - * @param path path being browsed - * @param requestFilters browse filters - * @param start start offset of first dataset - * @param limit max number of datasets - * @throws RemoteInvocationException - */ - @Nonnull - public BrowseResult browse( - @Nonnull String entityType, - @Nonnull String path, - @Nullable Map<String, String> requestFilters, - int start, - int limit, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return ValidationUtils.validateBrowseResult( - _cachingEntitySearchService.browse(entityType, path, newFilter(requestFilters), start, limit, null), _entityService); - } - - - /** - * Gets browse V2 snapshot of a given path - * - * @param entityName entity being browsed - * @param path path being browsed - * @param filter browse filter - * @param input search query - * @param start start offset of first group - * @param count max number of results requested - * @throws RemoteInvocationException - */ - @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) { - // TODO: cache browseV2 results - return _entitySearchService.browseV2(entityName, path, filter, input, start, count); - } - - @SneakyThrows - @Deprecated - public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - Objects.requireNonNull(authentication, "authentication must not be null"); - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - 
auditStamp.setTime(Clock.systemUTC().millis()); - _entityService.ingestEntity(entity, auditStamp); - } - - @SneakyThrows - @Deprecated - public void updateWithSystemMetadata( - @Nonnull final Entity entity, - @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - if (systemMetadata == null) { - update(entity, authentication); - return; - } - - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - auditStamp.setTime(Clock.systemUTC().millis()); - - _entityService.ingestEntity(entity, auditStamp, systemMetadata); - tryIndexRunId(com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()), systemMetadata); - } - - @SneakyThrows - @Deprecated - public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - auditStamp.setTime(Clock.systemUTC().millis()); - _entityService.ingestEntities(entities.stream().collect(Collectors.toList()), auditStamp, ImmutableList.of()); - } - - /** - * Searches for entities matching to a given query and filters - * - * @param input search query - * @param requestFilters search filters - * @param start start offset for search results - * @param count max number of search results requested - * @param searchFlags - * @return a set of search results - * @throws RemoteInvocationException - */ - @Nonnull - @WithSpan - @Override - public SearchResult search(@Nonnull String entity, @Nonnull String input, - @Nullable Map<String, String> requestFilters, int start, int count, @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) - throws RemoteInvocationException { - - return ValidationUtils.validateSearchResult(_entitySearchService.search(List.of(entity), input, newFilter(requestFilters), - null, start, count, searchFlags), _entityService); - } - - /** - * Deprecated! Use 'filter' or 'search' instead. 
- * - * Filters for entities matching to a given query and filters - * - * @param requestFilters search filters - * @param start start offset for search results - * @param count max number of search results requested - * @return a set of list results - * @throws RemoteInvocationException - */ - @Deprecated - @Nonnull - public ListResult list( - @Nonnull String entity, - @Nullable Map<String, String> requestFilters, - int start, - int count, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateListResult(toListResult( - _entitySearchService.filter(entity, newFilter(requestFilters), null, start, count)), _entityService); - } - - /** - * Searches for datasets matching to a given query and filters - * - * @param input search query - * @param filter search filters - * @param sortCriterion sort criterion - * @param start start offset for search results - * @param count max number of search results requested - * @return Snapshot key - * @throws RemoteInvocationException - */ - @Nonnull - @Override - public SearchResult search( - @Nonnull String entity, - @Nonnull String input, - @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, - int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) - throws RemoteInvocationException { - return ValidationUtils.validateSearchResult( - _entitySearchService.search(List.of(entity), input, filter, sortCriterion, start, count, searchFlags), _entityService); - } - - @Nonnull - public SearchResult searchAcrossEntities( - @Nonnull List<String> entities, - @Nonnull String input, - @Nullable Filter filter, - int start, - int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return searchAcrossEntities(entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); - } - - /** - * Searches for entities matching to a given query and filters across multiple entity types - * - * @param entities entity types to search (if empty, searches all entities) - * @param input search query - * @param filter search filters - * @param start start offset for search results - * @param count max number of search results requested - * @param facets list of facets we want aggregations for - * @param sortCriterion sorting criterion - * @return Snapshot key - * @throws RemoteInvocationException - */ - @Nonnull - public SearchResult searchAcrossEntities( - @Nonnull List<String> entities, - @Nonnull String input, - @Nullable Filter filter, - int start, - int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication, - @Nullable List<String> facets) throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); - return ValidationUtils.validateSearchResult( - _searchService.searchAcrossEntities(entities, input, filter, sortCriterion, start, count, finalFlags, facets), _entityService); - } - - @Nonnull - @Override - public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, - @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) - throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); - return ValidationUtils.validateScrollResult( - _searchService.scrollAcrossEntities(entities, input, filter, null, scrollId, keepAlive, count, - finalFlags), _entityService); - } - - @Nonnull - @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, start, count, null, null, searchFlags), _entityService); - } - - @Nonnull - @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, start, count, startTimeMillis, endTimeMillis, searchFlags), _entityService); - } - - @Nonnull - @Override - public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true).setSkipCache(true); - return ValidationUtils.validateLineageScrollResult( - _lineageSearchService.scrollAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, scrollId, keepAlive, count, startTimeMillis, endTimeMillis, finalFlags), _entityService); - } - - /** - * Gets browse path(s) given dataset urn - * - * @param urn urn for the entity - * @return list of paths given urn - * @throws RemoteInvocationException - */ - @Nonnull - public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return new StringArray(_entitySearchService.getBrowsePaths(urn.getEntityType(), urn)); - } - - public void setWritable(boolean canWrite, @Nonnull final Authentication authentication) throws RemoteInvocationException { - _entityService.setWritable(canWrite); - } - - @Nonnull - public Map<String, Long> batchGetTotalEntityCount( - @Nonnull List<String> entityNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _searchService.docCountPerEntity(entityNames); - } - - /** - * List all urns existing for a particular Entity type. 
- */ - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return _entityService.listUrns(entityName, start, count); - } - - /** - * Hard delete an entity with a particular urn. - */ - public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - _entityService.deleteUrn(urn); - } - - @Override - public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) - throws RemoteInvocationException { - withRetry(() -> _deleteEntityService.deleteReferencesTo(urn, false), "deleteEntityReferences"); - } - - @Nonnull - @Override - public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, - int start, int count, @Nonnull final Authentication authentication) throws RemoteInvocationException { - return ValidationUtils.validateSearchResult(_entitySearchService.filter(entity, filter, sortCriterion, start, count), - _entityService); - } - - @Override - public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return _entityService.exists(urn); - } - - @SneakyThrows - @Override - public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - } - - @SneakyThrows - @Override - public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - } - - @SneakyThrows - @Override - public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); - response.setEntityName(entity); - response.setAspectName(aspect); - if (startTimeMillis != null) { - response.setStartTimeMillis(startTimeMillis); + private static final int DEFAULT_RETRY_INTERVAL = 2; + private static final int DEFAULT_RETRY_COUNT = 3; + + private static final Set<String> NON_RETRYABLE = + Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); + + private final Clock _clock = Clock.systemUTC(); + + private final EntityService _entityService; + private final DeleteEntityService _deleteEntityService; + private final EntitySearchService _entitySearchService; + private final CachingEntitySearchService _cachingEntitySearchService; + private final SearchService _searchService; + private final LineageSearchService _lineageSearchService; + private final TimeseriesAspectService _timeseriesAspectService; + private final EventProducer _eventProducer; + private final RestliEntityClient _restliEntityClient; + + @Nullable + public EntityResponse getV2( + @Nonnull String entityName, + @Nonnull final Urn urn, + @Nullable final Set<String> aspectNames, + @Nonnull final Authentication authentication) + throws 
RemoteInvocationException, URISyntaxException { + final Set<String> projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntityV2(entityName, urn, projectedAspects); + } + + @Nonnull + public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { + return _entityService.getEntity(urn, ImmutableSet.of()); + } + + @Nonnull + @Override + public Map<Urn, EntityResponse> batchGetV2( + @Nonnull String entityName, + @Nonnull Set<Urn> urns, + @Nullable Set<String> aspectNames, + @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final Set<String> projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntitiesV2(entityName, urns, projectedAspects); + } + + @Nonnull + public Map<Urn, EntityResponse> batchGetVersionedV2( + @Nonnull String entityName, + @Nonnull final Set<VersionedUrn> versionedUrns, + @Nullable final Set<String> aspectNames, + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final Set<String> projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntitiesVersionedV2(versionedUrns, projectedAspects); + } + + @Nonnull + public Map<Urn, Entity> batchGet( + @Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) { + return _entityService.getEntities(urns, ImmutableSet.of()); + } + + /** + * Gets autocomplete results + * + * @param entityType the type of entity to autocomplete against + * @param query search query + * @param field field of the dataset to autocomplete against + * @param requestFilters autocomplete filters + * @param limit max number of autocomplete results + * @throws RemoteInvocationException + */ + @Nonnull + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _cachingEntitySearchService.autoComplete( + entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit, null); + } + + /** + * Gets autocomplete results + * + * @param entityType the type of entity to autocomplete against + * @param query search query + * @param requestFilters autocomplete filters + * @param limit max number of autocomplete results + * @throws RemoteInvocationException + */ + @Nonnull + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _cachingEntitySearchService.autoComplete( + entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit, null); + } + + /** + * Gets autocomplete results + * + * @param entityType entity type being browse + * @param path path being browsed + * @param requestFilters browse filters + * @param start start offset of first dataset + * @param limit max number of datasets + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map<String, String> requestFilters, + int start, + int limit, + @Nonnull final Authentication authentication) + throws 
RemoteInvocationException { + return ValidationUtils.validateBrowseResult( + _cachingEntitySearchService.browse( + entityType, path, newFilter(requestFilters), start, limit, null), + _entityService); + } + + /** + * Gets browse V2 snapshot of a given path + * + * @param entityName entity being browsed + * @param path path being browsed + * @param filter browse filter + * @param input search query + * @param start start offset of first group + * @param count max number of results requested + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) { + // TODO: cache browseV2 results + return _entitySearchService.browseV2(entityName, path, filter, input, start, count); + } + + @SneakyThrows + @Deprecated + public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + Objects.requireNonNull(authentication, "authentication must not be null"); + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + _entityService.ingestEntity(entity, auditStamp); + } + + @SneakyThrows + @Deprecated + public void updateWithSystemMetadata( + @Nonnull final Entity entity, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + if (systemMetadata == null) { + update(entity, authentication); + return; + } + + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + + _entityService.ingestEntity(entity, auditStamp, systemMetadata); + tryIndexRunId( + com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()), systemMetadata); + } + + @SneakyThrows + @Deprecated + public void batchUpdate( + @Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + _entityService.ingestEntities( + entities.stream().collect(Collectors.toList()), auditStamp, ImmutableList.of()); + } + + /** + * Searches for entities matching to a given query and filters + * + * @param input search query + * @param requestFilters search filters + * @param start start offset for search results + * @param count max number of search results requested + * @param searchFlags + * @return a set of search results + * @throws RemoteInvocationException + */ + @Nonnull + @WithSpan + @Override + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + + return ValidationUtils.validateSearchResult( + _entitySearchService.search( + List.of(entity), input, newFilter(requestFilters), null, start, count, searchFlags), + _entityService); + } + + /** + * Deprecated! Use 'filter' or 'search' instead. 
+ * + * <p>Filters for entities matching to a given query and filters + * + * @param requestFilters search filters + * @param start start offset for search results + * @param count max number of search results requested + * @return a set of list results + * @throws RemoteInvocationException + */ + @Deprecated + @Nonnull + public ListResult list( + @Nonnull String entity, + @Nullable Map<String, String> requestFilters, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateListResult( + toListResult( + _entitySearchService.filter(entity, newFilter(requestFilters), null, start, count)), + _entityService); + } + + /** + * Searches for datasets matching to a given query and filters + * + * @param input search query + * @param filter search filters + * @param sortCriterion sort criterion + * @param start start offset for search results + * @param count max number of search results requested + * @return Snapshot key + * @throws RemoteInvocationException + */ + @Nonnull + @Override + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + return ValidationUtils.validateSearchResult( + _entitySearchService.search( + List.of(entity), input, filter, sortCriterion, start, count, searchFlags), + _entityService); + } + + @Nonnull + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return searchAcrossEntities( + entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); + } + + /** + * Searches for entities matching to a given query and filters across multiple entity types + * + * @param entities entity types to search (if empty, searches all entities) + * @param input search query + * @param filter search filters + * @param start start offset for search results + * @param count max number of search results requested + * @param facets list of facets we want aggregations for + * @param sortCriterion sorting criterion + * @return Snapshot key + * @throws RemoteInvocationException + */ + @Nonnull + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication, + @Nullable List<String> facets) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateSearchResult( + _searchService.searchAcrossEntities( + entities, input, filter, sortCriterion, start, count, finalFlags, facets), + _entityService); + } + + @Nonnull + @Override + public ScrollResult scrollAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull Authentication authentication) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateScrollResult( + _searchService.scrollAcrossEntities( + entities, input, filter, null, scrollId, keepAlive, count, finalFlags), + _entityService); + } + + @Nonnull + @Override + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + start, + count, + null, + null, + searchFlags), + _entityService); + } + + @Nonnull + @Override + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags), + _entityService); + } + + @Nonnull + @Override + public LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true).setSkipCache(true); + return ValidationUtils.validateLineageScrollResult( + _lineageSearchService.scrollAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + finalFlags), + _entityService); + } + + /** + * Gets browse path(s) given dataset urn + * + * @param urn urn for the entity + * @return list of paths given urn + * @throws RemoteInvocationException + */ + @Nonnull + public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return new StringArray(_entitySearchService.getBrowsePaths(urn.getEntityType(), urn)); + } + + public void setWritable(boolean canWrite, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + _entityService.setWritable(canWrite); + } + + @Nonnull + public Map<String, Long> batchGetTotalEntityCount( + @Nonnull List<String> entityNames, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _searchService.docCountPerEntity(entityNames); + } + + /** List all urns existing for a particular Entity type. */ + public ListUrnsResult listUrns( + @Nonnull final String entityName, + final int start, + final int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.listUrns(entityName, start, count); + } + + /** Hard delete an entity with a particular urn. */ + public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + _entityService.deleteUrn(urn); + } + + @Override + public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) + throws RemoteInvocationException { + withRetry(() -> _deleteEntityService.deleteReferencesTo(urn, false), "deleteEntityReferences"); + } + + @Nonnull + @Override + public SearchResult filter( + @Nonnull String entity, + @Nonnull Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateSearchResult( + _entitySearchService.filter(entity, filter, sortCriterion, start, count), _entityService); + } + + @Override + public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.exists(urn); + } + + @SneakyThrows + @Override + public VersionedAspect getAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + } + + @SneakyThrows + @Override + public VersionedAspect getAspectOrNull( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + } + + @SneakyThrows + @Override + public List<EnvelopedAspect> getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort, + @Nonnull final Authentication 
authentication) + throws RemoteInvocationException { + GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); + response.setEntityName(entity); + response.setAspectName(aspect); + if (startTimeMillis != null) { + response.setStartTimeMillis(startTimeMillis); + } + if (endTimeMillis != null) { + response.setEndTimeMillis(endTimeMillis); + } + if (limit != null) { + response.setLimit(limit); + } + if (filter != null) { + response.setFilter(filter); + } + response.setValues( + new EnvelopedAspectArray( + _timeseriesAspectService.getAspectValues( + Urn.createFromString(urn), + entity, + aspect, + startTimeMillis, + endTimeMillis, + limit, + filter, + sort))); + return response.getValues(); + } + + // TODO: Factor out ingest logic into a util that can be accessed by the java client and the + // resource + @Override + public String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException { + String actorUrnStr = + authentication.getActor() != null + ? authentication.getActor().toUrnStr() + : Constants.UNKNOWN_ACTOR; + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); + final List<MetadataChangeProposal> additionalChanges = + AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); + + Stream<MetadataChangeProposal> proposalStream = + Stream.concat(Stream.of(metadataChangeProposal), additionalChanges.stream()); + AspectsBatch batch = + AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) + .build(); + + IngestResult one = + _entityService.ingestProposal(batch, auditStamp, async).stream().findFirst().get(); + + Urn urn = one.getUrn(); + tryIndexRunId(urn, metadataChangeProposal.getSystemMetadata()); + return urn.toString(); + } + + @SneakyThrows + @Override + public <T extends RecordTemplate> Optional<T> getVersionedAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Class<T> aspectClass, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + VersionedAspect entity = + _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + if (entity != null && entity.hasAspect()) { + DataMap rawAspect = ((DataMap) entity.data().get("aspect")); + if (rawAspect.containsKey(aspectClass.getCanonicalName())) { + DataMap aspectDataMap = rawAspect.getDataMap(aspectClass.getCanonicalName()); + return Optional.of(RecordUtils.toRecordTemplate(aspectClass, aspectDataMap)); } - if (endTimeMillis != null) { - response.setEndTimeMillis(endTimeMillis); - } - if (limit != null) { - response.setLimit(limit); - } - if (filter != null) { - response.setFilter(filter); - } - response.setValues(new EnvelopedAspectArray( - _timeseriesAspectService.getAspectValues(Urn.createFromString(urn), entity, aspect, startTimeMillis, - endTimeMillis, limit, filter, sort))); - return response.getValues(); - } - - // TODO: Factor out ingest logic into a util that can be accessed by the java client and the resource - @Override - public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException { - String actorUrnStr = authentication.getActor() != null ? 
authentication.getActor().toUrnStr() : Constants.UNKNOWN_ACTOR; - final AuditStamp auditStamp = - new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); - final List<MetadataChangeProposal> additionalChanges = - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); - - Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(metadataChangeProposal), - additionalChanges.stream()); - AspectsBatch batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(); - - IngestResult one = _entityService.ingestProposal(batch, auditStamp, async).stream() - .findFirst().get(); - - Urn urn = one.getUrn(); - tryIndexRunId(urn, metadataChangeProposal.getSystemMetadata()); - return urn.toString(); - } - - @SneakyThrows - @Override - public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Class<T> aspectClass, @Nonnull final Authentication authentication) throws RemoteInvocationException { - VersionedAspect entity = _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - if (entity != null && entity.hasAspect()) { - DataMap rawAspect = ((DataMap) entity.data().get("aspect")); - if (rawAspect.containsKey(aspectClass.getCanonicalName())) { - DataMap aspectDataMap = rawAspect.getDataMap(aspectClass.getCanonicalName()); - return Optional.of(RecordUtils.toRecordTemplate(aspectClass, aspectDataMap)); - } - } - return Optional.empty(); } - - @SneakyThrows - public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Authentication authentication) throws RemoteInvocationException { - VersionedAspect entity = _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - if (entity == null) { - return null; + return Optional.empty(); + } + + @SneakyThrows + public DataMap getRawAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException { + VersionedAspect entity = + _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + if (entity == null) { + return null; + } + + if (entity.hasAspect()) { + DataMap rawAspect = ((DataMap) entity.data().get("aspect")); + return rawAspect; + } + + return null; + } + + @Override + public void producePlatformEvent( + @Nonnull String name, + @Nullable String key, + @Nonnull PlatformEvent event, + @Nonnull Authentication authentication) + throws Exception { + _eventProducer.producePlatformEvent(name, key, event); + } + + @Override + public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + throws Exception { + _restliEntityClient.rollbackIngestion(runId, authentication); + } + + private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) { + if (systemMetadata != null && systemMetadata.hasRunId()) { + _entitySearchService.appendRunId( + entityUrn.getEntityType(), entityUrn, systemMetadata.getRunId()); + } + } + + protected <T> T withRetry(@Nonnull final Supplier<T> block, @Nullable String counterPrefix) { + final BackoffPolicy backoffPolicy = new ExponentialBackoff(DEFAULT_RETRY_INTERVAL); + int attemptCount = 0; + + while (attemptCount < DEFAULT_RETRY_COUNT + 1) { + try { + return block.get(); + } catch (Throwable ex) { + MetricUtils.counter(this.getClass(), buildMetricName(ex, 
counterPrefix)).inc(); + + final boolean skipRetry = + NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) + || (ex.getCause() != null + && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); + + if (attemptCount == DEFAULT_RETRY_COUNT || skipRetry) { + throw ex; + } else { + attemptCount = attemptCount + 1; + try { + Thread.sleep(backoffPolicy.nextBackoff(attemptCount, ex) * 1000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } } - - if (entity.hasAspect()) { - DataMap rawAspect = ((DataMap) entity.data().get("aspect")); - return rawAspect; - } - - return null; - } - - @Override - public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, - @Nonnull Authentication authentication) throws Exception { - _eventProducer.producePlatformEvent(name, key, event); - } - - @Override - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) throws Exception { - _restliEntityClient.rollbackIngestion(runId, authentication); + } } - private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) { - if (systemMetadata != null && systemMetadata.hasRunId()) { - _entitySearchService.appendRunId(entityUrn.getEntityType(), entityUrn, systemMetadata.getRunId()); - } - } + // Should never hit this line. + throw new IllegalStateException("No JavaEntityClient call executed."); + } - protected <T> T withRetry(@Nonnull final Supplier<T> block, @Nullable String counterPrefix) { - final BackoffPolicy backoffPolicy = new ExponentialBackoff(DEFAULT_RETRY_INTERVAL); - int attemptCount = 0; - - while (attemptCount < DEFAULT_RETRY_COUNT + 1) { - try { - return block.get(); - } catch (Throwable ex) { - MetricUtils.counter(this.getClass(), buildMetricName(ex, counterPrefix)).inc(); - - final boolean skipRetry = NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) - || (ex.getCause() != null && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); - - if (attemptCount == DEFAULT_RETRY_COUNT || skipRetry) { - throw ex; - } else { - attemptCount = attemptCount + 1; - try { - Thread.sleep(backoffPolicy.nextBackoff(attemptCount, ex) * 1000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - } - } + private String buildMetricName(Throwable throwable, @Nullable String counterPrefix) { + StringBuilder builder = new StringBuilder(); - // Should never hit this line. 
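For readers tracing the control flow of withRetry: it is a bounded retry loop with exponential backoff that gives up immediately on exception types listed in NON_RETRYABLE. A stripped-down sketch of the same pattern follows; the names, the RuntimeException catch, and the base * 2^attempt backoff formula are simplifying assumptions for illustration, not the exact behavior of ExponentialBackoff.

import java.util.function.Supplier;

final class RetrySketch {
  // Bounded retry with exponential backoff (illustrative sketch only).
  static <T> T callWithRetry(Supplier<T> block, int maxRetries, long baseSeconds) {
    for (int attempt = 0; ; attempt++) {
      try {
        return block.get();
      } catch (RuntimeException ex) {
        if (attempt == maxRetries) {
          throw ex; // retries exhausted; surface the last failure
        }
        try {
          // Sleep base * 2^attempt seconds before the next attempt.
          Thread.sleep(baseSeconds * (1L << attempt) * 1000L);
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
          throw new RuntimeException(ie);
        }
      }
    }
  }
}

As in the real method, the loop makes maxRetries + 1 attempts in total; the line after this aside is unreachable in practice because every iteration either returns or rethrows.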
- throw new IllegalStateException("No JavaEntityClient call executed."); + // deleteEntityReferences_failures + if (counterPrefix != null) { + builder.append(counterPrefix).append(MetricUtils.DELIMITER); } - private String buildMetricName(Throwable throwable, @Nullable String counterPrefix) { - StringBuilder builder = new StringBuilder(); - - // deleteEntityReferences_failures - if (counterPrefix != null) { - builder.append(counterPrefix).append(MetricUtils.DELIMITER); - } - - return builder.append("exception") - .append(MetricUtils.DELIMITER) - .append(throwable.getClass().getName()) - .toString(); - } + return builder + .append("exception") + .append(MetricUtils.DELIMITER) + .append(throwable.getClass().getName()) + .toString(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java index 6b5a3d5bfb06e..0ac18b4aacc04 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java @@ -2,9 +2,9 @@ import com.datahub.authentication.Authentication; import com.linkedin.entity.client.EntityClientCache; -import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.event.EventProducer; @@ -15,25 +15,37 @@ import com.linkedin.metadata.timeseries.TimeseriesAspectService; import lombok.Getter; - -/** - * Java backed SystemEntityClient - */ +/** Java backed SystemEntityClient */ @Getter public class SystemJavaEntityClient extends JavaEntityClient implements SystemEntityClient { - private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final EntityClientCache entityClientCache; + private final Authentication systemAuthentication; - public SystemJavaEntityClient(EntityService entityService, DeleteEntityService deleteEntityService, - EntitySearchService entitySearchService, CachingEntitySearchService cachingEntitySearchService, - SearchService searchService, LineageSearchService lineageSearchService, - TimeseriesAspectService timeseriesAspectService, EventProducer eventProducer, - RestliEntityClient restliEntityClient, Authentication systemAuthentication, - EntityClientCacheConfig cacheConfig) { - super(entityService, deleteEntityService, entitySearchService, cachingEntitySearchService, searchService, - lineageSearchService, timeseriesAspectService, eventProducer, restliEntityClient); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig); - } + public SystemJavaEntityClient( + EntityService entityService, + DeleteEntityService deleteEntityService, + EntitySearchService entitySearchService, + CachingEntitySearchService cachingEntitySearchService, + SearchService searchService, + LineageSearchService lineageSearchService, + TimeseriesAspectService timeseriesAspectService, + EventProducer eventProducer, + RestliEntityClient restliEntityClient, + Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + super( + entityService, + 
deleteEntityService, + entitySearchService, + cachingEntitySearchService, + searchService, + lineageSearchService, + timeseriesAspectService, + eventProducer, + restliEntityClient); + this.systemAuthentication = systemAuthentication; + this.entityClientCache = + buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java b/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java index 660c1291a5651..c740f8562d8fe 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java @@ -5,19 +5,13 @@ import lombok.NonNull; import lombok.Value; - -/** - * A value class that holds the components of a key for metadata retrieval. - */ +/** A value class that holds the components of a key for metadata retrieval. */ @Value public class AspectKey<URN extends Urn, ASPECT extends RecordTemplate> { - @NonNull - Class<ASPECT> aspectClass; + @NonNull Class<ASPECT> aspectClass; - @NonNull - URN urn; + @NonNull URN urn; - @NonNull - Long version; + @NonNull Long version; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java index 7acb9ca0cbd64..999140759b09b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java @@ -15,7 +15,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public abstract class BaseReadDAO<ASPECT_UNION extends UnionTemplate, URN extends Urn> { public static final long FIRST_VERSION = 0; @@ -41,12 +40,10 @@ public BaseReadDAO(@Nonnull Set<Class<? extends RecordTemplate>> aspects) { * @return a mapping of given keys to the corresponding metadata aspect. */ @Nonnull - public abstract Map<AspectKey<URN, ? extends RecordTemplate>, Optional<? extends RecordTemplate>> get( - @Nonnull Set<AspectKey<URN, ? extends RecordTemplate>> keys); + public abstract Map<AspectKey<URN, ? extends RecordTemplate>, Optional<? extends RecordTemplate>> + get(@Nonnull Set<AspectKey<URN, ? extends RecordTemplate>> keys); - /** - * Similar to {@link #get(Set)} but only using only one {@link AspectKey}. - */ + /** Similar to {@link #get(Set)} but only using only one {@link AspectKey}. */ @Nonnull public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull AspectKey<URN, ASPECT> key) { return (Optional<ASPECT>) get(Collections.singleton(key)).get(key); @@ -56,21 +53,21 @@ public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull AspectKey<U * Similar to {@link #get(AspectKey)} but with each component of the key broken out as arguments. */ @Nonnull - public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn, - long version) { + public <ASPECT extends RecordTemplate> Optional<ASPECT> get( + @Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn, long version) { return get(new AspectKey<>(aspectClass, urn, version)); } - /** - * Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version. - */ + /** Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version. 
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java
index 7acb9ca0cbd64..999140759b09b 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java
@@ -15,7 +15,6 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
 public abstract class BaseReadDAO<ASPECT_UNION extends UnionTemplate, URN extends Urn> {
 
   public static final long FIRST_VERSION = 0;
@@ -41,12 +40,10 @@ public BaseReadDAO(@Nonnull Set<Class<? extends RecordTemplate>> aspects) {
    * @return a mapping of given keys to the corresponding metadata aspect.
    */
   @Nonnull
-  public abstract Map<AspectKey<URN, ? extends RecordTemplate>, Optional<? extends RecordTemplate>> get(
-      @Nonnull Set<AspectKey<URN, ? extends RecordTemplate>> keys);
+  public abstract Map<AspectKey<URN, ? extends RecordTemplate>, Optional<? extends RecordTemplate>>
+      get(@Nonnull Set<AspectKey<URN, ? extends RecordTemplate>> keys);
 
-  /**
-   * Similar to {@link #get(Set)} but only using only one {@link AspectKey}.
-   */
+  /** Similar to {@link #get(Set)} but only using only one {@link AspectKey}. */
   @Nonnull
   public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull AspectKey<URN, ASPECT> key) {
     return (Optional<ASPECT>) get(Collections.singleton(key)).get(key);
@@ -56,21 +53,21 @@ public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull AspectKey<U
    * Similar to {@link #get(AspectKey)} but with each component of the key broken out as arguments.
    */
   @Nonnull
-  public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn,
-      long version) {
+  public <ASPECT extends RecordTemplate> Optional<ASPECT> get(
+      @Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn, long version) {
     return get(new AspectKey<>(aspectClass, urn, version));
   }
 
-  /**
-   * Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version.
-   */
+  /** Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version. */
   @Nonnull
-  public <ASPECT extends RecordTemplate> Optional<ASPECT> get(@Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn) {
+  public <ASPECT extends RecordTemplate> Optional<ASPECT> get(
+      @Nonnull Class<ASPECT> aspectClass, @Nonnull URN urn) {
     return get(aspectClass, urn, LATEST_VERSION);
   }
 
   /**
-   * Similar to {@link #get(Class, Urn)} but retrieves multiple aspects latest versions associated with multiple URNs.
+   * Similar to {@link #get(Class, Urn)} but retrieves multiple aspects latest versions associated
+   * with multiple URNs.
    *
    * <p>The returned {@link Map} contains all the .
    */
@@ -85,20 +82,22 @@ public Map<URN, Map<Class<? extends RecordTemplate>, Optional<? extends RecordTe
       }
     }
 
-    final Map<URN, Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>>> results = new HashMap<>();
-    get(keys).entrySet().forEach(entry -> {
-      final AspectKey<URN, ? extends RecordTemplate> key = entry.getKey();
-      final URN urn = key.getUrn();
-      results.putIfAbsent(urn, new HashMap<>());
-      results.get(urn).put(key.getAspectClass(), entry.getValue());
-    });
+    final Map<URN, Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>>>
+        results = new HashMap<>();
+    get(keys)
+        .entrySet()
+        .forEach(
+            entry -> {
+              final AspectKey<URN, ? extends RecordTemplate> key = entry.getKey();
+              final URN urn = key.getUrn();
+              results.putIfAbsent(urn, new HashMap<>());
+              results.get(urn).put(key.getAspectClass(), entry.getValue());
+            });
 
     return results;
   }
 
-  /**
-   * Similar to {@link #get(Set, Set)} but only for one URN.
-   */
+  /** Similar to {@link #get(Set, Set)} but only for one URN. */
   @Nonnull
   public Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>> get(
       @Nonnull Set<Class<? extends RecordTemplate>> aspectClasses, @Nonnull URN urn) {
@@ -112,16 +111,15 @@ public Map<Class<? extends RecordTemplate>, Optional<? extends RecordTemplate>>
     return results.get(urn);
   }
 
-  /**
-   * Similar to {@link #get(Set, Set)} but only for one aspect.
-   */
+  /** Similar to {@link #get(Set, Set)} but only for one aspect. */
   @Nonnull
   public <ASPECT extends RecordTemplate> Map<URN, Optional<ASPECT>> get(
      @Nonnull Class<ASPECT> aspectClass, @Nonnull Set<URN> urns) {
-    return get(Collections.singleton(aspectClass), urns).entrySet()
-        .stream()
-        .collect(Collectors.toMap(Map.Entry::getKey, entry -> (Optional<ASPECT>) entry.getValue().get(aspectClass)));
+    return get(Collections.singleton(aspectClass), urns).entrySet().stream()
+        .collect(
+            Collectors.toMap(
+                Map.Entry::getKey, entry -> (Optional<ASPECT>) entry.getValue().get(aspectClass)));
   }
 
   protected void checkValidAspect(@Nonnull Class<? extends RecordTemplate> aspectClass) {
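Aside, illustrative only and not part of this patch: the reformatted overloads above are all sugar over the batch get(Set<AspectKey>). A sketch of the three convenience shapes, assuming a concrete BaseReadDAO subclass bound as dao and the same stand-in types as the earlier aside:

    CorpuserUrn urn = new CorpuserUrn("jdoe");                  // made-up example id
    Optional<Ownership> latest = dao.get(Ownership.class, urn); // latest (LATEST_VERSION)
    Optional<Ownership> v2 = dao.get(Ownership.class, urn, 2L); // a specific version
    Map<CorpuserUrn, Optional<Ownership>> byUrn =
        dao.get(Ownership.class, Set.of(urn));                  // one aspect, many urns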
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java
index 42dd3f0405a6a..ae27f9f7e6f1a 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java
@@ -7,150 +7,162 @@
 import com.linkedin.metadata.utils.metrics.MetricUtils;
 import io.ebean.PagedList;
 import io.ebean.Transaction;
-
-import java.util.stream.Stream;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import java.sql.Timestamp;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Function;
 import java.util.function.Supplier;
+import java.util.stream.Stream;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 
 /**
- * An interface specifying create, update, and read operations against metadata entity aspects.
- * This interface is meant to abstract away the storage concerns of these pieces of metadata, permitting any underlying
- * storage system to be used.
+ * An interface specifying create, update, and read operations against metadata entity aspects. This
+ * interface is meant to abstract away the storage concerns of these pieces of metadata, permitting
+ * any underlying storage system to be used.
 *
- * Requirements for any implementation:
- * 1. Being able to map its internal storage representation to {@link EntityAspect};
- * 2. Honor the internal versioning semantics. The latest version of any aspect is set to 0 for efficient retrieval.
- * In most cases only the latest state of an aspect will be fetched. See {@link EntityServiceImpl} for more details.
+ * <p>Requirements for any implementation: 1. Being able to map its internal storage representation
+ * to {@link EntityAspect}; 2. Honor the internal versioning semantics. The latest version of any
+ * aspect is set to 0 for efficient retrieval. In most cases only the latest state of an aspect will
+ * be fetched. See {@link EntityServiceImpl} for more details.
 *
- * TODO: This interface exposes {@link #runInTransactionWithRetry(Supplier, int)} because {@link EntityServiceImpl} concerns
- * itself with batching multiple commands into a single transaction. It exposes storage concerns somewhat and it'd be
- * worth looking into ways to move this responsibility inside {@link AspectDao} implementations.
+ * <p>TODO: This interface exposes {@link #runInTransactionWithRetry(Supplier, int)} because {@link
+ * EntityServiceImpl} concerns itself with batching multiple commands into a single transaction. It
+ * exposes storage concerns somewhat and it'd be worth looking into ways to move this responsibility
+ * inside {@link AspectDao} implementations.
 */
 public interface AspectDao {
-  String ASPECT_WRITE_COUNT_METRIC_NAME = "aspectWriteCount";
-  String ASPECT_WRITE_BYTES_METRIC_NAME = "aspectWriteBytes";
-
-  @Nullable
-  EntityAspect getAspect(@Nonnull final String urn, @Nonnull final String aspectName, final long version);
-
-  @Nullable
-  EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key);
-
-  @Nonnull
-  Map<EntityAspectIdentifier, EntityAspect> batchGet(@Nonnull final Set<EntityAspectIdentifier> keys);
-
-  @Nonnull
-  List<EntityAspect> getAspectsInRange(@Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis);
-
-  @Nullable
-  default EntityAspect getLatestAspect(@Nonnull final String urn, @Nonnull final String aspectName) {
-    return getLatestAspects(Map.of(urn, Set.of(aspectName))).getOrDefault(urn, Map.of())
-        .getOrDefault(aspectName, null);
-  }
-
-  @Nonnull
-  Map<String, Map<String, EntityAspect>> getLatestAspects(Map<String, Set<String>> urnAspects);
-
-  void saveAspect(
-      @Nullable Transaction tx,
-      @Nonnull final String urn,
-      @Nonnull final String aspectName,
-      @Nonnull final String aspectMetadata,
-      @Nonnull final String actor,
-      @Nullable final String impersonator,
-      @Nonnull final Timestamp timestamp,
-      @Nonnull final String systemMetadata,
-      final long version,
-      final boolean insert);
-
-  void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert);
-
-  long saveLatestAspect(
-      @Nullable Transaction tx,
-      @Nonnull final String urn,
-      @Nonnull final String aspectName,
-      @Nullable final String oldAspectMetadata,
-      @Nullable final String oldActor,
-      @Nullable final String oldImpersonator,
-      @Nullable final Timestamp oldTime,
-      @Nullable final String oldSystemMetadata,
-      @Nonnull final String newAspectMetadata,
-      @Nonnull final String newActor,
-      @Nullable final String newImpersonator,
-      @Nonnull final Timestamp newTime,
-      @Nullable final String newSystemMetadata,
-      final Long nextVersion);
-
-  void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect);
-
-  @Nonnull
-  ListResult<String> listUrns(
-      @Nonnull final String entityName,
-      @Nonnull final String aspectName,
-      final int start,
-      final int pageSize);
-
-  @Nonnull
-  Integer countAspect(
-      @Nonnull final String aspectName,
-      @Nullable String urnLike);
-
-  @Nonnull
-  PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args);
-
-  @Nonnull
-  Stream<EntityAspect> streamAspects(String entityName, String aspectName);
-
-  int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn);
-
-  @Nonnull
-  ListResult<String> listLatestAspectMetadata(
-      @Nonnull final String entityName,
-      @Nonnull final String aspectName,
-      final int start,
-      final int pageSize);
-
-  @Nonnull
-  ListResult<String> listAspectMetadata(
-      @Nonnull final String entityName,
-      @Nonnull final String aspectName,
-      final long version,
-      final int start,
-      final int pageSize);
-
-  Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<String>> urnAspectMap);
-
-  default long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) {
-    return getNextVersions(urn, Set.of(aspectName)).get(aspectName);
-  }
-
-  default Map<String, Long> getNextVersions(@Nonnull final String urn, @Nonnull final Set<String> aspectNames) {
-    return getNextVersions(Map.of(urn, aspectNames)).get(urn);
-  }
-
-  long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName);
-
-  void setWritable(boolean canWrite);
-
-  @Nonnull
-  <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, final int maxTransactionRetry);
-
-  @Nonnull
-  default <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, AspectsBatch batch,
-      final int maxTransactionRetry) {
-    return runInTransactionWithRetry(block, maxTransactionRetry);
-  }
-
-  default void incrementWriteMetrics(String aspectName, long count, long bytes) {
-    MetricUtils.counter(this.getClass(),
-        String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_COUNT_METRIC_NAME, aspectName))).inc(count);
-    MetricUtils.counter(this.getClass(),
-        String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_BYTES_METRIC_NAME, aspectName))).inc(bytes);
-  }
+  String ASPECT_WRITE_COUNT_METRIC_NAME = "aspectWriteCount";
+  String ASPECT_WRITE_BYTES_METRIC_NAME = "aspectWriteBytes";
+
+  @Nullable
+  EntityAspect getAspect(
+      @Nonnull final String urn, @Nonnull final String aspectName, final long version);
+
+  @Nullable
+  EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key);
+
+  @Nonnull
+  Map<EntityAspectIdentifier, EntityAspect> batchGet(
+      @Nonnull final Set<EntityAspectIdentifier> keys);
+
+  @Nonnull
+  List<EntityAspect> getAspectsInRange(
+      @Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis);
+
+  @Nullable
+  default EntityAspect getLatestAspect(
+      @Nonnull final String urn, @Nonnull final String aspectName) {
+    return getLatestAspects(Map.of(urn, Set.of(aspectName)))
+        .getOrDefault(urn, Map.of())
+        .getOrDefault(aspectName, null);
+  }
+
+  @Nonnull
+  Map<String, Map<String, EntityAspect>> getLatestAspects(Map<String, Set<String>> urnAspects);
+
+  void saveAspect(
+      @Nullable Transaction tx,
+      @Nonnull final String urn,
+      @Nonnull final String aspectName,
+      @Nonnull final String aspectMetadata,
+      @Nonnull final String actor,
+      @Nullable final String impersonator,
+      @Nonnull final Timestamp timestamp,
+      @Nonnull final String systemMetadata,
+      final long version,
+      final boolean insert);
+
+  void saveAspect(
+      @Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert);
+
+  long saveLatestAspect(
+      @Nullable Transaction tx,
+      @Nonnull final String urn,
+      @Nonnull final String aspectName,
+      @Nullable final String oldAspectMetadata,
+      @Nullable final String oldActor,
+      @Nullable final String oldImpersonator,
+      @Nullable final Timestamp oldTime,
+      @Nullable final String oldSystemMetadata,
+      @Nonnull final String newAspectMetadata,
+      @Nonnull final String newActor,
+      @Nullable final String newImpersonator,
+      @Nonnull final Timestamp newTime,
+      @Nullable final String newSystemMetadata,
+      final Long nextVersion);
+
+  void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect);
+
+  @Nonnull
+  ListResult<String> listUrns(
+      @Nonnull final String entityName,
+      @Nonnull final String aspectName,
+      final int start,
+      final int pageSize);
+
+  @Nonnull
+  Integer countAspect(@Nonnull final String aspectName, @Nullable String urnLike);
+
+  @Nonnull
+  PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args);
+
+  @Nonnull
+  Stream<EntityAspect> streamAspects(String entityName, String aspectName);
+
+  int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn);
+
+  @Nonnull
+  ListResult<String> listLatestAspectMetadata(
+      @Nonnull final String entityName,
+      @Nonnull final String aspectName,
+      final int start,
+      final int pageSize);
+
+  @Nonnull
+  ListResult<String> listAspectMetadata(
+      @Nonnull final String entityName,
+      @Nonnull final String aspectName,
+      final long version,
+      final int start,
+      final int pageSize);
+
+  Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<String>> urnAspectMap);
+
+  default long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) {
+    return getNextVersions(urn, Set.of(aspectName)).get(aspectName);
+  }
+
+  default Map<String, Long> getNextVersions(
+      @Nonnull final String urn, @Nonnull final Set<String> aspectNames) {
+    return getNextVersions(Map.of(urn, aspectNames)).get(urn);
+  }
+
+  long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName);
+
+  void setWritable(boolean canWrite);
+
+  @Nonnull
+  <T> T runInTransactionWithRetry(
+      @Nonnull final Function<Transaction, T> block, final int maxTransactionRetry);
+
+  @Nonnull
+  default <T> T runInTransactionWithRetry(
+      @Nonnull final Function<Transaction, T> block,
+      AspectsBatch batch,
+      final int maxTransactionRetry) {
+    return runInTransactionWithRetry(block, maxTransactionRetry);
+  }
+
+  default void incrementWriteMetrics(String aspectName, long count, long bytes) {
+    MetricUtils.counter(
+            this.getClass(),
+            String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_COUNT_METRIC_NAME, aspectName)))
+        .inc(count);
+    MetricUtils.counter(
+            this.getClass(),
+            String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_BYTES_METRIC_NAME, aspectName)))
+        .inc(bytes);
+  }
 }
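Aside, illustrative only and not part of this patch: the default getNextVersion helpers above are thin wrappers over the batch getNextVersions(Map) call, so single-row and batched writers share one query path. A sketch, assuming an AspectDao implementation bound as aspectDao and a made-up urn:

    String urn = "urn:li:corpuser:jdoe";
    long next = aspectDao.getNextVersion(urn, "ownership");
    // Equivalent expansion through the batch call:
    long sameNext =
        aspectDao.getNextVersions(Map.of(urn, Set.of("ownership"))).get(urn).get("ownership");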
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java
index c16a41cbaf84b..485eb2b1af943 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java
@@ -3,14 +3,15 @@
 import javax.annotation.Nonnull;
 
 /**
- * This interface is a split-off from {@link AspectDao} to segregate the methods that are only called by data migration
- * tasks. This separation is not technically necessary, but it felt dangerous to leave entire-table queries mixed
- * with the rest.
+ * This interface is a split-off from {@link AspectDao} to segregate the methods that are only
+ * called by data migration tasks. This separation is not technically necessary, but it felt
+ * dangerous to leave entire-table queries mixed with the rest.
  */
 public interface AspectMigrationsDao {
 
   /**
    * Return a paged list of _all_ URNs in the database.
+   *
    * @param start Start offset of a page.
    * @param pageSize Number of records in a page.
    * @return An iterable of {@code String} URNs.
@@ -20,14 +21,17 @@ public interface AspectMigrationsDao {
 
   /**
    * Return the count of entities (unique URNs) in the database.
+   *
    * @return Count of entities.
    */
   long countEntities();
 
   /**
    * Check if any record of given {@param aspectName} exists in the database.
+   *
    * @param aspectName Name of an entity aspect to search for.
-   * @return {@code true} if at least one record of given {@param aspectName} is found. {@code false} otherwise.
+   * @return {@code true} if at least one record of given {@param aspectName} is found. {@code
+   *     false} otherwise.
    */
   boolean checkIfAspectExists(@Nonnull final String aspectName);
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java
index 8296edd615aad..eaf9b1a2cc415 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java
@@ -1,19 +1,18 @@
 package com.linkedin.metadata.entity;
 
+import java.sql.Timestamp;
+import javax.annotation.Nonnull;
 import lombok.AllArgsConstructor;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
-import javax.annotation.Nonnull;
-import java.sql.Timestamp;
-
 /**
- * This is an internal representation of an entity aspect record {@link EntityServiceImpl} and {@link AspectDao}
- * implementations are using. While {@link AspectDao} implementations have their own aspect record implementations,
- * they cary implementation details that should not leak outside. Therefore, this is the type to use in public
- * {@link AspectDao} methods.
+ * This is an internal representation of an entity aspect record {@link EntityServiceImpl} and
+ * {@link AspectDao} implementations are using. While {@link AspectDao} implementations have their
+ * own aspect record implementations, they cary implementation details that should not leak outside.
+ * Therefore, this is the type to use in public {@link AspectDao} methods.
 */
 @Getter
 @Setter
@@ -22,25 +21,23 @@
 @EqualsAndHashCode
 public class EntityAspect {
 
-    @Nonnull
-    private String urn;
+  @Nonnull private String urn;
 
-    @Nonnull
-    private String aspect;
+  @Nonnull private String aspect;
 
-    private long version;
+  private long version;
 
-    private String metadata;
+  private String metadata;
 
-    private String systemMetadata;
+  private String systemMetadata;
 
-    private Timestamp createdOn;
+  private Timestamp createdOn;
 
-    private String createdBy;
+  private String createdBy;
 
-    private String createdFor;
+  private String createdFor;
 
-    public EntityAspectIdentifier toAspectIdentifier() {
-        return new EntityAspectIdentifier(getUrn(), getAspect(), getVersion());
-    }
+  public EntityAspectIdentifier toAspectIdentifier() {
+    return new EntityAspectIdentifier(getUrn(), getAspect(), getVersion());
+  }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java
index cb360192c0120..887bd3910310d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java
@@ -6,10 +6,10 @@
 import lombok.Value;
 import lombok.extern.slf4j.Slf4j;
 
-
 /**
- * This class holds values required to construct a unique key to identify an entity aspect record in a database.
- * Its existence started mainly for compatibility with {@link com.linkedin.metadata.entity.ebean.EbeanAspectV2.PrimaryKey}
+ * This class holds values required to construct a unique key to identify an entity aspect record in
+ * a database. Its existence started mainly for compatibility with {@link
+ * com.linkedin.metadata.entity.ebean.EbeanAspectV2.PrimaryKey}
 */
 @Value
 @Slf4j
@@ -19,10 +19,12 @@ public class EntityAspectIdentifier {
   long version;
 
   public static EntityAspectIdentifier fromEbean(EbeanAspectV2 ebeanAspectV2) {
-    return new EntityAspectIdentifier(ebeanAspectV2.getUrn(), ebeanAspectV2.getAspect(), ebeanAspectV2.getVersion());
+    return new EntityAspectIdentifier(
+        ebeanAspectV2.getUrn(), ebeanAspectV2.getAspect(), ebeanAspectV2.getVersion());
   }
 
   public static EntityAspectIdentifier fromCassandra(CassandraAspect cassandraAspect) {
-    return new EntityAspectIdentifier(cassandraAspect.getUrn(), cassandraAspect.getAspect(), cassandraAspect.getVersion());
+    return new EntityAspectIdentifier(
+        cassandraAspect.getUrn(), cassandraAspect.getAspect(), cassandraAspect.getVersion());
   }
 }
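Aside, illustrative only and not part of this patch: EntityAspect carries a stored row plus audit fields, while EntityAspectIdentifier is just its (urn, aspect, version) primary key; toAspectIdentifier() maps one to the other. A sketch with made-up field values:

    EntityAspect row = new EntityAspect();  // @NoArgsConstructor from Lombok
    row.setUrn("urn:li:corpuser:jdoe");     // made-up example urn
    row.setAspect("ownership");
    row.setVersion(0L);                     // 0 = latest by convention
    EntityAspectIdentifier key = row.toAspectIdentifier();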
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
index 57f88e31deea5..a333839416556 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
@@ -1,10 +1,10 @@
 package com.linkedin.metadata.entity;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.search.utils.BrowsePathUtils.*;
+import static com.linkedin.metadata.utils.PegasusUtils.*;
+
 import com.codahale.metrics.Timer;
-import com.linkedin.data.template.GetMode;
-import com.linkedin.data.template.SetMode;
-import com.linkedin.entity.client.SystemEntityClient;
-import com.linkedin.metadata.config.PreProcessHooks;
 import com.datahub.util.RecordUtils;
 import com.datahub.util.exception.ModelConversionException;
 import com.google.common.collect.ImmutableList;
@@ -22,7 +22,9 @@
 import com.linkedin.common.urn.VersionedUrnUtils;
 import com.linkedin.data.schema.TyperefDataSchema;
 import com.linkedin.data.template.DataTemplateUtil;
+import com.linkedin.data.template.GetMode;
 import com.linkedin.data.template.RecordTemplate;
+import com.linkedin.data.template.SetMode;
 import com.linkedin.data.template.StringArray;
 import com.linkedin.data.template.StringMap;
 import com.linkedin.data.template.UnionTemplate;
@@ -32,19 +34,21 @@
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.aspect.Aspect;
 import com.linkedin.metadata.aspect.VersionedAspect;
+import com.linkedin.metadata.config.PreProcessHooks;
 import com.linkedin.metadata.entity.ebean.EbeanAspectV2;
 import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl;
-import com.linkedin.metadata.entity.transactions.AbstractBatchItem;
 import com.linkedin.metadata.entity.ebean.transactions.PatchBatchItem;
 import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
 import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs;
 import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult;
+import com.linkedin.metadata.entity.transactions.AbstractBatchItem;
 import com.linkedin.metadata.entity.transactions.AspectsBatch;
 import com.linkedin.metadata.event.EventProducer;
 import com.linkedin.metadata.models.AspectSpec;
@@ -67,7 +71,7 @@
 import com.linkedin.mxe.SystemMetadata;
 import com.linkedin.util.Pair;
 import io.ebean.PagedList;
-
+import io.ebean.Transaction;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
@@ -84,51 +88,45 @@
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Consumer;
-import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import javax.persistence.EntityNotFoundException;
-
-import io.ebean.Transaction;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.search.utils.BrowsePathUtils.*;
-import static com.linkedin.metadata.utils.PegasusUtils.*;
-
-
 /**
- * A class specifying create, update, and read operations against metadata entities and aspects
- * by primary key (urn).
+ * A class specifying create, update, and read operations against metadata entities and aspects by
+ * primary key (urn).
 *
- * This interface is meant to abstract away the storage concerns of these pieces of metadata, permitting
- * any underlying storage system to be used in materializing GMS domain objects, which are implemented using Pegasus
- * {@link RecordTemplate}s.
+ * <p>This interface is meant to abstract away the storage concerns of these pieces of metadata,
+ * permitting any underlying storage system to be used in materializing GMS domain objects, which
+ * are implemented using Pegasus {@link RecordTemplate}s.
 *
- * Internal versioning semantics
- * =============================
+ * <p>Internal versioning semantics =============================
 *
- * The latest version of any aspect is set to 0 for efficient retrieval; in most cases the latest state of an aspect
- * will be the only fetched.
+ * <p>The latest version of any aspect is set to 0 for efficient retrieval; in most cases the latest
+ * state of an aspect will be the only fetched.
 *
- * As such, 0 is treated as a special number. Once an aspect is no longer the latest, versions will increment
- * monotonically, starting from 1. Thus, the second-to-last version of an aspect will be equal to total # versions
- * of the aspect - 1.
+ * <p>As such, 0 is treated as a special number. Once an aspect is no longer the latest, versions
+ * will increment monotonically, starting from 1. Thus, the second-to-last version of an aspect will
+ * be equal to total # versions of the aspect - 1.
 *
- * For example, if there are 5 instances of a single aspect, the latest will have version 0, and the second-to-last
- * will have version 4. The "true" latest version of an aspect is always equal to the highest stored version
- * of a given aspect + 1.
+ * <p>For example, if there are 5 instances of a single aspect, the latest will have version 0, and
+ * the second-to-last will have version 4. The "true" latest version of an aspect is always equal to
+ * the highest stored version of a given aspect + 1.
 *
- * Note that currently, implementations of this interface are responsible for producing Metadata Change Log on
- * ingestion using {@link #conditionallyProduceMCLAsync(RecordTemplate, SystemMetadata, RecordTemplate, SystemMetadata,
- * MetadataChangeProposal, Urn, AuditStamp, AspectSpec)}.
+ * <p>Note that currently, implementations of this interface are responsible for producing Metadata
+ * Change Log on ingestion using {@link #conditionallyProduceMCLAsync(RecordTemplate,
+ * SystemMetadata, RecordTemplate, SystemMetadata, MetadataChangeProposal, Urn, AuditStamp,
+ * AspectSpec)}.
 *
- * TODO: Consider whether we can abstract away virtual versioning semantics to subclasses of this class.
+ * <p>TODO: Consider whether we can abstract away virtual versioning semantics to subclasses of this
+ * class.
 */
 @Slf4j
 public class EntityServiceImpl implements EntityService {
@@ -137,7 +135,6 @@ public class EntityServiceImpl implements EntityService {
    * As described above, the latest version of an aspect should <b>always</b> take the value 0, with
    * monotonically increasing version incrementing as usual once the latest version is replaced.
    */
-
   private static final int DEFAULT_MAX_TRANSACTION_RETRY = 3;
 
   protected final AspectDao _aspectDao;
@@ -146,8 +143,7 @@ public class EntityServiceImpl implements EntityService {
   private final Map<String, Set<String>> _entityToValidAspects;
   private RetentionService _retentionService;
   private final Boolean _alwaysEmitChangeLog;
-  @Getter
-  private final UpdateIndicesService _updateIndicesService;
+  @Getter private final UpdateIndicesService _updateIndicesService;
   private final PreProcessHooks _preProcessHooks;
   protected static final int MAX_KEYS_PER_QUERY = 500;
 
@@ -160,17 +156,24 @@ public EntityServiceImpl(
       final boolean alwaysEmitChangeLog,
       final UpdateIndicesService updateIndicesService,
       final PreProcessHooks preProcessHooks) {
-    this(aspectDao, producer, entityRegistry, alwaysEmitChangeLog, updateIndicesService, preProcessHooks, DEFAULT_MAX_TRANSACTION_RETRY);
+    this(
+        aspectDao,
+        producer,
+        entityRegistry,
+        alwaysEmitChangeLog,
+        updateIndicesService,
+        preProcessHooks,
+        DEFAULT_MAX_TRANSACTION_RETRY);
   }
 
   public EntityServiceImpl(
-      @Nonnull final AspectDao aspectDao,
-      @Nonnull final EventProducer producer,
-      @Nonnull final EntityRegistry entityRegistry,
-      final boolean alwaysEmitChangeLog,
-      final UpdateIndicesService updateIndicesService,
-      final PreProcessHooks preProcessHooks,
-      final Integer retry) {
+      @Nonnull final AspectDao aspectDao,
+      @Nonnull final EventProducer producer,
+      @Nonnull final EntityRegistry entityRegistry,
+      final boolean alwaysEmitChangeLog,
+      final UpdateIndicesService updateIndicesService,
+      final PreProcessHooks preProcessHooks,
+      final Integer retry) {
 
     _aspectDao = aspectDao;
     _producer = producer;
@@ -188,8 +191,8 @@ public void setSystemEntityClient(SystemEntityClient systemEntityClient) {
   }
 
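Aside, illustrative only and not part of this patch: the versioning rule in the class Javadoc above, written out as arithmetic. With N stored copies of an aspect, version 0 is the latest, older copies run 1..N-1, and the "true" latest equals the highest stored version plus one:

    long storedCopies = 5;                    // assumed example from the Javadoc
    long latestVersion = 0;                   // version 0 is always the latest copy
    long secondToLast = storedCopies - 1;     // here: 4
    long trueLatest = (storedCopies - 1) + 1; // highest stored version + 1, here: 5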
   /**
-   * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided
-   * set of aspect names.
+   * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided set
+   * of aspect names.
    *
    * @param urns set of urns to fetch aspects for
    * @param aspectNames aspects to fetch for each urn in urns set
@@ -197,8 +200,7 @@ public void setSystemEntityClient(SystemEntityClient systemEntityClient) {
    */
   @Override
   public Map<Urn, List<RecordTemplate>> getLatestAspects(
-      @Nonnull final Set<Urn> urns,
-      @Nonnull final Set<String> aspectNames) {
+      @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) {
 
     Map<EntityAspectIdentifier, EntityAspect> batchGetResults = getLatestAspect(urns, aspectNames);
 
@@ -211,69 +213,88 @@ public Map<Urn, List<RecordTemplate>> getLatestAspects(
     }
 
     // Add "key" aspects for each urn. TODO: Replace this with a materialized key aspect.
-    urnToAspects.keySet().forEach(key -> {
-      final RecordTemplate keyAspect = EntityUtils.buildKeyAspect(_entityRegistry, key);
-      urnToAspects.get(key).add(keyAspect);
-    });
-
-    batchGetResults.forEach((key, aspectEntry) -> {
-      final Urn urn = toUrn(key.getUrn());
-      final String aspectName = key.getAspect();
-      // for now, don't add the key aspect here- we have already added it above
-      if (aspectName.equals(getKeyAspectName(urn))) {
-        return;
-      }
+    urnToAspects
+        .keySet()
+        .forEach(
+            key -> {
+              final RecordTemplate keyAspect = EntityUtils.buildKeyAspect(_entityRegistry, key);
+              urnToAspects.get(key).add(keyAspect);
+            });
+
+    batchGetResults.forEach(
+        (key, aspectEntry) -> {
+          final Urn urn = toUrn(key.getUrn());
+          final String aspectName = key.getAspect();
+          // for now, don't add the key aspect here- we have already added it above
+          if (aspectName.equals(getKeyAspectName(urn))) {
+            return;
+          }
 
-      final RecordTemplate aspectRecord =
-          EntityUtils.toAspectRecord(urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry());
-      urnToAspects.putIfAbsent(urn, new ArrayList<>());
-      urnToAspects.get(urn).add(aspectRecord);
-    });
+          final RecordTemplate aspectRecord =
+              EntityUtils.toAspectRecord(
+                  urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry());
+          urnToAspects.putIfAbsent(urn, new ArrayList<>());
+          urnToAspects.get(urn).add(aspectRecord);
+        });
 
     return urnToAspects;
   }
 
   @Nonnull
   @Override
-  public Map<String, RecordTemplate> getLatestAspectsForUrn(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) {
-    Map<EntityAspectIdentifier, EntityAspect> batchGetResults = getLatestAspect(new HashSet<>(Arrays.asList(urn)), aspectNames);
+  public Map<String, RecordTemplate> getLatestAspectsForUrn(
+      @Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) {
+    Map<EntityAspectIdentifier, EntityAspect> batchGetResults =
+        getLatestAspect(new HashSet<>(Arrays.asList(urn)), aspectNames);
 
     final Map<String, RecordTemplate> result = new HashMap<>();
-    batchGetResults.forEach((key, aspectEntry) -> {
-      final String aspectName = key.getAspect();
-      final RecordTemplate aspectRecord = EntityUtils.toAspectRecord(urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry());
-      result.put(aspectName, aspectRecord);
-    });
+    batchGetResults.forEach(
+        (key, aspectEntry) -> {
+          final String aspectName = key.getAspect();
+          final RecordTemplate aspectRecord =
+              EntityUtils.toAspectRecord(
+                  urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry());
+          result.put(aspectName, aspectRecord);
+        });
     return result;
   }
 
   /**
    * Retrieves an aspect having a specific {@link Urn}, name, & version.
    *
-   * Note that once we drop support for legacy aspect-specific resources,
-   * we should make this a protected method. Only visible for backwards compatibility.
+   * <p>Note that once we drop support for legacy aspect-specific resources, we should make this a
+   * protected method. Only visible for backwards compatibility.
    *
    * @param urn an urn associated with the requested aspect
    * @param aspectName name of the aspect requested
    * @param version specific version of the aspect being requests
-   * @return the {@link RecordTemplate} representation of the requested aspect object, or null if one cannot be found
+   * @return the {@link RecordTemplate} representation of the requested aspect object, or null if
+   *     one cannot be found
    */
   @Nullable
   @Override
-  public RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) {
+  public RecordTemplate getAspect(
+      @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) {
 
-    log.debug("Invoked getAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version);
+    log.debug(
+        "Invoked getAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version);
 
     version = calculateVersionNumber(urn, aspectName, version);
-    final EntityAspectIdentifier primaryKey = new EntityAspectIdentifier(urn.toString(), aspectName, version);
-    final Optional<EntityAspect> maybeAspect = Optional.ofNullable(_aspectDao.getAspect(primaryKey));
-    return maybeAspect.map(
-        aspect -> EntityUtils.toAspectRecord(urn, aspectName, aspect.getMetadata(), getEntityRegistry())).orElse(null);
+    final EntityAspectIdentifier primaryKey =
+        new EntityAspectIdentifier(urn.toString(), aspectName, version);
+    final Optional<EntityAspect> maybeAspect =
+        Optional.ofNullable(_aspectDao.getAspect(primaryKey));
+    return maybeAspect
+        .map(
+            aspect ->
+                EntityUtils.toAspectRecord(
+                    urn, aspectName, aspect.getMetadata(), getEntityRegistry()))
+        .orElse(null);
   }
 
   /**
-   * Retrieves the latest aspects for the given urn as dynamic aspect objects
-   * (Without having to define union objects)
+   * Retrieves the latest aspects for the given urn as dynamic aspect objects (Without having to
+   * define union objects)
    *
    * @param entityName name of the entity to fetch
    * @param urn urn of entity to fetch
@@ -285,13 +306,14 @@ public RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String as
   public EntityResponse getEntityV2(
       @Nonnull final String entityName,
       @Nonnull final Urn urn,
-      @Nonnull final Set<String> aspectNames) throws URISyntaxException {
+      @Nonnull final Set<String> aspectNames)
+      throws URISyntaxException {
     return getEntitiesV2(entityName, Collections.singleton(urn), aspectNames).get(urn);
   }
 
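Aside, illustrative only and not part of this patch: the two read shapes above side by side, the raw aspect read and the dynamic entity read. A sketch assuming an EntityServiceImpl bound as entityService; checked-exception handling for getEntityV2 is elided and the urn is made up:

    Urn urn = UrnUtils.getUrn("urn:li:corpuser:jdoe");
    RecordTemplate ownership = entityService.getAspect(urn, "ownership", 0L); // 0 = latest
    EntityResponse response =
        entityService.getEntityV2("corpuser", urn, Set.of("ownership", "corpUserInfo"));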
   /**
-   * Retrieves the latest aspects for the given set of urns as dynamic aspect objects
-   * (Without having to define union objects)
+   * Retrieves the latest aspects for the given set of urns as dynamic aspect objects (Without
+   * having to define union objects)
    *
    * @param entityName name of the entity to fetch
    * @param urns set of urns to fetch
@@ -302,29 +324,31 @@ public EntityResponse getEntityV2(
   public Map<Urn, EntityResponse> getEntitiesV2(
       @Nonnull final String entityName,
       @Nonnull final Set<Urn> urns,
-      @Nonnull final Set<String> aspectNames) throws URISyntaxException {
-    return getLatestEnvelopedAspects(entityName, urns, aspectNames)
-        .entrySet()
-        .stream()
-        .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue())));
+      @Nonnull final Set<String> aspectNames)
+      throws URISyntaxException {
+    return getLatestEnvelopedAspects(entityName, urns, aspectNames).entrySet().stream()
+        .collect(
+            Collectors.toMap(
+                Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue())));
   }
 
   /**
-   * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects
-   * (Without having to define union objects)
+   * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects (Without
+   * having to define union objects)
    *
-   * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string
+   * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized
+   *     string
    * @param aspectNames set of aspects to fetch
    * @return a map of {@link Urn} to {@link Entity} object
    */
   @Override
   public Map<Urn, EntityResponse> getEntitiesVersionedV2(
-      @Nonnull final Set<VersionedUrn> versionedUrns,
-      @Nonnull final Set<String> aspectNames) throws URISyntaxException {
-    return getVersionedEnvelopedAspects(versionedUrns, aspectNames)
-        .entrySet()
-        .stream()
-        .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue())));
+      @Nonnull final Set<VersionedUrn> versionedUrns, @Nonnull final Set<String> aspectNames)
+      throws URISyntaxException {
+    return getVersionedEnvelopedAspects(versionedUrns, aspectNames).entrySet().stream()
+        .collect(
+            Collectors.toMap(
+                Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue())));
   }
 
   /**
@@ -338,16 +362,21 @@ public Map<Urn, EntityResponse> getEntitiesVersionedV2(
   @Override
   public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects(
       // TODO: entityName is unused, can we remove this as a param?
-      @Nonnull String entityName,
-      @Nonnull Set<Urn> urns,
-      @Nonnull Set<String> aspectNames) throws URISyntaxException {
-
-    final Set<EntityAspectIdentifier> dbKeys = urns.stream()
-        .map(urn -> aspectNames.stream()
-            .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION))
-            .collect(Collectors.toList()))
-        .flatMap(List::stream)
-        .collect(Collectors.toSet());
+      @Nonnull String entityName, @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames)
+      throws URISyntaxException {
+
+    final Set<EntityAspectIdentifier> dbKeys =
+        urns.stream()
+            .map(
+                urn ->
+                    aspectNames.stream()
+                        .map(
+                            aspectName ->
+                                new EntityAspectIdentifier(
+                                    urn.toString(), aspectName, ASPECT_LATEST_VERSION))
+                        .collect(Collectors.toList()))
+            .flatMap(List::stream)
+            .collect(Collectors.toSet());
 
     return getCorrespondingAspects(dbKeys, urns);
   }
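Aside, illustrative only and not part of this patch: getLatestEnvelopedAspects fans each (urn, aspectName) pair out into an EntityAspectIdentifier pinned to ASPECT_LATEST_VERSION, exactly as in the body above. The same fan-out by hand, with a made-up urn and the latest-version constant written as 0L:

    String urn = "urn:li:corpuser:jdoe";
    Set<EntityAspectIdentifier> dbKeys =
        Set.of(
            new EntityAspectIdentifier(urn, "ownership", 0L), // ASPECT_LATEST_VERSION
            new EntityAspectIdentifier(urn, "status", 0L));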
@@ -355,61 +384,86 @@ public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects(
   /**
    * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects
    *
-   * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string
+   * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized
+   *     string
    * @param aspectNames set of aspects to fetch
    * @return a map of {@link Urn} to {@link EnvelopedAspect} object
    */
   @Override
   public Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects(
-      @Nonnull Set<VersionedUrn> versionedUrns,
-      @Nonnull Set<String> aspectNames) throws URISyntaxException {
+      @Nonnull Set<VersionedUrn> versionedUrns, @Nonnull Set<String> aspectNames)
+      throws URISyntaxException {
 
-    Map<String, Map<String, Long>> urnAspectVersionMap = versionedUrns.stream()
-        .collect(Collectors.toMap(versionedUrn -> versionedUrn.getUrn().toString(),
-            versionedUrn -> VersionedUrnUtils.convertVersionStamp(versionedUrn.getVersionStamp())));
+    Map<String, Map<String, Long>> urnAspectVersionMap =
+        versionedUrns.stream()
+            .collect(
+                Collectors.toMap(
+                    versionedUrn -> versionedUrn.getUrn().toString(),
+                    versionedUrn ->
+                        VersionedUrnUtils.convertVersionStamp(versionedUrn.getVersionStamp())));
 
     // Cover full/partial versionStamp
-    final Set<EntityAspectIdentifier> dbKeys = urnAspectVersionMap.entrySet().stream()
-        .filter(entry -> !entry.getValue().isEmpty())
-        .map(entry -> aspectNames.stream()
-            .filter(aspectName -> entry.getValue().containsKey(aspectName))
-            .map(aspectName -> new EntityAspectIdentifier(entry.getKey(), aspectName,
-                entry.getValue().get(aspectName)))
-            .collect(Collectors.toList()))
-        .flatMap(List::stream)
-        .collect(Collectors.toSet());
+    final Set<EntityAspectIdentifier> dbKeys =
+        urnAspectVersionMap.entrySet().stream()
+            .filter(entry -> !entry.getValue().isEmpty())
+            .map(
+                entry ->
+                    aspectNames.stream()
+                        .filter(aspectName -> entry.getValue().containsKey(aspectName))
+                        .map(
+                            aspectName ->
+                                new EntityAspectIdentifier(
+                                    entry.getKey(), aspectName, entry.getValue().get(aspectName)))
+                        .collect(Collectors.toList()))
+            .flatMap(List::stream)
+            .collect(Collectors.toSet());
 
     // Cover empty versionStamp
-    dbKeys.addAll(urnAspectVersionMap.entrySet().stream()
-        .filter(entry -> entry.getValue().isEmpty())
-        .map(entry -> aspectNames.stream()
-            .map(aspectName -> new EntityAspectIdentifier(entry.getKey(), aspectName, 0L))
-            .collect(Collectors.toList()))
-        .flatMap(List::stream)
-        .collect(Collectors.toSet()));
-
-    return getCorrespondingAspects(dbKeys, versionedUrns.stream()
-        .map(versionedUrn -> versionedUrn.getUrn().toString())
-        .map(UrnUtils::getUrn).collect(Collectors.toSet()));
-  }
-
-  private Map<Urn, List<EnvelopedAspect>> getCorrespondingAspects(Set<EntityAspectIdentifier> dbKeys, Set<Urn> urns) {
-
-    final Map<EntityAspectIdentifier, EnvelopedAspect> envelopedAspectMap = getEnvelopedAspects(dbKeys);
+    dbKeys.addAll(
+        urnAspectVersionMap.entrySet().stream()
+            .filter(entry -> entry.getValue().isEmpty())
+            .map(
+                entry ->
+                    aspectNames.stream()
+                        .map(
+                            aspectName ->
+                                new EntityAspectIdentifier(entry.getKey(), aspectName, 0L))
+                        .collect(Collectors.toList()))
+            .flatMap(List::stream)
+            .collect(Collectors.toSet()));
+
+    return getCorrespondingAspects(
+        dbKeys,
+        versionedUrns.stream()
+            .map(versionedUrn -> versionedUrn.getUrn().toString())
+            .map(UrnUtils::getUrn)
+            .collect(Collectors.toSet()));
+  }
+
+  private Map<Urn, List<EnvelopedAspect>> getCorrespondingAspects(
+      Set<EntityAspectIdentifier> dbKeys, Set<Urn> urns) {
+
+    final Map<EntityAspectIdentifier, EnvelopedAspect> envelopedAspectMap =
+        getEnvelopedAspects(dbKeys);
 
     // Group result by Urn
-    final Map<String, List<EnvelopedAspect>> urnToAspects = envelopedAspectMap.entrySet()
-        .stream()
-        .collect(Collectors.groupingBy(entry -> entry.getKey().getUrn(),
-            Collectors.mapping(Map.Entry::getValue, Collectors.toList())));
+    final Map<String, List<EnvelopedAspect>> urnToAspects =
+        envelopedAspectMap.entrySet().stream()
+            .collect(
+                Collectors.groupingBy(
+                    entry -> entry.getKey().getUrn(),
+                    Collectors.mapping(Map.Entry::getValue, Collectors.toList())));
 
     final Map<Urn, List<EnvelopedAspect>> result = new HashMap<>();
     for (Urn urn : urns) {
-      List<EnvelopedAspect> aspects = urnToAspects.getOrDefault(urn.toString(), Collections.emptyList());
+      List<EnvelopedAspect> aspects =
+          urnToAspects.getOrDefault(urn.toString(), Collections.emptyList());
       EnvelopedAspect keyAspect = getKeyEnvelopedAspect(urn);
       // Add key aspect if it does not exist in the returned aspects
-      if (aspects.isEmpty() || aspects.stream().noneMatch(aspect -> keyAspect.getName().equals(aspect.getName()))) {
-        result.put(urn, ImmutableList.<EnvelopedAspect>builder().addAll(aspects).add(keyAspect).build());
+      if (aspects.isEmpty()
+          || aspects.stream().noneMatch(aspect -> keyAspect.getName().equals(aspect.getName()))) {
+        result.put(
+            urn, ImmutableList.<EnvelopedAspect>builder().addAll(aspects).add(keyAspect).build());
       } else {
         result.put(urn, aspects);
       }
@@ -427,33 +481,42 @@ private Map<Urn, List<EnvelopedAspect>> getCorrespondingAspects(Set<EntityAspect
    */
   @Override
   public EnvelopedAspect getLatestEnvelopedAspect(
-      @Nonnull final String entityName,
-      @Nonnull final Urn urn,
-      @Nonnull final String aspectName) throws Exception {
-    return getLatestEnvelopedAspects(entityName, ImmutableSet.of(urn), ImmutableSet.of(aspectName)).getOrDefault(urn, Collections.emptyList())
+      @Nonnull final String entityName, @Nonnull final Urn urn, @Nonnull final String aspectName)
+      throws Exception {
+    return getLatestEnvelopedAspects(entityName, ImmutableSet.of(urn), ImmutableSet.of(aspectName))
+        .getOrDefault(urn, Collections.emptyList())
        .stream()
        .filter(envelopedAspect -> envelopedAspect.getName().equals(aspectName))
        .findFirst()
        .orElse(null);
   }
 
-  /**
-   * Retrieves an {@link VersionedAspect}, or null if one cannot be found.
-   */
+  /** Retrieves an {@link VersionedAspect}, or null if one cannot be found. */
   @Nullable
   @Override
-  public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version) {
+  public VersionedAspect getVersionedAspect(
+      @Nonnull Urn urn, @Nonnull String aspectName, long version) {
 
-    log.debug("Invoked getVersionedAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version);
+    log.debug(
+        "Invoked getVersionedAspect with urn: {}, aspectName: {}, version: {}",
+        urn,
+        aspectName,
+        version);
 
     VersionedAspect result = new VersionedAspect();
 
     version = calculateVersionNumber(urn, aspectName, version);
 
-    final EntityAspectIdentifier primaryKey = new EntityAspectIdentifier(urn.toString(), aspectName, version);
-    final Optional<EntityAspect> maybeAspect = Optional.ofNullable(_aspectDao.getAspect(primaryKey));
+    final EntityAspectIdentifier primaryKey =
+        new EntityAspectIdentifier(urn.toString(), aspectName, version);
+    final Optional<EntityAspect> maybeAspect =
+        Optional.ofNullable(_aspectDao.getAspect(primaryKey));
     RecordTemplate aspectRecord =
-        maybeAspect.map(aspect -> EntityUtils.toAspectRecord(urn, aspectName, aspect.getMetadata(), getEntityRegistry()))
+        maybeAspect
+            .map(
+                aspect ->
+                    EntityUtils.toAspectRecord(
+                        urn, aspectName, aspect.getMetadata(), getEntityRegistry()))
             .orElse(null);
 
     if (aspectRecord == null) {
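Aside, illustrative only and not part of this patch: a null-safe read through getVersionedAspect, whose requested version is first normalized by calculateVersionNumber before the key lookup above. entityService and the urn are stand-ins:

    Urn urn = UrnUtils.getUrn("urn:li:corpuser:jdoe");
    VersionedAspect va = entityService.getVersionedAspect(urn, "ownership", 0L);
    if (va == null) {
      // No such aspect is stored for this urn; callers must handle the null result.
    }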
@@ -472,8 +535,8 @@ public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspe
   /**
    * Retrieves a list of all aspects belonging to an entity of a particular type, sorted by urn.
    *
-   * Note that once we drop support for legacy 'getAllDataPlatforms' endpoint,
-   * we can drop support for this unless otherwise required. Only visible for backwards compatibility.
+   * <p>Note that once we drop support for legacy 'getAllDataPlatforms' endpoint, we can drop
+   * support for this unless otherwise required. Only visible for backwards compatibility.
    *
    * @param entityName name of the entity type the aspect belongs to, e.g. 'dataset'
    * @param aspectName name of the aspect requested, e.g. 'ownership'
@@ -489,25 +552,39 @@ public ListResult<RecordTemplate> listLatestAspects(
       final int start,
       final int count) {
 
-    log.debug("Invoked listLatestAspects with entityName: {}, aspectName: {}, start: {}, count: {}", entityName,
-        aspectName, start, count);
+    log.debug(
+        "Invoked listLatestAspects with entityName: {}, aspectName: {}, start: {}, count: {}",
+        entityName,
+        aspectName,
+        start,
+        count);
 
     final ListResult<String> aspectMetadataList =
         _aspectDao.listLatestAspectMetadata(entityName, aspectName, start, count);
 
     final List<RecordTemplate> aspects = new ArrayList<>();
     for (int i = 0; i < aspectMetadataList.getValues().size(); i++) {
-      aspects.add(EntityUtils.toAspectRecord(aspectMetadataList.getMetadata().getExtraInfos().get(i).getUrn(), aspectName,
-          aspectMetadataList.getValues().get(i), getEntityRegistry()));
+      aspects.add(
+          EntityUtils.toAspectRecord(
+              aspectMetadataList.getMetadata().getExtraInfos().get(i).getUrn(),
+              aspectName,
+              aspectMetadataList.getValues().get(i),
+              getEntityRegistry()));
     }
 
-    return new ListResult<>(aspects, aspectMetadataList.getMetadata(), aspectMetadataList.getNextStart(),
-        aspectMetadataList.isHasNext(), aspectMetadataList.getTotalCount(), aspectMetadataList.getTotalPageCount(),
+    return new ListResult<>(
+        aspects,
+        aspectMetadataList.getMetadata(),
+        aspectMetadataList.getNextStart(),
+        aspectMetadataList.isHasNext(),
+        aspectMetadataList.getTotalCount(),
+        aspectMetadataList.getTotalPageCount(),
         aspectMetadataList.getPageSize());
   }
 
   /**
    * Common batch-like pattern used primarily in tests.
+   *
    * @param entityUrn the entity urn
    * @param pairList list of aspects in pairs of aspect name and record template
    * @param auditStamp audit stamp
@@ -515,38 +592,46 @@ public ListResult<RecordTemplate> listLatestAspects(
    * @return update result
    */
   @Override
-  public List<UpdateAspectResult> ingestAspects(@Nonnull Urn entityUrn,
-      List<Pair<String, RecordTemplate>> pairList,
-      @Nonnull final AuditStamp auditStamp,
-      SystemMetadata systemMetadata) {
-    List<? extends AbstractBatchItem> items = pairList.stream()
-        .map(pair -> UpsertBatchItem.builder()
-            .urn(entityUrn)
-            .aspectName(pair.getKey())
-            .aspect(pair.getValue())
-            .systemMetadata(systemMetadata)
-            .build(_entityRegistry))
+  public List<UpdateAspectResult> ingestAspects(
+      @Nonnull Urn entityUrn,
+      List<Pair<String, RecordTemplate>> pairList,
+      @Nonnull final AuditStamp auditStamp,
+      SystemMetadata systemMetadata) {
+    List<? extends AbstractBatchItem> items =
+        pairList.stream()
+            .map(
+                pair ->
+                    UpsertBatchItem.builder()
+                        .urn(entityUrn)
+                        .aspectName(pair.getKey())
+                        .aspect(pair.getValue())
+                        .systemMetadata(systemMetadata)
+                        .build(_entityRegistry))
             .collect(Collectors.toList());
     return ingestAspects(AspectsBatchImpl.builder().items(items).build(), auditStamp, true, true);
   }
 
   /**
-   * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataChangeLog}.
+   * Ingests (inserts) a new version of an entity aspect & emits a {@link
+   * com.linkedin.mxe.MetadataChangeLog}.
    *
    * @param aspectsBatch aspects to write
    * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time
-   * @param emitMCL whether a {@link com.linkedin.mxe.MetadataChangeLog} should be emitted in correspondence upon
-   *                successful update
+   * @param emitMCL whether a {@link com.linkedin.mxe.MetadataChangeLog} should be emitted in
+   *     correspondence upon successful update
    * @return the {@link RecordTemplate} representation of the written aspect object
    */
   @Override
-  public List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspectsBatch,
-      @Nonnull final AuditStamp auditStamp,
-      boolean emitMCL,
-      boolean overwrite) {
-
-    Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time();
-    List<UpdateAspectResult> ingestResults = ingestAspectsToLocalDB(aspectsBatch, auditStamp, overwrite);
+  public List<UpdateAspectResult> ingestAspects(
+      @Nonnull final AspectsBatch aspectsBatch,
+      @Nonnull final AuditStamp auditStamp,
+      boolean emitMCL,
+      boolean overwrite) {
+
+    Timer.Context ingestToLocalDBTimer =
+        MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time();
+    List<UpdateAspectResult> ingestResults =
+        ingestAspectsToLocalDB(aspectsBatch, auditStamp, overwrite);
     List<UpdateAspectResult> mclResults = emitMCL(ingestResults, emitMCL);
     ingestToLocalDBTimer.stop();
 
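Aside, illustrative only and not part of this patch: the test-oriented pair-list entry point above in use, ingesting two aspects for one urn in a single batch. entityService, the aspect records, auditStamp and systemMetadata are all stand-ins:

    Urn urn = UrnUtils.getUrn("urn:li:corpuser:jdoe");
    List<Pair<String, RecordTemplate>> pairs =
        List.of(Pair.of("ownership", ownershipRecord), Pair.of("status", statusRecord));
    entityService.ingestAspects(urn, pairs, auditStamp, systemMetadata);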
@@ -554,135 +639,197 @@ public List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspect
   }
 
   /**
-   * Checks whether there is an actual update to the aspect by applying the updateLambda
-   * If there is an update, push the new version into the local DB.
-   * Otherwise, do not push the new version, but just update the system metadata.
+   * Checks whether there is an actual update to the aspect by applying the updateLambda If there is
+   * an update, push the new version into the local DB. Otherwise, do not push the new version, but
+   * just update the system metadata.
    *
-   * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of the aspect being
-   *                     inserted, and a function to apply to the latest version of the aspect to get the updated version
+   * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of
+   *     the aspect being inserted, and a function to apply to the latest version of the aspect to
+   *     get the updated version
    * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time
    * @return Details about the new and old version of the aspect
    */
   @Nonnull
-  private List<UpdateAspectResult> ingestAspectsToLocalDB(@Nonnull final AspectsBatch aspectsBatch,
-      @Nonnull final AuditStamp auditStamp,
-      boolean overwrite) {
+  private List<UpdateAspectResult> ingestAspectsToLocalDB(
+      @Nonnull final AspectsBatch aspectsBatch,
+      @Nonnull final AuditStamp auditStamp,
+      boolean overwrite) {
 
     if (aspectsBatch.containsDuplicateAspects()) {
       log.warn(String.format("Batch contains duplicates: %s", aspectsBatch));
     }
 
-    return _aspectDao.runInTransactionWithRetry((tx) -> {
-      // Read before write is unfortunate, however batch it
-      Map<String, Set<String>> urnAspects = aspectsBatch.getUrnAspectsMap();
-      // read #1
-      Map<String, Map<String, EntityAspect>> latestAspects = _aspectDao.getLatestAspects(urnAspects);
-      // read #2
-      Map<String, Map<String, Long>> nextVersions = _aspectDao.getNextVersions(urnAspects);
-
-      List<UpsertBatchItem> items = aspectsBatch.getItems().stream()
-          .map(item -> {
-            if (item instanceof UpsertBatchItem) {
-              return (UpsertBatchItem) item;
-            } else {
-              // patch to upsert
-              PatchBatchItem patchBatchItem = (PatchBatchItem) item;
-              final String urnStr = patchBatchItem.getUrn().toString();
-              final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(patchBatchItem.getAspectName());
-              final RecordTemplate currentValue = latest != null
-                  ? EntityUtils.toAspectRecord(patchBatchItem.getUrn(), patchBatchItem.getAspectName(), latest.getMetadata(), _entityRegistry) : null;
-              return patchBatchItem.applyPatch(_entityRegistry, currentValue);
-            }
-          })
-          .collect(Collectors.toList());
-
-      // Database Upsert results
-      List<UpdateAspectResult> upsertResults = items.stream()
-          .map(item -> {
-            final String urnStr = item.getUrn().toString();
-            final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(item.getAspectName());
-            final long nextVersion = nextVersions.getOrDefault(urnStr, Map.of()).getOrDefault(item.getAspectName(), 0L);
-
-            final UpdateAspectResult result;
-            if (overwrite || latest == null) {
-              result = ingestAspectToLocalDB(tx, item.getUrn(), item.getAspectName(), item.getAspect(),
-                  auditStamp, item.getSystemMetadata(), latest, nextVersion).toBuilder().request(item).build();
-
-              // support inner-batch upserts
-              latestAspects.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), item.toLatestEntityAspect(auditStamp));
-              nextVersions.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), nextVersion + 1);
-            } else {
-              RecordTemplate oldValue = EntityUtils.toAspectRecord(item.getUrn().getEntityType(), item.getAspectName(),
                  latest.getMetadata(), getEntityRegistry());
-              SystemMetadata oldMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata());
-              result = UpdateAspectResult.builder()
-                  .urn(item.getUrn())
-                  .request(item)
-                  .oldValue(oldValue)
-                  .newValue(oldValue)
-                  .oldSystemMetadata(oldMetadata)
-                  .newSystemMetadata(oldMetadata)
-                  .operation(MetadataAuditOperation.UPDATE)
-                  .auditStamp(auditStamp)
-                  .maxVersion(latest.getVersion())
-                  .build();
-            }
-
-            return result;
-          }).collect(Collectors.toList());
-
-      // commit upserts prior to retention or kafka send, if supported by impl
-      if (tx != null) {
-        tx.commitAndContinue();
-      }
+    return _aspectDao.runInTransactionWithRetry(
+        (tx) -> {
+          // Read before write is unfortunate, however batch it
+          Map<String, Set<String>> urnAspects = aspectsBatch.getUrnAspectsMap();
+          // read #1
+          Map<String, Map<String, EntityAspect>> latestAspects =
+              _aspectDao.getLatestAspects(urnAspects);
+          // read #2
+          Map<String, Map<String, Long>> nextVersions = _aspectDao.getNextVersions(urnAspects);
+
+          List<UpsertBatchItem> items =
+              aspectsBatch.getItems().stream()
+                  .map(
+                      item -> {
+                        if (item instanceof UpsertBatchItem) {
+                          return (UpsertBatchItem) item;
+                        } else {
+                          // patch to upsert
+                          PatchBatchItem patchBatchItem = (PatchBatchItem) item;
+                          final String urnStr = patchBatchItem.getUrn().toString();
+                          final EntityAspect latest =
+                              latestAspects
+                                  .getOrDefault(urnStr, Map.of())
+                                  .get(patchBatchItem.getAspectName());
+                          final RecordTemplate currentValue =
+                              latest != null
+                                  ? EntityUtils.toAspectRecord(
+                                      patchBatchItem.getUrn(),
+                                      patchBatchItem.getAspectName(),
+                                      latest.getMetadata(),
+                                      _entityRegistry)
+                                  : null;
+                          return patchBatchItem.applyPatch(_entityRegistry, currentValue);
+                        }
+                      })
+                  .collect(Collectors.toList());
+
+          // Database Upsert results
+          List<UpdateAspectResult> upsertResults =
+              items.stream()
+                  .map(
+                      item -> {
+                        final String urnStr = item.getUrn().toString();
+                        final EntityAspect latest =
+                            latestAspects.getOrDefault(urnStr, Map.of()).get(item.getAspectName());
+                        final long nextVersion =
+                            nextVersions
+                                .getOrDefault(urnStr, Map.of())
+                                .getOrDefault(item.getAspectName(), 0L);
+
+                        final UpdateAspectResult result;
+                        if (overwrite || latest == null) {
+                          result =
+                              ingestAspectToLocalDB(
+                                      tx,
+                                      item.getUrn(),
+                                      item.getAspectName(),
+                                      item.getAspect(),
+                                      auditStamp,
+                                      item.getSystemMetadata(),
+                                      latest,
+                                      nextVersion)
+                                  .toBuilder()
+                                  .request(item)
+                                  .build();
+
+                          // support inner-batch upserts
+                          latestAspects
+                              .computeIfAbsent(urnStr, key -> new HashMap<>())
+                              .put(item.getAspectName(), item.toLatestEntityAspect(auditStamp));
+                          nextVersions
+                              .computeIfAbsent(urnStr, key -> new HashMap<>())
+                              .put(item.getAspectName(), nextVersion + 1);
+                        } else {
+                          RecordTemplate oldValue =
+                              EntityUtils.toAspectRecord(
+                                  item.getUrn().getEntityType(),
+                                  item.getAspectName(),
+                                  latest.getMetadata(),
+                                  getEntityRegistry());
+                          SystemMetadata oldMetadata =
+                              EntityUtils.parseSystemMetadata(latest.getSystemMetadata());
+                          result =
+                              UpdateAspectResult.builder()
+                                  .urn(item.getUrn())
+                                  .request(item)
+                                  .oldValue(oldValue)
+                                  .newValue(oldValue)
+                                  .oldSystemMetadata(oldMetadata)
+                                  .newSystemMetadata(oldMetadata)
+                                  .operation(MetadataAuditOperation.UPDATE)
+                                  .auditStamp(auditStamp)
+                                  .maxVersion(latest.getVersion())
+                                  .build();
+                        }
+
+                        return result;
+                      })
+                  .collect(Collectors.toList());
+
+          // commit upserts prior to retention or kafka send, if supported by impl
+          if (tx != null) {
+            tx.commitAndContinue();
+          }
 
-      // Retention optimization and tx
-      if (_retentionService != null) {
-        List<RetentionService.RetentionContext> retentionBatch = upsertResults.stream()
-            // Only consider retention when there was a previous version
-            .filter(result -> latestAspects.containsKey(result.getUrn().toString())
-                && latestAspects.get(result.getUrn().toString()).containsKey(result.getRequest().getAspectName()))
-            .filter(result -> {
-              RecordTemplate oldAspect = result.getOldValue();
-              RecordTemplate newAspect = result.getNewValue();
-              // Apply retention policies if there was an update to existing aspect value
-              return oldAspect != newAspect && oldAspect != null && _retentionService != null;
-            })
-            .map(result -> RetentionService.RetentionContext.builder()
-                .urn(result.getUrn())
-                .aspectName(result.getRequest().getAspectName())
-                .maxVersion(Optional.of(result.getMaxVersion()))
-                .build())
-            .collect(Collectors.toList());
-        _retentionService.applyRetentionWithPolicyDefaults(retentionBatch);
-      } else {
-        log.warn("Retention service is missing!");
-      }
-
-      return upsertResults;
-    }, aspectsBatch, DEFAULT_MAX_TRANSACTION_RETRY);
+          // Retention optimization and tx
+          if (_retentionService != null) {
+            List<RetentionService.RetentionContext> retentionBatch =
+                upsertResults.stream()
+                    // Only consider retention when there was a previous version
+                    .filter(
+                        result ->
+                            latestAspects.containsKey(result.getUrn().toString())
+                                && latestAspects
+                                    .get(result.getUrn().toString())
+                                    .containsKey(result.getRequest().getAspectName()))
+                    .filter(
+                        result -> {
+                          RecordTemplate oldAspect = result.getOldValue();
+                          RecordTemplate newAspect = result.getNewValue();
+                          // Apply retention policies if there was an update to existing aspect
+                          // value
+                          return oldAspect != newAspect
+                              && oldAspect != null
+                              && _retentionService != null;
+                        })
+                    .map(
+                        result ->
+                            RetentionService.RetentionContext.builder()
+                                .urn(result.getUrn())
+                                .aspectName(result.getRequest().getAspectName())
+                                .maxVersion(Optional.of(result.getMaxVersion()))
+                                .build())
+                    .collect(Collectors.toList());
+            _retentionService.applyRetentionWithPolicyDefaults(retentionBatch);
+          } else {
+            log.warn("Retention service is missing!");
+          }
 
+          return upsertResults;
+        },
+        aspectsBatch,
+        DEFAULT_MAX_TRANSACTION_RETRY);
   }
 
   @Nonnull
   private List<UpdateAspectResult> emitMCL(List<UpdateAspectResult> sqlResults, boolean emitMCL) {
-    List<UpdateAspectResult> withEmitMCL = sqlResults.stream()
+    List<UpdateAspectResult> withEmitMCL =
+        sqlResults.stream()
             .map(result -> emitMCL ? conditionallyProduceMCLAsync(result) : result)
             .collect(Collectors.toList());
 
     // join futures messages, capture error state
-    List<Pair<Boolean, UpdateAspectResult>> statusPairs = withEmitMCL.stream()
+    List<Pair<Boolean, UpdateAspectResult>> statusPairs =
+        withEmitMCL.stream()
             .filter(result -> result.getMclFuture() != null)
-            .map(result -> {
-              try {
-                result.getMclFuture().get();
-                return Pair.of(true, result);
-              } catch (InterruptedException | ExecutionException e) {
-                return Pair.of(false, result);
-              }
-            }).collect(Collectors.toList());
+            .map(
+                result -> {
+                  try {
+                    result.getMclFuture().get();
+                    return Pair.of(true, result);
+                  } catch (InterruptedException | ExecutionException e) {
+                    return Pair.of(false, result);
+                  }
+                })
+            .collect(Collectors.toList());
 
     if (statusPairs.stream().anyMatch(p -> !p.getFirst())) {
-      log.error("Failed to produce MCLs: {}", statusPairs.stream()
+      log.error(
+          "Failed to produce MCLs: {}",
+          statusPairs.stream()
               .filter(p -> !p.getFirst())
               .map(Pair::getValue)
               .map(v -> v.getRequest().toString())
@@ -695,12 +842,14 @@ private List<UpdateAspectResult> emitMCL(List<UpdateAspectResult> sqlResults, bo
   }
 
* * @param urn an urn associated with the new aspect * @param aspectName name of the aspect being inserted @@ -711,15 +860,22 @@ private List<UpdateAspectResult> emitMCL(List<UpdateAspectResult> sqlResults, bo */ @Nullable @Override - public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, - @Nonnull String aspectName, - @Nonnull RecordTemplate newValue, - @Nonnull AuditStamp auditStamp, - @Nonnull SystemMetadata systemMetadata) { - log.debug("Invoked ingestAspectIfNotPresent with urn: {}, aspectName: {}, newValue: {}", urn, aspectName, newValue); - - AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() - .one(UpsertBatchItem.builder() + public RecordTemplate ingestAspectIfNotPresent( + @Nonnull Urn urn, + @Nonnull String aspectName, + @Nonnull RecordTemplate newValue, + @Nonnull AuditStamp auditStamp, + @Nonnull SystemMetadata systemMetadata) { + log.debug( + "Invoked ingestAspectIfNotPresent with urn: {}, aspectName: {}, newValue: {}", + urn, + aspectName, + newValue); + + AspectsBatchImpl aspectsBatch = + AspectsBatchImpl.builder() + .one( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(newValue) @@ -733,150 +889,208 @@ public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, /** * Wrapper around batch method for single item + * * @param proposal the proposal * @param auditStamp an audit stamp representing the time and actor proposing the change - * @param async a flag to control whether we commit to primary store or just write to proposal log before returning + * @param async a flag to control whether we commit to primary store or just write to proposal log + * before returning * @return an {@link IngestResult} containing the results */ @Override - public IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { - return ingestProposal(AspectsBatchImpl.builder().mcps(List.of(proposal), getEntityRegistry()).build(), auditStamp, - async).stream().findFirst().get(); + public IngestResult ingestProposal( + MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { + return ingestProposal( + AspectsBatchImpl.builder().mcps(List.of(proposal), getEntityRegistry()).build(), + auditStamp, + async) + .stream() + .findFirst() + .get(); } /** - * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any additional aspects or do any - * enrichment, instead it changes only those which are provided inside the metadata change proposal. + * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any + * additional aspects or do any enrichment, instead it changes only those which are provided + * inside the metadata change proposal. * - * Do not use this method directly for creating new entities, as it DOES NOT create an Entity Key aspect in the DB. Instead, - * use an Entity Client. + * <p>Do not use this method directly for creating new entities, as it DOES NOT create an Entity + * Key aspect in the DB. Instead, use an Entity Client. 
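   * <p>For example (a minimal illustration, not an exhaustive contract): calling this with async =
   * false persists synchronously, so the returned {@link IngestResult}s have sqlCommitted = true,
   * while async = true only writes to the proposal log and the results have publishedMCP = true.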
* * @param aspectsBatch the proposals to ingest * @param auditStamp an audit stamp representing the time and actor proposing the change - * @param async a flag to control whether we commit to primary store or just write to proposal log before returning + * @param async a flag to control whether we commit to primary store or just write to proposal log + * before returning * @return an {@link IngestResult} containing the results */ @Override - public Set<IngestResult> ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async) { + public Set<IngestResult> ingestProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async) { - Stream<IngestResult> timeseriesIngestResults = ingestTimeseriesProposal(aspectsBatch, auditStamp); - Stream<IngestResult> nonTimeseriesIngestResults = async ? ingestProposalAsync(aspectsBatch) - : ingestProposalSync(aspectsBatch, auditStamp); + Stream<IngestResult> timeseriesIngestResults = + ingestTimeseriesProposal(aspectsBatch, auditStamp); + Stream<IngestResult> nonTimeseriesIngestResults = + async ? ingestProposalAsync(aspectsBatch) : ingestProposalSync(aspectsBatch, auditStamp); - return Stream.concat(timeseriesIngestResults, nonTimeseriesIngestResults).collect(Collectors.toSet()); + return Stream.concat(timeseriesIngestResults, nonTimeseriesIngestResults) + .collect(Collectors.toSet()); } /** * Timeseries is pass through to MCL, no MCP + * * @param aspectsBatch timeseries upserts batch * @param auditStamp provided audit information * @return returns ingest proposal result, however was never in the MCP topic */ - private Stream<IngestResult> ingestTimeseriesProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp) { - List<? extends AbstractBatchItem> unsupported = aspectsBatch.getItems().stream() - .filter(item -> item.getAspectSpec().isTimeseries() && item.getChangeType() != ChangeType.UPSERT) + private Stream<IngestResult> ingestTimeseriesProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp) { + List<? 
extends AbstractBatchItem> unsupported = + aspectsBatch.getItems().stream() + .filter( + item -> + item.getAspectSpec().isTimeseries() + && item.getChangeType() != ChangeType.UPSERT) .collect(Collectors.toList()); if (!unsupported.isEmpty()) { - throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream() - .map(AbstractBatchItem::getChangeType).collect(Collectors.toSet())); + throw new UnsupportedOperationException( + "ChangeType not supported: " + + unsupported.stream() + .map(AbstractBatchItem::getChangeType) + .collect(Collectors.toSet())); } - List<Pair<UpsertBatchItem, Optional<Pair<Future<?>, Boolean>>>> timeseriesResults = aspectsBatch.getItems().stream() + List<Pair<UpsertBatchItem, Optional<Pair<Future<?>, Boolean>>>> timeseriesResults = + aspectsBatch.getItems().stream() .filter(item -> item.getAspectSpec().isTimeseries()) .map(item -> (UpsertBatchItem) item) - .map(item -> Pair.of(item, conditionallyProduceMCLAsync(null, null, item.getAspect(), item.getSystemMetadata(), - item.getMetadataChangeProposal(), item.getUrn(), auditStamp, item.getAspectSpec()))) + .map( + item -> + Pair.of( + item, + conditionallyProduceMCLAsync( + null, + null, + item.getAspect(), + item.getSystemMetadata(), + item.getMetadataChangeProposal(), + item.getUrn(), + auditStamp, + item.getAspectSpec()))) .collect(Collectors.toList()); - return timeseriesResults.stream().map(result -> { - Optional<Pair<Future<?>, Boolean>> emissionStatus = result.getSecond(); - - emissionStatus.ifPresent(status -> { - try { - status.getFirst().get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); - - UpsertBatchItem request = result.getFirst(); - return IngestResult.builder() - .urn(request.getUrn()) - .request(request) - .publishedMCL(emissionStatus.map(status -> status.getFirst() != null).orElse(false)) - .processedMCL(emissionStatus.map(Pair::getSecond).orElse(false)) - .build(); - }); + return timeseriesResults.stream() + .map( + result -> { + Optional<Pair<Future<?>, Boolean>> emissionStatus = result.getSecond(); + + emissionStatus.ifPresent( + status -> { + try { + status.getFirst().get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); + + UpsertBatchItem request = result.getFirst(); + return IngestResult.builder() + .urn(request.getUrn()) + .request(request) + .publishedMCL( + emissionStatus.map(status -> status.getFirst() != null).orElse(false)) + .processedMCL(emissionStatus.map(Pair::getSecond).orElse(false)) + .build(); + }); } /** * For async ingestion of non-timeseries, any change type + * * @param aspectsBatch non-timeseries ingest aspects * @return produced items to the MCP topic */ private Stream<IngestResult> ingestProposalAsync(AspectsBatch aspectsBatch) { - List<? extends AbstractBatchItem> nonTimeseries = aspectsBatch.getItems().stream() + List<? 
extends AbstractBatchItem> nonTimeseries = + aspectsBatch.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList()); - List<Future<?>> futures = nonTimeseries.stream().map(item -> - // When async is turned on, we write to proposal log and return without waiting - _producer.produceMetadataChangeProposal(item.getUrn(), item.getMetadataChangeProposal())) + List<Future<?>> futures = + nonTimeseries.stream() + .map( + item -> + // When async is turned on, we write to proposal log and return without waiting + _producer.produceMetadataChangeProposal( + item.getUrn(), item.getMetadataChangeProposal())) .filter(Objects::nonNull) .collect(Collectors.toList()); try { - return nonTimeseries.stream().map(item -> - IngestResult.builder() + return nonTimeseries.stream() + .map( + item -> + IngestResult.builder() .urn(item.getUrn()) .request(item) .publishedMCP(true) .build()); } finally { - futures.forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); } } - private Stream<IngestResult> ingestProposalSync(AspectsBatch aspectsBatch, AuditStamp auditStamp) { - AspectsBatchImpl nonTimeseries = AspectsBatchImpl.builder() - .items(aspectsBatch.getItems().stream() + private Stream<IngestResult> ingestProposalSync( + AspectsBatch aspectsBatch, AuditStamp auditStamp) { + AspectsBatchImpl nonTimeseries = + AspectsBatchImpl.builder() + .items( + aspectsBatch.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList())) .build(); - List<? extends AbstractBatchItem> unsupported = nonTimeseries.getItems().stream() - .filter(item -> item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH - && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) + List<? 
extends AbstractBatchItem> unsupported = + nonTimeseries.getItems().stream() + .filter( + item -> + item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH + && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) .collect(Collectors.toList()); if (!unsupported.isEmpty()) { - throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream() - .map(item -> item.getMetadataChangeProposal().getChangeType()).collect(Collectors.toSet())); + throw new UnsupportedOperationException( + "ChangeType not supported: " + + unsupported.stream() + .map(item -> item.getMetadataChangeProposal().getChangeType()) + .collect(Collectors.toSet())); } List<UpdateAspectResult> upsertResults = ingestAspects(nonTimeseries, auditStamp, true, true); - return upsertResults.stream().map(result -> { - AbstractBatchItem item = result.getRequest(); + return upsertResults.stream() + .map( + result -> { + AbstractBatchItem item = result.getRequest(); - return IngestResult.builder() - .urn(item.getUrn()) - .request(item) - .publishedMCL(result.getMclFuture() != null) - .sqlCommitted(true) - .isUpdate(result.getOldValue() != null) - .build(); - }); + return IngestResult.builder() + .urn(item.getUrn()) + .request(item) + .publishedMCL(result.getMclFuture() != null) + .sqlCommitted(true) + .isUpdate(result.getOldValue() != null) + .build(); + }); } @Override - public String batchApplyRetention(Integer start, Integer count, Integer attemptWithVersion, String aspectName, - String urn) { + public String batchApplyRetention( + Integer start, Integer count, Integer attemptWithVersion, String aspectName, String urn) { BulkApplyRetentionArgs args = new BulkApplyRetentionArgs(); if (start == null) { start = 0; @@ -900,7 +1114,8 @@ private boolean preprocessEvent(MetadataChangeLog metadataChangeLog) { if (_preProcessHooks.isUiEnabled()) { if (metadataChangeLog.getSystemMetadata() != null) { if (metadataChangeLog.getSystemMetadata().getProperties() != null) { - if (UI_SOURCE.equals(metadataChangeLog.getSystemMetadata().getProperties().get(APP_SOURCE))) { + if (UI_SOURCE.equals( + metadataChangeLog.getSystemMetadata().getProperties().get(APP_SOURCE))) { // Pre-process the update indices hook for UI updates to avoid perceived lag from Kafka _updateIndicesService.handleChangeEvent(metadataChangeLog); return true; @@ -918,19 +1133,24 @@ public Integer getCountAspect(@Nonnull String aspectName, @Nullable String urnLi @Nonnull @Override - public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger) { + public RestoreIndicesResult restoreIndices( + @Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger) { RestoreIndicesResult result = new RestoreIndicesResult(); int ignored = 0; int rowsMigrated = 0; logger.accept(String.format("Args are %s", args)); - logger.accept(String.format( - "Reading rows %s through %s from the aspects table started.", args.start, args.start + args.batchSize)); + logger.accept( + String.format( + "Reading rows %s through %s from the aspects table started.", + args.start, args.start + args.batchSize)); long startTime = System.currentTimeMillis(); PagedList<EbeanAspectV2> rows = _aspectDao.getPagedAspects(args); result.timeSqlQueryMs = System.currentTimeMillis() - startTime; startTime = System.currentTimeMillis(); - logger.accept(String.format( - "Reading rows %s through %s from the aspects table completed.", args.start, args.start + args.batchSize)); + logger.accept( + String.format( + "Reading rows %s 
through %s from the aspects table completed.", + args.start, args.start + args.batchSize)); LinkedList<Future<?>> futures = new LinkedList<>(); @@ -942,8 +1162,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - logger.accept(String.format("Failed to bind Urn with value %s into Urn object: %s. Ignoring row.", - aspect.getKey().getUrn(), e)); + logger.accept( + String.format( + "Failed to bind Urn with value %s into Urn object: %s. Ignoring row.", + aspect.getKey().getUrn(), e)); ignored = ignored + 1; continue; } @@ -956,8 +1178,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - logger.accept(String.format("Failed to find entity with name %s in Entity Registry: %s. Ignoring row.", - entityName, e)); + logger.accept( + String.format( + "Failed to find entity with name %s in Entity Registry: %s. Ignoring row.", + entityName, e)); ignored = ignored + 1; continue; } @@ -968,8 +1192,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No // 3. Verify that the aspect is a valid aspect associated with the entity AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - logger.accept(String.format("Failed to find aspect with name %s associated with entity named %s", aspectName, - entityName)); + logger.accept( + String.format( + "Failed to find aspect with name %s associated with entity named %s", + aspectName, entityName)); ignored = ignored + 1; continue; } @@ -979,10 +1205,14 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No // 4. Create record from json aspect final RecordTemplate aspectRecord; try { - aspectRecord = EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + aspectRecord = + EntityUtils.toAspectRecord( + entityName, aspectName, aspect.getMetadata(), _entityRegistry); } catch (Exception e) { - logger.accept(String.format("Failed to deserialize row %s for entity %s, aspect %s: %s. Ignoring row.", - aspect.getMetadata(), entityName, aspectName, e)); + logger.accept( + String.format( + "Failed to deserialize row %s for entity %s, aspect %s: %s. Ignoring row.", + aspect.getMetadata(), entityName, aspectName, e)); ignored = ignored + 1; continue; } @@ -990,32 +1220,50 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No startTime = System.currentTimeMillis(); // Force indexing to skip diff mode and fix error states - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(aspect.getSystemMetadata()); - StringMap properties = latestSystemMetadata.getProperties() != null ? latestSystemMetadata.getProperties() - : new StringMap(); + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(aspect.getSystemMetadata()); + StringMap properties = + latestSystemMetadata.getProperties() != null + ? latestSystemMetadata.getProperties() + : new StringMap(); properties.put(FORCE_INDEXING_KEY, Boolean.TRUE.toString()); latestSystemMetadata.setProperties(properties); // 5. 
Produce MAE events for the aspect record - futures.add(alwaysProduceMCLAsync(urn, entityName, aspectName, aspectSpec, null, aspectRecord, null, - latestSystemMetadata, - new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - ChangeType.RESTATE).getFirst()); + futures.add( + alwaysProduceMCLAsync( + urn, + entityName, + aspectName, + aspectSpec, + null, + aspectRecord, + null, + latestSystemMetadata, + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + ChangeType.RESTATE) + .getFirst()); result.sendMessageMs += System.currentTimeMillis() - startTime; rowsMigrated++; } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); try { TimeUnit.MILLISECONDS.sleep(args.batchDelayMs); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } result.ignored = ignored; result.rowsMigrated = rowsMigrated; @@ -1030,12 +1278,16 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No * @param count the count */ @Override - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count) { - log.debug("Invoked listUrns with entityName: {}, start: {}, count: {}", entityName, start, count); + public ListUrnsResult listUrns( + @Nonnull final String entityName, final int start, final int count) { + log.debug( + "Invoked listUrns with entityName: {}, start: {}, count: {}", entityName, start, count); // If a keyAspect exists, the entity exists. - final String keyAspectName = getEntityRegistry().getEntitySpec(entityName).getKeyAspectSpec().getName(); - final ListResult<String> keyAspectList = _aspectDao.listUrns(entityName, keyAspectName, start, count); + final String keyAspectName = + getEntityRegistry().getEntitySpec(entityName).getKeyAspectSpec().getName(); + final ListResult<String> keyAspectList = + _aspectDao.listUrns(entityName, keyAspectName, start, count); final ListUrnsResult result = new ListUrnsResult(); result.setStart(start); @@ -1048,8 +1300,8 @@ public ListUrnsResult listUrns(@Nonnull final String entityName, final int start try { entityUrns.add(Urn.createFromString(urn)); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to convert urn %s found in db to Urn object.", urn), - e); + throw new IllegalArgumentException( + String.format("Failed to convert urn %s found in db to Urn object.", urn), e); } } result.setEntities(entityUrns); @@ -1057,17 +1309,20 @@ public ListUrnsResult listUrns(@Nonnull final String entityName, final int start } /** - * Default implementations. Subclasses should feel free to override if it's more efficient to do so. + * Default implementations. Subclasses should feel free to override if it's more efficient to do + * so. 
*/ @Override public Entity getEntity(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) { - return getEntities(Collections.singleton(urn), aspectNames).values().stream().findFirst().orElse(null); + return getEntities(Collections.singleton(urn), aspectNames).values().stream() + .findFirst() + .orElse(null); } /** * Deprecated! Use getEntitiesV2 instead. * - * Retrieves multiple entities. + * <p>Retrieves multiple entities. * * @param urns set of urns to fetch * @param aspectNames set of aspects to fetch @@ -1075,70 +1330,115 @@ public Entity getEntity(@Nonnull final Urn urn, @Nonnull final Set<String> aspec */ @Deprecated @Override - public Map<Urn, Entity> getEntities(@Nonnull final Set<Urn> urns, @Nonnull Set<String> aspectNames) { + public Map<Urn, Entity> getEntities( + @Nonnull final Set<Urn> urns, @Nonnull Set<String> aspectNames) { log.debug("Invoked getEntities with urns {}, aspects {}", urns, aspectNames); if (urns.isEmpty()) { return Collections.emptyMap(); } - return getSnapshotUnions(urns, aspectNames).entrySet() - .stream() + return getSnapshotUnions(urns, aspectNames).entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntity(entry.getValue()))); } @Override - public Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull final AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog) { + public Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull final AspectSpec aspectSpec, + @Nonnull final MetadataChangeLog metadataChangeLog) { Future<?> future = _producer.produceMetadataChangeLog(urn, aspectSpec, metadataChangeLog); return Pair.of(future, preprocessEvent(metadataChangeLog)); } @Override - public Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName, - @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue, - @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata, - @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, - @Nonnull final ChangeType changeType) { - final MetadataChangeLog metadataChangeLog = constructMCL(null, entityName, urn, changeType, aspectName, auditStamp, - newAspectValue, newSystemMetadata, oldAspectValue, oldSystemMetadata); + public Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull final AspectSpec aspectSpec, + @Nullable final RecordTemplate oldAspectValue, + @Nullable final RecordTemplate newAspectValue, + @Nullable final SystemMetadata oldSystemMetadata, + @Nullable final SystemMetadata newSystemMetadata, + @Nonnull AuditStamp auditStamp, + @Nonnull final ChangeType changeType) { + final MetadataChangeLog metadataChangeLog = + constructMCL( + null, + entityName, + urn, + changeType, + aspectName, + auditStamp, + newAspectValue, + newSystemMetadata, + oldAspectValue, + oldSystemMetadata); return alwaysProduceMCLAsync(urn, aspectSpec, metadataChangeLog); } - public Optional<Pair<Future<?>, Boolean>> conditionallyProduceMCLAsync(@Nullable RecordTemplate oldAspect, - @Nullable SystemMetadata oldSystemMetadata, - RecordTemplate newAspect, SystemMetadata newSystemMetadata, - @Nullable MetadataChangeProposal mcp, Urn entityUrn, - AuditStamp auditStamp, AspectSpec aspectSpec) { + public Optional<Pair<Future<?>, Boolean>> conditionallyProduceMCLAsync( + @Nullable RecordTemplate 
oldAspect, + @Nullable SystemMetadata oldSystemMetadata, + RecordTemplate newAspect, + SystemMetadata newSystemMetadata, + @Nullable MetadataChangeProposal mcp, + Urn entityUrn, + AuditStamp auditStamp, + AspectSpec aspectSpec) { boolean isNoOp = oldAspect == newAspect; if (!isNoOp || _alwaysEmitChangeLog || shouldAspectEmitChangeLog(aspectSpec)) { - log.debug("Producing MetadataChangeLog for ingested aspect {}, urn {}", aspectSpec.getName(), entityUrn); - - final MetadataChangeLog metadataChangeLog = constructMCL(mcp, urnToEntityName(entityUrn), entityUrn, - isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT, aspectSpec.getName(), auditStamp, newAspect, newSystemMetadata, - oldAspect, oldSystemMetadata); + log.debug( + "Producing MetadataChangeLog for ingested aspect {}, urn {}", + aspectSpec.getName(), + entityUrn); + + final MetadataChangeLog metadataChangeLog = + constructMCL( + mcp, + urnToEntityName(entityUrn), + entityUrn, + isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT, + aspectSpec.getName(), + auditStamp, + newAspect, + newSystemMetadata, + oldAspect, + oldSystemMetadata); log.debug("Serialized MCL event: {}", metadataChangeLog); - Pair<Future<?>, Boolean> emissionStatus = alwaysProduceMCLAsync(entityUrn, aspectSpec, metadataChangeLog); + Pair<Future<?>, Boolean> emissionStatus = + alwaysProduceMCLAsync(entityUrn, aspectSpec, metadataChangeLog); return emissionStatus.getFirst() != null ? Optional.of(emissionStatus) : Optional.empty(); } else { log.debug( - "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. Aspect has not changed.", - aspectSpec.getName(), entityUrn); + "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. Aspect has not changed.", + aspectSpec.getName(), + entityUrn); return Optional.empty(); } } private UpdateAspectResult conditionallyProduceMCLAsync(UpdateAspectResult result) { AbstractBatchItem request = result.getRequest(); - Optional<Pair<Future<?>, Boolean>> emissionStatus = conditionallyProduceMCLAsync(result.getOldValue(), result.getOldSystemMetadata(), - result.getNewValue(), result.getNewSystemMetadata(), - request.getMetadataChangeProposal(), result.getUrn(), result.getAuditStamp(), request.getAspectSpec()); - - return emissionStatus.map(status -> - result.toBuilder() - .mclFuture(status.getFirst()) - .processedMCL(status.getSecond()) - .build() - ).orElse(result); + Optional<Pair<Future<?>, Boolean>> emissionStatus = + conditionallyProduceMCLAsync( + result.getOldValue(), + result.getOldSystemMetadata(), + result.getNewValue(), + result.getNewSystemMetadata(), + request.getMetadataChangeProposal(), + result.getUrn(), + result.getAuditStamp(), + request.getAspectSpec()); + + return emissionStatus + .map( + status -> + result.toBuilder() + .mclFuture(status.getFirst()) + .processedMCL(status.getSecond()) + .build()) + .orElse(result); } @Override @@ -1148,10 +1448,15 @@ public RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final Str } @Override - public void ingestEntities(@Nonnull final List<Entity> entities, @Nonnull final AuditStamp auditStamp, + public void ingestEntities( + @Nonnull final List<Entity> entities, + @Nonnull final AuditStamp auditStamp, @Nonnull final List<SystemMetadata> systemMetadata) { log.debug("Invoked ingestEntities with entities {}, audit stamp {}", entities, auditStamp); - Streams.zip(entities.stream(), systemMetadata.stream(), (a, b) -> new Pair<Entity, SystemMetadata>(a, b)) + Streams.zip( + entities.stream(), + systemMetadata.stream(), + (a, b) -> new Pair<Entity, 
SystemMetadata>(a, b)) .forEach(pair -> ingestEntity(pair.getFirst(), auditStamp, pair.getSecond())); } @@ -1166,42 +1471,50 @@ public SystemMetadata ingestEntity(Entity entity, AuditStamp auditStamp) { } @Override - public void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp, - @Nonnull SystemMetadata systemMetadata) { - log.debug("Invoked ingestEntity with entity {}, audit stamp {} systemMetadata {}", entity, auditStamp, systemMetadata.toString()); + public void ingestEntity( + @Nonnull Entity entity, + @Nonnull AuditStamp auditStamp, + @Nonnull SystemMetadata systemMetadata) { + log.debug( + "Invoked ingestEntity with entity {}, audit stamp {} systemMetadata {}", + entity, + auditStamp, + systemMetadata.toString()); ingestSnapshotUnion(entity.getValue(), auditStamp, systemMetadata); } @Nonnull - protected Map<Urn, Snapshot> getSnapshotUnions(@Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { - return getSnapshotRecords(urns, aspectNames).entrySet() - .stream() + protected Map<Urn, Snapshot> getSnapshotUnions( + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + return getSnapshotRecords(urns, aspectNames).entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toSnapshotUnion(entry.getValue()))); } @Nonnull - protected Map<Urn, RecordTemplate> getSnapshotRecords(@Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) { - return getLatestAspectUnions(urns, aspectNames).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> toSnapshotRecord(entry.getKey(), entry.getValue()))); + protected Map<Urn, RecordTemplate> getSnapshotRecords( + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + return getLatestAspectUnions(urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> toSnapshotRecord(entry.getKey(), entry.getValue()))); } @Nonnull protected Map<Urn, List<UnionTemplate>> getLatestAspectUnions( - @Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) { - return getLatestAspects(urns, aspectNames).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue() - .stream() - .map(aspectRecord -> toAspectUnion(entry.getKey(), aspectRecord)) - .collect(Collectors.toList()))); + @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) { + return getLatestAspects(urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> + entry.getValue().stream() + .map(aspectRecord -> toAspectUnion(entry.getKey(), aspectRecord)) + .collect(Collectors.toList()))); } /** - Returns true if entityType should have some aspect as per its definition - but aspects given does not have that aspect + * Returns true if entityType should have some aspect as per its definition but aspects given does + * not have that aspect */ private boolean isAspectMissing(String entityType, String aspectName, Set<String> aspects) { return _entityRegistry.getEntitySpec(entityType).getAspectSpecMap().containsKey(aspectName) @@ -1209,32 +1522,37 @@ private boolean isAspectMissing(String entityType, String aspectName, Set<String } @Override - public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects) { + public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite( + @Nonnull final Urn urn, Map<String, RecordTemplate> 
includedAspects) { List<Pair<String, RecordTemplate>> returnAspects = new ArrayList<>(); final String keyAspectName = getKeyAspectName(urn); - final Map<String, RecordTemplate> latestAspects = new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName))); + final Map<String, RecordTemplate> latestAspects = + new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName))); // key aspect: does not exist in database && is being written - boolean generateDefaults = !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); + boolean generateDefaults = + !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); // conditionally generate defaults if (generateDefaults) { String entityType = urnToEntityName(urn); Set<String> aspectsToGet = new HashSet<>(); - boolean shouldCheckBrowsePath = isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckBrowsePath = + isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckBrowsePath) { aspectsToGet.add(BROWSE_PATHS_ASPECT_NAME); } - boolean shouldCheckBrowsePathV2 = isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckBrowsePathV2 = + isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckBrowsePathV2) { aspectsToGet.add(BROWSE_PATHS_V2_ASPECT_NAME); } - boolean shouldCheckDataPlatform = isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckDataPlatform = + isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckDataPlatform) { aspectsToGet.add(DATA_PLATFORM_INSTANCE_ASPECT_NAME); } @@ -1242,8 +1560,9 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO // fetch additional aspects latestAspects.putAll(getLatestAspectsForUrn(urn, aspectsToGet)); - if (shouldCheckBrowsePath && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { + if (shouldCheckBrowsePath + && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { try { BrowsePaths generatedBrowsePath = buildDefaultBrowsePath(urn); returnAspects.add(Pair.of(BROWSE_PATHS_ASPECT_NAME, generatedBrowsePath)); @@ -1252,8 +1571,9 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO } } - if (shouldCheckBrowsePathV2 && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { + if (shouldCheckBrowsePathV2 + && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { try { BrowsePathsV2 generatedBrowsePathV2 = buildDefaultBrowsePathV2(urn, false); returnAspects.add(Pair.of(BROWSE_PATHS_V2_ASPECT_NAME, generatedBrowsePathV2)); @@ -1262,11 +1582,13 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO } } - if (shouldCheckDataPlatform && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null - && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { + if (shouldCheckDataPlatform + && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null + && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { RecordTemplate keyAspect = includedAspects.get(keyAspectName); 
DataPlatformInstanceUtils.buildDataPlatformInstance(entityType, keyAspect) - .ifPresent(aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); + .ifPresent( + aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); } } @@ -1274,8 +1596,8 @@ public Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsO } @Override - public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects) { + public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing( + @Nonnull final Urn urn, Map<String, RecordTemplate> includedAspects) { final String keyAspectName = getKeyAspectName(urn); @@ -1284,10 +1606,12 @@ public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnu } else { // No key aspect being written, generate it and potentially suggest writing it later HashMap<String, RecordTemplate> includedWithKeyAspect = new HashMap<>(includedAspects); - Pair<String, RecordTemplate> keyAspect = Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); + Pair<String, RecordTemplate> keyAspect = + Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); includedWithKeyAspect.put(keyAspect.getKey(), keyAspect.getValue()); - Pair<Boolean, List<Pair<String, RecordTemplate>>> returnAspects = generateDefaultAspectsOnFirstWrite(urn, includedWithKeyAspect); + Pair<Boolean, List<Pair<String, RecordTemplate>>> returnAspects = + generateDefaultAspectsOnFirstWrite(urn, includedWithKeyAspect); // missing key aspect in database, add it if (!returnAspects.getFirst()) { @@ -1298,24 +1622,36 @@ public List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnu } } - private void ingestSnapshotUnion(@Nonnull final Snapshot snapshotUnion, @Nonnull final AuditStamp auditStamp, + private void ingestSnapshotUnion( + @Nonnull final Snapshot snapshotUnion, + @Nonnull final AuditStamp auditStamp, SystemMetadata systemMetadata) { - final RecordTemplate snapshotRecord = RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion); + final RecordTemplate snapshotRecord = + RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion); final Urn urn = com.datahub.util.ModelUtils.getUrnFromSnapshot(snapshotRecord); final List<Pair<String, RecordTemplate>> aspectRecordsToIngest = NewModelUtils.getAspectsFromSnapshot(snapshotRecord); log.info("INGEST urn {} with system metadata {}", urn.toString(), systemMetadata.toString()); - aspectRecordsToIngest.addAll(generateDefaultAspectsIfMissing(urn, - aspectRecordsToIngest.stream().collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); - - AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() - .items(aspectRecordsToIngest.stream().map(pair -> UpsertBatchItem.builder() - .urn(urn) - .aspectName(pair.getKey()) - .aspect(pair.getValue()) - .systemMetadata(systemMetadata) - .build(_entityRegistry)).collect(Collectors.toList())) + aspectRecordsToIngest.addAll( + generateDefaultAspectsIfMissing( + urn, + aspectRecordsToIngest.stream() + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); + + AspectsBatchImpl aspectsBatch = + AspectsBatchImpl.builder() + .items( + aspectRecordsToIngest.stream() + .map( + pair -> + UpsertBatchItem.builder() + .urn(urn) + .aspectName(pair.getKey()) + .aspect(pair.getValue()) + .systemMetadata(systemMetadata) + .build(_entityRegistry)) + .collect(Collectors.toList())) .build(); ingestAspects(aspectsBatch, auditStamp, true, true); 
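For orientation while reviewing the hunks above: ingestSnapshotUnion reduces each snapshot aspect to an UpsertBatchItem and hands the whole batch to ingestAspects. A minimal sketch of that call pattern, assuming the caller already holds an EntityService instance (entityService here), an entity urn, a prepared SystemMetadata (systemMetadata), and an EntityRegistry (entityRegistry); the builder and constant names are taken from the code above, everything else is illustrative:

    // Single-aspect batch, mirroring ingestAspectIfNotPresent/ingestSnapshotUnion above.
    final Status status = new Status();
    status.setRemoved(false); // Status aspect, as also used by deleteAspect's soft-delete path

    final AspectsBatchImpl batch =
        AspectsBatchImpl.builder()
            .one(
                UpsertBatchItem.builder()
                    .urn(urn)
                    .aspectName(Constants.STATUS_ASPECT_NAME)
                    .aspect(status)
                    .systemMetadata(systemMetadata) // assumed to be prepared by the caller
                    .build(entityRegistry))
            .build();

    final AuditStamp auditStamp =
        new AuditStamp()
            .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR))
            .setTime(System.currentTimeMillis());

    // Same flag values that ingestSnapshotUnion passes above.
    entityService.ingestAspects(batch, auditStamp, true, true);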
@@ -1333,7 +1669,8 @@ public AspectSpec getKeyAspectSpec(@Nonnull final String entityName) { } @Override - public Optional<AspectSpec> getAspectSpec(@Nonnull final String entityName, @Nonnull final String aspectName) { + public Optional<AspectSpec> getAspectSpec( + @Nonnull final String entityName, @Nonnull final String aspectName) { final EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); return Optional.ofNullable(entitySpec.getAspectSpec(aspectName)); } @@ -1355,25 +1692,29 @@ protected Snapshot toSnapshotUnion(@Nonnull final RecordTemplate snapshotRecord) return snapshot; } - protected RecordTemplate toSnapshotRecord(@Nonnull final Urn urn, - @Nonnull final List<UnionTemplate> aspectUnionTemplates) { + protected RecordTemplate toSnapshotRecord( + @Nonnull final Urn urn, @Nonnull final List<UnionTemplate> aspectUnionTemplates) { final String entityName = urnToEntityName(urn); final EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); return com.datahub.util.ModelUtils.newSnapshot( - getDataTemplateClassFromSchema(entitySpec.getSnapshotSchema(), RecordTemplate.class), urn, + getDataTemplateClassFromSchema(entitySpec.getSnapshotSchema(), RecordTemplate.class), + urn, aspectUnionTemplates); } - protected UnionTemplate toAspectUnion(@Nonnull final Urn urn, @Nonnull final RecordTemplate aspectRecord) { + protected UnionTemplate toAspectUnion( + @Nonnull final Urn urn, @Nonnull final RecordTemplate aspectRecord) { final EntitySpec entitySpec = _entityRegistry.getEntitySpec(urnToEntityName(urn)); final TyperefDataSchema aspectSchema = entitySpec.getAspectTyperefSchema(); if (aspectSchema == null) { throw new RuntimeException( - String.format("Aspect schema for %s is null: v4 operation is not supported on this entity registry", + String.format( + "Aspect schema for %s is null: v4 operation is not supported on this entity registry", entitySpec.getName())); } return com.datahub.util.ModelUtils.newAspectUnion( - getDataTemplateClassFromSchema(entitySpec.getAspectTyperefSchema(), UnionTemplate.class), aspectRecord); + getDataTemplateClassFromSchema(entitySpec.getAspectTyperefSchema(), UnionTemplate.class), + aspectRecord); } protected Urn toUrn(final String urnStr) { @@ -1381,26 +1722,32 @@ protected Urn toUrn(final String urnStr) { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { log.error("Failed to convert urn string {} into Urn object", urnStr); - throw new ModelConversionException(String.format("Failed to convert urn string %s into Urn object ", urnStr), e); + throw new ModelConversionException( + String.format("Failed to convert urn string %s into Urn object ", urnStr), e); } } - private EntityResponse toEntityResponse(final Urn urn, final List<EnvelopedAspect> envelopedAspects) { + private EntityResponse toEntityResponse( + final Urn urn, final List<EnvelopedAspect> envelopedAspects) { final EntityResponse response = new EntityResponse(); response.setUrn(urn); response.setEntityName(urnToEntityName(urn)); - response.setAspects(new EnvelopedAspectMap( - envelopedAspects.stream().collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)) - )); + response.setAspects( + new EnvelopedAspectMap( + envelopedAspects.stream() + .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)))); return response; } private Map<String, Set<String>> buildEntityToValidAspects(final EntityRegistry entityRegistry) { - return entityRegistry.getEntitySpecs() - .values() - .stream() - .collect(Collectors.toMap(EntitySpec::getName, - entry -> 
entry.getAspectSpecs().stream().map(AspectSpec::getName).collect(Collectors.toSet()))); + return entityRegistry.getEntitySpecs().values().stream() + .collect( + Collectors.toMap( + EntitySpec::getName, + entry -> + entry.getAspectSpecs().stream() + .map(AspectSpec::getName) + .collect(Collectors.toSet()))); } @Override @@ -1429,44 +1776,68 @@ public void setWritable(boolean canWrite) { } @Override - public RollbackRunResult rollbackRun(List<AspectRowSummary> aspectRows, String runId, boolean hardDelete) { + public RollbackRunResult rollbackRun( + List<AspectRowSummary> aspectRows, String runId, boolean hardDelete) { return rollbackWithConditions(aspectRows, Collections.singletonMap("runId", runId), hardDelete); } @Override - public RollbackRunResult rollbackWithConditions(List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete) { + public RollbackRunResult rollbackWithConditions( + List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete) { List<AspectRowSummary> removedAspects = new ArrayList<>(); AtomicInteger rowsDeletedFromEntityDeletion = new AtomicInteger(0); - List<Future<?>> futures = aspectRows.stream().map(aspectToRemove -> { - RollbackResult result = deleteAspect(aspectToRemove.getUrn(), aspectToRemove.getAspectName(), - conditions, hardDelete); - if (result != null) { - Optional<AspectSpec> aspectSpec = getAspectSpec(result.entityName, result.aspectName); - if (!aspectSpec.isPresent()) { - log.error("Issue while rolling back: unknown aspect {} for entity {}", result.entityName, result.aspectName); - return null; - } - - rowsDeletedFromEntityDeletion.addAndGet(result.additionalRowsAffected); - removedAspects.add(aspectToRemove); - return alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), aspectSpec.get(), - result.getOldValue(), result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), - // TODO: use properly attributed audit stamp. - createSystemAuditStamp(), - result.getChangeType()).getFirst(); - } - - return null; - }).filter(Objects::nonNull).collect(Collectors.toList()); + List<Future<?>> futures = + aspectRows.stream() + .map( + aspectToRemove -> { + RollbackResult result = + deleteAspect( + aspectToRemove.getUrn(), + aspectToRemove.getAspectName(), + conditions, + hardDelete); + if (result != null) { + Optional<AspectSpec> aspectSpec = + getAspectSpec(result.entityName, result.aspectName); + if (!aspectSpec.isPresent()) { + log.error( + "Issue while rolling back: unknown aspect {} for entity {}", + result.entityName, + result.aspectName); + return null; + } + + rowsDeletedFromEntityDeletion.addAndGet(result.additionalRowsAffected); + removedAspects.add(aspectToRemove); + return alwaysProduceMCLAsync( + result.getUrn(), + result.getEntityName(), + result.getAspectName(), + aspectSpec.get(), + result.getOldValue(), + result.getNewValue(), + result.getOldSystemMetadata(), + result.getNewSystemMetadata(), + // TODO: use properly attributed audit stamp. 
+ createSystemAuditStamp(), + result.getChangeType()) + .getFirst(); + } + + return null; + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); - futures.forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion.get()); } @@ -1490,8 +1861,14 @@ public RollbackRunResult deleteUrn(Urn urn) { return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion); } - SystemMetadata latestKeySystemMetadata = EntityUtils.parseSystemMetadata(latestKey.getSystemMetadata()); - RollbackResult result = deleteAspect(urn.toString(), keyAspectName, Collections.singletonMap("runId", latestKeySystemMetadata.getRunId()), true); + SystemMetadata latestKeySystemMetadata = + EntityUtils.parseSystemMetadata(latestKey.getSystemMetadata()); + RollbackResult result = + deleteAspect( + urn.toString(), + keyAspectName, + Collections.singletonMap("runId", latestKeySystemMetadata.getRunId()), + true); if (result != null) { AspectRowSummary summary = new AspectRowSummary(); @@ -1503,11 +1880,20 @@ public RollbackRunResult deleteUrn(Urn urn) { rowsDeletedFromEntityDeletion = result.additionalRowsAffected; removedAspects.add(summary); - Future<?> future = alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), keySpec, - result.getOldValue(), result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), - // TODO: Use a proper inferred audit stamp - createSystemAuditStamp(), - result.getChangeType()).getFirst(); + Future<?> future = + alwaysProduceMCLAsync( + result.getUrn(), + result.getEntityName(), + result.getAspectName(), + keySpec, + result.getOldValue(), + result.getNewValue(), + result.getOldSystemMetadata(), + result.getNewSystemMetadata(), + // TODO: Use a proper inferred audit stamp + createSystemAuditStamp(), + result.getChangeType()) + .getFirst(); if (future != null) { try { @@ -1530,9 +1916,12 @@ public RollbackRunResult deleteUrn(Urn urn) { @Override public Boolean exists(Urn urn) { final Set<String> aspectsToFetch = getEntityAspectNames(urn); - final List<EntityAspectIdentifier> dbKeys = aspectsToFetch.stream() - .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList()); + final List<EntityAspectIdentifier> dbKeys = + aspectsToFetch.stream() + .map( + aspectName -> + new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) + .collect(Collectors.toList()); Map<EntityAspectIdentifier, EntityAspect> aspects = _aspectDao.batchGet(new HashSet(dbKeys)); return aspects.values().stream().anyMatch(aspect -> aspect != null); @@ -1553,14 +1942,16 @@ public Boolean isSoftDeleted(@Nonnull final Urn urn) { @Override public Boolean exists(Urn urn, String aspectName) { - EntityAspectIdentifier dbKey = new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION); + EntityAspectIdentifier dbKey = + new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION); Map<EntityAspectIdentifier, EntityAspect> aspects = _aspectDao.batchGet(Set.of(dbKey)); return aspects.values().stream().anyMatch(Objects::nonNull); } @Nullable @Override - public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map<String, String> 
conditions, boolean hardDelete) { + public RollbackResult deleteAspect( + String urn, String aspectName, @Nonnull Map<String, String> conditions, boolean hardDelete) { // Validate pre-conditions before running queries Urn entityUrn; EntitySpec entitySpec; @@ -1573,120 +1964,153 @@ public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map<S throw new RuntimeException(String.format("Failed to extract urn from %s", urn)); } - final RollbackResult result = _aspectDao.runInTransactionWithRetry((tx) -> { - Integer additionalRowsDeleted = 0; + final RollbackResult result = + _aspectDao.runInTransactionWithRetry( + (tx) -> { + Integer additionalRowsDeleted = 0; - // 1. Fetch the latest existing version of the aspect. - final EntityAspect latest = _aspectDao.getLatestAspect(urn, aspectName); + // 1. Fetch the latest existing version of the aspect. + final EntityAspect latest = _aspectDao.getLatestAspect(urn, aspectName); - // 1.1 If no latest exists, skip this aspect - if (latest == null) { - return null; - } + // 1.1 If no latest exists, skip this aspect + if (latest == null) { + return null; + } - // 2. Compare the match conditions, if they don't match, ignore. - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); - if (!filterMatch(latestSystemMetadata, conditions)) { - return null; - } - String latestMetadata = latest.getMetadata(); + // 2. Compare the match conditions, if they don't match, ignore. + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + if (!filterMatch(latestSystemMetadata, conditions)) { + return null; + } + String latestMetadata = latest.getMetadata(); - // 3. Check if this is a key aspect - Boolean isKeyAspect = false; - try { - isKeyAspect = getKeyAspectName(Urn.createFromString(urn)).equals(aspectName); - } catch (URISyntaxException e) { - log.error("Error occurred while parsing urn: {}", urn, e); - } + // 3. Check if this is a key aspect + Boolean isKeyAspect = false; + try { + isKeyAspect = getKeyAspectName(Urn.createFromString(urn)).equals(aspectName); + } catch (URISyntaxException e) { + log.error("Error occurred while parsing urn: {}", urn, e); + } - // 4. Fetch all preceding aspects, that match - List<EntityAspect> aspectsToDelete = new ArrayList<>(); - long maxVersion = _aspectDao.getMaxVersion(urn, aspectName); - EntityAspect survivingAspect = null; - String previousMetadata = null; - boolean filterMatch = true; - while (maxVersion > 0 && filterMatch) { - EntityAspect candidateAspect = _aspectDao.getAspect(urn, aspectName, maxVersion); - SystemMetadata previousSysMetadata = EntityUtils.parseSystemMetadata(candidateAspect.getSystemMetadata()); - filterMatch = filterMatch(previousSysMetadata, conditions); - if (filterMatch) { - aspectsToDelete.add(candidateAspect); - maxVersion = maxVersion - 1; - } else { - survivingAspect = candidateAspect; - previousMetadata = survivingAspect.getMetadata(); - } - } + // 4. 
Fetch all preceding aspects, that match
+      List<EntityAspect> aspectsToDelete = new ArrayList<>();
+      long maxVersion = _aspectDao.getMaxVersion(urn, aspectName);
+      EntityAspect survivingAspect = null;
+      String previousMetadata = null;
+      boolean filterMatch = true;
+      while (maxVersion > 0 && filterMatch) {
+        EntityAspect candidateAspect = _aspectDao.getAspect(urn, aspectName, maxVersion);
+        SystemMetadata previousSysMetadata =
+            EntityUtils.parseSystemMetadata(candidateAspect.getSystemMetadata());
+        filterMatch = filterMatch(previousSysMetadata, conditions);
+        if (filterMatch) {
+          aspectsToDelete.add(candidateAspect);
+          maxVersion = maxVersion - 1;
+        } else {
+          survivingAspect = candidateAspect;
+          previousMetadata = survivingAspect.getMetadata();
+        }
+      }
-      // 5. Apply deletes and fix up latest row
-
-      aspectsToDelete.forEach(aspect -> _aspectDao.deleteAspect(tx, aspect));
-
-      if (survivingAspect != null) {
-        // if there was a surviving aspect, copy its information into the latest row
-        // eBean does not like us updating a pkey column (version) for the surviving aspect
-        // as a result we copy information from survivingAspect to latest and delete survivingAspect
-        latest.setMetadata(survivingAspect.getMetadata());
-        latest.setSystemMetadata(survivingAspect.getSystemMetadata());
-        latest.setCreatedOn(survivingAspect.getCreatedOn());
-        latest.setCreatedBy(survivingAspect.getCreatedBy());
-        latest.setCreatedFor(survivingAspect.getCreatedFor());
-        _aspectDao.saveAspect(tx, latest, false);
-        // metrics
-        _aspectDao.incrementWriteMetrics(aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length);
-        _aspectDao.deleteAspect(tx, survivingAspect);
-      } else {
-        if (isKeyAspect) {
-          if (hardDelete) {
-            // If this is the key aspect, delete the entity entirely.
-            additionalRowsDeleted = _aspectDao.deleteUrn(tx, urn);
-          } else if (entitySpec.hasAspect(Constants.STATUS_ASPECT_NAME)) {
-            // soft delete by setting status.removed=true (if applicable)
-            final Status statusAspect = new Status();
-            statusAspect.setRemoved(true);
-
-            final MetadataChangeProposal gmce = new MetadataChangeProposal();
-            gmce.setEntityUrn(entityUrn);
-            gmce.setChangeType(ChangeType.UPSERT);
-            gmce.setEntityType(entityUrn.getEntityType());
-            gmce.setAspectName(Constants.STATUS_ASPECT_NAME);
-            gmce.setAspect(GenericRecordUtils.serializeAspect(statusAspect));
-            final AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
-
-            this.ingestProposal(gmce, auditStamp, false);
-          }
-        } else {
-          // Else, only delete the specific aspect.
-          _aspectDao.deleteAspect(tx, latest);
-        }
-      }
+          // 5. Apply deletes and fix up latest row
+
+          aspectsToDelete.forEach(aspect -> _aspectDao.deleteAspect(tx, aspect));
+
+          if (survivingAspect != null) {
+            // if there was a surviving aspect, copy its information into the latest row
+            // eBean does not like us updating a pkey column (version) for the surviving aspect
+            // as a result we copy information from survivingAspect to latest and delete
+            // survivingAspect
+            latest.setMetadata(survivingAspect.getMetadata());
+            latest.setSystemMetadata(survivingAspect.getSystemMetadata());
+            latest.setCreatedOn(survivingAspect.getCreatedOn());
+            latest.setCreatedBy(survivingAspect.getCreatedBy());
+            latest.setCreatedFor(survivingAspect.getCreatedFor());
+            _aspectDao.saveAspect(tx, latest, false);
+            // metrics
+            _aspectDao.incrementWriteMetrics(
+                aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length);
+            _aspectDao.deleteAspect(tx, survivingAspect);
+          } else {
+            if (isKeyAspect) {
+              if (hardDelete) {
+                // If this is the key aspect, delete the entity entirely.
+                additionalRowsDeleted = _aspectDao.deleteUrn(tx, urn);
+              } else if (entitySpec.hasAspect(Constants.STATUS_ASPECT_NAME)) {
+                // soft delete by setting status.removed=true (if applicable)
+                final Status statusAspect = new Status();
+                statusAspect.setRemoved(true);
+
+                final MetadataChangeProposal gmce = new MetadataChangeProposal();
+                gmce.setEntityUrn(entityUrn);
+                gmce.setChangeType(ChangeType.UPSERT);
+                gmce.setEntityType(entityUrn.getEntityType());
+                gmce.setAspectName(Constants.STATUS_ASPECT_NAME);
+                gmce.setAspect(GenericRecordUtils.serializeAspect(statusAspect));
+                final AuditStamp auditStamp =
+                    new AuditStamp()
+                        .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR))
+                        .setTime(System.currentTimeMillis());
+
+                this.ingestProposal(gmce, auditStamp, false);
+              }
+            } else {
+              // Else, only delete the specific aspect.
+              _aspectDao.deleteAspect(tx, latest);
+            }
+          }
-      // 6. Emit the Update
-      try {
-        final RecordTemplate latestValue = latest == null ? null
-            : EntityUtils.toAspectRecord(Urn.createFromString(latest.getUrn()), latest.getAspect(),
-                latestMetadata, getEntityRegistry());
-
-        final RecordTemplate previousValue = survivingAspect == null ? null
-            : EntityUtils.toAspectRecord(Urn.createFromString(survivingAspect.getUrn()),
-                survivingAspect.getAspect(), previousMetadata, getEntityRegistry());
-
-        final Urn urnObj = Urn.createFromString(urn);
-        // We are not deleting key aspect if hardDelete has not been set so do not return a rollback result
-        if (isKeyAspect && !hardDelete) {
-          return null;
-        }
-        return new RollbackResult(urnObj, urnObj.getEntityType(), latest.getAspect(), latestValue,
-            previousValue, latestSystemMetadata,
-            previousValue == null ? null : EntityUtils.parseSystemMetadata(survivingAspect.getSystemMetadata()),
-            survivingAspect == null ? ChangeType.DELETE : ChangeType.UPSERT, isKeyAspect, additionalRowsDeleted);
-      } catch (URISyntaxException e) {
-        throw new RuntimeException(String.format("Failed to emit the update for urn %s", urn));
-      } catch (IllegalStateException e) {
-        log.warn("Unable to find aspect, rollback result will not be sent. Error: {}", e.getMessage());
-        return null;
-      }
-    }, DEFAULT_MAX_TRANSACTION_RETRY);
+          // 6. Emit the Update
+          try {
+            final RecordTemplate latestValue =
+                latest == null
+                    ? null
+                    : EntityUtils.toAspectRecord(
+                        Urn.createFromString(latest.getUrn()),
+                        latest.getAspect(),
+                        latestMetadata,
+                        getEntityRegistry());
+
+            final RecordTemplate previousValue =
+                survivingAspect == null
+                    ? null
+                    : EntityUtils.toAspectRecord(
+                        Urn.createFromString(survivingAspect.getUrn()),
+                        survivingAspect.getAspect(),
+                        previousMetadata,
+                        getEntityRegistry());
+
+            final Urn urnObj = Urn.createFromString(urn);
+            // We are not deleting key aspect if hardDelete has not been set so do not return a
+            // rollback result
+            if (isKeyAspect && !hardDelete) {
+              return null;
+            }
+            return new RollbackResult(
+                urnObj,
+                urnObj.getEntityType(),
+                latest.getAspect(),
+                latestValue,
+                previousValue,
+                latestSystemMetadata,
+                previousValue == null
+                    ? null
+                    : EntityUtils.parseSystemMetadata(survivingAspect.getSystemMetadata()),
+                survivingAspect == null ? ChangeType.DELETE : ChangeType.UPSERT,
+                isKeyAspect,
+                additionalRowsDeleted);
+          } catch (URISyntaxException e) {
+            throw new RuntimeException(
+                String.format("Failed to emit the update for urn %s", urn));
+          } catch (IllegalStateException e) {
+            log.warn(
+                "Unable to find aspect, rollback result will not be sent. Error: {}",
+                e.getMessage());
+            return null;
+          }
+        },
+        DEFAULT_MAX_TRANSACTION_RETRY);
 
     return result;
   }
@@ -1720,21 +2144,32 @@ protected AuditStamp createSystemAuditStamp() {
   }
 
   @Nonnull
-  private Map<EntityAspectIdentifier, EntityAspect> getLatestAspect(@Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) {
+  private Map<EntityAspectIdentifier, EntityAspect> getLatestAspect(
+      @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) {
     log.debug("Invoked getLatestAspects with urns: {}, aspectNames: {}", urns, aspectNames);
     // Create DB keys
-    final Set<EntityAspectIdentifier> dbKeys = urns.stream().map(urn -> {
-      final Set<String> aspectsToFetch = aspectNames.isEmpty() ? getEntityAspectNames(urn) : aspectNames;
-      return aspectsToFetch.stream()
-          .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION))
-          .collect(Collectors.toList());
-    }).flatMap(List::stream).collect(Collectors.toSet());
+    final Set<EntityAspectIdentifier> dbKeys =
+        urns.stream()
+            .map(
+                urn -> {
+                  final Set<String> aspectsToFetch =
+                      aspectNames.isEmpty() ? getEntityAspectNames(urn) : aspectNames;
+                  return aspectsToFetch.stream()
+                      .map(
+                          aspectName ->
+                              new EntityAspectIdentifier(
+                                  urn.toString(), aspectName, ASPECT_LATEST_VERSION))
+                      .collect(Collectors.toList());
+                })
+            .flatMap(List::stream)
+            .collect(Collectors.toSet());
 
     Map<EntityAspectIdentifier, EntityAspect> batchGetResults = new HashMap<>();
     Iterators.partition(dbKeys.iterator(), MAX_KEYS_PER_QUERY)
-        .forEachRemaining(batch -> batchGetResults.putAll(_aspectDao.batchGet(ImmutableSet.copyOf(batch))));
+        .forEachRemaining(
+            batch -> batchGetResults.putAll(_aspectDao.batchGet(ImmutableSet.copyOf(batch))));
     return batchGetResults;
   }
 
@@ -1743,14 +2178,16 @@ private Map<EntityAspectIdentifier, EntityAspect> getLatestAspect(@Nonnull final
    * To do this, we want to fetch the maximum version and subtract the negative version from that. Since -1 represents
    * the maximum version, we need to add 1 to the final result.
    */
-  private long calculateVersionNumber(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) {
+  private long calculateVersionNumber(
+      @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) {
    if (version < 0) {
      return _aspectDao.getMaxVersion(urn.toString(), aspectName) + version + 1;
    }
    return version;
  }
 
-  private Map<EntityAspectIdentifier, EnvelopedAspect> getEnvelopedAspects(final Set<EntityAspectIdentifier> dbKeys) {
+  private Map<EntityAspectIdentifier, EnvelopedAspect> getEnvelopedAspects(
+      final Set<EntityAspectIdentifier> dbKeys) {
     final Map<EntityAspectIdentifier, EnvelopedAspect> result = new HashMap<>();
     final Map<EntityAspectIdentifier, EntityAspect> dbEntries = _aspectDao.batchGet(dbKeys);
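The javadoc above spells out the negative-version convention that calculateVersionNumber preserves; the arithmetic is easy to get wrong, so here is a minimal, self-contained sketch of the same rule. The DAO lookup is stubbed with a plain maxStoredVersion value, an assumption purely for illustration:

    // Sketch of the negative-version resolution rule described in the javadoc above.
    // maxStoredVersion stands in for _aspectDao.getMaxVersion(urn, aspectName).
    public final class VersionMath {
      private VersionMath() {}

      static long resolve(long requested, long maxStoredVersion) {
        // -1 means "latest stored version", -2 the one before it, and so on,
        // hence the "+ requested + 1" when the requested version is negative.
        return requested < 0 ? maxStoredVersion + requested + 1 : requested;
      }

      public static void main(String[] args) {
        long max = 7;
        System.out.println(resolve(-1, max)); // 7 (the latest)
        System.out.println(resolve(-2, max)); // 6
        System.out.println(resolve(3, max)); // 3 (non-negative versions pass through)
      }
    }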
@@ -1764,29 +2201,36 @@ private Map<EntityAspectIdentifier, EnvelopedAspect> getEnvelopedAspects(final S
       }
 
       // Aspect found. Now turn it into an EnvelopedAspect
-      final com.linkedin.entity.Aspect aspect = RecordUtils.toRecordTemplate(com.linkedin.entity.Aspect.class, currAspectEntry
-          .getMetadata());
+      final com.linkedin.entity.Aspect aspect =
+          RecordUtils.toRecordTemplate(
+              com.linkedin.entity.Aspect.class, currAspectEntry.getMetadata());
 
       final EnvelopedAspect envelopedAspect = new EnvelopedAspect();
       envelopedAspect.setName(currAspectEntry.getAspect());
       envelopedAspect.setVersion(currAspectEntry.getVersion());
-      // TODO: I think we can assume this here, adding as it's a required field so object mapping barfs when trying to access it,
+      // TODO: I think we can assume this here, adding as it's a required field so object mapping
+      // barfs when trying to access it,
       // since nowhere else is using it should be safe for now at least
       envelopedAspect.setType(AspectType.VERSIONED);
       envelopedAspect.setValue(aspect);
 
       try {
         if (currAspectEntry.getSystemMetadata() != null) {
-          final SystemMetadata systemMetadata = RecordUtils.toRecordTemplate(SystemMetadata.class, currAspectEntry.getSystemMetadata());
+          final SystemMetadata systemMetadata =
+              RecordUtils.toRecordTemplate(
+                  SystemMetadata.class, currAspectEntry.getSystemMetadata());
           envelopedAspect.setSystemMetadata(systemMetadata);
         }
       } catch (Exception e) {
-        log.warn("Exception encountered when setting system metadata on enveloped aspect {}. Error: {}", envelopedAspect.getName(), e);
+        log.warn(
+            "Exception encountered when setting system metadata on enveloped aspect {}. Error: {}",
+            envelopedAspect.getName(),
+            e);
       }
 
-      envelopedAspect.setCreated(new AuditStamp()
-          .setActor(UrnUtils.getUrn(currAspectEntry.getCreatedBy()))
-          .setTime(currAspectEntry.getCreatedOn().getTime())
-      );
+      envelopedAspect.setCreated(
+          new AuditStamp()
+              .setActor(UrnUtils.getUrn(currAspectEntry.getCreatedBy()))
+              .setTime(currAspectEntry.getCreatedOn().getTime()));
       result.put(currKey, envelopedAspect);
     }
     return result;
@@ -1802,40 +2246,50 @@ private EnvelopedAspect getKeyEnvelopedAspect(final Urn urn) {
     envelopedAspect.setName(keySpec.getName());
     envelopedAspect.setVersion(ASPECT_LATEST_VERSION);
     envelopedAspect.setValue(aspect);
-    // TODO: I think we can assume this here, adding as it's a required field so object mapping barfs when trying to access it,
+    // TODO: I think we can assume this here, adding as it's a required field so object mapping
+    // barfs when trying to access it,
     // since nowhere else is using it should be safe for now at least
     envelopedAspect.setType(AspectType.VERSIONED);
     envelopedAspect.setCreated(
-        new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()));
+        new AuditStamp()
+            .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis()));
 
     return envelopedAspect;
   }
 
   @Nonnull
   private UpdateAspectResult ingestAspectToLocalDB(
-      @Nullable Transaction tx,
-      @Nonnull final Urn urn,
-      @Nonnull final String aspectName,
-      @Nonnull final RecordTemplate newValue,
-      @Nonnull final AuditStamp auditStamp,
-      @Nonnull final SystemMetadata providedSystemMetadata,
-      @Nullable final EntityAspect latest,
-      @Nonnull final Long nextVersion) {
-
-    // Set the "last run id" to be the run id provided with the new system metadata. This will be stored in index
+      @Nullable Transaction tx,
+      @Nonnull final Urn urn,
+      @Nonnull final String aspectName,
+      @Nonnull final RecordTemplate newValue,
+      @Nonnull final AuditStamp auditStamp,
+      @Nonnull final SystemMetadata providedSystemMetadata,
+      @Nullable final EntityAspect latest,
+      @Nonnull final Long nextVersion) {
+
+    // Set the "last run id" to be the run id provided with the new system metadata. This will be
+    // stored in index
     // for all aspects that have a run id, regardless of whether they change.
-    providedSystemMetadata.setLastRunId(providedSystemMetadata.getRunId(GetMode.NULL), SetMode.IGNORE_NULL);
+    providedSystemMetadata.setLastRunId(
+        providedSystemMetadata.getRunId(GetMode.NULL), SetMode.IGNORE_NULL);
 
     // 2. Compare the latest existing and new.
     final RecordTemplate oldValue =
-        latest == null ? null : EntityUtils.toAspectRecord(urn, aspectName, latest.getMetadata(), getEntityRegistry());
+        latest == null
+            ? null
+            : EntityUtils.toAspectRecord(
+                urn, aspectName, latest.getMetadata(), getEntityRegistry());
 
     // 3. If there is no difference between existing and new, we just update
     // the lastObserved in system metadata. RunId should stay as the original runId
     if (oldValue != null && DataTemplateUtil.areEqual(oldValue, newValue)) {
-      SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata());
+      SystemMetadata latestSystemMetadata =
+          EntityUtils.parseSystemMetadata(latest.getSystemMetadata());
       latestSystemMetadata.setLastObserved(providedSystemMetadata.getLastObserved());
-      latestSystemMetadata.setLastRunId(providedSystemMetadata.getLastRunId(GetMode.NULL), SetMode.IGNORE_NULL);
+      latestSystemMetadata.setLastRunId(
+          providedSystemMetadata.getLastRunId(GetMode.NULL), SetMode.IGNORE_NULL);
 
       latest.setSystemMetadata(RecordUtils.toJsonString(latestSystemMetadata));
@@ -1843,55 +2297,70 @@ private UpdateAspectResult ingestAspectToLocalDB(
       _aspectDao.saveAspect(tx, latest, false);
       // metrics
-      _aspectDao.incrementWriteMetrics(aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length);
+      _aspectDao.incrementWriteMetrics(
+          aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length);
 
       return UpdateAspectResult.builder()
-          .urn(urn)
-          .oldValue(oldValue)
-          .newValue(oldValue)
-          .oldSystemMetadata(EntityUtils.parseSystemMetadata(latest.getSystemMetadata()))
-          .newSystemMetadata(latestSystemMetadata)
-          .operation(MetadataAuditOperation.UPDATE)
-          .auditStamp(auditStamp)
-          .maxVersion(0)
-          .build();
+          .urn(urn)
+          .oldValue(oldValue)
+          .newValue(oldValue)
+          .oldSystemMetadata(EntityUtils.parseSystemMetadata(latest.getSystemMetadata()))
+          .newSystemMetadata(latestSystemMetadata)
+          .operation(MetadataAuditOperation.UPDATE)
+          .auditStamp(auditStamp)
+          .maxVersion(0)
+          .build();
     }
 
     // 4. Save the newValue as the latest version
     log.debug("Ingesting aspect with name {}, urn {}", aspectName, urn);
     String newValueStr = EntityUtils.toJsonAspect(newValue);
-    long versionOfOld = _aspectDao.saveLatestAspect(tx, urn.toString(), aspectName, latest == null ? null : EntityUtils.toJsonAspect(oldValue),
-        latest == null ? null : latest.getCreatedBy(), latest == null ? null : latest.getCreatedFor(),
-        latest == null ? null : latest.getCreatedOn(), latest == null ? null : latest.getSystemMetadata(),
-        newValueStr, auditStamp.getActor().toString(),
-        auditStamp.hasImpersonator() ? auditStamp.getImpersonator().toString() : null,
-        new Timestamp(auditStamp.getTime()), EntityUtils.toJsonAspect(providedSystemMetadata), nextVersion);
+    long versionOfOld =
+        _aspectDao.saveLatestAspect(
+            tx,
+            urn.toString(),
+            aspectName,
+            latest == null ? null : EntityUtils.toJsonAspect(oldValue),
+            latest == null ? null : latest.getCreatedBy(),
+            latest == null ? null : latest.getCreatedFor(),
+            latest == null ? null : latest.getCreatedOn(),
+            latest == null ? null : latest.getSystemMetadata(),
+            newValueStr,
+            auditStamp.getActor().toString(),
+            auditStamp.hasImpersonator() ? auditStamp.getImpersonator().toString() : null,
+            new Timestamp(auditStamp.getTime()),
+            EntityUtils.toJsonAspect(providedSystemMetadata),
+            nextVersion);
 
     // metrics
-    _aspectDao.incrementWriteMetrics(aspectName, 1, newValueStr.getBytes(StandardCharsets.UTF_8).length);
+    _aspectDao.incrementWriteMetrics(
+        aspectName, 1, newValueStr.getBytes(StandardCharsets.UTF_8).length);
 
     return UpdateAspectResult.builder()
-        .urn(urn)
-        .oldValue(oldValue)
-        .newValue(newValue)
-        .oldSystemMetadata(latest == null ? null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata()))
-        .newSystemMetadata(providedSystemMetadata)
-        .operation(MetadataAuditOperation.UPDATE)
-        .auditStamp(auditStamp)
-        .maxVersion(versionOfOld)
-        .build();
+        .urn(urn)
+        .oldValue(oldValue)
+        .newValue(newValue)
+        .oldSystemMetadata(
+            latest == null ? null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata()))
+        .newSystemMetadata(providedSystemMetadata)
+        .operation(MetadataAuditOperation.UPDATE)
+        .auditStamp(auditStamp)
+        .maxVersion(versionOfOld)
+        .build();
   }
 
   /**
    * Builds the default browse path aspects for a subset of well-supported entities.
    *
-   * This method currently supports datasets, charts, dashboards, data flows, data jobs, and glossary terms.
+   * <p>This method currently supports datasets, charts, dashboards, data flows, data jobs, and
+   * glossary terms.
    */
   @Nonnull
   @Override
   public BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISyntaxException {
     Character dataPlatformDelimiter = getDataPlatformDelimiter(urn);
-    String defaultBrowsePath = getDefaultBrowsePath(urn, this.getEntityRegistry(), dataPlatformDelimiter);
+    String defaultBrowsePath =
+        getDefaultBrowsePath(urn, this.getEntityRegistry(), dataPlatformDelimiter);
     StringArray browsePaths = new StringArray();
     browsePaths.add(defaultBrowsePath);
     BrowsePaths browsePathAspect = new BrowsePaths();
@@ -1902,19 +2371,19 @@ public BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISynt
   /**
    * Builds the default browse path V2 aspects for all entities.
    *
-   * This method currently supports datasets, charts, dashboards, and data jobs best. Everything else
-   * will have a basic "Default" folder added to their browsePathV2.
+   * <p>This method currently supports datasets, charts, dashboards, and data jobs best. Everything
+   * else will have a basic "Default" folder added to their browsePathV2.
    */
   @Nonnull
   @Override
-  public BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException {
+  public BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths)
+      throws URISyntaxException {
     Character dataPlatformDelimiter = getDataPlatformDelimiter(urn);
-    return BrowsePathV2Utils.getDefaultBrowsePathV2(urn, this.getEntityRegistry(), dataPlatformDelimiter, this, useContainerPaths);
+    return BrowsePathV2Utils.getDefaultBrowsePathV2(
+        urn, this.getEntityRegistry(), dataPlatformDelimiter, this, useContainerPaths);
   }
 
-  /**
-   * Returns a delimiter on which the name of an asset may be split.
-   */
+  /** Returns a delimiter on which the name of an asset may be split. */
   private Character getDataPlatformDelimiter(Urn urn) {
     // Attempt to construct the appropriate Data Platform URN
     Urn dataPlatformUrn = buildDataPlatformUrn(urn, this.getEntityRegistry());
@@ -1932,15 +2401,20 @@ private Character getDataPlatformDelimiter(Urn urn) {
   @Nullable
   private DataPlatformInfo getDataPlatformInfo(Urn urn) {
     try {
-      final EntityResponse entityResponse = getEntityV2(
-          Constants.DATA_PLATFORM_ENTITY_NAME,
-          urn,
-          ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)
-      );
-      if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects()
-          .containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) {
+      final EntityResponse entityResponse =
+          getEntityV2(
+              Constants.DATA_PLATFORM_ENTITY_NAME,
+              urn,
+              ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME));
+      if (entityResponse != null
+          && entityResponse.hasAspects()
+          && entityResponse.getAspects().containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) {
         return new DataPlatformInfo(
-            entityResponse.getAspects().get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data());
+            entityResponse
+                .getAspects()
+                .get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)
+                .getValue()
+                .data());
       }
     } catch (Exception e) {
       log.warn(String.format("Failed to find Data Platform Info for urn %s", urn));
@@ -1949,7 +2423,8 @@ private DataPlatformInfo getDataPlatformInfo(Urn urn) {
   }
 
   private static boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspectSpec) {
-    final List<RelationshipFieldSpec> relationshipFieldSpecs = aspectSpec.getRelationshipFieldSpecs();
+    final List<RelationshipFieldSpec> relationshipFieldSpecs =
+        aspectSpec.getRelationshipFieldSpecs();
     return relationshipFieldSpecs.stream().anyMatch(RelationshipFieldSpec::isLineageRelationship);
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
index ffd63479589bc..c2a0a211f9e76 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.entity;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
+
 import com.datahub.util.RecordUtils;
 import com.google.common.base.Preconditions;
 import com.linkedin.common.AuditStamp;
@@ -18,24 +21,17 @@
 import com.linkedin.metadata.utils.PegasusUtils;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.mxe.SystemMetadata;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
-import lombok.extern.slf4j.Slf4j;
-
 import java.net.URISyntaxException;
 import java.net.URLEncoder;
 import java.util.List;
-
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
-
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class EntityUtils {
 
-  private EntityUtils() {
-  }
+  private EntityUtils() {}
 
   public static final int URN_NUM_BYTES_LIMIT = 512;
   public static final String URN_DELIMITER_SEPARATOR = "␟";
@@ -63,17 +59,19 @@ public static AuditStamp getAuditStamp(Urn actor) {
   }
 
   public static void ingestChangeProposals(
-      @Nonnull List<MetadataChangeProposal> changes,
-      @Nonnull EntityService entityService,
-      @Nonnull Urn actor,
-      @Nonnull Boolean async
-  ) {
-    entityService.ingestProposal(AspectsBatchImpl.builder()
-        .mcps(changes, entityService.getEntityRegistry()).build(), getAuditStamp(actor), async);
+      @Nonnull List<MetadataChangeProposal> changes,
+      @Nonnull EntityService entityService,
+      @Nonnull Urn actor,
+      @Nonnull Boolean async) {
+    entityService.ingestProposal(
+        AspectsBatchImpl.builder().mcps(changes, entityService.getEntityRegistry()).build(),
+        getAuditStamp(actor),
+        async);
   }
 
   /**
    * Get aspect from entity
+   *
    * @param entityUrn URN of the entity
    * @param aspectName aspect name string
    * @param entityService EntityService obj
@@ -82,11 +80,10 @@ public static void ingestChangeProposals(
    */
   @Nullable
   public static RecordTemplate getAspectFromEntity(
-      String entityUrn,
-      String aspectName,
-      EntityService entityService,
-      RecordTemplate defaultValue
-  ) {
+      String entityUrn,
+      String aspectName,
+      EntityService entityService,
+      RecordTemplate defaultValue) {
     Urn urn = getUrnFromString(entityUrn);
     if (urn == null) {
       return defaultValue;
@@ -99,11 +96,10 @@ public static RecordTemplate getAspectFromEntity(
       return aspect;
     } catch (Exception e) {
       log.error(
-          "Error constructing aspect from entity. Entity: {} aspect: {}. Error: {}",
-          entityUrn,
-          aspectName,
-          e.toString()
-      );
+          "Error constructing aspect from entity. Entity: {} aspect: {}. Error: {}",
+          entityUrn,
+          aspectName,
+          e.toString());
       return null;
     }
   }
@@ -114,7 +110,8 @@ public static RecordTemplate toAspectRecord(
       @Nonnull final String aspectName,
       @Nonnull final String jsonAspect,
       @Nonnull final EntityRegistry entityRegistry) {
-    return toAspectRecord(PegasusUtils.urnToEntityName(entityUrn), aspectName, jsonAspect, entityRegistry);
+    return toAspectRecord(
+        PegasusUtils.urnToEntityName(entityUrn), aspectName, jsonAspect, entityRegistry);
   }
 
   /**
@@ -131,13 +128,17 @@ public static RecordTemplate toAspectRecord(
       @Nonnull final EntityRegistry entityRegistry) {
     final EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName);
     final AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName);
-    //TODO: aspectSpec can be null here
-    Preconditions.checkState(aspectSpec != null, String.format("Aspect %s could not be found", aspectName));
+    // TODO: aspectSpec can be null here
+    Preconditions.checkState(
+        aspectSpec != null, String.format("Aspect %s could not be found", aspectName));
     final RecordDataSchema aspectSchema = aspectSpec.getPegasusSchema();
-    RecordTemplate aspectRecord = RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), jsonAspect);
-    RecordTemplateValidator.validate(aspectRecord, validationFailure -> {
-      log.warn(String.format("Failed to validate record %s against its schema.", aspectRecord));
-    });
+    RecordTemplate aspectRecord =
+        RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), jsonAspect);
+    RecordTemplateValidator.validate(
+        aspectRecord,
+        validationFailure -> {
+          log.warn(String.format("Failed to validate record %s against its schema.", aspectRecord));
+        });
     return aspectRecord;
   }
 
@@ -151,16 +152,14 @@ public static SystemMetadata parseSystemMetadata(String jsonSystemMetadata) {
     return RecordUtils.toRecordTemplate(SystemMetadata.class, jsonSystemMetadata);
   }
 
-  /**
-   * Check if entity is removed (removed=true in Status aspect) and exists
-   */
+  /** Check if entity is removed (removed=true in Status aspect) and exists */
   public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) {
     try {
-
+
       if (!entityService.exists(entityUrn)) {
         return false;
       }
-
+
       EnvelopedAspect statusAspect =
           entityService.getLatestEnvelopedAspect(entityUrn.getEntityType(), entityUrn, "status");
       if (statusAspect == null) {
@@ -174,7 +173,8 @@ public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn)
     }
   }
 
-  public static RecordTemplate buildKeyAspect(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) {
+  public static RecordTemplate buildKeyAspect(
+      @Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) {
     final EntitySpec spec = entityRegistry.getEntitySpec(urnToEntityName(urn));
     final AspectSpec keySpec = spec.getKeyAspectSpec();
     return EntityKeyUtils.convertUrnToEntityKey(urn, keySpec);
@@ -183,18 +183,27 @@ public static RecordTemplate buildKeyAspect(@Nonnull EntityRegistry entityRegist
   public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) {
     EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry);
     validator.setCurrentEntitySpec(entityRegistry.getEntitySpec(urn.getEntityType()));
-    RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), validationResult -> {
-      throw new IllegalArgumentException("Invalid urn: " + urn + "\n Cause: "
-          + validationResult.getMessages()); }, validator);
+    RecordTemplateValidator.validate(
+        EntityUtils.buildKeyAspect(entityRegistry, urn),
+        validationResult -> {
+          throw new IllegalArgumentException(
+              "Invalid urn: " + urn + "\n Cause: " + validationResult.getMessages());
+        },
+        validator);
 
     if (urn.toString().trim().length() != urn.toString().length()) {
-      throw new IllegalArgumentException("Error: cannot provide an URN with leading or trailing whitespace");
+      throw new IllegalArgumentException(
+          "Error: cannot provide an URN with leading or trailing whitespace");
     }
     if (URLEncoder.encode(urn.toString()).length() > URN_NUM_BYTES_LIMIT) {
-      throw new IllegalArgumentException("Error: cannot provide an URN longer than " + Integer.toString(URN_NUM_BYTES_LIMIT) + " bytes (when URL encoded)");
+      throw new IllegalArgumentException(
+          "Error: cannot provide an URN longer than "
+              + Integer.toString(URN_NUM_BYTES_LIMIT)
+              + " bytes (when URL encoded)");
     }
     if (urn.toString().contains(URN_DELIMITER_SEPARATOR)) {
-      throw new IllegalArgumentException("Error: URN cannot contain " + URN_DELIMITER_SEPARATOR + " character");
+      throw new IllegalArgumentException(
+          "Error: URN cannot contain " + URN_DELIMITER_SEPARATOR + " character");
     }
     try {
       Urn.createFromString(urn.toString());
@@ -202,5 +211,4 @@ public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull
       throw new IllegalArgumentException(e);
     }
   }
-
 }
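validateUrn above enforces three syntactic rules: no leading or trailing whitespace, at most 512 bytes when URL encoded (URN_NUM_BYTES_LIMIT), and no occurrence of the reserved "␟" delimiter (URN_DELIMITER_SEPARATOR). A condensed standalone sketch of just those checks; it uses the charset overload of URLEncoder rather than the deprecated one the diff shows, which is the only intentional difference:

    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public final class UrnChecks {
      private UrnChecks() {}

      static void check(String urn) {
        // Rule 1: reject leading/trailing whitespace.
        if (!urn.trim().equals(urn)) {
          throw new IllegalArgumentException("leading or trailing whitespace");
        }
        // Rule 2: reject URNs longer than 512 bytes once URL encoded.
        if (URLEncoder.encode(urn, StandardCharsets.UTF_8).length() > 512) {
          throw new IllegalArgumentException("longer than 512 bytes when URL encoded");
        }
        // Rule 3: reject the reserved unit-separator symbol (U+241F, "␟").
        if (urn.contains("\u241F")) {
          throw new IllegalArgumentException("contains the reserved \u241F delimiter");
        }
      }

      public static void main(String[] args) {
        check("urn:li:corpuser:datahub"); // passes all three rules
      }
    }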
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java
index 81eb5d4eb947c..c0ee01abe0a84 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java
@@ -20,12 +20,10 @@
 import org.reflections.Reflections;
 import org.reflections.scanners.Scanner;
 
-
 public class NewModelUtils {
   private static final ClassLoader CLASS_LOADER = DummySnapshot.class.getClassLoader();
 
-  private NewModelUtils() {
-  }
+  private NewModelUtils() {}
 
   public static <T extends DataTemplate> String getAspectName(@Nonnull Class<T> aspectClass) {
     return aspectClass.getCanonicalName();
@@ -36,9 +34,9 @@ public static Class<? extends RecordTemplate> getAspectClass(@Nonnull String asp
     return getClassFromName(aspectName, RecordTemplate.class);
   }
 
   @Nonnull
-  public static <T> Class<? extends T> getClassFromName(@Nonnull String className, @Nonnull Class<T> parentClass) {
+  public static <T> Class<? extends T> getClassFromName(
+      @Nonnull String className, @Nonnull Class<T> parentClass) {
     try {
       return CLASS_LOADER.loadClass(className).asSubclass(parentClass);
     } catch (ClassNotFoundException var3) {
@@ -47,8 +45,8 @@ public static <T> Class<? extends T> getClassFromName(@Nonnull String className,
   }
 
   @Nonnull
-  public static <SNAPSHOT extends RecordTemplate> List<Pair<String, RecordTemplate>> getAspectsFromSnapshot(
-      @Nonnull SNAPSHOT snapshot) {
+  public static <SNAPSHOT extends RecordTemplate>
+      List<Pair<String, RecordTemplate>> getAspectsFromSnapshot(@Nonnull SNAPSHOT snapshot) {
     SnapshotValidator.validateSnapshotSchema(snapshot.getClass());
     return getAspects(snapshot);
   }
@@ -57,28 +55,34 @@ public static <SNAPSHOT extends RecordTemplate> List<Pair<String, RecordTemplate
   private static List<Pair<String, RecordTemplate>> getAspects(@Nonnull RecordTemplate snapshot) {
     Class<? extends WrappingArrayTemplate> clazz = getAspectsArrayClass(snapshot.getClass());
     WrappingArrayTemplate aspectArray =
-        (WrappingArrayTemplate) RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz);
+        (WrappingArrayTemplate)
+            RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz);
 
     List<Pair<String, RecordTemplate>> aspects = new ArrayList();
-    aspectArray.forEach((item) -> {
-      try {
-        RecordTemplate aspect = RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item);
-        String name = PegasusUtils.getAspectNameFromSchema(aspect.schema());
-        aspects.add(Pair.of(name, aspect));
-      } catch (InvalidSchemaException e) {
-        // ignore fields that are not part of the union
-      } catch (TemplateOutputCastException e) {
-        // ignore fields that are not part of the union
-      }
-    });
+    aspectArray.forEach(
+        (item) -> {
+          try {
+            RecordTemplate aspect =
+                RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item);
+            String name = PegasusUtils.getAspectNameFromSchema(aspect.schema());
+            aspects.add(Pair.of(name, aspect));
+          } catch (InvalidSchemaException e) {
+            // ignore fields that are not part of the union
+          } catch (TemplateOutputCastException e) {
+            // ignore fields that are not part of the union
+          }
+        });
     return aspects;
   }
 
   @Nonnull
-  private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTemplate> getAspectsArrayClass(
-      @Nonnull Class<SNAPSHOT> snapshotClass) {
+  private static <SNAPSHOT extends RecordTemplate>
+      Class<? extends WrappingArrayTemplate> getAspectsArrayClass(
+          @Nonnull Class<SNAPSHOT> snapshotClass) {
     try {
-      return snapshotClass.getMethod("getAspects").getReturnType().asSubclass(WrappingArrayTemplate.class);
+      return snapshotClass
+          .getMethod("getAspects")
+          .getReturnType()
+          .asSubclass(WrappingArrayTemplate.class);
     } catch (ClassCastException | NoSuchMethodException var2) {
       throw new RuntimeException(var2);
    }
@@ -86,10 +90,10 @@ private static <SNAPSHOT extends RecordTemplate> Class<? extends WrappingArrayTe
 
   @Nonnull
   public static Set<Class<? extends RecordTemplate>> getAllEntities() {
-    return (Set) (new Reflections("com.linkedin.metadata.entity", new Scanner[0])).getSubTypesOf(RecordTemplate.class)
-        .stream()
-        .filter(EntityValidator::isValidEntitySchema)
-        .collect(Collectors.toSet());
+    return (Set)
+        (new Reflections("com.linkedin.metadata.entity", new Scanner[0]))
+            .getSubTypesOf(RecordTemplate.class).stream()
+                .filter(EntityValidator::isValidEntitySchema)
+                .collect(Collectors.toSet());
   }
-
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java
index 7804aa2067088..43df42713cc4d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java
@@ -2,23 +2,24 @@
 
 import com.datastax.oss.driver.api.core.CqlSession;
 import com.datastax.oss.driver.api.core.cql.ResultSet;
-
 import javax.annotation.Nonnull;
 
 public class AspectStorageValidationUtil {
 
-  private AspectStorageValidationUtil() {
-  }
+  private AspectStorageValidationUtil() {}
 
   /**
    * Check if entity aspect table exists in the database.
+   *
    * @param session
    * @return {@code true} if table exists.
    */
   public static boolean checkTableExists(@Nonnull CqlSession session) {
-    String query = String.format("SELECT table_name \n "
-            + "FROM system_schema.tables where table_name = '%s' allow filtering;",
-        CassandraAspect.TABLE_NAME);
+    String query =
+        String.format(
+            "SELECT table_name \n "
+                + "FROM system_schema.tables where table_name = '%s' allow filtering;",
+            CassandraAspect.TABLE_NAME);
     ResultSet rs = session.execute(query);
     return rs.all().size() > 0;
   }
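For reference, checkTableExists above probes Cassandra's system_schema.tables catalog rather than the data itself. A small sketch of what that String.format renders to; the "metadata_aspect_v2" table name is an assumption for illustration (the real value comes from CassandraAspect.TABLE_NAME):

    public final class TableExistsQuery {
      private TableExistsQuery() {}

      public static void main(String[] args) {
        String tableName = "metadata_aspect_v2"; // assumed stand-in for CassandraAspect.TABLE_NAME
        // Renders the same CQL text the method above builds and executes.
        String query =
            String.format(
                "SELECT table_name \n "
                    + "FROM system_schema.tables where table_name = '%s' allow filtering;",
                tableName);
        System.out.println(query);
      }
    }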
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java
index 891a47130fe25..d68386291acb3 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java
@@ -1,23 +1,22 @@
 package com.linkedin.metadata.entity.cassandra;
 
 import com.datastax.oss.driver.api.core.cql.Row;
-import com.linkedin.metadata.entity.EntityAspectIdentifier;
 import com.linkedin.metadata.entity.EntityAspect;
+import com.linkedin.metadata.entity.EntityAspectIdentifier;
+import java.sql.Timestamp;
+import javax.annotation.Nonnull;
 import lombok.AllArgsConstructor;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
 
-import javax.annotation.Nonnull;
-import java.sql.Timestamp;
-
 /**
- * This class represents entity aspect records stored in Cassandra database.
- * It's also aware of {@link EntityAspect} which is a shared in-memory representation of an aspect record and knows
- * how to translate itself to it.
+ * This class represents entity aspect records stored in Cassandra database. It's also aware of
+ * {@link EntityAspect} which is a shared in-memory representation of an aspect record and knows how
+ * to translate itself to it.
  *
- * TODO: Consider using datastax java driver `@Entity`
- * (see: https://docs.datastax.com/en/developer/java-driver/4.13/manual/mapper/entities/)
+ * <p>TODO: Consider using datastax java driver `@Entity` (see:
+ * https://docs.datastax.com/en/developer/java-driver/4.13/manual/mapper/entities/)
 */
 @Getter
 @Setter
@@ -61,7 +60,9 @@ public static EntityAspect rowToEntityAspect(@Nonnull Row row) {
         row.getLong(CassandraAspect.VERSION_COLUMN),
         row.getString(CassandraAspect.METADATA_COLUMN),
         row.getString(CassandraAspect.SYSTEM_METADATA_COLUMN),
-        row.getInstant(CassandraAspect.CREATED_ON_COLUMN) == null ? null : Timestamp.from(row.getInstant(CassandraAspect.CREATED_ON_COLUMN)),
+        row.getInstant(CassandraAspect.CREATED_ON_COLUMN) == null
+            ? null
+            : Timestamp.from(row.getInstant(CassandraAspect.CREATED_ON_COLUMN)),
         row.getString(CassandraAspect.CREATED_BY_COLUMN),
         row.getString(CassandraAspect.CREATED_FOR_COLUMN));
   }
@@ -73,5 +74,4 @@ public static EntityAspectIdentifier rowToAspectIdentifier(@Nonnull Row row) {
         row.getString(CassandraAspect.ASPECT_COLUMN),
         row.getLong(CassandraAspect.VERSION_COLUMN));
   }
-
 }
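The null guard that rowToEntityAspect wraps above exists because the Cassandra driver returns a nullable java.time.Instant for timestamp columns, while EntityAspect stores java.sql.Timestamp. The same conversion, extracted into a minimal helper sketch:

    import java.sql.Timestamp;
    import java.time.Instant;

    public final class Timestamps {
      private Timestamps() {}

      // Null-safe Instant -> Timestamp conversion, as in rowToEntityAspect above.
      static Timestamp toTimestampOrNull(Instant instant) {
        return instant == null ? null : Timestamp.from(instant);
      }

      public static void main(String[] args) {
        System.out.println(toTimestampOrNull(null)); // null
        System.out.println(toTimestampOrNull(Instant.EPOCH)); // 1970-01-01 ...
      }
    }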
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java
index 9f4a36efb4501..3293bc6178e43 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.entity.cassandra;
 
+import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.exception.ModelConversionException;
 import com.datahub.util.exception.RetryLimitReached;
 import com.datastax.oss.driver.api.core.CqlSession;
@@ -31,6 +34,8 @@
 import com.linkedin.metadata.query.ExtraInfo;
 import com.linkedin.metadata.query.ExtraInfoArray;
 import com.linkedin.metadata.query.ListResultMetadata;
+import io.ebean.PagedList;
+import io.ebean.Transaction;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
@@ -44,14 +49,8 @@
 import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-
-import io.ebean.PagedList;
-import io.ebean.Transaction;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*;
-import static com.linkedin.metadata.Constants.*;
-
 @Slf4j
 public class CassandraAspectDao implements AspectDao, AspectMigrationsDao {
 
@@ -88,16 +87,22 @@ public EntityAspect getLatestAspect(@Nonnull String urn, @Nonnull String aspectN
   }
 
   @Override
-  public Map<String, Map<String, EntityAspect>> getLatestAspects(Map<String, Set<String>> urnAspects) {
+  public Map<String, Map<String, EntityAspect>> getLatestAspects(
+      Map<String, Set<String>> urnAspects) {
     return urnAspects.entrySet().stream()
-        .map(entry -> Map.entry(entry.getKey(), entry.getValue().stream()
-            .map(aspectName -> {
-              EntityAspect aspect = getLatestAspect(entry.getKey(), aspectName);
-              return aspect != null ? Map.entry(aspectName, aspect) : null;
-            })
-            .filter(Objects::nonNull)
-            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))))
-        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+        .map(
+            entry ->
+                Map.entry(
+                    entry.getKey(),
+                    entry.getValue().stream()
+                        .map(
+                            aspectName -> {
+                              EntityAspect aspect = getLatestAspect(entry.getKey(), aspectName);
+                              return aspect != null ? Map.entry(aspectName, aspect) : null;
+                            })
+                        .filter(Objects::nonNull)
+                        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
   }
 
   @Override
@@ -110,48 +115,63 @@ public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspec
 
   @Override
   public long countEntities() {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .distinct()
-        .column(CassandraAspect.URN_COLUMN)
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .distinct()
+            .column(CassandraAspect.URN_COLUMN)
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
     // TODO: make sure it doesn't blow up on a large database
-    //  Getting a count of distinct values in a Cassandra query doesn't seem to be feasible, but counting them in the app is dangerous
-    //  The saving grace here is that the only place where this method is used should only run once, what the database is still young
+    //  Getting a count of distinct values in a Cassandra query doesn't seem to be feasible, but
+    // counting them in the app is dangerous
+    //  The saving grace here is that the only place where this method is used should only run once,
+    // what the database is still young
     return rs.all().size();
   }
 
   @Override
   public boolean checkIfAspectExists(@Nonnull String aspectName) {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .column(CassandraAspect.URN_COLUMN)
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .limit(1)
-        .allowFiltering()
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .column(CassandraAspect.URN_COLUMN)
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .limit(1)
+            .allowFiltering()
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
     return rs.one() != null;
   }
 
-  private Map<String, Long> getMaxVersions(@Nonnull final String urn, @Nonnull final Set<String> aspectNames) {
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .selectors(
-            Selector.column(CassandraAspect.URN_COLUMN),
-            Selector.column(CassandraAspect.ASPECT_COLUMN),
-            Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)).as(CassandraAspect.VERSION_COLUMN))
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn))
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).in(aspectNamesToLiterals(aspectNames))
-        .groupBy(ImmutableList.of(Selector.column(CassandraAspect.URN_COLUMN), Selector.column(CassandraAspect.ASPECT_COLUMN)))
-        .build();
+  private Map<String, Long> getMaxVersions(
+      @Nonnull final String urn, @Nonnull final Set<String> aspectNames) {
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .selectors(
+                Selector.column(CassandraAspect.URN_COLUMN),
+                Selector.column(CassandraAspect.ASPECT_COLUMN),
+                Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN))
+                    .as(CassandraAspect.VERSION_COLUMN))
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(urn))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .in(aspectNamesToLiterals(aspectNames))
+            .groupBy(
+                ImmutableList.of(
+                    Selector.column(CassandraAspect.URN_COLUMN),
+                    Selector.column(CassandraAspect.ASPECT_COLUMN)))
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
-    Map<String, Long> aspectVersions = rs.all().stream()
-        .collect(Collectors.toMap(
-            row -> row.getString(CassandraAspect.ASPECT_COLUMN),
-            row -> row.getLong(CassandraAspect.VERSION_COLUMN)));
+    Map<String, Long> aspectVersions =
+        rs.all().stream()
+            .collect(
+                Collectors.toMap(
+                    row -> row.getString(CassandraAspect.ASPECT_COLUMN),
+                    row -> row.getLong(CassandraAspect.VERSION_COLUMN)));
 
     // For each requested aspect that didn't come back from DB, add a version -1
     for (String aspect : aspectNames) {
@@ -164,7 +184,8 @@ private Map<String, Long> getMaxVersions(@Nonnull final String urn, @Nonnull fin
   }
 
   @Override
-  public void saveAspect(@Nullable Transaction tx, @Nonnull EntityAspect aspect, final boolean insert) {
+  public void saveAspect(
+      @Nullable Transaction tx, @Nonnull EntityAspect aspect, final boolean insert) {
     validateConnection();
     SimpleStatement statement = generateSaveStatement(aspect, insert);
     _cqlSession.execute(statement);
@@ -174,7 +195,8 @@ public void saveAspect(@Nullable Transaction tx, @Nonnull EntityAspect aspect, f
   // TODO: look into supporting pagination
   @Override
   @Nonnull
-  public Map<EntityAspectIdentifier, EntityAspect> batchGet(@Nonnull final Set<EntityAspectIdentifier> keys) {
+  public Map<EntityAspectIdentifier, EntityAspect> batchGet(
+      @Nonnull final Set<EntityAspectIdentifier> keys) {
     validateConnection();
     return keys.stream()
         .map(this::getAspect)
@@ -210,13 +232,17 @@ public ListResult<String> listAspectMetadata(
       final int pageSize) {
     validateConnection();
 
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .all()
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version))
-        .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName))
-        .allowFiltering()
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .all()
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .whereColumn(CassandraAspect.VERSION_COLUMN)
+            .isEqualTo(literal(version))
+            .whereColumn(CassandraAspect.ENTITY_COLUMN)
+            .isEqualTo(literal(entityName))
+            .allowFiltering()
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
 
     OffsetPager offsetPager = new OffsetPager(pageSize);
     Page<Row> page = offsetPager.getPage(rs, pageNumber);
 
-    final List<EntityAspect> aspects = page
-        .getElements()
-        .stream().map(CassandraAspect::rowToEntityAspect)
-        .collect(Collectors.toList());
+    final List<EntityAspect> aspects =
+        page.getElements().stream()
+            .map(CassandraAspect::rowToEntityAspect)
+            .collect(Collectors.toList());
 
     // TODO: address performance issue for getting total count
     // https://www.datastax.com/blog/running-count-expensive-cassandra
-    SimpleStatement ssCount = selectFrom(CassandraAspect.TABLE_NAME)
-        .countAll()
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version))
-        .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName))
-        .allowFiltering()
-        .build();
+    SimpleStatement ssCount =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .countAll()
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .whereColumn(CassandraAspect.VERSION_COLUMN)
+            .isEqualTo(literal(version))
+            .whereColumn(CassandraAspect.ENTITY_COLUMN)
+            .isEqualTo(literal(entityName))
+            .allowFiltering()
+            .build();
 
     long totalCount = _cqlSession.execute(ssCount).one().getLong(0);
 
-    final List<String> aspectMetadatas = aspects
-        .stream()
-        .map(EntityAspect::getMetadata)
-        .collect(Collectors.toList());
+    final List<String> aspectMetadatas =
+        aspects.stream().map(EntityAspect::getMetadata).collect(Collectors.toList());
 
-    final ListResultMetadata listResultMetadata = toListResultMetadata(aspects
-        .stream()
-        .map(CassandraAspectDao::toExtraInfo)
-        .collect(Collectors.toList()));
+    final ListResultMetadata listResultMetadata =
+        toListResultMetadata(
+            aspects.stream().map(CassandraAspectDao::toExtraInfo).collect(Collectors.toList()));
 
-    return toListResult(aspectMetadatas, listResultMetadata, start, pageNumber, pageSize, totalCount);
+    return toListResult(
+        aspectMetadatas, listResultMetadata, start, pageNumber, pageSize, totalCount);
   }
 
   @Override
   @Nonnull
-  public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) {
+  public <T> T runInTransactionWithRetry(
+      @Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) {
     validateConnection();
     int retryCount = 0;
     Exception lastException;
 
     do {
       try {
-        // TODO: Try to bend this code to make use of Cassandra batches. This method is called from single-urn operations, so perf should not suffer much
+        // TODO: Try to bend this code to make use of Cassandra batches. This method is called from
+        // single-urn operations, so perf should not suffer much
         return block.apply(null);
       } catch (DriverException exception) {
        lastException = exception;
      }
    } while (++retryCount <= maxTransactionRetry);
 
-    throw new RetryLimitReached("Failed to add after " + maxTransactionRetry + " retries", lastException);
+    throw new RetryLimitReached(
+        "Failed to add after " + maxTransactionRetry + " retries", lastException);
   }
 
   private <T> ListResult<T> toListResult(
@@ -283,17 +314,18 @@ private <T> ListResult<T> toListResult(
     final int numPages = (int) (totalCount / pageSize + (totalCount % pageSize == 0 ? 0 : 1));
     final boolean hasNext = pageNumber < numPages;
 
-    final int nextStart = (start != null && hasNext) ? (pageNumber * pageSize) : ListResult.INVALID_NEXT_START;
+    final int nextStart =
+        (start != null && hasNext) ? (pageNumber * pageSize) : ListResult.INVALID_NEXT_START;
 
     return ListResult.<T>builder()
-        .values(values)
-        .metadata(listResultMetadata)
-        .nextStart(nextStart)
-        .hasNext(hasNext)
-        .totalCount((int) totalCount)
-        .totalPageCount(numPages)
-        .pageSize(pageSize)
-        .build();
+        .values(values)
+        .metadata(listResultMetadata)
+        .nextStart(nextStart)
+        .hasNext(hasNext)
+        .totalCount((int) totalCount)
+        .totalPageCount(numPages)
+        .pageSize(pageSize)
+        .build();
   }
 
   @Nonnull
@@ -336,12 +368,16 @@ private static AuditStamp toAuditStamp(@Nonnull final EntityAspect aspect) {
   @Override
   public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect) {
     validateConnection();
-    SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME)
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(aspect.getUrn()))
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspect.getAspect()))
-        .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(aspect.getVersion()))
-        .ifExists()
-        .build();
+    SimpleStatement ss =
+        deleteFrom(CassandraAspect.TABLE_NAME)
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(aspect.getUrn()))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspect.getAspect()))
+            .whereColumn(CassandraAspect.VERSION_COLUMN)
+            .isEqualTo(literal(aspect.getVersion()))
+            .ifExists()
+            .build();
 
     _cqlSession.execute(ss);
   }
@@ -349,9 +385,11 @@ public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect a
   @Override
   public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) {
     validateConnection();
-    SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME)
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn))
-        .build();
+    SimpleStatement ss =
+        deleteFrom(CassandraAspect.TABLE_NAME)
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(urn))
+            .build();
     ResultSet rs = _cqlSession.execute(ss);
     // TODO: look into how to get around this for counts in Cassandra
     // https://stackoverflow.com/questions/28611459/how-to-know-affected-rows-in-cassandracql
@@ -359,11 +397,14 @@ public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) {
   }
 
   public List<EntityAspect> getAllAspects(String urn, String aspectName) {
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .all()
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn))
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .all()
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(urn))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
     return rs.all().stream().map(CassandraAspect::rowToEntityAspect).collect(Collectors.toList());
   }
 
   @Override
   @Nullable
   public EntityAspect getAspect(@Nonnull String urn, @Nonnull String aspectName, long version) {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .all()
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn))
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version))
-        .limit(1)
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .all()
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(urn))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .whereColumn(CassandraAspect.VERSION_COLUMN)
+            .isEqualTo(literal(version))
+            .limit(1)
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
     Row row = rs.one();
@@ -395,17 +440,20 @@ public ListResult<String> listUrns(
       final int pageSize) {
     validateConnection();
 
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .columns(
-            CassandraAspect.URN_COLUMN,
-            CassandraAspect.ASPECT_COLUMN,
-            CassandraAspect.VERSION_COLUMN
-        )
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(ASPECT_LATEST_VERSION))
-        .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName))
-        .allowFiltering()
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .columns(
+                CassandraAspect.URN_COLUMN,
+                CassandraAspect.ASPECT_COLUMN,
+                CassandraAspect.VERSION_COLUMN)
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .whereColumn(CassandraAspect.VERSION_COLUMN)
+            .isEqualTo(literal(ASPECT_LATEST_VERSION))
+            .whereColumn(CassandraAspect.ENTITY_COLUMN)
+            .isEqualTo(literal(entityName))
+            .allowFiltering()
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
 
@@ -414,17 +462,20 @@ public ListResult<String> listUrns(
 
     Page<Row> page = offsetPager.getPage(rs, pageNumber);
 
-    final List<String> urns = page
-        .getElements()
-        .stream().map(row -> CassandraAspect.rowToAspectIdentifier(row).getUrn())
-        .collect(Collectors.toList());
+    final List<String> urns =
+        page.getElements().stream()
+            .map(row -> CassandraAspect.rowToAspectIdentifier(row).getUrn())
+            .collect(Collectors.toList());
 
     // TODO: address performance issue for getting total count
     // https://www.datastax.com/blog/running-count-expensive-cassandra
-    SimpleStatement ssCount = selectFrom(CassandraAspect.TABLE_NAME)
+    SimpleStatement ssCount =
+        selectFrom(CassandraAspect.TABLE_NAME)
             .countAll()
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(ASPECT_LATEST_VERSION))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .whereColumn(CassandraAspect.VERSION_COLUMN)
+            .isEqualTo(literal(ASPECT_LATEST_VERSION))
             .allowFiltering()
             .build();
 
@@ -457,9 +508,8 @@ public Stream<EntityAspect> streamAspects(String entityName, String aspectName)
   @Nonnull
   public Iterable<String> listAllUrns(int start, int pageSize) {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .column(CassandraAspect.URN_COLUMN)
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME).column(CassandraAspect.URN_COLUMN).build();
 
     ResultSet rs = _cqlSession.execute(ss);
 
@@ -467,9 +517,8 @@ public Iterable<String> listAllUrns(int start, int pageSize) {
     OffsetPager offsetPager = new OffsetPager(pageSize);
     Page<Row> page = offsetPager.getPage(rs, pageNumber);
 
-    return page
-        .getElements()
-        .stream().map(row -> row.getString(CassandraAspect.URN_COLUMN))
+    return page.getElements().stream()
+        .map(row -> row.getString(CassandraAspect.URN_COLUMN))
         .collect(Collectors.toList());
   }
 
@@ -496,21 +545,20 @@ public Map<String, Map<String, Long>> getNextVersions(Map<String, Set<String>> u
 
   @Override
   public long saveLatestAspect(
-      @Nullable Transaction tx,
-      @Nonnull final String urn,
-      @Nonnull final String aspectName,
-      @Nullable final String oldAspectMetadata,
-      @Nullable final String oldActor,
-      @Nullable final String oldImpersonator,
-      @Nullable final Timestamp oldTime,
-      @Nullable final String oldSystemMetadata,
-      @Nonnull final String newAspectMetadata,
-      @Nonnull final String newActor,
-      @Nullable final String newImpersonator,
-      @Nonnull final Timestamp newTime,
-      @Nullable final String newSystemMetadata,
-      final Long nextVersion
-  ) {
+      @Nullable Transaction tx,
+      @Nonnull final String urn,
+      @Nonnull final String aspectName,
+      @Nullable final String oldAspectMetadata,
+      @Nullable final String oldActor,
+      @Nullable final String oldImpersonator,
+      @Nullable final Timestamp oldTime,
+      @Nullable final String oldSystemMetadata,
+      @Nonnull final String newAspectMetadata,
+      @Nonnull final String newActor,
+      @Nullable final String newImpersonator,
+      @Nonnull final Timestamp newTime,
+      @Nullable final String newSystemMetadata,
+      final Long nextVersion) {
     validateConnection();
 
     if (!_canWrite) {
@@ -521,7 +569,8 @@ public long saveLatestAspect(
     BatchStatement batch = BatchStatement.newInstance(BatchType.UNLOGGED);
     if (oldAspectMetadata != null && oldTime != null) {
       largestVersion = nextVersion;
-      final EntityAspect aspect = new EntityAspect(
+      final EntityAspect aspect =
+          new EntityAspect(
               urn,
               aspectName,
               largestVersion,
@@ -529,13 +578,13 @@ public long saveLatestAspect(
               oldSystemMetadata,
              oldTime,
              oldActor,
-              oldImpersonator
-      );
+              oldImpersonator);
       batch = batch.add(generateSaveStatement(aspect, true));
     }
 
     // Save newValue as the latest version (v0)
-    final EntityAspect aspect = new EntityAspect(
+    final EntityAspect aspect =
+        new EntityAspect(
             urn,
             aspectName,
             ASPECT_LATEST_VERSION,
@@ -543,8 +592,7 @@ public long saveLatestAspect(
             newSystemMetadata,
             newTime,
             newActor,
-            newImpersonator
-    );
+            newImpersonator);
     batch = batch.add(generateSaveStatement(aspect, oldAspectMetadata == null));
     _cqlSession.execute(batch);
     return largestVersion;
@@ -558,7 +606,8 @@ private SimpleStatement generateSaveStatement(EntityAspect aspect, boolean inser
       throw new RuntimeException(e);
     }
     if (insert) {
-      Insert ri = insertInto(CassandraAspect.TABLE_NAME)
+      Insert ri =
+          insertInto(CassandraAspect.TABLE_NAME)
              .value(CassandraAspect.URN_COLUMN, literal(aspect.getUrn()))
              .value(CassandraAspect.ASPECT_COLUMN, literal(aspect.getAspect()))
              .value(CassandraAspect.VERSION_COLUMN, literal(aspect.getVersion()))
@@ -572,16 +621,23 @@ private SimpleStatement generateSaveStatement(EntityAspect aspect, boolean inser
       return ri.build();
     } else {
 
-      UpdateWithAssignments uwa = update(CassandraAspect.TABLE_NAME)
+      UpdateWithAssignments uwa =
+          update(CassandraAspect.TABLE_NAME)
               .setColumn(CassandraAspect.METADATA_COLUMN, literal(aspect.getMetadata()))
-              .setColumn(CassandraAspect.SYSTEM_METADATA_COLUMN, literal(aspect.getSystemMetadata()))
-              .setColumn(CassandraAspect.CREATED_ON_COLUMN, literal(aspect.getCreatedOn().getTime()))
+              .setColumn(
+                  CassandraAspect.SYSTEM_METADATA_COLUMN, literal(aspect.getSystemMetadata()))
+              .setColumn(
+                  CassandraAspect.CREATED_ON_COLUMN, literal(aspect.getCreatedOn().getTime()))
              .setColumn(CassandraAspect.CREATED_BY_COLUMN, literal(aspect.getCreatedBy()))
              .setColumn(CassandraAspect.CREATED_FOR_COLUMN, literal(aspect.getCreatedFor()));
 
-      Update u = uwa.whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(aspect.getUrn()))
-          .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspect.getAspect()))
-          .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(aspect.getVersion()))
+      Update u =
+          uwa.whereColumn(CassandraAspect.URN_COLUMN)
+              .isEqualTo(literal(aspect.getUrn()))
+              .whereColumn(CassandraAspect.ASPECT_COLUMN)
+              .isEqualTo(literal(aspect.getAspect()))
+              .whereColumn(CassandraAspect.VERSION_COLUMN)
+              .isEqualTo(literal(aspect.getVersion()))
               .ifExists();
 
       return u.build();
@@ -595,28 +651,28 @@ public void setWritable(boolean canWrite) {
 
   @Override
   public void saveAspect(
-      @Nullable Transaction tx,
-      @Nonnull final String urn,
-      @Nonnull final String aspectName,
-      @Nonnull final String aspectMetadata,
-      @Nonnull final String actor,
-      @Nullable final String impersonator,
-      @Nonnull final Timestamp timestamp,
-      @Nonnull final String systemMetadata,
-      final long version,
-      final boolean insert) {
+      @Nullable Transaction tx,
+      @Nonnull final String urn,
+      @Nonnull final String aspectName,
+      @Nonnull final String aspectMetadata,
+      @Nonnull final String actor,
+      @Nullable final String impersonator,
+      @Nonnull final Timestamp timestamp,
+      @Nonnull final String systemMetadata,
+      final long version,
+      final boolean insert) {
     validateConnection();
 
-    final EntityAspect aspect = new EntityAspect(
-        urn,
-        aspectName,
-        version,
-        aspectMetadata,
-        systemMetadata,
-        timestamp,
-        actor,
-        impersonator
-    );
+    final EntityAspect aspect =
+        new EntityAspect(
+            urn,
+            aspectName,
+            version,
+            aspectMetadata,
+            systemMetadata,
+            timestamp,
+            actor,
+            impersonator);
 
     saveAspect(tx, aspect, insert);
 
@@ -626,16 +682,22 @@ public void saveAspect(
 
   @Override
   @Nonnull
-  public List<EntityAspect> getAspectsInRange(@Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) {
+  public List<EntityAspect> getAspectsInRange(
+      @Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .all()
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString()))
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).in(aspectNamesToLiterals(aspectNames))
-        .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isLessThanOrEqualTo(literal(startTimeMillis))
-        .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isGreaterThan(literal(endTimeMillis))
-        .allowFiltering()
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .all()
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(urn.toString()))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .in(aspectNamesToLiterals(aspectNames))
+            .whereColumn(CassandraAspect.CREATED_ON_COLUMN)
+            .isLessThanOrEqualTo(literal(startTimeMillis))
+            .whereColumn(CassandraAspect.CREATED_ON_COLUMN)
+            .isGreaterThan(literal(endTimeMillis))
+            .allowFiltering()
+            .build();
 
     ResultSet rs = _cqlSession.execute(ss);
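The toListResult hunk above preserves the paging arithmetic: ceil-division for the page count, with hasNext and nextStart derived from it. The same math in isolation, with example values chosen purely for illustration (and -1 standing in for ListResult.INVALID_NEXT_START):

    public final class PagingMath {
      private PagingMath() {}

      public static void main(String[] args) {
        long totalCount = 23;
        int pageSize = 10;
        int pageNumber = 2; // 1-based page index, as the offset pager uses

        // Ceil-division without floating point: add one page for any remainder.
        int numPages = (int) (totalCount / pageSize + (totalCount % pageSize == 0 ? 0 : 1));
        boolean hasNext = pageNumber < numPages;
        int nextStart = hasNext ? (pageNumber * pageSize) : -1;

        System.out.println(numPages);  // 3
        System.out.println(hasNext);   // true
        System.out.println(nextStart); // 20
      }
    }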
-10,10 +14,11 @@ import com.datastax.oss.driver.api.querybuilder.select.Selector; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityAspect; +import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; -import com.linkedin.metadata.entity.EntityAspectIdentifier; -import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; @@ -23,13 +28,7 @@ import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; import com.linkedin.retention.VersionBasedRetention; -import com.linkedin.metadata.Constants; import io.opentelemetry.extension.annotations.WithSpan; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.sql.Timestamp; import java.time.Clock; import java.util.List; @@ -37,10 +36,10 @@ import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; - -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -58,31 +57,38 @@ public EntityService getEntityService() { @Override protected AspectsBatch buildAspectsBatch(List<MetadataChangeProposal> mcps) { - return AspectsBatchImpl.builder() - .mcps(mcps, _entityService.getEntityRegistry()) - .build(); + return AspectsBatchImpl.builder().mcps(mcps, _entityService.getEntityRegistry()).build(); } @Override @WithSpan protected void applyRetention(List<RetentionContext> retentionContexts) { - List<RetentionContext> nonEmptyContexts = retentionContexts.stream() - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()) + List<RetentionContext> nonEmptyContexts = + retentionContexts.stream() + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) .collect(Collectors.toList()); - nonEmptyContexts.forEach(context -> { - if (context.getRetentionPolicy().map(Retention::hasVersion).orElse(false)) { - Retention retentionPolicy = context.getRetentionPolicy().get(); - applyVersionBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getVersion(), context.getMaxVersion()); - } + nonEmptyContexts.forEach( + context -> { + if (context.getRetentionPolicy().map(Retention::hasVersion).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyVersionBasedRetention( + context.getUrn(), + context.getAspectName(), + retentionPolicy.getVersion(), + context.getMaxVersion()); + } - if (context.getRetentionPolicy().map(Retention::hasTime).orElse(false)) { - Retention retentionPolicy = context.getRetentionPolicy().get(); - applyTimeBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getTime()); - } - }); + if 
(context.getRetentionPolicy().map(Retention::hasTime).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyTimeBasedRetention( + context.getUrn(), context.getAspectName(), retentionPolicy.getTime()); + } + }); } @Override @@ -111,18 +117,22 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } final String aspectNameFromRecord = id.getAspect(); // Get the retention policies to apply from the local retention policy map - Optional<Retention> retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() - .map(key -> retentionPolicyMap.get(key.toString())) - .filter(Objects::nonNull) - .findFirst() - .map(DataHubRetentionConfig::getRetention); - retentionPolicy.ifPresent(retention -> - applyRetention(List.of(RetentionContext.builder() - .urn(urn) - .aspectName(aspectNameFromRecord) - .retentionPolicy(retentionPolicy) - .maxVersion(Optional.of(id.getVersion())) - .build()))); + Optional<Retention> retentionPolicy = + getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() + .map(key -> retentionPolicyMap.get(key.toString())) + .filter(Objects::nonNull) + .findFirst() + .map(DataHubRetentionConfig::getRetention); + retentionPolicy.ifPresent( + retention -> + applyRetention( + List.of( + RetentionContext.builder() + .urn(urn) + .aspectName(aspectNameFromRecord) + .retentionPolicy(retentionPolicy) + .maxVersion(Optional.of(id.getVersion())) + .build()))); i += 1; if (i % _batchSize == 0) { @@ -134,7 +144,8 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } @Override - public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args) { + public BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args) { log.error("batchApplyRetentionEntities not implemented for cassandra"); return null; } @@ -147,23 +158,31 @@ private void applyVersionBasedRetention( long largestVersion = maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn, aspectName)); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isLessThanOrEqualTo(literal(largestVersion - retention.getMaxVersions() + 1L)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isLessThanOrEqualTo(literal(largestVersion - retention.getMaxVersions() + 1L)) + .build(); _cqlSession.execute(ss); } private long getMaxVersion(@Nonnull final Urn urn, @Nonnull final String aspectName) { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .orderBy(CassandraAspect.VERSION_COLUMN, ClusteringOrder.DESC) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + 
.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .orderBy(CassandraAspect.VERSION_COLUMN, ClusteringOrder.DESC) + .build(); ResultSet rs = _cqlSession.execute(ss); Row row = rs.one(); return row.getLong(CassandraAspect.VERSION_COLUMN); @@ -174,47 +193,69 @@ private void applyTimeBasedRetention( @Nonnull final String aspectName, @Nonnull final TimeBasedRetention retention) { Timestamp threshold = new Timestamp(_clock.millis() - retention.getMaxAgeInSeconds() * 1000); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isLessThanOrEqualTo(literal(threshold)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.CREATED_ON_COLUMN) + .isLessThanOrEqualTo(literal(threshold)) + .build(); _cqlSession.execute(ss); } - private List<EntityAspectIdentifier> queryCandidates(@Nullable String entityName, @Nullable String aspectName) { - Select select = selectFrom(CassandraAspect.TABLE_NAME) - .selectors( - Selector.column(CassandraAspect.URN_COLUMN), - Selector.column(CassandraAspect.ASPECT_COLUMN), - Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)).as(CassandraAspect.VERSION_COLUMN)) - .allowFiltering(); + private List<EntityAspectIdentifier> queryCandidates( + @Nullable String entityName, @Nullable String aspectName) { + Select select = + selectFrom(CassandraAspect.TABLE_NAME) + .selectors( + Selector.column(CassandraAspect.URN_COLUMN), + Selector.column(CassandraAspect.ASPECT_COLUMN), + Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) + .as(CassandraAspect.VERSION_COLUMN)) + .allowFiltering(); if (aspectName != null) { select = select.whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)); } - select = select.whereColumn(CassandraAspect.VERSION_COLUMN).isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)); + select = + select + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)); if (entityName != null) { select = select.whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)); } - select = select.groupBy(ImmutableList.of(Selector.column(CassandraAspect.URN_COLUMN), Selector.column(CassandraAspect.ASPECT_COLUMN))); + select = + select.groupBy( + ImmutableList.of( + Selector.column(CassandraAspect.URN_COLUMN), + Selector.column(CassandraAspect.ASPECT_COLUMN))); SimpleStatement ss = select.build(); ResultSet rs = _cqlSession.execute(ss); - return rs.all().stream().map(CassandraAspect::rowToAspectIdentifier).collect(Collectors.toList()); + return rs.all().stream() + .map(CassandraAspect::rowToAspectIdentifier) + .collect(Collectors.toList()); } private Map<String, DataHubRetentionConfig> getAllRetentionPolicies() { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(Constants.DATAHUB_RETENTION_ASPECT)) - 
.whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(Constants.ASPECT_LATEST_VERSION)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(Constants.DATAHUB_RETENTION_ASPECT)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(Constants.ASPECT_LATEST_VERSION)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); return rs.all().stream() .map(CassandraAspect::rowToEntityAspect) - .collect(Collectors.toMap( - EntityAspect::getUrn, - aspect -> RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, aspect.getMetadata()))); + .collect( + Collectors.toMap( + EntityAspect::getUrn, + aspect -> + RecordUtils.toRecordTemplate( + DataHubRetentionConfig.class, aspect.getMetadata()))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java index c0aef268e14c9..b02ee0170354e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java @@ -1,30 +1,30 @@ package com.linkedin.metadata.entity.ebean; +import static io.ebean.Expr.ne; + import com.linkedin.metadata.Constants; import io.ebean.Database; import io.ebean.SqlQuery; import io.ebean.SqlRow; - import java.util.List; -import static io.ebean.Expr.ne; - - public class AspectStorageValidationUtil { - private AspectStorageValidationUtil() { - - } + private AspectStorageValidationUtil() {} public static long getV1RowCount(Database server) { return server.find(EbeanAspectV1.class).findCount(); } /** - * Get the number of rows created not by the DataHub system actor (urn:li:corpuser:__datahub_system) + * Get the number of rows created not by the DataHub system actor + * (urn:li:corpuser:__datahub_system) */ public static long getV2NonSystemRowCount(Database server) { - return server.find(EbeanAspectV2.class).where(ne("createdby", Constants.SYSTEM_ACTOR)).findCount(); + return server + .find(EbeanAspectV2.class) + .where(ne("createdby", Constants.SYSTEM_ACTOR)) + .findCount(); } public static boolean checkV2TableExists(Database server) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java index c16c98b34f3eb..b2b47c1d5ba32 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.codahale.metrics.MetricRegistry; import com.datahub.util.exception.ModelConversionException; import com.datahub.util.exception.RetryLimitReached; @@ -19,8 +21,8 @@ import com.linkedin.metadata.query.ListResultMetadata; import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; -import io.ebean.DuplicateKeyException; import io.ebean.Database; +import io.ebean.DuplicateKeyException; import io.ebean.ExpressionList; import io.ebean.Junction; import io.ebean.PagedList; @@ -45,14 +47,10 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import 
javax.persistence.PersistenceException; import javax.persistence.Table; - import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - @Slf4j public class EbeanAspectDao implements AspectDao, AspectMigrationsDao { @@ -64,8 +62,10 @@ public class EbeanAspectDao implements AspectDao, AspectMigrationsDao { // while its storage is being migrated private boolean _canWrite = true; - // Why 375? From tuning, this seems to be about the largest size we can get without having ebean batch issues. - // This may be able to be moved up, 375 is a bit conservative. However, we should be careful to tweak this without + // Why 375? From tuning, this seems to be about the largest size we can get without having ebean + // batch issues. + // This may be able to be moved up, 375 is a bit conservative. However, we should be careful to + // tweak this without // more testing. private int _queryKeysCount = 375; // 0 means no pagination on keys @@ -79,8 +79,7 @@ public void setWritable(boolean canWrite) { } /** - * Return the {@link Database} server instance used for customized queries. - * Only used in tests. + * Return the {@link Database} server instance used for customized queries. Only used in tests. */ public Database getServer() { return _server; @@ -96,8 +95,9 @@ private boolean validateConnection() { return true; } if (!AspectStorageValidationUtil.checkV2TableExists(_server)) { - log.error("GMS is on a newer version than your storage layer. Please refer to " - + "https://datahubproject.io/docs/advanced/no-code-upgrade to view the upgrade guide."); + log.error( + "GMS is on a newer version than your storage layer. Please refer to " + + "https://datahubproject.io/docs/advanced/no-code-upgrade to view the upgrade guide."); _canWrite = false; return false; } else { @@ -106,24 +106,22 @@ private boolean validateConnection() { } } - @Override public long saveLatestAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion - ) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion) { validateConnection(); if (!_canWrite) { @@ -133,27 +131,47 @@ public long saveLatestAspect( long largestVersion = ASPECT_LATEST_VERSION; if (oldAspectMetadata != null && oldTime != null) { largestVersion = nextVersion; - saveAspect(tx, urn, aspectName, oldAspectMetadata, oldActor, oldImpersonator, oldTime, oldSystemMetadata, largestVersion, true); + saveAspect( + tx, + urn, + aspectName, + oldAspectMetadata, + oldActor, + oldImpersonator, + oldTime, + oldSystemMetadata, + largestVersion, + true); } // Save newValue as the latest version (v0) - saveAspect(tx, urn, 
aspectName, newAspectMetadata, newActor, newImpersonator, newTime, newSystemMetadata, ASPECT_LATEST_VERSION, oldAspectMetadata == null); + saveAspect( + tx, + urn, + aspectName, + newAspectMetadata, + newActor, + newImpersonator, + newTime, + newSystemMetadata, + ASPECT_LATEST_VERSION, + oldAspectMetadata == null); return largestVersion; } @Override public void saveAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert) { validateConnection(); @@ -171,12 +189,14 @@ public void saveAspect( } @Override - public void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert) { + public void saveAspect( + @Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert) { EbeanAspectV2 ebeanAspect = EbeanAspectV2.fromEntityAspect(aspect); saveEbeanAspect(tx, ebeanAspect, insert); } - private void saveEbeanAspect(@Nullable Transaction tx, @Nonnull final EbeanAspectV2 ebeanAspect, final boolean insert) { + private void saveEbeanAspect( + @Nullable Transaction tx, @Nonnull final EbeanAspectV2 ebeanAspect, final boolean insert) { validateConnection(); if (insert) { _server.insert(ebeanAspect, tx); @@ -186,17 +206,22 @@ private void saveEbeanAspect(@Nullable Transaction tx, @Nonnull final EbeanAspec } @Override - public Map<String, Map<String, EntityAspect>> getLatestAspects(@Nonnull Map<String, Set<String>> urnAspects) { + public Map<String, Map<String, EntityAspect>> getLatestAspects( + @Nonnull Map<String, Set<String>> urnAspects) { validateConnection(); - List<EbeanAspectV2.PrimaryKey> keys = urnAspects.entrySet().stream() - .flatMap(entry -> entry.getValue().stream() - .map(aspect -> new EbeanAspectV2.PrimaryKey(entry.getKey(), aspect, ASPECT_LATEST_VERSION)) - ).collect(Collectors.toList()); + List<EbeanAspectV2.PrimaryKey> keys = + urnAspects.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream() + .map( + aspect -> + new EbeanAspectV2.PrimaryKey( + entry.getKey(), aspect, ASPECT_LATEST_VERSION))) + .collect(Collectors.toList()); - List<EbeanAspectV2> results = _server.find(EbeanAspectV2.class) - .where().idIn(keys) - .findList(); + List<EbeanAspectV2> results = _server.find(EbeanAspectV2.class).where().idIn(keys).findList(); return toUrnAspectMap(results); } @@ -204,7 +229,8 @@ public Map<String, Map<String, EntityAspect>> getLatestAspects(@Nonnull Map<Stri @Override public long countEntities() { validateConnection(); - return _server.find(EbeanAspectV2.class) + return _server + .find(EbeanAspectV2.class) .setDistinct(true) .select(EbeanAspectV2.URN_COLUMN) .findCount(); @@ -213,7 +239,8 @@ public long countEntities() { @Override public boolean checkIfAspectExists(@Nonnull String aspectName) { validateConnection(); - return _server.find(EbeanAspectV2.class) + return _server + .find(EbeanAspectV2.class) .where() .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) .exists(); @@ -221,7 +248,8 @@ public boolean 
checkIfAspectExists(@Nonnull String aspectName) { @Override @Nullable - public EntityAspect getAspect(@Nonnull final String urn, @Nonnull final String aspectName, final long version) { + public EntityAspect getAspect( + @Nonnull final String urn, @Nonnull final String aspectName, final long version) { return getAspect(new EntityAspectIdentifier(urn, aspectName, version)); } @@ -229,7 +257,8 @@ public EntityAspect getAspect(@Nonnull final String urn, @Nonnull final String a @Nullable public EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key) { validateConnection(); - EbeanAspectV2.PrimaryKey primaryKey = new EbeanAspectV2.PrimaryKey(key.getUrn(), key.getAspect(), key.getVersion()); + EbeanAspectV2.PrimaryKey primaryKey = + new EbeanAspectV2.PrimaryKey(key.getUrn(), key.getAspect(), key.getVersion()); EbeanAspectV2 ebeanAspect = _server.find(EbeanAspectV2.class, primaryKey); return ebeanAspect == null ? null : ebeanAspect.toEntityAspect(); } @@ -244,46 +273,60 @@ public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect a @Override public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { validateConnection(); - return _server.createQuery(EbeanAspectV2.class).where().eq(EbeanAspectV2.URN_COLUMN, urn).delete(tx); + return _server + .createQuery(EbeanAspectV2.class) + .where() + .eq(EbeanAspectV2.URN_COLUMN, urn) + .delete(tx); } @Override @Nonnull - public Map<EntityAspectIdentifier, EntityAspect> batchGet(@Nonnull final Set<EntityAspectIdentifier> keys) { + public Map<EntityAspectIdentifier, EntityAspect> batchGet( + @Nonnull final Set<EntityAspectIdentifier> keys) { validateConnection(); if (keys.isEmpty()) { return Collections.emptyMap(); } - final Set<EbeanAspectV2.PrimaryKey> ebeanKeys = keys.stream().map(EbeanAspectV2.PrimaryKey::fromAspectIdentifier).collect(Collectors.toSet()); + final Set<EbeanAspectV2.PrimaryKey> ebeanKeys = + keys.stream() + .map(EbeanAspectV2.PrimaryKey::fromAspectIdentifier) + .collect(Collectors.toSet()); final List<EbeanAspectV2> records; if (_queryKeysCount == 0) { records = batchGet(ebeanKeys, ebeanKeys.size()); } else { records = batchGet(ebeanKeys, _queryKeysCount); } - return records.stream().collect(Collectors.toMap(record -> record.getKey().toAspectIdentifier(), EbeanAspectV2::toEntityAspect)); + return records.stream() + .collect( + Collectors.toMap( + record -> record.getKey().toAspectIdentifier(), EbeanAspectV2::toEntityAspect)); } /** - * BatchGet that allows pagination on keys to avoid large queries. - * TODO: can further improve by running the sub queries in parallel + * BatchGet that allows pagination on keys to avoid large queries. 
TODO: can further improve by + * running the sub queries in parallel * * @param keys a set of keys with urn, aspect and version * @param keysCount the max number of keys for each sub query */ @Nonnull - private List<EbeanAspectV2> batchGet(@Nonnull final Set<EbeanAspectV2.PrimaryKey> keys, final int keysCount) { + private List<EbeanAspectV2> batchGet( + @Nonnull final Set<EbeanAspectV2.PrimaryKey> keys, final int keysCount) { validateConnection(); int position = 0; final int totalPageCount = QueryUtils.getTotalPageCount(keys.size(), keysCount); - final List<EbeanAspectV2> finalResult = batchGetUnion(new ArrayList<>(keys), keysCount, position); + final List<EbeanAspectV2> finalResult = + batchGetUnion(new ArrayList<>(keys), keysCount, position); while (QueryUtils.hasMore(position, keysCount, totalPageCount)) { position += keysCount; - final List<EbeanAspectV2> oneStatementResult = batchGetUnion(new ArrayList<>(keys), keysCount, position); + final List<EbeanAspectV2> oneStatementResult = + batchGetUnion(new ArrayList<>(keys), keysCount, position); finalResult.addAll(oneStatementResult); } @@ -291,8 +334,8 @@ private List<EbeanAspectV2> batchGet(@Nonnull final Set<EbeanAspectV2.PrimaryKey } /** - * Builds a single SELECT statement for batch get, which selects one entity, and then can be UNION'd with other SELECT - * statements. + * Builds a single SELECT statement for batch get, which selects one entity, and then can be + * UNION'd with other SELECT statements. */ private String batchGetSelect( final int selectId, @@ -310,48 +353,52 @@ private String batchGetSelect( outputParamsToValues.put(aspectArg, aspect); outputParamsToValues.put(versionArg, version); - return String.format("SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor " + return String.format( + "SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor " + "FROM %s WHERE urn = :%s AND aspect = :%s AND version = :%s", EbeanAspectV2.class.getAnnotation(Table.class).name(), urnArg, aspectArg, versionArg); } @Nonnull private List<EbeanAspectV2> batchGetUnion( - @Nonnull final List<EbeanAspectV2.PrimaryKey> keys, - final int keysCount, - final int position) { + @Nonnull final List<EbeanAspectV2.PrimaryKey> keys, final int keysCount, final int position) { validateConnection(); - // Build one SELECT per key and then UNION ALL the results. This can be much more performant than OR'ing the + // Build one SELECT per key and then UNION ALL the results. This can be much more performant + // than OR'ing the // conditions together. Our query will look like: // SELECT * FROM metadata_aspect WHERE urn = 'urn0' AND aspect = 'aspect0' AND version = 0 // UNION ALL // SELECT * FROM metadata_aspect WHERE urn = 'urn0' AND aspect = 'aspect1' AND version = 0 // ... - // Note: UNION ALL should be safe and more performant than UNION. We're selecting the entire entity key (as well + // Note: UNION ALL should be safe and more performant than UNION. We're selecting the entire + // entity key (as well // as data), so each result should be unique. No need to deduplicate. - // Another note: ebean doesn't support UNION ALL, so we need to manually build the SQL statement ourselves. + // Another note: ebean doesn't support UNION ALL, so we need to manually build the SQL statement + // ourselves. 
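// Editor's note, not part of the patch: a concrete example of the statement that the
// loop below assembles. For two keys, and assuming the parameter names produced by
// batchGetSelect are "urn0"/"aspect0"/"version0" and so on (the exact suffixes come
// from selectId) and that EbeanAspectV2 maps to the metadata_aspect_v2 table (an
// assumption; the table name is read from the @Table annotation at runtime), the
// UNION ALL query reads roughly:
//
//   SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor
//   FROM metadata_aspect_v2 WHERE urn = :urn0 AND aspect = :aspect0 AND version = :version0
//   UNION ALL
//   SELECT urn, aspect, version, metadata, systemMetadata, createdOn, createdBy, createdFor
//   FROM metadata_aspect_v2 WHERE urn = :urn1 AND aspect = :aspect1 AND version = :version1
//
// Each SELECT hits the primary key directly, which is why this UNION ALL form can
// outperform a single statement that ORs all the key triples together in one WHERE clause.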
final StringBuilder sb = new StringBuilder(); final int end = Math.min(keys.size(), position + keysCount); final Map<String, Object> params = new HashMap<>(); for (int index = position; index < end; index++) { - sb.append(batchGetSelect( - index - position, - keys.get(index).getUrn(), - keys.get(index).getAspect(), - keys.get(index).getVersion(), - params)); + sb.append( + batchGetSelect( + index - position, + keys.get(index).getUrn(), + keys.get(index).getAspect(), + keys.get(index).getVersion(), + params)); if (index != end - 1) { sb.append(" UNION ALL "); } } - final RawSql rawSql = RawSqlBuilder.parse(sb.toString()) - .columnMapping(EbeanAspectV2.URN_COLUMN, "key.urn") - .columnMapping(EbeanAspectV2.ASPECT_COLUMN, "key.aspect") - .columnMapping(EbeanAspectV2.VERSION_COLUMN, "key.version") - .create(); + final RawSql rawSql = + RawSqlBuilder.parse(sb.toString()) + .columnMapping(EbeanAspectV2.URN_COLUMN, "key.urn") + .columnMapping(EbeanAspectV2.ASPECT_COLUMN, "key.aspect") + .columnMapping(EbeanAspectV2.VERSION_COLUMN, "key.version") + .create(); final Query<EbeanAspectV2> query = _server.find(EbeanAspectV2.class).setRawSql(rawSql); @@ -373,23 +420,24 @@ public ListResult<String> listUrns( validateConnection(); final String urnPrefixMatcher = "urn:li:" + entityName + ":%"; - final PagedList<EbeanAspectV2> pagedList = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.KEY_ID) - .where() - .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) - .setFirstRow(start) - .setMaxRows(pageSize) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .findPagedList(); + final PagedList<EbeanAspectV2> pagedList = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.KEY_ID) + .where() + .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) + .setFirstRow(start) + .setMaxRows(pageSize) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .findPagedList(); - final List<String> urns = pagedList - .getList() - .stream() - .map(entry -> entry.getKey().getUrn()) - .collect(Collectors.toList()); + final List<String> urns = + pagedList.getList().stream() + .map(entry -> entry.getKey().getUrn()) + .collect(Collectors.toList()); return toListResult(urns, null, pagedList, start); } @@ -397,7 +445,9 @@ public ListResult<String> listUrns( @Nonnull @Override public Integer countAspect(@Nonnull String aspectName, @Nullable String urnLike) { - ExpressionList<EbeanAspectV2> exp = _server.find(EbeanAspectV2.class) + ExpressionList<EbeanAspectV2> exp = + _server + .find(EbeanAspectV2.class) .select(EbeanAspectV2.KEY_ID) .where() .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) @@ -412,7 +462,9 @@ public Integer countAspect(@Nonnull String aspectName, @Nullable String urnLike) @Nonnull @Override public PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args) { - ExpressionList<EbeanAspectV2> exp = _server.find(EbeanAspectV2.class) + ExpressionList<EbeanAspectV2> exp = + _server + .find(EbeanAspectV2.class) .select(EbeanAspectV2.ALL_COLUMNS) .where() .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); @@ -425,24 +477,26 @@ public PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args) { if (args.urnLike != null) { exp = exp.like(EbeanAspectV2.URN_COLUMN, args.urnLike); } - return exp.orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .orderBy() - 
.asc(EbeanAspectV2.ASPECT_COLUMN) - .setFirstRow(args.start) - .setMaxRows(args.batchSize) - .findPagedList(); + return exp.orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .orderBy() + .asc(EbeanAspectV2.ASPECT_COLUMN) + .setFirstRow(args.start) + .setMaxRows(args.batchSize) + .findPagedList(); } @Override @Nonnull public Stream<EntityAspect> streamAspects(String entityName, String aspectName) { - ExpressionList<EbeanAspectV2> exp = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .like(EbeanAspectV2.URN_COLUMN, "urn:li:" + entityName + ":%"); + ExpressionList<EbeanAspectV2> exp = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .like(EbeanAspectV2.URN_COLUMN, "urn:li:" + entityName + ":%"); return exp.query().findStream().map(EbeanAspectV2::toEntityAspect); } @@ -450,14 +504,16 @@ public Stream<EntityAspect> streamAspects(String entityName, String aspectName) @Nonnull public Iterable<String> listAllUrns(int start, int pageSize) { validateConnection(); - PagedList<EbeanAspectV2> ebeanAspects = _server.find(EbeanAspectV2.class) - .setDistinct(true) - .select(EbeanAspectV2.URN_COLUMN) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .setFirstRow(start) - .setMaxRows(pageSize) - .findPagedList(); + PagedList<EbeanAspectV2> ebeanAspects = + _server + .find(EbeanAspectV2.class) + .setDistinct(true) + .select(EbeanAspectV2.URN_COLUMN) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .setFirstRow(start) + .setMaxRows(pageSize) + .findPagedList(); return ebeanAspects.getList().stream().map(EbeanAspectV2::getUrn).collect(Collectors.toList()); } @@ -473,21 +529,27 @@ public ListResult<String> listAspectMetadata( validateConnection(); final String urnPrefixMatcher = "urn:li:" + entityName + ":%"; - final PagedList<EbeanAspectV2> pagedList = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .eq(EbeanAspectV2.VERSION_COLUMN, version) - .setFirstRow(start) - .setMaxRows(pageSize) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .findPagedList(); + final PagedList<EbeanAspectV2> pagedList = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .eq(EbeanAspectV2.VERSION_COLUMN, version) + .setFirstRow(start) + .setMaxRows(pageSize) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .findPagedList(); - final List<String> aspects = pagedList.getList().stream().map(EbeanAspectV2::getMetadata).collect(Collectors.toList()); - final ListResultMetadata listResultMetadata = toListResultMetadata(pagedList.getList().stream().map( - EbeanAspectDao::toExtraInfo).collect(Collectors.toList())); + final List<String> aspects = + pagedList.getList().stream().map(EbeanAspectV2::getMetadata).collect(Collectors.toList()); + final ListResultMetadata listResultMetadata = + toListResultMetadata( + pagedList.getList().stream() + .map(EbeanAspectDao::toExtraInfo) + .collect(Collectors.toList())); return toListResult(aspects, listResultMetadata, pagedList, start); } @@ -504,21 +566,26 @@ public ListResult<String> listLatestAspectMetadata( @Override @Nonnull - public <T> T 
runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) { + public <T> T runInTransactionWithRetry( + @Nonnull final Function<Transaction, T> block, final int maxTransactionRetry) { return runInTransactionWithRetry(block, null, maxTransactionRetry); } @Override @Nonnull - public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> block, @Nullable AspectsBatch batch, - final int maxTransactionRetry) { + public <T> T runInTransactionWithRetry( + @Nonnull final Function<Transaction, T> block, + @Nullable AspectsBatch batch, + final int maxTransactionRetry) { validateConnection(); int retryCount = 0; Exception lastException = null; T result = null; do { - try (Transaction transaction = _server.beginTransaction(TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction = + _server.beginTransaction( + TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction.setBatchMode(true); result = block.apply(transaction); transaction.commit(); @@ -526,8 +593,15 @@ public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> b break; } catch (PersistenceException exception) { if (exception instanceof DuplicateKeyException) { - if (batch != null && batch.getItems().stream().allMatch(a -> a.getAspectName().equals(a.getEntitySpec().getKeyAspectSpec().getName()))) { - log.warn("Skipping DuplicateKeyException retry since aspect is the key aspect. {}", batch.getUrnAspectsMap().keySet()); + if (batch != null + && batch.getItems().stream() + .allMatch( + a -> + a.getAspectName() + .equals(a.getEntitySpec().getKeyAspectSpec().getName()))) { + log.warn( + "Skipping DuplicateKeyException retry since aspect is the key aspect. {}", + batch.getUrnAspectsMap().keySet()); continue; } } @@ -540,7 +614,8 @@ public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> b if (lastException != null) { MetricUtils.counter(MetricRegistry.name(this.getClass(), "txFailedAfterRetries")).inc(); - throw new RetryLimitReached("Failed to add after " + maxTransactionRetry + " retries", lastException); + throw new RetryLimitReached( + "Failed to add after " + maxTransactionRetry + " retries", lastException); } return result; @@ -549,7 +624,9 @@ public <T> T runInTransactionWithRetry(@Nonnull final Function<Transaction, T> b @Override public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { validateConnection(); - final List<EbeanAspectV2.PrimaryKey> result = _server.find(EbeanAspectV2.class) + final List<EbeanAspectV2.PrimaryKey> result = + _server + .find(EbeanAspectV2.class) .where() .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) @@ -561,37 +638,35 @@ public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspec return result.isEmpty() ? 
-1 : result.get(0).getVersion(); } - public Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<String>> urnAspects) { + public Map<String, Map<String, Long>> getNextVersions( + @Nonnull Map<String, Set<String>> urnAspects) { validateConnection(); - Junction<EbeanAspectV2> queryJunction = _server.find(EbeanAspectV2.class) + Junction<EbeanAspectV2> queryJunction = + _server + .find(EbeanAspectV2.class) .select("urn, aspect, max(version)") .where() .in("urn", urnAspects.keySet()) .or(); ExpressionList<EbeanAspectV2> exp = null; - for (Map.Entry<String, Set<String>> entry: urnAspects.entrySet()) { + for (Map.Entry<String, Set<String>> entry : urnAspects.entrySet()) { if (exp == null) { - exp = queryJunction.and() - .eq("urn", entry.getKey()) - .in("aspect", entry.getValue()) - .endAnd(); + exp = queryJunction.and().eq("urn", entry.getKey()).in("aspect", entry.getValue()).endAnd(); } else { - exp = exp.and() - .eq("urn", entry.getKey()) - .in("aspect", entry.getValue()) - .endAnd(); + exp = exp.and().eq("urn", entry.getKey()).in("aspect", entry.getValue()).endAnd(); } } Map<String, Map<String, Long>> result = new HashMap<>(); // Default next version 0 - urnAspects.forEach((key, value) -> { - Map<String, Long> defaultNextVersion = new HashMap<>(); - value.forEach(aspectName -> defaultNextVersion.put(aspectName, 0L)); - result.put(key, defaultNextVersion); - }); + urnAspects.forEach( + (key, value) -> { + Map<String, Long> defaultNextVersion = new HashMap<>(); + value.forEach(aspectName -> defaultNextVersion.put(aspectName, 0L)); + result.put(key, defaultNextVersion); + }); if (exp == null) { return result; @@ -599,7 +674,7 @@ public Map<String, Map<String, Long>> getNextVersions(@Nonnull Map<String, Set<S List<EbeanAspectV2.PrimaryKey> dbResults = exp.endOr().findIds(); - for (EbeanAspectV2.PrimaryKey key: dbResults) { + for (EbeanAspectV2.PrimaryKey key : dbResults) { if (result.get(key.getUrn()).get(key.getAspect()) <= key.getVersion()) { result.get(key.getUrn()).put(key.getAspect(), key.getVersion() + 1L); } @@ -615,7 +690,9 @@ private <T> ListResult<T> toListResult( @Nonnull final PagedList<?> pagedList, @Nullable final Integer start) { final int nextStart = - (start != null && pagedList.hasNext()) ? start + pagedList.getList().size() : ListResult.INVALID_NEXT_START; + (start != null && pagedList.hasNext()) + ? 
start + pagedList.getList().size() + : ListResult.INVALID_NEXT_START; return ListResult.<T>builder() // Format .values(values) @@ -667,32 +744,44 @@ private ListResultMetadata toListResultMetadata(@Nonnull final List<ExtraInfo> e @Override @Nonnull - public List<EntityAspect> getAspectsInRange(@Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) { + public List<EntityAspect> getAspectsInRange( + @Nonnull Urn urn, Set<String> aspectNames, long startTimeMillis, long endTimeMillis) { validateConnection(); - List<EbeanAspectV2> ebeanAspects = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) - .in(EbeanAspectV2.ASPECT_COLUMN, aspectNames) - .inRange(EbeanAspectV2.CREATED_ON_COLUMN, new Timestamp(startTimeMillis), new Timestamp(endTimeMillis)) - .findList(); + List<EbeanAspectV2> ebeanAspects = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) + .in(EbeanAspectV2.ASPECT_COLUMN, aspectNames) + .inRange( + EbeanAspectV2.CREATED_ON_COLUMN, + new Timestamp(startTimeMillis), + new Timestamp(endTimeMillis)) + .findList(); return ebeanAspects.stream().map(EbeanAspectV2::toEntityAspect).collect(Collectors.toList()); } private static Map<String, EntityAspect> toAspectMap(Set<EbeanAspectV2> beans) { - return beans.stream().map(bean -> Map.entry(bean.getAspect(), bean)) - .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toEntityAspect())); + return beans.stream() + .map(bean -> Map.entry(bean.getAspect(), bean)) + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toEntityAspect())); } - private static Map<String, Map<String, EntityAspect>> toUrnAspectMap(Collection<EbeanAspectV2> beans) { + private static Map<String, Map<String, EntityAspect>> toUrnAspectMap( + Collection<EbeanAspectV2> beans) { return beans.stream() - .collect(Collectors.groupingBy(EbeanAspectV2::getUrn, Collectors.toSet())) - .entrySet().stream() - .map(e -> Map.entry(e.getKey(), toAspectMap(e.getValue()))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.groupingBy(EbeanAspectV2::getUrn, Collectors.toSet())) + .entrySet() + .stream() + .map(e -> Map.entry(e.getKey(), toAspectMap(e.getValue()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } - private static String buildMetricName(EntitySpec entitySpec, AspectSpec aspectSpec, String status) { - return String.join(MetricUtils.DELIMITER, List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); + private static String buildMetricName( + EntitySpec entitySpec, AspectSpec aspectSpec, String status) { + return String.join( + MetricUtils.DELIMITER, + List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java index 3d2a4a5ae051c..648b7cd6a65b0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java @@ -16,10 +16,7 @@ import lombok.NonNull; import lombok.Setter; - -/** - * Schema definition for the legacy aspect table. - */ +/** Schema definition for the legacy aspect table. 
*/ @Getter @Setter @Entity @@ -38,9 +35,7 @@ public class EbeanAspectV1 extends Model { public static final String CREATED_BY_COLUMN = "createdBy"; public static final String CREATED_FOR_COLUMN = "createdFor"; - /** - * Key for an aspect in the table. - */ + /** Key for an aspect in the table. */ @Embeddable @Getter @AllArgsConstructor @@ -65,10 +60,7 @@ public static class PrimaryKey { private long version; } - @NonNull - @EmbeddedId - @Index - protected PrimaryKey key; + @NonNull @EmbeddedId @Index protected PrimaryKey key; @NonNull @Lob diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java index 3215542ffd347..71e52ed403b9b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java @@ -19,10 +19,7 @@ import lombok.NonNull; import lombok.Setter; - -/** - * Schema definition for the new aspect table. - */ +/** Schema definition for the new aspect table. */ @Getter @Setter @NoArgsConstructor @@ -45,9 +42,7 @@ public class EbeanAspectV2 extends Model { public static final String SYSTEM_METADATA_COLUMN = "systemmetadata"; - /** - * Key for an aspect in the table. - */ + /** Key for an aspect in the table. */ @Embeddable @Getter @AllArgsConstructor @@ -80,10 +75,7 @@ public EntityAspectIdentifier toAspectIdentifier() { } } - @NonNull - @EmbeddedId - @Index - protected PrimaryKey key; + @NonNull @EmbeddedId @Index protected PrimaryKey key; @NonNull @Column(name = URN_COLUMN, length = 500, nullable = false) @@ -115,9 +107,24 @@ public EntityAspectIdentifier toAspectIdentifier() { @Column(name = SYSTEM_METADATA_COLUMN, nullable = true) protected String systemMetadata; - public EbeanAspectV2(String urn, String aspect, long version, String metadata, Timestamp createdOn, String createdBy, - String createdFor, String systemMetadata) { - this(new PrimaryKey(urn, aspect, version), urn, aspect, version, metadata, createdOn, createdBy, createdFor, + public EbeanAspectV2( + String urn, + String aspect, + long version, + String metadata, + Timestamp createdOn, + String createdBy, + String createdFor, + String systemMetadata) { + this( + new PrimaryKey(urn, aspect, version), + urn, + aspect, + version, + metadata, + createdOn, + createdBy, + createdFor, systemMetadata); } @@ -131,8 +138,7 @@ public EntityAspect toEntityAspect() { getSystemMetadata(), getCreatedOn(), getCreatedBy(), - getCreatedFor() - ); + getCreatedFor()); } public static EbeanAspectV2 fromEntityAspect(EntityAspect aspect) { @@ -144,7 +150,6 @@ public static EbeanAspectV2 fromEntityAspect(EntityAspect aspect) { aspect.getCreatedOn(), aspect.getCreatedBy(), aspect.getCreatedFor(), - aspect.getSystemMetadata() - ); + aspect.getSystemMetadata()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java index d94ec1fa7ae2b..e12f0f8f1b5d9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java @@ -1,7 +1,8 @@ package com.linkedin.metadata.entity.ebean; -import com.linkedin.common.urn.Urn; import com.datahub.util.RecordUtils; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; @@ -13,7 +14,6 @@ import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; import com.linkedin.retention.VersionBasedRetention; -import com.linkedin.metadata.Constants; import io.ebean.Database; import io.ebean.Expression; import io.ebean.ExpressionList; @@ -36,7 +36,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class EbeanRetentionService extends RetentionService { @@ -53,22 +52,26 @@ public EntityService getEntityService() { @Override protected AspectsBatch buildAspectsBatch(List<MetadataChangeProposal> mcps) { - return AspectsBatchImpl.builder() - .mcps(mcps, _entityService.getEntityRegistry()) - .build(); + return AspectsBatchImpl.builder().mcps(mcps, _entityService.getEntityRegistry()).build(); } @Override @WithSpan protected void applyRetention(List<RetentionContext> retentionContexts) { - List<RetentionContext> nonEmptyContexts = retentionContexts.stream() - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()).collect(Collectors.toList()); + List<RetentionContext> nonEmptyContexts = + retentionContexts.stream() + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) + .collect(Collectors.toList()); // Only run delete if at least one of the retention policies are applicable if (!nonEmptyContexts.isEmpty()) { - ExpressionList<EbeanAspectV2> deleteQuery = _server.find(EbeanAspectV2.class) + ExpressionList<EbeanAspectV2> deleteQuery = + _server + .find(EbeanAspectV2.class) .where() .ne(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) .or(); @@ -78,25 +81,32 @@ protected void applyRetention(List<RetentionContext> retentionContexts) { Retention retentionPolicy = context.getRetentionPolicy().get(); if (retentionPolicy.hasVersion()) { - boolean appliedVersion = getVersionBasedRetentionQuery(context.getUrn(), context.getAspectName(), - retentionPolicy.getVersion(), context.getMaxVersion()) - .map(expr -> - deleteQuery.and() - .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) - .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) - .add(expr) - .endAnd() - ).isPresent(); + boolean appliedVersion = + getVersionBasedRetentionQuery( + context.getUrn(), + context.getAspectName(), + retentionPolicy.getVersion(), + context.getMaxVersion()) + .map( + expr -> + deleteQuery + .and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(expr) + .endAnd()) + .isPresent(); applied = appliedVersion || applied; } if (retentionPolicy.hasTime()) { - deleteQuery.and() - .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) - .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) - .add(getTimeBasedRetentionQuery(retentionPolicy.getTime())) - .endAnd(); + deleteQuery + .and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(getTimeBasedRetentionQuery(retentionPolicy.getTime())) + .endAnd(); applied = true; } } @@ -108,13 +118,15 @@ protected void applyRetention(List<RetentionContext> retentionContexts) { } private long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { - List<EbeanAspectV2> 
result = _server.find(EbeanAspectV2.class) - .where() - .eq("urn", urn) - .eq("aspect", aspectName) - .orderBy() - .desc("version") - .findList(); + List<EbeanAspectV2> result = + _server + .find(EbeanAspectV2.class) + .where() + .eq("urn", urn) + .eq("aspect", aspectName) + .orderBy() + .desc("version") + .findList(); if (result.size() == 0) { return -1; } @@ -126,57 +138,63 @@ private Optional<Expression> getVersionBasedRetentionQuery( @Nonnull String aspectName, @Nonnull final VersionBasedRetention retention, @Nonnull final Optional<Long> maxVersionFromUpdate) { - long largestVersion = maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn.toString(), aspectName)); + long largestVersion = + maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn.toString(), aspectName)); if (largestVersion < retention.getMaxVersions()) { return Optional.empty(); } return Optional.of( - new SimpleExpression(EbeanAspectV2.VERSION_COLUMN, Op.LT, largestVersion - retention.getMaxVersions() + 1)); + new SimpleExpression( + EbeanAspectV2.VERSION_COLUMN, Op.LT, largestVersion - retention.getMaxVersions() + 1)); } private Expression getTimeBasedRetentionQuery(@Nonnull final TimeBasedRetention retention) { - return new SimpleExpression(EbeanAspectV2.CREATED_ON_COLUMN, Op.LT, + return new SimpleExpression( + EbeanAspectV2.CREATED_ON_COLUMN, + Op.LT, new Timestamp(_clock.millis() - retention.getMaxAgeInSeconds() * 1000)); } private void applyRetention( - PagedList<EbeanAspectV2> rows, - Map<String, DataHubRetentionConfig> retentionPolicyMap, - BulkApplyRetentionResult applyRetentionResult - ) { + PagedList<EbeanAspectV2> rows, + Map<String, DataHubRetentionConfig> retentionPolicyMap, + BulkApplyRetentionResult applyRetentionResult) { try (Transaction transaction = _server.beginTransaction(TxScope.required())) { transaction.setBatchMode(true); transaction.setBatchSize(_batchSize); - List<RetentionContext> retentionContexts = rows.getList().stream() + List<RetentionContext> retentionContexts = + rows.getList().stream() .filter(row -> row.getVersion() != 0) - .map(row -> { - // 1. Extract an Entity type from the entity Urn - Urn urn; - try { - urn = Urn.createFromString(row.getUrn()); - } catch (Exception e) { - log.error("Failed to serialize urn {}", row.getUrn(), e); - return null; - } - - final String aspectNameFromRecord = row.getAspect(); - log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); - // Get the retention policies to apply from the local retention policy map - Optional<Retention> retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() - .map(key -> retentionPolicyMap.get(key.toString())) - .filter(Objects::nonNull) - .findFirst() - .map(DataHubRetentionConfig::getRetention); - - return RetentionService.RetentionContext.builder() + .map( + row -> { + // 1. 
Extract an Entity type from the entity Urn + Urn urn; + try { + urn = Urn.createFromString(row.getUrn()); + } catch (Exception e) { + log.error("Failed to serialize urn {}", row.getUrn(), e); + return null; + } + + final String aspectNameFromRecord = row.getAspect(); + log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); + // Get the retention policies to apply from the local retention policy map + Optional<Retention> retentionPolicy = + getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() + .map(key -> retentionPolicyMap.get(key.toString())) + .filter(Objects::nonNull) + .findFirst() + .map(DataHubRetentionConfig::getRetention); + + return RetentionService.RetentionContext.builder() .urn(urn) .aspectName(aspectNameFromRecord) .retentionPolicy(retentionPolicy) .maxVersion(Optional.of(row.getVersion())) .build(); - }) + }) .filter(Objects::nonNull) .collect(Collectors.toList()); @@ -209,7 +227,8 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } @Override - public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args) { + public BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args) { long startTime = System.currentTimeMillis(); BulkApplyRetentionResult result = new BulkApplyRetentionResult(); @@ -223,13 +242,18 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe result.timeRetentionPolicyMapMs = System.currentTimeMillis() - startTime; startTime = System.currentTimeMillis(); - //only supports version based retention for batch apply - //find urn, aspect pair where distinct versions > 20 to apply retention policy - Query<EbeanAspectV2> query = _server.find(EbeanAspectV2.class) + // only supports version based retention for batch apply + // find urn, aspect pair where distinct versions > 20 to apply retention policy + Query<EbeanAspectV2> query = + _server + .find(EbeanAspectV2.class) .setDistinct(true) - .select(String.format( - "%s, %s, count(%s)", EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, EbeanAspectV2.VERSION_COLUMN) - ); + .select( + String.format( + "%s, %s, count(%s)", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.VERSION_COLUMN)); ExpressionList<EbeanAspectV2> exp = null; if (args.urn != null || args.aspectName != null) { exp = query.where(); @@ -246,8 +270,8 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe exp = exp.having(); } - PagedList<EbeanAspectV2> rows = exp - .gt(String.format("count(%s)", EbeanAspectV2.VERSION_COLUMN), args.attemptWithVersion) + PagedList<EbeanAspectV2> rows = + exp.gt(String.format("count(%s)", EbeanAspectV2.VERSION_COLUMN), args.attemptWithVersion) .setFirstRow(args.start) .setMaxRows(args.count) .findPagedList(); @@ -262,7 +286,8 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe log.error("Failed to serialize urn {}", row.getUrn(), e); continue; } - PagedList<EbeanAspectV2> rowsToChange = queryCandidates(row.getUrn(), null, row.getAspect()) + PagedList<EbeanAspectV2> rowsToChange = + queryCandidates(row.getUrn(), null, row.getAspect()) .setFirstRow(args.start) .setMaxRows(args.count) .findPagedList(); @@ -275,25 +300,39 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe } private Map<String, DataHubRetentionConfig> getAllRetentionPolicies() { - return _server.find(EbeanAspectV2.class) - .select(String.format("%s, %s, %s", 
EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, - EbeanAspectV2.METADATA_COLUMN)) + return _server + .find(EbeanAspectV2.class) + .select( + String.format( + "%s, %s, %s", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.METADATA_COLUMN)) .where() .eq(EbeanAspectV2.ASPECT_COLUMN, Constants.DATAHUB_RETENTION_ASPECT) .eq(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) .findList() .stream() - .collect(Collectors.toMap(EbeanAspectV2::getUrn, - row -> RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, row.getMetadata()))); + .collect( + Collectors.toMap( + EbeanAspectV2::getUrn, + row -> + RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, row.getMetadata()))); } - private ExpressionList<EbeanAspectV2> queryCandidates(@Nullable String urn, - @Nullable String entityName, @Nullable String aspectName) { - ExpressionList<EbeanAspectV2> query = _server.find(EbeanAspectV2.class) - .setDistinct(true) - .select(String.format("%s, %s, max(%s)", EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, - EbeanAspectV2.VERSION_COLUMN)) - .where(); + private ExpressionList<EbeanAspectV2> queryCandidates( + @Nullable String urn, @Nullable String entityName, @Nullable String aspectName) { + ExpressionList<EbeanAspectV2> query = + _server + .find(EbeanAspectV2.class) + .setDistinct(true) + .select( + String.format( + "%s, %s, max(%s)", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.VERSION_COLUMN)) + .where(); if (urn != null) { query.eq(EbeanAspectV2.URN_COLUMN, urn); } @@ -306,10 +345,13 @@ private ExpressionList<EbeanAspectV2> queryCandidates(@Nullable String urn, return query; } - private PagedList<EbeanAspectV2> getPagedAspects(@Nullable String entityName, @Nullable String aspectName, - final int start, final int pageSize) { - return queryCandidates(null, entityName, aspectName).orderBy( - EbeanAspectV2.URN_COLUMN + ", " + EbeanAspectV2.ASPECT_COLUMN) + private PagedList<EbeanAspectV2> getPagedAspects( + @Nullable String entityName, + @Nullable String aspectName, + final int start, + final int pageSize) { + return queryCandidates(null, entityName, aspectName) + .orderBy(EbeanAspectV2.URN_COLUMN + ", " + EbeanAspectV2.ASPECT_COLUMN) .setFirstRow(start) .setMaxRows(pageSize) .findPagedList(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java index ca5e070bc5ca7..11261afdaa0b2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java @@ -5,63 +5,67 @@ import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Getter @Builder(toBuilder = true) public class AspectsBatchImpl implements AspectsBatch { - private final List<? 
extends AbstractBatchItem> items; - - public static class AspectsBatchImplBuilder { - /** - * Just one aspect record template - * @param data aspect data - * @return builder - */ - public AspectsBatchImplBuilder one(AbstractBatchItem data) { - this.items = List.of(data); - return this; - } + private final List<? extends AbstractBatchItem> items; - public AspectsBatchImplBuilder mcps(List<MetadataChangeProposal> mcps, EntityRegistry entityRegistry) { - this.items = mcps.stream().map(mcp -> { - if (mcp.getChangeType().equals(ChangeType.PATCH)) { - return PatchBatchItem.PatchBatchItemBuilder.build(mcp, entityRegistry); - } else { - return UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, entityRegistry); - } - }).collect(Collectors.toList()); - return this; - } + public static class AspectsBatchImplBuilder { + /** + * Just one aspect record template + * + * @param data aspect data + * @return builder + */ + public AspectsBatchImplBuilder one(AbstractBatchItem data) { + this.items = List.of(data); + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - AspectsBatchImpl that = (AspectsBatchImpl) o; - return Objects.equals(items, that.items); + public AspectsBatchImplBuilder mcps( + List<MetadataChangeProposal> mcps, EntityRegistry entityRegistry) { + this.items = + mcps.stream() + .map( + mcp -> { + if (mcp.getChangeType().equals(ChangeType.PATCH)) { + return PatchBatchItem.PatchBatchItemBuilder.build(mcp, entityRegistry); + } else { + return UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, entityRegistry); + } + }) + .collect(Collectors.toList()); + return this; } + } - @Override - public int hashCode() { - return Objects.hash(items); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return "AspectsBatchImpl{" + "items=" + items + '}'; + if (o == null || getClass() != o.getClass()) { + return false; } + AspectsBatchImpl that = (AspectsBatchImpl) o; + return Objects.equals(items, that.items); + } + + @Override + public int hashCode() { + return Objects.hash(items); + } + + @Override + public String toString() { + return "AspectsBatchImpl{" + "items=" + items + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java index cc0b3d915b407..f9b1e340d5541 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean.transactions; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -20,169 +22,195 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Objects; - -import static com.linkedin.metadata.Constants.*; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Getter @Builder(toBuilder = true) 
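
// For illustration: AspectsBatchImplBuilder.mcps(...) above routes each proposal to a
// patch- or upsert-specific batch item based on its change type. A minimal,
// dependency-free sketch of that dispatch follows; DemoChangeType, DemoProposal and
// DemoItem are hypothetical stand-ins for ChangeType, MetadataChangeProposal and the
// PatchBatchItem/UpsertBatchItem builders, used only to keep the sketch runnable.
import java.util.List;
import java.util.stream.Collectors;

class BatchDispatchSketch {
  enum DemoChangeType { UPSERT, PATCH }

  record DemoProposal(DemoChangeType changeType, String aspectName) {}

  record DemoItem(String kind, String aspectName) {}

  static List<DemoItem> toItems(List<DemoProposal> mcps) {
    return mcps.stream()
        .map(mcp -> mcp.changeType() == DemoChangeType.PATCH
            ? new DemoItem("patch", mcp.aspectName())   // cf. PatchBatchItemBuilder.build(...)
            : new DemoItem("upsert", mcp.aspectName())) // cf. UpsertBatchItemBuilder.build(...)
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    System.out.println(toItems(List.of(
        new DemoProposal(DemoChangeType.PATCH, "datasetProperties"),
        new DemoProposal(DemoChangeType.UPSERT, "ownership"))));
  }
}
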
public class PatchBatchItem extends AbstractBatchItem { - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final Patch patch; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.PATCH; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public UpsertBatchItem applyPatch(EntityRegistry entityRegistry, RecordTemplate recordTemplate) { + UpsertBatchItem.UpsertBatchItemBuilder builder = + UpsertBatchItem.builder() + .urn(getUrn()) + .aspectName(getAspectName()) + .metadataChangeProposal(getMetadataChangeProposal()) + .systemMetadata(getSystemMetadata()); + + AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + + RecordTemplate currentValue = + recordTemplate != null + ? recordTemplate + : aspectTemplateEngine.getDefaultTemplate(getAspectName()); + + if (currentValue == null) { + // Attempting to patch a value to an aspect which has no default value and no existing value. + throw new UnsupportedOperationException( + String.format( + "Patch not supported for aspect with name %s. 
" + + "Default aspect is required because no aspect currently exists for urn %s.", + getAspectName(), getUrn())); } - // urn an urn associated with the new aspect - private final Urn urn; - // aspectName name of the aspect being inserted - private final String aspectName; - private final SystemMetadata systemMetadata; - - private final Patch patch; - - private final MetadataChangeProposal metadataChangeProposal; - - // derived - private final EntitySpec entitySpec; - private final AspectSpec aspectSpec; - - @Override - public ChangeType getChangeType() { - return ChangeType.PATCH; + try { + builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); + } catch (JsonProcessingException | JsonPatchException e) { + throw new RuntimeException(e); } - @Override - public void validateUrn(EntityRegistry entityRegistry, Urn urn) { - EntityUtils.validateUrn(entityRegistry, urn); - } + return builder.build(entityRegistry); + } + + public static class PatchBatchItemBuilder { - public UpsertBatchItem applyPatch(EntityRegistry entityRegistry, RecordTemplate recordTemplate) { - UpsertBatchItem.UpsertBatchItemBuilder builder = UpsertBatchItem.builder() - .urn(getUrn()) - .aspectName(getAspectName()) - .metadataChangeProposal(getMetadataChangeProposal()) - .systemMetadata(getSystemMetadata()); + public PatchBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); - AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); - RecordTemplate currentValue = recordTemplate != null ? recordTemplate - : aspectTemplateEngine.getDefaultTemplate(getAspectName()); + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); - if (currentValue == null) { - // Attempting to patch a value to an aspect which has no default value and no existing value. - throw new UnsupportedOperationException(String.format("Patch not supported for aspect with name %s. " - + "Default aspect is required because no aspect currently exists for urn %s.", getAspectName(), getUrn())); - } + if (this.patch == null) { + throw new IllegalArgumentException( + String.format("Missing patch to apply. 
Aspect: %s", this.aspectSpec.getName())); + } - try { - builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); - } catch (JsonProcessingException | JsonPatchException e) { - throw new RuntimeException(e); - } + return new PatchBatchItem( + this.urn, + this.aspectName, + generateSystemMetadataIfEmpty(this.systemMetadata), + this.patch, + this.metadataChangeProposal, + this.entitySpec, + this.aspectSpec); + } - return builder.build(entityRegistry); + public static PatchBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.PATCH, aspectSpec)) { + throw new UnsupportedOperationException( + "ChangeType not supported: " + + mcp.getChangeType() + + " for aspect " + + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + PatchBatchItemBuilder builder = + PatchBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .patch(convertToJsonPatch(mcp)); + + return builder.build(entityRegistry); } - public static class PatchBatchItemBuilder { - - public PatchBatchItem build(EntityRegistry entityRegistry) { - EntityUtils.validateUrn(entityRegistry, this.urn); - log.debug("entity type = {}", this.urn.getEntityType()); - - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); - log.debug("entity spec = {}", this.entitySpec); - - aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); - log.debug("aspect spec = {}", this.aspectSpec); - - if (this.patch == null) { - throw new IllegalArgumentException(String.format("Missing patch to apply. 
Aspect: %s", - this.aspectSpec.getName())); - } - - return new PatchBatchItem(this.urn, this.aspectName, generateSystemMetadataIfEmpty(this.systemMetadata), - this.patch, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); - } - - public static PatchBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { - log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); - AspectSpec aspectSpec = validateAspect(mcp, entitySpec); - - if (!isValidChangeType(ChangeType.PATCH, aspectSpec)) { - throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() - + " for aspect " + mcp.getAspectName()); - } - - Urn urn = mcp.getEntityUrn(); - if (urn == null) { - urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); - } - - PatchBatchItemBuilder builder = PatchBatchItem.builder() - .urn(urn) - .aspectName(mcp.getAspectName()) - .systemMetadata(mcp.getSystemMetadata()) - .metadataChangeProposal(mcp) - .patch(convertToJsonPatch(mcp)); - - return builder.build(entityRegistry); - } - - private PatchBatchItemBuilder entitySpec(EntitySpec entitySpec) { - this.entitySpec = entitySpec; - return this; - } - - private PatchBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { - this.aspectSpec = aspectSpec; - return this; - } - - private static Patch convertToJsonPatch(MetadataChangeProposal mcp) { - JsonNode json; - try { - json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); - return JsonPatch.fromJson(json); - } catch (IOException e) { - throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); - } - } + private PatchBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - PatchBatchItem that = (PatchBatchItem) o; - return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && patch.equals(that.patch); + private PatchBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; } - @Override - public int hashCode() { - return Objects.hash(urn, aspectName, systemMetadata, patch); + private static Patch convertToJsonPatch(MetadataChangeProposal mcp) { + JsonNode json; + try { + json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); + return JsonPatch.fromJson(json); + } catch (IOException e) { + throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); + } } + } - @Override - public String toString() { - return "PatchBatchItem{" - + "urn=" + urn - + ", aspectName='" + aspectName - + '\'' - + ", systemMetadata=" + systemMetadata - + ", patch=" + patch - + '}'; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } + PatchBatchItem that = (PatchBatchItem) o; + return urn.equals(that.urn) + && aspectName.equals(that.aspectName) + && Objects.equals(systemMetadata, that.systemMetadata) + && patch.equals(that.patch); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, patch); + } + + @Override + public String toString() { + return "PatchBatchItem{" + + "urn=" + + urn + + ", aspectName='" + + 
aspectName + + '\'' + + ", systemMetadata=" + + systemMetadata + + ", patch=" + + patch + + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java index bd58d267a8308..c232e4846f7d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean.transactions; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.datahub.util.exception.ModelConversionException; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -16,158 +18,172 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.sql.Timestamp; +import java.util.Objects; import lombok.Builder; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import java.sql.Timestamp; -import java.util.Objects; - -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - @Slf4j @Getter @Builder(toBuilder = true) public class UpsertBatchItem extends AbstractBatchItem { - // urn an urn associated with the new aspect - private final Urn urn; - // aspectName name of the aspect being inserted - private final String aspectName; - private final SystemMetadata systemMetadata; - - private final RecordTemplate aspect; - - private final MetadataChangeProposal metadataChangeProposal; - - // derived - private final EntitySpec entitySpec; - private final AspectSpec aspectSpec; - - @Override - public ChangeType getChangeType() { - return ChangeType.UPSERT; + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final RecordTemplate aspect; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.UPSERT; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public EntityAspect toLatestEntityAspect(AuditStamp auditStamp) { + EntityAspect latest = new EntityAspect(); + latest.setAspect(getAspectName()); + latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); + latest.setUrn(getUrn().toString()); + latest.setVersion(ASPECT_LATEST_VERSION); + latest.setCreatedOn(new Timestamp(auditStamp.getTime())); + latest.setCreatedBy(auditStamp.getActor().toString()); + return latest; + } + + public static class UpsertBatchItemBuilder { + + public UpsertBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); + + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); + + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); + + ValidationUtils.validateRecordTemplate( + entityRegistry, this.entitySpec, this.urn, this.aspect); + + return new UpsertBatchItem( + this.urn, + this.aspectName, + 
AbstractBatchItem.generateSystemMetadataIfEmpty(this.systemMetadata), + this.aspect, + this.metadataChangeProposal, + this.entitySpec, + this.aspectSpec); } - @Override - public void validateUrn(EntityRegistry entityRegistry, Urn urn) { - EntityUtils.validateUrn(entityRegistry, urn); + public static UpsertBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { + throw new IllegalArgumentException( + "Invalid MCP, this class only supports change type of UPSERT."); + } + + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { + throw new UnsupportedOperationException( + "ChangeType not supported: " + + mcp.getChangeType() + + " for aspect " + + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + UpsertBatchItemBuilder builder = + UpsertBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .aspect(convertToRecordTemplate(mcp, aspectSpec)); + + return builder.build(entityRegistry); } - public EntityAspect toLatestEntityAspect(AuditStamp auditStamp) { - EntityAspect latest = new EntityAspect(); - latest.setAspect(getAspectName()); - latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); - latest.setUrn(getUrn().toString()); - latest.setVersion(ASPECT_LATEST_VERSION); - latest.setCreatedOn(new Timestamp(auditStamp.getTime())); - latest.setCreatedBy(auditStamp.getActor().toString()); - return latest; + private UpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; } - public static class UpsertBatchItemBuilder { - - public UpsertBatchItem build(EntityRegistry entityRegistry) { - EntityUtils.validateUrn(entityRegistry, this.urn); - log.debug("entity type = {}", this.urn.getEntityType()); - - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); - log.debug("entity spec = {}", this.entitySpec); - - aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); - log.debug("aspect spec = {}", this.aspectSpec); - - ValidationUtils.validateRecordTemplate(entityRegistry, this.entitySpec, this.urn, this.aspect); - - return new UpsertBatchItem(this.urn, this.aspectName, AbstractBatchItem.generateSystemMetadataIfEmpty(this.systemMetadata), - this.aspect, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); - } - - public static UpsertBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { - if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { - throw new IllegalArgumentException("Invalid MCP, this class only supports change type of UPSERT."); - } - - log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); - AspectSpec aspectSpec = validateAspect(mcp, entitySpec); - - if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { - throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() - + " for aspect " + mcp.getAspectName()); - } - - Urn urn = mcp.getEntityUrn(); - if (urn == null) { - urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); - } - - UpsertBatchItemBuilder builder = UpsertBatchItem.builder() 
- .urn(urn) - .aspectName(mcp.getAspectName()) - .systemMetadata(mcp.getSystemMetadata()) - .metadataChangeProposal(mcp) - .aspect(convertToRecordTemplate(mcp, aspectSpec)); - - return builder.build(entityRegistry); - } - - private UpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { - this.entitySpec = entitySpec; - return this; - } - - private UpsertBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { - this.aspectSpec = aspectSpec; - return this; - } - - private static RecordTemplate convertToRecordTemplate(MetadataChangeProposal mcp, AspectSpec aspectSpec) { - RecordTemplate aspect; - try { - aspect = GenericRecordUtils.deserializeAspect(mcp.getAspect().getValue(), - mcp.getAspect().getContentType(), aspectSpec); - ValidationUtils.validateOrThrow(aspect); - } catch (ModelConversionException e) { - throw new RuntimeException( - String.format("Could not deserialize %s for aspect %s", mcp.getAspect().getValue(), - mcp.getAspectName())); - } - log.debug("aspect = {}", aspect); - return aspect; - } + private UpsertBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - UpsertBatchItem that = (UpsertBatchItem) o; - return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && aspect.equals(that.aspect); + private static RecordTemplate convertToRecordTemplate( + MetadataChangeProposal mcp, AspectSpec aspectSpec) { + RecordTemplate aspect; + try { + aspect = + GenericRecordUtils.deserializeAspect( + mcp.getAspect().getValue(), mcp.getAspect().getContentType(), aspectSpec); + ValidationUtils.validateOrThrow(aspect); + } catch (ModelConversionException e) { + throw new RuntimeException( + String.format( + "Could not deserialize %s for aspect %s", + mcp.getAspect().getValue(), mcp.getAspectName())); + } + log.debug("aspect = {}", aspect); + return aspect; } + } - @Override - public int hashCode() { - return Objects.hash(urn, aspectName, systemMetadata, aspect); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return "UpsertBatchItem{" - + "urn=" - + urn - + ", aspectName='" - + aspectName - + '\'' - + ", systemMetadata=" - + systemMetadata - + ", aspect=" - + aspect - + '}'; + if (o == null || getClass() != o.getClass()) { + return false; } + UpsertBatchItem that = (UpsertBatchItem) o; + return urn.equals(that.urn) + && aspectName.equals(that.aspectName) + && Objects.equals(systemMetadata, that.systemMetadata) + && aspect.equals(that.aspect); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, aspect); + } + + @Override + public String toString() { + return "UpsertBatchItem{" + + "urn=" + + urn + + ", aspectName='" + + aspectName + + '\'' + + ", systemMetadata=" + + systemMetadata + + ", aspect=" + + aspect + + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java index 4c4bfb41867ef..ad8fbfdf2eddd 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java @@ -24,12 +24,10 @@ import 
java.util.stream.Collectors; import lombok.Setter; - public class EntityRegistryUrnValidator implements Validator { private final EntityRegistry _entityRegistry; - @Setter - private EntitySpec currentEntitySpec = null; + @Setter private EntitySpec currentEntitySpec = null; public EntityRegistryUrnValidator(EntityRegistry entityRegistry) { _entityRegistry = entityRegistry; @@ -43,45 +41,61 @@ public void validate(ValidatorContext context) { } protected void validateUrnField(ValidatorContext context) { - if (Type.TYPEREF.equals(context.dataElement().getSchema().getType()) && ((NamedDataSchema) context.dataElement() - .getSchema()).getName().endsWith("Urn")) { + if (Type.TYPEREF.equals(context.dataElement().getSchema().getType()) + && ((NamedDataSchema) context.dataElement().getSchema()).getName().endsWith("Urn")) { try { // Validate Urn matches field type and that it generates a valid key String urnStr = (String) context.dataElement().getValue(); Urn urn = Urn.createFromString(urnStr); EntitySpec entitySpec = _entityRegistry.getEntitySpec(urn.getEntityType()); - RecordTemplate entityKey = EntityKeyUtils.convertUrnToEntityKey(urn, - entitySpec.getKeyAspectSpec()); + RecordTemplate entityKey = + EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); NamedDataSchema namedDataSchema = ((NamedDataSchema) context.dataElement().getSchema()); Class<? extends Urn> urnClass; try { - String schemaName = ((Map<String, String>) namedDataSchema.getProperties().get("java")).get("class"); + String schemaName = + ((Map<String, String>) namedDataSchema.getProperties().get("java")).get("class"); urnClass = (Class<? extends Urn>) Class.forName(schemaName); urnClass.getDeclaredMethod("createFromString", String.class).invoke(null, urnStr); } catch (ClassNotFoundException | ClassCastException | NoSuchMethodException e) { - throw new IllegalArgumentException("Unrecognized Urn class: " + namedDataSchema.getName(), e); + throw new IllegalArgumentException( + "Unrecognized Urn class: " + namedDataSchema.getName(), e); } catch (InvocationTargetException | IllegalAccessException e) { - throw new IllegalArgumentException("Unable to instantiate urn type: " + namedDataSchema.getName() + " with urn: " + urnStr, e); + throw new IllegalArgumentException( + "Unable to instantiate urn type: " + + namedDataSchema.getName() + + " with urn: " + + urnStr, + e); } // Validate generic Urn is valid entity type for relationship destination PathSpec fieldPath = context.dataElement().getSchemaPathSpec(); - List<RelationshipFieldSpec> relationshipSpecs = currentEntitySpec.getRelationshipFieldSpecs().stream().filter(relationshipFieldSpec -> - relationshipFieldSpec.getPath().equals(fieldPath)) - .collect(Collectors.toList()); + List<RelationshipFieldSpec> relationshipSpecs = + currentEntitySpec.getRelationshipFieldSpecs().stream() + .filter(relationshipFieldSpec -> relationshipFieldSpec.getPath().equals(fieldPath)) + .collect(Collectors.toList()); if (!relationshipSpecs.isEmpty()) { for (RelationshipFieldSpec relationshipFieldSpec : relationshipSpecs) { - boolean isValidDestination = relationshipFieldSpec.getValidDestinationTypes().stream() - .anyMatch(destinationType -> destinationType.equals(urn.getEntityType())); + boolean isValidDestination = + relationshipFieldSpec.getValidDestinationTypes().stream() + .anyMatch(destinationType -> destinationType.equals(urn.getEntityType())); if (!isValidDestination) { throw new IllegalArgumentException( - "Entity type for urn: " + urn + " is not a valid destination for field path: " 
+                    fieldPath);
+          }
+        }
+      }
     } catch (URISyntaxException | IllegalArgumentException e) {
-      context.addResult(new Message(context.dataElement().path(), "\"Provided urn %s\" is invalid: %s",
-          context.dataElement().getValue(), e.getMessage()));
+      context.addResult(
+          new Message(
+              context.dataElement().path(),
+              "\"Provided urn %s\" is invalid: %s",
+              context.dataElement().getValue(),
+              e.getMessage()));
       context.setHasFix(false);
     }
   }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java
index e7934bc47be3f..12e39f0349143 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java
@@ -15,45 +15,41 @@ @Slf4j
 public class RecordTemplateValidator {
 
-  private static final ValidationOptions DEFAULT_VALIDATION_OPTIONS = new ValidationOptions(
-      RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT,
-      CoercionMode.NORMAL,
-      UnrecognizedFieldMode.DISALLOW
-  );
-
-  private static final UrnValidator URN_VALIDATOR = new UrnValidator();
-
-  /**
-   * Validates a {@link RecordTemplate} and applies a function if validation fails
-   *
-   * @param record record to be validated.ailure.
-   */
-  public static void validate(RecordTemplate record, Consumer<ValidationResult> onValidationFailure) {
-    final ValidationResult result = ValidateDataAgainstSchema.validate(
-        record,
-        DEFAULT_VALIDATION_OPTIONS,
-        URN_VALIDATOR);
-    if (!result.isValid()) {
-      onValidationFailure.accept(result);
-    }
+  private static final ValidationOptions DEFAULT_VALIDATION_OPTIONS =
+      new ValidationOptions(
+          RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT,
+          CoercionMode.NORMAL,
+          UnrecognizedFieldMode.DISALLOW);
+
+  private static final UrnValidator URN_VALIDATOR = new UrnValidator();
+
+  /**
+   * Validates a {@link RecordTemplate} and applies a function if validation fails
+   *
+   * @param record record to be validated.
+   */
+  public static void validate(
+      RecordTemplate record, Consumer<ValidationResult> onValidationFailure) {
+    final ValidationResult result =
+        ValidateDataAgainstSchema.validate(record, DEFAULT_VALIDATION_OPTIONS, URN_VALIDATOR);
+    if (!result.isValid()) {
+      onValidationFailure.accept(result);
     }
-
-  /**
-   * Validates a {@link RecordTemplate} and applies a function if validation fails
-   *
-   * @param record record to be validated.ailure.
-   */
-  public static void validate(RecordTemplate record, Consumer<ValidationResult> onValidationFailure, Validator validator) {
-    final ValidationResult result = ValidateDataAgainstSchema.validate(
-        record,
-        DEFAULT_VALIDATION_OPTIONS,
-        validator);
-    if (!result.isValid()) {
-      onValidationFailure.accept(result);
-    }
+  }
+
+  /**
+   * Validates a {@link RecordTemplate} and applies a function if validation fails
+   *
+   * @param record record to be validated.
+ */ + public static void validate( + RecordTemplate record, Consumer<ValidationResult> onValidationFailure, Validator validator) { + final ValidationResult result = + ValidateDataAgainstSchema.validate(record, DEFAULT_VALIDATION_OPTIONS, validator); + if (!result.isValid()) { + onValidationFailure.accept(result); } + } - private RecordTemplateValidator() { - - } + private RecordTemplateValidator() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java index e0b026fa84d18..6a86a02a94449 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.entity.validation; -/** - * Exception thrown when a metadata record cannot be validated against its schema. - */ +/** Exception thrown when a metadata record cannot be validated against its schema. */ public class ValidationException extends RuntimeException { public ValidationException(final String message) { super(message); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java index 6182b27333cbb..7f23bacdc4758 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java @@ -7,27 +7,27 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import lombok.extern.slf4j.Slf4j; - import java.util.function.Consumer; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class ValidationUtils { /** - * Validates a {@link RecordTemplate} and throws {@link com.linkedin.restli.server.RestLiServiceException} - * if validation fails. + * Validates a {@link RecordTemplate} and throws {@link + * com.linkedin.restli.server.RestLiServiceException} if validation fails. * * @param record record to be validated. */ public static void validateOrThrow(RecordTemplate record) { - RecordTemplateValidator.validate(record, validationResult -> { - throw new ValidationException( - String.format("Failed to validate record with class %s: %s", - record.getClass().getName(), - validationResult.getMessages().toString())); - }); + RecordTemplateValidator.validate( + record, + validationResult -> { + throw new ValidationException( + String.format( + "Failed to validate record with class %s: %s", + record.getClass().getName(), validationResult.getMessages().toString())); + }); } /** @@ -36,41 +36,51 @@ public static void validateOrThrow(RecordTemplate record) { * @param record record to be validated.ailure. 
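
// For illustration: RecordTemplateValidator and ValidationUtils above hand the
// ValidationResult to a caller-supplied Consumer, so one check can either throw
// (validateOrThrow) or merely log (validateOrWarn). A self-contained sketch of that
// callback pattern; DemoResult and the empty-string check are hypothetical stand-ins
// for ValidationResult and the Pegasus schema validation.
import java.util.function.Consumer;

class ValidationCallbackSketch {
  record DemoResult(boolean valid, String message) {}

  static void validate(String record, Consumer<DemoResult> onValidationFailure) {
    // Stand-in for ValidateDataAgainstSchema.validate(...)
    DemoResult result = (record == null || record.isEmpty())
        ? new DemoResult(false, "record must not be empty")
        : new DemoResult(true, "");
    if (!result.valid()) {
      onValidationFailure.accept(result);
    }
  }

  public static void main(String[] args) {
    // validateOrThrow-style caller: escalate failures to an exception
    try {
      validate("", r -> { throw new IllegalStateException(r.message()); });
    } catch (IllegalStateException e) {
      System.out.println("thrown: " + e.getMessage());
    }
    // validateOrWarn-style caller: log and continue
    validate("", r -> System.out.println("warn: " + r.message()));
  }
}
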
*/ public static void validateOrWarn(RecordTemplate record) { - RecordTemplateValidator.validate(record, validationResult -> { - log.warn(String.format("Failed to validate record %s against its schema.", record)); - }); + RecordTemplateValidator.validate( + record, + validationResult -> { + log.warn(String.format("Failed to validate record %s against its schema.", record)); + }); } public static AspectSpec validate(EntitySpec entitySpec, String aspectName) { if (aspectName == null || aspectName.isEmpty()) { - throw new UnsupportedOperationException("Aspect name is required for create and update operations"); + throw new UnsupportedOperationException( + "Aspect name is required for create and update operations"); } AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { throw new RuntimeException( - String.format("Unknown aspect %s for entity %s", aspectName, entitySpec.getName())); + String.format("Unknown aspect %s for entity %s", aspectName, entitySpec.getName())); } return aspectSpec; } - public static void validateRecordTemplate(EntityRegistry entityRegistry, EntitySpec entitySpec, Urn urn, RecordTemplate aspect) { + public static void validateRecordTemplate( + EntityRegistry entityRegistry, EntitySpec entitySpec, Urn urn, RecordTemplate aspect) { EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); validator.setCurrentEntitySpec(entitySpec); - Consumer<ValidationResult> resultFunction = validationResult -> { - throw new IllegalArgumentException("Invalid format for aspect: " + entitySpec.getName() + "\n Cause: " - + validationResult.getMessages()); }; - RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); + Consumer<ValidationResult> resultFunction = + validationResult -> { + throw new IllegalArgumentException( + "Invalid format for aspect: " + + entitySpec.getName() + + "\n Cause: " + + validationResult.getMessages()); + }; + RecordTemplateValidator.validate( + EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); RecordTemplateValidator.validate(aspect, resultFunction, validator); } - public static void validateRecordTemplate(EntityRegistry entityRegistry, Urn urn, RecordTemplate aspect) { + public static void validateRecordTemplate( + EntityRegistry entityRegistry, Urn urn, RecordTemplate aspect) { EntitySpec entitySpec = entityRegistry.getEntitySpec(urn.getEntityType()); validateRecordTemplate(entityRegistry, entitySpec, urn, aspect); } - private ValidationUtils() { - } -} \ No newline at end of file + private ValidationUtils() {} +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java b/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java index 90e171d3c357e..becf86cdbe92f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java @@ -4,22 +4,20 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.snapshot.Snapshot; -import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataAuditOperation; +import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. 
- */ +/** Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. */ public interface EntityEventProducer { /** - * Produces a {@link com.linkedin.mxe.MetadataAuditEvent} from a - * new & previous Entity {@link Snapshot}. - * @param urn the urn associated with the entity changed + * Produces a {@link com.linkedin.mxe.MetadataAuditEvent} from a new & previous Entity {@link + * Snapshot}. + * + * @param urn the urn associated with the entity changed * @param oldSnapshot a {@link RecordTemplate} corresponding to the old snapshot. * @param newSnapshot a {@link RecordTemplate} corresponding to the new snapshot. * @param oldSystemMetadata @@ -31,12 +29,10 @@ void produceMetadataAuditEvent( @Nonnull final Snapshot newSnapshot, @Nullable SystemMetadata oldSystemMetadata, @Nullable SystemMetadata newSystemMetadata, - MetadataAuditOperation operation - ); + MetadataAuditOperation operation); /** - * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a - * new & previous aspect. + * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a new & previous aspect. * * @param urn the urn associated with the entity changed * @param aspectSpec aspect spec of the aspect being updated @@ -45,6 +41,5 @@ void produceMetadataAuditEvent( void produceMetadataChangeLog( @Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog - ); + @Nonnull final MetadataChangeLog metadataChangeLog); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java index ffadc07124727..a809c7f9a3e31 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java @@ -12,61 +12,52 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. - */ +/** Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. */ public interface EventProducer { /** - * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a - * new & previous aspect. + * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a new & previous aspect. * * @param urn the urn associated with the entity changed * @param aspectSpec aspect spec of the aspect being updated * @param metadataChangeLog metadata change log to push into MCL kafka topic - * * @return A {@link Future} object that reports when the message has been produced. */ Future<?> produceMetadataChangeLog( @Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog - ); + @Nonnull final MetadataChangeLog metadataChangeLog); /** * Produces a {@link com.linkedin.mxe.MetadataChangeProposal} as an async update to an entity * * @param urn the urn associated with the change proposal. * @param metadataChangeProposal metadata change proposal to push into MCP kafka topic. - * * @return A {@link Future} object that reports when the message has been produced. */ @WithSpan - Future<?> produceMetadataChangeProposal(@Nonnull final Urn urn, - @Nonnull MetadataChangeProposal metadataChangeProposal); + Future<?> produceMetadataChangeProposal( + @Nonnull final Urn urn, @Nonnull MetadataChangeProposal metadataChangeProposal); /** * Produces a generic platform "event". 
* - * @param name the name, or type, of the event to produce, as defined in the {@link EntityRegistry}. - * @param key an optional partitioning key for the event. If not provided, the name of the event will be used. - * @param payload the event payload itself. This will be serialized to JSON and produced as a system event. - * + * @param name the name, or type, of the event to produce, as defined in the {@link + * EntityRegistry}. + * @param key an optional partitioning key for the event. If not provided, the name of the event + * will be used. + * @param payload the event payload itself. This will be serialized to JSON and produced as a + * system event. * @return A {@link Future} object that reports when the message has been produced. */ Future<?> producePlatformEvent( - @Nonnull String name, - @Nullable String key, - @Nonnull PlatformEvent payload - ); + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent payload); /** - * Creates an entry on the history log of when the indices were last rebuilt with the latest configuration. + * Creates an entry on the history log of when the indices were last rebuilt with the latest + * configuration. * * @param event the history event to send to the DataHub Upgrade history topic */ - void produceDataHubUpgradeHistoryEvent( - @Nonnull DataHubUpgradeHistoryEvent event - ); + void produceDataHubUpgradeHistoryEvent(@Nonnull DataHubUpgradeHistoryEvent event); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java index 891844045b016..c54ba4a222b73 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; + import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationshipArray; import com.linkedin.common.EntityRelationships; @@ -14,54 +16,60 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; - - @Slf4j public class JavaGraphClient implements GraphClient { GraphService _graphService; + public JavaGraphClient(@Nonnull GraphService graphService) { this._graphService = graphService; } /** - * Returns a list of related entities for a given entity, set of edge types, and direction relative to the - * source node + * Returns a list of related entities for a given entity, set of edge types, and direction + * relative to the source node */ @Nonnull @Override - public EntityRelationships getRelatedEntities(String rawUrn, List<String> relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count, String actor) { + public EntityRelationships getRelatedEntities( + String rawUrn, + List<String> relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count, + String actor) { start = start == null ? 0 : start; count = count == null ? 
DEFAULT_PAGE_SIZE : count; RelatedEntitiesResult relatedEntitiesResult = - _graphService.findRelatedEntities(null, + _graphService.findRelatedEntities( + null, QueryUtils.newFilter("urn", rawUrn), null, EMPTY_FILTER, relationshipTypes, QueryUtils.newRelationshipFilter(EMPTY_FILTER, direction), start, - count - ); + count); - final EntityRelationshipArray entityArray = new EntityRelationshipArray( - relatedEntitiesResult.getEntities().stream().map( - entity -> { - try { - return new EntityRelationship() - .setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType()); - } catch (URISyntaxException e) { - throw new RuntimeException( - String.format("Failed to convert urnStr %s found in the Graph to an Urn object", entity.getUrn())); - } - } - ).collect(Collectors.toList()) - ); + final EntityRelationshipArray entityArray = + new EntityRelationshipArray( + relatedEntitiesResult.getEntities().stream() + .map( + entity -> { + try { + return new EntityRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to convert urnStr %s found in the Graph to an Urn object", + entity.getUrn())); + } + }) + .collect(Collectors.toList())); return new EntityRelationships() .setStart(relatedEntitiesResult.getStart()) @@ -71,14 +79,23 @@ public EntityRelationships getRelatedEntities(String rawUrn, List<String> relati } /** - * Returns lineage relationships for given entity in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * Returns lineage relationships for given entity in the DataHub graph. Lineage relationship + * denotes whether an entity is directly upstream or downstream of another entity */ @Nonnull @Override - public EntityLineageResult getLineageEntities(String rawUrn, LineageDirection direction, @Nullable Integer start, - @Nullable Integer count, int maxHops, String actor) { - return _graphService.getLineage(UrnUtils.getUrn(rawUrn), direction, start != null ? start : 0, - count != null ? count : 100, maxHops); + public EntityLineageResult getLineageEntities( + String rawUrn, + LineageDirection direction, + @Nullable Integer start, + @Nullable Integer count, + int maxHops, + String actor) { + return _graphService.getLineage( + UrnUtils.getUrn(rawUrn), + direction, + start != null ? start : 0, + count != null ? 
count : 100, + maxHops); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java index 7a2f0825b31cc..bdf405fe36c07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.Siblings; @@ -20,9 +22,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class SiblingGraphService { @@ -31,58 +30,55 @@ public class SiblingGraphService { private final GraphService _graphService; @Nonnull - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops) { - return ValidationUtils.validateEntityLineageResult(getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - false, - new HashSet<>(), - null, - null), - _entityService); + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops) { + return ValidationUtils.validateEntityLineageResult( + getLineage( + entityUrn, direction, offset, count, maxHops, false, new HashSet<>(), null, null), + _entityService); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - int offset, int count, int maxHops, boolean separateSiblings, @Nonnull Set<Urn> visitedUrns, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops, + boolean separateSiblings, + @Nonnull Set<Urn> visitedUrns, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { if (separateSiblings) { - return ValidationUtils.validateEntityLineageResult(_graphService.getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - startTimeMillis, - endTimeMillis), _entityService); + return ValidationUtils.validateEntityLineageResult( + _graphService.getLineage( + entityUrn, direction, offset, count, maxHops, startTimeMillis, endTimeMillis), + _entityService); } if (maxHops > 1) { throw new UnsupportedOperationException( - String.format("More than 1 hop is not supported for %s", this.getClass().getSimpleName())); + String.format( + "More than 1 hop is not supported for %s", this.getClass().getSimpleName())); } EntityLineageResult entityLineage = _graphService.getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - startTimeMillis, - endTimeMillis); + entityUrn, direction, offset, count, maxHops, startTimeMillis, endTimeMillis); - 
Siblings siblingAspectOfEntity = (Siblings) _entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); + Siblings siblingAspectOfEntity = + (Siblings) _entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); // if you have siblings, we want to fetch their lineage too and merge it in if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) { @@ -104,19 +100,23 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi if (visitedUrns.contains(siblingUrn)) { continue; } - // need to call siblingGraphService to get sibling results for this sibling entity in case there is more than one sibling - EntityLineageResult nextEntityLineage = filterLineageResultFromSiblings(siblingUrn, allSiblingsInGroup, - getLineage( + // need to call siblingGraphService to get sibling results for this sibling entity in case + // there is more than one sibling + EntityLineageResult nextEntityLineage = + filterLineageResultFromSiblings( siblingUrn, - direction, - offset, - count, - maxHops, - false, - visitedUrns, - startTimeMillis, - endTimeMillis), - entityLineage); + allSiblingsInGroup, + getLineage( + siblingUrn, + direction, + offset, + count, + maxHops, + false, + visitedUrns, + startTimeMillis, + endTimeMillis), + entityLineage); // Update offset and count to fetch the correct number of edges from the next sibling node offset = Math.max(0, offset - nextEntityLineage.getTotal()); @@ -124,86 +124,116 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi entityLineage.setFiltered(getFiltered(entityLineage) + getFiltered(nextEntityLineage)); entityLineage = nextEntityLineage; - }; + } + ; } return ValidationUtils.validateEntityLineageResult(entityLineage, _entityService); } private int getFiltered(@Nullable EntityLineageResult entityLineageResult) { - return (entityLineageResult != null && entityLineageResult.getFiltered() != null ? entityLineageResult.getFiltered() : 0); + return (entityLineageResult != null && entityLineageResult.getFiltered() != null + ? 
entityLineageResult.getFiltered() + : 0); } - // takes a lineage result and removes any nodes that are siblings of some other node already in the result + // takes a lineage result and removes any nodes that are siblings of some other node already in + // the result private EntityLineageResult filterLineageResultFromSiblings( @Nonnull final Urn urn, @Nonnull final Set<Urn> allSiblingsInGroup, @Nonnull final EntityLineageResult entityLineageResult, - @Nullable final EntityLineageResult existingResult - ) { + @Nullable final EntityLineageResult existingResult) { int numFiltered = 0; // 1) remove the source entities siblings from this entity's downstreams - final Map<Boolean, List<LineageRelationship>> partitionedFilteredRelationships = entityLineageResult.getRelationships() - .stream().collect(Collectors.partitioningBy( - lineageRelationship -> !allSiblingsInGroup.contains(lineageRelationship.getEntity()) - || lineageRelationship.getEntity().equals(urn))); + final Map<Boolean, List<LineageRelationship>> partitionedFilteredRelationships = + entityLineageResult.getRelationships().stream() + .collect( + Collectors.partitioningBy( + lineageRelationship -> + !allSiblingsInGroup.contains(lineageRelationship.getEntity()) + || lineageRelationship.getEntity().equals(urn))); numFiltered += partitionedFilteredRelationships.get(Boolean.FALSE).size(); - final List<LineageRelationship> filteredRelationships = partitionedFilteredRelationships.get(Boolean.TRUE); + final List<LineageRelationship> filteredRelationships = + partitionedFilteredRelationships.get(Boolean.TRUE); // 2) filter out existing lineage to avoid duplicates in our combined result - final Set<Urn> existingUrns = existingResult != null - ? existingResult.getRelationships().stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()) - : new HashSet<>(); - - Map<Boolean, List<LineageRelationship>> partitionedUniqueFilteredRelationships = filteredRelationships.stream().collect( - Collectors.partitioningBy(lineageRelationship -> !existingUrns.contains(lineageRelationship.getEntity()))); + final Set<Urn> existingUrns = + existingResult != null + ? existingResult.getRelationships().stream() + .map(LineageRelationship::getEntity) + .collect(Collectors.toSet()) + : new HashSet<>(); + + Map<Boolean, List<LineageRelationship>> partitionedUniqueFilteredRelationships = + filteredRelationships.stream() + .collect( + Collectors.partitioningBy( + lineageRelationship -> + !existingUrns.contains(lineageRelationship.getEntity()))); numFiltered += partitionedUniqueFilteredRelationships.get(Boolean.FALSE).size(); - List<LineageRelationship> uniqueFilteredRelationships = partitionedUniqueFilteredRelationships.get(Boolean.TRUE); + List<LineageRelationship> uniqueFilteredRelationships = + partitionedUniqueFilteredRelationships.get(Boolean.TRUE); // 3) combine this entity's lineage with the lineage we've already seen - final List<LineageRelationship> combinedResults = Stream.concat( - uniqueFilteredRelationships.stream(), - existingResult != null ? existingResult.getRelationships().stream() : ImmutableList.<LineageRelationship>of().stream()) - .collect(Collectors.toList()); + final List<LineageRelationship> combinedResults = + Stream.concat( + uniqueFilteredRelationships.stream(), + existingResult != null + ? 
existingResult.getRelationships().stream() + : ImmutableList.<LineageRelationship>of().stream()) + .collect(Collectors.toList()); // 4) fetch the siblings of each lineage result - final Set<Urn> combinedResultUrns = combinedResults.stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()); + final Set<Urn> combinedResultUrns = + combinedResults.stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()); final Map<Urn, List<RecordTemplate>> siblingAspects = _entityService.getLatestAspects(combinedResultUrns, ImmutableSet.of(SIBLINGS_ASPECT_NAME)); - // 5) if you are not primary & your sibling is in the results, filter yourself out of the return set - Map<Boolean, List<LineageRelationship>> partitionedFilteredSiblings = combinedResults.stream().collect(Collectors.partitioningBy(result -> { - Optional<RecordTemplate> optionalSiblingsAspect = siblingAspects.get(result.getEntity()).stream().filter( - aspect -> aspect instanceof Siblings - ).findAny(); - - if (optionalSiblingsAspect.isEmpty()) { - return true; - } - - final Siblings siblingsAspect = (Siblings) optionalSiblingsAspect.get(); - - if (siblingsAspect.isPrimary()) { - return true; - } - - // if you are not primary and your sibling exists in the result set, filter yourself out - return siblingsAspect.getSiblings().stream().noneMatch(combinedResultUrns::contains); - })); + // 5) if you are not primary & your sibling is in the results, filter yourself out of the return + // set + Map<Boolean, List<LineageRelationship>> partitionedFilteredSiblings = + combinedResults.stream() + .collect( + Collectors.partitioningBy( + result -> { + Optional<RecordTemplate> optionalSiblingsAspect = + siblingAspects.get(result.getEntity()).stream() + .filter(aspect -> aspect instanceof Siblings) + .findAny(); + + if (optionalSiblingsAspect.isEmpty()) { + return true; + } + + final Siblings siblingsAspect = (Siblings) optionalSiblingsAspect.get(); + + if (siblingsAspect.isPrimary()) { + return true; + } + + // if you are not primary and your sibling exists in the result set, filter + // yourself out + return siblingsAspect.getSiblings().stream() + .noneMatch(combinedResultUrns::contains); + })); numFiltered += partitionedFilteredSiblings.get(Boolean.FALSE).size(); uniqueFilteredRelationships = partitionedFilteredSiblings.get(Boolean.TRUE); EntityLineageResult combinedLineageResult = new EntityLineageResult(); combinedLineageResult.setStart(entityLineageResult.getStart()); - combinedLineageResult.setRelationships(new LineageRelationshipArray(uniqueFilteredRelationships)); - combinedLineageResult.setTotal(entityLineageResult.getTotal() + (existingResult != null ? existingResult.getTotal() : 0)); + combinedLineageResult.setRelationships( + new LineageRelationshipArray(uniqueFilteredRelationships)); + combinedLineageResult.setTotal( + entityLineageResult.getTotal() + (existingResult != null ? 
existingResult.getTotal() : 0)); combinedLineageResult.setCount(uniqueFilteredRelationships.size()); - combinedLineageResult.setFiltered(numFiltered + getFiltered(existingResult) + getFiltered(entityLineageResult)); + combinedLineageResult.setFiltered( + numFiltered + getFiltered(existingResult) + getFiltered(entityLineageResult)); return ValidationUtils.validateEntityLineageResult(combinedLineageResult, _entityService); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java index dcef0f9f192ed..393297b64e0d9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java @@ -6,93 +6,96 @@ import io.github.resilience4j.retry.Retry; import io.github.resilience4j.retry.RetryConfig; import io.grpc.StatusRuntimeException; -import lombok.extern.slf4j.Slf4j; - import java.time.Duration; import java.util.concurrent.ExecutionException; import java.util.function.Consumer; import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DgraphExecutor { - // requests are retried with an exponential randomized backoff - // wait 0.01s, 0.02s, 0.04s, 0.08s, ..., 10s, all ±50% - private static final Duration INITIAL_DURATION = Duration.ofMillis(10); - private static final Duration MAX_DURATION = Duration.ofSeconds(10); - private static final double BACKOFF_MULTIPLIER = 2.0; - private static final double RANDOMIZATION_FACTOR = 0.5; + // requests are retried with an exponential randomized backoff + // wait 0.01s, 0.02s, 0.04s, 0.08s, ..., 10s, all ±50% + private static final Duration INITIAL_DURATION = Duration.ofMillis(10); + private static final Duration MAX_DURATION = Duration.ofSeconds(10); + private static final double BACKOFF_MULTIPLIER = 2.0; + private static final double RANDOMIZATION_FACTOR = 0.5; - private final DgraphClient _client; - private final Retry _retry; + private final DgraphClient _client; + private final Retry _retry; - public DgraphExecutor(DgraphClient client, int maxAttempts) { - this._client = client; + public DgraphExecutor(DgraphClient client, int maxAttempts) { + this._client = client; - RetryConfig config = RetryConfig.custom() - .intervalFunction(IntervalFunction.ofExponentialRandomBackoff(INITIAL_DURATION, BACKOFF_MULTIPLIER, RANDOMIZATION_FACTOR, MAX_DURATION)) - .retryOnException(DgraphExecutor::isRetryableException) - .failAfterMaxAttempts(true) - .maxAttempts(maxAttempts) - .build(); - this._retry = Retry.of("DgraphExecutor", config); - } + RetryConfig config = + RetryConfig.custom() + .intervalFunction( + IntervalFunction.ofExponentialRandomBackoff( + INITIAL_DURATION, BACKOFF_MULTIPLIER, RANDOMIZATION_FACTOR, MAX_DURATION)) + .retryOnException(DgraphExecutor::isRetryableException) + .failAfterMaxAttempts(true) + .maxAttempts(maxAttempts) + .build(); + this._retry = Retry.of("DgraphExecutor", config); + } - /** - * Executes the given DgraphClient call and retries retry-able exceptions. - * Subsequent executions will experience an exponential randomized backoff. 
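For a concrete sense of these constants: the nominal wait before retry attempt n doubles from 10 ms until it reaches the 10 s cap, and each wait is then randomized by ±50%. A minimal standalone sketch of that schedule, in plain Java without the resilience4j dependency (the attempt count of 12 is arbitrary):

import java.util.concurrent.ThreadLocalRandom;

public class BackoffScheduleSketch {
  public static void main(String[] args) {
    long nominalMillis = 10;        // INITIAL_DURATION
    final long maxMillis = 10_000;  // MAX_DURATION
    final double multiplier = 2.0;  // BACKOFF_MULTIPLIER
    final double jitter = 0.5;      // RANDOMIZATION_FACTOR

    for (int attempt = 1; attempt <= 12; attempt++) {
      // randomize the nominal wait by +/-50%, as an exponential
      // randomized backoff does between attempts
      double factor = 1 + jitter * (2 * ThreadLocalRandom.current().nextDouble() - 1);
      System.out.printf("attempt %2d: nominal %5d ms, jittered %5d ms%n",
          attempt, nominalMillis, (long) (nominalMillis * factor));
      nominalMillis = Math.min((long) (nominalMillis * multiplier), maxMillis);
    }
  }
}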
- * - * @param func call on the provided DgraphClient - * @param <T> return type of the function - * @return return value of the function - * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded - */ - public <T> T executeFunction(Function<DgraphClient, T> func) { - return Retry.decorateFunction(this._retry, func).apply(_client); - } + /** + * Executes the given DgraphClient call and retries retry-able exceptions. Subsequent executions + * will experience an exponential randomized backoff. + * + * @param func call on the provided DgraphClient + * @param <T> return type of the function + * @return return value of the function + * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded + */ + public <T> T executeFunction(Function<DgraphClient, T> func) { + return Retry.decorateFunction(this._retry, func).apply(_client); + } - /** - * Executes the given DgraphClient call and retries retry-able exceptions. - * Subsequent executions will experience an exponential randomized backoff. - * - * @param func call on the provided DgraphClient - * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded - */ - public void executeConsumer(Consumer<DgraphClient> func) { - this._retry.executeSupplier(() -> { - func.accept(_client); - return null; + /** + * Executes the given DgraphClient call and retries retry-able exceptions. Subsequent executions + * will experience an exponential randomized backoff. + * + * @param func call on the provided DgraphClient + * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded + */ + public void executeConsumer(Consumer<DgraphClient> func) { + this._retry.executeSupplier( + () -> { + func.accept(_client); + return null; }); - } + } - /** - * Defines which DgraphClient exceptions are being retried. - * - * @param t exception from DgraphClient - * @return true if this exception can be retried - */ - private static boolean isRetryableException(Throwable t) { - // unwrap RuntimeException and ExecutionException - while (true) { - if ((t instanceof RuntimeException || t instanceof ExecutionException) && t.getCause() != null) { - t = t.getCause(); - continue; - } - break; - } + /** + * Defines which DgraphClient exceptions are being retried. 
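As a usage sketch of the two entry points above (the endpoint and the client wiring here are hypothetical, not taken from this patch):

import io.dgraph.DgraphClient;
import io.dgraph.DgraphGrpc;
import io.dgraph.DgraphProto.Operation;
import io.dgraph.DgraphProto.Request;
import io.dgraph.DgraphProto.Response;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

public class DgraphExecutorUsageSketch {
  public static void main(String[] args) {
    // assumed: a local Dgraph alpha listening on the default gRPC port
    ManagedChannel channel =
        ManagedChannelBuilder.forAddress("localhost", 9080).usePlaintext().build();
    DgraphClient client = new DgraphClient(DgraphGrpc.newStub(channel));
    DgraphExecutor executor = new DgraphExecutor(client, 160);

    // executeFunction is for calls that return a value; retry-able
    // failures are retried with the backoff configured above
    Response response =
        executor.executeFunction(
            c -> c.newReadOnlyTransaction()
                .doRequest(Request.newBuilder().setQuery("schema { predicate }").build()));
    System.out.println(response.getJson().toStringUtf8());

    // executeConsumer is for calls with no return value, e.g. schema alterations
    executor.executeConsumer(
        c -> c.alter(Operation.newBuilder()
            .setSchema("<urn>: string @index(hash) @upsert .").build()));

    channel.shutdown();
  }
}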
+ * + * @param t exception from DgraphClient + * @return true if this exception can be retried + */ + private static boolean isRetryableException(Throwable t) { + // unwrap RuntimeException and ExecutionException + while (true) { + if ((t instanceof RuntimeException || t instanceof ExecutionException) + && t.getCause() != null) { + t = t.getCause(); + continue; + } + break; + } - // retry-able exceptions - if (t instanceof TxnConflictException - || t instanceof StatusRuntimeException && ( - t.getMessage().contains("operation opIndexing is already running") - || t.getMessage().contains("Please retry") - || t.getMessage().contains("DEADLINE_EXCEEDED:") - || t.getMessage().contains("context deadline exceeded") - || t.getMessage().contains("Only leader can decide to commit or abort") - )) { - log.debug("retrying request due to {}", t.getMessage()); - return true; - } - return false; + // retry-able exceptions + if (t instanceof TxnConflictException + || t instanceof StatusRuntimeException + && (t.getMessage().contains("operation opIndexing is already running") + || t.getMessage().contains("Please retry") + || t.getMessage().contains("DEADLINE_EXCEEDED:") + || t.getMessage().contains("context deadline exceeded") + || t.getMessage().contains("Only leader can decide to commit or abort"))) { + log.debug("retrying request due to {}", t.getMessage()); + return true; } + return false; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java index 14a9a17401702..0d8b7655fddeb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph.dgraph; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; @@ -41,665 +43,740 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DgraphGraphService implements GraphService { - // calls to Dgraph cluster will be retried if they throw retry-able exceptions - // with a max number of attempts of 160 a call will finally fail after around 15 minutes - private static final int MAX_ATTEMPTS = 160; - - private final @Nonnull DgraphExecutor _dgraph; - private final @Nonnull LineageRegistry _lineageRegistry; - - private static final String URN_RELATIONSHIP_TYPE = "urn"; - private static final String TYPE_RELATIONSHIP_TYPE = "type"; - private static final String KEY_RELATIONSHIP_TYPE = "key"; - - - @Getter(lazy = true) - // we want to defer initialization of schema (accessing Dgraph server) to the first time accessing _schema - private final DgraphSchema _schema = getSchema(); - - public DgraphGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull DgraphClient client) { - _lineageRegistry = lineageRegistry; - this._dgraph = new DgraphExecutor(client, MAX_ATTEMPTS); - } - - protected @Nonnull DgraphSchema getSchema() { - Response response = _dgraph.executeFunction(dgraphClient -> - dgraphClient.newReadOnlyTransaction().doRequest( - Request.newBuilder().setQuery("schema { predicate }").build() - ) - ); - DgraphSchema schema = getSchema(response.getJson().toStringUtf8()).withDgraph(_dgraph); - - if 
(schema.isEmpty()) { - Operation setSchema = Operation.newBuilder() - .setSchema("" - + "<urn>: string @index(hash) @upsert .\n" - + "<type>: string @index(hash) .\n" - + "<key>: string @index(hash) .\n" - ) - .build(); - _dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); - } - - return schema; + // calls to Dgraph cluster will be retried if they throw retry-able exceptions + // with a max number of attempts of 160 a call will finally fail after around 15 minutes + private static final int MAX_ATTEMPTS = 160; + + private final @Nonnull DgraphExecutor _dgraph; + private final @Nonnull LineageRegistry _lineageRegistry; + + private static final String URN_RELATIONSHIP_TYPE = "urn"; + private static final String TYPE_RELATIONSHIP_TYPE = "type"; + private static final String KEY_RELATIONSHIP_TYPE = "key"; + + @Getter(lazy = true) + // we want to defer initialization of schema (accessing Dgraph server) to the first time accessing + // _schema + private final DgraphSchema _schema = getSchema(); + + public DgraphGraphService( + @Nonnull LineageRegistry lineageRegistry, @Nonnull DgraphClient client) { + _lineageRegistry = lineageRegistry; + this._dgraph = new DgraphExecutor(client, MAX_ATTEMPTS); + } + + protected @Nonnull DgraphSchema getSchema() { + Response response = + _dgraph.executeFunction( + dgraphClient -> + dgraphClient + .newReadOnlyTransaction() + .doRequest(Request.newBuilder().setQuery("schema { predicate }").build())); + DgraphSchema schema = getSchema(response.getJson().toStringUtf8()).withDgraph(_dgraph); + + if (schema.isEmpty()) { + Operation setSchema = + Operation.newBuilder() + .setSchema( + "" + + "<urn>: string @index(hash) @upsert .\n" + + "<type>: string @index(hash) .\n" + + "<key>: string @index(hash) .\n") + .build(); + _dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); } - protected static @Nonnull DgraphSchema getSchema(@Nonnull String json) { - Map<String, Object> data = getDataFromResponseJson(json); - - Object schemaObj = data.get("schema"); - if (!(schemaObj instanceof List<?>)) { - log.info("The result from Dgraph did not contain a 'schema' field, or that field is not a List"); - return DgraphSchema.empty(); - } - - List<?> schemaList = (List<?>) schemaObj; - Set<String> fieldNames = schemaList.stream().flatMap(fieldObj -> { - if (!(fieldObj instanceof Map)) { - return Stream.empty(); - } - - Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; - if (!(fieldMap.containsKey("predicate") && fieldMap.get("predicate") instanceof String)) { - return Stream.empty(); - } - - String fieldName = (String) fieldMap.get("predicate"); - return Stream.of(fieldName); - }).filter(f -> !f.startsWith("dgraph.")).collect(Collectors.toSet()); + return schema; + } - Object typesObj = data.get("types"); - if (!(typesObj instanceof List<?>)) { - log.info("The result from Dgraph did not contain a 'types' field, or that field is not a List"); - return DgraphSchema.empty(); - } + protected static @Nonnull DgraphSchema getSchema(@Nonnull String json) { + Map<String, Object> data = getDataFromResponseJson(json); - List<?> types = (List<?>) typesObj; - Map<String, Set<String>> typeFields = types.stream().flatMap(typeObj -> { - if (!(typeObj instanceof Map)) { - return Stream.empty(); - } + Object schemaObj = data.get("schema"); + if (!(schemaObj instanceof List<?>)) { + log.info( + "The result from Dgraph did not contain a 'schema' field, or that field is not a List"); + return DgraphSchema.empty(); + } - Map<?, ?> typeMap = (Map<?, ?>) typeObj; - if 
(!(typeMap.containsKey("fields") - && typeMap.containsKey("name") - && typeMap.get("fields") instanceof List<?> - && typeMap.get("name") instanceof String)) { - return Stream.empty(); - } + List<?> schemaList = (List<?>) schemaObj; + Set<String> fieldNames = + schemaList.stream() + .flatMap( + fieldObj -> { + if (!(fieldObj instanceof Map)) { + return Stream.empty(); + } - String typeName = (String) typeMap.get("name"); - List<?> fieldsList = (List<?>) typeMap.get("fields"); + Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; + if (!(fieldMap.containsKey("predicate") + && fieldMap.get("predicate") instanceof String)) { + return Stream.empty(); + } + + String fieldName = (String) fieldMap.get("predicate"); + return Stream.of(fieldName); + }) + .filter(f -> !f.startsWith("dgraph.")) + .collect(Collectors.toSet()); + + Object typesObj = data.get("types"); + if (!(typesObj instanceof List<?>)) { + log.info( + "The result from Dgraph did not contain a 'types' field, or that field is not a List"); + return DgraphSchema.empty(); + } - Set<String> fields = fieldsList.stream().flatMap(fieldObj -> { - if (!(fieldObj instanceof Map<?, ?>)) { + List<?> types = (List<?>) typesObj; + Map<String, Set<String>> typeFields = + types.stream() + .flatMap( + typeObj -> { + if (!(typeObj instanceof Map)) { return Stream.empty(); - } + } - Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; - if (!(fieldMap.containsKey("name") && fieldMap.get("name") instanceof String)) { + Map<?, ?> typeMap = (Map<?, ?>) typeObj; + if (!(typeMap.containsKey("fields") + && typeMap.containsKey("name") + && typeMap.get("fields") instanceof List<?> + && typeMap.get("name") instanceof String)) { return Stream.empty(); - } + } + + String typeName = (String) typeMap.get("name"); + List<?> fieldsList = (List<?>) typeMap.get("fields"); + + Set<String> fields = + fieldsList.stream() + .flatMap( + fieldObj -> { + if (!(fieldObj instanceof Map<?, ?>)) { + return Stream.empty(); + } + + Map<?, ?> fieldMap = (Map<?, ?>) fieldObj; + if (!(fieldMap.containsKey("name") + && fieldMap.get("name") instanceof String)) { + return Stream.empty(); + } + + String fieldName = (String) fieldMap.get("name"); + return Stream.of(fieldName); + }) + .filter(f -> !f.startsWith("dgraph.")) + .collect(Collectors.toSet()); + return Stream.of(Pair.of(typeName, fields)); + }) + .filter(t -> !t.getKey().startsWith("dgraph.")) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + + return new DgraphSchema(fieldNames, typeFields); + } + + @Override + public LineageRegistry getLineageRegistry() { + return _lineageRegistry; + } + + @Override + public void addEdge(Edge edge) { + log.debug( + String.format( + "Adding Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), edge.getRelationshipType())); + + // add the relationship type to the schema + // TODO: translate edge name to allowed dgraph uris + String sourceEntityType = getDgraphType(edge.getSource()); + String relationshipType = edge.getRelationshipType(); + get_schema() + .ensureField( + sourceEntityType, + relationshipType, + URN_RELATIONSHIP_TYPE, + TYPE_RELATIONSHIP_TYPE, + KEY_RELATIONSHIP_TYPE); + + // lookup the source and destination nodes + // TODO: add escape for string values + String query = + String.format( + "query {\n" + + " src as var(func: eq(urn, \"%s\"))\n" + + " dst as var(func: eq(urn, \"%s\"))\n" + + "}", + edge.getSource(), edge.getDestination()); + String srcVar = "uid(src)"; + String dstVar = "uid(dst)"; + + // edge case: source and destination are same node + if 
(edge.getSource().equals(edge.getDestination())) { + query = + String.format( + "query {\n" + " node as var(func: eq(urn, \"%s\"))\n" + "}", edge.getSource()); + srcVar = "uid(node)"; + dstVar = "uid(node)"; + } + + // create source and destination nodes if they do not exist + // and create the new edge between them + // TODO: add escape for string values + // TODO: translate edge name to allowed dgraph uris + StringJoiner mutations = new StringJoiner("\n"); + mutations.add( + String.format("%s <dgraph.type> \"%s\" .", srcVar, getDgraphType(edge.getSource()))); + mutations.add(String.format("%s <urn> \"%s\" .", srcVar, edge.getSource())); + mutations.add(String.format("%s <type> \"%s\" .", srcVar, edge.getSource().getEntityType())); + mutations.add(String.format("%s <key> \"%s\" .", srcVar, edge.getSource().getEntityKey())); + if (!edge.getSource().equals(edge.getDestination())) { + mutations.add( + String.format("%s <dgraph.type> \"%s\" .", dstVar, getDgraphType(edge.getDestination()))); + mutations.add(String.format("%s <urn> \"%s\" .", dstVar, edge.getDestination())); + mutations.add( + String.format("%s <type> \"%s\" .", dstVar, edge.getDestination().getEntityType())); + mutations.add( + String.format("%s <key> \"%s\" .", dstVar, edge.getDestination().getEntityKey())); + } + mutations.add(String.format("%s <%s> %s .", srcVar, edge.getRelationshipType(), dstVar)); + + log.debug("Query: " + query); + log.debug("Mutations: " + mutations); + + // construct the upsert + Mutation mutation = + Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(mutations.toString())).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + // run the request + _dgraph.executeFunction(client -> client.newTransaction().doRequest(request)); + } + + private static @Nonnull String getDgraphType(@Nonnull Urn urn) { + return urn.getNamespace() + ":" + urn.getEntityType(); + } + + // Returns reversed and directed relationship types: + // <rel> returns <~rel> on outgoing and <rel> on incoming and both on undirected + private static List<String> getDirectedRelationshipTypes( + List<String> relationships, RelationshipDirection direction) { + + if (direction == RelationshipDirection.OUTGOING + || direction == RelationshipDirection.UNDIRECTED) { + List<String> outgoingRelationships = + relationships.stream().map(type -> "~" + type).collect(Collectors.toList()); + + if (direction == RelationshipDirection.OUTGOING) { + return outgoingRelationships; + } else { + relationships = new ArrayList<>(relationships); + relationships.addAll(outgoingRelationships); + } + } - String fieldName = (String) fieldMap.get("name"); - return Stream.of(fieldName); - }).filter(f -> !f.startsWith("dgraph.")).collect(Collectors.toSet()); - return Stream.of(Pair.of(typeName, fields)); - }).filter(t -> !t.getKey().startsWith("dgraph.")).collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + // we need to remove duplicates in order to not cause invalid queries in dgraph + return new ArrayList<>(new LinkedHashSet(relationships)); + } + + protected static String getQueryForRelatedEntities( + @Nullable List<String> sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List<String> destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + int offset, + int count) { + if (relationshipTypes.isEmpty()) { + // we would have to construct a query that never returns any results + 
// just do not call this method in the first place + throw new IllegalArgumentException("The relationship types must not be empty"); + } - return new DgraphSchema(fieldNames, typeFields); + if (sourceEntityFilter.hasCriteria() || destinationEntityFilter.hasCriteria()) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support criteria in source or destination entity filter"); } - @Override - public LineageRegistry getLineageRegistry() { - return _lineageRegistry; + //noinspection ConstantConditions + if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() > 1 + || destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() > 1) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support multiple OR criteria in source or destination entity filter"); } - @Override - public void addEdge(Edge edge) { - log.debug(String.format("Adding Edge source: %s, destination: %s, type: %s", - edge.getSource(), - edge.getDestination(), - edge.getRelationshipType())); - - // add the relationship type to the schema - // TODO: translate edge name to allowed dgraph uris - String sourceEntityType = getDgraphType(edge.getSource()); - String relationshipType = edge.getRelationshipType(); - get_schema().ensureField(sourceEntityType, relationshipType, URN_RELATIONSHIP_TYPE, TYPE_RELATIONSHIP_TYPE, KEY_RELATIONSHIP_TYPE); - - // lookup the source and destination nodes - // TODO: add escape for string values - String query = String.format("query {\n" - + " src as var(func: eq(urn, \"%s\"))\n" - + " dst as var(func: eq(urn, \"%s\"))\n" - + "}", edge.getSource(), edge.getDestination()); - String srcVar = "uid(src)"; - String dstVar = "uid(dst)"; - - // edge case: source and destination are same node - if (edge.getSource().equals(edge.getDestination())) { - query = String.format("query {\n" - + " node as var(func: eq(urn, \"%s\"))\n" - + "}", edge.getSource()); - srcVar = "uid(node)"; - dstVar = "uid(node)"; - } - - // create source and destination nodes if they do not exist - // and create the new edge between them - // TODO: add escape for string values - // TODO: translate edge name to allowed dgraph uris - StringJoiner mutations = new StringJoiner("\n"); - mutations.add(String.format("%s <dgraph.type> \"%s\" .", srcVar, getDgraphType(edge.getSource()))); - mutations.add(String.format("%s <urn> \"%s\" .", srcVar, edge.getSource())); - mutations.add(String.format("%s <type> \"%s\" .", srcVar, edge.getSource().getEntityType())); - mutations.add(String.format("%s <key> \"%s\" .", srcVar, edge.getSource().getEntityKey())); - if (!edge.getSource().equals(edge.getDestination())) { - mutations.add(String.format("%s <dgraph.type> \"%s\" .", dstVar, getDgraphType(edge.getDestination()))); - mutations.add(String.format("%s <urn> \"%s\" .", dstVar, edge.getDestination())); - mutations.add(String.format("%s <type> \"%s\" .", dstVar, edge.getDestination().getEntityType())); - mutations.add(String.format("%s <key> \"%s\" .", dstVar, edge.getDestination().getEntityKey())); - } - mutations.add(String.format("%s <%s> %s .", srcVar, edge.getRelationshipType(), dstVar)); - - log.debug("Query: " + query); - log.debug("Mutations: " + mutations); - - // construct the upsert - Mutation mutation = Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(mutations.toString())) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - // run the request - _dgraph.executeFunction(client -> 
client.newTransaction().doRequest(request)); + //noinspection ConstantConditions + if (relationshipFilter.hasCriteria() + || relationshipFilter.hasOr() && relationshipFilter.getOr().size() > 0) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support any criteria for the relationship filter"); } - private static @Nonnull String getDgraphType(@Nonnull Urn urn) { - return urn.getNamespace() + ":" + urn.getEntityType(); + // We are not querying for <src> <relationship> <dest> and return <dest> + // but we reverse the relationship and query for <dest> <~relationship> <src> + // this guarantees there are no duplicates among the returned <dest>s + final List<String> directedRelationshipTypes = + getDirectedRelationshipTypes(relationshipTypes, relationshipFilter.getDirection()); + + List<String> filters = new ArrayList<>(); + + Set<String> destinationNodeFilterNames = new HashSet<>(); + String sourceTypeFilterName = null; + String destinationTypeFilterName = null; + List<String> sourceFilterNames = new ArrayList<>(); + List<String> destinationFilterNames = new ArrayList<>(); + List<String> relationshipTypeFilterNames = new ArrayList<>(); + + if (sourceTypes != null && sourceTypes.size() > 0) { + sourceTypeFilterName = "sourceType"; + // TODO: escape string value + final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); + sourceTypes.forEach(type -> joiner.add(type)); + filters.add( + String.format( + "%s as var(func: eq(<type>, %s))", sourceTypeFilterName, joiner.toString())); } - // Returns reversed and directed relationship types: - // <rel> returns <~rel> on outgoing and <rel> on incoming and both on undirected - private static List<String> getDirectedRelationshipTypes(List<String> relationships, - RelationshipDirection direction) { - - if (direction == RelationshipDirection.OUTGOING || direction == RelationshipDirection.UNDIRECTED) { - List<String> outgoingRelationships = relationships.stream() - .map(type -> "~" + type).collect(Collectors.toList()); - - if (direction == RelationshipDirection.OUTGOING) { - return outgoingRelationships; - } else { - relationships = new ArrayList<>(relationships); - relationships.addAll(outgoingRelationships); - } - } - - // we need to remove duplicates in order to not cause invalid queries in dgraph - return new ArrayList<>(new LinkedHashSet(relationships)); + if (destinationTypes != null && destinationTypes.size() > 0) { + destinationTypeFilterName = "destinationType"; + final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); + destinationTypes.forEach(type -> joiner.add(type)); + // TODO: escape string value + filters.add( + String.format( + "%s as var(func: eq(<type>, %s))", destinationTypeFilterName, joiner.toString())); } - protected static String getQueryForRelatedEntities(@Nullable List<String> sourceTypes, - @Nonnull Filter sourceEntityFilter, - @Nullable List<String> destinationTypes, - @Nonnull Filter destinationEntityFilter, - @Nonnull List<String> relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter, - int offset, - int count) { - if (relationshipTypes.isEmpty()) { - // we would have to construct a query that never returns any results - // just do not call this method in the first place - throw new IllegalArgumentException("The relationship types must not be empty"); - } - - - if (sourceEntityFilter.hasCriteria() || destinationEntityFilter.hasCriteria()) { - throw new IllegalArgumentException("The DgraphGraphService does not support criteria in source or destination entity filter"); 
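For reference, the upsert assembled by addEdge above renders roughly as follows for a hypothetical DownstreamOf edge between two invented dataset urns (the <key> values, which come from Urn.getEntityKey(), are elided here):

query {
 src as var(func: eq(urn, "urn:li:dataset:A"))
 dst as var(func: eq(urn, "urn:li:dataset:B"))
}

uid(src) <dgraph.type> "li:dataset" .
uid(src) <urn> "urn:li:dataset:A" .
uid(src) <type> "dataset" .
uid(src) <key> "..." .
uid(dst) <dgraph.type> "li:dataset" .
uid(dst) <urn> "urn:li:dataset:B" .
uid(dst) <type> "dataset" .
uid(dst) <key> "..." .
uid(src) <DownstreamOf> uid(dst) .

The query variables and the mutation go out as one request with commitNow set, so missing nodes are created and the edge is added in a single transaction.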
- } - - //noinspection ConstantConditions - if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() > 1 - || destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() > 1) { - throw new IllegalArgumentException("The DgraphGraphService does not support multiple OR criteria in source or destination entity filter"); - } - - //noinspection ConstantConditions - if (relationshipFilter.hasCriteria() || relationshipFilter.hasOr() && relationshipFilter.getOr().size() > 0) { - throw new IllegalArgumentException("The DgraphGraphService does not support any criteria for the relationship filter"); - } - - // We are not querying for <src> <relationship> <dest> and return <dest> - // but we reverse the relationship and query for <dest> <~relationship> <src> - // this guarantees there are no duplicates among the returned <dest>s - final List<String> directedRelationshipTypes = getDirectedRelationshipTypes( - relationshipTypes, relationshipFilter.getDirection() - ); - - List<String> filters = new ArrayList<>(); - - Set<String> destinationNodeFilterNames = new HashSet<>(); - String sourceTypeFilterName = null; - String destinationTypeFilterName = null; - List<String> sourceFilterNames = new ArrayList<>(); - List<String> destinationFilterNames = new ArrayList<>(); - List<String> relationshipTypeFilterNames = new ArrayList<>(); - - if (sourceTypes != null && sourceTypes.size() > 0) { - sourceTypeFilterName = "sourceType"; - // TODO: escape string value - final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); - sourceTypes.forEach(type -> joiner.add(type)); - filters.add(String.format("%s as var(func: eq(<type>, %s))", sourceTypeFilterName, joiner.toString())); - } - - if (destinationTypes != null && destinationTypes.size() > 0) { - destinationTypeFilterName = "destinationType"; - final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); - destinationTypes.forEach(type -> joiner.add(type)); - // TODO: escape string value - filters.add(String.format("%s as var(func: eq(<type>, %s))", destinationTypeFilterName, joiner.toString())); - } - - //noinspection ConstantConditions - if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() == 1) { - CriterionArray sourceCriteria = sourceEntityFilter.getOr().get(0).getAnd(); - IntStream.range(0, sourceCriteria.size()) - .forEach(idx -> { - String sourceFilterName = "sourceFilter" + (idx + 1); - sourceFilterNames.add(sourceFilterName); - Criterion criterion = sourceCriteria.get(idx); - // TODO: escape field name and string value - filters.add(String.format("%s as var(func: eq(<%s>, \"%s\"))", sourceFilterName, criterion.getField(), criterion.getValue())); - }); - } - - //noinspection ConstantConditions - if (destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() == 1) { - CriterionArray destinationCriteria = destinationEntityFilter.getOr().get(0).getAnd(); - IntStream.range(0, destinationCriteria.size()) - .forEach(idx -> { - String sourceFilterName = "destinationFilter" + (idx + 1); - destinationFilterNames.add(sourceFilterName); - Criterion criterion = destinationCriteria.get(idx); - // TODO: escape field name and string value - filters.add(String.format("%s as var(func: eq(<%s>, \"%s\"))", sourceFilterName, criterion.getField(), criterion.getValue())); - }); - } - - IntStream.range(0, directedRelationshipTypes.size()) - .forEach(idx -> { - String relationshipTypeFilterName = "relationshipType" + (idx + 1); - relationshipTypeFilterNames.add(relationshipTypeFilterName); - // TODO: escape 
string value - filters.add(String.format("%s as var(func: has(<%s>))", relationshipTypeFilterName, directedRelationshipTypes.get(idx))); - }); - - // the destination node filter is the first filter that is being applied on the destination node - // we can add multiple filters, they will combine as OR - if (destinationTypeFilterName != null) { - destinationNodeFilterNames.add(destinationTypeFilterName); - } - destinationNodeFilterNames.addAll(destinationFilterNames); - destinationNodeFilterNames.addAll(relationshipTypeFilterNames); - - StringJoiner destinationNodeFilterJoiner = new StringJoiner(", "); - destinationNodeFilterNames.stream().sorted().forEach(destinationNodeFilterJoiner::add); - String destinationNodeFilter = destinationNodeFilterJoiner.toString(); - - String filterConditions = getFilterConditions( - sourceTypeFilterName, destinationTypeFilterName, - sourceFilterNames, destinationFilterNames, - relationshipTypeFilterNames, directedRelationshipTypes - ); - - StringJoiner relationshipsJoiner = new StringJoiner("\n "); - getRelationships(sourceTypeFilterName, sourceFilterNames, directedRelationshipTypes) - .forEach(relationshipsJoiner::add); - String relationships = relationshipsJoiner.toString(); - - StringJoiner filterJoiner = new StringJoiner("\n "); - filters.forEach(filterJoiner::add); - String filterExpressions = filterJoiner.toString(); - - return String.format("query {\n" - + " %s\n" - + "\n" - + " result (func: uid(%s), first: %d, offset: %d) %s {\n" - + " <urn>\n" - + " %s\n" - + " }\n" - + "}", - filterExpressions, - destinationNodeFilter, - count, offset, - filterConditions, - relationships); + //noinspection ConstantConditions + if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() == 1) { + CriterionArray sourceCriteria = sourceEntityFilter.getOr().get(0).getAnd(); + IntStream.range(0, sourceCriteria.size()) + .forEach( + idx -> { + String sourceFilterName = "sourceFilter" + (idx + 1); + sourceFilterNames.add(sourceFilterName); + Criterion criterion = sourceCriteria.get(idx); + // TODO: escape field name and string value + filters.add( + String.format( + "%s as var(func: eq(<%s>, \"%s\"))", + sourceFilterName, criterion.getField(), criterion.getValue())); + }); } - @Override - public void upsertEdge(final Edge edge) { - throw new UnsupportedOperationException("Upsert edge not supported by Neo4JGraphService at this time."); + //noinspection ConstantConditions + if (destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() == 1) { + CriterionArray destinationCriteria = destinationEntityFilter.getOr().get(0).getAnd(); + IntStream.range(0, destinationCriteria.size()) + .forEach( + idx -> { + String sourceFilterName = "destinationFilter" + (idx + 1); + destinationFilterNames.add(sourceFilterName); + Criterion criterion = destinationCriteria.get(idx); + // TODO: escape field name and string value + filters.add( + String.format( + "%s as var(func: eq(<%s>, \"%s\"))", + sourceFilterName, criterion.getField(), criterion.getValue())); + }); } - @Override - public void removeEdge(final Edge edge) { - throw new UnsupportedOperationException("Remove edge not supported by DgraphGraphService at this time."); + IntStream.range(0, directedRelationshipTypes.size()) + .forEach( + idx -> { + String relationshipTypeFilterName = "relationshipType" + (idx + 1); + relationshipTypeFilterNames.add(relationshipTypeFilterName); + // TODO: escape string value + filters.add( + String.format( + "%s as var(func: has(<%s>))", + relationshipTypeFilterName, 
directedRelationshipTypes.get(idx))); + }); + + // the destination node filter is the first filter that is being applied on the destination node + // we can add multiple filters, they will combine as OR + if (destinationTypeFilterName != null) { + destinationNodeFilterNames.add(destinationTypeFilterName); + } + destinationNodeFilterNames.addAll(destinationFilterNames); + destinationNodeFilterNames.addAll(relationshipTypeFilterNames); + + StringJoiner destinationNodeFilterJoiner = new StringJoiner(", "); + destinationNodeFilterNames.stream().sorted().forEach(destinationNodeFilterJoiner::add); + String destinationNodeFilter = destinationNodeFilterJoiner.toString(); + + String filterConditions = + getFilterConditions( + sourceTypeFilterName, destinationTypeFilterName, + sourceFilterNames, destinationFilterNames, + relationshipTypeFilterNames, directedRelationshipTypes); + + StringJoiner relationshipsJoiner = new StringJoiner("\n "); + getRelationships(sourceTypeFilterName, sourceFilterNames, directedRelationshipTypes) + .forEach(relationshipsJoiner::add); + String relationships = relationshipsJoiner.toString(); + + StringJoiner filterJoiner = new StringJoiner("\n "); + filters.forEach(filterJoiner::add); + String filterExpressions = filterJoiner.toString(); + + return String.format( + "query {\n" + + " %s\n" + + "\n" + + " result (func: uid(%s), first: %d, offset: %d) %s {\n" + + " <urn>\n" + + " %s\n" + + " }\n" + + "}", + filterExpressions, destinationNodeFilter, count, offset, filterConditions, relationships); + } + + @Override + public void upsertEdge(final Edge edge) { + throw new UnsupportedOperationException( + "Upsert edge not supported by Neo4JGraphService at this time."); + } + + @Override + public void removeEdge(final Edge edge) { + throw new UnsupportedOperationException( + "Remove edge not supported by DgraphGraphService at this time."); + } + + @Nonnull + @Override + public RelatedEntitiesResult findRelatedEntities( + @Nullable List<String> sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List<String> destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + int offset, + int count) { + + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { + return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); + } + if (relationshipTypes.isEmpty() + || relationshipTypes.stream() + .noneMatch(relationship -> get_schema().hasField(relationship))) { + return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } - @Nonnull - @Override - public RelatedEntitiesResult findRelatedEntities(@Nullable List<String> sourceTypes, - @Nonnull Filter sourceEntityFilter, - @Nullable List<String> destinationTypes, - @Nonnull Filter destinationEntityFilter, - @Nonnull List<String> relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter, - int offset, - int count) { - - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { - return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); - } - if (relationshipTypes.isEmpty() || relationshipTypes.stream().noneMatch(relationship -> get_schema().hasField(relationship))) { - return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); - } - - String query = getQueryForRelatedEntities( - sourceTypes, sourceEntityFilter, - destinationTypes, destinationEntityFilter, - 
relationshipTypes.stream().filter(get_schema()::hasField).collect(Collectors.toList()), - relationshipFilter, - offset, count - ); - - Request request = Request.newBuilder() - .setQuery(query) - .build(); - - log.debug("Query: " + query); - Response response = _dgraph.executeFunction(client -> client.newReadOnlyTransaction().doRequest(request)); - String json = response.getJson().toStringUtf8(); - Map<String, Object> data = getDataFromResponseJson(json); - - List<RelatedEntity> entities = getRelatedEntitiesFromResponseData(data); - int total = offset + entities.size(); - if (entities.size() == count) { - // indicate that there might be more results - total++; - } - return new RelatedEntitiesResult(offset, entities.size(), total, entities); + String query = + getQueryForRelatedEntities( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes.stream().filter(get_schema()::hasField).collect(Collectors.toList()), + relationshipFilter, + offset, + count); + + Request request = Request.newBuilder().setQuery(query).build(); + + log.debug("Query: " + query); + Response response = + _dgraph.executeFunction(client -> client.newReadOnlyTransaction().doRequest(request)); + String json = response.getJson().toStringUtf8(); + Map<String, Object> data = getDataFromResponseJson(json); + + List<RelatedEntity> entities = getRelatedEntitiesFromResponseData(data); + int total = offset + entities.size(); + if (entities.size() == count) { + // indicate that there might be more results + total++; + } + return new RelatedEntitiesResult(offset, entities.size(), total, entities); + } + + // Creates filter conditions from destination to source nodes + protected static @Nonnull String getFilterConditions( + @Nullable String sourceTypeFilterName, + @Nullable String destinationTypeFilterName, + @Nonnull List<String> sourceFilterNames, + @Nonnull List<String> destinationFilterNames, + @Nonnull List<String> relationshipTypeFilterNames, + @Nonnull List<String> relationshipTypes) { + if (relationshipTypes.size() != relationshipTypeFilterNames.size()) { + throw new IllegalArgumentException( + "relationshipTypeFilterNames and relationshipTypes " + + "must have same size: " + + relationshipTypeFilterNames + + " vs. " + + relationshipTypes); } - // Creates filter conditions from destination to source nodes - protected static @Nonnull String getFilterConditions(@Nullable String sourceTypeFilterName, - @Nullable String destinationTypeFilterName, - @Nonnull List<String> sourceFilterNames, - @Nonnull List<String> destinationFilterNames, - @Nonnull List<String> relationshipTypeFilterNames, - @Nonnull List<String> relationshipTypes) { - if (relationshipTypes.size() != relationshipTypeFilterNames.size()) { - throw new IllegalArgumentException("relationshipTypeFilterNames and relationshipTypes " - + "must have same size: " + relationshipTypeFilterNames + " vs. 
" + relationshipTypes); - } - - if (sourceTypeFilterName == null - && destinationTypeFilterName == null - && sourceFilterNames.isEmpty() - && destinationFilterNames.isEmpty() - && relationshipTypeFilterNames.isEmpty()) { - return ""; - } - - StringJoiner andJoiner = new StringJoiner(" AND\n "); - if (destinationTypeFilterName != null) { - andJoiner.add(String.format("uid(%s)", destinationTypeFilterName)); - } - - destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid(%s)", filter))); - - if (!relationshipTypes.isEmpty()) { - StringJoiner orJoiner = new StringJoiner(" OR\n "); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> orJoiner.add(getRelationshipCondition( - relationshipTypes.get(idx), relationshipTypeFilterNames.get(idx), - sourceTypeFilterName, sourceFilterNames - ))); - String relationshipCondition = orJoiner.toString(); - andJoiner.add(String.format("(\n %s\n )", relationshipCondition)); - } - - String conditions = andJoiner.toString(); - return String.format("@filter(\n %s\n )", conditions); + if (sourceTypeFilterName == null + && destinationTypeFilterName == null + && sourceFilterNames.isEmpty() + && destinationFilterNames.isEmpty() + && relationshipTypeFilterNames.isEmpty()) { + return ""; } - protected static String getRelationshipCondition(@Nonnull String relationshipType, - @Nonnull String relationshipTypeFilterName, - @Nullable String objectFilterName, - @Nonnull List<String> destinationFilterNames) { - StringJoiner andJoiner = new StringJoiner(" AND "); - andJoiner.add(String.format("uid(%s)", relationshipTypeFilterName)); - if (objectFilterName != null) { - andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, objectFilterName)); - } - destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, filter))); - return andJoiner.toString(); + StringJoiner andJoiner = new StringJoiner(" AND\n "); + if (destinationTypeFilterName != null) { + andJoiner.add(String.format("uid(%s)", destinationTypeFilterName)); } + destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid(%s)", filter))); + + if (!relationshipTypes.isEmpty()) { + StringJoiner orJoiner = new StringJoiner(" OR\n "); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + orJoiner.add( + getRelationshipCondition( + relationshipTypes.get(idx), + relationshipTypeFilterNames.get(idx), + sourceTypeFilterName, + sourceFilterNames))); + String relationshipCondition = orJoiner.toString(); + andJoiner.add(String.format("(\n %s\n )", relationshipCondition)); + } - // Creates filter conditions from destination to source nodes - protected static @Nonnull List<String> getRelationships(@Nullable String sourceTypeFilterName, - @Nonnull List<String> sourceFilterNames, - @Nonnull List<String> relationshipTypes) { - return relationshipTypes.stream().map(relationshipType -> { - StringJoiner andJoiner = new StringJoiner(" AND "); - if (sourceTypeFilterName != null) { + String conditions = andJoiner.toString(); + return String.format("@filter(\n %s\n )", conditions); + } + + protected static String getRelationshipCondition( + @Nonnull String relationshipType, + @Nonnull String relationshipTypeFilterName, + @Nullable String objectFilterName, + @Nonnull List<String> destinationFilterNames) { + StringJoiner andJoiner = new StringJoiner(" AND "); + andJoiner.add(String.format("uid(%s)", relationshipTypeFilterName)); + if (objectFilterName != null) { + andJoiner.add(String.format("uid_in(<%s>, uid(%s))", 
relationshipType, objectFilterName)); + } + destinationFilterNames.forEach( + filter -> andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, filter))); + return andJoiner.toString(); + } + + // Creates filter conditions from destination to source nodes + protected static @Nonnull List<String> getRelationships( + @Nullable String sourceTypeFilterName, + @Nonnull List<String> sourceFilterNames, + @Nonnull List<String> relationshipTypes) { + return relationshipTypes.stream() + .map( + relationshipType -> { + StringJoiner andJoiner = new StringJoiner(" AND "); + if (sourceTypeFilterName != null) { andJoiner.add(String.format("uid(%s)", sourceTypeFilterName)); - } - sourceFilterNames.forEach(filterName -> andJoiner.add(String.format("uid(%s)", filterName))); + } + sourceFilterNames.forEach( + filterName -> andJoiner.add(String.format("uid(%s)", filterName))); - if (andJoiner.length() > 0) { + if (andJoiner.length() > 0) { return String.format("<%s> @filter( %s ) { <uid> }", relationshipType, andJoiner); - } else { + } else { return String.format("<%s> { <uid> }", relationshipType); - } - }).collect(Collectors.toList()); + } + }) + .collect(Collectors.toList()); + } + + protected static Map<String, Object> getDataFromResponseJson(String json) { + ObjectMapper mapper = new ObjectMapper(); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + TypeReference<HashMap<String, Object>> typeRef = + new TypeReference<HashMap<String, Object>>() {}; + try { + return mapper.readValue(json, typeRef); + } catch (IOException e) { + throw new RuntimeException("Failed to parse response json: " + json.substring(0, 1000), e); } - - protected static Map<String, Object> getDataFromResponseJson(String json) { - ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() { }; - try { - return mapper.readValue(json, typeRef); - } catch (IOException e) { - throw new RuntimeException("Failed to parse response json: " + json.substring(0, 1000), e); - } + } + + protected static List<RelatedEntity> getRelatedEntitiesFromResponseData( + Map<String, Object> data) { + Object obj = data.get("result"); + if (!(obj instanceof List<?>)) { + throw new IllegalArgumentException( + "The result from Dgraph did not contain a 'result' field, or that field is not a List"); } - protected static List<RelatedEntity> getRelatedEntitiesFromResponseData(Map<String, Object> data) { - Object obj = data.get("result"); - if (!(obj instanceof List<?>)) { - throw new IllegalArgumentException( - "The result from Dgraph did not contain a 'result' field, or that field is not a List" - ); - } - - List<?> results = (List<?>) obj; - return results.stream().flatMap(destinationObj -> { - if (!(destinationObj instanceof Map)) { + List<?> results = (List<?>) obj; + return results.stream() + .flatMap( + destinationObj -> { + if (!(destinationObj instanceof Map)) { return Stream.empty(); - } + } - Map<?, ?> destination = (Map<?, ?>) destinationObj; - if (destination.containsKey("urn") && destination.get("urn") instanceof 
String) { + Map<?, ?> destination = (Map<?, ?>) destinationObj; + if (destination.containsKey("urn") && destination.get("urn") instanceof String) { String urn = (String) destination.get("urn"); return destination.entrySet().stream() - .filter(entry -> !entry.getKey().equals("urn")) - .flatMap(entry -> { - Object relationshipObj = entry.getKey(); - Object sourcesObj = entry.getValue(); - if (!(relationshipObj instanceof String && sourcesObj instanceof List)) { - return Stream.empty(); - } - - String relationship = (String) relationshipObj; - List<?> sources = (List<?>) sourcesObj; - - if (sources.size() == 0) { - return Stream.empty(); - } - - if (relationship.startsWith("~")) { - relationship = relationship.substring(1); - } - - return Stream.of(relationship); + .filter(entry -> !entry.getKey().equals("urn")) + .flatMap( + entry -> { + Object relationshipObj = entry.getKey(); + Object sourcesObj = entry.getValue(); + if (!(relationshipObj instanceof String && sourcesObj instanceof List)) { + return Stream.empty(); + } + + String relationship = (String) relationshipObj; + List<?> sources = (List<?>) sourcesObj; + + if (sources.size() == 0) { + return Stream.empty(); + } + + if (relationship.startsWith("~")) { + relationship = relationship.substring(1); + } + + return Stream.of(relationship); }) - // for undirected we get duplicate relationships - .distinct() - .map(relationship -> new RelatedEntity(relationship, urn)); - } - - return Stream.empty(); - }).collect(Collectors.toList()); - } - - @Override - public void removeNode(@Nonnull Urn urn) { - String query = String.format("query {\n" - + " node as var(func: eq(urn, \"%s\"))\n" - + "}", urn); - String deletion = "uid(node) * * ."; - - log.debug("Query: " + query); - log.debug("Delete: " + deletion); - - Mutation mutation = Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8(deletion)) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + // for undirected we get duplicate relationships + .distinct() + .map(relationship -> new RelatedEntity(relationship, urn)); + } + + return Stream.empty(); + }) + .collect(Collectors.toList()); + } + + @Override + public void removeNode(@Nonnull Urn urn) { + String query = String.format("query {\n" + " node as var(func: eq(urn, \"%s\"))\n" + "}", urn); + String deletion = "uid(node) * * ."; + + log.debug("Query: " + query); + log.debug("Delete: " + deletion); + + Mutation mutation = + Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8(deletion)).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } + + @Override + public void removeEdgesFromNode( + @Nonnull Urn urn, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter) { + if (relationshipTypes.isEmpty()) { + return; } - @Override - public void removeEdgesFromNode(@Nonnull Urn urn, - @Nonnull List<String> relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter) { - if (relationshipTypes.isEmpty()) { - return; - } - - RelationshipDirection direction = relationshipFilter.getDirection(); + RelationshipDirection direction = relationshipFilter.getDirection(); - if (direction == RelationshipDirection.OUTGOING || direction == RelationshipDirection.UNDIRECTED) { - 
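        // UNDIRECTED matches this branch and the INCOMING branch below,
        // so an undirected removal clears the node's edges in both directions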
removeOutgoingEdgesFromNode(urn, relationshipTypes); - } - - if (direction == RelationshipDirection.INCOMING || direction == RelationshipDirection.UNDIRECTED) { - removeIncomingEdgesFromNode(urn, relationshipTypes); - } + if (direction == RelationshipDirection.OUTGOING + || direction == RelationshipDirection.UNDIRECTED) { + removeOutgoingEdgesFromNode(urn, relationshipTypes); } - private void removeOutgoingEdgesFromNode(@Nonnull Urn urn, - @Nonnull List<String> relationshipTypes) { - // TODO: add escape for string values - String query = String.format("query {\n" - + " node as var(func: eq(<urn>, \"%s\"))\n" - + "}", urn); - - Value star = Value.newBuilder().setDefaultVal("_STAR_ALL").build(); - List<NQuad> deletions = relationshipTypes.stream().map(relationshipType -> - NQuad.newBuilder() + if (direction == RelationshipDirection.INCOMING + || direction == RelationshipDirection.UNDIRECTED) { + removeIncomingEdgesFromNode(urn, relationshipTypes); + } + } + + private void removeOutgoingEdgesFromNode( + @Nonnull Urn urn, @Nonnull List<String> relationshipTypes) { + // TODO: add escape for string values + String query = + String.format("query {\n" + " node as var(func: eq(<urn>, \"%s\"))\n" + "}", urn); + + Value star = Value.newBuilder().setDefaultVal("_STAR_ALL").build(); + List<NQuad> deletions = + relationshipTypes.stream() + .map( + relationshipType -> + NQuad.newBuilder() .setSubject("uid(node)") .setPredicate(relationshipType) .setObjectValue(star) - .build() - ).collect(Collectors.toList()); - - log.debug("Query: " + query); - log.debug("Deletions: " + deletions); - - Mutation mutation = Mutation.newBuilder() - .addAllDel(deletions) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); - } - - private void removeIncomingEdgesFromNode(@Nonnull Urn urn, - @Nonnull List<String> relationshipTypes) { - // TODO: add escape for string values - StringJoiner reverseEdges = new StringJoiner("\n "); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> - reverseEdges.add("<~" + relationshipTypes.get(idx) + "> { uids" + (idx + 1) + " as uid }") - ); - String query = String.format("query {\n" + .build()) + .collect(Collectors.toList()); + + log.debug("Query: " + query); + log.debug("Deletions: " + deletions); + + Mutation mutation = Mutation.newBuilder().addAllDel(deletions).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } + + private void removeIncomingEdgesFromNode( + @Nonnull Urn urn, @Nonnull List<String> relationshipTypes) { + // TODO: add escape for string values + StringJoiner reverseEdges = new StringJoiner("\n "); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + reverseEdges.add( + "<~" + relationshipTypes.get(idx) + "> { uids" + (idx + 1) + " as uid }")); + String query = + String.format( + "query {\n" + " node as var(func: eq(<urn>, \"%s\"))\n" + "\n" + " var(func: uid(node)) @normalize {\n" + " %s\n" + " }\n" - + "}", urn, reverseEdges); - - StringJoiner deletions = new StringJoiner("\n"); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> - deletions.add("uid(uids" + (idx + 1) + ") <" + relationshipTypes.get(idx) + "> uid(node) .") - ); - - log.debug("Query: " + query); - log.debug("Deletions: " + deletions); - - Mutation mutation = 
Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8(deletions.toString())) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); - } + + "}", + urn, reverseEdges); - @Override - public void configure() { } + StringJoiner deletions = new StringJoiner("\n"); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + deletions.add( + "uid(uids" + (idx + 1) + ") <" + relationshipTypes.get(idx) + "> uid(node) .")); - @Override - public void clear() { - log.debug("dropping Dgraph data"); + log.debug("Query: " + query); + log.debug("Deletions: " + deletions); - Operation dropAll = Operation.newBuilder().setDropOp(Operation.DropOp.ALL).build(); - _dgraph.executeConsumer(client -> client.alter(dropAll)); + Mutation mutation = + Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8(deletions.toString())).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); - // drop schema cache - get_schema().clear(); + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } - // setup urn, type and key relationships - getSchema(); - } + @Override + public void configure() {} + + @Override + public void clear() { + log.debug("dropping Dgraph data"); + + Operation dropAll = Operation.newBuilder().setDropOp(Operation.DropOp.ALL).build(); + _dgraph.executeConsumer(client -> client.alter(dropAll)); + + // drop schema cache + get_schema().clear(); + + // setup urn, type and key relationships + getSchema(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java index fc1c64ea3cc03..8c4b37716e798 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java @@ -1,9 +1,6 @@ package com.linkedin.metadata.graph.dgraph; import io.dgraph.DgraphProto; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -11,118 +8,125 @@ import java.util.Set; import java.util.StringJoiner; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; -/** - * Provides a thread-safe Dgraph schema. Returned data structures are immutable. - */ +/** Provides a thread-safe Dgraph schema. Returned data structures are immutable. */ @Slf4j public class DgraphSchema { - private final @Nonnull Set<String> fields; - private final @Nonnull Map<String, Set<String>> types; - private final DgraphExecutor dgraph; - - public static DgraphSchema empty() { - return new DgraphSchema(Collections.emptySet(), Collections.emptyMap(), null); - } - - public DgraphSchema(@Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types) { - this(fields, types, null); - } - - public DgraphSchema(@Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types, DgraphExecutor dgraph) { - this.fields = fields; - this.types = types; - this.dgraph = dgraph; - } - - /** - * Adds the given DgraphExecutor to this schema returning a new instance. - * Be aware this and the new instance share the underlying fields and types datastructures. 
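A small sketch of how this class is meant to be queried (the predicate and type names are invented; no DgraphExecutor is attached, so nothing here touches a cluster):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class DgraphSchemaSketch {
  public static void main(String[] args) {
    // mutable collections, since ensureField adds to them in place
    Set<String> fields = new HashSet<>(Arrays.asList("urn", "type", "key"));
    Map<String, Set<String>> types = new HashMap<>();
    types.put("li:dataset", new HashSet<>(fields));

    DgraphSchema schema = new DgraphSchema(fields, types);

    System.out.println(schema.isEmpty());                      // false
    System.out.println(schema.hasField("urn"));                // true
    System.out.println(schema.hasField("li:dataset", "urn"));  // true
    System.out.println(schema.hasType("li:corpuser"));         // false

    // withDgraph returns a new instance that shares the same underlying
    // fields/types collections, as the javadoc above warns
    System.out.println(DgraphSchema.empty().isEmpty());        // true
  }
}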
- * - * @param dgraph dgraph executor to add - * @return new instance - */ - public DgraphSchema withDgraph(DgraphExecutor dgraph) { - return new DgraphSchema(this.fields, this.types, dgraph); + private final @Nonnull Set<String> fields; + private final @Nonnull Map<String, Set<String>> types; + private final DgraphExecutor dgraph; + + public static DgraphSchema empty() { + return new DgraphSchema(Collections.emptySet(), Collections.emptyMap(), null); + } + + public DgraphSchema(@Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types) { + this(fields, types, null); + } + + public DgraphSchema( + @Nonnull Set<String> fields, @Nonnull Map<String, Set<String>> types, DgraphExecutor dgraph) { + this.fields = fields; + this.types = types; + this.dgraph = dgraph; + } + + /** + * Adds the given DgraphExecutor to this schema returning a new instance. Be aware this and the + * new instance share the underlying fields and types datastructures. + * + * @param dgraph dgraph executor to add + * @return new instance + */ + public DgraphSchema withDgraph(DgraphExecutor dgraph) { + return new DgraphSchema(this.fields, this.types, dgraph); + } + + public synchronized boolean isEmpty() { + return fields.isEmpty(); + } + + public synchronized Set<String> getFields() { + // Provide an unmodifiable copy + return Collections.unmodifiableSet(new HashSet<>(fields)); + } + + public synchronized Set<String> getFields(String typeName) { + // Provide an unmodifiable copy + return Collections.unmodifiableSet( + new HashSet<>(types.getOrDefault(typeName, Collections.emptySet()))); + } + + public synchronized Map<String, Set<String>> getTypes() { + // Provide an unmodifiable copy of the map and contained sets + return Collections.unmodifiableMap( + new HashSet<>(types.entrySet()) + .stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> Collections.unmodifiableSet(new HashSet<>(e.getValue()))))); + } + + public synchronized boolean hasType(String typeName) { + return types.containsKey(typeName); + } + + public synchronized boolean hasField(String fieldName) { + return fields.contains(fieldName); + } + + public synchronized boolean hasField(String typeName, String fieldName) { + return types.getOrDefault(typeName, Collections.emptySet()).contains(fieldName); + } + + public synchronized void ensureField( + String typeName, String fieldName, String... 
existingFieldNames) { + // quickly check if the field is known for this type + if (hasField(typeName, fieldName)) { + return; } - synchronized public boolean isEmpty() { - return fields.isEmpty(); - } - - synchronized public Set<String> getFields() { - // Provide an unmodifiable copy - return Collections.unmodifiableSet(new HashSet<>(fields)); - } - - synchronized public Set<String> getFields(String typeName) { - // Provide an unmodifiable copy - return Collections.unmodifiableSet(new HashSet<>(types.getOrDefault(typeName, Collections.emptySet()))); - } - - synchronized public Map<String, Set<String>> getTypes() { - // Provide an unmodifiable copy of the map and contained sets - return Collections.unmodifiableMap( - new HashSet<>(types.entrySet()).stream() - .collect(Collectors.toMap( - Map.Entry::getKey, - e -> Collections.unmodifiableSet(new HashSet<>(e.getValue())) - )) - ); - } - - synchronized public boolean hasType(String typeName) { - return types.containsKey(typeName); - } - - synchronized public boolean hasField(String fieldName) { - return fields.contains(fieldName); - } + // add type and field to schema + StringJoiner schema = new StringJoiner("\n"); - synchronized public boolean hasField(String typeName, String fieldName) { - return types.getOrDefault(typeName, Collections.emptySet()).contains(fieldName); + if (!fields.contains(fieldName)) { + schema.add(String.format("<%s>: [uid] @reverse .", fieldName)); } - synchronized public void ensureField(String typeName, String fieldName, String... existingFieldNames) { - // quickly check if the field is known for this type - if (hasField(typeName, fieldName)) { - return; - } - - // add type and field to schema - StringJoiner schema = new StringJoiner("\n"); - - if (!fields.contains(fieldName)) { - schema.add(String.format("<%s>: [uid] @reverse .", fieldName)); - } - - // update the schema on the Dgraph cluster - Set<String> allTypesFields = new HashSet<>(Arrays.asList(existingFieldNames)); - allTypesFields.addAll(types.getOrDefault(typeName, Collections.emptySet())); - allTypesFields.add(fieldName); - - if (dgraph != null) { - log.info("Adding predicate {} for type {} to schema", fieldName, typeName); - - StringJoiner type = new StringJoiner("\n "); - allTypesFields.stream().map(t -> "<" + t + ">").forEach(type::add); - schema.add(String.format("type <%s> {\n %s\n}", typeName, type)); - log.debug("Adding to schema: " + schema); - DgraphProto.Operation setSchema = DgraphProto.Operation.newBuilder().setSchema(schema.toString()).setRunInBackground(true).build(); - dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); - } - - // now that the schema has been updated on dgraph we can cache this new type / field - // ensure type and fields of type exist - if (!types.containsKey(typeName)) { - types.put(typeName, new HashSet<>()); - } - types.get(typeName).add(fieldName); - fields.add(fieldName); + // update the schema on the Dgraph cluster + Set<String> allTypesFields = new HashSet<>(Arrays.asList(existingFieldNames)); + allTypesFields.addAll(types.getOrDefault(typeName, Collections.emptySet())); + allTypesFields.add(fieldName); + + if (dgraph != null) { + log.info("Adding predicate {} for type {} to schema", fieldName, typeName); + + StringJoiner type = new StringJoiner("\n "); + allTypesFields.stream().map(t -> "<" + t + ">").forEach(type::add); + schema.add(String.format("type <%s> {\n %s\n}", typeName, type)); + log.debug("Adding to schema: " + schema); + DgraphProto.Operation setSchema = + 
DgraphProto.Operation.newBuilder() + .setSchema(schema.toString()) + .setRunInBackground(true) + .build(); + dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); } - synchronized public void clear() { - types.clear(); - fields.clear(); + // now that the schema has been updated on dgraph we can cache this new type / field + // ensure type and fields of type exist + if (!types.containsKey(typeName)) { + types.put(typeName, new HashSet<>()); } + types.get(typeName).add(fieldName); + fields.add(fieldName); + } + + public synchronized void clear() { + types.clear(); + fields.clear(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java index 946931a54f4ec..92960bc9222ab 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java @@ -1,7 +1,8 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.*; + import com.codahale.metrics.Timer; -import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.datahub.util.exception.ESQueryException; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; @@ -10,6 +11,7 @@ import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.linkedin.metadata.graph.GraphFilters; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.graph.LineageRelationship; @@ -55,12 +57,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.*; - - -/** - * A search DAO for Elasticsearch backend. - */ +/** A search DAO for Elasticsearch backend. */ @Slf4j @RequiredArgsConstructor public class ESGraphQueryDAO { @@ -83,22 +80,29 @@ public class ESGraphQueryDAO { static final String UI = "UI"; @Nonnull - public static void addFilterToQueryBuilder(@Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) { + public static void addFilterToQueryBuilder( + @Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) { BoolQueryBuilder orQuery = new BoolQueryBuilder(); for (ConjunctiveCriterion conjunction : filter.getOr()) { final BoolQueryBuilder andQuery = new BoolQueryBuilder(); final List<Criterion> criterionArray = conjunction.getAnd(); - if (!criterionArray.stream().allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { - throw new RuntimeException("Currently Elastic query filter only supports EQUAL condition " + criterionArray); + if (!criterionArray.stream() + .allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { + throw new RuntimeException( + "Currently Elastic query filter only supports EQUAL condition " + criterionArray); } criterionArray.forEach( - criterion -> andQuery.must(QueryBuilders.termQuery(node + "." + criterion.getField(), criterion.getValue()))); + criterion -> + andQuery.must( + QueryBuilders.termQuery( + node + "." 
+ criterion.getField(), criterion.getValue()))); orQuery.should(andQuery); } rootQuery.must(orQuery); } - private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, final int offset, final int count) { + private SearchResponse executeSearchQuery( + @Nonnull final QueryBuilder query, final int offset, final int count) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -121,8 +125,12 @@ private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, fin } } - private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, @Nullable Object[] sort, @Nullable String pitId, - @Nonnull String keepAlive, final int count) { + private SearchResponse executeSearchQuery( + @Nonnull final QueryBuilder query, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + final int count) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -141,36 +149,51 @@ private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, @Nu log.error("Search query failed", e); throw new ESQueryException("Search query failed:", e); } - } - public SearchResponse getSearchResponse(@Nullable final List<String> sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List<String> destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter, - final int offset, final int count) { + public SearchResponse getSearchResponse( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count) { BoolQueryBuilder finalQuery = - buildQuery(sourceTypes, sourceEntityFilter, destinationTypes, destinationEntityFilter, relationshipTypes, + buildQuery( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, relationshipFilter); return executeSearchQuery(finalQuery, offset, count); } - public static BoolQueryBuilder buildQuery(@Nullable final List<String> sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List<String> destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { + public static BoolQueryBuilder buildQuery( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); // set source filter - String sourceNode = relationshipDirection == RelationshipDirection.OUTGOING ? SOURCE : DESTINATION; + String sourceNode = + relationshipDirection == RelationshipDirection.OUTGOING ? 
SOURCE : DESTINATION; if (sourceTypes != null && sourceTypes.size() > 0) { finalQuery.must(QueryBuilders.termsQuery(sourceNode + ".entityType", sourceTypes)); } addFilterToQueryBuilder(sourceEntityFilter, sourceNode, finalQuery); // set destination filter - String destinationNode = relationshipDirection == RelationshipDirection.OUTGOING ? DESTINATION : SOURCE; + String destinationNode = + relationshipDirection == RelationshipDirection.OUTGOING ? DESTINATION : SOURCE; if (destinationTypes != null && destinationTypes.size() > 0) { finalQuery.must(QueryBuilders.termsQuery(destinationNode + ".entityType", destinationTypes)); } @@ -180,16 +203,24 @@ public static BoolQueryBuilder buildQuery(@Nullable final List<String> sourceTyp if (relationshipTypes.size() > 0) { BoolQueryBuilder relationshipQuery = QueryBuilders.boolQuery(); relationshipTypes.forEach( - relationshipType -> relationshipQuery.should(QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType))); + relationshipType -> + relationshipQuery.should( + QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType))); finalQuery.must(relationshipQuery); } return finalQuery; } @WithSpan - public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, - int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + public LineageResponse getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { List<LineageRelationship> result = new ArrayList<>(); long currentTime = System.currentTimeMillis(); long remainingTime = graphQueryConfiguration.getTimeoutSeconds() * 1000; @@ -207,8 +238,11 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect } if (remainingTime < 0) { - log.info("Timed out while fetching lineage for {} with direction {}, maxHops {}. Returning results so far", - entityUrn, direction, maxHops); + log.info( + "Timed out while fetching lineage for {} with direction {}, maxHops {}. 
Returning results so far", + entityUrn, + direction, + maxHops); break; } @@ -225,7 +259,10 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect startTimeMillis, endTimeMillis); result.addAll(oneHopRelationships); - currentLevel = oneHopRelationships.stream().map(LineageRelationship::getEntity).collect(Collectors.toList()); + currentLevel = + oneHopRelationships.stream() + .map(LineageRelationship::getEntity) + .collect(Collectors.toList()); currentTime = System.currentTimeMillis(); remainingTime = timeoutTime - currentTime; } @@ -235,7 +272,10 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect if (offset >= response.getTotal()) { subList = Collections.emptyList(); } else { - subList = response.getLineageRelationships().subList(offset, Math.min(offset + count, response.getTotal())); + subList = + response + .getLineageRelationships() + .subList(offset, Math.min(offset + count, response.getTotal())); } return new LineageResponse(response.getTotal(), subList); @@ -243,23 +283,35 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect // Get 1-hop lineage relationships asynchronously in batches with timeout @WithSpan - public List<LineageRelationship> getLineageRelationshipsInBatches(@Nonnull List<Urn> entityUrns, - @Nonnull LineageDirection direction, GraphFilters graphFilters, Set<Urn> visitedEntities, int numHops, - long remainingTime, Map<Urn, UrnArrayArray> existingPaths, @Nullable Long startTimeMillis, + public List<LineageRelationship> getLineageRelationshipsInBatches( + @Nonnull List<Urn> entityUrns, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + Set<Urn> visitedEntities, + int numHops, + long remainingTime, + Map<Urn, UrnArrayArray> existingPaths, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { List<List<Urn>> batches = Lists.partition(entityUrns, graphQueryConfiguration.getBatchSize()); - return ConcurrencyUtils.getAllCompleted(batches.stream() - .map(batchUrns -> CompletableFuture.supplyAsync( - () -> getLineageRelationships( - batchUrns, - direction, - graphFilters, - visitedEntities, - numHops, - existingPaths, - startTimeMillis, - endTimeMillis))) - .collect(Collectors.toList()), remainingTime, TimeUnit.MILLISECONDS) + return ConcurrencyUtils.getAllCompleted( + batches.stream() + .map( + batchUrns -> + CompletableFuture.supplyAsync( + () -> + getLineageRelationships( + batchUrns, + direction, + graphFilters, + visitedEntities, + numHops, + existingPaths, + startTimeMillis, + endTimeMillis))) + .collect(Collectors.toList()), + remainingTime, + TimeUnit.MILLISECONDS) .stream() .flatMap(List::stream) .collect(Collectors.toList()); @@ -267,42 +319,56 @@ public List<LineageRelationship> getLineageRelationshipsInBatches(@Nonnull List< // Get 1-hop lineage relationships @WithSpan - private List<LineageRelationship> getLineageRelationships(@Nonnull List<Urn> entityUrns, - @Nonnull LineageDirection direction, GraphFilters graphFilters, Set<Urn> visitedEntities, int numHops, - Map<Urn, UrnArrayArray> existingPaths, @Nullable Long startTimeMillis, + private List<LineageRelationship> getLineageRelationships( + @Nonnull List<Urn> entityUrns, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + Set<Urn> visitedEntities, + int numHops, + Map<Urn, UrnArrayArray> existingPaths, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - Map<String, List<Urn>> urnsPerEntityType = 
entityUrns.stream().collect(Collectors.groupingBy(Urn::getEntityType)); - Map<String, List<EdgeInfo>> edgesPerEntityType = urnsPerEntityType.keySet() - .stream() - .collect(Collectors.toMap(Function.identity(), - entityType -> lineageRegistry.getLineageRelationships(entityType, direction))); + Map<String, List<Urn>> urnsPerEntityType = + entityUrns.stream().collect(Collectors.groupingBy(Urn::getEntityType)); + Map<String, List<EdgeInfo>> edgesPerEntityType = + urnsPerEntityType.keySet().stream() + .collect( + Collectors.toMap( + Function.identity(), + entityType -> lineageRegistry.getLineageRelationships(entityType, direction))); BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); // Get all relation types relevant to the set of urns to hop from - urnsPerEntityType.forEach((entityType, urns) -> finalQuery.should( - getQueryForLineage( - urns, - edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()), - graphFilters, - startTimeMillis, - endTimeMillis))); - SearchResponse response = executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult()); + urnsPerEntityType.forEach( + (entityType, urns) -> + finalQuery.should( + getQueryForLineage( + urns, + edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()), + graphFilters, + startTimeMillis, + endTimeMillis))); + SearchResponse response = + executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult()); Set<Urn> entityUrnSet = new HashSet<>(entityUrns); // Get all valid edges given the set of urns to hop from - Set<Pair<String, EdgeInfo>> validEdges = edgesPerEntityType.entrySet() - .stream() - .flatMap(entry -> entry.getValue().stream().map(edgeInfo -> Pair.of(entry.getKey(), edgeInfo))) - .collect(Collectors.toSet()); - return extractRelationships(entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); + Set<Pair<String, EdgeInfo>> validEdges = + edgesPerEntityType.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream().map(edgeInfo -> Pair.of(entry.getKey(), edgeInfo))) + .collect(Collectors.toSet()); + return extractRelationships( + entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); } // Get search query for given list of edges and source urns @VisibleForTesting public static QueryBuilder getQueryForLineage( - @Nonnull List<Urn> urns, - @Nonnull List<EdgeInfo> lineageEdges, - @Nonnull GraphFilters graphFilters, - @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis) { + @Nonnull List<Urn> urns, + @Nonnull List<EdgeInfo> lineageEdges, + @Nonnull GraphFilters graphFilters, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { BoolQueryBuilder query = QueryBuilders.boolQuery(); if (lineageEdges.isEmpty()) { return query; @@ -328,43 +394,46 @@ public static QueryBuilder getQueryForLineage( if (startTimeMillis != null && endTimeMillis != null) { query.must(TimeFilterUtils.getEdgeTimeFilterQuery(startTimeMillis, endTimeMillis)); } else { - log.debug(String.format( - "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", - startTimeMillis, - endTimeMillis)); + log.debug( + String.format( + "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", + startTimeMillis, endTimeMillis)); } return query; } /** - * Adds an individual relationship edge to a running set of unique paths to each node in the graph. + * Adds an individual relationship edge to a running set of unique paths to each node in the + * graph. 
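getLineageRelationshipsInBatches above partitions the frontier urns and fans out one asynchronous search per batch, keeping whatever completes inside the remaining time budget. ConcurrencyUtils.getAllCompleted is DataHub's own helper; the following rough equivalent, using only java.util.concurrent, is a sketch of the trade it makes (partial results rather than unbounded blocking):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

class BatchFanOutSketch {
  // Collect the results of every future that completes before the shared
  // deadline; batches that time out or fail are skipped, which mirrors the
  // "return results so far" behavior of the lineage query above.
  static <T> List<T> getAllCompleted(List<CompletableFuture<List<T>>> futures, long timeoutMs) {
    long deadline = System.currentTimeMillis() + timeoutMs;
    List<T> results = new ArrayList<>();
    for (CompletableFuture<List<T>> future : futures) {
      long remaining = Math.max(0, deadline - System.currentTimeMillis());
      try {
        results.addAll(future.get(remaining, TimeUnit.MILLISECONDS));
      } catch (Exception e) {
        // Timed out, interrupted, or failed: drop this batch, keep the rest.
      }
    }
    return results;
  }
}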
* - * Specifically, this method updates 'existingPaths', which is a map of an entity urn representing a node in the - * lineage graph to the full paths that can be traversed to reach it from a the origin node for which lineage - * was requested. + * <p>Specifically, this method updates 'existingPaths', which is a map of an entity urn + * representing a node in the lineage graph to the full paths that can be traversed to reach it + * from a the origin node for which lineage was requested. * - * This method strictly assumes that edges are being added IN ORDER, level-by-level working outwards from the originally - * requested source node. If edges are added to the path set in an out of order manner, then the paths to a given node - * may be partial / incomplete. + * <p>This method strictly assumes that edges are being added IN ORDER, level-by-level working + * outwards from the originally requested source node. If edges are added to the path set in an + * out of order manner, then the paths to a given node may be partial / incomplete. * - * Note that calling this method twice with the same edge is not safe. It will result in duplicate paths being appended - * into the list of paths to the provided child urn. + * <p>Note that calling this method twice with the same edge is not safe. It will result in + * duplicate paths being appended into the list of paths to the provided child urn. * - * @param existingPaths a running set of unique, uni-directional paths to each node in the graph starting from the original root node - * for which lineage was requested. - * @param parentUrn the "parent" node (or source node) in the edge to add. This is a logical source node in a uni-directional path from the source - * to the destination node. Note that this is NOT always the URN corresponding to the "source" field that is physically stored - * inside the Graph Store. - * @param childUrn the "child" node (or dest node) in the edge to add. This is a logical dest node in a uni-directional path from the - * source to the destination node. Note that this is NOT always the URN corresponding to the "destination" field that is - * physically stored inside the Graph Store. + * @param existingPaths a running set of unique, uni-directional paths to each node in the graph + * starting from the original root node for which lineage was requested. + * @param parentUrn the "parent" node (or source node) in the edge to add. This is a logical + * source node in a uni-directional path from the source to the destination node. Note that + * this is NOT always the URN corresponding to the "source" field that is physically stored + * inside the Graph Store. + * @param childUrn the "child" node (or dest node) in the edge to add. This is a logical dest node + * in a uni-directional path from the source to the destination node. Note that this is NOT + * always the URN corresponding to the "destination" field that is physically stored inside + * the Graph Store. */ @VisibleForTesting public static void addEdgeToPaths( - @Nonnull final Map<Urn, UrnArrayArray> existingPaths, - @Nonnull final Urn parentUrn, - @Nonnull final Urn childUrn) { + @Nonnull final Map<Urn, UrnArrayArray> existingPaths, + @Nonnull final Urn parentUrn, + @Nonnull final Urn childUrn) { // Collect all full-paths to this child node. This is what will be returned. 
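    // For intuition, a hypothetical walk-through of this bookkeeping (urns A, B,
    // C, D are made up): if lineage was requested for A and existingPaths already
    // maps B -> [[A, B]], then adding edge (parent B, child C) extends every
    // stored path to B with C, leaving existingPaths with C -> [[A, B, C]]. A
    // later edge (B, D) yields D -> [[A, B, D]] without disturbing C's paths.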
UrnArrayArray pathsToParent = existingPaths.get(parentUrn); if (pathsToParent != null && pathsToParent.size() > 0) { @@ -388,16 +457,22 @@ public static void addEdgeToPaths( } } - // Given set of edges and the search response, extract all valid edges that originate from the input entityUrns + // Given set of edges and the search response, extract all valid edges that originate from the + // input entityUrns @WithSpan - private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> entityUrns, - @Nonnull SearchResponse searchResponse, Set<Pair<String, EdgeInfo>> validEdges, Set<Urn> visitedEntities, - int numHops, Map<Urn, UrnArrayArray> existingPaths) { + private static List<LineageRelationship> extractRelationships( + @Nonnull Set<Urn> entityUrns, + @Nonnull SearchResponse searchResponse, + Set<Pair<String, EdgeInfo>> validEdges, + Set<Urn> visitedEntities, + int numHops, + Map<Urn, UrnArrayArray> existingPaths) { final List<LineageRelationship> result = new LinkedList<>(); final SearchHit[] hits = searchResponse.getHits().getHits(); for (SearchHit hit : hits) { final Map<String, Object> document = hit.getSourceAsMap(); - final Urn sourceUrn = UrnUtils.getUrn(((Map<String, Object>) document.get(SOURCE)).get("urn").toString()); + final Urn sourceUrn = + UrnUtils.getUrn(((Map<String, Object>) document.get(SOURCE)).get("urn").toString()); final Urn destinationUrn = UrnUtils.getUrn(((Map<String, Object>) document.get(DESTINATION)).get("urn").toString()); final String type = document.get(RELATIONSHIP_TYPE).toString(); @@ -406,9 +481,11 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> final Number updatedOnNumber = (Number) document.getOrDefault(UPDATED_ON, null); final Long updatedOn = updatedOnNumber != null ? updatedOnNumber.longValue() : null; final String createdActorString = (String) document.getOrDefault(CREATED_ACTOR, null); - final Urn createdActor = createdActorString == null ? null : UrnUtils.getUrn(createdActorString); + final Urn createdActor = + createdActorString == null ? null : UrnUtils.getUrn(createdActorString); final String updatedActorString = (String) document.getOrDefault(UPDATED_ACTOR, null); - final Urn updatedActor = updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); + final Urn updatedActor = + updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); final Map<String, Object> properties; if (document.containsKey(PROPERTIES) && document.get(PROPERTIES) instanceof Map) { properties = (Map<String, Object>) document.get(PROPERTIES); @@ -422,9 +499,14 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> // Skip if already visited // Skip if edge is not a valid outgoing edge // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(destinationUrn) && validEdges.contains( - Pair.of(sourceUrn.getEntityType(), - new EdgeInfo(type, RelationshipDirection.OUTGOING, destinationUrn.getEntityType().toLowerCase())))) { + if (!visitedEntities.contains(destinationUrn) + && validEdges.contains( + Pair.of( + sourceUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.OUTGOING, + destinationUrn.getEntityType().toLowerCase())))) { visitedEntities.add(destinationUrn); // Append the edge to a set of unique graph paths. 
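        // For intuition (hypothetical values): the validEdges guard above keeps an
        // OUTGOING hit only when the lineage registry declared that edge for the
        // source entity type; e.g. a dataset-to-dataset DownstreamOf edge would
        // have to match
        //   Pair.of("dataset",
        //       new EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "dataset"))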
addEdgeToPaths(existingPaths, sourceUrn, destinationUrn); @@ -433,7 +515,9 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> type, destinationUrn, numHops, - existingPaths.getOrDefault(destinationUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. + existingPaths.getOrDefault( + destinationUrn, + new UrnArrayArray()), // Fetch the paths to the next level entity. createdOn, createdActor, updatedOn, @@ -448,21 +532,29 @@ private static List<LineageRelationship> extractRelationships(@Nonnull Set<Urn> // Skip if already visited // Skip if edge is not a valid outgoing edge // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(sourceUrn) && validEdges.contains( - Pair.of(destinationUrn.getEntityType(), new EdgeInfo(type, RelationshipDirection.INCOMING, sourceUrn.getEntityType().toLowerCase())))) { + if (!visitedEntities.contains(sourceUrn) + && validEdges.contains( + Pair.of( + destinationUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.INCOMING, + sourceUrn.getEntityType().toLowerCase())))) { visitedEntities.add(sourceUrn); // Append the edge to a set of unique graph paths. addEdgeToPaths(existingPaths, destinationUrn, sourceUrn); - final LineageRelationship relationship = createLineageRelationship( - type, - sourceUrn, - numHops, - existingPaths.getOrDefault(sourceUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. - createdOn, - createdActor, - updatedOn, - updatedActor, - isManual); + final LineageRelationship relationship = + createLineageRelationship( + type, + sourceUrn, + numHops, + existingPaths.getOrDefault( + sourceUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. + createdOn, + createdActor, + updatedOn, + updatedActor, + isManual); result.add(relationship); } } @@ -479,10 +571,13 @@ private static LineageRelationship createLineageRelationship( @Nullable final Urn createdActor, @Nullable final Long updatedOn, @Nullable final Urn updatedActor, - final boolean isManual - ) { + final boolean isManual) { final LineageRelationship relationship = - new LineageRelationship().setType(type).setEntity(entityUrn).setDegree(numHops).setPaths(paths); + new LineageRelationship() + .setType(type) + .setEntity(entityUrn) + .setDegree(numHops) + .setPaths(paths); if (createdOn != null) { relationship.setCreatedOn(createdOn); } @@ -507,18 +602,19 @@ private static BoolQueryBuilder getOutGoingEdgeQuery( outgoingEdgeQuery.must(buildUrnFilters(urns, SOURCE)); outgoingEdgeQuery.must(buildEdgeFilters(outgoingEdges)); outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); + outgoingEdgeQuery.must( + buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return outgoingEdgeQuery; } private static BoolQueryBuilder getIncomingEdgeQuery( - @Nonnull List<Urn> urns, List<EdgeInfo> incomingEdges, - @Nonnull GraphFilters graphFilters) { + @Nonnull List<Urn> urns, List<EdgeInfo> incomingEdges, @Nonnull GraphFilters graphFilters) { BoolQueryBuilder incomingEdgeQuery = QueryBuilders.boolQuery(); incomingEdgeQuery.must(buildUrnFilters(urns, DESTINATION)); incomingEdgeQuery.must(buildEdgeFilters(incomingEdges)); incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); + 
incomingEdgeQuery.must( + buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return incomingEdgeQuery; } @@ -530,16 +626,21 @@ private static UrnArray clonePath(final UrnArray basePath) { } } - private static QueryBuilder buildEntityTypesFilter(@Nonnull List<String> entityTypes, @Nonnull String prefix) { - return QueryBuilders.termsQuery(prefix + ".entityType", entityTypes.stream().map(Object::toString).collect(Collectors.toList())); + private static QueryBuilder buildEntityTypesFilter( + @Nonnull List<String> entityTypes, @Nonnull String prefix) { + return QueryBuilders.termsQuery( + prefix + ".entityType", + entityTypes.stream().map(Object::toString).collect(Collectors.toList())); } private static QueryBuilder buildUrnFilters(@Nonnull List<Urn> urns, @Nonnull String prefix) { - return QueryBuilders.termsQuery(prefix + ".urn", urns.stream().map(Object::toString).collect(Collectors.toList())); + return QueryBuilders.termsQuery( + prefix + ".urn", urns.stream().map(Object::toString).collect(Collectors.toList())); } private static QueryBuilder buildEdgeFilters(@Nonnull List<EdgeInfo> edgeInfos) { - return QueryBuilders.termsQuery("relationshipType", + return QueryBuilders.termsQuery( + "relationshipType", edgeInfos.stream().map(EdgeInfo::getType).distinct().collect(Collectors.toList())); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java index f8b0e8a291e7a..5d722a034fafc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.buildQuery; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipFilter; @@ -16,10 +19,6 @@ import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.reindex.BulkByScrollResponse; -import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.buildQuery; -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; - - @Slf4j @RequiredArgsConstructor public class ESGraphWriteDAO { @@ -36,8 +35,8 @@ public class ESGraphWriteDAO { * @param docId the ID of the document */ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { - final UpdateRequest updateRequest = new UpdateRequest( - indexConvention.getIndexName(INDEX_NAME), docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexConvention.getIndexName(INDEX_NAME), docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -56,15 +55,24 @@ public void deleteDocument(@Nonnull String docId) { bulkProcessor.add(deleteRequest); } - public BulkByScrollResponse deleteByQuery(@Nullable final String sourceType, @Nonnull final Filter sourceEntityFilter, - @Nullable final String destinationType, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { + public BulkByScrollResponse deleteByQuery( + @Nullable final String sourceType, + @Nonnull final Filter sourceEntityFilter, + @Nullable final String destinationType, + @Nonnull final 
Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter) { BoolQueryBuilder finalQuery = - buildQuery(sourceType == null ? ImmutableList.of() : ImmutableList.of(sourceType), sourceEntityFilter, - destinationType == null ? ImmutableList.of() : ImmutableList.of(destinationType), destinationEntityFilter, - relationshipTypes, relationshipFilter); + buildQuery( + sourceType == null ? ImmutableList.of() : ImmutableList.of(sourceType), + sourceEntityFilter, + destinationType == null ? ImmutableList.of() : ImmutableList.of(destinationType), + destinationEntityFilter, + relationshipTypes, + relationshipFilter); - return bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) - .orElse(null); + return bulkProcessor + .deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) + .orElse(null); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java index 5fdf4d45ffa3b..6c828c0e7c6ae 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java @@ -48,7 +48,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.index.query.QueryBuilders; - @Slf4j @RequiredArgsConstructor public class ElasticSearchGraphService implements GraphService, ElasticSearchIndexed { @@ -99,10 +98,7 @@ private String toDocument(@Nonnull final Edge edge) { throw new UnsupportedOperationException( String.format( "Tried setting properties on graph edge but property value type is not supported. Key: %s, Value: %s ", - entry.getKey(), - entry.getValue() - ) - ); + entry.getKey(), entry.getValue())); } } searchDocument.set("properties", propertiesObject); @@ -113,8 +109,11 @@ private String toDocument(@Nonnull final Edge edge) { private String toDocId(@Nonnull final Edge edge) { String rawDocId = - edge.getSource().toString() + DOC_DELIMETER + edge.getRelationshipType() + DOC_DELIMETER + edge.getDestination() - .toString(); + edge.getSource().toString() + + DOC_DELIMETER + + edge.getRelationshipType() + + DOC_DELIMETER + + edge.getDestination().toString(); try { byte[] bytesOfRawDocID = rawDocId.getBytes(StandardCharsets.UTF_8); @@ -160,48 +159,55 @@ public RelatedEntitiesResult findRelatedEntities( @Nonnull final RelationshipFilter relationshipFilter, final int offset, final int count) { - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); - String destinationNode = relationshipDirection == RelationshipDirection.OUTGOING ? "destination" : "source"; - - SearchResponse response = _graphReadDAO.getSearchResponse( - sourceTypes, - sourceEntityFilter, - destinationTypes, - destinationEntityFilter, - relationshipTypes, - relationshipFilter, - offset, - count - ); + String destinationNode = + relationshipDirection == RelationshipDirection.OUTGOING ? 
"destination" : "source"; + + SearchResponse response = + _graphReadDAO.getSearchResponse( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + offset, + count); if (response == null) { return new RelatedEntitiesResult(offset, 0, 0, ImmutableList.of()); } int totalCount = (int) response.getHits().getTotalHits().value; - final List<RelatedEntity> relationships = Arrays.stream(response.getHits().getHits()) - .map(hit -> { - final String urnStr = - ((HashMap<String, String>) hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH)).getOrDefault( - "urn", null); - final String relationshipType = (String) hit.getSourceAsMap().get("relationshipType"); - - if (urnStr == null || relationshipType == null) { - log.error(String.format( - "Found null urn string, relationship type, aspect name or path spec in Elastic index. " - + "urnStr: %s, relationshipType: %s", - urnStr, relationshipType)); - return null; - } - - return new RelatedEntity(relationshipType, urnStr); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + final List<RelatedEntity> relationships = + Arrays.stream(response.getHits().getHits()) + .map( + hit -> { + final String urnStr = + ((HashMap<String, String>) + hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH)) + .getOrDefault("urn", null); + final String relationshipType = + (String) hit.getSourceAsMap().get("relationshipType"); + + if (urnStr == null || relationshipType == null) { + log.error( + String.format( + "Found null urn string, relationship type, aspect name or path spec in Elastic index. " + + "urnStr: %s, relationshipType: %s", + urnStr, relationshipType)); + return null; + } + + return new RelatedEntity(relationshipType, urnStr); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); return new RelatedEntitiesResult(offset, relationships.size(), totalCount, relationships); } @@ -209,22 +215,18 @@ public RelatedEntitiesResult findRelatedEntities( @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, GraphFilters graphFilters, int offset, - int count, int maxHops) { + int count, + int maxHops) { ESGraphQueryDAO.LineageResponse lineageResponse = _graphReadDAO.getLineage( - entityUrn, - direction, - graphFilters, - offset, - count, - maxHops, - null, - null); - return new EntityLineageResult().setRelationships( - new LineageRelationshipArray(lineageResponse.getLineageRelationships())) + entityUrn, direction, graphFilters, offset, count, maxHops, null, null); + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageResponse.getLineageRelationships())) .setStart(offset) .setCount(count) .setTotal(lineageResponse.getTotal()); @@ -233,10 +235,15 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, GraphFilters graphFilters, int offset, - int count, int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { ESGraphQueryDAO.LineageResponse lineageResponse = 
_graphReadDAO.getLineage( entityUrn, @@ -247,8 +254,8 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi maxHops, startTimeMillis, endTimeMillis); - return new EntityLineageResult().setRelationships( - new LineageRelationshipArray(lineageResponse.getLineageRelationships())) + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageResponse.getLineageRelationships())) .setStart(offset) .setCount(count) .setTotal(lineageResponse.getTotal()); @@ -262,7 +269,9 @@ private Filter createUrnFilter(@Nonnull final Urn urn) { criterion.setField("urn"); criterion.setValue(urn.toString()); criterionArray.add(criterion); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criterionArray)))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(criterionArray)))); return filter; } @@ -272,26 +281,16 @@ public void removeNode(@Nonnull final Urn urn) { Filter emptyFilter = new Filter().setOr(new ConjunctiveCriterionArray()); List<String> relationshipTypes = new ArrayList<>(); - RelationshipFilter outgoingFilter = new RelationshipFilter().setDirection(RelationshipDirection.OUTGOING); - RelationshipFilter incomingFilter = new RelationshipFilter().setDirection(RelationshipDirection.INCOMING); + RelationshipFilter outgoingFilter = + new RelationshipFilter().setDirection(RelationshipDirection.OUTGOING); + RelationshipFilter incomingFilter = + new RelationshipFilter().setDirection(RelationshipDirection.INCOMING); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - outgoingFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, outgoingFilter); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - incomingFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, incomingFilter); return; } @@ -305,13 +304,7 @@ public void removeEdgesFromNode( Filter emptyFilter = new Filter().setOr(new ConjunctiveCriterionArray()); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - relationshipFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, relationshipFilter); } @Override @@ -328,8 +321,11 @@ public void configure() { @Override public List<ReindexConfig> buildReindexConfigs() throws IOException { - return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME), - GraphRelationshipMappingsBuilder.getMappings(), Collections.emptyMap())); + return List.of( + _indexBuilder.buildReindexState( + _indexConvention.getIndexName(INDEX_NAME), + GraphRelationshipMappingsBuilder.getMappings(), + Collections.emptyMap())); } @Override @@ -340,7 +336,8 @@ public void reindexAll() { @VisibleForTesting @Override public void clear() { - _esBulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); + _esBulkProcessor.deleteByQuery( + QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); } @Override diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java index cf97cf56023ad..21f2bf6c89204 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java @@ -5,11 +5,10 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; - @Slf4j public class GraphRelationshipMappingsBuilder { - private GraphRelationshipMappingsBuilder() { } + private GraphRelationshipMappingsBuilder() {} public static Map<String, Object> getMappings() { Map<String, Object> mappings = new HashMap<>(); @@ -27,19 +26,19 @@ private static Map<String, Object> getMappingsForKeyword() { private static Map<String, Object> getMappingsForEntity() { - Map<String, Object> mappings = ImmutableMap.<String, Object>builder() - .put("urn", getMappingsForKeyword()) - .put("entityType", getMappingsForKeyword()) - .build(); + Map<String, Object> mappings = + ImmutableMap.<String, Object>builder() + .put("urn", getMappingsForKeyword()) + .put("entityType", getMappingsForKeyword()) + .build(); return ImmutableMap.of("properties", mappings); } private static Map<String, Object> getMappingsForEdgeProperties() { - Map<String, Object> propertyMappings = ImmutableMap.<String, Object>builder() - .put("source", getMappingsForKeyword()) - .build(); + Map<String, Object> propertyMappings = + ImmutableMap.<String, Object>builder().put("source", getMappingsForKeyword()).build(); return ImmutableMap.of("properties", propertyMappings); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java index 1df938f902e0f..7ee84ce834cfa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java @@ -1,28 +1,33 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.*; + import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; -import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.*; - @Slf4j public class TimeFilterUtils { /** - * In order to filter for edges that fall into a specific filter window, we perform a range-overlap query. - * Note that both a start time and an end time must be provided in order to add the filters. + * In order to filter for edges that fall into a specific filter window, we perform a + * range-overlap query. Note that both a start time and an end time must be provided in order to + * add the filters. * - * A range overlap query compares 2 time windows for ANY overlap. This essentially equates to a union operation. - * Each window is characterized by 2 points in time: a start time (e.g. created time of the edge) and an end time - * (e.g. last updated time of an edge). + * <p>A range overlap query compares 2 time windows for ANY overlap. This essentially equates to a + * union operation. Each window is characterized by 2 points in time: a start time (e.g. created + * time of the edge) and an end time (e.g. last updated time of an edge). 
* * @param startTimeMillis the start of the time filter window * @param endTimeMillis the end of the time filter window */ - public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, final long endTimeMillis) { - log.debug(String.format("Adding edge time filters for start time: %s, end time: %s", startTimeMillis, endTimeMillis)); + public static QueryBuilder getEdgeTimeFilterQuery( + final long startTimeMillis, final long endTimeMillis) { + log.debug( + String.format( + "Adding edge time filters for start time: %s, end time: %s", + startTimeMillis, endTimeMillis)); /* * One of the following must be true in order for the edge to be returned (should = OR) * @@ -30,7 +35,7 @@ public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, fi * 2. The createdOn and updatedOn window does not exist on the edge at all (support legacy cases) * 3. Special lineage case: The edge is marked as a "manual" edge, meaning that the time filters should NOT be applied. */ - BoolQueryBuilder timeFilterQuery = QueryBuilders.boolQuery(); + BoolQueryBuilder timeFilterQuery = QueryBuilders.boolQuery(); timeFilterQuery.should(buildTimeWindowFilter(startTimeMillis, endTimeMillis)); timeFilterQuery.should(buildTimestampsMissingFilter()); timeFilterQuery.should(buildManualLineageFilter()); @@ -38,61 +43,54 @@ public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, fi } /** - * Builds a filter that compares 2 windows on a timeline and returns true for any overlap. This logic - * is a bit tricky so change with caution. - * - * The first window comes from start time and end time provided by the user. - * The second window comes from the createdOn and updatedOn timestamps present on graph edges. + * Builds a filter that compares 2 windows on a timeline and returns true for any overlap. This + * logic is a bit tricky so change with caution. * - * Also accounts for the case where createdOn or updatedOn is MISSING, and in such cases performs - * a point overlap instead of a range overlap. + * <p>The first window comes from start time and end time provided by the user. The second window + * comes from the createdOn and updatedOn timestamps present on graph edges. * - * Range Examples: + * <p>Also accounts for the case where createdOn or updatedOn is MISSING, and in such cases + * performs a point overlap instead of a range overlap. * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + * <p>Range Examples: * - * = true + * <p>start time -> end time |-----| createdOn -> updatedOn |-----| * - * start time -> end time |------| - * createdOn -> updatedOn |--| + * <p>= true * - * = true + * <p>start time -> end time |------| createdOn -> updatedOn |--| * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + * <p>= true * - * = true + * <p>start time -> end time |-----| createdOn -> updatedOn |-----| * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + * <p>= true * - * = false + * <p>start time -> end time |-----| createdOn -> updatedOn |-----| * + * <p>= false * - * Point Examples: + * <p>Point Examples: * - * start time -> end time |-----| - * updatedOn | + * <p>start time -> end time |-----| updatedOn | * - * = true + * <p>= true * - * start time -> end time |-----| - * updatedOn | + * <p>start time -> end time |-----| updatedOn | * - * = false + * <p>= false * - * and same for createdOn. + * <p>and same for createdOn. 
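The examples above boil down to the standard closed-interval overlap test. Assuming both createdOn and updatedOn are present, that test is a one-liner; this is a sketch for intuition only, not the query actually assembled below, which expresses the window comparison as ORed range filters plus the missing-timestamp and manual-edge escapes:

class TimeWindowSketch {
  // Two closed intervals overlap iff each one starts no later than the other ends.
  static boolean edgeWindowOverlaps(
      long startTimeMillis, long endTimeMillis, long createdOn, long updatedOn) {
    return createdOn <= endTimeMillis && startTimeMillis <= updatedOn;
  }
}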
* - * Assumptions are that startTimeMillis is always before or equal to endTimeMillis, - * and createdOn is always before or equal to updatedOn. + * <p>Assumptions are that startTimeMillis is always before or equal to endTimeMillis, and + * createdOn is always before or equal to updatedOn. * * @param startTimeMillis the start time of the window in milliseconds * @param endTimeMillis the end time of the window in milliseconds - * * @return Query Builder with time window filters appended. */ - private static QueryBuilder buildTimeWindowFilter(final long startTimeMillis, final long endTimeMillis) { + private static QueryBuilder buildTimeWindowFilter( + final long startTimeMillis, final long endTimeMillis) { final BoolQueryBuilder timeWindowQuery = QueryBuilders.boolQuery(); /* @@ -107,12 +105,14 @@ private static QueryBuilder buildTimeWindowFilter(final long startTimeMillis, fi // Build filter comparing createdOn time to startTime->endTime window. BoolQueryBuilder createdOnFilter = QueryBuilders.boolQuery(); createdOnFilter.must(QueryBuilders.existsQuery(CREATED_ON)); - createdOnFilter.must(QueryBuilders.rangeQuery(CREATED_ON).gte(startTimeMillis).lte(endTimeMillis)); + createdOnFilter.must( + QueryBuilders.rangeQuery(CREATED_ON).gte(startTimeMillis).lte(endTimeMillis)); // Build filter comparing updatedOn time to startTime->endTime window. BoolQueryBuilder updatedOnFilter = QueryBuilders.boolQuery(); updatedOnFilter.must(QueryBuilders.existsQuery(UPDATED_ON)); - updatedOnFilter.must(QueryBuilders.rangeQuery(UPDATED_ON).gte(startTimeMillis).lte(endTimeMillis)); + updatedOnFilter.must( + QueryBuilders.rangeQuery(UPDATED_ON).gte(startTimeMillis).lte(endTimeMillis)); // Now - OR the 2 point comparison conditions together. timeWindowQuery.should(createdOnFilter); @@ -141,5 +141,5 @@ private static QueryBuilder buildManualLineageFilter() { return QueryBuilders.termQuery(String.format("%s.%s", PROPERTIES, SOURCE), UI); } - private TimeFilterUtils() { } + private TimeFilterUtils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index ac57fb7db2b78..217d54c5c0b0f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -56,7 +56,6 @@ import org.neo4j.driver.exceptions.Neo4jException; import org.neo4j.driver.types.Relationship; - @Slf4j public class Neo4jGraphService implements GraphService { @@ -69,7 +68,10 @@ public Neo4jGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull Driv this(lineageRegistry, driver, SessionConfig.defaultConfig()); } - public Neo4jGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull Driver driver, @Nonnull SessionConfig sessionConfig) { + public Neo4jGraphService( + @Nonnull LineageRegistry lineageRegistry, + @Nonnull Driver driver, + @Nonnull SessionConfig sessionConfig) { this._lineageRegistry = lineageRegistry; this._driver = driver; this._sessionConfig = sessionConfig; @@ -83,22 +85,24 @@ public LineageRegistry getLineageRegistry() { @Override public void addEdge(@Nonnull final Edge edge) { - log.debug(String.format("Adding Edge source: %s, destination: %s, type: %s", - edge.getSource(), - edge.getDestination(), - edge.getRelationshipType())); + log.debug( + String.format( + "Adding Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), 
edge.getRelationshipType())); final String sourceType = edge.getSource().getEntityType(); final String destinationType = edge.getDestination().getEntityType(); final String sourceUrn = edge.getSource().toString(); final String destinationUrn = edge.getDestination().toString(); - // Introduce startUrn, endUrn for real source node and destination node without consider direct or indirect pattern match + // Introduce startUrn, endUrn for real source node and destination node without consider direct + // or indirect pattern match String endUrn = destinationUrn; String startUrn = sourceUrn; String endType = destinationType; String startType = sourceType; - // Extra relationship typename start with r_ for direct-outgoing-downstream/indirect-incoming-upstream relationships + // Extra relationship typename start with r_ for + // direct-outgoing-downstream/indirect-incoming-upstream relationships String reverseRelationshipType = "r_" + edge.getRelationshipType(); if (isSourceDestReversed(sourceType, edge.getRelationshipType())) { @@ -117,10 +121,23 @@ public void addEdge(@Nonnull final Edge edge) { // Add/Update relationship final String mergeRelationshipTemplate = "MATCH (source:%s {urn: '%s'}),(destination:%s {urn: '%s'}) MERGE (source)-[r:%s]->(destination) "; - String statement = String.format(mergeRelationshipTemplate, sourceType, sourceUrn, destinationType, destinationUrn, - edge.getRelationshipType()); - - String statementR = String.format(mergeRelationshipTemplate, startType, startUrn, endType, endUrn, reverseRelationshipType); + String statement = + String.format( + mergeRelationshipTemplate, + sourceType, + sourceUrn, + destinationType, + destinationUrn, + edge.getRelationshipType()); + + String statementR = + String.format( + mergeRelationshipTemplate, + startType, + startUrn, + endType, + endUrn, + reverseRelationshipType); // Add/Update relationship properties String setCreatedOnTemplate; @@ -152,20 +169,23 @@ public void addEdge(@Nonnull final Edge edge) { Set.of("createdOn", "createdActor", "updatedOn", "updatedActor", "startUrn", "endUrn"); if (preservedKeySet.contains(entry.getKey())) { throw new UnsupportedOperationException( - String.format("Tried setting properties on graph edge but property key is preserved. Key: %s", + String.format( + "Tried setting properties on graph edge but property key is preserved. Key: %s", entry.getKey())); } if (entry.getValue() instanceof String) { setPropertyTemplate = String.format("r.%s = '%s'", entry.getKey(), entry.getValue()); propertiesTemplateJoiner.add(setPropertyTemplate); } else { - throw new UnsupportedOperationException(String.format( - "Tried setting properties on graph edge but property value type is not supported. Key: %s, Value: %s ", - entry.getKey(), entry.getValue())); + throw new UnsupportedOperationException( + String.format( + "Tried setting properties on graph edge but property value type is not supported. 
Key: %s, Value: %s ", + entry.getKey(), entry.getValue())); } } } - final String setStartEndUrnTemplate = String.format("r.startUrn = '%s', r.endUrn = '%s'", startUrn, endUrn); + final String setStartEndUrnTemplate = + String.format("r.startUrn = '%s', r.endUrn = '%s'", startUrn, endUrn); propertiesTemplateJoiner.add(setStartEndUrnTemplate); if (!StringUtils.isEmpty(propertiesTemplateJoiner.toString())) { statementR = String.format("%s SET %s", statementR, propertiesTemplateJoiner); @@ -184,8 +204,9 @@ public void upsertEdge(final Edge edge) { @Override public void removeEdge(final Edge edge) { log.debug( - String.format("Deleting Edge source: %s, destination: %s, type: %s", edge.getSource(), edge.getDestination(), - edge.getRelationshipType())); + String.format( + "Deleting Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), edge.getRelationshipType())); final String sourceType = edge.getSource().getEntityType(); final String destinationType = edge.getDestination().getEntityType(); @@ -208,11 +229,24 @@ public void removeEdge(final Edge edge) { final List<Statement> statements = new ArrayList<>(); // DELETE relationship - final String mergeRelationshipTemplate = "MATCH (source:%s {urn: '%s'})-[r:%s]->(destination:%s {urn: '%s'}) DELETE r"; + final String mergeRelationshipTemplate = + "MATCH (source:%s {urn: '%s'})-[r:%s]->(destination:%s {urn: '%s'}) DELETE r"; final String statement = - String.format(mergeRelationshipTemplate, sourceType, sourceUrn, edge.getRelationshipType(), destinationType, + String.format( + mergeRelationshipTemplate, + sourceType, + sourceUrn, + edge.getRelationshipType(), + destinationType, destinationUrn); - final String statementR = String.format(mergeRelationshipTemplate, startType, startUrn, reverseRelationshipType, endType, endUrn); + final String statementR = + String.format( + mergeRelationshipTemplate, + startType, + startUrn, + reverseRelationshipType, + endType, + endUrn); statements.add(buildStatement(statement, new HashMap<>())); statements.add(buildStatement(statementR, new HashMap<>())); @@ -222,49 +256,74 @@ public void removeEdge(final Edge edge) { @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops) { + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops) { return getLineage(entityUrn, direction, graphFilters, offset, count, maxHops, null, null); } @Nonnull @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops, @Nullable Long startTimeMillis, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { log.debug(String.format("Neo4j getLineage maxHops = %d", maxHops)); final var statementAndParams = - generateLineageStatementAndParameters(entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); + generateLineageStatementAndParameters( + entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); final var statement = statementAndParams.getFirst(); final var parameters = statementAndParams.getSecond(); List<Record> 
neo4jResult = - statement != null ? runQuery(buildStatement(statement, parameters)).list() : new ArrayList<>(); + statement != null + ? runQuery(buildStatement(statement, parameters)).list() + : new ArrayList<>(); LineageRelationshipArray relations = new LineageRelationshipArray(); - neo4jResult.stream().skip(offset).limit(count).forEach(item -> { - String urn = item.values().get(2).asNode().get("urn").asString(); - try { - final var path = item.get(1).asPath(); - final List<Urn> nodeListAsPath = StreamSupport.stream( - path.nodes().spliterator(), false) - .map(node -> createFromString(node.get("urn").asString())) - .collect(Collectors.toList()); - - final var firstRelationship = Optional.ofNullable(Iterables.getFirst(path.relationships(), null)); - - relations.add(new LineageRelationship().setEntity(Urn.createFromString(urn)) - // although firstRelationship should never be absent, provide "" as fallback value - .setType(firstRelationship.map(Relationship::type).orElse("")) - .setDegree(path.length()) - .setPaths(new UrnArrayArray(new UrnArray(nodeListAsPath)))); - } catch (URISyntaxException ignored) { - log.warn(String.format("Can't convert urn = %s, Error = %s", urn, ignored.getMessage())); - } - }); - EntityLineageResult result = new EntityLineageResult().setStart(offset) + neo4jResult.stream() + .skip(offset) + .limit(count) + .forEach( + item -> { + String urn = item.values().get(2).asNode().get("urn").asString(); + try { + final var path = item.get(1).asPath(); + final List<Urn> nodeListAsPath = + StreamSupport.stream(path.nodes().spliterator(), false) + .map(node -> createFromString(node.get("urn").asString())) + .collect(Collectors.toList()); + + final var firstRelationship = + Optional.ofNullable(Iterables.getFirst(path.relationships(), null)); + + relations.add( + new LineageRelationship() + .setEntity(Urn.createFromString(urn)) + // although firstRelationship should never be absent, provide "" as fallback + // value + .setType(firstRelationship.map(Relationship::type).orElse("")) + .setDegree(path.length()) + .setPaths(new UrnArrayArray(new UrnArray(nodeListAsPath)))); + } catch (URISyntaxException ignored) { + log.warn( + String.format("Can't convert urn = %s, Error = %s", urn, ignored.getMessage())); + } + }); + EntityLineageResult result = + new EntityLineageResult() + .setStart(offset) .setCount(relations.size()) .setRelationships(relations) .setTotal(neo4jResult.size()); @@ -277,7 +336,8 @@ private String getPathFindingLabelFilter(List<String> entityNames) { return entityNames.stream().map(x -> String.format("+%s", x)).collect(Collectors.joining("|")); } - private String getPathFindingRelationshipFilter(@Nonnull List<String> entityNames, @Nullable LineageDirection direction) { + private String getPathFindingRelationshipFilter( + @Nonnull List<String> entityNames, @Nullable LineageDirection direction) { // relationshipFilter supports mixing different directions for various relation types, // so simply transform entries lineage registry into format of filter final var filterComponents = new HashSet<String>(); @@ -293,8 +353,10 @@ private String getPathFindingRelationshipFilter(@Nonnull List<String> entityName } } else { // return disjunctive combination of edge types regardless of direction - for (final var direction1 : List.of(LineageDirection.UPSTREAM, LineageDirection.DOWNSTREAM)) { - for (final var edgeInfo : _lineageRegistry.getLineageRelationships(entityName, direction1)) { + for (final var direction1 : + List.of(LineageDirection.UPSTREAM, LineageDirection.DOWNSTREAM)) 
{ + for (final var edgeInfo : + _lineageRegistry.getLineageRelationships(entityName, direction1)) { filterComponents.add(edgeInfo.getType()); } } @@ -304,87 +366,111 @@ private String getPathFindingRelationshipFilter(@Nonnull List<String> entityName } private Pair<String, Map<String, Object>> generateLineageStatementAndParameters( - @Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int maxHops, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { - final var parameterMap = new HashMap<String, Object>(Map.of( - "urn", entityUrn.toString(), - "labelFilter", getPathFindingLabelFilter(graphFilters.getAllowedEntityTypes()), - "relationshipFilter", getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), direction), - "maxHops", maxHops - )); + final var parameterMap = + new HashMap<String, Object>( + Map.of( + "urn", entityUrn.toString(), + "labelFilter", getPathFindingLabelFilter(graphFilters.getAllowedEntityTypes()), + "relationshipFilter", + getPathFindingRelationshipFilter( + graphFilters.getAllowedEntityTypes(), direction), + "maxHops", maxHops)); if (startTimeMillis == null && endTimeMillis == null) { // if no time filtering required, simply find all expansion paths to other nodes - final var statement = "MATCH (a {urn: $urn}) " - + "CALL apoc.path.spanningTree(a, { " - + " relationshipFilter: $relationshipFilter, " - + " labelFilter: $labelFilter, " - + " minLevel: 1, " - + " maxLevel: $maxHops " - + "}) " - + "YIELD path " - + "WITH a, path AS path " - + "RETURN a, path, last(nodes(path));"; + final var statement = + "MATCH (a {urn: $urn}) " + + "CALL apoc.path.spanningTree(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD path " + + "WITH a, path AS path " + + "RETURN a, path, last(nodes(path));"; return Pair.of(statement, parameterMap); } else { // when needing time filtering, possibility on multiple paths between two // nodes must be considered, and we need to construct more complex query // use r_ edges until they are no longer useful - final var relationFilter = getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), null) - .replaceAll("(\\w+)", "r_$1"); + final var relationFilter = + getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), null) + .replaceAll("(\\w+)", "r_$1"); final var relationshipPattern = String.format( (direction == LineageDirection.UPSTREAM ? "<-[:%s*1..%d]-" : "-[:%s*1..%d]->"), - relationFilter, maxHops); + relationFilter, + maxHops); // two steps: // 1. find list of nodes reachable within maxHops // 2. 
find the shortest paths from start node to every other node in these nodes - // (note: according to the docs of shortestPath, WHERE conditions are applied during path exploration, not + // (note: according to the docs of shortestPath, WHERE conditions are applied during path + // exploration, not // after path exploration is done) - final var statement = "MATCH (a {urn: $urn}) " - + "CALL apoc.path.subgraphNodes(a, { " - + " relationshipFilter: $relationshipFilter, " - + " labelFilter: $labelFilter, " - + " minLevel: 1, " - + " maxLevel: $maxHops " - + "}) " - + "YIELD node AS b " - + "WITH a, b " - + "MATCH path = shortestPath((a)" + relationshipPattern + "(b)) " - + "WHERE a <> b " - + " AND ALL(rt IN relationships(path) WHERE " - + " (EXISTS(rt.source) AND rt.source = 'UI') OR " - + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " - + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " - + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " - + " ) " - + "RETURN a, path, b;"; + final var statement = + "MATCH (a {urn: $urn}) " + + "CALL apoc.path.subgraphNodes(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD node AS b " + + "WITH a, b " + + "MATCH path = shortestPath((a)" + + relationshipPattern + + "(b)) " + + "WHERE a <> b " + + " AND ALL(rt IN relationships(path) WHERE " + + " (EXISTS(rt.source) AND rt.source = 'UI') OR " + + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " + + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " + + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " + + " ) " + + "RETURN a, path, b;"; // provide dummy start/end time when not provided, so no need to // format clause differently if either of them is missing parameterMap.put("startTimeMillis", startTimeMillis == null ? 0 : startTimeMillis); - parameterMap.put("endTimeMillis", endTimeMillis == null ? System.currentTimeMillis() : endTimeMillis); + parameterMap.put( + "endTimeMillis", endTimeMillis == null ? 
System.currentTimeMillis() : endTimeMillis); return Pair.of(statement, parameterMap); } } @Nonnull - public RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> sourceTypes, - @Nonnull final Filter sourceEntityFilter, @Nullable final List<String> destinationTypes, - @Nonnull final Filter destinationEntityFilter, @Nonnull final List<String> relationshipTypes, - @Nonnull final RelationshipFilter relationshipFilter, final int offset, final int count) { - - log.debug(String.format("Finding related Neo4j nodes sourceType: %s, sourceEntityFilter: %s, destinationType: %s, ", - sourceTypes, sourceEntityFilter, destinationTypes) + String.format( - "destinationEntityFilter: %s, relationshipTypes: %s, relationshipFilter: %s, ", destinationEntityFilter, - relationshipTypes, relationshipFilter) + String.format("offset: %s, count: %s", offset, count)); + public RelatedEntitiesResult findRelatedEntities( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count) { - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { + log.debug( + String.format( + "Finding related Neo4j nodes sourceType: %s, sourceEntityFilter: %s, destinationType: %s, ", + sourceTypes, sourceEntityFilter, destinationTypes) + + String.format( + "destinationEntityFilter: %s, relationshipTypes: %s, relationshipFilter: %s, ", + destinationEntityFilter, relationshipTypes, relationshipFilter) + + String.format("offset: %s, count: %s", offset, count)); + + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } @@ -401,7 +487,9 @@ public RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> so matchTemplate = "MATCH (src %s)-[r%s %s]->(dest %s)%s"; } - final String returnNodes = String.format("RETURN dest, type(r)"); // Return both related entity and the relationship type. + final String returnNodes = + String.format( + "RETURN dest, type(r)"); // Return both related entity and the relationship type. final String returnCount = "RETURN count(*)"; // For getting the total results. 
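    // Illustrative sketch (hypothetical inputs, not from the upstream change): with
    // sourceTypes = ["dataset"], destinationTypes = ["dataset"], relationshipTypes =
    // ["DownstreamOf"], and empty entity filters, the match/return templates above are
    // combined with the criteria built below into a statement of roughly this shape:
    //
    //   MATCH (src)-[r:DownstreamOf]->(dest)
    //   WHERE left(type(r), 2)<>'r_' AND src:dataset AND dest:dataset
    //   RETURN dest, type(r) SKIP $offset LIMIT $count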
String relationshipTypeFilter = ""; @@ -411,44 +499,70 @@ public RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> so String whereClause = computeEntityTypeWhereClause(sourceTypes, destinationTypes); - // Build Statement strings + // Build Statement strings String baseStatementString = - String.format(matchTemplate, srcCriteria, relationshipTypeFilter, edgeCriteria, destCriteria, whereClause); + String.format( + matchTemplate, + srcCriteria, + relationshipTypeFilter, + edgeCriteria, + destCriteria, + whereClause); log.info(baseStatementString); - final String resultStatementString = String.format("%s %s SKIP $offset LIMIT $count", baseStatementString, returnNodes); + final String resultStatementString = + String.format("%s %s SKIP $offset LIMIT $count", baseStatementString, returnNodes); final String countStatementString = String.format("%s %s", baseStatementString, returnCount); // Build Statements - final Statement resultStatement = new Statement(resultStatementString, ImmutableMap.of("offset", offset, "count", count)); - final Statement countStatement = new Statement(countStatementString, Collections.emptyMap()); + final Statement resultStatement = + new Statement(resultStatementString, ImmutableMap.of("offset", offset, "count", count)); + final Statement countStatement = new Statement(countStatementString, Collections.emptyMap()); // Execute Queries - final List<RelatedEntity> relatedEntities = runQuery(resultStatement).list(record -> - new RelatedEntity( - record.values().get(1).asString(), // Relationship Type - record.values().get(0).asNode().get("urn").asString())); // Urn TODO: Validate this works against Neo4j. + final List<RelatedEntity> relatedEntities = + runQuery(resultStatement) + .list( + record -> + new RelatedEntity( + record.values().get(1).asString(), // Relationship Type + record + .values() + .get(0) + .asNode() + .get("urn") + .asString())); // Urn TODO: Validate this works against Neo4j. 
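    // Hedged usage sketch (assumed caller and filter construction, not from the upstream
    // change): the paired result/count statements support offset pagination, so a caller
    // might fetch a page of downstream datasets along these lines:
    //
    //   RelatedEntitiesResult page =
    //       graphService.findRelatedEntities(
    //           List.of("dataset"), QueryUtils.EMPTY_FILTER,
    //           List.of("dataset"), QueryUtils.EMPTY_FILTER,
    //           List.of("DownstreamOf"),
    //           QueryUtils.newRelationshipFilter(
    //               QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING),
    //           /* offset */ 0, /* count */ 100);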
final int totalCount = runQuery(countStatement).single().get(0).asInt(); return new RelatedEntitiesResult(offset, relatedEntities.size(), totalCount, relatedEntities); } - private String computeEntityTypeWhereClause(@Nonnull final List<String> sourceTypes, - @Nonnull final List<String> destinationTypes) { + private String computeEntityTypeWhereClause( + @Nonnull final List<String> sourceTypes, @Nonnull final List<String> destinationTypes) { String whereClause = " WHERE left(type(r), 2)<>'r_' "; Boolean hasSourceTypes = sourceTypes != null && !sourceTypes.isEmpty(); Boolean hasDestTypes = destinationTypes != null && !destinationTypes.isEmpty(); if (hasSourceTypes && hasDestTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s AND %s", - sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR ")), - destinationTypes.stream().map(type -> "dest:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s AND %s", + sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR ")), + destinationTypes.stream() + .map(type -> "dest:" + type) + .collect(Collectors.joining(" OR "))); } else if (hasSourceTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s", - sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s", + sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR "))); } else if (hasDestTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s", - destinationTypes.stream().map(type -> "dest:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s", + destinationTypes.stream() + .map(type -> "dest:" + type) + .collect(Collectors.joining(" OR "))); } return whereClause; } @@ -468,28 +582,25 @@ public void removeNode(@Nonnull final Urn urn) { } /** - * Remove relationships and reverse relationships by check incoming/outgoing relationships. - * for example: - * a-[consumes]->b, a<-[r_consumes]-b - * a-[produces]->b, a-[r_produces]->b - * should not remove a<-[r_downstreamOf]-b when relationshipDirection equal incoming. - * should remove a-[consumes]->b, a<-[r_consumes]-b, a-[produces]->b, a-[r_produces]->b - * when relationshipDirection equal outgoing. + * Remove relationships and reverse relationships by check incoming/outgoing relationships. for + * example: a-[consumes]->b, a<-[r_consumes]-b a-[produces]->b, a-[r_produces]->b should not + * remove a<-[r_downstreamOf]-b when relationshipDirection equal incoming. should remove + * a-[consumes]->b, a<-[r_consumes]-b, a-[produces]->b, a-[r_produces]->b when + * relationshipDirection equal outgoing. 
* * @param urn Entity relationship type * @param relationshipTypes Entity relationship type * @param relationshipFilter Query relationship filter - * */ public void removeEdgesFromNode( @Nonnull final Urn urn, @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { - log.debug(String.format("Removing Neo4j edge types from node with urn: %s, types: %s, filter: %s", - urn, - relationshipTypes, - relationshipFilter)); + log.debug( + String.format( + "Removing Neo4j edge types from node with urn: %s, types: %s, filter: %s", + urn, relationshipTypes, relationshipFilter)); // also delete any relationship going to or from it final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); @@ -518,9 +629,13 @@ public void removeEdgesFromNode( relationshipTypeFilter = ""; if (!relationshipTypes.isEmpty()) { relationshipTypeFilter = - ":" + StringUtils.join(relationshipTypes, "|") + "|r_" + StringUtils.join(relationshipTypes, "|r_"); + ":" + + StringUtils.join(relationshipTypes, "|") + + "|r_" + + StringUtils.join(relationshipTypes, "|r_"); } - final String statementNoDirection = String.format(matchDeleteTemplate, relationshipTypeFilter); + final String statementNoDirection = + String.format(matchDeleteTemplate, relationshipTypeFilter); runQuery(buildStatement(statementNoDirection, params)).consume(); } else { for (Record typeDest : neo4jResult) { @@ -602,12 +717,13 @@ private synchronized ExecutionResult executeStatements(@Nonnull List<Statement> try (final Session session = _driver.session(_sessionConfig)) { do { try { - session.writeTransaction(tx -> { - for (Statement statement : statements) { - tx.run(statement.getCommandText(), statement.getParams()); - } - return 0; - }); + session.writeTransaction( + tx -> { + for (Statement statement : statements) { + tx.run(statement.getCommandText(), statement.getParams()); + } + return 0; + }); lastException = null; break; } catch (Neo4jException e) { @@ -618,7 +734,8 @@ private synchronized ExecutionResult executeStatements(@Nonnull List<Statement> if (lastException != null) { throw new RetryLimitReached( - "Failed to execute Neo4j write transaction after " + MAX_TRANSACTION_RETRY + " retries", lastException); + "Failed to execute Neo4j write transaction after " + MAX_TRANSACTION_RETRY + " retries", + lastException); } stopWatch.stop(); @@ -650,7 +767,8 @@ private static String toCriterionString(@Nonnull String key, @Nonnull Object val } /** - * Converts {@link RelationshipFilter} to neo4j query criteria, filter criterion condition requires to be EQUAL. + * Converts {@link RelationshipFilter} to neo4j query criteria, filter criterion condition + * requires to be EQUAL. * * @param filter Query relationship filter * @return Neo4j criteria string @@ -661,7 +779,8 @@ private static String relationshipFilterToCriteria(@Nonnull RelationshipFilter f } /** - * Converts {@link Filter} to neo4j query criteria, filter criterion condition requires to be EQUAL. + * Converts {@link Filter} to neo4j query criteria, filter criterion condition requires to be + * EQUAL. * * @param filter Query Filter * @return Neo4j criteria string @@ -674,9 +793,11 @@ private static String filterToCriteria(@Nonnull Filter filter) { private static String disjunctionToCriteria(final ConjunctiveCriterionArray disjunction) { if (disjunction.size() > 1) { // TODO: Support disjunctions (ORs). 
- throw new UnsupportedOperationException("Neo4j query filter only supports 1 set of conjunction criteria"); + throw new UnsupportedOperationException( + "Neo4j query filter only supports 1 set of conjunction criteria"); } - final CriterionArray criterionArray = disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); + final CriterionArray criterionArray = + disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); return criterionToString(criterionArray); } @@ -688,20 +809,21 @@ private static String disjunctionToCriteria(final ConjunctiveCriterionArray disj */ @Nonnull private static String criterionToString(@Nonnull CriterionArray criterionArray) { - if (!criterionArray.stream().allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { - throw new RuntimeException("Neo4j query filter only support EQUAL condition " + criterionArray); + if (!criterionArray.stream() + .allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { + throw new RuntimeException( + "Neo4j query filter only support EQUAL condition " + criterionArray); } final StringJoiner joiner = new StringJoiner(",", "{", "}"); - criterionArray.forEach(criterion -> joiner.add(toCriterionString(criterion.getField(), criterion.getValue()))); + criterionArray.forEach( + criterion -> joiner.add(toCriterionString(criterion.getField(), criterion.getValue()))); return joiner.length() <= 2 ? "" : joiner.toString(); } - /** - * Gets Node based on Urn, if not exist, creates placeholder node. - */ + /** Gets Node based on Urn, if not exist, creates placeholder node. */ @Nonnull private Statement getOrInsertNode(@Nonnull Urn urn) { final String nodeType = urn.getEntityType(); @@ -721,32 +843,31 @@ public boolean supportsMultiHop() { } /** - * Reverse incoming/outgoing direction check by compare sourceType and relationshipType to LineageSpec. - * for example: - * sourceType: dataset, relationshipType: downstreamOf. - * downstreamOf relationship type and outgoing relationship direction for dataset from LineageSpec, - * is inside upstreamEdges. - * source(dataset) -[downstreamOf]-> dest means upstreamEdge for source(dataset) - * dest -[r_downstreamOf]-> source(dataset), need reverse source and dest - * * - * sourceType: datajob, relationshipType: produces. - * produces relationship type and outgoing relationship direction for datajob from LineageSpec, - * is inside downstreamEdges. - * source(datajob) -[produces]-> dest means downstreamEdge for source(datajob) - * source(dataset) -[r_produces]-> dest, do not need to reverse source and dest + * Reverse incoming/outgoing direction check by compare sourceType and relationshipType to + * LineageSpec. for example: sourceType: dataset, relationshipType: downstreamOf. downstreamOf + * relationship type and outgoing relationship direction for dataset from LineageSpec, is inside + * upstreamEdges. source(dataset) -[downstreamOf]-> dest means upstreamEdge for source(dataset) + * dest -[r_downstreamOf]-> source(dataset), need reverse source and dest * sourceType: datajob, + * relationshipType: produces. produces relationship type and outgoing relationship direction for + * datajob from LineageSpec, is inside downstreamEdges. 
source(datajob) -[produces]-> dest means + * downstreamEdge for source(datajob) source(dataset) -[r_produces]-> dest, do not need to reverse + * source and dest * * @param sourceType Entity type * @param relationshipType Entity relationship type - * */ - private boolean isSourceDestReversed(@Nonnull String sourceType, @Nonnull String relationshipType) { + private boolean isSourceDestReversed( + @Nonnull String sourceType, @Nonnull String relationshipType) { // Get real direction by check INCOMING/OUTGOING direction and RelationshipType LineageRegistry.LineageSpec sourceLineageSpec = getLineageRegistry().getLineageSpec(sourceType); if (sourceLineageSpec != null) { - List<LineageRegistry.EdgeInfo> upstreamCheck = sourceLineageSpec.getUpstreamEdges() - .stream() - .filter(t -> t.getDirection() == RelationshipDirection.OUTGOING && t.getType().equals(relationshipType)) - .collect(Collectors.toList()); + List<LineageRegistry.EdgeInfo> upstreamCheck = + sourceLineageSpec.getUpstreamEdges().stream() + .filter( + t -> + t.getDirection() == RelationshipDirection.OUTGOING + && t.getType().equals(relationshipType)) + .collect(Collectors.toList()); if (!upstreamCheck.isEmpty() || sourceType.equals("schemaField")) { return true; } @@ -754,8 +875,7 @@ private boolean isSourceDestReversed(@Nonnull String sourceType, @Nonnull String return false; } - protected static @Nullable - Urn createFromString(@Nonnull String rawUrn) { + protected static @Nullable Urn createFromString(@Nonnull String rawUrn) { try { return Urn.createFromString(rawUrn); } catch (URISyntaxException e) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java index 6985ceb00afd2..35d75de482007 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java @@ -40,24 +40,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class MostPopularSource implements RecommendationSource { - /** - * Entity Types that should be in scope for this type of recommendation. - */ - private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME - ); + /** Entity Types that should be in scope for this type of recommendation. 
*/ + private static final Set<String> SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -82,11 +81,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to determine whether DataHub usage index exists"); } @@ -95,15 +99,15 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getMostPopular").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) .map(Optional::get) @@ -122,13 +126,15 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for all entity view events query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); source.query(query); // Find the entities with the most views - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - .size(MAX_CONTENT * 2); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) + .size(MAX_CONTENT * 2); source.aggregation(aggregation); source.size(0); @@ -139,12 +145,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) { Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) || 
!RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { + if (EntityUtils.checkIfRemoved(_entityService, entity) + || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { return Optional.empty(); } - return Optional.of(new RecommendationContent().setEntity(entity) - .setValue(entityUrn) - .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); + return Optional.of( + new RecommendationContent() + .setEntity(entity) + .setValue(entityUrn) + .setParams( + new RecommendationParams() + .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java index dc30d4c80abc0..0815ffadd05c1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java @@ -41,24 +41,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class RecentlyEditedSource implements RecommendationSource { - /** - * Entity Types that should be in scope for this type of recommendation. - */ - private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME - ); + /** Entity Types that should be in scope for this type of recommendation. 
*/ + private static final Set<String> SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -83,11 +82,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to check whether DataHub usage index exists"); } @@ -96,18 +100,19 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyEdited").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) - .map(Optional::get).limit(MAX_CONTENT) + .map(Optional::get) + .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { log.error("Search query to get most recently edited entities failed", e); @@ -122,16 +127,19 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for the entity action events query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_ACTION_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_ACTION_EVENT.getType())); source.query(query); // Find the entity with the largest last viewed timestamp String lastViewed = "last_viewed"; - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - .size(MAX_CONTENT) - .order(BucketOrder.aggregation(lastViewed, false)) - .subAggregation(AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + 
.field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) + .size(MAX_CONTENT) + .order(BucketOrder.aggregation(lastViewed, false)) + .subAggregation( + AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); source.aggregation(aggregation); source.size(0); @@ -142,13 +150,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) { Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { + if (EntityUtils.checkIfRemoved(_entityService, entity) + || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { return Optional.empty(); } - return Optional.of(new RecommendationContent() - .setEntity(entity) - .setValue(entityUrn) - .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); + return Optional.of( + new RecommendationContent() + .setEntity(entity) + .setValue(entityUrn) + .setParams( + new RecommendationParams() + .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java index 0836c569ed5d1..47ffebee2e947 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java @@ -41,24 +41,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class RecentlyViewedSource implements RecommendationSource { - /** - * Entity Types that should be in scope for this type of recommendation. - */ - private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME - ); + /** Entity Types that should be in scope for this type of recommendation. 
*/ + private static final Set<String> SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -83,11 +82,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to check whether DataHub usage index exists"); } @@ -96,18 +100,19 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyViewed").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) - .map(Optional::get).limit(MAX_CONTENT) + .map(Optional::get) + .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { log.error("Search query to get most recently viewed entities failed", e); @@ -122,18 +127,23 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for the entity view events of the user requesting recommendation query.must( - QueryBuilders.termQuery(ESUtils.toKeywordField(DataHubUsageEventConstants.ACTOR_URN, false), userUrn.toString())); + QueryBuilders.termQuery( + ESUtils.toKeywordField(DataHubUsageEventConstants.ACTOR_URN, false), + userUrn.toString())); query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); source.query(query); // Find the entity with the largest last viewed timestamp String lastViewed = "last_viewed"; - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - 
.size(MAX_CONTENT) - .order(BucketOrder.aggregation(lastViewed, false)) - .subAggregation(AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) + .size(MAX_CONTENT) + .order(BucketOrder.aggregation(lastViewed, false)) + .subAggregation( + AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); source.aggregation(aggregation); source.size(0); @@ -144,12 +154,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) { Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { + if (EntityUtils.checkIfRemoved(_entityService, entity) + || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { return Optional.empty(); } - return Optional.of(new RecommendationContent().setEntity(entity) - .setValue(entityUrn) - .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); + return Optional.of( + new RecommendationContent() + .setEntity(entity) + .setValue(entityUrn) + .setParams( + new RecommendationParams() + .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java index 75375df77ed6f..b862de320db36 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java @@ -6,7 +6,6 @@ import java.time.temporal.TemporalUnit; import lombok.Data; - @Data public class EntityLineageResultCacheKey { private final Urn sourceUrn; @@ -15,17 +14,27 @@ public class EntityLineageResultCacheKey { private final Long endTimeMillis; private final Integer maxHops; - public EntityLineageResultCacheKey(Urn sourceUrn, LineageDirection direction, Long startTimeMillis, - Long endTimeMillis, Integer maxHops, TemporalUnit resolution) { + public EntityLineageResultCacheKey( + Urn sourceUrn, + LineageDirection direction, + Long startTimeMillis, + Long endTimeMillis, + Integer maxHops, + TemporalUnit resolution) { this.sourceUrn = sourceUrn; this.direction = direction; this.maxHops = maxHops; long endOffset = resolution.getDuration().getSeconds() * 1000; this.startTimeMillis = - startTimeMillis == null ? null : Instant.ofEpochMilli(startTimeMillis).truncatedTo(resolution).toEpochMilli(); - this.endTimeMillis = endTimeMillis == null ? null - : Instant.ofEpochMilli(endTimeMillis + endOffset).truncatedTo(resolution).toEpochMilli(); - + startTimeMillis == null + ? null + : Instant.ofEpochMilli(startTimeMillis).truncatedTo(resolution).toEpochMilli(); + this.endTimeMillis = + endTimeMillis == null + ? 
null + : Instant.ofEpochMilli(endTimeMillis + endOffset) + .truncatedTo(resolution) + .toEpochMilli(); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java index 9b8e9bce7e670..f6358e4aeb207 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java @@ -1,6 +1,8 @@ package com.linkedin.metadata.search; -import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -10,6 +12,7 @@ import com.linkedin.data.template.LongMap; import com.linkedin.data.template.StringArray; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration; import com.linkedin.metadata.graph.EntityLineageResult; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.LineageDirection; @@ -27,7 +30,6 @@ import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.search.utils.SearchUtils; import io.opentelemetry.extension.annotations.WithSpan; - import java.net.URISyntaxException; import java.time.temporal.ChronoUnit; import java.util.Collections; @@ -44,21 +46,17 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections.CollectionUtils; import org.springframework.cache.Cache; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; - - @RequiredArgsConstructor @Slf4j public class LineageSearchService { - private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = new SearchFlags() + private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = + new SearchFlags() .setFulltext(false) .setMaxAggValues(20) .setSkipCache(false) @@ -66,8 +64,7 @@ public class LineageSearchService { .setSkipHighlighting(true); private final SearchService _searchService; private final GraphService _graphService; - @Nullable - private final Cache cache; + @Nullable private final Cache cache; private final boolean cacheEnabled; private final SearchLineageCacheConfiguration cacheConfiguration; @@ -75,20 +72,27 @@ public class LineageSearchService { private static final String DEGREE_FILTER = "degree"; private static final String DEGREE_FILTER_INPUT = "degree.keyword"; - private static final AggregationMetadata DEGREE_FILTER_GROUP = new AggregationMetadata().setName(DEGREE_FILTER) - .setDisplayName("Degree of Dependencies") - .setAggregations(new LongMap()) - .setFilterValues(new FilterValueArray(ImmutableList.of(new FilterValue().setValue("1").setFacetCount(0), - new FilterValue().setValue("2").setFacetCount(0), new FilterValue().setValue("3+").setFacetCount(0)))); + private static final AggregationMetadata DEGREE_FILTER_GROUP = + new AggregationMetadata() + .setName(DEGREE_FILTER) + .setDisplayName("Degree of Dependencies") + .setAggregations(new LongMap()) + .setFilterValues( + new FilterValueArray( + ImmutableList.of( + new 
FilterValue().setValue("1").setFacetCount(0), + new FilterValue().setValue("2").setFacetCount(0), + new FilterValue().setValue("3+").setFacetCount(0)))); private static final int MAX_RELATIONSHIPS = 1000000; private static final int MAX_TERMS = 50000; - private static final Set<String> PLATFORM_ENTITY_TYPES = ImmutableSet.of( - DATASET_ENTITY_NAME, - CHART_ENTITY_NAME, - DASHBOARD_ENTITY_NAME, - DATA_FLOW_ENTITY_NAME, - DATA_JOB_ENTITY_NAME); + private static final Set<String> PLATFORM_ENTITY_TYPES = + ImmutableSet.of( + DATASET_ENTITY_NAME, + CHART_ENTITY_NAME, + DASHBOARD_ENTITY_NAME, + DATA_FLOW_ENTITY_NAME, + DATA_JOB_ENTITY_NAME); /** * Gets a list of documents that match given search request that is related to the input entity @@ -98,20 +102,32 @@ public class LineageSearchService { * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text * @param maxHops the maximum number of hops away to search for. If null, defaults to 1000 - * @param inputFilters the request map with fields and values as filters to be applied to search hits + * @param inputFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return - * @return a {@link LineageSearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link LineageSearchResult} that contains a list of matched documents and related + * search result metadata */ @Nonnull @WithSpan - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags) { - - final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags) { + + final SearchFlags finalFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); long startTime = System.nanoTime(); log.debug("Cache enabled {}, Input :{}:", cacheEnabled, input); @@ -123,8 +139,9 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull } // Cache multihop result for faster performance - final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey(sourceUrn, direction, startTimeMillis, - endTimeMillis, maxHops, ChronoUnit.DAYS); + final EntityLineageResultCacheKey cacheKey = + new EntityLineageResultCacheKey( + sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS); CachedEntityLineageResult cachedLineageResult = null; if (cacheEnabled) { @@ -139,12 +156,12 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull FreshnessStats freshnessStats = new FreshnessStats().setCached(Boolean.FALSE); if (cachedLineageResult == null || 
finalFlags.isSkipCache()) { lineageResult = - _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, - endTimeMillis); + _graphService.getLineage( + sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, endTimeMillis); if (cacheEnabled) { try { - cache.put(cacheKey, - new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); + cache.put( + cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); } catch (Exception e) { log.warn("Failed to add cacheKey {}", cacheKey, e); } @@ -156,22 +173,36 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull systemFreshness.put("LineageGraphCache", cachedLineageResult.getTimestamp()); freshnessStats.setSystemFreshness(systemFreshness); // set up cache refill if needed - if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis()) { + if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() + > cacheConfiguration.getTTLMillis()) { log.info("Cached lineage entry for: {} is older than one day. Will refill.", sourceUrn); Integer finalMaxHops = maxHops; - this.cacheRefillExecutor.submit(() -> { - log.debug("Cache refill started."); - CachedEntityLineageResult reFetchLineageResult = cache.get(cacheKey, CachedEntityLineageResult.class); - if (reFetchLineageResult == null || System.currentTimeMillis() - reFetchLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis() - ) { - // we have to refetch - EntityLineageResult result = _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, finalMaxHops, startTimeMillis, endTimeMillis); - cache.put(cacheKey, result); - log.debug("Refilled Cached lineage entry for: {}.", sourceUrn); - } else { - log.debug("Cache refill not needed. {}", System.currentTimeMillis() - reFetchLineageResult.getTimestamp()); - } - }); + this.cacheRefillExecutor.submit( + () -> { + log.debug("Cache refill started."); + CachedEntityLineageResult reFetchLineageResult = + cache.get(cacheKey, CachedEntityLineageResult.class); + if (reFetchLineageResult == null + || System.currentTimeMillis() - reFetchLineageResult.getTimestamp() + > cacheConfiguration.getTTLMillis()) { + // we have to refetch + EntityLineageResult result = + _graphService.getLineage( + sourceUrn, + direction, + 0, + MAX_RELATIONSHIPS, + finalMaxHops, + startTimeMillis, + endTimeMillis); + cache.put(cacheKey, result); + log.debug("Refilled Cached lineage entry for: {}.", sourceUrn); + } else { + log.debug( + "Cache refill not needed. 
{}", + System.currentTimeMillis() - reFetchLineageResult.getTimestamp()); + } + }); } } @@ -179,68 +210,92 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult); lineageResult.setRelationships(updatedRelationships); - // Filter hopped result based on the set of entities to return and inputFilters before sending to search + // Filter hopped result based on the set of entities to return and inputFilters before sending + // to search List<LineageRelationship> lineageRelationships = filterRelationships(lineageResult, new HashSet<>(entities), inputFilters); - String lineageGraphInfo = String.format("Lineage Graph = time(ms):%s size:%s", + String lineageGraphInfo = + String.format( + "Lineage Graph = time(ms):%s size:%s", (System.nanoTime() - startTime) / (1000.0 * 1000.0), lineageRelationships.size()); startTime = System.nanoTime(); long numEntities = 0; String codePath = null; try { Filter reducedFilters = - SearchUtils.removeCriteria(inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); + SearchUtils.removeCriteria( + inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); if (canDoLightning(lineageRelationships, input, reducedFilters, sortCriterion)) { codePath = "lightning"; // use lightning approach to return lineage search results - LineageSearchResult lineageSearchResult = getLightningSearchResult(lineageRelationships, - reducedFilters, from, size, new HashSet<>(entities)); + LineageSearchResult lineageSearchResult = + getLightningSearchResult( + lineageRelationships, reducedFilters, from, size, new HashSet<>(entities)); if (!lineageSearchResult.getEntities().isEmpty()) { - log.debug("Lightning Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString()); + log.debug( + "Lightning Lineage entity result: {}", + lineageSearchResult.getEntities().get(0).toString()); } numEntities = lineageSearchResult.getNumEntities(); return lineageSearchResult; } else { codePath = "tortoise"; - LineageSearchResult lineageSearchResult = getSearchResultInBatches(lineageRelationships, input, - reducedFilters, sortCriterion, from, size, finalFlags); + LineageSearchResult lineageSearchResult = + getSearchResultInBatches( + lineageRelationships, input, reducedFilters, sortCriterion, from, size, finalFlags); if (!lineageSearchResult.getEntities().isEmpty()) { - log.debug("Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString()); + log.debug( + "Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString()); } numEntities = lineageSearchResult.getNumEntities(); return lineageSearchResult; } } finally { - log.info("{}; Lineage Search({}) = time(ms):{} size:{}", lineageGraphInfo, codePath, - (System.nanoTime() - startTime) / (1000.0 * 1000.0), numEntities); + log.info( + "{}; Lineage Search({}) = time(ms):{} size:{}", + lineageGraphInfo, + codePath, + (System.nanoTime() - startTime) / (1000.0 * 1000.0), + numEntities); } } - @VisibleForTesting - boolean canDoLightning(List<LineageRelationship> lineageRelationships, String input, Filter inputFilters, + boolean canDoLightning( + List<LineageRelationship> lineageRelationships, + String input, + Filter inputFilters, SortCriterion sortCriterion) { - boolean simpleFilters = inputFilters == null || inputFilters.getOr() == null - || inputFilters.getOr() - .stream() - .allMatch(criterion -> criterion.getAnd() - .stream() - .allMatch(criterion1 -> 
"platform".equals(criterion1.getField()) || "origin".equals(criterion1.getField()) - ) - ); + boolean simpleFilters = + inputFilters == null + || inputFilters.getOr() == null + || inputFilters.getOr().stream() + .allMatch( + criterion -> + criterion.getAnd().stream() + .allMatch( + criterion1 -> + "platform".equals(criterion1.getField()) + || "origin".equals(criterion1.getField()))); return (lineageRelationships.size() > cacheConfiguration.getLightningThreshold()) - && input.equals("*") && simpleFilters && sortCriterion == null; + && input.equals("*") + && simpleFilters + && sortCriterion == null; } @VisibleForTesting - LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRelationships, - Filter inputFilters, int from, int size, Set<String> entityNames) { + LineageSearchResult getLightningSearchResult( + List<LineageRelationship> lineageRelationships, + Filter inputFilters, + int from, + int size, + Set<String> entityNames) { // Contruct result objects - LineageSearchResult finalResult = new LineageSearchResult() - .setMetadata(new SearchResultMetadata()); + LineageSearchResult finalResult = + new LineageSearchResult().setMetadata(new SearchResultMetadata()); LineageSearchEntityArray lineageSearchEntityArray = new LineageSearchEntityArray(); AggregationMetadata entityTypeAgg = constructAggMetadata("Type", "entity"); AggregationMetadata platformTypeAgg = constructAggMetadata("Platform", "platform"); @@ -258,7 +313,7 @@ LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRe // environment int start = 0; int numElements = 0; - for (LineageRelationship relnship: lineageRelationships) { + for (LineageRelationship relnship : lineageRelationships) { Urn entityUrn = relnship.getEntity(); String entityType = entityUrn.getEntityType(); // Apply platform, entity types, and environment filters @@ -274,16 +329,27 @@ LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRe Set<String> platformCriteriaValues = null; Set<String> originCriteriaValues = null; if (inputFilters != null && inputFilters.getOr() != null) { - platformCriteriaValues = inputFilters.getOr().stream().map(ConjunctiveCriterion::getAnd).flatMap( - CriterionArray::stream).filter(criterion -> "platform".equals(criterion.getField())).map(Criterion::getValue) - .collect(Collectors.toSet()); - originCriteriaValues = inputFilters.getOr().stream().map(ConjunctiveCriterion::getAnd).flatMap( - CriterionArray::stream).filter(criterion -> "origin".equals(criterion.getField())).map(Criterion::getValue) - .collect(Collectors.toSet()); + platformCriteriaValues = + inputFilters.getOr().stream() + .map(ConjunctiveCriterion::getAnd) + .flatMap(CriterionArray::stream) + .filter(criterion -> "platform".equals(criterion.getField())) + .map(Criterion::getValue) + .collect(Collectors.toSet()); + originCriteriaValues = + inputFilters.getOr().stream() + .map(ConjunctiveCriterion::getAnd) + .flatMap(CriterionArray::stream) + .filter(criterion -> "origin".equals(criterion.getField())) + .map(Criterion::getValue) + .collect(Collectors.toSet()); } - boolean isNotFiltered = (entityNames.isEmpty() || entityNames.contains(entityUrn.getEntityType())) - && (CollectionUtils.isEmpty(platformCriteriaValues) || (platform != null && platformCriteriaValues.contains(platform))) - && (CollectionUtils.isEmpty(originCriteriaValues) || (environment != null && originCriteriaValues.contains(environment))); + boolean isNotFiltered = + (entityNames.isEmpty() || entityNames.contains(entityUrn.getEntityType())) 
+ && (CollectionUtils.isEmpty(platformCriteriaValues) + || (platform != null && platformCriteriaValues.contains(platform))) + && (CollectionUtils.isEmpty(originCriteriaValues) + || (environment != null && originCriteriaValues.contains(environment))); if (isNotFiltered) { start++; @@ -297,53 +363,59 @@ LineageSearchResult getLightningSearchResult(List<LineageRelationship> lineageRe } // entityType - entityTypeAggregations.compute(entityType, - (key, value) -> value == null ? 1L : ++value); + entityTypeAggregations.compute(entityType, (key, value) -> value == null ? 1L : ++value); // platform if (platform != null) { - platformTypeAggregations.compute(platform, - (key, value) -> value == null ? 1L : ++value); + platformTypeAggregations.compute(platform, (key, value) -> value == null ? 1L : ++value); } // environment if (environment != null) { - environmentAggregations.compute(environment, - (key, value) -> value == null ? 1L : ++value); + environmentAggregations.compute( + environment, (key, value) -> value == null ? 1L : ++value); } } } aggregationMetadataArray.add(DEGREE_FILTER_GROUP); if (platformTypeAggregations.keySet().size() > 0) { - for (Map.Entry<String, Long> platformCount: platformTypeAggregations.entrySet()) { + for (Map.Entry<String, Long> platformCount : platformTypeAggregations.entrySet()) { try { - platformTypeAgg.getFilterValues().add(new FilterValue() - .setValue(platformCount.getKey()) - .setFacetCount(platformCount.getValue()) - .setEntity(Urn.createFromString(platformCount.getKey())) - ); + platformTypeAgg + .getFilterValues() + .add( + new FilterValue() + .setValue(platformCount.getKey()) + .setFacetCount(platformCount.getValue()) + .setEntity(Urn.createFromString(platformCount.getKey()))); platformTypeAgg.getAggregations().put(platformCount.getKey(), platformCount.getValue()); } catch (URISyntaxException e) { log.warn("Unexpected exception: {}", e.getMessage()); } } - aggregationMetadataArray.add(platformTypeAgg); + aggregationMetadataArray.add(platformTypeAgg); } if (entityTypeAggregations.keySet().size() > 0) { - for (Map.Entry<String, Long> entityCount: entityTypeAggregations.entrySet()) { - entityTypeAgg.getFilterValues().add(new FilterValue() - .setValue(entityCount.getKey()) - .setFacetCount(entityCount.getValue())); + for (Map.Entry<String, Long> entityCount : entityTypeAggregations.entrySet()) { + entityTypeAgg + .getFilterValues() + .add( + new FilterValue() + .setValue(entityCount.getKey()) + .setFacetCount(entityCount.getValue())); entityTypeAgg.getAggregations().put(entityCount.getKey(), entityCount.getValue()); } aggregationMetadataArray.add(entityTypeAgg); } if (environmentAggregations.keySet().size() > 0) { - for (Map.Entry<String, Long> entityCount: environmentAggregations.entrySet()) { - environmentAgg.getFilterValues().add(new FilterValue() - .setValue(entityCount.getKey()) - .setFacetCount(entityCount.getValue())); + for (Map.Entry<String, Long> entityCount : environmentAggregations.entrySet()) { + environmentAgg + .getFilterValues() + .add( + new FilterValue() + .setValue(entityCount.getKey()) + .setFacetCount(entityCount.getValue())); environmentAgg.getAggregations().put(entityCount.getKey(), entityCount.getValue()); } aggregationMetadataArray.add(environmentAgg); @@ -374,18 +446,24 @@ private String getPlatform(String entityType, Urn entityUrn) { return platform; } - // Necessary so we don't filter out schemaField entities and so that we search to get the parent reference entity - private LineageRelationshipArray 
convertSchemaFieldRelationships(EntityLineageResult lineageResult) { - return lineageResult.getRelationships().stream().map(relationship -> { - if (relationship.getEntity().getEntityType().equals("schemaField")) { - Urn entity = getSchemaFieldReferenceUrn(relationship.getEntity()); - relationship.setEntity(entity); - } - return relationship; - }).collect(Collectors.toCollection(LineageRelationshipArray::new)); + // Necessary so we don't filter out schemaField entities and so that we search to get the parent + // reference entity + private LineageRelationshipArray convertSchemaFieldRelationships( + EntityLineageResult lineageResult) { + return lineageResult.getRelationships().stream() + .map( + relationship -> { + if (relationship.getEntity().getEntityType().equals("schemaField")) { + Urn entity = getSchemaFieldReferenceUrn(relationship.getEntity()); + relationship.setEntity(entity); + } + return relationship; + }) + .collect(Collectors.toCollection(LineageRelationshipArray::new)); } - private Map<Urn, LineageRelationship> generateUrnToRelationshipMap(List<LineageRelationship> lineageRelationships) { + private Map<Urn, LineageRelationship> generateUrnToRelationshipMap( + List<LineageRelationship> lineageRelationships) { Map<Urn, LineageRelationship> urnToRelationship = new HashMap<>(); for (LineageRelationship relationship : lineageRelationships) { LineageRelationship existingRelationship = urnToRelationship.get(relationship.getEntity()); @@ -401,32 +479,49 @@ private Map<Urn, LineageRelationship> generateUrnToRelationshipMap(List<LineageR } // Search service can only take up to 50K term filter, so query search service in batches - private LineageSearchResult getSearchResultInBatches(List<LineageRelationship> lineageRelationships, - @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, int from, int size, + private LineageSearchResult getSearchResultInBatches( + List<LineageRelationship> lineageRelationships, + @Nonnull String input, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, @Nonnull SearchFlags searchFlags) { - final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + final SearchFlags finalFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); LineageSearchResult finalResult = - new LineageSearchResult().setEntities(new LineageSearchEntityArray(Collections.emptyList())) + new LineageSearchResult() + .setEntities(new LineageSearchEntityArray(Collections.emptyList())) .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) .setFrom(from) .setPageSize(size) .setNumEntities(0); - List<List<LineageRelationship>> batchedRelationships = Lists.partition(lineageRelationships, MAX_TERMS); + List<List<LineageRelationship>> batchedRelationships = + Lists.partition(lineageRelationships, MAX_TERMS); int queryFrom = from; int querySize = size; for (List<LineageRelationship> batch : batchedRelationships) { - List<String> entitiesToQuery = batch.stream() - .map(relationship -> relationship.getEntity().getEntityType()) - .distinct() - .collect(Collectors.toList()); + List<String> entitiesToQuery = + batch.stream() + .map(relationship -> relationship.getEntity().getEntityType()) + .distinct() + .collect(Collectors.toList()); Map<Urn, LineageRelationship> urnToRelationship = generateUrnToRelationshipMap(batch); Filter finalFilter = buildFilter(urnToRelationship.keySet(), inputFilters); - 
LineageSearchResult resultForBatch = buildLineageSearchResult( - _searchService.searchAcrossEntities(entitiesToQuery, input, finalFilter, sortCriterion, queryFrom, querySize, - finalFlags), urnToRelationship); + LineageSearchResult resultForBatch = + buildLineageSearchResult( + _searchService.searchAcrossEntities( + entitiesToQuery, + input, + finalFilter, + sortCriterion, + queryFrom, + querySize, + finalFlags), + urnToRelationship); queryFrom = Math.max(0, from - resultForBatch.getNumEntities()); querySize = Math.max(0, size - resultForBatch.getEntities().size()); finalResult = merge(finalResult, resultForBatch); @@ -442,34 +537,44 @@ public static LineageSearchResult merge(LineageSearchResult one, LineageSearchRe finalResult.getEntities().addAll(two.getEntities()); finalResult.setNumEntities(one.getNumEntities() + two.getNumEntities()); - Map<String, AggregationMetadata> aggregations = one.getMetadata() + Map<String, AggregationMetadata> aggregations = + one.getMetadata().getAggregations().stream() + .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); + two.getMetadata() .getAggregations() - .stream() - .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); - two.getMetadata().getAggregations().forEach(metadata -> { - if (aggregations.containsKey(metadata.getName())) { - aggregations.put(metadata.getName(), SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); - } else { - aggregations.put(metadata.getName(), metadata); - } - }); - finalResult.getMetadata().setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); + .forEach( + metadata -> { + if (aggregations.containsKey(metadata.getName())) { + aggregations.put( + metadata.getName(), + SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); + } else { + aggregations.put(metadata.getName(), metadata); + } + }); + finalResult + .getMetadata() + .setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); return finalResult; } private Predicate<Integer> convertFilterToPredicate(List<String> degreeFilterValues) { - return degreeFilterValues.stream().map(value -> { - switch (value) { - case "1": - return (Predicate<Integer>) (Integer numHops) -> (numHops == 1); - case "2": - return (Predicate<Integer>) (Integer numHops) -> (numHops == 2); - case "3+": - return (Predicate<Integer>) (Integer numHops) -> (numHops > 2); - default: - throw new IllegalArgumentException(String.format("%s is not a valid filter value for degree filters", value)); - } - }).reduce(x -> false, Predicate::or); + return degreeFilterValues.stream() + .map( + value -> { + switch (value) { + case "1": + return (Predicate<Integer>) (Integer numHops) -> (numHops == 1); + case "2": + return (Predicate<Integer>) (Integer numHops) -> (numHops == 2); + case "3+": + return (Predicate<Integer>) (Integer numHops) -> (numHops > 2); + default: + throw new IllegalArgumentException( + String.format("%s is not a valid filter value for degree filters", value)); + } + }) + .reduce(x -> false, Predicate::or); } private Urn getSchemaFieldReferenceUrn(Urn urn) { @@ -484,24 +589,29 @@ private Urn getSchemaFieldReferenceUrn(Urn urn) { return urn; } - private List<LineageRelationship> filterRelationships(@Nonnull EntityLineageResult lineageResult, - @Nonnull Set<String> entities, @Nullable Filter inputFilters) { - Stream<LineageRelationship> relationshipsFilteredByEntities = lineageResult.getRelationships().stream(); + private List<LineageRelationship> 
filterRelationships( + @Nonnull EntityLineageResult lineageResult, + @Nonnull Set<String> entities, + @Nullable Filter inputFilters) { + Stream<LineageRelationship> relationshipsFilteredByEntities = + lineageResult.getRelationships().stream(); if (!entities.isEmpty()) { - relationshipsFilteredByEntities = relationshipsFilteredByEntities.filter( - relationship -> entities.contains(relationship.getEntity().getEntityType())); + relationshipsFilteredByEntities = + relationshipsFilteredByEntities.filter( + relationship -> entities.contains(relationship.getEntity().getEntityType())); } if (inputFilters != null && !CollectionUtils.isEmpty(inputFilters.getOr())) { ConjunctiveCriterion conjunctiveCriterion = inputFilters.getOr().get(0); if (conjunctiveCriterion.hasAnd()) { - List<String> degreeFilter = conjunctiveCriterion.getAnd() - .stream() - .filter(criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)) - .flatMap(c -> c.getValues().stream()) - .collect(Collectors.toList()); + List<String> degreeFilter = + conjunctiveCriterion.getAnd().stream() + .filter(criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)) + .flatMap(c -> c.getValues().stream()) + .collect(Collectors.toList()); if (!degreeFilter.isEmpty()) { Predicate<Integer> degreePredicate = convertFilterToPredicate(degreeFilter); - return relationshipsFilteredByEntities.filter(relationship -> degreePredicate.test(relationship.getDegree())) + return relationshipsFilteredByEntities + .filter(relationship -> degreePredicate.test(relationship.getDegree())) .collect(Collectors.toList()); } } @@ -510,9 +620,12 @@ private List<LineageRelationship> filterRelationships(@Nonnull EntityLineageResu } private Filter buildFilter(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) { - Criterion urnMatchCriterion = new Criterion().setField("urn") - .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + Criterion urnMatchCriterion = + new Criterion() + .setField("urn") + .setValue("") + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); if (inputFilters == null) { return QueryUtils.newFilter(urnMatchCriterion); } @@ -527,21 +640,27 @@ private Filter buildFilter(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters return QueryUtils.newFilter(urnMatchCriterion); } - private LineageSearchResult buildLineageSearchResult(@Nonnull SearchResult searchResult, - Map<Urn, LineageRelationship> urnToRelationship) { - AggregationMetadataArray aggregations = new AggregationMetadataArray(searchResult.getMetadata().getAggregations()); - return new LineageSearchResult().setEntities(new LineageSearchEntityArray(searchResult.getEntities() - .stream() - .map(searchEntity -> buildLineageSearchEntity(searchEntity, urnToRelationship.get(searchEntity.getEntity()))) - .collect(Collectors.toList()))) + private LineageSearchResult buildLineageSearchResult( + @Nonnull SearchResult searchResult, Map<Urn, LineageRelationship> urnToRelationship) { + AggregationMetadataArray aggregations = + new AggregationMetadataArray(searchResult.getMetadata().getAggregations()); + return new LineageSearchResult() + .setEntities( + new LineageSearchEntityArray( + searchResult.getEntities().stream() + .map( + searchEntity -> + buildLineageSearchEntity( + searchEntity, urnToRelationship.get(searchEntity.getEntity()))) + .collect(Collectors.toList()))) .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) .setFrom(searchResult.getFrom()) 
.setPageSize(searchResult.getPageSize()) .setNumEntities(searchResult.getNumEntities()); } - private LineageSearchEntity buildLineageSearchEntity(@Nonnull SearchEntity searchEntity, - @Nullable LineageRelationship lineageRelationship) { + private LineageSearchEntity buildLineageSearchEntity( + @Nonnull SearchEntity searchEntity, @Nullable LineageRelationship lineageRelationship) { LineageSearchEntity entity = new LineageSearchEntity(searchEntity.data()); if (lineageRelationship != null) { entity.setPaths(lineageRelationship.getPaths()); @@ -558,34 +677,50 @@ private LineageSearchEntity buildLineageSearchEntity(@Nonnull SearchEntity searc * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text * @param maxHops the maximum number of hops away to search for. If null, defaults to 1000 - * @param inputFilters the request map with fields and values as filters to be applied to search hits + * @param inputFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return - * @return a {@link LineageSearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link LineageSearchResult} that contains a list of matched documents and related + * search result metadata */ @Nonnull @WithSpan - public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List<String> entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int size, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, @Nonnull SearchFlags searchFlags) { + public LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List<String> entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int size, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nonnull SearchFlags searchFlags) { // Cache multihop result for faster performance - final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey(sourceUrn, direction, startTimeMillis, - endTimeMillis, maxHops, ChronoUnit.DAYS); - CachedEntityLineageResult cachedLineageResult = cacheEnabled - ? cache.get(cacheKey, CachedEntityLineageResult.class) : null; + final EntityLineageResultCacheKey cacheKey = + new EntityLineageResultCacheKey( + sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS); + CachedEntityLineageResult cachedLineageResult = + cacheEnabled ? cache.get(cacheKey, CachedEntityLineageResult.class) : null; EntityLineageResult lineageResult; if (cachedLineageResult == null) { maxHops = maxHops != null ? 
maxHops : 1000; - lineageResult = _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, - startTimeMillis, endTimeMillis); + lineageResult = + _graphService.getLineage( + sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, endTimeMillis); if (cacheEnabled) { - cache.put(cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); + cache.put( + cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); } } else { lineageResult = cachedLineageResult.getEntityLineageResult(); - if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis()) { + if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() + > cacheConfiguration.getTTLMillis()) { log.warn("Cached lineage entry for: {} is older than one day.", sourceUrn); } } @@ -594,39 +729,67 @@ public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult); lineageResult.setRelationships(updatedRelationships); - // Filter hopped result based on the set of entities to return and inputFilters before sending to search + // Filter hopped result based on the set of entities to return and inputFilters before sending + // to search List<LineageRelationship> lineageRelationships = filterRelationships(lineageResult, new HashSet<>(entities), inputFilters); Filter reducedFilters = - SearchUtils.removeCriteria(inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); - return getScrollResultInBatches(lineageRelationships, input != null ? input : "*", reducedFilters, sortCriterion, - scrollId, keepAlive, size, searchFlags); + SearchUtils.removeCriteria( + inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); + return getScrollResultInBatches( + lineageRelationships, + input != null ? 
input : "*", + reducedFilters, + sortCriterion, + scrollId, + keepAlive, + size, + searchFlags); } // Search service can only take up to 50K term filter, so query search service in batches - private LineageScrollResult getScrollResultInBatches(List<LineageRelationship> lineageRelationships, - @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, - @Nonnull String keepAlive, int size, @Nonnull SearchFlags searchFlags) { - final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + private LineageScrollResult getScrollResultInBatches( + List<LineageRelationship> lineageRelationships, + @Nonnull String input, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int size, + @Nonnull SearchFlags searchFlags) { + final SearchFlags finalFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); LineageScrollResult finalResult = - new LineageScrollResult().setEntities(new LineageSearchEntityArray(Collections.emptyList())) + new LineageScrollResult() + .setEntities(new LineageSearchEntityArray(Collections.emptyList())) .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) .setPageSize(size) .setNumEntities(0); - List<List<LineageRelationship>> batchedRelationships = Lists.partition(lineageRelationships, MAX_TERMS); + List<List<LineageRelationship>> batchedRelationships = + Lists.partition(lineageRelationships, MAX_TERMS); int querySize = size; for (List<LineageRelationship> batch : batchedRelationships) { - List<String> entitiesToQuery = batch.stream() - .map(relationship -> relationship.getEntity().getEntityType()) - .distinct() - .collect(Collectors.toList()); + List<String> entitiesToQuery = + batch.stream() + .map(relationship -> relationship.getEntity().getEntityType()) + .distinct() + .collect(Collectors.toList()); Map<Urn, LineageRelationship> urnToRelationship = generateUrnToRelationshipMap(batch); Filter finalFilter = buildFilter(urnToRelationship.keySet(), inputFilters); - LineageScrollResult resultForBatch = buildLineageScrollResult( - _searchService.scrollAcrossEntities(entitiesToQuery, input, finalFilter, sortCriterion, scrollId, keepAlive, querySize, - finalFlags), urnToRelationship); + LineageScrollResult resultForBatch = + buildLineageScrollResult( + _searchService.scrollAcrossEntities( + entitiesToQuery, + input, + finalFilter, + sortCriterion, + scrollId, + keepAlive, + querySize, + finalFlags), + urnToRelationship); querySize = Math.max(0, size - resultForBatch.getEntities().size()); finalResult = mergeScrollResult(finalResult, resultForBatch); } @@ -635,16 +798,23 @@ private LineageScrollResult getScrollResultInBatches(List<LineageRelationship> l return finalResult.setPageSize(size); } - private LineageScrollResult buildLineageScrollResult(@Nonnull ScrollResult scrollResult, - Map<Urn, LineageRelationship> urnToRelationship) { - AggregationMetadataArray aggregations = new AggregationMetadataArray(scrollResult.getMetadata().getAggregations()); - LineageScrollResult lineageScrollResult = new LineageScrollResult().setEntities(new LineageSearchEntityArray(scrollResult.getEntities() - .stream() - .map(searchEntity -> buildLineageSearchEntity(searchEntity, urnToRelationship.get(searchEntity.getEntity()))) - .collect(Collectors.toList()))) - .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) - 
.setPageSize(scrollResult.getPageSize()) - .setNumEntities(scrollResult.getNumEntities()); + private LineageScrollResult buildLineageScrollResult( + @Nonnull ScrollResult scrollResult, Map<Urn, LineageRelationship> urnToRelationship) { + AggregationMetadataArray aggregations = + new AggregationMetadataArray(scrollResult.getMetadata().getAggregations()); + LineageScrollResult lineageScrollResult = + new LineageScrollResult() + .setEntities( + new LineageSearchEntityArray( + scrollResult.getEntities().stream() + .map( + searchEntity -> + buildLineageSearchEntity( + searchEntity, urnToRelationship.get(searchEntity.getEntity()))) + .collect(Collectors.toList()))) + .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) + .setPageSize(scrollResult.getPageSize()) + .setNumEntities(scrollResult.getNumEntities()); if (scrollResult.getScrollId() != null) { lineageScrollResult.setScrollId(scrollResult.getScrollId()); @@ -653,23 +823,30 @@ private LineageScrollResult buildLineageScrollResult(@Nonnull ScrollResult scrol } @SneakyThrows - public static LineageScrollResult mergeScrollResult(LineageScrollResult one, LineageScrollResult two) { + public static LineageScrollResult mergeScrollResult( + LineageScrollResult one, LineageScrollResult two) { LineageScrollResult finalResult = one.clone(); finalResult.getEntities().addAll(two.getEntities()); finalResult.setNumEntities(one.getNumEntities() + two.getNumEntities()); - Map<String, AggregationMetadata> aggregations = one.getMetadata() + Map<String, AggregationMetadata> aggregations = + one.getMetadata().getAggregations().stream() + .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); + two.getMetadata() .getAggregations() - .stream() - .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); - two.getMetadata().getAggregations().forEach(metadata -> { - if (aggregations.containsKey(metadata.getName())) { - aggregations.put(metadata.getName(), SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); - } else { - aggregations.put(metadata.getName(), metadata); - } - }); - finalResult.getMetadata().setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); + .forEach( + metadata -> { + if (aggregations.containsKey(metadata.getName())) { + aggregations.put( + metadata.getName(), + SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); + } else { + aggregations.put(metadata.getName(), metadata); + } + }); + finalResult + .getMetadata() + .setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); if (two.getScrollId() != null) { finalResult.setScrollId(two.getScrollId()); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java index c99e4a94feb29..3bcc163613c5e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.codahale.metrics.Timer; import com.linkedin.data.template.LongMap; import com.linkedin.metadata.query.SearchFlags; @@ -21,9 +23,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class SearchService { private final CachingEntitySearchService 
_cachingEntitySearchService; @@ -41,36 +40,52 @@ public SearchService( public Map<String, Long> docCountPerEntity(@Nonnull List<String> entityNames) { return entityNames.stream() - .collect(Collectors.toMap(Function.identity(), - entityName -> _entityDocCountCache.getEntityDocCount().getOrDefault(entityName.toLowerCase(), 0L))); + .collect( + Collectors.toMap( + Function.identity(), + entityName -> + _entityDocCountCache + .getEntityDocCount() + .getOrDefault(entityName.toLowerCase(), 0L))); } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. * * @param entityNames names of the entities * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags optional set of flags to control search behavior - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags) { + public SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags) { List<String> entitiesToSearch = getEntitiesToSearch(entityNames); if (entitiesToSearch.isEmpty()) { // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); } SearchResult result = - _cachingEntitySearchService.search(entitiesToSearch, input, postFilters, sortCriterion, from, size, searchFlags, null); + _cachingEntitySearchService.search( + entitiesToSearch, input, postFilters, sortCriterion, from, size, searchFlags, null); try { - return result.copy().setEntities(new SearchEntityArray(_searchRanker.rank(result.getEntities()))); + return result + .copy() + .setEntities(new SearchEntityArray(_searchRanker.rank(result.getEntities()))); } catch (Exception e) { log.error("Failed to rank: {}, exception - {}", result, e.toString()); throw new RuntimeException("Failed to rank " + result.toString()); @@ -78,37 +93,55 @@ public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String in } @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, @Nullable SearchFlags searchFlags) { - return searchAcrossEntities(entities, input, postFilters, sortCriterion, from, size, searchFlags, 
null); + return searchAcrossEntities( + entities, input, postFilters, sortCriterion, from, size, searchFlags, null); } /** - * Gets a list of documents that match given search request across multiple entities. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request across multiple entities. The results + * are aggregated and filters are applied to the search hits and not the aggregation results. * * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags optional set of flags to control search behavior * @param facets list of facets we want aggregations for - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List<String> facets) { - log.debug(String.format( - "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entities, input, postFilters, sortCriterion, from, size)); + public SearchResult searchAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets) { + log.debug( + String.format( + "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entities, input, postFilters, sortCriterion, from, size)); // DEPRECATED - // This is the legacy version of `_entityType`-- it operates as a special case and does not support ORs, Unions, etc. - // We will still provide it for backwards compatibility but when sending filters to the backend use the new - // filter name `_entityType` that we provide above. This is just provided to prevent a breaking change for old clients. + // This is the legacy version of `_entityType`-- it operates as a special case and does not + // support ORs, Unions, etc. + // We will still provide it for backwards compatibility but when sending filters to the backend + // use the new + // filter name `_entityType` that we provide above. This is just provided to prevent a breaking + // change for old clients. 
boolean aggregateByLegacyEntityFacet = facets != null && facets.contains("entity"); if (aggregateByLegacyEntityFacet) { facets = new ArrayList<>(facets); @@ -119,29 +152,49 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); } - SearchResult result = _cachingEntitySearchService.search(nonEmptyEntities, input, postFilters, sortCriterion, from, size, searchFlags, facets); + SearchResult result = + _cachingEntitySearchService.search( + nonEmptyEntities, input, postFilters, sortCriterion, from, size, searchFlags, facets); if (facets == null || facets.contains("entity") || facets.contains("_entityType")) { - Optional<AggregationMetadata> entityTypeAgg = result.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals(INDEX_VIRTUAL_FIELD)).findFirst(); + Optional<AggregationMetadata> entityTypeAgg = + result.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals(INDEX_VIRTUAL_FIELD)) + .findFirst(); if (entityTypeAgg.isPresent()) { LongMap numResultsPerEntity = entityTypeAgg.get().getAggregations(); - result.getMetadata() + result + .getMetadata() .getAggregations() - .add(new AggregationMetadata().setName("entity") - .setDisplayName("Type") - .setAggregations(numResultsPerEntity) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(numResultsPerEntity, Collections.emptySet())))); + .add( + new AggregationMetadata() + .setName("entity") + .setDisplayName("Type") + .setAggregations(numResultsPerEntity) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + numResultsPerEntity, Collections.emptySet())))); } else { - // Should not happen due to the adding of the _entityType aggregation before, but if it does, best-effort count of entity types + // Should not happen due to the adding of the _entityType aggregation before, but if it + // does, best-effort count of entity types // Will not include entity types that had 0 results - Map<String, Long> numResultsPerEntity = result.getEntities().stream().collect(Collectors.groupingBy( - entity -> entity.getEntity().getEntityType(), Collectors.counting())); - result.getMetadata() + Map<String, Long> numResultsPerEntity = + result.getEntities().stream() + .collect( + Collectors.groupingBy( + entity -> entity.getEntity().getEntityType(), Collectors.counting())); + result + .getMetadata() .getAggregations() - .add(new AggregationMetadata().setName("entity") - .setDisplayName("Type") - .setAggregations(new LongMap(numResultsPerEntity)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(numResultsPerEntity, Collections.emptySet())))); + .add( + new AggregationMetadata() + .setName("entity") + .setDisplayName("Type") + .setAggregations(new LongMap(numResultsPerEntity)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + numResultsPerEntity, Collections.emptySet())))); } } return result; @@ -149,15 +202,18 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul /** * If no entities are provided, fallback to the list of non-empty entities + * * @param inputEntities the requested entities * @return some entities to search */ private List<String> getEntitiesToSearch(@Nonnull List<String> inputEntities) { List<String> nonEmptyEntities; - List<String> lowercaseEntities = inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); + List<String> 
lowercaseEntities = + inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); if (lowercaseEntities.isEmpty()) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { nonEmptyEntities = _entityDocCountCache.getNonEmptyEntities(); } } else { @@ -168,35 +224,53 @@ private List<String> getEntitiesToSearch(@Nonnull List<String> inputEntities) { } /** - * Gets a list of documents that match given search request across multiple entities. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request across multiple entities. The results + * are aggregated and filters are applied to the search hits and not the aggregation results. * * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier for passing to search backend * @param size the number of search hits to return * @param searchFlags optional set of flags to control search behavior - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input, - @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult scrollAcrossEntities( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); List<String> entitiesToSearch = getEntitiesToSearch(entities); if (entitiesToSearch.isEmpty()) { // No indices with non-zero entries: skip querying and return empty result return getEmptyScrollResult(size); } - return _cachingEntitySearchService.scroll(entitiesToSearch, input, postFilters, sortCriterion, scrollId, keepAlive, size, searchFlags); + return _cachingEntitySearchService.scroll( + entitiesToSearch, + input, + postFilters, + sortCriterion, + scrollId, + keepAlive, + size, + searchFlags); } private static SearchResult getEmptySearchResult(int from, int size) { - return new SearchResult().setEntities(new SearchEntityArray()) + return new SearchResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setFrom(from) .setPageSize(size) @@ -204,7 +278,8 @@ private static SearchResult getEmptySearchResult(int 
from, int size) { } private static ScrollResult getEmptyScrollResult(int size) { - return new ScrollResult().setEntities(new SearchEntityArray()) + return new ScrollResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setPageSize(size) .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java index cc7cd8ce28bae..0ecdb83ed20ee 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.cache; +import static com.datahub.util.RecordUtils.*; + import com.codahale.metrics.Timer; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; @@ -16,23 +18,17 @@ import lombok.Value; import org.springframework.cache.Cache; -import static com.datahub.util.RecordUtils.*; - - -/** - * Wrapper class to allow searching in batches and caching the results. - */ +/** Wrapper class to allow searching in batches and caching the results. */ @RequiredArgsConstructor public class CacheableSearcher<K> { - @Nonnull - private final Cache cache; + @Nonnull private final Cache cache; private final int batchSize; - // Function that executes search and retrieves the search result given the query batch (from, size) + // Function that executes search and retrieves the search result given the query batch (from, + // size) private final Function<QueryPagination, SearchResult> searcher; // Function that generates the cache key given the query batch (from, size) private final Function<QueryPagination, K> cacheKeyGenerator; - @Nullable - private final SearchFlags searchFlags; + @Nullable private final SearchFlags searchFlags; private final boolean enableCache; @Value @@ -42,9 +38,10 @@ public static class QueryPagination implements Serializable { } /** - * Get search results corresponding to the input "from" and "size" - * It goes through batches, starting from the beginning, until we get enough results to return - * This let's us have batches that return a variable number of results (we have no idea which batch the "from" "size" page corresponds to) + * Get search results corresponding to the input "from" and "size". It goes through batches, + * starting from the beginning, until we get enough results to return. This lets us have batches + * that return a variable number of results (we have no idea which batch the "from" "size" page + * corresponds to). */ public SearchResult getSearchResults(int from, int size) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getSearchResults").time()) { @@ -67,14 +64,16 @@ public SearchResult getSearchResults(int from, int size) { resultEntities.addAll(batchedResult.getEntities().subList(startInBatch, endInBatch)); foundStart = true; } - // If current batch is smaller than the requested batch size, the next batch will return empty. + // If current batch is smaller than the requested batch size, the next batch will return + // empty. 
if (currentBatchSize < batchSize) { break; } resultsSoFar += currentBatchSize; batchId++; } while (resultsSoFar < from + size); - return new SearchResult().setEntities(new SearchEntityArray(resultEntities)) + return new SearchResult() + .setEntities(new SearchEntityArray(resultEntities)) .setMetadata(batchedResult.getMetadata()) .setFrom(from) .setPageSize(size) @@ -93,13 +92,16 @@ private SearchResult getBatch(int batchId) { if (enableCache) { K cacheKey = cacheKeyGenerator.apply(batch); if ((searchFlags == null || !searchFlags.isSkipCache())) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getBatch_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "getBatch_cache_access").time(); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getBatch_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "getBatch_cache_access").time(); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(SearchResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "getBatch_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "getBatch_cache_miss").time(); result = searcher.apply(batch); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java index 49fd3157437d1..9d4cb0c9ac613 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.search.cache; -import com.linkedin.metadata.graph.EntityLineageResult; -import java.io.Serializable; -import lombok.Data; - import static com.datahub.util.RecordUtils.*; import static com.linkedin.metadata.search.utils.GZIPUtil.*; +import com.linkedin.metadata.graph.EntityLineageResult; +import java.io.Serializable; +import lombok.Data; @Data public class CachedEntityLineageResult implements Serializable { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java index 95f208e185df1..2c99c71acf749 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java @@ -1,7 +1,7 @@ package com.linkedin.metadata.search.cache; -import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.google.common.base.Suppliers; +import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.utils.ConcurrencyUtils; @@ -13,24 +13,27 @@ import java.util.function.Supplier; import java.util.stream.Collectors; - public class EntityDocCountCache { private final EntityRegistry _entityRegistry; private final EntitySearchService _entitySearchService; private final Supplier<Map<String, Long>> entityDocCount; - public EntityDocCountCache(EntityRegistry entityRegistry, EntitySearchService entitySearchService, + public EntityDocCountCache( + 
EntityRegistry entityRegistry, + EntitySearchService entitySearchService, EntityDocCountCacheConfiguration config) { _entityRegistry = entityRegistry; _entitySearchService = entitySearchService; - entityDocCount = Suppliers.memoizeWithExpiration(this::fetchEntityDocCount, config.getTtlSeconds(), TimeUnit.SECONDS); + entityDocCount = + Suppliers.memoizeWithExpiration( + this::fetchEntityDocCount, config.getTtlSeconds(), TimeUnit.SECONDS); } private Map<String, Long> fetchEntityDocCount() { - return ConcurrencyUtils - .transformAndCollectAsync(_entityRegistry.getEntitySpecs().keySet(), - Function.identity(), - Collectors.toMap(Function.identity(), _entitySearchService::docCount)); + return ConcurrencyUtils.transformAndCollectAsync( + _entityRegistry.getEntitySpecs().keySet(), + Function.identity(), + Collectors.toMap(Function.identity(), _entitySearchService::docCount)); } @WithSpan @@ -39,8 +42,7 @@ public Map<String, Long> getEntityDocCount() { } public List<String> getNonEmptyEntities() { - return getEntityDocCount().entrySet() - .stream() + return getEntityDocCount().entrySet().stream() .filter(entry -> entry.getValue() > 0) .map(Map.Entry::getKey) .collect(Collectors.toList()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java index db414d70603dc..eaeae0cfc1556 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.client; +import static com.datahub.util.RecordUtils.toJsonString; +import static com.datahub.util.RecordUtils.toRecordTemplate; + import com.codahale.metrics.Timer; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; @@ -21,25 +24,23 @@ import org.springframework.cache.Cache; import org.springframework.cache.CacheManager; -import static com.datahub.util.RecordUtils.toJsonString; -import static com.datahub.util.RecordUtils.toRecordTemplate; - - @RequiredArgsConstructor public class CachingEntitySearchService { private static final String ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME = "entitySearchServiceSearch"; - private static final String ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME = "entitySearchServiceAutoComplete"; + private static final String ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME = + "entitySearchServiceAutoComplete"; private static final String ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME = "entitySearchServiceBrowse"; public static final String ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME = "entitySearchServiceScroll"; private final CacheManager cacheManager; - private final EntitySearchService entitySearchService; // This is a shared component, also used in search aggregation + private final EntitySearchService + entitySearchService; // This is a shared component, also used in search aggregation private final int batchSize; private final boolean enableCache; /** - * Retrieves cached search results. If the query has been cached, this will return quickly. If not, a full - * search request will be made. + * Retrieves cached search results. If the query has been cached, this will return quickly. If + * not, a full search request will be made. 
* * @param entityName the name of the entity to search * @param query the search query @@ -49,7 +50,6 @@ public class CachingEntitySearchService { * @param size the count * @param flags additional search flags * @param facets list of facets we want aggregations for - * * @return a {@link SearchResult} containing the requested batch of search results */ public SearchResult search( @@ -61,7 +61,8 @@ public SearchResult search( int size, @Nullable SearchFlags flags, @Nullable List<String> facets) { - return getCachedSearchResults(entityNames, query, filters, sortCriterion, from, size, flags, facets); + return getCachedSearchResults( + entityNames, query, filters, sortCriterion, from, size, flags, facets); } /** @@ -72,7 +73,6 @@ public SearchResult search( * @param filters the filters to include * @param limit the max number of results to return * @param flags additional search flags - * * @return a {@link SearchResult} containing the requested batch of search results */ public AutoCompleteResult autoComplete( @@ -93,7 +93,6 @@ public AutoCompleteResult autoComplete( * @param filters the request map with fields and values as filters * @param from index of the first entity located in path * @param size the max number of entities contained in the response - * * @return a {@link SearchResult} containing the requested batch of search results */ public BrowseResult browse( @@ -107,8 +106,8 @@ public BrowseResult browse( } /** - * Retrieves cached scroll results. If the query has been cached, this will return quickly. If not, a full - * scroll request will be made. + * Retrieves cached scroll results. If the query has been cached, this will return quickly. If + * not, a full scroll request will be made. * * @param entities the names of the entities to search * @param query the search query @@ -118,7 +117,6 @@ public BrowseResult browse( * @param keepAlive the string representation of how long to keep point in time alive * @param size the count * @param flags additional search flags - * * @return a {@link ScrollResult} containing the requested batch of scroll results */ public ScrollResult scroll( @@ -130,15 +128,15 @@ public ScrollResult scroll( @Nullable String keepAlive, int size, @Nullable SearchFlags flags) { - return getCachedScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, flags); + return getCachedScrollResults( + entities, query, filters, sortCriterion, scrollId, keepAlive, size, flags); } - - /** - * Get search results corresponding to the input "from" and "size" - * It goes through batches, starting from the beginning, until we get enough results to return - * This lets us have batches that return a variable number of results (we have no idea which batch the "from" "size" page corresponds to) + * Get search results corresponding to the input "from" and "size" It goes through batches, + * starting from the beginning, until we get enough results to return This lets us have batches + * that return a variable number of results (we have no idea which batch the "from" "size" page + * corresponds to) */ public SearchResult getCachedSearchResults( @Nonnull List<String> entityNames, @@ -150,19 +148,33 @@ public SearchResult getCachedSearchResults( @Nullable SearchFlags flags, @Nullable List<String> facets) { return new CacheableSearcher<>( - cacheManager.getCache(ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME), - batchSize, - querySize -> getRawSearchResults(entityNames, query, filters, sortCriterion, querySize.getFrom(), - querySize.getSize(), flags, facets), - querySize 
-> Septet.with(entityNames, query, filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, flags != null ? toJsonString(flags) : null, - facets, querySize), flags, enableCache).getSearchResults(from, size); + cacheManager.getCache(ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME), + batchSize, + querySize -> + getRawSearchResults( + entityNames, + query, + filters, + sortCriterion, + querySize.getFrom(), + querySize.getSize(), + flags, + facets), + querySize -> + Septet.with( + entityNames, + query, + filters != null ? toJsonString(filters) : null, + sortCriterion != null ? toJsonString(sortCriterion) : null, + flags != null ? toJsonString(flags) : null, + facets, + querySize), + flags, + enableCache) + .getSearchResults(from, size); } - - /** - * Returns cached auto-complete results. - */ + /** Returns cached auto-complete results. */ public AutoCompleteResult getCachedAutoCompleteResults( @Nonnull String entityName, @Nonnull String input, @@ -170,19 +182,29 @@ public AutoCompleteResult getCachedAutoCompleteResults( @Nullable Filter filters, int limit, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME); AutoCompleteResult result; if (enableCache(flags)) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); - Object cacheKey = Sextet.with(entityName, input, field, filters != null ? toJsonString(filters) : null, - flags != null ? toJsonString(flags) : null, limit); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); + Object cacheKey = + Sextet.with( + entityName, + input, + field, + filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, + limit); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(AutoCompleteResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time(); result = getRawAutoCompleteResults(entityName, input, field, filters, limit); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); @@ -196,9 +218,7 @@ public AutoCompleteResult getCachedAutoCompleteResults( } } - /** - * Returns cached browse results. - */ + /** Returns cached browse results. 
*/ public BrowseResult getCachedBrowseResults( @Nonnull String entityName, @Nonnull String path, @@ -206,19 +226,29 @@ public BrowseResult getCachedBrowseResults( int from, int size, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME); BrowseResult result; if (enableCache(flags)) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "browse_cache_access").time(); - Object cacheKey = Sextet.with(entityName, path, filters != null ? toJsonString(filters) : null, - flags != null ? toJsonString(flags) : null, from, size); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "browse_cache_access").time(); + Object cacheKey = + Sextet.with( + entityName, + path, + filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, + from, + size); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(BrowseResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "browse_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "browse_cache_miss").time(); result = getRawBrowseResults(entityName, path, filters, from, size); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); @@ -232,9 +262,7 @@ public BrowseResult getCachedBrowseResults( } } - /** - * Returns cached scroll results. - */ + /** Returns cached scroll results. */ public ScrollResult getCachedScrollResults( @Nonnull List<String> entities, @Nonnull String query, @@ -244,37 +272,62 @@ public ScrollResult getCachedScrollResults( @Nullable String keepAlive, int size, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedScrollResults").time()) { - boolean isFullText = Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new SearchFlags()).isFulltext()); + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedScrollResults").time()) { + boolean isFullText = + Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new SearchFlags()).isFulltext()); Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); ScrollResult result; if (enableCache(flags)) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); - Object cacheKey = Septet.with(entities, query, - filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, - flags != null ? toJsonString(flags) : null, - scrollId, size); + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); + Object cacheKey = + Septet.with( + entities, + query, + filters != null ? toJsonString(filters) : null, + sortCriterion != null ? toJsonString(sortCriterion) : null, + flags != null ? toJsonString(flags) : null, + scrollId, + size); String json = cache.get(cacheKey, String.class); result = json != null ? 
toRecordTemplate(ScrollResult.class, json) : null; cacheAccess.stop(); if (result == null) { Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "scroll_cache_miss").time(); - result = getRawScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, isFullText, flags); + result = + getRawScrollResults( + entities, + query, + filters, + sortCriterion, + scrollId, + keepAlive, + size, + isFullText, + flags); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); MetricUtils.counter(this.getClass(), "scroll_cache_miss_count").inc(); } } else { - result = getRawScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, isFullText, flags); + result = + getRawScrollResults( + entities, + query, + filters, + sortCriterion, + scrollId, + keepAlive, + size, + isFullText, + flags); } return result; } } - /** - * Executes the expensive search query using the {@link EntitySearchService} - */ + /** Executes the expensive search query using the {@link EntitySearchService} */ private SearchResult getRawSearchResults( final List<String> entityNames, final String input, @@ -284,46 +337,31 @@ private SearchResult getRawSearchResults( final int count, @Nullable final SearchFlags searchFlags, @Nullable final List<String> facets) { - return entitySearchService.search(entityNames, input, filters, sortCriterion, start, count, searchFlags, facets); + return entitySearchService.search( + entityNames, input, filters, sortCriterion, start, count, searchFlags, facets); } - /** - * Executes the expensive autocomplete query using the {@link EntitySearchService} - */ + /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ private AutoCompleteResult getRawAutoCompleteResults( final String entityName, final String input, final String field, final Filter filters, final int limit) { - return entitySearchService.autoComplete( - entityName, - input, - field, - filters, - limit); + return entitySearchService.autoComplete(entityName, input, field, filters, limit); } - /** - * Executes the expensive autocomplete query using the {@link EntitySearchService} - */ + /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ private BrowseResult getRawBrowseResults( final String entityName, final String input, final Filter filters, final int start, final int count) { - return entitySearchService.browse( - entityName, - input, - filters, - start, - count); + return entitySearchService.browse(entityName, input, filters, start, count); } - /** - * Executes the expensive search query using the {@link EntitySearchService} - */ + /** Executes the expensive search query using the {@link EntitySearchService} */ private ScrollResult getRawScrollResults( final List<String> entities, final String input, @@ -336,31 +374,15 @@ private ScrollResult getRawScrollResults( @Nullable final SearchFlags searchFlags) { if (fulltext) { return entitySearchService.fullTextScroll( - entities, - input, - filters, - sortCriterion, - scrollId, - keepAlive, - count, - searchFlags); + entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); } else { - return entitySearchService.structuredScroll(entities, - input, - filters, - sortCriterion, - scrollId, - keepAlive, - count, - searchFlags); + return entitySearchService.structuredScroll( + entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); } } - /** - * Returns true if the cache should be used or skipped when fetching search results - */ + 
/** Returns true if the cache should be used or skipped when fetching search results */ private boolean enableCache(final SearchFlags searchFlags) { return enableCache && (searchFlags == null || !searchFlags.isSkipCache()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 68a5483fa469c..f40da59a149fa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -17,19 +17,16 @@ import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.search.utils.SearchUtils; - +import com.linkedin.metadata.shared.ElasticSearchIndexed; import java.util.List; import java.util.Map; import java.util.Optional; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.search.SearchResponse; - @Slf4j @RequiredArgsConstructor public class ElasticSearchService implements EntitySearchService, ElasticSearchIndexed { @@ -66,15 +63,19 @@ public long docCount(@Nonnull String entityName) { } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { - log.debug(String.format("Upserting Search document entityName: %s, document: %s, docId: %s", entityName, document, - docId)); + public void upsertDocument( + @Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { + log.debug( + String.format( + "Upserting Search document entityName: %s, document: %s, docId: %s", + entityName, document, docId)); esWriteDAO.upsertDocument(entityName, document, docId); } @Override public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { - log.debug(String.format("Deleting Search document entityName: %s, docId: %s", entityName, docId)); + log.debug( + String.format("Deleting Search document entityName: %s, docId: %s", entityName, docId)); esWriteDAO.deleteDocument(entityName, docId); } @@ -82,12 +83,15 @@ public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { public void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable String runId) { final Optional<String> maybeDocId = SearchUtils.getDocId(urn); if (!maybeDocId.isPresent()) { - log.warn(String.format("Failed to append run id, could not generate a doc id for urn %s", urn)); + log.warn( + String.format("Failed to append run id, could not generate a doc id for urn %s", urn)); return; } final String docId = maybeDocId.get(); log.debug(String.format("Appending run id for entityName: %s, docId: %s", entityName, docId)); - esWriteDAO.applyScriptUpdate(entityName, docId, + esWriteDAO.applyScriptUpdate( + entityName, + docId, /* Script used to apply updates to the runId field of the index. This script saves the past N run ids which touched a particular URN in the search index. 
@@ -99,102 +103,161 @@ public void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable + "ctx._source.runId.add('%s'); " + "if (ctx._source.runId.length > %s) { ctx._source.runId.remove(0) } } " + "} else { ctx._source.runId = ['%s'] }", - runId, - runId, - MAX_RUN_IDS_INDEXED, - runId)); + runId, runId, MAX_RUN_IDS_INDEXED, runId)); } @Nonnull @Override - public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags) { + public SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags) { return search(entityNames, input, postFilters, sortCriterion, from, size, searchFlags, null); } @Nonnull - public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List<String> facets) { - log.debug(String.format( - "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entityNames, input, postFilters, sortCriterion, from, size)); - return esSearchDAO.search(entityNames, input, postFilters, sortCriterion, from, size, searchFlags, facets); + public SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets) { + log.debug( + String.format( + "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entityNames, input, postFilters, sortCriterion, from, size)); + return esSearchDAO.search( + entityNames, input, postFilters, sortCriterion, from, size, searchFlags, facets); } @Nonnull @Override - public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, int from, int size) { + public SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size) { log.debug( - String.format("Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", + String.format( + "Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", entityName, filters, sortCriterion, from, size)); return esSearchDAO.filter(entityName, filters, sortCriterion, from, size); } @Nonnull @Override - public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit) { - log.debug(String.format("Autocompleting query entityName: %s, query: %s, field: %s, requestParams: %s, limit: %s", - entityName, query, field, requestParams, limit)); + public AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit) { + log.debug( + String.format( + "Autocompleting query entityName: %s, query: %s, field: %s, requestParams: %s, limit: %s", + entityName, query, field, requestParams, limit)); return esSearchDAO.autoComplete(entityName, query, field, requestParams, limit); } 
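  // Sketch of the bounded run-id history maintained by the appendRunId script above.
  // The visible painless logic appends the new run id, evicts the oldest entry once
  // the list grows past MAX_RUN_IDS_INDEXED, and initializes the list when the field
  // is absent. The helper below is a plain-Java rendering of that behavior for
  // illustration only; the method does not exist in this class, and the real update
  // executes as a script inside the search index (java.util.ArrayList assumed imported).
  private static List<String> appendRunIdBounded(List<String> runIds, String runId, int maxRunIds) {
    if (runIds == null) {
      runIds = new ArrayList<>(); // mirrors: ctx._source.runId = ['<runId>']
    }
    runIds.add(runId); // mirrors: ctx._source.runId.add('<runId>')
    if (runIds.size() > maxRunIds) {
      runIds.remove(0); // mirrors: ctx._source.runId.remove(0)
    }
    return runIds;
  }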
@Nonnull @Override - public Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit) { - log.debug("Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", entityNames != null ? entityNames.toString() : null, field, - requestParams, limit); + public Map<String, Long> aggregateByValue( + @Nullable List<String> entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit) { + log.debug( + "Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", + entityNames != null ? entityNames.toString() : null, + field, + requestParams, + limit); return esSearchDAO.aggregateByValue(entityNames, field, requestParams, limit); } @Nonnull @Override - public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, + public BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filters, + int from, int size) { log.debug( - String.format("Browsing entities entityName: %s, path: %s, filters: %s, from: %s, size: %s", entityName, - path, filters, from, size)); + String.format( + "Browsing entities entityName: %s, path: %s, filters: %s, from: %s, size: %s", + entityName, path, filters, from, size)); return esBrowseDAO.browse(entityName, path, filters, from, size); } @Nonnull @Override - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count) { return esBrowseDAO.browseV2(entityName, path, filter, input, start, count); } @Nonnull @Override public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) { - log.debug(String.format("Getting browse paths for entity entityName: %s, urn: %s", entityName, urn)); + log.debug( + String.format("Getting browse paths for entity entityName: %s, urn: %s", entityName, urn)); return esBrowseDAO.getBrowsePaths(entityName, urn); } @Nonnull @Override - public ScrollResult fullTextScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult fullTextScroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); flags.setFulltext(true); - return esSearchDAO.scroll(entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, - flags); + return esSearchDAO.scroll( + entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); } @Nonnull @Override - public ScrollResult 
structuredScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult structuredScroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); flags.setFulltext(false); - return esSearchDAO.scroll(entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); + return esSearchDAO.scroll( + entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); } public Optional<SearchResponse> raw(@Nonnull String indexName, @Nullable String jsonQuery) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java index 43431e93622f7..388dcea784cbb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java @@ -1,11 +1,14 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.google.common.collect.ImmutableMap; - +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.version.GitVersion; +import com.linkedin.util.Pair; +import io.github.resilience4j.retry.Retry; +import io.github.resilience4j.retry.RetryConfig; +import io.github.resilience4j.retry.RetryRegistry; import java.io.IOException; import java.time.Duration; import java.time.Instant; @@ -21,11 +24,6 @@ import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -import com.linkedin.util.Pair; -import io.github.resilience4j.retry.Retry; -import io.github.resilience4j.retry.RetryConfig; -import io.github.resilience4j.retry.RetryRegistry; import javax.annotation.Nullable; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -37,6 +35,7 @@ import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.GetAliasesResponse; @@ -54,55 +53,52 @@ import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import 
org.opensearch.index.reindex.ReindexRequest; -import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import org.opensearch.tasks.TaskInfo; - @Slf4j public class ESIndexBuilder { private final RestHighLevelClient _searchClient; - @Getter - private final int numShards; + @Getter private final int numShards; - @Getter - private final int numReplicas; + @Getter private final int numReplicas; - @Getter - private final int numRetries; + @Getter private final int numRetries; - @Getter - private final int refreshIntervalSeconds; + @Getter private final int refreshIntervalSeconds; - @Getter - private final Map<String, Map<String, String>> indexSettingOverrides; + @Getter private final Map<String, Map<String, String>> indexSettingOverrides; - @Getter - private final boolean enableIndexSettingsReindex; + @Getter private final boolean enableIndexSettingsReindex; - @Getter - private final boolean enableIndexMappingsReindex; + @Getter private final boolean enableIndexMappingsReindex; - @Getter - private final ElasticSearchConfiguration elasticSearchConfiguration; + @Getter private final ElasticSearchConfiguration elasticSearchConfiguration; - @Getter - private final GitVersion gitVersion; + @Getter private final GitVersion gitVersion; - final private static RequestOptions REQUEST_OPTIONS = RequestOptions.DEFAULT.toBuilder() - .setRequestConfig(RequestConfig.custom() - .setSocketTimeout(180 * 1000).build()).build(); + private static final RequestOptions REQUEST_OPTIONS = + RequestOptions.DEFAULT.toBuilder() + .setRequestConfig(RequestConfig.custom().setSocketTimeout(180 * 1000).build()) + .build(); private final RetryRegistry retryRegistry; - public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numReplicas, int numRetries, - int refreshIntervalSeconds, Map<String, Map<String, String>> indexSettingOverrides, - boolean enableIndexSettingsReindex, boolean enableIndexMappingsReindex, - ElasticSearchConfiguration elasticSearchConfiguration, GitVersion gitVersion) { + public ESIndexBuilder( + RestHighLevelClient searchClient, + int numShards, + int numReplicas, + int numRetries, + int refreshIntervalSeconds, + Map<String, Map<String, String>> indexSettingOverrides, + boolean enableIndexSettingsReindex, + boolean enableIndexMappingsReindex, + ElasticSearchConfiguration elasticSearchConfiguration, + GitVersion gitVersion) { this._searchClient = searchClient; this.numShards = numShards; this.numReplicas = numReplicas; @@ -114,7 +110,8 @@ public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numRe this.elasticSearchConfiguration = elasticSearchConfiguration; this.gitVersion = gitVersion; - RetryConfig config = RetryConfig.custom() + RetryConfig config = + RetryConfig.custom() .maxAttempts(Math.max(1, numRetries)) .waitDuration(Duration.ofSeconds(10)) .retryOnException(e -> e instanceof OpenSearchException) @@ -125,8 +122,11 @@ public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numRe this.retryRegistry = RetryRegistry.of(config); } - public ReindexConfig buildReindexState(String indexName, Map<String, Object> mappings, Map<String, Object> settings) throws IOException { - ReindexConfig.ReindexConfigBuilder builder = ReindexConfig.builder() + public ReindexConfig buildReindexState( + String indexName, Map<String, Object> mappings, 
Map<String, Object> settings) + throws IOException { + ReindexConfig.ReindexConfigBuilder builder = + ReindexConfig.builder() .name(indexName) .enableIndexSettingsReindex(enableIndexSettingsReindex) .enableIndexMappingsReindex(enableIndexMappingsReindex) @@ -142,7 +142,8 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map builder.targetSettings(targetSetting); // Check if index exists - boolean exists = _searchClient.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); + boolean exists = + _searchClient.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); builder.exists(exists); // If index doesn't exist, no reindex @@ -150,7 +151,9 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map return builder.build(); } - Settings currentSettings = _searchClient.indices() + Settings currentSettings = + _searchClient + .indices() .getSettings(new GetSettingsRequest().indices(indexName), RequestOptions.DEFAULT) .getIndexToSettings() .values() @@ -158,7 +161,9 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map .next(); builder.currentSettings(currentSettings); - Map<String, Object> currentMappings = _searchClient.indices() + Map<String, Object> currentMappings = + _searchClient + .indices() .getMapping(new GetMappingsRequest().indices(indexName), RequestOptions.DEFAULT) .mappings() .values() @@ -172,16 +177,19 @@ public ReindexConfig buildReindexState(String indexName, Map<String, Object> map } /** - * Builds index with given name, mappings and settings - * Deprecated: Use the `buildIndex(ReindexConfig indexState) to enforce conventions via ReindexConfig class - * earlier in the process. + * Builds index with given name, mappings and settings Deprecated: Use the + * `buildIndex(ReindexConfig indexState) to enforce conventions via ReindexConfig class earlier in + * the process. + * * @param indexName index name * @param mappings ES mappings * @param settings ES settings * @throws IOException ES error */ @Deprecated - public void buildIndex(String indexName, Map<String, Object> mappings, Map<String, Object> settings) throws IOException { + public void buildIndex( + String indexName, Map<String, Object> mappings, Map<String, Object> settings) + throws IOException { buildIndex(buildReindexState(indexName, mappings, settings)); } @@ -210,15 +218,20 @@ public void buildIndex(ReindexConfig indexState) throws IOException { if (indexState.requiresApplySettings()) { UpdateSettingsRequest request = new UpdateSettingsRequest(indexState.name()); - Map<String, Object> indexSettings = ((Map<String, Object>) indexState.targetSettings().get("index")) + Map<String, Object> indexSettings = + ((Map<String, Object>) indexState.targetSettings().get("index")) .entrySet().stream() - .filter(e -> ReindexConfig.SETTINGS_DYNAMIC.contains(e.getKey())) - .collect(Collectors.toMap(e -> "index." + e.getKey(), Map.Entry::getValue)); + .filter(e -> ReindexConfig.SETTINGS_DYNAMIC.contains(e.getKey())) + .collect(Collectors.toMap(e -> "index." + e.getKey(), Map.Entry::getValue)); request.settings(indexSettings); - boolean ack = _searchClient.indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. 
Settings: {}, Acknowledged: {}", indexState.name(), - ReindexConfig.OBJECT_MAPPER.writeValueAsString(indexSettings), ack); + boolean ack = + _searchClient.indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexState.name(), + ReindexConfig.OBJECT_MAPPER.writeValueAsString(indexSettings), + ack); } } else { try { @@ -231,30 +244,40 @@ public void buildIndex(ReindexConfig indexState) throws IOException { /** * Apply mappings changes if reindex is not required + * * @param indexState the state of the current and target index settings/mappings - * @param suppressError during reindex logic this is not an error, for structured properties it is an error + * @param suppressError during reindex logic this is not an error, for structured properties it is + * an error * @throws IOException communication issues with ES */ public void applyMappings(ReindexConfig indexState, boolean suppressError) throws IOException { if (indexState.isPureMappingsAddition()) { log.info("Updating index {} mappings in place.", indexState.name()); - PutMappingRequest request = new PutMappingRequest(indexState.name()).source(indexState.targetMappings()); + PutMappingRequest request = + new PutMappingRequest(indexState.name()).source(indexState.targetMappings()); _searchClient.indices().putMapping(request, RequestOptions.DEFAULT); log.info("Updated index {} with new mappings", indexState.name()); } else { if (!suppressError) { - log.error("Attempted to apply invalid mappings. Current: {} Target: {}", indexState.currentMappings(), - indexState.targetMappings()); + log.error( + "Attempted to apply invalid mappings. Current: {} Target: {}", + indexState.currentMappings(), + indexState.targetMappings()); } } } - public String reindexInPlaceAsync(String indexAlias, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options, ReindexConfig config) + public String reindexInPlaceAsync( + String indexAlias, + @Nullable QueryBuilder filterQuery, + BatchWriteOperationsOptions options, + ReindexConfig config) throws Exception { - GetAliasesResponse aliasesResponse = _searchClient.indices().getAlias( - new GetAliasesRequest(indexAlias), RequestOptions.DEFAULT); + GetAliasesResponse aliasesResponse = + _searchClient.indices().getAlias(new GetAliasesRequest(indexAlias), RequestOptions.DEFAULT); if (aliasesResponse.getAliases().isEmpty()) { - throw new IllegalArgumentException(String.format("Input to reindexInPlaceAsync should be an alias. %s is not", indexAlias)); + throw new IllegalArgumentException( + String.format("Input to reindexInPlaceAsync should be an alias. 
%s is not", indexAlias)); } // Point alias at new index @@ -262,9 +285,12 @@ public String reindexInPlaceAsync(String indexAlias, @Nullable QueryBuilder filt createIndex(nextIndexName, config); renameReindexedIndices(_searchClient, indexAlias, null, nextIndexName, false); - return submitReindex(aliasesResponse.getAliases().keySet().toArray(new String[0]), - nextIndexName, options.getBatchSize(), - TimeValue.timeValueSeconds(options.getTimeoutSeconds()), filterQuery); + return submitReindex( + aliasesResponse.getAliases().keySet().toArray(new String[0]), + nextIndexName, + options.getBatchSize(), + TimeValue.timeValueSeconds(options.getTimeoutSeconds()), + filterQuery); } private static String getNextIndexName(String base, long startTime) { @@ -286,10 +312,14 @@ private void reindex(ReindexConfig indexState) throws Throwable { String parentTaskId; if (previousTaskInfo.isPresent()) { - log.info("Reindex task {} in progress with description {}. Attempting to continue task from breakpoint.", - previousTaskInfo.get().getTaskId(), previousTaskInfo.get().getDescription()); + log.info( + "Reindex task {} in progress with description {}. Attempting to continue task from breakpoint.", + previousTaskInfo.get().getTaskId(), + previousTaskInfo.get().getDescription()); parentTaskId = previousTaskInfo.get().getParentTaskId().toString(); - tempIndexName = ESUtils.extractTargetIndex(previousTaskInfo.get().getHeaders().get(ESUtils.OPAQUE_ID_HEADER)); + tempIndexName = + ESUtils.extractTargetIndex( + previousTaskInfo.get().getHeaders().get(ESUtils.OPAQUE_ID_HEADER)); } else { // Create new index createIndex(tempIndexName, indexState); @@ -304,7 +334,11 @@ private void reindex(ReindexConfig indexState) throws Throwable { long documentCountsLastUpdated = System.currentTimeMillis(); while (System.currentTimeMillis() < timeoutAt) { - log.info("Task: {} - Reindexing from {} to {} in progress...", parentTaskId, indexState.name(), tempIndexName); + log.info( + "Task: {} - Reindexing from {} to {} in progress...", + parentTaskId, + indexState.name(), + tempIndexName); Pair<Long, Long> tempDocumentsCount = getDocumentCounts(indexState.name(), tempIndexName); if (!tempDocumentsCount.equals(documentCounts)) { @@ -313,18 +347,28 @@ private void reindex(ReindexConfig indexState) throws Throwable { } if (documentCounts.getFirst().equals(documentCounts.getSecond())) { - log.info("Task: {} - Reindexing {} to {} task was successful", parentTaskId, indexState.name(), tempIndexName); + log.info( + "Task: {} - Reindexing {} to {} task was successful", + parentTaskId, + indexState.name(), + tempIndexName); reindexTaskCompleted = true; break; } else { - log.warn("Task: {} - Document counts do not match {} != {}. Complete: {}%", parentTaskId, documentCounts.getFirst(), - documentCounts.getSecond(), 100 * (1.0f * documentCounts.getSecond()) / documentCounts.getFirst()); + log.warn( + "Task: {} - Document counts do not match {} != {}. 
Complete: {}%", + parentTaskId, + documentCounts.getFirst(), + documentCounts.getSecond(), + 100 * (1.0f * documentCounts.getSecond()) / documentCounts.getFirst()); long lastUpdateDelta = System.currentTimeMillis() - documentCountsLastUpdated; if (lastUpdateDelta > (300 * 1000)) { - if (reindexCount <= numRetries) { - log.warn("No change in index count after 5 minutes, re-triggering reindex #{}.", reindexCount); + if (reindexCount <= numRetries) { + log.warn( + "No change in index count after 5 minutes, re-triggering reindex #{}.", + reindexCount); submitReindex(indexState.name(), tempIndexName); reindexCount = reindexCount + 1; documentCountsLastUpdated = System.currentTimeMillis(); // reset timer @@ -341,37 +385,63 @@ private void reindex(ReindexConfig indexState) throws Throwable { if (!reindexTaskCompleted) { if (elasticSearchConfiguration.getBuildIndices().isAllowDocCountMismatch() - && elasticSearchConfiguration.getBuildIndices().isCloneIndices()) { - log.warn("Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}\n" - + "This condition is explicitly ALLOWED, please refer to latest clone if original index is required.", - indexState.name(), documentCounts.getFirst(), documentCounts.getSecond()); + && elasticSearchConfiguration.getBuildIndices().isCloneIndices()) { + log.warn( + "Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}\n" + + "This condition is explicitly ALLOWED, please refer to latest clone if original index is required.", + indexState.name(), + documentCounts.getFirst(), + documentCounts.getSecond()); } else { - log.error("Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}", - indexState.name(), documentCounts.getFirst(), documentCounts.getSecond()); - diff(indexState.name(), tempIndexName, Math.max(documentCounts.getFirst(), documentCounts.getSecond())); - throw new RuntimeException(String.format("Reindex from %s to %s failed. Document count %s != %s", indexState.name(), tempIndexName, - documentCounts.getFirst(), documentCounts.getSecond())); + log.error( + "Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}", + indexState.name(), + documentCounts.getFirst(), + documentCounts.getSecond()); + diff( + indexState.name(), + tempIndexName, + Math.max(documentCounts.getFirst(), documentCounts.getSecond())); + throw new RuntimeException( + String.format( + "Reindex from %s to %s failed. 
Document count %s != %s", + indexState.name(), + tempIndexName, + documentCounts.getFirst(), + documentCounts.getSecond())); } } } catch (Throwable e) { - log.error("Failed to reindex {} to {}: Exception {}", indexState.name(), tempIndexName, e.toString()); - _searchClient.indices().delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT); + log.error( + "Failed to reindex {} to {}: Exception {}", + indexState.name(), + tempIndexName, + e.toString()); + _searchClient + .indices() + .delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT); throw e; } log.info("Reindex from {} to {} succeeded", indexState.name(), tempIndexName); - renameReindexedIndices(_searchClient, indexState.name(), indexState.indexPattern(), tempIndexName, true); + renameReindexedIndices( + _searchClient, indexState.name(), indexState.indexPattern(), tempIndexName, true); log.info("Finished setting up {}", indexState.name()); } - public static void renameReindexedIndices(RestHighLevelClient searchClient, String originalName, @Nullable String pattern, String newName, boolean deleteOld) + public static void renameReindexedIndices( + RestHighLevelClient searchClient, + String originalName, + @Nullable String pattern, + String newName, + boolean deleteOld) throws IOException { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(originalName); if (pattern != null) { getAliasesRequest.indices(pattern); } - GetAliasesResponse aliasesResponse = searchClient.indices().getAlias( - getAliasesRequest, RequestOptions.DEFAULT); + GetAliasesResponse aliasesResponse = + searchClient.indices().getAlias(getAliasesRequest, RequestOptions.DEFAULT); // If not aliased, delete the original index final Collection<String> aliasedIndexDelete; @@ -384,23 +454,31 @@ public static void renameReindexedIndices(RestHighLevelClient searchClient, Stri } // Add alias for the new index - AliasActions removeAction = deleteOld ? AliasActions.removeIndex() : AliasActions.remove().alias(originalName); + AliasActions removeAction = + deleteOld ? 
AliasActions.removeIndex() : AliasActions.remove().alias(originalName); removeAction.indices(aliasedIndexDelete.toArray(new String[0])); AliasActions addAction = AliasActions.add().alias(originalName).index(newName); - searchClient.indices() - .updateAliases(new IndicesAliasesRequest().addAliasAction(removeAction).addAliasAction(addAction), + searchClient + .indices() + .updateAliases( + new IndicesAliasesRequest().addAliasAction(removeAction).addAliasAction(addAction), RequestOptions.DEFAULT); } - private String submitReindex(String[] sourceIndices, String destinationIndex, - int batchSize, @Nullable TimeValue timeout, - @Nullable QueryBuilder sourceFilterQuery) throws IOException { - ReindexRequest reindexRequest = new ReindexRequest() - .setSourceIndices(sourceIndices) - .setDestIndex(destinationIndex) - .setMaxRetries(numRetries) - .setAbortOnVersionConflict(false) - .setSourceBatchSize(batchSize); + private String submitReindex( + String[] sourceIndices, + String destinationIndex, + int batchSize, + @Nullable TimeValue timeout, + @Nullable QueryBuilder sourceFilterQuery) + throws IOException { + ReindexRequest reindexRequest = + new ReindexRequest() + .setSourceIndices(sourceIndices) + .setDestIndex(destinationIndex) + .setMaxRetries(numRetries) + .setAbortOnVersionConflict(false) + .setSourceBatchSize(batchSize); if (timeout != null) { reindexRequest.setTimeout(timeout); } @@ -408,26 +486,34 @@ private String submitReindex(String[] sourceIndices, String destinationIndex, reindexRequest.setSourceQuery(sourceFilterQuery); } - RequestOptions requestOptions = ESUtils.buildReindexTaskRequestOptions(gitVersion.getVersion(), sourceIndices[0], - destinationIndex); - TaskSubmissionResponse reindexTask = _searchClient.submitReindexTask(reindexRequest, requestOptions); + RequestOptions requestOptions = + ESUtils.buildReindexTaskRequestOptions( + gitVersion.getVersion(), sourceIndices[0], destinationIndex); + TaskSubmissionResponse reindexTask = + _searchClient.submitReindexTask(reindexRequest, requestOptions); return reindexTask.getTask(); } private String submitReindex(String sourceIndex, String destinationIndex) throws IOException { - return submitReindex(new String[]{sourceIndex}, destinationIndex, 2500, null, null); + return submitReindex(new String[] {sourceIndex}, destinationIndex, 2500, null, null); } - private Pair<Long, Long> getDocumentCounts(String sourceIndex, String destinationIndex) throws Throwable { + private Pair<Long, Long> getDocumentCounts(String sourceIndex, String destinationIndex) + throws Throwable { // Check whether reindex succeeded by comparing document count - // There can be some delay between the reindex finishing and count being fully up to date, so try multiple times + // There can be some delay between the reindex finishing and count being fully up to date, so + // try multiple times long originalCount = 0; long reindexedCount = 0; for (int i = 0; i < this.numRetries; i++) { // Check if reindex succeeded by comparing document counts - originalCount = retryRegistry.retry("retrySourceIndexCount") + originalCount = + retryRegistry + .retry("retrySourceIndexCount") .executeCheckedSupplier(() -> getCount(sourceIndex)); - reindexedCount = retryRegistry.retry("retryDestinationIndexCount") + reindexedCount = + retryRegistry + .retry("retryDestinationIndexCount") .executeCheckedSupplier(() -> getCount(destinationIndex)); if (originalCount == reindexedCount) { break; @@ -445,13 +531,20 @@ private Pair<Long, Long> getDocumentCounts(String sourceIndex, String destinatio 
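  // Sketch of the verification loop implemented by getDocumentCounts above: because
  // document counts can lag briefly after a reindex task finishes, both indices are
  // re-counted until the numbers converge or the retry budget is exhausted. This is a
  // simplified rendering; the real code wraps each getCount(...) call in resilience4j
  // retries instead of sleeping inline, and the method name here is illustrative
  // (java.util.concurrent.TimeUnit assumed imported).
  private long[] awaitMatchingCounts(String sourceIndex, String destinationIndex)
      throws Exception {
    long source = 0;
    long reindexed = 0;
    for (int attempt = 0; attempt < numRetries; attempt++) {
      source = getCount(sourceIndex); // doc count of the original index
      reindexed = getCount(destinationIndex); // doc count of the freshly written index
      if (source == reindexed) {
        break; // counts converged: the reindex copied every document
      }
      TimeUnit.SECONDS.sleep(5); // illustrative back-off between re-checks
    }
    return new long[] {source, reindexed};
  }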
private Optional<TaskInfo> getTaskInfoByHeader(String indexName) throws Throwable { Retry retryWithDefaultConfig = retryRegistry.retry("getTaskInfoByHeader"); - return retryWithDefaultConfig.executeCheckedSupplier(() -> { - ListTasksRequest listTasksRequest = new ListTasksRequest().setDetailed(true); - List<TaskInfo> taskInfos = _searchClient.tasks().list(listTasksRequest, REQUEST_OPTIONS).getTasks(); - return taskInfos.stream() - .filter(info -> ESUtils.prefixMatch(info.getHeaders().get(ESUtils.OPAQUE_ID_HEADER), gitVersion.getVersion(), - indexName)).findFirst(); - }); + return retryWithDefaultConfig.executeCheckedSupplier( + () -> { + ListTasksRequest listTasksRequest = new ListTasksRequest().setDetailed(true); + List<TaskInfo> taskInfos = + _searchClient.tasks().list(listTasksRequest, REQUEST_OPTIONS).getTasks(); + return taskInfos.stream() + .filter( + info -> + ESUtils.prefixMatch( + info.getHeaders().get(ESUtils.OPAQUE_ID_HEADER), + gitVersion.getVersion(), + indexName)) + .findFirst(); + }); } private void diff(String indexA, String indexB, long maxDocs) { @@ -470,12 +563,17 @@ private void diff(String indexA, String indexB, long maxDocs) { SearchResponse responseA = _searchClient.search(indexARequest, RequestOptions.DEFAULT); SearchResponse responseB = _searchClient.search(indexBRequest, RequestOptions.DEFAULT); - Set<String> actual = Arrays.stream(responseB.getHits().getHits()) - .map(SearchHit::getId).collect(Collectors.toSet()); + Set<String> actual = + Arrays.stream(responseB.getHits().getHits()) + .map(SearchHit::getId) + .collect(Collectors.toSet()); - log.error("Missing {}", Arrays.stream(responseA.getHits().getHits()) + log.error( + "Missing {}", + Arrays.stream(responseA.getHits().getHits()) .filter(doc -> !actual.contains(doc.getId())) - .map(SearchHit::getSourceAsString).collect(Collectors.toSet())); + .map(SearchHit::getSourceAsString) + .collect(Collectors.toSet())); } catch (IOException e) { throw new RuntimeException(e); } @@ -483,7 +581,10 @@ private void diff(String indexA, String indexB, long maxDocs) { } private long getCount(@Nonnull String indexName) throws IOException { - return _searchClient.count(new CountRequest(indexName).query(QueryBuilders.matchAllQuery()), RequestOptions.DEFAULT) + return _searchClient + .count( + new CountRequest(indexName).query(QueryBuilders.matchAllQuery()), + RequestOptions.DEFAULT) .getCount(); } @@ -496,30 +597,48 @@ private void createIndex(String indexName, ReindexConfig state) throws IOExcepti log.info("Created index {}", indexName); } - public static void cleanIndex(RestHighLevelClient searchClient, ElasticSearchConfiguration esConfig, ReindexConfig indexState) { - log.info("Checking for orphan index pattern {} older than {} {}", indexState.indexPattern(), - esConfig.getBuildIndices().getRetentionValue(), - esConfig.getBuildIndices().getRetentionUnit()); - - getOrphanedIndices(searchClient, esConfig, indexState).forEach(orphanIndex -> { - log.warn("Deleting orphan index {}.", orphanIndex); - try { - searchClient.indices().delete(new DeleteIndexRequest().indices(orphanIndex), RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + public static void cleanIndex( + RestHighLevelClient searchClient, + ElasticSearchConfiguration esConfig, + ReindexConfig indexState) { + log.info( + "Checking for orphan index pattern {} older than {} {}", + indexState.indexPattern(), + esConfig.getBuildIndices().getRetentionValue(), + esConfig.getBuildIndices().getRetentionUnit()); + + 
getOrphanedIndices(searchClient, esConfig, indexState) + .forEach( + orphanIndex -> { + log.warn("Deleting orphan index {}.", orphanIndex); + try { + searchClient + .indices() + .delete(new DeleteIndexRequest().indices(orphanIndex), RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } - private static List<String> getOrphanedIndices(RestHighLevelClient searchClient, ElasticSearchConfiguration esConfig, - ReindexConfig indexState) { + private static List<String> getOrphanedIndices( + RestHighLevelClient searchClient, + ElasticSearchConfiguration esConfig, + ReindexConfig indexState) { List<String> orphanedIndices = new ArrayList<>(); try { - Date retentionDate = Date.from(Instant.now() - .minus(Duration.of(esConfig.getBuildIndices().getRetentionValue(), - ChronoUnit.valueOf(esConfig.getBuildIndices().getRetentionUnit())))); - - GetIndexResponse response = searchClient.indices().get(new GetIndexRequest(indexState.indexCleanPattern()), RequestOptions.DEFAULT); + Date retentionDate = + Date.from( + Instant.now() + .minus( + Duration.of( + esConfig.getBuildIndices().getRetentionValue(), + ChronoUnit.valueOf(esConfig.getBuildIndices().getRetentionUnit())))); + + GetIndexResponse response = + searchClient + .indices() + .get(new GetIndexRequest(indexState.indexCleanPattern()), RequestOptions.DEFAULT); for (String index : response.getIndices()) { var creationDateStr = response.getSetting(index, "index.creation_date"); @@ -530,7 +649,8 @@ private static List<String> getOrphanedIndices(RestHighLevelClient searchClient, continue; } - if (response.getAliases().containsKey(index) && response.getAliases().get(index).size() == 0) { + if (response.getAliases().containsKey(index) + && response.getAliases().get(index).size() == 0) { log.info("Index {} is orphaned", index); orphanedIndices.add(index); } @@ -539,7 +659,9 @@ private static List<String> getOrphanedIndices(RestHighLevelClient searchClient, if (e.getMessage().contains("index_not_found_exception")) { log.info("No orphaned indices found with pattern {}", indexState.indexCleanPattern()); } else { - log.error("An error occurred when trying to identify orphaned indices. Exception: {}", e.getMessage()); + log.error( + "An error occurred when trying to identify orphaned indices. 
Exception: {}", + e.getMessage()); } } return orphanedIndices; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java index 56cb26b09dc33..4489c661bb2ed 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java @@ -3,50 +3,50 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; - import java.io.IOException; import java.util.List; import java.util.Map; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @RequiredArgsConstructor @Slf4j public class EntityIndexBuilders implements ElasticSearchIndexed { - private final ESIndexBuilder indexBuilder; - private final EntityRegistry entityRegistry; - private final IndexConvention indexConvention; - private final SettingsBuilder settingsBuilder; - - public ESIndexBuilder getIndexBuilder() { - return indexBuilder; + private final ESIndexBuilder indexBuilder; + private final EntityRegistry entityRegistry; + private final IndexConvention indexConvention; + private final SettingsBuilder settingsBuilder; + + public ESIndexBuilder getIndexBuilder() { + return indexBuilder; + } + + @Override + public void reindexAll() { + for (ReindexConfig config : buildReindexConfigs()) { + try { + indexBuilder.buildIndex(config); + } catch (IOException e) { + throw new RuntimeException(e); + } } - - @Override - public void reindexAll() { - for (ReindexConfig config : buildReindexConfigs()) { - try { - indexBuilder.buildIndex(config); - } catch (IOException e) { + } + + @Override + public List<ReindexConfig> buildReindexConfigs() { + Map<String, Object> settings = settingsBuilder.getSettings(); + return entityRegistry.getEntitySpecs().values().stream() + .map( + entitySpec -> { + try { + Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec); + return indexBuilder.buildReindexState( + indexConvention.getIndexName(entitySpec), mappings, settings); + } catch (IOException e) { throw new RuntimeException(e); - } - } - } - - @Override - public List<ReindexConfig> buildReindexConfigs() { - Map<String, Object> settings = settingsBuilder.getSettings(); - return entityRegistry.getEntitySpecs().values().stream().map(entitySpec -> { - try { - Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec); - return indexBuilder.buildReindexState(indexConvention.getIndexName(entitySpec), mappings, settings); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList()); - } + } + }) + .collect(Collectors.toList()); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index 13a0f57ccea99..f85a0dcb06a07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static 
com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchScoreFieldSpec; @@ -14,20 +16,19 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; - - @Slf4j public class MappingsBuilder { - private static final Map<String, String> PARTIAL_NGRAM_CONFIG = ImmutableMap.of( + private static final Map<String, String> PARTIAL_NGRAM_CONFIG = + ImmutableMap.of( TYPE, "search_as_you_type", MAX_SHINGLE_SIZE, "4", DOC_VALUES, "false"); - public static Map<String, String> getPartialNgramConfigWithOverrides(Map<String, String> overrides) { + public static Map<String, String> getPartialNgramConfigWithOverrides( + Map<String, String> overrides) { return Stream.concat(PARTIAL_NGRAM_CONFIG.entrySet().stream(), overrides.entrySet().stream()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } public static final Map<String, String> KEYWORD_TYPE_MAP = ImmutableMap.of(TYPE, KEYWORD); @@ -45,16 +46,19 @@ public static Map<String, String> getPartialNgramConfigWithOverrides(Map<String, public static final String PROPERTIES = "properties"; - private MappingsBuilder() { - } + private MappingsBuilder() {} public static Map<String, Object> getMappings(@Nonnull final EntitySpec entitySpec) { Map<String, Object> mappings = new HashMap<>(); - entitySpec.getSearchableFieldSpecs() + entitySpec + .getSearchableFieldSpecs() .forEach(searchableFieldSpec -> mappings.putAll(getMappingsForField(searchableFieldSpec))); - entitySpec.getSearchScoreFieldSpecs() - .forEach(searchScoreFieldSpec -> mappings.putAll(getMappingsForSearchScoreField(searchScoreFieldSpec))); + entitySpec + .getSearchScoreFieldSpecs() + .forEach( + searchScoreFieldSpec -> + mappings.putAll(getMappingsForSearchScoreField(searchScoreFieldSpec))); // Fixed fields mappings.put("urn", getMappingsForUrn()); @@ -65,64 +69,70 @@ public static Map<String, Object> getMappings(@Nonnull final EntitySpec entitySp private static Map<String, Object> getMappingsForUrn() { Map<String, Object> subFields = new HashMap<>(); - subFields.put(DELIMITED, ImmutableMap.of( + subFields.put( + DELIMITED, + ImmutableMap.of( TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, URN_ANALYZER, SEARCH_ANALYZER, URN_SEARCH_ANALYZER, - SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER) - ); - subFields.put(NGRAM, getPartialNgramConfigWithOverrides( - ImmutableMap.of( - ANALYZER, PARTIAL_URN_COMPONENT - ) - )); + SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER)); + subFields.put( + NGRAM, + getPartialNgramConfigWithOverrides(ImmutableMap.of(ANALYZER, PARTIAL_URN_COMPONENT))); return ImmutableMap.<String, Object>builder() - .put(TYPE, ESUtils.KEYWORD_FIELD_TYPE) - .put(FIELDS, subFields) - .build(); + .put(TYPE, ESUtils.KEYWORD_FIELD_TYPE) + .put(FIELDS, subFields) + .build(); } private static Map<String, Object> getMappingsForRunId() { return ImmutableMap.<String, Object>builder().put(TYPE, ESUtils.KEYWORD_FIELD_TYPE).build(); } - private static Map<String, Object> getMappingsForField(@Nonnull final SearchableFieldSpec searchableFieldSpec) { + private static Map<String, Object> getMappingsForField( + @Nonnull final SearchableFieldSpec searchableFieldSpec) { FieldType fieldType = searchableFieldSpec.getSearchableAnnotation().getFieldType(); Map<String, Object> mappings 
= new HashMap<>(); Map<String, Object> mappingForField = new HashMap<>(); if (fieldType == FieldType.KEYWORD) { mappingForField.putAll(getMappingsForKeyword()); - } else if (fieldType == FieldType.TEXT || fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) { + } else if (fieldType == FieldType.TEXT + || fieldType == FieldType.TEXT_PARTIAL + || fieldType == FieldType.WORD_GRAM) { mappingForField.putAll(getMappingsForSearchText(fieldType)); } else if (fieldType == FieldType.BROWSE_PATH) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); - mappingForField.put(FIELDS, - ImmutableMap.of(LENGTH, ImmutableMap.of( - TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, - ANALYZER, SLASH_PATTERN_ANALYZER))); + mappingForField.put( + FIELDS, + ImmutableMap.of( + LENGTH, + ImmutableMap.of( + TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, ANALYZER, SLASH_PATTERN_ANALYZER))); mappingForField.put(ANALYZER, BROWSE_PATH_HIERARCHY_ANALYZER); mappingForField.put(FIELDDATA, true); } else if (fieldType == FieldType.BROWSE_PATH_V2) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); - mappingForField.put(FIELDS, - ImmutableMap.of(LENGTH, ImmutableMap.of( - TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, - ANALYZER, UNIT_SEPARATOR_PATTERN_ANALYZER))); + mappingForField.put( + FIELDS, + ImmutableMap.of( + LENGTH, + ImmutableMap.of( + TYPE, + ESUtils.TOKEN_COUNT_FIELD_TYPE, + ANALYZER, + UNIT_SEPARATOR_PATTERN_ANALYZER))); mappingForField.put(ANALYZER, BROWSE_PATH_V2_HIERARCHY_ANALYZER); mappingForField.put(FIELDDATA, true); - } else if (fieldType == FieldType.URN || fieldType == FieldType.URN_PARTIAL) { + } else if (fieldType == FieldType.URN || fieldType == FieldType.URN_PARTIAL) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); mappingForField.put(ANALYZER, URN_ANALYZER); mappingForField.put(SEARCH_ANALYZER, URN_SEARCH_ANALYZER); mappingForField.put(SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER); Map<String, Object> subFields = new HashMap<>(); if (fieldType == FieldType.URN_PARTIAL) { - subFields.put(NGRAM, getPartialNgramConfigWithOverrides( - Map.of( - ANALYZER, PARTIAL_URN_COMPONENT - ) - )); + subFields.put( + NGRAM, getPartialNgramConfigWithOverrides(Map.of(ANALYZER, PARTIAL_URN_COMPONENT))); } subFields.put(KEYWORD, KEYWORD_TYPE_MAP); mappingForField.put(FIELDS, subFields); @@ -141,12 +151,17 @@ private static Map<String, Object> getMappingsForField(@Nonnull final Searchable } mappings.put(searchableFieldSpec.getSearchableAnnotation().getFieldName(), mappingForField); - searchableFieldSpec.getSearchableAnnotation() + searchableFieldSpec + .getSearchableAnnotation() .getHasValuesFieldName() - .ifPresent(fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.BOOLEAN_FIELD_TYPE))); - searchableFieldSpec.getSearchableAnnotation() + .ifPresent( + fieldName -> + mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.BOOLEAN_FIELD_TYPE))); + searchableFieldSpec + .getSearchableAnnotation() .getNumValuesFieldName() - .ifPresent(fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.LONG_FIELD_TYPE))); + .ifPresent( + fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.LONG_FIELD_TYPE))); mappings.putAll(getMappingsForFieldNameAliases(searchableFieldSpec)); return mappings; @@ -167,26 +182,25 @@ private static Map<String, Object> getMappingsForSearchText(FieldType fieldType) mappingForField.put(NORMALIZER, KEYWORD_NORMALIZER); Map<String, Object> subFields = new HashMap<>(); if (fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) { - subFields.put(NGRAM, 
getPartialNgramConfigWithOverrides( - ImmutableMap.of( - ANALYZER, PARTIAL_ANALYZER - ) - )); + subFields.put( + NGRAM, getPartialNgramConfigWithOverrides(ImmutableMap.of(ANALYZER, PARTIAL_ANALYZER))); if (fieldType == FieldType.WORD_GRAM) { - for (Map.Entry<String, String> entry : Map.of( - WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER, - WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER, - WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER).entrySet()) { + for (Map.Entry<String, String> entry : + Map.of( + WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER, + WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER, + WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER) + .entrySet()) { String fieldName = entry.getKey(); String analyzerName = entry.getValue(); - subFields.put(fieldName, ImmutableMap.of( - TYPE, ESUtils.TEXT_FIELD_TYPE, - ANALYZER, analyzerName - )); + subFields.put( + fieldName, ImmutableMap.of(TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, analyzerName)); } } } - subFields.put(DELIMITED, ImmutableMap.of( + subFields.put( + DELIMITED, + ImmutableMap.of( TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, TEXT_ANALYZER, SEARCH_ANALYZER, TEXT_SEARCH_ANALYZER, @@ -199,19 +213,23 @@ private static Map<String, Object> getMappingsForSearchText(FieldType fieldType) private static Map<String, Object> getMappingsForSearchScoreField( @Nonnull final SearchScoreFieldSpec searchScoreFieldSpec) { - return ImmutableMap.of(searchScoreFieldSpec.getSearchScoreAnnotation().getFieldName(), + return ImmutableMap.of( + searchScoreFieldSpec.getSearchScoreAnnotation().getFieldName(), ImmutableMap.of(TYPE, ESUtils.DOUBLE_FIELD_TYPE)); } - private static Map<String, Object> getMappingsForFieldNameAliases(@Nonnull final SearchableFieldSpec searchableFieldSpec) { + private static Map<String, Object> getMappingsForFieldNameAliases( + @Nonnull final SearchableFieldSpec searchableFieldSpec) { Map<String, Object> mappings = new HashMap<>(); - List<String> fieldNameAliases = searchableFieldSpec.getSearchableAnnotation().getFieldNameAliases(); - fieldNameAliases.forEach(alias -> { - Map<String, Object> aliasMappings = new HashMap<>(); - aliasMappings.put(TYPE, ALIAS); - aliasMappings.put(PATH, searchableFieldSpec.getSearchableAnnotation().getFieldName()); - mappings.put(alias, aliasMappings); - }); + List<String> fieldNameAliases = + searchableFieldSpec.getSearchableAnnotation().getFieldNameAliases(); + fieldNameAliases.forEach( + alias -> { + Map<String, Object> aliasMappings = new HashMap<>(); + aliasMappings.put(TYPE, ALIAS); + aliasMappings.put(PATH, searchableFieldSpec.getSearchableAnnotation().getFieldName()); + mappings.put(alias, aliasMappings); + }); return mappings; } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java index 8b8a48f5d9cda..e3155c9f943cc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java @@ -1,256 +1,298 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; 
-import lombok.Builder; -import lombok.Getter; -import lombok.experimental.Accessors; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.common.settings.Settings; - import java.util.List; import java.util.Map; import java.util.Objects; import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static com.linkedin.metadata.Constants.*; - +import lombok.Builder; +import lombok.Getter; +import lombok.experimental.Accessors; +import lombok.extern.slf4j.Slf4j; +import org.opensearch.common.settings.Settings; @Slf4j @Builder @Getter @Accessors(fluent = true) public class ReindexConfig { - public final static ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - /* - Most index settings are default values and populated by Elastic. This list is an include list to determine which - settings we care about when a difference is present. - */ - public static final List<String> SETTINGS_DYNAMIC = ImmutableList.of("number_of_replicas", "refresh_interval"); - // These setting require reindex - public static final List<String> SETTINGS_STATIC = ImmutableList.of("number_of_shards"); - public static final List<String> SETTINGS = Stream.concat( - SETTINGS_DYNAMIC.stream(), SETTINGS_STATIC.stream()).collect(Collectors.toList()); + public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - final private String name; - final private boolean exists; - final private Settings currentSettings; - final private Map<String, Object> targetSettings; - final private Map<String, Object> currentMappings; - final private Map<String, Object> targetMappings; - final private boolean enableIndexMappingsReindex; - final private boolean enableIndexSettingsReindex; - final private String version; + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } - /* Calculated */ - final private boolean requiresReindex; - final private boolean requiresApplySettings; - final private boolean requiresApplyMappings; - final private boolean isPureMappingsAddition; - final private boolean isSettingsReindex; + /* + Most index settings are default values and populated by Elastic. This list is an include list to determine which + settings we care about when a difference is present. 
+ */ + public static final List<String> SETTINGS_DYNAMIC = + ImmutableList.of("number_of_replicas", "refresh_interval"); + // These setting require reindex + public static final List<String> SETTINGS_STATIC = ImmutableList.of("number_of_shards"); + public static final List<String> SETTINGS = + Stream.concat(SETTINGS_DYNAMIC.stream(), SETTINGS_STATIC.stream()) + .collect(Collectors.toList()); - public static ReindexConfigBuilder builder() { - return new CalculatedBuilder(); - } + private final String name; + private final boolean exists; + private final Settings currentSettings; + private final Map<String, Object> targetSettings; + private final Map<String, Object> currentMappings; + private final Map<String, Object> targetMappings; + private final boolean enableIndexMappingsReindex; + private final boolean enableIndexSettingsReindex; + private final String version; - public static class ReindexConfigBuilder { - // hide calculated fields - private ReindexConfigBuilder requiresReindex(boolean ignored) { - return this; - } - private ReindexConfigBuilder requiresApplySettings(boolean ignored) { - return this; - } - private ReindexConfigBuilder requiresApplyMappings(boolean ignored) { - return this; - } - private ReindexConfigBuilder isPureMappingsAddition(boolean ignored) { - return this; - } - private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) { - return this; - } + /* Calculated */ + private final boolean requiresReindex; + private final boolean requiresApplySettings; + private final boolean requiresApplyMappings; + private final boolean isPureMappingsAddition; + private final boolean isSettingsReindex; - // ensure sorted - public ReindexConfigBuilder currentMappings(Map<String, Object> currentMappings) { - this.currentMappings = sortMap(currentMappings); - return this; - } - public ReindexConfigBuilder targetMappings(Map<String, Object> targetMappings) { - this.targetMappings = sortMap(targetMappings); - return this; - } + public static ReindexConfigBuilder builder() { + return new CalculatedBuilder(); + } - private static TreeMap<String, Object> sortMap(Map<String, Object> input) { - return input.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> { - if (e.getValue() instanceof Map) { - return sortMap((Map<String, Object>) e.getValue()); - } else { - return String.valueOf(e.getValue()); - } - }, - (oldValue, newValue) -> newValue, TreeMap::new)); - } + public static class ReindexConfigBuilder { + // hide calculated fields + private ReindexConfigBuilder requiresReindex(boolean ignored) { + return this; } - /** - * Implement calculated fields - */ - public String indexPattern() { - return name + "*"; + private ReindexConfigBuilder requiresApplySettings(boolean ignored) { + return this; } - public String indexCleanPattern() { - return name + "_*"; + private ReindexConfigBuilder requiresApplyMappings(boolean ignored) { + return this; } - private static class CalculatedBuilder extends ReindexConfigBuilder { - @Override - public ReindexConfig build() { - if (super.exists) { - /* Consider mapping changes */ - MapDifference<String, Object> mappingsDiff = Maps.difference( - getOrDefault(super.currentMappings, List.of("properties")), - getOrDefault(super.targetMappings, List.of("properties"))); - super.requiresApplyMappings = !mappingsDiff.entriesDiffering().isEmpty() - || !mappingsDiff.entriesOnlyOnRight().isEmpty(); - super.isPureMappingsAddition = super.requiresApplyMappings - && mappingsDiff.entriesDiffering().isEmpty() - && 
!mappingsDiff.entriesOnlyOnRight().isEmpty(); + private ReindexConfigBuilder isPureMappingsAddition(boolean ignored) { + return this; + } - if (super.requiresApplyMappings && super.isPureMappingsAddition) { - log.info("Index: {} - New fields have been added to index. Adding: {}", - super.name, mappingsDiff.entriesOnlyOnRight()); - } else if (super.requiresApplyMappings) { - log.info("Index: {} - There's diff between new mappings (left) and old mappings (right): {}", - super.name, mappingsDiff.entriesDiffering()); - } + private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) { + return this; + } - /* Consider analysis and settings changes */ - super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual(); - super.isSettingsReindex = isSettingsReindexRequired(); + // ensure sorted + public ReindexConfigBuilder currentMappings(Map<String, Object> currentMappings) { + this.currentMappings = sortMap(currentMappings); + return this; + } - /* Determine reindexing required - some settings and mappings do not require reindex, analysis always does */ - if (super.requiresApplyMappings && !super.isPureMappingsAddition) { - if (super.enableIndexMappingsReindex) { - super.requiresReindex = true; - } else { - log.warn("Index: {} - There's diff between new mappings, however reindexing is DISABLED.", super.name); - } - } - if (super.isSettingsReindex) { - try { - if (!isAnalysisEqual()) { - log.info("Index: {} - There's an update to `analysis` settings that requires reindexing. Target: {} Current: {}", - super.name, OBJECT_MAPPER.writeValueAsString(super.targetSettings), super.currentSettings); - } - if (!isSettingsEqual()) { - log.info("Index: {} - There's an update to settings that requires reindexing. Target: {} Current: {}", - super.name, OBJECT_MAPPER.writeValueAsString(super.targetSettings), super.currentSettings); - } - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - if (super.enableIndexSettingsReindex) { - super.requiresReindex = true; + public ReindexConfigBuilder targetMappings(Map<String, Object> targetMappings) { + this.targetMappings = sortMap(targetMappings); + return this; + } + + private static TreeMap<String, Object> sortMap(Map<String, Object> input) { + return input.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> { + if (e.getValue() instanceof Map) { + return sortMap((Map<String, Object>) e.getValue()); } else { - log.warn("Index: {} - There's an update to settings that requires reindexing, however reindexing is DISABLED", super.name); + return String.valueOf(e.getValue()); } - } - } - return super.build(); - } + }, + (oldValue, newValue) -> newValue, + TreeMap::new)); + } + } - private static TreeMap<String, Object> getOrDefault(Map<String, Object> map, List<String> path) { - if (map == null) { - return new TreeMap<>(); - } + /** Implement calculated fields */ + public String indexPattern() { + return name + "*"; + } - TreeMap<String, Object> item = (TreeMap<String, Object>) map.getOrDefault(path.get(0), new TreeMap()); - if (path.size() == 1) { - return item; - } else { - return getOrDefault(item, path.subList(1, path.size())); - } + public String indexCleanPattern() { + return name + "_*"; + } + + private static class CalculatedBuilder extends ReindexConfigBuilder { + @Override + public ReindexConfig build() { + if (super.exists) { + /* Consider mapping changes */ + MapDifference<String, Object> mappingsDiff = + Maps.difference( + getOrDefault(super.currentMappings, List.of("properties")), + 
getOrDefault(super.targetMappings, List.of("properties"))); + super.requiresApplyMappings = + !mappingsDiff.entriesDiffering().isEmpty() + || !mappingsDiff.entriesOnlyOnRight().isEmpty(); + super.isPureMappingsAddition = + super.requiresApplyMappings + && mappingsDiff.entriesDiffering().isEmpty() + && !mappingsDiff.entriesOnlyOnRight().isEmpty(); + + if (super.requiresApplyMappings && super.isPureMappingsAddition) { + log.info( + "Index: {} - New fields have been added to index. Adding: {}", + super.name, + mappingsDiff.entriesOnlyOnRight()); + } else if (super.requiresApplyMappings) { + log.info( + "Index: {} - There's diff between new mappings (left) and old mappings (right): {}", + super.name, + mappingsDiff.entriesDiffering()); } - private boolean isAnalysisEqual() { - if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { - return true; + /* Consider analysis and settings changes */ + super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual(); + super.isSettingsReindex = isSettingsReindexRequired(); + + /* Determine reindexing required - some settings and mappings do not require reindex, analysis always does */ + if (super.requiresApplyMappings && !super.isPureMappingsAddition) { + if (super.enableIndexMappingsReindex) { + super.requiresReindex = true; + } else { + log.warn( + "Index: {} - There's diff between new mappings, however reindexing is DISABLED.", + super.name); + } + } + if (super.isSettingsReindex) { + try { + if (!isAnalysisEqual()) { + log.info( + "Index: {} - There's an update to `analysis` settings that requires reindexing. Target: {} Current: {}", + super.name, + OBJECT_MAPPER.writeValueAsString(super.targetSettings), + super.currentSettings); } - Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); - if (!indexSettings.containsKey("analysis")) { - return true; + if (!isSettingsEqual()) { + log.info( + "Index: {} - There's an update to settings that requires reindexing. Target: {} Current: {}", + super.name, + OBJECT_MAPPER.writeValueAsString(super.targetSettings), + super.currentSettings); } - // Compare analysis section - Map<String, Object> newAnalysis = (Map<String, Object>) indexSettings.get("analysis"); - Settings oldAnalysis = super.currentSettings.getByPrefix("index.analysis."); - return equalsGroup(newAnalysis, oldAnalysis); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + if (super.enableIndexSettingsReindex) { + super.requiresReindex = true; + } else { + log.warn( + "Index: {} - There's an update to settings that requires reindexing, however reindexing is DISABLED", + super.name); + } } + } + return super.build(); + } - private boolean isSettingsEqual() { - if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { - return true; - } - Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); - return SETTINGS.stream() - .allMatch(settingKey -> Objects.equals(indexSettings.get(settingKey).toString(), - super.currentSettings.get("index." 
+ settingKey))); - } + private static TreeMap<String, Object> getOrDefault( + Map<String, Object> map, List<String> path) { + if (map == null) { + return new TreeMap<>(); + } - private boolean isSettingsReindexRequired() { - if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { - return false; - } - Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); + TreeMap<String, Object> item = + (TreeMap<String, Object>) map.getOrDefault(path.get(0), new TreeMap()); + if (path.size() == 1) { + return item; + } else { + return getOrDefault(item, path.subList(1, path.size())); + } + } - if (SETTINGS_STATIC.stream().anyMatch(settingKey -> - !Objects.equals(indexSettings.get(settingKey).toString(), super.currentSettings.get("index." + settingKey)))) { - return true; - } + private boolean isAnalysisEqual() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return true; + } + Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); + if (!indexSettings.containsKey("analysis")) { + return true; + } + // Compare analysis section + Map<String, Object> newAnalysis = (Map<String, Object>) indexSettings.get("analysis"); + Settings oldAnalysis = super.currentSettings.getByPrefix("index.analysis."); + return equalsGroup(newAnalysis, oldAnalysis); + } - return indexSettings.containsKey("analysis") - && !equalsGroup((Map<String, Object>) indexSettings.get("analysis"), - super.currentSettings.getByPrefix("index.analysis.")); - } + private boolean isSettingsEqual() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return true; + } + Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); + return SETTINGS.stream() + .allMatch( + settingKey -> + Objects.equals( + indexSettings.get(settingKey).toString(), + super.currentSettings.get("index." + settingKey))); } - private static boolean equalsGroup(Map<String, Object> newSettings, Settings oldSettings) { - if (!newSettings.keySet().equals(oldSettings.names())) { - return false; - } + private boolean isSettingsReindexRequired() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return false; + } + Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index"); - for (String key : newSettings.keySet()) { - // Skip urn stop filter, as adding new entities will cause this filter to change - // No need to reindex every time a new entity is added - if (key.equals("urn_stop_filter")) { - continue; - } - if (newSettings.get(key) instanceof Map) { - if (!equalsGroup((Map<String, Object>) newSettings.get(key), oldSettings.getByPrefix(key + "."))) { - return false; - } - } else if (newSettings.get(key) instanceof List) { - if (!newSettings.get(key).equals(oldSettings.getAsList(key))) { - return false; - } - } else { - if (!newSettings.get(key).toString().equals(oldSettings.get(key))) { - return false; - } - } - } + if (SETTINGS_STATIC.stream() + .anyMatch( + settingKey -> + !Objects.equals( + indexSettings.get(settingKey).toString(), + super.currentSettings.get("index." 
+ settingKey)))) { return true; + } + + return indexSettings.containsKey("analysis") + && !equalsGroup( + (Map<String, Object>) indexSettings.get("analysis"), + super.currentSettings.getByPrefix("index.analysis.")); + } + } + + private static boolean equalsGroup(Map<String, Object> newSettings, Settings oldSettings) { + if (!newSettings.keySet().equals(oldSettings.names())) { + return false; + } + + for (String key : newSettings.keySet()) { + // Skip urn stop filter, as adding new entities will cause this filter to change + // No need to reindex every time a new entity is added + if (key.equals("urn_stop_filter")) { + continue; + } + if (newSettings.get(key) instanceof Map) { + if (!equalsGroup( + (Map<String, Object>) newSettings.get(key), oldSettings.getByPrefix(key + "."))) { + return false; + } + } else if (newSettings.get(key) instanceof List) { + if (!newSettings.get(key).equals(oldSettings.getAsList(key))) { + return false; + } + } else { + if (!newSettings.get(key).toString().equals(oldSettings.get(key))) { + return false; + } + } } + return true; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java index e180c8296b48d..d1eedbbce0495 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java @@ -2,22 +2,18 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import java.util.List; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.lang3.StringUtils; -import org.springframework.core.io.Resource; -import org.springframework.core.io.support.PathMatchingResourcePatternResolver; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.lang3.StringUtils; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; - -/** - * Builder for generating settings for elasticsearch indices - */ +/** Builder for generating settings for elasticsearch indices */ public class SettingsBuilder { // ElasticSearch Property Map Keys @@ -42,7 +38,7 @@ public class SettingsBuilder { public static final String REPLACEMENT = "replacement"; public static final String PRESERVE_ORIGINAL = "preserve_original"; public static final String SEARCH_ANALYZER = "search_analyzer"; - public static final String SEARCH_QUOTE_ANALYZER = "search_quote_analyzer"; + public static final String SEARCH_QUOTE_ANALYZER = "search_quote_analyzer"; public static final String CUSTOM_QUOTE_ANALYZER = "quote_analyzer"; public static final String SPLIT_ON_NUMERICS = "split_on_numerics"; public static final String SPLIT_ON_CASE_CHANGE = "split_on_case_change"; @@ -98,9 +94,10 @@ public class SettingsBuilder { public static final String TRIM = "trim"; // MultiFilters - public static final String MULTIFILTER_GRAPH_1 = String.join(",", LOWERCASE, STICKY_DELIMITER_GRAPH); - public static final String MULTIFILTER_GRAPH_2 = String.join(",", LOWERCASE, ALPHANUM_SPACE_ONLY, - DEFAULT_SYN_GRAPH); + public static final String MULTIFILTER_GRAPH_1 = + String.join(",", LOWERCASE, 
STICKY_DELIMITER_GRAPH); + public static final String MULTIFILTER_GRAPH_2 = + String.join(",", LOWERCASE, ALPHANUM_SPACE_ONLY, DEFAULT_SYN_GRAPH); public static final String MULTIFILTER_1 = String.join(",", MULTIFILTER_GRAPH_1, FLATTEN_GRAPH); public static final String MULTIFILTER_2 = String.join(",", MULTIFILTER_GRAPH_2, FLATTEN_GRAPH); @@ -117,20 +114,15 @@ public class SettingsBuilder { public static final String UNIT_SEPARATOR_TOKENIZER = "unit_separator_tokenizer"; public static final String WORD_GRAM_TOKENIZER = "word_gram_tokenizer"; // Do not remove the space, needed for multi-term synonyms - public static final List<String> ALPHANUM_SPACE_PATTERNS = ImmutableList.of( - "([a-z0-9 _-]{2,})", - "([a-z0-9 ]{2,})", - "\\\"([^\\\"]*)\\\"" - ); + public static final List<String> ALPHANUM_SPACE_PATTERNS = + ImmutableList.of("([a-z0-9 _-]{2,})", "([a-z0-9 ]{2,})", "\\\"([^\\\"]*)\\\""); public static final List<String> DATAHUB_STOP_WORDS_LIST = ImmutableList.of("urn", "li"); - public static final List<String> WORD_DELIMITER_TYPE_TABLE = ImmutableList.of( - ": => SUBWORD_DELIM", - "_ => ALPHANUM", - "- => ALPHA" - ); - public static final List<String> INDEX_TOKEN_FILTERS = ImmutableList.of( + public static final List<String> WORD_DELIMITER_TYPE_TABLE = + ImmutableList.of(": => SUBWORD_DELIM", "_ => ALPHANUM", "- => ALPHA"); + public static final List<String> INDEX_TOKEN_FILTERS = + ImmutableList.of( ASCII_FOLDING, MULTIFILTER, TRIM, @@ -143,7 +135,8 @@ public class SettingsBuilder { UNIQUE, MIN_LENGTH); - public static final List<String> SEARCH_TOKEN_FILTERS = ImmutableList.of( + public static final List<String> SEARCH_TOKEN_FILTERS = + ImmutableList.of( ASCII_FOLDING, MULTIFILTER_GRAPH, TRIM, @@ -156,25 +149,15 @@ public class SettingsBuilder { UNIQUE, MIN_LENGTH); - public static final List<String> QUOTED_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - LOWERCASE, - REMOVE_QUOTES, - DATAHUB_STOP_WORDS, - STOP, - MIN_LENGTH); + public static final List<String> QUOTED_TOKEN_FILTERS = + ImmutableList.of( + ASCII_FOLDING, LOWERCASE, REMOVE_QUOTES, DATAHUB_STOP_WORDS, STOP, MIN_LENGTH); - public static final List<String> PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - AUTOCOMPLETE_CUSTOM_DELIMITER, - LOWERCASE); + public static final List<String> PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS = + ImmutableList.of(ASCII_FOLDING, AUTOCOMPLETE_CUSTOM_DELIMITER, LOWERCASE); - public static final List<String> WORD_GRAM_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - LOWERCASE, - TRIM, - REMOVE_QUOTES - ); + public static final List<String> WORD_GRAM_TOKEN_FILTERS = + ImmutableList.of(ASCII_FOLDING, LOWERCASE, TRIM, REMOVE_QUOTES); public final Map<String, Object> settings; @@ -193,7 +176,9 @@ public Map<String, Object> getSettings() { private static Map<String, Object> buildSettings(String mainTokenizer) throws IOException { ImmutableMap.Builder<String, Object> settings = ImmutableMap.builder(); settings.put(MAX_NGRAM_DIFF, 17); - settings.put(ANALYSIS, ImmutableMap.<String, Object>builder() + settings.put( + ANALYSIS, + ImmutableMap.<String, Object>builder() .put(FILTER, buildFilters()) .put(TOKENIZER, buildTokenizers()) .put(NORMALIZER, buildNormalizers()) @@ -203,12 +188,15 @@ private static Map<String, Object> buildSettings(String mainTokenizer) throws IO } private static Map<String, Object> buildFilters() throws IOException { - PathMatchingResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver(); + PathMatchingResourcePatternResolver 
resourceResolver = + new PathMatchingResourcePatternResolver(); ImmutableMap.Builder<String, Object> filters = ImmutableMap.builder(); // Filter to split string into words - filters.put(AUTOCOMPLETE_CUSTOM_DELIMITER, ImmutableMap.<String, Object>builder() + filters.put( + AUTOCOMPLETE_CUSTOM_DELIMITER, + ImmutableMap.<String, Object>builder() .put(TYPE, WORD_DELIMITER) .put(SPLIT_ON_NUMERICS, false) .put(SPLIT_ON_CASE_CHANGE, false) @@ -216,7 +204,9 @@ private static Map<String, Object> buildFilters() throws IOException { .put(TYPE_TABLE, WORD_DELIMITER_TYPE_TABLE) .build()); - filters.put(STICKY_DELIMITER_GRAPH, ImmutableMap.<String, Object>builder() + filters.put( + STICKY_DELIMITER_GRAPH, + ImmutableMap.<String, Object>builder() .put(TYPE, WORD_DELIMITER_GRAPH) .put(SPLIT_ON_NUMERICS, false) .put(SPLIT_ON_CASE_CHANGE, false) @@ -225,22 +215,30 @@ private static Map<String, Object> buildFilters() throws IOException { .put(TYPE_TABLE, WORD_DELIMITER_TYPE_TABLE) .build()); - filters.put(DATAHUB_STOP_WORDS, ImmutableMap.<String, Object>builder() + filters.put( + DATAHUB_STOP_WORDS, + ImmutableMap.<String, Object>builder() .put(TYPE, STOP) .put(IGNORE_CASE, "true") .put(STOPWORDS, DATAHUB_STOP_WORDS_LIST) .build()); - filters.put(MIN_LENGTH, ImmutableMap.<String, Object>builder() - .put(TYPE, "length") - .put("min", "3") - .build()); + filters.put( + MIN_LENGTH, + ImmutableMap.<String, Object>builder().put(TYPE, "length").put("min", "3").build()); - Resource stemOverride = resourceResolver.getResource("classpath:elasticsearch/stem_override.txt"); - try (BufferedReader reader = new BufferedReader(new InputStreamReader(stemOverride.getInputStream()))) { - filters.put(STEM_OVERRIDE, ImmutableMap.<String, Object>builder() + Resource stemOverride = + resourceResolver.getResource("classpath:elasticsearch/stem_override.txt"); + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(stemOverride.getInputStream()))) { + filters.put( + STEM_OVERRIDE, + ImmutableMap.<String, Object>builder() .put(TYPE, "stemmer_override") - .put("rules", reader.lines() + .put( + "rules", + reader + .lines() .map(String::trim) .map(String::toLowerCase) .filter(line -> !line.isEmpty() && !line.startsWith("#")) @@ -248,42 +246,50 @@ private static Map<String, Object> buildFilters() throws IOException { .build()); } - filters.put(ALPHANUM_SPACE_ONLY, ImmutableMap.<String, Object>builder() + filters.put( + ALPHANUM_SPACE_ONLY, + ImmutableMap.<String, Object>builder() .put(TYPE, "pattern_capture") .put(PATTERNS, ALPHANUM_SPACE_PATTERNS) .build()); - filters.put(REMOVE_QUOTES, ImmutableMap.<String, Object>builder() + filters.put( + REMOVE_QUOTES, + ImmutableMap.<String, Object>builder() .put(TYPE, "pattern_replace") .put(PATTERN, "['\"]") .put(REPLACEMENT, "") .build()); // Index Time - filters.put(MULTIFILTER, ImmutableMap.<String, Object>builder() + filters.put( + MULTIFILTER, + ImmutableMap.<String, Object>builder() .put(TYPE, "multiplexer") - .put(FILTERS, ImmutableList.of( - MULTIFILTER_1, - MULTIFILTER_2 - )) + .put(FILTERS, ImmutableList.of(MULTIFILTER_1, MULTIFILTER_2)) .build()); // Search Time - filters.put(MULTIFILTER_GRAPH, ImmutableMap.<String, Object>builder() + filters.put( + MULTIFILTER_GRAPH, + ImmutableMap.<String, Object>builder() .put(TYPE, "multiplexer") - .put(FILTERS, ImmutableList.of( - MULTIFILTER_GRAPH_1, - MULTIFILTER_GRAPH_2 - )) + .put(FILTERS, ImmutableList.of(MULTIFILTER_GRAPH_1, MULTIFILTER_GRAPH_2)) .build()); Resource[] synonyms = 
resourceResolver.getResources("classpath:elasticsearch/synonyms/*.txt"); - for (Resource syn: synonyms) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(syn.getInputStream()))) { - filters.put(String.format("%s_syn_graph", FilenameUtils.getBaseName(syn.getFilename())), ImmutableMap.<String, Object>builder() + for (Resource syn : synonyms) { + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(syn.getInputStream()))) { + filters.put( + String.format("%s_syn_graph", FilenameUtils.getBaseName(syn.getFilename())), + ImmutableMap.<String, Object>builder() .put(TYPE, "synonym_graph") .put(LENIENT, "false") - .put(SYNONYMS, reader.lines() + .put( + SYNONYMS, + reader + .lines() .map(String::trim) .map(String::toLowerCase) .filter(line -> !line.isEmpty() && !line.startsWith("#")) @@ -291,15 +297,18 @@ private static Map<String, Object> buildFilters() throws IOException { .build()); } - for (Map.Entry<String, Integer> entry : Map.of(WORD_GRAM_2_FILTER, 2, WORD_GRAM_3_FILTER, 3, WORD_GRAM_4_FILTER, 4).entrySet()) { + for (Map.Entry<String, Integer> entry : + Map.of(WORD_GRAM_2_FILTER, 2, WORD_GRAM_3_FILTER, 3, WORD_GRAM_4_FILTER, 4).entrySet()) { String filterName = entry.getKey(); Integer gramSize = entry.getValue(); - filters.put(filterName, ImmutableMap.<String, Object>builder() - .put(TYPE, SHINGLE) - .put("min_shingle_size", gramSize) - .put("max_shingle_size", gramSize) - .put("output_unigrams", false) - .build()); + filters.put( + filterName, + ImmutableMap.<String, Object>builder() + .put(TYPE, SHINGLE) + .put("min_shingle_size", gramSize) + .put("max_shingle_size", gramSize) + .put("output_unigrams", false) + .build()); } } @@ -309,20 +318,16 @@ private static Map<String, Object> buildFilters() throws IOException { private static Map<String, Object> buildTokenizers() { ImmutableMap.Builder<String, Object> tokenizers = ImmutableMap.builder(); // Tokenize by slashes - tokenizers.put(SLASH_TOKENIZER, - ImmutableMap.<String, Object>builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[/]") - .build()); + tokenizers.put( + SLASH_TOKENIZER, + ImmutableMap.<String, Object>builder().put(TYPE, PATTERN).put(PATTERN, "[/]").build()); + tokenizers.put( + UNIT_SEPARATOR_TOKENIZER, + ImmutableMap.<String, Object>builder().put(TYPE, PATTERN).put(PATTERN, "[␟]").build()); - tokenizers.put(UNIT_SEPARATOR_TOKENIZER, - ImmutableMap.<String, Object>builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[␟]") - .build()); - - tokenizers.put(UNIT_SEPARATOR_PATH_TOKENIZER, + tokenizers.put( + UNIT_SEPARATOR_PATH_TOKENIZER, ImmutableMap.<String, Object>builder() .put(TYPE, PATH_HIERARCHY_TOKENIZER) .put(DELIMITER, "␟") @@ -331,16 +336,15 @@ private static Map<String, Object> buildTokenizers() { // Tokenize by most special chars // Do NOT tokenize by whitespace to keep multi-word synonyms in the same token // The split by whitespace is done later in the token filters phase - tokenizers.put(MAIN_TOKENIZER, - ImmutableMap.<String, Object>builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[(),./:]") - .build()); + tokenizers.put( + MAIN_TOKENIZER, + ImmutableMap.<String, Object>builder().put(TYPE, PATTERN).put(PATTERN, "[(),./:]").build()); // Tokenize by whitespace and most special chars for wordgrams // only split on - when not preceded by a whitespace to preserve exclusion functionality // i.e. 
"logging-events-bkcp" and "logging-events -bckp" should be handled differently - tokenizers.put(WORD_GRAM_TOKENIZER, + tokenizers.put( + WORD_GRAM_TOKENIZER, ImmutableMap.<String, Object>builder() .put(TYPE, PATTERN) .put(PATTERN, "[(),./:\\s_]|(?<=\\S)(-)") @@ -353,8 +357,11 @@ private static Map<String, Object> buildTokenizers() { private static Map<String, Object> buildNormalizers() { ImmutableMap.Builder<String, Object> normalizers = ImmutableMap.builder(); // Analyzer for partial matching (i.e. autocomplete) - Prefix matching of each token - normalizers.put(KEYWORD_NORMALIZER, - ImmutableMap.<String, Object>builder().put(FILTER, ImmutableList.of(LOWERCASE, ASCII_FOLDING)).build()); + normalizers.put( + KEYWORD_NORMALIZER, + ImmutableMap.<String, Object>builder() + .put(FILTER, ImmutableList.of(LOWERCASE, ASCII_FOLDING)) + .build()); return normalizers.build(); } @@ -364,90 +371,119 @@ private static Map<String, Object> buildAnalyzers(String mainTokenizer) { ImmutableMap.Builder<String, Object> analyzers = ImmutableMap.builder(); // Analyzer for splitting by slashes (used to get depth of browsePath) - analyzers.put(SLASH_PATTERN_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + SLASH_PATTERN_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, SLASH_TOKENIZER) .put(FILTER, ImmutableList.of(LOWERCASE)) .build()); // Analyzer for splitting by unit-separator (used to get depth of browsePathV2) - analyzers.put(UNIT_SEPARATOR_PATTERN_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + UNIT_SEPARATOR_PATTERN_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, UNIT_SEPARATOR_TOKENIZER) .put(FILTER, ImmutableList.of(LOWERCASE)) .build()); // Analyzer for matching browse path - analyzers.put(BROWSE_PATH_HIERARCHY_ANALYZER, ImmutableMap.<String, Object>builder() - .put(TOKENIZER, PATH_HIERARCHY_TOKENIZER) - .build()); + analyzers.put( + BROWSE_PATH_HIERARCHY_ANALYZER, + ImmutableMap.<String, Object>builder().put(TOKENIZER, PATH_HIERARCHY_TOKENIZER).build()); // Analyzer for matching browse path v2 - analyzers.put(BROWSE_PATH_V2_HIERARCHY_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + BROWSE_PATH_V2_HIERARCHY_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, UNIT_SEPARATOR_PATH_TOKENIZER) .build()); // Analyzer for case-insensitive exact matching - Only used when building queries - analyzers.put(KEYWORD_LOWERCASE_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + KEYWORD_LOWERCASE_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, KEYWORD_TOKENIZER) .put(FILTER, ImmutableList.of("trim", LOWERCASE, ASCII_FOLDING, SNOWBALL)) .build()); // Analyzer for quotes words - analyzers.put(CUSTOM_QUOTE_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + CUSTOM_QUOTE_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, KEYWORD_TOKENIZER) .put(FILTER, QUOTED_TOKEN_FILTERS) .build()); // Analyzer for text tokenized into words (split by spaces, periods, and slashes) - analyzers.put(TEXT_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + TEXT_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? mainTokenizer : MAIN_TOKENIZER) .put(FILTER, INDEX_TOKEN_FILTERS) .build()); - analyzers.put(TEXT_SEARCH_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + TEXT_SEARCH_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? 
mainTokenizer : MAIN_TOKENIZER) .put(FILTER, SEARCH_TOKEN_FILTERS) .build()); // Analyzer for getting urn components - analyzers.put(URN_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + URN_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, INDEX_TOKEN_FILTERS) .build()); - analyzers.put(URN_SEARCH_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + URN_SEARCH_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, SEARCH_TOKEN_FILTERS) .build()); // Support word grams - for (Map.Entry<String, String> entry : Map.of( - WORD_GRAM_2_ANALYZER, WORD_GRAM_2_FILTER, - WORD_GRAM_3_ANALYZER, WORD_GRAM_3_FILTER, - WORD_GRAM_4_ANALYZER, WORD_GRAM_4_FILTER).entrySet()) { + for (Map.Entry<String, String> entry : + Map.of( + WORD_GRAM_2_ANALYZER, WORD_GRAM_2_FILTER, + WORD_GRAM_3_ANALYZER, WORD_GRAM_3_FILTER, + WORD_GRAM_4_ANALYZER, WORD_GRAM_4_FILTER) + .entrySet()) { String analyzerName = entry.getKey(); String filterName = entry.getValue(); - analyzers.put(analyzerName, ImmutableMap.<String, Object>builder() - .put(TOKENIZER, WORD_GRAM_TOKENIZER) - .put(FILTER, ImmutableList.<Object>builder() - .addAll(WORD_GRAM_TOKEN_FILTERS) - .add(filterName).build()) - .build()); + analyzers.put( + analyzerName, + ImmutableMap.<String, Object>builder() + .put(TOKENIZER, WORD_GRAM_TOKENIZER) + .put( + FILTER, + ImmutableList.<Object>builder() + .addAll(WORD_GRAM_TOKEN_FILTERS) + .add(filterName) + .build()) + .build()); } - // For special analysis, the substitution can be read from the configuration (chinese tokenizer: ik_smart / smartCN) + // For special analysis, the substitution can be read from the configuration (chinese tokenizer: + // ik_smart / smartCN) // Analyzer for partial matching (i.e. autocomplete) - Prefix matching of each token - analyzers.put(PARTIAL_ANALYZER, ImmutableMap.<String, Object>builder() + analyzers.put( + PARTIAL_ANALYZER, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? 
mainTokenizer : MAIN_TOKENIZER) .put(FILTER, PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS) .build()); // Analyzer for partial matching urn components - analyzers.put(PARTIAL_URN_COMPONENT, ImmutableMap.<String, Object>builder() + analyzers.put( + PARTIAL_URN_COMPONENT, + ImmutableMap.<String, Object>builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS) .build()); - return analyzers.build(); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java index 5fd0a80d23c50..5ea60b24a577a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query; +import static com.linkedin.metadata.utils.SearchUtil.filterSoftDeletedByDefault; + import com.codahale.metrics.Timer; import com.datahub.util.exception.ESQueryException; import com.google.common.annotations.VisibleForTesting; @@ -54,9 +56,6 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.utils.SearchUtil.filterSoftDeletedByDefault; - - @Slf4j @RequiredArgsConstructor public class ESBrowseDAO { @@ -64,10 +63,8 @@ public class ESBrowseDAO { private final EntityRegistry entityRegistry; private final RestHighLevelClient client; private final IndexConvention indexConvention; - @Nonnull - private final SearchConfiguration searchConfiguration; - @Nullable - private final CustomSearchConfiguration customSearchConfiguration; + @Nonnull private final SearchConfiguration searchConfiguration; + @Nullable private final CustomSearchConfiguration customSearchConfiguration; private static final String BROWSE_PATH = "browsePaths"; private static final String BROWSE_PATH_DEPTH = "browsePaths.length"; @@ -107,19 +104,26 @@ private class BrowseGroupsResultV2 { * @return a {@link BrowseResult} that contains a list of groups/entities */ @Nonnull - public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, + public BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filters, + int from, int size) { final Map<String, String> requestMap = SearchUtils.getRequestMap(filters); try { - final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); + final String indexName = + indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchResponse groupsResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { groupsResponse = - client.search(constructGroupsSearchRequest(indexName, path, requestMap), RequestOptions.DEFAULT); + client.search( + constructGroupsSearchRequest(indexName, path, requestMap), RequestOptions.DEFAULT); } - final BrowseGroupsResult browseGroupsResult = extractGroupsResponse(groupsResponse, path, from, size); + final BrowseGroupsResult browseGroupsResult = + extractGroupsResponse(groupsResponse, path, from, size); final int numGroups = browseGroupsResult.getTotalGroups(); // Based on the number of groups returned, compute the from and size to query for entities @@ -131,14 +135,19 @@ public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nu final SearchResponse 
entitiesResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esEntitiesSearch").time()) { entitiesResponse = - client.search(constructEntitiesSearchRequest(indexName, path, requestMap, entityFrom, entitySize), + client.search( + constructEntitiesSearchRequest(indexName, path, requestMap, entityFrom, entitySize), RequestOptions.DEFAULT); } final int numEntities = (int) entitiesResponse.getHits().getTotalHits().value; - final List<BrowseResultEntity> browseResultEntityList = extractEntitiesResponse(entitiesResponse, path); - - return new BrowseResult().setMetadata( - new BrowseResultMetadata().setTotalNumEntities(browseGroupsResult.getTotalNumEntities()).setPath(path)) + final List<BrowseResultEntity> browseResultEntityList = + extractEntitiesResponse(entitiesResponse, path); + + return new BrowseResult() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) .setEntities(new BrowseResultEntityArray(browseResultEntityList)) .setGroups(new BrowseResultGroupArray(browseGroupsResult.getGroups())) .setNumEntities(numEntities) @@ -176,8 +185,8 @@ private AggregationBuilder buildAggregations(@Nonnull String path) { * @return {@link SearchRequest} */ @Nonnull - protected SearchRequest constructGroupsSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map<String, String> requestMap) { + protected SearchRequest constructGroupsSearchRequest( + @Nonnull String indexName, @Nonnull String path, @Nonnull Map<String, String> requestMap) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); @@ -196,8 +205,8 @@ protected SearchRequest constructGroupsSearchRequest(@Nonnull String indexName, * @return {@link QueryBuilder} */ @Nonnull - private QueryBuilder buildQueryString(@Nonnull String path, @Nonnull Map<String, String> requestMap, - boolean isGroupQuery) { + private QueryBuilder buildQueryString( + @Nonnull String path, @Nonnull Map<String, String> requestMap, boolean isGroupQuery) { final int browseDepthVal = getPathDepth(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); @@ -229,13 +238,17 @@ private QueryBuilder buildQueryString(@Nonnull String path, @Nonnull Map<String, */ @VisibleForTesting @Nonnull - SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map<String, String> requestMap, int from, int size) { + SearchRequest constructEntitiesSearchRequest( + @Nonnull String indexName, + @Nonnull String path, + @Nonnull Map<String, String> requestMap, + int from, + int size) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.from(from); searchSourceBuilder.size(size); - searchSourceBuilder.fetchSource(new String[]{BROWSE_PATH, URN}, null); + searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); searchSourceBuilder.query(buildQueryString(path, requestMap, false)); searchRequest.source(searchSourceBuilder); @@ -254,8 +267,13 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull */ @VisibleForTesting @Nonnull - SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map<String, String> requestMap, @Nullable Object[] sort, @Nullable String pitId, @Nonnull 
String keepAlive, + SearchRequest constructEntitiesSearchRequest( + @Nonnull String indexName, + @Nonnull String path, + @Nonnull Map<String, String> requestMap, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, int size) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -263,7 +281,7 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); searchSourceBuilder.size(size); - searchSourceBuilder.fetchSource(new String[]{BROWSE_PATH, URN}, null); + searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); searchSourceBuilder.query(buildQueryString(path, requestMap, false)); searchRequest.source(searchSourceBuilder); @@ -278,19 +296,24 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull * @return {@link BrowseResultMetadata} */ @Nonnull - private BrowseGroupsResult extractGroupsResponse(@Nonnull SearchResponse groupsResponse, @Nonnull String path, - int from, int size) { + private BrowseGroupsResult extractGroupsResponse( + @Nonnull SearchResponse groupsResponse, @Nonnull String path, int from, int size) { final ParsedTerms groups = groupsResponse.getAggregations().get(GROUP_AGG); - final List<BrowseResultGroup> groupsAgg = groups.getBuckets() - .stream() - .map(group -> new BrowseResultGroup().setName(getSimpleName(group.getKeyAsString())) - .setCount(group.getDocCount())) - .collect(Collectors.toList()); + final List<BrowseResultGroup> groupsAgg = + groups.getBuckets().stream() + .map( + group -> + new BrowseResultGroup() + .setName(getSimpleName(group.getKeyAsString())) + .setCount(group.getDocCount())) + .collect(Collectors.toList()); // Get the groups that are in the from to from + size range - final List<BrowseResultGroup> paginatedGroups = groupsAgg.size() <= from ? Collections.emptyList() - : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); - return new BrowseGroupsResult(paginatedGroups, groupsAgg.size(), - (int) groupsResponse.getHits().getTotalHits().value); + final List<BrowseResultGroup> paginatedGroups = + groupsAgg.size() <= from + ? 
Collections.emptyList() + : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); + return new BrowseGroupsResult( + paginatedGroups, groupsAgg.size(), (int) groupsResponse.getHits().getTotalHits().value); } /** @@ -301,18 +324,22 @@ private BrowseGroupsResult extractGroupsResponse(@Nonnull SearchResponse groupsR */ @VisibleForTesting @Nonnull - List<BrowseResultEntity> extractEntitiesResponse(@Nonnull SearchResponse entitiesResponse, - @Nonnull String currentPath) { + List<BrowseResultEntity> extractEntitiesResponse( + @Nonnull SearchResponse entitiesResponse, @Nonnull String currentPath) { final List<BrowseResultEntity> entityMetadataArray = new ArrayList<>(); - Arrays.stream(entitiesResponse.getHits().getHits()).forEach(hit -> { - try { - final List<String> allPaths = (List<String>) hit.getSourceAsMap().get(BROWSE_PATH); - entityMetadataArray.add(new BrowseResultEntity().setName((String) hit.getSourceAsMap().get(URN)) - .setUrn(Urn.createFromString((String) hit.getSourceAsMap().get(URN)))); - } catch (URISyntaxException e) { - log.error("URN is not valid: " + e.toString()); - } - }); + Arrays.stream(entitiesResponse.getHits().getHits()) + .forEach( + hit -> { + try { + final List<String> allPaths = (List<String>) hit.getSourceAsMap().get(BROWSE_PATH); + entityMetadataArray.add( + new BrowseResultEntity() + .setName((String) hit.getSourceAsMap().get(URN)) + .setUrn(Urn.createFromString((String) hit.getSourceAsMap().get(URN)))); + } catch (URISyntaxException e) { + log.error("URN is not valid: " + e.toString()); + } + }); return entityMetadataArray; } @@ -344,7 +371,8 @@ private static int getPathDepth(@Nonnull String path) { public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.source(new SearchSourceBuilder().query(QueryBuilders.termQuery(URN, urn.toString()))); + searchRequest.source( + new SearchSourceBuilder().query(QueryBuilders.termQuery(URN, urn.toString()))); final SearchHit[] searchHits; try { searchHits = client.search(searchRequest, RequestOptions.DEFAULT).getHits().getHits(); @@ -363,20 +391,32 @@ public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) return (List<String>) sourceMap.get(BROWSE_PATH); } - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count) { try { final SearchResponse groupsResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { final String finalInput = input.isEmpty() ? 
"*" : input; groupsResponse = - client.search(constructGroupsSearchRequestV2(entityName, path, filter, finalInput), RequestOptions.DEFAULT); + client.search( + constructGroupsSearchRequestV2(entityName, path, filter, finalInput), + RequestOptions.DEFAULT); } - final BrowseGroupsResultV2 browseGroupsResult = extractGroupsResponseV2(groupsResponse, path, start, count); + final BrowseGroupsResultV2 browseGroupsResult = + extractGroupsResponseV2(groupsResponse, path, start, count); final int numGroups = browseGroupsResult.getTotalGroups(); - return new BrowseResultV2().setMetadata( - new BrowseResultMetadata().setTotalNumEntities(browseGroupsResult.getTotalNumEntities()).setPath(path)) + return new BrowseResultV2() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) .setGroups(new BrowseResultGroupV2Array(browseGroupsResult.getGroups())) .setNumGroups(numGroups) .setFrom(start) @@ -388,12 +428,21 @@ public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, } @Nonnull - private SearchRequest constructGroupsSearchRequestV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input) { + private SearchRequest constructGroupsSearchRequestV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); - searchSourceBuilder.query(buildQueryStringV2(entityName, path, SearchUtil.transformFilterForEntities(filter, indexConvention), input)); + searchSourceBuilder.query( + buildQueryStringV2( + entityName, + path, + SearchUtil.transformFilterForEntities(filter, indexConvention), + input)); searchSourceBuilder.aggregation(buildAggregationsV2(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -412,21 +461,24 @@ private String getSimpleNameV2(@Nonnull String path) { return path.substring(path.lastIndexOf(BROWSE_V2_DELIMITER) + 1); } - private static int getPathDepthV2(@Nonnull String path) { return StringUtils.countMatches(path, BROWSE_V2_DELIMITER); } @Nonnull - private QueryBuilder buildQueryStringV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input) { + private QueryBuilder buildQueryStringV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { final int browseDepthVal = getPathDepthV2(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - QueryBuilder query = SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getQuery(input, false); + QueryBuilder query = + SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + .getQuery(input, false); queryBuilder.must(query); filterSoftDeletedByDefault(filter, queryBuilder); @@ -467,19 +519,19 @@ private AggregationBuilder buildAggregationsV2(@Nonnull String path) { * @return {@link BrowseResultMetadata} */ @Nonnull - private BrowseGroupsResultV2 extractGroupsResponseV2(@Nonnull SearchResponse groupsResponse, @Nonnull String path, - int from, int size) { + private BrowseGroupsResultV2 extractGroupsResponseV2( + @Nonnull 
SearchResponse groupsResponse, @Nonnull String path, int from, int size) { final ParsedTerms groups = groupsResponse.getAggregations().get(GROUP_AGG); - final List<BrowseResultGroupV2> groupsAgg = groups.getBuckets() - .stream() - .map(this::mapBrowseResultGroupV2) - .collect(Collectors.toList()); + final List<BrowseResultGroupV2> groupsAgg = + groups.getBuckets().stream().map(this::mapBrowseResultGroupV2).collect(Collectors.toList()); // Get the groups that are in the from to from + size range - final List<BrowseResultGroupV2> paginatedGroups = groupsAgg.size() <= from ? Collections.emptyList() - : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); - return new BrowseGroupsResultV2(paginatedGroups, groupsAgg.size(), - (int) groupsResponse.getHits().getTotalHits().value); + final List<BrowseResultGroupV2> paginatedGroups = + groupsAgg.size() <= from + ? Collections.emptyList() + : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); + return new BrowseGroupsResultV2( + paginatedGroups, groupsAgg.size(), (int) groupsResponse.getHits().getTotalHits().value); } private boolean hasSubGroups(Terms.Bucket group) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 960a5b38826b1..0718448a6453e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -1,12 +1,16 @@ package com.linkedin.metadata.search.elasticsearch.query; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.codahale.metrics.Timer; -import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.datahub.util.exception.ESQueryException; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.annotations.VisibleForTesting; import com.linkedin.data.template.LongMap; +import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.AutoCompleteResult; @@ -45,24 +49,18 @@ import org.opensearch.client.core.CountRequest; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.SearchModule; import org.opensearch.search.builder.SearchSourceBuilder; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; -import static com.linkedin.metadata.utils.SearchUtil.*; - - -/** - * A search DAO for Elasticsearch backend. - */ +/** A search DAO for Elasticsearch backend. 
*/ @Slf4j @RequiredArgsConstructor public class ESSearchDAO { private static final NamedXContentRegistry X_CONTENT_REGISTRY; + static { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); X_CONTENT_REGISTRY = new NamedXContentRegistry(searchModule.getNamedXContents()); @@ -73,15 +71,14 @@ public class ESSearchDAO { private final IndexConvention indexConvention; private final boolean pointInTimeCreationEnabled; private final String elasticSearchImplementation; - @Nonnull - private final SearchConfiguration searchConfiguration; - @Nullable - private final CustomSearchConfiguration customSearchConfiguration; + @Nonnull private final SearchConfiguration searchConfiguration; + @Nullable private final CustomSearchConfiguration customSearchConfiguration; public long docCount(@Nonnull String entityName) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); CountRequest countRequest = - new CountRequest(indexConvention.getIndexName(entitySpec)).query(SearchRequestHandler.getFilterQuery(null)); + new CountRequest(indexConvention.getIndexName(entitySpec)) + .query(SearchRequestHandler.getFilterQuery(null)); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "docCount").time()) { return client.count(countRequest, RequestOptions.DEFAULT).getCount(); } catch (IOException e) { @@ -92,15 +89,21 @@ public long docCount(@Nonnull String entityName) { @Nonnull @WithSpan - private SearchResult executeAndExtract(@Nonnull List<EntitySpec> entitySpec, @Nonnull SearchRequest searchRequest, - @Nullable Filter filter, int from, int size) { + private SearchResult executeAndExtract( + @Nonnull List<EntitySpec> entitySpec, + @Nonnull SearchRequest searchRequest, + @Nullable Filter filter, + int from, + int size) { long id = System.currentTimeMillis(); - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_search").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "executeAndExtract_search").time()) { log.debug("Executing request {}: {}", id, searchRequest); final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well - return transformIndexIntoEntityName(SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + return transformIndexIntoEntityName( + SearchRequestHandler.getBuilder( + entitySpec, searchConfiguration, customSearchConfiguration) .extractResult(searchResponse, filter, from, size)); } catch (Exception e) { log.error("Search query failed", e); @@ -116,33 +119,47 @@ private String transformIndexToken(String name, int entityTypeIdx) { } String[] tokens = name.split(AGGREGATION_SEPARATOR_CHAR); if (entityTypeIdx < tokens.length) { - tokens[entityTypeIdx] = indexConvention.getEntityName(tokens[entityTypeIdx]).orElse(tokens[entityTypeIdx]); + tokens[entityTypeIdx] = + indexConvention.getEntityName(tokens[entityTypeIdx]).orElse(tokens[entityTypeIdx]); } return String.join(AGGREGATION_SEPARATOR_CHAR, tokens); } - private AggregationMetadata transformAggregationMetadata(@Nonnull AggregationMetadata aggMeta, int entityTypeIdx) { + private AggregationMetadata transformAggregationMetadata( + @Nonnull AggregationMetadata aggMeta, int entityTypeIdx) { if (entityTypeIdx >= 0) { - aggMeta.setAggregations(new LongMap( - aggMeta.getAggregations().entrySet().stream().collect( - Collectors.toMap(entry -> transformIndexToken(entry.getKey(), entityTypeIdx), Map.Entry::getValue)))); 
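+      // Rewrite the entity-index token embedded in each aggregation key back to its entity
+      // name; the filter values below get the same treatment.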
+ aggMeta.setAggregations( + new LongMap( + aggMeta.getAggregations().entrySet().stream() + .collect( + Collectors.toMap( + entry -> transformIndexToken(entry.getKey(), entityTypeIdx), + Map.Entry::getValue)))); aggMeta.setFilterValues( new FilterValueArray( - aggMeta.getFilterValues().stream().map( - filterValue -> filterValue.setValue(transformIndexToken(filterValue.getValue(), entityTypeIdx))) - .collect(Collectors.toList()) - )); - + aggMeta.getFilterValues().stream() + .map( + filterValue -> + filterValue.setValue( + transformIndexToken(filterValue.getValue(), entityTypeIdx))) + .collect(Collectors.toList()))); } return aggMeta; } @VisibleForTesting public SearchResult transformIndexIntoEntityName(SearchResult result) { - return result.setMetadata(result.getMetadata().setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); + return result.setMetadata( + result + .getMetadata() + .setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); } + private ScrollResult transformIndexIntoEntityName(ScrollResult result) { - return result.setMetadata(result.getMetadata().setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); + return result.setMetadata( + result + .getMetadata() + .setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); } private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadataArray aggArray) { @@ -157,15 +174,22 @@ private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadat @Nonnull @WithSpan - private ScrollResult executeAndExtract(@Nonnull List<EntitySpec> entitySpecs, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, - @Nullable String scrollId, @Nullable String keepAlive, int size) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_scroll").time()) { + private ScrollResult executeAndExtract( + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull SearchRequest searchRequest, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int size) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "executeAndExtract_scroll").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well - return transformIndexIntoEntityName(SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .extractScrollResult(searchResponse, - filter, scrollId, keepAlive, size, supportsPointInTime())); + return transformIndexIntoEntityName( + SearchRequestHandler.getBuilder( + entitySpecs, searchConfiguration, customSearchConfiguration) + .extractScrollResult( + searchResponse, filter, scrollId, keepAlive, size, supportsPointInTime())); } catch (Exception e) { log.error("Search query failed: {}", searchRequest, e); throw new ESQueryException("Search query failed:", e); @@ -173,32 +197,42 @@ private ScrollResult executeAndExtract(@Nonnull List<EntitySpec> entitySpecs, @N } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
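+   *
+   * <p>As a non-authoritative sketch (the DAO instance, entity name, and query text here are
+   * illustrative, not part of this change), a basic full-text call looks like:
+   *
+   * <pre>{@code
+   * SearchResult result =
+   *     esSearchDAO.search(List.of("dataset"), "sales", null, null, 0, 10, null, null);
+   * }</pre>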
* * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags Structured or full text search modes, plus other misc options * @param facets list of facets we want aggregations for - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List<String> facets) { + public SearchResult search( + @Nonnull List<String> entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets) { final String finalInput = input.isEmpty() ? "*" : input; Timer.Context searchRequestTimer = MetricUtils.timer(this.getClass(), "searchRequest").time(); - List<EntitySpec> entitySpecs = entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); + List<EntitySpec> entitySpecs = + entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); Filter transformedFilters = transformFilterForEntities(postFilters, indexConvention); // Step 1: construct the query - final SearchRequest searchRequest = SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .getSearchRequest(finalInput, transformedFilters, sortCriterion, from, size, searchFlags, facets); - searchRequest.indices(entityNames.stream() - .map(indexConvention::getEntityIndexName) - .toArray(String[]::new)); + final SearchRequest searchRequest = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getSearchRequest( + finalInput, transformedFilters, sortCriterion, from, size, searchFlags, facets); + searchRequest.indices( + entityNames.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new)); searchRequestTimer.stop(); // Step 2: execute the query and extract results, validated against document model as well return executeAndExtract(entitySpecs, searchRequest, transformedFilters, from, size); @@ -207,21 +241,26 @@ public SearchResult search(@Nonnull List<String> entityNames, @Nonnull String in /** * Gets a list of documents after applying the input filters. 
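+   *
+   * <p>Illustrative only (the entity name is hypothetical): fetching the first 20 documents
+   * with no filters or sort criterion applied:
+   *
+   * <pre>{@code
+   * SearchResult firstPage = esSearchDAO.filter("dataset", null, null, 0, 20);
+   * }</pre>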
* - * @param filters the request map with fields and values to be applied as filters to the search query + * @param filters the request map with fields and values to be applied as filters to the search + * query * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size number of search hits to return - * @return a {@link SearchResult} that contains a list of filtered documents and related search result metadata + * @return a {@link SearchResult} that contains a list of filtered documents and related search + * result metadata */ @Nonnull - public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, int from, int size) { + public SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); Filter transformedFilters = transformFilterForEntities(filters, indexConvention); final SearchRequest searchRequest = - SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getFilterRequest(transformedFilters, sortCriterion, from, size); + SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + .getFilterRequest(transformedFilters, sortCriterion, from, size); searchRequest.indices(indexConvention.getIndexName(entitySpec)); return executeAndExtract(List.of(entitySpec), searchRequest, transformedFilters, from, size); @@ -230,7 +269,8 @@ public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, /** * Returns a list of suggestions given type ahead query. * - * <p>The advanced auto complete can take filters and provides suggestions based on filtered context. + * <p>The advanced auto complete can take filters and provides suggestions based on filtered + * context. 
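+   *
+   * <p>A minimal sketch (entity name and typed prefix are illustrative): suggest up to ten
+   * matches for the prefix "air" across the default autocomplete fields:
+   *
+   * <pre>{@code
+   * AutoCompleteResult suggestions = esSearchDAO.autoComplete("dataset", "air", null, null, 10);
+   * }</pre>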
* * @param query the type ahead query text * @param field the field name for the auto complete @@ -239,12 +279,18 @@ public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, * @return A list of suggestions as string */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit) { + public AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit) { try { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); AutocompleteRequestHandler builder = AutocompleteRequestHandler.getBuilder(entitySpec); - SearchRequest req = builder.getSearchRequest(query, field, transformFilterForEntities(requestParams, indexConvention), limit); + SearchRequest req = + builder.getSearchRequest( + query, field, transformFilterForEntities(requestParams, indexConvention), limit); req.indices(indexConvention.getIndexName(entitySpec)); SearchResponse searchResponse = client.search(req, RequestOptions.DEFAULT); return builder.extractResult(searchResponse, query); @@ -264,18 +310,27 @@ public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull Stri * @return */ @Nonnull - public Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit) { - final SearchRequest searchRequest = SearchRequestHandler.getAggregationRequest(field, transformFilterForEntities(requestParams, indexConvention), limit); + public Map<String, Long> aggregateByValue( + @Nullable List<String> entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit) { + final SearchRequest searchRequest = + SearchRequestHandler.getAggregationRequest( + field, transformFilterForEntities(requestParams, indexConvention), limit); if (entityNames == null) { String indexName = indexConvention.getAllEntityIndicesPattern(); searchRequest.indices(indexName); } else { - Stream<String> stream = entityNames.stream().map(entityRegistry::getEntitySpec).map(indexConvention::getIndexName); + Stream<String> stream = + entityNames.stream() + .map(entityRegistry::getEntitySpec) + .map(indexConvention::getIndexName); searchRequest.indices(stream.toArray(String[]::new)); } - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well return SearchRequestHandler.extractTermAggregations(searchResponse, field); @@ -286,28 +341,35 @@ public Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @N } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
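+   *
+   * <p>A non-authoritative sketch of fetching the first page of a scroll (the entity names and
+   * the "5m" keep-alive are illustrative; the scroll id returned on the result is passed back
+   * in on subsequent calls):
+   *
+   * <pre>{@code
+   * ScrollResult page =
+   *     esSearchDAO.scroll(
+   *         List.of("dataset", "chart"), "*", null, null, null, "5m", 100, new SearchFlags());
+   * }</pre>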
* * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll Id to convert to a PIT ID and Sort array to pass to ElasticSearch * @param keepAlive string representation of the time to keep a point in time alive * @param size the number of search hits to return - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public ScrollResult scroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, SearchFlags searchFlags) { + public ScrollResult scroll( + @Nonnull List<String> entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + SearchFlags searchFlags) { final String finalInput = input.isEmpty() ? "*" : input; - String[] indexArray = entities.stream() - .map(indexConvention::getEntityIndexName) - .toArray(String[]::new); + String[] indexArray = + entities.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new); Timer.Context scrollRequestTimer = MetricUtils.timer(this.getClass(), "scrollRequest").time(); - List<EntitySpec> entitySpecs = entities.stream() - .map(entityRegistry::getEntitySpec) - .collect(Collectors.toList()); + List<EntitySpec> entitySpecs = + entities.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); String pitId = null; Object[] sort = null; if (scrollId != null) { @@ -326,39 +388,55 @@ public ScrollResult scroll(@Nonnull List<String> entities, @Nonnull String input Filter transformedFilters = transformFilterForEntities(postFilters, indexConvention); // Step 1: construct the query - final SearchRequest searchRequest = SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .getSearchRequest(finalInput, transformedFilters, sortCriterion, sort, pitId, keepAlive, size, searchFlags); + final SearchRequest searchRequest = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getSearchRequest( + finalInput, + transformedFilters, + sortCriterion, + sort, + pitId, + keepAlive, + size, + searchFlags); - // PIT specifies indices in creation so it doesn't support specifying indices on the request, so we only specify if not using PIT + // PIT specifies indices in creation so it doesn't support specifying indices on the request, so + // we only specify if not using PIT if (!supportsPointInTime()) { searchRequest.indices(indexArray); } scrollRequestTimer.stop(); // Step 2: execute the query and extract results, validated against document model as well - return executeAndExtract(entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); + return executeAndExtract( + entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); } public Optional<SearchResponse> raw(@Nonnull String indexName, @Nullable String jsonQuery) { - return Optional.ofNullable(jsonQuery).map(json -> { - try { - XContentParser parser 
= XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, json); - SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); + return Optional.ofNullable(jsonQuery) + .map( + json -> { + try { + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, json); + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); - SearchRequest searchRequest = new SearchRequest(indexConvention.getIndexName(indexName)); - searchRequest.source(searchSourceBuilder); + SearchRequest searchRequest = + new SearchRequest(indexConvention.getIndexName(indexName)); + searchRequest.source(searchSourceBuilder); - return client.search(searchRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + return client.search(searchRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } private boolean supportsPointInTime() { - return pointInTimeCreationEnabled && ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH.equalsIgnoreCase(elasticSearchImplementation); + return pointInTimeCreationEnabled + && ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH.equalsIgnoreCase(elasticSearchImplementation); } private String createPointInTime(String[] indexArray, String keepAlive) { @@ -367,8 +445,8 @@ private String createPointInTime(String[] indexArray, String keepAlive) { request.addParameter("keep_alive", keepAlive); try { Response response = client.getLowLevelClient().performRequest(request); - Map<String, Object> mappedResponse = OBJECT_MAPPER.readValue(response.getEntity().getContent(), - new TypeReference<>() { }); + Map<String, Object> mappedResponse = + OBJECT_MAPPER.readValue(response.getEntity().getContent(), new TypeReference<>() {}); return (String) mappedResponse.get("id"); } catch (IOException e) { log.error("Failed to generate PointInTime Identifier.", e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java index e2bdea84eda0e..7a8056c0b59d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.search.utils.ESUtils; @@ -14,9 +16,6 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class AggregationQueryBuilder { @@ -32,43 +31,51 @@ public AggregationQueryBuilder( this._allFacetFields = getAllFacetFields(annotations); } - /** - * Get the set of default aggregations, across all facets. - */ + /** Get the set of default aggregations, across all facets. 
*/ public List<AggregationBuilder> getAggregations() { return getAggregations(null); } /** - * Get aggregations for a search request for the given facets provided, and if none are provided, then get aggregations for all. + * Get aggregations for a search request for the given facets provided, and if none are provided, + * then get aggregations for all. */ public List<AggregationBuilder> getAggregations(@Nullable List<String> facets) { final Set<String> facetsToAggregate; if (facets != null) { - facets.stream().filter(f -> !isValidAggregate(f)).forEach(facet -> { - log.warn(String.format("Requested facet for search filter aggregations that isn't part of the default filters. Provided: %s; Available: %s", facet, - _defaultFacetFields)); - }); - facetsToAggregate = facets.stream().filter(this::isValidAggregate).collect(Collectors.toSet()); + facets.stream() + .filter(f -> !isValidAggregate(f)) + .forEach( + facet -> { + log.warn( + String.format( + "Requested facet for search filter aggregations that isn't part of the default filters. Provided: %s; Available: %s", + facet, _defaultFacetFields)); + }); + facetsToAggregate = + facets.stream().filter(this::isValidAggregate).collect(Collectors.toSet()); } else { facetsToAggregate = _defaultFacetFields; } - return facetsToAggregate.stream().map(this::facetToAggregationBuilder).collect(Collectors.toList()); + return facetsToAggregate.stream() + .map(this::facetToAggregationBuilder) + .collect(Collectors.toList()); } - private Set<String> getDefaultFacetFields(final List<SearchableAnnotation> annotations) { - Set<String> facets = annotations.stream() - .flatMap(annotation -> getDefaultFacetFieldsFromAnnotation(annotation).stream()) - .collect(Collectors.toSet()); + Set<String> facets = + annotations.stream() + .flatMap(annotation -> getDefaultFacetFieldsFromAnnotation(annotation).stream()) + .collect(Collectors.toSet()); facets.add(INDEX_VIRTUAL_FIELD); return facets; } private Set<String> getAllFacetFields(final List<SearchableAnnotation> annotations) { - Set<String> facets = annotations.stream() - .flatMap(annotation -> getAllFacetFieldsFromAnnotation(annotation).stream()) - .collect(Collectors.toSet()); + Set<String> facets = + annotations.stream() + .flatMap(annotation -> getAllFacetFieldsFromAnnotation(annotation).stream()) + .collect(Collectors.toSet()); facets.add(INDEX_VIRTUAL_FIELD); return facets; } @@ -129,4 +136,4 @@ List<String> getAllFacetFieldsFromAnnotation(final SearchableAnnotation annotati } return facetsFromAnnotation; } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java index bba3a9fa4232d..cdcdae2f3d311 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; @@ -34,33 +36,32 @@ import org.opensearch.search.builder.SearchSourceBuilder; import 
org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; -import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; - - @Slf4j public class AutocompleteRequestHandler { private final List<String> _defaultAutocompleteFields; - private static final Map<EntitySpec, AutocompleteRequestHandler> AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = - new ConcurrentHashMap<>(); + private static final Map<EntitySpec, AutocompleteRequestHandler> + AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); public AutocompleteRequestHandler(@Nonnull EntitySpec entitySpec) { - _defaultAutocompleteFields = Stream.concat(entitySpec.getSearchableFieldSpecs() - .stream() - .map(SearchableFieldSpec::getSearchableAnnotation) - .filter(SearchableAnnotation::isEnableAutocomplete) - .map(SearchableAnnotation::getFieldName), - Stream.of("urn")) - .collect(Collectors.toList()); + _defaultAutocompleteFields = + Stream.concat( + entitySpec.getSearchableFieldSpecs().stream() + .map(SearchableFieldSpec::getSearchableAnnotation) + .filter(SearchableAnnotation::isEnableAutocomplete) + .map(SearchableAnnotation::getFieldName), + Stream.of("urn")) + .collect(Collectors.toList()); } public static AutocompleteRequestHandler getBuilder(@Nonnull EntitySpec entitySpec) { - return AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME.computeIfAbsent(entitySpec, - k -> new AutocompleteRequestHandler(entitySpec)); + return AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME.computeIfAbsent( + entitySpec, k -> new AutocompleteRequestHandler(entitySpec)); } - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable String field, @Nullable Filter filter, int limit) { + public SearchRequest getSearchRequest( + @Nonnull String input, @Nullable String field, @Nullable Filter filter, int limit) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(limit); @@ -78,25 +79,27 @@ private QueryBuilder getQuery(@Nonnull String query, @Nullable String field) { public static QueryBuilder getQuery(List<String> autocompleteFields, @Nonnull String query) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); // Search for exact matches with higher boost and ngram matches - MultiMatchQueryBuilder autocompleteQueryBuilder = QueryBuilders.multiMatchQuery(query) - .type(MultiMatchQueryBuilder.Type.BOOL_PREFIX); - - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - autocompleteFields.forEach(fieldName -> { - if ("urn".equals(fieldName)) { - autocompleteQueryBuilder.field(fieldName + ".ngram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._2gram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._3gram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._4gram", urnBoost); - } else { - autocompleteQueryBuilder.field(fieldName + ".ngram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._2gram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._3gram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._4gram"); - } - - finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(fieldName + ".delimited", query)); - }); + MultiMatchQueryBuilder autocompleteQueryBuilder = + QueryBuilders.multiMatchQuery(query).type(MultiMatchQueryBuilder.Type.BOOL_PREFIX); + + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + autocompleteFields.forEach( + fieldName -> { + if 
("urn".equals(fieldName)) { + autocompleteQueryBuilder.field(fieldName + ".ngram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._2gram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._3gram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._4gram", urnBoost); + } else { + autocompleteQueryBuilder.field(fieldName + ".ngram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._2gram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._3gram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._4gram"); + } + + finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(fieldName + ".delimited", query)); + }); finalQuery.should(autocompleteQueryBuilder); @@ -111,11 +114,14 @@ private HighlightBuilder getHighlights(@Nullable String field) { highlightBuilder.preTags(""); highlightBuilder.postTags(""); // Check for each field name and any subfields - getAutocompleteFields(field).forEach(fieldName -> highlightBuilder - .field(fieldName) - .field(fieldName + ".*") - .field(fieldName + ".ngram") - .field(fieldName + ".delimited")); + getAutocompleteFields(field) + .forEach( + fieldName -> + highlightBuilder + .field(fieldName) + .field(fieldName + ".*") + .field(fieldName + ".ngram") + .field(fieldName + ".delimited")); return highlightBuilder; } @@ -126,19 +132,20 @@ private List<String> getAutocompleteFields(@Nullable String field) { return _defaultAutocompleteFields; } - public AutoCompleteResult extractResult(@Nonnull SearchResponse searchResponse, @Nonnull String input) { + public AutoCompleteResult extractResult( + @Nonnull SearchResponse searchResponse, @Nonnull String input) { Set<String> results = new LinkedHashSet<>(); Set<AutoCompleteEntity> entityResults = new HashSet<>(); for (SearchHit hit : searchResponse.getHits()) { - Optional<String> matchedFieldValue = hit.getHighlightFields() - .entrySet() - .stream() - .findFirst() - .map(entry -> entry.getValue().getFragments()[0].string()); + Optional<String> matchedFieldValue = + hit.getHighlightFields().entrySet().stream() + .findFirst() + .map(entry -> entry.getValue().getFragments()[0].string()); Optional<String> matchedUrn = Optional.ofNullable((String) hit.getSourceAsMap().get("urn")); try { if (matchedUrn.isPresent()) { - entityResults.add(new AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); + entityResults.add( + new AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); } } catch (URISyntaxException e) { throw new RuntimeException(String.format("Failed to create urn %s", matchedUrn.get()), e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java index 55a3474fd9f35..478d633fe3c55 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java @@ -2,42 +2,43 @@ import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nullable; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.regex.Pattern; import java.util.stream.Collectors; - +import 
javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Builder(builderMethodName = "hiddenBuilder") @Getter public class CustomizedQueryHandler { - private CustomSearchConfiguration customSearchConfiguration; - @Builder.Default - private List<Map.Entry<Pattern, QueryConfiguration>> queryConfigurations = List.of(); + private CustomSearchConfiguration customSearchConfiguration; - public Optional<QueryConfiguration> lookupQueryConfig(String query) { - return queryConfigurations.stream() - .filter(e -> e.getKey().matcher(query).matches()) - .map(Map.Entry::getValue) - .findFirst(); - } + @Builder.Default + private List<Map.Entry<Pattern, QueryConfiguration>> queryConfigurations = List.of(); + + public Optional<QueryConfiguration> lookupQueryConfig(String query) { + return queryConfigurations.stream() + .filter(e -> e.getKey().matcher(query).matches()) + .map(Map.Entry::getValue) + .findFirst(); + } - public static CustomizedQueryHandlerBuilder builder(@Nullable CustomSearchConfiguration customSearchConfiguration) { - CustomizedQueryHandlerBuilder builder = hiddenBuilder() - .customSearchConfiguration(customSearchConfiguration); + public static CustomizedQueryHandlerBuilder builder( + @Nullable CustomSearchConfiguration customSearchConfiguration) { + CustomizedQueryHandlerBuilder builder = + hiddenBuilder().customSearchConfiguration(customSearchConfiguration); - if (customSearchConfiguration != null) { - builder.queryConfigurations(customSearchConfiguration.getQueryConfigurations().stream() - .map(cfg -> Map.entry(Pattern.compile(cfg.getQueryRegex()), cfg)) - .collect(Collectors.toList())); - } - return builder; + if (customSearchConfiguration != null) { + builder.queryConfigurations( + customSearchConfiguration.getQueryConfigurations().stream() + .map(cfg -> Map.entry(Pattern.compile(cfg.getQueryRegex()), cfg)) + .collect(Collectors.toList())); } + return builder; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java index 79c00fc7cdd20..3a7e72deed2fe 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java @@ -3,7 +3,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; - public class PITAwareSearchRequest extends SearchRequest { private IndicesOptions indicesOptions; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java index 3e4f3427e7658..1fe4a74968e42 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; + import java.io.IOException; import java.io.Serializable; import java.nio.charset.StandardCharsets; @@ -8,10 +10,6 @@ import lombok.Data; import lombok.NoArgsConstructor; - -import static 
com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; - - @Data @AllArgsConstructor @NoArgsConstructor @@ -22,7 +20,9 @@ public class SearchAfterWrapper implements Serializable { public static SearchAfterWrapper fromScrollId(String scrollId) { try { - return OBJECT_MAPPER.readValue(Base64.getDecoder().decode(scrollId.getBytes(StandardCharsets.UTF_8)), SearchAfterWrapper.class); + return OBJECT_MAPPER.readValue( + Base64.getDecoder().decode(scrollId.getBytes(StandardCharsets.UTF_8)), + SearchAfterWrapper.class); } catch (IOException e) { throw new IllegalStateException("Invalid scroll Id cannot be mapped: " + scrollId, e); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java index a75ed40ffca52..7709ff16f7940 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java @@ -1,175 +1,162 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; + import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import java.util.Set; +import javax.annotation.Nonnull; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.experimental.Accessors; -import javax.annotation.Nonnull; - -import java.util.Set; - -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; - - @Builder @Getter @Accessors(fluent = true) @EqualsAndHashCode public class SearchFieldConfig { - public static final float DEFAULT_BOOST = 1.0f; - - public static final Set<String> KEYWORD_FIELDS = Set.of("urn", "runId", "_index"); - public static final Set<String> PATH_HIERARCHY_FIELDS = Set.of("browsePathV2"); - - // These should not be used directly since there is a specific - // order in which these rules need to be evaluated for exceptions to - // the rules. 
- private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_DELIMITED_SUBFIELD = - Set.of( - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.WORD_GRAM - // NOT URN_PARTIAL (urn field is special) - ); - // NOT comprehensive - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_KEYWORD_SUBFIELD = - Set.of( - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.URN_PARTIAL - ); - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH = - Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH - ); - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH_V2 = - Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH_V2 - ); - private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BASE_KEYWORD = - Set.of( - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.WORD_GRAM, - // not analyzed - SearchableAnnotation.FieldType.BOOLEAN, - SearchableAnnotation.FieldType.COUNT, - SearchableAnnotation.FieldType.DATETIME, - SearchableAnnotation.FieldType.OBJECT - ); - // NOT true for `urn` - public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_URN_TEXT = - Set.of( - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.URN_PARTIAL - ); - - public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_WORD_GRAM = - Set.of( - SearchableAnnotation.FieldType.WORD_GRAM - ); - - @Nonnull - private final String fieldName; - @Nonnull - private final String shortName; - @Builder.Default - private final Float boost = DEFAULT_BOOST; - private final String analyzer; - private boolean hasKeywordSubfield; - private boolean hasDelimitedSubfield; - private boolean hasWordGramSubfields; - private boolean isQueryByDefault; - private boolean isDelimitedSubfield; - private boolean isKeywordSubfield; - private boolean isWordGramSubfield; - - public static SearchFieldConfig detectSubFieldType(@Nonnull SearchableFieldSpec fieldSpec) { - final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); - final String fieldName = searchableAnnotation.getFieldName(); - final float boost = (float) searchableAnnotation.getBoostScore(); - final SearchableAnnotation.FieldType fieldType = searchableAnnotation.getFieldType(); - return detectSubFieldType(fieldName, boost, fieldType, searchableAnnotation.isQueryByDefault()); - } - - public static SearchFieldConfig detectSubFieldType(String fieldName, - SearchableAnnotation.FieldType fieldType, - boolean isQueryByDefault) { - return detectSubFieldType(fieldName, DEFAULT_BOOST, fieldType, isQueryByDefault); + public static final float DEFAULT_BOOST = 1.0f; + + public static final Set<String> KEYWORD_FIELDS = Set.of("urn", "runId", "_index"); + public static final Set<String> PATH_HIERARCHY_FIELDS = Set.of("browsePathV2"); + + // These should not be used directly since there is a specific + // order in which these rules need to be evaluated for exceptions to + // the rules. 
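+  // For example, a hypothetical TEXT field named "description" matches
+  // TYPES_WITH_DELIMITED_SUBFIELD below and is therefore also queried via
+  // "description.delimited".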
+ private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_DELIMITED_SUBFIELD = + Set.of( + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.WORD_GRAM + // NOT URN_PARTIAL (urn field is special) + ); + // NOT comprehensive + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_KEYWORD_SUBFIELD = + Set.of( + SearchableAnnotation.FieldType.URN, + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.URN_PARTIAL); + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH = + Set.of(SearchableAnnotation.FieldType.BROWSE_PATH); + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BROWSE_PATH_V2 = + Set.of(SearchableAnnotation.FieldType.BROWSE_PATH_V2); + private static final Set<SearchableAnnotation.FieldType> TYPES_WITH_BASE_KEYWORD = + Set.of( + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.WORD_GRAM, + // not analyzed + SearchableAnnotation.FieldType.BOOLEAN, + SearchableAnnotation.FieldType.COUNT, + SearchableAnnotation.FieldType.DATETIME, + SearchableAnnotation.FieldType.OBJECT); + // NOT true for `urn` + public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_URN_TEXT = + Set.of(SearchableAnnotation.FieldType.URN, SearchableAnnotation.FieldType.URN_PARTIAL); + + public static final Set<SearchableAnnotation.FieldType> TYPES_WITH_WORD_GRAM = + Set.of(SearchableAnnotation.FieldType.WORD_GRAM); + + @Nonnull private final String fieldName; + @Nonnull private final String shortName; + @Builder.Default private final Float boost = DEFAULT_BOOST; + private final String analyzer; + private boolean hasKeywordSubfield; + private boolean hasDelimitedSubfield; + private boolean hasWordGramSubfields; + private boolean isQueryByDefault; + private boolean isDelimitedSubfield; + private boolean isKeywordSubfield; + private boolean isWordGramSubfield; + + public static SearchFieldConfig detectSubFieldType(@Nonnull SearchableFieldSpec fieldSpec) { + final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); + final String fieldName = searchableAnnotation.getFieldName(); + final float boost = (float) searchableAnnotation.getBoostScore(); + final SearchableAnnotation.FieldType fieldType = searchableAnnotation.getFieldType(); + return detectSubFieldType(fieldName, boost, fieldType, searchableAnnotation.isQueryByDefault()); + } + + public static SearchFieldConfig detectSubFieldType( + String fieldName, SearchableAnnotation.FieldType fieldType, boolean isQueryByDefault) { + return detectSubFieldType(fieldName, DEFAULT_BOOST, fieldType, isQueryByDefault); + } + + public static SearchFieldConfig detectSubFieldType( + String fieldName, + float boost, + SearchableAnnotation.FieldType fieldType, + boolean isQueryByDefault) { + return SearchFieldConfig.builder() + .fieldName(fieldName) + .boost(boost) + .analyzer(getAnalyzer(fieldName, fieldType)) + .hasKeywordSubfield(hasKeywordSubfield(fieldName, fieldType)) + .hasDelimitedSubfield(hasDelimitedSubfield(fieldName, fieldType)) + .hasWordGramSubfields(hasWordGramSubfields(fieldName, fieldType)) + .isQueryByDefault(isQueryByDefault) + .build(); + } + + public boolean isKeyword() { + return KEYWORD_ANALYZER.equals(analyzer()) || isKeyword(fieldName()); + } + + private static boolean hasDelimitedSubfield( + String fieldName, SearchableAnnotation.FieldType fieldType) { + 
return !fieldName.contains(".") + && ("urn".equals(fieldName) || TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType)); + } + + private static boolean hasWordGramSubfields( + String fieldName, SearchableAnnotation.FieldType fieldType) { + return !fieldName.contains(".") && (TYPES_WITH_WORD_GRAM.contains(fieldType)); + } + + private static boolean hasKeywordSubfield( + String fieldName, SearchableAnnotation.FieldType fieldType) { + return !"urn".equals(fieldName) + && !fieldName.contains(".") + && (TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType) // if delimited then also has keyword + || TYPES_WITH_KEYWORD_SUBFIELD.contains(fieldType)); + } + + private static boolean isKeyword(String fieldName) { + return fieldName.endsWith(".keyword") || KEYWORD_FIELDS.contains(fieldName); + } + + private static String getAnalyzer(String fieldName, SearchableAnnotation.FieldType fieldType) { + // order is important + if (TYPES_WITH_BROWSE_PATH.contains(fieldType)) { + return BROWSE_PATH_HIERARCHY_ANALYZER; + } else if (TYPES_WITH_BROWSE_PATH_V2.contains(fieldType)) { + return BROWSE_PATH_V2_HIERARCHY_ANALYZER; + // sub fields + } else if (isKeyword(fieldName)) { + return KEYWORD_ANALYZER; + } else if (fieldName.endsWith(".delimited")) { + return TEXT_SEARCH_ANALYZER; + // non-subfield cases below + } else if (TYPES_WITH_BASE_KEYWORD.contains(fieldType)) { + return KEYWORD_ANALYZER; + } else if (TYPES_WITH_URN_TEXT.contains(fieldType)) { + return URN_SEARCH_ANALYZER; + } else { + throw new IllegalStateException( + String.format("Unknown analyzer for fieldName: %s, fieldType: %s", fieldName, fieldType)); } - - public static SearchFieldConfig detectSubFieldType(String fieldName, - float boost, - SearchableAnnotation.FieldType fieldType, - boolean isQueryByDefault) { - return SearchFieldConfig.builder() - .fieldName(fieldName) - .boost(boost) - .analyzer(getAnalyzer(fieldName, fieldType)) - .hasKeywordSubfield(hasKeywordSubfield(fieldName, fieldType)) - .hasDelimitedSubfield(hasDelimitedSubfield(fieldName, fieldType)) - .hasWordGramSubfields(hasWordGramSubfields(fieldName, fieldType)) - .isQueryByDefault(isQueryByDefault) - .build(); - } - - public boolean isKeyword() { - return KEYWORD_ANALYZER.equals(analyzer()) || isKeyword(fieldName()); - } - - private static boolean hasDelimitedSubfield(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !fieldName.contains(".") - && ("urn".equals(fieldName) || TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType)); - } - - private static boolean hasWordGramSubfields(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !fieldName.contains(".") - && (TYPES_WITH_WORD_GRAM.contains(fieldType)); - } - private static boolean hasKeywordSubfield(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !"urn".equals(fieldName) - && !fieldName.contains(".") - && (TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType) // if delimited then also has keyword - || TYPES_WITH_KEYWORD_SUBFIELD.contains(fieldType)); - } - private static boolean isKeyword(String fieldName) { - return fieldName.endsWith(".keyword") - || KEYWORD_FIELDS.contains(fieldName); - } - - private static String getAnalyzer(String fieldName, SearchableAnnotation.FieldType fieldType) { - // order is important - if (TYPES_WITH_BROWSE_PATH.contains(fieldType)) { - return BROWSE_PATH_HIERARCHY_ANALYZER; - } else if (TYPES_WITH_BROWSE_PATH_V2.contains(fieldType)) { - return BROWSE_PATH_V2_HIERARCHY_ANALYZER; - // sub fields - } else if (isKeyword(fieldName)) { - return 
KEYWORD_ANALYZER; - } else if (fieldName.endsWith(".delimited")) { - return TEXT_SEARCH_ANALYZER; - // non-subfield cases below - } else if (TYPES_WITH_BASE_KEYWORD.contains(fieldType)) { - return KEYWORD_ANALYZER; - } else if (TYPES_WITH_URN_TEXT.contains(fieldType)) { - return URN_SEARCH_ANALYZER; - } else { - throw new IllegalStateException(String.format("Unknown analyzer for fieldName: %s, fieldType: %s", fieldName, fieldType)); - } - } - - public static class SearchFieldConfigBuilder { - public SearchFieldConfigBuilder fieldName(@Nonnull String fieldName) { - this.fieldName = fieldName; - isDelimitedSubfield(fieldName.endsWith(".delimited")); - isKeywordSubfield(fieldName.endsWith(".keyword")); - isWordGramSubfield(fieldName.contains("wordGrams")); - shortName(fieldName.split("[.]")[0]); - return this; - } + } + + public static class SearchFieldConfigBuilder { + public SearchFieldConfigBuilder fieldName(@Nonnull String fieldName) { + this.fieldName = fieldName; + isDelimitedSubfield(fieldName.endsWith(".delimited")); + isKeywordSubfield(fieldName.endsWith(".keyword")); + isWordGramSubfield(fieldName.contains("wordGrams")); + shortName(fieldName.split("[.]")[0]); + return this; } + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java index ce88f31449c35..7ddccb0d56724 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java @@ -1,6 +1,14 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.*; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.config.search.ExactMatchConfiguration; import com.linkedin.metadata.config.search.PartialConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; @@ -8,16 +16,12 @@ import com.linkedin.metadata.config.search.custom.BoolQueryConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.StreamReadConstraints; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.metadata.Constants; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchScoreFieldSpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchScoreAnnotation; import com.linkedin.metadata.models.annotation.SearchableAnnotation; - +import com.linkedin.metadata.search.utils.ESUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -32,16 +36,14 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import 
com.linkedin.metadata.search.utils.ESUtils; import lombok.extern.slf4j.Slf4j; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.lucene.search.function.FieldValueFactorFunction; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.Operator; @@ -54,20 +56,25 @@ import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.opensearch.search.SearchModule; -import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.*; - - @Slf4j public class SearchQueryBuilder { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { OBJECT_MAPPER.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH, Constants.MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault( + Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH, + Constants.MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final NamedXContentRegistry X_CONTENT_REGISTRY; + static { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); X_CONTENT_REGISTRY = new NamedXContentRegistry(searchModule.getNamedXContents()); @@ -80,49 +87,63 @@ public class SearchQueryBuilder { private final CustomizedQueryHandler customizedQueryHandler; - public SearchQueryBuilder(@Nonnull SearchConfiguration searchConfiguration, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public SearchQueryBuilder( + @Nonnull SearchConfiguration searchConfiguration, + @Nullable CustomSearchConfiguration customSearchConfiguration) { this.exactMatchConfiguration = searchConfiguration.getExactMatch(); this.partialConfiguration = searchConfiguration.getPartial(); this.wordGramConfiguration = searchConfiguration.getWordGram(); this.customizedQueryHandler = CustomizedQueryHandler.builder(customSearchConfiguration).build(); } - public QueryBuilder buildQuery(@Nonnull List<EntitySpec> entitySpecs, @Nonnull String query, boolean fulltext) { - QueryConfiguration customQueryConfig = customizedQueryHandler.lookupQueryConfig(query).orElse(null); + public QueryBuilder buildQuery( + @Nonnull List<EntitySpec> entitySpecs, @Nonnull String query, boolean fulltext) { + QueryConfiguration customQueryConfig = + customizedQueryHandler.lookupQueryConfig(query).orElse(null); - final QueryBuilder queryBuilder = buildInternalQuery(customQueryConfig, entitySpecs, query, fulltext); + final QueryBuilder queryBuilder = + buildInternalQuery(customQueryConfig, entitySpecs, query, fulltext); return buildScoreFunctions(customQueryConfig, 
entitySpecs, queryBuilder); } /** * Constructs the search query. + * * @param customQueryConfig custom configuration * @param entitySpecs entities being searched * @param query search string * @param fulltext use fulltext queries * @return query builder */ - private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQueryConfig, @Nonnull List<EntitySpec> entitySpecs, - @Nonnull String query, boolean fulltext) { + private QueryBuilder buildInternalQuery( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull String query, + boolean fulltext) { final String sanitizedQuery = query.replaceFirst("^:+", ""); - final BoolQueryBuilder finalQuery = Optional.ofNullable(customQueryConfig) + final BoolQueryBuilder finalQuery = + Optional.ofNullable(customQueryConfig) .flatMap(cqc -> boolQueryBuilder(cqc, sanitizedQuery)) .orElse(QueryBuilders.boolQuery()); if (fulltext && !query.startsWith(STRUCTURED_QUERY_PREFIX)) { getSimpleQuery(customQueryConfig, entitySpecs, sanitizedQuery).ifPresent(finalQuery::should); - getPrefixAndExactMatchQuery(customQueryConfig, entitySpecs, sanitizedQuery).ifPresent(finalQuery::should); + getPrefixAndExactMatchQuery(customQueryConfig, entitySpecs, sanitizedQuery) + .ifPresent(finalQuery::should); } else { - final String withoutQueryPrefix = query.startsWith(STRUCTURED_QUERY_PREFIX) ? query.substring(STRUCTURED_QUERY_PREFIX.length()) : query; + final String withoutQueryPrefix = + query.startsWith(STRUCTURED_QUERY_PREFIX) + ? query.substring(STRUCTURED_QUERY_PREFIX.length()) + : query; QueryStringQueryBuilder queryBuilder = QueryBuilders.queryStringQuery(withoutQueryPrefix); queryBuilder.defaultOperator(Operator.AND); - getStandardFields(entitySpecs).forEach(entitySpec -> - queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); + getStandardFields(entitySpecs) + .forEach(entitySpec -> queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); finalQuery.should(queryBuilder); if (exactMatchConfiguration.isEnableStructured()) { - getPrefixAndExactMatchQuery(null, entitySpecs, withoutQueryPrefix).ifPresent(finalQuery::should); + getPrefixAndExactMatchQuery(null, entitySpecs, withoutQueryPrefix) + .ifPresent(finalQuery::should); } } @@ -130,7 +151,9 @@ private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQuery } /** - * Gets searchable fields from all entities in the input collection. De-duplicates fields across entities. + * Gets searchable fields from all entities in the input collection. De-duplicates fields across + * entities. + * * @param entitySpecs: Entity specs to extract searchable fields from * @return A set of SearchFieldConfigs containing the searchable fields from the input entities. 
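The structured branch of `buildInternalQuery` above strips the structured-query prefix and issues a raw `query_string` query with AND semantics over the boosted default fields. Sketched in isolation against the OpenSearch query DSL, with the prefix value and field boosts as placeholders (the real prefix constant comes from the statically imported `SearchFieldConfig` members):

```java
import java.util.Map;
import org.opensearch.index.query.Operator;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.QueryStringQueryBuilder;

public final class StructuredQueryExample {
  // Placeholder value; not the actual STRUCTURED_QUERY_PREFIX constant.
  private static final String STRUCTURED_QUERY_PREFIX = "\\";

  public static QueryStringQueryBuilder structuredQuery(
      String query, Map<String, Float> fieldBoosts) {
    String withoutPrefix =
        query.startsWith(STRUCTURED_QUERY_PREFIX)
            ? query.substring(STRUCTURED_QUERY_PREFIX.length())
            : query;
    QueryStringQueryBuilder builder = QueryBuilders.queryStringQuery(withoutPrefix);
    builder.defaultOperator(Operator.AND); // every term must match
    fieldBoosts.forEach(builder::field);   // per-field boosts, as getStandardFields() supplies
    return builder;
  }
}
```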
*/ @@ -138,31 +161,42 @@ private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQuery public Set<SearchFieldConfig> getStandardFields(@Nonnull Collection<EntitySpec> entitySpecs) { Set<SearchFieldConfig> fields = new HashSet<>(); // Always present - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - - fields.add(SearchFieldConfig.detectSubFieldType("urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); - fields.add(SearchFieldConfig.detectSubFieldType("urn.delimited", urnBoost * partialConfiguration.getUrnFactor(), - SearchableAnnotation.FieldType.URN, true)); + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn.delimited", + urnBoost * partialConfiguration.getUrnFactor(), + SearchableAnnotation.FieldType.URN, + true)); entitySpecs.stream() .map(this::getFieldsFromEntitySpec) .flatMap(Set::stream) - .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)).forEach((key, value) -> - fields.add( - new SearchFieldConfig( - key, - value.get(0).shortName(), - (float) value.stream().mapToDouble(SearchFieldConfig::boost).average().getAsDouble(), - value.get(0).analyzer(), - value.stream().anyMatch(SearchFieldConfig::hasKeywordSubfield), - value.stream().anyMatch(SearchFieldConfig::hasDelimitedSubfield), - value.stream().anyMatch(SearchFieldConfig::hasWordGramSubfields), - true, - value.stream().anyMatch(SearchFieldConfig::isDelimitedSubfield), - value.stream().anyMatch(SearchFieldConfig::isKeywordSubfield), - value.stream().anyMatch(SearchFieldConfig::isWordGramSubfield) - )) - ); + .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)) + .forEach( + (key, value) -> + fields.add( + new SearchFieldConfig( + key, + value.get(0).shortName(), + (float) + value.stream() + .mapToDouble(SearchFieldConfig::boost) + .average() + .getAsDouble(), + value.get(0).analyzer(), + value.stream().anyMatch(SearchFieldConfig::hasKeywordSubfield), + value.stream().anyMatch(SearchFieldConfig::hasDelimitedSubfield), + value.stream().anyMatch(SearchFieldConfig::hasWordGramSubfields), + true, + value.stream().anyMatch(SearchFieldConfig::isDelimitedSubfield), + value.stream().anyMatch(SearchFieldConfig::isKeywordSubfield), + value.stream().anyMatch(SearchFieldConfig::isWordGramSubfield)))); return fields; } @@ -182,38 +216,44 @@ public Set<SearchFieldConfig> getFieldsFromEntitySpec(EntitySpec entitySpec) { if (SearchFieldConfig.detectSubFieldType(fieldSpec).hasDelimitedSubfield()) { final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); - fields.add(SearchFieldConfig.detectSubFieldType(searchFieldConfig.fieldName() + ".delimited", - searchFieldConfig.boost() * partialConfiguration.getFactor(), - searchableAnnotation.getFieldType(), searchableAnnotation.isQueryByDefault())); + fields.add( + SearchFieldConfig.detectSubFieldType( + searchFieldConfig.fieldName() + ".delimited", + searchFieldConfig.boost() * partialConfiguration.getFactor(), + searchableAnnotation.getFieldType(), + searchableAnnotation.isQueryByDefault())); if (SearchFieldConfig.detectSubFieldType(fieldSpec).hasWordGramSubfields()) { - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams2") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getTwoGramFactor()) - 
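The merge inside `getStandardFields` groups per-entity field configs by name, averages their boosts, and ORs the subfield flags so one config per field name survives. The stream shape, reduced to a toy record with illustrative fields:

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public final class FieldMergeExample {
  record FieldConfig(String fieldName, double boost, boolean hasDelimitedSubfield) {}

  // One entry per field name, boost averaged across entities, subfield flag set
  // if any entity sets it -- mirroring the patch's merge semantics.
  public static Map<String, FieldConfig> merge(List<FieldConfig> perEntityConfigs) {
    return perEntityConfigs.stream()
        .collect(Collectors.groupingBy(FieldConfig::fieldName))
        .entrySet().stream()
        .collect(Collectors.toMap(
            Map.Entry::getKey,
            e -> new FieldConfig(
                e.getKey(),
                e.getValue().stream().mapToDouble(FieldConfig::boost).average().orElse(0.0),
                e.getValue().stream().anyMatch(FieldConfig::hasDelimitedSubfield))));
  }
}
```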
.analyzer(WORD_GRAM_2_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams3") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getThreeGramFactor()) - .analyzer(WORD_GRAM_3_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams4") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getFourGramFactor()) - .analyzer(WORD_GRAM_4_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams2") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getTwoGramFactor()) + .analyzer(WORD_GRAM_2_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams3") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getThreeGramFactor()) + .analyzer(WORD_GRAM_3_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams4") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getFourGramFactor()) + .analyzer(WORD_GRAM_4_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); } } } @@ -224,11 +264,18 @@ private Set<SearchFieldConfig> getStandardFields(@Nonnull EntitySpec entitySpec) Set<SearchFieldConfig> fields = new HashSet<>(); // Always present - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - - fields.add(SearchFieldConfig.detectSubFieldType("urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); - fields.add(SearchFieldConfig.detectSubFieldType("urn.delimited", urnBoost * partialConfiguration.getUrnFactor(), - SearchableAnnotation.FieldType.URN, true)); + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn.delimited", + urnBoost * partialConfiguration.getUrnFactor(), + SearchableAnnotation.FieldType.URN, + true)); fields.addAll(getFieldsFromEntitySpec(entitySpec)); @@ -242,9 +289,11 @@ private static String unquote(String query) { private static boolean isQuoted(String query) { return Stream.of("\"", "'").anyMatch(query::contains); } - private Optional<QueryBuilder> getSimpleQuery(@Nullable QueryConfiguration customQueryConfig, - List<EntitySpec> entitySpecs, - String sanitizedQuery) { + + private Optional<QueryBuilder> getSimpleQuery( + @Nullable QueryConfiguration customQueryConfig, + List<EntitySpec> entitySpecs, + String sanitizedQuery) { Optional<QueryBuilder> result = Optional.empty(); final boolean executeSimpleQuery; @@ -263,25 +312,34 @@ private Optional<QueryBuilder> getSimpleQuery(@Nullable QueryConfiguration custo 
BoolQueryBuilder simplePerField = QueryBuilders.boolQuery(); // Simple query string does not use per field analyzers // Group the fields by analyzer - Map<String, List<SearchFieldConfig>> analyzerGroup = entitySpecs.stream() + Map<String, List<SearchFieldConfig>> analyzerGroup = + entitySpecs.stream() .map(this::getStandardFields) .flatMap(Set::stream) .filter(SearchFieldConfig::isQueryByDefault) .collect(Collectors.groupingBy(SearchFieldConfig::analyzer)); - analyzerGroup.keySet().stream().sorted().filter(str -> !str.contains("word_gram")).forEach(analyzer -> { - List<SearchFieldConfig> fieldConfigs = analyzerGroup.get(analyzer); - SimpleQueryStringBuilder simpleBuilder = QueryBuilders.simpleQueryStringQuery(sanitizedQuery); - simpleBuilder.analyzer(analyzer); - simpleBuilder.defaultOperator(Operator.AND); - Map<String, List<SearchFieldConfig>> fieldAnalyzers = fieldConfigs.stream().collect(Collectors.groupingBy(SearchFieldConfig::fieldName)); - // De-duplicate fields across different indices - for (Map.Entry<String, List<SearchFieldConfig>> fieldAnalyzer : fieldAnalyzers.entrySet()) { - SearchFieldConfig cfg = fieldAnalyzer.getValue().get(0); - simpleBuilder.field(cfg.fieldName(), cfg.boost()); - } - simplePerField.should(simpleBuilder); - }); + analyzerGroup.keySet().stream() + .sorted() + .filter(str -> !str.contains("word_gram")) + .forEach( + analyzer -> { + List<SearchFieldConfig> fieldConfigs = analyzerGroup.get(analyzer); + SimpleQueryStringBuilder simpleBuilder = + QueryBuilders.simpleQueryStringQuery(sanitizedQuery); + simpleBuilder.analyzer(analyzer); + simpleBuilder.defaultOperator(Operator.AND); + Map<String, List<SearchFieldConfig>> fieldAnalyzers = + fieldConfigs.stream() + .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)); + // De-duplicate fields across different indices + for (Map.Entry<String, List<SearchFieldConfig>> fieldAnalyzer : + fieldAnalyzers.entrySet()) { + SearchFieldConfig cfg = fieldAnalyzer.getValue().get(0); + simpleBuilder.field(cfg.fieldName(), cfg.boost()); + } + simplePerField.should(simpleBuilder); + }); result = Optional.of(simplePerField); } @@ -289,99 +347,133 @@ private Optional<QueryBuilder> getSimpleQuery(@Nullable QueryConfiguration custo return result; } - private Optional<QueryBuilder> getPrefixAndExactMatchQuery(@Nullable QueryConfiguration customQueryConfig, - @Nonnull List<EntitySpec> entitySpecs, - String query) { + private Optional<QueryBuilder> getPrefixAndExactMatchQuery( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List<EntitySpec> entitySpecs, + String query) { - final boolean isPrefixQuery = customQueryConfig == null ? exactMatchConfiguration.isWithPrefix() : customQueryConfig.isPrefixMatchQuery(); + final boolean isPrefixQuery = + customQueryConfig == null + ? 
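As the comment in this hunk notes, `simple_query_string` cannot vary the analyzer per field, so `getSimpleQuery` groups fields by analyzer and emits one clause per group under a `should`. A compact sketch of that grouping, with the field/analyzer inputs as sample data:

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.Operator;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.SimpleQueryStringBuilder;

public final class PerAnalyzerQueryExample {
  record Field(String name, float boost, String analyzer) {}

  public static BoolQueryBuilder build(String query, List<Field> fields) {
    BoolQueryBuilder perAnalyzer = QueryBuilders.boolQuery();
    Map<String, List<Field>> byAnalyzer =
        fields.stream().collect(Collectors.groupingBy(Field::analyzer));
    byAnalyzer.forEach(
        (analyzer, group) -> {
          // One builder per analyzer group, since the analyzer applies to
          // every field the clause covers.
          SimpleQueryStringBuilder clause = QueryBuilders.simpleQueryStringQuery(query);
          clause.analyzer(analyzer);
          clause.defaultOperator(Operator.AND);
          group.forEach(f -> clause.field(f.name(), f.boost()));
          perAnalyzer.should(clause);
        });
    return perAnalyzer;
  }
}
```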
exactMatchConfiguration.isWithPrefix() + : customQueryConfig.isPrefixMatchQuery(); final boolean isExactQuery = customQueryConfig == null || customQueryConfig.isExactMatchQuery(); - BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); + BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); String unquotedQuery = unquote(query); - getStandardFields(entitySpecs).forEach(searchFieldConfig -> { - if (searchFieldConfig.isDelimitedSubfield() && isPrefixQuery) { - finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(searchFieldConfig.fieldName(), query) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getPrefixFactor() - * exactMatchConfiguration.getCaseSensitivityFactor()) - .queryName(searchFieldConfig.shortName())); // less than exact - } - - if (searchFieldConfig.isKeyword() && isExactQuery) { - // It is important to use the subfield .keyword (it uses a different normalizer) - // The non-.keyword field removes case information - - // Exact match case-sensitive - finalQuery.should( - QueryBuilders.termQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), unquotedQuery) - .caseInsensitive(false) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor()) - .queryName(searchFieldConfig.shortName())); - - // Exact match case-insensitive - finalQuery.should( - QueryBuilders.termQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), unquotedQuery) - .caseInsensitive(true) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor() - * exactMatchConfiguration.getCaseSensitivityFactor()) - .queryName(searchFieldConfig.fieldName())); - } - - if (searchFieldConfig.isWordGramSubfield() && isPrefixQuery) { - finalQuery.should( - QueryBuilders.matchPhraseQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), - unquotedQuery) - .boost(searchFieldConfig.boost() * getWordGramFactor(searchFieldConfig.fieldName())) - .queryName(searchFieldConfig.shortName())); - } - }); + getStandardFields(entitySpecs) + .forEach( + searchFieldConfig -> { + if (searchFieldConfig.isDelimitedSubfield() && isPrefixQuery) { + finalQuery.should( + QueryBuilders.matchPhrasePrefixQuery(searchFieldConfig.fieldName(), query) + .boost( + searchFieldConfig.boost() + * exactMatchConfiguration.getPrefixFactor() + * exactMatchConfiguration.getCaseSensitivityFactor()) + .queryName(searchFieldConfig.shortName())); // less than exact + } + + if (searchFieldConfig.isKeyword() && isExactQuery) { + // It is important to use the subfield .keyword (it uses a different normalizer) + // The non-.keyword field removes case information + + // Exact match case-sensitive + finalQuery.should( + QueryBuilders.termQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .caseInsensitive(false) + .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor()) + .queryName(searchFieldConfig.shortName())); + + // Exact match case-insensitive + finalQuery.should( + QueryBuilders.termQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .caseInsensitive(true) + .boost( + searchFieldConfig.boost() + * exactMatchConfiguration.getExactFactor() + * exactMatchConfiguration.getCaseSensitivityFactor()) + .queryName(searchFieldConfig.fieldName())); + } + + if (searchFieldConfig.isWordGramSubfield() && isPrefixQuery) { + finalQuery.should( + QueryBuilders.matchPhraseQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .boost( + searchFieldConfig.boost() + * 
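The exact-match branch above issues the same term query twice against the `.keyword` subfield: once case-sensitive at full boost and once case-insensitive at a discounted boost, so exact-case hits rank higher while case-variant hits still match. Reduced to its essentials, with the boost factors invented for illustration (the patch reads them from `ExactMatchConfiguration`):

```java
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilders;

public final class ExactMatchExample {
  public static BoolQueryBuilder exactMatch(String field, String value, float boost) {
    float exactFactor = 10.0f; // illustrative
    float caseFactor = 0.7f;   // illustrative
    // The .keyword subfield keeps case information; the analyzed field does not.
    String keywordField = field + ".keyword";

    BoolQueryBuilder query = QueryBuilders.boolQuery();
    query.should(
        QueryBuilders.termQuery(keywordField, value)
            .caseInsensitive(false)
            .boost(boost * exactFactor));
    query.should(
        QueryBuilders.termQuery(keywordField, value)
            .caseInsensitive(true)
            .boost(boost * exactFactor * caseFactor)); // less than exact-case
    return query;
  }
}
```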
getWordGramFactor(searchFieldConfig.fieldName())) + .queryName(searchFieldConfig.shortName())); + } + }); return finalQuery.should().size() > 0 ? Optional.of(finalQuery) : Optional.empty(); } - private FunctionScoreQueryBuilder buildScoreFunctions(@Nullable QueryConfiguration customQueryConfig, - @Nonnull List<EntitySpec> entitySpecs, - @Nonnull QueryBuilder queryBuilder) { + private FunctionScoreQueryBuilder buildScoreFunctions( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull QueryBuilder queryBuilder) { if (customQueryConfig != null) { // Prefer configuration function scoring over annotation scoring return functionScoreQueryBuilder(customQueryConfig, queryBuilder); } else { - return QueryBuilders.functionScoreQuery(queryBuilder, buildAnnotationScoreFunctions(entitySpecs)) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) // Average score functions - .boostMode(CombineFunction.MULTIPLY); // Multiply score function with the score from query; + return QueryBuilders.functionScoreQuery( + queryBuilder, buildAnnotationScoreFunctions(entitySpecs)) + .scoreMode(FunctionScoreQuery.ScoreMode.AVG) // Average score functions + .boostMode( + CombineFunction.MULTIPLY); // Multiply score function with the score from query; } } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotationScoreFunctions(@Nonnull List<EntitySpec> entitySpecs) { + private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotationScoreFunctions( + @Nonnull List<EntitySpec> entitySpecs) { List<FunctionScoreQueryBuilder.FilterFunctionBuilder> finalScoreFunctions = new ArrayList<>(); // Add a default weight of 1.0 to make sure the score function is larger than 1 finalScoreFunctions.add( - new FunctionScoreQueryBuilder.FilterFunctionBuilder(ScoreFunctionBuilders.weightFactorFunction(1.0f))); - - Map<String, SearchableAnnotation> annotations = entitySpecs.stream() - .map(EntitySpec::getSearchableFieldSpecs) - .flatMap(List::stream) - .map(SearchableFieldSpec::getSearchableAnnotation) - .collect(Collectors.toMap(SearchableAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> annotation1)); + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + ScoreFunctionBuilders.weightFactorFunction(1.0f))); + + Map<String, SearchableAnnotation> annotations = + entitySpecs.stream() + .map(EntitySpec::getSearchableFieldSpecs) + .flatMap(List::stream) + .map(SearchableFieldSpec::getSearchableAnnotation) + .collect( + Collectors.toMap( + SearchableAnnotation::getFieldName, + annotation -> annotation, + (annotation1, annotation2) -> annotation1)); for (Map.Entry<String, SearchableAnnotation> annotationEntry : annotations.entrySet()) { SearchableAnnotation annotation = annotationEntry.getValue(); - annotation - .getWeightsPerFieldValue() - .entrySet() - .stream() - .map(entry -> buildWeightFactorFunction(annotation.getFieldName(), entry.getKey(), - entry.getValue())).forEach(finalScoreFunctions::add); + annotation.getWeightsPerFieldValue().entrySet().stream() + .map( + entry -> + buildWeightFactorFunction( + annotation.getFieldName(), entry.getKey(), entry.getValue())) + .forEach(finalScoreFunctions::add); } - Map<String, SearchScoreAnnotation> searchScoreAnnotationMap = entitySpecs.stream() - .map(EntitySpec::getSearchScoreFieldSpecs) - .flatMap(List::stream) - .map(SearchScoreFieldSpec::getSearchScoreAnnotation) - .collect(Collectors.toMap(SearchScoreAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> 
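`buildAnnotationScoreFunctions` above seeds the function list with a constant weight of 1.0 and then appends one filtered weight function per configured field value. The overall shape, standalone, with a sample field/weight pair in place of the annotation-driven ones:

```java
import java.util.ArrayList;
import java.util.List;
import org.opensearch.common.lucene.search.function.CombineFunction;
import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.opensearch.index.query.functionscore.ScoreFunctionBuilders;

public final class ScoreFunctionExample {
  public static FunctionScoreQueryBuilder score(QueryBuilder base) {
    List<FunctionScoreQueryBuilder.FilterFunctionBuilder> fns = new ArrayList<>();
    // Default weight keeps every combined score function >= 1, as in the patch.
    fns.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(
        ScoreFunctionBuilders.weightFactorFunction(1.0f)));
    // Sample per-value weight: down-weight documents whose flag field is true.
    fns.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(
        QueryBuilders.termQuery("materialized", true),
        ScoreFunctionBuilders.weightFactorFunction(0.5f)));
    return QueryBuilders.functionScoreQuery(
            base, fns.toArray(new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]))
        .scoreMode(FunctionScoreQuery.ScoreMode.AVG)  // average the functions
        .boostMode(CombineFunction.MULTIPLY);         // multiply into the query score
  }
}
```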
annotation1)); - for (Map.Entry<String, SearchScoreAnnotation> searchScoreAnnotationEntry : searchScoreAnnotationMap.entrySet()) { + Map<String, SearchScoreAnnotation> searchScoreAnnotationMap = + entitySpecs.stream() + .map(EntitySpec::getSearchScoreFieldSpecs) + .flatMap(List::stream) + .map(SearchScoreFieldSpec::getSearchScoreAnnotation) + .collect( + Collectors.toMap( + SearchScoreAnnotation::getFieldName, + annotation -> annotation, + (annotation1, annotation2) -> annotation1)); + for (Map.Entry<String, SearchScoreAnnotation> searchScoreAnnotationEntry : + searchScoreAnnotationMap.entrySet()) { SearchScoreAnnotation annotation = searchScoreAnnotationEntry.getValue(); finalScoreFunctions.add(buildScoreFunctionFromSearchScoreAnnotation(annotation)); } @@ -389,14 +481,15 @@ private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotation return finalScoreFunctions.toArray(new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]); } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildWeightFactorFunction(@Nonnull String fieldName, - @Nonnull Object fieldValue, double weight) { - return new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery(fieldName, fieldValue), + private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildWeightFactorFunction( + @Nonnull String fieldName, @Nonnull Object fieldValue, double weight) { + return new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery(fieldName, fieldValue), ScoreFunctionBuilders.weightFactorFunction((float) weight)); } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildScoreFunctionFromSearchScoreAnnotation( - @Nonnull SearchScoreAnnotation annotation) { + private static FunctionScoreQueryBuilder.FilterFunctionBuilder + buildScoreFunctionFromSearchScoreAnnotation(@Nonnull SearchScoreAnnotation annotation) { FieldValueFactorFunctionBuilder scoreFunction = ScoreFunctionBuilders.fieldValueFactorFunction(annotation.getFieldName()); scoreFunction.factor((float) annotation.getWeight()); @@ -405,7 +498,8 @@ private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildScoreFunctio return new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction); } - private static FieldValueFactorFunction.Modifier mapModifier(SearchScoreAnnotation.Modifier modifier) { + private static FieldValueFactorFunction.Modifier mapModifier( + SearchScoreAnnotation.Modifier modifier) { switch (modifier) { case LOG: return FieldValueFactorFunction.Modifier.LOG1P; @@ -422,33 +516,43 @@ private static FieldValueFactorFunction.Modifier mapModifier(SearchScoreAnnotati } } - public FunctionScoreQueryBuilder functionScoreQueryBuilder(QueryConfiguration customQueryConfiguration, - QueryBuilder queryBuilder) { + public FunctionScoreQueryBuilder functionScoreQueryBuilder( + QueryConfiguration customQueryConfiguration, QueryBuilder queryBuilder) { return toFunctionScoreQueryBuilder(queryBuilder, customQueryConfiguration.getFunctionScore()); } - public Optional<BoolQueryBuilder> boolQueryBuilder(QueryConfiguration customQueryConfiguration, String query) { + public Optional<BoolQueryBuilder> boolQueryBuilder( + QueryConfiguration customQueryConfiguration, String query) { if (customQueryConfiguration.getBoolQuery() != null) { - log.debug("Using custom query configuration queryRegex: {}", customQueryConfiguration.getQueryRegex()); + log.debug( + "Using custom query configuration queryRegex: {}", + customQueryConfiguration.getQueryRegex()); } - return 
Optional.ofNullable(customQueryConfiguration.getBoolQuery()).map(bq -> toBoolQueryBuilder(query, bq)); + return Optional.ofNullable(customQueryConfiguration.getBoolQuery()) + .map(bq -> toBoolQueryBuilder(query, bq)); } private BoolQueryBuilder toBoolQueryBuilder(String query, BoolQueryConfiguration boolQuery) { try { - String jsonFragment = OBJECT_MAPPER.writeValueAsString(boolQuery) - .replace("\"{{query_string}}\"", OBJECT_MAPPER.writeValueAsString(query)) - .replace("\"{{unquoted_query_string}}\"", OBJECT_MAPPER.writeValueAsString(unquote(query))); - XContentParser parser = XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, jsonFragment); + String jsonFragment = + OBJECT_MAPPER + .writeValueAsString(boolQuery) + .replace("\"{{query_string}}\"", OBJECT_MAPPER.writeValueAsString(query)) + .replace( + "\"{{unquoted_query_string}}\"", + OBJECT_MAPPER.writeValueAsString(unquote(query))); + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, jsonFragment); return BoolQueryBuilder.fromXContent(parser); } catch (IOException e) { throw new RuntimeException(e); } } - private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder(QueryBuilder queryBuilder, - Map<String, Object> params) { + private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder( + QueryBuilder queryBuilder, Map<String, Object> params) { try { HashMap<String, Object> body = new HashMap<>(params); if (!body.isEmpty()) { @@ -457,11 +561,11 @@ private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder(QueryBuilder query body.put("query", OBJECT_MAPPER.readValue(queryBuilder.toString(), Map.class)); - String jsonFragment = OBJECT_MAPPER.writeValueAsString(Map.of( - "function_score", body - )); - XContentParser parser = XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, jsonFragment); + String jsonFragment = OBJECT_MAPPER.writeValueAsString(Map.of("function_score", body)); + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, jsonFragment); return (FunctionScoreQueryBuilder) FunctionScoreQueryBuilder.parseInnerQueryBuilder(parser); } catch (IOException e) { throw new RuntimeException(e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 0df6afd49c373..80da30229b74c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -1,13 +1,18 @@ package com.linkedin.metadata.search.elasticsearch.query.request; -import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; +import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.urn.Urn; 
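`toBoolQueryBuilder` above splices the (JSON-escaped) user query into a JSON template and parses the result back into a `BoolQueryBuilder` through the registry built from `SearchModule`. The round trip in isolation; the template literal here is illustrative, since real templates come from the custom search configuration:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.Collections;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.NamedXContentRegistry;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.search.SearchModule;

public final class BoolTemplateExample {
  private static final ObjectMapper MAPPER = new ObjectMapper();
  private static final NamedXContentRegistry REGISTRY =
      new NamedXContentRegistry(
          new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents());

  public static BoolQueryBuilder parse(String query) throws IOException {
    // Illustrative template body for the inner "bool" object.
    String template = "{\"must\":[{\"match\":{\"name\":\"{{query_string}}\"}}]}";
    // writeValueAsString JSON-escapes the raw query before substitution.
    String json = template.replace("\"{{query_string}}\"", MAPPER.writeValueAsString(query));
    XContentParser parser =
        XContentType.JSON
            .xContent()
            .createParser(REGISTRY, LoggingDeprecationHandler.INSTANCE, json);
    return BoolQueryBuilder.fromXContent(parser);
  }
}
```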
import com.linkedin.data.template.DoubleMap; import com.linkedin.data.template.LongMap; +import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; @@ -51,7 +56,6 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang.StringUtils; import org.opensearch.action.search.SearchRequest; @@ -72,25 +76,21 @@ import org.opensearch.search.fetch.subphase.highlight.HighlightField; import org.opensearch.search.suggest.term.TermSuggestion; -import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; -import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class SearchRequestHandler { - private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = new SearchFlags() + private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = + new SearchFlags() .setFulltext(false) .setMaxAggValues(20) .setSkipCache(false) .setSkipAggregates(false) .setSkipHighlighting(false); - private static final Map<List<EntitySpec>, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); + private static final Map<List<EntitySpec>, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = + new ConcurrentHashMap<>(); private static final String REMOVED = "removed"; private static final String URN_FILTER = "urn"; - private static final String[] FIELDS_TO_FETCH = new String[]{"urn", "usageCountLast30Days"}; - private static final String[] URN_FIELD = new String[]{"urn"}; + private static final String[] FIELDS_TO_FETCH = new String[] {"urn", "usageCountLast30Days"}; + private static final String[] URN_FIELD = new String[] {"urn"}; private final List<EntitySpec> _entitySpecs; private final Set<String> _defaultQueryFieldNames; @@ -100,19 +100,24 @@ public class SearchRequestHandler { private final SearchQueryBuilder _searchQueryBuilder; private final AggregationQueryBuilder _aggregationQueryBuilder; - private SearchRequestHandler(@Nonnull EntitySpec entitySpec, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + private SearchRequestHandler( + @Nonnull EntitySpec entitySpec, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { this(ImmutableList.of(entitySpec), configs, customSearchConfiguration); } - private SearchRequestHandler(@Nonnull List<EntitySpec> entitySpecs, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + private SearchRequestHandler( + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { _entitySpecs = entitySpecs; List<SearchableAnnotation> annotations = getSearchableAnnotations(); _defaultQueryFieldNames = getDefaultQueryFieldNames(annotations); - _filtersToDisplayName = annotations.stream() - .flatMap(annotation -> getFacetFieldDisplayNameFromAnnotation(annotation).stream()) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, mapMerger())); + _filtersToDisplayName = + 
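The handler cache reformatted in this hunk keys a `ConcurrentHashMap` on an immutable list of entity specs, so value-equal spec lists share one handler instance. The idiom in miniature, with a toy handler type standing in for `SearchRequestHandler`:

```java
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public final class HandlerCacheExample {
  record Handler(List<String> entityNames) {}

  private static final ConcurrentMap<List<String>, Handler> CACHE = new ConcurrentHashMap<>();

  // List.copyOf yields an unmodifiable list with value-based equals/hashCode,
  // so two callers passing equal spec lists get the same cached handler.
  public static Handler getBuilder(List<String> entityNames) {
    return CACHE.computeIfAbsent(List.copyOf(entityNames), Handler::new);
  }
}
```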
annotations.stream() + .flatMap(annotation -> getFacetFieldDisplayNameFromAnnotation(annotation).stream()) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, mapMerger())); _filtersToDisplayName.put(INDEX_VIRTUAL_FIELD, "Type"); _highlights = getHighlights(); _searchQueryBuilder = new SearchQueryBuilder(configs, customSearchConfiguration); @@ -120,16 +125,22 @@ private SearchRequestHandler(@Nonnull List<EntitySpec> entitySpecs, @Nonnull Sea _configs = configs; } - public static SearchRequestHandler getBuilder(@Nonnull EntitySpec entitySpec, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public static SearchRequestHandler getBuilder( + @Nonnull EntitySpec entitySpec, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { return REQUEST_HANDLER_BY_ENTITY_NAME.computeIfAbsent( - ImmutableList.of(entitySpec), k -> new SearchRequestHandler(entitySpec, configs, customSearchConfiguration)); + ImmutableList.of(entitySpec), + k -> new SearchRequestHandler(entitySpec, configs, customSearchConfiguration)); } - public static SearchRequestHandler getBuilder(@Nonnull List<EntitySpec> entitySpecs, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public static SearchRequestHandler getBuilder( + @Nonnull List<EntitySpec> entitySpecs, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { return REQUEST_HANDLER_BY_ENTITY_NAME.computeIfAbsent( - ImmutableList.copyOf(entitySpecs), k -> new SearchRequestHandler(entitySpecs, configs, customSearchConfiguration)); + ImmutableList.copyOf(entitySpecs), + k -> new SearchRequestHandler(entitySpecs, configs, customSearchConfiguration)); } private List<SearchableAnnotation> getSearchableAnnotations() { @@ -142,21 +153,22 @@ private List<SearchableAnnotation> getSearchableAnnotations() { @VisibleForTesting private Set<String> getDefaultQueryFieldNames(List<SearchableAnnotation> annotations) { - return Stream.concat(annotations.stream() - .filter(SearchableAnnotation::isQueryByDefault) - .map(SearchableAnnotation::getFieldName), + return Stream.concat( + annotations.stream() + .filter(SearchableAnnotation::isQueryByDefault) + .map(SearchableAnnotation::getFieldName), Stream.of("urn")) - .collect(Collectors.toSet()); + .collect(Collectors.toSet()); } // If values are not equal, throw error private BinaryOperator<String> mapMerger() { return (s1, s2) -> { - if (!StringUtils.equals(s1, s2)) { - throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); - } - return s1; - }; + if (!StringUtils.equals(s1, s2)) { + throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); + } + return s1; + }; } public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { @@ -168,7 +180,8 @@ public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { /** * Constructs the search query based on the query request. 
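`mapMerger` above is the merge function handed to `Collectors.toMap`: duplicate facet keys are tolerated only when both sides map to the same display name, and anything else fails loudly. Standalone, with `Objects.equals` standing in for the commons-lang `StringUtils.equals` the patch uses:

```java
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.BinaryOperator;
import java.util.stream.Collectors;

public final class MapMergerExample {
  // Accept duplicates only when both display names agree; otherwise fail loudly,
  // since silently picking one would hide a schema inconsistency.
  static BinaryOperator<String> mapMerger() {
    return (s1, s2) -> {
      if (!Objects.equals(s1, s2)) {
        throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2));
      }
      return s1;
    };
  }

  public static Map<String, String> displayNames(List<Map.Entry<String, String>> pairs) {
    return pairs.stream()
        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, mapMerger()));
  }
}
```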
* - * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or later + * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or + * later * * @param input the search input text * @param filter the search filter @@ -180,10 +193,16 @@ public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { */ @Nonnull @WithSpan - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List<String> facets) { - SearchFlags finalSearchFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + public SearchRequest getSearchRequest( + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List<String> facets) { + SearchFlags finalSearchFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -193,7 +212,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi searchSourceBuilder.fetchSource("urn", null); BoolQueryBuilder filterQuery = getFilterQuery(filter); - searchSourceBuilder.query(QueryBuilders.boolQuery() + searchSourceBuilder.query( + QueryBuilders.boolQuery() .must(getQuery(input, finalSearchFlags.isFulltext())) .filter(filterQuery)); if (!finalSearchFlags.isSkipAggregates()) { @@ -217,7 +237,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi /** * Constructs the search query based on the query request. 
* - * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or later + * <p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or + * later * * @param input the search input text * @param filter the search filter @@ -227,11 +248,18 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi */ @Nonnull @WithSpan - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable Object[] sort, @Nullable String pitId, @Nullable String keepAlive, - int size, SearchFlags searchFlags) { + public SearchRequest getSearchRequest( + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable Object[] sort, + @Nullable String pitId, + @Nullable String keepAlive, + int size, + SearchFlags searchFlags) { SearchRequest searchRequest = new PITAwareSearchRequest(); - SearchFlags finalSearchFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + SearchFlags finalSearchFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); @@ -240,7 +268,10 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi searchSourceBuilder.fetchSource("urn", null); BoolQueryBuilder filterQuery = getFilterQuery(filter); - searchSourceBuilder.query(QueryBuilders.boolQuery().must(getQuery(input, finalSearchFlags.isFulltext())).filter(filterQuery)); + searchSourceBuilder.query( + QueryBuilders.boolQuery() + .must(getQuery(input, finalSearchFlags.isFulltext())) + .filter(filterQuery)); if (!finalSearchFlags.isSkipAggregates()) { _aggregationQueryBuilder.getAggregations().forEach(searchSourceBuilder::aggregation); } @@ -256,8 +287,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi } /** - * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion to be applied to - * search results. + * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion + * to be applied to search results. * * @param filters {@link Filter} list of conditions with fields and values * @param sortCriterion {@link SortCriterion} to be applied to the search results @@ -266,8 +297,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi * @return {@link SearchRequest} that contains the filtered query */ @Nonnull - public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, - int size) { + public SearchRequest getFilterRequest( + @Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, int size) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filters); @@ -281,10 +312,10 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr } /** - * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion to be applied to - * search results. + * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion + * to be applied to search results. 
* - * TODO: Used in batch ingestion from ingestion scheduler + * <p>TODO: Used in batch ingestion from ingestion scheduler * * @param filters {@link Filter} list of conditions with fields and values * @param sortCriterion {@link SortCriterion} to be applied to the search results @@ -295,8 +326,13 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr * @return {@link SearchRequest} that contains the filtered query */ @Nonnull - public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCriterion sortCriterion, @Nullable Object[] sort, - @Nullable String pitId, @Nonnull String keepAlive, int size) { + public SearchRequest getFilterRequest( + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + int size) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filters); @@ -320,14 +356,16 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr * @return {@link SearchRequest} that contains the aggregation query */ @Nonnull - public static SearchRequest getAggregationRequest(@Nonnull String field, @Nullable Filter filter, int limit) { + public static SearchRequest getAggregationRequest( + @Nonnull String field, @Nullable Filter filter, int limit) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filter); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQuery); searchSourceBuilder.size(0); - searchSourceBuilder.aggregation(AggregationBuilders.terms(field).field(ESUtils.toKeywordField(field, false)).size(limit)); + searchSourceBuilder.aggregation( + AggregationBuilders.terms(field).field(ESUtils.toKeywordField(field, false)).size(limit)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -347,19 +385,22 @@ public HighlightBuilder getHighlights() { // Check for each field name and any subfields _defaultQueryFieldNames.stream() - .flatMap(fieldName -> Stream.of(fieldName, fieldName + ".*")).distinct() - .forEach(highlightBuilder::field); + .flatMap(fieldName -> Stream.of(fieldName, fieldName + ".*")) + .distinct() + .forEach(highlightBuilder::field); return highlightBuilder; } @WithSpan - public SearchResult extractResult(@Nonnull SearchResponse searchResponse, Filter filter, int from, int size) { + public SearchResult extractResult( + @Nonnull SearchResponse searchResponse, Filter filter, int from, int size) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; List<SearchEntity> resultList = getResults(searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); - return new SearchResult().setEntities(new SearchEntityArray(resultList)) + return new SearchResult() + .setEntities(new SearchEntityArray(resultList)) .setMetadata(searchResultMetadata) .setFrom(from) .setPageSize(size) @@ -367,8 +408,13 @@ public SearchResult extractResult(@Nonnull SearchResponse searchResponse, Filter } @WithSpan - public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, Filter filter, @Nullable String scrollId, - @Nullable String keepAlive, int size, boolean supportsPointInTime) { + public ScrollResult extractScrollResult( + @Nonnull SearchResponse searchResponse, + Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + boolean supportsPointInTime) { int 
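`getAggregationRequest` above fetches facet counts only: `size(0)` suppresses document hits, and a single terms aggregation on the keyword form of the field carries the result. An equivalent standalone request builder, with a plain `".keyword"` suffix standing in for `ESUtils.toKeywordField`:

```java
import org.opensearch.action.search.SearchRequest;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

public final class AggregationRequestExample {
  public static SearchRequest aggregationRequest(String field, QueryBuilder filter, int limit) {
    SearchSourceBuilder source = new SearchSourceBuilder();
    source.query(filter);
    source.size(0); // counts only, no documents
    source.aggregation(
        AggregationBuilders.terms(field).field(field + ".keyword").size(limit));
    return new SearchRequest().source(source);
  }
}
```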
totalCount = (int) searchResponse.getHits().getTotalHits().value; List<SearchEntity> resultList = getResults(searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); @@ -379,15 +425,21 @@ public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, Object[] sort = searchHits[searchHits.length - 1].getSortValues(); long expirationTimeMs = 0L; if (keepAlive != null && supportsPointInTime) { - expirationTimeMs = TimeValue.parseTimeValue(keepAlive, "expirationTime").getMillis() + System.currentTimeMillis(); + expirationTimeMs = + TimeValue.parseTimeValue(keepAlive, "expirationTime").getMillis() + + System.currentTimeMillis(); } - nextScrollId = new SearchAfterWrapper(sort, searchResponse.pointInTimeId(), expirationTimeMs).toScrollId(); + nextScrollId = + new SearchAfterWrapper(sort, searchResponse.pointInTimeId(), expirationTimeMs) + .toScrollId(); } - ScrollResult scrollResult = new ScrollResult().setEntities(new SearchEntityArray(resultList)) - .setMetadata(searchResultMetadata) - .setPageSize(size) - .setNumEntities(totalCount); + ScrollResult scrollResult = + new ScrollResult() + .setEntities(new SearchEntityArray(resultList)) + .setMetadata(searchResultMetadata) + .setPageSize(size) + .setNumEntities(totalCount); if (nextScrollId != null) { scrollResult.setScrollId(nextScrollId); @@ -418,17 +470,20 @@ private List<MatchedField> extractMatchedFields(@Nonnull SearchHit hit) { if (!highlightedFieldNamesAndValues.containsKey(queryName)) { if (hit.getFields().containsKey(queryName)) { for (Object fieldValue : hit.getFields().get(queryName).getValues()) { - highlightedFieldNamesAndValues.computeIfAbsent(queryName, k -> new HashSet<>()).add(fieldValue.toString()); + highlightedFieldNamesAndValues + .computeIfAbsent(queryName, k -> new HashSet<>()) + .add(fieldValue.toString()); } } else { highlightedFieldNamesAndValues.put(queryName, Set.of("")); } } } - return highlightedFieldNamesAndValues.entrySet() - .stream() + return highlightedFieldNamesAndValues.entrySet().stream() .flatMap( - entry -> entry.getValue().stream().map(value -> new MatchedField().setName(entry.getKey()).setValue(value))) + entry -> + entry.getValue().stream() + .map(value -> new MatchedField().setName(entry.getKey()).setValue(value))) .collect(Collectors.toList()); } @@ -438,11 +493,13 @@ private Optional<String> getFieldName(String matchedField) { } private Map<String, Double> extractFeatures(@Nonnull SearchHit searchHit) { - return ImmutableMap.of(Features.Name.SEARCH_BACKEND_SCORE.toString(), (double) searchHit.getScore()); + return ImmutableMap.of( + Features.Name.SEARCH_BACKEND_SCORE.toString(), (double) searchHit.getScore()); } private SearchEntity getResult(@Nonnull SearchHit hit) { - return new SearchEntity().setEntity(getUrnFromSearchHit(hit)) + return new SearchEntity() + .setEntity(getUrnFromSearchHit(hit)) .setMatchedFields(new MatchedFieldArray(extractMatchedFields(hit))) .setScore(hit.getScore()) .setFeatures(new DoubleMap(extractFeatures(hit))); @@ -456,7 +513,9 @@ private SearchEntity getResult(@Nonnull SearchHit hit) { */ @Nonnull private List<SearchEntity> getResults(@Nonnull SearchResponse searchResponse) { - return Arrays.stream(searchResponse.getHits().getHits()).map(this::getResult).collect(Collectors.toList()); + return Arrays.stream(searchResponse.getHits().getHits()) + .map(this::getResult) + .collect(Collectors.toList()); } @Nonnull @@ -473,15 +532,17 @@ private Urn getUrnFromSearchHit(@Nonnull SearchHit hit) { * * @param 
searchResponse the raw {@link SearchResponse} as obtained from the search engine * @param filter the provided Filter to use with Elasticsearch - * - * @return {@link SearchResultMetadata} with aggregation and list of urns obtained from {@link SearchResponse} + * @return {@link SearchResultMetadata} with aggregation and list of urns obtained from {@link + * SearchResponse} */ @Nonnull - private SearchResultMetadata extractSearchResultMetadata(@Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + private SearchResultMetadata extractSearchResultMetadata( + @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { final SearchResultMetadata searchResultMetadata = new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); - final List<AggregationMetadata> aggregationMetadataList = extractAggregationMetadata(searchResponse, filter); + final List<AggregationMetadata> aggregationMetadataList = + extractAggregationMetadata(searchResponse, filter); searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); final List<SearchSuggestion> searchSuggestions = extractSearchSuggestions(searchResponse); @@ -494,34 +555,43 @@ private String computeDisplayName(String name) { if (_filtersToDisplayName.containsKey(name)) { return _filtersToDisplayName.get(name); } else if (name.contains(AGGREGATION_SEPARATOR_CHAR)) { - return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)).map(_filtersToDisplayName::get).collect( - Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); + return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)) + .map(_filtersToDisplayName::get) + .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } return name; } - private List<AggregationMetadata> extractAggregationMetadata(@Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + private List<AggregationMetadata> extractAggregationMetadata( + @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { final List<AggregationMetadata> aggregationMetadataList = new ArrayList<>(); if (searchResponse.getAggregations() == null) { return addFiltersToAggregationMetadata(aggregationMetadataList, filter); } - for (Map.Entry<String, Aggregation> entry : searchResponse.getAggregations().getAsMap().entrySet()) { - final Map<String, Long> oneTermAggResult = extractTermAggregations((ParsedTerms) entry.getValue(), entry.getKey().equals("_entityType")); + for (Map.Entry<String, Aggregation> entry : + searchResponse.getAggregations().getAsMap().entrySet()) { + final Map<String, Long> oneTermAggResult = + extractTermAggregations( + (ParsedTerms) entry.getValue(), entry.getKey().equals("_entityType")); if (oneTermAggResult.isEmpty()) { continue; } - final AggregationMetadata aggregationMetadata = new AggregationMetadata().setName(entry.getKey()) - .setDisplayName(computeDisplayName(entry.getKey())) - .setAggregations(new LongMap(oneTermAggResult)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); + final AggregationMetadata aggregationMetadata = + new AggregationMetadata() + .setName(entry.getKey()) + .setDisplayName(computeDisplayName(entry.getKey())) + .setAggregations(new LongMap(oneTermAggResult)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); aggregationMetadataList.add(aggregationMetadata); } return addFiltersToAggregationMetadata(aggregationMetadataList, filter); - } + } @WithSpan - public static Map<String, Long> 
extractTermAggregations(@Nonnull SearchResponse searchResponse, - @Nonnull String aggregationName) { + public static Map<String, Long> extractTermAggregations( + @Nonnull SearchResponse searchResponse, @Nonnull String aggregationName) { if (searchResponse.getAggregations() == null) { return Collections.emptyMap(); } @@ -530,7 +600,8 @@ public static Map<String, Long> extractTermAggregations(@Nonnull SearchResponse if (aggregation == null) { return Collections.emptyMap(); } - return extractTermAggregations((ParsedTerms) aggregation, aggregationName.equals("_entityType")); + return extractTermAggregations( + (ParsedTerms) aggregation, aggregationName.equals("_entityType")); } private List<SearchSuggestion> extractSearchSuggestions(@Nonnull SearchResponse searchResponse) { @@ -538,13 +609,18 @@ private List<SearchSuggestion> extractSearchSuggestions(@Nonnull SearchResponse if (searchResponse.getSuggest() != null) { TermSuggestion termSuggestion = searchResponse.getSuggest().getSuggestion(NAME_SUGGESTION); if (termSuggestion != null && termSuggestion.getEntries().size() > 0) { - termSuggestion.getEntries().get(0).getOptions().forEach(suggestOption -> { - SearchSuggestion searchSuggestion = new SearchSuggestion(); - searchSuggestion.setText(String.valueOf(suggestOption.getText())); - searchSuggestion.setFrequency(suggestOption.getFreq()); - searchSuggestion.setScore(suggestOption.getScore()); - searchSuggestions.add(searchSuggestion); - }); + termSuggestion + .getEntries() + .get(0) + .getOptions() + .forEach( + suggestOption -> { + SearchSuggestion searchSuggestion = new SearchSuggestion(); + searchSuggestion.setText(String.valueOf(suggestOption.getText())); + searchSuggestion.setFrequency(suggestOption.getFreq()); + searchSuggestion.setScore(suggestOption.getScore()); + searchSuggestions.add(searchSuggestion); + }); } } return searchSuggestions; @@ -552,6 +628,7 @@ private List<SearchSuggestion> extractSearchSuggestions(@Nonnull SearchResponse /** * Adds nested sub-aggregation values to the aggregated results + * * @param aggs The aggregations to traverse. Could be null (base case) * @return A map from names to aggregation count values */ @@ -568,8 +645,9 @@ private static Map<String, Long> recursivelyAddNestedSubAggs(@Nullable Aggregati String key = bucket.getKeyAsString(); // Gets filtered sub aggregation doc count if exist Map<String, Long> subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); - for (Map.Entry<String, Long> subAggEntry: subAggs.entrySet()) { - aggResult.put(key + AGGREGATION_SEPARATOR_CHAR + subAggEntry.getKey(), subAggEntry.getValue()); + for (Map.Entry<String, Long> subAggEntry : subAggs.entrySet()) { + aggResult.put( + key + AGGREGATION_SEPARATOR_CHAR + subAggEntry.getKey(), subAggEntry.getValue()); } long docCount = bucket.getDocCount(); if (docCount > 0) { @@ -588,7 +666,8 @@ private static Map<String, Long> recursivelyAddNestedSubAggs(@Nullable Aggregati * @return a map with aggregation key and corresponding doc counts */ @Nonnull - private static Map<String, Long> extractTermAggregations(@Nonnull ParsedTerms terms, boolean includeZeroes) { + private static Map<String, Long> extractTermAggregations( + @Nonnull ParsedTerms terms, boolean includeZeroes) { final Map<String, Long> aggResult = new HashMap<>(); List<? 
extends Terms.Bucket> bucketList = terms.getBuckets(); @@ -598,7 +677,9 @@ private static Map<String, Long> extractTermAggregations(@Nonnull ParsedTerms te // Gets filtered sub aggregation doc count if exist Map<String, Long> subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); for (Map.Entry<String, Long> subAggEntry : subAggs.entrySet()) { - aggResult.put(String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, subAggEntry.getKey()), subAggEntry.getValue()); + aggResult.put( + String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, subAggEntry.getKey()), + subAggEntry.getValue()); } long docCount = bucket.getDocCount(); if (includeZeroes || docCount > 0) { @@ -609,11 +690,10 @@ private static Map<String, Long> extractTermAggregations(@Nonnull ParsedTerms te return aggResult; } - /** - * Injects the missing conjunctive filters into the aggregations list. - */ - public List<AggregationMetadata> addFiltersToAggregationMetadata(@Nonnull final List<AggregationMetadata> originalMetadata, @Nullable final Filter filter) { - if (filter == null) { + /** Injects the missing conjunctive filters into the aggregations list. */ + public List<AggregationMetadata> addFiltersToAggregationMetadata( + @Nonnull final List<AggregationMetadata> originalMetadata, @Nullable final Filter filter) { + if (filter == null) { return originalMetadata; } if (filter.hasOr()) { @@ -624,14 +704,18 @@ public List<AggregationMetadata> addFiltersToAggregationMetadata(@Nonnull final return originalMetadata; } - void addOrFiltersToAggregationMetadata(@Nonnull final ConjunctiveCriterionArray or, @Nonnull final List<AggregationMetadata> originalMetadata) { + void addOrFiltersToAggregationMetadata( + @Nonnull final ConjunctiveCriterionArray or, + @Nonnull final List<AggregationMetadata> originalMetadata) { for (ConjunctiveCriterion conjunction : or) { // For each item in the conjunction, inject an empty aggregation if necessary addCriteriaFiltersToAggregationMetadata(conjunction.getAnd(), originalMetadata); } } - private void addCriteriaFiltersToAggregationMetadata(@Nonnull final CriterionArray criteria, @Nonnull final List<AggregationMetadata> originalMetadata) { + private void addCriteriaFiltersToAggregationMetadata( + @Nonnull final CriterionArray criteria, + @Nonnull final List<AggregationMetadata> originalMetadata) { for (Criterion criterion : criteria) { addCriterionFiltersToAggregationMetadata(criterion, originalMetadata); } @@ -642,19 +726,25 @@ private void addCriterionFiltersToAggregationMetadata( @Nonnull final List<AggregationMetadata> aggregationMetadata) { // We should never see duplicate aggregation for the same field in aggregation metadata list. - final Map<String, AggregationMetadata> aggregationMetadataMap = aggregationMetadata.stream().collect(Collectors.toMap( - AggregationMetadata::getName, agg -> agg)); + final Map<String, AggregationMetadata> aggregationMetadataMap = + aggregationMetadata.stream() + .collect(Collectors.toMap(AggregationMetadata::getName, agg -> agg)); // Map a filter criterion to a facet field (e.g. domains.keyword -> domains) final String finalFacetField = toFacetField(criterion.getField()); if (finalFacetField == null) { - log.warn(String.format("Found invalid filter field for entity search. Invalid or unrecognized facet %s", criterion.getField())); + log.warn( + String.format( + "Found invalid filter field for entity search. 
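The bucket walk above flattens nested sub-aggregations into composite keys joined by the aggregation separator, dropping empty buckets. A minimal recursive sketch over the OpenSearch response model; the separator literal is a placeholder for `AGGREGATION_SEPARATOR_CHAR`:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.opensearch.search.aggregations.Aggregation;
import org.opensearch.search.aggregations.Aggregations;
import org.opensearch.search.aggregations.bucket.terms.ParsedTerms;
import org.opensearch.search.aggregations.bucket.terms.Terms;

public final class BucketFlattenExample {
  private static final String SEPARATOR = "\u241E"; // placeholder separator

  public static Map<String, Long> flatten(ParsedTerms terms) {
    Map<String, Long> result = new HashMap<>();
    List<? extends Terms.Bucket> buckets = terms.getBuckets();
    for (Terms.Bucket bucket : buckets) {
      String key = bucket.getKeyAsString();
      if (bucket.getDocCount() > 0) {
        result.put(key, bucket.getDocCount());
      }
      // Recurse into nested terms sub-aggregations, prefixing child keys
      // with the parent key plus the separator.
      Aggregations subAggs = bucket.getAggregations();
      if (subAggs != null) {
        for (Aggregation sub : subAggs) {
          if (sub instanceof ParsedTerms parsed) {
            flatten(parsed).forEach((k, v) -> result.put(key + SEPARATOR + k, v));
          }
        }
      }
    }
    return result;
  }
}
```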
Invalid or unrecognized facet %s", + criterion.getField())); return; } - // We don't want to add urn filters to the aggregations we return as a sidecar to search results. - // They are automatically added by searchAcrossLineage and we dont need them to show up in the filter panel. + // We don't want to add urn filters to the aggregations we return as a sidecar to search + // results. + // They are automatically added by searchAcrossLineage and we dont need them to show up in the + // filter panel. if (finalFacetField.equals(URN_FILTER)) { return; } @@ -667,7 +757,10 @@ private void addCriterionFiltersToAggregationMetadata( */ AggregationMetadata originalAggMetadata = aggregationMetadataMap.get(finalFacetField); if (criterion.hasValues()) { - criterion.getValues().stream().forEach(value -> addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata)); + criterion.getValues().stream() + .forEach( + value -> + addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata)); } else { addMissingAggregationValueToAggregationMetadata(criterion.getValue(), originalAggMetadata); } @@ -678,21 +771,25 @@ private void addCriterionFiltersToAggregationMetadata( * If there are no results for a particular facet, it will NOT be in the original aggregation set returned by * Elasticsearch. */ - aggregationMetadata.add(buildAggregationMetadata( - finalFacetField, - _filtersToDisplayName.getOrDefault(finalFacetField, finalFacetField), - new LongMap(criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))), - new FilterValueArray(criterion.getValues().stream().map(value -> createFilterValue(value, 0L, true)).collect( - Collectors.toList()))) - ); + aggregationMetadata.add( + buildAggregationMetadata( + finalFacetField, + _filtersToDisplayName.getOrDefault(finalFacetField, finalFacetField), + new LongMap( + criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))), + new FilterValueArray( + criterion.getValues().stream() + .map(value -> createFilterValue(value, 0L, true)) + .collect(Collectors.toList())))); } } - private void addMissingAggregationValueToAggregationMetadata(@Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) { - if ( - originalMetadata.getAggregations().entrySet().stream().noneMatch(entry -> value.equals(entry.getKey())) - || originalMetadata.getFilterValues().stream().noneMatch(entry -> entry.getValue().equals(value)) - ) { + private void addMissingAggregationValueToAggregationMetadata( + @Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) { + if (originalMetadata.getAggregations().entrySet().stream() + .noneMatch(entry -> value.equals(entry.getKey())) + || originalMetadata.getFilterValues().stream() + .noneMatch(entry -> entry.getValue().equals(value))) { // No aggregation found for filtered value -- inject one! 
originalMetadata.getAggregations().put(value, 0L); originalMetadata.getFilterValues().add(createFilterValue(value, 0L, true)); @@ -712,8 +809,7 @@ private AggregationMetadata buildAggregationMetadata( } private List<Pair<String, String>> getFacetFieldDisplayNameFromAnnotation( - @Nonnull final SearchableAnnotation annotation - ) { + @Nonnull final SearchableAnnotation annotation) { final List<Pair<String, String>> facetsFromAnnotation = new ArrayList<>(); // Case 1: Default Keyword field if (annotation.isAddToFilters()) { @@ -721,9 +817,8 @@ private List<Pair<String, String>> getFacetFieldDisplayNameFromAnnotation( } // Case 2: HasX boolean field if (annotation.isAddHasValuesToFilters() && annotation.getHasValuesFieldName().isPresent()) { - facetsFromAnnotation.add(Pair.of( - annotation.getHasValuesFieldName().get(), annotation.getHasValuesFilterName() - )); + facetsFromAnnotation.add( + Pair.of(annotation.getHasValuesFieldName().get(), annotation.getHasValuesFilterName())); } return facetsFromAnnotation; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java index be64df3179a9d..b49218f4224a9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java @@ -1,6 +1,10 @@ package com.linkedin.metadata.search.elasticsearch.update; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.bulk.BulkProcessor; @@ -8,12 +12,6 @@ import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.support.WriteRequest; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - - @Slf4j public class BulkListener implements BulkProcessor.Listener { private static final Map<WriteRequest.RefreshPolicy, BulkListener> INSTANCES = new HashMap<>(); @@ -21,6 +19,7 @@ public class BulkListener implements BulkProcessor.Listener { public static BulkListener getInstance() { return INSTANCES.computeIfAbsent(null, BulkListener::new); } + public static BulkListener getInstance(WriteRequest.RefreshPolicy refreshPolicy) { return INSTANCES.computeIfAbsent(refreshPolicy, BulkListener::new); } @@ -41,10 +40,18 @@ public void beforeBulk(long executionId, BulkRequest request) { @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { if (response.hasFailures()) { - log.error("Failed to feed bulk request. Number of events: " + response.getItems().length + " Took time ms: " - + response.getIngestTookInMillis() + " Message: " + response.buildFailureMessage()); + log.error( + "Failed to feed bulk request. Number of events: " + + response.getItems().length + + " Took time ms: " + + response.getIngestTookInMillis() + + " Message: " + + response.buildFailureMessage()); } else { - log.info("Successfully fed bulk request. Number of events: " + response.getItems().length + " Took time ms: " + log.info( + "Successfully fed bulk request. 
Number of events: " + + response.getItems().length + + " Took time ms: " + response.getIngestTookInMillis()); } incrementMetrics(response); @@ -53,20 +60,24 @@ public void afterBulk(long executionId, BulkRequest request, BulkResponse respon @Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) { // Exception raised outside this method - log.error("Error feeding bulk request. No retries left. Request: {}", buildBulkRequestSummary(request), failure); + log.error( + "Error feeding bulk request. No retries left. Request: {}", + buildBulkRequestSummary(request), + failure); incrementMetrics(request, failure); } private static void incrementMetrics(BulkResponse response) { Arrays.stream(response.getItems()) - .map(req -> buildMetricName(req.getOpType(), req.status().name())) - .forEach(metricName -> MetricUtils.counter(BulkListener.class, metricName).inc()); + .map(req -> buildMetricName(req.getOpType(), req.status().name())) + .forEach(metricName -> MetricUtils.counter(BulkListener.class, metricName).inc()); } private static void incrementMetrics(BulkRequest request, Throwable failure) { request.requests().stream() - .map(req -> buildMetricName(req.opType(), "exception")) - .forEach(metricName -> MetricUtils.exceptionCounter(BulkListener.class, metricName, failure)); + .map(req -> buildMetricName(req.opType(), "exception")) + .forEach( + metricName -> MetricUtils.exceptionCounter(BulkListener.class, metricName, failure)); } private static String buildMetricName(DocWriteRequest.OpType opType, String status) { @@ -74,9 +85,12 @@ private static String buildMetricName(DocWriteRequest.OpType opType, String stat } public static String buildBulkRequestSummary(BulkRequest request) { - return request.requests().stream().map(req -> String.format( - "Failed to perform bulk request: index [%s], optype: [%s], type [%s], id [%s]", - req.index(), req.opType(), req.opType(), req.id()) - ).collect(Collectors.joining(";")); + return request.requests().stream() + .map( + req -> + String.format( + "Failed to perform bulk request: index [%s], optype: [%s], type [%s], id [%s]", + req.index(), req.opType(), req.opType(), req.id())) + .collect(Collectors.joining(";")); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java index a1e5b363d8a78..a2b9292eac6e4 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java @@ -1,6 +1,9 @@ package com.linkedin.metadata.search.elasticsearch.update; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.io.Closeable; +import java.io.IOException; +import java.util.Optional; import lombok.AccessLevel; import lombok.Builder; import lombok.Getter; @@ -21,168 +24,181 @@ import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryRequest; -import java.io.Closeable; -import java.io.IOException; -import java.util.Optional; - - @Slf4j @Builder(builderMethodName = "hiddenBuilder") public class ESBulkProcessor implements Closeable { - private static final String ES_WRITES_METRIC = "num_elasticSearch_writes"; - private static final String ES_BATCHES_METRIC = "num_elasticSearch_batches_submitted"; - private static final String ES_DELETE_EXCEPTION_METRIC = "delete_by_query"; - private static 
final String ES_SUBMIT_DELETE_EXCEPTION_METRIC = "submit_delete_by_query_task"; - private static final String ES_SUBMIT_REINDEX_METRIC = "reindex_submit"; - private static final String ES_REINDEX_SUCCESS_METRIC = "reindex_success"; - private static final String ES_REINDEX_FAILED_METRIC = "reindex_failed"; - - public static ESBulkProcessor.ESBulkProcessorBuilder builder(RestHighLevelClient searchClient) { - return hiddenBuilder().searchClient(searchClient); - } - - @NonNull - private final RestHighLevelClient searchClient; - @Builder.Default - @NonNull - private Boolean async = false; - @Builder.Default - @NonNull - private Boolean batchDelete = false; - @Builder.Default - private Integer bulkRequestsLimit = 500; - @Builder.Default - private Integer bulkFlushPeriod = 1; - @Builder.Default - private Integer numRetries = 3; - @Builder.Default - private Long retryInterval = 1L; - @Builder.Default - private TimeValue defaultTimeout = TimeValue.timeValueMinutes(1); - @Getter - private final WriteRequest.RefreshPolicy writeRequestRefreshPolicy; - @Setter(AccessLevel.NONE) - @Getter(AccessLevel.NONE) - private final BulkProcessor bulkProcessor; - - private ESBulkProcessor(@NonNull RestHighLevelClient searchClient, @NonNull Boolean async, @NonNull Boolean batchDelete, - Integer bulkRequestsLimit, Integer bulkFlushPeriod, Integer numRetries, Long retryInterval, - TimeValue defaultTimeout, WriteRequest.RefreshPolicy writeRequestRefreshPolicy, - BulkProcessor ignored) { - this.searchClient = searchClient; - this.async = async; - this.batchDelete = batchDelete; - this.bulkRequestsLimit = bulkRequestsLimit; - this.bulkFlushPeriod = bulkFlushPeriod; - this.numRetries = numRetries; - this.retryInterval = retryInterval; - this.defaultTimeout = defaultTimeout; - this.writeRequestRefreshPolicy = writeRequestRefreshPolicy; - this.bulkProcessor = async ? toAsyncBulkProcessor() : toBulkProcessor(); - } - - public ESBulkProcessor add(DocWriteRequest<?> request) { - MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(); - bulkProcessor.add(request); - return this; - } - - public Optional<BulkByScrollResponse> deleteByQuery(QueryBuilder queryBuilder, String... indices) { - return deleteByQuery(queryBuilder, true, bulkRequestsLimit, defaultTimeout, indices); - } - - public Optional<BulkByScrollResponse> deleteByQuery(QueryBuilder queryBuilder, boolean refresh, String... indices) { - return deleteByQuery(queryBuilder, refresh, bulkRequestsLimit, defaultTimeout, indices); - } - - public Optional<BulkByScrollResponse> deleteByQuery(QueryBuilder queryBuilder, boolean refresh, - int limit, TimeValue timeout, String... indices) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest() - .setQuery(queryBuilder) - .setBatchSize(limit) - .setMaxRetries(numRetries) - .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) - .setTimeout(timeout) - .setRefresh(refresh); - deleteByQueryRequest.indices(indices); - - try { - if (!batchDelete) { - // flush pending writes - bulkProcessor.flush(); - } - // perform delete after local flush - final BulkByScrollResponse deleteResponse = searchClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT); - MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(deleteResponse.getTotal()); - return Optional.of(deleteResponse); - } catch (Exception e) { - log.error("ERROR: Failed to delete by query. 
See stacktrace for a more detailed error:", e); - MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_DELETE_EXCEPTION_METRIC, e); - } - - return Optional.empty(); - } - public Optional<TaskSubmissionResponse> deleteByQueryAsync(QueryBuilder queryBuilder, boolean refresh, - int limit, @Nullable TimeValue timeout, String... indices) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest() + private static final String ES_WRITES_METRIC = "num_elasticSearch_writes"; + private static final String ES_BATCHES_METRIC = "num_elasticSearch_batches_submitted"; + private static final String ES_DELETE_EXCEPTION_METRIC = "delete_by_query"; + private static final String ES_SUBMIT_DELETE_EXCEPTION_METRIC = "submit_delete_by_query_task"; + private static final String ES_SUBMIT_REINDEX_METRIC = "reindex_submit"; + private static final String ES_REINDEX_SUCCESS_METRIC = "reindex_success"; + private static final String ES_REINDEX_FAILED_METRIC = "reindex_failed"; + + public static ESBulkProcessor.ESBulkProcessorBuilder builder(RestHighLevelClient searchClient) { + return hiddenBuilder().searchClient(searchClient); + } + + @NonNull private final RestHighLevelClient searchClient; + @Builder.Default @NonNull private Boolean async = false; + @Builder.Default @NonNull private Boolean batchDelete = false; + @Builder.Default private Integer bulkRequestsLimit = 500; + @Builder.Default private Integer bulkFlushPeriod = 1; + @Builder.Default private Integer numRetries = 3; + @Builder.Default private Long retryInterval = 1L; + @Builder.Default private TimeValue defaultTimeout = TimeValue.timeValueMinutes(1); + @Getter private final WriteRequest.RefreshPolicy writeRequestRefreshPolicy; + + @Setter(AccessLevel.NONE) + @Getter(AccessLevel.NONE) + private final BulkProcessor bulkProcessor; + + private ESBulkProcessor( + @NonNull RestHighLevelClient searchClient, + @NonNull Boolean async, + @NonNull Boolean batchDelete, + Integer bulkRequestsLimit, + Integer bulkFlushPeriod, + Integer numRetries, + Long retryInterval, + TimeValue defaultTimeout, + WriteRequest.RefreshPolicy writeRequestRefreshPolicy, + BulkProcessor ignored) { + this.searchClient = searchClient; + this.async = async; + this.batchDelete = batchDelete; + this.bulkRequestsLimit = bulkRequestsLimit; + this.bulkFlushPeriod = bulkFlushPeriod; + this.numRetries = numRetries; + this.retryInterval = retryInterval; + this.defaultTimeout = defaultTimeout; + this.writeRequestRefreshPolicy = writeRequestRefreshPolicy; + this.bulkProcessor = async ? toAsyncBulkProcessor() : toBulkProcessor(); + } + + public ESBulkProcessor add(DocWriteRequest<?> request) { + MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(); + bulkProcessor.add(request); + return this; + } + + public Optional<BulkByScrollResponse> deleteByQuery( + QueryBuilder queryBuilder, String... indices) { + return deleteByQuery(queryBuilder, true, bulkRequestsLimit, defaultTimeout, indices); + } + + public Optional<BulkByScrollResponse> deleteByQuery( + QueryBuilder queryBuilder, boolean refresh, String... indices) { + return deleteByQuery(queryBuilder, refresh, bulkRequestsLimit, defaultTimeout, indices); + } + + public Optional<BulkByScrollResponse> deleteByQuery( + QueryBuilder queryBuilder, boolean refresh, int limit, TimeValue timeout, String... 
indices) { + DeleteByQueryRequest deleteByQueryRequest = + new DeleteByQueryRequest() .setQuery(queryBuilder) .setBatchSize(limit) .setMaxRetries(numRetries) .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) + .setTimeout(timeout) .setRefresh(refresh); - if (timeout != null) { - deleteByQueryRequest.setTimeout(timeout); - } - // count the number of conflicts, but do not abort the operation - deleteByQueryRequest.setConflicts("proceed"); - deleteByQueryRequest.indices(indices); - try { - // flush pending writes - bulkProcessor.flush(); - TaskSubmissionResponse resp = searchClient.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT); - MetricUtils.counter(this.getClass(), ES_BATCHES_METRIC).inc(); - return Optional.of(resp); - } catch (Exception e) { - log.error("ERROR: Failed to submit a delete by query task. See stacktrace for a more detailed error:", e); - MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_SUBMIT_DELETE_EXCEPTION_METRIC, e); - } - return Optional.empty(); - } - - private BulkProcessor toBulkProcessor() { - return BulkProcessor.builder((request, bulkListener) -> { - try { - BulkResponse response = searchClient.bulk(request, RequestOptions.DEFAULT); - bulkListener.onResponse(response); - } catch (IOException e) { - bulkListener.onFailure(e); - throw new RuntimeException(e); - } - }, BulkListener.getInstance(writeRequestRefreshPolicy)) - .setBulkActions(bulkRequestsLimit) - .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) - // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other retry methods) - .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) - .build(); - } + deleteByQueryRequest.indices(indices); - private BulkProcessor toAsyncBulkProcessor() { - return BulkProcessor.builder((request, bulkListener) -> { - searchClient.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); - }, BulkListener.getInstance(writeRequestRefreshPolicy)) - .setBulkActions(bulkRequestsLimit) - .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) - // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other retry methods) - .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) - .build(); + try { + if (!batchDelete) { + // flush pending writes + bulkProcessor.flush(); + } + // perform delete after local flush + final BulkByScrollResponse deleteResponse = + searchClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT); + MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(deleteResponse.getTotal()); + return Optional.of(deleteResponse); + } catch (Exception e) { + log.error("ERROR: Failed to delete by query. See stacktrace for a more detailed error:", e); + MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_DELETE_EXCEPTION_METRIC, e); } - @Override - public void close() throws IOException { - bulkProcessor.close(); + return Optional.empty(); + } + + public Optional<TaskSubmissionResponse> deleteByQueryAsync( + QueryBuilder queryBuilder, + boolean refresh, + int limit, + @Nullable TimeValue timeout, + String... 
indices) { + DeleteByQueryRequest deleteByQueryRequest = + new DeleteByQueryRequest() + .setQuery(queryBuilder) + .setBatchSize(limit) + .setMaxRetries(numRetries) + .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) + .setRefresh(refresh); + if (timeout != null) { + deleteByQueryRequest.setTimeout(timeout); } - - public void flush() { - bulkProcessor.flush(); + // count the number of conflicts, but do not abort the operation + deleteByQueryRequest.setConflicts("proceed"); + deleteByQueryRequest.indices(indices); + try { + // flush pending writes + bulkProcessor.flush(); + TaskSubmissionResponse resp = + searchClient.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT); + MetricUtils.counter(this.getClass(), ES_BATCHES_METRIC).inc(); + return Optional.of(resp); + } catch (Exception e) { + log.error( + "ERROR: Failed to submit a delete by query task. See stacktrace for a more detailed error:", + e); + MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_SUBMIT_DELETE_EXCEPTION_METRIC, e); } + return Optional.empty(); + } + + private BulkProcessor toBulkProcessor() { + return BulkProcessor.builder( + (request, bulkListener) -> { + try { + BulkResponse response = searchClient.bulk(request, RequestOptions.DEFAULT); + bulkListener.onResponse(response); + } catch (IOException e) { + bulkListener.onFailure(e); + throw new RuntimeException(e); + } + }, + BulkListener.getInstance(writeRequestRefreshPolicy)) + .setBulkActions(bulkRequestsLimit) + .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) + // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other + // retry methods) + .setBackoffPolicy( + BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) + .build(); + } + + private BulkProcessor toAsyncBulkProcessor() { + return BulkProcessor.builder( + (request, bulkListener) -> { + searchClient.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); + }, + BulkListener.getInstance(writeRequestRefreshPolicy)) + .setBulkActions(bulkRequestsLimit) + .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) + // This retry is ONLY for "resource constraints", i.e. 
429 errors (each request has other + // retry methods) + .setBackoffPolicy( + BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) + .build(); + } + + @Override + public void close() throws IOException { + bulkProcessor.close(); + } + + public void flush() { + bulkProcessor.flush(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java index edcdf5654028c..306352523118b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java @@ -16,7 +16,6 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.script.Script; - @Slf4j @RequiredArgsConstructor public class ESWriteDAO { @@ -34,10 +33,11 @@ public class ESWriteDAO { * @param document the document to update / insert * @param docId the ID of the document */ - public void upsertDocument(@Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { + public void upsertDocument( + @Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); - final UpdateRequest updateRequest = new UpdateRequest( - indexName, docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -57,12 +57,12 @@ public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { bulkProcessor.add(new DeleteRequest(indexName).id(docId)); } - /** - * Applies a script to a particular document - */ - public void applyScriptUpdate(@Nonnull String entityName, @Nonnull String docId, @Nonnull String script) { + /** Applies a script to a particular document */ + public void applyScriptUpdate( + @Nonnull String entityName, @Nonnull String docId, @Nonnull String script) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); - UpdateRequest updateRequest = new UpdateRequest(indexName, docId) + UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .scriptedUpsert(true) .retryOnConflict(numRetries) @@ -70,9 +70,7 @@ public void applyScriptUpdate(@Nonnull String entityName, @Nonnull String docId, bulkProcessor.add(updateRequest); } - /** - * Clear all documents in all the indices - */ + /** Clear all documents in all the indices */ public void clear() { String[] indices = getIndices(indexConvention.getAllEntityIndicesPattern()); bulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), indices); @@ -80,11 +78,12 @@ public void clear() { private String[] getIndices(String pattern) { try { - GetIndexResponse response = searchClient.indices().get(new GetIndexRequest(pattern), RequestOptions.DEFAULT); + GetIndexResponse response = + searchClient.indices().get(new GetIndexRequest(pattern), RequestOptions.DEFAULT); return response.getIndices(); } catch (IOException e) { log.error("Failed to get indices using pattern {}", pattern); - return new String[]{}; + return new String[] {}; } } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java index fb146a9f4d8cc..d0bcec9b4ef40 100644 --- 
a/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java @@ -3,13 +3,8 @@ import com.linkedin.metadata.search.SearchEntity; import java.util.List; - -/** - * Interface for extractors that extract Features for each entity returned by search - */ +/** Interface for extractors that extract Features for each entity returned by search */ public interface FeatureExtractor { - /** - * Return the extracted features for each entity returned by search - */ + /** Return the extracted features for each entity returned by search */ List<Features> extractFeatures(List<SearchEntity> entities); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java index f1250ecd61021..2a9571b18b726 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java @@ -9,7 +9,6 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; - @Slf4j @Value public class Features { @@ -54,11 +53,15 @@ public static Features merge(@Nonnull Features features1, @Nonnull Features feat } @Nonnull - public static List<Features> merge(@Nonnull List<Features> featureList1, @Nonnull List<Features> featureList2) { + public static List<Features> merge( + @Nonnull List<Features> featureList1, @Nonnull List<Features> featureList2) { if (featureList1.size() != featureList2.size()) { - throw new IllegalArgumentException(String.format("Expected both lists to have the same number of elements. %s != %s", + throw new IllegalArgumentException( + String.format( + "Expected both lists to have the same number of elements. %s != %s", featureList1.size(), featureList2.size())); } - return Streams.zip(featureList1.stream(), featureList2.stream(), Features::merge).collect(Collectors.toList()); + return Streams.zip(featureList1.stream(), featureList2.stream(), Features::merge) + .collect(Collectors.toList()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java index 4983cae3ddc27..555e90e189bc6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java @@ -13,10 +13,7 @@ import lombok.SneakyThrows; import lombok.Value; - -/** - * In memory ranker that re-ranks results returned by the search backend - */ +/** In memory ranker that re-ranks results returned by the search backend */ public abstract class SearchRanker<U extends Comparable<? super U>> { /** @@ -25,18 +22,19 @@ public abstract class SearchRanker<U extends Comparable<? super U>> { public abstract List<FeatureExtractor> getFeatureExtractors(); /** - * Return a comparable score for each entity returned by search backend. The ranker will rank based on this score + * Return a comparable score for each entity returned by search backend. 
The ranker will rank + * based on this score */ public abstract U score(SearchEntity searchEntity); - /** - * Rank the input list of entities - */ + /** Rank the input list of entities */ public List<SearchEntity> rank(List<SearchEntity> originalList) { List<SearchEntity> entitiesToRank = originalList; if (!getFeatureExtractors().isEmpty()) { - entitiesToRank = Streams.zip(originalList.stream(), fetchFeatures(originalList).stream(), this::updateFeatures) - .collect(Collectors.toList()); + entitiesToRank = + Streams.zip( + originalList.stream(), fetchFeatures(originalList).stream(), this::updateFeatures) + .collect(Collectors.toList()); } return entitiesToRank.stream() .map(entity -> new ScoredEntity<>(entity, score(entity))) @@ -45,26 +43,30 @@ public List<SearchEntity> rank(List<SearchEntity> originalList) { .collect(Collectors.toList()); } - /** - * Fetch features for each entity returned using the feature extractors - */ + /** Fetch features for each entity returned using the feature extractors */ private List<Features> fetchFeatures(List<SearchEntity> originalList) { List<Features> originalFeatures = - originalList.stream().map(SearchEntity::getFeatures).map(Features::from).collect(Collectors.toList()); - return ConcurrencyUtils.transformAndCollectAsync(getFeatureExtractors(), - extractor -> extractor.extractFeatures(originalList)).stream().reduce(originalFeatures, Features::merge); + originalList.stream() + .map(SearchEntity::getFeatures) + .map(Features::from) + .collect(Collectors.toList()); + return ConcurrencyUtils.transformAndCollectAsync( + getFeatureExtractors(), extractor -> extractor.extractFeatures(originalList)) + .stream() + .reduce(originalFeatures, Features::merge); } - /** - * Add the extracted features into each search entity to return the features in the response - */ + /** Add the extracted features into each search entity to return the features in the response */ @SneakyThrows private SearchEntity updateFeatures(SearchEntity originalEntity, Features features) { - return originalEntity.clone() - .setFeatures(new DoubleMap(features.getNumericFeatures() - .entrySet() - .stream() - .collect(Collectors.toMap(entry -> entry.getKey().toString(), Map.Entry::getValue)))); + return originalEntity + .clone() + .setFeatures( + new DoubleMap( + features.getNumericFeatures().entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), Map.Entry::getValue)))); } @Value diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java index 7d009495262cf..c3ab1b49f0e07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java @@ -6,9 +6,9 @@ import java.util.List; import java.util.Optional; - /** - * Simple ranker that diversifies the results between different entities. For the same entity, returns the same order from elasticsearch + * Simple ranker that diversifies the results between different entities. 
For the same entity, + * returns the same order from elasticsearch */ public class SimpleRanker extends SearchRanker<Double> { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java index 49809cf933936..bfeb993390571 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java @@ -14,20 +14,16 @@ import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType; import com.linkedin.metadata.models.extractor.FieldExtractor; - import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - - /** * Class that provides a utility function that transforms the snapshot object into a search document */ @@ -47,24 +43,33 @@ public class SearchDocumentTransformer { private SystemEntityClient entityClient; - private static final String BROWSE_PATH_V2_DELIMITER = "␟"; + private static final String BROWSE_PATH_V2_DELIMITER = "␟"; - public Optional<String> transformSnapshot(final RecordTemplate snapshot, final EntitySpec entitySpec, - final Boolean forDelete) { + public Optional<String> transformSnapshot( + final RecordTemplate snapshot, final EntitySpec entitySpec, final Boolean forDelete) { final Map<SearchableFieldSpec, List<Object>> extractedSearchableFields = - FieldExtractor.extractFieldsFromSnapshot(snapshot, entitySpec, AspectSpec::getSearchableFieldSpecs, maxValueLength).entrySet() - // Delete expects urn to be preserved - .stream().filter(entry -> !forDelete || !"urn".equals(entry.getKey().getSearchableAnnotation().getFieldName())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + FieldExtractor.extractFieldsFromSnapshot( + snapshot, entitySpec, AspectSpec::getSearchableFieldSpecs, maxValueLength) + .entrySet() + // Delete expects urn to be preserved + .stream() + .filter( + entry -> + !forDelete + || !"urn".equals(entry.getKey().getSearchableAnnotation().getFieldName())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); final Map<SearchScoreFieldSpec, List<Object>> extractedSearchScoreFields = - FieldExtractor.extractFieldsFromSnapshot(snapshot, entitySpec, AspectSpec::getSearchScoreFieldSpecs, maxValueLength); + FieldExtractor.extractFieldsFromSnapshot( + snapshot, entitySpec, AspectSpec::getSearchScoreFieldSpecs, maxValueLength); if (extractedSearchableFields.isEmpty() && extractedSearchScoreFields.isEmpty()) { return Optional.empty(); } final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", snapshot.data().get("urn").toString()); - extractedSearchableFields.forEach((key, value) -> setSearchableValue(key, value, searchDocument, forDelete)); - extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); + extractedSearchableFields.forEach( + (key, value) -> setSearchableValue(key, value, searchDocument, forDelete)); + extractedSearchScoreFields.forEach( + (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); return 
Optional.of(searchDocument.toString()); } @@ -83,51 +88,71 @@ public Optional<String> transformAspect( if (!extractedSearchableFields.isEmpty() || !extractedSearchScoreFields.isEmpty()) { final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", urn.toString()); - extractedSearchableFields.forEach((key, values) -> setSearchableValue(key, values, searchDocument, forDelete)); - extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); + extractedSearchableFields.forEach( + (key, values) -> setSearchableValue(key, values, searchDocument, forDelete)); + extractedSearchScoreFields.forEach( + (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); result = Optional.of(searchDocument.toString()); } return result; } - public void setSearchableValue(final SearchableFieldSpec fieldSpec, final List<Object> fieldValues, - final ObjectNode searchDocument, final Boolean forDelete) { + public void setSearchableValue( + final SearchableFieldSpec fieldSpec, + final List<Object> fieldValues, + final ObjectNode searchDocument, + final Boolean forDelete) { DataSchema.Type valueType = fieldSpec.getPegasusSchema().getType(); Optional<Object> firstValue = fieldValues.stream().findFirst(); boolean isArray = fieldSpec.isArray(); // Set hasValues field if exists - fieldSpec.getSearchableAnnotation().getHasValuesFieldName().ifPresent(fieldName -> { - if (forDelete) { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(false)); - return; - } - if (valueType == DataSchema.Type.BOOLEAN) { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode((Boolean) firstValue.orElse(false))); - } else { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(!fieldValues.isEmpty())); - } - }); + fieldSpec + .getSearchableAnnotation() + .getHasValuesFieldName() + .ifPresent( + fieldName -> { + if (forDelete) { + searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(false)); + return; + } + if (valueType == DataSchema.Type.BOOLEAN) { + searchDocument.set( + fieldName, + JsonNodeFactory.instance.booleanNode((Boolean) firstValue.orElse(false))); + } else { + searchDocument.set( + fieldName, JsonNodeFactory.instance.booleanNode(!fieldValues.isEmpty())); + } + }); // Set numValues field if exists - fieldSpec.getSearchableAnnotation().getNumValuesFieldName().ifPresent(fieldName -> { - if (forDelete) { - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) 0)); - return; - } - switch (valueType) { - case INT: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) firstValue.orElse(0))); - break; - case LONG: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Long) firstValue.orElse(0L))); - break; - default: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode(fieldValues.size())); - break; - } - }); + fieldSpec + .getSearchableAnnotation() + .getNumValuesFieldName() + .ifPresent( + fieldName -> { + if (forDelete) { + searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) 0)); + return; + } + switch (valueType) { + case INT: + searchDocument.set( + fieldName, + JsonNodeFactory.instance.numberNode((Integer) firstValue.orElse(0))); + break; + case LONG: + searchDocument.set( + fieldName, JsonNodeFactory.instance.numberNode((Long) firstValue.orElse(0L))); + break; + default: + searchDocument.set( + fieldName, JsonNodeFactory.instance.numberNode(fieldValues.size())); + 
break; + } + }); final String fieldName = fieldSpec.getSearchableAnnotation().getFieldName(); final FieldType fieldType = fieldSpec.getSearchableAnnotation().getFieldType(); @@ -143,26 +168,35 @@ public void setSearchableValue(final SearchableFieldSpec fieldSpec, final List<O searchDocument.set(fieldName, JsonNodeFactory.instance.textNode(browsePathV2Value)); } else { ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxArrayLength)) - .forEach(value -> getNodeForValue(valueType, value, fieldType).ifPresent(arrayNode::add)); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxArrayLength)) + .forEach( + value -> getNodeForValue(valueType, value, fieldType).ifPresent(arrayNode::add)); searchDocument.set(fieldName, arrayNode); } } else if (valueType == DataSchema.Type.MAP) { ObjectNode dictDoc = JsonNodeFactory.instance.objectNode(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxObjectKeys)).forEach(fieldValue -> { - String[] keyValues = fieldValue.toString().split("="); - String key = keyValues[0]; - String value = keyValues[1]; - dictDoc.put(key, value); - }); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxObjectKeys)) + .forEach( + fieldValue -> { + String[] keyValues = fieldValue.toString().split("="); + String key = keyValues[0]; + String value = keyValues[1]; + dictDoc.put(key, value); + }); searchDocument.set(fieldName, dictDoc); } else if (!fieldValues.isEmpty()) { - getNodeForValue(valueType, fieldValues.get(0), fieldType).ifPresent(node -> searchDocument.set(fieldName, node)); + getNodeForValue(valueType, fieldValues.get(0), fieldType) + .ifPresent(node -> searchDocument.set(fieldName, node)); } } - public void setSearchScoreValue(final SearchScoreFieldSpec fieldSpec, final List<Object> fieldValues, - final ObjectNode searchDocument, final Boolean forDelete) { + public void setSearchScoreValue( + final SearchScoreFieldSpec fieldSpec, + final List<Object> fieldValues, + final ObjectNode searchDocument, + final Boolean forDelete) { DataSchema.Type valueType = fieldSpec.getPegasusSchema().getType(); final String fieldName = fieldSpec.getSearchScoreAnnotation().getFieldName(); @@ -193,12 +227,14 @@ public void setSearchScoreValue(final SearchScoreFieldSpec fieldSpec, final List default: // Only the above types are supported throw new IllegalArgumentException( - String.format("SearchScore fields must be a numeric type: field %s, value %s", fieldName, fieldValue)); + String.format( + "SearchScore fields must be a numeric type: field %s, value %s", + fieldName, fieldValue)); } } - private Optional<JsonNode> getNodeForValue(final DataSchema.Type schemaFieldType, final Object fieldValue, - final FieldType fieldType) { + private Optional<JsonNode> getNodeForValue( + final DataSchema.Type schemaFieldType, final Object fieldValue, final FieldType fieldType) { switch (schemaFieldType) { case BOOLEAN: return Optional.of(JsonNodeFactory.instance.booleanNode((Boolean) fieldValue)); @@ -206,30 +242,34 @@ private Optional<JsonNode> getNodeForValue(final DataSchema.Type schemaFieldType return Optional.of(JsonNodeFactory.instance.numberNode((Integer) fieldValue)); case LONG: return Optional.of(JsonNodeFactory.instance.numberNode((Long) fieldValue)); - // By default run toString + // By default run toString default: String value = fieldValue.toString(); // If index type is BROWSE_PATH, make sure the value starts with a slash if (fieldType == FieldType.BROWSE_PATH && !value.startsWith("/")) { value = "/" + 
value; } - return value.isEmpty() ? Optional.empty() + return value.isEmpty() + ? Optional.empty() : Optional.of(JsonNodeFactory.instance.textNode(fieldValue.toString())); } } /** - * The browsePathsV2 aspect is a list of objects and the @Searchable annotation specifies a - * list of strings that we receive. However, we want to aggregate those strings and store - * as a single string in ElasticSearch so we can do prefix matching against it. + * The browsePathsV2 aspect is a list of objects and the @Searchable annotation specifies a list + * of strings that we receive. However, we want to aggregate those strings and store as a single + * string in ElasticSearch so we can do prefix matching against it. */ private String getBrowsePathV2Value(@Nonnull final List<Object> fieldValues) { List<String> stringValues = new ArrayList<>(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxArrayLength)).forEach(value -> { - if (value instanceof String) { - stringValues.add((String) value); - } - }); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxArrayLength)) + .forEach( + value -> { + if (value instanceof String) { + stringValues.add((String) value); + } + }); String aggregatedValue = String.join(BROWSE_PATH_V2_DELIMITER, stringValues); // ensure browse path v2 starts with our delimiter if it's not empty if (!aggregatedValue.equals("") && !aggregatedValue.startsWith(BROWSE_PATH_V2_DELIMITER)) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java index a4b59c30607a3..af0f537de8629 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java @@ -23,39 +23,59 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class BrowsePathUtils { public static String getDefaultBrowsePath( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, - @Nonnull Character dataPlatformDelimiter) throws URISyntaxException { + @Nonnull Character dataPlatformDelimiter) + throws URISyntaxException { switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataPlatformKey dpKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKey( - dsKey.getPlatform(), - getKeyAspectSpec(dsKey.getPlatform().getEntityType(), - entityRegistry)); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataPlatformKey dpKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKey( + dsKey.getPlatform(), + getKeyAspectSpec(dsKey.getPlatform().getEntityType(), entityRegistry)); String datasetNamePath = getDatasetPath(dsKey.getName(), dataPlatformDelimiter); - return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + datasetNamePath).toLowerCase(); + return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + datasetNamePath) + .toLowerCase(); case Constants.CHART_ENTITY_NAME: - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return ("/" + chartKey.getDashboardTool()); case 
Constants.DASHBOARD_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return ("/" + dashboardKey.getDashboardTool()).toLowerCase(); case Constants.DATA_FLOW_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster()) - .toLowerCase(); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster()).toLowerCase(); case Constants.DATA_JOB_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getCluster()).toLowerCase(); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getCluster()) + .toLowerCase(); default: return ""; } @@ -65,60 +85,130 @@ public static String getDefaultBrowsePath( public static Urn buildDataPlatformUrn(Urn urn, EntityRegistry entityRegistry) { switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return dsKey.getPlatform(); case Constants.CHART_ENTITY_NAME: - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, chartKey.getDashboardTool())); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, chartKey.getDashboardTool())); case Constants.DASHBOARD_ENTITY_NAME: - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, dashboardKey.getDashboardTool())); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + 
"urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, dashboardKey.getDashboardTool())); case Constants.DATA_FLOW_ENTITY_NAME: - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, dataFlowKey.getOrchestrator())); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, dataFlowKey.getOrchestrator())); case Constants.DATA_JOB_ENTITY_NAME: - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, parentFlowKey.getOrchestrator())); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, parentFlowKey.getOrchestrator())); case Constants.NOTEBOOK_ENTITY_NAME: - NotebookKey notebookKey = (NotebookKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, notebookKey.getNotebookTool())); + NotebookKey notebookKey = + (NotebookKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, notebookKey.getNotebookTool())); default: // Could not resolve a data platform return null; } } - public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRegistry) throws URISyntaxException { + public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRegistry) + throws URISyntaxException { switch (urn.getEntityType()) { case "dataset": - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataPlatformKey dpKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKey( - dsKey.getPlatform(), - getKeyAspectSpec(dsKey.getPlatform().getEntityType(), - entityRegistry)); - return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + "/" - + dsKey.getName()).replace('.', '/').toLowerCase(); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataPlatformKey dpKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKey( + dsKey.getPlatform(), + getKeyAspectSpec(dsKey.getPlatform().getEntityType(), entityRegistry)); + return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + "/" + dsKey.getName()) + .replace('.', '/') + .toLowerCase(); case "chart": - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, 
getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + chartKey.getDashboardTool() + "/" + chartKey.getChartId()).toLowerCase(); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + chartKey.getDashboardTool() + "/" + chartKey.getChartId()).toLowerCase(); case "dashboard": - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dashboardKey.getDashboardTool() + "/" + dashboardKey.getDashboardId()).toLowerCase(); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + dashboardKey.getDashboardTool() + "/" + dashboardKey.getDashboardId()) + .toLowerCase(); case "dataFlow": - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster() + "/" + dataFlowKey.getFlowId()) + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + + dataFlowKey.getOrchestrator() + + "/" + + dataFlowKey.getCluster() + + "/" + + dataFlowKey.getFlowId()) .toLowerCase(); case "dataJob": - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getFlowId() + "/" - + dataJobKey.getJobId()).toLowerCase(); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return ("/" + + parentFlowKey.getOrchestrator() + + "/" + + parentFlowKey.getFlowId() + + "/" + + dataJobKey.getJobId()) + .toLowerCase(); case "glossaryTerm": // TODO: Is this the best way to represent glossary term key? - GlossaryTermKey glossaryTermKey = (GlossaryTermKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + GlossaryTermKey glossaryTermKey = + (GlossaryTermKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return "/" + glossaryTermKey.getName().replace('.', '/').toLowerCase(); default: return ""; @@ -126,26 +216,28 @@ public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRe } /** - * Attempts to convert a dataset name into a proper browse path by splitting it using the Data Platform delimiter. - * If there are not > 1 name parts, then an empty string will be returned. + * Attempts to convert a dataset name into a proper browse path by splitting it using the Data + * Platform delimiter. If there are not > 1 name parts, then an empty string will be returned. 
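+   * For example, with a '.' delimiter the name "logging.events.clicks" produces
+   * "/logging/events": the final name segment is dropped and the rest becomes the path.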
*/ - private static String getDatasetPath(@Nonnull final String datasetName, @Nonnull final Character delimiter) { + private static String getDatasetPath( + @Nonnull final String datasetName, @Nonnull final Character delimiter) { if (datasetName.contains(delimiter.toString())) { - final List<String> datasetNamePathParts = Arrays.asList(datasetName.split(Pattern.quote(delimiter.toString()))); + final List<String> datasetNamePathParts = + Arrays.asList(datasetName.split(Pattern.quote(delimiter.toString()))); System.out.println(datasetNamePathParts); // Omit the name from the path. - final String datasetPath = String.join("/", datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1)); + final String datasetPath = + String.join("/", datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1)); return datasetPath.startsWith("/") ? datasetPath : String.format("/%s", datasetPath); } return ""; } protected static AspectSpec getKeyAspectSpec( - final String entityName, - final EntityRegistry registry) { + final String entityName, final EntityRegistry registry) { final EntitySpec spec = registry.getEntitySpec(entityName); return spec.getKeyAspectSpec(); } - private BrowsePathUtils() { } -} \ No newline at end of file + private BrowsePathUtils() {} +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java index a7f5ea7a51e29..961167663e11f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.BrowsePathEntry; import com.linkedin.common.BrowsePathEntryArray; import com.linkedin.common.BrowsePathsV2; @@ -14,10 +16,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; @@ -25,52 +23,63 @@ import java.util.List; import java.util.regex.Pattern; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class BrowsePathV2Utils { - final private static String DEFAULT_FOLDER_NAME = "Default"; + private static final String DEFAULT_FOLDER_NAME = "Default"; /** * Generates a default browsePathsV2 aspect for a given urn. * - * If the entity has containers, get its whole container path and set those urns in the path of browsePathsV2. - * If it's a dataset, generate the path from the dataset name like we do for default browsePaths V1. - * If it's a data job, set its parent data flow in the path. - * For everything else, place it in a "Default" folder so we can still navigate to it through browse in the UI. - * This default method should be unneeded once ingestion produces higher quality browsePathsV2 aspects. + * <p>If the entity has containers, get its whole container path and set those urns in the path of + * browsePathsV2. If it's a dataset, generate the path from the dataset name like we do for + * default browsePaths V1. 
If it's a data job, set its parent data flow in the path. For + * everything else, place it in a "Default" folder so we can still navigate to it through browse + * in the UI. This default method should be unneeded once ingestion produces higher quality + * browsePathsV2 aspects. */ public static BrowsePathsV2 getDefaultBrowsePathV2( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, @Nonnull Character dataPlatformDelimiter, @Nonnull EntityService entityService, - boolean useContainerPaths) throws URISyntaxException { + boolean useContainerPaths) + throws URISyntaxException { BrowsePathsV2 result = new BrowsePathsV2(); BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - BrowsePathEntryArray datasetContainerPathEntries = useContainerPaths ? getContainerPathEntries(urn, entityService) : null; + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + BrowsePathEntryArray datasetContainerPathEntries = + useContainerPaths ? getContainerPathEntries(urn, entityService) : null; if (useContainerPaths && datasetContainerPathEntries.size() > 0) { browsePathEntries.addAll(datasetContainerPathEntries); } else { - BrowsePathEntryArray defaultDatasetPathEntries = getDefaultDatasetPathEntries(dsKey.getName(), dataPlatformDelimiter); + BrowsePathEntryArray defaultDatasetPathEntries = + getDefaultDatasetPathEntries(dsKey.getName(), dataPlatformDelimiter); if (defaultDatasetPathEntries.size() > 0) { - browsePathEntries.addAll(getDefaultDatasetPathEntries(dsKey.getName().toLowerCase(), dataPlatformDelimiter)); + browsePathEntries.addAll( + getDefaultDatasetPathEntries(dsKey.getName().toLowerCase(), dataPlatformDelimiter)); } else { browsePathEntries.add(createBrowsePathEntry(DEFAULT_FOLDER_NAME, null)); } } break; - // Some sources produce charts and dashboards with containers. If we have containers, use them, otherwise use default folder + // Some sources produce charts and dashboards with containers. If we have containers, use + // them, otherwise use default folder case Constants.CHART_ENTITY_NAME: case Constants.DASHBOARD_ENTITY_NAME: - BrowsePathEntryArray containerPathEntries = useContainerPaths ? getContainerPathEntries(urn, entityService) : null; + BrowsePathEntryArray containerPathEntries = + useContainerPaths ? 
getContainerPathEntries(urn, entityService) : null; if (useContainerPaths && containerPathEntries.size() > 0) { browsePathEntries.addAll(containerPathEntries); } else { @@ -78,8 +87,12 @@ public static BrowsePathsV2 getDefaultBrowsePathV2( } break; case Constants.DATA_JOB_ENTITY_NAME: - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - browsePathEntries.add(createBrowsePathEntry(dataJobKey.getFlow().toString(), dataJobKey.getFlow())); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + browsePathEntries.add( + createBrowsePathEntry(dataJobKey.getFlow().toString(), dataJobKey.getFlow())); break; default: browsePathEntries.add(createBrowsePathEntry(DEFAULT_FOLDER_NAME, null)); @@ -99,15 +112,15 @@ private static BrowsePathEntry createBrowsePathEntry(@Nonnull String id, @Nullab return pathEntry; } - private static void aggregateParentContainers(List<Urn> containerUrns, Urn entityUrn, EntityService entityService) { + private static void aggregateParentContainers( + List<Urn> containerUrns, Urn entityUrn, EntityService entityService) { try { - EntityResponse entityResponse = entityService.getEntityV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME) - ); + EntityResponse entityResponse = + entityService.getEntityV2( + entityUrn.getEntityType(), entityUrn, Collections.singleton(CONTAINER_ASPECT_NAME)); - if (entityResponse != null && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null + && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); @@ -116,50 +129,58 @@ private static void aggregateParentContainers(List<Urn> containerUrns, Urn entit aggregateParentContainers(containerUrns, containerUrn, entityService); } } catch (Exception e) { - log.error(String.format("Error getting containers for entity with urn %s while adding default browsePathV2", entityUrn), e); + log.error( + String.format( + "Error getting containers for entity with urn %s while adding default browsePathV2", + entityUrn), + e); } } /** - * Gets the path of containers for a given entity to create a browsePathV2 off of. - * Recursively call aggregateParentContainers to get the full container path to be included in this path. + * Gets the path of containers for a given entity to create a browsePathV2 off of. Recursively + * call aggregateParentContainers to get the full container path to be included in this path. 
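+   *
+   * <p>An illustrative sketch (urns hypothetical): for a dataset held in a schema container that
+   * is itself held in a database container, the recursion yields entries ordered root-first:
+   *
+   * <pre>{@code
+   * // e.g. [urn:li:container:<database guid>, urn:li:container:<schema guid>]
+   * BrowsePathEntryArray entries = getContainerPathEntries(datasetUrn, entityService);
+   * }</pre>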
   */
-  private static BrowsePathEntryArray getContainerPathEntries(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) {
+  private static BrowsePathEntryArray getContainerPathEntries(
+      @Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) {
     BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray();
     final List<Urn> containerUrns = new ArrayList<>();
     aggregateParentContainers(containerUrns, entityUrn, entityService);
-    containerUrns.forEach(urn -> {
-      browsePathEntries.add(createBrowsePathEntry(urn.toString(), urn));
-    });
+    containerUrns.forEach(
+        urn -> {
+          browsePathEntries.add(createBrowsePathEntry(urn.toString(), urn));
+        });
     return browsePathEntries;
   }

   /**
-   * Attempts to convert a dataset name into a proper browse path by splitting it using the Data Platform delimiter.
-   * If there are not > 1 name parts, then an empty string will be returned.
+   * Attempts to convert a dataset name into a proper browse path by splitting it using the Data
+   * Platform delimiter. If there are not > 1 name parts, then an empty set of path entries will be
+   * returned.
    */
-  private static BrowsePathEntryArray getDefaultDatasetPathEntries(@Nonnull final String datasetName, @Nonnull final Character delimiter) {
+  private static BrowsePathEntryArray getDefaultDatasetPathEntries(
+      @Nonnull final String datasetName, @Nonnull final Character delimiter) {
     BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray();
     if (datasetName.contains(delimiter.toString())) {
-      final List<String> datasetNamePathParts = Arrays.stream(datasetName.split(Pattern.quote(delimiter.toString())))
+      final List<String> datasetNamePathParts =
+          Arrays.stream(datasetName.split(Pattern.quote(delimiter.toString())))
              .filter((name) -> !name.isEmpty())
              .collect(Collectors.toList());
       // Omit the name from the path.
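+      // Illustrative example (assuming a '.' delimiter): "db.schema.table" splits into
+      // ["db", "schema", "table"], and only ["db", "schema"] become browse path entries below.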
- datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1).forEach((part -> { - browsePathEntries.add(createBrowsePathEntry(part, null)); - })); + datasetNamePathParts + .subList(0, datasetNamePathParts.size() - 1) + .forEach( + (part -> { + browsePathEntries.add(createBrowsePathEntry(part, null)); + })); } return browsePathEntries; } protected static AspectSpec getKeyAspectSpec( - final String entityName, - final EntityRegistry registry) { + final String entityName, final EntityRegistry registry) { final EntitySpec spec = registry.getEntitySpec(entityName); return spec.getKeyAspectSpec(); } - private BrowsePathV2Utils() { - - } + private BrowsePathV2Utils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 53765acb8e29e..982b5c8d5f367 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; +import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.models.EntitySpec; @@ -19,8 +23,8 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RequestOptions; import org.apache.commons.lang.StringUtils; +import org.opensearch.client.RequestOptions; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -35,14 +39,7 @@ import org.opensearch.search.suggest.SuggestionBuilder; import org.opensearch.search.suggest.term.TermSuggestionBuilder; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; -import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; - - -/** - * TODO: Add more robust unit tests for this critical class. - */ +/** TODO: Add more robust unit tests for this critical class. 
*/ @Slf4j public class ESUtils { @@ -64,33 +61,43 @@ public class ESUtils { public static final String TOKEN_COUNT_FIELD_TYPE = "token_count"; // End of field types - public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_KEYWORD = Set.of( - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.WORD_GRAM); - public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_TEXT = Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH, - SearchableAnnotation.FieldType.BROWSE_PATH_V2, - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.URN_PARTIAL); + public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_KEYWORD = + Set.of( + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.WORD_GRAM); + public static final Set<SearchableAnnotation.FieldType> FIELD_TYPES_STORED_AS_TEXT = + Set.of( + SearchableAnnotation.FieldType.BROWSE_PATH, + SearchableAnnotation.FieldType.BROWSE_PATH_V2, + SearchableAnnotation.FieldType.URN, + SearchableAnnotation.FieldType.URN_PARTIAL); public static final String ENTITY_NAME_FIELD = "_entityName"; public static final String NAME_SUGGESTION = "nameSuggestion"; - // we use this to make sure we filter for editable & non-editable fields. Also expands out top-level properties + // we use this to make sure we filter for editable & non-editable fields. Also expands out + // top-level properties // to field level properties - public static final Map<String, List<String>> FIELDS_TO_EXPANDED_FIELDS_LIST = new HashMap<String, List<String>>() {{ - put("tags", ImmutableList.of("tags", "fieldTags", "editedFieldTags")); - put("glossaryTerms", ImmutableList.of("glossaryTerms", "fieldGlossaryTerms", "editedFieldGlossaryTerms")); - put("fieldTags", ImmutableList.of("fieldTags", "editedFieldTags")); - put("fieldGlossaryTerms", ImmutableList.of("fieldGlossaryTerms", "editedFieldGlossaryTerms")); - put("fieldDescriptions", ImmutableList.of("fieldDescriptions", "editedFieldDescriptions")); - put("description", ImmutableList.of("description", "editedDescription")); - }}; - - public static final Set<String> BOOLEAN_FIELDS = ImmutableSet.of( - "removed" - ); + public static final Map<String, List<String>> FIELDS_TO_EXPANDED_FIELDS_LIST = + new HashMap<String, List<String>>() { + { + put("tags", ImmutableList.of("tags", "fieldTags", "editedFieldTags")); + put( + "glossaryTerms", + ImmutableList.of("glossaryTerms", "fieldGlossaryTerms", "editedFieldGlossaryTerms")); + put("fieldTags", ImmutableList.of("fieldTags", "editedFieldTags")); + put( + "fieldGlossaryTerms", + ImmutableList.of("fieldGlossaryTerms", "editedFieldGlossaryTerms")); + put( + "fieldDescriptions", + ImmutableList.of("fieldDescriptions", "editedFieldDescriptions")); + put("description", ImmutableList.of("description", "editedDescription")); + } + }; + + public static final Set<String> BOOLEAN_FIELDS = ImmutableSet.of("removed"); /* * Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/regexp-syntax.html for list of reserved @@ -98,17 +105,17 @@ public class ESUtils { */ private static final String ELASTICSEARCH_REGEXP_RESERVED_CHARACTERS = "?+*|{}[]()#@&<>~"; - private ESUtils() { - - } + private ESUtils() {} /** * Constructs the filter query given filter map. 
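+   *
+   * <p>A hedged usage sketch (the field and value are illustrative, not part of this change):
+   *
+   * <pre>{@code
+   * Criterion criterion =
+   *     new Criterion().setField("platform").setValue("hive").setCondition(Condition.EQUAL);
+   * Filter filter =
+   *     new Filter()
+   *         .setOr(
+   *             new ConjunctiveCriterionArray(
+   *                 ImmutableList.of(
+   *                     new ConjunctiveCriterion()
+   *                         .setAnd(new CriterionArray(ImmutableList.of(criterion))))));
+   * BoolQueryBuilder esQuery = ESUtils.buildFilterQuery(filter, false);
+   * }</pre>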
* - * <p>Multiple values can be selected for a filter, and it is currently modeled as string separated by comma + * <p>Multiple values can be selected for a filter, and it is currently modeled as string + * separated by comma * * @param filter the search filter - * @param isTimeseries whether filtering on timeseries index which has differing field type conventions + * @param isTimeseries whether filtering on timeseries index which has differing field type + * conventions * @return built filter query */ @Nonnull @@ -119,65 +126,82 @@ public static BoolQueryBuilder buildFilterQuery(@Nullable Filter filter, boolean } if (filter.getOr() != null) { // If caller is using the new Filters API, build boolean query from that. - filter.getOr().forEach(or -> finalQueryBuilder.should(ESUtils.buildConjunctiveFilterQuery(or, isTimeseries))); + filter + .getOr() + .forEach( + or -> + finalQueryBuilder.should(ESUtils.buildConjunctiveFilterQuery(or, isTimeseries))); } else if (filter.getCriteria() != null) { // Otherwise, build boolean query from the deprecated "criteria" field. log.warn("Received query Filter with a deprecated field 'criteria'. Use 'or' instead."); final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); - filter.getCriteria().forEach(criterion -> { - if (!criterion.getValue().trim().isEmpty() || criterion.hasValues() - || criterion.getCondition() == Condition.IS_NULL) { - andQueryBuilder.must(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } - }); + filter + .getCriteria() + .forEach( + criterion -> { + if (!criterion.getValue().trim().isEmpty() + || criterion.hasValues() + || criterion.getCondition() == Condition.IS_NULL) { + andQueryBuilder.must(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } + }); finalQueryBuilder.should(andQueryBuilder); } return finalQueryBuilder; } @Nonnull - public static BoolQueryBuilder buildConjunctiveFilterQuery(@Nonnull ConjunctiveCriterion conjunctiveCriterion, - boolean isTimeseries) { + public static BoolQueryBuilder buildConjunctiveFilterQuery( + @Nonnull ConjunctiveCriterion conjunctiveCriterion, boolean isTimeseries) { final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); - conjunctiveCriterion.getAnd().forEach(criterion -> { - if (Set.of(Condition.EXISTS, Condition.IS_NULL).contains(criterion.getCondition()) - || !criterion.getValue().trim().isEmpty() || criterion.hasValues()) { - if (!criterion.isNegated()) { - // `filter` instead of `must` (enables caching and bypasses scoring) - andQueryBuilder.filter(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } else { - andQueryBuilder.mustNot(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } - } - }); + conjunctiveCriterion + .getAnd() + .forEach( + criterion -> { + if (Set.of(Condition.EXISTS, Condition.IS_NULL).contains(criterion.getCondition()) + || !criterion.getValue().trim().isEmpty() + || criterion.hasValues()) { + if (!criterion.isNegated()) { + // `filter` instead of `must` (enables caching and bypasses scoring) + andQueryBuilder.filter(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } else { + andQueryBuilder.mustNot(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } + } + }); return andQueryBuilder; } /** - * Builds search query given a {@link Criterion}, containing field, value and association/condition between the two. + * Builds search query given a {@link Criterion}, containing field, value and + * association/condition between the two. 
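+   *
+   * <p>A small illustrative sketch (field name and value hypothetical):
+   *
+   * <pre>{@code
+   * Criterion gt =
+   *     new Criterion()
+   *         .setField("createdAt")
+   *         .setValue("1700000000000")
+   *         .setCondition(Condition.GREATER_THAN);
+   * // Produces a range query along the lines of {"range": {"createdAt": {"gt": "1700000000000"}}}
+   * QueryBuilder rangeQuery = ESUtils.getQueryBuilderFromCriterion(gt, false);
+   * }</pre>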
   *
-   * <p>If the condition between a field and value (specified in {@link Criterion}) is EQUAL, we construct a Terms query.
-   * In this case, a field can take multiple values, specified using comma as a delimiter - this method will split
-   * tokens accordingly. This is done because currently there is no support of associating two different {@link Criterion}
-   * in a {@link Filter} with an OR operator - default operator is AND.
+   * <p>If the condition between a field and value (specified in {@link Criterion}) is EQUAL, we
+   * construct a Terms query. In this case, a field can take multiple values, specified using comma
+   * as a delimiter - this method will split tokens accordingly. This is done because currently
+   * there is no support of associating two different {@link Criterion} in a {@link Filter} with an
+   * OR operator - default operator is AND.
   *
-   * <p>This approach of supporting multiple values using comma as delimiter, prevents us from specifying a value that has comma
-   * as one of it's characters. This is particularly true when one of the values is an urn e.g. "urn:li:example:(1,2,3)".
-   * Hence we do not split the value (using comma as delimiter) if the value starts with "urn:li:".
-   * TODO(https://github.com/datahub-project/datahub-gma/issues/51): support multiple values a field can take without using
-   * delimiters like comma.
+   * <p>This approach of supporting multiple values using comma as a delimiter prevents us from
+   * specifying a value that has a comma as one of its characters. This is particularly true when
+   * one of the values is an urn e.g. "urn:li:example:(1,2,3)". Hence we do not split the value
+   * (using comma as delimiter) if the value starts with "urn:li:".
+   * TODO(https://github.com/datahub-project/datahub-gma/issues/51): support multiple values a field
+   * can take without using delimiters like comma.
   *
-   * <p>If the condition between a field and value is not the same as EQUAL, a Range query is constructed. This
-   * condition does not support multiple values for the same field.
+   * <p>If the condition between a field and value is not the same as EQUAL, a Range query is
+   * constructed. This condition does not support multiple values for the same field.
   *
-   * <p>When CONTAIN, START_WITH and END_WITH conditions are used, the underlying logic is using wildcard query which is
-   * not performant according to ES. For details, please refer to:
+   * <p>When CONTAIN, START_WITH and END_WITH conditions are used, the underlying logic is using
+   * wildcard query which is not performant according to ES. For details, please refer to:
   * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-wildcard-query.html#wildcard-query-field-params
   *
-   * @param criterion {@link Criterion} single criterion which contains field, value and a comparison operator
+   * @param criterion {@link Criterion} single criterion which contains field, value and a
+   *     comparison operator
    */
   @Nonnull
-  public static QueryBuilder getQueryBuilderFromCriterion(@Nonnull final Criterion criterion, boolean isTimeseries) {
+  public static QueryBuilder getQueryBuilderFromCriterion(
+      @Nonnull final Criterion criterion, boolean isTimeseries) {
     final String fieldName = toFacetField(criterion.getField());

     /*
@@ -188,11 +212,12 @@ public static QueryBuilder getQueryBuilderFromCriterion(@Nonnull final Criterion
      * First we handle this expansion, if required, otherwise we build the filter as usual
      * without expansion.
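+     * For example (illustrative): a criterion on "tags" is expanded to sibling criteria on
+     * "fieldTags" and "editedFieldTags" (per FIELDS_TO_EXPANDED_FIELDS_LIST above), with the
+     * expanded criteria OR'd together via should clauses.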
     */
-    final Optional<List<String>> maybeFieldToExpand = Optional.ofNullable(FIELDS_TO_EXPANDED_FIELDS_LIST.get(
-        fieldName));
+    final Optional<List<String>> maybeFieldToExpand =
+        Optional.ofNullable(FIELDS_TO_EXPANDED_FIELDS_LIST.get(fieldName));

     if (maybeFieldToExpand.isPresent()) {
-      return getQueryBuilderFromCriterionForFieldToExpand(maybeFieldToExpand.get(), criterion, isTimeseries);
+      return getQueryBuilderFromCriterionForFieldToExpand(
+          maybeFieldToExpand.get(), criterion, isTimeseries);
     }

     return getQueryBuilderFromCriterionForSingleField(criterion, isTimeseries);
@@ -220,19 +245,21 @@ public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType f
   /**
    * Populates source field of search query with the sort order as per the criterion provided.
    *
-   * <p>
-   * If no sort criterion is provided then the default sorting criterion is chosen which is descending order of score
-   * Furthermore to resolve conflicts, the results are further sorted by ascending order of urn
-   * If the input sort criterion is urn itself, then no additional sort criterion is applied as there will be no conflicts.
-   * When sorting, set the unmappedType param to arbitrary "keyword" so we essentially ignore sorting where indices do not
-   * have the field we are sorting on.
-   * </p>
+   * <p>If no sort criterion is provided then the default sorting criterion is chosen, which is
+   * descending order of score. Furthermore, to resolve conflicts, the results are further sorted by
+   * ascending order of urn. If the input sort criterion is urn itself, then no additional sort
+   * criterion is applied as there will be no conflicts. When sorting, set the unmappedType param to
+   * arbitrary "keyword" so we essentially ignore sorting where indices do not have the field we are
+   * sorting on.
   *
-   * @param searchSourceBuilder {@link SearchSourceBuilder} that needs to be populated with sort order
+   * @param searchSourceBuilder {@link SearchSourceBuilder} that needs to be populated with sort
+   *     order
    * @param sortCriterion {@link SortCriterion} to be applied to the search results
    */
-  public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuilder,
-      @Nullable SortCriterion sortCriterion, List<EntitySpec> entitySpecs) {
+  public static void buildSortOrder(
+      @Nonnull SearchSourceBuilder searchSourceBuilder,
+      @Nullable SortCriterion sortCriterion,
+      List<EntitySpec> entitySpecs) {
     if (sortCriterion == null) {
       searchSourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC));
     } else {
@@ -252,12 +279,17 @@ public static void buildSortOrder(@Nonnull SearchSourceBuild
       }
     }
     if (fieldTypeForDefault.isEmpty()) {
-      log.warn("Sort criterion field " + sortCriterion.getField() + " was not found in any entity spec to be searched");
+      log.warn(
+          "Sort criterion field "
+              + sortCriterion.getField()
+              + " was not found in any entity spec to be searched");
     }
     final SortOrder esSortOrder =
-        (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) ? SortOrder.ASC
+        (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING)
+            ? 
SortOrder.ASC : SortOrder.DESC; - FieldSortBuilder sortBuilder = new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); + FieldSortBuilder sortBuilder = + new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); if (fieldTypeForDefault.isPresent()) { String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get()); if (esFieldtype != null) { @@ -266,17 +298,22 @@ public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuild } searchSourceBuilder.sort(sortBuilder); } - if (sortCriterion == null || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) { - searchSourceBuilder.sort(new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); + if (sortCriterion == null + || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) { + searchSourceBuilder.sort( + new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); } } /** - * Populates source field of search query with the suggestions query so that we get search suggestions back. - * Right now we are only supporting suggestions based on the virtual _entityName field alias. + * Populates source field of search query with the suggestions query so that we get search + * suggestions back. Right now we are only supporting suggestions based on the virtual _entityName + * field alias. */ - public static void buildNameSuggestions(@Nonnull SearchSourceBuilder searchSourceBuilder, @Nullable String textInput) { - SuggestionBuilder<TermSuggestionBuilder> builder = SuggestBuilders.termSuggestion(ENTITY_NAME_FIELD).text(textInput); + public static void buildNameSuggestions( + @Nonnull SearchSourceBuilder searchSourceBuilder, @Nullable String textInput) { + SuggestionBuilder<TermSuggestionBuilder> builder = + SuggestBuilders.termSuggestion(ENTITY_NAME_FIELD).text(textInput); SuggestBuilder suggestBuilder = new SuggestBuilder(); suggestBuilder.addSuggestion(NAME_SUGGESTION, builder); searchSourceBuilder.suggest(suggestBuilder); @@ -302,34 +339,43 @@ public static String toFacetField(@Nonnull final String filterField) { } @Nonnull - public static String toKeywordField(@Nonnull final String filterField, @Nonnull final boolean skipKeywordSuffix) { + public static String toKeywordField( + @Nonnull final String filterField, @Nonnull final boolean skipKeywordSuffix) { return skipKeywordSuffix || KEYWORD_FIELDS.contains(filterField) || PATH_HIERARCHY_FIELDS.contains(filterField) - || filterField.contains(".") ? filterField : filterField + ESUtils.KEYWORD_SUFFIX; + || filterField.contains(".") + ? 
filterField + : filterField + ESUtils.KEYWORD_SUFFIX; } - public static RequestOptions buildReindexTaskRequestOptions(String version, String indexName, String tempIndexName) { + public static RequestOptions buildReindexTaskRequestOptions( + String version, String indexName, String tempIndexName) { return RequestOptions.DEFAULT.toBuilder() .addHeader(OPAQUE_ID_HEADER, getOpaqueIdHeaderValue(version, indexName, tempIndexName)) .build(); } - public static String getOpaqueIdHeaderValue(String version, String indexName, String tempIndexName) { + public static String getOpaqueIdHeaderValue( + String version, String indexName, String tempIndexName) { return String.join(HEADER_VALUE_DELIMITER, version, indexName, tempIndexName); } public static boolean prefixMatch(String id, String version, String indexName) { return Optional.ofNullable(id) - .map(t -> t.startsWith(String.join(HEADER_VALUE_DELIMITER, version, indexName))).orElse(false); + .map(t -> t.startsWith(String.join(HEADER_VALUE_DELIMITER, version, indexName))) + .orElse(false); } public static String extractTargetIndex(String id) { return id.split("[" + HEADER_VALUE_DELIMITER + "]", 3)[2]; } - public static void setSearchAfter(SearchSourceBuilder searchSourceBuilder, @Nullable Object[] sort, - @Nullable String pitId, @Nullable String keepAlive) { + public static void setSearchAfter( + SearchSourceBuilder searchSourceBuilder, + @Nullable Object[] sort, + @Nullable String pitId, + @Nullable String keepAlive) { if (sort != null && sort.length > 0) { searchSourceBuilder.searchAfter(sort); } @@ -357,41 +403,61 @@ private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( criterionToQuery.setValue(criterion.getValue()); } criterionToQuery.setField(toKeywordField(field, isTimeseries)); - orQueryBuilder.should(getQueryBuilderFromCriterionForSingleField(criterionToQuery, isTimeseries)); + orQueryBuilder.should( + getQueryBuilderFromCriterionForSingleField(criterionToQuery, isTimeseries)); } return orQueryBuilder; } @Nonnull - private static QueryBuilder getQueryBuilderFromCriterionForSingleField(@Nonnull Criterion criterion, @Nonnull boolean isTimeseries) { + private static QueryBuilder getQueryBuilderFromCriterionForSingleField( + @Nonnull Criterion criterion, @Nonnull boolean isTimeseries) { final Condition condition = criterion.getCondition(); final String fieldName = toFacetField(criterion.getField()); if (condition == Condition.IS_NULL) { - return QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(criterion.getField())).queryName(fieldName); + return QueryBuilders.boolQuery() + .mustNot(QueryBuilders.existsQuery(criterion.getField())) + .queryName(fieldName); } else if (condition == Condition.EXISTS) { - return QueryBuilders.boolQuery().must(QueryBuilders.existsQuery(criterion.getField())).queryName(fieldName); + return QueryBuilders.boolQuery() + .must(QueryBuilders.existsQuery(criterion.getField())) + .queryName(fieldName); } else if (criterion.hasValues() || criterion.hasValue()) { if (condition == Condition.EQUAL) { return buildEqualsConditionFromCriterion(fieldName, criterion, isTimeseries); // TODO: Support multi-match on the following operators (using new 'values' field) } else if (condition == Condition.GREATER_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()).gt(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .gt(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.GREATER_THAN_OR_EQUAL_TO) { - return 
QueryBuilders.rangeQuery(criterion.getField()).gte(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .gte(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.LESS_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()).lt(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .lt(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.LESS_THAN_OR_EQUAL_TO) { - return QueryBuilders.rangeQuery(criterion.getField()).lte(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .lte(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.CONTAIN) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*").queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*") + .queryName(fieldName); } else if (condition == Condition.START_WITH) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*").queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*") + .queryName(fieldName); } else if (condition == Condition.END_WITH) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim())).queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim())) + .queryName(fieldName); } } throw new UnsupportedOperationException("Unsupported condition: " + condition); @@ -416,8 +482,8 @@ private static QueryBuilder buildEqualsConditionFromCriterion( } /** - * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which - * was created using the new multi-match 'values' field of Criterion.pdl model. + * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which was created + * using the new multi-match 'values' field of Criterion.pdl model. */ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( @Nonnull final String fieldName, @@ -432,39 +498,47 @@ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( return QueryBuilders.termQuery(fieldName, Boolean.parseBoolean(criterion.getValues().get(0))) .queryName(fieldName); } - return QueryBuilders.termsQuery(toKeywordField(criterion.getField(), isTimeseries), criterion.getValues()) + return QueryBuilders.termsQuery( + toKeywordField(criterion.getField(), isTimeseries), criterion.getValues()) .queryName(fieldName); } /** - * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which - * was created using the deprecated 'value' field of Criterion.pdl model. + * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which was created + * using the deprecated 'value' field of Criterion.pdl model. 
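+   *
+   * <p>A hedged sketch of the resulting behavior (values illustrative):
+   *
+   * <pre>{@code
+   * // value "a,b"                -> bool query with one should multi-match clause per token
+   * // value "urn:li:corpuser:jd" -> single match query; urn-style values are never split on commas
+   * }</pre>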
   *
-   * Previously, we supported comma-separate values inside of a single string field,
-   * thus we have to account for splitting and matching against each value below.
+   * <p>Previously, we supported comma-separated values inside of a single string field, thus we have
+   * to account for splitting and matching against each value below.
   *
-   * For all new code, we should be using the new 'values' field for performing multi-match. This
+   * <p>For all new code, we should be using the new 'values' field for performing multi-match. This
   * is simply retained for backwards compatibility of the search API.
    */
   private static QueryBuilder buildEqualsFromCriterionWithValue(
       @Nonnull final String fieldName,
       @Nonnull final Criterion criterion,
       final boolean isTimeseries) {
-    // If the value is an URN style value, then we do not attempt to split it by comma (for obvious reasons)
+    // If the value is an URN style value, then we do not attempt to split it by comma (for obvious
+    // reasons)
     if (isUrn(criterion.getValue())) {
-      return QueryBuilders.matchQuery(toKeywordField(criterion.getField(), isTimeseries), criterion.getValue().trim())
+      return QueryBuilders.matchQuery(
+              toKeywordField(criterion.getField(), isTimeseries), criterion.getValue().trim())
          .queryName(fieldName)
          .analyzer(KEYWORD_ANALYZER);
     }
     final BoolQueryBuilder filters = new BoolQueryBuilder();
     // Cannot assume the existence of a .keyword or other subfield (unless contains `.`)
     // Cannot assume the type of the underlying field or subfield thus KEYWORD_ANALYZER is forced
-    List<String> fields = criterion.getField().contains(".") ? List.of(criterion.getField())
-        : List.of(criterion.getField(), criterion.getField() + ".*");
+    List<String> fields =
+        criterion.getField().contains(".")
+            ? List.of(criterion.getField())
+            : List.of(criterion.getField(), criterion.getField() + ".*");
     Arrays.stream(criterion.getValue().trim().split("\\s*,\\s*"))
-        .forEach(elem -> filters.should(QueryBuilders.multiMatchQuery(elem, fields.toArray(new String[0]))
-            .queryName(fieldName)
-            .analyzer(KEYWORD_ANALYZER)));
+        .forEach(
+            elem ->
+                filters.should(
+                    QueryBuilders.multiMatchQuery(elem, fields.toArray(new String[0]))
+                        .queryName(fieldName)
+                        .analyzer(KEYWORD_ANALYZER)));
     return filters;
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java
index 62a8cd932885e..97eb6ade468ea 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java
@@ -8,17 +8,24 @@
 import java.util.Map;
 import java.util.Set;
 
-
 public class FilterUtils {
 
-  private FilterUtils() {
-  }
+  private FilterUtils() {}
 
   private static final List<String> FILTER_RANKING =
-      ImmutableList.of("_entityType", "typeNames", "platform", "domains", "tags", "glossaryTerms", "container", "owners",
+      ImmutableList.of(
+          "_entityType",
+          "typeNames",
+          "platform",
+          "domains",
+          "tags",
+          "glossaryTerms",
+          "container",
+          "owners",
           "origin");
 
-  public static List<AggregationMetadata> rankFilterGroups(Map<String, AggregationMetadata> aggregations) {
+  public static List<AggregationMetadata> rankFilterGroups(
+      Map<String, AggregationMetadata> aggregations) {
     Set<String> filterGroups = new HashSet<>(aggregations.keySet());
     List<AggregationMetadata> finalAggregations = new ArrayList<>(aggregations.size());
     for (String filterName : FILTER_RANKING) {
diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java index b026686f7abfd..9f1041eaaeca3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java @@ -7,9 +7,8 @@ import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; - public class GZIPUtil { - private GZIPUtil() { } + private GZIPUtil() {} public static String gzipDecompress(byte[] gzipped) { String unzipped; @@ -30,7 +29,8 @@ public static String gzipDecompress(byte[] gzipped) { public static byte[] gzipCompress(String unzipped) { byte[] gzipped; - try (ByteArrayInputStream bis = new ByteArrayInputStream(unzipped.getBytes(StandardCharsets.UTF_8)); + try (ByteArrayInputStream bis = + new ByteArrayInputStream(unzipped.getBytes(StandardCharsets.UTF_8)); ByteArrayOutputStream bos = new ByteArrayOutputStream(); GZIPOutputStream gzipOutputStream = new GZIPOutputStream(bos)) { byte[] buffer = new byte[1024]; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java index 8b56ae0beb3f1..b8cf0626b7251 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java @@ -32,13 +32,10 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; - @Slf4j public class SearchUtils { - private SearchUtils() { - - } + private SearchUtils() {} public static Optional<String> getDocId(@Nonnull Urn urn) { try { @@ -64,23 +61,31 @@ public static Map<String, String> getRequestMap(@Nullable Filter requestParams) ConjunctiveCriterionArray disjunction = requestParams.getOr(); if (disjunction.size() > 1) { - throw new UnsupportedOperationException("To build request map, there must be only one conjunction group."); + throw new UnsupportedOperationException( + "To build request map, there must be only one conjunction group."); } - CriterionArray criterionArray = disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); + CriterionArray criterionArray = + disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); - criterionArray.forEach(criterion -> { - if (!com.linkedin.metadata.query.filter.Condition.EQUAL.equals(criterion.getCondition())) { - throw new UnsupportedOperationException("Unsupported condition: " + criterion.getCondition()); - } - }); + criterionArray.forEach( + criterion -> { + if (!com.linkedin.metadata.query.filter.Condition.EQUAL.equals( + criterion.getCondition())) { + throw new UnsupportedOperationException( + "Unsupported condition: " + criterion.getCondition()); + } + }); - return criterionArray.stream().collect(Collectors.toMap(Criterion::getField, Criterion::getValue)); + return criterionArray.stream() + .collect(Collectors.toMap(Criterion::getField, Criterion::getValue)); } public static boolean isUrn(@Nonnull String value) { - // TODO(https://github.com/datahub-project/datahub-gma/issues/51): This method is a bit of a hack to support searching for - // URNs that have commas in them, while also using commas a delimiter for search. 
We should stop supporting commas
+    // TODO(https://github.com/datahub-project/datahub-gma/issues/51): This method is a bit of a
+    // hack to support searching for
+    // URNs that have commas in them, while also using commas as a delimiter for search. We should
+    // stop supporting commas
     // as delimiter, and then we can stop using this hack.
     return value.startsWith("urn:li:");
   }
@@ -104,40 +109,52 @@ public static String readResourceFile(@Nonnull Class clazz, @Nonnull String file
     }
   }
 
-  public static Filter removeCriteria(@Nullable Filter originalFilter, Predicate<Criterion> shouldRemove) {
+  public static Filter removeCriteria(
+      @Nullable Filter originalFilter, Predicate<Criterion> shouldRemove) {
     if (originalFilter != null && originalFilter.getOr() != null) {
-      return new Filter().setOr(new ConjunctiveCriterionArray(originalFilter.getOr()
-          .stream()
-          .map(criteria -> removeCriteria(criteria, shouldRemove))
-          .filter(criteria -> !criteria.getAnd().isEmpty())
-          .collect(Collectors.toList())));
+      return new Filter()
+          .setOr(
+              new ConjunctiveCriterionArray(
+                  originalFilter.getOr().stream()
+                      .map(criteria -> removeCriteria(criteria, shouldRemove))
+                      .filter(criteria -> !criteria.getAnd().isEmpty())
+                      .collect(Collectors.toList())));
     }
     return originalFilter;
   }
 
-  private static ConjunctiveCriterion removeCriteria(@Nonnull ConjunctiveCriterion conjunctiveCriterion,
-      Predicate<Criterion> shouldRemove) {
-    return new ConjunctiveCriterion().setAnd(new CriterionArray(conjunctiveCriterion.getAnd()
-        .stream()
-        .filter(criterion -> !shouldRemove.test(criterion))
-        .collect(Collectors.toList())));
+  private static ConjunctiveCriterion removeCriteria(
+      @Nonnull ConjunctiveCriterion conjunctiveCriterion, Predicate<Criterion> shouldRemove) {
+    return new ConjunctiveCriterion()
+        .setAnd(
+            new CriterionArray(
+                conjunctiveCriterion.getAnd().stream()
+                    .filter(criterion -> !shouldRemove.test(criterion))
+                    .collect(Collectors.toList())));
   }
 
   @SneakyThrows
   public static AggregationMetadata merge(AggregationMetadata one, AggregationMetadata two) {
     Map<String, Long> mergedMap =
-        Stream.concat(one.getAggregations().entrySet().stream(), two.getAggregations().entrySet().stream())
+        Stream.concat(
+                one.getAggregations().entrySet().stream(),
+                two.getAggregations().entrySet().stream())
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, Long::sum));
 
-    // we want to make sure the values that were used in the filter are prioritized to appear in the response aggregation
-    Set<String> filteredValues = Stream.concat(one.getFilterValues().stream(), two.getFilterValues().stream()).filter(val -> val.isFiltered()).map(
-        val -> val.getValue()
-    ).collect(Collectors.toSet());
+    // we want to make sure the values that were used in the filter are prioritized to appear in the
+    // response aggregation
+    Set<String> filteredValues =
+        Stream.concat(one.getFilterValues().stream(), two.getFilterValues().stream())
+            .filter(val -> val.isFiltered())
+            .map(val -> val.getValue())
+            .collect(Collectors.toSet());
 
     return one.clone()
-        .setDisplayName(two.getDisplayName() != two.getName() ? two.getDisplayName() : one.getDisplayName())
+        .setDisplayName(
+            two.getDisplayName() != two.getName() ? 
two.getDisplayName() : one.getDisplayName()) .setAggregations(new LongMap(mergedMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(mergedMap, filteredValues))); + .setFilterValues( + new FilterValueArray(SearchUtil.convertToFilters(mergedMap, filteredValues))); } public static ListResult toListResult(final SearchResult searchResult) { @@ -149,13 +166,16 @@ public static ListResult toListResult(final SearchResult searchResult) { listResult.setCount(searchResult.getPageSize()); listResult.setTotal(searchResult.getNumEntities()); listResult.setEntities( - new UrnArray(searchResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()))); + new UrnArray( + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); return listResult; } @SneakyThrows - public static SearchFlags applyDefaultSearchFlags(@Nullable SearchFlags inputFlags, @Nullable String query, - @Nonnull SearchFlags defaultFlags) { + public static SearchFlags applyDefaultSearchFlags( + @Nullable SearchFlags inputFlags, @Nullable String query, @Nonnull SearchFlags defaultFlags) { SearchFlags finalSearchFlags = inputFlags != null ? inputFlags : defaultFlags.copy(); if (!finalSearchFlags.hasFulltext() || finalSearchFlags.isFulltext() == null) { finalSearchFlags.setFulltext(defaultFlags.isFulltext()); @@ -163,7 +183,8 @@ public static SearchFlags applyDefaultSearchFlags(@Nullable SearchFlags inputFla if (query == null || Set.of("*", "").contains(query)) { // No highlighting if no query string finalSearchFlags.setSkipHighlighting(true); - } else if (!finalSearchFlags.hasSkipHighlighting() || finalSearchFlags.isSkipHighlighting() == null) { + } else if (!finalSearchFlags.hasSkipHighlighting() + || finalSearchFlags.isSkipHighlighting() == null) { finalSearchFlags.setSkipHighlighting(defaultFlags.isSkipHighlighting()); } if (!finalSearchFlags.hasSkipAggregates() || finalSearchFlags.isSkipAggregates() == null) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index ea7286112f870..b2c615c1f47f5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; @@ -41,7 +44,6 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; - import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; @@ -59,9 +61,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - @Slf4j public class UpdateIndicesService { private static final String DOWNSTREAM_OF = "DownstreamOf"; @@ -76,13 +75,12 @@ public class UpdateIndicesService { @Value("${featureFlags.graphServiceDiffModeEnabled:true}") private boolean _graphDiffMode; + @Value("${featureFlags.searchServiceDiffModeEnabled:true}") private boolean _searchDiffMode; - private static final 
Set<ChangeType> UPDATE_CHANGE_TYPES = ImmutableSet.of( - ChangeType.UPSERT, - ChangeType.RESTATE, - ChangeType.PATCH); + private static final Set<ChangeType> UPDATE_CHANGE_TYPES = + ImmutableSet.of(ChangeType.UPSERT, ChangeType.RESTATE, ChangeType.PATCH); @VisibleForTesting public void setGraphDiffMode(boolean graphDiffMode) { @@ -95,13 +93,13 @@ public void setSearchDiffMode(boolean searchDiffMode) { } public UpdateIndicesService( - GraphService graphService, - EntitySearchService entitySearchService, - TimeseriesAspectService timeseriesAspectService, - SystemMetadataService systemMetadataService, - EntityRegistry entityRegistry, - SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { + GraphService graphService, + EntitySearchService entitySearchService, + TimeseriesAspectService timeseriesAspectService, + SystemMetadataService systemMetadataService, + EntityRegistry entityRegistry, + SearchDocumentTransformer searchDocumentTransformer, + EntityIndexBuilders entityIndexBuilders) { _graphService = graphService; _entitySearchService = entitySearchService; _timeseriesAspectService = timeseriesAspectService; @@ -123,14 +121,12 @@ public void handleChangeEvent(@Nonnull final MetadataChangeLog event) { } } - /** - * This very important method processes {@link MetadataChangeLog} events - * that represent changes to the Metadata Graph. + * This very important method processes {@link MetadataChangeLog} events that represent changes to + * the Metadata Graph. * - * In particular, it handles updating the Search, Graph, Timeseries, and - * System Metadata stores in response to a given change type to reflect - * the changes present in the new aspect. + * <p>In particular, it handles updating the Search, Graph, Timeseries, and System Metadata stores + * in response to a given change type to reflect the changes present in the new aspect. * * @param event the change event to be processed. */ @@ -147,23 +143,29 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro AspectSpec aspectSpec = entitySpec.getAspectSpec(event.getAspectName()); if (aspectSpec == null) { throw new RuntimeException( - String.format("Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", - event.getEntityType(), - event.getAspectName())); + String.format( + "Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", + event.getEntityType(), event.getAspectName())); } - RecordTemplate aspect = GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - aspectSpec); + RecordTemplate aspect = + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), event.getAspect().getContentType(), aspectSpec); GenericAspect previousAspectValue = event.getPreviousAspectValue(); - RecordTemplate previousAspect = previousAspectValue != null - ? GenericRecordUtils.deserializeAspect(previousAspectValue.getValue(), previousAspectValue.getContentType(), aspectSpec) - : null; + RecordTemplate previousAspect = + previousAspectValue != null + ? GenericRecordUtils.deserializeAspect( + previousAspectValue.getValue(), previousAspectValue.getContentType(), aspectSpec) + : null; // Step 0. If the aspect is timeseries, add to its timeseries index. 
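+    // (Steps 1 and 2 below then update the search and graph indices respectively; the else
+    // branch additionally writes non-timeseries aspects to the system metadata index first.)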
if (aspectSpec.isTimeseries()) { - updateTimeseriesFields(event.getEntityType(), event.getAspectName(), urn, aspect, aspectSpec, + updateTimeseriesFields( + event.getEntityType(), + event.getAspectName(), + urn, + aspect, + aspectSpec, event.getSystemMetadata()); } else { // Inject into the System Metadata Index when an aspect is non-timeseries only. @@ -173,13 +175,16 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro } // Step 1. For all aspects, attempt to update Search - updateSearchService(entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect); + updateSearchService( + entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect); // Step 2. For all aspects, attempt to update Graph SystemMetadata systemMetadata = event.getSystemMetadata(); - if (_graphDiffMode && !(_graphService instanceof DgraphGraphService) - && (systemMetadata == null || systemMetadata.getProperties() == null - || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { + if (_graphDiffMode + && !(_graphService instanceof DgraphGraphService) + && (systemMetadata == null + || systemMetadata.getProperties() == null + || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { updateGraphServiceDiff(urn, aspectSpec, previousAspect, aspect, event); } else { updateGraphService(urn, aspectSpec, aspect, event); @@ -187,14 +192,14 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro } /** - * This very important method processes {@link MetadataChangeLog} deletion events - * to cleanup the Metadata Graph when an aspect or entity is removed. + * This very important method processes {@link MetadataChangeLog} deletion events to cleanup the + * Metadata Graph when an aspect or entity is removed. * - * In particular, it handles updating the Search, Graph, Timeseries, and - * System Metadata stores to reflect the deletion of a particular aspect. + * <p>In particular, it handles updating the Search, Graph, Timeseries, and System Metadata stores + * to reflect the deletion of a particular aspect. * - * Note that if an entity's key aspect is deleted, the entire entity will be purged - * from search, graph, timeseries, etc. + * <p>Note that if an entity's key aspect is deleted, the entire entity will be purged from + * search, graph, timeseries, etc. * * @param event the change event to be processed. */ @@ -211,19 +216,23 @@ public void handleDeleteChangeEvent(@Nonnull final MetadataChangeLog event) { AspectSpec aspectSpec = entitySpec.getAspectSpec(event.getAspectName()); if (aspectSpec == null) { throw new RuntimeException( - String.format("Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", - event.getEntityType(), - event.getAspectName())); + String.format( + "Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. 
Cannot update indices for MCL.", + event.getEntityType(), event.getAspectName())); } - RecordTemplate aspect = GenericRecordUtils.deserializeAspect(event.getPreviousAspectValue().getValue(), - event.getPreviousAspectValue().getContentType(), aspectSpec); + RecordTemplate aspect = + GenericRecordUtils.deserializeAspect( + event.getPreviousAspectValue().getValue(), + event.getPreviousAspectValue().getContentType(), + aspectSpec); Boolean isDeletingKey = event.getAspectName().equals(entitySpec.getKeyAspectName()); if (!aspectSpec.isTimeseries()) { deleteSystemMetadata(urn, aspectSpec, isDeletingKey); deleteGraphData(urn, aspectSpec, aspect, isDeletingKey, event); - deleteSearchData(_entitySearchService, urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey); + deleteSearchData( + _entitySearchService, urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey); } } @@ -231,8 +240,7 @@ public void handleDeleteChangeEvent(@Nonnull final MetadataChangeLog event) { private void updateFineGrainedEdgesAndRelationships( RecordTemplate aspect, List<Edge> edgesToAdd, - HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded - ) { + HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded) { UpstreamLineage upstreamLineage = new UpstreamLineage(aspect.data()); if (upstreamLineage.getFineGrainedLineages() != null) { for (FineGrainedLineage fineGrainedLineage : upstreamLineage.getFineGrainedLineages()) { @@ -243,8 +251,10 @@ private void updateFineGrainedEdgesAndRelationships( for (Urn downstream : fineGrainedLineage.getDownstreams()) { for (Urn upstream : fineGrainedLineage.getUpstreams()) { // TODO: add edges uniformly across aspects - edgesToAdd.add(new Edge(downstream, upstream, DOWNSTREAM_OF, null, null, null, null, null)); - Set<String> relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(downstream, new HashSet<>()); + edgesToAdd.add( + new Edge(downstream, upstream, DOWNSTREAM_OF, null, null, null, null, null)); + Set<String> relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(downstream, new HashSet<>()); relationshipTypes.add(DOWNSTREAM_OF); urnToRelationshipTypesBeingAdded.put(downstream, relationshipTypes); } @@ -253,10 +263,14 @@ private void updateFineGrainedEdgesAndRelationships( } } - private Urn generateSchemaFieldUrn(@Nonnull final String resourceUrn, @Nonnull final String fieldPath) { - // we rely on schemaField fieldPaths to be encoded since we do that with fineGrainedLineage on the ingestion side - final String encodedFieldPath = fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); - final SchemaFieldKey key = new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); + private Urn generateSchemaFieldUrn( + @Nonnull final String resourceUrn, @Nonnull final String fieldPath) { + // we rely on schemaField fieldPaths to be encoded since we do that with fineGrainedLineage on + // the ingestion side + final String encodedFieldPath = + fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); + final SchemaFieldKey key = + new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); return EntityKeyUtils.convertEntityKeyToUrn(key, Constants.SCHEMA_FIELD_ENTITY_NAME); } @@ -265,15 +279,27 @@ private void updateInputFieldEdgesAndRelationships( @Nonnull final Urn urn, @Nonnull final InputFields inputFields, @Nonnull final List<Edge> edgesToAdd, - @Nonnull final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded - ) { + 
@Nonnull final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded) { if (inputFields.hasFields()) { for (final InputField field : inputFields.getFields()) { - if (field.hasSchemaFieldUrn() && field.hasSchemaField() && field.getSchemaField().hasFieldPath()) { - final Urn sourceFieldUrn = generateSchemaFieldUrn(urn.toString(), field.getSchemaField().getFieldPath()); + if (field.hasSchemaFieldUrn() + && field.hasSchemaField() + && field.getSchemaField().hasFieldPath()) { + final Urn sourceFieldUrn = + generateSchemaFieldUrn(urn.toString(), field.getSchemaField().getFieldPath()); // TODO: add edges uniformly across aspects - edgesToAdd.add(new Edge(sourceFieldUrn, field.getSchemaFieldUrn(), DOWNSTREAM_OF, null, null, null, null, null)); - final Set<String> relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(sourceFieldUrn, new HashSet<>()); + edgesToAdd.add( + new Edge( + sourceFieldUrn, + field.getSchemaFieldUrn(), + DOWNSTREAM_OF, + null, + null, + null, + null, + null)); + final Set<String> relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(sourceFieldUrn, new HashSet<>()); relationshipTypes.add(DOWNSTREAM_OF); urnToRelationshipTypesBeingAdded.put(sourceFieldUrn, relationshipTypes); } @@ -286,54 +312,59 @@ private Pair<List<Edge>, HashMap<Urn, Set<String>>> getEdgesAndRelationshipTypes @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, @Nonnull final MetadataChangeLog event, - final boolean isNewAspectVersion - ) { + final boolean isNewAspectVersion) { final List<Edge> edgesToAdd = new ArrayList<>(); final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = new HashMap<>(); - // we need to manually set schemaField <-> schemaField edges for fineGrainedLineage and inputFields + // we need to manually set schemaField <-> schemaField edges for fineGrainedLineage and + // inputFields // since @Relationship only links between the parent entity urn and something else. 
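+    // Illustrative example (urn hypothetical): a fine-grained column edge is stored as
+    // schemaField -> schemaField, e.g. urn:li:schemaField:(<datasetUrn>,user_id) DownstreamOf its
+    // upstream column's schemaField urn, rather than as a dataset -> dataset edge.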
if (aspectSpec.getName().equals(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { updateFineGrainedEdgesAndRelationships(aspect, edgesToAdd, urnToRelationshipTypesBeingAdded); } if (aspectSpec.getName().equals(Constants.INPUT_FIELDS_ASPECT_NAME)) { final InputFields inputFields = new InputFields(aspect.data()); - updateInputFieldEdgesAndRelationships(urn, inputFields, edgesToAdd, urnToRelationshipTypesBeingAdded); + updateInputFieldEdgesAndRelationships( + urn, inputFields, edgesToAdd, urnToRelationshipTypesBeingAdded); } Map<RelationshipFieldSpec, List<Object>> extractedFields = FieldExtractor.extractFields(aspect, aspectSpec.getRelationshipFieldSpecs()); for (Map.Entry<RelationshipFieldSpec, List<Object>> entry : extractedFields.entrySet()) { - Set<String> relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(urn, new HashSet<>()); + Set<String> relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(urn, new HashSet<>()); relationshipTypes.add(entry.getKey().getRelationshipName()); urnToRelationshipTypesBeingAdded.put(urn, relationshipTypes); - final List<Edge> newEdges = GraphIndexUtils.extractGraphEdges(entry, aspect, urn, event, isNewAspectVersion); + final List<Edge> newEdges = + GraphIndexUtils.extractGraphEdges(entry, aspect, urn, event, isNewAspectVersion); edgesToAdd.addAll(newEdges); } return Pair.of(edgesToAdd, urnToRelationshipTypesBeingAdded); } - /** - * Process snapshot and update graph index - */ + /** Process snapshot and update graph index */ private void updateGraphService( @Nonnull final Urn urn, @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { Pair<List<Edge>, HashMap<Urn, Set<String>>> edgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, aspect, event, true); final List<Edge> edgesToAdd = edgeAndRelationTypes.getFirst(); - final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = edgeAndRelationTypes.getSecond(); + final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = + edgeAndRelationTypes.getSecond(); log.debug("Here's the relationship types found {}", urnToRelationshipTypesBeingAdded); if (urnToRelationshipTypesBeingAdded.size() > 0) { for (Map.Entry<Urn, Set<String>> entry : urnToRelationshipTypesBeingAdded.entrySet()) { - _graphService.removeEdgesFromNode(entry.getKey(), new ArrayList<>(entry.getValue()), - newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)); + _graphService.removeEdgesFromNode( + entry.getKey(), + new ArrayList<>(entry.getValue()), + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING)); } edgesToAdd.forEach(_graphService::addEdge); } @@ -344,15 +375,17 @@ private void updateGraphServiceDiff( @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspect, @Nonnull final RecordTemplate newAspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { Pair<List<Edge>, HashMap<Urn, Set<String>>> oldEdgeAndRelationTypes = null; if (oldAspect != null) { - oldEdgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, oldAspect, event, false); + oldEdgeAndRelationTypes = + getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, oldAspect, event, false); } final List<Edge> oldEdges = - oldEdgeAndRelationTypes != null ? 
oldEdgeAndRelationTypes.getFirst() : Collections.emptyList(); + oldEdgeAndRelationTypes != null + ? oldEdgeAndRelationTypes.getFirst() + : Collections.emptyList(); final Set<Edge> oldEdgeSet = new HashSet<>(oldEdges); Pair<List<Edge>, HashMap<Urn, Set<String>>> newEdgeAndRelationTypes = @@ -362,14 +395,12 @@ private void updateGraphServiceDiff( final Set<Edge> newEdgeSet = new HashSet<>(newEdges); // Edges to add - final List<Edge> additiveDifference = newEdgeSet.stream() - .filter(edge -> !oldEdgeSet.contains(edge)) - .collect(Collectors.toList()); + final List<Edge> additiveDifference = + newEdgeSet.stream().filter(edge -> !oldEdgeSet.contains(edge)).collect(Collectors.toList()); // Edges to remove - final List<Edge> subtractiveDifference = oldEdgeSet.stream() - .filter(edge -> !newEdgeSet.contains(edge)) - .collect(Collectors.toList()); + final List<Edge> subtractiveDifference = + oldEdgeSet.stream().filter(edge -> !newEdgeSet.contains(edge)).collect(Collectors.toList()); // Edges to update final List<Edge> mergedEdges = getMergedEdges(oldEdgeSet, newEdgeSet); @@ -394,17 +425,18 @@ private void updateGraphServiceDiff( } private static List<Edge> getMergedEdges(final Set<Edge> oldEdgeSet, final Set<Edge> newEdgeSet) { - final Map<Integer, com.linkedin.metadata.graph.Edge> oldEdgesMap = oldEdgeSet - .stream() - .map(edge -> Pair.of(edge.hashCode(), edge)) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); + final Map<Integer, com.linkedin.metadata.graph.Edge> oldEdgesMap = + oldEdgeSet.stream() + .map(edge -> Pair.of(edge.hashCode(), edge)) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); final List<com.linkedin.metadata.graph.Edge> mergedEdges = new ArrayList<>(); if (!oldEdgesMap.isEmpty()) { for (com.linkedin.metadata.graph.Edge newEdge : newEdgeSet) { if (oldEdgesMap.containsKey(newEdge.hashCode())) { final com.linkedin.metadata.graph.Edge oldEdge = oldEdgesMap.get(newEdge.hashCode()); - final com.linkedin.metadata.graph.Edge mergedEdge = GraphIndexUtils.mergeEdges(oldEdge, newEdge); + final com.linkedin.metadata.graph.Edge mergedEdge = + GraphIndexUtils.mergeEdges(oldEdge, newEdge); mergedEdges.add(mergedEdge); } } @@ -413,18 +445,21 @@ private static List<Edge> getMergedEdges(final Set<Edge> oldEdgeSet, final Set<E return mergedEdges; } - /** - * Process snapshot and update search index - */ - private void updateSearchService(String entityName, Urn urn, - AspectSpec aspectSpec, RecordTemplate aspect, - @Nullable SystemMetadata systemMetadata, @Nullable RecordTemplate previousAspect) { + /** Process snapshot and update search index */ + private void updateSearchService( + String entityName, + Urn urn, + AspectSpec aspectSpec, + RecordTemplate aspect, + @Nullable SystemMetadata systemMetadata, + @Nullable RecordTemplate previousAspect) { Optional<String> searchDocument; Optional<String> previousSearchDocument = Optional.empty(); try { searchDocument = _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, false); } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -439,14 +474,18 @@ private void updateSearchService(String entityName, Urn urn, } String searchDocumentValue = searchDocument.get(); - if (_searchDiffMode && (systemMetadata == null || systemMetadata.getProperties() == null - || 
!Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { + if (_searchDiffMode + && (systemMetadata == null + || systemMetadata.getProperties() == null + || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { if (previousAspect != null) { try { - previousSearchDocument = _searchDocumentTransformer.transformAspect(urn, previousAspect, aspectSpec, false); + previousSearchDocument = + _searchDocumentTransformer.transformAspect(urn, previousAspect, aspectSpec, false); } catch (Exception e) { log.error( - "Error in getting documents from previous aspect state: {} for aspect {}, continuing without diffing.", e, + "Error in getting documents from previous aspect state: {} for aspect {}, continuing without diffing.", + e, aspectSpec.getName()); } } @@ -463,11 +502,14 @@ private void updateSearchService(String entityName, Urn urn, _entitySearchService.upsertDocument(entityName, searchDocument.get(), docId.get()); } - /** - * Process snapshot and update time-series index - */ - private void updateTimeseriesFields(String entityType, String aspectName, Urn urn, RecordTemplate aspect, - AspectSpec aspectSpec, SystemMetadata systemMetadata) { + /** Process snapshot and update time-series index */ + private void updateTimeseriesFields( + String entityType, + String aspectName, + Urn urn, + RecordTemplate aspect, + AspectSpec aspectSpec, + SystemMetadata systemMetadata) { Map<String, JsonNode> documents; try { documents = TimeseriesAspectTransformer.transform(urn, aspect, aspectSpec, systemMetadata); @@ -475,12 +517,17 @@ private void updateTimeseriesFields(String entityType, String aspectName, Urn ur log.error("Failed to generate timeseries document from aspect: {}", e.toString()); return; } - documents.entrySet().forEach(document -> { - _timeseriesAspectService.upsertDocument(entityType, aspectName, document.getKey(), document.getValue()); - }); + documents + .entrySet() + .forEach( + document -> { + _timeseriesAspectService.upsertDocument( + entityType, aspectName, document.getKey(), document.getValue()); + }); } - private void updateSystemMetadata(SystemMetadata systemMetadata, Urn urn, AspectSpec aspectSpec, RecordTemplate aspect) { + private void updateSystemMetadata( + SystemMetadata systemMetadata, Urn urn, AspectSpec aspectSpec, RecordTemplate aspect) { _systemMetadataService.insert(systemMetadata, urn.toString(), aspectSpec.getName()); // If processing status aspect update all aspects for this urn to removed @@ -496,7 +543,9 @@ private void deleteSystemMetadata(Urn urn, AspectSpec aspectSpec, Boolean isKeyA _systemMetadataService.deleteUrn(urn.toString()); } else { // Delete all aspects from system metadata service - log.debug(String.format("Deleting system metadata for urn: %s, aspect: %s", urn, aspectSpec.getName())); + log.debug( + String.format( + "Deleting system metadata for urn: %s, aspect: %s", urn, aspectSpec.getName())); _systemMetadataService.deleteAspect(urn.toString(), aspectSpec.getName()); } } @@ -506,8 +555,7 @@ private void deleteGraphData( @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, @Nonnull final Boolean isKeyAspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { if (isKeyAspect) { _graphService.removeNode(urn); return; @@ -516,17 +564,27 @@ private void deleteGraphData( Pair<List<Edge>, HashMap<Urn, Set<String>>> edgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, aspect, event, true); - final HashMap<Urn, Set<String>> 
urnToRelationshipTypesBeingAdded = edgeAndRelationTypes.getSecond(); + final HashMap<Urn, Set<String>> urnToRelationshipTypesBeingAdded = + edgeAndRelationTypes.getSecond(); if (urnToRelationshipTypesBeingAdded.size() > 0) { for (Map.Entry<Urn, Set<String>> entry : urnToRelationshipTypesBeingAdded.entrySet()) { - _graphService.removeEdgesFromNode(entry.getKey(), new ArrayList<>(entry.getValue()), - createRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)); + _graphService.removeEdgesFromNode( + entry.getKey(), + new ArrayList<>(entry.getValue()), + createRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING)); } } } - private void deleteSearchData(EntitySearchService entitySearchService, Urn urn, String entityName, - AspectSpec aspectSpec, RecordTemplate aspect, Boolean isKeyAspect) { + private void deleteSearchData( + EntitySearchService entitySearchService, + Urn urn, + String entityName, + AspectSpec aspectSpec, + RecordTemplate aspect, + Boolean isKeyAspect) { String docId; try { docId = URLEncoder.encode(urn.toString(), "UTF-8"); @@ -542,9 +600,11 @@ private void deleteSearchData(EntitySearchService entitySearchService, Urn urn, Optional<String> searchDocument; try { - searchDocument = _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, true); // TODO + searchDocument = + _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, true); // TODO } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -560,14 +620,16 @@ private EntitySpec getEventEntitySpec(@Nonnull final MetadataChangeLog event) { return _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { throw new RuntimeException( - String.format("Failed to retrieve Entity Spec for entity with name %s. Cannot update indices for MCL.", + String.format( + "Failed to retrieve Entity Spec for entity with name %s. Cannot update indices for MCL.", event.getEntityType())); } } /** - * Allow internal use of the system entity client. Solves recursive dependencies between the UpdateIndicesService - * and the SystemJavaEntityClient + * Allow internal use of the system entity client. Solves recursive dependencies between the + * UpdateIndicesService and the SystemJavaEntityClient + * * @param systemEntityClient system entity client */ public void setSystemEntityClient(SystemEntityClient systemEntityClient) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java index 64ad88c08a741..9aa0cdca99f68 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java @@ -1,22 +1,20 @@ package com.linkedin.metadata.shared; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; - import java.io.IOException; import java.util.List; - public interface ElasticSearchIndexed { - /** - * The index configurations for the given service. - * @return List of reindex configurations - */ - List<ReindexConfig> buildReindexConfigs() throws IOException; + /** + * The index configurations for the given service. 
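+   * (For example, ElasticSearchSystemMetadataService later in this patch implements this by
+   * returning a single ReindexConfig for its system metadata index.)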
+ * + * @return List of reindex configurations + */ + List<ReindexConfig> buildReindexConfigs() throws IOException; - /** - * Mirrors the service's functions which - * are expected to build/reindex as needed based - * on the reindex configurations above - */ - void reindexAll() throws IOException; + /** + * Mirrors the service's functions which are expected to build/reindex as needed based on the + * reindex configurations above + */ + void reindexAll() throws IOException; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java index 5eb03eb23d01a..cf1674ac00480 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.systemmetadata; +import static com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService.INDEX_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import com.linkedin.metadata.search.utils.ESUtils; @@ -34,9 +36,6 @@ import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService.INDEX_NAME; - - @Slf4j @RequiredArgsConstructor public class ESSystemMetadataDAO { @@ -47,17 +46,18 @@ public class ESSystemMetadataDAO { /** * Gets the status of a Task running in ElasticSearch + * * @param taskId the task ID to get the status of */ public Optional<GetTaskResponse> getTaskStatus(@Nonnull String nodeId, long taskId) { - final GetTaskRequest taskRequest = new GetTaskRequest( - nodeId, - taskId - ); + final GetTaskRequest taskRequest = new GetTaskRequest(nodeId, taskId); try { return client.tasks().get(taskRequest, RequestOptions.DEFAULT); } catch (IOException e) { - log.error(String.format("ERROR: Failed to get task status for %s:%d. See stacktrace for a more detailed error:", nodeId, taskId)); + log.error( + String.format( + "ERROR: Failed to get task status for %s:%d. 
See stacktrace for a more detailed error:", + nodeId, taskId)); e.printStackTrace(); } return Optional.empty(); @@ -70,8 +70,8 @@ public Optional<GetTaskResponse> getTaskStatus(@Nonnull String nodeId, long task * @param docId the ID of the document */ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { - final UpdateRequest updateRequest = new UpdateRequest( - indexConvention.getIndexName(INDEX_NAME), docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexConvention.getIndexName(INDEX_NAME), docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -80,7 +80,8 @@ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { } public DeleteResponse deleteByDocId(@Nonnull final String docId) { - DeleteRequest deleteRequest = new DeleteRequest(indexConvention.getIndexName(INDEX_NAME), docId); + DeleteRequest deleteRequest = + new DeleteRequest(indexConvention.getIndexName(INDEX_NAME), docId); try { final DeleteResponse deleteResponse = client.delete(deleteRequest, RequestOptions.DEFAULT); @@ -96,24 +97,26 @@ public BulkByScrollResponse deleteByUrn(@Nonnull final String urn) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); finalQuery.must(QueryBuilders.termQuery("urn", urn)); - final Optional<BulkByScrollResponse> deleteResponse = bulkProcessor.deleteByQuery(finalQuery, - indexConvention.getIndexName(INDEX_NAME)); + final Optional<BulkByScrollResponse> deleteResponse = + bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)); return deleteResponse.orElse(null); } - public BulkByScrollResponse deleteByUrnAspect(@Nonnull final String urn, @Nonnull final String aspect) { + public BulkByScrollResponse deleteByUrnAspect( + @Nonnull final String urn, @Nonnull final String aspect) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); finalQuery.must(QueryBuilders.termQuery("urn", urn)); finalQuery.must(QueryBuilders.termQuery("aspect", aspect)); - final Optional<BulkByScrollResponse> deleteResponse = bulkProcessor.deleteByQuery(finalQuery, - indexConvention.getIndexName(INDEX_NAME)); + final Optional<BulkByScrollResponse> deleteResponse = + bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)); return deleteResponse.orElse(null); } - public SearchResponse findByParams(Map<String, String> searchParams, boolean includeSoftDeleted, int from, int size) { + public SearchResponse findByParams( + Map<String, String> searchParams, boolean includeSoftDeleted, int from, int size) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -147,8 +150,13 @@ public SearchResponse findByParams(Map<String, String> searchParams, boolean inc } // TODO: Scroll impl for searches bound by 10k limit - public SearchResponse findByParams(Map<String, String> searchParams, boolean includeSoftDeleted, @Nullable Object[] sort, - @Nullable String pitId, @Nonnull String keepAlive, int size) { + public SearchResponse findByParams( + Map<String, String> searchParams, + boolean includeSoftDeleted, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + int size) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -181,8 +189,8 @@ public SearchResponse findByParams(Map<String, String> searchParams, boolean inc return null; } - public SearchResponse findByRegistry(String registryName, String registryVersion, boolean 
includeSoftDeleted, - int from, int size) { + public SearchResponse findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size) { Map<String, String> params = new HashMap<>(); params.put("registryName", registryName); params.put("registryVersion", registryVersion); @@ -210,11 +218,13 @@ public SearchResponse findRuns(Integer pageOffset, Integer pageSize) { bucketSort.size(pageSize); bucketSort.from(pageOffset); - TermsAggregationBuilder aggregation = AggregationBuilders.terms("runId") - .field("runId") - .subAggregation(AggregationBuilders.max("maxTimestamp").field("lastUpdated")) - .subAggregation(bucketSort) - .subAggregation(AggregationBuilders.filter("removed", QueryBuilders.termQuery("removed", "true"))); + TermsAggregationBuilder aggregation = + AggregationBuilders.terms("runId") + .field("runId") + .subAggregation(AggregationBuilders.max("maxTimestamp").field("lastUpdated")) + .subAggregation(bucketSort) + .subAggregation( + AggregationBuilders.filter("removed", QueryBuilders.termQuery("removed", "true"))); searchSourceBuilder.aggregation(aggregation); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java index e9ee1d6ee78d5..6fbe7cfe882ce 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java @@ -40,10 +40,10 @@ import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.metrics.ParsedMax; - @Slf4j @RequiredArgsConstructor -public class ElasticSearchSystemMetadataService implements SystemMetadataService, ElasticSearchIndexed { +public class ElasticSearchSystemMetadataService + implements SystemMetadataService, ElasticSearchIndexed { private final ESBulkProcessor _esBulkProcessor; private final IndexConvention _indexConvention; @@ -58,9 +58,15 @@ public class ElasticSearchSystemMetadataService implements SystemMetadataService private static final String FIELD_LAST_UPDATED = "lastUpdated"; private static final String FIELD_REGISTRY_NAME = "registryName"; private static final String FIELD_REGISTRY_VERSION = "registryVersion"; - private static final Set<String> INDEX_FIELD_SET = new HashSet<>( - Arrays.asList(FIELD_URN, FIELD_ASPECT, FIELD_RUNID, FIELD_LAST_UPDATED, FIELD_REGISTRY_NAME, - FIELD_REGISTRY_VERSION)); + private static final Set<String> INDEX_FIELD_SET = + new HashSet<>( + Arrays.asList( + FIELD_URN, + FIELD_ASPECT, + FIELD_RUNID, + FIELD_LAST_UPDATED, + FIELD_REGISTRY_NAME, + FIELD_REGISTRY_VERSION)); private String toDocument(SystemMetadata systemMetadata, String urn, String aspect) { final ObjectNode document = JsonNodeFactory.instance.objectNode(); @@ -112,12 +118,13 @@ public void setDocStatus(String urn, boolean removed) { final List<AspectRowSummary> aspectList = findByParams(ImmutableMap.of("urn", urn), !removed, 0, ESUtils.MAX_RESULT_SIZE); // for each -> toDocId and set removed to true for all - aspectList.forEach(aspect -> { - final String docId = toDocId(aspect.getUrn(), aspect.getAspectName()); - final ObjectNode document = JsonNodeFactory.instance.objectNode(); - document.put("removed", removed); - _esDAO.upsertDocument(docId, document.toString()); - }); + aspectList.forEach( + aspect -> { + final String docId = toDocId(aspect.getUrn(), 
aspect.getAspectName()); + final ObjectNode document = JsonNodeFactory.instance.objectNode(); + document.put("removed", removed); + _esDAO.upsertDocument(docId, document.toString()); + }); } @Override @@ -133,36 +140,44 @@ public void insert(@Nullable SystemMetadata systemMetadata, String urn, String a } @Override - public List<AspectRowSummary> findByRunId(String runId, boolean includeSoftDeleted, int from, int size) { - return findByParams(Collections.singletonMap(FIELD_RUNID, runId), includeSoftDeleted, from, size); + public List<AspectRowSummary> findByRunId( + String runId, boolean includeSoftDeleted, int from, int size) { + return findByParams( + Collections.singletonMap(FIELD_RUNID, runId), includeSoftDeleted, from, size); } @Override - public List<AspectRowSummary> findByUrn(String urn, boolean includeSoftDeleted, int from, int size) { + public List<AspectRowSummary> findByUrn( + String urn, boolean includeSoftDeleted, int from, int size) { return findByParams(Collections.singletonMap(FIELD_URN, urn), includeSoftDeleted, from, size); } @Override - public List<AspectRowSummary> findByParams(Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, - int size) { - SearchResponse searchResponse = _esDAO.findByParams(systemMetaParams, includeSoftDeleted, from, size); + public List<AspectRowSummary> findByParams( + Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, int size) { + SearchResponse searchResponse = + _esDAO.findByParams(systemMetaParams, includeSoftDeleted, from, size); if (searchResponse != null) { SearchHits hits = searchResponse.getHits(); - List<AspectRowSummary> summaries = Arrays.stream(hits.getHits()).map(hit -> { - Map<String, Object> values = hit.getSourceAsMap(); - AspectRowSummary summary = new AspectRowSummary(); - summary.setRunId((String) values.get(FIELD_RUNID)); - summary.setAspectName((String) values.get(FIELD_ASPECT)); - summary.setUrn((String) values.get(FIELD_URN)); - Object timestamp = values.get(FIELD_LAST_UPDATED); - if (timestamp instanceof Long) { - summary.setTimestamp((Long) timestamp); - } else if (timestamp instanceof Integer) { - summary.setTimestamp(Long.valueOf((Integer) timestamp)); - } - summary.setKeyAspect(((String) values.get(FIELD_ASPECT)).endsWith("Key")); - return summary; - }).collect(Collectors.toList()); + List<AspectRowSummary> summaries = + Arrays.stream(hits.getHits()) + .map( + hit -> { + Map<String, Object> values = hit.getSourceAsMap(); + AspectRowSummary summary = new AspectRowSummary(); + summary.setRunId((String) values.get(FIELD_RUNID)); + summary.setAspectName((String) values.get(FIELD_ASPECT)); + summary.setUrn((String) values.get(FIELD_URN)); + Object timestamp = values.get(FIELD_LAST_UPDATED); + if (timestamp instanceof Long) { + summary.setTimestamp((Long) timestamp); + } else if (timestamp instanceof Integer) { + summary.setTimestamp(Long.valueOf((Integer) timestamp)); + } + summary.setKeyAspect(((String) values.get(FIELD_ASPECT)).endsWith("Key")); + return summary; + }) + .collect(Collectors.toList()); return summaries; } else { return Collections.emptyList(); @@ -170,8 +185,8 @@ public List<AspectRowSummary> findByParams(Map<String, String> systemMetaParams, } @Override - public List<AspectRowSummary> findByRegistry(String registryName, String registryVersion, boolean includeSoftDeleted, - int from, int size) { + public List<AspectRowSummary> findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size) { Map<String, String> 
registryParams = new HashMap<>(); registryParams.put(FIELD_REGISTRY_NAME, registryName); registryParams.put(FIELD_REGISTRY_VERSION, registryVersion); @@ -179,26 +194,34 @@ public List<AspectRowSummary> findByRegistry(String registryName, String registr } @Override - public List<IngestionRunSummary> listRuns(Integer pageOffset, Integer pageSize, boolean includeSoftDeleted) { + public List<IngestionRunSummary> listRuns( + Integer pageOffset, Integer pageSize, boolean includeSoftDeleted) { SearchResponse response = _esDAO.findRuns(pageOffset, pageSize); - List<? extends Terms.Bucket> buckets = ((ParsedStringTerms) response.getAggregations().get("runId")).getBuckets(); + List<? extends Terms.Bucket> buckets = + ((ParsedStringTerms) response.getAggregations().get("runId")).getBuckets(); if (!includeSoftDeleted) { - buckets.removeIf(bucket -> { - long totalDocs = bucket.getDocCount(); - long softDeletedDocs = ((ParsedFilter) bucket.getAggregations().get("removed")).getDocCount(); - return totalDocs == softDeletedDocs; - }); + buckets.removeIf( + bucket -> { + long totalDocs = bucket.getDocCount(); + long softDeletedDocs = + ((ParsedFilter) bucket.getAggregations().get("removed")).getDocCount(); + return totalDocs == softDeletedDocs; + }); } // TODO(gabe-lyons): add sample urns - return buckets.stream().map(bucket -> { - IngestionRunSummary entry = new IngestionRunSummary(); - entry.setRunId(bucket.getKeyAsString()); - entry.setTimestamp((long) ((ParsedMax) bucket.getAggregations().get("maxTimestamp")).getValue()); - entry.setRows(bucket.getDocCount()); - return entry; - }).collect(Collectors.toList()); + return buckets.stream() + .map( + bucket -> { + IngestionRunSummary entry = new IngestionRunSummary(); + entry.setRunId(bucket.getKeyAsString()); + entry.setTimestamp( + (long) ((ParsedMax) bucket.getAggregations().get("maxTimestamp")).getValue()); + entry.setRows(bucket.getDocCount()); + return entry; + }) + .collect(Collectors.toList()); } @Override @@ -215,8 +238,11 @@ public void configure() { @Override public List<ReindexConfig> buildReindexConfigs() throws IOException { - return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME), - SystemMetadataMappingsBuilder.getMappings(), Collections.emptyMap())); + return List.of( + _indexBuilder.buildReindexState( + _indexConvention.getIndexName(INDEX_NAME), + SystemMetadataMappingsBuilder.getMappings(), + Collections.emptyMap())); } @Override @@ -227,6 +253,7 @@ public void reindexAll() { @VisibleForTesting @Override public void clear() { - _esBulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); + _esBulkProcessor.deleteByQuery( + QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java index c19283aa44ac1..6bce654fb1481 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java @@ -4,7 +4,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @AllArgsConstructor @Data public class SystemMetadataEntry { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java 
index 535610ffbf37f..6623580548706 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java @@ -5,11 +5,10 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; - @Slf4j public class SystemMetadataMappingsBuilder { - private SystemMetadataMappingsBuilder() { } + private SystemMetadataMappingsBuilder() {} public static Map<String, Object> getMappings() { Map<String, Object> mappings = new HashMap<>(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java index c0b1239ffa835..0105215565117 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java @@ -6,5 +6,4 @@ @Value @EqualsAndHashCode(callSuper = false) -public class MissingEntityAspect extends EntityAspect { -} +public class MissingEntityAspect extends EntityAspect {} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java index 9658d9d3ab036..a93d4880a7979 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline; +import static com.linkedin.common.urn.VersionedUrnUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -44,24 +47,30 @@ import javax.annotation.Nonnull; import org.apache.commons.collections.CollectionUtils; -import static com.linkedin.common.urn.VersionedUrnUtils.*; -import static com.linkedin.metadata.Constants.*; - public class TimelineServiceImpl implements TimelineService { - private static final long DEFAULT_LOOKBACK_TIME_WINDOW_MILLIS = 7 * 24 * 60 * 60 * 1000L; // 1 week lookback + private static final long DEFAULT_LOOKBACK_TIME_WINDOW_MILLIS = + 7 * 24 * 60 * 60 * 1000L; // 1 week lookback private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final long FIRST_TRANSACTION_ID = 0; private static final String BUILD_VALUE_COMPUTED = "computed"; private final AspectDao _aspectDao; private final EntityChangeEventGeneratorFactory _entityChangeEventGeneratorFactory; private final EntityRegistry _entityRegistry; - private final HashMap<String, HashMap<ChangeCategory, Set<String>>> entityTypeElementAspectRegistry = new HashMap<>(); + private final HashMap<String, HashMap<ChangeCategory, Set<String>>> + entityTypeElementAspectRegistry = new HashMap<>(); public TimelineServiceImpl(@Nonnull AspectDao 
aspectDao, @Nonnull EntityRegistry entityRegistry) { this._aspectDao = aspectDao; @@ -76,56 +85,97 @@ public TimelineServiceImpl(@Nonnull AspectDao aspectDao, @Nonnull EntityRegistry for (ChangeCategory elementName : ChangeCategory.values()) { Set<String> aspects = new HashSet<>(); switch (elementName) { - case TAG: { - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - aspects.add(GLOBAL_TAGS_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, GLOBAL_TAGS_ASPECT_NAME, - new GlobalTagsChangeEventGenerator()); - } + case TAG: + { + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + aspects.add(GLOBAL_TAGS_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + GLOBAL_TAGS_ASPECT_NAME, + new GlobalTagsChangeEventGenerator()); + } break; - case OWNER: { - aspects.add(OWNERSHIP_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, OWNERSHIP_ASPECT_NAME, - new OwnershipChangeEventGenerator()); - } + case OWNER: + { + aspects.add(OWNERSHIP_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + OWNERSHIP_ASPECT_NAME, + new OwnershipChangeEventGenerator()); + } break; - case DOCUMENTATION: { - aspects.add(INSTITUTIONAL_MEMORY_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, INSTITUTIONAL_MEMORY_ASPECT_NAME, - new InstitutionalMemoryChangeEventGenerator()); - aspects.add(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - new EditableDatasetPropertiesChangeEventGenerator()); - aspects.add(DATASET_PROPERTIES_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, DATASET_PROPERTIES_ASPECT_NAME, - new DatasetPropertiesChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - } + case DOCUMENTATION: + { + aspects.add(INSTITUTIONAL_MEMORY_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + new InstitutionalMemoryChangeEventGenerator()); + aspects.add(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + new EditableDatasetPropertiesChangeEventGenerator()); + 
aspects.add(DATASET_PROPERTIES_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + DATASET_PROPERTIES_ASPECT_NAME, + new DatasetPropertiesChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + } break; - case GLOSSARY_TERM: { - aspects.add(GLOSSARY_TERMS_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, GLOSSARY_TERMS_ASPECT_NAME, - new GlossaryTermsChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - } + case GLOSSARY_TERM: + { + aspects.add(GLOSSARY_TERMS_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + GLOSSARY_TERMS_ASPECT_NAME, + new GlossaryTermsChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + } break; - case TECHNICAL_SCHEMA: { - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - } + case TECHNICAL_SCHEMA: + { + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + } break; default: break; @@ -139,25 +189,34 @@ public TimelineServiceImpl(@Nonnull AspectDao aspectDao, @Nonnull EntityRegistry for (ChangeCategory elementName : ChangeCategory.values()) { Set<String> aspects = new HashSet<>(); switch (elementName) { - case OWNER: { - aspects.add(OWNERSHIP_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityTypeGlossaryTerm, elementName, OWNERSHIP_ASPECT_NAME, - new OwnershipChangeEventGenerator()); - } - break; - case DOCUMENTATION: { - aspects.add(GLOSSARY_TERM_INFO_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityTypeGlossaryTerm, elementName, GLOSSARY_TERM_INFO_ASPECT_NAME, - new GlossaryTermInfoChangeEventGenerator()); - } - break; + case OWNER: + { + aspects.add(OWNERSHIP_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityTypeGlossaryTerm, + elementName, + OWNERSHIP_ASPECT_NAME, + new OwnershipChangeEventGenerator()); + } + break; + case DOCUMENTATION: + { + aspects.add(GLOSSARY_TERM_INFO_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityTypeGlossaryTerm, + elementName, + GLOSSARY_TERM_INFO_ASPECT_NAME, + new GlossaryTermInfoChangeEventGenerator()); + } + break; default: break; } glossaryTermElementAspectRegistry.put(elementName, aspects); } entityTypeElementAspectRegistry.put(DATASET_ENTITY_NAME, datasetElementAspectRegistry); - entityTypeElementAspectRegistry.put(GLOSSARY_TERM_ENTITY_NAME, glossaryTermElementAspectRegistry); + entityTypeElementAspectRegistry.put( + GLOSSARY_TERM_ENTITY_NAME, 
glossaryTermElementAspectRegistry); } Set<String> getAspectsFromElements(String entityType, Set<ChangeCategory> elementNames) { @@ -173,15 +232,21 @@ Set<String> getAspectsFromElements(String entityType, Set<ChangeCategory> elemen @Nonnull @Override - public List<ChangeTransaction> getTimeline(@Nonnull final Urn urn, @Nonnull final Set<ChangeCategory> elementNames, - long startTimeMillis, long endTimeMillis, String startVersionStamp, String endVersionStamp, + public List<ChangeTransaction> getTimeline( + @Nonnull final Urn urn, + @Nonnull final Set<ChangeCategory> elementNames, + long startTimeMillis, + long endTimeMillis, + String startVersionStamp, + String endVersionStamp, boolean rawDiffRequested) { Set<String> aspectNames = getAspectsFromElements(urn.getEntityType(), elementNames); // TODO: Add more logic for defaults if (startVersionStamp != null && startTimeMillis != 0) { - throw new IllegalArgumentException("Cannot specify both VersionStamp start and timestamp start"); + throw new IllegalArgumentException( + "Cannot specify both VersionStamp start and timestamp start"); } if (endTimeMillis == 0) { @@ -195,58 +260,67 @@ public List<ChangeTransaction> getTimeline(@Nonnull final Urn urn, @Nonnull fina // query EntitySpec entitySpec = _entityRegistry.getEntitySpec(urn.getEntityType()); List<AspectSpec> aspectSpecs = entitySpec.getAspectSpecs(); - Set<String> fullAspectNames = aspectSpecs.stream() - .filter(aspectSpec -> !aspectSpec.isTimeseries()) - .map(AspectSpec::getName) - .collect(Collectors.toSet()); - List<EntityAspect> aspectsInRange = this._aspectDao.getAspectsInRange(urn, fullAspectNames, startTimeMillis, - endTimeMillis); + Set<String> fullAspectNames = + aspectSpecs.stream() + .filter(aspectSpec -> !aspectSpec.isTimeseries()) + .map(AspectSpec::getName) + .collect(Collectors.toSet()); + List<EntityAspect> aspectsInRange = + this._aspectDao.getAspectsInRange(urn, fullAspectNames, startTimeMillis, endTimeMillis); // Prepopulate with all versioned aspectNames -> ignore timeseries using // registry - Map<String, TreeSet<EntityAspect>> aspectRowSetMap = constructAspectRowSetMap(urn, fullAspectNames, aspectsInRange); + Map<String, TreeSet<EntityAspect>> aspectRowSetMap = + constructAspectRowSetMap(urn, fullAspectNames, aspectsInRange); - Map<Long, SortedMap<String, Long>> timestampVersionCache = constructTimestampVersionCache(aspectRowSetMap); + Map<Long, SortedMap<String, Long>> timestampVersionCache = + constructTimestampVersionCache(aspectRowSetMap); // TODO: There are some extra steps happening here, we need to clean up how // transactions get combined across differs - SortedMap<Long, List<ChangeTransaction>> semanticDiffs = aspectRowSetMap.entrySet() - .stream() - .filter(entry -> aspectNames.contains(entry.getKey())) - .map(Map.Entry::getValue) - .map(value -> computeDiffs(value, urn.getEntityType(), elementNames, rawDiffRequested)) - .collect(TreeMap::new, this::combineComputedDiffsPerTransactionId, this::combineComputedDiffsPerTransactionId); + SortedMap<Long, List<ChangeTransaction>> semanticDiffs = + aspectRowSetMap.entrySet().stream() + .filter(entry -> aspectNames.contains(entry.getKey())) + .map(Map.Entry::getValue) + .map(value -> computeDiffs(value, urn.getEntityType(), elementNames, rawDiffRequested)) + .collect( + TreeMap::new, + this::combineComputedDiffsPerTransactionId, + this::combineComputedDiffsPerTransactionId); // TODO:Move this down assignSemanticVersions(semanticDiffs); - List<ChangeTransaction> changeTransactions = 
semanticDiffs.values().stream().collect(ArrayList::new,
-        ArrayList::addAll, ArrayList::addAll);
-    List<ChangeTransaction> combinedChangeTransactions = combineTransactionsByTimestamp(changeTransactions,
-        timestampVersionCache);
+    List<ChangeTransaction> changeTransactions =
+        semanticDiffs.values().stream()
+            .collect(ArrayList::new, ArrayList::addAll, ArrayList::addAll);
+    List<ChangeTransaction> combinedChangeTransactions =
+        combineTransactionsByTimestamp(changeTransactions, timestampVersionCache);
     combinedChangeTransactions.sort(Comparator.comparing(ChangeTransaction::getTimestamp));
     return combinedChangeTransactions;
   }

   /**
-   * Constructs a map from aspect name to a sorted set of DB aspects by created
-   * timestamp. Set includes all aspects
-   * relevant to an entity and does a lookback by 1 for all aspects, creating
-   * sentinel values for when the oldest aspect
-   * possible has been retrieved or no value exists in the DB for an aspect
-   *
-   * @param urn urn of the entity
+   * Constructs a map from aspect name to a sorted set of DB aspects by created timestamp. Set
+   * includes all aspects relevant to an entity and does a lookback by 1 for all aspects, creating
+   * sentinel values for when the oldest aspect possible has been retrieved or no value exists in
+   * the DB for an aspect
+   *
+   * @param urn urn of the entity
    * @param fullAspectNames full list of aspects relevant to the entity
-   * @param aspectsInRange aspects returned by the range query by timestampm
+   * @param aspectsInRange aspects returned by the range query by timestamp
    * @return map constructed as described
    */
-  private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(Urn urn, Set<String> fullAspectNames,
-      List<EntityAspect> aspectsInRange) {
+  private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(
+      Urn urn, Set<String> fullAspectNames, List<EntityAspect> aspectsInRange) {
     Map<String, TreeSet<EntityAspect>> aspectRowSetMap = new HashMap<>();
     fullAspectNames.forEach(
-        aspectName -> aspectRowSetMap.put(aspectName, new TreeSet<>(Comparator.comparing(EntityAspect::getCreatedOn))));
-    aspectsInRange.forEach(row -> {
-      TreeSet<EntityAspect> rowList = aspectRowSetMap.get(row.getAspect());
-      rowList.add(row);
-    });
+        aspectName ->
+            aspectRowSetMap.put(
+                aspectName, new TreeSet<>(Comparator.comparing(EntityAspect::getCreatedOn))));
+    aspectsInRange.forEach(
+        row -> {
+          TreeSet<EntityAspect> rowList = aspectRowSetMap.get(row.getAspect());
+          rowList.add(row);
+        });

     // we need to pull previous versions of these aspects that are currently at a 0
     Map<String, Long> nextVersions = _aspectDao.getNextVersions(urn.toString(), fullAspectNames);
@@ -267,9 +341,11 @@ private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(Urn urn, Set
       // get the next version
       long versionToGet = 0;
       if (oldestAspect != null) {
-        versionToGet = (oldestAspect.getVersion() == 0L) ? nextVersion - 1 : oldestAspect.getVersion() - 1;
+        versionToGet =
+            (oldestAspect.getVersion() == 0L) ? 
nextVersion - 1 : oldestAspect.getVersion() - 1; } - EntityAspect row = _aspectDao.getAspect(urn.toString(), aspectMinVersion.getKey(), versionToGet); + EntityAspect row = + _aspectDao.getAspect(urn.toString(), aspectMinVersion.getKey(), versionToGet); if (row != null) { aspectRowSetMap.get(row.getAspect()).add(row); } else { @@ -281,8 +357,7 @@ private Map<String, TreeSet<EntityAspect>> constructAspectRowSetMap(Urn urn, Set } private boolean isOldestPossible(EntityAspect oldestAspect, long nextVersion) { - return (((oldestAspect.getVersion() == 0L) - && (nextVersion == 1L)) + return (((oldestAspect.getVersion() == 0L) && (nextVersion == 1L)) || (oldestAspect.getVersion() == 1L)); } @@ -295,19 +370,20 @@ private MissingEntityAspect createSentinel(String aspectName) { } /** - * Constructs a map from timestamp to a sorted map of aspect name -> version for - * use in constructing the version stamp - * - * @param aspectRowSetMap map constructed as described in - * {@link TimelineServiceImpl#constructAspectRowSetMap} + * Constructs a map from timestamp to a sorted map of aspect name -> version for use in + * constructing the version stamp + * + * @param aspectRowSetMap map constructed as described in {@link + * TimelineServiceImpl#constructAspectRowSetMap} * @return map as described */ private Map<Long, SortedMap<String, Long>> constructTimestampVersionCache( Map<String, TreeSet<EntityAspect>> aspectRowSetMap) { - Set<EntityAspect> aspects = aspectRowSetMap.values().stream() - .flatMap(TreeSet::stream) - .filter(aspect -> aspect.getVersion() != -1L) - .collect(Collectors.toSet()); + Set<EntityAspect> aspects = + aspectRowSetMap.values().stream() + .flatMap(TreeSet::stream) + .filter(aspect -> aspect.getVersion() != -1L) + .collect(Collectors.toSet()); Map<Long, SortedMap<String, Long>> timestampVersionCache = new HashMap<>(); for (EntityAspect aspect : aspects) { if (timestampVersionCache.containsKey(aspect.getCreatedOn().getTime())) { @@ -341,8 +417,11 @@ private Map<Long, SortedMap<String, Long>> constructTimestampVersionCache( return timestampVersionCache; } - private SortedMap<Long, List<ChangeTransaction>> computeDiffs(TreeSet<EntityAspect> aspectTimeline, - String entityType, Set<ChangeCategory> elementNames, boolean rawDiffsRequested) { + private SortedMap<Long, List<ChangeTransaction>> computeDiffs( + TreeSet<EntityAspect> aspectTimeline, + String entityType, + Set<ChangeCategory> elementNames, + boolean rawDiffsRequested) { EntityAspect previousValue = null; SortedMap<Long, List<ChangeTransaction>> changeTransactionsMap = new TreeMap<>(); long transactionId; @@ -350,7 +429,8 @@ private SortedMap<Long, List<ChangeTransaction>> computeDiffs(TreeSet<EntityAspe transactionId = currentValue.getCreatedOn().getTime(); if (previousValue != null) { // we skip the first element and only compare once we have two in hand - changeTransactionsMap.put(transactionId, + changeTransactionsMap.put( + transactionId, computeDiff(previousValue, currentValue, entityType, elementNames, rawDiffsRequested)); } previousValue = currentValue; @@ -358,30 +438,39 @@ private SortedMap<Long, List<ChangeTransaction>> computeDiffs(TreeSet<EntityAspe return changeTransactionsMap; } - private List<ChangeTransaction> computeDiff(@Nonnull EntityAspect previousValue, @Nonnull EntityAspect currentValue, - String entityType, Set<ChangeCategory> elementNames, boolean rawDiffsRequested) { + private List<ChangeTransaction> computeDiff( + @Nonnull EntityAspect previousValue, + @Nonnull EntityAspect currentValue, + String 
entityType, + Set<ChangeCategory> elementNames, + boolean rawDiffsRequested) { String aspectName = currentValue.getAspect(); List<ChangeTransaction> semanticChangeTransactions = new ArrayList<>(); JsonPatch rawDiff = getRawDiff(previousValue, currentValue); for (ChangeCategory element : elementNames) { EntityChangeEventGenerator entityChangeEventGenerator; - entityChangeEventGenerator = _entityChangeEventGeneratorFactory.getGenerator(entityType, element, aspectName); + entityChangeEventGenerator = + _entityChangeEventGeneratorFactory.getGenerator(entityType, element, aspectName); if (entityChangeEventGenerator != null) { try { ChangeTransaction changeTransaction = - entityChangeEventGenerator.getSemanticDiff(previousValue, currentValue, element, rawDiff, - rawDiffsRequested); + entityChangeEventGenerator.getSemanticDiff( + previousValue, currentValue, element, rawDiff, rawDiffsRequested); if (CollectionUtils.isNotEmpty(changeTransaction.getChangeEvents())) { semanticChangeTransactions.add(changeTransaction); } } catch (Exception e) { - semanticChangeTransactions.add(ChangeTransaction.builder() - .semVerChange(SemanticChangeType.EXCEPTIONAL) - .changeEvents(Collections.singletonList(ChangeEvent.builder() - .description(String.format("%s:%s", e.getClass().getName(), e.getMessage())) - .build())) - .build()); + semanticChangeTransactions.add( + ChangeTransaction.builder() + .semVerChange(SemanticChangeType.EXCEPTIONAL) + .changeEvents( + Collections.singletonList( + ChangeEvent.builder() + .description( + String.format("%s:%s", e.getClass().getName(), e.getMessage())) + .build())) + .build()); } } } @@ -401,7 +490,8 @@ private JsonPatch getRawDiff(EntityAspect previousValue, EntityAspect currentVal } } - private void combineComputedDiffsPerTransactionId(@Nonnull SortedMap<Long, List<ChangeTransaction>> semanticDiffs, + private void combineComputedDiffsPerTransactionId( + @Nonnull SortedMap<Long, List<ChangeTransaction>> semanticDiffs, @Nonnull SortedMap<Long, List<ChangeTransaction>> computedDiffs) { for (Map.Entry<Long, List<ChangeTransaction>> entry : computedDiffs.entrySet()) { if (!semanticDiffs.containsKey(entry.getKey())) { @@ -414,18 +504,22 @@ private void combineComputedDiffsPerTransactionId(@Nonnull SortedMap<Long, List< } } - private void assignSemanticVersions(SortedMap<Long, List<ChangeTransaction>> changeTransactionsMap) { + private void assignSemanticVersions( + SortedMap<Long, List<ChangeTransaction>> changeTransactionsMap) { SemanticVersion curGroupVersion = null; long transactionId = FIRST_TRANSACTION_ID - 1; for (Map.Entry<Long, List<ChangeTransaction>> entry : changeTransactionsMap.entrySet()) { if (transactionId >= entry.getKey()) { - throw new IllegalArgumentException(String.format("transactionId should be < previous. %s >= %s", - transactionId, entry.getKey())); + throw new IllegalArgumentException( + String.format( + "transactionId should be < previous. 
%s >= %s", transactionId, entry.getKey())); } transactionId = entry.getKey(); SemanticChangeType highestChangeInGroup = SemanticChangeType.NONE; - ChangeTransaction highestChangeTransaction = entry.getValue().stream() - .max(Comparator.comparing(ChangeTransaction::getSemVerChange)).orElse(null); + ChangeTransaction highestChangeTransaction = + entry.getValue().stream() + .max(Comparator.comparing(ChangeTransaction::getSemVerChange)) + .orElse(null); if (highestChangeTransaction != null) { highestChangeInGroup = highestChangeTransaction.getSemVerChange(); } @@ -436,8 +530,8 @@ private void assignSemanticVersions(SortedMap<Long, List<ChangeTransaction>> cha } } - private SemanticVersion getGroupSemanticVersion(SemanticChangeType highestChangeInGroup, - SemanticVersion previousVersion) { + private SemanticVersion getGroupSemanticVersion( + SemanticChangeType highestChangeInGroup, SemanticVersion previousVersion) { if (previousVersion == null) { // Start with all 0s if there is no previous version. return SemanticVersion.builder() @@ -477,10 +571,11 @@ private SemanticVersion getGroupSemanticVersion(SemanticChangeType highestChange return previousVersion; } - private List<ChangeTransaction> combineTransactionsByTimestamp(List<ChangeTransaction> changeTransactions, + private List<ChangeTransaction> combineTransactionsByTimestamp( + List<ChangeTransaction> changeTransactions, Map<Long, SortedMap<String, Long>> timestampVersionCache) { - Map<Long, List<ChangeTransaction>> transactionsByTimestamp = changeTransactions.stream() - .collect(Collectors.groupingBy(ChangeTransaction::getTimestamp)); + Map<Long, List<ChangeTransaction>> transactionsByTimestamp = + changeTransactions.stream().collect(Collectors.groupingBy(ChangeTransaction::getTimestamp)); List<ChangeTransaction> combinedChangeTransactions = new ArrayList<>(); for (List<ChangeTransaction> transactionList : transactionsByTimestamp.values()) { if (!transactionList.isEmpty()) { @@ -490,14 +585,17 @@ private List<ChangeTransaction> combineTransactionsByTimestamp(List<ChangeTransa for (int i = 1; i < transactionList.size(); i++) { ChangeTransaction element = transactionList.get(i); result.getChangeEvents().addAll(element.getChangeEvents()); - maxSemanticChangeType = maxSemanticChangeType.compareTo(element.getSemVerChange()) >= 0 - ? maxSemanticChangeType - : element.getSemVerChange(); - maxSemVer = maxSemVer.compareTo(element.getSemVer()) >= 0 ? maxSemVer : element.getSemVer(); + maxSemanticChangeType = + maxSemanticChangeType.compareTo(element.getSemVerChange()) >= 0 + ? maxSemanticChangeType + : element.getSemVerChange(); + maxSemVer = + maxSemVer.compareTo(element.getSemVer()) >= 0 ? 
maxSemVer : element.getSemVer(); } result.setSemVerChange(maxSemanticChangeType); result.setSemanticVersion(maxSemVer); - result.setVersionStamp(constructVersionStamp(timestampVersionCache.get(result.getTimestamp()))); + result.setVersionStamp( + constructVersionStamp(timestampVersionCache.get(result.getTimestamp()))); combinedChangeTransactions.add(result); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java index a9c5d56a7e445..84308d9b2311f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java @@ -27,8 +27,7 @@ public DatasetSchemaFieldChangeEvent( String description, String fieldPath, Urn fieldUrn, - boolean nullable - ) { + boolean nullable) { super( entityUrn, category, @@ -37,12 +36,9 @@ public DatasetSchemaFieldChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "fieldUrn", fieldUrn.toString(), - "nullable", nullable - ), + "nullable", nullable), auditStamp, semVerChange, - description - ); + description); } } - diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java index eddacf3714f61..f1d9862fb33a2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java @@ -4,8 +4,8 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeCategory; -import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.ChangeEvent; +import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.SemanticChangeType; import lombok.Builder; import lombok.EqualsAndHashCode; @@ -27,8 +27,7 @@ public SchemaFieldGlossaryTermChangeEvent( String description, String fieldPath, Urn parentUrn, - Urn termUrn - ) { + Urn termUrn) { super( entityUrn, category, @@ -37,11 +36,9 @@ public SchemaFieldGlossaryTermChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "parentUrn", parentUrn.toString(), - "termUrn", termUrn.toString() - ), + "termUrn", termUrn.toString()), auditStamp, semVerChange, - description - ); + description); } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java index b4553f9048e3a..10e6ae6ca4af8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java @@ -4,8 +4,8 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeCategory; -import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.ChangeEvent; +import 
com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.SemanticChangeType; import lombok.Builder; import lombok.EqualsAndHashCode; @@ -27,8 +27,7 @@ public SchemaFieldTagChangeEvent( String description, String fieldPath, Urn parentUrn, - Urn tagUrn - ) { + Urn tagUrn) { super( entityUrn, category, @@ -37,11 +36,9 @@ public SchemaFieldTagChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "parentUrn", parentUrn.toString(), - "tagUrn", tagUrn.toString() - ), + "tagUrn", tagUrn.toString()), auditStamp, semVerChange, - description - ); + description); } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java index 5a306635f6a81..33dfdb68cb9e6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java @@ -12,7 +12,6 @@ import lombok.Getter; import lombok.Value; - @EqualsAndHashCode(callSuper = true) @Value @Getter @@ -26,19 +25,15 @@ public DomainChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn domainUrn - ) { + Urn domainUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "domainUrn", domainUrn.toString() - ), + ImmutableMap.of("domainUrn", domainUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java index dfa659dad67b1..564dc63c1a678 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java @@ -13,7 +13,6 @@ import lombok.Value; import lombok.experimental.NonFinal; - @EqualsAndHashCode(callSuper = true) @Value @NonFinal @@ -28,19 +27,15 @@ public GlossaryTermChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn termUrn - ) { + Urn termUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "termUrn", termUrn.toString() - ), + ImmutableMap.of("termUrn", termUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java index b9efe7113bcfb..fc4f0327b7704 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java @@ -27,8 +27,7 @@ public OwnerChangeEvent( SemanticChangeType semVerChange, String description, Urn ownerUrn, - OwnershipType ownerType - ) { + OwnershipType ownerType) { super( entityUrn, category, @@ -36,11 +35,9 @@ public OwnerChangeEvent( modifier, ImmutableMap.of( "ownerUrn", ownerUrn.toString(), - "ownerType", ownerType.toString() - ), + "ownerType", ownerType.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java 
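The Domain, GlossaryTerm, Owner, and Tag change events above all carry their payload as a flat string-keyed parameters map (for example "domainUrn" mapped to the URN), so consumers can read events generically instead of switching on subclasses. A hedged consumer-side sketch, with a simplified event shape and the key mirroring the constructor above:

import java.util.Map;

public class ChangeEventParametersSketch {
  record ChangeEvent(String entityUrn, Map<String, String> parameters) {}

  public static void main(String[] args) {
    ChangeEvent event = new ChangeEvent(
        "urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
        Map.of("domainUrn", "urn:li:domain:marketing")); // illustrative URNs

    // Consumers look up well-known keys; absent keys simply return null.
    String domainUrn = event.parameters().get("domainUrn");
    if (domainUrn != null) {
      System.out.println(event.entityUrn() + " moved to domain " + domainUrn);
    }
  }
}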
b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java index 09dba21ff3988..b19a4a1558ab6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java @@ -13,7 +13,6 @@ import lombok.Value; import lombok.experimental.NonFinal; - @EqualsAndHashCode(callSuper = true) @Value @NonFinal @@ -28,19 +27,15 @@ public TagChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn tagUrn - ) { + Urn tagUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "tagUrn", tagUrn.toString() - ), + ImmutableMap.of("tagUrn", tagUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java index 60008826afc61..f83eded55ff9c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java @@ -5,20 +5,13 @@ import lombok.AllArgsConstructor; import lombok.Value; - -/** - * Thin wrapper for an aspect value which is used within the Entity Change Event API. - */ +/** Thin wrapper for an aspect value which is used within the Entity Change Event API. */ @Value @AllArgsConstructor public class Aspect<T extends RecordTemplate> { - /** - * The aspect value itself. - */ + /** The aspect value itself. */ T value; - /** - * System metadata - */ + /** System metadata */ SystemMetadata systemMetadata; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java index b615189fe7314..1a8e54e5baf4a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSortedMap; import com.linkedin.assertion.AssertionResult; import com.linkedin.assertion.AssertionRunEvent; @@ -14,10 +16,8 @@ import java.util.Map; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class AssertionRunEventChangeEventGenerator extends EntityChangeEventGenerator<AssertionRunEvent> { +public class AssertionRunEventChangeEventGenerator + extends EntityChangeEventGenerator<AssertionRunEvent> { @Override public List<ChangeEvent> getChangeEvents( @Nonnull Urn urn, @@ -39,22 +39,22 @@ private List<ChangeEvent> computeDiffs( boolean isNewCompleted = isCompleted(newAspect); if (isNewCompleted && !isPreviousCompleted) { - return Collections.singletonList(ChangeEvent.builder() - .category(ChangeCategory.RUN) - .operation(ChangeOperation.COMPLETED) - .auditStamp(auditStamp) - .entityUrn(entityUrn) - .parameters(buildParameters(newAspect)) - .build()); + return Collections.singletonList( + ChangeEvent.builder() + .category(ChangeCategory.RUN) + .operation(ChangeOperation.COMPLETED) + .auditStamp(auditStamp) + .entityUrn(entityUrn) + 
.parameters(buildParameters(newAspect)) + .build()); } return Collections.emptyList(); } private boolean isCompleted(final AssertionRunEvent assertionRunEvent) { - return assertionRunEvent != null && assertionRunEvent.getStatus() - .toString() - .equals(ASSERTION_RUN_EVENT_STATUS_COMPLETE); + return assertionRunEvent != null + && assertionRunEvent.getStatus().toString().equals(ASSERTION_RUN_EVENT_STATUS_COMPLETE); } @Nonnull diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java index e5237cc5abc39..f6192294e5701 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java @@ -13,25 +13,29 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class ChangeEventGeneratorUtils { - public static Urn getSchemaFieldUrn(@Nonnull String datasetUrnStr, @Nonnull String schemaFieldPath) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrnStr, schemaFieldPath)); + public static Urn getSchemaFieldUrn( + @Nonnull String datasetUrnStr, @Nonnull String schemaFieldPath) { + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrnStr, schemaFieldPath)); } public static Urn getSchemaFieldUrn(@Nonnull Urn datasetUrn, @Nonnull String schemaFieldPath) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrn.toString(), schemaFieldPath)); + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrn.toString(), schemaFieldPath)); } public static Urn getSchemaFieldUrn(@Nonnull Urn datasetUrn, @Nonnull SchemaField schemaField) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrn, getFieldPathV1(schemaField))); + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrn, getFieldPathV1(schemaField))); } public static String getFieldPathV1(@Nonnull SchemaField field) { - String[] v1PathTokens = Arrays.stream(field.getFieldPath().split("\\.")) - .filter(x -> !(x.startsWith("[") || x.endsWith("]"))) - .toArray(String[]::new); + String[] v1PathTokens = + Arrays.stream(field.getFieldPath().split("\\.")) + .filter(x -> !(x.startsWith("[") || x.endsWith("]"))) + .toArray(String[]::new); return String.join(".", v1PathTokens); } @@ -42,19 +46,22 @@ public static List<ChangeEvent> convertEntityTagChangeEvents( return entityTagChangeEvents.stream() .filter(entityTagChangeEvent -> entityTagChangeEvent instanceof TagChangeEvent) .map(entityTagChangeEvent -> (TagChangeEvent) entityTagChangeEvent) - .map(entityTagChangeEvent -> - SchemaFieldTagChangeEvent.schemaFieldTagChangeEventBuilder() - .modifier(entityTagChangeEvent.getModifier()) - .entityUrn(entityTagChangeEvent.getEntityUrn()) - .category(entityTagChangeEvent.getCategory()) - .operation(entityTagChangeEvent.getOperation()) - .semVerChange(entityTagChangeEvent.getSemVerChange()) - .description(entityTagChangeEvent.getDescription()) - .tagUrn(UrnUtils.getUrn((String) entityTagChangeEvent.getParameters().get("tagUrn"))) - .auditStamp(entityTagChangeEvent.getAuditStamp()) - .fieldPath(fieldPath) - .parentUrn(parentUrn) - .build()) + .map( + entityTagChangeEvent -> + SchemaFieldTagChangeEvent.schemaFieldTagChangeEventBuilder() + .modifier(entityTagChangeEvent.getModifier()) + 
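getFieldPathV1 above strips the bracketed v2 annotations (tokens such as [version=2.0] or [type=string]) out of a dotted field path, leaving the plain v1 path that goes into schemaField URNs; note the filter uses || rather than && precisely so that annotations containing dots, like [version=2.0], are still dropped after splitting. A self-contained sketch of the same token filter with a worked example:

import java.util.Arrays;

public class FieldPathV1Sketch {
  static String toV1(String fieldPathV2) {
    // Drop every dotted token that looks like a bracketed annotation, keep real field names.
    // "[version=2.0]" splits into "[version=2" and "0]", so matching either end catches both.
    String[] v1Tokens = Arrays.stream(fieldPathV2.split("\\."))
        .filter(t -> !(t.startsWith("[") || t.endsWith("]")))
        .toArray(String[]::new);
    return String.join(".", v1Tokens);
  }

  public static void main(String[] args) {
    // Prints "user.name"
    System.out.println(toV1("[version=2.0].[type=struct].user.[type=string].name"));
  }
}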
.entityUrn(entityTagChangeEvent.getEntityUrn()) + .category(entityTagChangeEvent.getCategory()) + .operation(entityTagChangeEvent.getOperation()) + .semVerChange(entityTagChangeEvent.getSemVerChange()) + .description(entityTagChangeEvent.getDescription()) + .tagUrn( + UrnUtils.getUrn( + (String) entityTagChangeEvent.getParameters().get("tagUrn"))) + .auditStamp(entityTagChangeEvent.getAuditStamp()) + .fieldPath(fieldPath) + .parentUrn(parentUrn) + .build()) .collect(Collectors.toList()); } @@ -63,23 +70,30 @@ public static List<ChangeEvent> convertEntityGlossaryTermChangeEvents( @Nonnull Urn parentUrn, @Nonnull List<ChangeEvent> entityGlossaryTermChangeEvents) { return entityGlossaryTermChangeEvents.stream() - .filter(entityGlossaryTermChangeEvent -> entityGlossaryTermChangeEvent instanceof GlossaryTermChangeEvent) - .map(entityGlossaryTermChangeEvent -> (GlossaryTermChangeEvent) entityGlossaryTermChangeEvent) - .map(entityGlossaryTermChangeEvent -> - SchemaFieldGlossaryTermChangeEvent.schemaFieldGlossaryTermChangeEventBuilder() - .modifier(entityGlossaryTermChangeEvent.getModifier()) - .entityUrn(entityGlossaryTermChangeEvent.getEntityUrn()) - .category(entityGlossaryTermChangeEvent.getCategory()) - .operation(entityGlossaryTermChangeEvent.getOperation()) - .semVerChange(entityGlossaryTermChangeEvent.getSemVerChange()) - .description(entityGlossaryTermChangeEvent.getDescription()) - .termUrn(UrnUtils.getUrn((String) entityGlossaryTermChangeEvent.getParameters().get("termUrn"))) - .auditStamp(entityGlossaryTermChangeEvent.getAuditStamp()) - .fieldPath(fieldPath) - .parentUrn(parentUrn) - .build()) + .filter( + entityGlossaryTermChangeEvent -> + entityGlossaryTermChangeEvent instanceof GlossaryTermChangeEvent) + .map( + entityGlossaryTermChangeEvent -> + (GlossaryTermChangeEvent) entityGlossaryTermChangeEvent) + .map( + entityGlossaryTermChangeEvent -> + SchemaFieldGlossaryTermChangeEvent.schemaFieldGlossaryTermChangeEventBuilder() + .modifier(entityGlossaryTermChangeEvent.getModifier()) + .entityUrn(entityGlossaryTermChangeEvent.getEntityUrn()) + .category(entityGlossaryTermChangeEvent.getCategory()) + .operation(entityGlossaryTermChangeEvent.getOperation()) + .semVerChange(entityGlossaryTermChangeEvent.getSemVerChange()) + .description(entityGlossaryTermChangeEvent.getDescription()) + .termUrn( + UrnUtils.getUrn( + (String) entityGlossaryTermChangeEvent.getParameters().get("termUrn"))) + .auditStamp(entityGlossaryTermChangeEvent.getAuditStamp()) + .fieldPath(fieldPath) + .parentUrn(parentUrn) + .build()) .collect(Collectors.toList()); } - private ChangeEventGeneratorUtils() { } + private ChangeEventGeneratorUtils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java index a3e5a051a47e3..ca30060b5ed29 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.dataprocess.DataProcessInstanceRelationships; @@ -18,15 +20,13 @@ import javax.annotation.Nonnull; 
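convertEntityTagChangeEvents and convertEntityGlossaryTermChangeEvents above rebind an entity-scoped event onto a schema field by copying every attribute of the original event and attaching the fieldPath and parent URN. The shape of that rebind is a straightforward map over the list; a compact sketch with simplified records standing in for the repo's event classes:

import java.util.List;
import java.util.stream.Collectors;

public class RebindToFieldSketch {
  record EntityTagEvent(String entityUrn, String tagUrn) {}
  record FieldTagEvent(String entityUrn, String tagUrn, String fieldPath, String parentUrn) {}

  static List<FieldTagEvent> rebind(
      String fieldPath, String parentUrn, List<EntityTagEvent> events) {
    // Copy each entity-level event and attach the field context it applies to.
    return events.stream()
        .map(e -> new FieldTagEvent(e.entityUrn(), e.tagUrn(), fieldPath, parentUrn))
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    List<EntityTagEvent> entityEvents = List.of(
        new EntityTagEvent("urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
            "urn:li:tag:PII")); // illustrative URNs
    System.out.println(rebind("user.name", "urn:li:dataset:(...)", entityEvents));
  }
}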
import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class DataProcessInstanceRunEventChangeEventGenerator extends EntityChangeEventGenerator<DataProcessInstanceRunEvent> { private static final String COMPLETED_STATUS = "COMPLETED"; private static final String STARTED_STATUS = "STARTED"; - public DataProcessInstanceRunEventChangeEventGenerator(@Nonnull final SystemEntityClient entityClient) { + public DataProcessInstanceRunEventChangeEventGenerator( + @Nonnull final SystemEntityClient entityClient) { super(entityClient); } @@ -50,15 +50,17 @@ private List<ChangeEvent> computeDiffs( final DataProcessRunStatus newStatus = getStatus(newAspect); if (newStatus != null && !newStatus.equals(previousStatus)) { - String operationType = newStatus.equals(DataProcessRunStatus.COMPLETE) ? COMPLETED_STATUS : STARTED_STATUS; - - return Collections.singletonList(ChangeEvent.builder() - .category(ChangeCategory.RUN) - .operation(ChangeOperation.valueOf(operationType)) - .auditStamp(auditStamp) - .entityUrn(entityUrn) - .parameters(buildParameters(newAspect, entityUrn)) - .build()); + String operationType = + newStatus.equals(DataProcessRunStatus.COMPLETE) ? COMPLETED_STATUS : STARTED_STATUS; + + return Collections.singletonList( + ChangeEvent.builder() + .category(ChangeCategory.RUN) + .operation(ChangeOperation.valueOf(operationType)) + .auditStamp(auditStamp) + .entityUrn(entityUrn) + .parameters(buildParameters(newAspect, entityUrn)) + .build()); } return Collections.emptyList(); @@ -70,8 +72,8 @@ private DataProcessRunStatus getStatus(DataProcessInstanceRunEvent dataProcessIn } @Nonnull - private Map<String, Object> buildParameters(@Nonnull final DataProcessInstanceRunEvent runEvent, - @Nonnull final String entityUrnString) { + private Map<String, Object> buildParameters( + @Nonnull final DataProcessInstanceRunEvent runEvent, @Nonnull final String entityUrnString) { final Map<String, Object> parameters = new HashMap<>(); if (runEvent.hasAttempt()) { parameters.put(ATTEMPT_KEY, runEvent.getAttempt()); @@ -106,8 +108,9 @@ private DataProcessInstanceRelationships getRelationships(@Nonnull final String EntityResponse entityResponse; try { entityUrn = Urn.createFromString(entityUrnString); - entityResponse = _entityClient.getV2(entityUrn, - Collections.singleton(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME)); + entityResponse = + _entityClient.getV2( + entityUrn, Collections.singleton(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME)); } catch (Exception e) { return null; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java index ddfa6530c6999..850df81675b8c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -17,46 +20,55 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static 
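computeDiffs above emits a RUN event only on a status transition: when the new aspect's status differs from the previous one, COMPLETE maps to a COMPLETED operation and anything else to STARTED. A minimal sketch of that transition check (the enum values are stand-ins for the repo's DataProcessRunStatus):

public class RunStatusTransitionSketch {
  enum RunStatus { STARTED, COMPLETE }

  static String eventFor(RunStatus previous, RunStatus current) {
    // No event unless the status actually changed.
    if (current == null || current.equals(previous)) {
      return null;
    }
    return current == RunStatus.COMPLETE ? "COMPLETED" : "STARTED";
  }

  public static void main(String[] args) {
    System.out.println(eventFor(null, RunStatus.STARTED));               // STARTED
    System.out.println(eventFor(RunStatus.STARTED, RunStatus.COMPLETE)); // COMPLETED
    System.out.println(eventFor(RunStatus.COMPLETE, RunStatus.COMPLETE)); // null: no transition
  }
}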
com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; - - -public class DatasetPropertiesChangeEventGenerator extends EntityChangeEventGenerator<DatasetProperties> { - private static List<ChangeEvent> computeDiffs(DatasetProperties baseDatasetProperties, - @Nonnull DatasetProperties targetDatasetProperties, @Nonnull String entityUrn, AuditStamp auditStamp) { +public class DatasetPropertiesChangeEventGenerator + extends EntityChangeEventGenerator<DatasetProperties> { + private static List<ChangeEvent> computeDiffs( + DatasetProperties baseDatasetProperties, + @Nonnull DatasetProperties targetDatasetProperties, + @Nonnull String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; if (baseDescription == null && targetDescription != null) { // Description added - changeEvents.add(ChangeEvent.builder().entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) + .auditStamp(auditStamp) + .build()); } else if (baseDescription != null && targetDescription == null) { // Description removed. - changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { // Description has been modified. 
- changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .auditStamp(auditStamp) + .build()); } return changeEvents; } @@ -70,17 +82,23 @@ private static DatasetProperties getDatasetPropertiesFromAspect(EntityAspect ent } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME) || !currentValue.getAspect() - .equals(DATASET_PROPERTIES_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME) + || !currentValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + DATASET_PROPERTIES_ASPECT_NAME); } List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { DatasetProperties baseDatasetProperties = getDatasetPropertiesFromAspect(previousValue); DatasetProperties targetDatasetProperties = getDatasetPropertiesFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs( + baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java index 3ef6f51f99203..59516bfae0533 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java @@ -12,14 +12,16 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Differ responsible for determining whether an entity has been soft-deleted or soft-created. - */ +/** Differ responsible for determining whether an entity has been deprecated or un-deprecated.
*/ public class DeprecationChangeEventGenerator extends EntityChangeEventGenerator<Deprecation> { @Override - public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect, - @Nonnull Aspect<Deprecation> from, @Nonnull Aspect<Deprecation> to, @Nonnull AuditStamp auditStamp) { + public List<ChangeEvent> getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect<Deprecation> from, + @Nonnull Aspect<Deprecation> to, + @Nonnull AuditStamp auditStamp) { return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); } @@ -31,19 +33,21 @@ private List<ChangeEvent> computeDiffs( // Ensure that it is the deprecation status which has actually been changed. - // If the entity was not previously deprecated, but is now deprecated, then return a deprecated event. + // If the entity was not previously deprecated, but is now deprecated, then return a deprecated + // event. if (!isDeprecated(baseDeprecation) && isDeprecated(targetDeprecation)) { return Collections.singletonList( ChangeEvent.builder() - .category(ChangeCategory.DEPRECATION) - .operation(ChangeOperation.MODIFY) - .entityUrn(entityUrn) - .auditStamp(auditStamp) - .parameters(ImmutableMap.of("status", "DEPRECATED")) - .build()); + .category(ChangeCategory.DEPRECATION) + .operation(ChangeOperation.MODIFY) + .entityUrn(entityUrn) + .auditStamp(auditStamp) + .parameters(ImmutableMap.of("status", "DEPRECATED")) + .build()); } - // If the entity was previously deprecated, but is not not deprecated, then return a un-deprecated event. + // If the entity was previously deprecated, but is no longer deprecated, then return an + // un-deprecated event. if (isDeprecated(baseDeprecation) && !isDeprecated(targetDeprecation)) { return Collections.singletonList( ChangeEvent.builder() diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java index a10565a7c958b..1ffcd3cfc2ba4 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -16,30 +18,37 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class EditableDatasetPropertiesChangeEventGenerator extends EntityChangeEventGenerator<EditableDatasetProperties> { public static final String DESCRIPTION_ADDED = "Documentation for '%s' has been added: '%s'."; public static final String DESCRIPTION_REMOVED = "Documentation for '%s' has been removed: '%s'."; - public static final String DESCRIPTION_CHANGED = "Documentation of '%s' has been changed from '%s' to '%s'."; + public static final String DESCRIPTION_CHANGED = + "Documentation of '%s' has been changed from '%s' to '%s'."; - private static List<ChangeEvent>
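The deprecation differ above reduces to a boolean edge detector: an event fires only when isDeprecated flips between the two aspect versions. A tiny sketch of the two transitions (the returned status strings are illustrative, not the repo's constants):

public class DeprecationToggleSketch {
  static String diff(boolean wasDeprecated, boolean isDeprecated) {
    if (!wasDeprecated && isDeprecated) {
      return "DEPRECATED";    // newly deprecated
    }
    if (wasDeprecated && !isDeprecated) {
      return "UN-DEPRECATED"; // deprecation lifted
    }
    return null; // no change in deprecation status, so no event
  }

  public static void main(String[] args) {
    System.out.println(diff(false, true)); // DEPRECATED
    System.out.println(diff(true, false)); // UN-DEPRECATED
    System.out.println(diff(true, true));  // null
  }
}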
computeDiffs( + EditableDatasetProperties baseDatasetProperties, + EditableDatasetProperties targetDatasetProperties, + String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); ChangeEvent descriptionChangeEvent = - getDescriptionChangeEvent(baseDatasetProperties, targetDatasetProperties, entityUrn, auditStamp); + getDescriptionChangeEvent( + baseDatasetProperties, targetDatasetProperties, entityUrn, auditStamp); if (descriptionChangeEvent != null) { changeEvents.add(descriptionChangeEvent); } return changeEvents; } - private static ChangeEvent getDescriptionChangeEvent(EditableDatasetProperties baseDatasetProperties, - EditableDatasetProperties targetDatasetProperties, String entityUrn, AuditStamp auditStamp) { - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; + private static ChangeEvent getDescriptionChangeEvent( + EditableDatasetProperties baseDatasetProperties, + EditableDatasetProperties targetDatasetProperties, + String entityUrn, + AuditStamp auditStamp) { + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; if (baseDescription == null && targetDescription != null) { // Description added return ChangeEvent.builder() @@ -60,45 +69,59 @@ private static ChangeEvent getDescriptionChangeEvent(EditableDatasetProperties b .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) .auditStamp(auditStamp) .build(); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { // Description has been modified. 
return ChangeEvent.builder() .entityUrn(entityUrn) .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.MODIFY) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) .auditStamp(auditStamp) .build(); } return null; } - private static EditableDatasetProperties getEditableDatasetPropertiesFromAspect(EntityAspect entityAspect) { + private static EditableDatasetProperties getEditableDatasetPropertiesFromAspect( + EntityAspect entityAspect) { if (entityAspect != null && entityAspect.getMetadata() != null) { - return RecordUtils.toRecordTemplate(EditableDatasetProperties.class, entityAspect.getMetadata()); + return RecordUtils.toRecordTemplate( + EditableDatasetProperties.class, entityAspect.getMetadata()); } return null; } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME) || !currentValue.getAspect() - .equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)) { - throw new IllegalArgumentException("Aspect is not " + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); + if (!previousValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME) + || !currentValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)) { + throw new IllegalArgumentException( + "Aspect is not " + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); } List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { - EditableDatasetProperties baseDatasetProperties = getEditableDatasetPropertiesFromAspect(previousValue); - EditableDatasetProperties targetDatasetProperties = getEditableDatasetPropertiesFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); + EditableDatasetProperties baseDatasetProperties = + getEditableDatasetPropertiesFromAspect(previousValue); + EditableDatasetProperties targetDatasetProperties = + getEditableDatasetPropertiesFromAspect(currentValue); + changeEvents.addAll( + computeDiffs( + baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
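Both description differs above follow the same three-way comparison: null to value is an ADD, value to null a REMOVE, and two different non-null values a MODIFY, each rendered through the corresponding DESCRIPTION_* template. A condensed sketch of that decision table:

public class DescriptionDiffSketch {
  static String diff(String base, String target) {
    if (base == null && target != null) {
      return "ADD: '" + target + "'";
    }
    if (base != null && target == null) {
      return "REMOVE: '" + base + "'";
    }
    if (base != null && !base.equals(target)) {
      return "MODIFY: '" + base + "' -> '" + target + "'";
    }
    return null; // both null, or unchanged: no event
  }

  public static void main(String[] args) {
    System.out.println(diff(null, "orders table"));                 // ADD
    System.out.println(diff("orders table", null));                 // REMOVE
    System.out.println(diff("orders table", "orders fact table"));  // MODIFY
  }
}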
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java index 4a1de4c3421ed..1f094bb6ca989 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -16,7 +19,6 @@ import com.linkedin.schema.EditableSchemaFieldInfo; import com.linkedin.schema.EditableSchemaFieldInfoArray; import com.linkedin.schema.EditableSchemaMetadata; - import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -27,11 +29,8 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; - - -public class EditableSchemaMetadataChangeEventGenerator extends EntityChangeEventGenerator<EditableSchemaMetadata> { +public class EditableSchemaMetadataChangeEventGenerator + extends EntityChangeEventGenerator<EditableSchemaMetadata> { public static final String FIELD_DOCUMENTATION_ADDED_FORMAT = "Documentation for the field '%s' of '%s' has been added: '%s'"; public static final String FIELD_DOCUMENTATION_REMOVED_FORMAT = @@ -42,45 +41,59 @@ public class EditableSchemaMetadataChangeEventGenerator extends EntityChangeEven Stream.of(ChangeCategory.DOCUMENTATION, ChangeCategory.TAG, ChangeCategory.GLOSSARY_TERM) .collect(Collectors.toSet()); - private static void sortEditableSchemaMetadataByFieldPath(EditableSchemaMetadata editableSchemaMetadata) { + private static void sortEditableSchemaMetadataByFieldPath( + EditableSchemaMetadata editableSchemaMetadata) { if (editableSchemaMetadata == null) { return; } List<EditableSchemaFieldInfo> editableSchemaFieldInfos = new ArrayList<>(editableSchemaMetadata.getEditableSchemaFieldInfo()); editableSchemaFieldInfos.sort(Comparator.comparing(EditableSchemaFieldInfo::getFieldPath)); - editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray(editableSchemaFieldInfos)); + editableSchemaMetadata.setEditableSchemaFieldInfo( + new EditableSchemaFieldInfoArray(editableSchemaFieldInfos)); } - private static List<ChangeEvent> getAllChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, String entityUrn, ChangeCategory changeCategory, + private static List<ChangeEvent> getAllChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + String entityUrn, + ChangeCategory changeCategory, AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); Urn datasetFieldUrn = getDatasetFieldUrn(baseFieldInfo, targetFieldInfo, entityUrn); if (changeCategory == ChangeCategory.DOCUMENTATION) { - ChangeEvent documentationChangeEvent = getDocumentationChangeEvent(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp); + ChangeEvent documentationChangeEvent = + getDocumentationChangeEvent(baseFieldInfo, 
targetFieldInfo, datasetFieldUrn, auditStamp); if (documentationChangeEvent != null) { changeEvents.add(documentationChangeEvent); } } if (changeCategory == ChangeCategory.TAG) { - changeEvents.addAll(getTagChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); + changeEvents.addAll( + getTagChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); } if (changeCategory == ChangeCategory.GLOSSARY_TERM) { - changeEvents.addAll(getGlossaryTermChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); + changeEvents.addAll( + getGlossaryTermChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); } return changeEvents; } - private static List<ChangeEvent> computeDiffs(EditableSchemaMetadata baseEditableSchemaMetadata, - EditableSchemaMetadata targetEditableSchemaMetadata, String entityUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + EditableSchemaMetadata baseEditableSchemaMetadata, + EditableSchemaMetadata targetEditableSchemaMetadata, + String entityUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { sortEditableSchemaMetadataByFieldPath(baseEditableSchemaMetadata); sortEditableSchemaMetadataByFieldPath(targetEditableSchemaMetadata); List<ChangeEvent> changeEvents = new ArrayList<>(); EditableSchemaFieldInfoArray baseFieldInfos = - (baseEditableSchemaMetadata != null) ? baseEditableSchemaMetadata.getEditableSchemaFieldInfo() + (baseEditableSchemaMetadata != null) + ? baseEditableSchemaMetadata.getEditableSchemaFieldInfo() : new EditableSchemaFieldInfoArray(); - EditableSchemaFieldInfoArray targetFieldInfos = targetEditableSchemaMetadata.getEditableSchemaFieldInfo(); + EditableSchemaFieldInfoArray targetFieldInfos = + targetEditableSchemaMetadata.getEditableSchemaFieldInfo(); int baseIdx = 0; int targetIdx = 0; while (baseIdx < baseFieldInfos.size() && targetIdx < targetFieldInfos.size()) { @@ -88,16 +101,20 @@ private static List<ChangeEvent> computeDiffs(EditableSchemaMetadata baseEditabl EditableSchemaFieldInfo targetFieldInfo = targetFieldInfos.get(targetIdx); int comparison = baseFieldInfo.getFieldPath().compareTo(targetFieldInfo.getFieldPath()); if (comparison == 0) { - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents( + baseFieldInfo, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++baseIdx; ++targetIdx; } else if (comparison < 0) { // EditableFieldInfo got removed. - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); ++baseIdx; } else { // EditableFieldInfo got added. 
- changeEvents.addAll(getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++targetIdx; } } @@ -105,29 +122,36 @@ private static List<ChangeEvent> computeDiffs(EditableSchemaMetadata baseEditabl while (baseIdx < baseFieldInfos.size()) { // Handle removed baseFieldInfo EditableSchemaFieldInfo baseFieldInfo = baseFieldInfos.get(baseIdx); - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); ++baseIdx; } while (targetIdx < targetFieldInfos.size()) { // Handle newly added targetFieldInfo EditableSchemaFieldInfo targetFieldInfo = targetFieldInfos.get(targetIdx); - changeEvents.addAll(getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++targetIdx; } return changeEvents; } - private static EditableSchemaMetadata getEditableSchemaMetadataFromAspect(EntityAspect entityAspect) { + private static EditableSchemaMetadata getEditableSchemaMetadataFromAspect( + EntityAspect entityAspect) { if (entityAspect != null && entityAspect.getMetadata() != null) { return RecordUtils.toRecordTemplate(EditableSchemaMetadata.class, entityAspect.getMetadata()); } return null; } - private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { + private static ChangeEvent getDocumentationChangeEvent( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { String baseFieldDescription = (baseFieldInfo != null) ? baseFieldInfo.getDescription() : null; - String targetFieldDescription = (targetFieldInfo != null) ? targetFieldInfo.getDescription() : null; + String targetFieldDescription = + (targetFieldInfo != null) ? 
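computeDiffs above depends on both field lists being pre-sorted by fieldPath, then walks them with two indices: equal paths are compared in place, a smaller base path means a removal, a larger one an addition, and the trailing loops drain whichever list still has entries. The same merge skeleton, minimally, over sorted strings:

import java.util.List;

public class SortedDiffSketch {
  public static void main(String[] args) {
    List<String> base = List.of("a", "b", "d");   // must be sorted
    List<String> target = List.of("a", "c", "d"); // must be sorted
    int i = 0, j = 0;
    while (i < base.size() && j < target.size()) {
      int cmp = base.get(i).compareTo(target.get(j));
      if (cmp == 0) {
        System.out.println("unchanged: " + base.get(i)); i++; j++;
      } else if (cmp < 0) {
        System.out.println("removed: " + base.get(i)); i++;   // only in base
      } else {
        System.out.println("added: " + target.get(j)); j++;   // only in target
      }
    }
    while (i < base.size()) System.out.println("removed: " + base.get(i++));
    while (j < target.size()) System.out.println("added: " + target.get(j++));
  }
}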
targetFieldInfo.getDescription() : null; if (baseFieldDescription == null && targetFieldDescription != null) { return ChangeEvent.builder() @@ -136,8 +160,12 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.ADD) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(FIELD_DOCUMENTATION_ADDED_FORMAT, targetFieldInfo.getFieldPath(), datasetFieldUrn, - targetFieldDescription)) + .description( + String.format( + FIELD_DOCUMENTATION_ADDED_FORMAT, + targetFieldInfo.getFieldPath(), + datasetFieldUrn, + targetFieldDescription)) .auditStamp(auditStamp) .build(); } @@ -149,23 +177,32 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.REMOVE) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(FIELD_DOCUMENTATION_REMOVED_FORMAT, + .description( + String.format( + FIELD_DOCUMENTATION_REMOVED_FORMAT, Optional.ofNullable(targetFieldInfo).map(EditableSchemaFieldInfo::getFieldPath), - datasetFieldUrn, baseFieldDescription)) + datasetFieldUrn, + baseFieldDescription)) .auditStamp(auditStamp) .build(); } - if (baseFieldDescription != null && targetFieldDescription != null && !baseFieldDescription.equals( - targetFieldDescription)) { + if (baseFieldDescription != null + && targetFieldDescription != null + && !baseFieldDescription.equals(targetFieldDescription)) { return ChangeEvent.builder() .modifier(targetFieldInfo.getFieldPath()) .entityUrn(datasetFieldUrn.toString()) .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.MODIFY) .semVerChange(SemanticChangeType.PATCH) - .description(String.format(FIELD_DOCUMENTATION_UPDATED_FORMAT, targetFieldInfo.getFieldPath(), datasetFieldUrn, - baseFieldDescription, targetFieldDescription)) + .description( + String.format( + FIELD_DOCUMENTATION_UPDATED_FORMAT, + targetFieldInfo.getFieldPath(), + datasetFieldUrn, + baseFieldDescription, + targetFieldDescription)) .auditStamp(auditStamp) .build(); } @@ -173,69 +210,86 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b return null; } - private static List<ChangeEvent> getGlossaryTermChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { - GlossaryTerms baseGlossaryTerms = (baseFieldInfo != null) ? baseFieldInfo.getGlossaryTerms() : null; - GlossaryTerms targetGlossaryTerms = (targetFieldInfo != null) ? targetFieldInfo.getGlossaryTerms() : null; + private static List<ChangeEvent> getGlossaryTermChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { + GlossaryTerms baseGlossaryTerms = + (baseFieldInfo != null) ? baseFieldInfo.getGlossaryTerms() : null; + GlossaryTerms targetGlossaryTerms = + (targetFieldInfo != null) ? targetFieldInfo.getGlossaryTerms() : null; // 1. Get EntityGlossaryTermChangeEvent, then rebind into a SchemaFieldGlossaryTermChangeEvent. List<ChangeEvent> entityGlossaryTermsChangeEvents = - GlossaryTermsChangeEventGenerator.computeDiffs(baseGlossaryTerms, targetGlossaryTerms, - datasetFieldUrn.toString(), auditStamp); + GlossaryTermsChangeEventGenerator.computeDiffs( + baseGlossaryTerms, targetGlossaryTerms, datasetFieldUrn.toString(), auditStamp); if (targetFieldInfo != null || baseFieldInfo != null) { - String fieldPath = targetFieldInfo != null ? 
targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); + String fieldPath = + targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); // 2. Convert EntityGlossaryTermChangeEvent into a SchemaFieldGlossaryTermChangeEvent. return convertEntityGlossaryTermChangeEvents( - fieldPath, - datasetFieldUrn, - entityGlossaryTermsChangeEvents); + fieldPath, datasetFieldUrn, entityGlossaryTermsChangeEvents); } return Collections.emptyList(); } - private static List<ChangeEvent> getTagChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { + private static List<ChangeEvent> getTagChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { GlobalTags baseGlobalTags = (baseFieldInfo != null) ? baseFieldInfo.getGlobalTags() : null; - GlobalTags targetGlobalTags = (targetFieldInfo != null) ? targetFieldInfo.getGlobalTags() : null; + GlobalTags targetGlobalTags = + (targetFieldInfo != null) ? targetFieldInfo.getGlobalTags() : null; // 1. Get EntityTagChangeEvent, then rebind into a SchemaFieldTagChangeEvent. List<ChangeEvent> entityTagChangeEvents = - GlobalTagsChangeEventGenerator.computeDiffs(baseGlobalTags, targetGlobalTags, datasetFieldUrn.toString(), - auditStamp); + GlobalTagsChangeEventGenerator.computeDiffs( + baseGlobalTags, targetGlobalTags, datasetFieldUrn.toString(), auditStamp); if (targetFieldInfo != null || baseFieldInfo != null) { - String fieldPath = targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); + String fieldPath = + targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); // 2. Convert EntityTagChangeEvent into a SchemaFieldTagChangeEvent. 
- return convertEntityTagChangeEvents( - fieldPath, - datasetFieldUrn, - entityTagChangeEvents); + return convertEntityTagChangeEvents(fieldPath, datasetFieldUrn, entityTagChangeEvents); } return Collections.emptyList(); } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME) || !currentValue.getAspect() - .equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME) + || !currentValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - EditableSchemaMetadata baseEditableSchemaMetadata = getEditableSchemaMetadataFromAspect(previousValue); - EditableSchemaMetadata targetEditableSchemaMetadata = getEditableSchemaMetadataFromAspect(currentValue); + EditableSchemaMetadata baseEditableSchemaMetadata = + getEditableSchemaMetadataFromAspect(previousValue); + EditableSchemaMetadata targetEditableSchemaMetadata = + getEditableSchemaMetadataFromAspect(currentValue); List<ChangeEvent> changeEvents = new ArrayList<>(); if (SUPPORTED_CATEGORIES.contains(element)) { changeEvents.addAll( - computeDiffs(baseEditableSchemaMetadata, targetEditableSchemaMetadata, currentValue.getUrn(), element, null)); + computeDiffs( + baseEditableSchemaMetadata, + targetEditableSchemaMetadata, + currentValue.getUrn(), + element, + null)); } // Assess the highest change at the transaction(schema) level. @@ -264,14 +318,37 @@ public List<ChangeEvent> getChangeEvents( @Nonnull Aspect<EditableSchemaMetadata> to, @Nonnull AuditStamp auditStamp) { final List<ChangeEvent> changeEvents = new ArrayList<>(); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.DOCUMENTATION, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TAG, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TECHNICAL_SCHEMA, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.GLOSSARY_TERM, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.DOCUMENTATION, + auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TAG, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.TECHNICAL_SCHEMA, + auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.GLOSSARY_TERM, + auditStamp)); return changeEvents; } - private static Urn getDatasetFieldUrn(final EditableSchemaFieldInfo previous, final EditableSchemaFieldInfo latest, String entityUrn) { + private static Urn getDatasetFieldUrn( + final EditableSchemaFieldInfo previous, + final EditableSchemaFieldInfo latest, + String entityUrn) { return previous != null ? 
getSchemaFieldUrn(UrnUtils.getUrn(entityUrn), previous.getFieldPath()) : getSchemaFieldUrn(UrnUtils.getUrn(entityUrn), latest.getFieldPath()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java index d5539ec3d3822..0c98eefe90ef2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java @@ -13,38 +13,44 @@ import java.util.List; import javax.annotation.Nonnull; - -/** - * An abstract class to generate {@link ChangeEvent}s for a given entity aspect. - */ +/** An abstract class to generate {@link ChangeEvent}s for a given entity aspect. */ public abstract class EntityChangeEventGenerator<T extends RecordTemplate> { // TODO: Add a check for supported aspects protected SystemEntityClient _entityClient; protected Authentication _authentication; - public EntityChangeEventGenerator() { - } + public EntityChangeEventGenerator() {} public EntityChangeEventGenerator(@Nonnull final SystemEntityClient entityClient) { _entityClient = entityClient; } @Deprecated - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { // TODO: Migrate away from using getSemanticDiff. throw new UnsupportedOperationException(); } /** - * TODO: Migrate callers of the above API to below. The recommendation is to move timeline response creation into - * 2-stage. First stage generate change events, second stage derive semantic meaning + filter those change events. + * TODO: Migrate callers of the above API to below. The recommendation is to move timeline + * response creation into 2-stage. First stage generate change events, second stage derive + * semantic meaning + filter those change events. * - * Returns all {@link ChangeEvent}s computed from a raw aspect change. + * <p>Returns all {@link ChangeEvent}s computed from a raw aspect change. * - * Note that the {@link ChangeEvent} list can contain multiple {@link ChangeCategory} inside of it, - * it is expected that the caller will filter the set of events as required. + * <p>Note that the {@link ChangeEvent} list can contain multiple {@link ChangeCategory} inside of + * it, it is expected that the caller will filter the set of events as required. 
*/ - public abstract List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect, - @Nonnull Aspect<T> from, @Nonnull Aspect<T> to, @Nonnull AuditStamp auditStamp); + public abstract List<ChangeEvent> getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect<T> from, + @Nonnull Aspect<T> to, + @Nonnull AuditStamp auditStamp); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java index 330be8560c72b..824dc10ab2732 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java @@ -4,20 +4,25 @@ import java.util.HashMap; import java.util.Map; - /** - * A cheap factory for generating EntityChangeEvents, keyed by entity-type, element-type, aspect-name + * A cheap factory for generating EntityChangeEvents, keyed by entity-type, element-type, + * aspect-name */ public class EntityChangeEventGeneratorFactory { - private final Map<String, EntityChangeEventGenerator> _entityChangeEventGeneratorMap = new HashMap<>(); + private final Map<String, EntityChangeEventGenerator> _entityChangeEventGeneratorMap = + new HashMap<>(); - public void addGenerator(String entityName, ChangeCategory elementName, String aspectName, + public void addGenerator( + String entityName, + ChangeCategory elementName, + String aspectName, EntityChangeEventGenerator differ) { _entityChangeEventGeneratorMap.put(entityName + elementName.name() + aspectName, differ); } - public EntityChangeEventGenerator getGenerator(String entityName, ChangeCategory category, String aspectName) { + public EntityChangeEventGenerator getGenerator( + String entityName, ChangeCategory category, String aspectName) { return _entityChangeEventGeneratorMap.get(entityName + category.name() + aspectName); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java index 84c4343dc63ee..3a94bedcbd0ce 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java @@ -7,18 +7,15 @@ import java.util.Set; import javax.annotation.Nonnull; - -/** - * A registry that maps an aspect name to one or more {@link EntityChangeEventGenerator}s. - */ +/** A registry that maps an aspect name to one or more {@link EntityChangeEventGenerator}s. */ public class EntityChangeEventGeneratorRegistry { - private final Map<String, Set<EntityChangeEventGenerator<?>>> entityChangeEventGenerators = new HashMap<>(); + private final Map<String, Set<EntityChangeEventGenerator<?>>> entityChangeEventGenerators = + new HashMap<>(); - /** - * Registers a new EntityChangeEventGenerator for the given aspect. - */ - public void register(@Nonnull final String aspectName, + /** Registers a new EntityChangeEventGenerator for the given aspect. 
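The javadoc above sketches the intended two-stage flow: generators first emit every ChangeEvent for an aspect change, and the caller then derives semantic meaning and filters by category. A schematic sketch of that second stage, with a simplified event type standing in for the repo's classes (where exactly the filter lives is an assumption drawn from the javadoc):

import java.util.List;
import java.util.stream.Collectors;

public class TwoStageFilterSketch {
  enum Category { TAG, DOCUMENTATION, GLOSSARY_TERM }
  record Event(Category category, String description) {}

  public static void main(String[] args) {
    // Stage 1 (elsewhere): a generator emits all events for the aspect change.
    List<Event> all = List.of(
        new Event(Category.TAG, "tag added"),
        new Event(Category.DOCUMENTATION, "description changed"));

    // Stage 2: the caller filters down to the categories it was asked for.
    List<Event> requested = all.stream()
        .filter(e -> e.category() == Category.TAG)
        .collect(Collectors.toList());
    System.out.println(requested);
  }
}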
*/ + public void register( + @Nonnull final String aspectName, @Nonnull final EntityChangeEventGenerator<?> entityChangeEventGenerator) { Objects.requireNonNull(aspectName); Objects.requireNonNull(entityChangeEventGenerator); @@ -26,10 +23,9 @@ public void register(@Nonnull final String aspectName, entityChangeEventGenerators.get(aspectName).add(entityChangeEventGenerator); } - /** - * Registers a new Enity Change Generator, or null if one does not exist. - */ - public Set<EntityChangeEventGenerator<?>> getEntityChangeEventGenerators(@Nonnull final String aspectName) { + /** Returns the EntityChangeEventGenerators registered for the given aspect, or an empty set if none exist. */ + public Set<EntityChangeEventGenerator<?>> getEntityChangeEventGenerators( + @Nonnull final String aspectName) { final String key = Objects.requireNonNull(aspectName); return this.entityChangeEventGenerators.getOrDefault(key, new HashSet<>()); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java index 7055c95a73eff..c90e96d3860fb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java @@ -10,11 +10,12 @@ import java.util.List; import javax.annotation.Nonnull; - /** - * A general purpose differ which simply determines whether an entity has been created or hard deleted. + * A general purpose differ which simply determines whether an entity has been created or hard + * deleted. */ -public class EntityKeyChangeEventGenerator<K extends RecordTemplate> extends EntityChangeEventGenerator<K> { +public class EntityKeyChangeEventGenerator<K extends RecordTemplate> + extends EntityChangeEventGenerator<K> { @Override public List<ChangeEvent> getChangeEvents( @Nonnull Urn urn, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java index 460a0ae399a5f..ef40c5dc81a3c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,19 +21,20 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GlobalTagsChangeEventGenerator extends EntityChangeEventGenerator<GlobalTags> { private static final String TAG_ADDED_FORMAT = "Tag '%s' added to entity '%s'."; private static final String TAG_REMOVED_FORMAT = "Tag '%s' removed from entity '%s'."; - public static List<ChangeEvent> computeDiffs(GlobalTags baseGlobalTags, GlobalTags targetGlobalTags, String entityUrn, + public static List<ChangeEvent> computeDiffs( + GlobalTags baseGlobalTags, + GlobalTags targetGlobalTags, + String entityUrn, AuditStamp auditStamp) { sortGlobalTagsByTagUrn(baseGlobalTags); sortGlobalTagsByTagUrn(targetGlobalTags); List<ChangeEvent> changeEvents = new ArrayList<>(); - TagAssociationArray
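The registry above fans a single aspect name out to a set of generators, and lookups fall back to an empty set rather than null so callers never null-check. A minimal usage sketch against a same-shaped map (the generator type is reduced to Runnable for brevity; the real registry stores EntityChangeEventGenerator instances):

import java.util.*;

public class RegistryLookupSketch {
  private final Map<String, Set<Runnable>> generators = new HashMap<>();

  void register(String aspectName, Runnable generator) {
    // Create the bucket on first registration, then add to it.
    generators.computeIfAbsent(aspectName, k -> new HashSet<>()).add(generator);
  }

  Set<Runnable> get(String aspectName) {
    // Missing aspects yield an empty set, mirroring getOrDefault above.
    return generators.getOrDefault(aspectName, new HashSet<>());
  }

  public static void main(String[] args) {
    RegistryLookupSketch registry = new RegistryLookupSketch();
    registry.register("globalTags", () -> System.out.println("diff tags"));
    registry.get("globalTags").forEach(Runnable::run); // runs the registered generator
    System.out.println(registry.get("unknownAspect")); // [] : safe empty fallback
  }
}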
baseTags = (baseGlobalTags != null) ? baseGlobalTags.getTags() : new TagAssociationArray(); + TagAssociationArray baseTags = + (baseGlobalTags != null) ? baseGlobalTags.getTags() : new TagAssociationArray(); TagAssociationArray targetTags = (targetGlobalTags != null) ? targetGlobalTags.getTags() : new TagAssociationArray(); int baseTagIdx = 0; @@ -39,36 +42,46 @@ public static List<ChangeEvent> computeDiffs(GlobalTags baseGlobalTags, GlobalTa while (baseTagIdx < baseTags.size() && targetTagIdx < targetTags.size()) { TagAssociation baseTagAssociation = baseTags.get(baseTagIdx); TagAssociation targetTagAssociation = targetTags.get(targetTagIdx); - int comparison = baseTagAssociation.getTag().toString().compareTo(targetTagAssociation.getTag().toString()); + int comparison = + baseTagAssociation + .getTag() + .toString() + .compareTo(targetTagAssociation.getTag().toString()); if (comparison == 0) { // No change to this tag. ++baseTagIdx; ++targetTagIdx; } else if (comparison < 0) { // Tag got removed. - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(baseTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(baseTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(baseTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(baseTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++baseTagIdx; } else { // Tag got added. - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(targetTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(targetTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(targetTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(targetTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++targetTagIdx; } } @@ -76,31 +89,35 @@ public static List<ChangeEvent> computeDiffs(GlobalTags baseGlobalTags, GlobalTa while (baseTagIdx < baseTags.size()) { // Handle removed tags. 
TagAssociation baseTagAssociation = baseTags.get(baseTagIdx); - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(baseTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(baseTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(baseTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(baseTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++baseTagIdx; } while (targetTagIdx < targetTags.size()) { // Handle newly added tags. TagAssociation targetTagAssociation = targetTags.get(targetTagIdx); - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(targetTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(targetTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(targetTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(targetTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++targetTagIdx; } return changeEvents; @@ -123,10 +140,14 @@ private static GlobalTags getGlobalTagsFromAspect(EntityAspect entityAspect) { } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOBAL_TAGS_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + GLOBAL_TAGS_ASPECT_NAME); } @@ -134,7 +155,8 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec GlobalTags targetGlobalTags = getGlobalTagsFromAspect(currentValue); List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.TAG) { - changeEvents.addAll(computeDiffs(baseGlobalTags, targetGlobalTags, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseGlobalTags, targetGlobalTags, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
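
The hunks above only re-wrap GlobalTagsChangeEventGenerator; the diff logic itself is untouched. computeDiffs is a two-pointer merge over two tag arrays that have each been sorted by tag URN: equal URNs advance both cursors, a URN present only in the base array becomes a REMOVE event, a URN present only in the target array becomes an ADD event, and two trailing loops drain whichever array still has entries. The following self-contained sketch shows the same control flow with plain strings standing in for TagAssociation and ChangeEvent; diffSortedTags is a hypothetical helper written for illustration, not DataHub code.

    import java.util.ArrayList;
    import java.util.List;

    public class SortedTagDiffSketch {
      // Two-pointer diff over two ascending-sorted tag URN lists, mirroring
      // the loop structure of GlobalTagsChangeEventGenerator.computeDiffs.
      static List<String> diffSortedTags(List<String> base, List<String> target) {
        List<String> events = new ArrayList<>();
        int i = 0, j = 0;
        while (i < base.size() && j < target.size()) {
          int cmp = base.get(i).compareTo(target.get(j));
          if (cmp == 0) { // tag present in both: no event
            i++;
            j++;
          } else if (cmp < 0) { // tag only in base: removed
            events.add("REMOVE " + base.get(i++));
          } else { // tag only in target: added
            events.add("ADD " + target.get(j++));
          }
        }
        while (i < base.size()) { // remaining base tags were removed
          events.add("REMOVE " + base.get(i++));
        }
        while (j < target.size()) { // remaining target tags were added
          events.add("ADD " + target.get(j++));
        }
        return events;
      }

      public static void main(String[] args) {
        List<String> base = List.of("urn:li:tag:Legacy", "urn:li:tag:PII");
        List<String> target = List.of("urn:li:tag:GDPR", "urn:li:tag:PII");
        // Prints [ADD urn:li:tag:GDPR, REMOVE urn:li:tag:Legacy]
        System.out.println(diffSortedTags(base, target));
      }
    }

Sorting both sides first is what lets a single linear pass classify every tag; without sortGlobalTagsByTagUrn, each base tag would need a membership lookup in the target array.
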
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java index f8b7794df531f..eb002a9a83cea 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -17,97 +20,111 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; - +public class GlossaryTermInfoChangeEventGenerator + extends EntityChangeEventGenerator<GlossaryTermInfo> { + private static List<ChangeEvent> computeDiffs( + GlossaryTermInfo baseDatasetProperties, + @Nonnull GlossaryTermInfo targetDatasetProperties, + @Nonnull String entityUrn, + AuditStamp auditStamp) { + List<ChangeEvent> changeEvents = new ArrayList<>(); + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDefinition() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDefinition() : null; -public class GlossaryTermInfoChangeEventGenerator extends EntityChangeEventGenerator<GlossaryTermInfo> { - private static List<ChangeEvent> computeDiffs(GlossaryTermInfo baseDatasetProperties, - @Nonnull GlossaryTermInfo targetDatasetProperties, @Nonnull String entityUrn, AuditStamp auditStamp) { - List<ChangeEvent> changeEvents = new ArrayList<>(); - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDefinition() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDefinition() : null; - - if (baseDescription == null && targetDescription != null) { - // Description added - changeEvents.add(ChangeEvent.builder().entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription == null) { - // Description removed. - changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { - // Description has been modified. 
- changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) - .auditStamp(auditStamp) - .build()); - } - return changeEvents; + if (baseDescription == null && targetDescription != null) { + // Description added + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null && targetDescription == null) { + // Description removed. + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { + // Description has been modified. + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .auditStamp(auditStamp) + .build()); } + return changeEvents; + } - @Nullable - private static GlossaryTermInfo getGlossaryTermInfoFromAspect(EntityAspect entityAspect) { - if (entityAspect != null && entityAspect.getMetadata() != null) { - return RecordUtils.toRecordTemplate(GlossaryTermInfo.class, entityAspect.getMetadata()); - } - return null; + @Nullable + private static GlossaryTermInfo getGlossaryTermInfoFromAspect(EntityAspect entityAspect) { + if (entityAspect != null && entityAspect.getMetadata() != null) { + return RecordUtils.toRecordTemplate(GlossaryTermInfo.class, entityAspect.getMetadata()); } + return null; + } - @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERM_INFO_ASPECT_NAME); - } - List<ChangeEvent> changeEvents = new ArrayList<>(); - if (element == ChangeCategory.DOCUMENTATION) { - GlossaryTermInfo baseGlossaryTermInfo = getGlossaryTermInfoFromAspect(previousValue); - GlossaryTermInfo targetGlossaryTermInfo = getGlossaryTermInfoFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseGlossaryTermInfo, targetGlossaryTermInfo, currentValue.getUrn(), null)); - } - - // Assess the highest change at the transaction(schema) level. 
- SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; - ChangeEvent highestChangeEvent = - changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); - if (highestChangeEvent != null) { - highestSemanticChange = highestChangeEvent.getSemVerChange(); - } - - return ChangeTransaction.builder() - .semVerChange(highestSemanticChange) - .changeEvents(changeEvents) - .timestamp(currentValue.getCreatedOn().getTime()) - .rawDiff(rawDiffsRequested ? rawDiff : null) - .actor(currentValue.getCreatedBy()) - .build(); + @Override + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERM_INFO_ASPECT_NAME); + } + List<ChangeEvent> changeEvents = new ArrayList<>(); + if (element == ChangeCategory.DOCUMENTATION) { + GlossaryTermInfo baseGlossaryTermInfo = getGlossaryTermInfoFromAspect(previousValue); + GlossaryTermInfo targetGlossaryTermInfo = getGlossaryTermInfoFromAspect(currentValue); + changeEvents.addAll( + computeDiffs(baseGlossaryTermInfo, targetGlossaryTermInfo, currentValue.getUrn(), null)); } - @Override - public List<ChangeEvent> getChangeEvents( - @Nonnull Urn urn, - @Nonnull String entity, - @Nonnull String aspect, - @Nonnull Aspect<GlossaryTermInfo> from, - @Nonnull Aspect<GlossaryTermInfo> to, - @Nonnull AuditStamp auditStamp) { - return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); + // Assess the highest change at the transaction(schema) level. + SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; + ChangeEvent highestChangeEvent = + changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); + if (highestChangeEvent != null) { + highestSemanticChange = highestChangeEvent.getSemVerChange(); } + + return ChangeTransaction.builder() + .semVerChange(highestSemanticChange) + .changeEvents(changeEvents) + .timestamp(currentValue.getCreatedOn().getTime()) + .rawDiff(rawDiffsRequested ? 
rawDiff : null) + .actor(currentValue.getCreatedBy()) + .build(); + } + + @Override + public List<ChangeEvent> getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect<GlossaryTermInfo> from, + @Nonnull Aspect<GlossaryTermInfo> to, + @Nonnull AuditStamp auditStamp) { + return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java index 22b2033ec52bc..6e56a7e7bbeb1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,62 +21,78 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GlossaryTermsChangeEventGenerator extends EntityChangeEventGenerator<GlossaryTerms> { private static final String GLOSSARY_TERM_ADDED_FORMAT = "Term '%s' added to entity '%s'."; private static final String GLOSSARY_TERM_REMOVED_FORMAT = "Term '%s' removed from entity '%s'."; - public static List<ChangeEvent> computeDiffs(GlossaryTerms baseGlossaryTerms, GlossaryTerms targetGlossaryTerms, - String entityUrn, AuditStamp auditStamp) { + public static List<ChangeEvent> computeDiffs( + GlossaryTerms baseGlossaryTerms, + GlossaryTerms targetGlossaryTerms, + String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); sortGlossaryTermsByGlossaryTermUrn(baseGlossaryTerms); sortGlossaryTermsByGlossaryTermUrn(targetGlossaryTerms); GlossaryTermAssociationArray baseTerms = - (baseGlossaryTerms != null) ? baseGlossaryTerms.getTerms() : new GlossaryTermAssociationArray(); + (baseGlossaryTerms != null) + ? baseGlossaryTerms.getTerms() + : new GlossaryTermAssociationArray(); GlossaryTermAssociationArray targetTerms = - (targetGlossaryTerms != null) ? targetGlossaryTerms.getTerms() : new GlossaryTermAssociationArray(); + (targetGlossaryTerms != null) + ? targetGlossaryTerms.getTerms() + : new GlossaryTermAssociationArray(); int baseGlossaryTermIdx = 0; int targetGlossaryTermIdx = 0; while (baseGlossaryTermIdx < baseTerms.size() && targetGlossaryTermIdx < targetTerms.size()) { GlossaryTermAssociation baseGlossaryTermAssociation = baseTerms.get(baseGlossaryTermIdx); - GlossaryTermAssociation targetGlossaryTermAssociation = targetTerms.get(targetGlossaryTermIdx); + GlossaryTermAssociation targetGlossaryTermAssociation = + targetTerms.get(targetGlossaryTermIdx); int comparison = - baseGlossaryTermAssociation.getUrn().toString().compareTo(targetGlossaryTermAssociation.getUrn().toString()); + baseGlossaryTermAssociation + .getUrn() + .toString() + .compareTo(targetGlossaryTermAssociation.getUrn().toString()); if (comparison == 0) { ++baseGlossaryTermIdx; ++targetGlossaryTermIdx; } else if (comparison < 0) { // GlossaryTerm got removed. 
- changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(baseGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_REMOVED_FORMAT, baseGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(baseGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(baseGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_REMOVED_FORMAT, + baseGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(baseGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++baseGlossaryTermIdx; } else { // GlossaryTerm got added. - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(targetGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_ADDED_FORMAT, targetGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(targetGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(targetGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_ADDED_FORMAT, + targetGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(targetGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++targetGlossaryTermIdx; } } @@ -82,33 +100,42 @@ public static List<ChangeEvent> computeDiffs(GlossaryTerms baseGlossaryTerms, Gl while (baseGlossaryTermIdx < baseTerms.size()) { // Handle removed glossary terms. GlossaryTermAssociation baseGlossaryTermAssociation = baseTerms.get(baseGlossaryTermIdx); - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(baseGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_REMOVED_FORMAT, baseGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(baseGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(baseGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_REMOVED_FORMAT, + baseGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(baseGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++baseGlossaryTermIdx; } while (targetGlossaryTermIdx < targetTerms.size()) { // Handle newly added glossary terms. 
- GlossaryTermAssociation targetGlossaryTermAssociation = targetTerms.get(targetGlossaryTermIdx); - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(targetGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_ADDED_FORMAT, targetGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(targetGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + GlossaryTermAssociation targetGlossaryTermAssociation = + targetTerms.get(targetGlossaryTermIdx); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(targetGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_ADDED_FORMAT, + targetGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(targetGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++targetGlossaryTermIdx; } return changeEvents; @@ -119,7 +146,8 @@ private static void sortGlossaryTermsByGlossaryTermUrn(GlossaryTerms globalGloss return; } List<GlossaryTermAssociation> glossaryTerms = new ArrayList<>(globalGlossaryTerms.getTerms()); - glossaryTerms.sort(Comparator.comparing(GlossaryTermAssociation::getUrn, Comparator.comparing(Urn::toString))); + glossaryTerms.sort( + Comparator.comparing(GlossaryTermAssociation::getUrn, Comparator.comparing(Urn::toString))); globalGlossaryTerms.setTerms(new GlossaryTermAssociationArray(glossaryTerms)); } @@ -131,15 +159,19 @@ private static GlossaryTerms getGlossaryTermsFromAspect(EntityAspect entityAspec } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOSSARY_TERMS_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERMS_ASPECT_NAME); } @@ -147,7 +179,8 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec GlossaryTerms targetGlossaryTerms = getGlossaryTermsFromAspect(currentValue); List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.GLOSSARY_TERM) { - changeEvents.addAll(computeDiffs(baseGlossaryTerms, targetGlossaryTerms, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseGlossaryTerms, targetGlossaryTerms, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
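
GlossaryTermsChangeEventGenerator.getSemanticDiff above ends the same way every generator in this patch does: the per-event semantic changes are folded into a single transaction-level version bump by taking the maximum, via changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)). Below is a minimal sketch of that reduction, assuming (as with DataHub's SemanticChangeType) that the enum declares its constants from least to most severe; the Change record, SemVerChange enum, and highest method are hypothetical stand-ins, and the record syntax needs Java 16+.

    import java.util.Comparator;
    import java.util.List;

    public class HighestChangeSketch {
      // Declaration order encodes severity, so the enum's natural
      // (ordinal-based) ordering is the whole comparison policy.
      enum SemVerChange { NONE, PATCH, MINOR, MAJOR }

      record Change(String description, SemVerChange semVer) {}

      // The transaction's bump is the most severe bump among its
      // individual change events, or NONE when there are no events.
      static SemVerChange highest(List<Change> events) {
        return events.stream()
            .map(Change::semVer)
            .max(Comparator.naturalOrder())
            .orElse(SemVerChange.NONE);
      }

      public static void main(String[] args) {
        List<Change> events =
            List.of(
                new Change("Term 'Sensitive' added", SemVerChange.MINOR),
                new Change("Definition text edited", SemVerChange.PATCH));
        System.out.println(highest(events)); // MINOR
      }
    }

Because Java enums implement Comparable by ordinal, no custom Comparator is needed; reordering the constants would silently change which event wins, which is why the severity order is effectively part of the enum's contract.
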
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java index a23d76e47755c..bf3ff3293d2a2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,10 +21,8 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class InstitutionalMemoryChangeEventGenerator extends EntityChangeEventGenerator<InstitutionalMemory> { +public class InstitutionalMemoryChangeEventGenerator + extends EntityChangeEventGenerator<InstitutionalMemory> { private static final String INSTITUTIONAL_MEMORY_ADDED_FORMAT = "Institutional Memory '%s' with documentation of '%s' has been added: '%s'"; @@ -31,17 +31,22 @@ public class InstitutionalMemoryChangeEventGenerator extends EntityChangeEventGe private static final String INSTITUTIONAL_MEMORY_MODIFIED_FORMAT = "Documentation of Institutional Memory '%s' of '%s' has been changed from '%s' to '%s'."; - private static List<ChangeEvent> computeDiffs(InstitutionalMemory baseInstitutionalMemory, - InstitutionalMemory targetInstitutionalMemory, String entityUrn, AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + InstitutionalMemory baseInstitutionalMemory, + InstitutionalMemory targetInstitutionalMemory, + String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); sortElementsByUrl(baseInstitutionalMemory); sortElementsByUrl(targetInstitutionalMemory); InstitutionalMemoryMetadataArray baseElements = - (baseInstitutionalMemory != null) ? baseInstitutionalMemory.getElements() + (baseInstitutionalMemory != null) + ? baseInstitutionalMemory.getElements() : new InstitutionalMemoryMetadataArray(); InstitutionalMemoryMetadataArray targetElements = - (targetInstitutionalMemory != null) ? targetInstitutionalMemory.getElements() + (targetInstitutionalMemory != null) + ? targetInstitutionalMemory.getElements() : new InstitutionalMemoryMetadataArray(); int baseIdx = 0; @@ -53,46 +58,60 @@ private static List<ChangeEvent> computeDiffs(InstitutionalMemory baseInstitutio if (comparison == 0) { if (!baseElement.getDescription().equals(targetElement.getDescription())) { // InstitutionalMemory description has changed. 
- changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.PATCH) - .description(String.format(INSTITUTIONAL_MEMORY_MODIFIED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription(), targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.PATCH) + .description( + String.format( + INSTITUTIONAL_MEMORY_MODIFIED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription(), + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); } ++baseIdx; ++targetIdx; } else if (comparison < 0) { // InstitutionalMemory got removed. - changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_REMOVED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_REMOVED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++baseIdx; } else { // InstitutionalMemory got added.. - changeEvents.add(ChangeEvent.builder() - .modifier(targetElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_ADDED_FORMAT, targetElement.getUrl(), entityUrn, - targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(targetElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_ADDED_FORMAT, + targetElement.getUrl(), + entityUrn, + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++targetIdx; } } @@ -100,34 +119,42 @@ private static List<ChangeEvent> computeDiffs(InstitutionalMemory baseInstitutio while (baseIdx < baseElements.size()) { // InstitutionalMemory got removed. 
InstitutionalMemoryMetadata baseElement = baseElements.get(baseIdx); - changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_REMOVED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_REMOVED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++baseIdx; } while (targetIdx < targetElements.size()) { // Newly added owners. InstitutionalMemoryMetadata targetElement = targetElements.get(targetIdx); // InstitutionalMemory got added.. - changeEvents.add(ChangeEvent.builder() - .modifier(targetElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_ADDED_FORMAT, targetElement.getUrl(), entityUrn, - targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(targetElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_ADDED_FORMAT, + targetElement.getUrl(), + entityUrn, + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++targetIdx; } return changeEvents; @@ -145,20 +172,26 @@ private static void sortElementsByUrl(InstitutionalMemory institutionalMemory) { return; } List<InstitutionalMemoryMetadata> elements = new ArrayList<>(institutionalMemory.getElements()); - elements.sort(Comparator.comparing(InstitutionalMemoryMetadata::getUrl, Comparator.comparing(Url::toString))); + elements.sort( + Comparator.comparing( + InstitutionalMemoryMetadata::getUrl, Comparator.comparing(Url::toString))); institutionalMemory.setElements(new InstitutionalMemoryMetadataArray(elements)); } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME) || !currentValue.getAspect() - .equals(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME) + || !currentValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + INSTITUTIONAL_MEMORY_ASPECT_NAME); } @@ -166,7 +199,9 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec InstitutionalMemory targetInstitutionalMemory = getInstitutionalMemoryFromAspect(currentValue); List<ChangeEvent> 
changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { - changeEvents.addAll(computeDiffs(baseInstitutionalMemory, targetInstitutionalMemory, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs( + baseInstitutionalMemory, targetInstitutionalMemory, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java index f5697aea25b9a..b32958508cf24 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,23 +21,21 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipChangeEventGenerator extends EntityChangeEventGenerator<Ownership> { private static final String OWNER_ADDED_FORMAT = "'%s' added as a `%s` of '%s'."; private static final String OWNER_REMOVED_FORMAT = "'%s' removed as a `%s` of '%s'."; private static final String OWNERSHIP_TYPE_CHANGE_FORMAT = "'%s''s ownership type changed from '%s' to '%s' for '%s'."; - private static List<ChangeEvent> computeDiffs(Ownership baseOwnership, Ownership targetOwnership, String entityUrn, - AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + Ownership baseOwnership, Ownership targetOwnership, String entityUrn, AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); sortOwnersByUrn(baseOwnership); sortOwnersByUrn(targetOwnership); OwnerArray baseOwners = (baseOwnership != null) ? baseOwnership.getOwners() : new OwnerArray(); - OwnerArray targetOwners = (targetOwnership != null) ? targetOwnership.getOwners() : new OwnerArray(); + OwnerArray targetOwners = + (targetOwnership != null) ? targetOwnership.getOwners() : new OwnerArray(); int baseOwnerIdx = 0; int targetOwnerIdx = 0; @@ -46,49 +46,66 @@ private static List<ChangeEvent> computeDiffs(Ownership baseOwnership, Ownership if (comparison == 0) { if (!baseOwner.getType().equals(targetOwner.getType())) { // Ownership type has changed. 
- changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getType().name()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.PATCH) - .description( - String.format(OWNERSHIP_TYPE_CHANGE_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), - targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getType().name()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.PATCH) + .description( + String.format( + OWNERSHIP_TYPE_CHANGE_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); } ++baseOwnerIdx; ++targetOwnerIdx; } else if (comparison < 0) { // Owner got removed - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(baseOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_REMOVED_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), entityUrn)) - .ownerUrn(baseOwner.getOwner()) - .ownerType(baseOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(baseOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_REMOVED_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + entityUrn)) + .ownerUrn(baseOwner.getOwner()) + .ownerType(baseOwner.getType()) + .auditStamp(auditStamp) + .build()); ++baseOwnerIdx; } else { // Owner got added. - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_ADDED_FORMAT, targetOwner.getOwner().getId(), targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_ADDED_FORMAT, + targetOwner.getOwner().getId(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); ++targetOwnerIdx; } } @@ -96,33 +113,45 @@ private static List<ChangeEvent> computeDiffs(Ownership baseOwnership, Ownership while (baseOwnerIdx < baseOwners.size()) { // Handle removed owners. 
Owner baseOwner = baseOwners.get(baseOwnerIdx); - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(baseOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_REMOVED_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), entityUrn)) - .ownerUrn(baseOwner.getOwner()) - .ownerType(baseOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(baseOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_REMOVED_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + entityUrn)) + .ownerUrn(baseOwner.getOwner()) + .ownerType(baseOwner.getType()) + .auditStamp(auditStamp) + .build()); ++baseOwnerIdx; } while (targetOwnerIdx < targetOwners.size()) { // Newly added owners. Owner targetOwner = targetOwners.get(targetOwnerIdx); - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_ADDED_FORMAT, targetOwner.getOwner().getId(), targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_ADDED_FORMAT, + targetOwner.getOwner().getId(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); ++targetOwnerIdx; } return changeEvents; @@ -145,15 +174,19 @@ private static void sortOwnersByUrn(Ownership ownership) { } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(OWNERSHIP_ASPECT_NAME) || !currentValue.getAspect() - .equals(OWNERSHIP_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(OWNERSHIP_ASPECT_NAME) + || !currentValue.getAspect().equals(OWNERSHIP_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + OWNERSHIP_ASPECT_NAME); } @@ -162,11 +195,13 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec List<ChangeEvent> changeEvents = new ArrayList<>(); if (element == ChangeCategory.OWNER) { - changeEvents.addAll(computeDiffs(baseOwnership, targetOwnership, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseOwnership, targetOwnership, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
- // Why isn't this done at changeevent level - what if transaction contains multiple category events? + // Why isn't this done at changeevent level - what if transaction contains multiple category + // events? SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; ChangeEvent highestChangeEvent = changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java index 2e0a8586cba60..1fd5d6e2c0f7a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -29,14 +31,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang.StringUtils; -import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; - - @Slf4j public class SchemaMetadataChangeEventGenerator extends EntityChangeEventGenerator<SchemaMetadata> { private static final String SCHEMA_METADATA_ASPECT_NAME = "schemaMetadata"; - private static final String BACKWARDS_INCOMPATIBLE_DESC = "A backwards incompatible change due to"; - private static final String BACK_AND_FORWARD_COMPATIBLE_DESC = "A forwards & backwards compatible change due to "; + private static final String BACKWARDS_INCOMPATIBLE_DESC = + "A backwards incompatible change due to"; + private static final String BACK_AND_FORWARD_COMPATIBLE_DESC = + "A forwards & backwards compatible change due to "; private static final String FIELD_DESCRIPTION_ADDED_FORMAT = "The description '%s' for the field '%s' has been added."; private static final String FIELD_DESCRIPTION_REMOVED_FORMAT = @@ -44,8 +45,11 @@ public class SchemaMetadataChangeEventGenerator extends EntityChangeEventGenerat private static final String FIELD_DESCRIPTION_MODIFIED_FORMAT = "The description for the field '%s' has been changed from '%s' to '%s'."; - private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, @Nullable SchemaField targetField, - String datasetFieldUrn, AuditStamp auditStamp) { + private static ChangeEvent getDescriptionChange( + @Nullable SchemaField baseField, + @Nullable SchemaField targetField, + String datasetFieldUrn, + AuditStamp auditStamp) { String baseDescription = (baseField != null) ? baseField.getDescription() : null; String targetDescription = (targetField != null) ? 
targetField.getDescription() : null; if (baseDescription == null && targetDescription != null) { @@ -55,7 +59,9 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.MINOR) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_ADDED_FORMAT, targetDescription, targetField.getFieldPath())) + .description( + String.format( + FIELD_DESCRIPTION_ADDED_FORMAT, targetDescription, targetField.getFieldPath())) .auditStamp(auditStamp) .build(); } @@ -66,7 +72,9 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.MINOR) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_REMOVED_FORMAT, baseDescription, baseField.getFieldPath())) + .description( + String.format( + FIELD_DESCRIPTION_REMOVED_FORMAT, baseDescription, baseField.getFieldPath())) .auditStamp(auditStamp) .build(); } @@ -77,26 +85,36 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.PATCH) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_MODIFIED_FORMAT, baseField.getFieldPath(), baseDescription, - targetDescription)) + .description( + String.format( + FIELD_DESCRIPTION_MODIFIED_FORMAT, + baseField.getFieldPath(), + baseDescription, + targetDescription)) .auditStamp(auditStamp) .build(); } return null; } - private static List<ChangeEvent> getGlobalTagChangeEvents(SchemaField baseField, SchemaField targetField, + private static List<ChangeEvent> getGlobalTagChangeEvents( + SchemaField baseField, + SchemaField targetField, String parentUrnStr, String datasetFieldUrn, AuditStamp auditStamp) { // 1. Get EntityTagChangeEvent, then rebind into a SchemaFieldTagChangeEvent. List<ChangeEvent> entityTagChangeEvents = - GlobalTagsChangeEventGenerator.computeDiffs(baseField != null ? baseField.getGlobalTags() : null, - targetField != null ? targetField.getGlobalTags() : null, datasetFieldUrn, auditStamp); + GlobalTagsChangeEventGenerator.computeDiffs( + baseField != null ? baseField.getGlobalTags() : null, + targetField != null ? targetField.getGlobalTags() : null, + datasetFieldUrn, + auditStamp); if (baseField != null || targetField != null) { - String fieldPath = targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); + String fieldPath = + targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); // 2. Convert EntityTagChangeEvent into a SchemaFieldTagChangeEvent. final Urn parentUrn; try { @@ -106,27 +124,30 @@ private static List<ChangeEvent> getGlobalTagChangeEvents(SchemaField baseField, return Collections.emptyList(); } - return convertEntityTagChangeEvents( - fieldPath, - parentUrn, - entityTagChangeEvents); + return convertEntityTagChangeEvents(fieldPath, parentUrn, entityTagChangeEvents); } return Collections.emptyList(); } - private static List<ChangeEvent> getGlossaryTermsChangeEvents(SchemaField baseField, SchemaField targetField, + private static List<ChangeEvent> getGlossaryTermsChangeEvents( + SchemaField baseField, + SchemaField targetField, String parentUrnStr, String datasetFieldUrn, AuditStamp auditStamp) { // 1. Get EntityGlossaryTermChangeEvent, then rebind into a SchemaFieldGlossaryTermChangeEvent. 
List<ChangeEvent> entityGlossaryTermsChangeEvents = - GlossaryTermsChangeEventGenerator.computeDiffs(baseField != null ? baseField.getGlossaryTerms() : null, - targetField != null ? targetField.getGlossaryTerms() : null, datasetFieldUrn, auditStamp); + GlossaryTermsChangeEventGenerator.computeDiffs( + baseField != null ? baseField.getGlossaryTerms() : null, + targetField != null ? targetField.getGlossaryTerms() : null, + datasetFieldUrn, + auditStamp); if (targetField != null || baseField != null) { - String fieldPath = targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); + String fieldPath = + targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); // 2. Convert EntityGlossaryTermChangeEvent into a SchemaFieldGlossaryTermChangeEvent. final Urn parentUrn; try { @@ -137,16 +158,18 @@ private static List<ChangeEvent> getGlossaryTermsChangeEvents(SchemaField baseFi } return convertEntityGlossaryTermChangeEvents( - fieldPath, - parentUrn, - entityGlossaryTermsChangeEvents); + fieldPath, parentUrn, entityGlossaryTermsChangeEvents); } return Collections.emptyList(); } - private static List<ChangeEvent> getFieldPropertyChangeEvents(SchemaField baseField, SchemaField targetField, - Urn datasetUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List<ChangeEvent> getFieldPropertyChangeEvents( + SchemaField baseField, + SchemaField targetField, + Urn datasetUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { List<ChangeEvent> propChangeEvents = new ArrayList<>(); String datasetFieldUrn; if (targetField != null) { @@ -157,7 +180,8 @@ private static List<ChangeEvent> getFieldPropertyChangeEvents(SchemaField baseFi // Description Change. if (ChangeCategory.DOCUMENTATION.equals(changeCategory)) { - ChangeEvent descriptionChangeEvent = getDescriptionChange(baseField, targetField, datasetFieldUrn, auditStamp); + ChangeEvent descriptionChangeEvent = + getDescriptionChange(baseField, targetField, datasetFieldUrn, auditStamp); if (descriptionChangeEvent != null) { propChangeEvents.add(descriptionChangeEvent); } @@ -165,20 +189,28 @@ private static List<ChangeEvent> getFieldPropertyChangeEvents(SchemaField baseFi // Global Tags if (ChangeCategory.TAG.equals(changeCategory)) { - propChangeEvents.addAll(getGlobalTagChangeEvents(baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); + propChangeEvents.addAll( + getGlobalTagChangeEvents( + baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); } // Glossary terms. if (ChangeCategory.GLOSSARY_TERM.equals(changeCategory)) { - propChangeEvents.addAll(getGlossaryTermsChangeEvents(baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); + propChangeEvents.addAll( + getGlossaryTermsChangeEvents( + baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); } return propChangeEvents; } // TODO: This could use some cleanup, lots of repeated logic and tenuous conditionals - private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaMetadata targetSchema, - Urn datasetUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + SchemaMetadata baseSchema, + SchemaMetadata targetSchema, + Urn datasetUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { // Sort the fields by their field path. 
if (baseSchema != null) { sortFieldsByPath(baseSchema); @@ -188,8 +220,10 @@ private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaM } // Performs ordinal based diff, primarily based on fixed field ordinals and their types. - SchemaFieldArray baseFields = (baseSchema != null ? baseSchema.getFields() : new SchemaFieldArray()); - SchemaFieldArray targetFields = targetSchema != null ? targetSchema.getFields() : new SchemaFieldArray(); + SchemaFieldArray baseFields = + (baseSchema != null ? baseSchema.getFields() : new SchemaFieldArray()); + SchemaFieldArray targetFields = + targetSchema != null ? targetSchema.getFields() : new SchemaFieldArray(); int baseFieldIdx = 0; int targetFieldIdx = 0; List<ChangeEvent> changeEvents = new ArrayList<>(); @@ -197,7 +231,7 @@ private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaM while (baseFieldIdx < baseFields.size() && targetFieldIdx < targetFields.size()) { SchemaField curBaseField = baseFields.get(baseFieldIdx); SchemaField curTargetField = targetFields.get(targetFieldIdx); - //TODO: Re-evaluate ordinal processing? + // TODO: Re-evaluate ordinal processing? int comparison = curBaseField.getFieldPath().compareTo(curTargetField.getFieldPath()); if (renamedFields.contains(curBaseField)) { baseFieldIdx++; @@ -208,61 +242,78 @@ private static List<ChangeEvent> computeDiffs(SchemaMetadata baseSchema, SchemaM if (!curBaseField.getNativeDataType().equals(curTargetField.getNativeDataType())) { // Non-backward compatible change + Major version bump if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(String.format("%s native datatype of the field '%s' changed from '%s' to '%s'.", - BACKWARDS_INCOMPATIBLE_DESC, getFieldPathV1(curTargetField), curBaseField.getNativeDataType(), - curTargetField.getNativeDataType())) - .fieldPath(curBaseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) - .nullable(curBaseField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + String.format( + "%s native datatype of the field '%s' changed from '%s' to '%s'.", + BACKWARDS_INCOMPATIBLE_DESC, + getFieldPathV1(curTargetField), + curBaseField.getNativeDataType(), + curTargetField.getNativeDataType())) + .fieldPath(curBaseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) + .nullable(curBaseField.isNullable()) + .auditStamp(auditStamp) + .build()); } - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; ++targetFieldIdx; } List<ChangeEvent> propChangeEvents = - getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, changeCategory, 
auditStamp); + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; ++targetFieldIdx; } else if (comparison < 0) { - // Base Field was removed or was renamed. Non-backward compatible change + Major version bump + // Base Field was removed or was renamed. Non-backward compatible change + Major version + // bump // Check for rename, if rename coincides with other modifications we assume drop/add. - // Assumes that two different fields on the same schema would not have the same description, terms, + // Assumes that two different fields on the same schema would not have the same description, + // terms, // or tags and share the same type - SchemaField renamedField = findRenamedField(curBaseField, - targetFields.subList(targetFieldIdx, targetFields.size()), renamedFields); + SchemaField renamedField = + findRenamedField( + curBaseField, + targetFields.subList(targetFieldIdx, targetFields.size()), + renamedFields); if (renamedField == null) { processRemoval(changeCategory, changeEvents, datasetUrn, curBaseField, auditStamp); ++baseFieldIdx; } else { changeEvents.add(generateRenameEvent(datasetUrn, curBaseField, renamedField, auditStamp)); - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; renamedFields.add(renamedField); } } else { - // The targetField got added or a renaming occurred. Forward & backwards compatible change + minor version bump. - SchemaField renamedField = findRenamedField(curTargetField, - baseFields.subList(baseFieldIdx, baseFields.size()), renamedFields); + // The targetField got added or a renaming occurred. Forward & backwards compatible change + + // minor version bump. + SchemaField renamedField = + findRenamedField( + curTargetField, baseFields.subList(baseFieldIdx, baseFields.size()), renamedFields); if (renamedField == null) { processAdd(changeCategory, changeEvents, datasetUrn, curTargetField, auditStamp); ++targetFieldIdx; } else { - changeEvents.add(generateRenameEvent(datasetUrn, renamedField, curTargetField, auditStamp)); - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + changeEvents.add( + generateRenameEvent(datasetUrn, renamedField, curTargetField, auditStamp)); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++targetFieldIdx; renamedFields.add(renamedField); @@ -287,7 +338,8 @@ BACKWARDS_INCOMPATIBLE_DESC, getFieldPathV1(curTargetField), curBaseField.getNat } // Handle primary key constraint change events. - List<ChangeEvent> primaryKeyChangeEvents = getPrimaryKeyChangeEvents(baseSchema, targetSchema, datasetUrn, auditStamp); + List<ChangeEvent> primaryKeyChangeEvents = + getPrimaryKeyChangeEvents(baseSchema, targetSchema, datasetUrn, auditStamp); changeEvents.addAll(primaryKeyChangeEvents); // Handle foreign key constraint change events. 
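For orientation, the ordinal diff in computeDiffs is essentially a two-pointer merge over two field lists sorted by field path. Below is a minimal, self-contained sketch of that walk, with SchemaField reduced to a plain path string; the class and output strings are illustrative only and do not come from the codebase:

import java.util.List;

class SortedFieldWalkSketch {
  // Walks two lexicographically sorted lists of field paths and classifies each
  // path the same way computeDiffs does: equal paths are checked for
  // modifications, paths only in "base" are removals (or rename candidates),
  // and paths only in "target" are additions (or rename candidates).
  static void walk(List<String> base, List<String> target) {
    int i = 0, j = 0;
    while (i < base.size() && j < target.size()) {
      int cmp = base.get(i).compareTo(target.get(j));
      if (cmp == 0) {
        System.out.println("check for modify: " + base.get(i));
        i++;
        j++;
      } else if (cmp < 0) {
        System.out.println("removed (or renamed): " + base.get(i));
        i++;
      } else {
        System.out.println("added (or renamed): " + target.get(j));
        j++;
      }
    }
    // Anything left over in base was removed; anything left in target was added.
    while (i < base.size()) {
      System.out.println("removed: " + base.get(i++));
    }
    while (j < target.size()) {
      System.out.println("added: " + target.get(j++));
    }
  }
}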
@@ -306,16 +358,19 @@ private static void sortFieldsByPath(SchemaMetadata schemaMetadata) { schemaMetadata.setFields(new SchemaFieldArray(schemaFields)); } - private static SchemaField findRenamedField(SchemaField curField, List<SchemaField> targetFields, Set<SchemaField> renamedFields) { + private static SchemaField findRenamedField( + SchemaField curField, List<SchemaField> targetFields, Set<SchemaField> renamedFields) { return targetFields.stream() .filter(schemaField -> isRenamed(curField, schemaField)) .filter(field -> !renamedFields.contains(field)) - .findFirst().orElse(null); + .findFirst() + .orElse(null); } private static boolean isRenamed(SchemaField curField, SchemaField schemaField) { return curField.getNativeDataType().equals(schemaField.getNativeDataType()) - && parentFieldsMatch(curField, schemaField) && descriptionsMatch(curField, schemaField); + && parentFieldsMatch(curField, schemaField) + && descriptionsMatch(curField, schemaField); } private static boolean parentFieldsMatch(SchemaField curField, SchemaField schemaField) { @@ -324,73 +379,98 @@ private static boolean parentFieldsMatch(SchemaField curField, SchemaField schem if (curFieldIndex > 0 && schemaFieldIndex > 0) { String curFieldParentPath = curField.getFieldPath().substring(0, curFieldIndex); String schemaFieldParentPath = schemaField.getFieldPath().substring(0, schemaFieldIndex); - return StringUtils.isNotBlank(curFieldParentPath) && curFieldParentPath.equals(schemaFieldParentPath); + return StringUtils.isNotBlank(curFieldParentPath) + && curFieldParentPath.equals(schemaFieldParentPath); } // No parent field return curFieldIndex < 0 && schemaFieldIndex < 0; } private static boolean descriptionsMatch(SchemaField curField, SchemaField schemaField) { - return StringUtils.isNotBlank(curField.getDescription()) && curField.getDescription().equals(schemaField.getDescription()); + return StringUtils.isNotBlank(curField.getDescription()) + && curField.getDescription().equals(schemaField.getDescription()); } - private static void processRemoval(ChangeCategory changeCategory, List<ChangeEvent> changeEvents, Urn datasetUrn, - SchemaField baseField, AuditStamp auditStamp) { + private static void processRemoval( + ChangeCategory changeCategory, + List<ChangeEvent> changeEvents, + Urn datasetUrn, + SchemaField baseField, + AuditStamp auditStamp) { if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .modifier(getSchemaFieldUrn(datasetUrn, baseField).toString()) - .entityUrn(datasetUrn.toString()) - .category(ChangeCategory.TECHNICAL_SCHEMA) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " removal of field: '" + getFieldPathV1(baseField) + "'.") - .fieldPath(baseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, baseField)) - .nullable(baseField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .modifier(getSchemaFieldUrn(datasetUrn, baseField).toString()) + .entityUrn(datasetUrn.toString()) + .category(ChangeCategory.TECHNICAL_SCHEMA) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " removal of field: '" + + getFieldPathV1(baseField) + + "'.") + .fieldPath(baseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, baseField)) + .nullable(baseField.isNullable()) + 
.auditStamp(auditStamp) + .build()); } - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(baseField, null, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents(baseField, null, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); } - private static void processAdd(ChangeCategory changeCategory, List<ChangeEvent> changeEvents, Urn datasetUrn, - SchemaField targetField, AuditStamp auditStamp) { + private static void processAdd( + ChangeCategory changeCategory, + List<ChangeEvent> changeEvents, + Urn datasetUrn, + SchemaField targetField, + AuditStamp auditStamp) { if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .modifier(getSchemaFieldUrn(datasetUrn, targetField).toString()) - .entityUrn(datasetUrn.toString()) - .category(ChangeCategory.TECHNICAL_SCHEMA) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(BACK_AND_FORWARD_COMPATIBLE_DESC + "the newly added field '" + getFieldPathV1(targetField) + "'.") - .fieldPath(targetField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, targetField)) - .nullable(targetField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .modifier(getSchemaFieldUrn(datasetUrn, targetField).toString()) + .entityUrn(datasetUrn.toString()) + .category(ChangeCategory.TECHNICAL_SCHEMA) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + BACK_AND_FORWARD_COMPATIBLE_DESC + + "the newly added field '" + + getFieldPathV1(targetField) + + "'.") + .fieldPath(targetField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, targetField)) + .nullable(targetField.isNullable()) + .auditStamp(auditStamp) + .build()); } - List<ChangeEvent> propChangeEvents = getFieldPropertyChangeEvents(null, targetField, datasetUrn, - changeCategory, auditStamp); + List<ChangeEvent> propChangeEvents = + getFieldPropertyChangeEvents(null, targetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); } - private static ChangeEvent generateRenameEvent(Urn datasetUrn, SchemaField curBaseField, SchemaField curTargetField, - AuditStamp auditStamp) { - return DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(BACK_AND_FORWARD_COMPATIBLE_DESC + "renaming of the field '" + getFieldPathV1(curBaseField) - + " to " + getFieldPathV1(curTargetField) + "'.") - .fieldPath(curBaseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) - .nullable(curBaseField.isNullable()) - .auditStamp(auditStamp) - .build(); + private static ChangeEvent generateRenameEvent( + Urn datasetUrn, SchemaField curBaseField, SchemaField curTargetField, AuditStamp auditStamp) { + return DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + BACK_AND_FORWARD_COMPATIBLE_DESC + + "renaming of the field '" + + 
getFieldPathV1(curBaseField) + + " to " + + getFieldPathV1(curTargetField) + + "'.") + .fieldPath(curBaseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) + .nullable(curBaseField.isNullable()) + .auditStamp(auditStamp) + .build(); } private static SchemaMetadata getSchemaMetadataFromAspect(EntityAspect entityAspect) { @@ -407,49 +487,73 @@ private static List<ChangeEvent> getForeignKeyChangeEvents() { return foreignKeyChangeEvents; } - private static List<ChangeEvent> getPrimaryKeyChangeEvents(SchemaMetadata baseSchema, SchemaMetadata targetSchema, - Urn datasetUrn, AuditStamp auditStamp) { + private static List<ChangeEvent> getPrimaryKeyChangeEvents( + SchemaMetadata baseSchema, + SchemaMetadata targetSchema, + Urn datasetUrn, + AuditStamp auditStamp) { List<ChangeEvent> primaryKeyChangeEvents = new ArrayList<>(); Set<String> basePrimaryKeys = - (baseSchema != null && baseSchema.getPrimaryKeys() != null) ? new HashSet<>(baseSchema.getPrimaryKeys()) + (baseSchema != null && baseSchema.getPrimaryKeys() != null) + ? new HashSet<>(baseSchema.getPrimaryKeys()) : new HashSet<>(); Set<String> targetPrimaryKeys = - (targetSchema != null && targetSchema.getPrimaryKeys() != null) ? new HashSet<>(targetSchema.getPrimaryKeys()) : new HashSet<>(); + (targetSchema != null && targetSchema.getPrimaryKeys() != null) + ? new HashSet<>(targetSchema.getPrimaryKeys()) + : new HashSet<>(); Set<String> removedBaseKeys = - basePrimaryKeys.stream().filter(key -> !targetPrimaryKeys.contains(key)).collect(Collectors.toSet()); + basePrimaryKeys.stream() + .filter(key -> !targetPrimaryKeys.contains(key)) + .collect(Collectors.toSet()); for (String removedBaseKeyField : removedBaseKeys) { - primaryKeyChangeEvents.add(ChangeEvent.builder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn.toString(), removedBaseKeyField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " removal of the primary key field '" + removedBaseKeyField + "'") - .auditStamp(auditStamp) - .build()); + primaryKeyChangeEvents.add( + ChangeEvent.builder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn.toString(), removedBaseKeyField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " removal of the primary key field '" + + removedBaseKeyField + + "'") + .auditStamp(auditStamp) + .build()); } Set<String> addedTargetKeys = - targetPrimaryKeys.stream().filter(key -> !basePrimaryKeys.contains(key)).collect(Collectors.toSet()); + targetPrimaryKeys.stream() + .filter(key -> !basePrimaryKeys.contains(key)) + .collect(Collectors.toSet()); for (String addedTargetKeyField : addedTargetKeys) { - primaryKeyChangeEvents.add(ChangeEvent.builder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, addedTargetKeyField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " addition of the primary key field '" + addedTargetKeyField + "'") - .auditStamp(auditStamp) - .build()); + primaryKeyChangeEvents.add( + ChangeEvent.builder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, addedTargetKeyField).toString()) + 
.entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " addition of the primary key field '" + + addedTargetKeyField + + "'") + .auditStamp(auditStamp) + .build()); } return primaryKeyChangeEvents; } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory changeCategory, JsonPatch rawDiff, boolean rawDiffRequested) { - if (!previousValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME) || !currentValue.getAspect() - .equals(SCHEMA_METADATA_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory changeCategory, + JsonPatch rawDiff, + boolean rawDiffRequested) { + if (!previousValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME) + || !currentValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + SCHEMA_METADATA_ASPECT_NAME); } @@ -462,9 +566,14 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec List<ChangeEvent> changeEvents; try { - changeEvents = new ArrayList<>( - computeDiffs(baseSchema, targetSchema, DatasetUrn.createFromString(currentValue.getUrn()), changeCategory, - null)); + changeEvents = + new ArrayList<>( + computeDiffs( + baseSchema, + targetSchema, + DatasetUrn.createFromString(currentValue.getUrn()), + changeCategory, + null)); } catch (URISyntaxException e) { throw new IllegalArgumentException("Malformed DatasetUrn " + currentValue.getUrn()); } @@ -472,7 +581,9 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec // Assess the highest change at the transaction(schema) level. 
SemanticChangeType highestSematicChange = SemanticChangeType.NONE; changeEvents = - changeEvents.stream().filter(changeEvent -> changeEvent.getCategory() == changeCategory).collect(Collectors.toList()); + changeEvents.stream() + .filter(changeEvent -> changeEvent.getCategory() == changeCategory) + .collect(Collectors.toList()); ChangeEvent highestChangeEvent = changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); if (highestChangeEvent != null) {
@@ -496,11 +607,17 @@ public List<ChangeEvent> getChangeEvents( @Nonnull Aspect<SchemaMetadata> to, @Nonnull AuditStamp auditStamp) { final List<ChangeEvent> changeEvents = new ArrayList<>(); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.DOCUMENTATION, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TAG, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TECHNICAL_SCHEMA, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.GLOSSARY_TERM, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn, ChangeCategory.DOCUMENTATION, auditStamp)); + changeEvents.addAll( + computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TAG, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn, ChangeCategory.TECHNICAL_SCHEMA, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn, ChangeCategory.GLOSSARY_TERM, auditStamp)); return changeEvents; } - }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java index d97a3fa4f65dd..da8cf3e3b49c9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java
@@ -13,22 +13,27 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** - * This is a simple differ that compares to Domains aspects and assumes that each domain - * will have a single domain (currently the semantic contract). + * This is a simple differ that compares two Domains aspects and assumes that each entity will have + * a single domain (currently the semantic contract). */ public class SingleDomainChangeEventGenerator extends EntityChangeEventGenerator<Domains> { @Override - public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect, - @Nonnull Aspect<Domains> from, @Nonnull Aspect<Domains> to, @Nonnull AuditStamp auditStamp) { + public List<ChangeEvent> getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect<Domains> from, + @Nonnull Aspect<Domains> to, + @Nonnull AuditStamp auditStamp) { return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); } - private List<ChangeEvent> computeDiffs(Domains baseDomains, Domains targetDomains, String entityUrn, - AuditStamp auditStamp) { + private List<ChangeEvent> computeDiffs( + Domains baseDomains, Domains targetDomains, String entityUrn, AuditStamp auditStamp) { - // Simply fetch the first element from each domains list and compare.
If they are different, emit + // a domain ADD / REMOVE event. if (isDomainSet(baseDomains, targetDomains)) { return Collections.singletonList(
@@ -72,8 +77,8 @@ private List<ChangeEvent> computeDiffs(Domains baseDomains, Domains targetDomain .modifier(targetDomains.getDomains().get(0).toString()) .domainUrn(targetDomains.getDomains().get(0)) .auditStamp(auditStamp) - .build() - ); + .build()); } return Collections.emptyList();
@@ -88,7 +92,9 @@ private boolean isDomainUnset(@Nullable final Domains from, @Nullable final Doma } private boolean isDomainChanged(@Nullable final Domains from, @Nullable final Domains to) { - return !isDomainEmpty(from) && !isDomainEmpty(to) && !from.getDomains().get(0).equals(to.getDomains().get(0)); + return !isDomainEmpty(from) + && !isDomainEmpty(to) + && !from.getDomains().get(0).equals(to.getDomains().get(0)); } private boolean isDomainEmpty(@Nullable final Domains domains) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java index 17f6798586417..df8aa4dd4ca71 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java
@@ -11,28 +11,31 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Differ responsible for determining whether an entity has been soft-deleted or soft-created. - */ +/** Differ responsible for determining whether an entity has been soft-deleted or soft-created. */ public class StatusChangeEventGenerator extends EntityChangeEventGenerator<Status> { @Override - public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect, - @Nonnull Aspect<Status> from, @Nonnull Aspect<Status> to, @Nonnull AuditStamp auditStamp) { + public List<ChangeEvent> getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect<Status> from, + @Nonnull Aspect<Status> to, + @Nonnull AuditStamp auditStamp) { return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); } - private List<ChangeEvent> computeDiffs(Status baseStatus, Status targetStatus, String entityUrn, - AuditStamp auditStamp) { + private List<ChangeEvent> computeDiffs( + Status baseStatus, Status targetStatus, String entityUrn, AuditStamp auditStamp) { // If the new status is "removed", then return a soft-deletion event. if (isRemoved(targetStatus)) { return Collections.singletonList( ChangeEvent.builder() - .category(ChangeCategory.LIFECYCLE) - .operation(ChangeOperation.SOFT_DELETE) - .auditStamp(auditStamp) - .entityUrn(entityUrn).build()); + .category(ChangeCategory.LIFECYCLE) + .operation(ChangeOperation.SOFT_DELETE) + .auditStamp(auditStamp) + .entityUrn(entityUrn) + .build()); } // If the new status is "unremoved", then return a reinstatement event.
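Behaviorally, this differ reduces to a small decision over the removed flag of the old and new Status aspects. A compact sketch of that decision follows, using plain booleans and strings instead of the real Status and ChangeEvent types; the names and the simplified guard conditions are assumptions for illustration only:

class StatusDiffSketch {
  // Mirrors the branch structure of computeDiffs above, with guards simplified:
  // a target status marked removed yields a soft-delete event; a previously
  // removed entity whose new status is not removed yields a reinstatement
  // event; otherwise no lifecycle event is emitted.
  static String lifecycleOperation(boolean baseRemoved, boolean targetRemoved) {
    if (targetRemoved) {
      return "SOFT_DELETE";
    }
    if (baseRemoved) {
      return "REINSTATE";
    }
    return "NO_CHANGE";
  }
}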
@@ -42,7 +45,8 @@ private List<ChangeEvent> computeDiffs(Status baseStatus, Status targetStatus, S .category(ChangeCategory.LIFECYCLE) .operation(ChangeOperation.REINSTATE) .auditStamp(auditStamp) - .entityUrn(entityUrn).build()); + .entityUrn(entityUrn) + .build()); } return Collections.emptyList(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index 3e8f83a531b59..eec7680a56ecb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeseries.elastic; +import static com.linkedin.metadata.Constants.*; + import com.codahale.metrics.Timer; import com.datahub.util.RecordUtils; import com.datahub.util.exception.ESQueryException; @@ -35,7 +37,6 @@ import com.linkedin.timeseries.DeleteAspectValuesResult; import com.linkedin.timeseries.GenericTable; import com.linkedin.timeseries.GroupingBucket; - import com.linkedin.timeseries.TimeseriesIndexSizeResult; import com.linkedin.util.Pair; import java.io.IOException; @@ -70,16 +71,21 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.Constants.*; - - @Slf4j -public class ElasticSearchTimeseriesAspectService implements TimeseriesAspectService, ElasticSearchIndexed { +public class ElasticSearchTimeseriesAspectService + implements TimeseriesAspectService, ElasticSearchIndexed { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final String TIMESTAMP_FIELD = "timestampMillis"; private static final String EVENT_FIELD = "event"; private static final Integer DEFAULT_LIMIT = 10000; @@ -92,9 +98,13 @@ public class ElasticSearchTimeseriesAspectService implements TimeseriesAspectSer private final ESAggregatedStatsDAO _esAggregatedStatsDAO; private final EntityRegistry _entityRegistry; - public ElasticSearchTimeseriesAspectService(@Nonnull RestHighLevelClient searchClient, - @Nonnull IndexConvention indexConvention, @Nonnull TimeseriesAspectIndexBuilders indexBuilders, - @Nonnull EntityRegistry entityRegistry, @Nonnull ESBulkProcessor bulkProcessor, int numRetries) { + public ElasticSearchTimeseriesAspectService( + @Nonnull RestHighLevelClient searchClient, + @Nonnull IndexConvention indexConvention, + @Nonnull TimeseriesAspectIndexBuilders indexBuilders, + @Nonnull EntityRegistry entityRegistry, + @Nonnull ESBulkProcessor bulkProcessor, + int numRetries) { _indexConvention = indexConvention; _indexBuilders = indexBuilders; _searchClient = searchClient; @@ -111,10 +121,14 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { Object event = docFields.get(EVENT_FIELD); GenericAspect genericAspect; try { 
- genericAspect = new GenericAspect().setValue( - ByteString.unsafeWrap(OBJECT_MAPPER.writeValueAsString(event).getBytes(StandardCharsets.UTF_8))); + genericAspect = + new GenericAspect() + .setValue( + ByteString.unsafeWrap( + OBJECT_MAPPER.writeValueAsString(event).getBytes(StandardCharsets.UTF_8))); } catch (JsonProcessingException e) { - throw new RuntimeException("Failed to deserialize event from the timeseries aspect index: " + e); + throw new RuntimeException( + "Failed to deserialize event from the timeseries aspect index: " + e); } genericAspect.setContentType("application/json"); envelopedAspect.setAspect(genericAspect); @@ -122,9 +136,11 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { if (systemMetadata != null) { try { envelopedAspect.setSystemMetadata( - RecordUtils.toRecordTemplate(SystemMetadata.class, OBJECT_MAPPER.writeValueAsString(systemMetadata))); + RecordUtils.toRecordTemplate( + SystemMetadata.class, OBJECT_MAPPER.writeValueAsString(systemMetadata))); } catch (JsonProcessingException e) { - throw new RuntimeException("Failed to deserialize system metadata from the timeseries aspect index: " + e); + throw new RuntimeException( + "Failed to deserialize system metadata from the timeseries aspect index: " + e); } } @@ -141,7 +157,8 @@ public List<ReindexConfig> buildReindexConfigs() { return _indexBuilders.buildReindexConfigs(); } - public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) + public String reindexAsync( + String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) throws Exception { return _indexBuilders.reindexAsync(index, filterQuery, options); } @@ -152,11 +169,14 @@ public void reindexAll() { } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull String docId, + public void upsertDocument( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull String docId, @Nonnull JsonNode document) { String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final UpdateRequest updateRequest = new UpdateRequest( - indexName, docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .docAsUpsert(true) .doc(document.toString(), XContentType.JSON) @@ -169,21 +189,29 @@ public List<TimeseriesIndexSizeResult> getIndexSizes() { List<TimeseriesIndexSizeResult> res = new ArrayList<>(); try { String indicesPattern = _indexConvention.getAllTimeseriesAspectIndicesPattern(); - Response r = _searchClient.getLowLevelClient().performRequest(new Request("GET", "/" + indicesPattern + "/_stats")); + Response r = + _searchClient + .getLowLevelClient() + .performRequest(new Request("GET", "/" + indicesPattern + "/_stats")); JsonNode body = new ObjectMapper().readTree(r.getEntity().getContent()); - body.get("indices").fields().forEachRemaining(entry -> { - TimeseriesIndexSizeResult elemResult = new TimeseriesIndexSizeResult(); - elemResult.setIndexName(entry.getKey()); - Optional<Pair<String, String>> indexEntityAndAspect = _indexConvention.getEntityAndAspectName(entry.getKey()); - if (indexEntityAndAspect.isPresent()) { - elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); - elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); - } - int sizeBytes = entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); - float sizeMb = (float) sizeBytes / 1000; - elemResult.setSizeMb(sizeMb); - 
res.add(elemResult); - }); + body.get("indices") + .fields() + .forEachRemaining( + entry -> { + TimeseriesIndexSizeResult elemResult = new TimeseriesIndexSizeResult(); + elemResult.setIndexName(entry.getKey()); + Optional<Pair<String, String>> indexEntityAndAspect = + _indexConvention.getEntityAndAspectName(entry.getKey()); + if (indexEntityAndAspect.isPresent()) { + elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); + elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); + } + int sizeBytes = + entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); + float sizeMb = (float) sizeBytes / 1000; + elemResult.setSizeMb(sizeMb); + res.add(elemResult); + }); return res; } catch (IOException e) { throw new RuntimeException(e); @@ -194,10 +222,10 @@ public List<TimeseriesIndexSizeResult> getIndexSizes() { public long countByFilter( @Nonnull final String entityName, @Nonnull final String aspectName, - @Nullable final Filter filter - ) { + @Nullable final Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final BoolQueryBuilder filterQueryBuilder = QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + final BoolQueryBuilder filterQueryBuilder = + QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); CountRequest countRequest = new CountRequest(); countRequest.query(filterQueryBuilder); countRequest.indices(indexName); @@ -220,20 +248,25 @@ public List<EnvelopedAspect> getAspectValues( @Nullable final Integer limit, @Nullable final Filter filter, @Nullable final SortCriterion sort) { - final BoolQueryBuilder filterQueryBuilder = QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + final BoolQueryBuilder filterQueryBuilder = + QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); filterQueryBuilder.must(QueryBuilders.matchQuery("urn", urn.toString())); // NOTE: We are interested only in the un-exploded rows as only they carry the `event` payload. filterQueryBuilder.mustNot(QueryBuilders.termQuery(MappingsBuilder.IS_EXPLODED_FIELD, true)); if (startTimeMillis != null) { - Criterion startTimeCriterion = new Criterion().setField(TIMESTAMP_FIELD) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(startTimeMillis.toString()); + Criterion startTimeCriterion = + new Criterion() + .setField(TIMESTAMP_FIELD) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(startTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); } if (endTimeMillis != null) { - Criterion endTimeCriterion = new Criterion().setField(TIMESTAMP_FIELD) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(endTimeMillis.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(TIMESTAMP_FIELD) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(endTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); } final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -242,7 +275,8 @@ public List<EnvelopedAspect> getAspectValues( if (sort != null) { final SortOrder esSortOrder = - (sort.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) ? SortOrder.ASC + (sort.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) + ? 
SortOrder.ASC : SortOrder.DESC; searchSourceBuilder.sort(SortBuilders.fieldSort(sort.getField()).order(esSortOrder)); } else {
@@ -258,8 +292,10 @@ public List<EnvelopedAspect> getAspectValues( log.debug("Search request is: " + searchRequest); SearchHits hits; - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "searchAspectValues_search").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "searchAspectValues_search").time()) { + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); hits = searchResponse.getHits(); } catch (Exception e) { log.error("Search query failed:", e);
@@ -272,17 +308,23 @@ public List<EnvelopedAspect> getAspectValues( @Override @Nonnull - public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter, + public GenericTable getAggregatedStats( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull AggregationSpec[] aggregationSpecs, + @Nullable Filter filter, @Nullable GroupingBucket[] groupingBuckets) { - return _esAggregatedStatsDAO.getAggregatedStats(entityName, aspectName, aggregationSpecs, filter, groupingBuckets); + return _esAggregatedStatsDAO.getAggregatedStats( + entityName, aspectName, aggregationSpecs, filter, groupingBuckets); } /** - * A generic delete by filter API which uses elasticsearch's deleteByQuery. - * NOTE: There is no need for the client to explicitly walk each scroll page with this approach. Elastic will synchronously - * delete all of the documents matching the query that is specified by the filter, and internally handles the batching logic - * by the scroll page size specified(i.e. the DEFAULT_LIMIT value of 10,000). + * A generic delete by filter API which uses Elasticsearch's deleteByQuery. NOTE: There is no need + * for the client to explicitly walk each scroll page with this approach. Elasticsearch will + * synchronously delete all of the documents matching the query that is specified by the filter, + * and internally handles the batching logic by the scroll page size specified (i.e. the + * DEFAULT_LIMIT value of 10,000). + * * @param entityName the name of the entity. * @param aspectName the name of the aspect. * @param filter the filter to be used for deletion of the documents on the index.
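To make the deleteByQuery contract described in that javadoc concrete, here is a hedged, self-contained sketch of the same pattern against a single index, using the stock OpenSearch high-level REST client directly rather than the ESBulkProcessor wrapper this class goes through; the index name parameter and the example batch size are assumptions for illustration:

import java.io.IOException;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.RestHighLevelClient;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.reindex.BulkByScrollResponse;
import org.opensearch.index.reindex.DeleteByQueryRequest;

class DeleteByQuerySketch {
  // Deletes every document in one timeseries index whose "urn" field matches,
  // letting the server walk the scroll pages internally, as the javadoc above
  // describes. The batch size of 10000 mirrors the DEFAULT_LIMIT noted there.
  static long deleteByUrn(RestHighLevelClient client, String indexName, String urn)
      throws IOException {
    DeleteByQueryRequest request = new DeleteByQueryRequest(indexName);
    request.setQuery(QueryBuilders.termQuery("urn", urn));
    request.setBatchSize(10000);
    BulkByScrollResponse response = client.deleteByQuery(request, RequestOptions.DEFAULT);
    return response.getDeleted();
  }
}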
@@ -290,14 +332,18 @@ public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull Stri */ @Nonnull @Override - public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull Filter filter) { + public DeleteAspectValuesResult deleteAspectValues( + @Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); - final Optional<DeleteAspectValuesResult> result = _bulkProcessor - .deleteByQuery(filterQueryBuilder, false, DEFAULT_LIMIT, TimeValue.timeValueMinutes(10), indexName) - .map(response -> new DeleteAspectValuesResult().setNumDocsDeleted(response.getDeleted())); + final Optional<DeleteAspectValuesResult> result = + _bulkProcessor + .deleteByQuery( + filterQueryBuilder, false, DEFAULT_LIMIT, TimeValue.timeValueMinutes(10), indexName) + .map( + response -> + new DeleteAspectValuesResult().setNumDocsDeleted(response.getDeleted())); if (result.isPresent()) { return result.get(); @@ -309,14 +355,20 @@ public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @ @Nonnull @Override - public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String deleteAspectValuesAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); final int batchSize = options.getBatchSize() > 0 ? options.getBatchSize() : DEFAULT_LIMIT; - TimeValue timeout = options.getTimeoutSeconds() > 0 ? TimeValue.timeValueSeconds(options.getTimeoutSeconds()) : null; - final Optional<TaskSubmissionResponse> result = _bulkProcessor - .deleteByQueryAsync(filterQueryBuilder, false, batchSize, timeout, indexName); + TimeValue timeout = + options.getTimeoutSeconds() > 0 + ? 
TimeValue.timeValueSeconds(options.getTimeoutSeconds()) + : null; + final Optional<TaskSubmissionResponse> result = + _bulkProcessor.deleteByQueryAsync(filterQueryBuilder, false, batchSize, timeout, indexName); if (result.isPresent()) { return result.get().getTask(); @@ -327,7 +379,10 @@ public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull Strin } @Override - public String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String reindexAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); @@ -350,10 +405,16 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId) for (Map.Entry<String, EntitySpec> entry : _entityRegistry.getEntitySpecs().entrySet()) { for (AspectSpec aspectSpec : entry.getValue().getAspectSpecs()) { if (aspectSpec.isTimeseries()) { - DeleteAspectValuesResult result = this.deleteAspectValues(entry.getKey(), aspectSpec.getName(), filter); - rollbackResult.setNumDocsDeleted(rollbackResult.getNumDocsDeleted() + result.getNumDocsDeleted()); - log.info("Number of timeseries docs deleted for entity:{}, aspect:{}, runId:{}={}", entry.getKey(), - aspectSpec.getName(), runId, result.getNumDocsDeleted()); + DeleteAspectValuesResult result = + this.deleteAspectValues(entry.getKey(), aspectSpec.getName(), filter); + rollbackResult.setNumDocsDeleted( + rollbackResult.getNumDocsDeleted() + result.getNumDocsDeleted()); + log.info( + "Number of timeseries docs deleted for entity:{}, aspect:{}, runId:{}={}", + entry.getKey(), + aspectSpec.getName(), + runId, + result.getNumDocsDeleted()); } } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java index 37a5dc304cf7f..5bb523c8a8c1e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java @@ -8,7 +8,6 @@ import java.util.Map; import javax.annotation.Nonnull; - public class MappingsBuilder { public static final String URN_FIELD = "urn"; @@ -24,13 +23,14 @@ public class MappingsBuilder { public static final String PARTITION_SPEC_TIME_PARTITION = "timePartition"; public static final String RUN_ID_FIELD = "runId"; - private MappingsBuilder() { - } + private MappingsBuilder() {} public static Map<String, Object> getMappings(@Nonnull final AspectSpec aspectSpec) { if (!aspectSpec.isTimeseries()) { throw new IllegalArgumentException( - String.format("Cannot apply timeseries field indexing for a non-timeseries aspect %s", aspectSpec.getName())); + String.format( + "Cannot apply timeseries field indexing for a non-timeseries aspect %s", + aspectSpec.getName())); } Map<String, Object> mappings = new HashMap<>(); @@ -41,16 +41,24 @@ public static Map<String, Object> getMappings(@Nonnull final AspectSpec aspectSp mappings.put(TIMESTAMP_FIELD, ImmutableMap.of("type", "date")); mappings.put(TIMESTAMP_MILLIS_FIELD, ImmutableMap.of("type", "date")); mappings.put(EVENT_GRANULARITY, ImmutableMap.of("type", "keyword")); - mappings.put(PARTITION_SPEC, 
ImmutableMap.of("properties", - ImmutableMap.of(PARTITION_SPEC_PARTITION, ImmutableMap.of("type", "keyword"), PARTITION_SPEC_TIME_PARTITION, - ImmutableMap.of("type", "keyword")))); + mappings.put( + PARTITION_SPEC, + ImmutableMap.of( + "properties", + ImmutableMap.of( + PARTITION_SPEC_PARTITION, + ImmutableMap.of("type", "keyword"), + PARTITION_SPEC_TIME_PARTITION, + ImmutableMap.of("type", "keyword")))); mappings.put(EVENT_FIELD, ImmutableMap.of("type", "object", "enabled", false)); mappings.put(SYSTEM_METADATA_FIELD, ImmutableMap.of("type", "object", "enabled", false)); mappings.put(IS_EXPLODED_FIELD, ImmutableMap.of("type", "boolean")); - aspectSpec.getTimeseriesFieldSpecs() + aspectSpec + .getTimeseriesFieldSpecs() .forEach(x -> mappings.put(x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); - aspectSpec.getTimeseriesFieldCollectionSpecs() + aspectSpec + .getTimeseriesFieldCollectionSpecs() .forEach(x -> mappings.put(x.getName(), getTimeseriesFieldCollectionSpecMapping(x))); return ImmutableMap.of("properties", mappings); @@ -59,11 +67,16 @@ public static Map<String, Object> getMappings(@Nonnull final AspectSpec aspectSp private static Map<String, Object> getTimeseriesFieldCollectionSpecMapping( TimeseriesFieldCollectionSpec timeseriesFieldCollectionSpec) { Map<String, Object> collectionMappings = new HashMap<>(); - collectionMappings.put(timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey(), + collectionMappings.put( + timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey(), getFieldMapping(DataSchema.Type.STRING)); - timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap() + timeseriesFieldCollectionSpec + .getTimeseriesFieldSpecMap() .values() - .forEach(x -> collectionMappings.put(x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); + .forEach( + x -> + collectionMappings.put( + x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); return ImmutableMap.of("properties", collectionMappings); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java index e9518ed8c39fa..564bcb2a242cb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java @@ -7,19 +7,17 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.util.Pair; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; - -import com.linkedin.util.Pair; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.QueryBuilder; - @Slf4j @RequiredArgsConstructor public class TimeseriesAspectIndexBuilders implements ElasticSearchIndexed { @@ -38,7 +36,8 @@ public void reindexAll() { } } - public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) + public String reindexAsync( + String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) throws Exception { 
Optional<Pair<String, String>> entityAndAspect = _indexConvention.getEntityAndAspectName(index); if (entityAndAspect.isEmpty()) { @@ -54,31 +53,42 @@ public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, Bat } } if (!entitySpec.hasAspect(aspectName)) { - throw new IllegalArgumentException(String.format("Could not find aspect %s of entity %s", aspectName, entityName)); + throw new IllegalArgumentException( + String.format("Could not find aspect %s of entity %s", aspectName, entityName)); } - ReindexConfig config = _indexBuilder.buildReindexState(index, - MappingsBuilder.getMappings(_entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName)), - Collections.emptyMap()); + ReindexConfig config = + _indexBuilder.buildReindexState( + index, + MappingsBuilder.getMappings( + _entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName)), + Collections.emptyMap()); return _indexBuilder.reindexInPlaceAsync(index, filterQuery, options, config); } @Override public List<ReindexConfig> buildReindexConfigs() { return _entityRegistry.getEntitySpecs().values().stream() - .flatMap(entitySpec -> entitySpec.getAspectSpecs().stream() + .flatMap( + entitySpec -> + entitySpec.getAspectSpecs().stream() .map(aspectSpec -> Pair.of(entitySpec, aspectSpec))) - .filter(pair -> pair.getSecond().isTimeseries()) - .map(pair -> { + .filter(pair -> pair.getSecond().isTimeseries()) + .map( + pair -> { try { return _indexBuilder.buildReindexState( - _indexConvention.getTimeseriesAspectIndexName(pair.getFirst().getName(), pair.getSecond().getName()), - MappingsBuilder.getMappings(pair.getSecond()), Collections.emptyMap()); + _indexConvention.getTimeseriesAspectIndexName( + pair.getFirst().getName(), pair.getSecond().getName()), + MappingsBuilder.getMappings(pair.getSecond()), + Collections.emptyMap()); } catch (IOException e) { - log.error("Issue while building timeseries field index for entity {} aspect {}", pair.getFirst().getName(), - pair.getSecond().getName()); + log.error( + "Issue while building timeseries field index for entity {} aspect {}", + pair.getFirst().getName(), + pair.getSecond().getName()); throw new RuntimeException(e); } - }).collect(Collectors.toList()); + }) + .collect(Collectors.toList()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java index 316d25d1f37f4..539e5dfbaa1d0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java @@ -49,7 +49,6 @@ import org.opensearch.search.aggregations.pipeline.ParsedBucketMetricValue; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j public class ESAggregatedStatsDAO { private static final String ES_AGGREGATION_PREFIX = "agg_"; @@ -66,7 +65,9 @@ public class ESAggregatedStatsDAO { private final RestHighLevelClient _searchClient; private final EntityRegistry _entityRegistry; - public ESAggregatedStatsDAO(@Nonnull IndexConvention indexConvention, @Nonnull RestHighLevelClient searchClient, + public ESAggregatedStatsDAO( + @Nonnull IndexConvention indexConvention, + @Nonnull RestHighLevelClient searchClient, @Nonnull EntityRegistry entityRegistry) { _indexConvention = indexConvention; _searchClient = searchClient; @@ -94,7 +95,8 @@ private static String getAggregationSpecAggDisplayName(final 
AggregationSpec agg prefix = "cardinality_"; break; default: - throw new IllegalArgumentException("Unknown AggregationSpec type" + aggregationSpec.getAggregationType()); + throw new IllegalArgumentException( + "Unknown AggregationSpec type" + aggregationSpec.getAggregationType()); } return prefix + aggregationSpec.getFieldPath(); } @@ -103,12 +105,19 @@ private static String getGroupingBucketAggName(final GroupingBucket groupingBuck if (groupingBucket.getType() == GroupingBucketType.DATE_GROUPING_BUCKET) { return toEsAggName(ES_AGGREGATION_PREFIX + groupingBucket.getKey()); } - return toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + groupingBucket.getKey()); + return toEsAggName( + ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + groupingBucket.getKey()); } - private static void rowGenHelper(final Aggregations lowestAggs, final int curLevel, final int lastLevel, - final List<StringArray> rows, final Stack<String> row, final ImmutableList<GroupingBucket> groupingBuckets, - final ImmutableList<AggregationSpec> aggregationSpecs, AspectSpec aspectSpec) { + private static void rowGenHelper( + final Aggregations lowestAggs, + final int curLevel, + final int lastLevel, + final List<StringArray> rows, + final Stack<String> row, + final ImmutableList<GroupingBucket> groupingBuckets, + final ImmutableList<AggregationSpec> aggregationSpecs, + AspectSpec aspectSpec) { if (curLevel == lastLevel) { // (Base-case): We are at the lowest level of nested bucket aggregations. // Append member aggregation values to the row and add the row to the output. @@ -123,7 +132,7 @@ private static void rowGenHelper(final Aggregations lowestAggs, final int curLev row.pop(); } } else if (curLevel < lastLevel) { - //(Recursive-case): We are still processing the nested group-by multi-bucket aggregations. + // (Recursive-case): We are still processing the nested group-by multi-bucket aggregations. // For each bucket, add the key to the row and recur down for full row construction. GroupingBucket curGroupingBucket = groupingBuckets.get(curLevel); String curGroupingBucketAggName = getGroupingBucketAggName(curGroupingBucket); @@ -136,7 +145,14 @@ private static void rowGenHelper(final Aggregations lowestAggs, final int curLev row.push(b.getKeyAsString()); } // Recur down - rowGenHelper(b.getAggregations(), curLevel + 1, lastLevel, rows, row, groupingBuckets, aggregationSpecs, + rowGenHelper( + b.getAggregations(), + curLevel + 1, + lastLevel, + rows, + row, + groupingBuckets, + aggregationSpecs, aspectSpec); // Remove the row value we have added for this level. row.pop(); @@ -179,11 +195,12 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str if (fieldPath.equals(MappingsBuilder.EVENT_GRANULARITY)) { return DataSchema.Type.RECORD; } - + String[] memberParts = fieldPath.split("\\."); if (memberParts.length == 1) { // Search in the timeseriesFieldSpecs. 
- TimeseriesFieldSpec timeseriesFieldSpec = aspectSpec.getTimeseriesFieldSpecMap().get(memberParts[0]); + TimeseriesFieldSpec timeseriesFieldSpec = + aspectSpec.getTimeseriesFieldSpecMap().get(memberParts[0]); if (timeseriesFieldSpec != null) { return timeseriesFieldSpec.getPegasusSchema().getType(); } @@ -196,8 +213,8 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str } else if (memberParts.length == 2) { // Check if partitionSpec if (memberParts[0].equals(MappingsBuilder.PARTITION_SPEC)) { - if (memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_PARTITION) || memberParts[1].equals( - MappingsBuilder.PARTITION_SPEC_TIME_PARTITION)) { + if (memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_PARTITION) + || memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION)) { return DataSchema.Type.STRING; } else { throw new IllegalArgumentException("Unknown partitionSpec member" + memberParts[1]); @@ -208,44 +225,53 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str TimeseriesFieldCollectionSpec timeseriesFieldCollectionSpec = aspectSpec.getTimeseriesFieldCollectionSpecMap().get(memberParts[0]); if (timeseriesFieldCollectionSpec != null) { - if (timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey().equals(memberParts[1])) { + if (timeseriesFieldCollectionSpec + .getTimeseriesFieldCollectionAnnotation() + .getKey() + .equals(memberParts[1])) { // Matched against the collection stat key. return DataSchema.Type.STRING; } - TimeseriesFieldSpec tsFieldSpec = timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap().get(memberParts[1]); + TimeseriesFieldSpec tsFieldSpec = + timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap().get(memberParts[1]); if (tsFieldSpec != null) { // Matched against a collection stat field. 
return tsFieldSpec.getPegasusSchema().getType(); } } } - throw new IllegalArgumentException("Unknown TimeseriesField or TimeseriesFieldCollection: " + fieldPath); + throw new IllegalArgumentException( + "Unknown TimeseriesField or TimeseriesFieldCollection: " + fieldPath); } - private static DataSchema.Type getGroupingBucketKeyType(@Nonnull AspectSpec aspectSpec, - @Nonnull GroupingBucket groupingBucket) { + private static DataSchema.Type getGroupingBucketKeyType( + @Nonnull AspectSpec aspectSpec, @Nonnull GroupingBucket groupingBucket) { return getTimeseriesFieldType(aspectSpec, groupingBucket.getKey()); } - private static DataSchema.Type getAggregationSpecMemberType(@Nonnull AspectSpec aspectSpec, - @Nonnull AggregationSpec aggregationSpec) { + private static DataSchema.Type getAggregationSpecMemberType( + @Nonnull AspectSpec aspectSpec, @Nonnull AggregationSpec aggregationSpec) { return getTimeseriesFieldType(aspectSpec, aggregationSpec.getFieldPath()); } - private static List<String> genColumnNames(GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) { - List<String> groupingBucketNames = Arrays.stream(groupingBuckets).map(t -> t.getKey()).collect(Collectors.toList()); + private static List<String> genColumnNames( + GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) { + List<String> groupingBucketNames = + Arrays.stream(groupingBuckets).map(t -> t.getKey()).collect(Collectors.toList()); - List<String> aggregationNames = Arrays.stream(aggregationSpecs) - .map(ESAggregatedStatsDAO::getAggregationSpecAggDisplayName) - .collect(Collectors.toList()); + List<String> aggregationNames = + Arrays.stream(aggregationSpecs) + .map(ESAggregatedStatsDAO::getAggregationSpecAggDisplayName) + .collect(Collectors.toList()); List<String> columnNames = - Stream.concat(groupingBucketNames.stream(), aggregationNames.stream()).collect(Collectors.toList()); + Stream.concat(groupingBucketNames.stream(), aggregationNames.stream()) + .collect(Collectors.toList()); return columnNames; } - private static List<String> genColumnTypes(AspectSpec aspectSpec, GroupingBucket[] groupingBuckets, - AggregationSpec[] aggregationSpecs) { + private static List<String> genColumnTypes( + AspectSpec aspectSpec, GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) { List<String> columnTypes = new ArrayList<>(); for (GroupingBucket g : groupingBuckets) { DataSchema.Type type = getGroupingBucketKeyType(aspectSpec, g); @@ -282,14 +308,17 @@ private static List<String> genColumnTypes(AspectSpec aspectSpec, GroupingBucket break; default: throw new IllegalArgumentException( - "Type generation not yet supported for aggregation type: " + aggregationSpec.getAggregationType()); + "Type generation not yet supported for aggregation type: " + + aggregationSpec.getAggregationType()); } } return columnTypes; } - private static String extractAggregationValue(@Nonnull final Aggregations aggregations, - @Nonnull final AspectSpec aspectSpec, @Nonnull final AggregationSpec aggregationSpec) { + private static String extractAggregationValue( + @Nonnull final Aggregations aggregations, + @Nonnull final AspectSpec aspectSpec, + @Nonnull final AggregationSpec aggregationSpec) { String memberAggName = getAggregationSpecAggESName(aggregationSpec); Object memberAgg = aggregations.get(memberAggName); DataSchema.Type memberType = getAggregationSpecMemberType(aspectSpec, aggregationSpec); @@ -309,36 +338,42 @@ private static String extractAggregationValue(@Nonnull final Aggregations aggreg case FLOAT: return 
String.valueOf(((ParsedSum) memberAgg).getValue()); default: - throw new IllegalArgumentException("Unexpected type encountered for sum aggregation: " + memberType); + throw new IllegalArgumentException( + "Unexpected type encountered for sum aggregation: " + memberType); } } else if (memberAgg instanceof ParsedCardinality) { // This will always be a long value as string. return String.valueOf(((ParsedCardinality) memberAgg).getValue()); } else { - throw new UnsupportedOperationException("Member aggregations other than latest and sum not supported yet."); + throw new UnsupportedOperationException( + "Member aggregations other than latest and sum not supported yet."); } return defaultValue; } - private AspectSpec getTimeseriesAspectSpec(@Nonnull String entityName, @Nonnull String aspectName) { + private AspectSpec getTimeseriesAspectSpec( + @Nonnull String entityName, @Nonnull String aspectName) { EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - new IllegalArgumentException(String.format("Unrecognized aspect name {} for entity {}", aspectName, entityName)); + new IllegalArgumentException( + String.format("Unrecognized aspect name {} for entity {}", aspectName, entityName)); } else if (!aspectSpec.isTimeseries()) { new IllegalArgumentException( - String.format("aspect name {} for entity {} is not a timeseries aspect", aspectName, entityName)); + String.format( + "aspect name {} for entity {} is not a timeseries aspect", aspectName, entityName)); } return aspectSpec; } - /** - * Get the aggregated metrics for the given dataset or column from a time series aspect. - */ + /** Get the aggregated metrics for the given dataset or column from a time series aspect. */ @Nonnull - public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter, + public GenericTable getAggregatedStats( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull AggregationSpec[] aggregationSpecs, + @Nullable Filter filter, @Nullable GroupingBucket[] groupingBuckets) { // Setup the filter query builder using the input filter provided. @@ -371,51 +406,62 @@ public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull Stri log.debug("Search request is: " + searchRequest); try { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); - return generateResponseFromElastic(searchResponse, groupingBuckets, aggregationSpecs, aspectSpec); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); + return generateResponseFromElastic( + searchResponse, groupingBuckets, aggregationSpecs, aspectSpec); } catch (Exception e) { log.error("Search query failed: " + e.getMessage()); throw new ESQueryException("Search query failed:", e); } } - private void addAggregationBuildersFromAggregationSpec(AspectSpec aspectSpec, AggregationBuilder baseAggregation, - AggregationSpec aggregationSpec) { + private void addAggregationBuildersFromAggregationSpec( + AspectSpec aspectSpec, AggregationBuilder baseAggregation, AggregationSpec aggregationSpec) { String fieldPath = aggregationSpec.getFieldPath(); String esFieldName = fieldPath; switch (aggregationSpec.getAggregationType()) { case LATEST: // Construct the terms aggregation with a max timestamp sub-aggregation. 
- String termsAggName = toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + fieldPath); - AggregationBuilder termsAgg = AggregationBuilders.terms(termsAggName) - .field(esFieldName) - .size(MAX_TERM_BUCKETS) - .subAggregation(AggregationBuilders.max(ES_AGG_MAX_TIMESTAMP).field(ES_FIELD_TIMESTAMP)); + String termsAggName = + toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + fieldPath); + AggregationBuilder termsAgg = + AggregationBuilders.terms(termsAggName) + .field(esFieldName) + .size(MAX_TERM_BUCKETS) + .subAggregation( + AggregationBuilders.max(ES_AGG_MAX_TIMESTAMP).field(ES_FIELD_TIMESTAMP)); baseAggregation.subAggregation(termsAgg); // Construct the max_bucket pipeline aggregation MaxBucketPipelineAggregationBuilder maxBucketPipelineAgg = - PipelineAggregatorBuilders.maxBucket(getAggregationSpecAggESName(aggregationSpec), + PipelineAggregatorBuilders.maxBucket( + getAggregationSpecAggESName(aggregationSpec), termsAggName + ">" + ES_AGG_MAX_TIMESTAMP); baseAggregation.subAggregation(maxBucketPipelineAgg); break; case SUM: AggregationBuilder sumAgg = - AggregationBuilders.sum(getAggregationSpecAggESName(aggregationSpec)).field(esFieldName); + AggregationBuilders.sum(getAggregationSpecAggESName(aggregationSpec)) + .field(esFieldName); baseAggregation.subAggregation(sumAgg); break; case CARDINALITY: AggregationBuilder cardinalityAgg = - AggregationBuilders.cardinality(getAggregationSpecAggESName(aggregationSpec)).field(esFieldName); + AggregationBuilders.cardinality(getAggregationSpecAggESName(aggregationSpec)) + .field(esFieldName); baseAggregation.subAggregation(cardinalityAgg); break; default: - throw new IllegalStateException("Unexpected value: " + aggregationSpec.getAggregationType()); + throw new IllegalStateException( + "Unexpected value: " + aggregationSpec.getAggregationType()); } } - private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuilder(AspectSpec aspectSpec, - @Nullable AggregationBuilder baseAggregationBuilder, @Nullable GroupingBucket[] groupingBuckets) { + private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuilder( + AspectSpec aspectSpec, + @Nullable AggregationBuilder baseAggregationBuilder, + @Nullable GroupingBucket[] groupingBuckets) { AggregationBuilder firstAggregationBuilder = baseAggregationBuilder; AggregationBuilder lastAggregationBuilder = baseAggregationBuilder; @@ -427,18 +473,20 @@ private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuil if (!curGroupingBucket.getKey().equals(ES_FIELD_TIMESTAMP)) { throw new IllegalArgumentException("Date Grouping bucket is not:" + ES_FIELD_TIMESTAMP); } - curAggregationBuilder = AggregationBuilders.dateHistogram(ES_AGG_TIMESTAMP) - .field(ES_FIELD_TIMESTAMP) - .calendarInterval(getHistogramInterval(curGroupingBucket.getTimeWindowSize())); + curAggregationBuilder = + AggregationBuilders.dateHistogram(ES_AGG_TIMESTAMP) + .field(ES_FIELD_TIMESTAMP) + .calendarInterval(getHistogramInterval(curGroupingBucket.getTimeWindowSize())); } else if (curGroupingBucket.getType() == GroupingBucketType.STRING_GROUPING_BUCKET) { // Process the string grouping bucket using the 'terms' aggregation. // The field can be Keyword, Numeric, ip, boolean, or binary. 
String fieldName = ESUtils.toKeywordField(curGroupingBucket.getKey(), true); DataSchema.Type fieldType = getGroupingBucketKeyType(aspectSpec, curGroupingBucket); - curAggregationBuilder = AggregationBuilders.terms(getGroupingBucketAggName(curGroupingBucket)) - .field(fieldName) - .size(MAX_TERM_BUCKETS) - .order(BucketOrder.aggregation("_key", true)); + curAggregationBuilder = + AggregationBuilders.terms(getGroupingBucketAggName(curGroupingBucket)) + .field(fieldName) + .size(MAX_TERM_BUCKETS) + .order(BucketOrder.aggregation("_key", true)); } if (firstAggregationBuilder == null) { firstAggregationBuilder = curAggregationBuilder; @@ -453,8 +501,11 @@ private Pair<AggregationBuilder, AggregationBuilder> makeGroupingAggregationBuil return Pair.of(firstAggregationBuilder, lastAggregationBuilder); } - private GenericTable generateResponseFromElastic(SearchResponse searchResponse, GroupingBucket[] groupingBuckets, - AggregationSpec[] aggregationSpecs, AspectSpec aspectSpec) { + private GenericTable generateResponseFromElastic( + SearchResponse searchResponse, + GroupingBucket[] groupingBuckets, + AggregationSpec[] aggregationSpecs, + AspectSpec aspectSpec) { GenericTable resultTable = new GenericTable(); // 1. Generate the column names. @@ -470,8 +521,15 @@ private GenericTable generateResponseFromElastic(SearchResponse searchResponse, Aggregations aggregations = searchResponse.getAggregations(); Stack<String> rowAcc = new Stack<>(); - rowGenHelper(aggregations, 0, groupingBuckets.length, rows, rowAcc, - ImmutableList.copyOf(groupingBuckets), ImmutableList.copyOf(aggregationSpecs), aspectSpec); + rowGenHelper( + aggregations, + 0, + groupingBuckets.length, + rows, + rowAcc, + ImmutableList.copyOf(groupingBuckets), + ImmutableList.copyOf(aggregationSpecs), + aspectSpec); if (!rowAcc.isEmpty()) { throw new IllegalStateException("Expected stack to be empty."); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java index 4e41603ca1411..c0f66acaaca5a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeseries.transformer; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -31,25 +33,28 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.codec.digest.DigestUtils; -import static com.linkedin.metadata.Constants.*; - - -/** - * Class that provides a utility function that transforms the timeseries aspect into a document - */ +/** Class that provides a utility function that transforms the timeseries aspect into a document */ @Slf4j public class TimeseriesAspectTransformer { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + 
.getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } - private TimeseriesAspectTransformer() { - } + private TimeseriesAspectTransformer() {} - public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull final RecordTemplate timeseriesAspect, - @Nonnull final AspectSpec aspectSpec, @Nullable final SystemMetadata systemMetadata) + public static Map<String, JsonNode> transform( + @Nonnull final Urn urn, + @Nonnull final RecordTemplate timeseriesAspect, + @Nonnull final AspectSpec aspectSpec, + @Nullable final SystemMetadata systemMetadata) throws JsonProcessingException { ObjectNode commonDocument = getCommonDocument(urn, timeseriesAspect, systemMetadata); Map<String, JsonNode> finalDocuments = new HashMap<>(); @@ -58,9 +63,12 @@ public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull f ObjectNode document = JsonNodeFactory.instance.objectNode(); document.setAll(commonDocument); document.set(MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(false)); - document.set(MappingsBuilder.EVENT_FIELD, OBJECT_MAPPER.readTree(RecordUtils.toJsonString(timeseriesAspect))); + document.set( + MappingsBuilder.EVENT_FIELD, + OBJECT_MAPPER.readTree(RecordUtils.toJsonString(timeseriesAspect))); if (systemMetadata != null) { - document.set(MappingsBuilder.SYSTEM_METADATA_FIELD, + document.set( + MappingsBuilder.SYSTEM_METADATA_FIELD, OBJECT_MAPPER.readTree(RecordUtils.toJsonString(systemMetadata))); } final Map<TimeseriesFieldSpec, List<Object>> timeseriesFieldValueMap = @@ -70,22 +78,30 @@ public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull f // Create new rows for the member collection fields. final Map<TimeseriesFieldCollectionSpec, List<Object>> timeseriesFieldCollectionValueMap = - FieldExtractor.extractFields(timeseriesAspect, aspectSpec.getTimeseriesFieldCollectionSpecs()); + FieldExtractor.extractFields( + timeseriesAspect, aspectSpec.getTimeseriesFieldCollectionSpecs()); timeseriesFieldCollectionValueMap.forEach( - (key, values) -> finalDocuments.putAll(getTimeseriesFieldCollectionDocuments(key, values, commonDocument))); + (key, values) -> + finalDocuments.putAll( + getTimeseriesFieldCollectionDocuments(key, values, commonDocument))); return finalDocuments; } - private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final RecordTemplate timeseriesAspect, + private static ObjectNode getCommonDocument( + @Nonnull final Urn urn, + final RecordTemplate timeseriesAspect, @Nullable final SystemMetadata systemMetadata) { if (!timeseriesAspect.data().containsKey(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)) { - throw new IllegalArgumentException("Input timeseries aspect does not contain a timestampMillis field"); + throw new IllegalArgumentException( + "Input timeseries aspect does not contain a timestampMillis field"); } ObjectNode document = JsonNodeFactory.instance.objectNode(); document.put(MappingsBuilder.URN_FIELD, urn.toString()); - document.put(MappingsBuilder.TIMESTAMP_FIELD, + document.put( + MappingsBuilder.TIMESTAMP_FIELD, (Long) timeseriesAspect.data().get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)); - document.put(MappingsBuilder.TIMESTAMP_MILLIS_FIELD, + document.put( + MappingsBuilder.TIMESTAMP_MILLIS_FIELD, (Long) timeseriesAspect.data().get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)); if (systemMetadata != null && systemMetadata.getRunId() != null) { // We need this as part of the common document for rollback support. 
@@ -94,7 +110,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record Object eventGranularity = timeseriesAspect.data().get(MappingsBuilder.EVENT_GRANULARITY); if (eventGranularity != null) { try { - document.put(MappingsBuilder.EVENT_GRANULARITY, OBJECT_MAPPER.writeValueAsString(eventGranularity)); + document.put( + MappingsBuilder.EVENT_GRANULARITY, OBJECT_MAPPER.writeValueAsString(eventGranularity)); } catch (JsonProcessingException e) { throw new IllegalArgumentException("Failed to convert eventGranularity to Json string!", e); } @@ -105,7 +122,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record Object partition = partitionSpec.get(MappingsBuilder.PARTITION_SPEC_PARTITION); Object timePartition = partitionSpec.get(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION); if (partition != null && timePartition != null) { - throw new IllegalArgumentException("Both partition and timePartition cannot be specified in partitionSpec!"); + throw new IllegalArgumentException( + "Both partition and timePartition cannot be specified in partitionSpec!"); } else if (partition != null) { ObjectNode partitionDoc = JsonNodeFactory.instance.objectNode(); partitionDoc.put(MappingsBuilder.PARTITION_SPEC_PARTITION, partition.toString()); @@ -113,14 +131,16 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record } else if (timePartition != null) { ObjectNode timePartitionDoc = JsonNodeFactory.instance.objectNode(); try { - timePartitionDoc.put(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION, + timePartitionDoc.put( + MappingsBuilder.PARTITION_SPEC_TIME_PARTITION, OBJECT_MAPPER.writeValueAsString(timePartition)); } catch (JsonProcessingException e) { throw new IllegalArgumentException("Failed to convert timePartition to Json string!", e); } document.set(MappingsBuilder.PARTITION_SPEC, timePartitionDoc); } else { - throw new IllegalArgumentException("Both partition and timePartition cannot be null in partitionSpec."); + throw new IllegalArgumentException( + "Both partition and timePartition cannot be null in partitionSpec."); } } String messageId = (String) timeseriesAspect.data().get(MappingsBuilder.MESSAGE_ID_FIELD); @@ -131,8 +151,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record return document; } - private static void setTimeseriesField(final ObjectNode document, final TimeseriesFieldSpec fieldSpec, - List<Object> valueList) { + private static void setTimeseriesField( + final ObjectNode document, final TimeseriesFieldSpec fieldSpec, List<Object> valueList) { if (valueList.size() == 0) { return; } @@ -154,21 +174,26 @@ private static void setTimeseriesField(final ObjectNode document, final Timeseri case ARRAY: ArrayDataSchema dataSchema = (ArrayDataSchema) fieldSpec.getPegasusSchema(); if (valueList.get(0) instanceof List<?>) { - // This is the hack for non-stat-collection array fields. They will end up getting oddly serialized to a string otherwise. + // This is the hack for non-stat-collection array fields. They will end up getting oddly + // serialized to a string otherwise.
valueList = (List<Object>) valueList.get(0); } ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(valueList.size()); - valueList.stream().map(x -> { - if (dataSchema.getItems().getType() == DataSchema.Type.RECORD) { - try { - return OBJECT_MAPPER.writeValueAsString(x); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Failed to convert collection element to Json string!", e); - } - } else { - return x.toString(); - } - }).forEach(arrayNode::add); + valueList.stream() + .map( + x -> { + if (dataSchema.getItems().getType() == DataSchema.Type.RECORD) { + try { + return OBJECT_MAPPER.writeValueAsString(x); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException( + "Failed to convert collection element to Json string!", e); + } + } else { + return x.toString(); + } + }) + .forEach(arrayNode::add); valueNode = JsonNodeFactory.instance.textNode(arrayNode.toString()); break; case RECORD: @@ -189,15 +214,21 @@ private static void setTimeseriesField(final ObjectNode document, final Timeseri } private static Map<String, JsonNode> getTimeseriesFieldCollectionDocuments( - final TimeseriesFieldCollectionSpec fieldSpec, final List<Object> values, final ObjectNode commonDocument) { + final TimeseriesFieldCollectionSpec fieldSpec, + final List<Object> values, + final ObjectNode commonDocument) { return values.stream() .map(value -> getTimeseriesFieldCollectionDocument(fieldSpec, value, commonDocument)) .collect( - Collectors.toMap(keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst()), Pair::getSecond)); + Collectors.toMap( + keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst()), + Pair::getSecond)); } private static Pair<String, ObjectNode> getTimeseriesFieldCollectionDocument( - final TimeseriesFieldCollectionSpec fieldSpec, final Object value, final ObjectNode timeseriesInfoDocument) { + final TimeseriesFieldCollectionSpec fieldSpec, + final Object value, + final ObjectNode timeseriesInfoDocument) { ObjectNode finalDocument = JsonNodeFactory.instance.objectNode(); finalDocument.setAll(timeseriesInfoDocument); RecordTemplate collectionComponent = (RecordTemplate) value; @@ -205,18 +236,24 @@ private static Pair<String, ObjectNode> getTimeseriesFieldCollectionDocument( Optional<Object> key = RecordUtils.getFieldValue(collectionComponent, fieldSpec.getKeyPath()); if (!key.isPresent()) { throw new IllegalArgumentException( - String.format("Key %s for timeseries collection field %s is missing", fieldSpec.getKeyPath(), - fieldSpec.getName())); + String.format( + "Key %s for timeseries collection field %s is missing", + fieldSpec.getKeyPath(), fieldSpec.getName())); } - componentDocument.set(fieldSpec.getTimeseriesFieldCollectionAnnotation().getKey(), + componentDocument.set( + fieldSpec.getTimeseriesFieldCollectionAnnotation().getKey(), JsonNodeFactory.instance.textNode(key.get().toString())); - Map<TimeseriesFieldSpec, List<Object>> statFields = FieldExtractor.extractFields(collectionComponent, - new ArrayList<>(fieldSpec.getTimeseriesFieldSpecMap().values())); + Map<TimeseriesFieldSpec, List<Object>> statFields = + FieldExtractor.extractFields( + collectionComponent, new ArrayList<>(fieldSpec.getTimeseriesFieldSpecMap().values())); statFields.forEach((k, v) -> setTimeseriesField(componentDocument, k, v)); finalDocument.set(fieldSpec.getName(), componentDocument); - finalDocument.set(MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(true)); - // Return the pair of component key and the document. 
We use the key later to build the unique docId. - return new Pair<>(fieldSpec.getTimeseriesFieldCollectionAnnotation().getCollectionName() + key.get(), + finalDocument.set( + MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(true)); + // Return the pair of component key and the document. We use the key later to build the unique + // docId. + return new Pair<>( + fieldSpec.getTimeseriesFieldCollectionAnnotation().getCollectionName() + key.get(), finalDocument); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java b/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java index a482cdeb659ca..7914d82748fa5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java @@ -6,7 +6,6 @@ import javax.annotation.Nonnull; import lombok.Value; - @Value public class GitVersion { String version; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java index e9183738c786c..9a64e397c9184 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java @@ -15,11 +15,9 @@ import com.linkedin.mxe.SystemMetadata; import javax.annotation.Nonnull; - public class AspectGenerationUtils { - private AspectGenerationUtils() { - } + private AspectGenerationUtils() {} @Nonnull public static AuditStamp createAuditStamp() { @@ -37,7 +35,8 @@ public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull St } @Nonnull - public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull String runId, @Nonnull String lastRunId) { + public static SystemMetadata createSystemMetadata( + long lastObserved, @Nonnull String runId, @Nonnull String lastRunId) { SystemMetadata metadata = new SystemMetadata(); metadata.setLastObserved(lastObserved); metadata.setRunId(runId); @@ -47,7 +46,8 @@ public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull St @Nonnull public static CorpUserKey createCorpUserKey(Urn urn) { - return (CorpUserKey) EntityKeyUtils.convertUrnToEntityKeyInternal(urn, new CorpUserKey().schema()); + return (CorpUserKey) + EntityKeyUtils.convertUrnToEntityKeyInternal(urn, new CorpUserKey().schema()); } @Nonnull diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java index e95378a616d97..2113e5a04f3a2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java @@ -14,19 +14,19 @@ import java.util.Map; import javax.annotation.Nonnull; - public class AspectIngestionUtils { - private AspectIngestionUtils() { - } + private AspectIngestionUtils() {} @Nonnull - public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects(EntityService entityService, int aspectCount) { + public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects( + EntityService entityService, int aspectCount) { return ingestCorpUserKeyAspects(entityService, aspectCount, 0); } @Nonnull - public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects(EntityService entityService, int aspectCount, int startIndex) { + public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects( + EntityService entityService, int aspectCount, 
int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserKey()); Map<Urn, CorpUserKey> aspects = new HashMap<>(); List<UpsertBatchItem> items = new LinkedList<>(); @@ -34,24 +34,31 @@ public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects(EntityService entit Urn urn = UrnUtils.getUrn(String.format("urn:li:corpuser:tester%d", i)); CorpUserKey aspect = AspectGenerationUtils.createCorpUserKey(urn); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() - .urn(urn) - .aspectName(aspectName) - .aspect(aspect) - .systemMetadata(AspectGenerationUtils.createSystemMetadata()) + items.add( + UpsertBatchItem.builder() + .urn(urn) + .aspectName(aspectName) + .aspect(aspect) + .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } @Nonnull - public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects(@Nonnull final EntityService entityService, int aspectCount) { + public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects( + @Nonnull final EntityService entityService, int aspectCount) { return ingestCorpUserInfoAspects(entityService, aspectCount, 0); } @Nonnull - public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { + public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects( + @Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); Map<Urn, CorpUserInfo> aspects = new HashMap<>(); List<UpsertBatchItem> items = new LinkedList<>(); @@ -60,24 +67,31 @@ public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects(@Nonnull final En String email = String.format("email%d@test.com", i); CorpUserInfo aspect = AspectGenerationUtils.createCorpUserInfo(email); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(aspect) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } @Nonnull - public static Map<Urn, ChartInfo> ingestChartInfoAspects(@Nonnull final EntityService entityService, int aspectCount) { + public static Map<Urn, ChartInfo> ingestChartInfoAspects( + @Nonnull final EntityService entityService, int aspectCount) { return ingestChartInfoAspects(entityService, aspectCount, 0); } @Nonnull - public static Map<Urn, ChartInfo> ingestChartInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { + public static Map<Urn, ChartInfo> ingestChartInfoAspects( + @Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new ChartInfo()); Map<Urn, ChartInfo> aspects = new HashMap<>(); List<UpsertBatchItem> items = new LinkedList<>(); @@ -87,14 +101,19 @@ public static Map<Urn, ChartInfo> 
ingestChartInfoAspects(@Nonnull final EntitySe String description = String.format("Test description %d", i); ChartInfo aspect = AspectGenerationUtils.createChartInfo(title, description); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(aspect) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java index 54fb2bc8b1f65..258b40cac6371 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java @@ -1,11 +1,13 @@ package com.linkedin.metadata; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; + import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.dataset.DatasetProperties; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.TestEntityRegistry; @@ -23,19 +25,16 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class AspectUtilsTest { protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); protected final EntityRegistry _configEntityRegistry = - new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); protected final EntityRegistry _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); - public AspectUtilsTest() throws EntityRegistryException { - } + public AspectUtilsTest() throws EntityRegistryException {} @Test public void testAdditionalChanges() { @@ -45,18 +44,21 @@ public void testAdditionalChanges() { EventProducer mockProducer = mock(EventProducer.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - EntityServiceImpl entityServiceImpl = new EntityServiceImpl(aspectDao, mockProducer, _testEntityRegistry, true, - null, preProcessHooks); + EntityServiceImpl entityServiceImpl = + new EntityServiceImpl( + aspectDao, mockProducer, _testEntityRegistry, true, null, preProcessHooks); MetadataChangeProposal proposal1 = new MetadataChangeProposal(); - proposal1.setEntityUrn(new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)); + proposal1.setEntityUrn( + new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)); proposal1.setAspectName("datasetProperties"); DatasetProperties datasetProperties = new DatasetProperties().setName("name"); proposal1.setAspect(GenericRecordUtils.serializeAspect(datasetProperties)); proposal1.setEntityType("dataset"); proposal1.setChangeType(ChangeType.PATCH); - List<MetadataChangeProposal> 
proposalList = AspectUtils.getAdditionalChanges(proposal1, entityServiceImpl); + List<MetadataChangeProposal> proposalList = + AspectUtils.getAdditionalChanges(proposal1, entityServiceImpl); // proposals for key aspect, browsePath, browsePathV2, dataPlatformInstance Assert.assertEquals(proposalList.size(), 4); Assert.assertEquals(proposalList.get(0).getChangeType(), ChangeType.UPSERT); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java index 5645573917f00..70b7f587f0c19 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java @@ -1,16 +1,13 @@ package com.linkedin.metadata; +import static org.testng.Assert.assertEquals; + import com.datastax.driver.core.KeyspaceMetadata; import com.datastax.driver.core.Row; import com.datastax.driver.core.Session; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.linkedin.metadata.entity.cassandra.CassandraAspect; -import org.testcontainers.containers.CassandraContainer; -import org.testcontainers.utility.DockerImageName; - -import javax.annotation.Nonnull; -import javax.net.ssl.SSLContext; import java.net.InetSocketAddress; import java.time.Duration; import java.util.Arrays; @@ -18,32 +15,37 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; - -import static org.testng.Assert.assertEquals; +import javax.annotation.Nonnull; +import javax.net.ssl.SSLContext; +import org.testcontainers.containers.CassandraContainer; +import org.testcontainers.utility.DockerImageName; public class CassandraTestUtils { - private CassandraTestUtils() { - } + private CassandraTestUtils() {} private static final String KEYSPACE_NAME = "test"; private static final String IMAGE_NAME = "cassandra:3.11"; public static CassandraContainer setupContainer() { - final DockerImageName imageName = DockerImageName - .parse(IMAGE_NAME) - .asCompatibleSubstituteFor("cassandra"); + final DockerImageName imageName = + DockerImageName.parse(IMAGE_NAME).asCompatibleSubstituteFor("cassandra"); CassandraContainer container = new CassandraContainer(imageName); - container.withEnv("JVM_OPTS", "-Xms64M -Xmx96M") - .withStartupTimeout(Duration.ofMinutes(5)) // usually < 1min - .start(); + container + .withEnv("JVM_OPTS", "-Xms64M -Xmx96M") + .withStartupTimeout(Duration.ofMinutes(5)) // usually < 1min + .start(); try (Session session = container.getCluster().connect()) { - session.execute(String.format("CREATE KEYSPACE IF NOT EXISTS %s WITH replication = \n" - + "{'class':'SimpleStrategy','replication_factor':'1'};", KEYSPACE_NAME)); session.execute( - String.format("create table %s.%s (urn varchar, \n" + String.format( + "CREATE KEYSPACE IF NOT EXISTS %s WITH replication = \n" + + "{'class':'SimpleStrategy','replication_factor':'1'};", + KEYSPACE_NAME)); + session.execute( + String.format( + "create table %s.%s (urn varchar, \n" + "aspect varchar, \n" + "systemmetadata varchar, \n" + "version bigint, \n" @@ -54,14 +56,13 @@ public static CassandraContainer setupContainer() { + "entity varchar, \n" + "primary key ((urn), aspect, version)) \n" + "with clustering order by (aspect asc, version asc);", - KEYSPACE_NAME, - CassandraAspect.TABLE_NAME)); + KEYSPACE_NAME, CassandraAspect.TABLE_NAME)); List<KeyspaceMetadata> keyspaces = session.getCluster().getMetadata().getKeyspaces(); - 
List<KeyspaceMetadata> filteredKeyspaces = keyspaces - .stream() - .filter(km -> km.getName().equals(KEYSPACE_NAME)) - .collect(Collectors.toList()); + List<KeyspaceMetadata> filteredKeyspaces = + keyspaces.stream() + .filter(km -> km.getName().equals(KEYSPACE_NAME)) + .collect(Collectors.toList()); assertEquals(filteredKeyspaces.size(), 1); } @@ -72,20 +73,22 @@ public static CassandraContainer setupContainer() { public static CqlSession createTestSession(@Nonnull final CassandraContainer container) { Map<String, String> sessionConfig = createTestServerConfig(container); int port = Integer.parseInt(sessionConfig.get("port")); - List<InetSocketAddress> addresses = Arrays.stream(sessionConfig.get("hosts").split(",")) - .map(host -> new InetSocketAddress(host, port)) - .collect(Collectors.toList()); + List<InetSocketAddress> addresses = + Arrays.stream(sessionConfig.get("hosts").split(",")) + .map(host -> new InetSocketAddress(host, port)) + .collect(Collectors.toList()); String dc = sessionConfig.get("datacenter"); String ks = sessionConfig.get("keyspace"); String username = sessionConfig.get("username"); String password = sessionConfig.get("password"); - CqlSessionBuilder csb = CqlSession.builder() - .addContactPoints(addresses) - .withLocalDatacenter(dc) - .withKeyspace(ks) - .withAuthCredentials(username, password); + CqlSessionBuilder csb = + CqlSession.builder() + .addContactPoints(addresses) + .withLocalDatacenter(dc) + .withKeyspace(ks) + .withAuthCredentials(username, password); if (sessionConfig.containsKey("useSsl") && sessionConfig.get("useSsl").equals("true")) { try { @@ -99,22 +102,29 @@ public static CqlSession createTestSession(@Nonnull final CassandraContainer con } @Nonnull - private static Map<String, String> createTestServerConfig(@Nonnull final CassandraContainer container) { - return new HashMap<String, String>() {{ - put("keyspace", KEYSPACE_NAME); - put("username", container.getUsername()); - put("password", container.getPassword()); - put("hosts", container.getHost()); - put("port", container.getMappedPort(9042).toString()); - put("datacenter", "datacenter1"); - put("useSsl", "false"); - }}; + private static Map<String, String> createTestServerConfig( + @Nonnull final CassandraContainer container) { + return new HashMap<String, String>() { + { + put("keyspace", KEYSPACE_NAME); + put("username", container.getUsername()); + put("password", container.getPassword()); + put("hosts", container.getHost()); + put("port", container.getMappedPort(9042).toString()); + put("datacenter", "datacenter1"); + put("useSsl", "false"); + } + }; } public static void purgeData(CassandraContainer container) { try (Session session = container.getCluster().connect()) { session.execute(String.format("TRUNCATE %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)); - List<Row> rs = session.execute(String.format("SELECT * FROM %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)).all(); + List<Row> rs = + session + .execute( + String.format("SELECT * FROM %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)) + .all(); assertEquals(rs.size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java index 364ccd86d45fd..4ab421dab79dc 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java @@ -4,18 +4,20 @@ public class DockerTestUtils { - final private static int MIN_MEMORY_NEEDED_GB = 7; + private 
static final int MIN_MEMORY_NEEDED_GB = 7; - public static void checkContainerEngine(DockerClient dockerClient) { - final long dockerEngineMemoryBytes = dockerClient.infoCmd().exec().getMemTotal(); - final long dockerEngineMemoryGB = dockerEngineMemoryBytes / 1000 / 1000 / 1000; - if (dockerEngineMemoryGB < MIN_MEMORY_NEEDED_GB) { - final String error = String.format("Total Docker memory configured: %s GB (%d bytes) is below the minimum threshold " - + "of %d GB", dockerEngineMemoryGB, dockerEngineMemoryBytes, MIN_MEMORY_NEEDED_GB); - throw new IllegalStateException(error); - } + public static void checkContainerEngine(DockerClient dockerClient) { + final long dockerEngineMemoryBytes = dockerClient.infoCmd().exec().getMemTotal(); + final long dockerEngineMemoryGB = dockerEngineMemoryBytes / 1000 / 1000 / 1000; + if (dockerEngineMemoryGB < MIN_MEMORY_NEEDED_GB) { + final String error = + String.format( + "Total Docker memory configured: %s GB (%d bytes) is below the minimum threshold " + + "of %d GB", + dockerEngineMemoryGB, dockerEngineMemoryBytes, MIN_MEMORY_NEEDED_GB); + throw new IllegalStateException(error); } + } - private DockerTestUtils() { - } + private DockerTestUtils() {} } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java index c6eefede8a860..ed5c882ace23e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java @@ -4,13 +4,11 @@ import io.ebean.DatabaseFactory; import io.ebean.config.DatabaseConfig; import io.ebean.datasource.DataSourceConfig; - import javax.annotation.Nonnull; public class EbeanTestUtils { - private EbeanTestUtils() { - } + private EbeanTestUtils() {} @Nonnull public static Database createTestServer(String instanceId) { @@ -22,7 +20,8 @@ private static DatabaseConfig createTestingH2ServerConfig(String instanceId) { DataSourceConfig dataSourceConfig = new DataSourceConfig(); dataSourceConfig.setUsername("tester"); dataSourceConfig.setPassword(""); - dataSourceConfig.setUrl(String.format("jdbc:h2:mem:%s;IGNORECASE=TRUE;mode=mysql;", instanceId)); + dataSourceConfig.setUrl( + String.format("jdbc:h2:mem:%s;IGNORECASE=TRUE;mode=mysql;", instanceId)); dataSourceConfig.setDriver("org.h2.Driver"); DatabaseConfig serverConfig = new DatabaseConfig(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java b/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java index 57c14608a7881..fd218add2a945 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java @@ -4,11 +4,9 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EntitySpecBuilder; - public class TestEntitySpecBuilder { - private TestEntitySpecBuilder() { - } + private TestEntitySpecBuilder() {} public static EntitySpec getSpec() { return new EntitySpecBuilder().buildEntitySpec(new TestEntitySnapshot().schema()); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java b/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java index 4b1b8c89b030b..ea4bb69d942f9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java @@ -1,7 +1,5 @@ package com.linkedin.metadata; -import 
com.datahub.test.TestBrowsePaths; -import com.datahub.test.TestBrowsePathsV2; import com.datahub.test.BrowsePathEntry; import com.datahub.test.BrowsePathEntryArray; import com.datahub.test.KeyPartEnum; @@ -9,6 +7,8 @@ import com.datahub.test.SimpleNestedRecord1; import com.datahub.test.SimpleNestedRecord2; import com.datahub.test.SimpleNestedRecord2Array; +import com.datahub.test.TestBrowsePaths; +import com.datahub.test.TestBrowsePathsV2; import com.datahub.test.TestEntityAspect; import com.datahub.test.TestEntityAspectArray; import com.datahub.test.TestEntityInfo; @@ -21,10 +21,8 @@ import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; - public class TestEntityUtil { - private TestEntityUtil() { - } + private TestEntityUtil() {} public static Urn getTestEntityUrn() { return new TestEntityUrn("key", "urn", "VALUE_1"); @@ -38,15 +36,39 @@ public static TestEntityInfo getTestEntityInfo(Urn urn) { TestEntityInfo testEntityInfo = new TestEntityInfo(); testEntityInfo.setTextField("test"); testEntityInfo.setTextArrayField(new StringArray(ImmutableList.of("testArray1", "testArray2"))); - testEntityInfo.setNestedRecordField(new SimpleNestedRecord1().setNestedIntegerField(1).setNestedForeignKey(urn)); - testEntityInfo.setNestedRecordArrayField(new SimpleNestedRecord2Array( - ImmutableList.of(new SimpleNestedRecord2().setNestedArrayStringField("nestedArray1"), - new SimpleNestedRecord2().setNestedArrayStringField("nestedArray2") - .setNestedArrayArrayField(new StringArray(ImmutableList.of("testNestedArray1", "testNestedArray2")))))); - testEntityInfo.setCustomProperties(new StringMap(ImmutableMap.of("key1", "value1", "key2", "value2", - "shortValue", "123", "longValue", "0123456789"))); - testEntityInfo.setEsObjectField(new StringMap(ImmutableMap.of("key1", "value1", "key2", "value2", - "shortValue", "123", "longValue", "0123456789"))); + testEntityInfo.setNestedRecordField( + new SimpleNestedRecord1().setNestedIntegerField(1).setNestedForeignKey(urn)); + testEntityInfo.setNestedRecordArrayField( + new SimpleNestedRecord2Array( + ImmutableList.of( + new SimpleNestedRecord2().setNestedArrayStringField("nestedArray1"), + new SimpleNestedRecord2() + .setNestedArrayStringField("nestedArray2") + .setNestedArrayArrayField( + new StringArray( + ImmutableList.of("testNestedArray1", "testNestedArray2")))))); + testEntityInfo.setCustomProperties( + new StringMap( + ImmutableMap.of( + "key1", + "value1", + "key2", + "value2", + "shortValue", + "123", + "longValue", + "0123456789"))); + testEntityInfo.setEsObjectField( + new StringMap( + ImmutableMap.of( + "key1", + "value1", + "key2", + "value2", + "shortValue", + "123", + "longValue", + "0123456789"))); return testEntityInfo; } @@ -55,7 +77,8 @@ public static TestEntitySnapshot getSnapshot() { Urn urn = getTestEntityUrn(); snapshot.setUrn(urn); - TestBrowsePaths browsePaths = new TestBrowsePaths().setPaths(new StringArray(ImmutableList.of("/a/b/c", "d/e/f"))); + TestBrowsePaths browsePaths = + new TestBrowsePaths().setPaths(new StringArray(ImmutableList.of("/a/b/c", "d/e/f"))); BrowsePathEntryArray browsePathV2Entries = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("levelOne"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("levelTwo"); @@ -64,10 +87,14 @@ public static TestEntitySnapshot getSnapshot() { TestBrowsePathsV2 browsePathsV2 = new TestBrowsePathsV2().setPath(browsePathV2Entries); SearchFeatures searchFeatures = new SearchFeatures().setFeature1(2).setFeature2(1); - 
TestEntityAspectArray aspects = new TestEntityAspectArray( - ImmutableList.of(TestEntityAspect.create(getTestEntityKey(urn)), - TestEntityAspect.create(getTestEntityInfo(urn)), TestEntityAspect.create(browsePaths), - TestEntityAspect.create(searchFeatures), TestEntityAspect.create(browsePathsV2))); + TestEntityAspectArray aspects = + new TestEntityAspectArray( + ImmutableList.of( + TestEntityAspect.create(getTestEntityKey(urn)), + TestEntityAspect.create(getTestEntityInfo(urn)), + TestEntityAspect.create(browsePaths), + TestEntityAspect.create(searchFeatures), + TestEntityAspect.create(browsePathsV2))); snapshot.setAspects(aspects); return snapshot; } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java index f52bc26b5c538..fba11f24f4c44 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.client; +import static org.mockito.Mockito.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.codahale.metrics.Counter; import com.linkedin.data.template.RequiredFieldNotPresentException; import com.linkedin.entity.client.RestliEntityClient; @@ -12,124 +16,115 @@ import com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.function.Supplier; import org.mockito.MockedStatic; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.util.function.Supplier; - -import static org.mockito.Mockito.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; - - public class JavaEntityClientTest { - private EntityService _entityService; - private DeleteEntityService _deleteEntityService; - private EntitySearchService _entitySearchService; - private CachingEntitySearchService _cachingEntitySearchService; - private SearchService _searchService; - private LineageSearchService _lineageSearchService; - private TimeseriesAspectService _timeseriesAspectService; - private EventProducer _eventProducer; - private RestliEntityClient _restliEntityClient; - private MockedStatic<MetricUtils> _metricUtils; - private Counter _counter; - - - @BeforeMethod - public void setupTest() { - _entityService = mock(EntityService.class); - _deleteEntityService = mock(DeleteEntityService.class); - _entitySearchService = mock(EntitySearchService.class); - _cachingEntitySearchService = mock(CachingEntitySearchService.class); - _searchService = mock(SearchService.class); - _lineageSearchService = mock(LineageSearchService.class); - _timeseriesAspectService = mock(TimeseriesAspectService.class); - _eventProducer = mock(EventProducer.class); - _restliEntityClient = mock(RestliEntityClient.class); - _metricUtils = mockStatic(MetricUtils.class); - _counter = mock(Counter.class); - when(MetricUtils.counter(any(), any())).thenReturn(_counter); - } - - @AfterMethod - public void closeTest() { - _metricUtils.close(); - } - - private JavaEntityClient getJavaEntityClient() { - return new JavaEntityClient( - _entityService, - _deleteEntityService, - _entitySearchService, - _cachingEntitySearchService, - _searchService, - _lineageSearchService, - 
_timeseriesAspectService, - _eventProducer, - _restliEntityClient); - } - - @Test - void testSuccessWithNoRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - - when(mockSupplier.get()).thenReturn(42); - - assertEquals(client.withRetry(mockSupplier, null), 42); - verify(mockSupplier, times(1)).get(); - _metricUtils.verify(() -> MetricUtils.counter(any(), any()), times(0)); - } - - @Test - void testSuccessAfterMultipleRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - Exception e = new IllegalArgumentException(); - - when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenReturn(42); - - assertEquals(client.withRetry(mockSupplier, "test"), 42); - verify(mockSupplier, times(4)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), - times(3) - ); - } - - @Test - void testThrowAfterMultipleRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - Exception e = new IllegalArgumentException(); - - when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenThrow(e); - - assertThrows(IllegalArgumentException.class, () -> client.withRetry(mockSupplier, "test")); - verify(mockSupplier, times(4)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), - times(4) - ); - } - - @Test - void testThrowAfterNonRetryableException() { - JavaEntityClient client = getJavaEntityClient(); - Supplier<Object> mockSupplier = mock(Supplier.class); - Exception e = new RequiredFieldNotPresentException("test"); - - when(mockSupplier.get()).thenThrow(e); - - assertThrows(RequiredFieldNotPresentException.class, () -> client.withRetry(mockSupplier, null)); - verify(mockSupplier, times(1)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "exception_" + e.getClass().getName()), - times(1) - ); - } -} \ No newline at end of file + private EntityService _entityService; + private DeleteEntityService _deleteEntityService; + private EntitySearchService _entitySearchService; + private CachingEntitySearchService _cachingEntitySearchService; + private SearchService _searchService; + private LineageSearchService _lineageSearchService; + private TimeseriesAspectService _timeseriesAspectService; + private EventProducer _eventProducer; + private RestliEntityClient _restliEntityClient; + private MockedStatic<MetricUtils> _metricUtils; + private Counter _counter; + + @BeforeMethod + public void setupTest() { + _entityService = mock(EntityService.class); + _deleteEntityService = mock(DeleteEntityService.class); + _entitySearchService = mock(EntitySearchService.class); + _cachingEntitySearchService = mock(CachingEntitySearchService.class); + _searchService = mock(SearchService.class); + _lineageSearchService = mock(LineageSearchService.class); + _timeseriesAspectService = mock(TimeseriesAspectService.class); + _eventProducer = mock(EventProducer.class); + _restliEntityClient = mock(RestliEntityClient.class); + _metricUtils = mockStatic(MetricUtils.class); + _counter = mock(Counter.class); + when(MetricUtils.counter(any(), any())).thenReturn(_counter); + } + + @AfterMethod + public void closeTest() { + _metricUtils.close(); + } + + private JavaEntityClient getJavaEntityClient() { + return new JavaEntityClient( + _entityService, + _deleteEntityService, + 
_entitySearchService, + _cachingEntitySearchService, + _searchService, + _lineageSearchService, + _timeseriesAspectService, + _eventProducer, + _restliEntityClient); + } + + @Test + void testSuccessWithNoRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + + when(mockSupplier.get()).thenReturn(42); + + assertEquals(client.withRetry(mockSupplier, null), 42); + verify(mockSupplier, times(1)).get(); + _metricUtils.verify(() -> MetricUtils.counter(any(), any()), times(0)); + } + + @Test + void testSuccessAfterMultipleRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + Exception e = new IllegalArgumentException(); + + when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenReturn(42); + + assertEquals(client.withRetry(mockSupplier, "test"), 42); + verify(mockSupplier, times(4)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), + times(3)); + } + + @Test + void testThrowAfterMultipleRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + Exception e = new IllegalArgumentException(); + + when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenThrow(e); + + assertThrows(IllegalArgumentException.class, () -> client.withRetry(mockSupplier, "test")); + verify(mockSupplier, times(4)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), + times(4)); + } + + @Test + void testThrowAfterNonRetryableException() { + JavaEntityClient client = getJavaEntityClient(); + Supplier<Object> mockSupplier = mock(Supplier.class); + Exception e = new RequiredFieldNotPresentException("test"); + + when(mockSupplier.get()).thenThrow(e); + + assertThrows( + RequiredFieldNotPresentException.class, () -> client.withRetry(mockSupplier, null)); + verify(mockSupplier, times(1)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "exception_" + e.getClass().getName()), + times(1)); + } +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java index 10a73cbe532a2..e13c2d9fd1005 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java @@ -1,39 +1,39 @@ package com.linkedin.metadata.elasticsearch.update; -import com.linkedin.metadata.search.elasticsearch.update.BulkListener; -import org.opensearch.action.bulk.BulkRequest; -import org.opensearch.action.support.WriteRequest; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.ArgumentMatchers.any; -import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; + +import com.linkedin.metadata.search.elasticsearch.update.BulkListener; +import org.mockito.Mockito; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.support.WriteRequest; +import org.testng.annotations.Test; public class 
BulkListenerTest { - @Test - public void testConstructor() { - BulkListener test = BulkListener.getInstance(); - assertNotNull(test); - assertEquals(test, BulkListener.getInstance()); - assertNotEquals(test, BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE)); - } + @Test + public void testConstructor() { + BulkListener test = BulkListener.getInstance(); + assertNotNull(test); + assertEquals(test, BulkListener.getInstance()); + assertNotEquals(test, BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE)); + } - @Test - public void testDefaultPolicy() { - BulkListener test = BulkListener.getInstance(); + @Test + public void testDefaultPolicy() { + BulkListener test = BulkListener.getInstance(); - BulkRequest mockRequest1 = Mockito.mock(BulkRequest.class); - test.beforeBulk(0L, mockRequest1); - verify(mockRequest1, times(0)).setRefreshPolicy(any(WriteRequest.RefreshPolicy.class)); + BulkRequest mockRequest1 = Mockito.mock(BulkRequest.class); + test.beforeBulk(0L, mockRequest1); + verify(mockRequest1, times(0)).setRefreshPolicy(any(WriteRequest.RefreshPolicy.class)); - BulkRequest mockRequest2 = Mockito.mock(BulkRequest.class); - test = BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE); - test.beforeBulk(0L, mockRequest2); - verify(mockRequest2, times(1)).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - } + BulkRequest mockRequest2 = Mockito.mock(BulkRequest.class); + test = BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE); + test.beforeBulk(0L, mockRequest2); + verify(mockRequest2, times(1)).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java index 2d84c9f3444de..94e57b80d8113 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java @@ -1,18 +1,18 @@ package com.linkedin.metadata.elasticsearch.update; +import static org.testng.Assert.assertNotNull; + import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; -import org.opensearch.client.RestHighLevelClient; import org.mockito.Mockito; +import org.opensearch.client.RestHighLevelClient; import org.testng.annotations.Test; -import static org.testng.Assert.assertNotNull; - public class ESBulkProcessorTest { - @Test - public void testESBulkProcessorBuilder() { - RestHighLevelClient mock = Mockito.mock(RestHighLevelClient.class); - ESBulkProcessor test = ESBulkProcessor.builder(mock).build(); - assertNotNull(test); - } + @Test + public void testESBulkProcessorBuilder() { + RestHighLevelClient mock = Mockito.mock(RestHighLevelClient.class); + ESBulkProcessor test = ESBulkProcessor.builder(mock).build(); + assertNotNull(test); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java index 6a331647583d2..6d464d9cd9a10 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import 
com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.AspectIngestionUtils; @@ -18,11 +21,7 @@ import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - -abstract public class AspectMigrationsDaoTest<T extends AspectMigrationsDao> { +public abstract class AspectMigrationsDaoTest<T extends AspectMigrationsDao> { protected T _migrationsDao; @@ -37,8 +36,11 @@ abstract public class AspectMigrationsDaoTest<T extends AspectMigrationsDao> { protected AspectMigrationsDaoTest() throws EntityRegistryException { _snapshotEntityRegistry = new TestEntityRegistry(); - _configEntityRegistry = new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); + _configEntityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + _testEntityRegistry = + new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); } @Test @@ -46,8 +48,10 @@ public void testListAllUrns() throws AssertionError { final int totalAspects = 30; final int pageSize = 25; final int lastPageSize = 5; - Map<Urn, CorpUserKey> ingestedAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); - List<String> ingestedUrns = ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); + Map<Urn, CorpUserKey> ingestedAspects = + AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); + List<String> ingestedUrns = + ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); List<String> seenUrns = new ArrayList<>(); Iterable<String> page1 = _migrationsDao.listAllUrns(0, pageSize); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java index 70161fe640707..d94de604bf44d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -15,15 +17,11 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class CassandraAspectMigrationsDaoTest extends AspectMigrationsDaoTest<CassandraAspectDao> { private CassandraContainer _cassandraContainer; - public CassandraAspectMigrationsDaoTest() throws EntityRegistryException { - } + public CassandraAspectMigrationsDaoTest() throws EntityRegistryException {} @BeforeClass public void setupContainer() { @@ -49,8 +47,14 @@ private void configureComponents() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); 
preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(dao, _mockProducer, _testEntityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + dao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); @@ -59,8 +63,8 @@ private void configureComponents() { /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws AssertionError { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java index 50e562b76c4e6..74c81ff2e8602 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java @@ -1,6 +1,8 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; @@ -8,6 +10,7 @@ import com.linkedin.metadata.AspectGenerationUtils; import com.linkedin.metadata.AspectIngestionUtils; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -28,22 +31,20 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - /** - * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a Cassandra database. + * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a + * Cassandra database. * - * This class also contains all the test methods where realities of an underlying storage leak into the - * {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be - * great to address captured differences. + * <p>This class also contains all the test methods where realities of an underlying storage leak + * into the {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that + * should never happen, and it'd be great to address captured differences. 
*/ -public class CassandraEntityServiceTest extends EntityServiceTest<CassandraAspectDao, CassandraRetentionService> { +public class CassandraEntityServiceTest + extends EntityServiceTest<CassandraAspectDao, CassandraRetentionService> { private CassandraContainer _cassandraContainer; - public CassandraEntityServiceTest() throws EntityRegistryException { - } + public CassandraEntityServiceTest() throws EntityRegistryException {} @BeforeClass public void setupContainer() { @@ -69,16 +70,22 @@ private void configureComponents() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); } /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws AssertionError { @@ -99,7 +106,8 @@ public void testIngestListLatestAspects() throws AssertionError { final int expectedTotalPages = 4; final int expectedEntitiesInLastPage = 10; - Map<Urn, CorpUserInfo> writtenAspects = AspectIngestionUtils.ingestCorpUserInfoAspects(_entityServiceImpl, totalEntities); + Map<Urn, CorpUserInfo> writtenAspects = + AspectIngestionUtils.ingestCorpUserInfoAspects(_entityServiceImpl, totalEntities); Set<Urn> writtenUrns = writtenAspects.keySet(); String entity = writtenUrns.stream().findFirst().get().getEntityType(); String aspect = AspectGenerationUtils.getAspectName(new CorpUserInfo()); @@ -111,7 +119,8 @@ public void testIngestListLatestAspects() throws AssertionError { int expectedEntityCount = isLastPage ? expectedEntitiesInLastPage : pageSize; int expectedNextStart = isLastPage ? -1 : pageStart + pageSize; - ListResult<RecordTemplate> page = _entityServiceImpl.listLatestAspects(entity, aspect, pageStart, pageSize); + ListResult<RecordTemplate> page = + _entityServiceImpl.listLatestAspects(entity, aspect, pageStart, pageSize); // Check paging metadata works as expected assertEquals(page.getNextStart(), expectedNextStart); @@ -121,15 +130,26 @@ public void testIngestListLatestAspects() throws AssertionError { assertEquals(page.getValues().size(), expectedEntityCount); // Remember all URNs we've seen returned for later assertions - readUrns.addAll(page.getMetadata().getExtraInfos().stream().map(ExtraInfo::getUrn).collect(Collectors.toList())); + readUrns.addAll( + page.getMetadata().getExtraInfos().stream() + .map(ExtraInfo::getUrn) + .collect(Collectors.toList())); } assertEquals(readUrns.size(), writtenUrns.size()); - // Check that all URNs we've created were seen in some page or other (also check that none were seen more than once) - // We can't be strict on exact order of items in the responses because Cassandra query limitations get in the way here. 
+ // Check that all URNs we've created were seen in some page or other (also check that none were + // seen more than once) + // We can't be strict on exact order of items in the responses because Cassandra query + // limitations get in the way here. for (Urn wUrn : writtenUrns) { - long matchingUrnCount = readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); - assertEquals(matchingUrnCount, 1L, String.format("Each URN should appear exactly once. %s appeared %d times.", wUrn, matchingUrnCount)); + long matchingUrnCount = + readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); + assertEquals( + matchingUrnCount, + 1L, + String.format( + "Each URN should appear exactly once. %s appeared %d times.", + wUrn, matchingUrnCount)); } } @@ -147,7 +167,8 @@ public void testIngestListUrns() throws AssertionError { final int expectedTotalPages = 4; final int expectedEntitiesInLastPage = 10; - Map<Urn, CorpUserKey> writtenAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalEntities); + Map<Urn, CorpUserKey> writtenAspects = + AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalEntities); Set<Urn> writtenUrns = writtenAspects.keySet(); String entity = writtenUrns.stream().findFirst().get().getEntityType(); @@ -169,11 +190,19 @@ public void testIngestListUrns() throws AssertionError { } assertEquals(readUrns.size(), writtenUrns.size()); - // Check that all URNs we've created were seen in some page or other (also check that none were seen more than once) - // We can't be strict on exact order of items in the responses because Cassandra query limitations get in the way here. + // Check that all URNs we've created were seen in some page or other (also check that none were + // seen more than once) + // We can't be strict on exact order of items in the responses because Cassandra query + // limitations get in the way here. for (Urn wUrn : writtenUrns) { - long matchingUrnCount = readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); - assertEquals(matchingUrnCount, 1L, String.format("Each URN should appear exactly once. %s appeared %d times.", wUrn, matchingUrnCount)); + long matchingUrnCount = + readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); + assertEquals( + matchingUrnCount, + 1L, + String.format( + "Each URN should appear exactly once. 
%s appeared %d times.", + wUrn, matchingUrnCount)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java index 98f9ce241b850..496744770dba8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java @@ -1,6 +1,9 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.AssertJUnit.*; + import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -10,6 +13,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.graph.GraphService; @@ -28,11 +32,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.AssertJUnit.*; - - public class DeleteEntityServiceTest { protected EbeanAspectDao _aspectDao; @@ -46,14 +45,21 @@ public class DeleteEntityServiceTest { protected EntityRegistry _entityRegistry; public DeleteEntityServiceTest() { - _entityRegistry = new ConfigEntityRegistry(Snapshot.class.getClassLoader() - .getResourceAsStream("entity-registry.yml")); + _entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); _aspectDao = mock(EbeanAspectDao.class); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, mock(EventProducer.class), _entityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + mock(EventProducer.class), + _entityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _deleteEntityService = new DeleteEntityService(_entityServiceImpl, _graphService); } @@ -66,11 +72,19 @@ public void testDeleteUniqueRefGeneratesValidMCP() { final Urn container = UrnUtils.getUrn("urn:li:container:d1006cf3-3ff9-48e3-85cd-26eb23775ab2"); final RelatedEntitiesResult mockRelatedEntities = - new RelatedEntitiesResult(0, 1, 1, ImmutableList.of(new RelatedEntity("IsPartOf", dataset.toString()))); - - Mockito.when(_graphService.findRelatedEntities(null, newFilter("urn", container.toString()), - null, EMPTY_FILTER, ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000)) + new RelatedEntitiesResult( + 0, 1, 1, ImmutableList.of(new RelatedEntity("IsPartOf", dataset.toString()))); + + Mockito.when( + _graphService.findRelatedEntities( + null, + newFilter("urn", container.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000)) .thenReturn(mockRelatedEntities); final EntityResponse entityResponse = new EntityResponse(); @@ -78,14 +92,16 @@ public void testDeleteUniqueRefGeneratesValidMCP() { 
entityResponse.setEntityName(dataset.getEntityType()); final Container containerAspect = new Container(); containerAspect.setContainer(container); - final EntityAspectIdentifier dbKey = new EntityAspectIdentifier(dataset.toString(), Constants.CONTAINER_ASPECT_NAME, 0); + final EntityAspectIdentifier dbKey = + new EntityAspectIdentifier(dataset.toString(), Constants.CONTAINER_ASPECT_NAME, 0); final EntityAspect dbValue = new EntityAspect(); dbValue.setUrn(dataset.toString()); dbValue.setVersion(0); dbValue.setAspect(Constants.CONTAINER_ASPECT_NAME); dbValue.setMetadata(RecordUtils.toJsonString(containerAspect)); - dbValue.setSystemMetadata(RecordUtils.toJsonString(SystemMetadataUtils.createDefaultSystemMetadata())); + dbValue.setSystemMetadata( + RecordUtils.toJsonString(SystemMetadataUtils.createDefaultSystemMetadata())); final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp(); dbValue.setCreatedBy(auditStamp.getActor().toString()); dbValue.setCreatedOn(new Timestamp(auditStamp.getTime())); @@ -93,15 +109,25 @@ public void testDeleteUniqueRefGeneratesValidMCP() { final Map<EntityAspectIdentifier, EntityAspect> dbEntries = Map.of(dbKey, dbValue); Mockito.when(_aspectDao.batchGet(Mockito.any())).thenReturn(dbEntries); - RollbackResult result = new RollbackResult(container, Constants.DATASET_ENTITY_NAME, - Constants.CONTAINER_ASPECT_NAME, containerAspect, null, null, null, - ChangeType.DELETE, false, 1); + RollbackResult result = + new RollbackResult( + container, + Constants.DATASET_ENTITY_NAME, + Constants.CONTAINER_ASPECT_NAME, + containerAspect, + null, + null, + null, + ChangeType.DELETE, + false, + 1); Mockito.when(_aspectDao.runInTransactionWithRetry(Mockito.any(), Mockito.anyInt())) .thenReturn(result); - final DeleteReferencesResponse response = _deleteEntityService.deleteReferencesTo(container, false); + final DeleteReferencesResponse response = + _deleteEntityService.deleteReferencesTo(container, false); assertEquals(1, (int) response.getTotal()); assertFalse(response.getRelatedAspects().isEmpty()); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java index 67c9bd0a9e014..943ad2967de42 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java @@ -14,71 +14,62 @@ public class DeleteEntityUtilsTest extends TestCase { - /** - * Tests that Aspect Processor deletes the entire struct if it no longer has any fields - */ + /** Tests that Aspect Processor deletes the entire struct if it no longer has any fields */ @Test public void testEmptyStructRemoval() { final String value = "{\"key_a\": \"hello\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: optional string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a")); 
assertFalse(updatedAspect.data().containsKey("key_a")); assertTrue(updatedAspect.data().isEmpty()); } - /** - * Tests that Aspect Processor deletes & removes optional values from a struct. - */ + /** Tests that Aspect Processor deletes & removes optional values from a struct. */ @Test public void testOptionalFieldRemoval() { final String value = "{\"key_a\": \"hello\", \"key_b\": \"world\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a")); assertFalse(updatedAspect.data().containsKey("key_a")); assertTrue(updatedAspect.data().containsKey("key_b")); assertEquals("world", updatedAspect.data().get("key_b")); } - /** - * Tests that Aspect Processor does not delete a non-optional value from a struct. - */ + /** Tests that Aspect Processor does not delete a non-optional value from a struct. */ @Test public void testNonOptionalFieldRemoval() { final String value = "{\"key_a\": \"hello\", \"key_b\": \"world\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: string\n" + "key_b: string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - assertNull(DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a"))); + assertNull( + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a"))); } /** - * Tests that Aspect Processor deletes a required value from a record referenced by another record. + * Tests that Aspect Processor deletes a required value from a record referenced by another + * record. 
*/ @Test public void testNestedFieldRemoval() { @@ -86,24 +77,21 @@ public void testNestedFieldRemoval() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_a")); assertFalse(updatedAspect.data().containsKey("key_c")); } /** - * Tests that Aspect Processor is able to delete an optional sub-field while preserving nested structs. + * Tests that Aspect Processor is able to delete an optional sub-field while preserving nested + * structs. */ @Test public void testOptionalNestedFieldRemoval() { @@ -111,18 +99,15 @@ public void testOptionalNestedFieldRemoval() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertNotSame(aspect.data().get("key_c"), updatedAspect.data().get("key_c")); @@ -133,8 +118,8 @@ public void testOptionalNestedFieldRemoval() { } /** - * Tests that the Aspect Processor will delete an entire struct if after removal of a field, it becomes empty & - * is optional at some higher level. + * Tests that the Aspect Processor will delete an entire struct if after removal of a field, it + * becomes empty & is optional at some higher level. 
*/ @Test public void testRemovalOptionalFieldWithNonOptionalSubfield() { @@ -142,18 +127,15 @@ public void testRemovalOptionalFieldWithNonOptionalSubfield() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("world", aspect, schema, - new PathSpec("key_c", "key_b")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "world", aspect, schema, new PathSpec("key_c", "key_b")); assertFalse(updatedAspect.data().containsKey("key_c")); } @@ -164,15 +146,14 @@ public void testRemovalFromSingleArray() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: array[string]\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: array[string]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_a")).size()); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a", "*")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a", "*")); assertTrue(updatedAspect.data().containsKey("key_a")); assertTrue(((DataList) updatedAspect.data().get("key_a")).isEmpty()); @@ -184,15 +165,14 @@ public void testRemovalFromMultipleArray() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: array[string]\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: array[string]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_a")).size()); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a", "*")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a", "*")); assertTrue(updatedAspect.data().containsKey("key_a")); assertEquals(1, ((DataList) updatedAspect.data().get("key_a")).size()); @@ -200,28 +180,27 @@ public void testRemovalFromMultipleArray() { } /** - * Tests that Aspect Processor is able to remove sub-field from array field while preserving nested structs. + * Tests that Aspect Processor is able to remove sub-field from array field while preserving + * nested structs. 
*/ @Test public void testRemovalNestedFieldFromArray() { - final String value = "{\"key_c\": [{\"key_a\": \"hello\", \"key_b\": \"world\"}, {\"key_b\": \"extra info\"}]}"; + final String value = + "{\"key_c\": [{\"key_a\": \"hello\", \"key_b\": \"world\"}, {\"key_b\": \"extra info\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(2, ((DataList) updatedAspect.data().get("key_c")).size()); @@ -229,99 +208,95 @@ public void testRemovalNestedFieldFromArray() { assertNotSame(aspect.data().get("key_c"), updatedAspect.data().get("key_c")); // key_a field from first element from key_c should have been removed - assertFalse(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_b")); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); - assertEquals("world", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).get("key_b")); - assertEquals("extra info", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); + assertFalse( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_b")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); + assertEquals( + "world", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).get("key_b")); + assertEquals( + "extra info", + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); } - /** - * Tests that Aspect Processor is able to remove element from array field. - */ + /** Tests that Aspect Processor is able to remove element from array field. 
*/ @Test public void testRemovalElementFromArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}, {\"key_b\": \"extra info\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(2, ((DataList) updatedAspect.data().get("key_c")).size()); // First element from key_c should have been emptied - assertFalse(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); + assertFalse( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).isEmpty()); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); - assertEquals("extra info", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); + assertEquals( + "extra info", + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); } - /** - * Tests that Aspect Processor removes array if empty when removing underlying structs - */ + /** Tests that Aspect Processor removes array if empty when removing underlying structs */ @Test public void testRemovalEmptyArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(1, ((DataList) updatedAspect.data().get("key_c")).size()); 
assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).isEmpty()); } - /** - * Tests that Aspect Processor removes optional array field from struct when it is empty - */ + /** Tests that Aspect Processor removes optional array field from struct when it is empty */ @Test public void testRemovalOptionalEmptyArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional array[simple_record]\n" - + "}"); + pdlSchemaParser.parse( + "record complex_record {\n" + "key_c: optional array[simple_record]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); // contains an empty key_c assertTrue(updatedAspect.data().containsKey("key_c")); @@ -329,7 +304,8 @@ public void testRemovalOptionalEmptyArray() { } /** - * Tests that Aspect Processor removes nested structs more than 1 level deep from an optional field. + * Tests that Aspect Processor removes nested structs more than 1 level deep from an optional + * field. */ @Test public void testNestedNonOptionalSubFieldsOnOptionalField() { @@ -337,46 +313,50 @@ public void testNestedNonOptionalSubFieldsOnOptionalField() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record_1 {\n" - + "key_a: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record_1 {\n" + "key_a: string\n" + "}"); - pdlSchemaParser.parse("record simple_record_2 {\n" - + "key_b: simple_record_1\n" - + "}"); + pdlSchemaParser.parse("record simple_record_2 {\n" + "key_b: simple_record_1\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record_2\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record_2\n" + "}"); assertTrue(aspect.data().containsKey("key_c")); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_b", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_b", "key_a")); assertFalse(updatedAspect.data().containsKey("key_c")); } /** - * Tests that the aspect processor is able to remove fields that are deeply nested where the top-level field is - * optional. - * This example is based on the SchemaMetadata object. + * Tests that the aspect processor is able to remove fields that are deeply nested where the + * top-level field is optional. This example is based on the SchemaMetadata object. 
*/ @Test public void testSchemaMetadataDelete() { - final String value = "{\"fields\": [{\"globalTags\": {\"tags\": [{\"tag\": \"urn:li:tag:Dimension\"}]}}]}"; + final String value = + "{\"fields\": [{\"globalTags\": {\"tags\": [{\"tag\": \"urn:li:tag:Dimension\"}]}}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final Aspect updatedAspect = - DeleteEntityUtils.getAspectWithReferenceRemoved("urn:li:tag:Dimension", aspect, SchemaMetadata.dataSchema(), + DeleteEntityUtils.getAspectWithReferenceRemoved( + "urn:li:tag:Dimension", + aspect, + SchemaMetadata.dataSchema(), new PathSpec("fields", "*", "globalTags", "tags", "*", "tag")); assertFalse(updatedAspect.data().toString().contains("urn:li:tag:Dimension")); assertTrue(updatedAspect.data().containsKey("fields")); // tags must be empty, not field assertEquals(1, ((DataList) updatedAspect.data().get("fields")).size()); - assertEquals(0, ((DataList) ((DataMap) ((DataMap) ((DataList) updatedAspect.data().get("fields")).get(0)) - .get("globalTags")).get("tags")).size()); + assertEquals( + 0, + ((DataList) + ((DataMap) + ((DataMap) ((DataList) updatedAspect.data().get("fields")).get(0)) + .get("globalTags")) + .get("tags")) + .size()); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java index 30d821662d377..2430ebb1f94be 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java @@ -1,9 +1,13 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.AspectIngestionUtils; -import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -19,27 +23,28 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EbeanAspectMigrationsDaoTest extends AspectMigrationsDaoTest<EbeanAspectDao> { - public EbeanAspectMigrationsDaoTest() throws EntityRegistryException { - } + public EbeanAspectMigrationsDaoTest() throws EntityRegistryException {} @BeforeMethod public void setupTest() { - Database server = EbeanTestUtils.createTestServer(EbeanAspectMigrationsDaoTest.class.getSimpleName()); + Database server = + EbeanTestUtils.createTestServer(EbeanAspectMigrationsDaoTest.class.getSimpleName()); _mockProducer = mock(EventProducer.class); EbeanAspectDao dao = new EbeanAspectDao(server); dao.setConnectionValidated(true); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(dao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + dao, + _mockProducer, + _testEntityRegistry, + true, + 
_mockUpdateIndicesService, + preProcessHooks); _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000); _entityServiceImpl.setRetentionService(_retentionService); @@ -51,12 +56,15 @@ public void testStreamAspects() throws AssertionError { final int totalAspects = 30; Map<Urn, CorpUserKey> ingestedAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); - List<String> ingestedUrns = ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); + List<String> ingestedUrns = + ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); - Stream<EntityAspect> aspectStream = _migrationsDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME); + Stream<EntityAspect> aspectStream = + _migrationsDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME); List<EntityAspect> aspectList = aspectStream.collect(Collectors.toList()); assertEquals(ingestedUrns.size(), aspectList.size()); - Set<String> urnsFetched = aspectList.stream().map(EntityAspect::getUrn).collect(Collectors.toSet()); + Set<String> urnsFetched = + aspectList.stream().map(EntityAspect::getUrn).collect(Collectors.toSet()); for (String urn : ingestedUrns) { assertTrue(urnsFetched.contains(urn)); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java index e8a7d8740d328..eeb014f7afdc2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java @@ -1,15 +1,19 @@ package com.linkedin.metadata.entity; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.AuditStamp; -import com.linkedin.metadata.Constants; -import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.DataTemplateUtil; import com.linkedin.data.template.RecordTemplate; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.AspectGenerationUtils; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; @@ -27,11 +31,6 @@ import io.ebean.Transaction; import io.ebean.TxScope; import io.ebean.annotation.TxIsolation; -import org.apache.commons.lang3.tuple.Triple; -import org.testng.Assert; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collection; import java.util.List; @@ -41,23 +40,23 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static org.mockito.Mockito.mock; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - +import org.apache.commons.lang3.tuple.Triple; +import org.testng.Assert; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; /** - * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a relational database. 
+ * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a + * relational database. * - * This class also contains all the test methods where realities of an underlying storage leak into the - * {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be - * great to address captured differences. + * <p>This class also contains all the test methods where realities of an underlying storage leak + * into the {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that + * should never happen, and it'd be great to address captured differences. */ -public class EbeanEntityServiceTest extends EntityServiceTest<EbeanAspectDao, EbeanRetentionService> { +public class EbeanEntityServiceTest + extends EntityServiceTest<EbeanAspectDao, EbeanRetentionService> { - public EbeanEntityServiceTest() throws EntityRegistryException { - } + public EbeanEntityServiceTest() throws EntityRegistryException {} @BeforeMethod public void setupTest() { @@ -69,16 +68,22 @@ public void setupTest() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000); _entityServiceImpl.setRetentionService(_retentionService); } /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. 
*/ @Test public void obligatoryTest() throws AssertionError { @@ -111,30 +116,32 @@ public void testIngestListLatestAspects() throws AssertionError { // Ingest CorpUserInfo Aspect #3 CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - List<UpsertBatchItem> items = List.of( + List<UpsertBatchItem> items = + List.of( UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects - ListResult<RecordTemplate> batch1 = _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 0, 2); + ListResult<RecordTemplate> batch1 = + _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 0, 2); assertEquals(batch1.getNextStart(), 2); assertEquals(batch1.getPageSize(), 2); @@ -144,7 +151,8 @@ public void testIngestListLatestAspects() throws AssertionError { assertTrue(DataTemplateUtil.areEqual(writeAspect1, batch1.getValues().get(0))); assertTrue(DataTemplateUtil.areEqual(writeAspect2, batch1.getValues().get(1))); - ListResult<RecordTemplate> batch2 = _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 2, 2); + ListResult<RecordTemplate> batch2 = + _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 2, 2); assertEquals(batch2.getValues().size(), 1); assertTrue(DataTemplateUtil.areEqual(writeAspect3, batch2.getValues().get(0))); } @@ -175,27 +183,28 @@ public void testIngestListUrns() throws AssertionError { // Ingest CorpUserInfo Aspect #3 RecordTemplate writeAspect3 = AspectGenerationUtils.createCorpUserKey(entityUrn3); - List<UpsertBatchItem> items = List.of( + List<UpsertBatchItem> items = + List.of( UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + .urn(entityUrn3) + 
.aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects urns ListUrnsResult batch1 = _entityServiceImpl.listUrns(entityUrn1.getEntityType(), 0, 2); @@ -221,12 +230,13 @@ public void testIngestListUrns() throws AssertionError { public void testNestedTransactions() throws AssertionError { Database server = _aspectDao.getServer(); - try (Transaction transaction = server.beginTransaction(TxScope.requiresNew() - .setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction = + server.beginTransaction(TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction.setBatchMode(true); // Work 1 - try (Transaction transaction2 = server.beginTransaction(TxScope.requiresNew() - .setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction2 = + server.beginTransaction( + TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction2.setBatchMode(true); // Work 2 transaction2.commit(); @@ -240,20 +250,21 @@ public void testNestedTransactions() throws AssertionError { System.out.println("done"); } - @Test public void dataGeneratorThreadingTest() { DataGenerator dataGenerator = new DataGenerator(_entityServiceImpl); List<String> aspects = List.of("status", "globalTags", "glossaryTerms"); - List<List<MetadataChangeProposal>> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List<List<MetadataChangeProposal>> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); // Expected no duplicates aspects - List<String> duplicates = testData.stream() + List<String> duplicates = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.groupingBy(Triple::toString)) - .entrySet().stream() + .entrySet() + .stream() .filter(e -> e.getValue().size() > 1) .map(Map.Entry::getKey) .collect(Collectors.toList()); @@ -271,38 +282,48 @@ public void multiThreadingTest() { // Add data List<String> aspects = List.of("status", "globalTags", "glossaryTerms"); - List<List<MetadataChangeProposal>> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List<List<MetadataChangeProposal>> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); executeThreadingTest(_entityServiceImpl, testData, 15); // Expected aspects - Set<Triple<String, String, Long>> generatedAspectIds = testData.stream() + Set<Triple<String, String, Long>> generatedAspectIds = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.toSet()); // Actual inserts - Set<Triple<String, String, Long>> actualAspectIds = server.sqlQuery( - "select urn, aspect, version from metadata_aspect_v2").findList().stream() - .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + Set<Triple<String, String, Long>> actualAspectIds = + server.sqlQuery("select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map( + row -> + Triple.of( + row.getString("urn"), row.getString("aspect"), row.getLong("version"))) .collect(Collectors.toSet()); // Assert State - Set<Triple<String, String, Long>> additions = actualAspectIds.stream() + 
Set<Triple<String, String, Long>> additions = + actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + assertEquals( + additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); - Set<Triple<String, String, Long>> missing = generatedAspectIds.stream() + Set<Triple<String, String, Long>> missing = + generatedAspectIds.stream() .filter(id -> !actualAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + assertEquals( + missing.size(), + 0, + String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); } /** - * Don't blame multi-threading for what might not be a threading issue. - * Perform the multi-threading test with 1 thread. + * Don't blame multi-threading for what might not be a threading issue. Perform the + * multi-threading test with 1 thread. */ @Test public void singleThreadingTest() { @@ -311,85 +332,106 @@ public void singleThreadingTest() { // Add data List<String> aspects = List.of("status", "globalTags", "glossaryTerms"); - List<List<MetadataChangeProposal>> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List<List<MetadataChangeProposal>> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); executeThreadingTest(_entityServiceImpl, testData, 1); // Expected aspects - Set<Triple<String, String, Long>> generatedAspectIds = testData.stream() + Set<Triple<String, String, Long>> generatedAspectIds = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.toSet()); // Actual inserts - Set<Triple<String, String, Long>> actualAspectIds = server.sqlQuery( - "select urn, aspect, version from metadata_aspect_v2").findList().stream() - .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + Set<Triple<String, String, Long>> actualAspectIds = + server.sqlQuery("select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map( + row -> + Triple.of( + row.getString("urn"), row.getString("aspect"), row.getLong("version"))) .collect(Collectors.toSet()); // Assert State - Set<Triple<String, String, Long>> additions = actualAspectIds.stream() + Set<Triple<String, String, Long>> additions = + actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + assertEquals( + additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); - Set<Triple<String, String, Long>> missing = generatedAspectIds.stream() + Set<Triple<String, String, Long>> missing = + generatedAspectIds.stream() .filter(id -> !actualAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + assertEquals( + missing.size(), + 0, + String.format("Expected all generated aspects to be inserted. 
Missing: %s", missing)); } - private static void executeThreadingTest(EntityServiceImpl entityService, List<List<MetadataChangeProposal>> testData, - int threadCount) { + private static void executeThreadingTest( + EntityServiceImpl entityService, + List<List<MetadataChangeProposal>> testData, + int threadCount) { Database server = ((EbeanAspectDao) entityService._aspectDao).getServer(); server.sqlUpdate("truncate metadata_aspect_v2"); - int count = Objects.requireNonNull(server.sqlQuery( - "select count(*) as cnt from metadata_aspect_v2").findOne()).getInteger("cnt"); + int count = + Objects.requireNonNull( + server.sqlQuery("select count(*) as cnt from metadata_aspect_v2").findOne()) + .getInteger("cnt"); assertEquals(count, 0, "Expected exactly 0 rows at the start."); // Create ingest proposals in parallel, mimic the smoke-test ingestion - final LinkedBlockingQueue<List<MetadataChangeProposal>> queue = new LinkedBlockingQueue<>(threadCount * 2); + final LinkedBlockingQueue<List<MetadataChangeProposal>> queue = + new LinkedBlockingQueue<>(threadCount * 2); // Spin up workers - List<Thread> writeThreads = IntStream.range(0, threadCount) + List<Thread> writeThreads = + IntStream.range(0, threadCount) .mapToObj(threadId -> new Thread(new MultiThreadTestWorker(queue, entityService))) .collect(Collectors.toList()); writeThreads.forEach(Thread::start); - testData.forEach(mcps -> { - try { - queue.put(mcps); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + testData.forEach( + mcps -> { + try { + queue.put(mcps); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); // Terminate workers with empty mcp - IntStream.range(0, threadCount).forEach(threadId -> { - try { - queue.put(List.of()); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + IntStream.range(0, threadCount) + .forEach( + threadId -> { + try { + queue.put(List.of()); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); // Wait for threads to finish - writeThreads.forEach(thread -> { - try { - thread.join(10000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + writeThreads.forEach( + thread -> { + try { + thread.join(10000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); } private static class MultiThreadTestWorker implements Runnable { private final EntityServiceImpl entityService; private final LinkedBlockingQueue<List<MetadataChangeProposal>> queue; - public MultiThreadTestWorker(LinkedBlockingQueue<List<MetadataChangeProposal>> queue, EntityServiceImpl entityService) { + public MultiThreadTestWorker( + LinkedBlockingQueue<List<MetadataChangeProposal>> queue, EntityServiceImpl entityService) { this.queue = queue; this.entityService = entityService; } @@ -404,9 +446,8 @@ public void run() { final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)); auditStamp.setTime(System.currentTimeMillis()); - AspectsBatchImpl batch = AspectsBatchImpl.builder() - .mcps(mcps, entityService.getEntityRegistry()) - .build(); + AspectsBatchImpl batch = + AspectsBatchImpl.builder().mcps(mcps, entityService.getEntityRegistry()).build(); entityService.ingestProposal(batch, auditStamp, false); } } catch (InterruptedException | URISyntaxException ie) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java 
index f205adc128ed2..f03811da35ea8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -68,1417 +72,1586 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - /** * A class to test {@link EntityServiceImpl} * - * This class is generic to allow same integration tests to be reused to test all supported storage backends. - * If you're adding another storage backend - you should create a new test class that extends this one providing - * hard implementations of {@link AspectDao} and {@link RetentionService} and implements {@code @BeforeMethod} etc - * to set up and tear down state. + * <p>This class is generic to allow same integration tests to be reused to test all supported + * storage backends. If you're adding another storage backend - you should create a new test class + * that extends this one providing hard implementations of {@link AspectDao} and {@link + * RetentionService} and implements {@code @BeforeMethod} etc to set up and tear down state. * - * If you realise that a feature you want to test, sadly, has divergent behaviours between different storage implementations, - * that you can't rectify - you should make the test method abstract and implement it in all implementations of this class. + * <p>If you realise that a feature you want to test, sadly, has divergent behaviours between + * different storage implementations, that you can't rectify - you should make the test method + * abstract and implement it in all implementations of this class. * * @param <T_AD> {@link AspectDao} implementation. * @param <T_RS> {@link RetentionService} implementation. 
*/ -abstract public class EntityServiceTest<T_AD extends AspectDao, T_RS extends RetentionService> { - - protected EntityServiceImpl _entityServiceImpl; - protected T_AD _aspectDao; - protected T_RS _retentionService; - - protected static final AuditStamp TEST_AUDIT_STAMP = AspectGenerationUtils.createAuditStamp(); - protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); - protected final EntityRegistry _configEntityRegistry = - new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - protected final EntityRegistry _testEntityRegistry = - new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); - protected EventProducer _mockProducer; - protected UpdateIndicesService _mockUpdateIndicesService; - - protected EntityServiceTest() throws EntityRegistryException { - } - - // This test had to be split out because Cassandra relational databases have different result ordering restrictions - @Test - abstract public void testIngestListLatestAspects() throws Exception; - - // This test had to be split out because Cassandra relational databases have different result ordering restrictions - @Test - abstract public void testIngestListUrns() throws Exception; - - // This test had to be split out because Cassandra doesn't support nested transactions - @Test - abstract public void testNestedTransactions() throws Exception; - - @Test - public void testIngestGetEntity() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - // 1. Ingest Entity - _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); - - // 2. Retrieve Entity - com.linkedin.entity.Entity readEntity = _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); - - // 3. Compare Entity Objects - assertEquals(readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. 
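[Editor's note: as the class Javadoc above spells out, each storage backend supplies a concrete subclass that wires up its own DAO and retention service and overrides the backend-sensitive tests. A minimal sketch of such a subclass follows; the MyBackend* types and setup body are hypothetical placeholders, not part of this patch — the real Ebean and Cassandra subclasses in the repo are the authoritative references.

// Sketch only: backend wiring below is hypothetical.
public class MyBackendEntityServiceTest
    extends EntityServiceTest<MyBackendAspectDao, MyBackendRetentionService> {

  public MyBackendEntityServiceTest() throws EntityRegistryException {}

  @BeforeMethod
  public void setupTest() {
    // construct _aspectDao, _retentionService, and _entityServiceImpl for the backend
  }

  @Override
  @Test
  public void testIngestListLatestAspects() throws Exception {
    // backend-specific result-ordering assertions
  }

  @Override
  @Test
  public void testIngestListUrns() throws Exception {}

  @Override
  @Test
  public void testNestedTransactions() throws Exception {}
}
]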
- assertTrue(DataTemplateUtil.areEqual(writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), +public abstract class EntityServiceTest<T_AD extends AspectDao, T_RS extends RetentionService> { + + protected EntityServiceImpl _entityServiceImpl; + protected T_AD _aspectDao; + protected T_RS _retentionService; + + protected static final AuditStamp TEST_AUDIT_STAMP = AspectGenerationUtils.createAuditStamp(); + protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); + protected final EntityRegistry _configEntityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + protected final EntityRegistry _testEntityRegistry = + new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); + protected EventProducer _mockProducer; + protected UpdateIndicesService _mockUpdateIndicesService; + + protected EntityServiceTest() throws EntityRegistryException {} + + // This test had to be split out because Cassandra relational databases have different result + // ordering restrictions + @Test + public abstract void testIngestListLatestAspects() throws Exception; + + // This test had to be split out because Cassandra relational databases have different result + // ordering restrictions + @Test + public abstract void testIngestListUrns() throws Exception; + + // This test had to be split out because Cassandra doesn't support nested transactions + @Test + public abstract void testNestedTransactions() throws Exception; + + @Test + public void testIngestGetEntity() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + // 1. Ingest Entity + _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); + + // 2. Retrieve Entity + com.linkedin.entity.Entity readEntity = + _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); + + // 3. Compare Entity Objects + assertEquals( + readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey = new CorpUserKey(); - expectedKey.setUsername("test"); - assertTrue(DataTemplateUtil.areEqual(expectedKey, - readEntity.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testAddKey() throws Exception { - // Test Writing a CorpUser Key - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - // 1. 
Ingest Entity - _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); - - // 2. Retrieve Entity - com.linkedin.entity.Entity readEntity = _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); - - // 3. Compare Entity Objects - assertEquals(readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. - assertTrue(DataTemplateUtil.areEqual(writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), + CorpUserKey expectedKey = new CorpUserKey(); + expectedKey.setUsername("test"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey, + readEntity + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testAddKey() throws Exception { + // Test Writing a CorpUser Key + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + // 1. Ingest Entity + _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); + + // 2. Retrieve Entity + com.linkedin.entity.Entity readEntity = + _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); + + // 3. Compare Entity Objects + assertEquals( + readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey = new CorpUserKey(); - expectedKey.setUsername("test"); - assertTrue(DataTemplateUtil.areEqual(expectedKey, - readEntity.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestGetEntities() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - // 1. 
Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map<Urn, Entity> readEntities = - _entityServiceImpl.getEntities(ImmutableSet.of(entityUrn1, entityUrn2), Collections.emptySet()); - - // 3. Compare Entity Objects - - // Entity 1 - com.linkedin.entity.Entity readEntity1 = readEntities.get(entityUrn1); - assertEquals(readEntity1.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. - assertTrue(DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0), + CorpUserKey expectedKey = new CorpUserKey(); + expectedKey.setUsername("test"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey, + readEntity + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntities() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map<Urn, Entity> readEntities = + _entityServiceImpl.getEntities( + ImmutableSet.of(entityUrn1, entityUrn2), Collections.emptySet()); + + // 3. Compare Entity Objects + + // Entity 1 + com.linkedin.entity.Entity readEntity1 = readEntities.get(entityUrn1); + assertEquals( + readEntity1.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity1.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, - readEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - // Entity 2 - com.linkedin.entity.Entity readEntity2 = readEntities.get(entityUrn2); - assertEquals(readEntity2.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. 
- Optional<CorpUserAspect> writer2UserInfo = writeEntity2.getValue().getCorpUserSnapshot().getAspects() - .stream().filter(CorpUserAspect::isCorpUserInfo).findAny(); - Optional<CorpUserAspect> reader2UserInfo = writeEntity2.getValue().getCorpUserSnapshot().getAspects() - .stream().filter(CorpUserAspect::isCorpUserInfo).findAny(); - - assertTrue(writer2UserInfo.isPresent(), "Writer2 user info exists"); - assertTrue(reader2UserInfo.isPresent(), "Reader2 user info exists"); - assertTrue(DataTemplateUtil.areEqual(writer2UserInfo.get(), reader2UserInfo.get()), "UserInfo's are the same"); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, - readEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), - mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), - mclCaptor.capture()); - mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey1, + readEntity1 + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + // Entity 2 + com.linkedin.entity.Entity readEntity2 = readEntities.get(entityUrn2); + assertEquals( + readEntity2.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + Optional<CorpUserAspect> writer2UserInfo = + writeEntity2.getValue().getCorpUserSnapshot().getAspects().stream() + .filter(CorpUserAspect::isCorpUserInfo) + .findAny(); + Optional<CorpUserAspect> reader2UserInfo = + writeEntity2.getValue().getCorpUserSnapshot().getAspects().stream() + .filter(CorpUserAspect::isCorpUserInfo) + .findAny(); + + assertTrue(writer2UserInfo.isPresent(), "Writer2 user info exists"); + assertTrue(reader2UserInfo.isPresent(), "Reader2 user info exists"); + assertTrue( + DataTemplateUtil.areEqual(writer2UserInfo.get(), reader2UserInfo.get()), + "UserInfo's are the same"); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey2, + readEntity2 + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. 
+ + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), mclCaptor.capture()); + mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntitiesV2() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + String aspectName = "corpUserInfo"; + String keyName = "corpUserKey"; + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map<Urn, EntityResponse> readEntities = + _entityServiceImpl.getEntitiesV2( + "corpuser", ImmutableSet.of(entityUrn1, entityUrn2), ImmutableSet.of(aspectName)); + + // 3. Compare Entity Objects + + // Entity 1 + EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); + assertEquals(readEntityResponse1.getAspects().size(), 2); // Key + Info aspect. + EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); + assertEquals(envelopedAspect1.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect1.getValue().data()))); + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); + + // Entity 2 + EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); + assertEquals(readEntityResponse2.getAspects().size(), 2); // Key + Info aspect. 
+ EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); + assertEquals(envelopedAspect2.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect2.getValue().data()))); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), Mockito.any()); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntitiesVersionedV2() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + VersionedUrn versionedUrn1 = + new VersionedUrn().setUrn(entityUrn1).setVersionStamp("corpUserInfo:0"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + VersionedUrn versionedUrn2 = new VersionedUrn().setUrn(entityUrn2); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + String aspectName = "corpUserInfo"; + String keyName = "corpUserKey"; + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map<Urn, EntityResponse> readEntities = + _entityServiceImpl.getEntitiesVersionedV2( + ImmutableSet.of(versionedUrn1, versionedUrn2), ImmutableSet.of(aspectName)); + + // 3. Compare Entity Objects + + // Entity 1 + EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); + assertEquals(2, readEntityResponse1.getAspects().size()); // Key + Info aspect. + EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); + assertEquals(envelopedAspect1.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect1.getValue().data()))); + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); + + // Entity 2 + EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); + assertEquals(2, readEntityResponse2.getAspects().size()); // Key + Info aspect. 
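[Editor's note on testIngestGetEntitiesVersionedV2: the version stamp is an aspectName:version pair, and version 0 is this codebase's convention for the latest stored version — which is why versionedUrn1 (stamped "corpUserInfo:0") and versionedUrn2 (no stamp) resolve to the same data here. A minimal construction, mirroring the test exactly:

// Pin the corpUserInfo aspect to version 0 (latest); stamp format taken from the test above.
VersionedUrn pinned =
    new VersionedUrn()
        .setUrn(UrnUtils.getUrn("urn:li:corpuser:tester1"))
        .setVersionStamp("corpUserInfo:0");
]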
+ EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); + assertEquals(envelopedAspect2.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect2.getValue().data()))); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), Mockito.any()); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestAspectsGetLatestAspects() throws Exception { + + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + Status writeAspect1 = new Status().setRemoved(false); + String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); + pairToIngest.add(getAspectRecordPair(writeAspect1, Status.class)); + + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName2 = AspectGenerationUtils.getAspectName(writeAspect2); + pairToIngest.add(getAspectRecordPair(writeAspect2, CorpUserInfo.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + Map<String, RecordTemplate> latestAspects = + _entityServiceImpl.getLatestAspectsForUrn( + entityUrn, new HashSet<>(Arrays.asList(aspectName1, aspectName2))); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(writeAspect2, latestAspects.get(aspectName2))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testReingestAspectsGetLatestAspects() throws Exception { + + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + writeAspect1.setCustomProperties(new StringMap()); + String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); + pairToIngest.add(getAspectRecordPair(writeAspect1, CorpUserInfo.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); + + GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); + + initialChangeLog.setAspect(aspect); + initialChangeLog.setSystemMetadata(metadata1); - @Test - public void testIngestGetEntitiesV2() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = 
UrnUtils.getUrn("urn:li:corpuser:tester1"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - String aspectName = "corpUserInfo"; - String keyName = "corpUserKey"; - - // 1. Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map<Urn, EntityResponse> readEntities = - _entityServiceImpl.getEntitiesV2("corpuser", ImmutableSet.of(entityUrn1, entityUrn2), ImmutableSet.of(aspectName)); - - // 3. Compare Entity Objects - - // Entity 1 - EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); - assertEquals(readEntityResponse1.getAspects().size(), 2); // Key + Info aspect. - EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); - assertEquals(envelopedAspect1.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect1.getValue().data()))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); - - // Entity 2 - EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); - assertEquals(readEntityResponse2.getAspects().size(), 2); // Key + Info aspect. 
- EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); - assertEquals(envelopedAspect2.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect2.getValue().data()))); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), - Mockito.any(), Mockito.any()); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(aspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(aspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map<String, RecordTemplate> latestAspects = + _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); - @Test - public void testIngestGetEntitiesVersionedV2() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); - VersionedUrn versionedUrn1 = new VersionedUrn().setUrn(entityUrn1).setVersionStamp("corpUserInfo:0"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - VersionedUrn versionedUrn2 = new VersionedUrn().setUrn(entityUrn2); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - String aspectName = "corpUserInfo"; - String keyName = "corpUserKey"; - - // 1. Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map<Urn, EntityResponse> readEntities = - _entityServiceImpl.getEntitiesVersionedV2(ImmutableSet.of(versionedUrn1, versionedUrn2), ImmutableSet.of(aspectName)); - - // 3. Compare Entity Objects - - // Entity 1 - EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); - assertEquals(2, readEntityResponse1.getAspects().size()); // Key + Info aspect. 
- EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); - assertEquals(envelopedAspect1.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect1.getValue().data()))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); - - // Entity 2 - EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); - assertEquals(2, readEntityResponse2.getAspects().size()); // Key + Info aspect. - EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); - assertEquals(envelopedAspect2.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect2.getValue().data()))); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), - Mockito.any(), Mockito.any()); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestAspectsGetLatestAspects() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - - Status writeAspect1 = new Status().setRemoved(false); - String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); - pairToIngest.add(getAspectRecordPair(writeAspect1, Status.class)); - - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName2 = AspectGenerationUtils.getAspectName(writeAspect2); - pairToIngest.add(getAspectRecordPair(writeAspect2, CorpUserInfo.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(Arrays.asList(aspectName1, aspectName2)) - ); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); - assertTrue(DataTemplateUtil.areEqual(writeAspect2, latestAspects.get(aspectName2))); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testReingestAspectsGetLatestAspects() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, 
TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - writeAspect1.setCustomProperties(new StringMap()); - String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); - pairToIngest.add(getAspectRecordPair(writeAspect1, CorpUserInfo.class)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + verifyNoMoreInteractions(_mockProducer); + } - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); + @Test + public void testReingestLineageAspect() throws Exception { - GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); + Urn entityUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - initialChangeLog.setAspect(aspect); - initialChangeLog.setSystemMetadata(metadata1); + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); - restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(aspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(aspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); + 
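[Editor's note on the re-ingest tests above and below: they pin down the contract that writing a byte-identical aspect again emits a RESTATE MetadataChangeLog carrying the previous value and system metadata, rather than a fresh UPSERT. A hedged sketch of how a downstream consumer might branch on that change type — purely illustrative, not part of this patch:

// Hypothetical consumer; the ChangeType values match those asserted in the tests.
void handle(MetadataChangeLog mcl) {
  switch (mcl.getChangeType()) {
    case UPSERT:
      // value changed (or first write): refresh downstream state
      break;
    case RESTATE:
      // identical re-ingest: previous and new aspect match, often a no-op
      break;
    default:
      break;
  }
}
]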
initialChangeLog.setAspect(aspect); + initialChangeLog.setSystemMetadata(metadata1); - - verifyNoMoreInteractions(_mockProducer); + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(aspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(aspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map<String, RecordTemplate> latestAspects = + _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); + + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); + + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testReingestLineageProposal() throws Exception { + + Urn entityUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); + + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + MetadataChangeProposal mcp1 = new MetadataChangeProposal(); + mcp1.setEntityType(entityUrn.getEntityType()); + GenericAspect genericAspect = GenericRecordUtils.serializeAspect(upstreamLineage); + mcp1.setAspect(genericAspect); + mcp1.setEntityUrn(entityUrn); + mcp1.setChangeType(ChangeType.UPSERT); + mcp1.setSystemMetadata(metadata1); + mcp1.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); + + _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); + + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); + + initialChangeLog.setAspect(genericAspect); + initialChangeLog.setSystemMetadata(metadata1); + + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(genericAspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(genericAspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map<String, RecordTemplate> latestAspects = + 
_entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); + + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); + + _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestTimeseriesAspect() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProfile datasetProfile = new DatasetProfile(); + datasetProfile.setRowCount(1000); + datasetProfile.setColumnCount(15); + datasetProfile.setTimestampMillis(0L); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProfile"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + } + + @Test + public void testAsyncProposalVersioned() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProperties datasetProperties = new DatasetProperties(); + datasetProperties.setName("Foo Bar"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProperties"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); + verify(_mockProducer, times(0)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + verify(_mockProducer, times(1)) + .produceMetadataChangeProposal(Mockito.eq(entityUrn), Mockito.eq(gmce)); + } + + @Test + public void testAsyncProposalTimeseries() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProfile datasetProfile = new DatasetProfile(); + datasetProfile.setRowCount(1000); + datasetProfile.setColumnCount(15); + datasetProfile.setTimestampMillis(0L); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProfile"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetProfileSerialized = 
dataTemplateCodec.dataTemplateToBytes(datasetProfile); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + verify(_mockProducer, times(0)) + .produceMetadataChangeProposal(Mockito.eq(entityUrn), Mockito.eq(gmce)); + } + + @Test + public void testUpdateGetAspect() throws AssertionError { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + AspectSpec corpUserInfoSpec = + _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #1 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect1)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + // Ingest CorpUserInfo Aspect #2 + writeAspect.setEmail("newemail@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #2 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect2)); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testGetAspectAtVersion() throws AssertionError { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + AspectSpec corpUserInfoSpec = + _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #1 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect1)), TEST_AUDIT_STAMP, null); + + VersionedAspect writtenVersionedAspect1 = new VersionedAspect(); + writtenVersionedAspect1.setAspect(Aspect.create(writeAspect1)); + writtenVersionedAspect1.setVersion(0); + + VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + + // Validate retrieval of CorpUserInfo Aspect #2 + 
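[Editor's note: the timeseries and async-proposal tests above all repeat the same serialization recipe — encode the RecordTemplate to JSON bytes with JacksonDataTemplateCodec, wrap them in a ByteString, and stamp the content type. A small helper capturing that recipe; the method name is ours, but the calls are exactly those used in the tests (assumes the imports already present in this file plus java.io.IOException):

// Sketch: wrap any RecordTemplate as the JSON payload of a GenericAspect.
static GenericAspect toGenericAspect(RecordTemplate aspect) throws IOException {
  JacksonDataTemplateCodec codec = new JacksonDataTemplateCodec();
  GenericAspect generic = new GenericAspect();
  generic.setValue(ByteString.unsafeWrap(codec.dataTemplateToBytes(aspect)));
  generic.setContentType("application/json");
  return generic;
}
]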
_entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect2)), TEST_AUDIT_STAMP, null); + + VersionedAspect writtenVersionedAspect2 = new VersionedAspect(); + writtenVersionedAspect2.setAspect(Aspect.create(writeAspect2)); + writtenVersionedAspect2.setVersion(0); + + VersionedAspect readAspectVersion2 = + _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspectVersion2)); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect2, readAspectVersion2)); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testRollbackAspect() throws AssertionError { + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); + Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Ingest CorpUserInfo Aspect #3 + CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); + + // Ingest CorpUserInfo Aspect #1 Overwrite + CorpUserInfo writeAspect1Overwrite = + AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // this should no-op since this run has been overwritten + AspectRowSummary rollbackOverwrittenAspect = new AspectRowSummary(); + rollbackOverwrittenAspect.setRunId("run-123"); + rollbackOverwrittenAspect.setAspectName(aspectName); + rollbackOverwrittenAspect.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-123", true); + + // assert nothing was deleted + RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); + + RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, 
readAspectOverwrite)); + + // this should delete the most recent aspect + AspectRowSummary rollbackRecentAspect = new AspectRowSummary(); + rollbackRecentAspect.setRunId("run-456"); + rollbackRecentAspect.setAspectName(aspectName); + rollbackRecentAspect.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackRecentAspect), "run-456", true); + + // assert the new most recent aspect is the original one + RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readNewRecentAspect)); + } + + @Test + public void testRollbackKey() throws AssertionError { + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); + + // Ingest CorpUserInfo Aspect #1 Overwrite + CorpUserInfo writeAspect1Overwrite = + AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(keyAspectName) + .aspect(writeKey1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // this should no-op since the key should have been written in the first run + AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); + rollbackKeyWithWrongRunId.setRunId("run-456"); + rollbackKeyWithWrongRunId.setAspectName("corpUserKey"); + rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithWrongRunId), "run-456", true); + + // assert nothing was deleted + RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); + + RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); + + // this should delete the most recent aspect + AspectRowSummary rollbackKeyWithCorrectRunId = new AspectRowSummary(); + rollbackKeyWithCorrectRunId.setRunId("run-123"); + rollbackKeyWithCorrectRunId.setAspectName("corpUserKey"); + rollbackKeyWithCorrectRunId.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithCorrectRunId), "run-123", true); + + // assert the new most recent aspect is null + RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); + } + + @Test + public 
void testRollbackUrn() throws AssertionError { + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); + Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Ingest CorpUserInfo Aspect #3 + CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); + + // Ingest CorpUserInfo Aspect #1 Overwrite + CorpUserInfo writeAspect1Overwrite = + AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(keyAspectName) + .aspect(writeKey1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // this should no-op since the key should have been written in the first run + AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); + rollbackKeyWithWrongRunId.setRunId("run-456"); + rollbackKeyWithWrongRunId.setAspectName("corpUserKey"); + rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); + + // this should delete all related aspects + _entityServiceImpl.deleteUrn(UrnUtils.getUrn("urn:li:corpuser:test1")); + + // assert the new most recent aspect is null + RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); + + RecordTemplate deletedKeyAspect = _entityServiceImpl.getAspect(entityUrn1, "corpUserKey", 0); + assertTrue(DataTemplateUtil.areEqual(null, deletedKeyAspect)); + } + + @Test + public void testIngestGetLatestAspect() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + List<UpsertBatchItem> items = +
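+    // The batch below holds a single item; each successfully written item is expected
+    // to emit one MetadataChangeLog, which the test captures further down roughly like
+    // this (a sketch of the pattern already used in this file, not new API):
+    //   ArgumentCaptor<MetadataChangeLog> mclCaptor =
+    //       ArgumentCaptor.forClass(MetadataChangeLog.class);
+    //   verify(_mockProducer, times(1))
+    //       .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture());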
List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + + reset(_mockProducer); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #2 + RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); + EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); + + assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNotNull(mcl.getPreviousAspectValue()); + assertNotNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetLatestEnvelopedAspect() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + EnvelopedAspect readAspect1 = + _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); + assertTrue( + 
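+    // getLatestEnvelopedAspect returns the aspect wrapped in an EnvelopedAspect; the
+    // concrete record is rehydrated from the serialized payload, as the next line does
+    // (sketch restating the call that follows):
+    //   CorpUserInfo hydrated = new CorpUserInfo(readAspect1.getValue().data());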
DataTemplateUtil.areEqual(writeAspect1, new CorpUserInfo(readAspect1.getValue().data()))); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #2 + EnvelopedAspect readAspect2 = + _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); + EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); + EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); + + assertTrue( + DataTemplateUtil.areEqual(writeAspect2, new CorpUserInfo(readAspect2.getValue().data()))); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestSameAspect() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + SystemMetadata metadata3 = + AspectGenerationUtils.createSystemMetadata(1635792689, "run-123", "run-456"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + + reset(_mockProducer); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of 
CorpUserInfo Aspect #2 + RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + EntityAspect readAspectDao2 = + _aspectDao.getAspect(entityUrn.toString(), aspectName, ASPECT_LATEST_VERSION); + + assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); + assertFalse( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertFalse( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata1)); + + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata3)); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testRetention() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); + CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); + + String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); + // Ingest Status Aspect + Status writeAspect2 = new Status().setRemoved(true); + Status writeAspect2a = new Status().setRemoved(false); + Status writeAspect2b = new Status().setRemoved(true); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1b) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2b) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); + + _retentionService.setRetention( + null, + null, + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(2)))); + _retentionService.setRetention( + "corpuser", + "status", + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(4)))); + + // Ingest CorpUserInfo Aspect again + CorpUserInfo writeAspect1c = AspectGenerationUtils.createCorpUserInfo("email_c@test.com"); + // Ingest Status Aspect again + Status writeAspect2c = 
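+    // With the policies set above (maxVersions=2 for corpUserInfo, maxVersions=4 for
+    // corpuser/status), the re-ingest below should push corpUserInfo past its retention
+    // window; a sketch of the expectation this test asserts:
+    //   _entityServiceImpl.ingestAspects(...);  // one more corpUserInfo version
+    //   assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1));   // pruned
+    //   assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2);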
new Status().setRemoved(false); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1c) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2c) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); + + // Reset retention policies + _retentionService.setRetention( + null, + null, + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(1)))); + _retentionService.deleteRetention("corpuser", "status"); + // Invoke batch apply + _retentionService.batchApplyRetention(null, null); + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10) + .getTotalCount(), + 1); + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10) + .getTotalCount(), + 1); + } + + @Test + public void testIngestAspectIfNotPresent() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); + CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName, writeAspect1a, TEST_AUDIT_STAMP, metadata1); + CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName, writeAspect1b, TEST_AUDIT_STAMP, metadata1); + + String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); + // Ingest Status Aspect + Status writeAspect2 = new Status().setRemoved(true); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName2, writeAspect2, TEST_AUDIT_STAMP, metadata1); + Status writeAspect2a = new Status().setRemoved(false); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName2, writeAspect2a, TEST_AUDIT_STAMP, metadata1); + Status writeAspect2b = new Status().setRemoved(true); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName2, writeAspect2b, TEST_AUDIT_STAMP, metadata1); + + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 0), writeAspect1); + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 0), writeAspect2); + + assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); + assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1)); + + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10) + .getTotalCount(), + 1); + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10) + .getTotalCount(), + 1); + } + + /** + * Equivalence checks on mocks fail when comparing the object directly, because converting a + RecordTemplate to and from JSON 
reorders the fields. This simulates pulling the historical + * SystemMetadata from the previous call. + */ + protected <T extends RecordTemplate> T simulatePullFromDB(T aspect, Class<T> clazz) + throws Exception { + final ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + return RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + } + + @Test + public void testRestoreIndices() throws Exception { + if (this instanceof EbeanEntityServiceTest) { + String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)"; + Urn entityUrn = UrnUtils.getUrn(urnStr); + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + clearInvocations(_mockProducer); + + RestoreIndicesArgs args = new RestoreIndicesArgs(); + args.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); + args.setBatchSize(1); + args.setStart(0); + args.setBatchDelayMs(1L); + args.setNumThreads(1); + args.setUrn(urnStr); + _entityServiceImpl.restoreIndices(args, obj -> {}); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = + ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "dataset"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.RESTATE); + assertEquals(mcl.getSystemMetadata().getProperties().get(FORCE_INDEXING_KEY), "true"); } - - @Test - public void testReingestLineageAspect() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); - - GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); - - initialChangeLog.setAspect(aspect); - initialChangeLog.setSystemMetadata(metadata1); - - final MetadataChangeLog restateChangeLog = new 
MetadataChangeLog(); - restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(aspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(aspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); - - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); - - - verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testValidateUrn() throws Exception { + // Valid URN + Urn validTestUrn = new Urn("li", "corpuser", new TupleKey("testKey")); + EntityUtils.validateUrn(_testEntityRegistry, validTestUrn); + + // URN with trailing whitespace + Urn testUrnWithTrailingWhitespace = new Urn("li", "corpuser", new TupleKey("testKey ")); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnWithTrailingWhitespace); + Assert.fail("Should have raised IllegalArgumentException for URN with trailing whitespace"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN with leading or trailing whitespace"); } - @Test - public void testReingestLineageProposal() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + // Urn purely too long + String stringTooLong = "a".repeat(510); - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - MetadataChangeProposal mcp1 = new MetadataChangeProposal(); - mcp1.setEntityType(entityUrn.getEntityType()); - GenericAspect genericAspect = GenericRecordUtils.serializeAspect(upstreamLineage); - mcp1.setAspect(genericAspect); - mcp1.setEntityUrn(entityUrn); - mcp1.setChangeType(ChangeType.UPSERT); - mcp1.setSystemMetadata(metadata1); - mcp1.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); - - _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); - - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); - - initialChangeLog.setAspect(genericAspect); - initialChangeLog.setSystemMetadata(metadata1); - - final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); - 
restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(genericAspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(genericAspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); - - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); - - _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); - - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestTimeseriesAspect() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProfile datasetProfile = new DatasetProfile(); - datasetProfile.setRowCount(1000); - datasetProfile.setColumnCount(15); - datasetProfile.setTimestampMillis(0L); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProfile"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); - } - - @Test - public void testAsyncProposalVersioned() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProperties datasetProperties = new DatasetProperties(); - datasetProperties.setName("Foo Bar"); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProperties"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); - verify(_mockProducer, times(0)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - verify(_mockProducer, times(1)).produceMetadataChangeProposal(Mockito.eq(entityUrn), - Mockito.eq(gmce)); - } - - - @Test - public void testAsyncProposalTimeseries() throws Exception { - Urn entityUrn = 
UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProfile datasetProfile = new DatasetProfile(); - datasetProfile.setRowCount(1000); - datasetProfile.setColumnCount(15); - datasetProfile.setTimestampMillis(0L); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProfile"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - verify(_mockProducer, times(0)).produceMetadataChangeProposal(Mockito.eq(entityUrn), - Mockito.eq(gmce)); - } - - @Test - public void testUpdateGetAspect() throws AssertionError { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - AspectSpec corpUserInfoSpec = _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); - - RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect1)); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - // Ingest CorpUserInfo Aspect #2 - writeAspect.setEmail("newemail@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); - - RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect2)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testGetAspectAtVersion() throws AssertionError { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - AspectSpec corpUserInfoSpec = _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect1)), TEST_AUDIT_STAMP, null); - - VersionedAspect writtenVersionedAspect1 = new VersionedAspect(); - writtenVersionedAspect1.setAspect(Aspect.create(writeAspect1)); - 
writtenVersionedAspect1.setVersion(0); - - VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - - // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect2)), TEST_AUDIT_STAMP, null); - - VersionedAspect writtenVersionedAspect2 = new VersionedAspect(); - writtenVersionedAspect2.setAspect(Aspect.create(writeAspect2)); - writtenVersionedAspect2.setVersion(0); - - VersionedAspect readAspectVersion2 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); - assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspectVersion2)); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect2, readAspectVersion2)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); - assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testRollbackAspect() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Ingest CorpUserInfo Aspect #3 - CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since this run has been overwritten - AspectRowSummary rollbackOverwrittenAspect = new AspectRowSummary(); - rollbackOverwrittenAspect.setRunId("run-123"); - 
rollbackOverwrittenAspect.setAspectName(aspectName); - rollbackOverwrittenAspect.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-123", true); - - // assert nothing was deleted - RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); - - RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); - - // this should delete the most recent aspect - AspectRowSummary rollbackRecentAspect = new AspectRowSummary(); - rollbackRecentAspect.setRunId("run-456"); - rollbackRecentAspect.setAspectName(aspectName); - rollbackRecentAspect.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-456", true); - - // assert the new most recent aspect is the original one - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readNewRecentAspect)); - } - - @Test - public void testRollbackKey() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(keyAspectName) - .aspect(writeKey1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since the key should have been written in the furst run - AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); - rollbackKeyWithWrongRunId.setRunId("run-456"); - rollbackKeyWithWrongRunId.setAspectName("corpUserKey"); - rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithWrongRunId), "run-456", true); - - // assert nothing was deleted - RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); - - RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); - - // this should delete the most recent aspect - 
AspectRowSummary rollbackKeyWithCorrectRunId = new AspectRowSummary(); - rollbackKeyWithCorrectRunId.setRunId("run-123"); - rollbackKeyWithCorrectRunId.setAspectName("corpUserKey"); - rollbackKeyWithCorrectRunId.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithCorrectRunId), "run-123", true); - - // assert the new most recent aspect is null - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); - } - - @Test - public void testRollbackUrn() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Ingest CorpUserInfo Aspect #3 - CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(keyAspectName) - .aspect(writeKey1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since the key should have been written in the furst run - AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); - rollbackKeyWithWrongRunId.setRunId("run-456"); - rollbackKeyWithWrongRunId.setAspectName("CorpUserKey"); - rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); - - // this should delete all related aspects - _entityServiceImpl.deleteUrn(UrnUtils.getUrn("urn:li:corpuser:test1")); - - // assert the new most recent aspect is null - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); - - RecordTemplate deletedKeyAspect = _entityServiceImpl.getAspect(entityUrn1, "corpUserKey", 0); - assertTrue(DataTemplateUtil.areEqual(null, 
deletedKeyAspect)); + Urn testUrnTooLong = new Urn("li", "corpuser", new TupleKey(stringTooLong)); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLong); + Assert.fail("Should have raised IllegalArgumentException for URN too long"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); } - @Test - public void testIngestGetLatestAspect() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #1 - RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - - reset(_mockProducer); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNotNull(mcl.getPreviousAspectValue()); - assertNotNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); + // Urn too long when URL encoded + StringBuilder buildStringTooLongWhenEncoded = new StringBuilder(); + 
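+    // Rough arithmetic behind this check (illustrative, not taken from the validator
+    // itself): '>' URL-encodes to "%3E", three bytes per character, so 200 of them
+    // encode to roughly 600 bytes and exceed the 512-byte limit, while 200 'a's stay
+    // at 200 bytes and pass.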
StringBuilder buildStringSameLengthWhenEncoded = new StringBuilder(); + for (int i = 0; i < 200; i++) { + buildStringTooLongWhenEncoded.append('>'); + buildStringSameLengthWhenEncoded.append('a'); } - - @Test - public void testIngestGetLatestEnvelopedAspect() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #1 - EnvelopedAspect readAspect1 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, new CorpUserInfo(readAspect1.getValue().data()))); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - EnvelopedAspect readAspect2 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); - EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, new CorpUserInfo(readAspect2.getValue().data()))); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestSameAspect() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - SystemMetadata metadata3 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-123", "run-456"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - 
// Validate retrieval of CorpUserInfo Aspect #1 - RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - - reset(_mockProducer); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, ASPECT_LATEST_VERSION); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); - assertFalse(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertFalse(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata1)); - - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata3)); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testRetention() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); - CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); - - String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); - // Ingest Status Aspect - Status writeAspect2 = new Status().setRemoved(true); - Status writeAspect2a = new Status().setRemoved(false); - Status writeAspect2b = new Status().setRemoved(true); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1a) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1b) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), 
- UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2a) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2b) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); - - _retentionService.setRetention(null, null, new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(2)))); - _retentionService.setRetention("corpuser", "status", new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(4)))); - - // Ingest CorpUserInfo Aspect again - CorpUserInfo writeAspect1c = AspectGenerationUtils.createCorpUserInfo("email_c@test.com"); - // Ingest Status Aspect again - Status writeAspect2c = new Status().setRemoved(false); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1c) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2c) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); - - // Reset retention policies - _retentionService.setRetention(null, null, new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(1)))); - _retentionService.deleteRetention("corpuser", "status"); - // Invoke batch apply - _retentionService.batchApplyRetention(null, null); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10).getTotalCount(), 1); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10).getTotalCount(), 1); - } - - @Test - public void testIngestAspectIfNotPresent() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1a, TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1b, TEST_AUDIT_STAMP, metadata1); - - String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); - // Ingest Status Aspect - Status writeAspect2 = new Status().setRemoved(true); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, 
writeAspect2, TEST_AUDIT_STAMP, metadata1); - Status writeAspect2a = new Status().setRemoved(false); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, writeAspect2a, TEST_AUDIT_STAMP, metadata1); - Status writeAspect2b = new Status().setRemoved(true); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, writeAspect2b, TEST_AUDIT_STAMP, metadata1); - - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 0), writeAspect1); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 0), writeAspect2); - - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1)); - - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10).getTotalCount(), 1); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10).getTotalCount(), 1); + Urn testUrnTooLongWhenEncoded = + new Urn("li", "corpUser", new TupleKey(buildStringTooLongWhenEncoded.toString())); + Urn testUrnSameLengthWhenEncoded = + new Urn("li", "corpUser", new TupleKey(buildStringSameLengthWhenEncoded.toString())); + // Same length when encoded should be allowed, the encoded one should not be + EntityUtils.validateUrn(_testEntityRegistry, testUrnSameLengthWhenEncoded); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLongWhenEncoded); + Assert.fail("Should have raised IllegalArgumentException for URN too long"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); } - /** - * Equivalence for mocks fails when directly using the object as when converting from RecordTemplate from JSON it - * reorders the fields. This simulates pulling the historical SystemMetadata from the previous call. 
- */ - protected <T extends RecordTemplate> T simulatePullFromDB(T aspect, Class<T> clazz) throws Exception { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - return RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + // Urn containing disallowed character + Urn testUrnSpecialCharValid = new Urn("li", "corpUser", new TupleKey("bob␇")); + Urn testUrnSpecialCharInvalid = new Urn("li", "corpUser", new TupleKey("bob␟")); + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharValid); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharInvalid); + Assert.fail( + "Should have raised IllegalArgumentException for URN containing the illegal char"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "Error: URN cannot contain ␟ character"); } - - @Test - public void testRestoreIndices() throws Exception { - if (this instanceof EbeanEntityServiceTest) { - String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)"; - Urn entityUrn = UrnUtils.getUrn(urnStr); - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - clearInvocations(_mockProducer); - - RestoreIndicesArgs args = new RestoreIndicesArgs(); - args.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); - args.setBatchSize(1); - args.setStart(0); - args.setBatchDelayMs(1L); - args.setNumThreads(1); - args.setUrn(urnStr); - _entityServiceImpl.restoreIndices(args, obj -> { - }); - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog( - Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "dataset"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.RESTATE); - assertEquals(mcl.getSystemMetadata().getProperties().get(FORCE_INDEXING_KEY), "true"); - } - } - - @Test - public void testValidateUrn() throws Exception { - // Valid URN - Urn validTestUrn = new Urn("li", "corpuser", new TupleKey("testKey")); - EntityUtils.validateUrn(_testEntityRegistry, validTestUrn); - - // URN with trailing whitespace - Urn testUrnWithTrailingWhitespace = new Urn("li", "corpuser", new TupleKey("testKey ")); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnWithTrailingWhitespace); - Assert.fail("Should have raised IllegalArgumentException for URN with trailing whitespace"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN with leading or trailing whitespace"); - } - - // Urn purely too long - String stringTooLong = "a".repeat(510); - - Urn testUrnTooLong = new Urn("li", 
"corpuser", new TupleKey(stringTooLong)); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLong); - Assert.fail("Should have raised IllegalArgumentException for URN too long"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); - } - - // Urn too long when URL encoded - StringBuilder buildStringTooLongWhenEncoded = new StringBuilder(); - StringBuilder buildStringSameLengthWhenEncoded = new StringBuilder(); - for (int i = 0; i < 200; i++) { - buildStringTooLongWhenEncoded.append('>'); - buildStringSameLengthWhenEncoded.append('a'); - } - Urn testUrnTooLongWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringTooLongWhenEncoded.toString())); - Urn testUrnSameLengthWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringSameLengthWhenEncoded.toString())); - // Same length when encoded should be allowed, the encoded one should not be - EntityUtils.validateUrn(_testEntityRegistry, testUrnSameLengthWhenEncoded); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLongWhenEncoded); - Assert.fail("Should have raised IllegalArgumentException for URN too long"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); - } - - // Urn containing disallowed character - Urn testUrnSpecialCharValid = new Urn("li", "corpUser", new TupleKey("bob␇")); - Urn testUrnSpecialCharInvalid = new Urn("li", "corpUser", new TupleKey("bob␟")); - EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharValid); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharInvalid); - Assert.fail("Should have raised IllegalArgumentException for URN containing the illegal char"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: URN cannot contain ␟ character"); - } - - Urn urnWithMismatchedParens = new Urn("li", "corpuser", new TupleKey("test(Key")); - try { - EntityUtils.validateUrn(_testEntityRegistry, urnWithMismatchedParens); - Assert.fail("Should have raised IllegalArgumentException for URN with mismatched parens"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("mismatched paren nesting")); - } - - Urn invalidType = new Urn("li", "fakeMadeUpType", new TupleKey("testKey")); - try { - EntityUtils.validateUrn(_testEntityRegistry, invalidType); - Assert.fail("Should have raised IllegalArgumentException for URN with non-existent entity type"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Failed to find entity with name fakeMadeUpType")); - } - - Urn validFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD")); - EntityUtils.validateUrn(_testEntityRegistry, validFabricType); - - Urn invalidFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "prod")); - try { - EntityUtils.validateUrn(_testEntityRegistry, invalidFabricType); - Assert.fail("Should have raised IllegalArgumentException for URN with invalid fabric type"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains(invalidFabricType.toString())); - } - - Urn urnEndingInComma = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD", "")); - try { - EntityUtils.validateUrn(_testEntityRegistry, urnEndingInComma); - Assert.fail("Should have raised IllegalArgumentException for URN ending in comma"); - } catch 
(IllegalArgumentException e) { - assertTrue(e.getMessage().contains(urnEndingInComma.toString())); - } + Urn urnWithMismatchedParens = new Urn("li", "corpuser", new TupleKey("test(Key")); + try { + EntityUtils.validateUrn(_testEntityRegistry, urnWithMismatchedParens); + Assert.fail("Should have raised IllegalArgumentException for URN with mismatched parens"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("mismatched paren nesting")); } - @Test - public void testUIPreProcessedProposal() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - EditableDatasetProperties datasetProperties = new EditableDatasetProperties(); - datasetProperties.setDescription("Foo Bar"); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("editableDatasetProperties"); - SystemMetadata systemMetadata = new SystemMetadata(); - StringMap properties = new StringMap(); - properties.put(APP_SOURCE, UI_SOURCE); - systemMetadata.setProperties(properties); - gmce.setSystemMetadata(systemMetadata); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); - ArgumentCaptor<MetadataChangeLog> captor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), captor.capture()); - assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); + Urn invalidType = new Urn("li", "fakeMadeUpType", new TupleKey("testKey")); + try { + EntityUtils.validateUrn(_testEntityRegistry, invalidType); + Assert.fail( + "Should have raised IllegalArgumentException for URN with non-existent entity type"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Failed to find entity with name fakeMadeUpType")); } - @Nonnull - protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) throws Exception { - CorpuserUrn corpuserUrn = CorpuserUrn.createFromUrn(entityUrn); - com.linkedin.entity.Entity entity = new com.linkedin.entity.Entity(); - Snapshot snapshot = new Snapshot(); - CorpUserSnapshot corpUserSnapshot = new CorpUserSnapshot(); - List<CorpUserAspect> userAspects = new ArrayList<>(); - userAspects.add(CorpUserAspect.create(AspectGenerationUtils.createCorpUserInfo(email))); - corpUserSnapshot.setAspects(new CorpUserAspectArray(userAspects)); - corpUserSnapshot.setUrn(corpuserUrn); - snapshot.setCorpUserSnapshot(corpUserSnapshot); - entity.setValue(snapshot); - return entity; + Urn validFabricType = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD")); + EntityUtils.validateUrn(_testEntityRegistry, validFabricType); + + Urn invalidFabricType = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "prod")); + try { + EntityUtils.validateUrn(_testEntityRegistry, invalidFabricType); + Assert.fail("Should have raised IllegalArgumentException for URN with invalid fabric type"); + } catch (IllegalArgumentException 
e) { + assertTrue(e.getMessage().contains(invalidFabricType.toString())); } - protected <T extends RecordTemplate> Pair<String, RecordTemplate> getAspectRecordPair(T aspect, Class<T> clazz) - throws Exception { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - RecordTemplate recordTemplate = RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); - return new Pair<>(AspectGenerationUtils.getAspectName(aspect), recordTemplate); + Urn urnEndingInComma = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD", "")); + try { + EntityUtils.validateUrn(_testEntityRegistry, urnEndingInComma); + Assert.fail("Should have raised IllegalArgumentException for URN ending in comma"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains(urnEndingInComma.toString())); } + } + + @Test + public void testUIPreProcessedProposal() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + EditableDatasetProperties datasetProperties = new EditableDatasetProperties(); + datasetProperties.setDescription("Foo Bar"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("editableDatasetProperties"); + SystemMetadata systemMetadata = new SystemMetadata(); + StringMap properties = new StringMap(); + properties.put(APP_SOURCE, UI_SOURCE); + systemMetadata.setProperties(properties); + gmce.setSystemMetadata(systemMetadata); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + ArgumentCaptor<MetadataChangeLog> captor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), captor.capture()); + assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); + } + + @Nonnull + protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) + throws Exception { + CorpuserUrn corpuserUrn = CorpuserUrn.createFromUrn(entityUrn); + com.linkedin.entity.Entity entity = new com.linkedin.entity.Entity(); + Snapshot snapshot = new Snapshot(); + CorpUserSnapshot corpUserSnapshot = new CorpUserSnapshot(); + List<CorpUserAspect> userAspects = new ArrayList<>(); + userAspects.add(CorpUserAspect.create(AspectGenerationUtils.createCorpUserInfo(email))); + corpUserSnapshot.setAspects(new CorpUserAspectArray(userAspects)); + corpUserSnapshot.setUrn(corpuserUrn); + snapshot.setCorpUserSnapshot(corpUserSnapshot); + entity.setValue(snapshot); + return entity; + } + + protected <T extends RecordTemplate> Pair<String, RecordTemplate> getAspectRecordPair( + T aspect, Class<T> clazz) throws Exception { + final 
ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + RecordTemplate recordTemplate = + RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + return new Pair<>(AspectGenerationUtils.getAspectName(aspect), recordTemplate); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java index e90ffd8a4bcb7..680d4079851eb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java @@ -14,16 +14,15 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class TestEntityRegistry implements EntityRegistry { private final Map<String, EntitySpec> entityNameToSpec; public TestEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) - .buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); } @Nonnull diff --git a/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java index c7ab24e87a873..a98386f6f871b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.extractor; +import static org.testng.AssertJUnit.assertEquals; + import com.datahub.test.TestEntityAspect; import com.datahub.test.TestEntityAspectArray; import com.datahub.test.TestEntityInfo; @@ -12,9 +14,6 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertEquals; - - public class AspectExtractorTest { @Test public void testExtractor() { @@ -23,7 +22,8 @@ public void testExtractor() { TestEntityKey testEntityKey = TestEntityUtil.getTestEntityKey(urn); TestEntityInfo testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); snapshot.setAspects( - new TestEntityAspectArray(TestEntityAspect.create(testEntityKey), TestEntityAspect.create(testEntityInfo))); + new TestEntityAspectArray( + TestEntityAspect.create(testEntityKey), TestEntityAspect.create(testEntityInfo))); Map<String, RecordTemplate> result = AspectExtractor.extractAspectRecords(snapshot); assertEquals(result.size(), 2); assertEquals(result.get("testEntityKey"), testEntityKey); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java index cbc4825a3b557..1adb5d1ab3952 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.extractor; +import static org.testng.Assert.assertEquals; + import com.datahub.test.TestEntityInfo; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntitySpecBuilder; @@ -15,65 +17,91 @@ import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; - - public class FieldExtractorTest { @Test public void testExtractor() { EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); AspectSpec testEntityInfoSpec = testEntitySpec.getAspectSpec("testEntityInfo"); - Map<String, SearchableFieldSpec> nameToSpec = testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); + Map<String, SearchableFieldSpec> nameToSpec = + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); TestEntityInfo testEntityInfo = new TestEntityInfo(); Map<SearchableFieldSpec, List<Object>> result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); - assertEquals(result, testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); + assertEquals( + result, + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); Urn urn = TestEntityUtil.getTestEntityUrn(); testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); - result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); + result = + FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); assertEquals(result.get(nameToSpec.get("textFieldOverride")), ImmutableList.of("test")); assertEquals(result.get(nameToSpec.get("foreignKey")), ImmutableList.of()); assertEquals(result.get(nameToSpec.get("nestedForeignKey")), ImmutableList.of(urn)); - assertEquals(result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); + assertEquals( + result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); assertEquals(result.get(nameToSpec.get("nestedIntegerField")), ImmutableList.of(1)); - assertEquals(result.get(nameToSpec.get("nestedArrayStringField")), ImmutableList.of("nestedArray1", "nestedArray2")); - assertEquals(result.get(nameToSpec.get("nestedArrayArrayField")), ImmutableList.of("testNestedArray1", "testNestedArray2")); - assertEquals(result.get(nameToSpec.get("customProperties")), ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); - assertEquals(result.get(nameToSpec.get("esObjectField")), ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); + assertEquals( + result.get(nameToSpec.get("nestedArrayStringField")), + ImmutableList.of("nestedArray1", "nestedArray2")); + assertEquals( + result.get(nameToSpec.get("nestedArrayArrayField")), + ImmutableList.of("testNestedArray1", "testNestedArray2")); + assertEquals( + result.get(nameToSpec.get("customProperties")), + ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); + assertEquals( + result.get(nameToSpec.get("esObjectField")), + 
ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); } @Test public void testExtractorMaxValueLength() { EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); AspectSpec testEntityInfoSpec = testEntitySpec.getAspectSpec("testEntityInfo"); - Map<String, SearchableFieldSpec> nameToSpec = testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); + Map<String, SearchableFieldSpec> nameToSpec = + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); TestEntityInfo testEntityInfo = new TestEntityInfo(); Map<SearchableFieldSpec, List<Object>> result = - FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); - assertEquals(result, testEntityInfoSpec.getSearchableFieldSpecs() - .stream() + FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); + assertEquals( + result, + testEntityInfoSpec.getSearchableFieldSpecs().stream() .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); Urn urn = TestEntityUtil.getTestEntityUrn(); testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); - result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs(), 1); + result = + FieldExtractor.extractFields( + testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs(), 1); assertEquals(result.get(nameToSpec.get("textFieldOverride")), ImmutableList.of("test")); assertEquals(result.get(nameToSpec.get("foreignKey")), ImmutableList.of()); assertEquals(result.get(nameToSpec.get("nestedForeignKey")), ImmutableList.of(urn)); - assertEquals(result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); + assertEquals( + result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); assertEquals(result.get(nameToSpec.get("nestedIntegerField")), ImmutableList.of(1)); - assertEquals(result.get(nameToSpec.get("nestedArrayStringField")), ImmutableList.of("nestedArray1", "nestedArray2")); - assertEquals(result.get(nameToSpec.get("nestedArrayArrayField")), ImmutableList.of("testNestedArray1", "testNestedArray2")); - assertEquals(result.get(nameToSpec.get("customProperties")), ImmutableList.of(), "Expected no matching values because of value limit of 1"); - assertEquals(result.get(nameToSpec.get("esObjectField")), ImmutableList.of(), "Expected no matching values because of value limit of 1"); + assertEquals( + result.get(nameToSpec.get("nestedArrayStringField")), + ImmutableList.of("nestedArray1", "nestedArray2")); + assertEquals( + result.get(nameToSpec.get("nestedArrayArrayField")), + ImmutableList.of("testNestedArray1", "testNestedArray2")); + assertEquals( + result.get(nameToSpec.get("customProperties")), + ImmutableList.of(), + "Expected no matching values because of value limit of 1"); + assertEquals( + result.get(nameToSpec.get("esObjectField")), + ImmutableList.of(), + "Expected no matching values because of value limit of 1"); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java index 38a20ef4b7a9b..2af1eeb46f2ba 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java @@ -1,17 
+1,20 @@ package com.linkedin.metadata.graph; +import static org.testng.Assert.*; + import com.linkedin.common.urn.UrnUtils; import java.util.Collections; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EdgeTest { - private static final String SOURCE_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:foo,source1,PROD)"; - private static final String SOURCE_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:foo,source2,PROD)"; - private static final String DESTINATION_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:foo,destination1,PROD)"; - private static final String DESTINATION_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:foo,destination2,PROD)"; + private static final String SOURCE_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:foo,source1,PROD)"; + private static final String SOURCE_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:foo,source2,PROD)"; + private static final String DESTINATION_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:foo,destination1,PROD)"; + private static final String DESTINATION_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:foo,destination2,PROD)"; private static final String DOWNSTREAM_RELATIONSHIP_TYPE = "DownstreamOf"; private static final Long TIMESTAMP_1 = 1L; private static final Long TIMESTAMP_2 = 2L; @@ -21,39 +24,43 @@ public class EdgeTest { @Test public void testEdgeEquals() { // First edge - final Edge edge1 = new Edge( - UrnUtils.getUrn(SOURCE_URN_1), - UrnUtils.getUrn(DESTINATION_URN_1), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_2), - Collections.emptyMap()); + final Edge edge1 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_1), + UrnUtils.getUrn(DESTINATION_URN_1), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_2), + Collections.emptyMap()); - // Second edge has same source, destination, and relationship type as edge1, and should be considered the same edge. + // Second edge has same source, destination, and relationship type as edge1, and should be + // considered the same edge. // All other fields are different. - final Edge edge2 = new Edge( - UrnUtils.getUrn(SOURCE_URN_1), - UrnUtils.getUrn(DESTINATION_URN_1), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_2, - UrnUtils.getUrn(ACTOR_URN_2), - TIMESTAMP_2, - UrnUtils.getUrn(ACTOR_URN_2), - Collections.emptyMap()); + final Edge edge2 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_1), + UrnUtils.getUrn(DESTINATION_URN_1), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_2, + UrnUtils.getUrn(ACTOR_URN_2), + TIMESTAMP_2, + UrnUtils.getUrn(ACTOR_URN_2), + Collections.emptyMap()); assertEquals(edge1, edge2); // Third edge has different source and destination as edge1, and thus is not the same edge. 
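    // (Reading the two assertions in this test together: Edge equality evidently keys only on
    // source, destination, and relationship type. edge2 differed from edge1 in every audit field
    // (created/updated timestamps and actor urns) yet compared equal above, while edge3 below
    // differs only in its endpoints and is asserted unequal.)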
- final Edge edge3 = new Edge( - UrnUtils.getUrn(SOURCE_URN_2), - UrnUtils.getUrn(DESTINATION_URN_2), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - Collections.emptyMap()); + final Edge edge3 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_2), + UrnUtils.getUrn(DESTINATION_URN_2), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + Collections.emptyMap()); assertNotEquals(edge1, edge3); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java index 12cd24ae9986d..3a51344d5779d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java @@ -1,5 +1,14 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; +import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.DataFlowUrn; import com.linkedin.common.urn.DataJobUrn; @@ -27,64 +36,56 @@ import java.util.stream.IntStream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - - /** - * Base class for testing any GraphService implementation. - * Derive the test class from this base and get your GraphService implementation - * tested with all these tests. + * Base class for testing any GraphService implementation. Derive the test class from this base and + * get your GraphService implementation tested with all these tests. * - * You can add implementation specific tests in derived classes, or add general tests - * here and have all existing implementations tested in the same way. + * <p>You can add implementation specific tests in derived classes, or add general tests here and + * have all existing implementations tested in the same way. * - * The `getPopulatedGraphService` method calls `GraphService.addEdge` to provide a populated Graph. - * Feel free to add a test to your test implementation that calls `getPopulatedGraphService` and - * asserts the state of the graph in an implementation specific way. + * <p>The `getPopulatedGraphService` method calls `GraphService.addEdge` to provide a populated + * Graph. 
Feel free to add a test to your test implementation that calls `getPopulatedGraphService` + * and asserts the state of the graph in an implementation specific way. */ -abstract public class GraphServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class GraphServiceTestBase extends AbstractTestNGSpringContextTests { private static class RelatedEntityComparator implements Comparator<RelatedEntity> { @Override public int compare(RelatedEntity left, RelatedEntity right) { - int cmp = left.relationshipType.compareTo(right.relationshipType); - if (cmp != 0) { - return cmp; - } - return left.urn.compareTo(right.urn); + int cmp = left.relationshipType.compareTo(right.relationshipType); + if (cmp != 0) { + return cmp; + } + return left.urn.compareTo(right.urn); } } - protected static final RelatedEntityComparator RELATED_ENTITY_COMPARATOR = new RelatedEntityComparator(); + protected static final RelatedEntityComparator RELATED_ENTITY_COMPARATOR = + new RelatedEntityComparator(); - /** - * Some test URN types. - */ + /** Some test URN types. */ protected static String datasetType = "dataset"; + protected static String userType = "user"; - /** - * Some test datasets. - */ - protected static String datasetOneUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; - protected static String datasetTwoUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; - protected static String datasetThreeUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; - protected static String datasetFourUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; - protected static String datasetFiveUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; + /** Some test datasets. */ + protected static String datasetOneUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; + + protected static String datasetTwoUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; + protected static String datasetThreeUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; + protected static String datasetFourUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; + protected static String datasetFiveUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; protected static Urn datasetOneUrn = createFromString(datasetOneUrnString); protected static Urn datasetTwoUrn = createFromString(datasetTwoUrnString); @@ -94,73 +95,85 @@ public int compare(RelatedEntity left, RelatedEntity right) { protected static String unknownUrnString = "urn:li:unknown:(urn:li:unknown:Unknown)"; - /** - * Some dataset owners. - */ - protected static String userOneUrnString = "urn:li:" + userType + ":(urn:li:user:system,Ingress,PROD)"; - protected static String userTwoUrnString = "urn:li:" + userType + ":(urn:li:user:individual,UserA,DEV)"; + /** Some dataset owners. 
*/ + protected static String userOneUrnString = + "urn:li:" + userType + ":(urn:li:user:system,Ingress,PROD)"; + + protected static String userTwoUrnString = + "urn:li:" + userType + ":(urn:li:user:individual,UserA,DEV)"; protected static Urn userOneUrn = createFromString(userOneUrnString); protected static Urn userTwoUrn = createFromString(userTwoUrnString); protected static Urn unknownUrn = createFromString(unknownUrnString); - /** - * Some data jobs - */ - protected static Urn dataJobOneUrn = new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job1"); - protected static Urn dataJobTwoUrn = new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job2"); + /** Some data jobs */ + protected static Urn dataJobOneUrn = + new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job1"); - /** - * Some test relationships. - */ + protected static Urn dataJobTwoUrn = + new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job2"); + + /** Some test relationships. */ protected static String downstreamOf = "DownstreamOf"; + protected static String hasOwner = "HasOwner"; protected static String knowsUser = "KnowsUser"; protected static String produces = "Produces"; protected static String consumes = "Consumes"; - protected static Set<String> allRelationshipTypes = new HashSet<>(Arrays.asList(downstreamOf, hasOwner, knowsUser)); - - /** - * Some expected related entities. - */ - protected static RelatedEntity downstreamOfDatasetOneRelatedEntity = new RelatedEntity(downstreamOf, datasetOneUrnString); - protected static RelatedEntity downstreamOfDatasetTwoRelatedEntity = new RelatedEntity(downstreamOf, datasetTwoUrnString); - protected static RelatedEntity downstreamOfDatasetThreeRelatedEntity = new RelatedEntity(downstreamOf, datasetThreeUrnString); - protected static RelatedEntity downstreamOfDatasetFourRelatedEntity = new RelatedEntity(downstreamOf, datasetFourUrnString); - - protected static RelatedEntity hasOwnerDatasetOneRelatedEntity = new RelatedEntity(hasOwner, datasetOneUrnString); - protected static RelatedEntity hasOwnerDatasetTwoRelatedEntity = new RelatedEntity(hasOwner, datasetTwoUrnString); - protected static RelatedEntity hasOwnerDatasetThreeRelatedEntity = new RelatedEntity(hasOwner, datasetThreeUrnString); - protected static RelatedEntity hasOwnerDatasetFourRelatedEntity = new RelatedEntity(hasOwner, datasetFourUrnString); - protected static RelatedEntity hasOwnerUserOneRelatedEntity = new RelatedEntity(hasOwner, userOneUrnString); - protected static RelatedEntity hasOwnerUserTwoRelatedEntity = new RelatedEntity(hasOwner, userTwoUrnString); - - protected static RelatedEntity knowsUserOneRelatedEntity = new RelatedEntity(knowsUser, userOneUrnString); - protected static RelatedEntity knowsUserTwoRelatedEntity = new RelatedEntity(knowsUser, userTwoUrnString); - - /** - * Some relationship filters. - */ - protected static RelationshipFilter outgoingRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING); - protected static RelationshipFilter incomingRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING); - protected static RelationshipFilter undirectedRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.UNDIRECTED); - - /** - * Any source and destination type value. - */ + protected static Set<String> allRelationshipTypes = + new HashSet<>(Arrays.asList(downstreamOf, hasOwner, knowsUser)); + + /** Some expected related entities. 
*/ + protected static RelatedEntity downstreamOfDatasetOneRelatedEntity = + new RelatedEntity(downstreamOf, datasetOneUrnString); + + protected static RelatedEntity downstreamOfDatasetTwoRelatedEntity = + new RelatedEntity(downstreamOf, datasetTwoUrnString); + protected static RelatedEntity downstreamOfDatasetThreeRelatedEntity = + new RelatedEntity(downstreamOf, datasetThreeUrnString); + protected static RelatedEntity downstreamOfDatasetFourRelatedEntity = + new RelatedEntity(downstreamOf, datasetFourUrnString); + + protected static RelatedEntity hasOwnerDatasetOneRelatedEntity = + new RelatedEntity(hasOwner, datasetOneUrnString); + protected static RelatedEntity hasOwnerDatasetTwoRelatedEntity = + new RelatedEntity(hasOwner, datasetTwoUrnString); + protected static RelatedEntity hasOwnerDatasetThreeRelatedEntity = + new RelatedEntity(hasOwner, datasetThreeUrnString); + protected static RelatedEntity hasOwnerDatasetFourRelatedEntity = + new RelatedEntity(hasOwner, datasetFourUrnString); + protected static RelatedEntity hasOwnerUserOneRelatedEntity = + new RelatedEntity(hasOwner, userOneUrnString); + protected static RelatedEntity hasOwnerUserTwoRelatedEntity = + new RelatedEntity(hasOwner, userTwoUrnString); + + protected static RelatedEntity knowsUserOneRelatedEntity = + new RelatedEntity(knowsUser, userOneUrnString); + protected static RelatedEntity knowsUserTwoRelatedEntity = + new RelatedEntity(knowsUser, userTwoUrnString); + + /** Some relationship filters. */ + protected static RelationshipFilter outgoingRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING); + + protected static RelationshipFilter incomingRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING); + protected static RelationshipFilter undirectedRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.UNDIRECTED); + + /** Any source and destination type value. */ protected static @Nullable List<String> anyType = null; - /** - * Timeout used to test concurrent ops in doTestConcurrentOp. - */ + /** Timeout used to test concurrent ops in doTestConcurrentOp. */ protected Duration getTestConcurrentOpTimeout() { - return Duration.ofMinutes(1); + return Duration.ofMinutes(1); } @BeforeMethod public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -176,26 +189,26 @@ public void testStaticUrns() { } /** - * Provides the current GraphService instance to test. This is being called by the test method - * at most once. The serviced graph should be empty. + * Provides the current GraphService instance to test. This is being called by the test method at + * most once. The serviced graph should be empty. * * @return the GraphService instance to test * @throws Exception on failure */ @Nonnull - abstract protected GraphService getGraphService() throws Exception; + protected abstract GraphService getGraphService() throws Exception; /** - * Allows the specific GraphService test implementation to wait for GraphService writes to - * be synced / become available to reads. + * Allows the specific GraphService test implementation to wait for GraphService writes to be + * synced / become available to reads. 
* * @throws Exception on failure */ - abstract protected void syncAfterWrite() throws Exception; + protected abstract void syncAfterWrite() throws Exception; /** - * Calls getGraphService to retrieve the test GraphService and populates it - * with edges via `GraphService.addEdge`. + * Calls getGraphService to retrieve the test GraphService and populates it with edges via + * `GraphService.addEdge`. * * @return test GraphService * @throws Exception on failure @@ -203,19 +216,17 @@ public void testStaticUrns() { protected GraphService getPopulatedGraphService() throws Exception { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( + List<Edge> edges = + Arrays.asList( new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null), new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetTwoUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetThreeUrn, userTwoUrn, hasOwner, null, null, null, null, null), new Edge(datasetFourUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null), - new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null) - ); + new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null)); edges.forEach(service::addEdge); syncAfterWrite(); @@ -226,27 +237,24 @@ protected GraphService getPopulatedGraphService() throws Exception { protected GraphService getLineagePopulatedGraphService() throws Exception { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( + List<Edge> edges = + Arrays.asList( new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null), new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetTwoUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetThreeUrn, userTwoUrn, hasOwner, null, null, null, null, null), new Edge(datasetFourUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null), new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(dataJobOneUrn, datasetOneUrn, consumes, null, null, null, null, null), new Edge(dataJobOneUrn, datasetTwoUrn, consumes, null, null, null, null, null), new Edge(dataJobOneUrn, datasetThreeUrn, produces, null, null, null, null, null), new Edge(dataJobOneUrn, datasetFourUrn, produces, null, null, null, null, null), new Edge(dataJobTwoUrn, datasetOneUrn, consumes, null, null, null, null, null), new Edge(dataJobTwoUrn, datasetTwoUrn, consumes, null, null, null, null, null), - new Edge(dataJobTwoUrn, dataJobOneUrn, downstreamOf, null, null, null, null, null) - ); + new Edge(dataJobTwoUrn, dataJobOneUrn, downstreamOf, null, null, null, null, null)); edges.forEach(service::addEdge); syncAfterWrite(); @@ -254,8 +262,7 @@ protected GraphService getLineagePopulatedGraphService() throws Exception { return service; } - protected static @Nullable - Urn createFromString(@Nonnull String rawUrn) { + protected static @Nullable Urn createFromString(@Nonnull String rawUrn) { try { return 
Urn.createFromString(rawUrn); } catch (URISyntaxException e) { @@ -264,10 +271,12 @@ Urn createFromString(@Nonnull String rawUrn) { } protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, List<RelatedEntity> expected) { - assertEqualsAnyOrder(actual, new RelatedEntitiesResult(0, expected.size(), expected.size(), expected)); + assertEqualsAnyOrder( + actual, new RelatedEntitiesResult(0, expected.size(), expected.size(), expected)); } - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { assertEquals(actual.start, expected.start); assertEquals(actual.count, expected.count); assertEquals(actual.total, expected.total); @@ -276,141 +285,156 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected) { assertEquals( - actual.stream().sorted().collect(Collectors.toList()), - expected.stream().sorted().collect(Collectors.toList()) - ); + actual.stream().sorted().collect(Collectors.toList()), + expected.stream().sorted().collect(Collectors.toList())); } - protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected, Comparator<T> comparator) { + protected <T> void assertEqualsAnyOrder( + List<T> actual, List<T> expected, Comparator<T> comparator) { assertEquals( - actual.stream().sorted(comparator).collect(Collectors.toList()), - expected.stream().sorted(comparator).collect(Collectors.toList()) - ); + actual.stream().sorted(comparator).collect(Collectors.toList()), + expected.stream().sorted(comparator).collect(Collectors.toList())); } @DataProvider(name = "AddEdgeTests") public Object[][] getAddEdgeTests() { - return new Object[][]{ - new Object[]{ - Arrays.asList(), - Arrays.asList(), - Arrays.asList() - }, - new Object[]{ - Arrays.asList(new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)), - Arrays.asList(downstreamOfDatasetTwoRelatedEntity), - Arrays.asList(downstreamOfDatasetOneRelatedEntity) - }, - new Object[]{ - Arrays.asList( - new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetTwoUrn, datasetThreeUrn, downstreamOf, null, null, null, null, null) - ), - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity), - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[]{ - Arrays.asList( - new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), - new Edge(datasetTwoUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null) - ), - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserTwoRelatedEntity - ), - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, - hasOwnerDatasetOneRelatedEntity, - hasOwnerDatasetTwoRelatedEntity, - knowsUserOneRelatedEntity - ) - }, - new Object[]{ - Arrays.asList( - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null) - ), - Arrays.asList(knowsUserOneRelatedEntity), - 
Arrays.asList(knowsUserOneRelatedEntity) - } + return new Object[][] { + new Object[] {Arrays.asList(), Arrays.asList(), Arrays.asList()}, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)), + Arrays.asList(downstreamOfDatasetTwoRelatedEntity), + Arrays.asList(downstreamOfDatasetOneRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), + new Edge(datasetTwoUrn, datasetThreeUrn, downstreamOf, null, null, null, null, null)), + Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity), + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), + new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), + new Edge(datasetTwoUrn, userTwoUrn, hasOwner, null, null, null, null, null), + new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null)), + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, + hasOwnerUserTwoRelatedEntity, + knowsUserTwoRelatedEntity), + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + knowsUserOneRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null)), + Arrays.asList(knowsUserOneRelatedEntity), + Arrays.asList(knowsUserOneRelatedEntity) + } }; } @Test(dataProvider = "AddEdgeTests") - public void testAddEdge(List<Edge> edges, List<RelatedEntity> expectedOutgoing, List<RelatedEntity> expectedIncoming) throws Exception { - GraphService service = getGraphService(); - - edges.forEach(service::addEdge); - syncAfterWrite(); - - RelatedEntitiesResult relatedOutgoing = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - outgoingRelationships, - 0, 100 - ); - assertEqualsAnyOrder(relatedOutgoing, expectedOutgoing); - - RelatedEntitiesResult relatedIncoming = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - incomingRelationships, - 0, 100 - ); - assertEqualsAnyOrder(relatedIncoming, expectedIncoming); + public void testAddEdge( + List<Edge> edges, List<RelatedEntity> expectedOutgoing, List<RelatedEntity> expectedIncoming) + throws Exception { + GraphService service = getGraphService(); + + edges.forEach(service::addEdge); + syncAfterWrite(); + + RelatedEntitiesResult relatedOutgoing = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder(relatedOutgoing, expectedOutgoing); + + RelatedEntitiesResult relatedIncoming = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder(relatedIncoming, expectedIncoming); } @Test public void testPopulatedGraphService() throws Exception { - GraphService service = getPopulatedGraphService(); - - 
RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); - assertEqualsAnyOrder( - relatedOutgoingEntitiesBeforeRemove, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); - RelatedEntitiesResult relatedIncomingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships, - 0, 100); - assertEqualsAnyOrder( - relatedIncomingEntitiesBeforeRemove, - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity, - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); + GraphService service = getPopulatedGraphService(); + + RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedOutgoingEntitiesBeforeRemove, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, + knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity)); + RelatedEntitiesResult relatedIncomingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedIncomingEntitiesBeforeRemove, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, + hasOwnerDatasetFourRelatedEntity, + knowsUserOneRelatedEntity, + knowsUserTwoRelatedEntity)); } @Test public void testPopulatedGraphServiceGetLineage() throws Exception { GraphService service = getLineagePopulatedGraphService(); - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 0); assertEquals(upstreamLineage.getRelationships().size(), 0); - EntityLineageResult downstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); + EntityLineageResult downstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 3); assertEquals(downstreamLineage.getRelationships().size(), 3); - Map<Urn, LineageRelationship> relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + Map<Urn, LineageRelationship> relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); 
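    // (For orientation, given the edges added in getLineagePopulatedGraphService(): datasetOne's
    // depth-1 downstream neighbours are datasetTwo (the DownstreamOf edge datasetTwo -> datasetOne)
    // plus dataJobOne and dataJobTwo (their Consumes edges point at datasetOne), which is why a
    // total of 3 is asserted above.)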
assertTrue(relationships.containsKey(datasetTwoUrn)); assertEquals(relationships.get(datasetTwoUrn).getType(), downstreamOf); assertTrue(relationships.containsKey(dataJobOneUrn)); @@ -421,22 +445,25 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 2); assertEquals(upstreamLineage.getRelationships().size(), 2); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetTwoUrn)); assertEquals(relationships.get(datasetTwoUrn).getType(), downstreamOf); assertTrue(relationships.containsKey(dataJobOneUrn)); assertEquals(relationships.get(dataJobOneUrn).getType(), produces); - downstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); + downstreamLineage = + service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 0); assertEquals(downstreamLineage.getRelationships().size(), 0); upstreamLineage = service.getLineage(dataJobOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 2); assertEquals(upstreamLineage.getRelationships().size(), 2); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetOneUrn)); assertEquals(relationships.get(datasetOneUrn).getType(), consumes); assertTrue(relationships.containsKey(datasetTwoUrn)); @@ -445,8 +472,9 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { downstreamLineage = service.getLineage(dataJobOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 3); assertEquals(downstreamLineage.getRelationships().size(), 3); - relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetThreeUrn)); assertEquals(relationships.get(datasetThreeUrn).getType(), produces); assertTrue(relationships.containsKey(datasetFourUrn)); @@ -458,458 +486,425 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { @DataProvider(name = "FindRelatedEntitiesSourceEntityFilterTests") public Object[][] getFindRelatedEntitiesSourceEntityFilterTests() { return new Object[][] { - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - undirectedRelationships, - 
Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - }, - - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) - } + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesSourceEntityFilterTests") - public void testFindRelatedEntitiesSourceEntityFilter(Filter sourceEntityFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceEntityFilter( + Filter sourceEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - sourceEntityFilter, - EMPTY_FILTER, - relationshipTypes, - relationships, - expectedRelatedEntities - ); + sourceEntityFilter, + EMPTY_FILTER, + relationshipTypes, + relationships, + expectedRelatedEntities); } @DataProvider(name = 
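The provider above drives testFindRelatedEntitiesSourceEntityFilter through every source-filter and direction combination. As a reading aid for these reformatted hunks, here is a minimal sketch of the call shape under test; it assumes a populated GraphService plus the anyType, EMPTY_FILTER, downstreamOf, and outgoingRelationships helpers this test base defines elsewhere:

    // Sketch: which entities does datasetTwo reach via outgoing downstreamOf edges?
    RelatedEntitiesResult result =
        service.findRelatedEntities(
            anyType,                               // source entity types (null = any)
            newFilter("urn", datasetTwoUrnString), // source entity filter
            anyType,                               // destination entity types
            EMPTY_FILTER,                          // destination entity filter
            Arrays.asList(downstreamOf),           // relationship types
            outgoingRelationships,                 // direction filter
            0,                                     // offset
            10);                                   // count
    // Against the populated test graph this should contain downstreamOfDatasetOneRelatedEntity.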
"FindRelatedEntitiesDestinationEntityFilterTests") public Object[][] getFindRelatedEntitiesDestinationEntityFilterTests() { return new Object[][] { - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity) - }, - - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList() - }, - - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity) - } + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList(downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", datasetTwoUrnString), + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList(downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList() + }, + new Object[] { + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesDestinationEntityFilterTests") - public void testFindRelatedEntitiesDestinationEntityFilter(Filter destinationEntityFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationEntityFilter( + Filter destinationEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - EMPTY_FILTER, - destinationEntityFilter, - relationshipTypes, - relationships, - 
expectedRelatedEntities - ); + EMPTY_FILTER, + destinationEntityFilter, + relationshipTypes, + relationships, + expectedRelatedEntities); } private void doTestFindRelatedEntities( - final Filter sourceEntityFilter, - final Filter destinationEntityFilter, - List<String> relationshipTypes, - final RelationshipFilter relationshipFilter, - List<RelatedEntity> expectedRelatedEntities - ) throws Exception { + final Filter sourceEntityFilter, + final Filter destinationEntityFilter, + List<String> relationshipTypes, + final RelationshipFilter relationshipFilter, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - anyType, sourceEntityFilter, - anyType, destinationEntityFilter, - relationshipTypes, relationshipFilter, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + anyType, + sourceEntityFilter, + anyType, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + 0, + 10); assertEqualsAnyOrder(relatedEntities, expectedRelatedEntities); } @DataProvider(name = "FindRelatedEntitiesSourceTypeTests") public Object[][] getFindRelatedEntitiesSourceTypeTests() { - return new Object[][]{ - new Object[] { - null, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - // "" used to be any type before v0.9.0, which is now encoded by null - new Object[] { - "", - Arrays.asList(downstreamOf), - outgoingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - incomingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - undirectedRelationships, - Collections.emptyList() - }, - - new Object[]{ - datasetType, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[]{ - datasetType, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[]{ - datasetType, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[]{ - userType, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList() - }, - new Object[]{ - userType, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList() - }, - new Object[]{ - userType, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList() - }, - - new Object[]{ - userType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList() - }, - new Object[]{ - userType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList( - 
hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - new Object[]{ - userType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - } + return new Object[][] { + new Object[] { + null, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + + // "" used to be any type before v0.9.0, which is now encoded by null + new Object[] { + "", Arrays.asList(downstreamOf), outgoingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), incomingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), undirectedRelationships, Collections.emptyList() + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] {userType, Arrays.asList(downstreamOf), outgoingRelationships, Arrays.asList()}, + new Object[] {userType, Arrays.asList(downstreamOf), incomingRelationships, Arrays.asList()}, + new Object[] { + userType, Arrays.asList(downstreamOf), undirectedRelationships, Arrays.asList() + }, + new Object[] {userType, Arrays.asList(hasOwner), outgoingRelationships, Arrays.asList()}, + new Object[] { + userType, + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + }, + new Object[] { + userType, + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesSourceTypeTests") - public void testFindRelatedEntitiesSourceType(String entityTypeFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String entityTypeFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - entityTypeFilter != 
null ? ImmutableList.of(entityTypeFilter) : null, - anyType, - relationshipTypes, - relationships, - expectedRelatedEntities - ); + entityTypeFilter != null ? ImmutableList.of(entityTypeFilter) : null, + anyType, + relationshipTypes, + relationships, + expectedRelatedEntities); } @DataProvider(name = "FindRelatedEntitiesDestinationTypeTests") public Object[][] getFindRelatedEntitiesDestinationTypeTests() { return new Object[][] { - new Object[] { - null, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[] { - "", - Arrays.asList(downstreamOf), - outgoingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - incomingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - undirectedRelationships, - Collections.emptyList() - }, - - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[] { - datasetType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList() - }, - new Object[] { - datasetType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - new Object[] { - datasetType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - - new Object[] { - userType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) - }, - new Object[] { - userType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - userType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) - } + new Object[] { + null, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { 
+ null, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + "", Arrays.asList(downstreamOf), outgoingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), incomingRelationships, Collections.emptyList() + }, + new Object[] { + "", Arrays.asList(downstreamOf), undirectedRelationships, Collections.emptyList() + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) + }, + new Object[] {datasetType, Arrays.asList(hasOwner), outgoingRelationships, Arrays.asList()}, + new Object[] { + datasetType, + Arrays.asList(hasOwner), + incomingRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + }, + new Object[] { + datasetType, + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList( + hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity) + }, + new Object[] { + userType, + Arrays.asList(hasOwner), + outgoingRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) + }, + new Object[] {userType, Arrays.asList(hasOwner), incomingRelationships, Arrays.asList()}, + new Object[] { + userType, + Arrays.asList(hasOwner), + undirectedRelationships, + Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) + } }; } @Test(dataProvider = "FindRelatedEntitiesDestinationTypeTests") - public void testFindRelatedEntitiesDestinationType(String entityTypeFilter, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String entityTypeFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { doTestFindRelatedEntities( - anyType, - entityTypeFilter != null ? ImmutableList.of(entityTypeFilter) : null, - relationshipTypes, - relationships, - expectedRelatedEntities - ); + anyType, + entityTypeFilter != null ? 
ImmutableList.of(entityTypeFilter) : null, + relationshipTypes, + relationships, + expectedRelatedEntities); } private void doTestFindRelatedEntities( - final List<String> sourceType, - final List<String> destinationType, - final List<String> relationshipTypes, - final RelationshipFilter relationshipFilter, - List<RelatedEntity> expectedRelatedEntities - ) throws Exception { + final List<String> sourceType, + final List<String> destinationType, + final List<String> relationshipTypes, + final RelationshipFilter relationshipFilter, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - sourceType, EMPTY_FILTER, - destinationType, EMPTY_FILTER, - relationshipTypes, relationshipFilter, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + sourceType, + EMPTY_FILTER, + destinationType, + EMPTY_FILTER, + relationshipTypes, + relationshipFilter, + 0, + 10); assertEqualsAnyOrder(relatedEntities, expectedRelatedEntities); } - private void doTestFindRelatedEntitiesEntityType(@Nullable List<String> sourceType, - @Nullable List<String> destinationType, - @Nonnull String relationshipType, - @Nonnull RelationshipFilter relationshipFilter, - @Nonnull GraphService service, - @Nonnull RelatedEntity... expectedEntities) { - RelatedEntitiesResult actualEntities = service.findRelatedEntities( - sourceType, EMPTY_FILTER, - destinationType, EMPTY_FILTER, - Arrays.asList(relationshipType), relationshipFilter, - 0, 100 - ); + private void doTestFindRelatedEntitiesEntityType( + @Nullable List<String> sourceType, + @Nullable List<String> destinationType, + @Nonnull String relationshipType, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull GraphService service, + @Nonnull RelatedEntity... 
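The type-filter providers above encode a subtle contract worth calling out: a null entity-type list matches any type, while the legacy empty string matches nothing as of v0.9.0. A hedged sketch of the distinction, reusing the same test-base helpers:

    // null type list: match entities of any type.
    RelatedEntitiesResult anySourceType =
        service.findRelatedEntities(
            null, EMPTY_FILTER,
            null, EMPTY_FILTER,
            Arrays.asList(downstreamOf), outgoingRelationships,
            0, 100);

    // "" as a type: matches nothing (before v0.9.0 it meant "any type").
    RelatedEntitiesResult emptyStringType =
        service.findRelatedEntities(
            ImmutableList.of(""), EMPTY_FILTER,
            null, EMPTY_FILTER,
            Arrays.asList(downstreamOf), outgoingRelationships,
            0, 100);
    // emptyStringType.entities is expected to be empty here.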
-  private void doTestFindRelatedEntitiesEntityType(@Nullable List<String> sourceType,
-                                                   @Nullable List<String> destinationType,
-                                                   @Nonnull String relationshipType,
-                                                   @Nonnull RelationshipFilter relationshipFilter,
-                                                   @Nonnull GraphService service,
-                                                   @Nonnull RelatedEntity... expectedEntities) {
-    RelatedEntitiesResult actualEntities = service.findRelatedEntities(
-        sourceType, EMPTY_FILTER,
-        destinationType, EMPTY_FILTER,
-        Arrays.asList(relationshipType), relationshipFilter,
-        0, 100
-    );
+  private void doTestFindRelatedEntitiesEntityType(
+      @Nullable List<String> sourceType,
+      @Nullable List<String> destinationType,
+      @Nonnull String relationshipType,
+      @Nonnull RelationshipFilter relationshipFilter,
+      @Nonnull GraphService service,
+      @Nonnull RelatedEntity... expectedEntities) {
+    RelatedEntitiesResult actualEntities =
+        service.findRelatedEntities(
+            sourceType,
+            EMPTY_FILTER,
+            destinationType,
+            EMPTY_FILTER,
+            Arrays.asList(relationshipType),
+            relationshipFilter,
+            0,
+            100);
     assertEqualsAnyOrder(actualEntities, Arrays.asList(expectedEntities));
   }
@@ -921,18 +916,41 @@ public void testFindRelatedEntitiesNullSourceType() throws Exception {
     assertNotNull(nullUrn);
     RelatedEntity nullRelatedEntity = new RelatedEntity(downstreamOf, nullUrn.toString());
 
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, null, downstreamOf, outgoingRelationships, service);
 
-    service.addEdge(new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null));
+    service.addEdge(
+        new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null));
     syncAfterWrite();
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, downstreamOfDatasetOneRelatedEntity);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType,
+        null,
+        downstreamOf,
+        outgoingRelationships,
+        service,
+        downstreamOfDatasetOneRelatedEntity);
 
     service.addEdge(new Edge(datasetOneUrn, nullUrn, downstreamOf, null, null, null, null, null));
     syncAfterWrite();
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service, nullRelatedEntity);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, nullRelatedEntity, downstreamOfDatasetOneRelatedEntity);
+    doTestFindRelatedEntitiesEntityType(
+        anyType,
+        ImmutableList.of("null"),
+        downstreamOf,
+        outgoingRelationships,
+        service,
+        nullRelatedEntity);
+    doTestFindRelatedEntitiesEntityType(
+        anyType,
+        null,
+        downstreamOf,
+        outgoingRelationships,
+        service,
+        nullRelatedEntity,
+        downstreamOfDatasetOneRelatedEntity);
   }
 
   @Test
@@ -943,97 +961,143 @@ public void testFindRelatedEntitiesNullDestinationType() throws Exception {
     assertNotNull(nullUrn);
     RelatedEntity nullRelatedEntity = new RelatedEntity(downstreamOf, nullUrn.toString());
 
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, null, downstreamOf, outgoingRelationships, service);
 
-    service.addEdge(new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null));
+    service.addEdge(
+        new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null));
     syncAfterWrite();
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, downstreamOfDatasetOneRelatedEntity);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType,
+        null,
+        downstreamOf,
+        outgoingRelationships,
+        service,
+        downstreamOfDatasetOneRelatedEntity);
 
     service.addEdge(new Edge(datasetOneUrn, nullUrn, downstreamOf, null, null, null, null, null));
     syncAfterWrite();
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service, nullRelatedEntity);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, nullRelatedEntity, downstreamOfDatasetOneRelatedEntity);
+    doTestFindRelatedEntitiesEntityType(
+        anyType,
+        ImmutableList.of("null"),
+        downstreamOf,
+        outgoingRelationships,
+        service,
+        nullRelatedEntity);
+    doTestFindRelatedEntitiesEntityType(
+        anyType,
+        null,
+        downstreamOf,
+        outgoingRelationships,
+        service,
+        nullRelatedEntity,
+        downstreamOfDatasetOneRelatedEntity);
   }
 
   @Test
   public void testFindRelatedEntitiesRelationshipTypes() throws Exception {
     GraphService service = getPopulatedGraphService();
 
-    RelatedEntitiesResult allOutgoingRelatedEntities = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100
-    );
-    assertEqualsAnyOrder(
-        allOutgoingRelatedEntities,
-        Arrays.asList(
-            downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity,
-            hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity,
-            knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity
-        )
-    );
-
-    RelatedEntitiesResult allIncomingRelatedEntities = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships,
-        0, 100
-    );
+    RelatedEntitiesResult allOutgoingRelatedEntities =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
     assertEqualsAnyOrder(
-        allIncomingRelatedEntities,
-        Arrays.asList(
-            downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity,
-            hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity,
-            knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity
-        )
-    );
-
-    RelatedEntitiesResult allUnknownRelationshipTypeRelatedEntities = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList("unknownRelationshipType", "unseenRelationshipType"), outgoingRelationships,
-        0, 100
-    );
+        allOutgoingRelatedEntities,
+        Arrays.asList(
+            downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity,
+            hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity,
+            knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity));
+
+    RelatedEntitiesResult allIncomingRelatedEntities =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            incomingRelationships,
+            0,
+            100);
     assertEqualsAnyOrder(
-        allUnknownRelationshipTypeRelatedEntities,
-        Collections.emptyList()
-    );
-
-    RelatedEntitiesResult someUnknownRelationshipTypeRelatedEntities = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList("unknownRelationshipType", downstreamOf), outgoingRelationships,
-        0, 100
-    );
+        allIncomingRelatedEntities,
+        Arrays.asList(
+            downstreamOfDatasetTwoRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity,
+            downstreamOfDatasetFourRelatedEntity,
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            hasOwnerDatasetThreeRelatedEntity,
+            hasOwnerDatasetFourRelatedEntity,
+            knowsUserOneRelatedEntity,
+            knowsUserTwoRelatedEntity));
+
+    RelatedEntitiesResult allUnknownRelationshipTypeRelatedEntities =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList("unknownRelationshipType", "unseenRelationshipType"),
+            outgoingRelationships,
+            0,
+            100);
+    assertEqualsAnyOrder(allUnknownRelationshipTypeRelatedEntities, Collections.emptyList());
+
+    RelatedEntitiesResult someUnknownRelationshipTypeRelatedEntities =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList("unknownRelationshipType", downstreamOf),
+            outgoingRelationships,
+            0,
+            100);
     assertEqualsAnyOrder(
-        someUnknownRelationshipTypeRelatedEntities,
-        Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)
-    );
+        someUnknownRelationshipTypeRelatedEntities,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity));
   }
 
   @Test
   public void testFindRelatedEntitiesNoRelationshipTypes() throws Exception {
     GraphService service = getPopulatedGraphService();
 
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Collections.emptyList(), outgoingRelationships,
-        0, 10
-    );
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Collections.emptyList(),
+            outgoingRelationships,
+            0,
+            10);
 
     assertEquals(relatedEntities.entities, Collections.emptyList());
 
-    // does the test actually test something? is the Collections.emptyList() the only reason why we did not get any related urns?
-    RelatedEntitiesResult relatedEntitiesAll = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 10
-    );
+    // does the test actually test something? is the Collections.emptyList() the only reason why we
+    // did not get any related urns?
+    RelatedEntitiesResult relatedEntitiesAll =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            10);
 
     assertNotEquals(relatedEntitiesAll.entities, Collections.emptyList());
   }
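The offset-and-count hunk a little further below pages through the result set one entity per call and checks it against a single bulk query. A compact sketch of that pagination pattern, assuming the same populated service and helpers:

    RelatedEntitiesResult all =
        service.findRelatedEntities(
            ImmutableList.of(datasetType), EMPTY_FILTER,
            anyType, EMPTY_FILTER,
            Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
            0, 100);

    // Fetch the same result set one entity at a time via offset/count.
    List<RelatedEntity> paged = new ArrayList<>();
    for (int offset = 0; offset < all.entities.size(); offset++) {
      paged.addAll(
          service.findRelatedEntities(
                  ImmutableList.of(datasetType), EMPTY_FILTER,
                  anyType, EMPTY_FILTER,
                  Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
                  offset, 1)
              .entities);
    }
    // paged is expected to equal all.entities, in order.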
@@ -1042,21 +1106,29 @@ public void testFindRelatedEntitiesNoRelationshipTypes() throws Exception {
   public void testFindRelatedEntitiesAllFilters() throws Exception {
     GraphService service = getPopulatedGraphService();
 
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        ImmutableList.of(datasetType), newFilter("urn", datasetOneUrnString),
-        ImmutableList.of(userType), newFilter("urn", userOneUrnString),
-        Arrays.asList(hasOwner), outgoingRelationships,
-        0, 10
-    );
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType),
+            newFilter("urn", datasetOneUrnString),
+            ImmutableList.of(userType),
+            newFilter("urn", userOneUrnString),
+            Arrays.asList(hasOwner),
+            outgoingRelationships,
+            0,
+            10);
 
     assertEquals(relatedEntities.entities, Arrays.asList(hasOwnerUserOneRelatedEntity));
 
-    relatedEntities = service.findRelatedEntities(
-        ImmutableList.of(datasetType), newFilter("urn", datasetOneUrnString),
-        ImmutableList.of(userType), newFilter("urn", userTwoUrnString),
-        Arrays.asList(hasOwner), incomingRelationships,
-        0, 10
-    );
+    relatedEntities =
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType),
+            newFilter("urn", datasetOneUrnString),
+            ImmutableList.of(userType),
+            newFilter("urn", userTwoUrnString),
+            Arrays.asList(hasOwner),
+            incomingRelationships,
+            0,
+            10);
 
     assertEquals(relatedEntities.entities, Collections.emptyList());
   }
@@ -1065,21 +1137,29 @@ public void testFindRelatedEntitiesAllFilters() throws Exception {
   public void testFindRelatedEntitiesMultipleEntityTypes() throws Exception {
     GraphService service = getPopulatedGraphService();
 
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        ImmutableList.of(datasetType, userType), newFilter("urn", datasetOneUrnString),
-        ImmutableList.of(datasetType, userType), newFilter("urn", userOneUrnString),
-        Arrays.asList(hasOwner), outgoingRelationships,
-        0, 10
-    );
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType, userType),
+            newFilter("urn", datasetOneUrnString),
+            ImmutableList.of(datasetType, userType),
+            newFilter("urn", userOneUrnString),
+            Arrays.asList(hasOwner),
+            outgoingRelationships,
+            0,
+            10);
 
     assertEquals(relatedEntities.entities, Arrays.asList(hasOwnerUserOneRelatedEntity));
 
-    relatedEntities = service.findRelatedEntities(
-        ImmutableList.of(datasetType, userType), newFilter("urn", datasetOneUrnString),
-        ImmutableList.of(datasetType, userType), newFilter("urn", userTwoUrnString),
-        Arrays.asList(hasOwner), incomingRelationships,
-        0, 10
-    );
+    relatedEntities =
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType, userType),
+            newFilter("urn", datasetOneUrnString),
+            ImmutableList.of(datasetType, userType),
+            newFilter("urn", userTwoUrnString),
+            Arrays.asList(hasOwner),
+            incomingRelationships,
+            0,
+            10);
 
     assertEquals(relatedEntities.entities, Collections.emptyList());
   }
@@ -1089,161 +1169,227 @@ public void testFindRelatedEntitiesOffsetAndCount() throws Exception {
     GraphService service = getPopulatedGraphService();
 
     // populated graph asserted in testPopulatedGraphService
-    RelatedEntitiesResult allRelatedEntities = service.findRelatedEntities(
-        ImmutableList.of(datasetType), EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100
-    );
+    RelatedEntitiesResult allRelatedEntities =
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType),
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
 
     List<RelatedEntity> individualRelatedEntities = new ArrayList<>();
     IntStream.range(0, allRelatedEntities.entities.size())
-        .forEach(idx -> individualRelatedEntities.addAll(
+        .forEach(
+            idx ->
+                individualRelatedEntities.addAll(
                     service.findRelatedEntities(
-                        ImmutableList.of(datasetType), EMPTY_FILTER,
-                        anyType, EMPTY_FILTER,
-                        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-                        idx, 1
-                    ).entities
-        ));
+                            ImmutableList.of(datasetType),
+                            EMPTY_FILTER,
+                            anyType,
+                            EMPTY_FILTER,
+                            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+                            outgoingRelationships,
+                            idx,
+                            1)
+                        .entities));
     Assert.assertEquals(individualRelatedEntities, allRelatedEntities.entities);
   }
 
   @DataProvider(name = "RemoveEdgesFromNodeTests")
   public Object[][] getRemoveEdgesFromNodeTests() {
     return new Object[][] {
-        new Object[] {
-            datasetTwoUrn,
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
-        },
-        new Object[] {
-            datasetTwoUrn,
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(),
-        },
-        new Object[] {
-            datasetTwoUrn,
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList()
-        },
-
-        new Object[] {
-            userOneUrn,
-            Arrays.asList(hasOwner, knowsUser),
-            outgoingRelationships,
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity)
-        },
-        new Object[] {
-            userOneUrn,
-            Arrays.asList(hasOwner, knowsUser),
-            incomingRelationships,
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity),
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList()
-        },
-        new Object[] {
-            userOneUrn,
-            Arrays.asList(hasOwner, knowsUser),
-            undirectedRelationships,
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList()
-        }
+      new Object[] {
+        datasetTwoUrn,
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        datasetTwoUrn,
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(),
+      },
+      new Object[] {
+        datasetTwoUrn,
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList()
+      },
+      new Object[] {
+        userOneUrn,
+        Arrays.asList(hasOwner, knowsUser),
+        outgoingRelationships,
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity)
+      },
+      new Object[] {
+        userOneUrn,
+        Arrays.asList(hasOwner, knowsUser),
+        incomingRelationships,
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity),
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList()
+      },
+      new Object[] {
+        userOneUrn,
+        Arrays.asList(hasOwner, knowsUser),
+        undirectedRelationships,
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList()
+      }
     };
   }
 
   @Test(dataProvider = "RemoveEdgesFromNodeTests")
-  public void testRemoveEdgesFromNode(@Nonnull Urn nodeToRemoveFrom,
-                                      @Nonnull List<String> relationTypes,
-                                      @Nonnull RelationshipFilter relationshipFilter,
-                                      List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove,
-                                      List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove,
-                                      List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove,
-                                      List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) throws Exception {
+  public void testRemoveEdgesFromNode(
+      @Nonnull Urn nodeToRemoveFrom,
+      @Nonnull List<String> relationTypes,
+      @Nonnull RelationshipFilter relationshipFilter,
+      List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove,
+      List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove,
+      List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove,
+      List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove)
+      throws Exception {
     GraphService service = getPopulatedGraphService();
 
     List<String> allOtherRelationTypes =
-        allRelationshipTypes.stream()
-            .filter(relation -> !relationTypes.contains(relation))
-            .collect(Collectors.toList());
+        allRelationshipTypes.stream()
+            .filter(relation -> !relationTypes.contains(relation))
+            .collect(Collectors.toList());
     assertTrue(allOtherRelationTypes.size() > 0);
 
-    RelatedEntitiesResult actualOutgoingRelatedUrnsBeforeRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        relationTypes, outgoingRelationships,
-        0, 100);
-    RelatedEntitiesResult actualIncomingRelatedUrnsBeforeRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        relationTypes, incomingRelationships,
-        0, 100);
-    assertEqualsAnyOrder(actualOutgoingRelatedUrnsBeforeRemove, expectedOutgoingRelatedUrnsBeforeRemove);
-    assertEqualsAnyOrder(actualIncomingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove);
+    RelatedEntitiesResult actualOutgoingRelatedUrnsBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            relationTypes,
+            outgoingRelationships,
+            0,
+            100);
+    RelatedEntitiesResult actualIncomingRelatedUrnsBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            relationTypes,
+            incomingRelationships,
+            0,
+            100);
+    assertEqualsAnyOrder(
+        actualOutgoingRelatedUrnsBeforeRemove, expectedOutgoingRelatedUrnsBeforeRemove);
+    assertEqualsAnyOrder(
+        actualIncomingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove);
 
     // we expect these do not change
-    RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        allOtherRelationTypes, outgoingRelationships,
-        0, 100);
-    RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        allOtherRelationTypes, incomingRelationships,
-        0, 100);
-
-    service.removeEdgesFromNode(
-        nodeToRemoveFrom,
-        relationTypes,
-        relationshipFilter
-    );
+    RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            allOtherRelationTypes,
+            outgoingRelationships,
+            0,
+            100);
+    RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            allOtherRelationTypes,
+            incomingRelationships,
+            0,
+            100);
+
+    service.removeEdgesFromNode(nodeToRemoveFrom, relationTypes, relationshipFilter);
     syncAfterWrite();
 
-    RelatedEntitiesResult actualOutgoingRelatedUrnsAfterRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        relationTypes, outgoingRelationships,
-        0, 100);
-    RelatedEntitiesResult actualIncomingRelatedUrnsAfterRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        relationTypes, incomingRelationships,
-        0, 100);
-    assertEqualsAnyOrder(actualOutgoingRelatedUrnsAfterRemove, expectedOutgoingRelatedUrnsAfterRemove);
-    assertEqualsAnyOrder(actualIncomingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove);
+    RelatedEntitiesResult actualOutgoingRelatedUrnsAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            relationTypes,
+            outgoingRelationships,
+            0,
+            100);
+    RelatedEntitiesResult actualIncomingRelatedUrnsAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            relationTypes,
+            incomingRelationships,
+            0,
+            100);
+    assertEqualsAnyOrder(
+        actualOutgoingRelatedUrnsAfterRemove, expectedOutgoingRelatedUrnsAfterRemove);
+    assertEqualsAnyOrder(
+        actualIncomingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove);
 
     // assert these did not change
-    RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        allOtherRelationTypes, outgoingRelationships,
-        0, 100);
-    RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesAfterRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        allOtherRelationTypes, incomingRelationships,
-        0, 100);
-    assertEqualsAnyOrder(relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove, relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove);
-    assertEqualsAnyOrder(relatedEntitiesOfOtherIncomingRelationTypesAfterRemove, relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove);
+    RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            allOtherRelationTypes,
+            outgoingRelationships,
+            0,
+            100);
+    RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            allOtherRelationTypes,
+            incomingRelationships,
+            0,
+            100);
+    assertEqualsAnyOrder(
+        relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove,
+        relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove);
+    assertEqualsAnyOrder(
+        relatedEntitiesOfOtherIncomingRelationTypesAfterRemove,
+        relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove);
   }
 
   @Test
@@ -1252,50 +1398,53 @@ public void testRemoveEdgesFromNodeNoRelationshipTypes() throws Exception {
     Urn nodeToRemoveFrom = datasetOneUrn;
 
     // populated graph asserted in testPopulatedGraphService
-    RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
-
-    // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported by all implementations
-    service.removeEdgesFromNode(
-        nodeToRemoveFrom,
-        Collections.emptyList(),
-        outgoingRelationships
-    );
-    service.removeEdgesFromNode(
-        nodeToRemoveFrom,
-        Collections.emptyList(),
-        incomingRelationships
-    );
+    RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
+
+    // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported
+    // by all implementations
+    service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), outgoingRelationships);
+    service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), incomingRelationships);
     syncAfterWrite();
 
-    RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
     assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemove, relatedOutgoingEntitiesBeforeRemove);
 
-    // does the test actually test something? is the Collections.emptyList() the only reason why we did not see changes?
+    // does the test actually test something? is the Collections.emptyList() the only reason why we
+    // did not see changes?
     service.removeEdgesFromNode(
-        nodeToRemoveFrom,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser),
-        outgoingRelationships
-    );
+        nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships);
     service.removeEdgesFromNode(
-        nodeToRemoveFrom,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser),
-        incomingRelationships
-    );
+        nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships);
     syncAfterWrite();
 
-    RelatedEntitiesResult relatedOutgoingEntitiesAfterRemoveAll = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult relatedOutgoingEntitiesAfterRemoveAll =
+        service.findRelatedEntities(
+            anyType,
+            newFilter("urn", nodeToRemoveFrom.toString()),
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
     assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemoveAll, Collections.emptyList());
   }
 
@@ -1305,30 +1454,35 @@ public void testRemoveEdgesFromUnknownNode() throws Exception {
    Urn nodeToRemoveFrom = unknownUrn;
 
    // populated graph asserted in testPopulatedGraphService
-    RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
 
-    // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported by all implementations
+    // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported
+    // by all implementations
     service.removeEdgesFromNode(
-        nodeToRemoveFrom,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser),
-        outgoingRelationships
-    );
+        nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships);
    service.removeEdgesFromNode(
-        nodeToRemoveFrom,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser),
-        incomingRelationships
-    );
+        nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships);
    syncAfterWrite();
 
-    RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
    assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemove, relatedOutgoingEntitiesBeforeRemove);
   }
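Every remove-edges hunk above follows the same shape: query before, remove a subset of relationship types in one direction, sync, then assert that only that subset disappeared. A minimal sketch of the removal step, assuming this test base's helpers:

    // Drop only the outgoing downstreamOf edges of datasetTwo; other relationship
    // types and the incoming direction are expected to survive.
    service.removeEdgesFromNode(datasetTwoUrn, Arrays.asList(downstreamOf), outgoingRelationships);
    syncAfterWrite(); // some implementations index asynchronously, hence the explicit sync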
@@ -1341,17 +1495,18 @@ public void testRemoveNode() throws Exception {
 
     // assert the modified graph
     assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            anyType, EMPTY_FILTER,
-            anyType, EMPTY_FILTER,
-            Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-            0, 100
-        ),
-        Arrays.asList(
-            hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity,
-            knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity
-        )
-    );
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100),
+        Arrays.asList(
+            hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity,
+            knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity));
   }
 
   @Test
@@ -1359,20 +1514,30 @@ public void testRemoveUnknownNode() throws Exception {
     GraphService service = getPopulatedGraphService();
 
     // populated graph asserted in testPopulatedGraphService
-    RelatedEntitiesResult entitiesBeforeRemove = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult entitiesBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
 
     service.removeNode(unknownUrn);
     syncAfterWrite();
 
-    RelatedEntitiesResult entitiesAfterRemove = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult entitiesAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
 
     assertEqualsAnyOrder(entitiesBeforeRemove, entitiesAfterRemove);
   }
@@ -1385,87 +1550,113 @@ public void testClear() throws Exception {
     service.clear();
     syncAfterWrite();
 
-    // assert the modified graph: check all nodes related to upstreamOf and nextVersionOf edges again
+    // assert the modified graph: check all nodes related to upstreamOf and nextVersionOf edges
+    // again
     assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            ImmutableList.of(datasetType), EMPTY_FILTER,
-            anyType, EMPTY_FILTER,
-            Arrays.asList(downstreamOf), outgoingRelationships,
-            0, 100
-        ),
-        Collections.emptyList()
-    );
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType),
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf),
+            outgoingRelationships,
+            0,
+            100),
+        Collections.emptyList());
     assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            ImmutableList.of(userType), EMPTY_FILTER,
-            anyType, EMPTY_FILTER,
-            Arrays.asList(hasOwner), outgoingRelationships,
-            0, 100
-        ),
-        Collections.emptyList()
-    );
+        service.findRelatedEntities(
+            ImmutableList.of(userType),
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(hasOwner),
+            outgoingRelationships,
+            0,
+            100),
+        Collections.emptyList());
     assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            anyType, EMPTY_FILTER,
-            ImmutableList.of(userType), EMPTY_FILTER,
-            Arrays.asList(knowsUser), outgoingRelationships,
-            0, 100
-        ),
-        Collections.emptyList()
-    );
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            ImmutableList.of(userType),
+            EMPTY_FILTER,
+            Arrays.asList(knowsUser),
+            outgoingRelationships,
+            0,
+            100),
+        Collections.emptyList());
   }
 
   private List<Edge> getFullyConnectedGraph(int nodes, List<String> relationshipTypes) {
-      List<Edge> edges = new ArrayList<>();
-
-      for (int sourceNode = 1; sourceNode <= nodes; sourceNode++) {
-        for (int destinationNode = 1; destinationNode <= nodes; destinationNode++) {
-          for (String relationship : relationshipTypes) {
-            int sourceType = sourceNode % 3;
-            Urn source = createFromString("urn:li:type" + sourceType + ":(urn:li:node" + sourceNode + ")");
-            int destinationType = destinationNode % 3;
-            Urn destination = createFromString("urn:li:type" + destinationType + ":(urn:li:node" + destinationNode + ")");
-
-            edges.add(new Edge(source, destination, relationship, null, null, null, null, null));
-          }
-        }
+    List<Edge> edges = new ArrayList<>();
+
+    for (int sourceNode = 1; sourceNode <= nodes; sourceNode++) {
+      for (int destinationNode = 1; destinationNode <= nodes; destinationNode++) {
+        for (String relationship : relationshipTypes) {
+          int sourceType = sourceNode % 3;
+          Urn source =
+              createFromString("urn:li:type" + sourceType + ":(urn:li:node" + sourceNode + ")");
+          int destinationType = destinationNode % 3;
+          Urn destination =
+              createFromString(
+                  "urn:li:type" + destinationType + ":(urn:li:node" + destinationNode + ")");
+
+          edges.add(new Edge(source, destination, relationship, null, null, null, null, null));
+        }
       }
+    }
 
-      return edges;
+    return edges;
   }
 
   @Test
   public void testConcurrentAddEdge() throws Exception {
-      final GraphService service = getGraphService();
-
-      // too many edges may cause too many threads throwing
-      // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked worker
-      int nodes = 5;
-      int relationshipTypes = 3;
-      List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + id).collect(Collectors.toList());
-      List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships);
-
-      List<Runnable> operations = edges.stream().map(edge -> new Runnable() {
-        @Override
-        public void run() {
-          service.addEdge(edge);
-        }
-      }).collect(Collectors.toList());
-
-      doTestConcurrentOp(operations);
-      syncAfterWrite();
-
-      RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-          null, EMPTY_FILTER,
-          null, EMPTY_FILTER,
-          allRelationships, outgoingRelationships,
-          0, nodes * relationshipTypes * 2
-      );
-
-      Set<RelatedEntity> expectedRelatedEntities = edges.stream()
-          .map(edge -> new RelatedEntity(edge.getRelationshipType(), edge.getDestination().toString()))
-          .collect(Collectors.toSet());
-      assertEquals(new HashSet<>(relatedEntities.entities), expectedRelatedEntities);
+    final GraphService service = getGraphService();
+
+    // too many edges may cause too many threads throwing
+    // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked
+    // worker
+    int nodes = 5;
+    int relationshipTypes = 3;
+    List<String> allRelationships =
+        IntStream.range(1, relationshipTypes + 1)
+            .mapToObj(id -> "relationship" + id)
+            .collect(Collectors.toList());
+    List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships);
+
+    List<Runnable> operations =
+        edges.stream()
+            .map(
+                edge ->
+                    new Runnable() {
+                      @Override
+                      public void run() {
+                        service.addEdge(edge);
+                      }
+                    })
+            .collect(Collectors.toList());
+
+    doTestConcurrentOp(operations);
+    syncAfterWrite();
+
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            null,
+            EMPTY_FILTER,
+            null,
+            EMPTY_FILTER,
+            allRelationships,
+            outgoingRelationships,
+            0,
+            nodes * relationshipTypes * 2);
+
+    Set<RelatedEntity> expectedRelatedEntities =
+        edges.stream()
+            .map(
+                edge ->
+                    new RelatedEntity(edge.getRelationshipType(), edge.getDestination().toString()))
+            .collect(Collectors.toSet());
+    assertEquals(new HashSet<>(relatedEntities.entities), expectedRelatedEntities);
   }
 
   @Test
id).collect(Collectors.toList()); + List<String> allRelationships = + IntStream.range(1, relationshipTypes + 1) + .mapToObj(id -> "relationship" + id) + .collect(Collectors.toList()); List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships); // add fully connected graph @@ -1482,43 +1676,63 @@ public void testConcurrentRemoveEdgesFromNode() throws Exception { syncAfterWrite(); // assert the graph is there - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntities.entities.size(), nodes * relationshipTypes); // delete all edges concurrently - List<Runnable> operations = edges.stream().map(edge -> new Runnable() { - @Override - public void run() { - service.removeEdgesFromNode(edge.getSource(), Arrays.asList(edge.getRelationshipType()), outgoingRelationships); - } - }).collect(Collectors.toList()); + List<Runnable> operations = + edges.stream() + .map( + edge -> + new Runnable() { + @Override + public void run() { + service.removeEdgesFromNode( + edge.getSource(), + Arrays.asList(edge.getRelationshipType()), + outgoingRelationships); + } + }) + .collect(Collectors.toList()); doTestConcurrentOp(operations); syncAfterWrite(); // assert the graph is gone - RelatedEntitiesResult relatedEntitiesAfterDeletion = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntitiesAfterDeletion = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntitiesAfterDeletion.entities.size(), 0); - } + } @Test public void testConcurrentRemoveNodes() throws Exception { final GraphService service = getGraphService(); // too many edges may cause too many threads throwing - // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked worker + // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked + // worker int nodes = 5; int relationshipTypes = 3; - List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + id).collect(Collectors.toList()); + List<String> allRelationships = + IntStream.range(1, relationshipTypes + 1) + .mapToObj(id -> "relationship" + id) + .collect(Collectors.toList()); List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships); // add fully connected graph @@ -1526,106 +1740,131 @@ public void testConcurrentRemoveNodes() throws Exception { syncAfterWrite(); // assert the graph is there - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntities.entities.size(), nodes * relationshipTypes); // remove all nodes concurrently // nodes will be removed multiple times - List<Runnable> 
operations = edges.stream().map(edge -> new Runnable() { - @Override - public void run() { - service.removeNode(edge.getSource()); - } - }).collect(Collectors.toList()); + List<Runnable> operations = + edges.stream() + .map( + edge -> + new Runnable() { + @Override + public void run() { + service.removeNode(edge.getSource()); + } + }) + .collect(Collectors.toList()); doTestConcurrentOp(operations); syncAfterWrite(); // assert the graph is gone - RelatedEntitiesResult relatedEntitiesAfterDeletion = service.findRelatedEntities( - null, EMPTY_FILTER, - null, EMPTY_FILTER, - allRelationships, outgoingRelationships, - 0, nodes * relationshipTypes * 2 - ); + RelatedEntitiesResult relatedEntitiesAfterDeletion = + service.findRelatedEntities( + null, + EMPTY_FILTER, + null, + EMPTY_FILTER, + allRelationships, + outgoingRelationships, + 0, + nodes * relationshipTypes * 2); assertEquals(relatedEntitiesAfterDeletion.entities.size(), 0); } private void doTestConcurrentOp(List<Runnable> operations) throws Exception { - final Queue<Throwable> throwables = new ConcurrentLinkedQueue<>(); - final CountDownLatch started = new CountDownLatch(operations.size()); - final CountDownLatch finished = new CountDownLatch(operations.size()); - operations.forEach(operation -> new Thread(new Runnable() { - @Override - public void run() { - try { - started.countDown(); - - try { - if (!started.await(10, TimeUnit.SECONDS)) { - fail("Timed out waiting for all threads to start"); + final Queue<Throwable> throwables = new ConcurrentLinkedQueue<>(); + final CountDownLatch started = new CountDownLatch(operations.size()); + final CountDownLatch finished = new CountDownLatch(operations.size()); + operations.forEach( + operation -> + new Thread( + new Runnable() { + @Override + public void run() { + try { + started.countDown(); + + try { + if (!started.await(10, TimeUnit.SECONDS)) { + fail("Timed out waiting for all threads to start"); + } + } catch (InterruptedException e) { + fail("Got interrupted waiting for all threads to start"); + } + + operation.run(); + } catch (Throwable t) { + t.printStackTrace(); + throwables.add(t); + } + finished.countDown(); } - } catch (InterruptedException e) { - fail("Got interrupted waiting for all threads to start"); - } - - operation.run(); - } catch (Throwable t) { - t.printStackTrace(); - throwables.add(t); - } - finished.countDown(); - } - }).start()); - - assertTrue(finished.await(getTestConcurrentOpTimeout().toMillis(), TimeUnit.MILLISECONDS)); - throwables.forEach(throwable -> System.err.printf(System.currentTimeMillis() + ": exception occurred: %s%n", throwable)); - assertEquals(throwables.size(), 0); + }) + .start()); + + assertTrue(finished.await(getTestConcurrentOpTimeout().toMillis(), TimeUnit.MILLISECONDS)); + throwables.forEach( + throwable -> + System.err.printf( + System.currentTimeMillis() + ": exception occurred: %s%n", throwable)); + assertEquals(throwables.size(), 0); } @Test public void testPopulatedGraphServiceGetLineageMultihop() throws Exception { - GraphService service = getLineagePopulatedGraphService(); - - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 2); - assertEquals(upstreamLineage.getTotal().intValue(), 0); - assertEquals(upstreamLineage.getRelationships().size(), 0); - - EntityLineageResult downstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); - - assertEquals(downstreamLineage.getTotal().intValue(), 5); - 
assertEquals(downstreamLineage.getRelationships().size(), 5); - Map<Urn, LineageRelationship> relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); - assertTrue(relationships.containsKey(datasetTwoUrn)); - assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(datasetThreeUrn)); - assertEquals(relationships.get(datasetThreeUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(datasetFourUrn)); - assertEquals(relationships.get(datasetFourUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(dataJobOneUrn)); - assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(dataJobTwoUrn)); - assertEquals(relationships.get(dataJobTwoUrn).getDegree().intValue(), 1); - - upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 2); - assertEquals(upstreamLineage.getTotal().intValue(), 3); - assertEquals(upstreamLineage.getRelationships().size(), 3); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); - assertTrue(relationships.containsKey(datasetOneUrn)); - assertEquals(relationships.get(datasetOneUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(datasetTwoUrn)); - assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(dataJobOneUrn)); - assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); - - downstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); - assertEquals(downstreamLineage.getTotal().intValue(), 0); - assertEquals(downstreamLineage.getRelationships().size(), 0); + GraphService service = getLineagePopulatedGraphService(); + + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + assertEquals(upstreamLineage.getTotal().intValue(), 0); + assertEquals(upstreamLineage.getRelationships().size(), 0); + + EntityLineageResult downstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + + assertEquals(downstreamLineage.getTotal().intValue(), 5); + assertEquals(downstreamLineage.getRelationships().size(), 5); + Map<Urn, LineageRelationship> relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); + assertTrue(relationships.containsKey(datasetTwoUrn)); + assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(datasetThreeUrn)); + assertEquals(relationships.get(datasetThreeUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(datasetFourUrn)); + assertEquals(relationships.get(datasetFourUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(dataJobOneUrn)); + assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(dataJobTwoUrn)); + assertEquals(relationships.get(dataJobTwoUrn).getDegree().intValue(), 1); + + upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + assertEquals(upstreamLineage.getTotal().intValue(), 3); + assertEquals(upstreamLineage.getRelationships().size(), 3); + relationships = + 
upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); + assertTrue(relationships.containsKey(datasetOneUrn)); + assertEquals(relationships.get(datasetOneUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(datasetTwoUrn)); + assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(dataJobOneUrn)); + assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); + + downstreamLineage = + service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + assertEquals(downstreamLineage.getTotal().intValue(), 0); + assertEquals(downstreamLineage.getRelationships().size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java index d8cd6ed05b2ec..481db53eafbbe 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java @@ -1,14 +1,9 @@ package com.linkedin.metadata.graph.dgraph; -import com.github.dockerjava.api.command.InspectContainerResponse; -import lombok.NonNull; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; -import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; -import org.testcontainers.containers.wait.strategy.WaitAllStrategy; -import org.testcontainers.containers.wait.strategy.WaitStrategy; -import org.testcontainers.utility.DockerImageName; +import static java.net.HttpURLConnection.HTTP_OK; +import static java.util.stream.Collectors.toSet; +import com.github.dockerjava.api.command.InspectContainerResponse; import java.time.Duration; import java.util.Arrays; import java.util.HashMap; @@ -16,223 +11,235 @@ import java.util.Set; import java.util.StringJoiner; import java.util.stream.Stream; - -import static java.net.HttpURLConnection.HTTP_OK; -import static java.util.stream.Collectors.toSet; +import lombok.NonNull; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; +import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; +import org.testcontainers.containers.wait.strategy.WaitAllStrategy; +import org.testcontainers.containers.wait.strategy.WaitStrategy; +import org.testcontainers.utility.DockerImageName; public class DgraphContainer extends GenericContainer<DgraphContainer> { - /** - * The image defaults to the official Dgraph image: <a href="https://hub.docker.com/_/dgraph/dgraph">Dgraph</a>. - */ - public static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("dgraph/dgraph"); + /** + * The image defaults to the official Dgraph image: <a + * href="https://hub.docker.com/_/dgraph/dgraph">Dgraph</a>. 
+ */ + public static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("dgraph/dgraph"); - private static final int HTTP_PORT = 8080; + private static final int HTTP_PORT = 8080; - private static final int GRPC_PORT = 9080; + private static final int GRPC_PORT = 9080; - private boolean started = false; + private boolean started = false; - @Override - protected void containerIsStarted(InspectContainerResponse containerInfo) { - super.containerIsStarted(containerInfo); - started = true; - } + @Override + protected void containerIsStarted(InspectContainerResponse containerInfo) { + super.containerIsStarted(containerInfo); + started = true; + } - @Override - protected void containerIsStopped(InspectContainerResponse containerInfo) { - super.containerIsStopped(containerInfo); - started = false; - } + @Override + protected void containerIsStopped(InspectContainerResponse containerInfo) { + super.containerIsStopped(containerInfo); + started = false; + } - private final Map<String, String> zeroArguments = new HashMap<>(); + private final Map<String, String> zeroArguments = new HashMap<>(); - private final Map<String, String> alphaArguments = new HashMap<>(); + private final Map<String, String> alphaArguments = new HashMap<>(); - /** - * Creates a DgraphContainer using a specific docker image. Connect the container - * to another DgraphContainer to form a cluster via `peerAlias`. - * - * @param dockerImageName The docker image to use. - */ - public DgraphContainer(@NonNull final DockerImageName dockerImageName) { - super(dockerImageName); + /** + * Creates a DgraphContainer using a specific docker image. Connect the container to another + * DgraphContainer to form a cluster via `peerAlias`. + * + * @param dockerImageName The docker image to use. + */ + public DgraphContainer(@NonNull final DockerImageName dockerImageName) { + super(dockerImageName); - dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME); + dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME); - WaitStrategy waitForLeader = new LogMessageWaitStrategy() - .withRegEx(".* Got Zero leader: .*\n"); - WaitStrategy waitForCluster = new LogMessageWaitStrategy() - .withRegEx(".* Server is ready\n"); - WaitStrategy waitForHttp = new HttpWaitStrategy() + WaitStrategy waitForLeader = new LogMessageWaitStrategy().withRegEx(".* Got Zero leader: .*\n"); + WaitStrategy waitForCluster = new LogMessageWaitStrategy().withRegEx(".* Server is ready\n"); + WaitStrategy waitForHttp = + new HttpWaitStrategy() .forPort(HTTP_PORT) .forStatusCodeMatching(response -> response == HTTP_OK); - this.waitStrategy = new WaitAllStrategy() + this.waitStrategy = + new WaitAllStrategy() .withStrategy(waitForLeader) .withStrategy(waitForCluster) .withStrategy(waitForHttp) .withStartupTimeout(Duration.ofMinutes(1)); - if (dockerImageName.getVersionPart().compareTo("v21.03.0") < 0) { - withAlphaArgument("whitelist", "0.0.0.0/0"); - } else { - withAlphaArgumentValues("security", "whitelist=0.0.0.0/0"); - } - - addExposedPorts(HTTP_PORT, GRPC_PORT); - } - - /** - * Adds an argument to the zero command. - * - * @param argument name of the argument - * @param value value, null if argument is a flag - * @return this - */ - public DgraphContainer withZeroArgument(@NonNull String argument, String value) { - addArgument(zeroArguments, argument, value); - return this; - } - - /** - * Adds a value to an argument list to the zero command. - * - * Some arguments of the zero command form a list of values, e.g. `audit` or `raft`. 
- * These values are separated by a ";". Setting multiple values for those arguments should - * be done via this method. - * - * @param argument name of the argument - * @param values values to add to the argument - * @return this - */ - public DgraphContainer withZeroArgumentValues(@NonNull String argument, @NonNull String... values) { - addArgumentValues(zeroArguments, argument, values); - return this; - } - - /** - * Adds an argument to the alpha command. - * - * @param argument name of the argument - * @param value value, null if argument is a flag - * @return this - */ - public DgraphContainer withAlphaArgument(@NonNull String argument, String value) { - addArgument(alphaArguments, argument, value); - return this; - } - - /** - * Adds a value to an argument list to the alpha command. - * - * Some arguments of the alpha command form a list of values, e.g. `audit` or `raft`. - * These values are separated by a ";". Setting multiple values for those arguments should - * be done via this method. - * - * @param argument name of the argument - * @param values values to add to the argument - * @return this - */ - public DgraphContainer withAlphaArgumentValues(@NonNull String argument, @NonNull String... values) { - addArgumentValues(alphaArguments, argument, values); - return this; - } - - private void addArgument(Map<String, String> arguments, @NonNull String argument, String value) { - if (started) { - throw new IllegalStateException("The container started already, cannot amend command arguments"); - } - - arguments.put(argument, value); - } - - private void addArgumentValues(Map<String, String> arguments, @NonNull String argument, @NonNull String... values) { - if (started) { - throw new IllegalStateException("The container started already, cannot amend command arguments"); - } - - StringJoiner joiner = new StringJoiner("; "); - Arrays.stream(values).forEach(joiner::add); - String value = joiner.toString(); - - if (arguments.containsKey(argument)) { - arguments.put(argument, arguments.get(argument) + "; " + value); - } else { - arguments.put(argument, value); - } - } - - /** - * Provides the command used to start the zero process. Command line arguments can be added - * by calling `withZeroArgument` and `withZeroArgumentValues` before calling this method. - * @return command string - */ - public @NonNull String getZeroCommand() { - return getCommand("dgraph zero", zeroArguments); - } - - /** - * Provides the command used to start the alpha process. Command line arguments can be added - * by calling `withAlphaArgument` and `withAlphaArgumentValues` before calling this method. 
- * @return command string - */ - public @NonNull String getAlphaCommand() { - return getCommand("dgraph alpha", alphaArguments); - } - - private @NonNull String getCommand(@NonNull String command, @NonNull Map<String, String> arguments) { - StringJoiner joiner = new StringJoiner(" --"); - - arguments.entrySet().stream() - .sorted(Map.Entry.comparingByKey()) - .map(argument -> { - if (argument.getValue() == null) { - return argument.getKey(); - } else { - return argument.getKey() + " \"" + argument.getValue() + "\""; - } - }).forEach(joiner::add); - - if (joiner.length() == 0) { - return command; - } else { - return command + " --" + joiner; - } - } - - @Override - public void start() { - String zeroCommand = this.getZeroCommand(); - String alhpaCommand = this.getAlphaCommand(); - this.setCommand("/bin/bash", "-c", zeroCommand + " & " + alhpaCommand); - super.start(); - } - - @Override - public Set<Integer> getLivenessCheckPortNumbers() { - return Stream.of(getHttpPort(), getGrpcPort()) - .map(this::getMappedPort) - .collect(toSet()); - } - - @Override - protected void configure() { } - - public int getHttpPort() { - return getMappedPort(HTTP_PORT); - } - - public int getGrpcPort() { - return getMappedPort(GRPC_PORT); - } - - public String getHttpUrl() { - return String.format("http://%s:%d", getHost(), getHttpPort()); - } - - public String getGrpcUrl() { - return String.format("%s:%d", getHost(), getGrpcPort()); - } - + if (dockerImageName.getVersionPart().compareTo("v21.03.0") < 0) { + withAlphaArgument("whitelist", "0.0.0.0/0"); + } else { + withAlphaArgumentValues("security", "whitelist=0.0.0.0/0"); + } + + addExposedPorts(HTTP_PORT, GRPC_PORT); + } + + /** + * Adds an argument to the zero command. + * + * @param argument name of the argument + * @param value value, null if argument is a flag + * @return this + */ + public DgraphContainer withZeroArgument(@NonNull String argument, String value) { + addArgument(zeroArguments, argument, value); + return this; + } + + /** + * Adds a value to an argument list to the zero command. + * + * <p>Some arguments of the zero command form a list of values, e.g. `audit` or `raft`. These + * values are separated by a ";". Setting multiple values for those arguments should be done via + * this method. + * + * @param argument name of the argument + * @param values values to add to the argument + * @return this + */ + public DgraphContainer withZeroArgumentValues( + @NonNull String argument, @NonNull String... values) { + addArgumentValues(zeroArguments, argument, values); + return this; + } + + /** + * Adds an argument to the alpha command. + * + * @param argument name of the argument + * @param value value, null if argument is a flag + * @return this + */ + public DgraphContainer withAlphaArgument(@NonNull String argument, String value) { + addArgument(alphaArguments, argument, value); + return this; + } + + /** + * Adds a value to an argument list to the alpha command. + * + * <p>Some arguments of the alpha command form a list of values, e.g. `audit` or `raft`. These + * values are separated by a ";". Setting multiple values for those arguments should be done via + * this method. + * + * @param argument name of the argument + * @param values values to add to the argument + * @return this + */ + public DgraphContainer withAlphaArgumentValues( + @NonNull String argument, @NonNull String... 
values) { + addArgumentValues(alphaArguments, argument, values); + return this; + } + + private void addArgument(Map<String, String> arguments, @NonNull String argument, String value) { + if (started) { + throw new IllegalStateException( + "The container started already, cannot amend command arguments"); + } + + arguments.put(argument, value); + } + + private void addArgumentValues( + Map<String, String> arguments, @NonNull String argument, @NonNull String... values) { + if (started) { + throw new IllegalStateException( + "The container started already, cannot amend command arguments"); + } + + StringJoiner joiner = new StringJoiner("; "); + Arrays.stream(values).forEach(joiner::add); + String value = joiner.toString(); + + if (arguments.containsKey(argument)) { + arguments.put(argument, arguments.get(argument) + "; " + value); + } else { + arguments.put(argument, value); + } + } + + /** + * Provides the command used to start the zero process. Command line arguments can be added by + * calling `withZeroArgument` and `withZeroArgumentValues` before calling this method. + * + * @return command string + */ + public @NonNull String getZeroCommand() { + return getCommand("dgraph zero", zeroArguments); + } + + /** + * Provides the command used to start the alpha process. Command line arguments can be added by + * calling `withAlphaArgument` and `withAlphaArgumentValues` before calling this method. + * + * @return command string + */ + public @NonNull String getAlphaCommand() { + return getCommand("dgraph alpha", alphaArguments); + } + + private @NonNull String getCommand( + @NonNull String command, @NonNull Map<String, String> arguments) { + StringJoiner joiner = new StringJoiner(" --"); + + arguments.entrySet().stream() + .sorted(Map.Entry.comparingByKey()) + .map( + argument -> { + if (argument.getValue() == null) { + return argument.getKey(); + } else { + return argument.getKey() + " \"" + argument.getValue() + "\""; + } + }) + .forEach(joiner::add); + + if (joiner.length() == 0) { + return command; + } else { + return command + " --" + joiner; + } + } + + @Override + public void start() { + String zeroCommand = this.getZeroCommand(); + String alphaCommand = this.getAlphaCommand(); + this.setCommand("/bin/bash", "-c", zeroCommand + " & " + alphaCommand); + super.start(); + } + + @Override + public Set<Integer> getLivenessCheckPortNumbers() { + return Stream.of(getHttpPort(), getGrpcPort()).map(this::getMappedPort).collect(toSet()); + } + + @Override + protected void configure() {} + + public int getHttpPort() { + return getMappedPort(HTTP_PORT); + } + + public int getGrpcPort() { + return getMappedPort(GRPC_PORT); + } + + public String getHttpUrl() { + return String.format("http://%s:%d", getHost(), getHttpPort()); + } + + public String getGrpcUrl() { + return String.format("%s:%d", getHost(), getGrpcPort()); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java index abf9bf532ddd8..40b8e83b56d03 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java @@ -1,5 +1,12 @@ package com.linkedin.metadata.graph.dgraph; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; +import static 
com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.GraphServiceTestBase; @@ -16,15 +23,6 @@ import io.grpc.ManagedChannel; import io.grpc.ManagedChannelBuilder; import io.grpc.MethodDescriptor; -import lombok.extern.slf4j.Slf4j; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testng.annotations.AfterClass; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.time.Duration; import java.util.Arrays; import java.util.Collections; @@ -32,89 +30,94 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testng.annotations.AfterClass; +import org.testng.annotations.AfterMethod; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") @Slf4j public class DgraphGraphServiceTest extends GraphServiceTestBase { - private ManagedChannel _channel; - private DgraphGraphService _service; - private DgraphContainer _container; - - @Override - protected Duration getTestConcurrentOpTimeout() { - return Duration.ofMinutes(5); - } - - @BeforeClass - public void setup() { - _container = new DgraphContainer(DgraphContainer.DEFAULT_IMAGE_NAME.withTag("v21.03.0")) - .withTmpFs(Collections.singletonMap("/dgraph", "rw,noexec,nosuid,size=1g")) - .withStartupTimeout(Duration.ofMinutes(1)) - .withStartupAttempts(3); - checkContainerEngine(_container.getDockerClient()); - _container.start(); - Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(log); - _container.followOutput(logConsumer); - } - - @BeforeMethod - public void connect() { - LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); - _channel = ManagedChannelBuilder - .forAddress(_container.getHost(), _container.getGrpcPort()) - .usePlaintext() - .build(); - - // https://discuss.dgraph.io/t/dgraph-java-client-setting-deadlines-per-call/3056 - ClientInterceptor timeoutInterceptor = new ClientInterceptor() { - @Override - public <REQ, RESP> ClientCall<REQ, RESP> interceptCall( - MethodDescriptor<REQ, RESP> method, CallOptions callOptions, Channel next) { - return next.newCall(method, callOptions.withDeadlineAfter(30, TimeUnit.SECONDS)); - } + private ManagedChannel _channel; + private DgraphGraphService _service; + private DgraphContainer _container; + + @Override + protected Duration getTestConcurrentOpTimeout() { + return Duration.ofMinutes(5); + } + + @BeforeClass + public void setup() { + _container = + new 
DgraphContainer(DgraphContainer.DEFAULT_IMAGE_NAME.withTag("v21.03.0")) + .withTmpFs(Collections.singletonMap("/dgraph", "rw,noexec,nosuid,size=1g")) + .withStartupTimeout(Duration.ofMinutes(1)) + .withStartupAttempts(3); + checkContainerEngine(_container.getDockerClient()); + _container.start(); + Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(log); + _container.followOutput(logConsumer); + } + + @BeforeMethod + public void connect() { + LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); + _channel = + ManagedChannelBuilder.forAddress(_container.getHost(), _container.getGrpcPort()) + .usePlaintext() + .build(); + + // https://discuss.dgraph.io/t/dgraph-java-client-setting-deadlines-per-call/3056 + ClientInterceptor timeoutInterceptor = + new ClientInterceptor() { + @Override + public <REQ, RESP> ClientCall<REQ, RESP> interceptCall( + MethodDescriptor<REQ, RESP> method, CallOptions callOptions, Channel next) { + return next.newCall(method, callOptions.withDeadlineAfter(30, TimeUnit.SECONDS)); + } }; - DgraphGrpc.DgraphStub stub = DgraphGrpc.newStub(_channel).withInterceptors(timeoutInterceptor); - _service = new DgraphGraphService(lineageRegistry, new DgraphClient(stub)); - } - - @AfterMethod - public void disconnect() throws InterruptedException { - try { - _channel.shutdownNow(); - _channel.awaitTermination(10, TimeUnit.SECONDS); - } finally { - _channel = null; - _service = null; - } - } - - @AfterClass - public void tearDown() { - _container.stop(); - } - - @Nonnull - @Override - protected GraphService getGraphService() { - _service.clear(); - return _service; + DgraphGrpc.DgraphStub stub = DgraphGrpc.newStub(_channel).withInterceptors(timeoutInterceptor); + _service = new DgraphGraphService(lineageRegistry, new DgraphClient(stub)); + } + + @AfterMethod + public void disconnect() throws InterruptedException { + try { + _channel.shutdownNow(); + _channel.awaitTermination(10, TimeUnit.SECONDS); + } finally { + _channel = null; + _service = null; } - - @Override - protected void syncAfterWrite() { } - - @Test - public void testGetSchema() { - DgraphSchema schema = DgraphGraphService.getSchema("{\n" + } + + @AfterClass + public void tearDown() { + _container.stop(); + } + + @Nonnull + @Override + protected GraphService getGraphService() { + _service.clear(); + return _service; + } + + @Override + protected void syncAfterWrite() {} + + @Test + public void testGetSchema() { + DgraphSchema schema = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -156,45 +159,69 @@ public void testGetSchema() { + " }\n" + " ]\n" + " }"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); + assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); - assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); - }}); - - assertEquals(schema.getFields("ns:typeOne"), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); - assertEquals(schema.getFields("ns:typeTwo"), new HashSet<>(Arrays.asList("PredTwo"))); - assertEquals(schema.getFields("ns:unknown"), Collections.emptySet()); - - schema.ensureField("newType", "newField"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField"))); - 
assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + } + }); + + assertEquals( + schema.getFields("ns:typeOne"), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); + assertEquals(schema.getFields("ns:typeTwo"), new HashSet<>(Arrays.asList("PredTwo"))); + assertEquals(schema.getFields("ns:unknown"), Collections.emptySet()); + + schema.ensureField("newType", "newField"); + assertEquals( + schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField"))); + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - - schema.ensureField("ns:typeOne", "otherField"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); - assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + } + }); + + schema.ensureField("ns:typeOne", "otherField"); + assertEquals( + schema.getFields(), + new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo", "otherField"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - - schema.ensureField("ns:typeTwo", "PredTwo"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); - assertEquals(schema.getTypes(), new HashMap<String, Set<String>>() {{ + } + }); + + schema.ensureField("ns:typeTwo", "PredTwo"); + assertEquals( + schema.getFields(), + new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); + assertEquals( + schema.getTypes(), + new HashMap<String, Set<String>>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo", "otherField"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - } - - @Test - public void testGetSchemaIncomplete() { - DgraphSchema schemaWithNonListTypes = DgraphGraphService.getSchema("{\n" + } + }); + } + + @Test + public void testGetSchemaIncomplete() { + DgraphSchema schemaWithNonListTypes = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -208,9 +235,11 @@ public void testGetSchemaIncomplete() { + " ],\n" + " \"types\": \"not a list\"\n" + " }"); - assertTrue(schemaWithNonListTypes.isEmpty(), "Should be empty if type field is not a list"); + assertTrue(schemaWithNonListTypes.isEmpty(), "Should be empty if type field is not a list"); - DgraphSchema schemaWithoutTypes = DgraphGraphService.getSchema("{\n" + DgraphSchema schemaWithoutTypes = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -223,570 +252,575 @@ public void testGetSchemaIncomplete() { + " }\n" + " ]" + " }"); - assertTrue(schemaWithoutTypes.isEmpty(), "Should be empty if no type field exists"); - - DgraphSchema schemaWithNonListSchema = DgraphGraphService.getSchema("{\n" - + " \"schema\": \"not a list\"" - + " }"); - assertTrue(schemaWithNonListSchema.isEmpty(), "Should be empty if schema field is not a list"); - - DgraphSchema schemaWithoutSchema = DgraphGraphService.getSchema("{ }"); - 
assertTrue(schemaWithoutSchema.isEmpty(), "Should be empty if no schema field exists"); - } - - @Test - public void testGetSchemaDgraph() { - // TODO: test that dgraph schema gets altered - } - - @Test - public void testGetFilterConditions() { - // no filters - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList()), - "" - ); - - // source type not supported without restricting relationship types - // there must be as many relation type filter names as there are relationships - assertEquals( - DgraphGraphService.getFilterConditions( - "sourceTypeFilter", - null, - Collections.emptyList(), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceTypeFilter))\n" - + " )\n" - + " )" - ); - - // destination type - assertEquals( - DgraphGraphService.getFilterConditions( - null, - "destinationTypeFilter", - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationTypeFilter)\n" - + " )" - ); - - // source filter not supported without restricting relationship types - // there must be as many relation type filter names as there are relationships - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter))\n" - + " )\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter1)) AND " - + "uid_in(<relationship>, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter1", "RelationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceFilter1)) AND " - + "uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(RelationshipTypeFilter2) AND uid_in(<relationship2>, uid(sourceFilter1)) AND " - + "uid_in(<relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - - // destination filters - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Arrays.asList("destinationFilter"), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationFilter)\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Arrays.asList("destinationFilter1", "destinationFilter2"), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2)\n" - + " )" - ); - - // relationship type filters require relationship types - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - 
Collections.emptyList(), - Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " (\n" - + " uid(relationshipTypeFilter1) OR\n" - + " uid(relationshipTypeFilter2)\n" - + " )\n" - + " )" - ); - - // all filters at once - assertEquals( - DgraphGraphService.getFilterConditions( - "sourceTypeFilter", - "destinationTypeFilter", - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("destinationFilter1", "destinationFilter2"), - Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " uid(destinationTypeFilter) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceTypeFilter)) AND " - + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipTypeFilter2) AND uid_in(<relationship2>, uid(sourceTypeFilter)) AND " - + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - - // TODO: check getFilterConditions throws an exception when relationshipTypes and - // relationshipTypeFilterNames do not have the same size - } - - @Test - public void testGetRelationships() { - // no relationships - assertEquals( - DgraphGraphService.getRelationships( - null, - Collections.emptyList(), - Collections.emptyList()), - Collections.emptyList() - ); - - // one relationship but no filters - assertEquals( - DgraphGraphService.getRelationships( - null, - Collections.emptyList(), - Arrays.asList("relationship") - ), - Arrays.asList("<relationship> { <uid> }") - ); - - // more relationship and source type filter - assertEquals( - DgraphGraphService.getRelationships( - "sourceTypeFilter", - Collections.emptyList(), - Arrays.asList("relationship1", "~relationship2") - ), - Arrays.asList( - "<relationship1> @filter( uid(sourceTypeFilter) ) { <uid> }", - "<~relationship2> @filter( uid(sourceTypeFilter) ) { <uid> }" - ) - ); - - // more relationship, source type and source filters - assertEquals( - DgraphGraphService.getRelationships( - "sourceTypeFilter", - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("relationship1", "~relationship2") - ), - Arrays.asList( - "<relationship1> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", - "<~relationship2> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }" - ) - ); - - // more relationship and only source filters - assertEquals( - DgraphGraphService.getRelationships( - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("relationship1", "~relationship2", "relationship3") - ), - Arrays.asList( - "<relationship1> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", - "<~relationship2> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", - "<relationship3> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }" - ) - ); - - // two relationship and only one source filter - assertEquals( - DgraphGraphService.getRelationships( - null, - Arrays.asList("sourceFilter"), - Arrays.asList("~relationship1", "~relationship2") - ), - Arrays.asList( - "<~relationship1> @filter( uid(sourceFilter) ) { <uid> }", - "<~relationship2> @filter( uid(sourceFilter) ) { <uid> }" - ) - ); - } - - @Test - public void 
testGetRelationshipCondition() { - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - null, - Collections.emptyList()), - "uid(relationshipFilter)" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Collections.emptyList()), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Arrays.asList("destinationFilter")), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " - + "uid_in(<relationship>, uid(destinationFilter))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Arrays.asList("destinationFilter1", "destinationFilter2")), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " - + "uid_in(<relationship>, uid(destinationFilter1)) AND uid_in(<relationship>, uid(destinationFilter2))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - null, - Arrays.asList("destinationFilter1", "destinationFilter2")), - "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationFilter1)) AND " - + "uid_in(<relationship>, uid(destinationFilter2))" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesOutgoing() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.OUTGOING, - "query {\n" - + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" - + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" - + " relationshipType1 as var(func: has(<~relationship1>))\n" - + " relationshipType2 as var(func: has(<~relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " - + "first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(<~relationship1>, uid(sourceType)) AND " - + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(<~relationship2>, uid(sourceType)) AND " - + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " <urn>\n" - + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " }\n" - + "}" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesIncoming() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.INCOMING, - "query {\n" - + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(<key>, 
\"source-key\"))\n" - + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" - + " relationshipType1 as var(func: has(<relationship1>))\n" - + " relationshipType2 as var(func: has(<relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " - + "first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " - + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " - + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " <urn>\n" - + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " }\n" - + "}" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesUndirected() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.UNDIRECTED, - "query {\n" - + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" - + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" - + " relationshipType1 as var(func: has(<relationship1>))\n" - + " relationshipType2 as var(func: has(<relationship2>))\n" - + " relationshipType3 as var(func: has(<~relationship1>))\n" - + " relationshipType4 as var(func: has(<~relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, " - + "relationshipType1, relationshipType2, relationshipType3, relationshipType4), first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " - + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " - + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType3) AND uid_in(<~relationship1>, uid(sourceType)) AND " - + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType4) AND uid_in(<~relationship2>, uid(sourceType)) AND " - + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " <urn>\n" - + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" - + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND 
uid(sourceFilter2) ) { <uid> }\n" - + " }\n" - + "}" - ); - } - - private void doTestGetQueryForRelatedEntitiesDirection(@Nonnull RelationshipDirection direction, @Nonnull String expectedQuery) { - assertEquals( - DgraphGraphService.getQueryForRelatedEntities( - ImmutableList.of("sourceType"), - newFilter(new HashMap<String, String>() {{ - put("urn", "urn:ns:type:source-key"); - put("key", "source-key"); - }}), - ImmutableList.of("destinationType"), - newFilter(new HashMap<String, String>() {{ + assertTrue(schemaWithoutTypes.isEmpty(), "Should be empty if no type field exists"); + + DgraphSchema schemaWithNonListSchema = + DgraphGraphService.getSchema("{\n" + " \"schema\": \"not a list\"" + " }"); + assertTrue(schemaWithNonListSchema.isEmpty(), "Should be empty if schema field is not a list"); + + DgraphSchema schemaWithoutSchema = DgraphGraphService.getSchema("{ }"); + assertTrue(schemaWithoutSchema.isEmpty(), "Should be empty if no schema field exists"); + } + + @Test + public void testGetSchemaDgraph() { + // TODO: test that dgraph schema gets altered + } + + @Test + public void testGetFilterConditions() { + // no filters + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList()), + ""); + + // source type not supported without restricting relationship types + // there must be as many relation type filter names as there are relationships + assertEquals( + DgraphGraphService.getFilterConditions( + "sourceTypeFilter", + null, + Collections.emptyList(), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceTypeFilter))\n" + + " )\n" + + " )"); + + // destination type + assertEquals( + DgraphGraphService.getFilterConditions( + null, + "destinationTypeFilter", + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + " uid(destinationTypeFilter)\n" + " )"); + + // source filter not supported without restricting relationship types + // there must be as many relation type filter names as there are relationships + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter))\n" + + " )\n" + + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter1", "sourceFilter2"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(<relationship>, uid(sourceFilter1)) AND " + + "uid_in(<relationship>, uid(sourceFilter2))\n" + + " )\n" + + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter1", "sourceFilter2"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter1", "RelationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceFilter1)) AND " + + "uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(RelationshipTypeFilter2) AND uid_in(<relationship2>, 
uid(sourceFilter1)) AND " + + "uid_in(<relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " )"); + + // destination filters + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Arrays.asList("destinationFilter"), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + " uid(destinationFilter)\n" + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Arrays.asList("destinationFilter1", "destinationFilter2"), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2)\n" + + " )"); + + // relationship type filters require relationship types + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Collections.emptyList(), + Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " (\n" + + " uid(relationshipTypeFilter1) OR\n" + + " uid(relationshipTypeFilter2)\n" + + " )\n" + + " )"); + + // all filters at once + assertEquals( + DgraphGraphService.getFilterConditions( + "sourceTypeFilter", + "destinationTypeFilter", + Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("destinationFilter1", "destinationFilter2"), + Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " uid(destinationTypeFilter) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipTypeFilter1) AND uid_in(<relationship1>, uid(sourceTypeFilter)) AND " + + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipTypeFilter2) AND uid_in(<relationship2>, uid(sourceTypeFilter)) AND " + + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " )"); + + // TODO: check getFilterConditions throws an exception when relationshipTypes and + // relationshipTypeFilterNames do not have the same size + } + + @Test + public void testGetRelationships() { + // no relationships + assertEquals( + DgraphGraphService.getRelationships(null, Collections.emptyList(), Collections.emptyList()), + Collections.emptyList()); + + // one relationship but no filters + assertEquals( + DgraphGraphService.getRelationships( + null, Collections.emptyList(), Arrays.asList("relationship")), + Arrays.asList("<relationship> { <uid> }")); + + // more relationship and source type filter + assertEquals( + DgraphGraphService.getRelationships( + "sourceTypeFilter", + Collections.emptyList(), + Arrays.asList("relationship1", "~relationship2")), + Arrays.asList( + "<relationship1> @filter( uid(sourceTypeFilter) ) { <uid> }", + "<~relationship2> @filter( uid(sourceTypeFilter) ) { <uid> }")); + + // more relationship, source type and source filters + assertEquals( + DgraphGraphService.getRelationships( + "sourceTypeFilter", + Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("relationship1", "~relationship2")), + Arrays.asList( + "<relationship1> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", + "<~relationship2> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }")); + + // more relationship and only source filters + assertEquals( + DgraphGraphService.getRelationships( + null, + 
Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("relationship1", "~relationship2", "relationship3")), + Arrays.asList( + "<relationship1> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", + "<~relationship2> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }", + "<relationship3> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }")); + + // two relationship and only one source filter + assertEquals( + DgraphGraphService.getRelationships( + null, Arrays.asList("sourceFilter"), Arrays.asList("~relationship1", "~relationship2")), + Arrays.asList( + "<~relationship1> @filter( uid(sourceFilter) ) { <uid> }", + "<~relationship2> @filter( uid(sourceFilter) ) { <uid> }")); + } + + @Test + public void testGetRelationshipCondition() { + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", "relationshipFilter", null, Collections.emptyList()), + "uid(relationshipFilter)"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", "relationshipFilter", "destinationTypeFilter", Collections.emptyList()), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + "destinationTypeFilter", + Arrays.asList("destinationFilter")), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " + + "uid_in(<relationship>, uid(destinationFilter))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + "destinationTypeFilter", + Arrays.asList("destinationFilter1", "destinationFilter2")), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationTypeFilter)) AND " + + "uid_in(<relationship>, uid(destinationFilter1)) AND uid_in(<relationship>, uid(destinationFilter2))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + null, + Arrays.asList("destinationFilter1", "destinationFilter2")), + "uid(relationshipFilter) AND uid_in(<relationship>, uid(destinationFilter1)) AND " + + "uid_in(<relationship>, uid(destinationFilter2))"); + } + + @Test + public void testGetQueryForRelatedEntitiesOutgoing() { + doTestGetQueryForRelatedEntitiesDirection( + RelationshipDirection.OUTGOING, + "query {\n" + + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" + + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" + + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" + + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" + + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" + + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" + + " relationshipType1 as var(func: has(<~relationship1>))\n" + + " relationshipType2 as var(func: has(<~relationship2>))\n" + + "\n" + + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " + + "first: 100, offset: 0) @filter(\n" + + " uid(destinationType) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipType1) AND uid_in(<~relationship1>, uid(sourceType)) AND " + + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType2) AND uid_in(<~relationship2>, uid(sourceType)) AND " + + "uid_in(<~relationship2>, uid(sourceFilter1)) AND 
uid_in(<~relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " ) {\n" + + " <urn>\n" + + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " }\n" + + "}"); + } + + @Test + public void testGetQueryForRelatedEntitiesIncoming() { + doTestGetQueryForRelatedEntitiesDirection( + RelationshipDirection.INCOMING, + "query {\n" + + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" + + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" + + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" + + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" + + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" + + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" + + " relationshipType1 as var(func: has(<relationship1>))\n" + + " relationshipType2 as var(func: has(<relationship2>))\n" + + "\n" + + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " + + "first: 100, offset: 0) @filter(\n" + + " uid(destinationType) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " + + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " + + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " ) {\n" + + " <urn>\n" + + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " }\n" + + "}"); + } + + @Test + public void testGetQueryForRelatedEntitiesUndirected() { + doTestGetQueryForRelatedEntitiesDirection( + RelationshipDirection.UNDIRECTED, + "query {\n" + + " sourceType as var(func: eq(<type>, [\"sourceType\"]))\n" + + " destinationType as var(func: eq(<type>, [\"destinationType\"]))\n" + + " sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n" + + " sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n" + + " destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n" + + " destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n" + + " relationshipType1 as var(func: has(<relationship1>))\n" + + " relationshipType2 as var(func: has(<relationship2>))\n" + + " relationshipType3 as var(func: has(<~relationship1>))\n" + + " relationshipType4 as var(func: has(<~relationship2>))\n" + + "\n" + + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, " + + "relationshipType1, relationshipType2, relationshipType3, relationshipType4), first: 100, offset: 0) @filter(\n" + + " uid(destinationType) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND " + + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND " + + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType3) AND uid_in(<~relationship1>, 
uid(sourceType)) AND " + + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" + + " uid(relationshipType4) AND uid_in(<~relationship2>, uid(sourceType)) AND " + + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" + + " )\n" + + " ) {\n" + + " <urn>\n" + + " <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n" + + " }\n" + + "}"); + } + + private void doTestGetQueryForRelatedEntitiesDirection( + @Nonnull RelationshipDirection direction, @Nonnull String expectedQuery) { + assertEquals( + DgraphGraphService.getQueryForRelatedEntities( + ImmutableList.of("sourceType"), + newFilter( + new HashMap<String, String>() { + { + put("urn", "urn:ns:type:source-key"); + put("key", "source-key"); + } + }), + ImmutableList.of("destinationType"), + newFilter( + new HashMap<String, String>() { + { + put("urn", "urn:ns:type:dest-key"); + put("key", "dest-key"); + } + }), + Arrays.asList("relationship1", "relationship2"), + newRelationshipFilter(EMPTY_FILTER, direction), + 0, + 100), + expectedQuery); + } + + @Test + public void testGetDestinationUrnsFromResponseData() { + // no results + assertEquals( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put("result", Collections.emptyList()); + } + }), + Collections.emptyList()); + + // one result and one relationship with two sources + assertEquals( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put( + "result", + Arrays.asList( + new HashMap<String, Object>() { + { put("urn", "urn:ns:type:dest-key"); - put("key", "dest-key"); - }}), - Arrays.asList("relationship1", "relationship2"), - newRelationshipFilter(EMPTY_FILTER, direction), - 0, 100 - ), - expectedQuery - ); - } - - @Test - public void testGetDestinationUrnsFromResponseData() { - // no results - assertEquals( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Collections.emptyList()); - }} - ), - Collections.emptyList() - ); - - // one result and one relationship with two sources - assertEquals( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Arrays.asList( - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key"); - put("~pred", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }} - )); - }} - ), - Arrays.asList(new RelatedEntity("pred", "urn:ns:type:dest-key")) - ); - - // multiple results and one relationship - assertEquals( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Arrays.asList( - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-1"); - put("~pred", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-2"); - put("~pred", Arrays.asList( - new HashMap<String, Object>() 
{{ - put("uid", "0x2"); - }} - )); - }} - )); - }} - ), - Arrays.asList( - new RelatedEntity("pred", "urn:ns:type:dest-key-1"), - new RelatedEntity("pred", "urn:ns:type:dest-key-2") - ) - ); - - // multiple results and relationships - assertEqualsAnyOrder( - DgraphGraphService.getRelatedEntitiesFromResponseData( - new HashMap<String, Object>() {{ - put("result", Arrays.asList( - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-1"); - put("~pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-2"); - put("~pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-3"); - put("pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x3"); - }} - )); - put("~pred1", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x1"); - }}, - new HashMap<String, Object>() {{ - put("uid", "0x4"); - }} - )); - }}, - new HashMap<String, Object>() {{ - put("urn", "urn:ns:type:dest-key-4"); - put("pred2", Arrays.asList( - new HashMap<String, Object>() {{ - put("uid", "0x5"); - }} - )); - }} - )); - }} - ), - Arrays.asList( - new RelatedEntity("pred1", "urn:ns:type:dest-key-1"), - new RelatedEntity("pred1", "urn:ns:type:dest-key-2"), - new RelatedEntity("pred1", "urn:ns:type:dest-key-3"), - new RelatedEntity("pred2", "urn:ns:type:dest-key-4") - ), - RELATED_ENTITY_COMPARATOR - ); - } - - @Override - public void testPopulatedGraphServiceGetLineageMultihop() { - // TODO: Remove this overridden method once the multihop for dGraph is implemented! - } + put( + "~pred", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + })); + } + }), + Arrays.asList(new RelatedEntity("pred", "urn:ns:type:dest-key"))); + + // multiple results and one relationship + assertEquals( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put( + "result", + Arrays.asList( + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-1"); + put( + "~pred", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-2"); + put( + "~pred", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + })); + } + }), + Arrays.asList( + new RelatedEntity("pred", "urn:ns:type:dest-key-1"), + new RelatedEntity("pred", "urn:ns:type:dest-key-2"))); + + // multiple results and relationships + assertEqualsAnyOrder( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap<String, Object>() { + { + put( + "result", + Arrays.asList( + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-1"); + put( + "~pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-2"); + put( + "~pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", 
"urn:ns:type:dest-key-3"); + put( + "pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x3"); + } + })); + put( + "~pred1", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x1"); + } + }, + new HashMap<String, Object>() { + { + put("uid", "0x4"); + } + })); + } + }, + new HashMap<String, Object>() { + { + put("urn", "urn:ns:type:dest-key-4"); + put( + "pred2", + Arrays.asList( + new HashMap<String, Object>() { + { + put("uid", "0x5"); + } + })); + } + })); + } + }), + Arrays.asList( + new RelatedEntity("pred1", "urn:ns:type:dest-key-1"), + new RelatedEntity("pred1", "urn:ns:type:dest-key-2"), + new RelatedEntity("pred1", "urn:ns:type:dest-key-3"), + new RelatedEntity("pred2", "urn:ns:type:dest-key-4")), + RELATED_ENTITY_COMPARATOR); + } + + @Override + public void testPopulatedGraphServiceGetLineageMultihop() { + // TODO: Remove this overridden method once the multihop for dGraph is implemented! + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java index 6f63209f9c380..f1113368601c6 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.graph.neo4j; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.FabricType; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.DataPlatformUrn; @@ -18,7 +21,12 @@ import com.linkedin.metadata.query.filter.RelationshipFilter; import java.util.Arrays; import java.util.Collections; - +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.neo4j.driver.Driver; import org.neo4j.driver.GraphDatabase; import org.testng.SkipException; @@ -27,17 +35,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.testng.Assert.assertEquals; - - public class Neo4jGraphServiceTest extends GraphServiceTestBase { private Neo4jTestServerBuilder _serverBuilder; @@ -51,7 +48,8 @@ public void init() { _serverBuilder = new Neo4jTestServerBuilder(); _serverBuilder.newServer(); _driver = GraphDatabase.driver(_serverBuilder.boltURI()); - _client = new Neo4jGraphService(new LineageRegistry(SnapshotEntityRegistry.getInstance()), _driver); + _client = + new Neo4jGraphService(new LineageRegistry(SnapshotEntityRegistry.getInstance()), _driver); _client.clear(); } @@ -66,17 +64,16 @@ public void tearDown() { } @Override - protected @Nonnull - GraphService getGraphService() { + protected @Nonnull GraphService getGraphService() { return _client; } @Override - protected void syncAfterWrite() { - } + protected void syncAfterWrite() {} @Override - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { // https://github.com/datahub-project/datahub/issues/3118 // 
Neo4jGraphService produces duplicates, which is here ignored until fixed // actual.count and actual.total not tested due to duplicates @@ -85,20 +82,20 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie } @Override - protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected, Comparator<T> comparator) { + protected <T> void assertEqualsAnyOrder( + List<T> actual, List<T> expected, Comparator<T> comparator) { // https://github.com/datahub-project/datahub/issues/3118 // Neo4jGraphService produces duplicates, which is here ignored until fixed - assertEquals( - new HashSet<>(actual), - new HashSet<>(expected) - ); + assertEquals(new HashSet<>(actual), new HashSet<>(expected)); } @Override - public void testFindRelatedEntitiesSourceType(String datasetType, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3119 throw new SkipException("Neo4jGraphService does not support empty source type"); @@ -108,14 +105,17 @@ public void testFindRelatedEntitiesSourceType(String datasetType, // only test cases with "user" type fail due to this bug throw new SkipException("Neo4jGraphService does not apply source / destination types"); } - super.testFindRelatedEntitiesSourceType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesSourceType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationType(String datasetType, - List<String> relationshipTypes, - RelationshipFilter relationships, - List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3119 throw new SkipException("Neo4jGraphService does not support empty destination type"); @@ -125,7 +125,8 @@ public void testFindRelatedEntitiesDestinationType(String datasetType, // only test cases with "HasOwner" relatioship fail due to this bug throw new SkipException("Neo4jGraphService does not apply source / destination types"); } - super.testFindRelatedEntitiesDestinationType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Test @@ -160,7 +161,8 @@ public void testRemoveEdgesFromNodeNoRelationshipTypes() { @Override public void testConcurrentAddEdge() { // https://github.com/datahub-project/datahub/issues/3141 - throw new SkipException("Neo4jGraphService does not manage to add all edges added concurrently"); + throw new SkipException( + "Neo4jGraphService does not manage to add all edges added concurrently"); } @Test @@ -179,28 +181,42 @@ public void testConcurrentRemoveNodes() { @Test public void testRemoveEdge() throws Exception { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("snowflake"), "test", 
FabricType.TEST); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); TagUrn tagUrn = new TagUrn("newTag"); Edge edge = new Edge(datasetUrn, tagUrn, TAG_RELATIONSHIP, null, null, null, null, null); getGraphService().addEdge(edge); - RelatedEntitiesResult result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + RelatedEntitiesResult result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 1); getGraphService().removeEdge(edge); - result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 0); } private Set<UrnArray> getPathUrnArraysFromLineageResult(EntityLineageResult result) { - return result.getRelationships() - .stream() + return result.getRelationships().stream() .map(x -> x.getPaths().get(0)) .collect(Collectors.toSet()); } @@ -209,22 +225,23 @@ private Set<UrnArray> getPathUrnArraysFromLineageResult(EntityLineageResult resu public void testGetLineage() { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d5 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - new Edge(datasetFiveUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null), - - // another path between d2 and d5 which is shorter - // d1 <-DownstreamOf- d4 <-DownstreamOf- d5 - new Edge(datasetFourUrn, datasetOneUrn, downstreamOf, 13L, null, 13L, null, null), - new Edge(datasetFiveUrn, datasetFourUrn, downstreamOf, 13L, null, 13L, null, null) - ); + List<Edge> edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d5 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + new Edge(datasetFiveUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null), + + // another path between d2 and d5 which is shorter + // d1 <-DownstreamOf- d4 <-DownstreamOf- d5 + new Edge(datasetFourUrn, datasetOneUrn, downstreamOf, 13L, null, 13L, 
null, null), + new Edge(datasetFiveUrn, datasetFourUrn, downstreamOf, 13L, null, 13L, null, null)); edges.forEach(service::addEdge); // simple path finding - final var upstreamLineageDataset3Hop3 = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); + final var upstreamLineageDataset3Hop3 = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); assertEquals(upstreamLineageDataset3Hop3.getTotal().intValue(), 3); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDataset3Hop3), @@ -234,7 +251,8 @@ public void testGetLineage() { new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); // simple path finding - final var upstreamLineageDatasetFiveHop2 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + final var upstreamLineageDatasetFiveHop2 = + service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 2); assertEquals(upstreamLineageDatasetFiveHop2.getTotal().intValue(), 4); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDatasetFiveHop2), @@ -244,8 +262,10 @@ public void testGetLineage() { new UrnArray(datasetFiveUrn, datasetFourUrn), new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); - // there are two paths from p5 to p1, one longer and one shorter, and the longer one is discarded from result - final var upstreamLineageDataset5Hop5 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 5); + // there are two paths from p5 to p1, one longer and one shorter, and the longer one is + // discarded from result + final var upstreamLineageDataset5Hop5 = + service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 5); assertEquals(upstreamLineageDataset5Hop5.getTotal().intValue(), 5); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDataset5Hop5), @@ -257,7 +277,8 @@ public void testGetLineage() { new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); // downstream lookup - final var downstreamLineageDataset1Hop2 = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + final var downstreamLineageDataset1Hop2 = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); assertEquals(downstreamLineageDataset1Hop2.getTotal().intValue(), 4); assertEquals( getPathUrnArraysFromLineageResult(downstreamLineageDataset1Hop2), @@ -272,17 +293,18 @@ public void testGetLineage() { public void testGetLineageTimeFilterQuery() throws Exception { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d4 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - new Edge(datasetFourUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null) - ); + List<Edge> edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d4 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + new Edge(datasetFourUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null)); edges.forEach(service::addEdge); // no time filtering - EntityLineageResult upstreamLineageTwoHops = 
service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + EntityLineageResult upstreamLineageTwoHops = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2); assertEquals(upstreamLineageTwoHops.getTotal().intValue(), 2); assertEquals(upstreamLineageTwoHops.getRelationships().size(), 2); assertEquals( @@ -292,16 +314,17 @@ public void testGetLineageTimeFilterQuery() throws Exception { new UrnArray(datasetFourUrn, datasetThreeUrn, datasetTwoUrn))); // with time filtering - EntityLineageResult upstreamLineageTwoHopsWithTimeFilter = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2, 10L, 12L); + EntityLineageResult upstreamLineageTwoHopsWithTimeFilter = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2, 10L, 12L); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getTotal().intValue(), 1); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getRelationships().size(), 1); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageTwoHopsWithTimeFilter), - Set.of( - new UrnArray(datasetFourUrn, datasetThreeUrn))); + Set.of(new UrnArray(datasetFourUrn, datasetThreeUrn))); // with time filtering - EntityLineageResult upstreamLineageTimeFilter = service.getLineage(datasetTwoUrn, LineageDirection.UPSTREAM, 0, 1000, 4, 2L, 6L); + EntityLineageResult upstreamLineageTimeFilter = + service.getLineage(datasetTwoUrn, LineageDirection.UPSTREAM, 0, 1000, 4, 2L, 6L); assertEquals(upstreamLineageTimeFilter.getTotal().intValue(), 2); assertEquals(upstreamLineageTimeFilter.getRelationships().size(), 2); assertEquals( @@ -311,32 +334,33 @@ public void testGetLineageTimeFilterQuery() throws Exception { new UrnArray(datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); // with time filtering - EntityLineageResult downstreamLineageTimeFilter = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 4, 0L, 4L); + EntityLineageResult downstreamLineageTimeFilter = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 4, 0L, 4L); assertEquals(downstreamLineageTimeFilter.getTotal().intValue(), 1); assertEquals(downstreamLineageTimeFilter.getRelationships().size(), 1); assertEquals( getPathUrnArraysFromLineageResult(downstreamLineageTimeFilter), - Set.of( - new UrnArray(datasetOneUrn, dataJobOneUrn))); + Set.of(new UrnArray(datasetOneUrn, dataJobOneUrn))); } @Test public void testGetLineageTimeFilteringSkipsShorterButNonMatchingPaths() { GraphService service = getGraphService(); - List<Edge> edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 5L, null, 5L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 7L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + List<Edge> edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 5L, null, 5L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 7L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - // d1 <-DownstreamOf- d3 (shorter path from d3 to d1, but with very old time) - new Edge(datasetThreeUrn, datasetOneUrn, downstreamOf, 1L, null, 2L, null, null) - ); + // d1 <-DownstreamOf- d3 (shorter path from d3 to d1, but with very old time) + new Edge(datasetThreeUrn, datasetOneUrn, downstreamOf, 1L, null, 2L, null, null)); edges.forEach(service::addEdge); // 
no time filtering, shorter path from d3 to d1 is returned - EntityLineageResult upstreamLineageNoTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); + EntityLineageResult upstreamLineageNoTimeFiltering = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageNoTimeFiltering), Set.of( @@ -345,7 +369,8 @@ public void testGetLineageTimeFilteringSkipsShorterButNonMatchingPaths() { new UrnArray(datasetThreeUrn, datasetOneUrn))); // with time filtering, shorter path from d3 to d1 is excluded so longer path is returned - EntityLineageResult upstreamLineageTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3, 3L, 17L); + EntityLineageResult upstreamLineageTimeFiltering = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3, 3L, 17L); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageTimeFiltering), Set.of( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java index ba4e4cec37914..fa04de340e12f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java @@ -1,9 +1,8 @@ package com.linkedin.metadata.graph.neo4j; +import apoc.path.PathExplorer; import java.io.File; import java.net.URI; - -import apoc.path.PathExplorer; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.harness.Neo4j; import org.neo4j.harness.Neo4jBuilder; @@ -19,9 +18,7 @@ private Neo4jTestServerBuilder(Neo4jBuilder builder) { } public Neo4jTestServerBuilder() { - this(new InProcessNeo4jBuilder() - .withProcedure(PathExplorer.class) - ); + this(new InProcessNeo4jBuilder().withProcedure(PathExplorer.class)); } public Neo4jTestServerBuilder(File workingDirectory) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java index baed3ade0d207..9fc9490bfd7ef 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java @@ -23,7 +23,8 @@ public class ESGraphQueryDAOTest { - private static final String TEST_QUERY_FILE = "elasticsearch/sample_filters/lineage_query_filters_1.json"; + private static final String TEST_QUERY_FILE = + "elasticsearch/sample_filters/lineage_query_filters_1.json"; @Test private static void testGetQueryForLineageFullArguments() throws Exception { @@ -32,20 +33,19 @@ private static void testGetQueryForLineageFullArguments() throws Exception { String expectedQuery = Resources.toString(url, StandardCharsets.UTF_8); List<Urn> urns = new ArrayList<>(); - List<LineageRegistry.EdgeInfo> edgeInfos = new ArrayList<>(ImmutableList.of( - new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, Constants.DATASET_ENTITY_NAME) - )); + List<LineageRegistry.EdgeInfo> edgeInfos = + new ArrayList<>( + ImmutableList.of( + new LineageRegistry.EdgeInfo( + "DownstreamOf", + RelationshipDirection.INCOMING, + Constants.DATASET_ENTITY_NAME))); GraphFilters graphFilters = new GraphFilters(ImmutableList.of(Constants.DATASET_ENTITY_NAME)); Long startTime = 0L; Long endTime = 1L; - QueryBuilder 
builder = ESGraphQueryDAO.getQueryForLineage( - urns, - edgeInfos, - graphFilters, - startTime, - endTime - ); + QueryBuilder builder = + ESGraphQueryDAO.getQueryForLineage(urns, edgeInfos, graphFilters, startTime, endTime); Assert.assertEquals(builder.toString(), expectedQuery); } @@ -59,73 +59,51 @@ private static void testAddEdgeToPaths() { // Case 0: Add with no existing paths. Map<Urn, UrnArrayArray> nodePaths = new HashMap<>(); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - UrnArrayArray expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + UrnArrayArray expectedPathsToChild = + new UrnArrayArray(ImmutableList.of(new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 1: No paths to parent. nodePaths = new HashMap<>(); - nodePaths.put(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,Other,PROD)"), new UrnArrayArray()); + nodePaths.put( + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,Other,PROD)"), + new UrnArrayArray()); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray(ImmutableList.of(new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 2: 1 Existing Path to Parent Node nodePaths = new HashMap<>(); - Urn testParentParent = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent,PROD)"); - UrnArrayArray existingPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )) - )); + Urn testParentParent = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent,PROD)"); + UrnArrayArray existingPathsToParent = + new UrnArrayArray( + ImmutableList.of(new UrnArray(ImmutableList.of(testParentParent, testParent)))); nodePaths.put(testParent, existingPathsToParent); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 3: > 1 Existing Paths to Parent Node nodePaths = new HashMap<>(); - Urn testParentParent2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent2,PROD)"); - UrnArrayArray existingPathsToParent2 = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + Urn testParentParent2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent2,PROD)"); + UrnArrayArray existingPathsToParent2 = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); nodePaths.put(testParent, existingPathsToParent2); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - 
testChild - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)), + new UrnArray(ImmutableList.of(testParentParent2, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 4: Build graph from empty by adding multiple edges @@ -139,34 +117,23 @@ private static void testAddEdgeToPaths() { Assert.assertNull(nodePaths.get(testParentParent2)); // Verify paths to testParent - UrnArrayArray expectedPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + UrnArrayArray expectedPathsToParent = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); Assert.assertEquals(nodePaths.get(testParent), expectedPathsToParent); // Verify paths to testChild - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - testChild - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)), + new UrnArray(ImmutableList.of(testParentParent2, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); - // Case 5: Mainly documentation: Verify that if you build the graph out of order bad things happen. + // Case 5: Mainly documentation: Verify that if you build the graph out of order bad things + // happen. // Also test duplicate edge addition nodePaths = new HashMap<>(); // Add edge to testChild first! Before path to testParent has been constructed. 
@@ -182,29 +149,19 @@ private static void testAddEdgeToPaths() { Assert.assertNull(nodePaths.get(testParentParent2)); // Verify paths to testParent - expectedPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + expectedPathsToParent = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); Assert.assertEquals(nodePaths.get(testParent), expectedPathsToParent); // Verify paths to testChild are INCORRECT: partial & duplicated - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )), - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParent, testChild)), + new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index 0ce43c9d31571..2f8fba0083aa7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.graph.search; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; @@ -26,6 +30,12 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import io.datahubproject.test.search.SearchTestUtils; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import javax.annotation.Nonnull; import org.junit.Assert; import org.opensearch.client.RestHighLevelClient; import org.testng.SkipException; @@ -33,27 +43,16 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; - -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.testng.Assert.assertEquals; - -abstract public class SearchGraphServiceTestBase extends GraphServiceTestBase { +public abstract class SearchGraphServiceTestBase extends GraphServiceTestBase { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); private final IndexConvention _indexConvention = new 
IndexConventionImpl(null); private final String _indexName = _indexConvention.getIndexName(INDEX_NAME); @@ -76,9 +75,19 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchGraphService buildService() { LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); - ESGraphQueryDAO readDAO = new ESGraphQueryDAO(getSearchClient(), lineageRegistry, _indexConvention, GraphQueryConfiguration.testDefaults); + ESGraphQueryDAO readDAO = + new ESGraphQueryDAO( + getSearchClient(), + lineageRegistry, + _indexConvention, + GraphQueryConfiguration.testDefaults); ESGraphWriteDAO writeDAO = new ESGraphWriteDAO(_indexConvention, getBulkProcessor(), 1); - return new ElasticSearchGraphService(lineageRegistry, getBulkProcessor(), _indexConvention, writeDAO, readDAO, + return new ElasticSearchGraphService( + lineageRegistry, + getBulkProcessor(), + _indexConvention, + writeDAO, + readDAO, getIndexBuilder()); } @@ -94,7 +103,8 @@ protected void syncAfterWrite() throws Exception { } @Override - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { // https://github.com/datahub-project/datahub/issues/3115 // ElasticSearchGraphService produces duplicates, which is here ignored until fixed // actual.count and actual.total not tested due to duplicates @@ -103,112 +113,160 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie } @Override - protected <T> void assertEqualsAnyOrder(List<T> actual, List<T> expected, Comparator<T> comparator) { + protected <T> void assertEqualsAnyOrder( + List<T> actual, List<T> expected, Comparator<T> comparator) { // https://github.com/datahub-project/datahub/issues/3115 // ElasticSearchGraphService produces duplicates, which is here ignored until fixed assertEquals(new HashSet<>(actual), new HashSet<>(expected)); } @Override - public void testFindRelatedEntitiesSourceEntityFilter(Filter sourceEntityFilter, List<String> relationshipTypes, - RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceEntityFilter( + Filter sourceEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testFindRelatedEntitiesSourceEntityFilter(sourceEntityFilter, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesSourceEntityFilter( + sourceEntityFilter, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationEntityFilter(Filter destinationEntityFilter, - List<String> relationshipTypes, RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) + public void testFindRelatedEntitiesDestinationEntityFilter( + Filter destinationEntityFilter, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) throws Exception { if (relationships.getDirection() == 
RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testFindRelatedEntitiesDestinationEntityFilter(destinationEntityFilter, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationEntityFilter( + destinationEntityFilter, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesSourceType(String datasetType, List<String> relationshipTypes, - RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3116 throw new SkipException("ElasticSearchGraphService does not support empty source type"); } - super.testFindRelatedEntitiesSourceType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesSourceType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationType(String datasetType, List<String> relationshipTypes, - RelationshipFilter relationships, List<RelatedEntity> expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String datasetType, + List<String> relationshipTypes, + RelationshipFilter relationships, + List<RelatedEntity> expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3116 throw new SkipException("ElasticSearchGraphService does not support empty destination type"); } - super.testFindRelatedEntitiesDestinationType(datasetType, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Test @Override public void testFindRelatedEntitiesNoRelationshipTypes() { // https://github.com/datahub-project/datahub/issues/3117 - throw new SkipException("ElasticSearchGraphService does not support empty list of relationship types"); + throw new SkipException( + "ElasticSearchGraphService does not support empty list of relationship types"); } @Override - public void testRemoveEdgesFromNode(@Nonnull Urn nodeToRemoveFrom, @Nonnull List<String> relationTypes, - @Nonnull RelationshipFilter relationshipFilter, 
List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove, + public void testRemoveEdgesFromNode( + @Nonnull Urn nodeToRemoveFrom, + @Nonnull List<String> relationTypes, + @Nonnull RelationshipFilter relationshipFilter, + List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove, List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove, List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove, - List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) throws Exception { + List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) + throws Exception { if (relationshipFilter.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testRemoveEdgesFromNode(nodeToRemoveFrom, relationTypes, relationshipFilter, - expectedOutgoingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove, - expectedOutgoingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove); + super.testRemoveEdgesFromNode( + nodeToRemoveFrom, + relationTypes, + relationshipFilter, + expectedOutgoingRelatedUrnsBeforeRemove, + expectedIncomingRelatedUrnsBeforeRemove, + expectedOutgoingRelatedUrnsAfterRemove, + expectedIncomingRelatedUrnsAfterRemove); } @Test @Override public void testRemoveEdgesFromNodeNoRelationshipTypes() { // https://github.com/datahub-project/datahub/issues/3117 - throw new SkipException("ElasticSearchGraphService does not support empty list of relationship types"); + throw new SkipException( + "ElasticSearchGraphService does not support empty list of relationship types"); } @Test // TODO: Only in ES for now since unimplemented in other services public void testRemoveEdge() throws Exception { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); TagUrn tagUrn = new TagUrn("newTag"); Edge edge = new Edge(datasetUrn, tagUrn, TAG_RELATIONSHIP, null, null, null, null, null); getGraphService().addEdge(edge); syncAfterWrite(); - RelatedEntitiesResult result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + RelatedEntitiesResult result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 1); getGraphService().removeEdge(edge); syncAfterWrite(); - result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + 
newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 0); } @@ -239,15 +297,39 @@ public void testTimestampLineage() throws Exception { // Populate one upstream and two downstream edges at initialTime Long initialTime = 1000L; - List<Edge> edges = Arrays.asList( - // One upstream edge - new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, initialTime, null, initialTime, null, null), - // Two downstream - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, initialTime, null, initialTime, null, null), - new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, initialTime, null, initialTime, null, null), - // One with null values, should always be returned - new Edge(datasetFiveUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null) - ); + List<Edge> edges = + Arrays.asList( + // One upstream edge + new Edge( + datasetTwoUrn, + datasetOneUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + // Two downstream + new Edge( + datasetThreeUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + new Edge( + datasetFourUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + // One with null values, should always be returned + new Edge(datasetFiveUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)); edges.forEach(getGraphService()::addEdge); syncAfterWrite(); @@ -259,120 +341,103 @@ public void testTimestampLineage() throws Exception { Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Timestamp before - upstreamResult = getUpstreamLineage(datasetTwoUrn, - 0L, - initialTime - 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - 0L, - initialTime - 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, 0L, initialTime - 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, 0L, initialTime - 10); Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); // Timestamp after - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime + 10, - initialTime + 100); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime + 10, - initialTime + 100); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); // Timestamp included - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime - 10, - initialTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime - 10, - initialTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Update only one of the downstream edges Long updatedTime = 2000L; - edges = Arrays.asList( - new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, initialTime, null, updatedTime, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, initialTime, 
null, updatedTime, null, null) - ); + edges = + Arrays.asList( + new Edge( + datasetTwoUrn, + datasetOneUrn, + downstreamOf, + initialTime, + null, + updatedTime, + null, + null), + new Edge( + datasetThreeUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + updatedTime, + null, + null)); edges.forEach(getGraphService()::addEdge); syncAfterWrite(); // Without timestamps - upstreamResult = getUpstreamLineage(datasetTwoUrn, - null, - null); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - null, - null); + upstreamResult = getUpstreamLineage(datasetTwoUrn, null, null); + downstreamResult = getDownstreamLineage(datasetTwoUrn, null, null); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Window includes initial time and updated time - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime - 10, - updatedTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime - 10, - updatedTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Window includes updated time but not initial time - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime + 10, - updatedTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime + 10, - updatedTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(2), downstreamResult.getTotal()); - } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The Upstream lineage for urn from the window from startTime to endTime */ private EntityLineageResult getUpstreamLineage(Urn urn, Long startTime, Long endTime) { - return getLineage(urn, - LineageDirection.UPSTREAM, - startTime, - endTime); + return getLineage(urn, LineageDirection.UPSTREAM, startTime, endTime); } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The Downstream lineage for urn from the window from startTime to endTime */ private EntityLineageResult getDownstreamLineage(Urn urn, Long startTime, Long endTime) { - return getLineage(urn, - LineageDirection.DOWNSTREAM, - startTime, - endTime); + return getLineage(urn, LineageDirection.DOWNSTREAM, startTime, endTime); } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param direction Direction to query (upstream/downstream) * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The lineage for urn from the window from startTime to endTime in direction */ - private EntityLineageResult getLineage(Urn urn, LineageDirection direction, Long startTime, Long endTime) { - return getGraphService().getLineage(urn, - direction, - 0, - 0, - 3, - startTime, - endTime); + private 
EntityLineageResult getLineage( + Urn urn, LineageDirection direction, Long startTime, Long endTime) { + return getGraphService().getLineage(urn, direction, 0, 0, 3, startTime, endTime); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java index 989f9ae197239..3c892dddb70e1 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java @@ -1,17 +1,18 @@ package com.linkedin.metadata.graph.search; import com.google.common.io.Resources; +import com.linkedin.metadata.graph.elastic.TimeFilterUtils; import java.net.URL; import java.nio.charset.StandardCharsets; - -import com.linkedin.metadata.graph.elastic.TimeFilterUtils; import org.opensearch.index.query.QueryBuilder; import org.testng.Assert; import org.testng.annotations.Test; public class TimeFilterUtilsTest { - private static final String TEST_QUERY_FILE = "elasticsearch/sample_filters/lineage_time_query_filters_1.json"; + private static final String TEST_QUERY_FILE = + "elasticsearch/sample_filters/lineage_time_query_filters_1.json"; + @Test private static void testGetEdgeTimeFilterQuery() throws Exception { URL url = Resources.getResource(TEST_QUERY_FILE); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java index 7b550311bf823..b2c49857cb0b9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.search.elasticsearch.ElasticSearchSuite; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; - import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -16,12 +15,9 @@ @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class SearchGraphServiceElasticSearchTest extends SearchGraphServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override @@ -45,5 +41,4 @@ protected ESIndexBuilder getIndexBuilder() { public void initTest() { AssertJUnit.assertNotNull(_searchClient); } - } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java index eabfb523fb910..28b545f817539 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java @@ -15,12 +15,9 @@ 
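// Illustrative sketch, grounded only in the hunks above and below: the Elasticsearch and
// OpenSearch graph-service tests are intentionally near-identical thin Spring shells that
// supply engine beans while SearchGraphServiceTestBase owns every assertion. A subclass for
// a hypothetical third engine would differ only in the imported suite; "ThirdEngineSuite"
// is an assumed name, and the getter names besides getIndexBuilder() are inferred by analogy.
@Import({ThirdEngineSuite.class, SearchTestContainerConfiguration.class})
public class SearchGraphServiceThirdEngineTest extends SearchGraphServiceTestBase {
  @Autowired private RestHighLevelClient _searchClient;
  @Autowired private ESBulkProcessor _bulkProcessor;
  @Autowired private ESIndexBuilder _esIndexBuilder;

  @NotNull
  @Override
  protected RestHighLevelClient getSearchClient() {
    return _searchClient;
  }

  @NotNull
  @Override
  protected ESBulkProcessor getBulkProcessor() {
    return _bulkProcessor;
  }

  @NotNull
  @Override
  protected ESIndexBuilder getIndexBuilder() {
    return _esIndexBuilder;
  }
}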
@Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class SearchGraphServiceOpenSearchTest extends SearchGraphServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override @@ -44,5 +41,4 @@ protected ESIndexBuilder getIndexBuilder() { public void initTest() { AssertJUnit.assertNotNull(_searchClient); } - } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java index c6677c171b30e..df332cacaa751 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.graph.sibling; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.Siblings; @@ -24,27 +29,23 @@ import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class SiblingGraphServiceTest { - /** - * Some test URN types. - */ + /** Some test URN types. */ protected static String datasetType = "dataset"; - /** - * Some test datasets. - */ - protected static String datasetOneUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; - protected static String datasetTwoUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; - protected static String datasetThreeUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; - protected static String datasetFourUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; - protected static String datasetFiveUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; + /** Some test datasets. 
*/ + protected static String datasetOneUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; + + protected static String datasetTwoUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; + protected static String datasetThreeUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; + protected static String datasetFourUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; + protected static String datasetFiveUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; protected static Urn datasetOneUrn = createFromString(datasetOneUrnString); protected static Urn datasetTwoUrn = createFromString(datasetTwoUrnString); @@ -52,11 +53,9 @@ public class SiblingGraphServiceTest { protected static Urn datasetFourUrn = createFromString(datasetFourUrnString); protected static Urn datasetFiveUrn = createFromString(datasetFiveUrnString); - - /** - * Some test relationships. - */ + /** Some test relationships. */ protected static String downstreamOf = "DownstreamOf"; + protected static String upstreamOf = "UpstreamOf"; private GraphService _graphService; @@ -100,15 +99,15 @@ public void testNoSiblingMetadata() { mockResult.setFiltered(0); mockResult.setRelationships(relationships); - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(null); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert sibling graph service is a pass through in the case that there is no sibling metadataa assertEquals(upstreamLineage, mockResult); @@ -145,24 +144,23 @@ public void testNoSiblingInResults() { mockResult.setFiltered(0); mockResult.setRelationships(relationships); - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); siblingMockResult.setStart(0); siblingMockResult.setTotal(0); siblingMockResult.setCount(0); siblingMockResult.setRelationships(new LineageRelationshipArray()); - when(_graphService.getLineage( - datasetFiveUrn, LineageDirection.UPSTREAM, 0, 97, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 97, 1, null, null)) + .thenReturn(siblingMockResult); Siblings noRelevantSiblingsResponse = new Siblings(); noRelevantSiblingsResponse.setPrimary(true); noRelevantSiblingsResponse.setSiblings(new UrnArray(ImmutableList.of(datasetFiveUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(noRelevantSiblingsResponse); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(noRelevantSiblingsResponse); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -176,17 +174,18 @@ public void testNoSiblingInResults() { 
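// Condensed sketch of the pass-through guarantee that testNoSiblingMetadata above asserts:
// when the queried urn has no Siblings aspect, the wrapper must hand back the delegate's
// result object unchanged. The two-argument SiblingGraphService constructor is an
// assumption here; the real test wires _client in setup code outside these hunks.
GraphService graph = mock(GraphService.class);
EntityService entityService = mock(EntityService.class);
EntityLineageResult delegateResult = new EntityLineageResult();
when(entityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(null);
when(graph.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null))
    .thenReturn(delegateResult);
SiblingGraphService wrapper = new SiblingGraphService(entityService, graph);
assertEquals(
    wrapper.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1), delegateResult);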
dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert sibling graph service is a pass through in the case that your sibling has no lineage assertEquals(upstreamLineage, mockResult); @@ -227,20 +226,18 @@ public void testSiblingInResult() throws Exception { siblingMockResult.setCount(0); siblingMockResult.setRelationships(new LineageRelationshipArray()); - when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 98, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 98, 1, null, null)) + .thenReturn(siblingMockResult); - - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -254,11 +251,11 @@ public void testSiblingInResult() throws Exception { dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); @@ -270,7 +267,8 @@ public void testSiblingInResult() throws Exception { expectedResult.setFiltered(1); expectedResult.setRelationships(new LineageRelationshipArray(relationship1, relationship2)); - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your sibling will be filtered out of your lineage assertEquals(upstreamLineage, expectedResult); @@ -311,7 +309,8 @@ public void testCombineSiblingResult() { 
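// Sketch of the bookkeeping that testSiblingInResult above relies on: a relationship that
// is dropped because it points at the queried entity's own sibling is surfaced through
// setFiltered rather than silently shrinking the page. Field values mirror that test's
// expected result; relationship1 and relationship2 are the fixtures defined in this class.
EntityLineageResult afterFiltering = new EntityLineageResult();
afterFiltering.setStart(0);
afterFiltering.setCount(2);
afterFiltering.setFiltered(1); // the sibling edge removed from the visible page
afterFiltering.setRelationships(new LineageRelationshipArray(relationship1, relationship2));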
expectedRelationships.add(relationship2); expectedRelationships.add(relationship4); - expectedRelationships.add(relationship1); // expect just one relationship1 despite duplicates in sibling lineage + expectedRelationships.add( + relationship1); // expect just one relationship1 despite duplicates in sibling lineage expectedResult.setCount(3); expectedResult.setStart(0); @@ -326,27 +325,39 @@ public void testCombineSiblingResult() { siblingRelationships.add(relationship2); siblingRelationships.add(relationship4); - siblingRelationships.add(relationship1); // duplicate from sibling's lineage, we should not see duplicates in result + siblingRelationships.add( + relationship1); // duplicate from sibling's lineage, we should not see duplicates in result siblingMockResult.setStart(0); siblingMockResult.setTotal(3); siblingMockResult.setCount(2); siblingMockResult.setRelationships(siblingRelationships); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> siblingMockResult.clone()); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> siblingMockResult.clone()); when(_graphService.getLineage( - Mockito.eq(datasetFourUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockResult.clone()); + Mockito.eq(datasetFourUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockResult.clone()); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -360,18 +371,19 @@ public void testCombineSiblingResult() { dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFiveUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFiveUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will be combined with your siblings lineage assertEquals(upstreamLineage, expectedResult); @@ -430,20 +442,18 @@ public void testUpstreamOfSiblings() 
{ siblingMockResult.setCount(2); siblingMockResult.setRelationships(siblingRelationships); - when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null)) + .thenReturn(siblingMockResult); - - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -465,37 +475,37 @@ public void testUpstreamOfSiblings() { dataset5Siblings.setPrimary(true); dataset5Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings), - datasetFiveUrn, ImmutableList.of(dataset5Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings), + datasetFiveUrn, ImmutableList.of(dataset5Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will not contain two siblings assertEquals(upstreamLineage, expectedResult); when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(siblingMockResult); - + datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(siblingMockResult); - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null)) + .thenReturn(mockResult); siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(false); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetFourUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1); @@ -510,7 +520,8 @@ public void testUpstreamOfSiblings() { } // we should be combining lineage of siblings of siblings - // ie. dataset1 has sibling dataset2. 
dataset 2 has siblings dataset1 and dataset3. dataset3 has sibling dataset2. dataset3 has upstream dataset4. + // ie. dataset1 has sibling dataset2. dataset 2 has siblings dataset1 and dataset3. dataset3 has + // sibling dataset2. dataset3 has upstream dataset4. // requesting upstream for dataset1 should give us dataset4 @Test public void testUpstreamOfSiblingSiblings() { @@ -547,57 +558,77 @@ public void testUpstreamOfSiblingSiblings() { emptyLineageResult.setCount(0); when(_graphService.getLineage( - Mockito.eq(datasetOneUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(emptyLineageResult); + Mockito.eq(datasetOneUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(emptyLineageResult); when(_graphService.getLineage( - Mockito.eq(datasetTwoUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(emptyLineageResult); + Mockito.eq(datasetTwoUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(emptyLineageResult); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(mockResult); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(mockResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(true); dataset1Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - when(_mockEntityService.getLatestAspect(datasetOneUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset1Siblings); + when(_mockEntityService.getLatestAspect(datasetOneUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset1Siblings); Siblings dataset2Siblings = new Siblings(); dataset2Siblings.setPrimary(true); dataset2Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetOneUrn, datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetTwoUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset2Siblings); + when(_mockEntityService.getLatestAspect(datasetTwoUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset2Siblings); Siblings dataset3Siblings = new Siblings(); dataset3Siblings.setPrimary(true); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset3Siblings); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset3Siblings); Siblings dataset4Siblings = new Siblings(); dataset4Siblings.setPrimary(true); dataset4Siblings.setSiblings(new UrnArray()); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset4Siblings); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset4Siblings); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings) - ); + Map<Urn, 
List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; for (Urn urn : List.of(datasetOneUrn, datasetTwoUrn, datasetThreeUrn)) { - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 100, 1); assertEquals(upstreamLineage, expectedResult); } @@ -659,26 +690,38 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { siblingMockResult.setRelationships(siblingRelationships); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> siblingMockResult.clone()); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> siblingMockResult.clone()); when(_graphService.getLineage( - Mockito.eq(datasetFourUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockResult.clone()); + Mockito.eq(datasetFourUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockResult.clone()); Siblings primarySibling = new Siblings(); primarySibling.setPrimary(true); primarySibling.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(primarySibling); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(primarySibling); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(false); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetFourUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -700,19 +743,20 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { dataset5Siblings.setPrimary(true); dataset5Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings), - datasetFiveUrn, ImmutableList.of(dataset5Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings), + datasetFiveUrn, ImmutableList.of(dataset5Siblings)); 
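// Why the lineage stubs in this test use ".then(invocation -> siblingMockResult.clone())"
// rather than ".thenReturn(siblingMockResult)": the service merges sibling results and may
// mutate whatever the mock returns, so each invocation should receive a fresh copy. The
// generic shape of such a stub, with "graph" and "template" local to this sketch:
GraphService graph = mock(GraphService.class);
EntityLineageResult template = new EntityLineageResult();
when(graph.getLineage(
        any(Urn.class), any(LineageDirection.class), anyInt(), anyInt(), anyInt(), any(), any()))
    .then(invocation -> template.clone());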
when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will not contain two siblings assertEquals(upstreamLineage, expectedResult); @@ -733,11 +777,19 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { @Test public void testSiblingCombinations() throws URISyntaxException { - Urn primarySiblingUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:dbt,PrimarySibling,PROD)"); - Urn alternateSiblingUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,SecondarySibling,PROD)"); - - Urn upstreamUrn1 = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream1,PROD)"); - Urn upstreamUrn2 = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream2,PROD)"); + Urn primarySiblingUrn = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:dbt,PrimarySibling,PROD)"); + Urn alternateSiblingUrn = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,SecondarySibling,PROD)"); + + Urn upstreamUrn1 = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream1,PROD)"); + Urn upstreamUrn2 = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream2,PROD)"); LineageRelationshipArray alternateDownstreamRelationships = new LineageRelationshipArray(); // Populate sibling service @@ -745,13 +797,15 @@ public void testSiblingCombinations() throws URISyntaxException { primarySiblings.setPrimary(true); primarySiblings.setSiblings(new UrnArray(ImmutableList.of(alternateSiblingUrn))); - when(_mockEntityService.getLatestAspect(primarySiblingUrn, SIBLINGS_ASPECT_NAME)).thenReturn(primarySiblings); + when(_mockEntityService.getLatestAspect(primarySiblingUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(primarySiblings); Siblings secondarySiblings = new Siblings(); secondarySiblings.setPrimary(false); secondarySiblings.setSiblings(new UrnArray(ImmutableList.of(primarySiblingUrn))); - when(_mockEntityService.getLatestAspect(alternateSiblingUrn, SIBLINGS_ASPECT_NAME)).thenReturn(secondarySiblings); + when(_mockEntityService.getLatestAspect(alternateSiblingUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(secondarySiblings); Map<Urn, List<RecordTemplate>> siblingsMap = new HashMap<>(); siblingsMap.put(primarySiblingUrn, ImmutableList.of(primarySiblings)); @@ -760,7 +814,13 @@ public void testSiblingCombinations() throws URISyntaxException { // Create many downstreams of the alternate URN string final int numDownstreams = 42; for (int i = 0; i < numDownstreams; i++) { - Urn downstreamUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Downstream" + i + ",PROD)"); + Urn downstreamUrn = + Urn.createFromString( + "urn:li:" + + datasetType + + ":(urn:li:dataPlatform:snowflake,Downstream" + + i + + ",PROD)"); LineageRelationship relationship = new LineageRelationship(); relationship.setDegree(0); relationship.setType(upstreamOf); @@ -785,9 +845,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockAlternateUpstreamResult.setCount(3); when(_graphService.getLineage( - Mockito.eq(alternateSiblingUrn), 
Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockAlternateUpstreamResult.clone()); + Mockito.eq(alternateSiblingUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockAlternateUpstreamResult.clone()); EntityLineageResult mockAlternateDownstreamResult = new EntityLineageResult(); mockAlternateDownstreamResult.setRelationships(alternateDownstreamRelationships); @@ -796,9 +861,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockAlternateDownstreamResult.setCount(numDownstreams); when(_graphService.getLineage( - Mockito.eq(alternateSiblingUrn), Mockito.eq(LineageDirection.DOWNSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockAlternateDownstreamResult.clone()); + Mockito.eq(alternateSiblingUrn), + Mockito.eq(LineageDirection.DOWNSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockAlternateDownstreamResult.clone()); // Set up mocks for primary sibling LineageRelationshipArray primaryUpstreamRelationships = new LineageRelationshipArray(); @@ -818,9 +888,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockPrimaryUpstreamResult.setCount(2); when(_graphService.getLineage( - Mockito.eq(primarySiblingUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockPrimaryUpstreamResult.clone()); + Mockito.eq(primarySiblingUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockPrimaryUpstreamResult.clone()); LineageRelationshipArray primaryDowntreamRelationships = new LineageRelationshipArray(); LineageRelationship relationship = new LineageRelationship(); @@ -836,26 +911,23 @@ public void testSiblingCombinations() throws URISyntaxException { mockPrimaryDownstreamResult.setCount(1); when(_graphService.getLineage( - Mockito.eq(primarySiblingUrn), Mockito.eq(LineageDirection.DOWNSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockPrimaryDownstreamResult.clone()); - + Mockito.eq(primarySiblingUrn), + Mockito.eq(LineageDirection.DOWNSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockPrimaryDownstreamResult.clone()); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; // Tests for separateSiblings = true: primary sibling - EntityLineageResult primaryDownstreamSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult primaryDownstreamSeparated = + service.getLineage( + primarySiblingUrn, LineageDirection.DOWNSTREAM, 0, 100, 1, true, Set.of(), null, null); LineageRelationshipArray expectedRelationships = new LineageRelationshipArray(); expectedRelationships.add(relationship); @@ -869,16 +941,9 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primaryDownstreamSeparated, expectedResultPrimarySeparated); - EntityLineageResult 
primaryUpstreamSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult primaryUpstreamSeparated = + service.getLineage( + primarySiblingUrn, LineageDirection.UPSTREAM, 0, 100, 1, true, Set.of(), null, null); EntityLineageResult expectedResultPrimaryUpstreamSeparated = new EntityLineageResult(); expectedResultPrimaryUpstreamSeparated.setCount(2); expectedResultPrimaryUpstreamSeparated.setStart(0); @@ -889,16 +954,17 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primaryUpstreamSeparated, expectedResultPrimaryUpstreamSeparated); // Test for separateSiblings = true, secondary sibling - EntityLineageResult secondarySiblingSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult secondarySiblingSeparated = + service.getLineage( + alternateSiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + true, + Set.of(), + null, + null); EntityLineageResult expectedResultSecondarySeparated = new EntityLineageResult(); expectedResultSecondarySeparated.setCount(numDownstreams); @@ -909,16 +975,9 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(secondarySiblingSeparated, expectedResultSecondarySeparated); - EntityLineageResult secondaryUpstreamSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult secondaryUpstreamSeparated = + service.getLineage( + alternateSiblingUrn, LineageDirection.UPSTREAM, 0, 100, 1, true, Set.of(), null, null); EntityLineageResult expectedResultSecondaryUpstreamSeparated = new EntityLineageResult(); expectedResultSecondaryUpstreamSeparated.setCount(3); expectedResultSecondaryUpstreamSeparated.setStart(0); @@ -929,16 +988,17 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(secondaryUpstreamSeparated, expectedResultSecondaryUpstreamSeparated); // Test for separateSiblings = false, primary sibling - EntityLineageResult primarySiblingNonSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null); + EntityLineageResult primarySiblingNonSeparated = + service.getLineage( + primarySiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); EntityLineageResult expectedResultPrimaryNonSeparated = new EntityLineageResult(); expectedResultPrimaryNonSeparated.setCount(numDownstreams); expectedResultPrimaryNonSeparated.setStart(0); @@ -947,17 +1007,17 @@ public void testSiblingCombinations() throws URISyntaxException { expectedResultPrimaryNonSeparated.setRelationships(alternateDownstreamRelationships); assertEquals(primarySiblingNonSeparated, expectedResultPrimaryNonSeparated); - EntityLineageResult primarySiblingNonSeparatedUpstream = service.getLineage( - primarySiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null - ); + EntityLineageResult primarySiblingNonSeparatedUpstream = + service.getLineage( + primarySiblingUrn, + LineageDirection.UPSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); EntityLineageResult expectedResultPrimaryUpstreamNonSeparated = new EntityLineageResult(); expectedResultPrimaryUpstreamNonSeparated.setCount(2); expectedResultPrimaryUpstreamNonSeparated.setStart(0); @@ 
-967,29 +1027,30 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primarySiblingNonSeparatedUpstream, expectedResultPrimaryUpstreamNonSeparated); // Test for separateSiblings = false, secondary sibling - EntityLineageResult secondarySiblingNonSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null); + EntityLineageResult secondarySiblingNonSeparated = + service.getLineage( + alternateSiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); assertEquals(secondarySiblingNonSeparated, expectedResultPrimaryNonSeparated); - EntityLineageResult secondarySiblingNonSeparatedUpstream = service.getLineage( - alternateSiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null - ); + EntityLineageResult secondarySiblingNonSeparatedUpstream = + service.getLineage( + alternateSiblingUrn, + LineageDirection.UPSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); assertEquals(secondarySiblingNonSeparatedUpstream, expectedResultPrimaryUpstreamNonSeparated); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java index 60e63ed001768..c0faf6fdfee6c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.recommendation; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntityUtil; @@ -11,34 +14,56 @@ import java.util.stream.Collectors; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class RecommendationsServiceTest { private final TestSource nonEligibleSource = - new TestSource("not eligible", "nonEligible", RecommendationRenderType.ENTITY_NAME_LIST, false, + new TestSource( + "not eligible", + "nonEligible", + RecommendationRenderType.ENTITY_NAME_LIST, + false, getContentFromString(ImmutableList.of("test"))); private final TestSource emptySource = - new TestSource("empty", "empty", RecommendationRenderType.ENTITY_NAME_LIST, true, ImmutableList.of()); + new TestSource( + "empty", "empty", RecommendationRenderType.ENTITY_NAME_LIST, true, ImmutableList.of()); private final TestSource valuesSource = - new TestSource("values", "values", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "values", + "values", + RecommendationRenderType.ENTITY_NAME_LIST, + true, getContentFromString(ImmutableList.of("test"))); private final TestSource multiValuesSource = - new TestSource("multiValues", "multiValues", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "multiValues", + "multiValues", + RecommendationRenderType.ENTITY_NAME_LIST, + true, getContentFromString(ImmutableList.of("test1", "test2", "test3", "test4"))); private final TestSource urnsSource = - new TestSource("urns", "urns", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "urns", + "urns", + RecommendationRenderType.ENTITY_NAME_LIST, + true, 
getContentFromUrns(ImmutableList.of(TestEntityUtil.getTestEntityUrn()))); private final TestSource multiUrnsSource = - new TestSource("multiUrns", "multiUrns", RecommendationRenderType.ENTITY_NAME_LIST, true, - getContentFromUrns(ImmutableList.of(TestEntityUtil.getTestEntityUrn(), TestEntityUtil.getTestEntityUrn(), - TestEntityUtil.getTestEntityUrn()))); + new TestSource( + "multiUrns", + "multiUrns", + RecommendationRenderType.ENTITY_NAME_LIST, + true, + getContentFromUrns( + ImmutableList.of( + TestEntityUtil.getTestEntityUrn(), + TestEntityUtil.getTestEntityUrn(), + TestEntityUtil.getTestEntityUrn()))); private final RecommendationModuleRanker ranker = new SimpleRecommendationRanker(); private List<RecommendationContent> getContentFromString(List<String> values) { - return values.stream().map(value -> new RecommendationContent().setValue(value)).collect(Collectors.toList()); + return values.stream() + .map(value -> new RecommendationContent().setValue(value)) + .collect(Collectors.toList()); } private List<RecommendationContent> getContentFromUrns(List<Urn> urns) { @@ -50,15 +75,24 @@ private List<RecommendationContent> getContentFromUrns(List<Urn> urns) { @Test public void testService() throws URISyntaxException { // Test non-eligible and empty - RecommendationsService service = new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource), ranker); - List<RecommendationModule> result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + RecommendationsService service = + new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource), ranker); + List<RecommendationModule> result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertTrue(result.isEmpty()); // Test empty with one valid source - service = new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource, valuesSource), ranker); - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + service = + new RecommendationsService( + ImmutableList.of(nonEligibleSource, emptySource, valuesSource), ranker); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertEquals(result.size(), 1); RecommendationModule module = result.get(0); assertEquals(module.getTitle(), "values"); @@ -67,10 +101,14 @@ public void testService() throws URISyntaxException { assertEquals(module.getContent(), valuesSource.getContents()); // Test multiple sources - service = new RecommendationsService(ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), - ranker); - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + service = + new RecommendationsService( + ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), ranker); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertEquals(result.size(), 4); module = result.get(0); assertEquals(module.getTitle(), "values"); @@ -94,8 +132,11 @@ public void testService() throws URISyntaxException { 
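// Usage shape of the API this hunk reflows: listRecommendations caps the module list at the
// requested limit, with ordering decided by the injected ranker (the test's
// SimpleRecommendationRanker appears to preserve source order). The sources are the fixtures
// declared at the top of this class; Urn.createFromString can throw URISyntaxException.
RecommendationsService svc =
    new RecommendationsService(
        ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), ranker);
List<RecommendationModule> topTwo =
    svc.listRecommendations(
        Urn.createFromString("urn:li:corpuser:me"),
        new RecommendationRequestContext().setScenario(ScenarioType.HOME),
        2);
// topTwo.size() == 2, and topTwo.get(0).getTitle() is "values"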
assertEquals(module.getContent(), multiUrnsSource.getContents()); // Test limit - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 2); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 2); assertEquals(result.size(), 2); module = result.get(0); assertEquals(module.getTitle(), "values"); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java index 0dc517eaf0d1c..dcc59d0632954 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java @@ -1,5 +1,13 @@ package com.linkedin.metadata.recommendation.candidatesource; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; @@ -19,15 +27,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.eq; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - - public class EntitySearchAggregationCandidateSourceTest { private EntitySearchService _entitySearchService = Mockito.mock(EntitySearchService.class); private EntitySearchAggregationSource _valueBasedCandidateSource; @@ -44,7 +43,8 @@ public void setup() { _urnBasedCandidateSource = buildCandidateSource("testUrn", true); } - private EntitySearchAggregationSource buildCandidateSource(String identifier, boolean isValueUrn) { + private EntitySearchAggregationSource buildCandidateSource( + String identifier, boolean isValueUrn) { return new EntitySearchAggregationSource(_entitySearchService) { @Override protected String getSearchFieldName() { @@ -77,7 +77,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return true; } }; @@ -85,9 +86,11 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Test public void testWhenSearchServiceReturnsEmpty() { - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(Collections.emptyMap()); - List<RecommendationContent> candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + 
_valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertTrue(candidates.isEmpty()); assertFalse(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); } @@ -95,9 +98,11 @@ public void testWhenSearchServiceReturnsEmpty() { @Test public void testWhenSearchServiceReturnsValueResults() { // One result - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L)); - List<RecommendationContent> candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), "value1"); @@ -107,14 +112,16 @@ public void testWhenSearchServiceReturnsValueResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value1")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); assertTrue(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); // Multiple result - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L, "value2", 2L, "value3", 3L)); candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 2); @@ -126,7 +133,8 @@ public void testWhenSearchServiceReturnsValueResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value3")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 3L); @@ -138,7 +146,8 @@ public void testWhenSearchServiceReturnsValueResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value2")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); @@ -153,7 +162,8 @@ public void testWhenSearchServiceReturnsUrnResults() { Urn testUrn3 = new TestEntityUrn("testUrn3", "testUrn3", "testUrn3"); Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L)); - List<RecommendationContent> candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + 
_urnBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), testUrn1.toString()); @@ -163,7 +173,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn1.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); @@ -171,7 +182,9 @@ public void testWhenSearchServiceReturnsUrnResults() { // Multiple result Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) - .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); + .thenReturn( + ImmutableMap.of( + testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 2); content = candidates.get(0); @@ -182,7 +195,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn3.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 3L); @@ -194,7 +208,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn2.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java index f5c3569821e00..3998e45195b25 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java @@ -14,13 +14,12 @@ public class RecommendationUtilsTest { private void testIsSupportedEntityType() { Urn testUrn = UrnUtils.getUrn("urn:li:corpuser:john"); Assert.assertTrue( - RecommendationUtils.isSupportedEntityType(testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME, Constants.CORP_USER_ENTITY_NAME)) - ); + RecommendationUtils.isSupportedEntityType( + testUrn, + ImmutableSet.of(Constants.DATASET_ENTITY_NAME, Constants.CORP_USER_ENTITY_NAME))); Assert.assertFalse( - RecommendationUtils.isSupportedEntityType(testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME)) - ); - Assert.assertFalse( - 
RecommendationUtils.isSupportedEntityType(testUrn, Collections.emptySet()) - ); + RecommendationUtils.isSupportedEntityType( + testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME))); + Assert.assertFalse(RecommendationUtils.isSupportedEntityType(testUrn, Collections.emptySet())); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java index 31672b6aa885f..666deb2c419d7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java @@ -9,7 +9,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; - @Getter @RequiredArgsConstructor public class TestSource implements RecommendationSource { @@ -36,13 +35,14 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return eligible; } @Override - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return contents; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java index 1757883f1a5a9..57fa51ffbdd90 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java @@ -1,32 +1,34 @@ package com.linkedin.metadata.search; -import java.time.temporal.ChronoUnit; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertNotSame; +import java.time.temporal.ChronoUnit; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; public class LineageSearchResultCacheKeyTest extends AbstractTestNGSpringContextTests { @Test public void testNulls() { // ensure no NPE - assertEquals(new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS)); + assertEquals( + new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS)); } @Test public void testDateTruncation() { // expect start of day milli - assertEquals(new EntityLineageResultCacheKey(null, null, 1679529600000L, - 1679615999999L, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, 1679530293000L, - 1679530293001L, null, ChronoUnit.DAYS)); - assertNotSame(new EntityLineageResultCacheKey(null, null, 1679529600000L, - 1679616000000L, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, 1679530293000L, - 1679530293001L, null, ChronoUnit.DAYS)); + assertEquals( + new EntityLineageResultCacheKey( + null, null, 1679529600000L, 1679615999999L, 
null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey( + null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); + assertNotSame( + new EntityLineageResultCacheKey( + null, null, 1679529600000L, 1679616000000L, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey( + null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java index 696e3b62834bd..079ec08462515 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java @@ -1,5 +1,22 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -46,11 +63,21 @@ import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import org.opensearch.client.RestHighLevelClient; import org.opensearch.action.search.SearchRequest; +import org.opensearch.client.RestHighLevelClient; import org.springframework.cache.CacheManager; import org.springframework.cache.concurrent.ConcurrentMapCacheManager; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; @@ -58,50 +85,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anySet; -import static 
org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - -abstract public class LineageServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class LineageServiceTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -116,11 +115,13 @@ abstract public class LineageServiceTestBase extends AbstractTestNGSpringContext private static final Urn TEST_URN = TestEntityUtil.getTestEntityUrn(); private static final String TEST = "test"; private static final String TEST1 = "test1"; - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); @BeforeClass public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -137,20 +138,29 @@ public void setup() { } private void resetService(boolean withCache, boolean withLightingCache) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); entityDocCountCacheConfiguration.setTtlSeconds(600L); - SearchLineageCacheConfiguration searchLineageCacheConfiguration = new SearchLineageCacheConfiguration(); + SearchLineageCacheConfiguration searchLineageCacheConfiguration = + new SearchLineageCacheConfiguration(); searchLineageCacheConfiguration.setTtlSeconds(600L); searchLineageCacheConfiguration.setLightningThreshold(withLightingCache ? 
-1 : 300); - _lineageSearchService = spy(new LineageSearchService( - new SearchService( - new EntityDocCountCache(_entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), - cachingEntitySearchService, - new SimpleRanker()), - _graphService, _cacheManager.getCache("test"), withCache, searchLineageCacheConfiguration)); + _lineageSearchService = + spy( + new LineageSearchService( + new SearchService( + new EntityDocCountCache( + _entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), + cachingEntitySearchService, + new SimpleRanker()), + _graphService, + _cacheManager.getCache("test"), + withCache, + searchLineageCacheConfiguration)); } @BeforeMethod @@ -163,13 +173,27 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildEntitySearchService() { EntityIndexBuilders indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention, _settingsBuilder); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); _searchClientSpy = spy(getSearchClient()); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, _searchClientSpy, _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, _searchClientSpy, _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); - ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, _searchClientSpy, _indexConvention, getBulkProcessor(), 1); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + _searchClientSpy, + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + _searchClientSpy, + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); + ESWriteDAO writeDAO = + new ESWriteDAO(_entityRegistry, _searchClientSpy, _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); } @@ -179,7 +203,8 @@ private void clearCache(boolean withLightingCache) { } private EntityLineageResult mockResult(List<LineageRelationship> lineageRelationships) { - return new EntityLineageResult().setRelationships(new LineageRelationshipArray(lineageRelationships)) + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageRelationships)) .setStart(0) .setCount(10) .setTotal(lineageRelationships.size()); @@ -187,18 +212,34 @@ private EntityLineageResult mockResult(List<LineageRelationship> lineageRelation @Test public void testSearchService() throws Exception { - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageSearchResult searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new 
LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); - //just testing null input does not throw any exception + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + // just testing null input does not throw any exception searchAcrossLineage(null, null); searchResult = searchAcrossLineage(null, TEST); @@ -216,16 +257,32 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); @@ -255,15 +312,25 @@ public void testSearchService() throws Exception { assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); // Verify that highlighting was turned off in the query - ArgumentCaptor<SearchRequest> searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); + ArgumentCaptor<SearchRequest> searchRequestCaptor = + ArgumentCaptor.forClass(SearchRequest.class); Mockito.verify(_searchClientSpy, times(1)).search(searchRequestCaptor.capture(), any()); SearchRequest capturedRequest = searchRequestCaptor.getValue(); assertNull(capturedRequest.source().highlighter()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); @@ -273,55 +340,136 @@ public void testSearchService() throws Exception { Mockito.reset(_graphService); // Case 1: Use the maxHops in the cache. 
- when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", 1000, null, null, 0, 10, null, null, - new SearchFlags().setSkipCache(false)); + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + 1000, + null, + null, + 0, + 10, + null, + null, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); // Case 2: Use the start and end time in the cache. 
- when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), "test1", - null, null, null, 0, 10, 0L, 1L, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + "test1", + null, + null, + null, + 0, + 10, + 0L, + 1L, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", null, null, null, 0, 10, 0L, 1L, - new SearchFlags().setSkipCache(false)); + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + null, + null, + null, + 0, + 10, + 0L, + 1L, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); clearCache(false); @@ -330,19 +478,28 @@ public void testSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); - } @Test public void testScrollAcrossLineage() throws Exception { - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageScrollResult scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); assertNull(scrollResult.getScrollId()); @@ -351,9 +508,18 @@ public void 
testScrollAcrossLineage() throws Exception { assertNull(scrollResult.getScrollId()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); // just testing null input does not throw any exception scrollAcrossLineage(null, null); @@ -374,17 +540,33 @@ public void testScrollAcrossLineage() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); assertEquals(scrollResult.getEntities().size(), 0); assertNull(scrollResult.getScrollId()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 1); assertEquals(scrollResult.getEntities().get(0).getEntity(), urn); @@ -407,9 +589,12 @@ public void testScrollAcrossLineage() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); @@ -426,15 +611,31 @@ public void testLightningSearchService() throws Exception { // Enable lightning resetService(true, true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageSearchResult searchResult = searchAcrossLineage(null, testStar); 
assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); clearCache(true); @@ -448,32 +649,51 @@ public void testLightningSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(0).getDegree().intValue(), 1); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); searchResult = searchAcrossLineage(QueryUtils.newFilter("degree.keyword", "1"), testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(0).getDegree().intValue(), 1); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); searchResult = searchAcrossLineage(QueryUtils.newFilter("degree.keyword", "2"), testStar); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); // resets spy Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -488,16 +708,27 @@ 
public void testLightningSearchService() throws Exception { searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().size(), 1); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); // Test Cache Behavior @@ -505,59 +736,144 @@ public void testLightningSearchService() throws Exception { reset(_lineageSearchService); // Case 1: Use the maxHops in the cache. - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, - new SearchFlags().setSkipCache(false)); + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new 
SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); - + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Case 2: Use the start and end time in the cache. - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), "*", - null, null, null, 0, 10, 0L, 1L, - new SearchFlags().setSkipCache(false)); + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + "*", + null, + null, + null, + 0, + 10, + 0L, + 1L, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", null, null, null, 0, 10, 0L, 1L, + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + null, + null, + null, + 0, + 10, + 0L, + 1L, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(4)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(4)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); /* * Test filtering @@ -566,70 +882,163 @@ public void testLightningSearchService() throws Exception { // Entity searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + 
TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(DATASET_ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(DATASET_ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); // Platform ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue("urn:li:dataPlatform:kafka").setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion() + .setField("platform") + .setValue("urn:li:dataPlatform:kafka") + .setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion degreeCrit = new Criterion().setField("degree.keyword").setValue("2").setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(degreeCrit)))); + Criterion degreeCrit = + new Criterion().setField("degree.keyword").setValue("2").setCondition(Condition.EQUAL); + conCritArr.add( + new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(degreeCrit)))); Filter filter = new Filter().setOr(conCritArr); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, filter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, filter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + 
LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(4)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(4)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); // Environment Filter originFilter = QueryUtils.newFilter("origin", "PROD"); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, originFilter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + originFilter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(5)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(5)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, originFilter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + originFilter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(6)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(6)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); @@ -640,13 +1049,15 @@ public void testLightningSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); - } @Test @@ -660,11 +1071,13 @@ public void testLightningEnvFiltering() throws Exception { 
platformCounts.put(kafkaPlatform, 200); platformCounts.put(hivePlatform, 50); platformCounts.put(bigQueryPlatform, 100); - List<LineageRelationship> prodLineageRelationships = constructGraph(platformCounts, FabricType.PROD); + List<LineageRelationship> prodLineageRelationships = + constructGraph(platformCounts, FabricType.PROD); // DEV platformCounts.put(kafkaPlatform, 300); - List<LineageRelationship> devLineageRelationships = constructGraph(platformCounts, FabricType.DEV); + List<LineageRelationship> devLineageRelationships = + constructGraph(platformCounts, FabricType.DEV); List<LineageRelationship> lineageRelationships = new ArrayList<>(); lineageRelationships.addAll(prodLineageRelationships); @@ -675,49 +1088,67 @@ public void testLightningEnvFiltering() throws Exception { int size = 10; Set<String> entityNames = Collections.emptySet(); - LineageSearchResult lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + LineageSearchResult lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); // assert that we have the right aggs per env - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("DEV")).findFirst().get(), Long.valueOf(300)); - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("PROD")).findFirst().get(), Long.valueOf(200)); + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("DEV")) + .findFirst() + .get(), + Long.valueOf(300)); + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("PROD")) + .findFirst() + .get(), + Long.valueOf(200)); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion originCrit = new Criterion().setField("origin").setValue("DEV").setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); + Criterion originCrit = + new Criterion().setField("origin").setValue("DEV").setCondition(Condition.EQUAL); + conCritArr.add( + new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); from = 500; size = 10; filter = new 
Filter().setOr(conCritArr); - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); // assert that if the query has an env filter, it is applied correctly - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("DEV")).findFirst().get(), Long.valueOf(300)); - assertTrue(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") && x.getAggregations().containsKey("PROD")) - .collect(Collectors.toList()).isEmpty()); - - + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("DEV")) + .findFirst() + .get(), + Long.valueOf(300)); + assertTrue( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin") && x.getAggregations().containsKey("PROD")) + .collect(Collectors.toList()) + .isEmpty()); } - @Test public void testLightningPagination() throws Exception { Map<String, Integer> platformCounts = new HashMap<>(); @@ -731,35 +1162,41 @@ public void testLightningPagination() throws Exception { List<LineageRelationship> lineageRelationships = constructGraph(platformCounts); - Filter filter = QueryUtils.newFilter("platform", kafkaPlatform); int from = 0; int size = 10; Set<String> entityNames = Collections.emptySet(); - LineageSearchResult lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + LineageSearchResult lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); from = 50; size = 20; - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 20); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name50"); - + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name50"); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); - Criterion 
platform2Crit = new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform2Crit = + new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); critArr = new CriterionArray(ImmutableList.of(platform2Crit)); @@ -768,25 +1205,31 @@ public void testLightningPagination() throws Exception { from = 500; size = 10; filter = new Filter().setOr(conCritArr); - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(600)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), hivePlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); - + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), hivePlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); // Verify aggregations from = 0; size = 10; - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - null, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, null, from, size, entityNames); // Static Degree agg is the first element - LongMap platformAggs = lineageSearchResult.getMetadata().getAggregations().get(1).getAggregations(); - LongMap entityTypeAggs = lineageSearchResult.getMetadata().getAggregations().get(2).getAggregations(); - LongMap environmentAggs = lineageSearchResult.getMetadata().getAggregations().get(3).getAggregations(); + LongMap platformAggs = + lineageSearchResult.getMetadata().getAggregations().get(1).getAggregations(); + LongMap entityTypeAggs = + lineageSearchResult.getMetadata().getAggregations().get(2).getAggregations(); + LongMap environmentAggs = + lineageSearchResult.getMetadata().getAggregations().get(3).getAggregations(); assertEquals(platformAggs.get(kafkaPlatform), Long.valueOf(500)); assertEquals(platformAggs.get(hivePlatform), Long.valueOf(100)); assertEquals(platformAggs.get(bigQueryPlatform), Long.valueOf(200)); @@ -798,18 +1241,21 @@ private List<LineageRelationship> constructGraph(Map<String, Integer> platformCo return constructGraph(platformCounts, FabricType.PROD); } - private List<LineageRelationship> constructGraph(Map<String, Integer> platformCounts, final FabricType env) { + private List<LineageRelationship> constructGraph( + Map<String, Integer> platformCounts, final FabricType env) { List<LineageRelationship> lineageRelationships = new ArrayList<>(); - platformCounts.forEach((key, value) -> { - for (int i = 0; i < value; i++) { - try { - lineageRelationships.add( - constructLineageRelationship(new DatasetUrn(DataPlatformUrn.createFromString(key), "name" + i, env))); - } catch (URISyntaxException e) { - throw new RuntimeException(e); + platformCounts.forEach( + (key, value) -> { + for (int i = 0; i < value; i++) { + try { + lineageRelationships.add( + 
constructLineageRelationship( + new DatasetUrn(DataPlatformUrn.createFromString(key), "name" + i, env))); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } } - } - }); + }); return lineageRelationships; } @@ -820,19 +1266,40 @@ private LineageRelationship constructLineageRelationship(Urn urn) { .setType("DOWNSTREAM") .setDegree(1) .setPaths(new UrnArrayArray()); - } // Convenience method to reduce spots where we're sending the same params private LineageSearchResult searchAcrossLineage(@Nullable Filter filter, @Nullable String input) { - return _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), input, - null, filter, null, 0, 10, null, null, + return _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + input, + null, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(true)); } - private LineageScrollResult scrollAcrossLineage(@Nullable Filter filter, @Nullable String input, String scrollId, int size) { - return _lineageSearchService.scrollAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), input, - null, filter, null, scrollId, "5m", size, null, null, + private LineageScrollResult scrollAcrossLineage( + @Nullable Filter filter, @Nullable String input, String scrollId, int size) { + return _lineageSearchService.scrollAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + input, + null, + filter, + null, + scrollId, + "5m", + size, + null, + null, new SearchFlags().setSkipCache(true)); } @@ -851,29 +1318,39 @@ public void testCanDoLightning() throws Exception { platformCounts.put(hivePlatform, 100); platformCounts.put(bigQueryPlatform, 200); - List<LineageRelationship> lineageRelationships = constructGraph(platformCounts, FabricType.PROD); + List<LineageRelationship> lineageRelationships = + constructGraph(platformCounts, FabricType.PROD); Filter filter = QueryUtils.newFilter("platform", kafkaPlatform); int from = 0; int size = 10; Set<String> entityNames = Collections.emptySet(); - Assert.assertTrue(_lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); + Assert.assertTrue( + _lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); - Criterion platform2Crit = new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform2Crit = + new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); critArr = new CriterionArray(ImmutableList.of(platform2Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion originCrit = new Criterion().setField("origin").setValue(FabricType.PROD.name()).setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); + Criterion originCrit = + new Criterion() + .setField("origin") + .setValue(FabricType.PROD.name()) + .setCondition(Condition.EQUAL); + conCritArr.add( + new 
ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); from = 500; size = 10; filter = new Filter().setOr(conCritArr); - Assert.assertTrue(_lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); + Assert.assertTrue( + _lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java index c0144d36843f5..71f35adabce36 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -32,6 +36,7 @@ import com.linkedin.metadata.search.ranker.SimpleRanker; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.cache.CacheManager; import org.springframework.cache.concurrent.ConcurrentMapCacheManager; @@ -40,29 +45,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - - -abstract public class SearchServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class SearchServiceTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -85,18 +83,18 @@ public void setup() { } private void resetSearchService() { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - _cacheManager, - _elasticSearchService, - 100, - true); + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); entityDocCountCacheConfiguration.setTtlSeconds(600L); - _searchService = new 
SearchService( - new EntityDocCountCache(_entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), - cachingEntitySearchService, - new SimpleRanker()); + _searchService = + new SearchService( + new EntityDocCountCache( + _entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), + cachingEntitySearchService, + new SimpleRanker()); } @BeforeMethod @@ -108,13 +106,26 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildEntitySearchService() { EntityIndexBuilders indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention, _settingsBuilder); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, getSearchClient(), _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); - ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, - getBulkProcessor(), 1); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); + ESWriteDAO writeDAO = + new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); } @@ -126,11 +137,18 @@ private void clearCache() { @Test public void testSearchService() throws Exception { SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true).setSkipCache(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(true).setSkipCache(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -143,8 +161,9 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -158,8 +177,9 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "'test2'", null, - null, 
0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); clearCache(); @@ -170,37 +190,46 @@ public void testSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "'test2'", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); } @Test public void testAdvancedSearchOr() throws Exception { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Criterion subtypeCriterion = new Criterion() - .setField("subtypes") - .setCondition(Condition.EQUAL) - .setValue("") - .setValues(new StringArray(ImmutableList.of("view"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(subtypeCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Criterion subtypeCriterion = + new Criterion() + .setField("subtypes") + .setCondition(Condition.EQUAL) + .setValue("") + .setValues(new StringArray(ImmutableList.of("view"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))), + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(subtypeCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -237,8 +266,15 @@ public void testAdvancedSearchOr() throws Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 2); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(1).getEntity(), urn2); @@ -247,28 +283,38 @@ public void testAdvancedSearchOr() throws Exception { @Test public void testAdvancedSearchSoftDelete() throws Exception { - final Criterion filterCriterion = new Criterion() - 
.setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Criterion removedCriterion = new Criterion() - .setField("removed") - .setCondition(Condition.EQUAL) - .setValue("") - .setValues(new StringArray(ImmutableList.of("true"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion, removedCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Criterion removedCriterion = + new Criterion() + .setField("removed") + .setCondition(Condition.EQUAL) + .setValue("") + .setValues(new StringArray(ImmutableList.of("true"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(filterCriterion, removedCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -308,8 +354,15 @@ public void testAdvancedSearchSoftDelete() throws Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -317,23 +370,30 @@ public void testAdvancedSearchSoftDelete() throws Exception { @Test public void testAdvancedSearchNegated() throws Exception { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setNegated(true) + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -373,8 +433,15 @@ public void testAdvancedSearchNegated() throws 
Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn3); clearCache(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java index a4c359b3595c2..b544faa061f0e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -23,6 +27,8 @@ import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import java.util.List; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; @@ -30,29 +36,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.List; - -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - -abstract public class TestEntityTestBase extends AbstractTestNGSpringContextTests { +public abstract class TestEntityTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -72,7 +71,8 @@ public void setup() { @BeforeClass public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -84,10 +84,24 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildService() { EntityIndexBuilders 
indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, getSearchClient(), _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); @@ -95,12 +109,18 @@ private ElasticSearchService buildService() { @Test public void testElasticSearchServiceStructuredQuery() throws Exception { - SearchResult searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + SearchResult searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); BrowseResult browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); ObjectNode document = JsonNodeFactory.instance.objectNode(); @@ -112,10 +132,20 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "foreignKey:Node", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), + "foreignKey:Node", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); @@ -125,7 +155,9 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); 
assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -137,7 +169,9 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); @@ -148,23 +182,33 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); } @Test public void testElasticSearchServiceFulltext() throws Exception { - SearchResult searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + SearchResult searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); @@ -177,13 +221,17 @@ public void testElasticSearchServiceFulltext() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new 
SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), - ImmutableMap.of("textFieldOverride", 1L)); + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); ObjectNode document2 = JsonNodeFactory.instance.objectNode(); @@ -194,21 +242,31 @@ public void testElasticSearchServiceFulltext() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), - ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java index 354b7dc5f609e..175c48e198185 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.cache; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.Streams; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; @@ -18,18 +21,19 @@ import org.springframework.cache.concurrent.ConcurrentMapCacheManager; import org.testng.annotations.Test; -import static 
org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class CacheableSearcherTest { private CacheManager cacheManager = new ConcurrentMapCacheManager(); @Test public void testCacheableSearcherWhenEmpty() { CacheableSearcher<Integer> emptySearcher = - new CacheableSearcher<>(cacheManager.getCache("emptySearcher"), 10, this::getEmptySearchResult, - CacheableSearcher.QueryPagination::getFrom, null, true); + new CacheableSearcher<>( + cacheManager.getCache("emptySearcher"), + 10, + this::getEmptySearchResult, + CacheableSearcher.QueryPagination::getFrom, + null, + true); assertTrue(emptySearcher.getSearchResults(0, 0).getEntities().isEmpty()); assertTrue(emptySearcher.getSearchResults(0, 10).getEntities().isEmpty()); assertTrue(emptySearcher.getSearchResults(5, 10).getEntities().isEmpty()); @@ -38,8 +42,13 @@ public void testCacheableSearcherWhenEmpty() { @Test public void testCacheableSearcherWithFixedNumResults() { CacheableSearcher<Integer> fixedBatchSearcher = - new CacheableSearcher<>(cacheManager.getCache("fixedBatchSearcher"), 10, qs -> getSearchResult(qs, 10), - CacheableSearcher.QueryPagination::getFrom, null, true); + new CacheableSearcher<>( + cacheManager.getCache("fixedBatchSearcher"), + 10, + qs -> getSearchResult(qs, 10), + CacheableSearcher.QueryPagination::getFrom, + null, + true); SearchResult result = fixedBatchSearcher.getSearchResults(0, 0); assertTrue(result.getEntities().isEmpty()); @@ -48,21 +57,28 @@ public void testCacheableSearcherWithFixedNumResults() { result = fixedBatchSearcher.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); result = fixedBatchSearcher.getSearchResults(5, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()) + .collect(Collectors.toList())); } @Test public void testCacheableSearcherWithVariableNumResults() { CacheableSearcher<Integer> variableBatchSearcher = - new CacheableSearcher<>(cacheManager.getCache("variableBatchSearcher"), 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, null, + new CacheableSearcher<>( + cacheManager.getCache("variableBatchSearcher"), + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, true); SearchResult result = variableBatchSearcher.getSearchResults(0, 0); @@ -72,21 +88,30 @@ public void testCacheableSearcherWithVariableNumResults() { result = variableBatchSearcher.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); result = 
variableBatchSearcher.getSearchResults(5, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()) + .collect(Collectors.toList())); result = variableBatchSearcher.getSearchResults(5, 100); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 100); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 20).stream(), getUrns(0, 30).stream(), - getUrns(0, 40).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat( + getUrns(5, 10).stream(), + getUrns(0, 20).stream(), + getUrns(0, 30).stream(), + getUrns(0, 40).stream(), + getUrns(0, 5).stream()) + .collect(Collectors.toList())); } @Test @@ -94,26 +119,36 @@ public void testCacheableSearcherEnabled() { // Verify cache is not interacted with when cache disabled Cache mockCache = Mockito.mock(Cache.class); CacheableSearcher<Integer> cacheDisabled = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, null, + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, false); SearchResult result = cacheDisabled.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verifyNoInteractions(mockCache); Mockito.reset(mockCache); // Verify cache is updated when cache enabled, but skip cache passed through CacheableSearcher<Integer> skipCache = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(true), true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + new SearchFlags().setSkipCache(true), + true); result = skipCache.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(0)).get(Mockito.any(), Mockito.any(Class.class)); @@ -121,13 +156,18 @@ public void testCacheableSearcherEnabled() { // Test cache hit when searchFlags is null CacheableSearcher<Integer> nullFlags = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, 
qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - null, true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, + true); result = nullFlags.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(1)).get(Mockito.any(), Mockito.any(Class.class)); @@ -135,20 +175,26 @@ public void testCacheableSearcherEnabled() { // Test cache hit when skipCache is false CacheableSearcher<Integer> useCache = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(false), true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + new SearchFlags().setSkipCache(false), + true); result = useCache.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(1)).get(Mockito.any(), Mockito.any(Class.class)); } private SearchResult getEmptySearchResult(CacheableSearcher.QueryPagination queryPagination) { - return new SearchResult().setEntities(new SearchEntityArray()) + return new SearchResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setFrom(queryPagination.getFrom()) .setPageSize(queryPagination.getSize()) @@ -161,11 +207,15 @@ private List<Urn> getUrns(int start, int end) { .collect(Collectors.toList()); } - private SearchResult getSearchResult(CacheableSearcher.QueryPagination queryPagination, int batchSize) { + private SearchResult getSearchResult( + CacheableSearcher.QueryPagination queryPagination, int batchSize) { assert (batchSize <= queryPagination.getSize()); List<SearchEntity> entities = - getUrns(0, batchSize).stream().map(urn -> new SearchEntity().setEntity(urn)).collect(Collectors.toList()); - return new SearchResult().setEntities(new SearchEntityArray(entities)) + getUrns(0, batchSize).stream() + .map(urn -> new SearchEntity().setEntity(urn)) + .collect(Collectors.toList()); + return new SearchResult() + .setEntities(new SearchEntityArray(entities)) .setNumEntities(1000) .setFrom(queryPagination.getFrom()) .setPageSize(queryPagination.getSize()) diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java index 750423a024dcc..0810bbc9d19f8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java @@ -7,26 +7,26 @@ import 
org.testcontainers.containers.GenericContainer; import org.testng.annotations.AfterSuite; - @TestConfiguration public class ElasticSearchSuite extends AbstractTestNGSpringContextTests { - private static final ElasticsearchTestContainer ELASTICSEARCH_TEST_CONTAINER; - private static GenericContainer<?> container; - static { - ELASTICSEARCH_TEST_CONTAINER = new ElasticsearchTestContainer(); - } + private static final ElasticsearchTestContainer ELASTICSEARCH_TEST_CONTAINER; + private static GenericContainer<?> container; - @AfterSuite - public void after() { - ELASTICSEARCH_TEST_CONTAINER.stopContainer(); - } + static { + ELASTICSEARCH_TEST_CONTAINER = new ElasticsearchTestContainer(); + } + + @AfterSuite + public void after() { + ELASTICSEARCH_TEST_CONTAINER.stopContainer(); + } - @Bean(name = "testSearchContainer") - public GenericContainer<?> testSearchContainer() { - if (container == null) { - container = ELASTICSEARCH_TEST_CONTAINER.startContainer(); - } - return container; + @Bean(name = "testSearchContainer") + public GenericContainer<?> testSearchContainer() { + if (container == null) { + container = ELASTICSEARCH_TEST_CONTAINER.startContainer(); } + return container; + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java index cfacd4c15409a..ea5b9a74b476e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java @@ -1,9 +1,11 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.search.fixtures.GoldenTestBase; -import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.search.fixtures.GoldenTestBase; +import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; @@ -11,34 +13,35 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class GoldenElasticSearchTest extends GoldenTestBase { - @Autowired - @Qualifier("longTailSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; - - - @NotNull - @Override - protected EntityRegistry getEntityRegistry() { - return entityRegistry; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - assertNotNull(searchService); - } + @Autowired + @Qualifier("longTailSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; + + @NotNull + @Override + protected EntityRegistry getEntityRegistry() { + return 
entityRegistry; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + assertNotNull(searchService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java index 20f4ee52f0e62..911a21767bdea 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.search.indexbuilder.IndexBuilderTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; @@ -8,23 +10,19 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class IndexBuilderElasticSearchTest extends IndexBuilderTestBase { - @Autowired - private RestHighLevelClient _searchClient; + @Autowired private RestHighLevelClient _searchClient; - @NotNull - @Override - protected RestHighLevelClient getSearchClient() { - return _searchClient; - } + @NotNull + @Override + protected RestHighLevelClient getSearchClient() { + return _searchClient; + } - @Test - public void initTest() { - assertNotNull(_searchClient); - } + @Test + public void initTest() { + assertNotNull(_searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java index 0cb49bc555421..1fed3380a342d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; -import io.datahubproject.test.fixtures.search.SearchLineageFixtureConfiguration; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; +import io.datahubproject.test.fixtures.search.SearchLineageFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; @@ -12,32 +12,35 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({ElasticSearchSuite.class, SearchLineageFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchLineageFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageDataFixtureElasticSearchTest extends LineageDataFixtureTestBase { - @Autowired - @Qualifier("searchLineageSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("searchLineageLineageSearchService") - protected 
LineageSearchService lineageService; - - @NotNull - @Override - protected LineageSearchService getLineageService() { - return lineageService; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - AssertJUnit.assertNotNull(lineageService); - } + @Autowired + @Qualifier("searchLineageSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("searchLineageLineageSearchService") + protected LineageSearchService lineageService; + + @NotNull + @Override + protected LineageSearchService getLineageService() { + return lineageService; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + AssertJUnit.assertNotNull(lineageService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java index 613ec5a26ff66..8c4195f9ff534 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.search.elasticsearch; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.search.LineageServiceTestBase; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import com.linkedin.metadata.search.LineageServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; @@ -14,20 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageServiceElasticSearchTest extends LineageServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java index 855f46d239118..eea352a866042 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java @@ -1,11 +1,12 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + 
import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.SampleDataFixtureTestBase; import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; - import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import lombok.Getter; import org.opensearch.client.RestHighLevelClient; @@ -14,32 +15,30 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - -/** - * Runs sample data fixture tests for Elasticsearch test container - */ +/** Runs sample data fixture tests for Elasticsearch test container */ @Getter -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SampleDataFixtureElasticSearchTest extends SampleDataFixtureTestBase { - @Autowired - private RestHighLevelClient searchClient; + @Autowired private RestHighLevelClient searchClient; - @Autowired - @Qualifier("sampleDataSearchService") - protected SearchService searchService; + @Autowired + @Qualifier("sampleDataSearchService") + protected SearchService searchService; - @Autowired - @Qualifier("sampleDataEntityClient") - protected EntityClient entityClient; + @Autowired + @Qualifier("sampleDataEntityClient") + protected EntityClient entityClient; - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; - @Test - public void initTest() { - assertNotNull(searchClient); - } + @Test + public void initTest() { + assertNotNull(searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java index 1a6a20cd9df9d..e5af1978be5d2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java @@ -1,29 +1,29 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.search.query.SearchDAOTestBase; -import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; - +import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import lombok.Getter; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Import; - import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Getter -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + 
SearchTestContainerConfiguration.class +}) public class SearchDAOElasticSearchTest extends SearchDAOTestBase { - @Autowired - private RestHighLevelClient searchClient; - @Autowired - private SearchConfiguration searchConfiguration; + @Autowired private RestHighLevelClient searchClient; + @Autowired private SearchConfiguration searchConfiguration; + @Autowired @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java index a9e9feac28007..7133971847f98 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.search.elasticsearch; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.search.SearchServiceTestBase; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import com.linkedin.metadata.search.SearchServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchServiceElasticSearchTest extends SearchServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java index 7365887fb9b2e..a23cd5b051ecb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.systemmetadata.SystemMetadataServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import 
com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.systemmetadata.SystemMetadataServiceTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -11,16 +11,12 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class SystemMetadataServiceElasticSearchTest extends SystemMetadataServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java index bec610b20dca1..843da17fbd132 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class TestEntityElasticSearchTest extends TestEntityTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java index 5b85904edc923..6ebe42d0181e4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.timeseries.search.TimeseriesAspectServiceTestBase; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.timeseries.search.TimeseriesAspectServiceTestBase; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import 
 import org.springframework.beans.factory.annotation.Autowired;
@@ -14,12 +14,9 @@
 @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class})
 public class TimeseriesAspectServiceElasticSearchTest extends TimeseriesAspectServiceTestBase {
 
-  @Autowired
-  private RestHighLevelClient _searchClient;
-  @Autowired
-  private ESBulkProcessor _bulkProcessor;
-  @Autowired
-  private ESIndexBuilder _esIndexBuilder;
+  @Autowired private RestHighLevelClient _searchClient;
+  @Autowired private ESBulkProcessor _bulkProcessor;
+  @Autowired private ESIndexBuilder _esIndexBuilder;
 
   @NotNull
   @Override
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java
index ed81f3cebd027..fba9d5359d29f 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java
@@ -1,5 +1,10 @@
 package com.linkedin.metadata.search.fixtures;
 
+import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities;
+import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities;
+import static org.testng.Assert.assertTrue;
+import static org.testng.AssertJUnit.assertNotNull;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper;
@@ -8,151 +13,165 @@
 import com.linkedin.metadata.search.SearchEntityArray;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.metadata.search.SearchService;
-import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
-import org.testng.annotations.Test;
-
-import javax.annotation.Nonnull;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import javax.annotation.Nonnull;
+import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
+import org.testng.annotations.Test;
 
-import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities;
-import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities;
-import static org.testng.Assert.assertTrue;
-import static org.testng.AssertJUnit.assertNotNull;
+public abstract class GoldenTestBase extends AbstractTestNGSpringContextTests {
+
+  private static final List<String> SEARCHABLE_LONGTAIL_ENTITIES =
+      Stream.of(
+              EntityType.CHART,
+              EntityType.CONTAINER,
+              EntityType.DASHBOARD,
+              EntityType.DATASET,
+              EntityType.DOMAIN,
+              EntityType.TAG)
+          .map(EntityTypeMapper::getName)
+          .collect(Collectors.toList());
+
+  @Nonnull
+  protected abstract EntityRegistry getEntityRegistry();
 
-abstract public class GoldenTestBase extends AbstractTestNGSpringContextTests {
-
-  private static final List<String> SEARCHABLE_LONGTAIL_ENTITIES = Stream.of(EntityType.CHART, EntityType.CONTAINER,
-      EntityType.DASHBOARD, EntityType.DATASET, EntityType.DOMAIN, EntityType.TAG
-  ).map(EntityTypeMapper::getName)
-      .collect(Collectors.toList());
-
-  @Nonnull
-  abstract protected EntityRegistry getEntityRegistry();
-
-  @Nonnull
-  abstract protected SearchService getSearchService();
-
-  @Test
-  public void testNameMatchPetProfiles() {
-    /*
-      Searching for "pet profiles" should return "pet_profiles" as the first 2 search results
-    */
-    assertNotNull(getSearchService());
-    assertNotNull(getEntityRegistry());
-    SearchResult searchResult = searchAcrossCustomEntities(getSearchService(), "pet profiles", SEARCHABLE_LONGTAIL_ENTITIES);
-    assertTrue(searchResult.getEntities().size() >= 2);
-    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
-    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
-
-    assertTrue(firstResultUrn.toString().contains("pet_profiles"));
-    assertTrue(secondResultUrn.toString().contains("pet_profiles"));
-  }
-
-  @Test
-  public void testNameMatchPetProfile() {
-    /*
-      Searching for "pet profile" should return "pet_profiles" as the first 2 search results
-    */
-    assertNotNull(getSearchService());
-    SearchResult searchResult = searchAcrossEntities(getSearchService(), "pet profile", SEARCHABLE_LONGTAIL_ENTITIES);
-    assertTrue(searchResult.getEntities().size() >= 2);
-    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
-    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
-
-    assertTrue(firstResultUrn.toString().contains("pet_profiles"));
-    assertTrue(secondResultUrn.toString().contains("pet_profiles"));
-  }
-
-  @Test
-  public void testGlossaryTerms() {
-    /*
-      Searching for "ReturnRate" should return all tables that have the glossary term applied before
-      anything else
-    */
-    assertNotNull(getSearchService());
-    SearchResult searchResult = searchAcrossEntities(getSearchService(), "ReturnRate", SEARCHABLE_LONGTAIL_ENTITIES);
-    SearchEntityArray entities = searchResult.getEntities();
-    assertTrue(searchResult.getEntities().size() >= 4);
-    MatchedFieldArray firstResultMatchedFields = entities.get(0).getMatchedFields();
-    MatchedFieldArray secondResultMatchedFields = entities.get(1).getMatchedFields();
-    MatchedFieldArray thirdResultMatchedFields = entities.get(2).getMatchedFields();
-    MatchedFieldArray fourthResultMatchedFields = entities.get(3).getMatchedFields();
-
-    assertTrue(firstResultMatchedFields.toString().contains("ReturnRate"));
-    assertTrue(secondResultMatchedFields.toString().contains("ReturnRate"));
-    assertTrue(thirdResultMatchedFields.toString().contains("ReturnRate"));
-    assertTrue(fourthResultMatchedFields.toString().contains("ReturnRate"));
-  }
-
-  @Test
-  public void testNameMatchPartiallyQualified() {
-    /*
-      Searching for "analytics.pet_details" (partially qualified) should return the fully qualified table
-      name as the first search results before any others
-    */
-    assertNotNull(getSearchService());
-    SearchResult searchResult = searchAcrossEntities(getSearchService(), "analytics.pet_details", SEARCHABLE_LONGTAIL_ENTITIES);
-    assertTrue(searchResult.getEntities().size() >= 2);
-    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
-    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
-
-    assertTrue(firstResultUrn.toString().contains("snowflake,long_tail_companions.analytics.pet_details"));
-    assertTrue(secondResultUrn.toString().contains("dbt,long_tail_companions.analytics.pet_details"));
-  }
-
-  @Test
-  public void testNameMatchCollaborativeActionitems() {
-    /*
-      Searching for "collaborative actionitems" should return "collaborative_actionitems" as the first search
-      result, followed by "collaborative_actionitems_old"
-    */
-    assertNotNull(getSearchService());
-    SearchResult searchResult = searchAcrossEntities(getSearchService(), "collaborative actionitems", SEARCHABLE_LONGTAIL_ENTITIES);
-    assertTrue(searchResult.getEntities().size() >= 2);
-    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
-    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
-
-    // Checks that the table name is not suffixed with anything
-    assertTrue(firstResultUrn.toString().contains("collaborative_actionitems,"));
-    assertTrue(secondResultUrn.toString().contains("collaborative_actionitems_old"));
-
-    Double firstResultScore = searchResult.getEntities().get(0).getScore();
-    Double secondResultScore = searchResult.getEntities().get(1).getScore();
-
-    // Checks that the scores aren't tied so that we are matching on table name more than column name
-    assertTrue(firstResultScore > secondResultScore);
-  }
-
-  @Test
-  public void testNameMatchCustomerOrders() {
-    /*
-      Searching for "customer orders" should return "customer_orders" as the first search
-      result, not suffixed by anything
-    */
-    assertNotNull(getSearchService());
-    SearchResult searchResult = searchAcrossEntities(getSearchService(), "customer orders", SEARCHABLE_LONGTAIL_ENTITIES);
-    assertTrue(searchResult.getEntities().size() >= 2);
-    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
-
-    // Checks that the table name is not suffixed with anything
-    assertTrue(firstResultUrn.toString().contains("customer_orders,"),
-        "Expected firstResultUrn to contain `customer_orders,` but results are "
-            + searchResult.getEntities().stream()
-            .map(e -> String.format("(Score: %s Urn: %s)", e.getScore(), e.getEntity().getId()))
-            .collect(Collectors.joining(", ")));
-
-    Double firstResultScore = searchResult.getEntities().get(0).getScore();
-    Double secondResultScore = searchResult.getEntities().get(1).getScore();
-
-    // Checks that the scores aren't tied so that we are matching on table name more than column name
-    assertTrue(firstResultScore > secondResultScore);
-  }
+  @Nonnull
+  protected abstract SearchService getSearchService();
 
+  @Test
+  public void testNameMatchPetProfiles() {
+    /*
+    Searching for "pet profiles" should return "pet_profiles" as the first 2 search results
+    */
+    assertNotNull(getSearchService());
+    assertNotNull(getEntityRegistry());
+    SearchResult searchResult =
+        searchAcrossCustomEntities(
+            getSearchService(), "pet profiles", SEARCHABLE_LONGTAIL_ENTITIES);
+    assertTrue(searchResult.getEntities().size() >= 2);
+    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
+    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
+
+    assertTrue(firstResultUrn.toString().contains("pet_profiles"));
+    assertTrue(secondResultUrn.toString().contains("pet_profiles"));
+  }
+
+  @Test
+  public void testNameMatchPetProfile() {
+    /*
+    Searching for "pet profile" should return "pet_profiles" as the first 2 search results
+    */
+    assertNotNull(getSearchService());
+    SearchResult searchResult =
+        searchAcrossEntities(getSearchService(), "pet profile", SEARCHABLE_LONGTAIL_ENTITIES);
+    assertTrue(searchResult.getEntities().size() >= 2);
+    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
+    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
+
+    assertTrue(firstResultUrn.toString().contains("pet_profiles"));
+    assertTrue(secondResultUrn.toString().contains("pet_profiles"));
+  }
+
+  @Test
+  public void testGlossaryTerms() {
+    /*
+    Searching for "ReturnRate" should return all tables that have the glossary term applied before
+    anything else
+    */
+    assertNotNull(getSearchService());
+    SearchResult searchResult =
+        searchAcrossEntities(getSearchService(), "ReturnRate", SEARCHABLE_LONGTAIL_ENTITIES);
+    SearchEntityArray entities = searchResult.getEntities();
+    assertTrue(searchResult.getEntities().size() >= 4);
+    MatchedFieldArray firstResultMatchedFields = entities.get(0).getMatchedFields();
+    MatchedFieldArray secondResultMatchedFields = entities.get(1).getMatchedFields();
+    MatchedFieldArray thirdResultMatchedFields = entities.get(2).getMatchedFields();
+    MatchedFieldArray fourthResultMatchedFields = entities.get(3).getMatchedFields();
+
+    assertTrue(firstResultMatchedFields.toString().contains("ReturnRate"));
+    assertTrue(secondResultMatchedFields.toString().contains("ReturnRate"));
+    assertTrue(thirdResultMatchedFields.toString().contains("ReturnRate"));
+    assertTrue(fourthResultMatchedFields.toString().contains("ReturnRate"));
+  }
+
+  @Test
+  public void testNameMatchPartiallyQualified() {
+    /*
+    Searching for "analytics.pet_details" (partially qualified) should return the fully qualified table
+    name as the first search results before any others
+    */
+    assertNotNull(getSearchService());
+    SearchResult searchResult =
+        searchAcrossEntities(
+            getSearchService(), "analytics.pet_details", SEARCHABLE_LONGTAIL_ENTITIES);
+    assertTrue(searchResult.getEntities().size() >= 2);
+    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
+    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
+
+    assertTrue(
+        firstResultUrn.toString().contains("snowflake,long_tail_companions.analytics.pet_details"));
+    assertTrue(
+        secondResultUrn.toString().contains("dbt,long_tail_companions.analytics.pet_details"));
+  }
+
+  @Test
+  public void testNameMatchCollaborativeActionitems() {
+    /*
+    Searching for "collaborative actionitems" should return "collaborative_actionitems" as the first search
+    result, followed by "collaborative_actionitems_old"
+    */
+    assertNotNull(getSearchService());
+    SearchResult searchResult =
+        searchAcrossEntities(
+            getSearchService(), "collaborative actionitems", SEARCHABLE_LONGTAIL_ENTITIES);
+    assertTrue(searchResult.getEntities().size() >= 2);
+    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
+    Urn secondResultUrn = searchResult.getEntities().get(1).getEntity();
+
+    // Checks that the table name is not suffixed with anything
+    assertTrue(firstResultUrn.toString().contains("collaborative_actionitems,"));
+    assertTrue(secondResultUrn.toString().contains("collaborative_actionitems_old"));
+
+    Double firstResultScore = searchResult.getEntities().get(0).getScore();
+    Double secondResultScore = searchResult.getEntities().get(1).getScore();
+
+    // Checks that the scores aren't tied so that we are matching on table name more than column
+    // name
+    assertTrue(firstResultScore > secondResultScore);
+  }
+
+  @Test
+  public void testNameMatchCustomerOrders() {
     /*
-      Tests that should pass but do not yet can be added below here, with the following annotation:
-      @Test(enabled = false)
-    */
+    Searching for "customer orders" should return "customer_orders" as the first search
+    result, not suffixed by anything
+    */
+    assertNotNull(getSearchService());
+    SearchResult searchResult =
+        searchAcrossEntities(getSearchService(), "customer orders", SEARCHABLE_LONGTAIL_ENTITIES);
+    assertTrue(searchResult.getEntities().size() >= 2);
+    Urn firstResultUrn = searchResult.getEntities().get(0).getEntity();
+
+    // Checks that the table name is not suffixed with anything
+    assertTrue(
+        firstResultUrn.toString().contains("customer_orders,"),
+        "Expected firstResultUrn to contain `customer_orders,` but results are "
+            + searchResult.getEntities().stream()
+                .map(e -> String.format("(Score: %s Urn: %s)", e.getScore(), e.getEntity().getId()))
+                .collect(Collectors.joining(", ")));
+
+    Double firstResultScore = searchResult.getEntities().get(0).getScore();
+    Double secondResultScore = searchResult.getEntities().get(1).getScore();
+
+    // Checks that the scores aren't tied so that we are matching on table name more than column
+    // name
+    assertTrue(firstResultScore > secondResultScore);
+  }
+
+  /*
+  Tests that should pass but do not yet can be added below here, with the following annotation:
+  @Test(enabled = false)
+  */
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java
index eaf8feedeb6ed..44fe5ea8ac9ae 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java
@@ -1,61 +1,64 @@
 package com.linkedin.metadata.search.fixtures;
 
+import static io.datahubproject.test.search.SearchTestUtils.lineage;
+import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.search.LineageSearchResult;
 import com.linkedin.metadata.search.LineageSearchService;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.metadata.search.SearchService;
+import java.net.URISyntaxException;
+import javax.annotation.Nonnull;
 import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
 import org.testng.annotations.Test;
 
-import javax.annotation.Nonnull;
-import java.net.URISyntaxException;
-
-import static io.datahubproject.test.search.SearchTestUtils.lineage;
-import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-
-abstract public class LineageDataFixtureTestBase extends AbstractTestNGSpringContextTests {
-
-  @Nonnull
-  abstract protected LineageSearchService getLineageService();
+public abstract class LineageDataFixtureTestBase extends AbstractTestNGSpringContextTests {
 
-  @Nonnull
-  abstract protected SearchService getSearchService();
+  @Nonnull
+  protected abstract LineageSearchService getLineageService();
 
+  @Nonnull
+  protected abstract SearchService getSearchService();
 
-  @Test
-  public void testFixtureInitialization() {
-    assertNotNull(getSearchService());
-    SearchResult noResult = searchAcrossEntities(getSearchService(), "no results");
-    assertEquals(noResult.getEntities().size(), 0);
+  @Test
+  public void testFixtureInitialization() {
+    assertNotNull(getSearchService());
+    SearchResult noResult = searchAcrossEntities(getSearchService(), "no results");
+    assertEquals(noResult.getEntities().size(), 0);
 
-    SearchResult result = searchAcrossEntities(getSearchService(), "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8");
-    assertEquals(result.getEntities().size(), 1);
+    SearchResult result =
+        searchAcrossEntities(
+            getSearchService(), "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8");
+    assertEquals(result.getEntities().size(), 1);
 
-    assertEquals(result.getEntities().get(0).getEntity().toString(),
-        "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)");
+    assertEquals(
+        result.getEntities().get(0).getEntity().toString(),
+        "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)");
 
-    LineageSearchResult lineageResult = lineage(getLineageService(), result.getEntities().get(0).getEntity(), 1);
-    assertEquals(lineageResult.getEntities().size(), 10);
-  }
+    LineageSearchResult lineageResult =
+        lineage(getLineageService(), result.getEntities().get(0).getEntity(), 1);
+    assertEquals(lineageResult.getEntities().size(), 10);
+  }
 
-  @Test
-  public void testDatasetLineage() throws URISyntaxException {
-    Urn testUrn = Urn.createFromString(
-        "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)");
+  @Test
+  public void testDatasetLineage() throws URISyntaxException {
+    Urn testUrn =
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)");
 
-    // 1 hops
-    LineageSearchResult lineageResult = lineage(getLineageService(), testUrn, 1);
-    assertEquals(lineageResult.getEntities().size(), 10);
+    // 1 hops
+    LineageSearchResult lineageResult = lineage(getLineageService(), testUrn, 1);
+    assertEquals(lineageResult.getEntities().size(), 10);
 
-    // 2 hops
-    lineageResult = lineage(getLineageService(), testUrn, 2);
-    assertEquals(lineageResult.getEntities().size(), 5);
+    // 2 hops
+    lineageResult = lineage(getLineageService(), testUrn, 2);
+    assertEquals(lineageResult.getEntities().size(), 5);
 
-    // 3 hops
-    lineageResult = lineage(getLineageService(), testUrn, 3);
-    assertEquals(lineageResult.getEntities().size(), 12);
-  }
+    // 3 hops
+    lineageResult = lineage(getLineageService(), testUrn, 3);
+    assertEquals(lineageResult.getEntities().size(), 12);
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java
index 69dd5c80bef1d..a1af2325ee0ed 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java
@@ -1,5 +1,16 @@
 package com.linkedin.metadata.search.fixtures;
 
+import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME;
+import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME;
+import static com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder.STRUCTURED_QUERY_PREFIX;
+import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR;
+import static io.datahubproject.test.search.SearchTestUtils.*;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertSame;
+import static org.testng.Assert.assertTrue;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -32,6 +43,16 @@
 import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig;
 import com.linkedin.metadata.search.utils.ESUtils;
 import com.linkedin.r2.RemoteInvocationException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+import javax.annotation.Nonnull;
 import org.junit.Assert;
 import org.opensearch.client.RequestOptions;
org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; @@ -45,1470 +66,1882 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; - -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder.STRUCTURED_QUERY_PREFIX; -import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; -import static io.datahubproject.test.search.SearchTestUtils.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertSame; -import static org.testng.Assert.assertTrue; - -abstract public class SampleDataFixtureTestBase extends AbstractTestNGSpringContextTests { - protected static final Authentication AUTHENTICATION = - new Authentication(new Actor(ActorType.USER, "test"), ""); - - @Nonnull - abstract protected EntityRegistry getEntityRegistry(); - - @Nonnull - abstract protected SearchService getSearchService(); - - @Nonnull - abstract protected EntityClient getEntityClient(); - - @Nonnull - abstract protected RestHighLevelClient getSearchClient(); - - @Test - public void testSearchFieldConfig() throws IOException { - /* - For every field in every entity fixture, ensure proper detection of field types and analyzers - */ - Map<EntitySpec, String> fixtureEntities = new HashMap<>(); - fixtureEntities.put(getEntityRegistry().getEntitySpec("dataset"), "smpldat_datasetindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("chart"), "smpldat_chartindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("container"), "smpldat_containerindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("corpgroup"), "smpldat_corpgroupindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("corpuser"), "smpldat_corpuserindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("dashboard"), "smpldat_dashboardindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("dataflow"), "smpldat_dataflowindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("datajob"), "smpldat_datajobindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("domain"), "smpldat_domainindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("glossarynode"), "smpldat_glossarynodeindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("glossaryterm"), "smpldat_glossarytermindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlfeature"), "smpldat_mlfeatureindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlfeaturetable"), "smpldat_mlfeaturetableindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodelgroup"), "smpldat_mlmodelgroupindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodel"), "smpldat_mlmodelindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("mlprimarykey"), "smpldat_mlprimarykeyindex_v2"); - fixtureEntities.put(getEntityRegistry().getEntitySpec("tag"), 
"smpldat_tagindex_v2"); - - for (Map.Entry<EntitySpec, String> entry : fixtureEntities.entrySet()) { - EntitySpec entitySpec = entry.getKey(); - GetMappingsRequest req = new GetMappingsRequest().indices(entry.getValue()); - - GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); - Map<String, Map<String, Object>> mappings = (Map<String, Map<String, Object>>) resp.mappings() - .get(entry.getValue()).sourceAsMap().get("properties"); - - // For every fieldSpec determine whether the SearchFieldConfig is accurate - for (SearchableFieldSpec fieldSpec : entitySpec.getSearchableFieldSpecs()) { - SearchFieldConfig test = SearchFieldConfig.detectSubFieldType(fieldSpec); - - if (!test.fieldName().contains(".")) { - Map<String, Object> actual = mappings.get(test.fieldName()); - - final String expectedAnalyzer; - if (actual.get("search_analyzer") != null) { - expectedAnalyzer = (String) actual.get("search_analyzer"); - } else if (actual.get("analyzer") != null) { - expectedAnalyzer = (String) actual.get("analyzer"); - } else { - expectedAnalyzer = "keyword"; - } - - assertEquals(test.analyzer(), expectedAnalyzer, - String.format("Expected search analyzer to match for entity: `%s`field: `%s`", - entitySpec.getName(), test.fieldName())); - - if (test.hasDelimitedSubfield()) { - assertTrue(((Map<String, Map<String, String>>) actual.get("fields")).containsKey("delimited"), - String.format("Expected entity: `%s` field to have .delimited subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } else { - boolean nosubfield = !actual.containsKey("fields") - || !((Map<String, Map<String, String>>) actual.get("fields")).containsKey("delimited"); - assertTrue(nosubfield, String.format("Expected entity: `%s` field to NOT have .delimited subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } - if (test.hasKeywordSubfield()) { - assertTrue(((Map<String, Map<String, String>>) actual.get("fields")).containsKey("keyword"), - String.format("Expected entity: `%s` field to have .keyword subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } else { - boolean nosubfield = !actual.containsKey("fields") - || !((Map<String, Map<String, String>>) actual.get("fields")).containsKey("keyword"); - assertTrue(nosubfield, String.format("Expected entity: `%s` field to NOT have .keyword subfield: `%s`", - entitySpec.getName(), test.fieldName())); - } - } else { - // this is a subfield therefore cannot have a subfield - assertFalse(test.hasKeywordSubfield()); - assertFalse(test.hasDelimitedSubfield()); - assertFalse(test.hasWordGramSubfields()); - - String[] fieldAndSubfield = test.fieldName().split("[.]", 2); - - Map<String, Object> actualParent = mappings.get(fieldAndSubfield[0]); - Map<String, Object> actualSubfield = ((Map<String, Map<String, Object>>) actualParent.get("fields")).get(fieldAndSubfield[0]); - - String expectedAnalyzer = actualSubfield.get("search_analyzer") != null ? 
(String) actualSubfield.get("search_analyzer") - : "keyword"; - - assertEquals(test.analyzer(), expectedAnalyzer, - String.format("Expected search analyzer to match for field `%s`", test.fieldName())); - } - } +public abstract class SampleDataFixtureTestBase extends AbstractTestNGSpringContextTests { + protected static final Authentication AUTHENTICATION = + new Authentication(new Actor(ActorType.USER, "test"), ""); + + @Nonnull + protected abstract EntityRegistry getEntityRegistry(); + + @Nonnull + protected abstract SearchService getSearchService(); + + @Nonnull + protected abstract EntityClient getEntityClient(); + + @Nonnull + protected abstract RestHighLevelClient getSearchClient(); + + @Test + public void testSearchFieldConfig() throws IOException { + /* + For every field in every entity fixture, ensure proper detection of field types and analyzers + */ + Map<EntitySpec, String> fixtureEntities = new HashMap<>(); + fixtureEntities.put(getEntityRegistry().getEntitySpec("dataset"), "smpldat_datasetindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("chart"), "smpldat_chartindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("container"), "smpldat_containerindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("corpgroup"), "smpldat_corpgroupindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("corpuser"), "smpldat_corpuserindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("dashboard"), "smpldat_dashboardindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("dataflow"), "smpldat_dataflowindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("datajob"), "smpldat_datajobindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("domain"), "smpldat_domainindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("glossarynode"), "smpldat_glossarynodeindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("glossaryterm"), "smpldat_glossarytermindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlfeature"), "smpldat_mlfeatureindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlfeaturetable"), "smpldat_mlfeaturetableindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlmodelgroup"), "smpldat_mlmodelgroupindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodel"), "smpldat_mlmodelindex_v2"); + fixtureEntities.put( + getEntityRegistry().getEntitySpec("mlprimarykey"), "smpldat_mlprimarykeyindex_v2"); + fixtureEntities.put(getEntityRegistry().getEntitySpec("tag"), "smpldat_tagindex_v2"); + + for (Map.Entry<EntitySpec, String> entry : fixtureEntities.entrySet()) { + EntitySpec entitySpec = entry.getKey(); + GetMappingsRequest req = new GetMappingsRequest().indices(entry.getValue()); + + GetMappingsResponse resp = + getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); + Map<String, Map<String, Object>> mappings = + (Map<String, Map<String, Object>>) + resp.mappings().get(entry.getValue()).sourceAsMap().get("properties"); + + // For every fieldSpec determine whether the SearchFieldConfig is accurate + for (SearchableFieldSpec fieldSpec : entitySpec.getSearchableFieldSpecs()) { + SearchFieldConfig test = SearchFieldConfig.detectSubFieldType(fieldSpec); + + if (!test.fieldName().contains(".")) { + Map<String, Object> actual = mappings.get(test.fieldName()); + + final String expectedAnalyzer; + if (actual.get("search_analyzer") != null) { + expectedAnalyzer = 
(String) actual.get("search_analyzer"); + } else if (actual.get("analyzer") != null) { + expectedAnalyzer = (String) actual.get("analyzer"); + } else { + expectedAnalyzer = "keyword"; + } + + assertEquals( + test.analyzer(), + expectedAnalyzer, + String.format( + "Expected search analyzer to match for entity: `%s`field: `%s`", + entitySpec.getName(), test.fieldName())); + + if (test.hasDelimitedSubfield()) { + assertTrue( + ((Map<String, Map<String, String>>) actual.get("fields")).containsKey("delimited"), + String.format( + "Expected entity: `%s` field to have .delimited subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } else { + boolean nosubfield = + !actual.containsKey("fields") + || !((Map<String, Map<String, String>>) actual.get("fields")) + .containsKey("delimited"); + assertTrue( + nosubfield, + String.format( + "Expected entity: `%s` field to NOT have .delimited subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } + if (test.hasKeywordSubfield()) { + assertTrue( + ((Map<String, Map<String, String>>) actual.get("fields")).containsKey("keyword"), + String.format( + "Expected entity: `%s` field to have .keyword subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } else { + boolean nosubfield = + !actual.containsKey("fields") + || !((Map<String, Map<String, String>>) actual.get("fields")) + .containsKey("keyword"); + assertTrue( + nosubfield, + String.format( + "Expected entity: `%s` field to NOT have .keyword subfield: `%s`", + entitySpec.getName(), test.fieldName())); + } + } else { + // this is a subfield therefore cannot have a subfield + assertFalse(test.hasKeywordSubfield()); + assertFalse(test.hasDelimitedSubfield()); + assertFalse(test.hasWordGramSubfields()); + + String[] fieldAndSubfield = test.fieldName().split("[.]", 2); + + Map<String, Object> actualParent = mappings.get(fieldAndSubfield[0]); + Map<String, Object> actualSubfield = + ((Map<String, Map<String, Object>>) actualParent.get("fields")) + .get(fieldAndSubfield[0]); + + String expectedAnalyzer = + actualSubfield.get("search_analyzer") != null + ? 
(String) actualSubfield.get("search_analyzer") + : "keyword"; + + assertEquals( + test.analyzer(), + expectedAnalyzer, + String.format("Expected search analyzer to match for field `%s`", test.fieldName())); } + } } - - @Test - public void testGetSortOrder() { - String dateFieldName = "lastOperationTime"; - List<String> entityNamesToTestSearch = List.of("dataset", "chart", "corpgroup"); - List<EntitySpec> entitySpecs = entityNamesToTestSearch.stream().map( - name -> getEntityRegistry().getEntitySpec(name)) + } + + @Test + public void testGetSortOrder() { + String dateFieldName = "lastOperationTime"; + List<String> entityNamesToTestSearch = List.of("dataset", "chart", "corpgroup"); + List<EntitySpec> entitySpecs = + entityNamesToTestSearch.stream() + .map(name -> getEntityRegistry().getEntitySpec(name)) .collect(Collectors.toList()); - SearchSourceBuilder builder = new SearchSourceBuilder(); - SortCriterion sortCriterion = new SortCriterion().setOrder(SortOrder.DESCENDING).setField(dateFieldName); - ESUtils.buildSortOrder(builder, sortCriterion, entitySpecs); - List<SortBuilder<?>> sorts = builder.sorts(); - assertEquals(sorts.size(), 2); // sort by last modified and then by urn - for (SortBuilder sort : sorts) { - assertTrue(sort instanceof FieldSortBuilder); - FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; - if (fieldSortBuilder.getFieldName().equals(dateFieldName)) { - assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.DESC); - assertEquals(fieldSortBuilder.unmappedType(), "date"); - } else { - assertEquals(fieldSortBuilder.getFieldName(), "urn"); - } - } - - // Test alias field - String entityNameField = "_entityName"; - SearchSourceBuilder nameBuilder = new SearchSourceBuilder(); - SortCriterion nameCriterion = new SortCriterion().setOrder(SortOrder.ASCENDING).setField(entityNameField); - ESUtils.buildSortOrder(nameBuilder, nameCriterion, entitySpecs); - sorts = nameBuilder.sorts(); - assertEquals(sorts.size(), 2); - for (SortBuilder sort : sorts) { - assertTrue(sort instanceof FieldSortBuilder); - FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; - if (fieldSortBuilder.getFieldName().equals(entityNameField)) { - assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.ASC); - assertEquals(fieldSortBuilder.unmappedType(), "keyword"); - } else { - assertEquals(fieldSortBuilder.getFieldName(), "urn"); - } - } + SearchSourceBuilder builder = new SearchSourceBuilder(); + SortCriterion sortCriterion = + new SortCriterion().setOrder(SortOrder.DESCENDING).setField(dateFieldName); + ESUtils.buildSortOrder(builder, sortCriterion, entitySpecs); + List<SortBuilder<?>> sorts = builder.sorts(); + assertEquals(sorts.size(), 2); // sort by last modified and then by urn + for (SortBuilder sort : sorts) { + assertTrue(sort instanceof FieldSortBuilder); + FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; + if (fieldSortBuilder.getFieldName().equals(dateFieldName)) { + assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.DESC); + assertEquals(fieldSortBuilder.unmappedType(), "date"); + } else { + assertEquals(fieldSortBuilder.getFieldName(), "urn"); + } } - @Test - public void testDatasetHasTags() throws IOException { - GetMappingsRequest req = new GetMappingsRequest() - .indices("smpldat_datasetindex_v2"); - GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); - Map<String, Map<String, String>> mappings = (Map<String, Map<String, String>>) resp.mappings() - 
.get("smpldat_datasetindex_v2").sourceAsMap().get("properties"); - assertTrue(mappings.containsKey("hasTags")); - assertEquals(mappings.get("hasTags"), Map.of("type", "boolean")); + // Test alias field + String entityNameField = "_entityName"; + SearchSourceBuilder nameBuilder = new SearchSourceBuilder(); + SortCriterion nameCriterion = + new SortCriterion().setOrder(SortOrder.ASCENDING).setField(entityNameField); + ESUtils.buildSortOrder(nameBuilder, nameCriterion, entitySpecs); + sorts = nameBuilder.sorts(); + assertEquals(sorts.size(), 2); + for (SortBuilder sort : sorts) { + assertTrue(sort instanceof FieldSortBuilder); + FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort; + if (fieldSortBuilder.getFieldName().equals(entityNameField)) { + assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.ASC); + assertEquals(fieldSortBuilder.unmappedType(), "keyword"); + } else { + assertEquals(fieldSortBuilder.getFieldName(), "urn"); + } } - - @Test - public void testFixtureInitialization() { - assertNotNull(getSearchService()); - SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); - assertEquals(0, noResult.getEntities().size()); - - final SearchResult result = searchAcrossEntities(getSearchService(), "test"); - - Map<String, Integer> expectedTypes = Map.of( - "dataset", 13, - "chart", 0, - "container", 1, - "dashboard", 0, - "tag", 0, - "mlmodel", 0 - ); - - Map<String, List<Urn>> actualTypes = new HashMap<>(); - for (String key : expectedTypes.keySet()) { - actualTypes.put(key, result.getEntities().stream() - .map(SearchEntity::getEntity).filter(entity -> key.equals(entity.getEntityType())).collect(Collectors.toList())); - } - - expectedTypes.forEach((key, value) -> - assertEquals(actualTypes.get(key).size(), value.intValue(), - String.format("Expected entity `%s` matches for %s. 
Found %s", value, key, - result.getEntities().stream() - .filter(e -> e.getEntity().getEntityType().equals(key)) - .map(e -> e.getEntity().getEntityKey()) - .collect(Collectors.toList())))); + } + + @Test + public void testDatasetHasTags() throws IOException { + GetMappingsRequest req = new GetMappingsRequest().indices("smpldat_datasetindex_v2"); + GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT); + Map<String, Map<String, String>> mappings = + (Map<String, Map<String, String>>) + resp.mappings().get("smpldat_datasetindex_v2").sourceAsMap().get("properties"); + assertTrue(mappings.containsKey("hasTags")); + assertEquals(mappings.get("hasTags"), Map.of("type", "boolean")); + } + + @Test + public void testFixtureInitialization() { + assertNotNull(getSearchService()); + SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); + assertEquals(0, noResult.getEntities().size()); + + final SearchResult result = searchAcrossEntities(getSearchService(), "test"); + + Map<String, Integer> expectedTypes = + Map.of( + "dataset", 13, + "chart", 0, + "container", 1, + "dashboard", 0, + "tag", 0, + "mlmodel", 0); + + Map<String, List<Urn>> actualTypes = new HashMap<>(); + for (String key : expectedTypes.keySet()) { + actualTypes.put( + key, + result.getEntities().stream() + .map(SearchEntity::getEntity) + .filter(entity -> key.equals(entity.getEntityType())) + .collect(Collectors.toList())); } - @Test - public void testDataPlatform() { - Map<String, Integer> expected = ImmutableMap.<String, Integer>builder() - .put("urn:li:dataPlatform:BigQuery", 8) - .put("urn:li:dataPlatform:hive", 3) - .put("urn:li:dataPlatform:mysql", 5) - .put("urn:li:dataPlatform:s3", 1) - .put("urn:li:dataPlatform:hdfs", 1) - .put("urn:li:dataPlatform:graph", 1) - .put("urn:li:dataPlatform:dbt", 9) - .put("urn:li:dataplatform:BigQuery", 8) - .put("urn:li:dataplatform:hive", 3) - .put("urn:li:dataplatform:mysql", 5) - .put("urn:li:dataplatform:s3", 1) - .put("urn:li:dataplatform:hdfs", 1) - .put("urn:li:dataplatform:graph", 1) - .put("urn:li:dataplatform:dbt", 9) - .build(); - - expected.forEach((key, value) -> { - SearchResult result = searchAcrossEntities(getSearchService(), key); - assertEquals(result.getEntities().size(), value.intValue(), - String.format("Unexpected data platform `%s` hits.", key)); // max is 100 without pagination + expectedTypes.forEach( + (key, value) -> + assertEquals( + actualTypes.get(key).size(), + value.intValue(), + String.format( + "Expected entity `%s` matches for %s. 
Found %s", + value, + key, + result.getEntities().stream() + .filter(e -> e.getEntity().getEntityType().equals(key)) + .map(e -> e.getEntity().getEntityKey()) + .collect(Collectors.toList())))); + } + + @Test + public void testDataPlatform() { + Map<String, Integer> expected = + ImmutableMap.<String, Integer>builder() + .put("urn:li:dataPlatform:BigQuery", 8) + .put("urn:li:dataPlatform:hive", 3) + .put("urn:li:dataPlatform:mysql", 5) + .put("urn:li:dataPlatform:s3", 1) + .put("urn:li:dataPlatform:hdfs", 1) + .put("urn:li:dataPlatform:graph", 1) + .put("urn:li:dataPlatform:dbt", 9) + .put("urn:li:dataplatform:BigQuery", 8) + .put("urn:li:dataplatform:hive", 3) + .put("urn:li:dataplatform:mysql", 5) + .put("urn:li:dataplatform:s3", 1) + .put("urn:li:dataplatform:hdfs", 1) + .put("urn:li:dataplatform:graph", 1) + .put("urn:li:dataplatform:dbt", 9) + .build(); + + expected.forEach( + (key, value) -> { + SearchResult result = searchAcrossEntities(getSearchService(), key); + assertEquals( + result.getEntities().size(), + value.intValue(), + String.format( + "Unexpected data platform `%s` hits.", key)); // max is 100 without pagination }); - } - - @Test - public void testUrn() { + } + + @Test + public void testUrn() { + List.of( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.austin311_derived,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:graph,graph-test,PROD)", + "urn:li:chart:(looker,baz1)", + "urn:li:dashboard:(looker,baz)", + "urn:li:mlFeature:(test_feature_table_all_feature_dtypes,test_BOOL_LIST_feature)", + "urn:li:mlModel:(urn:li:dataPlatform:science,scienceModel,PROD)") + .forEach( + query -> + assertTrue( + searchAcrossEntities(getSearchService(), query).getEntities().size() >= 1, + String.format("Unexpected >1 urn result for `%s`", query))); + } + + @Test + public void testExactTable() { + SearchResult results = searchAcrossEntities(getSearchService(), "stg_customers"); + assertEquals( + results.getEntities().size(), 1, "Unexpected single urn result for `stg_customers`"); + assertEquals( + results.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)"); + } + + @Test + public void testStemming() { + List<Set<String>> testSets = List.of( - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.austin311_derived,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:graph,graph-test,PROD)", - "urn:li:chart:(looker,baz1)", - "urn:li:dashboard:(looker,baz)", - "urn:li:mlFeature:(test_feature_table_all_feature_dtypes,test_BOOL_LIST_feature)", - "urn:li:mlModel:(urn:li:dataPlatform:science,scienceModel,PROD)" - ).forEach(query -> - assertTrue(searchAcrossEntities(getSearchService(), query).getEntities().size() >= 1, - String.format("Unexpected >1 urn result for `%s`", query)) - ); - } - - @Test - public void testExactTable() { - SearchResult results = searchAcrossEntities(getSearchService(), "stg_customers"); - assertEquals(results.getEntities().size(), 1, "Unexpected single urn result for `stg_customers`"); - assertEquals(results.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)"); - } - - @Test - public void testStemming() { - List<Set<String>> testSets = List.of( - Set.of("log", "logs", "logging"), - Set.of("border", "borders", "bordered", "bordering"), - Set.of("indicates", "indicate", "indicated") - ); - - testSets.forEach(testSet -> { - Integer expectedResults 
= null; - for (String testQuery : testSet) { - SearchResult results = searchAcrossEntities(getSearchService(), testQuery); - - assertTrue(results.hasEntities() && !results.getEntities().isEmpty(), - String.format("Expected search results for `%s`", testQuery)); - if (expectedResults == null) { - expectedResults = results.getNumEntities(); - } - assertEquals(expectedResults, results.getNumEntities(), - String.format("Expected all result counts to match after stemming. %s", testSet)); + Set.of("log", "logs", "logging"), + Set.of("border", "borders", "bordered", "bordering"), + Set.of("indicates", "indicate", "indicated")); + + testSets.forEach( + testSet -> { + Integer expectedResults = null; + for (String testQuery : testSet) { + SearchResult results = searchAcrossEntities(getSearchService(), testQuery); + + assertTrue( + results.hasEntities() && !results.getEntities().isEmpty(), + String.format("Expected search results for `%s`", testQuery)); + if (expectedResults == null) { + expectedResults = results.getNumEntities(); } - }); - } - - @Test - public void testStemmingOverride() throws IOException { - Set<String> testSet = Set.of("customer", "customers"); - - Set<SearchResult> results = testSet.stream() - .map(test -> searchAcrossEntities(getSearchService(), test)) - .collect(Collectors.toSet()); - - results.forEach(r -> assertTrue(r.hasEntities() && !r.getEntities().isEmpty(), "Expected search results")); - assertEquals(results.stream().map(r -> r.getEntities().size()).distinct().count(), 1, + assertEquals( + expectedResults, + results.getNumEntities(), String.format("Expected all result counts to match after stemming. %s", testSet)); - - // Additional inspect token - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "customers" - ); - - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("customer"), "Expected `customer` and not `custom`"); - } - - @Test - public void testDelimitedSynonym() throws IOException { - List<String> expectedTokens = List.of("cac"); - List<String> analyzers = List.of( - "urn_component", - "word_delimited", - "query_urn_component", - "query_word_delimited" - ); - List<String> testTexts = List.of( - "customer acquisition cost", - "cac", - "urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)" - ); - - for (String analyzer : analyzers) { - for (String text : testTexts) { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - analyzer, text - ); - List<String> tokens = getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedTokens.forEach(expected -> assertTrue(tokens.contains(expected), - String.format("Analyzer: `%s` Text: `%s` - Expected token `%s` in tokens: %s", - analyzer, text, expected, tokens))); - } - } - - // {"urn":"urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)","id":"cac_table",... 
- List<String> testSet = List.of( - "cac", - "customer acquisition cost" - ); - List<Integer> resultCounts = testSet.stream().map(q -> { - SearchResult result = searchAcrossEntities(getSearchService(), q); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - "Expected search results for: " + q); - return result.getEntities().size(); - }).collect(Collectors.toList()); - } - - @Test - public void testNegateAnalysis() throws IOException { - String queryWithMinus = "logging_events -bckp"; - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("logging_events -bckp", "logging_ev", "-bckp", "log", "event", "bckp")); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_gram_3", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("logging events -bckp")); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_gram_4", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of()); - + } + }); + } + + @Test + public void testStemmingOverride() throws IOException { + Set<String> testSet = Set.of("customer", "customers"); + + Set<SearchResult> results = + testSet.stream() + .map(test -> searchAcrossEntities(getSearchService(), test)) + .collect(Collectors.toSet()); + + results.forEach( + r -> assertTrue(r.hasEntities() && !r.getEntities().isEmpty(), "Expected search results")); + assertEquals( + results.stream().map(r -> r.getEntities().size()).distinct().count(), + 1, + String.format("Expected all result counts to match after stemming. 
%s", testSet)); + + // Additional inspect token + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", "customers"); + + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("customer"), "Expected `customer` and not `custom`"); + } + + @Test + public void testDelimitedSynonym() throws IOException { + List<String> expectedTokens = List.of("cac"); + List<String> analyzers = + List.of("urn_component", "word_delimited", "query_urn_component", "query_word_delimited"); + List<String> testTexts = + List.of( + "customer acquisition cost", + "cac", + "urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)"); + + for (String analyzer : analyzers) { + for (String text : testTexts) { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, text); + List<String> tokens = + getTokens(request) + .map(AnalyzeResponse.AnalyzeToken::getTerm) + .collect(Collectors.toList()); + expectedTokens.forEach( + expected -> + assertTrue( + tokens.contains(expected), + String.format( + "Analyzer: `%s` Text: `%s` - Expected token `%s` in tokens: %s", + analyzer, text, expected, tokens))); + } } - @Test - public void testWordGram() throws IOException { - String text = "hello.cat_cool_customer"; - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat", "cat cool", "cool customer")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat cool", "cat cool customer")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat cool customer")); - - String testMoreSeparators = "quick.brown:fox jumped-LAZY_Dog"; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown", "brown fox", "fox jumped", "jumped lazy", "lazy dog")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown fox", "brown fox jumped", "fox jumped lazy", "jumped lazy dog")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown fox jumped", "brown fox jumped lazy", "fox jumped lazy dog")); - - String textWithQuotesAndDuplicateWord = "\"my_db.my_exact_table\""; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db", "db my", "my exact", "exact table")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", 
"word_gram_3", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db my", "db my exact", "my exact table")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db my exact", "db my exact table")); - - String textWithParens = "(hi) there"; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithParens); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hi there")); - - String oneWordText = "hello"; - for (String analyzer : List.of("word_gram_2", "word_gram_3", "word_gram_4")) { - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, oneWordText); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of()); - } + // {"urn":"urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)","id":"cac_table",... + List<String> testSet = List.of("cac", "customer acquisition cost"); + List<Integer> resultCounts = + testSet.stream() + .map( + q -> { + SearchResult result = searchAcrossEntities(getSearchService(), q); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + "Expected search results for: " + q); + return result.getEntities().size(); + }) + .collect(Collectors.toList()); + } + + @Test + public void testNegateAnalysis() throws IOException { + String queryWithMinus = "logging_events -bckp"; + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("logging_events -bckp", "logging_ev", "-bckp", "log", "event", "bckp")); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("logging events -bckp")); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of()); + } + + @Test + public void testWordGram() throws IOException { + String text = "hello.cat_cool_customer"; + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat", "cat cool", "cool customer")); + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat cool", "cat cool customer")); + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat cool customer")); + + String testMoreSeparators = "quick.brown:fox jumped-LAZY_Dog"; + request = + AnalyzeRequest.withIndexAnalyzer( + 
"smpldat_datasetindex_v2", "word_gram_2", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown", "brown fox", "fox jumped", "jumped lazy", "lazy dog")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_3", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown fox", "brown fox jumped", "fox jumped lazy", "jumped lazy dog")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_4", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown fox jumped", "brown fox jumped lazy", "fox jumped lazy dog")); + + String textWithQuotesAndDuplicateWord = "\"my_db.my_exact_table\""; + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_2", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db", "db my", "my exact", "exact table")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_3", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db my", "db my exact", "my exact table")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_4", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db my exact", "db my exact table")); + + String textWithParens = "(hi) there"; + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithParens); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hi there")); + + String oneWordText = "hello"; + for (String analyzer : List.of("word_gram_2", "word_gram_3", "word_gram_4")) { + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, oneWordText); + assertEquals( + getTokens(request) + .map(AnalyzeResponse.AnalyzeToken::getTerm) + .collect(Collectors.toList()), + List.of()); } - - @Test - public void testUrnSynonym() throws IOException { - List<String> expectedTokens = List.of("bigquery"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.bq_audit.cloudaudit_googleapis_com_activity,PROD)" - ); - List<String> indexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedTokens.forEach(expected -> assertTrue(indexTokens.contains(expected), + } + + @Test + public void testUrnSynonym() throws IOException { + List<String> expectedTokens = List.of("bigquery"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.bq_audit.cloudaudit_googleapis_com_activity,PROD)"); + List<String> indexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedTokens.forEach( + expected -> + assertTrue( + 
indexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, indexTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - "big query" - ); - List<String> queryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(queryTokens, List.of("big query", "big", "query", "bigquery")); - - List<String> testSet = List.of( - "bigquery", - "big query" - ); - List<SearchResult> results = testSet.stream().map(query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), "Expected search results for: " + query); - return result; - }).collect(Collectors.toList()); - - assertEquals(results.stream().map(r -> r.getEntities().size()).distinct().count(), 1, - String.format("Expected all result counts (%s) to match after synonyms. %s", results, testSet)); - Assert.assertArrayEquals(results.get(0).getEntities().stream().map(e -> e.getEntity().toString()).sorted().toArray(String[]::new), - results.get(1).getEntities().stream().map(e -> e.getEntity().toString()).sorted().toArray(String[]::new)); - } - - @Test - public void testTokenization() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "my_table" - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("my_tabl", "tabl"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "my_table" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("my_tabl", "tabl"), - String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationWithNumber() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "harshal-playground-306419.test_schema.austin311_derived" - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "austin311_deriv", "austin311", "deriv"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "harshal-playground-306419.test_schema.austin311_derived" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "austin311_deriv", "austin311", "deriv"), - String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationQuote() throws IOException { - String testQuery = "\"test2\""; - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - testQuery - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. 
Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationQuoteUnderscore() throws IOException { - String testQuery = "\"raw_orders\""; - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - testQuery - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders", "raw_ord", "raw", "order"), String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders", "raw_ord", "raw", "order"), String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders"), String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationDataPlatform() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.excess_deaths_derived,PROD)" - ); - List<String> tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", "bigquery", "big", "query", - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "excess_deaths_deriv", "excess", "death", "deriv", - "prod", "production"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset-ac611929-c3ac-4b92-aafb-f4603ddb408a,PROD)" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", "hive", - "samplehivedataset-ac611929-c3ac-4b92-aafb-f4603ddb408a", - "samplehivedataset", "ac611929", "c3ac", "4b92", "aafb", "f4603ddb408a", - "prod", "production"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:test_rollback,rollback_test_dataset,TEST)" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", - "test_rollback", "test", "rollback", "rollback_test_dataset"), - String.format("Unexpected tokens. 
Found %s", tokens)); - } - - @Test - public void testChartAutoComplete() throws InterruptedException, IOException { - // Two charts exist Baz Chart 1 & Baz Chart 2 - List.of("B", "Ba", "Baz", "Baz ", "Baz C", "Baz Ch", "Baz Cha", "Baz Char", "Baz Chart", "Baz Chart ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new ChartType(getEntityClient()), query); - assertTrue(result.getEntities().size() == 2, - String.format("Expected 2 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testDatasetAutoComplete() { - List.of("excess", "excess_", "excess_d", "excess_de", "excess_death", "excess_deaths", "excess_deaths_d", - "excess_deaths_de", "excess_deaths_der", "excess_deaths_derived") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new DatasetType(getEntityClient()), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected >= 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testContainerAutoComplete() { - List.of("cont", "container", "container-a", "container-auto", "container-autocomp", "container-autocomp-te", - "container-autocomp-test") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new ContainerType(getEntityClient()), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected >= 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testGroupAutoComplete() { - List.of("T", "Te", "Tes", "Test ", "Test G", "Test Gro", "Test Group ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new CorpGroupType(getEntityClient()), query); - assertTrue(result.getEntities().size() == 1, - String.format("Expected 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testUserAutoComplete() { - List.of("D", "Da", "Dat", "Data ", "Data H", "Data Hu", "Data Hub", "Data Hub ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new CorpUserType(getEntityClient(), null), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected at least 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", "big query"); + List<String> queryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(queryTokens, List.of("big query", "big", "query", "bigquery")); + + List<String> testSet = List.of("bigquery", "big query"); + List<SearchResult> results = + testSet.stream() + .map( + query -> { + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + "Expected search results for: " + query); + return result; + }) + .collect(Collectors.toList()); - @Test - public void testSmokeTestQueries() { - Map<String, Integer> expectedFulltextMinimums = Map.of( - "sample", 3, - "covid", 2, - "\"raw_orders\"", 6, - STRUCTURED_QUERY_PREFIX + "sample", 3, - STRUCTURED_QUERY_PREFIX + 
"\"sample\"", 2, - STRUCTURED_QUERY_PREFIX + "covid", 2, - STRUCTURED_QUERY_PREFIX + "\"raw_orders\"", 1 - ); - - Map<String, SearchResult> results = expectedFulltextMinimums.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedFulltextMinimums.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s fulltext results, expected %s results.", key, actualCount, - expectedCount)); + assertEquals( + results.stream().map(r -> r.getEntities().size()).distinct().count(), + 1, + String.format( + "Expected all result counts (%s) to match after synonyms. %s", results, testSet)); + Assert.assertArrayEquals( + results.get(0).getEntities().stream() + .map(e -> e.getEntity().toString()) + .sorted() + .toArray(String[]::new), + results.get(1).getEntities().stream() + .map(e -> e.getEntity().toString()) + .sorted() + .toArray(String[]::new)); + } + + @Test + public void testTokenization() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", "my_table"); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("my_tabl", "tabl"), String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", "my_table"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("my_tabl", "tabl"), String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationWithNumber() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "word_delimited", + "harshal-playground-306419.test_schema.austin311_derived"); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "austin311_deriv", + "austin311", + "deriv"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "harshal-playground-306419.test_schema.austin311_derived"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "austin311_deriv", + "austin311", + "deriv"), + String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationQuote() throws IOException { + String testQuery = "\"test2\""; + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", testQuery); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. 
Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", testQuery); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationQuoteUnderscore() throws IOException { + String testQuery = "\"raw_orders\""; + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", testQuery); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of("raw_orders", "raw_ord", "raw", "order"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", testQuery); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of("raw_orders", "raw_ord", "raw", "order"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", testQuery); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("raw_orders"), String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationDataPlatform() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.excess_deaths_derived,PROD)"); + List<String> tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "dataset", + "dataplatform", + "data platform", + "bigquery", + "big", + "query", + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "excess_deaths_deriv", + "excess", + "death", + "deriv", + "prod", + "production"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset-ac611929-c3ac-4b92-aafb-f4603ddb408a,PROD)"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "dataset", + "dataplatform", + "data platform", + "hive", + "samplehivedataset-ac611929-c3ac-4b92-aafb-f4603ddb408a", + "samplehivedataset", + "ac611929", + "c3ac", + "4b92", + "aafb", + "f4603ddb408a", + "prod", + "production"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:test_rollback,rollback_test_dataset,TEST)"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "dataset", + "dataplatform", + "data platform", + "test_rollback", + "test", + "rollback", + "rollback_test_dataset"), + String.format("Unexpected tokens. 
Found %s", tokens)); + } + + @Test + public void testChartAutoComplete() throws InterruptedException, IOException { + // Two charts exist Baz Chart 1 & Baz Chart 2 + List.of( + "B", + "Ba", + "Baz", + "Baz ", + "Baz C", + "Baz Ch", + "Baz Cha", + "Baz Char", + "Baz Chart", + "Baz Chart ") + .forEach( + query -> { + try { + AutoCompleteResults result = autocomplete(new ChartType(getEntityClient()), query); + assertTrue( + result.getEntities().size() == 2, + String.format( + "Expected 2 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testDatasetAutoComplete() { + List.of( + "excess", + "excess_", + "excess_d", + "excess_de", + "excess_death", + "excess_deaths", + "excess_deaths_d", + "excess_deaths_de", + "excess_deaths_der", + "excess_deaths_derived") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new DatasetType(getEntityClient()), query); + assertTrue( + result.getEntities().size() >= 1, + String.format( + "Expected >= 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testContainerAutoComplete() { + List.of( + "cont", + "container", + "container-a", + "container-auto", + "container-autocomp", + "container-autocomp-te", + "container-autocomp-test") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new ContainerType(getEntityClient()), query); + assertTrue( + result.getEntities().size() >= 1, + String.format( + "Expected >= 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testGroupAutoComplete() { + List.of("T", "Te", "Tes", "Test ", "Test G", "Test Gro", "Test Group ") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new CorpGroupType(getEntityClient()), query); + assertTrue( + result.getEntities().size() == 1, + String.format( + "Expected 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testUserAutoComplete() { + List.of("D", "Da", "Dat", "Data ", "Data H", "Data Hu", "Data Hub", "Data Hub ") + .forEach( + query -> { + try { + AutoCompleteResults result = + autocomplete(new CorpUserType(getEntityClient(), null), query); + assertTrue( + result.getEntities().size() >= 1, + String.format( + "Expected at least 1 results for `%s` found %s", + query, result.getEntities().size())); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Test + public void testSmokeTestQueries() { + Map<String, Integer> expectedFulltextMinimums = + Map.of( + "sample", + 3, + "covid", + 2, + "\"raw_orders\"", + 6, + STRUCTURED_QUERY_PREFIX + "sample", + 3, + STRUCTURED_QUERY_PREFIX + "\"sample\"", + 2, + STRUCTURED_QUERY_PREFIX + "covid", + 2, + STRUCTURED_QUERY_PREFIX + "\"raw_orders\"", + 1); + + Map<String, SearchResult> results = + expectedFulltextMinimums.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); + + results.forEach( + (key, value) -> { + Integer actualCount = value.getEntities().size(); + Integer expectedCount = expectedFulltextMinimums.get(key); + assertSame( + actualCount, + expectedCount, + String.format( + "Search term `%s` has %s 
fulltext results, expected %s results.", + key, actualCount, expectedCount)); }); - Map<String, Integer> expectedStructuredMinimums = Map.of( - "sample", 3, - "covid", 2, - "\"raw_orders\"", 1 - ); - - results = expectedStructuredMinimums.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchStructured(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedStructuredMinimums.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s structured results, expected %s results.", key, actualCount, - expectedCount)); + Map<String, Integer> expectedStructuredMinimums = + Map.of( + "sample", 3, + "covid", 2, + "\"raw_orders\"", 1); + + results = + expectedStructuredMinimums.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> searchStructured(getSearchService(), entry.getKey()))); + + results.forEach( + (key, value) -> { + Integer actualCount = value.getEntities().size(); + Integer expectedCount = expectedStructuredMinimums.get(key); + assertSame( + actualCount, + expectedCount, + String.format( + "Search term `%s` has %s structured results, expected %s results.", + key, actualCount, expectedCount)); }); - } - - @Test - public void testMinNumberLengthLimit() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "data2022.data22" - ); - List<String> expected = List.of("data2022", "data22"); - List<String> actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(actual, expected, - String.format("Expected: %s Actual: %s", expected, actual)); - } - - @Test - public void testUnderscore() throws IOException { - String testQuery = "bad_fraud_id"; - List<String> expected = List.of("bad_fraud_id", "bad", "fraud"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - testQuery - ); - - List<String> actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(actual, expected, - String.format("Analayzer: query_word_delimited Expected: %s Actual: %s", expected, actual)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - testQuery - ); - actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(actual, expected, - String.format("Analyzer: word_delimited Expected: %s Actual: %s", expected, actual)); - - } - - @Test - public void testFacets() { - Set<String> expectedFacets = Set.of("entity", "typeNames", "platform", "origin", "tags"); - SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress"); - expectedFacets.forEach(facet -> { - assertTrue(testResult.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + } + + @Test + public void testMinNumberLengthLimit() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_delimited", "data2022.data22"); + List<String> expected = List.of("data2022", "data22"); + List<String> actual = + 
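// A caution on the assertSame calls above: they compare boxed Integers by reference and
// pass here only because Java caches Integer values in [-128, 127] by default; an expected
// count above 127 would fail even when the numbers are equal. A one-file demonstration:
class AssertSameBoxingDemo {
  public static void main(String[] args) {
    Integer a = 127, b = 127; // inside the Integer cache: same object
    Integer c = 128, d = 128; // outside the cache: distinct objects
    System.out.println((a == b) + " " + (c == d)); // prints: true false
  }
}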
getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(actual, expected, String.format("Expected: %s Actual: %s", expected, actual));
+  }
+
+  @Test
+  public void testUnderscore() throws IOException {
+    String testQuery = "bad_fraud_id";
+    List<String> expected = List.of("bad_fraud_id", "bad", "fraud");
+
+    AnalyzeRequest request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2", "query_word_delimited", testQuery);
+
+    List<String> actual =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        actual,
+        expected,
+        String.format("Analyzer: query_word_delimited Expected: %s Actual: %s", expected, actual));
+
+    request =
+        AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", testQuery);
+    actual =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        actual,
+        expected,
+        String.format("Analyzer: word_delimited Expected: %s Actual: %s", expected, actual));
+  }
+
+  @Test
+  public void testFacets() {
+    Set<String> expectedFacets = Set.of("entity", "typeNames", "platform", "origin", "tags");
+    SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress");
+    expectedFacets.forEach(
+        facet -> {
+          assertTrue(
+              testResult.getMetadata().getAggregations().stream()
+                  .anyMatch(agg -> agg.getName().equals(facet)),
+              String.format(
+                  "Failed to find facet `%s` in %s",
+                  facet,
+                  testResult.getMetadata().getAggregations().stream()
+                      .map(AggregationMetadata::getName)
+                      .collect(Collectors.toList()))));
        });
+    AggregationMetadata entityAggMeta =
+        testResult.getMetadata().getAggregations().stream()
+            .filter(aggMeta -> aggMeta.getName().equals("entity"))
+            .findFirst()
+            
.get(); + Map<String, Long> expectedEntityTypeCounts = new HashMap<>(); + expectedEntityTypeCounts.put("container", 0L); + expectedEntityTypeCounts.put("corpuser", 0L); + expectedEntityTypeCounts.put("corpgroup", 0L); + expectedEntityTypeCounts.put("mlmodel", 0L); + expectedEntityTypeCounts.put("mlfeaturetable", 1L); + expectedEntityTypeCounts.put("mlmodelgroup", 1L); + expectedEntityTypeCounts.put("dataflow", 1L); + expectedEntityTypeCounts.put("glossarynode", 1L); + expectedEntityTypeCounts.put("mlfeature", 0L); + expectedEntityTypeCounts.put("datajob", 2L); + expectedEntityTypeCounts.put("domain", 0L); + expectedEntityTypeCounts.put("tag", 0L); + expectedEntityTypeCounts.put("glossaryterm", 2L); + expectedEntityTypeCounts.put("mlprimarykey", 1L); + expectedEntityTypeCounts.put("dataset", 9L); + expectedEntityTypeCounts.put("chart", 0L); + expectedEntityTypeCounts.put("dashboard", 0L); + assertEquals(entityAggMeta.getAggregations(), expectedEntityTypeCounts); + } + + @Test + public void testNestedAggregation() { + Set<String> expectedFacets = Set.of("platform"); + SearchResult testResult = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult.getMetadata().getAggregations().size(), 1); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - expectedFacets = Set.of("platform", "typeNames", "_entityType", "entity"); - SearchResult testResult2 = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResult2.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResult2.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult2.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + expectedFacets = Set.of("platform", "typeNames", "_entityType", "entity"); + SearchResult testResult2 = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult2.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult2.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult2.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - AggregationMetadata entityTypeAggMeta = testResult2.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("_entityType")).findFirst().get(); - AggregationMetadata entityAggMeta = testResult2.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("entity")).findFirst().get(); - assertEquals(entityTypeAggMeta.getAggregations(), entityAggMeta.getAggregations()); - Map<String, Long> expectedEntityTypeCounts = new HashMap<>(); - expectedEntityTypeCounts.put("container", 0L); - expectedEntityTypeCounts.put("corpuser", 0L); - expectedEntityTypeCounts.put("corpgroup", 0L); - expectedEntityTypeCounts.put("mlmodel", 0L); - expectedEntityTypeCounts.put("mlfeaturetable", 1L); - 
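// The expected-count fixture above uses repeated HashMap.put calls, since Map.of caps out
// at ten key-value pairs. Map.ofEntries can express the same fixture immutably; a sketch
// with a few of the entries (the rest elided):
import java.util.Map;

class ExpectedCountsSketch {
  static final Map<String, Long> EXPECTED =
      Map.ofEntries(
          Map.entry("container", 0L),
          Map.entry("mlfeaturetable", 1L),
          Map.entry("datajob", 2L),
          Map.entry("dataset", 9L));

  public static void main(String[] args) {
    System.out.println(EXPECTED.get("dataset")); // 9
  }
}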
expectedEntityTypeCounts.put("mlmodelgroup", 1L); - expectedEntityTypeCounts.put("dataflow", 1L); - expectedEntityTypeCounts.put("glossarynode", 1L); - expectedEntityTypeCounts.put("mlfeature", 0L); - expectedEntityTypeCounts.put("datajob", 2L); - expectedEntityTypeCounts.put("domain", 0L); - expectedEntityTypeCounts.put("tag", 0L); - expectedEntityTypeCounts.put("glossaryterm", 2L); - expectedEntityTypeCounts.put("mlprimarykey", 1L); - expectedEntityTypeCounts.put("dataset", 9L); - expectedEntityTypeCounts.put("chart", 0L); - expectedEntityTypeCounts.put("dashboard", 0L); - assertEquals(entityTypeAggMeta.getAggregations(), expectedEntityTypeCounts); - - expectedFacets = Set.of("platform", "typeNames", "entity"); - SearchResult testResult3 = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResult3.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResult3.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult3.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + AggregationMetadata entityTypeAggMeta = + testResult2.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("_entityType")) + .findFirst() + .get(); + AggregationMetadata entityAggMeta = + testResult2.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("entity")) + .findFirst() + .get(); + assertEquals(entityTypeAggMeta.getAggregations(), entityAggMeta.getAggregations()); + Map<String, Long> expectedEntityTypeCounts = new HashMap<>(); + expectedEntityTypeCounts.put("container", 0L); + expectedEntityTypeCounts.put("corpuser", 0L); + expectedEntityTypeCounts.put("corpgroup", 0L); + expectedEntityTypeCounts.put("mlmodel", 0L); + expectedEntityTypeCounts.put("mlfeaturetable", 1L); + expectedEntityTypeCounts.put("mlmodelgroup", 1L); + expectedEntityTypeCounts.put("dataflow", 1L); + expectedEntityTypeCounts.put("glossarynode", 1L); + expectedEntityTypeCounts.put("mlfeature", 0L); + expectedEntityTypeCounts.put("datajob", 2L); + expectedEntityTypeCounts.put("domain", 0L); + expectedEntityTypeCounts.put("tag", 0L); + expectedEntityTypeCounts.put("glossaryterm", 2L); + expectedEntityTypeCounts.put("mlprimarykey", 1L); + expectedEntityTypeCounts.put("dataset", 9L); + expectedEntityTypeCounts.put("chart", 0L); + expectedEntityTypeCounts.put("dashboard", 0L); + assertEquals(entityTypeAggMeta.getAggregations(), expectedEntityTypeCounts); + + expectedFacets = Set.of("platform", "typeNames", "entity"); + SearchResult testResult3 = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult3.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult3.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult3.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - AggregationMetadata entityTypeAggMeta3 = testResult3.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("_entityType")).findFirst().get(); - AggregationMetadata entityAggMeta3 = testResult3.getMetadata().getAggregations().stream().filter( - aggMeta -> 
aggMeta.getName().equals("entity")).findFirst().get(); - assertEquals(entityTypeAggMeta3.getAggregations(), entityAggMeta3.getAggregations()); - assertEquals(entityTypeAggMeta3.getAggregations(), expectedEntityTypeCounts); - - String singleNestedFacet = String.format("_entityType%sowners", AGGREGATION_SEPARATOR_CHAR); - expectedFacets = Set.of(singleNestedFacet); - SearchResult testResultSingleNested = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResultSingleNested.getMetadata().getAggregations().size(), 1); - Map<String, Long> expectedNestedFacetCounts = new HashMap<>(); - expectedNestedFacetCounts.put("datajob␞urn:li:corpuser:datahub", 2L); - expectedNestedFacetCounts.put("glossarynode␞urn:li:corpuser:jdoe", 1L); - expectedNestedFacetCounts.put("dataflow␞urn:li:corpuser:datahub", 1L); - expectedNestedFacetCounts.put("mlfeaturetable", 1L); - expectedNestedFacetCounts.put("mlmodelgroup", 1L); - expectedNestedFacetCounts.put("glossarynode", 1L); - expectedNestedFacetCounts.put("dataflow", 1L); - expectedNestedFacetCounts.put("mlmodelgroup␞urn:li:corpuser:some-user", 1L); - expectedNestedFacetCounts.put("datajob", 2L); - expectedNestedFacetCounts.put("glossaryterm␞urn:li:corpuser:jdoe", 2L); - expectedNestedFacetCounts.put("glossaryterm", 2L); - expectedNestedFacetCounts.put("dataset", 9L); - expectedNestedFacetCounts.put("mlprimarykey", 1L); - assertEquals(testResultSingleNested.getMetadata().getAggregations().get(0).getAggregations(), expectedNestedFacetCounts); - - expectedFacets = Set.of("platform", singleNestedFacet, "typeNames", "origin"); - SearchResult testResultNested = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResultNested.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResultNested.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResultNested.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + AggregationMetadata entityTypeAggMeta3 = + testResult3.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("_entityType")) + .findFirst() + .get(); + AggregationMetadata entityAggMeta3 = + testResult3.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("entity")) + .findFirst() + .get(); + assertEquals(entityTypeAggMeta3.getAggregations(), entityAggMeta3.getAggregations()); + assertEquals(entityTypeAggMeta3.getAggregations(), expectedEntityTypeCounts); + + String singleNestedFacet = String.format("_entityType%sowners", AGGREGATION_SEPARATOR_CHAR); + expectedFacets = Set.of(singleNestedFacet); + SearchResult testResultSingleNested = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResultSingleNested.getMetadata().getAggregations().size(), 1); + Map<String, Long> expectedNestedFacetCounts = new HashMap<>(); + expectedNestedFacetCounts.put("datajob␞urn:li:corpuser:datahub", 2L); + expectedNestedFacetCounts.put("glossarynode␞urn:li:corpuser:jdoe", 1L); + expectedNestedFacetCounts.put("dataflow␞urn:li:corpuser:datahub", 1L); + expectedNestedFacetCounts.put("mlfeaturetable", 1L); + expectedNestedFacetCounts.put("mlmodelgroup", 1L); + expectedNestedFacetCounts.put("glossarynode", 1L); + expectedNestedFacetCounts.put("dataflow", 1L); + 
expectedNestedFacetCounts.put("mlmodelgroup␞urn:li:corpuser:some-user", 1L); + expectedNestedFacetCounts.put("datajob", 2L); + expectedNestedFacetCounts.put("glossaryterm␞urn:li:corpuser:jdoe", 2L); + expectedNestedFacetCounts.put("glossaryterm", 2L); + expectedNestedFacetCounts.put("dataset", 9L); + expectedNestedFacetCounts.put("mlprimarykey", 1L); + assertEquals( + testResultSingleNested.getMetadata().getAggregations().get(0).getAggregations(), + expectedNestedFacetCounts); + + expectedFacets = Set.of("platform", singleNestedFacet, "typeNames", "origin"); + SearchResult testResultNested = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResultNested.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResultNested.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResultNested.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - List<AggregationMetadata> expectedNestedAgg = testResultNested.getMetadata().getAggregations().stream().filter( - agg -> agg.getName().equals(singleNestedFacet)).collect(Collectors.toList()); - assertEquals(expectedNestedAgg.size(), 1); - AggregationMetadata nestedAgg = expectedNestedAgg.get(0); - assertEquals(nestedAgg.getDisplayName(), String.format("Type%sOwned By", AGGREGATION_SEPARATOR_CHAR)); - } - - @Test - public void testPartialUrns() throws IOException { - Set<String> expectedQueryTokens = Set.of("dataplatform", "data platform", "samplehdfsdataset", "prod", "production"); - Set<String> expectedIndexTokens = Set.of("dataplatform", "data platform", "hdfs", "samplehdfsdataset", "prod", "production"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - List<String> searchQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedQueryTokens.forEach(expected -> assertTrue(searchQueryTokens.contains(expected), + List<AggregationMetadata> expectedNestedAgg = + testResultNested.getMetadata().getAggregations().stream() + .filter(agg -> agg.getName().equals(singleNestedFacet)) + .collect(Collectors.toList()); + assertEquals(expectedNestedAgg.size(), 1); + AggregationMetadata nestedAgg = expectedNestedAgg.get(0); + assertEquals( + nestedAgg.getDisplayName(), String.format("Type%sOwned By", AGGREGATION_SEPARATOR_CHAR)); + } + + @Test + public void testPartialUrns() throws IOException { + Set<String> expectedQueryTokens = + Set.of("dataplatform", "data platform", "samplehdfsdataset", "prod", "production"); + Set<String> expectedIndexTokens = + Set.of("dataplatform", "data platform", "hdfs", "samplehdfsdataset", "prod", "production"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "query_urn_component", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + List<String> searchQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedQueryTokens.forEach( + expected -> + assertTrue( + searchQueryTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchQueryTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - 
":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - List<String> searchIndexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedIndexTokens.forEach(expected -> assertTrue(searchIndexTokens.contains(expected), + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + List<String> searchIndexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedIndexTokens.forEach( + expected -> + assertTrue( + searchIndexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchIndexTokens))); - } - - @Test - public void testPartialUnderscoreUrns() throws IOException { - String testQuery = ":(urn:li:dataPlatform:hdfs,party_email,PROD)"; - Set<String> expectedQueryTokens = Set.of("dataplatform", "data platform", "hdfs", "party_email", "parti", - "email", "prod", "production"); - Set<String> expectedIndexTokens = Set.of("dataplatform", "data platform", "hdfs", "party_email", "parti", - "email", "prod", "production"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - testQuery - ); - List<String> searchQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedQueryTokens.forEach(expected -> assertTrue(searchQueryTokens.contains(expected), + } + + @Test + public void testPartialUnderscoreUrns() throws IOException { + String testQuery = ":(urn:li:dataPlatform:hdfs,party_email,PROD)"; + Set<String> expectedQueryTokens = + Set.of( + "dataplatform", + "data platform", + "hdfs", + "party_email", + "parti", + "email", + "prod", + "production"); + Set<String> expectedIndexTokens = + Set.of( + "dataplatform", + "data platform", + "hdfs", + "party_email", + "parti", + "email", + "prod", + "production"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", testQuery); + List<String> searchQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedQueryTokens.forEach( + expected -> + assertTrue( + searchQueryTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchQueryTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - testQuery - ); - List<String> searchIndexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedIndexTokens.forEach(expected -> assertTrue(searchIndexTokens.contains(expected), + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", testQuery); + List<String> searchIndexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedIndexTokens.forEach( + expected -> + assertTrue( + searchIndexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchIndexTokens))); - } - - @Test - public void testScrollAcrossEntities() throws IOException { - String query = "logging_events"; - final int batchSize = 1; - int totalResults = 0; - String scrollId = null; - do { - ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId); - int numResults = result.hasEntities() ? 
result.getEntities().size() : 0; - assertTrue(numResults <= batchSize); - totalResults += numResults; - scrollId = result.getScrollId(); - } while (scrollId != null); - // expect 8 total matching results - assertEquals(totalResults, 8); - } - - @Test - public void testSearchAcrossMultipleEntities() { - String query = "logging_events"; - SearchResult result = search(getSearchService(), query); - assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 4); - result = search(getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 4); - } - - @Test - public void testQuotedAnalyzer() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"party_email\"" - ); - List<String> searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("party_email"), searchQuotedQueryTokens, String.format("Actual %s", searchQuotedQueryTokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test2\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test2"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"party_email\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("party_email"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test2\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test2"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test_BYTES_LIST_feature\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test_bytes_list_feature"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - "test_BYTES_LIST_feature" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertTrue(searchQuotedQueryTokens.contains("test_bytes_list_featur")); - } - - @Test - public void testFragmentUrns() { - List<String> testSet = List.of( - "hdfs,SampleHdfsDataset,PROD", - "hdfs,SampleHdfsDataset", - "SampleHdfsDataset", - "(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", - "urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD", - "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", - ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - - testSet.forEach(query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected partial urn search results", query)); - 
assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); + } + + @Test + public void testScrollAcrossEntities() throws IOException { + String query = "logging_events"; + final int batchSize = 1; + int totalResults = 0; + String scrollId = null; + do { + ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId); + int numResults = result.hasEntities() ? result.getEntities().size() : 0; + assertTrue(numResults <= batchSize); + totalResults += numResults; + scrollId = result.getScrollId(); + } while (scrollId != null); + // expect 8 total matching results + assertEquals(totalResults, 8); + } + + @Test + public void testSearchAcrossMultipleEntities() { + String query = "logging_events"; + SearchResult result = search(getSearchService(), query); + assertEquals((int) result.getNumEntities(), 8); + result = search(getSearchService(), List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 8); + result = search(getSearchService(), List.of(DATASET_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 4); + result = search(getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 4); + } + + @Test + public void testQuotedAnalyzer() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"party_email\""); + List<String> searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + List.of("party_email"), + searchQuotedQueryTokens, + String.format("Actual %s", searchQuotedQueryTokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", "\"test2\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test2"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"party_email\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("party_email"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", "\"test2\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test2"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"test_BYTES_LIST_feature\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test_bytes_list_feature"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", "test_BYTES_LIST_feature"); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertTrue(searchQuotedQueryTokens.contains("test_bytes_list_featur")); + } + + @Test + public void testFragmentUrns() { + List<String> testSet = + List.of( + "hdfs,SampleHdfsDataset,PROD", + "hdfs,SampleHdfsDataset", + "SampleHdfsDataset", + 
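// testScrollAcrossEntities above pages through results with a do/while on scrollId. The
// same idiom as a reusable counter; a sketch where ScrollResult is the DataHub search
// class already used in the test and `fetch` stands in for
// scroll(getSearchService(), query, batchSize, scrollId):
static int countViaScroll(java.util.function.Function<String, ScrollResult> fetch) {
  int total = 0;
  String scrollId = null;
  do {
    ScrollResult page = fetch.apply(scrollId);
    total += page.hasEntities() ? page.getEntities().size() : 0;
    scrollId = page.getScrollId();
  } while (scrollId != null);
  return total;
}
// usage: assertEquals(countViaScroll(id -> scroll(getSearchService(), "logging_events", 1, id)), 8);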
"(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", + "urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD", + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + + testSet.forEach( + query -> { + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected partial urn search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); }); - } - - @Test - public void testPlatformTest() { - List<String> testFields = List.of("platform.keyword", "platform"); - final String testPlatform = "urn:li:dataPlatform:dbt"; - - // Ensure backend code path works as expected - List<SearchResult> results = testFields.stream() - .map(fieldName -> { - final String query = String.format("%s:%s", fieldName, testPlatform.replaceAll(":", "\\\\:")); - SearchResult result = searchStructured(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - return result; + } + + @Test + public void testPlatformTest() { + List<String> testFields = List.of("platform.keyword", "platform"); + final String testPlatform = "urn:li:dataPlatform:dbt"; + + // Ensure backend code path works as expected + List<SearchResult> results = + testFields.stream() + .map( + fieldName -> { + final String query = + String.format("%s:%s", fieldName, testPlatform.replaceAll(":", "\\\\:")); + SearchResult result = searchStructured(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format( + "%s - Expected search results to include matched fields", query)); + return result; }) - .collect(Collectors.toList()); - - IntStream.range(0, testFields.size()).forEach(idx -> { - assertEquals(results.get(idx).getEntities().size(), 9, - String.format("Search results for fields `%s` != 9", testFields.get(idx))); - }); + .collect(Collectors.toList()); - // Construct problematic search entity query - List<Filter> testFilters = testFields.stream() - .map(fieldName -> { - Filter filter = new Filter(); - ArrayList<Criterion> criteria = new ArrayList<>(); - Criterion hasPlatformCriterion = new Criterion().setField(fieldName).setCondition(Condition.EQUAL).setValue(testPlatform); - criteria.add(hasPlatformCriterion); - filter.setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); - return filter; - }).collect(Collectors.toList()); - - // Test variations of fulltext flags - for (Boolean fulltextFlag : List.of(true, false)) { - - // Test field variations with/without .keyword - List<SearchResult> entityClientResults = testFilters.stream().map(filter -> { - try { - return getEntityClient().search("dataset", "*", filter, null, 0, 100, - AUTHENTICATION, new SearchFlags().setFulltext(fulltextFlag)); - } catch (RemoteInvocationException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toList()); - - IntStream.range(0, 
testFields.size()).forEach(idx -> { - assertEquals(entityClientResults.get(idx).getEntities().size(), 9, - String.format("Search results for entityClient fields (fulltextFlag: %s): `%s` != 9", fulltextFlag, testFields.get(idx))); + IntStream.range(0, testFields.size()) + .forEach( + idx -> { + assertEquals( + results.get(idx).getEntities().size(), + 9, + String.format("Search results for fields `%s` != 9", testFields.get(idx))); }); - } - } - - @Test - public void testStructQueryFieldMatch() { - String query = STRUCTURED_QUERY_PREFIX + "name: customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 1); - } - - @Test - public void testStructQueryFieldPrefixMatch() { - String query = STRUCTURED_QUERY_PREFIX + "name: customers*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 2); - } - - @Test - public void testStructQueryCustomPropertiesKeyPrefix() { - String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 9); - } - - @Test - public void testStructQueryCustomPropertiesMatch() { - String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=model"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + // Construct problematic search entity query + List<Filter> testFilters = + testFields.stream() + .map( + fieldName -> { + Filter filter = new Filter(); + ArrayList<Criterion> criteria = new ArrayList<>(); + Criterion hasPlatformCriterion = + new Criterion() + .setField(fieldName) + .setCondition(Condition.EQUAL) + .setValue(testPlatform); + criteria.add(hasPlatformCriterion); + filter.setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); + return filter; + }) + .collect(Collectors.toList()); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); + // Test variations of fulltext flags + for (Boolean fulltextFlag : List.of(true, false)) { - assertEquals(result.getEntities().size(), 5); + // Test field variations with/without .keyword + List<SearchResult> entityClientResults = + testFilters.stream() + .map( + filter -> { + try { + return getEntityClient() + .search( + "dataset", + "*", + filter, + null, + 
0, + 100, + AUTHENTICATION, + new SearchFlags().setFulltext(fulltextFlag)); + } catch (RemoteInvocationException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList()); + + IntStream.range(0, testFields.size()) + .forEach( + idx -> { + assertEquals( + entityClientResults.get(idx).getEntities().size(), + 9, + String.format( + "Search results for entityClient fields (fulltextFlag: %s): `%s` != 9", + fulltextFlag, testFields.get(idx))); + }); } - - @Test - public void testCustomPropertiesQuoted() { - Map<String, Integer> expectedResults = Map.of( - "\"materialization=view\"", 3, - STRUCTURED_QUERY_PREFIX + "customProperties:\"materialization=view\"", 3 - ); - - Map<String, SearchResult> results = expectedResults.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedResults.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s fulltext results, expected %s results.", key, actualCount, - expectedCount)); + } + + @Test + public void testStructQueryFieldMatch() { + String query = STRUCTURED_QUERY_PREFIX + "name: customers"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + } + + @Test + public void testStructQueryFieldPrefixMatch() { + String query = STRUCTURED_QUERY_PREFIX + "name: customers*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 2); + } + + @Test + public void testStructQueryCustomPropertiesKeyPrefix() { + String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 9); + } + + @Test + public void testStructQueryCustomPropertiesMatch() { + String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=model"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 5); + } + + @Test + public void testCustomPropertiesQuoted() { + Map<String, Integer> expectedResults = 
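+        // Both the bare quoted term and the structured customProperties query
+        // are expected to return the same three matches.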
+ Map.of( + "\"materialization=view\"", + 3, + STRUCTURED_QUERY_PREFIX + "customProperties:\"materialization=view\"", + 3); + + Map<String, SearchResult> results = + expectedResults.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); + + results.forEach( + (key, value) -> { + Integer actualCount = value.getEntities().size(); + Integer expectedCount = expectedResults.get(key); + assertSame( + actualCount, + expectedCount, + String.format( + "Search term `%s` has %s fulltext results, expected %s results.", + key, actualCount, expectedCount)); }); + } + + @Test + public void testStructQueryFieldPaths() { + String query = STRUCTURED_QUERY_PREFIX + "fieldPaths: customer_id"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 3); + } + + @Test + public void testStructQueryBoolean() { + String query = + STRUCTURED_QUERY_PREFIX + + "editedFieldTags:urn\\:li\\:tag\\:Legacy OR tags:urn\\:li\\:tag\\:testTag"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 2); + + query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy"; + result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + + query = STRUCTURED_QUERY_PREFIX + "tags:urn\\:li\\:tag\\:testTag"; + result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + } + + @Test + public void testStructQueryBrowsePaths() { + String query = STRUCTURED_QUERY_PREFIX + "browsePaths:*/dbt/*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 9); + } + + @Test + public void testOr() { + String query = "stg_customers | logging_events"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + 
result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 9); + + query = "stg_customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 1); + + query = "logging_events"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testNegate() { + String query = "logging_events -bckp"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 7); + + query = "logging_events"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testPrefix() { + String query = "bigquery"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + + query = "big*"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testParens() { + String query = "dbt | (bigquery + covid19)"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results 
to include matched fields", query)); + assertEquals(result.getEntities().size(), 11); + + query = "dbt"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 9); + + query = "bigquery + covid19"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 2); + + query = "bigquery"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + + query = "covid19"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 2); + } + + @Test + public void testGram() { + String query = "jaffle shop customers"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", + "Expected exact match in 1st position"); + + query = "shop customers source"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers_source,PROD)", + "Expected ngram match in 1st position"); + + query = "jaffle shop stg customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)", + "Expected ngram match in 1st position"); + + query = "jaffle shop transformers customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + 
"urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.transformers_customers,PROD)", + "Expected ngram match in 1st position"); + + query = "shop raw customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.raw_customers,PROD)", + "Expected ngram match in 1st position"); + } + + @Test + public void testPrefixVsExact() { + String query = "\"customers\""; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 10); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", + "Expected exact match and 1st position"); + } + + // Note: This test can fail if not using .keyword subfields (check for possible query builder + // regression) + @Test + public void testPrefixVsExactCaseSensitivity() { + List<String> insensitiveExactMatches = + List.of("testExactMatchCase", "testexactmatchcase", "TESTEXACTMATCHCASE"); + for (String query : insensitiveExactMatches) { + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), insensitiveExactMatches.size()); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", + "Expected exact match as first match with matching case"); } - - @Test - public void testStructQueryFieldPaths() { - String query = STRUCTURED_QUERY_PREFIX + "fieldPaths: customer_id"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 3); - } - - @Test - public void testStructQueryBoolean() { - String query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy OR tags:urn\\:li\\:tag\\:testTag"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 2); - - query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy"; - result = 
searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 1); - - query = STRUCTURED_QUERY_PREFIX + "tags:urn\\:li\\:tag\\:testTag"; - result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 1); - } - - @Test - public void testStructQueryBrowsePaths() { - String query = STRUCTURED_QUERY_PREFIX + "browsePaths:*/dbt/*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 9); - } - - @Test - public void testOr() { - String query = "stg_customers | logging_events"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 9); - - query = "stg_customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 1); - - query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testNegate() { - String query = "logging_events -bckp"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 7); - - query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - 
assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testPrefix() { - String query = "bigquery"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - - query = "big*"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testParens() { - String query = "dbt | (bigquery + covid19)"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 11); - - query = "dbt"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 9); - - query = "bigquery + covid19"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 2); - - query = "bigquery"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - - query = "covid19"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 2); - } - @Test - public void testGram() { - String query = "jaffle shop customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - 
assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", - "Expected exact match in 1st position"); - - query = "shop customers source"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers_source,PROD)", - "Expected ngram match in 1st position"); - - query = "jaffle shop stg customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)", - "Expected ngram match in 1st position"); - - query = "jaffle shop transformers customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.transformers_customers,PROD)", - "Expected ngram match in 1st position"); - - query = "shop raw customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.raw_customers,PROD)", - "Expected ngram match in 1st position"); - } - - @Test - public void testPrefixVsExact() { - String query = "\"customers\""; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", - "Expected exact match and 1st position"); - } - - // Note: This test can fail if not using .keyword subfields (check for possible query builder regression) - @Test - public void testPrefixVsExactCaseSensitivity() { - List<String> insensitiveExactMatches = List.of("testExactMatchCase", "testexactmatchcase", "TESTEXACTMATCHCASE"); - for (String query : insensitiveExactMatches) { - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 
insensitiveExactMatches.size()); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", - "Expected exact match as first match with matching case"); - } - } - - @Test - public void testColumnExactMatch() { - String query = "unit_data"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertTrue(result.getEntities().size() > 2, - String.format("%s - Expected search results to have at least two results", query)); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", - "Expected table name exact match first"); - - query = "special_column_only_present_here_info"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertTrue(result.getEntities().size() > 2, - String.format("%s - Expected search results to have at least two results", query)); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:testOnly," + "important_units" + ",PROD)", - "Expected table with column name exact match first"); - } - - @Test - public void testSortOrdering() { - String query = "unit_data"; - SortCriterion criterion = new SortCriterion().setOrder(SortOrder.ASCENDING).setField("lastOperationTime"); - SearchResult result = getSearchService().searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, criterion, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true), null); - assertTrue(result.getEntities().size() > 2, - String.format("%s - Expected search results to have at least two results", query)); - } - - private Stream<AnalyzeResponse.AnalyzeToken> getTokens(AnalyzeRequest request) throws IOException { - return getSearchClient().indices().analyze(request, RequestOptions.DEFAULT).getTokens().stream(); - } + } + + @Test + public void testColumnExactMatch() { + String query = "unit_data"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertTrue( + result.getEntities().size() > 2, + String.format("%s - Expected search results to have at least two results", query)); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", + "Expected table name exact match first"); + + query = "special_column_only_present_here_info"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + 
result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertTrue( + result.getEntities().size() > 2, + String.format("%s - Expected search results to have at least two results", query)); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:testOnly," + "important_units" + ",PROD)", + "Expected table with column name exact match first"); + } + + @Test + public void testSortOrdering() { + String query = "unit_data"; + SortCriterion criterion = + new SortCriterion().setOrder(SortOrder.ASCENDING).setField("lastOperationTime"); + SearchResult result = + getSearchService() + .searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + criterion, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true), + null); + assertTrue( + result.getEntities().size() > 2, + String.format("%s - Expected search results to have at least two results", query)); + } + + private Stream<AnalyzeResponse.AnalyzeToken> getTokens(AnalyzeRequest request) + throws IOException { + return getSearchClient() + .indices() + .analyze(request, RequestOptions.DEFAULT) + .getTokens() + .stream(); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java index 4472af339c074..2c395875a1d6b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java @@ -1,10 +1,19 @@ package com.linkedin.metadata.search.indexbuilder; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.systemmetadata.SystemMetadataMappingsBuilder; import com.linkedin.metadata.version.GitVersion; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.opensearch.OpenSearchException; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; @@ -20,198 +29,270 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; +public abstract class IndexBuilderTestBase extends AbstractTestNGSpringContextTests { -import static org.testng.Assert.*; - -abstract public class IndexBuilderTestBase extends AbstractTestNGSpringContextTests { + @Nonnull + protected abstract RestHighLevelClient getSearchClient(); - @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + private static IndicesClient _indexClient; + private static final String TEST_INDEX_NAME = "esindex_builder_test"; + private static ESIndexBuilder testDefaultBuilder; - private static IndicesClient _indexClient; - private static final String TEST_INDEX_NAME = "esindex_builder_test"; - private static ESIndexBuilder testDefaultBuilder; + @BeforeClass + public void setup() { 
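+    // Shared fixture state for the tests below: cache the low-level indices
+    // client once, then build a baseline ESIndexBuilder (1 shard, 0 replicas,
+    // 0 retries, 0s refresh interval) for the reindex tests to compare against.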
+ _indexClient = getSearchClient().indices(); + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + testDefaultBuilder = + new ESIndexBuilder( + getSearchClient(), + 1, + 0, + 0, + 0, + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion); + } + @BeforeMethod + public static void wipe() throws Exception { + try { + _indexClient + .getAlias(new GetAliasesRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT) + .getAliases() + .keySet() + .forEach( + index -> { + try { + _indexClient.delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); - @BeforeClass - public void setup() { - _indexClient = getSearchClient().indices(); - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - testDefaultBuilder = new ESIndexBuilder(getSearchClient(), 1, 0, 0, - 0, Map.of(), false, false, - new ElasticSearchConfiguration(), gitVersion); + _indexClient.delete(new DeleteIndexRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT); + } catch (OpenSearchException exception) { + if (exception.status() != RestStatus.NOT_FOUND) { + throw exception; + } } + } - @BeforeMethod - public static void wipe() throws Exception { - try { - _indexClient.getAlias(new GetAliasesRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT) - .getAliases().keySet().forEach(index -> { - try { - _indexClient.delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - - _indexClient.delete(new DeleteIndexRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT); - } catch (OpenSearchException exception) { - if (exception.status() != RestStatus.NOT_FOUND) { - throw exception; - } - } - } + public static GetIndexResponse getTestIndex() throws IOException { + return _indexClient.get( + new GetIndexRequest(TEST_INDEX_NAME).includeDefaults(true), RequestOptions.DEFAULT); + } - public static GetIndexResponse getTestIndex() throws IOException { - return _indexClient.get(new GetIndexRequest(TEST_INDEX_NAME).includeDefaults(true), RequestOptions.DEFAULT); - } + @Test + public void testESIndexBuilderCreation() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder customIndexBuilder = + new ESIndexBuilder( + getSearchClient(), + 2, + 0, + 1, + 0, + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion); + customIndexBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + GetIndexResponse resp = getTestIndex(); - @Test - public void testESIndexBuilderCreation() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder customIndexBuilder = new ESIndexBuilder(getSearchClient(), 2, 0, 1, - 0, Map.of(), false, false, - new ElasticSearchConfiguration(), gitVersion); - customIndexBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - GetIndexResponse resp = getTestIndex(); - - assertEquals("2", resp.getSetting(TEST_INDEX_NAME, "index.number_of_shards")); - assertEquals("0", resp.getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals("0s", resp.getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - } + assertEquals("2", resp.getSetting(TEST_INDEX_NAME, "index.number_of_shards")); + assertEquals("0", resp.getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals("0s", resp.getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + } - @Test - public void 
testMappingReindex() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder enabledMappingReindex = new ESIndexBuilder(getSearchClient(), 1, 0, 0, - 0, Map.of(), false, true, - new ElasticSearchConfiguration(), gitVersion); + @Test + public void testMappingReindex() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder enabledMappingReindex = + new ESIndexBuilder( + getSearchClient(), + 1, + 0, + 0, + 0, + Map.of(), + false, + true, + new ElasticSearchConfiguration(), + gitVersion); - // No mappings - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + // No mappings + enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - // add new mappings - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + // add new mappings + enabledMappingReindex.buildIndex( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); - String afterAddedMappingCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - assertEquals(beforeCreationDate, afterAddedMappingCreationDate, "Expected no reindex on *adding* mappings"); + String afterAddedMappingCreationDate = + getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + assertEquals( + beforeCreationDate, + afterAddedMappingCreationDate, + "Expected no reindex on *adding* mappings"); - // change mappings - Map<String, Object> newProps = ((Map<String, Object>) SystemMetadataMappingsBuilder.getMappings().get("properties")) - .entrySet().stream() - .map(m -> !m.getKey().equals("urn") ? m - : Map.entry("urn", ImmutableMap.<String, Object>builder().put("type", "text").build())) + // change mappings + Map<String, Object> newProps = + ((Map<String, Object>) SystemMetadataMappingsBuilder.getMappings().get("properties")) + .entrySet().stream() + .map( + m -> + !m.getKey().equals("urn") + ? 
m + : Map.entry( + "urn", + ImmutableMap.<String, Object>builder().put("type", "text").build())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of("properties", newProps), Map.of()); + enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of("properties", newProps), Map.of()); - assertTrue(Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), - "Expected original index to be replaced with alias"); + assertTrue( + Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), + "Expected original index to be replaced with alias"); - Map.Entry<String, List<AliasMetadata>> newIndex = getTestIndex().getAliases().entrySet().stream() - .filter(e -> e.getValue().stream().anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) - .findFirst().get(); - String afterChangedMappingCreationDate = getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); - assertNotEquals(beforeCreationDate, afterChangedMappingCreationDate, "Expected reindex on *changing* mappings"); - } + Map.Entry<String, List<AliasMetadata>> newIndex = + getTestIndex().getAliases().entrySet().stream() + .filter( + e -> + e.getValue().stream() + .anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) + .findFirst() + .get(); + String afterChangedMappingCreationDate = + getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); + assertNotEquals( + beforeCreationDate, + afterChangedMappingCreationDate, + "Expected reindex on *changing* mappings"); + } + + @Test + public void testSettingsNumberOfShardsReindex() throws Exception { + // Set test defaults + testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertEquals("1", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_shards")); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + + String expectedShards = "5"; + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder changedShardBuilder = + new ESIndexBuilder( + getSearchClient(), + Integer.parseInt(expectedShards), + testDefaultBuilder.getNumReplicas(), + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion); + + // add new shard setting + changedShardBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertTrue( + Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), + "Expected original index to be replaced with alias"); + + Map.Entry<String, List<AliasMetadata>> newIndex = + getTestIndex().getAliases().entrySet().stream() + .filter( + e -> + e.getValue().stream() + .anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) + .findFirst() + .get(); - @Test - public void testSettingsNumberOfShardsReindex() throws Exception { - // Set test defaults - testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertEquals("1", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_shards")); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - String expectedShards = "5"; - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder changedShardBuilder = new ESIndexBuilder(getSearchClient(), - Integer.parseInt(expectedShards), + String afterCreationDate = 
getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); + assertNotEquals( + beforeCreationDate, afterCreationDate, "Expected reindex to result in different timestamp"); + assertEquals( + expectedShards, + getTestIndex().getSetting(newIndex.getKey(), "index.number_of_shards"), + "Expected number of shards: " + expectedShards); + } + + @Test + public void testSettingsNoReindex() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + List<ESIndexBuilder> noReindexBuilders = + List.of( + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas() + 1, + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas(), + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds() + 10, + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards() + 1, testDefaultBuilder.getNumReplicas(), testDefaultBuilder.getNumRetries(), testDefaultBuilder.getRefreshIntervalSeconds(), Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion); - - // add new shard setting - changedShardBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertTrue(Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), - "Expected original index to be replaced with alias"); - - Map.Entry<String, List<AliasMetadata>> newIndex = getTestIndex().getAliases().entrySet().stream() - .filter(e -> e.getValue().stream().anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) - .findFirst().get(); - - String afterCreationDate = getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); - assertNotEquals(beforeCreationDate, afterCreationDate, "Expected reindex to result in different timestamp"); - assertEquals(expectedShards, getTestIndex().getSetting(newIndex.getKey(), "index.number_of_shards"), - "Expected number of shards: " + expectedShards); - } + false, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas() + 1, + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion)); - @Test - public void testSettingsNoReindex() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - List<ESIndexBuilder> noReindexBuilders = List.of( - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas() + 1, - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas(), - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds() + 10, - Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards() + 1, - testDefaultBuilder.getNumReplicas(), - 
testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - false, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas() + 1, - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - false, false, - new ElasticSearchConfiguration(), gitVersion) - ); - - for (ESIndexBuilder builder : noReindexBuilders) { - // Set test defaults - testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertEquals("0", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals("0s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - // build index with builder - builder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertTrue(Arrays.asList(getTestIndex().getIndices()).contains(TEST_INDEX_NAME), - "Expected original index to remain"); - String afterCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - assertEquals(beforeCreationDate, afterCreationDate, "Expected no difference in index timestamp"); - assertEquals(String.valueOf(builder.getNumReplicas()), getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals(builder.getRefreshIntervalSeconds() + "s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - - wipe(); - } - } + for (ESIndexBuilder builder : noReindexBuilders) { + // Set test defaults + testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertEquals("0", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals("0s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + + // build index with builder + builder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertTrue( + Arrays.asList(getTestIndex().getIndices()).contains(TEST_INDEX_NAME), + "Expected original index to remain"); + String afterCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + assertEquals( + beforeCreationDate, afterCreationDate, "Expected no difference in index timestamp"); + assertEquals( + String.valueOf(builder.getNumReplicas()), + getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals( + builder.getRefreshIntervalSeconds() + "s", + getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + + wipe(); + } + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index d9f2f0e5aac94..02bd186ccc183 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -1,16 +1,14 @@ package com.linkedin.metadata.search.indexbuilder; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.TestEntitySpecBuilder; -import java.util.Map; - import com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder; +import java.util.Map; import 
org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class MappingsBuilderTest { @Test @@ -19,14 +17,33 @@ public void testMappingsBuilder() { assertEquals(result.size(), 1); Map<String, Object> properties = (Map<String, Object>) result.get("properties"); assertEquals(properties.size(), 20); - assertEquals(properties.get("urn"), ImmutableMap.of("type", "keyword", + assertEquals( + properties.get("urn"), + ImmutableMap.of( + "type", + "keyword", "fields", - ImmutableMap.of("delimited", - ImmutableMap.of("type", "text", "analyzer", "urn_component", "search_analyzer", "query_urn_component", - "search_quote_analyzer", "quote_analyzer"), - "ngram", - ImmutableMap.of("type", "search_as_you_type", "max_shingle_size", "4", "doc_values", "false", - "analyzer", "partial_urn_component")))); + ImmutableMap.of( + "delimited", + ImmutableMap.of( + "type", + "text", + "analyzer", + "urn_component", + "search_analyzer", + "query_urn_component", + "search_quote_analyzer", + "quote_analyzer"), + "ngram", + ImmutableMap.of( + "type", + "search_as_you_type", + "max_shingle_size", + "4", + "doc_values", + "false", + "analyzer", + "partial_urn_component")))); assertEquals(properties.get("runId"), ImmutableMap.of("type", "keyword")); assertTrue(properties.containsKey("browsePaths")); assertTrue(properties.containsKey("browsePathV2")); @@ -37,24 +54,30 @@ public void testMappingsBuilder() { Map<String, Object> keyPart3FieldSubfields = (Map<String, Object>) keyPart3Field.get("fields"); assertEquals(keyPart3FieldSubfields.size(), 1); assertTrue(keyPart3FieldSubfields.containsKey("keyword")); - Map<String, Object> customPropertiesField = (Map<String, Object>) properties.get("customProperties"); + Map<String, Object> customPropertiesField = + (Map<String, Object>) properties.get("customProperties"); assertEquals(customPropertiesField.get("type"), "keyword"); assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> customPropertiesFieldSubfields = (Map<String, Object>) customPropertiesField.get("fields"); + Map<String, Object> customPropertiesFieldSubfields = + (Map<String, Object>) customPropertiesField.get("fields"); assertEquals(customPropertiesFieldSubfields.size(), 1); assertTrue(customPropertiesFieldSubfields.containsKey("keyword")); // TEXT - Map<String, Object> nestedArrayStringField = (Map<String, Object>) properties.get("nestedArrayStringField"); + Map<String, Object> nestedArrayStringField = + (Map<String, Object>) properties.get("nestedArrayStringField"); assertEquals(nestedArrayStringField.get("type"), "keyword"); assertEquals(nestedArrayStringField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> nestedArrayStringFieldSubfields = (Map<String, Object>) nestedArrayStringField.get("fields"); + Map<String, Object> nestedArrayStringFieldSubfields = + (Map<String, Object>) nestedArrayStringField.get("fields"); assertEquals(nestedArrayStringFieldSubfields.size(), 2); assertTrue(nestedArrayStringFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayStringFieldSubfields.containsKey("keyword")); - Map<String, Object> nestedArrayArrayField = (Map<String, Object>) properties.get("nestedArrayArrayField"); + Map<String, Object> nestedArrayArrayField = + (Map<String, Object>) properties.get("nestedArrayArrayField"); assertEquals(nestedArrayArrayField.get("type"), "keyword"); assertEquals(nestedArrayArrayField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> 
nestedArrayArrayFieldSubfields = (Map<String, Object>) nestedArrayArrayField.get("fields"); + Map<String, Object> nestedArrayArrayFieldSubfields = + (Map<String, Object>) nestedArrayArrayField.get("fields"); assertEquals(nestedArrayArrayFieldSubfields.size(), 2); assertTrue(nestedArrayArrayFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayArrayFieldSubfields.containsKey("keyword")); @@ -77,7 +100,8 @@ public void testMappingsBuilder() { Map<String, Object> textArrayField = (Map<String, Object>) properties.get("textArrayField"); assertEquals(textArrayField.get("type"), "keyword"); assertEquals(textArrayField.get("normalizer"), "keyword_normalizer"); - Map<String, Object> textArrayFieldSubfields = (Map<String, Object>) textArrayField.get("fields"); + Map<String, Object> textArrayFieldSubfields = + (Map<String, Object>) textArrayField.get("fields"); assertEquals(textArrayFieldSubfields.size(), 3); assertTrue(textArrayFieldSubfields.containsKey("delimited")); assertTrue(textArrayFieldSubfields.containsKey("ngram")); @@ -108,7 +132,8 @@ public void testMappingsBuilder() { Map<String, Object> nestedForeignKey = (Map<String, Object>) properties.get("nestedForeignKey"); assertEquals(nestedForeignKey.get("type"), "text"); assertEquals(nestedForeignKey.get("analyzer"), "urn_component"); - Map<String, Object> nestedForeignKeySubfields = (Map<String, Object>) nestedForeignKey.get("fields"); + Map<String, Object> nestedForeignKeySubfields = + (Map<String, Object>) nestedForeignKey.get("fields"); assertEquals(nestedForeignKeySubfields.size(), 2); assertTrue(nestedForeignKeySubfields.containsKey("keyword")); assertTrue(nestedForeignKeySubfields.containsKey("ngram")); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java index 3896ba749e85e..dd30010b08758 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.GoldenTestBase; @@ -11,34 +13,35 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - -@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class GoldenOpenSearchTest extends GoldenTestBase { - @Autowired - @Qualifier("longTailSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; - - - @NotNull - @Override - protected EntityRegistry getEntityRegistry() { - return entityRegistry; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - assertNotNull(searchService); - } + @Autowired + @Qualifier("longTailSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; + + @NotNull + @Override + protected 
EntityRegistry getEntityRegistry() { + return entityRegistry; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + assertNotNull(searchService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java index 312b56364bd91..ef1ed51eb4799 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.search.indexbuilder.IndexBuilderTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; @@ -8,23 +10,19 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class IndexBuilderOpenSearchTest extends IndexBuilderTestBase { - @Autowired - private RestHighLevelClient _searchClient; + @Autowired private RestHighLevelClient _searchClient; - @NotNull - @Override - protected RestHighLevelClient getSearchClient() { - return _searchClient; - } + @NotNull + @Override + protected RestHighLevelClient getSearchClient() { + return _searchClient; + } - @Test - public void initTest() { - assertNotNull(_searchClient); - } + @Test + public void initTest() { + assertNotNull(_searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java index 6fc0677ad6e39..cc17e3287544c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java @@ -12,32 +12,35 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({OpenSearchSuite.class, SearchLineageFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchLineageFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageDataFixtureOpenSearchTest extends LineageDataFixtureTestBase { - @Autowired - @Qualifier("searchLineageSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("searchLineageLineageSearchService") - protected LineageSearchService lineageService; - - @NotNull - @Override - protected LineageSearchService getLineageService() { - return lineageService; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - AssertJUnit.assertNotNull(lineageService); - } + @Autowired + @Qualifier("searchLineageSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("searchLineageLineageSearchService") + protected LineageSearchService lineageService; + + @NotNull + @Override + protected LineageSearchService getLineageService() { + return lineageService; + } + + @NotNull + @Override + protected 
SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + AssertJUnit.assertNotNull(lineageService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java index 1a6242c2211fd..26c2cf28cdeca 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageServiceOpenSearchTest extends LineageServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java index 559c623c97d5a..42a178893e837 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java @@ -10,22 +10,23 @@ @TestConfiguration public class OpenSearchSuite extends AbstractTestNGSpringContextTests { - private static final OpenSearchTestContainer OPENSEARCH_TEST_CONTAINER; - private static GenericContainer<?> container; - static { - OPENSEARCH_TEST_CONTAINER = new OpenSearchTestContainer(); - } + private static final OpenSearchTestContainer OPENSEARCH_TEST_CONTAINER; + private static GenericContainer<?> container; - @AfterSuite - public void after() { - OPENSEARCH_TEST_CONTAINER.stopContainer(); - } + static { + OPENSEARCH_TEST_CONTAINER = new OpenSearchTestContainer(); + } + + @AfterSuite + public void after() { + OPENSEARCH_TEST_CONTAINER.stopContainer(); + } - @Bean(name = "testSearchContainer") - public GenericContainer<?> testSearchContainer() { - if (container == null) { - container = OPENSEARCH_TEST_CONTAINER.startContainer(); - } - return container; + @Bean(name = "testSearchContainer") + public GenericContainer<?> testSearchContainer() { + if (container == null) { + container = OPENSEARCH_TEST_CONTAINER.startContainer(); } + return container; + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java index 081eb5f70fc85..d358ba177f91d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; @@ -13,32 +15,30 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - -/** - * Runs sample data fixture tests for Opensearch test container - */ +/** Runs sample data fixture tests for Opensearch test container */ @Getter -@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SampleDataFixtureOpenSearchTest extends SampleDataFixtureTestBase { - @Autowired - private RestHighLevelClient searchClient; + @Autowired private RestHighLevelClient searchClient; - @Autowired - @Qualifier("sampleDataSearchService") - protected SearchService searchService; + @Autowired + @Qualifier("sampleDataSearchService") + protected SearchService searchService; - @Autowired - @Qualifier("sampleDataEntityClient") - protected EntityClient entityClient; + @Autowired + @Qualifier("sampleDataEntityClient") + protected EntityClient entityClient; - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; - @Test - public void initTest() { - assertNotNull(searchClient); - } + @Test + public void initTest() { + assertNotNull(searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java index 0b166975da0d1..7f799d8eebf0a 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.search.query.SearchDAOTestBase; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -12,16 +14,16 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Getter -@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchDAOOpenSearchTest extends SearchDAOTestBase { - @Autowired - private RestHighLevelClient searchClient; - @Autowired - private SearchConfiguration searchConfiguration; + @Autowired private RestHighLevelClient searchClient; + @Autowired private SearchConfiguration searchConfiguration; + @Autowired @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java 
b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java index 8a55ba7b37ef9..1127ba2089a91 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchServiceOpenSearchTest extends SearchServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java index f0bb8e1c12479..7ba90319cf1d3 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java @@ -11,16 +11,12 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class SystemMetadataServiceOpenSearchTest extends SystemMetadataServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java index 467f7fb43be1b..80db8864014c3 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class TestEntityOpenSearchTest extends TestEntityTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired 
- private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java index 3333b9f0942f5..63dffa9c21004 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java @@ -14,12 +14,9 @@ @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class TimeseriesAspectServiceOpenSearchTest extends TimeseriesAspectServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java index 91e7747afb4a1..a261b53f25c60 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java @@ -1,12 +1,18 @@ package com.linkedin.metadata.search.query; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.urn.Urn; -import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.entity.TestEntityRegistry; +import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; @@ -23,32 +29,24 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; - @Import(SearchCommonTestConfiguration.class) public class BrowseDAOTest extends AbstractTestNGSpringContextTests { private RestHighLevelClient _mockClient; private ESBrowseDAO _browseDAO; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; 
@BeforeMethod public void setup() { _mockClient = mock(RestHighLevelClient.class); - _browseDAO = new ESBrowseDAO( - new TestEntityRegistry(), - _mockClient, - new IndexConventionImpl("es_browse_dao_test"), - _searchConfiguration, - _customSearchConfiguration - ); + _browseDAO = + new ESBrowseDAO( + new TestEntityRegistry(), + _mockClient, + new IndexConventionImpl("es_browse_dao_test"), + _searchConfiguration, + _customSearchConfiguration); } public static Urn makeUrn(Object id) { @@ -76,7 +74,7 @@ public void testGetBrowsePath() throws Exception { // Test the case of single search hit & browsePaths field doesn't exist sourceMap.remove("browse_paths"); when(mockSearchHit.getSourceAsMap()).thenReturn(sourceMap); - when(mockSearchHits.getHits()).thenReturn(new SearchHit[]{mockSearchHit}); + when(mockSearchHits.getHits()).thenReturn(new SearchHit[] {mockSearchHit}); when(mockSearchResponse.getHits()).thenReturn(mockSearchHits); when(_mockClient.search(any(), eq(RequestOptions.DEFAULT))).thenReturn(mockSearchResponse); assertEquals(_browseDAO.getBrowsePaths("dataset", dummyUrn).size(), 0); @@ -84,11 +82,11 @@ public void testGetBrowsePath() throws Exception { // Test the case of single search hit & browsePaths field exists sourceMap.put("browsePaths", Collections.singletonList("foo")); when(mockSearchHit.getSourceAsMap()).thenReturn(sourceMap); - when(mockSearchHits.getHits()).thenReturn(new SearchHit[]{mockSearchHit}); + when(mockSearchHits.getHits()).thenReturn(new SearchHit[] {mockSearchHit}); when(mockSearchResponse.getHits()).thenReturn(mockSearchHits); when(_mockClient.search(any(), eq(RequestOptions.DEFAULT))).thenReturn(mockSearchResponse); List<String> browsePaths = _browseDAO.getBrowsePaths("dataset", dummyUrn); assertEquals(browsePaths.size(), 1); assertEquals(browsePaths.get(0), "foo"); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java index 2dbc142d45071..ba909dc3822c5 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java @@ -1,5 +1,12 @@ package com.linkedin.metadata.search.query; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.fail; + import com.datahub.test.Snapshot; import com.google.common.collect.ImmutableList; import com.linkedin.data.template.LongMap; @@ -22,286 +29,404 @@ import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; -import org.opensearch.client.RestHighLevelClient; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import org.opensearch.client.RestHighLevelClient; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; 
-import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - -abstract public class SearchDAOTestBase extends AbstractTestNGSpringContextTests { - - abstract protected RestHighLevelClient getSearchClient(); - - abstract protected SearchConfiguration getSearchConfiguration(); - - abstract protected IndexConvention getIndexConvention(); - - EntityRegistry _entityRegistry = new SnapshotEntityRegistry(new Snapshot()); - - - @Test - public void testTransformFilterForEntitiesNoChange() { - Criterion c = new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertEquals(f, transformedFilter); +public abstract class SearchDAOTestBase extends AbstractTestNGSpringContextTests { + + protected abstract RestHighLevelClient getSearchClient(); + + protected abstract SearchConfiguration getSearchConfiguration(); + + protected abstract IndexConvention getIndexConvention(); + + EntityRegistry _entityRegistry = new SnapshotEntityRegistry(new Snapshot()); + + @Test + public void testTransformFilterForEntitiesNoChange() { + Criterion c = + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertEquals(f, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesNullFilter() { + Filter transformedFilter = SearchUtil.transformFilterForEntities(null, getIndexConvention()); + assertNotNull(getIndexConvention()); + assertEquals(null, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesWithChanges() { + + Criterion c = + new Criterion() + .setValue("dataset") + .setValues(new StringArray(ImmutableList.of("dataset"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesNullFilter() { - Filter transformedFilter = SearchUtil.transformFilterForEntities(null, getIndexConvention()); - assertNotNull(getIndexConvention()); - assertEquals(null, transformedFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datasetindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + 
Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); + + assertEquals(expectedNewFilter, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesWithUnderscore() { + + Criterion c = + new Criterion() + .setValue("data_job") + .setValues(new StringArray(ImmutableList.of("data_job"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithChanges() { - - Criterion c = new Criterion().setValue("dataset").setValues( - new StringArray(ImmutableList.of("dataset")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datasetindex_v2").setValues( - new StringArray(ImmutableList.of("smpldat_datasetindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); - - assertEquals(expectedNewFilter, transformedFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datajobindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datajobindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); + + assertEquals(transformedFilter, expectedNewFilter); + } + + @Test + public void testTransformFilterForEntitiesWithSomeChanges() { + + Criterion criterionChanged = + new Criterion() + .setValue("dataset") + .setValues(new StringArray(ImmutableList.of("dataset"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + Criterion criterionUnchanged = + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(criterionChanged, criterionUnchanged)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithUnderscore() { - - Criterion c = new Criterion().setValue("data_job").setValues( - new 
StringArray(ImmutableList.of("data_job")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datajobindex_v2").setValues( - new StringArray(ImmutableList.of("smpldat_datajobindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); - - assertEquals(transformedFilter, expectedNewFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datasetindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(expectedNewCriterion, criterionUnchanged)))); + + assertEquals(expectedNewFilter, transformedFilter); + } + + @Test + public void testTransformIndexIntoEntityNameSingle() { + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + getIndexConvention(), + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + // Empty aggregations + final SearchResultMetadata searchResultMetadata = + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); + SearchResult result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata(searchResultMetadata) + .setFrom(0) + .setPageSize(100) + .setNumEntities(30); + SearchResult expectedResult = null; + try { + expectedResult = result.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithSomeChanges() { - - Criterion criterionChanged = new Criterion().setValue("dataset").setValues( - new StringArray(ImmutableList.of("dataset")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - Criterion criterionUnchanged = new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criterionChanged, criterionUnchanged)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datasetindex_v2").setValues( - new 
StringArray(ImmutableList.of("smpldat_datasetindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion, criterionUnchanged)))); - - assertEquals(expectedNewFilter, transformedFilter); + assertEquals(expectedResult, searchDAO.transformIndexIntoEntityName(result)); + + // one facet, do not transform + Map<String, Long> aggMap = Map.of("urn:li:corpuser:datahub", Long.valueOf(3)); + + List<AggregationMetadata> aggregationMetadataList = new ArrayList<>(); + aggregationMetadataList.add( + new AggregationMetadata() + .setName("owners") + .setDisplayName("Owned by") + .setAggregations(new LongMap(aggMap)) + .setFilterValues( + new FilterValueArray(SearchUtil.convertToFilters(aggMap, Collections.emptySet())))); + searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + result.setMetadata(searchResultMetadata); + + try { + expectedResult = result.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - @Test - public void testTransformIndexIntoEntityNameSingle() { - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), getIndexConvention(), false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - // Empty aggregations - final SearchResultMetadata searchResultMetadata = - new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); - SearchResult result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(searchResultMetadata) - .setFrom(0) - .setPageSize(100) - .setNumEntities(30); - SearchResult expectedResult = null; - try { - expectedResult = result.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(expectedResult, searchDAO.transformIndexIntoEntityName(result)); - - // one facet, do not transform - Map<String, Long> aggMap = Map.of("urn:li:corpuser:datahub", Long.valueOf(3)); - - List<AggregationMetadata> aggregationMetadataList = new ArrayList<>(); - aggregationMetadataList.add(new AggregationMetadata().setName("owners") - .setDisplayName("Owned by") - .setAggregations(new LongMap(aggMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(aggMap, Collections.emptySet()))) - ); - searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); - result.setMetadata(searchResultMetadata); - - try { - expectedResult = result.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - - // one facet, transform - Map<String, Long> entityTypeMap = Map.of("smpldat_datasetindex_v2", Long.valueOf(3)); - - aggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType") + // one facet, transform + Map<String, Long> entityTypeMap = Map.of("smpldat_datasetindex_v2", Long.valueOf(3)); + + aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType") .setDisplayName("Type") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); - 
result.setMetadata(searchResultMetadata); - - Map<String, Long> expectedEntityTypeMap = Map.of("dataset", Long.valueOf(3)); - - List<AggregationMetadata> expectedAggregationMetadataList = List.of( - new AggregationMetadata().setName("_entityType") - .setDisplayName("Type") - .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - expectedResult.setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - } - - @Test - public void testTransformIndexIntoEntityNameNested() { - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), getIndexConvention(), false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - // One nested facet - Map<String, Long> entityTypeMap = Map.of( - String.format("smpldat_datasetindex_v2%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("smpldat_datasetindex_v2%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "smpldat_datasetindex_v2", Long.valueOf(20) - ); - List<AggregationMetadata> aggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType␞owners") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + result.setMetadata(searchResultMetadata); + + Map<String, Long> expectedEntityTypeMap = Map.of("dataset", Long.valueOf(3)); + + List<AggregationMetadata> expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType") + .setDisplayName("Type") + .setAggregations(new LongMap(expectedEntityTypeMap)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + expectedResult.setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + } + + @Test + public void testTransformIndexIntoEntityNameNested() { + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + getIndexConvention(), + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + // One nested facet + Map<String, Long> entityTypeMap = + Map.of( + String.format( + "smpldat_datasetindex_v2%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format( + "smpldat_datasetindex_v2%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "smpldat_datasetindex_v2", + Long.valueOf(20)); + List<AggregationMetadata> aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType␞owners") .setDisplayName("Type␞Owned By") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - SearchResult result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(aggregationMetadataList) - )) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - - Map<String, Long> expectedEntityTypeMap = Map.of( 
- String.format("dataset%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("dataset%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "dataset", Long.valueOf(20) - ); - - List<AggregationMetadata> expectedAggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType␞owners") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + SearchResult result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(aggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + + Map<String, Long> expectedEntityTypeMap = + Map.of( + String.format("dataset%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format("dataset%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "dataset", + Long.valueOf(20)); + + List<AggregationMetadata> expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType␞owners") .setDisplayName("Type␞Owned By") .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - SearchResult expectedResult = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(expectedAggregationMetadataList))) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - - // One nested facet, opposite order - entityTypeMap = Map.of( - String.format("urn:li:corpuser:datahub%ssmpldat_datasetindex_v2", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("urn:li:corpuser:datahub%ssmpldat_chartindex_v2", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "urn:li:corpuser:datahub", Long.valueOf(20) - ); - aggregationMetadataList = List.of(new AggregationMetadata().setName("owners␞_entityType") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + SearchResult expectedResult = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + + // One nested facet, opposite order + entityTypeMap = + Map.of( + String.format( + "urn:li:corpuser:datahub%ssmpldat_datasetindex_v2", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format( + "urn:li:corpuser:datahub%ssmpldat_chartindex_v2", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "urn:li:corpuser:datahub", + Long.valueOf(20)); + aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("owners␞_entityType") .setDisplayName("Owned By␞Type") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(aggregationMetadataList) - )) - .setFrom(0) - 
.setPageSize(100) - .setNumEntities(50); - - expectedEntityTypeMap = Map.of( - String.format("urn:li:corpuser:datahub%sdataset", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("urn:li:corpuser:datahub%schart", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "urn:li:corpuser:datahub", Long.valueOf(20) - ); - - expectedAggregationMetadataList = List.of(new AggregationMetadata().setName("owners␞_entityType") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(aggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + + expectedEntityTypeMap = + Map.of( + String.format("urn:li:corpuser:datahub%sdataset", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format("urn:li:corpuser:datahub%schart", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "urn:li:corpuser:datahub", + Long.valueOf(20)); + + expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("owners␞_entityType") .setDisplayName("Owned By␞Type") .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - expectedResult = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(expectedAggregationMetadataList))) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - } + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + expectedResult = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java index 66e7b62741f4c..b52f4cd4e92f7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java @@ -1,47 +1,45 @@ package com.linkedin.metadata.search.query.request; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.google.common.collect.ImmutableList; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - -import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder; import org.opensearch.search.aggregations.AggregationBuilder; import org.testng.Assert; import org.testng.annotations.Test; - public class 
AggregationQueryBuilderTest { @Test public void testGetDefaultAggregationsHasFields() { - SearchableAnnotation annotation = new SearchableAnnotation( - "test", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - false, - true, - Optional.empty(), - Optional.of("Has Test"), - 1.0, - Optional.of("hasTest"), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); + SearchableAnnotation annotation = + new SearchableAnnotation( + "test", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + true, + Optional.empty(), + Optional.of("Has Test"), + 1.0, + Optional.of("hasTest"), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); SearchConfiguration config = new SearchConfiguration(); config.setMaxTermBucketSize(25); - AggregationQueryBuilder builder = new AggregationQueryBuilder( - config, ImmutableList.of(annotation)); + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation)); List<AggregationBuilder> aggs = builder.getAggregations(); @@ -51,27 +49,27 @@ public void testGetDefaultAggregationsHasFields() { @Test public void testGetDefaultAggregationsFields() { - SearchableAnnotation annotation = new SearchableAnnotation( - "test", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - true, - false, - Optional.of("Test Filter"), - Optional.empty(), - 1.0, - Optional.empty(), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); + SearchableAnnotation annotation = + new SearchableAnnotation( + "test", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + true, + false, + Optional.of("Test Filter"), + Optional.empty(), + 1.0, + Optional.empty(), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); SearchConfiguration config = new SearchConfiguration(); config.setMaxTermBucketSize(25); - AggregationQueryBuilder builder = new AggregationQueryBuilder( - config, ImmutableList.of(annotation)); + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation)); List<AggregationBuilder> aggs = builder.getAggregations(); @@ -81,56 +79,53 @@ public void testGetDefaultAggregationsFields() { @Test public void testGetSpecificAggregationsHasFields() { - SearchableAnnotation annotation1 = new SearchableAnnotation( - "test1", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - false, - false, - Optional.empty(), - Optional.of("Has Test"), - 1.0, - Optional.of("hasTest1"), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); - - SearchableAnnotation annotation2 = new SearchableAnnotation( - "test2", - SearchableAnnotation.FieldType.KEYWORD, - true, - true, - false, - false, - Optional.of("Test Filter"), - Optional.empty(), - 1.0, - Optional.empty(), - Optional.empty(), - Collections.emptyMap(), - Collections.emptyList() - ); + SearchableAnnotation annotation1 = + new SearchableAnnotation( + "test1", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.empty(), + Optional.of("Has Test"), + 1.0, + Optional.of("hasTest1"), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); + + SearchableAnnotation annotation2 = + new SearchableAnnotation( + "test2", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.of("Test Filter"), + Optional.empty(), + 1.0, + Optional.empty(), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList()); SearchConfiguration config = new SearchConfiguration(); 
config.setMaxTermBucketSize(25); - AggregationQueryBuilder builder = new AggregationQueryBuilder( - config, ImmutableList.of(annotation1, annotation2)); + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation1, annotation2)); // Case 1: Ask for fields that should exist. - List<AggregationBuilder> aggs = builder.getAggregations( - ImmutableList.of("test1", "test2", "hasTest1") - ); + List<AggregationBuilder> aggs = + builder.getAggregations(ImmutableList.of("test1", "test2", "hasTest1")); Assert.assertEquals(aggs.size(), 3); Set<String> facets = aggs.stream().map(AggregationBuilder::getName).collect(Collectors.toSet()); Assert.assertEquals(ImmutableSet.of("test1", "test2", "hasTest1"), facets); // Case 2: Ask for fields that should NOT exist. - aggs = builder.getAggregations( - ImmutableList.of("hasTest2") - ); + aggs = builder.getAggregations(ImmutableList.of("hasTest2")); Assert.assertEquals(aggs.size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java index 34b98f38254cd..ab832eb1ac24f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java @@ -1,10 +1,12 @@ package com.linkedin.metadata.search.query.request; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.linkedin.metadata.TestEntitySpecBuilder; +import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler; import java.util.List; import java.util.Map; - -import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler; import org.opensearch.action.search.SearchRequest; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder; @@ -14,12 +16,9 @@ import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class AutocompleteRequestHandlerTest { - private AutocompleteRequestHandler handler = AutocompleteRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec()); + private AutocompleteRequestHandler handler = + AutocompleteRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec()); @Test public void testDefaultAutocompleteRequest() { @@ -38,7 +37,8 @@ public void testDefaultAutocompleteRequest() { assertTrue(queryFields.containsKey("keyPart1.ngram._4gram")); assertEquals(autocompleteQuery.type(), MultiMatchQueryBuilder.Type.BOOL_PREFIX); - MatchPhrasePrefixQueryBuilder prefixQuery = (MatchPhrasePrefixQueryBuilder) query.should().get(0); + MatchPhrasePrefixQueryBuilder prefixQuery = + (MatchPhrasePrefixQueryBuilder) query.should().get(0); assertEquals("keyPart1.delimited", prefixQuery.fieldName()); assertEquals(query.mustNot().size(), 1); @@ -75,7 +75,8 @@ public void testAutocompleteRequestWithField() { assertTrue(queryFields.containsKey("field.ngram._4gram")); assertEquals(autocompleteQuery.type(), MultiMatchQueryBuilder.Type.BOOL_PREFIX); - MatchPhrasePrefixQueryBuilder prefixQuery = (MatchPhrasePrefixQueryBuilder) query.should().get(0); + MatchPhrasePrefixQueryBuilder prefixQuery = + (MatchPhrasePrefixQueryBuilder) 
query.should().get(0); assertEquals("field.delimited", prefixQuery.fieldName()); MatchQueryBuilder removedFilter = (MatchQueryBuilder) query.mustNot().get(0); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java index 6b6664ffdf30e..105ee2652dc30 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java @@ -1,14 +1,21 @@ package com.linkedin.metadata.search.query.request; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.BoolQueryConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.search.elasticsearch.query.request.CustomizedQueryHandler; import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.index.query.MatchAllQueryBuilder; @@ -17,172 +24,192 @@ import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.testng.annotations.Test; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - public class CustomizedQueryHandlerTest { - public static final ObjectMapper TEST_MAPPER = new YAMLMapper(); - private static final CustomSearchConfiguration TEST_CONFIG; - static { - try { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - customConfiguration.setFile("search_config_test.yml"); - TEST_CONFIG = customConfiguration.resolve(TEST_MAPPER); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - public static final SearchQueryBuilder SEARCH_QUERY_BUILDER; - static { - SEARCH_QUERY_BUILDER = new SearchQueryBuilder(new SearchConfiguration(), TEST_CONFIG); - } - private static final List<QueryConfiguration> EXPECTED_CONFIGURATION = List.of( - QueryConfiguration.builder() - .queryRegex("[*]|") - .simpleQuery(false) - .exactMatchQuery(false) - .prefixMatchQuery(false) - .functionScore(Map.of("score_mode", "avg", "boost_mode", "multiply", - "functions", List.of( - Map.of( - "weight", 1, - "filter", Map.<String, Object>of("match_all", Map.<String, Object>of())), - Map.of( - "weight", 0.5, - "filter", Map.<String, Object>of("term", Map.of( - "materialized", Map.of("value", true) - ))), - Map.of( - "weight", 0.5, - "filter", Map.<String, Object>of("term", Map.<String, Object>of( - "deprecated", Map.of("value", true) - ))) - ))) - .build(), - 
QueryConfiguration.builder() - .queryRegex(".*") - .simpleQuery(true) - .exactMatchQuery(true) - .prefixMatchQuery(true) - .boolQuery(BoolQueryConfiguration.builder() - .must(List.of( - Map.of("term", Map.of("name", "{{query_string}}")) - )) - .build()) - .functionScore(Map.of("score_mode", "avg", "boost_mode", "multiply", - "functions", List.of( - Map.of( - "weight", 1, - "filter", Map.<String, Object>of("match_all", Map.<String, Object>of())), - Map.of( - "weight", 0.5, - "filter", Map.<String, Object>of("term", Map.of( - "materialized", Map.of("value", true) - ))), - Map.of( - "weight", 1.5, - "filter", Map.<String, Object>of("term", Map.<String, Object>of( - "deprecated", Map.of("value", false) - ))) - ))) - .build() - ); - - - @Test - public void configParsingTest() { - assertNotNull(TEST_CONFIG); - assertEquals(TEST_CONFIG.getQueryConfigurations(), EXPECTED_CONFIGURATION); + public static final ObjectMapper TEST_MAPPER = new YAMLMapper(); + private static final CustomSearchConfiguration TEST_CONFIG; + + static { + try { + CustomConfiguration customConfiguration = new CustomConfiguration(); + customConfiguration.setEnabled(true); + customConfiguration.setFile("search_config_test.yml"); + TEST_CONFIG = customConfiguration.resolve(TEST_MAPPER); + } catch (IOException e) { + throw new RuntimeException(e); } - - @Test - public void customizedQueryHandlerInitTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - - assertEquals(test.getQueryConfigurations().stream().map(e -> e.getKey().toString()).collect(Collectors.toList()), - List.of("[*]|", ".*")); - - assertEquals(test.getQueryConfigurations().stream() - .map(e -> Map.entry(e.getKey().toString(), e.getValue())) - .collect(Collectors.toList()), - EXPECTED_CONFIGURATION.stream() - .map(cfg -> Map.entry(cfg.getQueryRegex(), cfg)) - .collect(Collectors.toList())); - } - - @Test - public void patternMatchTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - - for (String selectAllQuery: List.of("*", "")) { - QueryConfiguration actual = test.lookupQueryConfig(selectAllQuery).get(); - assertEquals(actual, EXPECTED_CONFIGURATION.get(0), String.format("Failed to match: `%s`", selectAllQuery)); - } - - for (String otherQuery: List.of("foo", "bar")) { - QueryConfiguration actual = test.lookupQueryConfig(otherQuery).get(); - assertEquals(actual, EXPECTED_CONFIGURATION.get(1)); - } + } + + public static final SearchQueryBuilder SEARCH_QUERY_BUILDER; + + static { + SEARCH_QUERY_BUILDER = new SearchQueryBuilder(new SearchConfiguration(), TEST_CONFIG); + } + + private static final List<QueryConfiguration> EXPECTED_CONFIGURATION = + List.of( + QueryConfiguration.builder() + .queryRegex("[*]|") + .simpleQuery(false) + .exactMatchQuery(false) + .prefixMatchQuery(false) + .functionScore( + Map.of( + "score_mode", + "avg", + "boost_mode", + "multiply", + "functions", + List.of( + Map.of( + "weight", + 1, + "filter", + Map.<String, Object>of("match_all", Map.<String, Object>of())), + Map.of( + "weight", + 0.5, + "filter", + Map.<String, Object>of( + "term", Map.of("materialized", Map.of("value", true)))), + Map.of( + "weight", + 0.5, + "filter", + Map.<String, Object>of( + "term", + Map.<String, Object>of("deprecated", Map.of("value", true))))))) + .build(), + QueryConfiguration.builder() + .queryRegex(".*") + .simpleQuery(true) + .exactMatchQuery(true) + .prefixMatchQuery(true) + .boolQuery( + BoolQueryConfiguration.builder() + .must(List.of(Map.of("term", Map.of("name", 
"{{query_string}}")))) + .build()) + .functionScore( + Map.of( + "score_mode", + "avg", + "boost_mode", + "multiply", + "functions", + List.of( + Map.of( + "weight", + 1, + "filter", + Map.<String, Object>of("match_all", Map.<String, Object>of())), + Map.of( + "weight", + 0.5, + "filter", + Map.<String, Object>of( + "term", Map.of("materialized", Map.of("value", true)))), + Map.of( + "weight", + 1.5, + "filter", + Map.<String, Object>of( + "term", + Map.<String, Object>of("deprecated", Map.of("value", false))))))) + .build()); + + @Test + public void configParsingTest() { + assertNotNull(TEST_CONFIG); + assertEquals(TEST_CONFIG.getQueryConfigurations(), EXPECTED_CONFIGURATION); + } + + @Test + public void customizedQueryHandlerInitTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + + assertEquals( + test.getQueryConfigurations().stream() + .map(e -> e.getKey().toString()) + .collect(Collectors.toList()), + List.of("[*]|", ".*")); + + assertEquals( + test.getQueryConfigurations().stream() + .map(e -> Map.entry(e.getKey().toString(), e.getValue())) + .collect(Collectors.toList()), + EXPECTED_CONFIGURATION.stream() + .map(cfg -> Map.entry(cfg.getQueryRegex(), cfg)) + .collect(Collectors.toList())); + } + + @Test + public void patternMatchTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + + for (String selectAllQuery : List.of("*", "")) { + QueryConfiguration actual = test.lookupQueryConfig(selectAllQuery).get(); + assertEquals( + actual, + EXPECTED_CONFIGURATION.get(0), + String.format("Failed to match: `%s`", selectAllQuery)); } - @Test - public void functionScoreQueryBuilderTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - MatchAllQueryBuilder inputQuery = QueryBuilders.matchAllQuery(); - - /* - * Test select star - */ - FunctionScoreQueryBuilder selectStarTest = SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(test.lookupQueryConfig("*").get(), - inputQuery); - - FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedSelectStarScoreFunctions = { - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - ScoreFunctionBuilders.weightFactorFunction(1f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("materialized", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("deprecated", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ) - }; - FunctionScoreQueryBuilder expectedSelectStar = new FunctionScoreQueryBuilder(expectedSelectStarScoreFunctions) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) - .boostMode(CombineFunction.MULTIPLY); - - assertEquals(selectStarTest, expectedSelectStar); - - /* - * Test default (non-select start) - */ - FunctionScoreQueryBuilder defaultTest = SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(test.lookupQueryConfig("foobar").get(), inputQuery); - - FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedDefaultScoreFunctions = { - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - ScoreFunctionBuilders.weightFactorFunction(1f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("materialized", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("deprecated", false), - ScoreFunctionBuilders.weightFactorFunction(1.5f) - ) - }; - FunctionScoreQueryBuilder expectedDefault 
= new FunctionScoreQueryBuilder(expectedDefaultScoreFunctions)
- .scoreMode(FunctionScoreQuery.ScoreMode.AVG)
- .boostMode(CombineFunction.MULTIPLY);
-
- assertEquals(defaultTest, expectedDefault);
+ for (String otherQuery : List.of("foo", "bar")) {
+ QueryConfiguration actual = test.lookupQueryConfig(otherQuery).get();
+ assertEquals(actual, EXPECTED_CONFIGURATION.get(1));
}
+ }
+
+ @Test
+ public void functionScoreQueryBuilderTest() {
+ CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build();
+ MatchAllQueryBuilder inputQuery = QueryBuilders.matchAllQuery();
+
+ /*
+ * Test select star
+ */
+ FunctionScoreQueryBuilder selectStarTest =
+ SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(
+ test.lookupQueryConfig("*").get(), inputQuery);
+
+ FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedSelectStarScoreFunctions = {
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ ScoreFunctionBuilders.weightFactorFunction(1f)),
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ QueryBuilders.termQuery("materialized", true),
+ ScoreFunctionBuilders.weightFactorFunction(0.5f)),
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ QueryBuilders.termQuery("deprecated", true),
+ ScoreFunctionBuilders.weightFactorFunction(0.5f))
+ };
+ FunctionScoreQueryBuilder expectedSelectStar =
+ new FunctionScoreQueryBuilder(expectedSelectStarScoreFunctions)
+ .scoreMode(FunctionScoreQuery.ScoreMode.AVG)
+ .boostMode(CombineFunction.MULTIPLY);
+
+ assertEquals(selectStarTest, expectedSelectStar);
+
+ /*
+ * Test default (non-select star)
+ */
+ FunctionScoreQueryBuilder defaultTest =
+ SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(
+ test.lookupQueryConfig("foobar").get(), inputQuery);
+
+ FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedDefaultScoreFunctions = {
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ ScoreFunctionBuilders.weightFactorFunction(1f)),
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ QueryBuilders.termQuery("materialized", true),
+ ScoreFunctionBuilders.weightFactorFunction(0.5f)),
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ QueryBuilders.termQuery("deprecated", false),
+ ScoreFunctionBuilders.weightFactorFunction(1.5f))
+ };
+ FunctionScoreQueryBuilder expectedDefault =
+ new FunctionScoreQueryBuilder(expectedDefaultScoreFunctions)
+ .scoreMode(FunctionScoreQuery.ScoreMode.AVG)
+ .boostMode(CombineFunction.MULTIPLY);
+
+ assertEquals(defaultTest, expectedDefault);
+ }
}
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java
index 9c0815efdc8b4..42f4f480bfbdd 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java
@@ -1,23 +1,33 @@
 package com.linkedin.metadata.search.query.request;
 
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TEXT_SEARCH_ANALYZER;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.URN_SEARCH_ANALYZER;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static 
org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import com.google.common.collect.ImmutableList; import com.linkedin.data.schema.DataSchema; import com.linkedin.data.schema.PathSpec; -import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig; -import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import com.linkedin.metadata.TestEntitySpecBuilder; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.ExactMatchConfiguration; import com.linkedin.metadata.config.search.PartialConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.WordGramConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.TestEntitySpecBuilder; - import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig; +import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder; +import com.linkedin.util.Pair; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import java.io.IOException; import java.util.List; import java.util.Map; @@ -25,9 +35,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.util.Pair; +import org.mockito.Mockito; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder; @@ -37,28 +45,18 @@ import org.opensearch.index.query.SimpleQueryStringBuilder; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; -import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Import; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TEXT_SEARCH_ANALYZER; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.URN_SEARCH_ANALYZER; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - @Import(SearchCommonTestConfiguration.class) public class SearchQueryBuilderTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityRegistry entityRegistry; + @Autowired private EntityRegistry entityRegistry; public static SearchConfiguration testQueryConfig; + static { testQueryConfig = new SearchConfiguration(); 
testQueryConfig.setMaxTermBucketSize(20); @@ -84,25 +82,31 @@ public class SearchQueryBuilderTest extends AbstractTestNGSpringContextTests { testQueryConfig.setWordGram(wordGramConfiguration); testQueryConfig.setPartial(partialConfiguration); } - public static final SearchQueryBuilder TEST_BUILDER = new SearchQueryBuilder(testQueryConfig, null); + + public static final SearchQueryBuilder TEST_BUILDER = + new SearchQueryBuilder(testQueryConfig, null); @Test public void testQueryBuilderFulltext() { FunctionScoreQueryBuilder result = - (FunctionScoreQueryBuilder) TEST_BUILDER.buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", - true); + (FunctionScoreQueryBuilder) + TEST_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); assertEquals(shouldQueries.size(), 2); BoolQueryBuilder analyzerGroupQuery = (BoolQueryBuilder) shouldQueries.get(0); - SimpleQueryStringBuilder keywordQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(0); + SimpleQueryStringBuilder keywordQuery = + (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(0); assertEquals(keywordQuery.value(), "testQuery"); assertEquals(keywordQuery.analyzer(), "keyword"); Map<String, Float> keywordFields = keywordQuery.fields(); assertEquals(keywordFields.size(), 9); - assertEquals(keywordFields, Map.of( + assertEquals( + keywordFields, + Map.of( "urn", 10.f, "textArrayField", 1.0f, "customProperties", 1.0f, @@ -111,47 +115,55 @@ public void testQueryBuilderFulltext() { "textFieldOverride", 1.0f, "nestedArrayStringField", 1.0f, "keyPart1", 10.0f, - "esObjectField", 1.0f - )); + "esObjectField", 1.0f)); - SimpleQueryStringBuilder urnComponentQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(1); + SimpleQueryStringBuilder urnComponentQuery = + (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(1); assertEquals(urnComponentQuery.value(), "testQuery"); assertEquals(urnComponentQuery.analyzer(), URN_SEARCH_ANALYZER); - assertEquals(urnComponentQuery.fields(), Map.of( + assertEquals( + urnComponentQuery.fields(), + Map.of( "nestedForeignKey", 1.0f, - "foreignKey", 1.0f - )); + "foreignKey", 1.0f)); - SimpleQueryStringBuilder fulltextQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(2); + SimpleQueryStringBuilder fulltextQuery = + (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(2); assertEquals(fulltextQuery.value(), "testQuery"); assertEquals(fulltextQuery.analyzer(), TEXT_SEARCH_ANALYZER); - assertEquals(fulltextQuery.fields(), Map.of( + assertEquals( + fulltextQuery.fields(), + Map.of( "textFieldOverride.delimited", 0.4f, "keyPart1.delimited", 4.0f, "nestedArrayArrayField.delimited", 0.4f, "urn.delimited", 7.0f, "textArrayField.delimited", 0.4f, "nestedArrayStringField.delimited", 0.4f, - "wordGramField.delimited", 0.4f - )); + "wordGramField.delimited", 0.4f)); BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(1); assertTrue(boolPrefixQuery.should().size() > 0); - List<Pair<String, Float>> prefixFieldWeights = boolPrefixQuery.should().stream().map(prefixQuery -> { - if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { - MatchPhrasePrefixQueryBuilder builder = (MatchPhrasePrefixQueryBuilder) prefixQuery; - return Pair.of(builder.fieldName(), builder.boost()); - } else if (prefixQuery instanceof TermQueryBuilder) { - // exact - TermQueryBuilder builder = 
(TermQueryBuilder) prefixQuery; - return Pair.of(builder.fieldName(), builder.boost()); - } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { - // ngram - MatchPhraseQueryBuilder builder = (MatchPhraseQueryBuilder) prefixQuery; - return Pair.of(builder.fieldName(), builder.boost()); - } - }).collect(Collectors.toList()); + List<Pair<String, Float>> prefixFieldWeights = + boolPrefixQuery.should().stream() + .map( + prefixQuery -> { + if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { + MatchPhrasePrefixQueryBuilder builder = + (MatchPhrasePrefixQueryBuilder) prefixQuery; + return Pair.of(builder.fieldName(), builder.boost()); + } else if (prefixQuery instanceof TermQueryBuilder) { + // exact + TermQueryBuilder builder = (TermQueryBuilder) prefixQuery; + return Pair.of(builder.fieldName(), builder.boost()); + } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { + // ngram + MatchPhraseQueryBuilder builder = (MatchPhraseQueryBuilder) prefixQuery; + return Pair.of(builder.fieldName(), builder.boost()); + } + }) + .collect(Collectors.toList()); assertEquals(prefixFieldWeights.size(), 28); @@ -165,19 +177,21 @@ public void testQueryBuilderFulltext() { Pair.of("wordGramField.wordGrams3", 2.25f), Pair.of("wordGramField.wordGrams4", 3.2399998f), Pair.of("wordGramField.keyword", 10.0f), - Pair.of("wordGramField.keyword", 7.0f) - ).forEach(p -> assertTrue(prefixFieldWeights.contains(p), "Missing: " + p)); + Pair.of("wordGramField.keyword", 7.0f)) + .forEach(p -> assertTrue(prefixFieldWeights.contains(p), "Missing: " + p)); // Validate scorer - FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = result.filterFunctionBuilders(); + FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = + result.filterFunctionBuilders(); assertEquals(scoringFunctions.length, 3); } @Test public void testQueryBuilderStructured() { FunctionScoreQueryBuilder result = - (FunctionScoreQueryBuilder) TEST_BUILDER.buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), - "testQuery", false); + (FunctionScoreQueryBuilder) + TEST_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", false); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); assertEquals(shouldQueries.size(), 2); @@ -194,17 +208,20 @@ public void testQueryBuilderStructured() { assertEquals(keywordFields.get("esObjectField").floatValue(), 1.0f); // Validate scorer - FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = result.filterFunctionBuilders(); + FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = + result.filterFunctionBuilders(); assertEquals(scoringFunctions.length, 3); } private static final SearchQueryBuilder TEST_CUSTOM_BUILDER; + static { try { CustomConfiguration customConfiguration = new CustomConfiguration(); customConfiguration.setEnabled(true); customConfiguration.setFile("search_config_builder_test.yml"); - CustomSearchConfiguration customSearchConfiguration = customConfiguration.resolve(new YAMLMapper()); + CustomSearchConfiguration customSearchConfiguration = + customConfiguration.resolve(new YAMLMapper()); TEST_CUSTOM_BUILDER = new SearchQueryBuilder(testQueryConfig, customSearchConfiguration); } catch (IOException e) { throw new RuntimeException(e); @@ -214,8 +231,10 @@ public void testQueryBuilderStructured() { @Test public void testCustomSelectAll() { for (String triggerQuery : List.of("*", "")) { - FunctionScoreQueryBuilder result = 
(FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER - .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); + FunctionScoreQueryBuilder result = + (FunctionScoreQueryBuilder) + TEST_CUSTOM_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); @@ -226,8 +245,10 @@ public void testCustomSelectAll() { @Test public void testCustomExactMatch() { for (String triggerQuery : List.of("test_table", "'single quoted'", "\"double quoted\"")) { - FunctionScoreQueryBuilder result = (FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER - .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); + FunctionScoreQueryBuilder result = + (FunctionScoreQueryBuilder) + TEST_CUSTOM_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); @@ -236,18 +257,22 @@ public void testCustomExactMatch() { BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(0); assertTrue(boolPrefixQuery.should().size() > 0); - List<QueryBuilder> queries = boolPrefixQuery.should().stream().map(prefixQuery -> { - if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { - // prefix - return (MatchPhrasePrefixQueryBuilder) prefixQuery; - } else if (prefixQuery instanceof TermQueryBuilder) { - // exact - return (TermQueryBuilder) prefixQuery; - } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { - // ngram - return (MatchPhraseQueryBuilder) prefixQuery; - } - }).collect(Collectors.toList()); + List<QueryBuilder> queries = + boolPrefixQuery.should().stream() + .map( + prefixQuery -> { + if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) { + // prefix + return (MatchPhrasePrefixQueryBuilder) prefixQuery; + } else if (prefixQuery instanceof TermQueryBuilder) { + // exact + return (TermQueryBuilder) prefixQuery; + } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) { + // ngram + return (MatchPhraseQueryBuilder) prefixQuery; + } + }) + .collect(Collectors.toList()); assertFalse(queries.isEmpty(), "Expected queries with specific types"); } @@ -256,24 +281,30 @@ public void testCustomExactMatch() { @Test public void testCustomDefault() { for (String triggerQuery : List.of("foo", "bar", "foo\"bar", "foo:bar")) { - FunctionScoreQueryBuilder result = (FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER - .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); + FunctionScoreQueryBuilder result = + (FunctionScoreQueryBuilder) + TEST_CUSTOM_BUILDER.buildQuery( + ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true); BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query(); List<QueryBuilder> shouldQueries = mainQuery.should(); assertEquals(shouldQueries.size(), 3); - List<QueryBuilder> queries = mainQuery.should().stream().map(query -> { - if (query instanceof SimpleQueryStringBuilder) { - return (SimpleQueryStringBuilder) query; - } else if (query instanceof MatchAllQueryBuilder) { - // custom - return (MatchAllQueryBuilder) query; - } else { - // exact - return (BoolQueryBuilder) query; - } - }).collect(Collectors.toList()); + List<QueryBuilder> queries = + mainQuery.should().stream() + .map( + query -> { + if (query instanceof SimpleQueryStringBuilder) { + return (SimpleQueryStringBuilder) query; + 
} else if (query instanceof MatchAllQueryBuilder) { + // custom + return (MatchAllQueryBuilder) query; + } else { + // exact + return (BoolQueryBuilder) query; + } + }) + .collect(Collectors.toList()); assertEquals(queries.size(), 3, "Expected queries with specific types"); @@ -287,41 +318,52 @@ public void testCustomDefault() { } } - /** - * Tests to make sure that the fields are correctly combined across search-able entities - */ + /** Tests to make sure that the fields are correctly combined across search-able entities */ @Test public void testGetStandardFieldsEntitySpec() { - List<EntitySpec> entitySpecs = Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) + List<EntitySpec> entitySpecs = + Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) .map(entityRegistry::getEntitySpec) .collect(Collectors.toList()); assertTrue(entitySpecs.size() > 30, "Expected at least 30 searchable entities in the registry"); // Count of the distinct field names - Set<String> expectedFieldNames = Stream.concat( - // Standard urn fields plus entitySpec sourced fields - Stream.of("urn", "urn.delimited"), - entitySpecs.stream() - .flatMap(spec -> TEST_CUSTOM_BUILDER.getFieldsFromEntitySpec(spec).stream()) - .map(SearchFieldConfig::fieldName)) + Set<String> expectedFieldNames = + Stream.concat( + // Standard urn fields plus entitySpec sourced fields + Stream.of("urn", "urn.delimited"), + entitySpecs.stream() + .flatMap(spec -> TEST_CUSTOM_BUILDER.getFieldsFromEntitySpec(spec).stream()) + .map(SearchFieldConfig::fieldName)) .collect(Collectors.toSet()); - Set<String> actualFieldNames = TEST_CUSTOM_BUILDER.getStandardFields(entitySpecs).stream() + Set<String> actualFieldNames = + TEST_CUSTOM_BUILDER.getStandardFields(entitySpecs).stream() .map(SearchFieldConfig::fieldName) .collect(Collectors.toSet()); - assertEquals(actualFieldNames, expectedFieldNames, - String.format("Missing: %s Extra: %s", - expectedFieldNames.stream().filter(f -> !actualFieldNames.contains(f)).collect(Collectors.toSet()), - actualFieldNames.stream().filter(f -> !expectedFieldNames.contains(f)).collect(Collectors.toSet()))); + assertEquals( + actualFieldNames, + expectedFieldNames, + String.format( + "Missing: %s Extra: %s", + expectedFieldNames.stream() + .filter(f -> !actualFieldNames.contains(f)) + .collect(Collectors.toSet()), + actualFieldNames.stream() + .filter(f -> !expectedFieldNames.contains(f)) + .collect(Collectors.toSet()))); } @Test public void testGetStandardFields() { - Set<SearchFieldConfig> fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec())); + Set<SearchFieldConfig> fieldConfigs = + TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec())); assertEquals(fieldConfigs.size(), 21); - assertEquals(fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( + assertEquals( + fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), + Set.of( "nestedArrayArrayField", "esObjectField", "foreignKey", @@ -344,45 +386,90 @@ public void testGetStandardFields() { "urn", "wordGramField.wordGrams2")); - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("keyPart1")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 10.0F)); - assertEquals(fieldConfigs.stream().filter(field -> 
field.fieldName().equals("nestedForeignKey")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 1.0F)); - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("textFieldOverride")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 1.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("keyPart1")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(10.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("nestedForeignKey")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(1.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("textFieldOverride")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(1.0F)); EntitySpec mockEntitySpec = Mockito.mock(EntitySpec.class); - Mockito.when(mockEntitySpec.getSearchableFieldSpecs()).thenReturn(List.of( - new SearchableFieldSpec( + Mockito.when(mockEntitySpec.getSearchableFieldSpecs()) + .thenReturn( + List.of( + new SearchableFieldSpec( Mockito.mock(PathSpec.class), - new SearchableAnnotation("fieldDoesntExistInOriginal", - SearchableAnnotation.FieldType.TEXT, - true, true, false, false, - Optional.empty(), Optional.empty(), 13.0, - Optional.empty(), Optional.empty(), Map.of(), List.of()), + new SearchableAnnotation( + "fieldDoesntExistInOriginal", + SearchableAnnotation.FieldType.TEXT, + true, + true, + false, + false, + Optional.empty(), + Optional.empty(), + 13.0, + Optional.empty(), + Optional.empty(), + Map.of(), + List.of()), Mockito.mock(DataSchema.class)), - new SearchableFieldSpec( + new SearchableFieldSpec( Mockito.mock(PathSpec.class), - new SearchableAnnotation("keyPart1", - SearchableAnnotation.FieldType.KEYWORD, - true, true, false, false, - Optional.empty(), Optional.empty(), 20.0, - Optional.empty(), Optional.empty(), Map.of(), List.of()), + new SearchableAnnotation( + "keyPart1", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.empty(), + Optional.empty(), + 20.0, + Optional.empty(), + Optional.empty(), + Map.of(), + List.of()), Mockito.mock(DataSchema.class)), - new SearchableFieldSpec( + new SearchableFieldSpec( Mockito.mock(PathSpec.class), - new SearchableAnnotation("textFieldOverride", - SearchableAnnotation.FieldType.WORD_GRAM, - true, true, false, false, - Optional.empty(), Optional.empty(), 3.0, - Optional.empty(), Optional.empty(), Map.of(), List.of()), - Mockito.mock(DataSchema.class))) - ); - - fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec)); - // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the textFieldOverride + new SearchableAnnotation( + "textFieldOverride", + SearchableAnnotation.FieldType.WORD_GRAM, + true, + true, + false, + false, + Optional.empty(), + Optional.empty(), + 3.0, + Optional.empty(), + Optional.empty(), + Map.of(), + List.of()), + Mockito.mock(DataSchema.class)))); + + fieldConfigs = + TEST_CUSTOM_BUILDER.getStandardFields( + ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec)); + // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the + // textFieldOverride assertEquals(fieldConfigs.size(), 26); - assertEquals(fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( + assertEquals( + fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), + Set.of( 
"nestedArrayArrayField", "esObjectField", "foreignKey", @@ -411,13 +498,25 @@ public void testGetStandardFields() { "textFieldOverride.wordGrams4")); // Field which only exists in first one: Should be the same - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("nestedForeignKey")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 1.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("nestedForeignKey")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(1.0F)); // Average boost value: 10 vs. 20 -> 15 - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("keyPart1")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 15.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("keyPart1")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(15.0F)); // Field which added word gram fields: Original boost should be boost value averaged - assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("textFieldOverride")).findFirst().map(SearchFieldConfig::boost), Optional.of( - 2.0F)); + assertEquals( + fieldConfigs.stream() + .filter(field -> field.fieldName().equals("textFieldOverride")) + .findFirst() + .map(SearchFieldConfig::boost), + Optional.of(2.0F)); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java index 0ea035a10f91d..3afb04afb917b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java @@ -1,14 +1,26 @@ package com.linkedin.metadata.search.query.request; +import static com.linkedin.metadata.utils.SearchUtil.*; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.TestEntitySpecBuilder; import com.linkedin.metadata.config.search.ExactMatchConfiguration; import com.linkedin.metadata.config.search.PartialConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.google.common.collect.ImmutableList; -import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.config.search.WordGramConfiguration; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; -import com.linkedin.metadata.TestEntitySpecBuilder; -import com.linkedin.metadata.config.search.WordGramConfiguration; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -19,16 +31,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - -import 
com.linkedin.metadata.models.EntitySpec;
-import com.linkedin.metadata.models.registry.EntityRegistry;
-import com.linkedin.metadata.query.SearchFlags;
-import com.linkedin.metadata.query.filter.Condition;
-import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
-import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
-import com.linkedin.metadata.query.filter.Criterion;
-import com.linkedin.metadata.query.filter.CriterionArray;
-import com.linkedin.metadata.query.filter.Filter;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.index.query.BoolQueryBuilder;
 import org.opensearch.index.query.ExistsQueryBuilder;
@@ -45,16 +47,12 @@
 import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.utils.SearchUtil.*;
-import static org.testng.Assert.*;
-
-
 @Import(SearchCommonTestConfiguration.class)
 public class SearchRequestHandlerTest extends AbstractTestNGSpringContextTests {
 
-  @Autowired
-  private EntityRegistry entityRegistry;
+  @Autowired private EntityRegistry entityRegistry;
 
 public static SearchConfiguration testQueryConfig;
+
 static {
 testQueryConfig = new SearchConfiguration();
 testQueryConfig.setMaxTermBucketSize(20);
@@ -84,29 +82,42 @@ public class SearchRequestHandlerTest extends AbstractTestNGSpringContextTests {
 
 @Test
 public void testDatasetFieldsAndHighlights() {
 EntitySpec entitySpec = entityRegistry.getEntitySpec("dataset");
- SearchRequestHandler datasetHandler = SearchRequestHandler.getBuilder(entitySpec, testQueryConfig, null);
+ SearchRequestHandler datasetHandler =
+ SearchRequestHandler.getBuilder(entitySpec, testQueryConfig, null);
 
 /*
- Ensure efficient query performance, we do not expect upstream/downstream/fineGrained lineage
- */
- List<String> highlightFields = datasetHandler.getHighlights().fields().stream()
+ To ensure efficient query performance, we do not expect upstream/downstream/fineGrained lineage
+ */
+ List<String> highlightFields =
+ datasetHandler.getHighlights().fields().stream()
 .map(HighlightBuilder.Field::name)
 .collect(Collectors.toList());
- assertTrue(highlightFields.stream().noneMatch(
- fieldName -> fieldName.contains("upstream") || fieldName.contains("downstream")
- ), "unexpected lineage fields in highlights: " + highlightFields);
+ assertTrue(
+ highlightFields.stream()
+ .noneMatch(
+ fieldName -> fieldName.contains("upstream") || fieldName.contains("downstream")),
+ "unexpected lineage fields in highlights: " + highlightFields);
 }
 
 @Test
 public void testSearchRequestHandlerHighlightingTurnedOff() {
- SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
- SearchRequest searchRequest = requestHandler.getSearchRequest("testQuery", null, null, 0,
- 10, new SearchFlags().setFulltext(false).setSkipHighlighting(true), null);
+ SearchRequestHandler requestHandler =
+ SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
+ SearchRequest searchRequest =
+ requestHandler.getSearchRequest(
+ "testQuery",
+ null,
+ null,
+ 0,
+ 10,
+ new SearchFlags().setFulltext(false).setSkipHighlighting(true),
+ null);
 SearchSourceBuilder sourceBuilder = searchRequest.source();
 assertEquals(sourceBuilder.from(), 0);
 assertEquals(sourceBuilder.size(), 10);
 // Filters
- Collection<AggregationBuilder> aggBuilders = sourceBuilder.aggregations().getAggregatorFactories();
+ Collection<AggregationBuilder> aggBuilders =
+ 
sourceBuilder.aggregations().getAggregatorFactories();
 // Expect 2 aggregations: textFieldOverride and _index
 assertEquals(aggBuilders.size(), 2);
 for (AggregationBuilder aggBuilder : aggBuilders) {
@@ -123,44 +134,73 @@ public void testSearchRequestHandlerHighlightingTurnedOff() {
 
 @Test
 public void testSearchRequestHandler() {
- SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
- SearchRequest searchRequest = requestHandler.getSearchRequest("testQuery", null, null, 0,
- 10, new SearchFlags().setFulltext(false), null);
+ SearchRequestHandler requestHandler =
+ SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
+ SearchRequest searchRequest =
+ requestHandler.getSearchRequest(
+ "testQuery", null, null, 0, 10, new SearchFlags().setFulltext(false), null);
 SearchSourceBuilder sourceBuilder = searchRequest.source();
 assertEquals(sourceBuilder.from(), 0);
 assertEquals(sourceBuilder.size(), 10);
 
 // Filters
- Collection<AggregationBuilder> aggBuilders = sourceBuilder.aggregations().getAggregatorFactories();
+ Collection<AggregationBuilder> aggBuilders =
+ sourceBuilder.aggregations().getAggregatorFactories();
 // Expect 2 aggregations: textFieldOverride and _index
-    assertEquals(aggBuilders.size(), 2);
-    for (AggregationBuilder aggBuilder : aggBuilders) {
-      if (aggBuilder.getName().equals("textFieldOverride")) {
-        TermsAggregationBuilder filterPanelBuilder = (TermsAggregationBuilder) aggBuilder;
-        assertEquals(filterPanelBuilder.field(), "textFieldOverride.keyword");
-      } else if (!aggBuilder.getName().equals("_entityType")) {
-        fail("Found unexepected aggregation: " + aggBuilder.getName());
-      }
-    }
+    assertEquals(aggBuilders.size(), 2);
+    for (AggregationBuilder aggBuilder : aggBuilders) {
+      if (aggBuilder.getName().equals("textFieldOverride")) {
+        TermsAggregationBuilder filterPanelBuilder = (TermsAggregationBuilder) aggBuilder;
+        assertEquals(filterPanelBuilder.field(), "textFieldOverride.keyword");
+      } else if (!aggBuilder.getName().equals("_entityType")) {
+        fail("Found unexpected aggregation: " + aggBuilder.getName());
+      }
+    }
 
 // Highlights
 HighlightBuilder highlightBuilder = sourceBuilder.highlighter();
 List<String> fields =
- highlightBuilder.fields().stream().map(HighlightBuilder.Field::name).collect(Collectors.toList());
+ highlightBuilder.fields().stream()
+ .map(HighlightBuilder.Field::name)
+ .collect(Collectors.toList());
 assertEquals(fields.size(), 22);
 List<String> highlightableFields =
- ImmutableList.of("keyPart1", "textArrayField", "textFieldOverride", "foreignKey", "nestedForeignKey",
- "nestedArrayStringField", "nestedArrayArrayField", "customProperties", "esObjectField", "wordGramField");
+ ImmutableList.of(
+ "keyPart1",
+ "textArrayField",
+ "textFieldOverride",
+ "foreignKey",
+ "nestedForeignKey",
+ "nestedArrayStringField",
+ "nestedArrayArrayField",
+ "customProperties",
+ "esObjectField",
+ "wordGramField");
- highlightableFields.forEach(field -> {
- assertTrue(fields.contains(field), "Missing: " + field);
- assertTrue(fields.contains(field + ".*"), "Missing: " + field + ".*");
- });
+ highlightableFields.forEach(
+ field -> {
+ assertTrue(fields.contains(field), "Missing: " + field);
+ assertTrue(fields.contains(field + ".*"), "Missing: " + field + ".*");
+ });
 }
 
 @Test
 public void testAggregationsInSearch() {
- SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
- final String 
nestedAggString = String.format("_entityType%stextFieldOverride", AGGREGATION_SEPARATOR_CHAR); - SearchRequest searchRequest = requestHandler.getSearchRequest("*", null, null, 0, - 10, new SearchFlags().setFulltext(true), List.of("textFieldOverride", "_entityType", nestedAggString, AGGREGATION_SEPARATOR_CHAR, "not_a_facet")); + SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + final String nestedAggString = + String.format("_entityType%stextFieldOverride", AGGREGATION_SEPARATOR_CHAR); + SearchRequest searchRequest = + requestHandler.getSearchRequest( + "*", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(true), + List.of( + "textFieldOverride", + "_entityType", + nestedAggString, + AGGREGATION_SEPARATOR_CHAR, + "not_a_facet")); SearchSourceBuilder sourceBuilder = searchRequest.source(); // Filters Collection<AggregationBuilder> aggregationBuilders = @@ -168,17 +208,28 @@ public void testAggregationsInSearch() { assertEquals(aggregationBuilders.size(), 3); // Expected aggregations - AggregationBuilder expectedTextFieldAggregationBuilder = AggregationBuilders.terms("textFieldOverride") - .field("textFieldOverride.keyword").size(testQueryConfig.getMaxTermBucketSize()); - AggregationBuilder expectedEntityTypeAggregationBuilder = AggregationBuilders.terms("_entityType") - .field("_index").size(testQueryConfig.getMaxTermBucketSize()).minDocCount(0); - AggregationBuilder expectedNestedAggregationBuilder = AggregationBuilders.terms(nestedAggString).field("_index") - .size(testQueryConfig.getMaxTermBucketSize()).minDocCount(0) - .subAggregation(AggregationBuilders.terms(nestedAggString) - .field("textFieldOverride.keyword").size(testQueryConfig.getMaxTermBucketSize())); + AggregationBuilder expectedTextFieldAggregationBuilder = + AggregationBuilders.terms("textFieldOverride") + .field("textFieldOverride.keyword") + .size(testQueryConfig.getMaxTermBucketSize()); + AggregationBuilder expectedEntityTypeAggregationBuilder = + AggregationBuilders.terms("_entityType") + .field("_index") + .size(testQueryConfig.getMaxTermBucketSize()) + .minDocCount(0); + AggregationBuilder expectedNestedAggregationBuilder = + AggregationBuilders.terms(nestedAggString) + .field("_index") + .size(testQueryConfig.getMaxTermBucketSize()) + .minDocCount(0) + .subAggregation( + AggregationBuilders.terms(nestedAggString) + .field("textFieldOverride.keyword") + .size(testQueryConfig.getMaxTermBucketSize())); for (AggregationBuilder builder : aggregationBuilders) { - if (builder.getName().equals("textFieldOverride") || builder.getName().equals("_entityType")) { + if (builder.getName().equals("textFieldOverride") + || builder.getName().equals("_entityType")) { assertTrue(builder.getSubAggregations().isEmpty()); if (builder.getName().equalsIgnoreCase("textFieldOverride")) { assertEquals(builder, expectedTextFieldAggregationBuilder); @@ -200,7 +251,8 @@ public void testAggregationsInSearch() { @Test public void testFilteredSearch() { - final SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + final SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); final BoolQueryBuilder testQuery = constructFilterQuery(requestHandler, false); @@ -210,7 +262,6 @@ public void testFilteredSearch() { testRemovedQuery(queryWithRemoved); - final BoolQueryBuilder testQueryScroll = 
constructFilterQuery(requestHandler, true); testFilterQuery(testQueryScroll); @@ -220,138 +271,190 @@ public void testFilteredSearch() { testRemovedQuery(queryWithRemovedScroll); } - private BoolQueryBuilder constructFilterQuery(SearchRequestHandler requestHandler, boolean scroll) { - final Criterion filterCriterion = new Criterion() - .setField("keyword") - .setCondition(Condition.EQUAL) - .setValue("some value"); + private BoolQueryBuilder constructFilterQuery( + SearchRequestHandler requestHandler, boolean scroll) { + final Criterion filterCriterion = + new Criterion().setField("keyword").setCondition(Condition.EQUAL).setValue("some value"); - final Filter filterWithoutRemovedCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))) - )); + final Filter filterWithoutRemovedCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))))); final BoolQueryBuilder testQuery; if (scroll) { - testQuery = (BoolQueryBuilder) requestHandler - .getSearchRequest("testQuery", filterWithoutRemovedCondition, null, null, null, - "5m", 10, new SearchFlags().setFulltext(false)) - .source() - .query(); + testQuery = + (BoolQueryBuilder) + requestHandler + .getSearchRequest( + "testQuery", + filterWithoutRemovedCondition, + null, + null, + null, + "5m", + 10, + new SearchFlags().setFulltext(false)) + .source() + .query(); } else { testQuery = - (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithoutRemovedCondition, null, - 0, 10, new SearchFlags().setFulltext(false), null).source().query(); + (BoolQueryBuilder) + requestHandler + .getSearchRequest( + "testQuery", + filterWithoutRemovedCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(false), + null) + .source() + .query(); } return testQuery; } private void testFilterQuery(BoolQueryBuilder testQuery) { - Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .map(or -> (BoolQueryBuilder) or) - .flatMap(or -> { - System.out.println("processing: " + or.mustNot()); - return or.mustNot().stream(); - }) - .filter(and -> and instanceof MatchQueryBuilder) - .map(and -> (MatchQueryBuilder) and) - .filter(match -> match.fieldName().equals("removed")) - .findAny(); - - assertTrue(mustNotHaveRemovedCondition.isPresent(), "Expected must not have removed condition to exist" - + " if filter does not have it"); + Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .map(or -> (BoolQueryBuilder) or) + .flatMap( + or -> { + System.out.println("processing: " + or.mustNot()); + return or.mustNot().stream(); + }) + .filter(and -> and instanceof MatchQueryBuilder) + .map(and -> (MatchQueryBuilder) and) + .filter(match -> match.fieldName().equals("removed")) + .findAny(); + + assertTrue( + mustNotHaveRemovedCondition.isPresent(), + "Expected must not have removed condition to exist" + " if filter does not have it"); } - private BoolQueryBuilder constructRemovedQuery(SearchRequestHandler requestHandler, boolean scroll) { - final Criterion filterCriterion = new Criterion() - .setField("keyword") - .setCondition(Condition.EQUAL) - .setValue("some value"); - - final Criterion removedCriterion = new Criterion() - .setField("removed") - .setCondition(Condition.EQUAL) - 
.setValue(String.valueOf(false));
-
- final Filter filterWithRemovedCondition = new Filter().setOr(
- new ConjunctiveCriterionArray(
- new ConjunctiveCriterion().setAnd(
- new CriterionArray(ImmutableList.of(filterCriterion, removedCriterion)))
- ));
+ private BoolQueryBuilder constructRemovedQuery(
+ SearchRequestHandler requestHandler, boolean scroll) {
+ final Criterion filterCriterion =
+ new Criterion().setField("keyword").setCondition(Condition.EQUAL).setValue("some value");
+
+ final Criterion removedCriterion =
+ new Criterion()
+ .setField("removed")
+ .setCondition(Condition.EQUAL)
+ .setValue(String.valueOf(false));
+
+ final Filter filterWithRemovedCondition =
+ new Filter()
+ .setOr(
+ new ConjunctiveCriterionArray(
+ new ConjunctiveCriterion()
+ .setAnd(
+ new CriterionArray(
+ ImmutableList.of(filterCriterion, removedCriterion)))));
 
 final BoolQueryBuilder queryWithRemoved;
 if (scroll) {
- queryWithRemoved = (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithRemovedCondition,
- null, null, null, "5m", 10, new SearchFlags().setFulltext(false)).source().query();
+ queryWithRemoved =
+ (BoolQueryBuilder)
+ requestHandler
+ .getSearchRequest(
+ "testQuery",
+ filterWithRemovedCondition,
+ null,
+ null,
+ null,
+ "5m",
+ 10,
+ new SearchFlags().setFulltext(false))
+ .source()
+ .query();
 } else {
 queryWithRemoved =
- (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithRemovedCondition,
- null, 0, 10, new SearchFlags().setFulltext(false), null).source().query();
+ (BoolQueryBuilder)
+ requestHandler
+ .getSearchRequest(
+ "testQuery",
+ filterWithRemovedCondition,
+ null,
+ 0,
+ 10,
+ new SearchFlags().setFulltext(false),
+ null)
+ .source()
+ .query();
 }
 return queryWithRemoved;
 }
 
 private void testRemovedQuery(BoolQueryBuilder queryWithRemoved) {
- Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = queryWithRemoved.must()
- .stream()
- .filter(or -> or instanceof BoolQueryBuilder)
- .map(or -> (BoolQueryBuilder) or)
- .flatMap(or -> {
- System.out.println("processing: " + or.mustNot());
- return or.mustNot().stream();
- })
- .filter(and -> and instanceof MatchQueryBuilder)
- .map(and -> (MatchQueryBuilder) and)
- .filter(match -> match.fieldName().equals("removed"))
- .findAny();
-
- assertFalse(mustNotHaveRemovedCondition.isPresent(), "Expect `must not have removed` condition to not"
- + " exist because filter already has it a condition for the removed property");
+ Optional<MatchQueryBuilder> mustNotHaveRemovedCondition =
+ queryWithRemoved.must().stream()
+ .filter(or -> or instanceof BoolQueryBuilder)
+ .map(or -> (BoolQueryBuilder) or)
+ .flatMap(
+ or -> {
+ System.out.println("processing: " + or.mustNot());
+ return or.mustNot().stream();
+ })
+ .filter(and -> and instanceof MatchQueryBuilder)
+ .map(and -> (MatchQueryBuilder) and)
+ .filter(match -> match.fieldName().equals("removed"))
+ .findAny();
+
+ assertFalse(
+ mustNotHaveRemovedCondition.isPresent(),
+ "Expect `must not have removed` condition to not"
+ + " exist because the filter already has a condition for the removed property");
 }
 
 // For fields that are one of EDITABLE_FIELD_TO_QUERY_PAIRS, we want to make sure
- // a filter that has a list of values like below will filter on all values by generating a terms query
+ // a filter that has a list of values like below will filter on all values by generating a terms
+ // query
 // field EQUAL [value1, value2, ...]
@Test public void testFilterFieldTagsByValues() { - final Criterion filterCriterion = new Criterion() - .setField("fieldTags") - .setCondition(Condition.EQUAL) - .setValue("v1") - .setValues(new StringArray("v1", "v2")); + final Criterion filterCriterion = + new Criterion() + .setField("fieldTags") + .setCondition(Condition.EQUAL) + .setValue("v1") + .setValues(new StringArray("v1", "v2")); final BoolQueryBuilder testQuery = getQuery(filterCriterion); // bool -> filter -> [bool] -> should -> [bool] -> filter -> [bool] -> should -> [terms] - List<TermsQueryBuilder> termsQueryBuilders = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof TermsQueryBuilder) - .map(should -> (TermsQueryBuilder) should) - .collect(Collectors.toList()); + List<TermsQueryBuilder> termsQueryBuilders = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) + .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof TermsQueryBuilder) + .map(should -> (TermsQueryBuilder) should) + .collect(Collectors.toList()); assertTrue(termsQueryBuilders.size() == 2, "Expected to find two terms queries"); Map<String, List<String>> termsMap = new HashMap<>(); - termsQueryBuilders.forEach(termsQueryBuilder -> { - String field = termsQueryBuilder.fieldName(); - List<Object> values = termsQueryBuilder.values(); - List<String> strValues = new ArrayList<>(); - for (Object value : values) { - assertTrue(value instanceof String, - "Expected value to be String, got: " + value.getClass()); - strValues.add((String) value); - } - Collections.sort(strValues); - termsMap.put(field, strValues); - }); + termsQueryBuilders.forEach( + termsQueryBuilder -> { + String field = termsQueryBuilder.fieldName(); + List<Object> values = termsQueryBuilder.values(); + List<String> strValues = new ArrayList<>(); + for (Object value : values) { + assertTrue( + value instanceof String, "Expected value to be String, got: " + value.getClass()); + strValues.add((String) value); + } + Collections.sort(strValues); + termsMap.put(field, strValues); + }); assertTrue(termsMap.containsKey("fieldTags.keyword")); assertTrue(termsMap.containsKey("editedFieldTags.keyword")); @@ -367,35 +470,35 @@ public void testFilterFieldTagsByValues() { // pair of fields @Test public void testFilterFieldTagsByValue() { - final Criterion filterCriterion = new Criterion() - .setField("fieldTags") - .setCondition(Condition.EQUAL) - .setValue("v1"); + final Criterion filterCriterion = + new Criterion().setField("fieldTags").setCondition(Condition.EQUAL).setValue("v1"); final BoolQueryBuilder testQuery = getQuery(filterCriterion); - // bool -> must -> [bool] -> should -> [bool] -> must -> [bool] -> should -> [bool] -> should -> [match] - List<MultiMatchQueryBuilder> matchQueryBuilders = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) - .filter(should 
-> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).should().stream()) - .filter(should -> should instanceof MultiMatchQueryBuilder) - .map(should -> (MultiMatchQueryBuilder) should) - .collect(Collectors.toList()); + // bool -> must -> [bool] -> should -> [bool] -> must -> [bool] -> should -> [bool] -> should -> + // [match] + List<MultiMatchQueryBuilder> matchQueryBuilders = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) + .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).should().stream()) + .filter(should -> should instanceof MultiMatchQueryBuilder) + .map(should -> (MultiMatchQueryBuilder) should) + .collect(Collectors.toList()); assertTrue(matchQueryBuilders.size() == 2, "Expected to find two match queries"); Map<String, String> matchMap = new HashMap<>(); - matchQueryBuilders.forEach(matchQueryBuilder -> { - Set<String> fields = matchQueryBuilder.fields().keySet(); - assertTrue(matchQueryBuilder.value() instanceof String); - fields.forEach(field -> matchMap.put(field, (String) matchQueryBuilder.value())); - }); + matchQueryBuilders.forEach( + matchQueryBuilder -> { + Set<String> fields = matchQueryBuilder.fields().keySet(); + assertTrue(matchQueryBuilder.value() instanceof String); + fields.forEach(field -> matchMap.put(field, (String) matchQueryBuilder.value())); + }); assertTrue(matchMap.containsKey("fieldTags.keyword")); assertTrue(matchMap.containsKey("editedFieldTags.keyword")); @@ -407,65 +510,68 @@ public void testFilterFieldTagsByValue() { // Test fields not in EDITABLE_FIELD_TO_QUERY_PAIRS with a single value @Test public void testFilterPlatformByValue() { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("mysql"); + final Criterion filterCriterion = + new Criterion().setField("platform").setCondition(Condition.EQUAL).setValue("mysql"); final BoolQueryBuilder testQuery = getQuery(filterCriterion); // bool -> filter -> [bool] -> should -> [bool] -> filter -> [bool] -> should -> [match] - List<MultiMatchQueryBuilder> matchQueryBuilders = testQuery.filter() - .stream() - .filter(or -> or instanceof BoolQueryBuilder) - .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof MultiMatchQueryBuilder) - .map(should -> (MultiMatchQueryBuilder) should) - .collect(Collectors.toList()); + List<MultiMatchQueryBuilder> matchQueryBuilders = + testQuery.filter().stream() + .filter(or -> or instanceof BoolQueryBuilder) + .flatMap(or -> ((BoolQueryBuilder) or).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) 
+ .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof MultiMatchQueryBuilder) + .map(should -> (MultiMatchQueryBuilder) should) + .collect(Collectors.toList()); assertTrue(matchQueryBuilders.size() == 1, "Expected to find one match query"); MultiMatchQueryBuilder matchQueryBuilder = matchQueryBuilders.get(0); - assertEquals(matchQueryBuilder.fields(), Map.of( + assertEquals( + matchQueryBuilder.fields(), + Map.of( "platform", 1.0f, - "platform.*", 1.0f) - ); + "platform.*", 1.0f)); assertEquals(matchQueryBuilder.value(), "mysql"); } // Test fields not in EDITABLE_FIELD_TO_QUERY_PAIRS with a list of values @Test public void testFilterPlatformByValues() { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("mysql") - .setValues(new StringArray("mysql", "bigquery")); + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("mysql") + .setValues(new StringArray("mysql", "bigquery")); final BoolQueryBuilder testQuery = getQuery(filterCriterion); // bool -> filter -> [bool] -> should -> [bool] -> filter -> [terms] - List<TermsQueryBuilder> termsQueryBuilders = testQuery.filter() - .stream() - .filter(must -> must instanceof BoolQueryBuilder) - .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) - .filter(should -> should instanceof BoolQueryBuilder) - .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) - .filter(must -> must instanceof TermsQueryBuilder) - .map(must -> (TermsQueryBuilder) must) - .collect(Collectors.toList()); + List<TermsQueryBuilder> termsQueryBuilders = + testQuery.filter().stream() + .filter(must -> must instanceof BoolQueryBuilder) + .flatMap(must -> ((BoolQueryBuilder) must).should().stream()) + .filter(should -> should instanceof BoolQueryBuilder) + .flatMap(should -> ((BoolQueryBuilder) should).filter().stream()) + .filter(must -> must instanceof TermsQueryBuilder) + .map(must -> (TermsQueryBuilder) must) + .collect(Collectors.toList()); assertTrue(termsQueryBuilders.size() == 1, "Expected to find one terms query"); final TermsQueryBuilder termsQueryBuilder = termsQueryBuilders.get(0); assertEquals(termsQueryBuilder.fieldName(), "platform.keyword"); Set<String> values = new HashSet<>(); - termsQueryBuilder.values().forEach(value -> { - assertTrue(value instanceof String); - values.add((String) value); - }); + termsQueryBuilder + .values() + .forEach( + value -> { + assertTrue(value instanceof String); + values.add((String) value); + }); assertEquals(values.size(), 2, "Expected two platform filter values"); assertTrue(values.contains("mysql")); @@ -511,18 +617,20 @@ public void testBrowsePathQueryFilter() { } private BoolQueryBuilder getQuery(final Criterion filterCriterion) { - final Filter filter = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))) - )); - - final SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder( - TestEntitySpecBuilder.getSpec(), testQueryConfig, null); - - return (BoolQueryBuilder) requestHandler - .getSearchRequest("", filter, null, 0, 10, new SearchFlags().setFulltext(false), null) - .source() - .query(); + final Filter filter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))))); 
+ + final SearchRequestHandler requestHandler = + SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null); + + return (BoolQueryBuilder) + requestHandler + .getSearchRequest("", filter, null, 0, 10, new SearchFlags().setFulltext(false), null) + .source() + .query(); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java index 8888ef59ad7d2..6e2d90287d5d9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.search.transformer; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + import com.datahub.test.TestEntitySnapshot; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -13,28 +18,29 @@ import com.linkedin.metadata.models.EntitySpec; import java.io.IOException; import java.util.Optional; - import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertTrue; - - public class SearchDocumentTransformerTest { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } @Test public void testTransform() throws IOException { - SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 1000); + SearchDocumentTransformer searchDocumentTransformer = + new SearchDocumentTransformer(1000, 1000, 1000); TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot(); EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); - Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); + Optional<String> result = + searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); assertTrue(result.isPresent()); ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); assertEquals(parsedJson.get("urn").asText(), snapshot.getUrn().toString()); @@ -65,10 +71,12 @@ public void testTransform() throws IOException { @Test public void testTransformForDelete() throws IOException { - SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 1000); + SearchDocumentTransformer searchDocumentTransformer = + new SearchDocumentTransformer(1000, 1000, 1000); TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot(); EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); - Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, true); + 
Optional<String> result = + searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, true); assertTrue(result.isPresent()); ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); assertEquals(parsedJson.get("urn").asText(), snapshot.getUrn().toString()); @@ -86,14 +94,18 @@ public void testTransformForDelete() throws IOException { @Test public void testTransformMaxFieldValue() throws IOException { - SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 5); + SearchDocumentTransformer searchDocumentTransformer = + new SearchDocumentTransformer(1000, 1000, 5); TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot(); EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); - Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); + Optional<String> result = + searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false); assertTrue(result.isPresent()); ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); - assertEquals(parsedJson.get("customProperties"), JsonNodeFactory.instance.arrayNode().add("shortValue=123")); + assertEquals( + parsedJson.get("customProperties"), + JsonNodeFactory.instance.arrayNode().add("shortValue=123")); assertEquals(parsedJson.get("esObjectField"), JsonNodeFactory.instance.arrayNode().add("123")); searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 20); @@ -103,10 +115,21 @@ public void testTransformMaxFieldValue() throws IOException { assertTrue(result.isPresent()); parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get()); - - assertEquals(parsedJson.get("customProperties"), JsonNodeFactory.instance.arrayNode() - .add("key1=value1").add("key2=value2").add("shortValue=123").add("longValue=0123456789")); - assertEquals(parsedJson.get("esObjectField"), JsonNodeFactory.instance.arrayNode() - .add("value1").add("value2").add("123").add("0123456789")); + assertEquals( + parsedJson.get("customProperties"), + JsonNodeFactory.instance + .arrayNode() + .add("key1=value1") + .add("key2=value2") + .add("shortValue=123") + .add("longValue=0123456789")); + assertEquals( + parsedJson.get("esObjectField"), + JsonNodeFactory.instance + .arrayNode() + .add("value1") + .add("value2") + .add("123") + .add("0123456789")); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java index 6127326db8ab9..e4e0d00391fa5 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java @@ -14,8 +14,6 @@ import org.testng.Assert; import org.testng.annotations.Test; - - public class BrowsePathUtilsTest { private final EntityRegistry registry = new TestEntityRegistry(); @@ -24,43 +22,40 @@ public class BrowsePathUtilsTest { public void testGetDefaultBrowsePath() throws URISyntaxException { // Datasets - DatasetKey datasetKey = new DatasetKey() - .setName("Test.A.B") - .setOrigin(FabricType.PROD) - .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); + DatasetKey datasetKey = + new DatasetKey() + .setName("Test.A.B") + .setOrigin(FabricType.PROD) + .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset"); String datasetPath = 
BrowsePathUtils.getDefaultBrowsePath(datasetUrn, this.registry, '.'); Assert.assertEquals(datasetPath, "/prod/kafka/test/a"); // Charts - ChartKey chartKey = new ChartKey() - .setChartId("Test/A/B") - .setDashboardTool("looker"); + ChartKey chartKey = new ChartKey().setChartId("Test/A/B").setDashboardTool("looker"); Urn chartUrn = EntityKeyUtils.convertEntityKeyToUrn(chartKey, "chart"); String chartPath = BrowsePathUtils.getDefaultBrowsePath(chartUrn, this.registry, '/'); Assert.assertEquals(chartPath, "/looker"); // Dashboards - DashboardKey dashboardKey = new DashboardKey() - .setDashboardId("Test/A/B") - .setDashboardTool("looker"); + DashboardKey dashboardKey = + new DashboardKey().setDashboardId("Test/A/B").setDashboardTool("looker"); Urn dashboardUrn = EntityKeyUtils.convertEntityKeyToUrn(dashboardKey, "dashboard"); String dashboardPath = BrowsePathUtils.getDefaultBrowsePath(dashboardUrn, this.registry, '/'); Assert.assertEquals(dashboardPath, "/looker"); // Data Flows - DataFlowKey dataFlowKey = new DataFlowKey() - .setCluster("test") - .setFlowId("Test/A/B") - .setOrchestrator("airflow"); + DataFlowKey dataFlowKey = + new DataFlowKey().setCluster("test").setFlowId("Test/A/B").setOrchestrator("airflow"); Urn dataFlowUrn = EntityKeyUtils.convertEntityKeyToUrn(dataFlowKey, "dataFlow"); String dataFlowPath = BrowsePathUtils.getDefaultBrowsePath(dataFlowUrn, this.registry, '/'); Assert.assertEquals(dataFlowPath, "/airflow/test"); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = + new DataJobKey() + .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) + .setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); String dataJobPath = BrowsePathUtils.getDefaultBrowsePath(dataJobUrn, this.registry, '/'); Assert.assertEquals(dataJobPath, "/airflow/test"); @@ -69,46 +64,42 @@ public void testGetDefaultBrowsePath() throws URISyntaxException { @Test public void testBuildDataPlatformUrn() throws URISyntaxException { // Datasets - DatasetKey datasetKey = new DatasetKey() - .setName("Test.A.B") - .setOrigin(FabricType.PROD) - .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); + DatasetKey datasetKey = + new DatasetKey() + .setName("Test.A.B") + .setOrigin(FabricType.PROD) + .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset"); Urn dataPlatformUrn1 = BrowsePathUtils.buildDataPlatformUrn(datasetUrn, this.registry); Assert.assertEquals(dataPlatformUrn1, Urn.createFromString("urn:li:dataPlatform:kafka")); // Charts - ChartKey chartKey = new ChartKey() - .setChartId("Test/A/B") - .setDashboardTool("looker"); + ChartKey chartKey = new ChartKey().setChartId("Test/A/B").setDashboardTool("looker"); Urn chartUrn = EntityKeyUtils.convertEntityKeyToUrn(chartKey, "chart"); Urn dataPlatformUrn2 = BrowsePathUtils.buildDataPlatformUrn(chartUrn, this.registry); Assert.assertEquals(dataPlatformUrn2, Urn.createFromString("urn:li:dataPlatform:looker")); // Dashboards - DashboardKey dashboardKey = new DashboardKey() - .setDashboardId("Test/A/B") - .setDashboardTool("looker"); + DashboardKey dashboardKey = + new DashboardKey().setDashboardId("Test/A/B").setDashboardTool("looker"); Urn dashboardUrn = EntityKeyUtils.convertEntityKeyToUrn(dashboardKey, "dashboard"); Urn dataPlatformUrn3 = 
BrowsePathUtils.buildDataPlatformUrn(dashboardUrn, this.registry); Assert.assertEquals(dataPlatformUrn3, Urn.createFromString("urn:li:dataPlatform:looker")); // Data Flows - DataFlowKey dataFlowKey = new DataFlowKey() - .setCluster("test") - .setFlowId("Test/A/B") - .setOrchestrator("airflow"); + DataFlowKey dataFlowKey = + new DataFlowKey().setCluster("test").setFlowId("Test/A/B").setOrchestrator("airflow"); Urn dataFlowUrn = EntityKeyUtils.convertEntityKeyToUrn(dataFlowKey, "dataFlow"); Urn dataPlatformUrn4 = BrowsePathUtils.buildDataPlatformUrn(dataFlowUrn, this.registry); Assert.assertEquals(dataPlatformUrn4, Urn.createFromString("urn:li:dataPlatform:airflow")); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = + new DataJobKey() + .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) + .setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); Urn dataPlatformUrn5 = BrowsePathUtils.buildDataPlatformUrn(dataJobUrn, this.registry); Assert.assertEquals(dataPlatformUrn5, Urn.createFromString("urn:li:dataPlatform:airflow")); - } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java index 8a85ae0396ee1..3041b13839768 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.linkedin.common.BrowsePathEntry; import com.linkedin.common.BrowsePathEntryArray; import com.linkedin.common.BrowsePathsV2; @@ -17,22 +22,17 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.eq; +import org.testng.Assert; +import org.testng.annotations.Test; public class BrowsePathV2UtilsTest { - private static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test.a.b,DEV)"; + private static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test.a.b,DEV)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; private static final String DATA_FLOW_URN = "urn:li:dataFlow:(orchestrator,flowId,cluster)"; @@ -46,12 +46,16 @@ public void testGetDefaultDatasetBrowsePathV2WithContainers() throws URISyntaxEx Urn datasetUrn = UrnUtils.getUrn(DATASET_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); + EntityService 
mockService = + initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -62,9 +66,12 @@ public void testGetDefaultDatasetBrowsePathV2WithContainersFlagOff() throws URIS Urn datasetUrn = UrnUtils.getUrn(DATASET_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, false); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + datasetUrn, this.registry, '.', mockService, false); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("test"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("a"); @@ -78,12 +85,16 @@ public void testGetDefaultChartBrowsePathV2WithContainers() throws URISyntaxExce Urn chartUrn = UrnUtils.getUrn(CHART_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(chartUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(chartUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -94,12 +105,17 @@ public void testGetDefaultDashboardBrowsePathV2WithContainers() throws URISyntax Urn dashboardUrn = UrnUtils.getUrn(DASHBOARD_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(dashboardUrn, 
containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(dashboardUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dashboardUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dashboardUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -110,15 +126,19 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept EntityService mockService = mock(EntityService.class); // Datasets - DatasetKey datasetKey = new DatasetKey() - .setName("Test.A.B") - .setOrigin(FabricType.PROD) - .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); + DatasetKey datasetKey = + new DatasetKey() + .setName("Test.A.B") + .setOrigin(FabricType.PROD) + .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset"); - when( - mockService.getEntityV2(eq(datasetUrn.getEntityType()), eq(datasetUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); + when(mockService.getEntityV2( + eq(datasetUrn.getEntityType()), + eq(datasetUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("test"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("a"); @@ -128,10 +148,13 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Charts Urn chartUrn = UrnUtils.getUrn(CHART_URN); - when( - mockService.getEntityV2(eq(chartUrn.getEntityType()), eq(chartUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(chartUrn.getEntityType()), + eq(chartUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); @@ -139,10 +162,14 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Dashboards Urn dashboardUrn = UrnUtils.getUrn(DASHBOARD_URN); - when( - 
mockService.getEntityV2(eq(dashboardUrn.getEntityType()), eq(dashboardUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dashboardUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(dashboardUrn.getEntityType()), + eq(dashboardUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dashboardUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); @@ -150,52 +177,64 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Data Flows Urn dataFlowUrn = UrnUtils.getUrn(DATA_FLOW_URN); - when( - mockService.getEntityV2(eq(dataFlowUrn.getEntityType()), eq(dataFlowUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dataFlowUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(dataFlowUrn.getEntityType()), + eq(dataFlowUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dataFlowUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(dataFlowUrn) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = new DataJobKey().setFlow(dataFlowUrn).setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dataJobUrn, this.registry, '/', mockService, true); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(dataJobUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId(dataFlowUrn.toString()).setUrn(dataFlowUrn); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); } - private EntityService initMockServiceWithContainerParents(Urn entityUrn, Urn containerUrn1, Urn containerUrn2) throws URISyntaxException { + private EntityService initMockServiceWithContainerParents( + Urn entityUrn, Urn containerUrn1, Urn containerUrn2) throws URISyntaxException { EntityService mockService = mock(EntityService.class); final Container container1 = new Container().setContainer(containerUrn1); final Map<String, EnvelopedAspect> aspectMap1 = new HashMap<>(); - aspectMap1.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container1.data()))); - final EntityResponse entityResponse1 = new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap1)); - when( - mockService.getEntityV2(eq(entityUrn.getEntityType()), eq(entityUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(entityResponse1); + aspectMap1.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container1.data()))); + final EntityResponse entityResponse1 = + new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap1)); + 
when(mockService.getEntityV2( + eq(entityUrn.getEntityType()), + eq(entityUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(entityResponse1); final Container container2 = new Container().setContainer(containerUrn2); final Map<String, EnvelopedAspect> aspectMap2 = new HashMap<>(); - aspectMap2.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container2.data()))); - final EntityResponse entityResponse2 = new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap2)); - when( - mockService.getEntityV2(eq(containerUrn1.getEntityType()), eq(containerUrn1), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(entityResponse2); - - when( - mockService.getEntityV2(eq(containerUrn2.getEntityType()), eq(containerUrn2), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + aspectMap2.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container2.data()))); + final EntityResponse entityResponse2 = + new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap2)); + when(mockService.getEntityV2( + eq(containerUrn1.getEntityType()), + eq(containerUrn1), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(entityResponse2); + + when(mockService.getEntityV2( + eq(containerUrn2.getEntityType()), + eq(containerUrn2), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); return mockService; - } } - diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java index ddd75a152c333..03abd9ffe29d7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java @@ -8,7 +8,6 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class ESUtilsTest { private static final String FIELD_TO_EXPAND = "fieldTags"; @@ -16,247 +15,241 @@ public class ESUtilsTest { @Test public void testGetQueryBuilderFromCriterionEqualsValues() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1" - ))); + final Criterion singleValueCriterion = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1"))); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"terms\" : {\n" - + " \"myTestField.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"terms\" : {\n" + + " \"myTestField.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion multiValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion multiValueCriterion = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); result = ESUtils.getQueryBuilderFromCriterion(multiValueCriterion, false); expected = "{\n" - + 
" \"terms\" : {\n" - + " \"myTestField.keyword\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"terms\" : {\n" + + " \"myTestField.keyword\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion timeseriesField = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"terms\" : {\n" - + " \"myTestField\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"terms\" : {\n" + + " \"myTestField\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - } @Test public void testGetQueryBuilderFromCriterionExists() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EXISTS); + final Criterion singleValueCriterion = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"bool\" : {\n" - + " \"must\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); // No diff in the timeseries field case for this condition. 
- final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.EXISTS); + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"bool\" : {\n" - + " \"must\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } @Test public void testGetQueryBuilderFromCriterionIsNull() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.IS_NULL); + final Criterion singleValueCriterion = + new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"bool\" : {\n" - + " \"must_not\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"must_not\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); // No diff in the timeseries case for this condition - final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.IS_NULL); + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"bool\" : {\n" - + " \"must_not\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must_not\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } @Test public void testGetQueryBuilderFromCriterionFieldToExpand() { - final Criterion singleValueCriterion = new Criterion() - .setField(FIELD_TO_EXPAND) - .setCondition(Condition.EQUAL) - .setValue("") // Ignored - .setValues(new StringArray(ImmutableList.of( - "value1" - ))); + final Criterion singleValueCriterion = + new Criterion() + .setField(FIELD_TO_EXPAND) + .setCondition(Condition.EQUAL) + .setValue("") // Ignored + .setValues(new 
StringArray(ImmutableList.of("value1"))); // Ensure that the query is expanded! QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); - String expected = "{\n" - + " \"bool\" : {\n" - + " \"should\" : [\n" - + " {\n" - + " \"terms\" : {\n" - + " \"fieldTags.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"fieldTags\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"terms\" : {\n" - + " \"editedFieldTags.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"editedFieldTags\"\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + String expected = + "{\n" + + " \"bool\" : {\n" + + " \"should\" : [\n" + + " {\n" + + " \"terms\" : {\n" + + " \"fieldTags.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"fieldTags\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"terms\" : {\n" + + " \"editedFieldTags.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"editedFieldTags\"\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion timeseriesField = new Criterion() - .setField(FIELD_TO_EXPAND) - .setCondition(Condition.EQUAL) - .setValue("") // Ignored - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion timeseriesField = + new Criterion() + .setField(FIELD_TO_EXPAND) + .setCondition(Condition.EQUAL) + .setValue("") // Ignored + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); // Ensure that the query is expanded without keyword. 
result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); expected = "{\n" - + " \"bool\" : {\n" - + " \"should\" : [\n" - + " {\n" - + " \"terms\" : {\n" - + " \"fieldTags\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"fieldTags\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"terms\" : {\n" - + " \"editedFieldTags\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"editedFieldTags\"\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"should\" : [\n" + + " {\n" + + " \"terms\" : {\n" + + " \"fieldTags\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"fieldTags\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"terms\" : {\n" + + " \"editedFieldTags\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"editedFieldTags\"\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java index 7aa3bb19f0df6..5ea58e3416205 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java @@ -1,75 +1,164 @@ package com.linkedin.metadata.search.utils; -import com.linkedin.metadata.query.SearchFlags; -import org.testng.annotations.Test; +import static org.testng.Assert.assertEquals; +import com.linkedin.metadata.query.SearchFlags; import java.util.Set; - -import static org.testng.Assert.assertEquals; +import org.testng.annotations.Test; public class SearchUtilsTest { - @Test - public void testApplyDefaultSearchFlags() { - SearchFlags defaultFlags = new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipAggregates(true) - .setMaxAggValues(1) - .setSkipHighlighting(true); + @Test + public void testApplyDefaultSearchFlags() { + SearchFlags defaultFlags = + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipAggregates(true) + .setMaxAggValues(1) + .setSkipHighlighting(true); - assertEquals(SearchUtils.applyDefaultSearchFlags(null, "not empty", defaultFlags), defaultFlags, - "Expected all default values"); + assertEquals( + SearchUtils.applyDefaultSearchFlags(null, "not empty", defaultFlags), + defaultFlags, + "Expected all default values"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(false), - "Expected no default values"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "not empty", + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "Expected no default values"); - 
assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), null, defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(true), - "Expected skip highlight due to query null query"); - for (String query : Set.of("*", "")) { - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), query, defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(true), - String.format("Expected skip highlight due to query string `%s`", query)); - } - - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(true), - "Expected all default values except fulltext"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipCache(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(false).setMaxAggValues(1).setSkipHighlighting(true), - "Expected all default values except skipCache"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipAggregates(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(false).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(true), - "Expected all default values except skipAggregates"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setMaxAggValues(2), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(2).setSkipHighlighting(true), - "Expected all default values except maxAggValues"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(false), - "Expected all default values except skipHighlighting"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + null, + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(true), + "Expected skip highlight due to query null query"); + for (String query : Set.of("*", "")) { + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + query, + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(true), + String.format("Expected skip highlight due to query string `%s`", query)); } - @Test - public void testImmutableDefaults() throws CloneNotSupportedException { - SearchFlags defaultFlags = new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipAggregates(true) - .setMaxAggValues(1) - .setSkipHighlighting(true); - SearchFlags copyFlags = 
defaultFlags.copy(); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setFulltext(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except fulltext"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipCache(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(false) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except skipCache"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipAggregates(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except skipAggregates"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setMaxAggValues(2), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(2) + .setSkipHighlighting(true), + "Expected all default values except maxAggValues"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipHighlighting(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(false), + "Expected all default values except skipHighlighting"); + } - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(false), - "Expected no default values"); + @Test + public void testImmutableDefaults() throws CloneNotSupportedException { + SearchFlags defaultFlags = + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipAggregates(true) + .setMaxAggValues(1) + .setSkipHighlighting(true); + SearchFlags copyFlags = defaultFlags.copy(); - assertEquals(defaultFlags, copyFlags, "Expected defaults to be unmodified"); - } + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "not empty", + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "Expected no default values"); + assertEquals(defaultFlags, copyFlags, "Expected defaults to be unmodified"); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java index e6a9bd7d198f7..8643855162fa7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.systemmetadata; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import 
com.linkedin.metadata.run.AspectRowSummary;
 import com.linkedin.metadata.run.IngestionRunSummary;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
@@ -8,30 +11,27 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl;
 import com.linkedin.mxe.SystemMetadata;
+import java.util.List;
+import javax.annotation.Nonnull;
 import org.opensearch.client.RestHighLevelClient;
 import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
-import javax.annotation.Nonnull;
-import java.util.List;
-
-import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite;
-import static org.testng.Assert.assertEquals;
-
-abstract public class SystemMetadataServiceTestBase extends AbstractTestNGSpringContextTests {
+public abstract class SystemMetadataServiceTestBase extends AbstractTestNGSpringContextTests {
 
   @Nonnull
-  abstract protected RestHighLevelClient getSearchClient();
+  protected abstract RestHighLevelClient getSearchClient();
 
   @Nonnull
-  abstract protected ESBulkProcessor getBulkProcessor();
+  protected abstract ESBulkProcessor getBulkProcessor();
 
   @Nonnull
-  abstract protected ESIndexBuilder getIndexBuilder();
+  protected abstract ESIndexBuilder getIndexBuilder();
 
-  private final IndexConvention _indexConvention = new IndexConventionImpl("es_system_metadata_service_test");
+  private final IndexConvention _indexConvention =
+      new IndexConventionImpl("es_system_metadata_service_test");
 
   private ElasticSearchSystemMetadataService _client;
 
@@ -48,8 +48,10 @@ public void wipe() throws Exception {
 
   @Nonnull
   private ElasticSearchSystemMetadataService buildService() {
-    ESSystemMetadataDAO dao = new ESSystemMetadataDAO(getSearchClient(), _indexConvention, getBulkProcessor(), 1);
-    return new ElasticSearchSystemMetadataService(getBulkProcessor(), _indexConvention, dao, getIndexBuilder());
+    ESSystemMetadataDAO dao =
+        new ESSystemMetadataDAO(getSearchClient(), _indexConvention, getBulkProcessor(), 1);
+    return new ElasticSearchSystemMetadataService(
+        getBulkProcessor(), _indexConvention, dao, getIndexBuilder());
   }
 
   @Test
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java
index 407d2ae684ede..921fbac12df85 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java
@@ -1,8 +1,10 @@
 package com.linkedin.metadata.timeline;
 
-import com.linkedin.metadata.config.PreProcessHooks;
+import static org.mockito.Mockito.mock;
+
 import com.datastax.oss.driver.api.core.CqlSession;
 import com.linkedin.metadata.CassandraTestUtils;
+import com.linkedin.metadata.config.PreProcessHooks;
 import com.linkedin.metadata.entity.EntityServiceImpl;
 import com.linkedin.metadata.entity.cassandra.CassandraAspectDao;
 import com.linkedin.metadata.event.EventProducer;
@@ -14,21 +16,19 @@ import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static org.mockito.Mockito.mock;
-
 /**
- * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against a Cassandra database.
+ * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against
+ * a Cassandra database.
  *
- * This class also contains all the test methods where realities of an underlying storage leak into the
- * {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be
- * great to address captured differences.
+ * <p>This class also contains all the test methods where realities of an underlying storage leak
+ * into the {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that
+ * should never happen, and it'd be great to address captured differences.
  */
 public class CassandraTimelineServiceTest extends TimelineServiceTest<CassandraAspectDao> {
 
   private CassandraContainer _cassandraContainer;
 
-  public CassandraTimelineServiceTest() throws EntityRegistryException {
-  }
+  public CassandraTimelineServiceTest() throws EntityRegistryException {}
 
   @BeforeClass
   public void setupContainer() {
@@ -54,14 +54,20 @@ private void configureComponents() {
     _mockProducer = mock(EventProducer.class);
     PreProcessHooks preProcessHooks = new PreProcessHooks();
     preProcessHooks.setUiEnabled(true);
-    _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true,
-        _mockUpdateIndicesService, preProcessHooks);
+    _entityServiceImpl =
+        new EntityServiceImpl(
+            _aspectDao,
+            _mockProducer,
+            _testEntityRegistry,
+            true,
+            _mockUpdateIndicesService,
+            preProcessHooks);
   }
 
   /**
    * Ideally, all tests would be in the base class, so they're reused between all implementations.
-   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test
-   * to make sure this class will always be discovered.
+   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy
+   * test to make sure this class will always be discovered.
    */
   @Test
   public void obligatoryTest() throws Exception {
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java
index 9e89328715510..4e47e596dddc2 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java
@@ -1,7 +1,9 @@
 package com.linkedin.metadata.timeline;
 
-import com.linkedin.metadata.config.PreProcessHooks;
+import static org.mockito.Mockito.mock;
+
 import com.linkedin.metadata.EbeanTestUtils;
+import com.linkedin.metadata.config.PreProcessHooks;
 import com.linkedin.metadata.entity.EntityServiceImpl;
 import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
 import com.linkedin.metadata.event.EventProducer;
@@ -11,37 +13,42 @@ import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static org.mockito.Mockito.mock;
-
 /**
- * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against a relational database.
+ * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against
+ * a relational database.
  *
- * This class also contains all the test methods where realities of an underlying storage leak into the
- * {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be
- * great to address captured differences.
+ * <p>This class also contains all the test methods where realities of an underlying storage leak
+ * into the {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that
+ * should never happen, and it'd be great to address captured differences.
  */
 public class EbeanTimelineServiceTest extends TimelineServiceTest<EbeanAspectDao> {
 
-  public EbeanTimelineServiceTest() throws EntityRegistryException {
-  }
+  public EbeanTimelineServiceTest() throws EntityRegistryException {}
 
   @BeforeMethod
   public void setupTest() {
-    Database server = EbeanTestUtils.createTestServer(EbeanTimelineServiceTest.class.getSimpleName());
+    Database server =
+        EbeanTestUtils.createTestServer(EbeanTimelineServiceTest.class.getSimpleName());
     _aspectDao = new EbeanAspectDao(server);
     _aspectDao.setConnectionValidated(true);
     _entityTimelineService = new TimelineServiceImpl(_aspectDao, _testEntityRegistry);
     _mockProducer = mock(EventProducer.class);
     PreProcessHooks preProcessHooks = new PreProcessHooks();
     preProcessHooks.setUiEnabled(true);
-    _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true,
-        _mockUpdateIndicesService, preProcessHooks);
+    _entityServiceImpl =
+        new EntityServiceImpl(
+            _aspectDao,
+            _mockProducer,
+            _testEntityRegistry,
+            true,
+            _mockUpdateIndicesService,
+            preProcessHooks);
   }
 
   /**
    * Ideally, all tests would be in the base class, so they're reused between all implementations.
-   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test
-   * to make sure this class will always be discovered.
+   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy
+   * test to make sure this class will always be discovered.
    */
   @Test
   public void obligatoryTest() throws Exception {
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java
index b3e4b84a4962d..6cea5a78201b7 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.timeline;
 
+import static org.mockito.Mockito.*;
+
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.FabricType;
 import com.linkedin.common.urn.DataPlatformUrn;
@@ -26,8 +28,6 @@ import com.linkedin.schema.SchemaMetadata;
 import com.linkedin.schema.StringType;
 import com.linkedin.util.Pair;
-import org.testng.annotations.Test;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -35,29 +35,30 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
-import static org.mockito.Mockito.*;
-
+import org.testng.annotations.Test;
 
 /**
  * A class to test {@link TimelineServiceImpl}
  *
- * This class is generic to allow same integration tests to be reused to test all supported storage backends.
- * If you're adding another storage backend - you should create a new test class that extends this one providing
- * hard implementations of {@link AspectDao} and implements {@code @BeforeMethod} etc to set up and tear down state.
+ * <p>This class is generic to allow same integration tests to be reused to test all supported
+ * storage backends. If you're adding another storage backend - you should create a new test class
+ * that extends this one providing hard implementations of {@link AspectDao} and implements
+ * {@code @BeforeMethod} etc to set up and tear down state.
  *
- * If you realise that a feature you want to test, sadly, has divergent behaviours between different storage implementations,
- * that you can't rectify - you should make the test method abstract and implement it in all implementations of this class.
+ * <p>If you realise that a feature you want to test, sadly, has divergent behaviours between
+ * different storage implementations, that you can't rectify - you should make the test method
+ * abstract and implement it in all implementations of this class.
  *
  * @param <T_AD> {@link AspectDao} implementation.
  */
-abstract public class TimelineServiceTest<T_AD extends AspectDao> {
+public abstract class TimelineServiceTest<T_AD extends AspectDao> {
   protected T_AD _aspectDao;
 
   protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry();
   protected final EntityRegistry _configEntityRegistry =
-      new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml"));
+      new ConfigEntityRegistry(
+          Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml"));
   protected final EntityRegistry _testEntityRegistry =
       new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry);
   protected TimelineServiceImpl _entityTimelineService;
@@ -65,14 +66,16 @@ abstract public class TimelineServiceTest<T_AD extends AspectDao> {
   protected EventProducer _mockProducer;
   protected UpdateIndicesService _mockUpdateIndicesService = mock(UpdateIndicesService.class);
 
-  protected TimelineServiceTest() throws EntityRegistryException {
-  }
+  protected TimelineServiceTest() throws EntityRegistryException {}
 
   @Test
   public void testGetTimeline() throws Exception {
-    Urn entityUrn = Urn.createFromString(
-        "urn:li:dataset:(urn:li:dataPlatform:hive,fooDb.fooTable" + System.currentTimeMillis() + ",PROD)");
+    Urn entityUrn =
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:hive,fooDb.fooTable"
+                + System.currentTimeMillis()
+                + ",PROD)");
     String aspectName = "schemaMetadata";
 
     ArrayList<AuditStamp> timestamps = new ArrayList();
@@ -82,39 +85,49 @@ public void testGetTimeline() throws Exception {
       SchemaMetadata schemaMetadata = getSchemaMetadata("This is the new description for day " + i);
       AuditStamp daysAgo = createTestAuditStamp(i);
       timestamps.add(daysAgo);
-      _entityServiceImpl.ingestAspects(entityUrn, Collections.singletonList(new Pair<>(aspectName, schemaMetadata)),
-          daysAgo, getSystemMetadata(daysAgo, "run-" + i));
+      _entityServiceImpl.ingestAspects(
+          entityUrn,
+          Collections.singletonList(new Pair<>(aspectName, schemaMetadata)),
+          daysAgo,
+          getSystemMetadata(daysAgo, "run-" + i));
     }
 
     Map<String, RecordTemplate> latestAspects =
-        _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(Arrays.asList(aspectName)));
+        _entityServiceImpl.getLatestAspectsForUrn(
+            entityUrn, new HashSet<>(Arrays.asList(aspectName)));
 
     Set<ChangeCategory> elements = new HashSet<>();
     elements.add(ChangeCategory.TECHNICAL_SCHEMA);
     List<ChangeTransaction> changes =
-        _entityTimelineService.getTimeline(entityUrn, elements, createTestAuditStamp(10).getTime(), 0, null, null,
-            false);
-    //Assert.assertEquals(changes.size(), 7);
-    //Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), ChangeOperation.ADD);
-    //Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(0).getTime().longValue());
-    //Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY);
-    //Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(1).getTime().longValue());
+        _entityTimelineService.getTimeline(
+            entityUrn, elements, createTestAuditStamp(10).getTime(), 0, null, null, false);
+    // Assert.assertEquals(changes.size(), 7);
+    // Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(),
+    // ChangeOperation.ADD);
+    // Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(0).getTime().longValue());
+    // Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(),
+    // ChangeOperation.MODIFY);
+    // Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(1).getTime().longValue());
 
     changes =
-        _entityTimelineService.getTimeline(entityUrn, elements, timestamps.get(4).getTime() - 3000L, 0, null, null,
-            false);
-    //Assert.assertEquals(changes.size(), 3);
-    //Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY);
-    //Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(4).getTime().longValue());
-    //Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY);
-    //Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(5).getTime().longValue());
+        _entityTimelineService.getTimeline(
+            entityUrn, elements, timestamps.get(4).getTime() - 3000L, 0, null, null, false);
+    // Assert.assertEquals(changes.size(), 3);
+    // Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(),
+    // ChangeOperation.MODIFY);
+    // Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(4).getTime().longValue());
+    // Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(),
+    // ChangeOperation.MODIFY);
+    // Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(5).getTime().longValue());
   }
 
   private static AuditStamp createTestAuditStamp(int daysAgo) {
     try {
       Long timestamp = System.currentTimeMillis() - (daysAgo * 24 * 60 * 60 * 1000L);
       Long timestampRounded = 1000 * (timestamp / 1000);
-      return new AuditStamp().setTime(timestampRounded).setActor(Urn.createFromString("urn:li:principal:tester"));
+      return new AuditStamp()
+          .setTime(timestampRounded)
+          .setActor(Urn.createFromString("urn:li:principal:tester"));
     } catch (Exception e) {
       throw new RuntimeException("Failed to create urn");
     }
@@ -128,17 +141,22 @@ private SystemMetadata getSystemMetadata(AuditStamp twoDaysAgo, String s) {
   }
 
   private SchemaMetadata getSchemaMetadata(String s) {
-    SchemaField field1 = new SchemaField()
-        .setFieldPath("column1")
-        .setDescription(s)
-        .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType())))
-        .setNativeDataType("string");
+    SchemaField field1 =
+        new SchemaField()
+            .setFieldPath("column1")
+            .setDescription(s)
+            .setType(
+                new SchemaFieldDataType()
+                    .setType(SchemaFieldDataType.Type.create(new StringType())))
+            .setNativeDataType("string");
 
     SchemaFieldArray fieldArray = new SchemaFieldArray();
     fieldArray.add(field1);
 
-    return new SchemaMetadata().setSchemaName("testSchema")
-        .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema("foo")))
+    return new SchemaMetadata()
+        .setSchemaName("testSchema")
+        .setPlatformSchema(
+            SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema("foo")))
         .setPlatform(new DataPlatformUrn("hive"))
         .setHash("")
         .setVersion(0L)
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java
index 75508320abdce..3e9f1cd0fe092 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java
@@ -1,44 +1,50 @@
 package com.linkedin.metadata.timeline.eventgenerator;
 
+import static org.testng.AssertJUnit.assertEquals;
+
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.timeline.data.ChangeEvent;
 import com.linkedin.mxe.SystemMetadata;
 import com.linkedin.restli.internal.server.util.DataMapUtils;
 import com.linkedin.schema.SchemaMetadata;
-import org.apache.commons.io.IOUtils;
-import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
-import org.testng.annotations.Test;
-
 import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;
-
-import static org.testng.AssertJUnit.assertEquals;
+import org.apache.commons.io.IOUtils;
+import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
+import org.testng.annotations.Test;
 
 public class SchemaMetadataChangeEventGeneratorTest extends AbstractTestNGSpringContextTests {
-  @Test
-  public void testDelete() throws Exception {
-    SchemaMetadataChangeEventGenerator test = new SchemaMetadataChangeEventGenerator();
-
-    Urn urn = Urn.createFromString(
-        "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)");
-    String entity = "dataset";
-    String aspect = "schemaMetadata";
-    AuditStamp auditStamp = new AuditStamp()
-        .setActor(Urn.createFromString("urn:li:corpuser:__datahub_system"))
-        .setTime(1683829509553L);
-    Aspect<SchemaMetadata> from = new Aspect<>(DataMapUtils.read(IOUtils.toInputStream(TEST_OBJECT, StandardCharsets.UTF_8),
-        SchemaMetadata.class, Map.of()), new SystemMetadata());
-    Aspect<SchemaMetadata> to = new Aspect<>(null, new SystemMetadata());
-
-    List<ChangeEvent> actual = test.getChangeEvents(urn, entity, aspect, from, to, auditStamp);
-
-    assertEquals(14, actual.size());
-  }
-
-  //CHECKSTYLE:OFF
-  private static final String TEST_OBJECT = "{\"platformSchema\":{\"com.linkedin.schema.KafkaSchema\":{\"documentSchema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SampleHdfsSchema\\\",\\\"namespace\\\":\\\"com.linkedin.dataset\\\",\\\"doc\\\":\\\"Sample HDFS dataset\\\",\\\"fields\\\":[{\\\"name\\\":\\\"field_foo\\\",\\\"type\\\":[\\\"string\\\"]},{\\\"name\\\":\\\"field_bar\\\",\\\"type\\\":[\\\"boolean\\\"]}]}\"}},\"created\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"lastModified\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"fields\":[{\"nullable\":false,\"fieldPath\":\"shipment_info\",\"description\":\"Shipment info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.date\",\"description\":\"Shipment info date description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.DateType\":{}}},\"recursive\":false,\"nativeDataType\":\"Date\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.target\",\"description\":\"Shipment info target description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"text\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.destination\",\"description\":\"Shipment info destination description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info\",\"description\":\"Shipment info geo_info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lat\",\"description\":\"Shipment info geo_info lat\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lng\",\"description\":\"Shipment info geo_info lng\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"}],\"schemaName\":\"SampleHdfsSchema\",\"version\":0,\"hash\":\"\",\"platform\":\"urn:li:dataPlatform:hdfs\"}";
-  //CHECKSTYLE:ON
+  @Test
+  public void testDelete() throws Exception {
+    SchemaMetadataChangeEventGenerator test = new SchemaMetadataChangeEventGenerator();
+
+    Urn urn =
+        Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)");
+    String entity = "dataset";
+    String aspect = "schemaMetadata";
+    AuditStamp auditStamp =
+        new AuditStamp()
+            .setActor(Urn.createFromString("urn:li:corpuser:__datahub_system"))
+            .setTime(1683829509553L);
+    Aspect<SchemaMetadata> from =
+        new Aspect<>(
+            DataMapUtils.read(
+                IOUtils.toInputStream(TEST_OBJECT, StandardCharsets.UTF_8),
+                SchemaMetadata.class,
+                Map.of()),
+            new SystemMetadata());
+    Aspect<SchemaMetadata> to = new Aspect<>(null, new SystemMetadata());
+
+    List<ChangeEvent> actual = test.getChangeEvents(urn, entity, aspect, from, to, auditStamp);
+
+    assertEquals(14, actual.size());
+  }
+
+  // CHECKSTYLE:OFF
+  private static final String TEST_OBJECT =
+      "{\"platformSchema\":{\"com.linkedin.schema.KafkaSchema\":{\"documentSchema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SampleHdfsSchema\\\",\\\"namespace\\\":\\\"com.linkedin.dataset\\\",\\\"doc\\\":\\\"Sample HDFS dataset\\\",\\\"fields\\\":[{\\\"name\\\":\\\"field_foo\\\",\\\"type\\\":[\\\"string\\\"]},{\\\"name\\\":\\\"field_bar\\\",\\\"type\\\":[\\\"boolean\\\"]}]}\"}},\"created\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"lastModified\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"fields\":[{\"nullable\":false,\"fieldPath\":\"shipment_info\",\"description\":\"Shipment info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.date\",\"description\":\"Shipment info date description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.DateType\":{}}},\"recursive\":false,\"nativeDataType\":\"Date\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.target\",\"description\":\"Shipment info target description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"text\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.destination\",\"description\":\"Shipment info destination description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info\",\"description\":\"Shipment info geo_info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lat\",\"description\":\"Shipment info geo_info lat\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lng\",\"description\":\"Shipment info geo_info lng\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"}],\"schemaName\":\"SampleHdfsSchema\",\"version\":0,\"hash\":\"\",\"platform\":\"urn:li:dataPlatform:hdfs\"}";
+  // CHECKSTYLE:ON
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java
index 1362a0f69eff2..13236e302c259 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java
@@ -1,5 +1,13 @@
 package com.linkedin.metadata.timeseries.search;
 
+import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH;
+import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE;
+import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.fail;
+
 import com.datahub.test.BatchType;
 import com.datahub.test.ComplexNestedRecord;
 import com.datahub.test.TestEntityComponentProfile;
@@ -46,37 +54,35 @@ import com.linkedin.timeseries.GroupingBucketType;
 import com.linkedin.timeseries.TimeWindowSize;
 import com.linkedin.timeseries.TimeseriesIndexSizeResult;
-import org.opensearch.client.RestHighLevelClient;
-import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import javax.annotation.Nonnull;
 import java.util.Calendar;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import javax.annotation.Nonnull;
+import org.opensearch.client.RestHighLevelClient;
+import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH;
-import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE;
-import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpringContextTests {
+public abstract class TimeseriesAspectServiceTestBase extends AbstractTestNGSpringContextTests {
 
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
   static {
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    OBJECT_MAPPER
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
   }
 
   private static final String ENTITY_NAME = "testEntity";
   private static final String ASPECT_NAME = "testEntityProfile";
-  private static final Urn TEST_URN = new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table1");
+  private static final Urn TEST_URN =
+      new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table1");
   private static final int NUM_PROFILES = 100;
   private static final long TIME_INCREMENT = 3600000; // hour in ms.
   private static final String CONTENT_TYPE = "application/json";
@@ -85,13 +91,13 @@ abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpri
   private static final String ES_FIELD_STAT = "stat";
 
   @Nonnull
-  abstract protected RestHighLevelClient getSearchClient();
+  protected abstract RestHighLevelClient getSearchClient();
 
   @Nonnull
-  abstract protected ESBulkProcessor getBulkProcessor();
+  protected abstract ESBulkProcessor getBulkProcessor();
 
   @Nonnull
-  abstract protected ESIndexBuilder getIndexBuilder();
+  protected abstract ESIndexBuilder getIndexBuilder();
 
   private EntityRegistry _entityRegistry;
   private IndexConvention _indexConvention;
@@ -107,8 +113,12 @@ abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpri
 
   @BeforeClass
   public void setup() {
-    _entityRegistry = new ConfigEntityRegistry(new DataSchemaFactory("com.datahub.test"),
-        TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml"));
+    _entityRegistry =
+        new ConfigEntityRegistry(
+            new DataSchemaFactory("com.datahub.test"),
+            TestEntityProfile.class
+                .getClassLoader()
+                .getResourceAsStream("test-entity-registry.yml"));
     _indexConvention = new IndexConventionImpl("es_timeseries_aspect_service_test");
     _elasticSearchTimeseriesAspectService = buildService();
     _elasticSearchTimeseriesAspectService.configure();
@@ -118,9 +128,13 @@ public void setup() {
 
   @Nonnull
   private ElasticSearchTimeseriesAspectService buildService() {
-    return new ElasticSearchTimeseriesAspectService(getSearchClient(), _indexConvention,
-        new TimeseriesAspectIndexBuilders(getIndexBuilder(), _entityRegistry,
-            _indexConvention), _entityRegistry, getBulkProcessor(), 1);
+    return new ElasticSearchTimeseriesAspectService(
+        getSearchClient(),
+        _indexConvention,
+        new TimeseriesAspectIndexBuilders(getIndexBuilder(), _entityRegistry, _indexConvention),
+        _entityRegistry,
+        getBulkProcessor(),
+        1);
   }
 
   /*
@@ -128,10 +142,13 @@ private ElasticSearchTimeseriesAspectService buildService() {
    */
   private void upsertDocument(TestEntityProfile dp, Urn urn) throws JsonProcessingException {
-    Map<String, JsonNode> documents = TimeseriesAspectTransformer.transform(urn, dp, _aspectSpec, null);
+    Map<String, JsonNode> documents =
+        TimeseriesAspectTransformer.transform(urn, dp, _aspectSpec, null);
     assertEquals(documents.size(), 3);
     documents.forEach(
-        (key, value) -> _elasticSearchTimeseriesAspectService.upsertDocument(ENTITY_NAME, ASPECT_NAME, key, value));
+        (key, value) ->
+            _elasticSearchTimeseriesAspectService.upsertDocument(
+                ENTITY_NAME, ASPECT_NAME, key, value));
   }
 
   private TestEntityProfile makeTestProfile(long eventTime, long stat, String messageId) {
@@ -140,7 +157,8 @@ private TestEntityProfile makeTestProfile(long eventTime, long stat, String mess
     testEntityProfile.setStat(stat);
     testEntityProfile.setStrStat(String.valueOf(stat));
     testEntityProfile.setStrArray(new StringArray("sa_" + stat, "sa_" + (stat + 1)));
-    testEntityProfile.setEventGranularity(new TimeWindowSize().setUnit(CalendarInterval.DAY).setMultiple(1));
+    testEntityProfile.setEventGranularity(
+        new TimeWindowSize().setUnit(CalendarInterval.DAY).setMultiple(1));
     if (messageId != null) {
       testEntityProfile.setMessageId(messageId);
     }
@@ -152,14 +170,17 @@ private TestEntityProfile makeTestProfile(long eventTime, long stat, String mess
     TestEntityComponentProfile componentProfile2 = new TestEntityComponentProfile();
     componentProfile2.setKey("col2");
     componentProfile2.setStat(stat + 2);
-    testEntityProfile.setComponentProfiles(new TestEntityComponentProfileArray(componentProfile1, componentProfile2));
+    testEntityProfile.setComponentProfiles(
+        new TestEntityComponentProfileArray(componentProfile1, componentProfile2));
 
     StringMap stringMap1 = new StringMap();
     stringMap1.put("p_key1", "p_val1");
     StringMap stringMap2 = new StringMap();
     stringMap2.put("p_key2", "p_val2");
-    ComplexNestedRecord nestedRecord = new ComplexNestedRecord().setType(BatchType.PARTITION_BATCH)
-        .setPartitions(new StringMapArray(stringMap1, stringMap2));
+    ComplexNestedRecord nestedRecord =
+        new ComplexNestedRecord()
+            .setType(BatchType.PARTITION_BATCH)
+            .setPartitions(new StringMapArray(stringMap1, stringMap2));
     testEntityProfile.setAComplexNestedRecord(nestedRecord);
 
     return testEntityProfile;
@@ -172,57 +193,74 @@ public void testUpsertProfiles() throws Exception {
     _startTime = _startTime - _startTime % 86400000;
     // Create the testEntity profiles that we would like to use for testing.
     TestEntityProfile firstProfile = makeTestProfile(_startTime, 20, null);
-    Stream<TestEntityProfile> testEntityProfileStream = Stream.iterate(firstProfile,
-        (TestEntityProfile prev) -> makeTestProfile(prev.getTimestampMillis() + TIME_INCREMENT, prev.getStat() + 10,
-            null));
-
-    _testEntityProfiles = testEntityProfileStream.limit(NUM_PROFILES)
-        .collect(Collectors.toMap(TestEntityProfile::getTimestampMillis, Function.identity()));
+    Stream<TestEntityProfile> testEntityProfileStream =
+        Stream.iterate(
+            firstProfile,
+            (TestEntityProfile prev) ->
+                makeTestProfile(
+                    prev.getTimestampMillis() + TIME_INCREMENT, prev.getStat() + 10, null));
+
+    _testEntityProfiles =
+        testEntityProfileStream
+            .limit(NUM_PROFILES)
+            .collect(Collectors.toMap(TestEntityProfile::getTimestampMillis, Function.identity()));
 
     Long endTime = _startTime + (NUM_PROFILES - 1) * TIME_INCREMENT;
     assertNotNull(_testEntityProfiles.get(_startTime));
     assertNotNull(_testEntityProfiles.get(endTime));
 
     // Upsert the documents into the index.
-    _testEntityProfiles.values().forEach(x -> {
-      try {
-        upsertDocument(x, TEST_URN);
-      } catch (JsonProcessingException jsonProcessingException) {
-        jsonProcessingException.printStackTrace();
-      }
-    });
+    _testEntityProfiles
+        .values()
+        .forEach(
+            x -> {
+              try {
+                upsertDocument(x, TEST_URN);
+              } catch (JsonProcessingException jsonProcessingException) {
+                jsonProcessingException.printStackTrace();
+              }
+            });
 
     syncAfterWrite(getBulkProcessor());
   }
 
   @Test(groups = "upsertUniqueMessageId")
   public void testUpsertProfilesWithUniqueMessageIds() throws Exception {
-    // Create the testEntity profiles that have the same value for timestampMillis, but use unique message ids.
+    // Create the testEntity profiles that have the same value for timestampMillis, but use unique
+    // message ids.
    // We should preserve all the documents we are going to upsert in the index.
     final long curTimeMillis = Calendar.getInstance().getTimeInMillis();
     final long startTime = curTimeMillis - curTimeMillis % 86400000;
     final TestEntityProfile firstProfile = makeTestProfile(startTime, 20, "20");
-    Stream<TestEntityProfile> testEntityProfileStream = Stream.iterate(firstProfile,
-        (TestEntityProfile prev) -> makeTestProfile(prev.getTimestampMillis(), prev.getStat() + 10,
-            String.valueOf(prev.getStat() + 10)));
-
-    final List<TestEntityProfile> testEntityProfiles = testEntityProfileStream.limit(3).collect(Collectors.toList());
+    Stream<TestEntityProfile> testEntityProfileStream =
+        Stream.iterate(
+            firstProfile,
+            (TestEntityProfile prev) ->
+                makeTestProfile(
+                    prev.getTimestampMillis(),
+                    prev.getStat() + 10,
+                    String.valueOf(prev.getStat() + 10)));
+
+    final List<TestEntityProfile> testEntityProfiles =
+        testEntityProfileStream.limit(3).collect(Collectors.toList());
 
     // Upsert the documents into the index.
-    final Urn urn = new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table2");
-    testEntityProfiles.forEach(x -> {
-      try {
-        upsertDocument(x, urn);
-      } catch (JsonProcessingException jsonProcessingException) {
-        jsonProcessingException.printStackTrace();
-      }
-    });
+    final Urn urn =
+        new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table2");
+    testEntityProfiles.forEach(
+        x -> {
+          try {
+            upsertDocument(x, urn);
+          } catch (JsonProcessingException jsonProcessingException) {
+            jsonProcessingException.printStackTrace();
+          }
+        });
 
     syncAfterWrite(getBulkProcessor());
 
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(urn, ENTITY_NAME, ASPECT_NAME, null, null,
-            testEntityProfiles.size(), null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            urn, ENTITY_NAME, ASPECT_NAME, null, null, testEntityProfiles.size(), null);
     assertEquals(resultAspects.size(), testEntityProfiles.size());
   }
 
@@ -232,8 +270,9 @@ public void testUpsertProfilesWithUniqueMessageIds() throws Exception {
 
   private void validateAspectValue(EnvelopedAspect envelopedAspectResult) {
     TestEntityProfile actualProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(envelopedAspectResult.getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                envelopedAspectResult.getAspect().getValue(), CONTENT_TYPE, _aspectSpec);
     TestEntityProfile expectedProfile = _testEntityProfiles.get(actualProfile.getTimestampMillis());
     assertNotNull(expectedProfile);
     assertEquals(actualProfile.getStat(), expectedProfile.getStat());
@@ -248,18 +287,23 @@ private void validateAspectValues(List<EnvelopedAspect> aspects, long numResults
 
   @Test(groups = "getAspectValues", dependsOnGroups = "upsert")
   public void testGetAspectTimeseriesValuesAll() {
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, null);
     validateAspectValues(resultAspects, NUM_PROFILES);
 
     TestEntityProfile firstProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(0).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(0).getAspect().getValue(), CONTENT_TYPE, _aspectSpec);
     TestEntityProfile lastProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
-
-    // Now verify that the first index is the one with the highest stat value, and the last the one with the lower.
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
+                CONTENT_TYPE,
+                _aspectSpec);
+
+    // Now verify that the first index is the one with the highest stat value, and the last the one
+    // with the lower.
     assertEquals((long) firstProfile.getStat(), 20 + (NUM_PROFILES - 1) * 10);
     assertEquals((long) lastProfile.getStat(), 20);
   }
@@ -267,31 +311,43 @@ public void testGetAspectTimeseriesValuesAll() {
 
   @Test(groups = "getAspectValues", dependsOnGroups = "upsert")
   public void testGetAspectTimeseriesValuesAllSorted() {
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, null, new SortCriterion().setField("stat").setOrder(SortOrder.ASCENDING));
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            null,
+            null,
+            NUM_PROFILES,
+            null,
+            new SortCriterion().setField("stat").setOrder(SortOrder.ASCENDING));
     validateAspectValues(resultAspects, NUM_PROFILES);
 
     TestEntityProfile firstProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(0).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(0).getAspect().getValue(), CONTENT_TYPE, _aspectSpec);
     TestEntityProfile lastProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
-
-    // Now verify that the first index is the one with the highest stat value, and the last the one with the lower.
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
+                CONTENT_TYPE,
+                _aspectSpec);
+
+    // Now verify that the first index is the one with the highest stat value, and the last the one
+    // with the lower.
     assertEquals((long) firstProfile.getStat(), 20);
     assertEquals((long) lastProfile.getStat(), 20 + (NUM_PROFILES - 1) * 10);
-
   }
 
   @Test(groups = "getAspectValues", dependsOnGroups = "upsert")
   public void testGetAspectTimeseriesValuesWithFilter() {
     Filter filter = new Filter();
-    Criterion hasStatEqualsTwenty = new Criterion().setField("stat").setCondition(Condition.EQUAL).setValue("20");
+    Criterion hasStatEqualsTwenty =
+        new Criterion().setField("stat").setCondition(Condition.EQUAL).setValue("20");
     filter.setCriteria(new CriterionArray(hasStatEqualsTwenty));
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, filter);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, filter);
     validateAspectValues(resultAspects, 1);
   }
 
@@ -299,8 +355,14 @@ public void testGetAspectTimeseriesValuesWithFilter() {
   public void testGetAspectTimeseriesValuesSubRangeInclusiveOverlap() {
     int expectedNumRows = 10;
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, _startTime,
-            _startTime + TIME_INCREMENT * (expectedNumRows - 1), expectedNumRows, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            _startTime,
+            _startTime + TIME_INCREMENT * (expectedNumRows - 1),
+            expectedNumRows,
+            null);
     validateAspectValues(resultAspects, expectedNumRows);
   }
 
@@ -308,9 +370,14 @@ public void testGetAspectTimeseriesValuesSubRangeInclusiveOverlap() {
   public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlap() {
     int expectedNumRows = 10;
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME,
-            _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2,
-            expectedNumRows, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            _startTime + TIME_INCREMENT / 2,
+            _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2,
+            expectedNumRows,
+            null);
     validateAspectValues(resultAspects, expectedNumRows);
   }
 
@@ -318,9 +385,14 @@ public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlap() {
   public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlapLatestValueOnly() {
     int expectedNumRows = 1;
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME,
-            _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2,
-            expectedNumRows, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            _startTime + TIME_INCREMENT / 2,
+            _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2,
+            expectedNumRows,
+            null);
     validateAspectValues(resultAspects, expectedNumRows);
   }
 
@@ -328,17 +400,25 @@ public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlapLatestValueOnly
   public void testGetAspectTimeseriesValuesExactlyOneResponse() {
     int expectedNumRows = 1;
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME,
-            _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * 3 / 2, expectedNumRows, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            _startTime + TIME_INCREMENT / 2,
+            _startTime + TIME_INCREMENT * 3 / 2,
+            expectedNumRows,
+            null);
     validateAspectValues(resultAspects, expectedNumRows);
   }
 
-  @Test(groups = {"getAspectValues"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAspectValues"},
+      dependsOnGroups = {"upsert"})
   public void testGetAspectTimeseriesValueMissingUrn() {
     Urn nonExistingUrn = new TestEntityUrn("missing", "missing", "missing");
     List<EnvelopedAspect> resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(nonExistingUrn, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, null);
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            nonExistingUrn, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, null);
     validateAspectValues(resultAspects, 0);
   }
 
@@ -347,71 +427,109 @@ public void testGetAspectTimeseriesValueMissingUrn() {
    */
 
   /* Latest Aggregation Tests */
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStatForDay1() {
     // Filter is only on the urn
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
         new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat");
 
     // Grouping bucket is only timestamp filed.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long"));
     // Validate rows
     assertNotNull(resultTable.getRows());
     assertEquals(resultTable.getRows().size(), 1);
-    assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(),
-        _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString())));
+    assertEquals(
+        resultTable.getRows(),
+        new StringArrayArray(
+            new StringArray(
+                _startTime.toString(),
+                _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString())));
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestAComplexNestedRecordForDay1() {
     // Filter is only on the urn
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("aComplexNestedRecord");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("aComplexNestedRecord");
 
     // Grouping bucket is only timestamp filed.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_aComplexNestedRecord"));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(ES_FIELD_TIMESTAMP, "latest_aComplexNestedRecord"));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "record"));
     // Validate rows
@@ -421,86 +539,121 @@ public void testGetAggregatedStatsLatestAComplexNestedRecordForDay1() {
     try {
       ComplexNestedRecord latestAComplexNestedRecord =
           OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), ComplexNestedRecord.class);
-      assertEquals(latestAComplexNestedRecord,
+      assertEquals(
+          latestAComplexNestedRecord,
           _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getAComplexNestedRecord());
     } catch (JsonProcessingException e) {
       fail("Unexpected exception thrown" + e);
     }
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStrArrayDay1() {
     // Filter is only on the urn
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
         new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("strArray");
 
     // Grouping bucket is only timestamp filed.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + "strArray"));
+    assertEquals(
+        resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + "strArray"));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "array"));
     // Validate rows
     assertNotNull(resultTable.getRows());
     assertEquals(resultTable.getRows().size(), 1);
-    StringArray expectedStrArray = _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStrArray();
-    //assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(),
+    StringArray expectedStrArray =
+        _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStrArray();
+    // assertEquals(resultTable.getRows(), new StringArrayArray(new
+    // StringArray(_startTime.toString(),
     //    expectedStrArray.toString())));
     // Test array construction using object mapper as well
     try {
-      StringArray actualStrArray = OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), StringArray.class);
+      StringArray actualStrArray =
+          OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), StringArray.class);
       assertEquals(actualStrArray, expectedStrArray);
     } catch (JsonProcessingException e) {
       e.printStackTrace();
     }
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStatForTwoDays() {
     // Filter is only on the urn
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 47 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 47 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
         new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat");
 
     // Grouping bucket is only timestamp filed.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long"));
     // Validate rows
@@ -508,132 +661,223 @@ public void testGetAggregatedStatsLatestStatForTwoDays() {
     assertEquals(resultTable.getRows().size(), 2);
     Long latestDay1Ts = _startTime + 23 * TIME_INCREMENT;
     Long latestDay2Ts = _startTime + 47 * TIME_INCREMENT;
-    assertEquals(resultTable.getRows(), new StringArrayArray(
-        new StringArray(_startTime.toString(), _testEntityProfiles.get(latestDay1Ts).getStat().toString()),
-        new StringArray(String.valueOf(_startTime + 24 * TIME_INCREMENT),
-            _testEntityProfiles.get(latestDay2Ts).getStat().toString())));
+    assertEquals(
+        resultTable.getRows(),
+        new StringArrayArray(
+            new StringArray(
+                _startTime.toString(), _testEntityProfiles.get(latestDay1Ts).getStat().toString()),
+            new StringArray(
+                String.valueOf(_startTime + 24 * TIME_INCREMENT),
+                _testEntityProfiles.get(latestDay2Ts).getStat().toString())));
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStatForFirst10HoursOfDay1() {
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
         new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat");
 
     // Grouping bucket is only timestamp filed.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long"));
     // Validate rows
     assertNotNull(resultTable.getRows());
     assertEquals(resultTable.getRows().size(), 1);
-    assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(),
-        _testEntityProfiles.get(_startTime + 9 * TIME_INCREMENT).getStat().toString())));
+    assertEquals(
+        resultTable.getRows(),
+        new StringArrayArray(
+            new StringArray(
+                _startTime.toString(),
+                _testEntityProfiles.get(_startTime + 9 * TIME_INCREMENT).getStat().toString())));
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStatForCol1Day1() {
     Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT;
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(lastEntryTimeStamp));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(lastEntryTimeStamp));
     Criterion hasCol1 =
-        new Criterion().setField("componentProfiles.key").setCondition(Condition.EQUAL).setValue("col1");
+        new Criterion()
+            .setField("componentProfiles.key")
+            .setCondition(Condition.EQUAL)
+            .setValue("col1");
 
-    Filter filter = QueryUtils.getFilterFromCriteria(
-        ImmutableList.of(hasUrnCriterion, hasCol1, startTimeCriterion, endTimeCriterion));
+    Filter filter =
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, hasCol1, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("componentProfiles.stat");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("componentProfiles.stat");
 
     // Grouping bucket is timestamp filed + componentProfiles.key.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
     GroupingBucket componentProfilesBucket =
-        new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET);
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter,
-        new GroupingBucket[]{timestampBucket, componentProfilesBucket});
+        new GroupingBucket()
+            .setKey("componentProfiles.key")
+            .setType(GroupingBucketType.STRING_GROUPING_BUCKET);
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket, componentProfilesBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(),
-        new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat"));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(
+            ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat"));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "long"));
     // Validate rows
     assertNotNull(resultTable.getRows());
     assertEquals(resultTable.getRows().size(), 1);
-    assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "col1",
-        _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(0).getStat().toString())));
+    assertEquals(
+        resultTable.getRows(),
+        new StringArrayArray(
+            new StringArray(
+                _startTime.toString(),
+                "col1",
+                _testEntityProfiles
+                    .get(lastEntryTimeStamp)
+                    .getComponentProfiles()
+                    .get(0)
+                    .getStat()
+                    .toString())));
   }
 
-  @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"})
+  @Test(
+      groups = {"getAggregatedStats"},
+      dependsOnGroups = {"upsert"})
   public void testGetAggregatedStatsLatestStatForAllColumnsDay1() {
     Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT;
     Criterion hasUrnCriterion =
         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
-    Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-        .setValue(_startTime.toString());
-    Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-        .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-        .setValue(String.valueOf(lastEntryTimeStamp));
+    Criterion startTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+            .setValue(_startTime.toString());
+    Criterion endTimeCriterion =
+        new Criterion()
+            .setField(ES_FIELD_TIMESTAMP)
+            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+            .setValue(String.valueOf(lastEntryTimeStamp));
 
     Filter filter =
-        QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+        QueryUtils.getFilterFromCriteria(
+            ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
 
     // Aggregate on latest stat value
     AggregationSpec latestStatAggregationSpec =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("componentProfiles.stat");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("componentProfiles.stat");
 
     // Grouping bucket is timestamp filed + componentProfiles.key.
-    GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
-        .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
-        .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+    GroupingBucket timestampBucket =
+        new GroupingBucket()
+            .setKey(ES_FIELD_TIMESTAMP)
+            .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+            .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
     GroupingBucket componentProfilesBucket =
-        new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET);
-
-    GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
-        new AggregationSpec[]{latestStatAggregationSpec}, filter,
-        new GroupingBucket[]{timestampBucket, componentProfilesBucket});
+        new GroupingBucket()
+            .setKey("componentProfiles.key")
+            .setType(GroupingBucketType.STRING_GROUPING_BUCKET);
+
+    GenericTable resultTable =
+        _elasticSearchTimeseriesAspectService.getAggregatedStats(
+            ENTITY_NAME,
+            ASPECT_NAME,
+            new AggregationSpec[] {latestStatAggregationSpec},
+            filter,
+            new GroupingBucket[] {timestampBucket, componentProfilesBucket});
     // Validate column names
-    assertEquals(resultTable.getColumnNames(),
-        new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat"));
+    assertEquals(
+        resultTable.getColumnNames(),
+        new StringArray(
+            ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat"));
     // Validate column types
     assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "long"));
     // Validate rows
-    StringArray expectedRow1 = new StringArray(_startTime.toString(), "col1",
-        _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(0).getStat().toString());
-    StringArray expectedRow2 = new StringArray(_startTime.toString(), "col2",
-        _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(1).getStat().toString());
+    StringArray expectedRow1 =
+        new StringArray(
+            _startTime.toString(),
+            "col1",
+            _testEntityProfiles
+                .get(lastEntryTimeStamp)
+                .getComponentProfiles()
+                .get(0)
+                .getStat()
+                .toString());
+    StringArray expectedRow2 =
+        new StringArray(
+            _startTime.toString(),
+            "col2",
+            _testEntityProfiles
+                .get(lastEntryTimeStamp)
+                .getComponentProfiles()
+                .get(1)
+                .getStat()
+                .toString());
 
     assertNotNull(resultTable.getRows());
     assertEquals(resultTable.getRows().size(), 2);
@@ -641,33 +885,48 @@ public void testGetAggregatedStatsLatestStatForAllColumnsDay1() {
   }
 
   /* Sum Aggregation Tests */
-  @Test(groups = {"getAggregatedStats"},
dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatForFirst10HoursOfDay1() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate the sum of stat value AggregationSpec sumAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("stat"); // Grouping bucket is only timestamp filed. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{sumAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {sumAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "sum_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "sum_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "double")); // Validate rows @@ -675,45 +934,68 @@ public void testGetAggregatedStatsSumStatForFirst10HoursOfDay1() { assertEquals(resultTable.getRows().size(), 1); // value is 20+30+40+... up to 10 terms = 650 // TODO: Compute this caching the documents. 
- assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), String.valueOf(650)))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatForCol2Day1() { Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT; Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(lastEntryTimeStamp)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(lastEntryTimeStamp)); Criterion hasCol2 = - new Criterion().setField("componentProfiles.key").setCondition(Condition.EQUAL).setValue("col2"); + new Criterion() + .setField("componentProfiles.key") + .setCondition(Condition.EQUAL) + .setValue("col2"); - Filter filter = QueryUtils.getFilterFromCriteria( - ImmutableList.of(hasUrnCriterion, hasCol2, startTimeCriterion, endTimeCriterion)); + Filter filter = + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, hasCol2, startTimeCriterion, endTimeCriterion)); // Aggregate the sum of stat value AggregationSpec sumStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is timestamp filed + componentProfiles.key. 
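    // Rough sketch (an assumption about the service internals, not code from
    // this patch): a DATE_GROUPING_BUCKET combined with a STRING_GROUPING_BUCKET
    // is presumably translated into a nested OpenSearch aggregation, something like:
    //   AggregationBuilders.dateHistogram("ts").field(ES_FIELD_TIMESTAMP)
    //       .calendarInterval(DateHistogramInterval.DAY)
    //       .subAggregation(AggregationBuilders.terms("keys").field("componentProfiles.key"))
    // The actual query construction lives in ElasticSearchTimeseriesAspectService.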
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); GroupingBucket componentProfilesBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{sumStatAggregationSpec}, filter, - new GroupingBucket[]{timestampBucket, componentProfilesBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {sumStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket, componentProfilesBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), - new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "sum_" + "componentProfiles.stat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray( + ES_FIELD_TIMESTAMP, "componentProfiles.key", "sum_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "double")); // Validate rows @@ -721,115 +1003,166 @@ public void testGetAggregatedStatsSumStatForCol2Day1() { assertEquals(resultTable.getRows().size(), 1); // value = 22+32+42+... 24 terms = 3288 // TODO: Compute this caching the documents. 
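    // Worked check for the expected 3288 (col2 series from the comment above:
    // first term a = 22, step d = 10, n = 24 terms):
    //   24/2 * (2*22 + 23*10) = 12 * (44 + 230) = 12 * 274 = 3288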
- assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "col2", String.valueOf(3288)))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsCardinalityAggStrStatDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec cardinalityStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("strStat"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("strStat"); // Grouping bucket is only timestamp filed. 
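    // Note on the count of 24 asserted below: day 1 seeds one document per
    // hour, each apparently carrying a distinct strStat value, so the per-day
    // cardinality is 24. OpenSearch's cardinality aggregation is approximate
    // (HyperLogLog++-based), but it is exact at counts this small.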
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{cardinalityStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {cardinalityStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "cardinality_" + "strStat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "cardinality_" + "strStat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "24"))); + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "24"))); } - @Test(groups = {"getAggregatedStats", "usageStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats", "usageStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatsCollectionDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec cardinalityStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is only timestamp filed. 
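    // The bucket built next is a string bucket on componentProfiles.key; a
    // STRING_GROUPING_BUCKET presumably maps onto a plain terms aggregation,
    // roughly (a sketch, not the service's actual code):
    //   AggregationBuilders.terms("componentProfiles.key").field("componentProfiles.key")
    // Assuming col1's seeded series is 21, 31, 41, ... (one below col2's), the
    // sums asserted below follow from the same arithmetic-series formula:
    //   col1: 12 * (42 + 230) = 3264,  col2: 12 * (44 + 230) = 3288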
GroupingBucket profileStatBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{cardinalityStatAggregationSpec}, filter, new GroupingBucket[]{profileStatBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {cardinalityStatAggregationSpec}, + filter, + new GroupingBucket[] {profileStatBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), + assertEquals( + resultTable.getColumnNames(), new StringArray("componentProfiles.key", "sum_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("string", "double")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 2); - assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray("col1", "3264"), new StringArray("col2", "3288"))); } - @Test(groups = {"deleteAspectValues1"}, dependsOnGroups = {"getAggregatedStats", "getAspectValues", "testCountBeforeDelete"}) + @Test( + groups = {"deleteAspectValues1"}, + dependsOnGroups = {"getAggregatedStats", "getAspectValues", "testCountBeforeDelete"}) public void testDeleteAspectValuesByUrnAndTimeRangeDay1() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); DeleteAspectValuesResult result = _elasticSearchTimeseriesAspectService.deleteAspectValues(ENTITY_NAME, ASPECT_NAME, filter); - // For day1, we expect 24 (number of hours) * 3 (each testEntityProfile aspect expands 3 elastic docs: + // For day1, we expect 24 (number of hours) * 3 (each testEntityProfile aspect expands 3 elastic + // docs: // 1 original + 2 for componentProfiles) = 72 total. 
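    // That is: 24 hourly aspects * 3 documents each = 72 deleted for day 1,
    // which leaves 300 - 72 = 228 of the originally upserted TEST_URN
    // documents, matching the delete-by-urn assertion further down.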
assertEquals(result.getNumDocsDeleted(), Long.valueOf(72L)); } - @Test(groups = {"deleteAspectValues2"}, dependsOnGroups = {"deleteAspectValues1", "testCountAfterDelete"}) + @Test( + groups = {"deleteAspectValues2"}, + dependsOnGroups = {"deleteAspectValues1", "testCountAfterDelete"}) public void testDeleteAspectValuesByUrn() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); Filter filter = QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion)); DeleteAspectValuesResult result = _elasticSearchTimeseriesAspectService.deleteAspectValues(ENTITY_NAME, ASPECT_NAME, filter); - // Of the 300 elastic docs upserted for TEST_URN, 72 got deleted by deleteAspectValues1 test group leaving 228. + // Of the 300 elastic docs upserted for TEST_URN, 72 got deleted by deleteAspectValues1 test + // group leaving 228. assertEquals(result.getNumDocsDeleted(), Long.valueOf(228L)); } - @Test(groups = {"testCountBeforeDelete"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"testCountBeforeDelete"}, + dependsOnGroups = {"upsert"}) public void testCountByFilter() { // Test with filter Criterion hasUrnCriterion = @@ -840,17 +1173,23 @@ public void testCountByFilter() { assertEquals(count, 300L); // Test with filter with multiple criteria - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter urnAndTimeFilter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); count = - _elasticSearchTimeseriesAspectService.countByFilter(ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); + _elasticSearchTimeseriesAspectService.countByFilter( + ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); assertEquals(count, 72L); // test without filter @@ -860,7 +1199,9 @@ public void testCountByFilter() { assertTrue(count >= 300L); } - @Test(groups = {"testCountAfterDelete"}, dependsOnGroups = {"deleteAspectValues1"}) + @Test( + groups = {"testCountAfterDelete"}, + dependsOnGroups = {"deleteAspectValues1"}) public void testCountByFilterAfterDelete() throws InterruptedException { syncAfterWrite(getBulkProcessor()); // Test with filter @@ -872,24 +1213,32 @@ public void testCountByFilterAfterDelete() throws InterruptedException { assertEquals(count, 228L); // Test with filter with multiple criteria - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + 
.setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter urnAndTimeFilter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); count = - _elasticSearchTimeseriesAspectService.countByFilter(ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); + _elasticSearchTimeseriesAspectService.countByFilter( + ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); assertEquals(count, 0L); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetIndexSizes() { List<TimeseriesIndexSizeResult> result = _elasticSearchTimeseriesAspectService.getIndexSizes(); - //CHECKSTYLE:OFF + // CHECKSTYLE:OFF /* Example result: {aspectName=testentityprofile, sizeMb=52.234, @@ -897,11 +1246,17 @@ public void testGetIndexSizes() { {aspectName=testentityprofile, sizeMb=0.208, indexName=es_timeseries_aspect_service_test_testentitywithouttests_testentityprofileaspect_v1, entityName=testentitywithouttests} */ - // There may be other indices in there from other tests, so just make sure that index for entity + aspect is in there - //CHECKSTYLE:ON + // There may be other indices in there from other tests, so just make sure that index for entity + // + aspect is in there + // CHECKSTYLE:ON assertTrue(result.size() > 0); assertTrue( - result.stream().anyMatch(idxSizeResult -> idxSizeResult.getIndexName().equals( - "es_timeseries_aspect_service_test_testentity_testentityprofileaspect_v1"))); + result.stream() + .anyMatch( + idxSizeResult -> + idxSizeResult + .getIndexName() + .equals( + "es_timeseries_aspect_service_test_testentity_testentityprofileaspect_v1"))); } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java index 12a02f954e1bc..29c64abdc4d0d 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java +++ b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java @@ -1,5 +1,7 @@ package io.datahubproject.test; +import static org.mockito.Mockito.mock; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -24,13 +26,7 @@ import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; -import net.datafaker.Faker; import com.linkedin.mxe.MetadataChangeProposal; -import net.datafaker.providers.base.Animal; -import net.datafaker.providers.base.Cat; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URISyntaxException; @@ -46,72 +42,92 @@ import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; - -import static org.mockito.Mockito.mock; +import javax.annotation.Nonnull; +import net.datafaker.Faker; +import net.datafaker.providers.base.Animal; +import net.datafaker.providers.base.Cat; +import 
org.apache.commons.lang3.NotImplementedException; public class DataGenerator { - private final static Faker FAKER = new Faker(); - private final EntityRegistry entityRegistry; - private final EntityService entityService; - - public DataGenerator(EntityService entityService) { - this.entityService = entityService; - this.entityRegistry = entityService.getEntityRegistry(); - } - - public static DataGenerator build(EntityRegistry entityRegistry) { - EntityServiceImpl mockEntityServiceImpl = new EntityServiceImpl(mock(AspectDao.class), - mock(EventProducer.class), entityRegistry, false, - mock(UpdateIndicesService.class), mock(PreProcessHooks.class)); - return new DataGenerator(mockEntityServiceImpl); - } - - public Stream<List<MetadataChangeProposal>> generateDatasets() { - return generateMCPs("dataset", 10, List.of()); - } - - public List<MetadataChangeProposal> generateTags(long count) { - return generateMCPs("tag", count, List.of()).findFirst().get(); - } - - public Stream<List<MetadataChangeProposal>> generateMCPs(String entityName, long count, List<String> aspects) { - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - - // Prevent duplicate tags and terms generated as secondary entities - Set<Urn> secondaryUrns = new HashSet<>(); - - return LongStream.range(0, count).mapToObj(idx -> { - RecordTemplate key = randomKeyAspect(entitySpec); - MetadataChangeProposal mcp = new MetadataChangeProposal(); - mcp.setEntityType(entitySpec.getName()); - mcp.setAspectName(entitySpec.getKeyAspectName()); - mcp.setAspect(GenericRecordUtils.serializeAspect(key)); - mcp.setEntityUrn(EntityKeyUtils.convertEntityKeyToUrn(key, entityName)); - mcp.setChangeType(ChangeType.UPSERT); - return mcp; - }).flatMap(mcp -> { - // Expand with additional random aspects - List<MetadataChangeProposal> additionalMCPs = new LinkedList<>(); - - for (String aspectName : aspects) { + private static final Faker FAKER = new Faker(); + private final EntityRegistry entityRegistry; + private final EntityService entityService; + + public DataGenerator(EntityService entityService) { + this.entityService = entityService; + this.entityRegistry = entityService.getEntityRegistry(); + } + + public static DataGenerator build(EntityRegistry entityRegistry) { + EntityServiceImpl mockEntityServiceImpl = + new EntityServiceImpl( + mock(AspectDao.class), + mock(EventProducer.class), + entityRegistry, + false, + mock(UpdateIndicesService.class), + mock(PreProcessHooks.class)); + return new DataGenerator(mockEntityServiceImpl); + } + + public Stream<List<MetadataChangeProposal>> generateDatasets() { + return generateMCPs("dataset", 10, List.of()); + } + + public List<MetadataChangeProposal> generateTags(long count) { + return generateMCPs("tag", count, List.of()).findFirst().get(); + } + + public Stream<List<MetadataChangeProposal>> generateMCPs( + String entityName, long count, List<String> aspects) { + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + // Prevent duplicate tags and terms generated as secondary entities + Set<Urn> secondaryUrns = new HashSet<>(); + + return LongStream.range(0, count) + .mapToObj( + idx -> { + RecordTemplate key = randomKeyAspect(entitySpec); + MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityType(entitySpec.getName()); + mcp.setAspectName(entitySpec.getKeyAspectName()); + mcp.setAspect(GenericRecordUtils.serializeAspect(key)); + mcp.setEntityUrn(EntityKeyUtils.convertEntityKeyToUrn(key, entityName)); + mcp.setChangeType(ChangeType.UPSERT); + return 
mcp; + }) + .flatMap( + mcp -> { + // Expand with additional random aspects + List<MetadataChangeProposal> additionalMCPs = new LinkedList<>(); + + for (String aspectName : aspects) { AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - throw new IllegalStateException("Aspect " + aspectName + " not found for entity " + entityName); + throw new IllegalStateException( + "Aspect " + aspectName + " not found for entity " + entityName); } - RecordTemplate aspect = randomAspectGenerators.getOrDefault(aspectName, - DataGenerator::defaultRandomAspect).apply(entitySpec, aspectSpec); + RecordTemplate aspect = + randomAspectGenerators + .getOrDefault(aspectName, DataGenerator::defaultRandomAspect) + .apply(entitySpec, aspectSpec); // Maybe generate nested entities at the same time, like globalTags/glossaryTerms - List<MetadataChangeProposal> secondaryEntities = nestedRandomAspectGenerators.getOrDefault(aspectSpec.getName(), - (a, c) -> List.of()).apply(aspect, 5).stream() - .filter(secondaryMCP -> { - if (!secondaryUrns.contains(secondaryMCP.getEntityUrn())) { + List<MetadataChangeProposal> secondaryEntities = + nestedRandomAspectGenerators + .getOrDefault(aspectSpec.getName(), (a, c) -> List.of()) + .apply(aspect, 5) + .stream() + .filter( + secondaryMCP -> { + if (!secondaryUrns.contains(secondaryMCP.getEntityUrn())) { secondaryUrns.add(secondaryMCP.getEntityUrn()); return true; - } - return false; - }) + } + return false; + }) .collect(Collectors.toList()); additionalMCPs.addAll(secondaryEntities); @@ -123,254 +139,327 @@ public Stream<List<MetadataChangeProposal>> generateMCPs(String entityName, long additionalMCP.setChangeType(ChangeType.UPSERT); additionalMCPs.add(additionalMCP); - } - - return Stream.concat(Stream.of(mcp), additionalMCPs.stream()); - }).map(mcp -> { - // Expand with default aspects per normal - return Stream.concat(Stream.of(mcp), - AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()) - .collect(Collectors.toList()); - }); - } - - public static Map<String, BiFunction<EntitySpec, AspectSpec, ? extends RecordTemplate>> randomAspectGenerators = Map.of( - "glossaryTermInfo", (e, a) -> { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) defaultRandomAspect(e, a); + } + + return Stream.concat(Stream.of(mcp), additionalMCPs.stream()); + }) + .map( + mcp -> { + // Expand with default aspects per normal + return Stream.concat( + Stream.of(mcp), + AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()) + .collect(Collectors.toList()); + }); + } + + public static Map<String, BiFunction<EntitySpec, AspectSpec, ? 
extends RecordTemplate>> + randomAspectGenerators = + Map.of( + "glossaryTermInfo", + (e, a) -> { + GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) defaultRandomAspect(e, a); glossaryTermInfo.setName(normalize(FAKER.company().buzzword())); return glossaryTermInfo; - } - ); - - public Map<String, BiFunction<RecordTemplate, Integer, List<MetadataChangeProposal>>> nestedRandomAspectGenerators = Map.of( - "globalTags", (aspect, count) -> { - try { - List<MetadataChangeProposal> tags = generateTags(count); - Method setTagsMethod = aspect.getClass().getMethod("setTags", TagAssociationArray.class); - TagAssociationArray tagAssociations = new TagAssociationArray(); - tagAssociations.addAll(tags.stream().map( - tagMCP -> { - try { - return new TagAssociation().setTag(TagUrn.createFromUrn(tagMCP.getEntityUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList())); - setTagsMethod.invoke(aspect, tagAssociations); - return tags; - } catch (Exception e) { - throw new RuntimeException(e); - } - }, - "glossaryTerms", (aspect, count) -> { - try { - List<MetadataChangeProposal> terms = generateMCPs("glossaryTerm", count, - List.of("glossaryTermInfo")) - .map(mcps -> mcps.get(0)) - .collect(Collectors.toList()); - Method setTermsMethod = aspect.getClass().getMethod("setTerms", GlossaryTermAssociationArray.class); - GlossaryTermAssociationArray termAssociations = new GlossaryTermAssociationArray(); - termAssociations.addAll(terms.stream().map( - termMCP -> { - try { - return new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(termMCP.getEntityUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList())); - setTermsMethod.invoke(aspect, termAssociations); - return terms; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - ); - - private static RecordTemplate defaultRandomAspect(@Nonnull EntitySpec entitySpec, @Nonnull AspectSpec aspectSpec) { - Class<RecordTemplate> aspectClass = aspectSpec.getDataTemplateClass(); - try { - Object aspect = aspectClass.getDeclaredConstructor().newInstance(); - - List<Method> booleanMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == Boolean.class) - .collect(Collectors.toList()); - - for (Method boolMethod : booleanMethods) { - boolMethod.invoke(aspect, FAKER.random().nextBoolean()); - } - - List<Method> stringMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == String.class) - .collect(Collectors.toList()); - - for (Method stringMethod : stringMethods) { - String value; - switch (aspectSpec.getName() + "_" + stringMethod.getName()) { - default: - value = FAKER.lorem().characters(8, 16, false); - break; - } - - // global - if (stringMethod.getName().toLowerCase().contains("description") - || stringMethod.getName().toLowerCase().contains("definition")) { - value = FAKER.lorem().paragraph(); - } + }); + + public Map<String, BiFunction<RecordTemplate, Integer, List<MetadataChangeProposal>>> + nestedRandomAspectGenerators = + Map.of( + "globalTags", + (aspect, count) -> { + try { + List<MetadataChangeProposal> tags = generateTags(count); + Method setTagsMethod = + aspect.getClass().getMethod("setTags", TagAssociationArray.class); + TagAssociationArray tagAssociations = new TagAssociationArray(); + 
tagAssociations.addAll( + tags.stream() + .map( + tagMCP -> { + try { + return new TagAssociation() + .setTag(TagUrn.createFromUrn(tagMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList())); + setTagsMethod.invoke(aspect, tagAssociations); + return tags; + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "glossaryTerms", + (aspect, count) -> { + try { + List<MetadataChangeProposal> terms = + generateMCPs("glossaryTerm", count, List.of("glossaryTermInfo")) + .map(mcps -> mcps.get(0)) + .collect(Collectors.toList()); + Method setTermsMethod = + aspect + .getClass() + .getMethod("setTerms", GlossaryTermAssociationArray.class); + GlossaryTermAssociationArray termAssociations = + new GlossaryTermAssociationArray(); + termAssociations.addAll( + terms.stream() + .map( + termMCP -> { + try { + return new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + termMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList())); + setTermsMethod.invoke(aspect, termAssociations); + return terms; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + private static RecordTemplate defaultRandomAspect( + @Nonnull EntitySpec entitySpec, @Nonnull AspectSpec aspectSpec) { + Class<RecordTemplate> aspectClass = aspectSpec.getDataTemplateClass(); + try { + Object aspect = aspectClass.getDeclaredConstructor().newInstance(); + + List<Method> booleanMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Boolean.class) + .collect(Collectors.toList()); + + for (Method boolMethod : booleanMethods) { + boolMethod.invoke(aspect, FAKER.random().nextBoolean()); + } + + List<Method> stringMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + for (Method stringMethod : stringMethods) { + String value; + switch (aspectSpec.getName() + "_" + stringMethod.getName()) { + default: + value = FAKER.lorem().characters(8, 16, false); + break; + } - stringMethod.invoke(aspect, value); - } - - List<Method> enumMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0].isEnum()) - .collect(Collectors.toList()); - - for (Method enumMethod : enumMethods) { - Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); - // Excluding $UNKNOWNs - enumMethod.invoke(aspect, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); - } - - // auditStamp - Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == AuditStamp.class) - .findFirst().ifPresent(auditStampMethod -> { - try { - AuditStamp auditStamp = new AuditStamp() - .setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)) - .setTime(System.currentTimeMillis()); - auditStampMethod.invoke(aspect, auditStamp); - } catch (URISyntaxException | IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } - }); - - return aspectClass.cast(aspect); - } catch (Exception e) { - throw new RuntimeException(e); + // global + if (stringMethod.getName().toLowerCase().contains("description") + 
|| stringMethod.getName().toLowerCase().contains("definition")) { + value = FAKER.lorem().paragraph(); } - } - private static RecordTemplate randomKeyAspect(EntitySpec entitySpec) { - Class<RecordTemplate> keyClass = entitySpec.getKeyAspectSpec().getDataTemplateClass(); - try { - Object key = keyClass.getDeclaredConstructor().newInstance(); - - List<Method> stringMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == String.class) - .collect(Collectors.toList()); - - switch (entitySpec.getName()) { - case "tag": - stringMethods.get(0).invoke(key, normalize(FAKER.marketing().buzzwords())); - break; - case "glossaryTerm": - stringMethods.get(0).invoke(key, normalize(UUID.randomUUID().toString())); - break; - case "container": - stringMethods.get(0).invoke(key, FAKER.examplify("b5e95fce839e7d78151ed7e0a7420d84")); - break; - default: - switch (stringMethods.size()) { - case 1: - stringMethods.get(0).invoke(key, String.join(".", multiName(3))); - break; - case 2: - Cat cat = FAKER.cat(); - stringMethods.get(0).invoke(key, cat.breed().toLowerCase()); - stringMethods.get(1).invoke(key, cat.name().toLowerCase()); - break; - default: - Animal animal = FAKER.animal(); - stringMethods.get(0).invoke(key, animal.genus().toLowerCase()); - stringMethods.get(1).invoke(key, animal.species().toLowerCase()); - stringMethods.get(2).invoke(key, animal.name().toLowerCase()); - break; - } - break; - } - - List<Method> urnMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == Urn.class) - .collect(Collectors.toList()); - - for (Method urnMethod : urnMethods) { - switch (entitySpec.getName()) { - case "dataset": - urnMethod.invoke(key, randomUrnLowerCase("dataPlatform", - List.of(randomDataPlatform()))); - break; - default: - throw new NotImplementedException(entitySpec.getName()); + stringMethod.invoke(aspect, value); + } + + List<Method> enumMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(aspect, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + // auditStamp + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == AuditStamp.class) + .findFirst() + .ifPresent( + auditStampMethod -> { + try { + AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)) + .setTime(System.currentTimeMillis()); + auditStampMethod.invoke(aspect, auditStamp); + } catch (URISyntaxException + | IllegalAccessException + | InvocationTargetException e) { + throw new RuntimeException(e); } - } - - List<Method> enumMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0].isEnum()) - .collect(Collectors.toList()); - - for (Method enumMethod : enumMethods) { - Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); - // Excluding $UNKNOWNs - enumMethod.invoke(key, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); - } - - return 
keyClass.cast(key); - } catch (Exception e) { - throw new RuntimeException(e); - } - } + }); - private static List<String> multiName(int size) { - switch (size) { + return aspectClass.cast(aspect); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private static RecordTemplate randomKeyAspect(EntitySpec entitySpec) { + Class<RecordTemplate> keyClass = entitySpec.getKeyAspectSpec().getDataTemplateClass(); + try { + Object key = keyClass.getDeclaredConstructor().newInstance(); + + List<Method> stringMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + switch (entitySpec.getName()) { + case "tag": + stringMethods.get(0).invoke(key, normalize(FAKER.marketing().buzzwords())); + break; + case "glossaryTerm": + stringMethods.get(0).invoke(key, normalize(UUID.randomUUID().toString())); + break; + case "container": + stringMethods.get(0).invoke(key, FAKER.examplify("b5e95fce839e7d78151ed7e0a7420d84")); + break; + default: + switch (stringMethods.size()) { case 1: - return Stream.of(FAKER.marketing().buzzwords()) - .map(String::toLowerCase).collect(Collectors.toList()); + stringMethods.get(0).invoke(key, String.join(".", multiName(3))); + break; case 2: - Cat cat = FAKER.cat(); - return Stream.of(cat.breed(), cat.name()) - .map(String::toLowerCase).collect(Collectors.toList()); - case 3: - Animal animal = FAKER.animal(); - return Stream.of(animal.genus(), animal.species(), animal.name()) - .map(String::toLowerCase).collect(Collectors.toList()); + Cat cat = FAKER.cat(); + stringMethods.get(0).invoke(key, cat.breed().toLowerCase()); + stringMethods.get(1).invoke(key, cat.name().toLowerCase()); + break; default: - return IntStream.range(0, size).mapToObj(i -> FAKER.expression("#{numerify 'test####'}")).collect(Collectors.toList()); + Animal animal = FAKER.animal(); + stringMethods.get(0).invoke(key, animal.genus().toLowerCase()); + stringMethods.get(1).invoke(key, animal.species().toLowerCase()); + stringMethods.get(2).invoke(key, animal.name().toLowerCase()); + break; + } + break; + } + + List<Method> urnMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Urn.class) + .collect(Collectors.toList()); + + for (Method urnMethod : urnMethods) { + switch (entitySpec.getName()) { + case "dataset": + urnMethod.invoke( + key, randomUrnLowerCase("dataPlatform", List.of(randomDataPlatform()))); + break; + default: + throw new NotImplementedException(entitySpec.getName()); } + } + + List<Method> enumMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(key, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + return keyClass.cast(key); + } catch (Exception e) { + throw new RuntimeException(e); } - - private static Urn randomUrnLowerCase(String entityType, List<String> tuple) { - return Urn.createFromTuple(entityType, - tuple.stream().map(DataGenerator::normalize).collect(Collectors.toList())); - } - - private static String normalize(String input) { - return input.toLowerCase().replaceAll("\\W+", 
"_"); - } - - private static String randomDataPlatform() { - String[] platforms = { - "ambry", "bigquery", "couchbase", "druid", "external", "feast", "glue", "hdfs", "hive", "kafka", "kusto", - "looker", "mongodb", "mssql", "mysql", "oracle", "pinot", "postgres", "presto", "redshift", "s3", - "sagemaker", "snowflake", "teradata", "voldemort" - }; - - return platforms[FAKER.random().nextInt(0, platforms.length - 1)]; + } + + private static List<String> multiName(int size) { + switch (size) { + case 1: + return Stream.of(FAKER.marketing().buzzwords()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + case 2: + Cat cat = FAKER.cat(); + return Stream.of(cat.breed(), cat.name()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + case 3: + Animal animal = FAKER.animal(); + return Stream.of(animal.genus(), animal.species(), animal.name()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + default: + return IntStream.range(0, size) + .mapToObj(i -> FAKER.expression("#{numerify 'test####'}")) + .collect(Collectors.toList()); } + } + + private static Urn randomUrnLowerCase(String entityType, List<String> tuple) { + return Urn.createFromTuple( + entityType, tuple.stream().map(DataGenerator::normalize).collect(Collectors.toList())); + } + + private static String normalize(String input) { + return input.toLowerCase().replaceAll("\\W+", "_"); + } + + private static String randomDataPlatform() { + String[] platforms = { + "ambry", + "bigquery", + "couchbase", + "druid", + "external", + "feast", + "glue", + "hdfs", + "hive", + "kafka", + "kusto", + "looker", + "mongodb", + "mssql", + "mysql", + "oracle", + "pinot", + "postgres", + "presto", + "redshift", + "s3", + "sagemaker", + "snowflake", + "teradata", + "voldemort" + }; + + return platforms[FAKER.random().nextInt(0, platforms.length - 1)]; + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java index 18fbf86f8668d..ff14b91a72c7f 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java @@ -1,5 +1,10 @@ package io.datahubproject.test.fixtures.search; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; import lombok.Builder; import lombok.NonNull; import org.opensearch.action.search.SearchRequest; @@ -11,56 +16,50 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; -import java.io.IOException; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; - @Builder public class EntityExporter { - @NonNull - private RestHighLevelClient client; - @Builder.Default - private int fetchSize = 3000; - @NonNull - private FixtureWriter writer; - @NonNull - private String fixtureName; - @Builder.Default - private String sourceIndexPrefix = ""; - @Builder.Default - private String sourceIndexSuffix = "index_v2"; - @Builder.Default - private Set<String> indexEntities = SEARCHABLE_ENTITY_TYPES.stream() - .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) - .collect(Collectors.toSet()); - + @NonNull private RestHighLevelClient client; + @Builder.Default private int 
fetchSize = 3000; + @NonNull private FixtureWriter writer; + @NonNull private String fixtureName; + @Builder.Default private String sourceIndexPrefix = ""; + @Builder.Default private String sourceIndexSuffix = "index_v2"; - public void export() throws IOException { - Set<String> searchIndexSuffixes = indexEntities.stream() - .map(entityName -> entityName + sourceIndexSuffix) - .collect(Collectors.toSet()); + @Builder.Default + private Set<String> indexEntities = + SEARCHABLE_ENTITY_TYPES.stream() + .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) + .collect(Collectors.toSet()); - // Fetch indices - GetMappingsResponse response = client.indices().getMapping(new GetMappingsRequest().indices("*"), - RequestOptions.DEFAULT); + public void export() throws IOException { + Set<String> searchIndexSuffixes = + indexEntities.stream() + .map(entityName -> entityName + sourceIndexSuffix) + .collect(Collectors.toSet()); - response.mappings().keySet().stream() - .filter(index -> searchIndexSuffixes.stream().anyMatch(index::contains) - && index.startsWith(sourceIndexPrefix)) - .map(index -> index.split(sourceIndexSuffix, 2)[0] + sourceIndexSuffix) - .forEach(indexName -> { + // Fetch indices + GetMappingsResponse response = + client.indices().getMapping(new GetMappingsRequest().indices("*"), RequestOptions.DEFAULT); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + response.mappings().keySet().stream() + .filter( + index -> + searchIndexSuffixes.stream().anyMatch(index::contains) + && index.startsWith(sourceIndexPrefix)) + .map(index -> index.split(sourceIndexSuffix, 2)[0] + sourceIndexSuffix) + .forEach( + indexName -> { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.source(searchSourceBuilder); + SearchRequest searchRequest = new SearchRequest(indexName); + searchRequest.source(searchSourceBuilder); - String outputPath = String.format("%s/%s.json", fixtureName, indexName.replaceFirst(sourceIndexPrefix, "")); - writer.write(searchRequest, outputPath, false); - }); - } + String outputPath = + String.format( + "%s/%s.json", fixtureName, indexName.replaceFirst(sourceIndexPrefix, "")); + writer.write(searchRequest, outputPath, false); + }); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java index 1b804a2346883..dff6b7ab5a898 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java @@ -1,13 +1,9 @@ package io.datahubproject.test.fixtures.search; +import static io.datahubproject.test.fixtures.search.SearchFixtureUtils.OBJECT_MAPPER; + import com.fasterxml.jackson.core.JsonProcessingException; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; -import lombok.Builder; -import lombok.NonNull; -import org.apache.commons.io.FilenameUtils; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.common.xcontent.XContentType; - import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.Closeable; @@ -22,105 
+18,113 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; - -import static io.datahubproject.test.fixtures.search.SearchFixtureUtils.OBJECT_MAPPER; +import lombok.Builder; +import lombok.NonNull; +import org.apache.commons.io.FilenameUtils; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.common.xcontent.XContentType; @Builder public class FixtureReader { - @Builder.Default - private String inputBase = SearchFixtureUtils.FIXTURE_BASE; - @NonNull - private ESBulkProcessor bulkProcessor; - @NonNull - private String fixtureName; - @Builder.Default - private String targetIndexPrefix = ""; + @Builder.Default private String inputBase = SearchFixtureUtils.FIXTURE_BASE; + @NonNull private ESBulkProcessor bulkProcessor; + @NonNull private String fixtureName; + @Builder.Default private String targetIndexPrefix = ""; - private long refreshIntervalSeconds; + private long refreshIntervalSeconds; - public Set<String> read() throws IOException { - try (Stream<Path> files = Files.list(Paths.get(String.format("%s/%s", inputBase, fixtureName)))) { - return files.map(file -> { + public Set<String> read() throws IOException { + try (Stream<Path> files = + Files.list(Paths.get(String.format("%s/%s", inputBase, fixtureName)))) { + return files + .map( + file -> { String absolutePath = file.toAbsolutePath().toString(); - String indexName = String.format("%s_%s", targetIndexPrefix, FilenameUtils.getBaseName(absolutePath).split("[.]", 2)[0]); + String indexName = + String.format( + "%s_%s", + targetIndexPrefix, + FilenameUtils.getBaseName(absolutePath).split("[.]", 2)[0]); try (Stream<String> lines = getLines(absolutePath)) { - lines.forEach(line -> { + lines.forEach( + line -> { try { - UrnDocument doc = OBJECT_MAPPER.readValue(line, UrnDocument.class); - IndexRequest request = new IndexRequest(indexName) - .id(doc.urn) - .source(line.getBytes(), XContentType.JSON); + UrnDocument doc = OBJECT_MAPPER.readValue(line, UrnDocument.class); + IndexRequest request = + new IndexRequest(indexName) + .id(doc.urn) + .source(line.getBytes(), XContentType.JSON); - bulkProcessor.add(request); + bulkProcessor.add(request); } catch (JsonProcessingException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); + }); } catch (IOException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } return indexName; - }).collect(Collectors.toSet()); - } finally { - bulkProcessor.flush(); - try { - Thread.sleep(1000 * refreshIntervalSeconds); - } catch (InterruptedException ignored) { - } - } + }) + .collect(Collectors.toSet()); + } finally { + bulkProcessor.flush(); + try { + Thread.sleep(1000 * refreshIntervalSeconds); + } catch (InterruptedException ignored) { + } } + } - private Stream<String> getLines(String path) throws IOException { - if (FilenameUtils.getExtension(path).equals("gz")) { - return GZIPFiles.lines(Paths.get(path)); - } else { - return Files.lines(Paths.get(path)); - } + private Stream<String> getLines(String path) throws IOException { + if (FilenameUtils.getExtension(path).equals("gz")) { + return GZIPFiles.lines(Paths.get(path)); + } else { + return Files.lines(Paths.get(path)); } + } - public static class GZIPFiles { - /** - * Get a lazily loaded stream of lines from a gzipped file, similar to - * {@link Files#lines(java.nio.file.Path)}. - * - * @param path - * The path to the gzipped file. - * @return stream with lines. 
- */ - public static Stream<String> lines(Path path) { - InputStream fileIs = null; - BufferedInputStream bufferedIs = null; - GZIPInputStream gzipIs = null; - try { - fileIs = Files.newInputStream(path); - // Even though GZIPInputStream has a buffer it reads individual bytes - // when processing the header, better add a buffer in-between - bufferedIs = new BufferedInputStream(fileIs, 65535); - gzipIs = new GZIPInputStream(bufferedIs); - } catch (IOException e) { - closeSafely(gzipIs); - closeSafely(bufferedIs); - closeSafely(fileIs); - throw new UncheckedIOException(e); - } - BufferedReader reader = new BufferedReader(new InputStreamReader(gzipIs)); - return reader.lines().onClose(() -> closeSafely(reader)); - } + public static class GZIPFiles { + /** + * Get a lazily loaded stream of lines from a gzipped file, similar to {@link + * Files#lines(java.nio.file.Path)}. + * + * @param path The path to the gzipped file. + * @return stream with lines. + */ + public static Stream<String> lines(Path path) { + InputStream fileIs = null; + BufferedInputStream bufferedIs = null; + GZIPInputStream gzipIs = null; + try { + fileIs = Files.newInputStream(path); + // Even though GZIPInputStream has a buffer it reads individual bytes + // when processing the header, better add a buffer in-between + bufferedIs = new BufferedInputStream(fileIs, 65535); + gzipIs = new GZIPInputStream(bufferedIs); + } catch (IOException e) { + closeSafely(gzipIs); + closeSafely(bufferedIs); + closeSafely(fileIs); + throw new UncheckedIOException(e); + } + BufferedReader reader = new BufferedReader(new InputStreamReader(gzipIs)); + return reader.lines().onClose(() -> closeSafely(reader)); + } - private static void closeSafely(Closeable closeable) { - if (closeable != null) { - try { - closeable.close(); - } catch (IOException e) { - // Ignore - } - } + private static void closeSafely(Closeable closeable) { + if (closeable != null) { + try { + closeable.close(); + } catch (IOException e) { + // Ignore } + } } + } - public static class UrnDocument { - public String urn; - } + public static class UrnDocument { + public String urn; + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java index 0aefa006421fc..8a11de6c513a3 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java @@ -1,6 +1,11 @@ package io.datahubproject.test.fixtures.search; import com.fasterxml.jackson.core.JsonProcessingException; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.function.BiConsumer; +import javax.annotation.Nullable; import lombok.Builder; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; @@ -9,70 +14,72 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; -import javax.annotation.Nullable; -import java.io.BufferedWriter; -import java.io.FileWriter; -import java.io.IOException; -import java.util.function.BiConsumer; - -/** - * - */ +/** */ @Builder public class FixtureWriter { - private RestHighLevelClient client; + private RestHighLevelClient client; - @Builder.Default - private String outputBase = SearchFixtureUtils.FIXTURE_BASE; + @Builder.Default private String outputBase = SearchFixtureUtils.FIXTURE_BASE; - public void write(SearchRequest 
searchRequest, String relativeOutput, boolean append) { - write(searchRequest, relativeOutput, append, null, null, null); - } + public void write(SearchRequest searchRequest, String relativeOutput, boolean append) { + write(searchRequest, relativeOutput, append, null, null, null); + } - public <O, C> void write(SearchRequest searchRequest, String relativeOutput, boolean append, - @Nullable Class<O> outputType, Class<C> callbackType, BiConsumer<SearchHit, C> callback) { - try { - SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); - SearchHits hits = searchResponse.getHits(); - long remainingHits = hits.getTotalHits().value; + public <O, C> void write( + SearchRequest searchRequest, + String relativeOutput, + boolean append, + @Nullable Class<O> outputType, + Class<C> callbackType, + BiConsumer<SearchHit, C> callback) { + try { + SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); + SearchHits hits = searchResponse.getHits(); + long remainingHits = hits.getTotalHits().value; - if (remainingHits > 0) { - try (FileWriter writer = new FileWriter(String.format("%s/%s", outputBase, relativeOutput), append); - BufferedWriter bw = new BufferedWriter(writer)) { + if (remainingHits > 0) { + try (FileWriter writer = + new FileWriter(String.format("%s/%s", outputBase, relativeOutput), append); + BufferedWriter bw = new BufferedWriter(writer)) { - while (remainingHits > 0) { - SearchHit lastHit = null; - for (SearchHit hit : hits.getHits()) { - lastHit = hit; - remainingHits -= 1; + while (remainingHits > 0) { + SearchHit lastHit = null; + for (SearchHit hit : hits.getHits()) { + lastHit = hit; + remainingHits -= 1; - try { - if (outputType == null) { - bw.write(hit.getSourceAsString()); - } else { - O doc = SearchFixtureUtils.OBJECT_MAPPER.readValue(hit.getSourceAsString(), outputType); - bw.write(SearchFixtureUtils.OBJECT_MAPPER.writeValueAsString(doc)); - } - bw.newLine(); + try { + if (outputType == null) { + bw.write(hit.getSourceAsString()); + } else { + O doc = + SearchFixtureUtils.OBJECT_MAPPER.readValue( + hit.getSourceAsString(), outputType); + bw.write(SearchFixtureUtils.OBJECT_MAPPER.writeValueAsString(doc)); + } + bw.newLine(); - // Fire callback - if (callback != null) { - callback.accept(hit, SearchFixtureUtils.OBJECT_MAPPER.readValue(hit.getSourceAsString(), callbackType)); - } - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - if (lastHit != null) { - searchRequest.source().searchAfter(lastHit.getSortValues()); - hits = client.search(searchRequest, RequestOptions.DEFAULT).getHits(); - } - } + // Fire callback + if (callback != null) { + callback.accept( + hit, + SearchFixtureUtils.OBJECT_MAPPER.readValue( + hit.getSourceAsString(), callbackType)); } + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + if (lastHit != null) { + searchRequest.source().searchAfter(lastHit.getSortValues()); + hits = client.search(searchRequest, RequestOptions.DEFAULT).getHits(); } - } catch (IOException e) { - throw new RuntimeException(e); + } } + } + } catch (IOException e) { + throw new RuntimeException(e); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java index 5db07ee6fb8bc..4b7d81aa04416 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java +++ 
b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java @@ -1,15 +1,6 @@ package io.datahubproject.test.fixtures.search; import com.google.common.collect.Lists; -import lombok.Builder; -import lombok.NonNull; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.search.sort.SortBuilders; -import org.opensearch.search.sort.SortOrder; - import java.net.URLDecoder; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; @@ -20,174 +11,210 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; +import lombok.Builder; +import lombok.NonNull; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.sort.SortBuilders; +import org.opensearch.search.sort.SortOrder; @Builder public class LineageExporter<O> { - @Builder.Default - private int fetchSize = 3000; - @Builder.Default - private int queryStatementSize = 32000; - @NonNull - private FixtureWriter writer; - private String entityIndexName; - - private String graphIndexName; - - private String entityOutputPath; - private String graphOutputPath; - - private Class<O> anonymizerClazz; - - - private static String idToUrn(String id) { - return URLDecoder.decode(id, StandardCharsets.UTF_8); + @Builder.Default private int fetchSize = 3000; + @Builder.Default private int queryStatementSize = 32000; + @NonNull private FixtureWriter writer; + private String entityIndexName; + + private String graphIndexName; + + private String entityOutputPath; + private String graphOutputPath; + + private Class<O> anonymizerClazz; + + private static String idToUrn(String id) { + return URLDecoder.decode(id, StandardCharsets.UTF_8); + } + + public <O> void export(Set<String> ids) { + if (entityIndexName != null) { + assert (entityOutputPath != null); + exportEntityIndex( + ids.stream() + .map(id -> URLEncoder.encode(id, StandardCharsets.UTF_8)) + .collect(Collectors.toSet()), + new HashSet<>(), + 0); } - - public <O> void export(Set<String> ids) { - if (entityIndexName != null) { - assert (entityOutputPath != null); - exportEntityIndex(ids.stream().map(id -> URLEncoder.encode(id, StandardCharsets.UTF_8)).collect(Collectors.toSet()), - new HashSet<>(), 0); - } - if (graphIndexName != null) { - assert (graphOutputPath != null); - exportGraphIndex(ids, new HashSet<>(), new HashSet<>(), 0); - } + if (graphIndexName != null) { + assert (graphOutputPath != null); + exportGraphIndex(ids, new HashSet<>(), new HashSet<>(), 0); } - - public void exportGraphIndex(Set<String> urns, Set<String> visitedUrns, Set<String> visitedIds, int hops) { - Set<String> nextIds = new HashSet<>(); - if (!urns.isEmpty()) { - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); - - boolQueryBuilder.must(QueryBuilders.termQuery("relationshipType", "DownstreamOf")); - - Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize).forEach(batch -> { - boolQueryBuilder.should(QueryBuilders.termsQuery("source.urn", batch.toArray(String[]::new))); - boolQueryBuilder.should(QueryBuilders.termsQuery("destination.urn", batch.toArray(String[]::new))); - }); - boolQueryBuilder.minimumShouldMatch(1); - - // Exclude visited - 
Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.mustNot(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.query(boolQueryBuilder); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - - SearchRequest searchRequest = new SearchRequest(graphIndexName); - searchRequest.source(searchSourceBuilder); - - Set<String> docIds = new HashSet<>(); - Set<GraphDocument> docs = new HashSet<>(); - - long startTime = System.currentTimeMillis(); - System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, urns.size(), visitedUrns.size()); - - writer.write(searchRequest, graphOutputPath, hops != 0, anonymizerClazz, - GraphDocument.class, (hit, doc) -> { - docIds.add(hit.getId()); - docs.add(doc); - }); - - long endTime = System.currentTimeMillis(); - System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); - - visitedIds.addAll(docIds); - visitedUrns.addAll(urns); - - Set<String> discoveredUrns = docs.stream().flatMap(d -> Stream.of(d.destination.urn, d.source.urn)) - .filter(Objects::nonNull) - .filter(urn -> !visitedUrns.contains(urn)) - .collect(Collectors.toSet()); - - nextIds.addAll(discoveredUrns); - } - - if (!nextIds.isEmpty()) { - exportGraphIndex(nextIds, visitedUrns, visitedIds, hops + 1); - } + } + + public void exportGraphIndex( + Set<String> urns, Set<String> visitedUrns, Set<String> visitedIds, int hops) { + Set<String> nextIds = new HashSet<>(); + if (!urns.isEmpty()) { + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + boolQueryBuilder.must(QueryBuilders.termQuery("relationshipType", "DownstreamOf")); + + Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> { + boolQueryBuilder.should( + QueryBuilders.termsQuery("source.urn", batch.toArray(String[]::new))); + boolQueryBuilder.should( + QueryBuilders.termsQuery("destination.urn", batch.toArray(String[]::new))); + }); + boolQueryBuilder.minimumShouldMatch(1); + + // Exclude visited + Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.mustNot( + QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.query(boolQueryBuilder); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + + SearchRequest searchRequest = new SearchRequest(graphIndexName); + searchRequest.source(searchSourceBuilder); + + Set<String> docIds = new HashSet<>(); + Set<GraphDocument> docs = new HashSet<>(); + + long startTime = System.currentTimeMillis(); + System.out.printf( + "Hops: %s (Ids: %s) [VisitedIds: %s]", hops, urns.size(), visitedUrns.size()); + + writer.write( + searchRequest, + graphOutputPath, + hops != 0, + anonymizerClazz, + GraphDocument.class, + (hit, doc) -> { + docIds.add(hit.getId()); + docs.add(doc); + }); + + long endTime = System.currentTimeMillis(); + System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); + + visitedIds.addAll(docIds); + visitedUrns.addAll(urns); + + Set<String> discoveredUrns = + docs.stream() + .flatMap(d -> Stream.of(d.destination.urn, d.source.urn)) + .filter(Objects::nonNull) + .filter(urn -> !visitedUrns.contains(urn)) + 
.collect(Collectors.toSet()); + + nextIds.addAll(discoveredUrns); } - public void exportEntityIndex(Set<String> ids, Set<String> visitedIds, int hops) { - Set<String> nextIds = new HashSet<>(); - - if (!ids.isEmpty()) { - Set<String> urns = ids.stream().map(LineageExporter::idToUrn).collect(Collectors.toSet()); - - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); - - Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.should(QueryBuilders.termsQuery("upstreams.keyword", batch.toArray(String[]::new))) - ); - Lists.partition(Arrays.asList(ids.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.should(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - boolQueryBuilder.minimumShouldMatch(1); - - // Exclude visited - Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.mustNot(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.query(boolQueryBuilder); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - - SearchRequest searchRequest = new SearchRequest(entityIndexName); - searchRequest.source(searchSourceBuilder); - - Set<String> docIds = new HashSet<>(); - Set<UrnDocument> docs = new HashSet<>(); - - long startTime = System.currentTimeMillis(); - System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, ids.size(), visitedIds.size()); - - writer.write(searchRequest, entityOutputPath, hops != 0, anonymizerClazz, - UrnDocument.class, (hit, doc) -> { - docIds.add(hit.getId()); - docs.add(doc); - }); - - long endTime = System.currentTimeMillis(); - System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); - - visitedIds.addAll(docIds); - - nextIds.addAll(docIds.stream() - .filter(Objects::nonNull) - .filter(docId -> !visitedIds.contains(docId)) - .collect(Collectors.toSet())); - nextIds.addAll(docs.stream() - .filter(doc -> doc.upstreams != null && !doc.upstreams.isEmpty()) - .flatMap(doc -> doc.upstreams.stream()) - .map(urn -> URLEncoder.encode(urn, StandardCharsets.UTF_8)) - .filter(docId -> !visitedIds.contains(docId)) - .collect(Collectors.toSet())); - } - - if (!nextIds.isEmpty()) { - exportEntityIndex(nextIds, visitedIds, hops + 1); - } + if (!nextIds.isEmpty()) { + exportGraphIndex(nextIds, visitedUrns, visitedIds, hops + 1); + } + } + + public void exportEntityIndex(Set<String> ids, Set<String> visitedIds, int hops) { + Set<String> nextIds = new HashSet<>(); + + if (!ids.isEmpty()) { + Set<String> urns = ids.stream().map(LineageExporter::idToUrn).collect(Collectors.toSet()); + + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.should( + QueryBuilders.termsQuery("upstreams.keyword", batch.toArray(String[]::new)))); + Lists.partition(Arrays.asList(ids.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.should( + QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + boolQueryBuilder.minimumShouldMatch(1); + + // Exclude visited + Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.mustNot( + 
QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.query(boolQueryBuilder); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + + SearchRequest searchRequest = new SearchRequest(entityIndexName); + searchRequest.source(searchSourceBuilder); + + Set<String> docIds = new HashSet<>(); + Set<UrnDocument> docs = new HashSet<>(); + + long startTime = System.currentTimeMillis(); + System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, ids.size(), visitedIds.size()); + + writer.write( + searchRequest, + entityOutputPath, + hops != 0, + anonymizerClazz, + UrnDocument.class, + (hit, doc) -> { + docIds.add(hit.getId()); + docs.add(doc); + }); + + long endTime = System.currentTimeMillis(); + System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); + + visitedIds.addAll(docIds); + + nextIds.addAll( + docIds.stream() + .filter(Objects::nonNull) + .filter(docId -> !visitedIds.contains(docId)) + .collect(Collectors.toSet())); + nextIds.addAll( + docs.stream() + .filter(doc -> doc.upstreams != null && !doc.upstreams.isEmpty()) + .flatMap(doc -> doc.upstreams.stream()) + .map(urn -> URLEncoder.encode(urn, StandardCharsets.UTF_8)) + .filter(docId -> !visitedIds.contains(docId)) + .collect(Collectors.toSet())); } - public static class UrnDocument { - public String urn; - public List<String> upstreams; + if (!nextIds.isEmpty()) { + exportEntityIndex(nextIds, visitedIds, hops + 1); } + } + + public static class UrnDocument { + public String urn; + public List<String> upstreams; + } - public static class GraphDocument { - public String relationshipType; - public GraphNode source; - public GraphNode destination; + public static class GraphDocument { + public String relationshipType; + public GraphNode source; + public GraphNode destination; - public static class GraphNode { - public String urn; - public String entityType; - } + public static class GraphNode { + public String urn; + public String entityType; } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 45bbd912bc794..14e5259f90097 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -1,15 +1,20 @@ package io.datahubproject.test.fixtures.search; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import static com.linkedin.metadata.Constants.*; +import static io.datahubproject.test.search.config.SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import 
com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.EntityAspectIdentifier; @@ -31,8 +36,11 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; - +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import java.io.IOException; +import java.util.Map; import java.util.Optional; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -42,244 +50,245 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static io.datahubproject.test.search.config.SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS; -import static org.mockito.ArgumentMatchers.anySet; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration @Import(SearchCommonTestConfiguration.class) public class SampleDataFixtureConfiguration { - /** - * Interested in adding more fixtures? Here's what you will need to update? - * 1. Create a new indexPrefix and FixtureName. Both are needed or else all fixtures will load on top of each other, - * overwriting each other - * 2. Create a new IndexConvention, IndexBuilder, and EntityClient. These are needed - * to index a different set of entities. 
- */ - - @Autowired - private ESBulkProcessor _bulkProcessor; - - @Autowired - private RestHighLevelClient _searchClient; - - @Autowired - private RestHighLevelClient _longTailSearchClient; - - @Autowired - private SearchConfiguration _searchConfiguration; - - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; - - @Bean(name = "sampleDataPrefix") - protected String sampleDataPrefix() { - return "smpldat"; - } - - @Bean(name = "longTailPrefix") - protected String longTailIndexPrefix() { - return "lngtl"; - } - - @Bean(name = "sampleDataIndexConvention") - protected IndexConvention indexConvention(@Qualifier("sampleDataPrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "longTailIndexConvention") - protected IndexConvention longTailIndexConvention(@Qualifier("longTailPrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "sampleDataFixtureName") - protected String sampleDataFixtureName() { - return "sample_data"; - } - - @Bean(name = "longTailFixtureName") - protected String longTailFixtureName() { - return "long_tail"; - } - - @Bean(name = "sampleDataEntityIndexBuilders") - protected EntityIndexBuilders entityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention - ) { - return entityIndexBuildersHelper(entityRegistry, indexConvention); - } - - @Bean(name = "longTailEntityIndexBuilders") - protected EntityIndexBuilders longTailEntityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailIndexConvention") IndexConvention indexConvention - ) { - return entityIndexBuildersHelper(longTailEntityRegistry, indexConvention); - } - - protected EntityIndexBuilders entityIndexBuildersHelper( - EntityRegistry entityRegistry, - IndexConvention indexConvention - ) { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder indexBuilder = new ESIndexBuilder(_searchClient, 1, 0, 1, - 1, Map.of(), true, false, - new ElasticSearchConfiguration(), gitVersion); - SettingsBuilder settingsBuilder = new SettingsBuilder(null); - return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); - } - - @Bean(name = "sampleDataEntitySearchService") - protected ElasticSearchService entitySearchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention - ) throws IOException { - return entitySearchServiceHelper(entityRegistry, indexBuilders, indexConvention); - } - - @Bean(name = "longTailEntitySearchService") - protected ElasticSearchService longTailEntitySearchService( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailEndexBuilders, - @Qualifier("longTailIndexConvention") IndexConvention longTailIndexConvention - ) throws IOException { - return entitySearchServiceHelper(longTailEntityRegistry, longTailEndexBuilders, longTailIndexConvention); - } - - protected ElasticSearchService entitySearchServiceHelper( - EntityRegistry entityRegistry, - EntityIndexBuilders indexBuilders, - IndexConvention indexConvention - ) throws IOException { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - 
customConfiguration.setFile("search_config_fixture_test.yml"); - CustomSearchConfiguration customSearchConfiguration = customConfiguration.resolve(new YAMLMapper()); - - ESSearchDAO searchDAO = new ESSearchDAO(entityRegistry, _searchClient, indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, _searchConfiguration, customSearchConfiguration); - ESBrowseDAO browseDAO = new ESBrowseDAO(entityRegistry, _searchClient, indexConvention, _searchConfiguration, _customSearchConfiguration); - ESWriteDAO writeDAO = new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); - return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); - } - - @Bean(name = "sampleDataSearchService") - @Nonnull - protected SearchService searchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("sampleDataPrefix") String prefix, - @Qualifier("sampleDataFixtureName") String sampleDataFixtureName - ) throws IOException { - return searchServiceHelper(entityRegistry, entitySearchService, indexBuilders, prefix, sampleDataFixtureName); - } - - @Bean(name = "longTailSearchService") - @Nonnull - protected SearchService longTailSearchService( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailEntitySearchService") ElasticSearchService longTailEntitySearchService, - @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, - @Qualifier("longTailPrefix") String longTailPrefix, - @Qualifier("longTailFixtureName") String longTailFixtureName - ) throws IOException { - return searchServiceHelper(longTailEntityRegistry, longTailEntitySearchService, longTailIndexBuilders, longTailPrefix, longTailFixtureName); - } - - public SearchService searchServiceHelper( - EntityRegistry entityRegistry, - ElasticSearchService entitySearchService, - EntityIndexBuilders indexBuilders, - String prefix, - String fixtureName - ) throws IOException { - int batchSize = 100; - SearchRanker<Double> ranker = new SimpleRanker(); - CacheManager cacheManager = new ConcurrentMapCacheManager(); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); - entityDocCountCacheConfiguration.setTtlSeconds(600L); - - SearchService service = new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, entityDocCountCacheConfiguration), - new CachingEntitySearchService( - cacheManager, - entitySearchService, - batchSize, - false - ), - ranker - ); - - // Build indices & write fixture data - indexBuilders.reindexAll(); - - FixtureReader.builder() - .bulkProcessor(_bulkProcessor) - .fixtureName(fixtureName) - .targetIndexPrefix(prefix) - .refreshIntervalSeconds(REFRESH_INTERVAL_SECONDS) - .build() - .read(); - - return service; - } - - @Bean(name = "sampleDataEntityClient") - @Nonnull - protected EntityClient entityClient( - @Qualifier("sampleDataSearchService") SearchService searchService, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry entityRegistry - ) { - return entityClientHelper(searchService, entitySearchService, entityRegistry); - } - - @Bean(name = "longTailEntityClient") - @Nonnull - protected EntityClient longTailEntityClient( - @Qualifier("sampleDataSearchService") SearchService 
searchService, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry - ) { - return entityClientHelper(searchService, entitySearchService, longTailEntityRegistry); - } - - private EntityClient entityClientHelper( - SearchService searchService, - ElasticSearchService entitySearchService, - EntityRegistry entityRegistry - ) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - new ConcurrentMapCacheManager(), - entitySearchService, - 1, - false); - - AspectDao mockAspectDao = mock(AspectDao.class); - when(mockAspectDao.batchGet(anySet())).thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); - - PreProcessHooks preProcessHooks = new PreProcessHooks(); - preProcessHooks.setUiEnabled(true); - return new JavaEntityClient( - new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, null, - preProcessHooks), - null, - entitySearchService, - cachingEntitySearchService, - searchService, - null, - null, - null, - null); - } + /** + * Interested in adding more fixtures? Here's what you will need to update: 1. Create a new + * indexPrefix and FixtureName. Both are needed, or else all fixtures will load on top of each + * other, overwriting each other. 2. Create a new IndexConvention, IndexBuilder, and EntityClient. + * These are needed to index a different set of entities. + */ + @Autowired private ESBulkProcessor _bulkProcessor; + + @Autowired private RestHighLevelClient _searchClient; + + @Autowired private RestHighLevelClient _longTailSearchClient; + + @Autowired private SearchConfiguration _searchConfiguration; + + @Autowired private CustomSearchConfiguration _customSearchConfiguration; + + @Bean(name = "sampleDataPrefix") + protected String sampleDataPrefix() { + return "smpldat"; + } + + @Bean(name = "longTailPrefix") + protected String longTailIndexPrefix() { + return "lngtl"; + } + + @Bean(name = "sampleDataIndexConvention") + protected IndexConvention indexConvention(@Qualifier("sampleDataPrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "longTailIndexConvention") + protected IndexConvention longTailIndexConvention(@Qualifier("longTailPrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "sampleDataFixtureName") + protected String sampleDataFixtureName() { + return "sample_data"; + } + + @Bean(name = "longTailFixtureName") + protected String longTailFixtureName() { + return "long_tail"; + } + + @Bean(name = "sampleDataEntityIndexBuilders") + protected EntityIndexBuilders entityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention) { + return entityIndexBuildersHelper(entityRegistry, indexConvention); + } + + @Bean(name = "longTailEntityIndexBuilders") + protected EntityIndexBuilders longTailEntityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailIndexConvention") IndexConvention indexConvention) { + return entityIndexBuildersHelper(longTailEntityRegistry, indexConvention); + } + + protected EntityIndexBuilders entityIndexBuildersHelper( + EntityRegistry entityRegistry, IndexConvention indexConvention) { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder indexBuilder = + new ESIndexBuilder( + _searchClient, + 1, + 0, + 1, + 1, + Map.of(), + true, +
false, + new ElasticSearchConfiguration(), + gitVersion); + SettingsBuilder settingsBuilder = new SettingsBuilder(null); + return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); + } + + @Bean(name = "sampleDataEntitySearchService") + protected ElasticSearchService entitySearchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention) + throws IOException { + return entitySearchServiceHelper(entityRegistry, indexBuilders, indexConvention); + } + + @Bean(name = "longTailEntitySearchService") + protected ElasticSearchService longTailEntitySearchService( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, + @Qualifier("longTailIndexConvention") IndexConvention longTailIndexConvention) + throws IOException { + return entitySearchServiceHelper( + longTailEntityRegistry, longTailIndexBuilders, longTailIndexConvention); + } + + protected ElasticSearchService entitySearchServiceHelper( + EntityRegistry entityRegistry, + EntityIndexBuilders indexBuilders, + IndexConvention indexConvention) + throws IOException { + CustomConfiguration customConfiguration = new CustomConfiguration(); + customConfiguration.setEnabled(true); + customConfiguration.setFile("search_config_fixture_test.yml"); + CustomSearchConfiguration customSearchConfiguration = + customConfiguration.resolve(new YAMLMapper()); + + ESSearchDAO searchDAO = + new ESSearchDAO( + entityRegistry, + _searchClient, + indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + _searchConfiguration, + customSearchConfiguration); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + entityRegistry, + _searchClient, + indexConvention, + _searchConfiguration, + _customSearchConfiguration); + ESWriteDAO writeDAO = + new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); + return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); + } + + @Bean(name = "sampleDataSearchService") + @Nonnull + protected SearchService searchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("sampleDataPrefix") String prefix, + @Qualifier("sampleDataFixtureName") String sampleDataFixtureName) + throws IOException { + return searchServiceHelper( + entityRegistry, entitySearchService, indexBuilders, prefix, sampleDataFixtureName); + } + + @Bean(name = "longTailSearchService") + @Nonnull + protected SearchService longTailSearchService( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailEntitySearchService") ElasticSearchService longTailEntitySearchService, + @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, + @Qualifier("longTailPrefix") String longTailPrefix, + @Qualifier("longTailFixtureName") String longTailFixtureName) + throws IOException { + return searchServiceHelper( + longTailEntityRegistry, + longTailEntitySearchService, + longTailIndexBuilders, + longTailPrefix, + longTailFixtureName); + } + + public SearchService searchServiceHelper( + EntityRegistry entityRegistry, + ElasticSearchService entitySearchService, + EntityIndexBuilders
indexBuilders, + String prefix, + String fixtureName) + throws IOException { + int batchSize = 100; + SearchRanker<Double> ranker = new SimpleRanker(); + CacheManager cacheManager = new ConcurrentMapCacheManager(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); + entityDocCountCacheConfiguration.setTtlSeconds(600L); + + SearchService service = + new SearchService( + new EntityDocCountCache( + entityRegistry, entitySearchService, entityDocCountCacheConfiguration), + new CachingEntitySearchService(cacheManager, entitySearchService, batchSize, false), + ranker); + + // Build indices & write fixture data + indexBuilders.reindexAll(); + + FixtureReader.builder() + .bulkProcessor(_bulkProcessor) + .fixtureName(fixtureName) + .targetIndexPrefix(prefix) + .refreshIntervalSeconds(REFRESH_INTERVAL_SECONDS) + .build() + .read(); + + return service; + } + + @Bean(name = "sampleDataEntityClient") + @Nonnull + protected EntityClient entityClient( + @Qualifier("sampleDataSearchService") SearchService searchService, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry entityRegistry) { + return entityClientHelper(searchService, entitySearchService, entityRegistry); + } + + @Bean(name = "longTailEntityClient") + @Nonnull + protected EntityClient longTailEntityClient( + @Qualifier("sampleDataSearchService") SearchService searchService, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry) { + return entityClientHelper(searchService, entitySearchService, longTailEntityRegistry); + } + + private EntityClient entityClientHelper( + SearchService searchService, + ElasticSearchService entitySearchService, + EntityRegistry entityRegistry) { + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService( + new ConcurrentMapCacheManager(), entitySearchService, 1, false); + + AspectDao mockAspectDao = mock(AspectDao.class); + when(mockAspectDao.batchGet(anySet())) + .thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); + + PreProcessHooks preProcessHooks = new PreProcessHooks(); + preProcessHooks.setUiEnabled(true); + return new JavaEntityClient( + new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, null, preProcessHooks), + null, + entitySearchService, + cachingEntitySearchService, + searchService, + null, + null, + null, + null); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java index d74dd041f082e..d3b16b2beed3d 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java @@ -1,13 +1,18 @@ package io.datahubproject.test.fixtures.search; +import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; +import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; -import io.datahubproject.test.search.ElasticsearchTestContainer; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import 
com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import io.datahubproject.test.models.DatasetAnonymized; +import io.datahubproject.test.search.ElasticsearchTestContainer; import io.datahubproject.test.search.SearchTestUtils; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import java.io.IOException; +import java.util.Set; import org.opensearch.client.RestHighLevelClient; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; @@ -15,125 +20,127 @@ import org.testng.annotations.Ignore; import org.testng.annotations.Test; -import java.io.IOException; -import java.util.Set; - -import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; -import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; - -/** - * This class is used for extracting and moving search fixture data. - */ +/** This class is used for extracting and moving search fixture data. */ @TestConfiguration public class SearchFixtureUtils { - final public static String FIXTURE_BASE = "src/test/resources/elasticsearch"; - - final public static ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - - @Bean(name = "testSearchContainer") - public GenericContainer<?> testSearchContainer() { - return new ElasticsearchTestContainer().startContainer(); + public static final String FIXTURE_BASE = "src/test/resources/elasticsearch"; + + public static final ObjectMapper OBJECT_MAPPER = + new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + @Bean(name = "testSearchContainer") + public GenericContainer<?> testSearchContainer() { + return new ElasticsearchTestContainer().startContainer(); + } + + @Test + @Ignore("Fixture capture lineage") + /* + * Run this to capture test fixtures; repeat for graph & dataset + * 1. Configure anonymizer class (use builder or set to null). Do not commit non-anonymous data + * 2. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) + * 3. Update fixture name + * 4. Comment @Ignore + * 5. Create output directory + * 6.
Run extraction + **/ + private void extractSearchLineageTestFixture() throws IOException { + String rootUrn = + "urn:li:dataset:(urn:li:dataPlatform:teradata,teradata.simba.pp_bi_tables.tmis_daily_metrics_final_agg,PROD)"; + + // Set.of("system_metadata_service_v1", "datasetindex_v2", "graph_service_v1") + try (RestHighLevelClient client = + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { + FixtureWriter fixtureWriter = FixtureWriter.builder().client(client).build(); + + /* + LineageExporter<GraphAnonymized> exporter = LineageExporter.<GraphAnonymized>builder() + .writer(fixtureWriter) + .anonymizerClazz(GraphAnonymized.class) + .graphIndexName("<namespace>_graph_service_v1-5shards") + .graphOutputPath(String.format("%s/%s.json", "search_lineage2", "graph_service_v1")) + .build(); + */ + + LineageExporter<DatasetAnonymized> exporter = + LineageExporter.<DatasetAnonymized>builder() + .writer(fixtureWriter) + .anonymizerClazz(DatasetAnonymized.class) + .entityIndexName("<namespace>_datasetindex_v2-5shards") + .entityOutputPath(String.format("%s/%s.json", "search_lineage2", "datasetindex_v2")) + .build(); + + exporter.export(Set.of(rootUrn)); } - - @Test - @Ignore("Fixture capture lineage") - /* - * Run this to capture test fixtures, repeat for graph & dataset - * 1. Configure anonymizer class (use builder or set to null) Do not commit non-anonymous data - * 2. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) - * 2. Update fixture name - * 3. Comment @Ignore - * 4. Create output directory - * 5. Run extraction - **/ - private void extractSearchLineageTestFixture() throws IOException { - String rootUrn = "urn:li:dataset:(urn:li:dataPlatform:teradata,teradata.simba.pp_bi_tables.tmis_daily_metrics_final_agg,PROD)"; - - // Set.of("system_metadata_service_v1", "datasetindex_v2", "graph_service_v1") - try (RestHighLevelClient client = new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { - FixtureWriter fixtureWriter = FixtureWriter.builder() - .client(client) - .build(); - - /* - LineageExporter<GraphAnonymized> exporter = LineageExporter.<GraphAnonymized>builder() - .writer(fixtureWriter) - .anonymizerClazz(GraphAnonymized.class) - .graphIndexName("<namespace>_graph_service_v1-5shards") - .graphOutputPath(String.format("%s/%s.json", "search_lineage2", "graph_service_v1")) - .build(); - */ - - LineageExporter<DatasetAnonymized> exporter = LineageExporter.<DatasetAnonymized>builder() - .writer(fixtureWriter) - .anonymizerClazz(DatasetAnonymized.class) - .entityIndexName("<namespace>_datasetindex_v2-5shards") - .entityOutputPath(String.format("%s/%s.json", "search_lineage2", "datasetindex_v2")) - .build(); - - exporter.export(Set.of(rootUrn)); - } - } - - @Test - @Ignore("Fixture capture logic") - /* - * Run this to capture test fixtures - * 1. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) - * 2. Update fixture name - * 3. Comment @Ignore - * 4. 
Run extraction - **/ - private void extractEntityTestFixture() throws IOException { - String fixtureName = "temp"; - String prefix = ""; - String commonSuffix = "index_v2"; - - try (RestHighLevelClient client = new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { - FixtureWriter fixtureWriter = FixtureWriter.builder() - .client(client) - .build(); - - EntityExporter exporter = EntityExporter.builder() - .client(client) - .writer(fixtureWriter) - .fixtureName(fixtureName) - .sourceIndexSuffix(commonSuffix) - .sourceIndexPrefix(prefix) - .build(); - - exporter.export(); - } - } - - @Test - @Ignore("Write capture logic to some external ES cluster for testing") - /* - * Can be used to write fixture data to external ES cluster - * 1. Set environment variables - * 2. Update fixture name and prefix - * 3. Uncomment and run test - */ - private void reindexTestFixtureData() throws IOException { - ESBulkProcessor bulkProcessor = ESBulkProcessor.builder(new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) - .async(true) - .bulkRequestsLimit(1000) - .retryInterval(1L) - .numRetries(2) - .build(); - - FixtureReader reader = FixtureReader.builder() - .bulkProcessor(bulkProcessor) - .fixtureName("long_tail") - .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) - .build(); - - reader.read(); + } + + @Test + @Ignore("Fixture capture logic") + /* + * Run this to capture test fixtures + * 1. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) + * 2. Update fixture name + * 3. Comment @Ignore + * 4. Run extraction + **/ + private void extractEntityTestFixture() throws IOException { + String fixtureName = "temp"; + String prefix = ""; + String commonSuffix = "index_v2"; + + try (RestHighLevelClient client = + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { + FixtureWriter fixtureWriter = FixtureWriter.builder().client(client).build(); + + EntityExporter exporter = + EntityExporter.builder() + .client(client) + .writer(fixtureWriter) + .fixtureName(fixtureName) + .sourceIndexSuffix(commonSuffix) + .sourceIndexPrefix(prefix) + .build(); + + exporter.export(); } + } + + @Test + @Ignore("Write capture logic to some external ES cluster for testing") + /* + * Can be used to write fixture data to external ES cluster + * 1. Set environment variables + * 2. Update fixture name and prefix + * 3. 
Uncomment and run test + */ + private void reindexTestFixtureData() throws IOException { + ESBulkProcessor bulkProcessor = + ESBulkProcessor.builder( + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) + .async(true) + .bulkRequestsLimit(1000) + .retryInterval(1L) + .numRetries(2) + .build(); + + FixtureReader reader = + FixtureReader.builder() + .bulkProcessor(bulkProcessor) + .fixtureName("long_tail") + .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) + .build(); + + reader.read(); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java index 93d3f108d9e47..978471b53faad 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java @@ -1,7 +1,9 @@ package io.datahubproject.test.fixtures.search; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration; @@ -9,8 +11,6 @@ import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.graph.elastic.ESGraphQueryDAO; import com.linkedin.metadata.graph.elastic.ESGraphWriteDAO; @@ -34,9 +34,12 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; - +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import java.io.IOException; +import java.util.Map; import java.util.Optional; - +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -46,180 +49,199 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Map; - -import static com.linkedin.metadata.Constants.*; - - @TestConfiguration @Import(SearchCommonTestConfiguration.class) public class SearchLineageFixtureConfiguration { - @Autowired - private ESBulkProcessor _bulkProcessor; - - @Autowired - private RestHighLevelClient _searchClient; - - @Autowired - private SearchConfiguration _searchConfiguration; - - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; - - @Bean(name = "searchLineagePrefix") - protected String indexPrefix() { - return "srchlin"; - } - - @Bean(name = 
"searchLineageIndexConvention") - protected IndexConvention indexConvention(@Qualifier("searchLineagePrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "searchLineageFixtureName") - protected String fixtureName() { - return "search_lineage"; - } - - @Bean(name = "lineageCacheConfiguration") - protected SearchLineageCacheConfiguration searchLineageCacheConfiguration() { - SearchLineageCacheConfiguration conf = new SearchLineageCacheConfiguration(); - conf.setLightningThreshold(300); - conf.setTtlSeconds(30); - return conf; - } - - @Bean(name = "searchLineageEntityIndexBuilders") - protected EntityIndexBuilders entityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder indexBuilder = new ESIndexBuilder(_searchClient, 1, 0, 1, - 1, Map.of(), true, false, - new ElasticSearchConfiguration(), gitVersion); - SettingsBuilder settingsBuilder = new SettingsBuilder(null); - return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); - } - - @Bean(name = "searchLineageEntitySearchService") - protected ElasticSearchService entitySearchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - ESSearchDAO searchDAO = new ESSearchDAO(entityRegistry, _searchClient, indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, _searchConfiguration, null); - ESBrowseDAO browseDAO = new ESBrowseDAO(entityRegistry, _searchClient, indexConvention, _searchConfiguration, _customSearchConfiguration); - ESWriteDAO writeDAO = new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); - return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); - } - - @Bean(name = "searchLineageESIndexBuilder") - @Nonnull - protected ESIndexBuilder esIndexBuilder() { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - return new ESIndexBuilder(_searchClient, 1, 1, 1, 1, Map.of(), - true, true, - new ElasticSearchConfiguration(), gitVersion); - } - - @Bean(name = "searchLineageGraphService") - @Nonnull - protected ElasticSearchGraphService graphService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageESIndexBuilder") ESIndexBuilder indexBuilder, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); - ElasticSearchGraphService graphService = new ElasticSearchGraphService(lineageRegistry, _bulkProcessor, indexConvention, - new ESGraphWriteDAO(indexConvention, _bulkProcessor, 1), - new ESGraphQueryDAO(_searchClient, lineageRegistry, indexConvention, GraphQueryConfiguration.testDefaults), indexBuilder); - graphService.configure(); - return graphService; - } - - @Bean(name = "searchLineageLineageSearchService") - @Nonnull - protected LineageSearchService lineageSearchService( - @Qualifier("searchLineageSearchService") SearchService searchService, - @Qualifier("searchLineageGraphService") ElasticSearchGraphService graphService, - @Qualifier("searchLineagePrefix") String prefix, - @Qualifier("searchLineageFixtureName") String fixtureName, - 
@Qualifier("lineageCacheConfiguration") SearchLineageCacheConfiguration cacheConfiguration - ) throws IOException { - - // Load fixture data (after graphService mappings applied) - FixtureReader.builder() - .bulkProcessor(_bulkProcessor) - .fixtureName(fixtureName) - .targetIndexPrefix(prefix) - .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) - .build() - .read(); - - return new LineageSearchService(searchService, graphService, null, false, cacheConfiguration); - } - - @Bean(name = "searchLineageSearchService") - @Nonnull - protected SearchService searchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders - ) throws IOException { - - int batchSize = 100; - SearchRanker<Double> ranker = new SimpleRanker(); - CacheManager cacheManager = new ConcurrentMapCacheManager(); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); - entityDocCountCacheConfiguration.setTtlSeconds(600L); - - SearchService service = new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, entityDocCountCacheConfiguration), - new CachingEntitySearchService( - cacheManager, - entitySearchService, - batchSize, - false - ), - ranker - ); - - // Build indices - indexBuilders.reindexAll(); - - return service; - } - - @Bean(name = "searchLineageEntityClient") - @Nonnull - protected EntityClient entityClient( - @Qualifier("searchLineageSearchService") SearchService searchService, - @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry entityRegistry - ) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - new ConcurrentMapCacheManager(), - entitySearchService, - 1, - false); - - PreProcessHooks preProcessHooks = new PreProcessHooks(); - preProcessHooks.setUiEnabled(true); - return new JavaEntityClient( - new EntityServiceImpl(null, null, entityRegistry, true, null, - preProcessHooks), - null, - entitySearchService, - cachingEntitySearchService, - searchService, - null, - null, - null, - null); - } + @Autowired private ESBulkProcessor _bulkProcessor; + + @Autowired private RestHighLevelClient _searchClient; + + @Autowired private SearchConfiguration _searchConfiguration; + + @Autowired private CustomSearchConfiguration _customSearchConfiguration; + + @Bean(name = "searchLineagePrefix") + protected String indexPrefix() { + return "srchlin"; + } + + @Bean(name = "searchLineageIndexConvention") + protected IndexConvention indexConvention(@Qualifier("searchLineagePrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "searchLineageFixtureName") + protected String fixtureName() { + return "search_lineage"; + } + + @Bean(name = "lineageCacheConfiguration") + protected SearchLineageCacheConfiguration searchLineageCacheConfiguration() { + SearchLineageCacheConfiguration conf = new SearchLineageCacheConfiguration(); + conf.setLightningThreshold(300); + conf.setTtlSeconds(30); + return conf; + } + + @Bean(name = "searchLineageEntityIndexBuilders") + protected EntityIndexBuilders entityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + GitVersion gitVersion = new 
GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder indexBuilder = + new ESIndexBuilder( + _searchClient, + 1, + 0, + 1, + 1, + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion); + SettingsBuilder settingsBuilder = new SettingsBuilder(null); + return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); + } + + @Bean(name = "searchLineageEntitySearchService") + protected ElasticSearchService entitySearchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + ESSearchDAO searchDAO = + new ESSearchDAO( + entityRegistry, + _searchClient, + indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + _searchConfiguration, + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + entityRegistry, + _searchClient, + indexConvention, + _searchConfiguration, + _customSearchConfiguration); + ESWriteDAO writeDAO = + new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); + return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); + } + + @Bean(name = "searchLineageESIndexBuilder") + @Nonnull + protected ESIndexBuilder esIndexBuilder() { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + return new ESIndexBuilder( + _searchClient, + 1, + 1, + 1, + 1, + Map.of(), + true, + true, + new ElasticSearchConfiguration(), + gitVersion); + } + + @Bean(name = "searchLineageGraphService") + @Nonnull + protected ElasticSearchGraphService graphService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageESIndexBuilder") ESIndexBuilder indexBuilder, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); + ElasticSearchGraphService graphService = + new ElasticSearchGraphService( + lineageRegistry, + _bulkProcessor, + indexConvention, + new ESGraphWriteDAO(indexConvention, _bulkProcessor, 1), + new ESGraphQueryDAO( + _searchClient, + lineageRegistry, + indexConvention, + GraphQueryConfiguration.testDefaults), + indexBuilder); + graphService.configure(); + return graphService; + } + + @Bean(name = "searchLineageLineageSearchService") + @Nonnull + protected LineageSearchService lineageSearchService( + @Qualifier("searchLineageSearchService") SearchService searchService, + @Qualifier("searchLineageGraphService") ElasticSearchGraphService graphService, + @Qualifier("searchLineagePrefix") String prefix, + @Qualifier("searchLineageFixtureName") String fixtureName, + @Qualifier("lineageCacheConfiguration") SearchLineageCacheConfiguration cacheConfiguration) + throws IOException { + + // Load fixture data (after graphService mappings applied) + FixtureReader.builder() + .bulkProcessor(_bulkProcessor) + .fixtureName(fixtureName) + .targetIndexPrefix(prefix) + .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) + .build() + .read(); + + return new LineageSearchService(searchService, graphService, null, false, cacheConfiguration); + } + + @Bean(name = "searchLineageSearchService") + @Nonnull + protected SearchService searchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, + 
@Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders) + throws IOException { + + int batchSize = 100; + SearchRanker<Double> ranker = new SimpleRanker(); + CacheManager cacheManager = new ConcurrentMapCacheManager(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); + entityDocCountCacheConfiguration.setTtlSeconds(600L); + + SearchService service = + new SearchService( + new EntityDocCountCache( + entityRegistry, entitySearchService, entityDocCountCacheConfiguration), + new CachingEntitySearchService(cacheManager, entitySearchService, batchSize, false), + ranker); + + // Build indices + indexBuilders.reindexAll(); + + return service; + } + + @Bean(name = "searchLineageEntityClient") + @Nonnull + protected EntityClient entityClient( + @Qualifier("searchLineageSearchService") SearchService searchService, + @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry entityRegistry) { + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService( + new ConcurrentMapCacheManager(), entitySearchService, 1, false); + + PreProcessHooks preProcessHooks = new PreProcessHooks(); + preProcessHooks.setUiEnabled(true); + return new JavaEntityClient( + new EntityServiceImpl(null, null, entityRegistry, true, null, preProcessHooks), + null, + entitySearchService, + cachingEntitySearchService, + searchService, + null, + null, + null, + null); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java index 6036473063059..3b68ef50be18f 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java @@ -1,53 +1,48 @@ package io.datahubproject.test.models; import com.fasterxml.jackson.annotation.JsonSetter; - import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.regex.Matcher; import java.util.regex.Pattern; - - import org.apache.commons.codec.binary.Hex; public abstract class Anonymized { - public String urn; - - @JsonSetter - public void setUrn(String urn) { - this.urn = anonymizeUrn(urn); - } - - private static final Pattern URN_REGEX = Pattern.compile("^(.+)[(](.+),(.+),([A-Z]+)[)]$"); - - public static String anonymizeUrn(String urn) { - if (urn != null) { - Matcher m = URN_REGEX.matcher(urn); - if (m.find()) { - return String.format("%s(%s,%s,%s)", - m.group(1), - anonymizeLast(m.group(2), ":"), - hashFunction(m.group(3)), - m.group(4)); - } - } - return urn; + public String urn; + + @JsonSetter + public void setUrn(String urn) { + this.urn = anonymizeUrn(urn); + } + + private static final Pattern URN_REGEX = Pattern.compile("^(.+)[(](.+),(.+),([A-Z]+)[)]$"); + + public static String anonymizeUrn(String urn) { + if (urn != null) { + Matcher m = URN_REGEX.matcher(urn); + if (m.find()) { + return String.format( + "%s(%s,%s,%s)", + m.group(1), anonymizeLast(m.group(2), ":"), hashFunction(m.group(3)), m.group(4)); + } } - - protected static String anonymizeLast(String s, String sep) { - String[] splits = s.split(sep); - splits[splits.length - 1] = hashFunction(splits[splits.length - 1]); - return String.join(sep, splits); - } - - protected static String hashFunction(String s) { - try { - MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); - 
messageDigest.update(s.getBytes()); - char[] hex = Hex.encodeHex(messageDigest.digest()); - return new String(hex).substring(0, Math.min(s.length() - 1, hex.length - 1)); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } + return urn; + } + + protected static String anonymizeLast(String s, String sep) { + String[] splits = s.split(sep); + splits[splits.length - 1] = hashFunction(splits[splits.length - 1]); + return String.join(sep, splits); + } + + protected static String hashFunction(String s) { + try { + MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); + messageDigest.update(s.getBytes()); + char[] hex = Hex.encodeHex(messageDigest.digest()); + return new String(hex).substring(0, Math.min(s.length() - 1, hex.length - 1)); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java index 35813d22067a6..c870b4682a6b8 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java @@ -1,7 +1,6 @@ package io.datahubproject.test.models; import com.fasterxml.jackson.annotation.JsonGetter; - import java.util.Arrays; import java.util.Optional; import java.util.Set; @@ -9,35 +8,38 @@ public class DatasetAnonymized extends Anonymized { - public Set<String> upstreams; - public String id; - public String origin; - public String platform; - public boolean removed; - public Set<String> browsePaths; - - @JsonGetter("id") - public String getId() { - return Optional.ofNullable(id).map(Anonymized::hashFunction).orElse(null); - } - - @JsonGetter("platform") - public String getPlatform() { - return Optional.ofNullable(platform).map(p -> Anonymized.anonymizeLast(p, ":")).orElse(null); - } - - @JsonGetter("upstreams") - public Set<String> getUpstreams() { - return Optional.ofNullable(upstreams).orElse(Set.of()).stream() - .map(Anonymized::anonymizeUrn).collect(Collectors.toSet()); - } - - @JsonGetter("browsePaths") - public Set<String> getBrowsePaths() { - return Optional.ofNullable(browsePaths).orElse(Set.of()).stream() - .map(p -> Arrays.stream(p.split("/")) - .map(Anonymized::hashFunction) - .collect(Collectors.joining("/")) - ).collect(Collectors.toSet()); - } + public Set<String> upstreams; + public String id; + public String origin; + public String platform; + public boolean removed; + public Set<String> browsePaths; + + @JsonGetter("id") + public String getId() { + return Optional.ofNullable(id).map(Anonymized::hashFunction).orElse(null); + } + + @JsonGetter("platform") + public String getPlatform() { + return Optional.ofNullable(platform).map(p -> Anonymized.anonymizeLast(p, ":")).orElse(null); + } + + @JsonGetter("upstreams") + public Set<String> getUpstreams() { + return Optional.ofNullable(upstreams).orElse(Set.of()).stream() + .map(Anonymized::anonymizeUrn) + .collect(Collectors.toSet()); + } + + @JsonGetter("browsePaths") + public Set<String> getBrowsePaths() { + return Optional.ofNullable(browsePaths).orElse(Set.of()).stream() + .map( + p -> + Arrays.stream(p.split("/")) + .map(Anonymized::hashFunction) + .collect(Collectors.joining("/"))) + .collect(Collectors.toSet()); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java 
index 3d2360ae04228..bbd95671ee95a 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java @@ -3,17 +3,17 @@ import com.fasterxml.jackson.annotation.JsonSetter; public class GraphAnonymized { - public GraphNode source; - public GraphNode destination; - public String relationshipType; + public GraphNode source; + public GraphNode destination; + public String relationshipType; - public static class GraphNode extends Anonymized { - public String urn; - public String entityType; + public static class GraphNode extends Anonymized { + public String urn; + public String entityType; - @JsonSetter("urn") - public void setUrn(String urn) { - this.urn = anonymizeUrn(urn); - } + @JsonSetter("urn") + public void setUrn(String urn) { + this.urn = anonymizeUrn(urn); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java index 233a667d078dd..4129a2f997dc8 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java @@ -1,42 +1,46 @@ package io.datahubproject.test.search; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; + import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; - public class ElasticsearchTestContainer implements SearchTestContainer { - private static final String ELASTIC_VERSION = "7.10.1"; - private static final String ELASTIC_IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch"; - private static final String ENV_ELASTIC_IMAGE_FULL_NAME = System.getenv("ELASTIC_IMAGE_FULL_NAME"); - private static final String ELASTIC_IMAGE_FULL_NAME = ENV_ELASTIC_IMAGE_FULL_NAME != null - ? ENV_ELASTIC_IMAGE_FULL_NAME : ELASTIC_IMAGE_NAME + ":" + ELASTIC_VERSION; - private static final DockerImageName DOCKER_IMAGE_NAME = DockerImageName.parse(ELASTIC_IMAGE_FULL_NAME) - .asCompatibleSubstituteFor(ELASTIC_IMAGE_NAME); - - protected static final GenericContainer<?> ES_CONTAINER; - private boolean isStarted = false; - - // A helper method to create an ElasticsearchContainer defaulting to the current image and version, with the ability - // within firewalled environments to override with an environment variable to point to the offline repository. - static { - ES_CONTAINER = new org.testcontainers.elasticsearch.ElasticsearchContainer(DOCKER_IMAGE_NAME); - checkContainerEngine(ES_CONTAINER.getDockerClient()); - ES_CONTAINER.withEnv("ES_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT); + private static final String ELASTIC_VERSION = "7.10.1"; + private static final String ELASTIC_IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch"; + private static final String ENV_ELASTIC_IMAGE_FULL_NAME = + System.getenv("ELASTIC_IMAGE_FULL_NAME"); + private static final String ELASTIC_IMAGE_FULL_NAME = + ENV_ELASTIC_IMAGE_FULL_NAME != null + ? 
ENV_ELASTIC_IMAGE_FULL_NAME + : ELASTIC_IMAGE_NAME + ":" + ELASTIC_VERSION; + private static final DockerImageName DOCKER_IMAGE_NAME = + DockerImageName.parse(ELASTIC_IMAGE_FULL_NAME).asCompatibleSubstituteFor(ELASTIC_IMAGE_NAME); + + protected static final GenericContainer<?> ES_CONTAINER; + private boolean isStarted = false; + + // A helper method to create an ElasticsearchContainer defaulting to the current image and + // version, with the ability + // within firewalled environments to override with an environment variable to point to the offline + // repository. + static { + ES_CONTAINER = new org.testcontainers.elasticsearch.ElasticsearchContainer(DOCKER_IMAGE_NAME); + checkContainerEngine(ES_CONTAINER.getDockerClient()); + ES_CONTAINER.withEnv("ES_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT); + } + + @Override + public GenericContainer<?> startContainer() { + if (!isStarted) { + ElasticsearchTestContainer.ES_CONTAINER.start(); + isStarted = true; } + return ES_CONTAINER; + } - @Override - public GenericContainer<?> startContainer() { - if (!isStarted) { - ElasticsearchTestContainer.ES_CONTAINER.start(); - isStarted = true; - } - return ES_CONTAINER; - } - - @Override - public void stopContainer() { - ES_CONTAINER.stop(); - } + @Override + public void stopContainer() { + ES_CONTAINER.stop(); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java index d94b88b466f89..739169b834a57 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java @@ -1,43 +1,50 @@ package io.datahubproject.test.search; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; + import org.opensearch.testcontainers.OpensearchContainer; import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; - public class OpenSearchTestContainer implements SearchTestContainer { - private static final String OPENSEARCH_VERSION = "2.9.0"; - private static final String OPENSEARCH_IMAGE_NAME = "opensearchproject/opensearch"; - private static final String ENV_OPENSEARCH_IMAGE_FULL_NAME = System.getenv("OPENSEARCH_IMAGE_FULL_NAME"); - private static final String OPENSEARCH_IMAGE_FULL_NAME = ENV_OPENSEARCH_IMAGE_FULL_NAME != null - ? ENV_OPENSEARCH_IMAGE_FULL_NAME : OPENSEARCH_IMAGE_NAME + ":" + OPENSEARCH_VERSION; - private static final DockerImageName DOCKER_IMAGE_NAME = DockerImageName.parse(OPENSEARCH_IMAGE_FULL_NAME) - .asCompatibleSubstituteFor(OPENSEARCH_IMAGE_NAME); - - protected static final GenericContainer<?> OS_CONTAINER; - private boolean isStarted = false; - - // A helper method to create an ElasticseachContainer defaulting to the current image and version, with the ability - // within firewalled environments to override with an environment variable to point to the offline repository. 
-  static {
-    OS_CONTAINER = new OpensearchContainer(DOCKER_IMAGE_NAME);
-    checkContainerEngine(OS_CONTAINER.getDockerClient());
-    OS_CONTAINER.withEnv("OPENSEARCH_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT);
+  private static final String OPENSEARCH_VERSION = "2.9.0";
+  private static final String OPENSEARCH_IMAGE_NAME = "opensearchproject/opensearch";
+  private static final String ENV_OPENSEARCH_IMAGE_FULL_NAME =
+      System.getenv("OPENSEARCH_IMAGE_FULL_NAME");
+  private static final String OPENSEARCH_IMAGE_FULL_NAME =
+      ENV_OPENSEARCH_IMAGE_FULL_NAME != null
+          ? ENV_OPENSEARCH_IMAGE_FULL_NAME
+          : OPENSEARCH_IMAGE_NAME + ":" + OPENSEARCH_VERSION;
+  private static final DockerImageName DOCKER_IMAGE_NAME =
+      DockerImageName.parse(OPENSEARCH_IMAGE_FULL_NAME)
+          .asCompatibleSubstituteFor(OPENSEARCH_IMAGE_NAME);
+
+  protected static final GenericContainer<?> OS_CONTAINER;
+  private boolean isStarted = false;
+
+  // A helper to create an OpensearchContainer defaulting to the current image and version,
+  // with the ability
+  // within firewalled environments to override with an environment variable to point to the offline
+  // repository.
+  static {
+    OS_CONTAINER = new OpensearchContainer(DOCKER_IMAGE_NAME);
+    checkContainerEngine(OS_CONTAINER.getDockerClient());
+    OS_CONTAINER
+        .withEnv("OPENSEARCH_JAVA_OPTS", SEARCH_JAVA_OPTS)
+        .withStartupTimeout(STARTUP_TIMEOUT);
+  }
+
+  @Override
+  public GenericContainer<?> startContainer() {
+    if (!isStarted) {
+      OS_CONTAINER.start();
+      isStarted = true;
    }
+    return OS_CONTAINER;
+  }
 
-  @Override
-  public GenericContainer<?> startContainer() {
-    if (!isStarted) {
-      OS_CONTAINER.start();
-      isStarted = true;
-    }
-    return OS_CONTAINER;
-  }
-
-  @Override
-  public void stopContainer() {
-    OS_CONTAINER.stop();
-  }
+  @Override
+  public void stopContainer() {
+    OS_CONTAINER.stop();
+  }
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
index 34aa6978f742f..cda6a4c179f48 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
@@ -1,16 +1,15 @@
 package io.datahubproject.test.search;
 
-import org.testcontainers.containers.GenericContainer;
-
 import java.time.Duration;
+import org.testcontainers.containers.GenericContainer;
 
 public interface SearchTestContainer {
-    String SEARCH_JAVA_OPTS = "-Xms446m -Xmx446m -XX:MaxDirectMemorySize=368435456";
+  String SEARCH_JAVA_OPTS = "-Xms446m -Xmx446m -XX:MaxDirectMemorySize=368435456";
 
-    Duration STARTUP_TIMEOUT = Duration.ofMinutes(5); // usually < 1min
+  Duration STARTUP_TIMEOUT = Duration.ofMinutes(5); // usually < 1min
 
-    GenericContainer<?> startContainer();
+  GenericContainer<?> startContainer();
 
-    void stopContainer();
+  void stopContainer();
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
index 414b9f927fada..58ea020e42565 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java
@@ -1,5 +1,8 @@
 package io.datahubproject.test.search;
 
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES;
+import static
com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; @@ -18,6 +21,11 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nullable; import org.apache.http.HttpHost; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; @@ -27,121 +35,174 @@ import org.opensearch.client.RestClient; import org.opensearch.client.RestClientBuilder; -import javax.annotation.Nullable; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; - public class SearchTestUtils { - private SearchTestUtils() { - } - - public static void syncAfterWrite(ESBulkProcessor bulkProcessor) throws InterruptedException { - bulkProcessor.flush(); - Thread.sleep(1000); - } - - public final static List<String> SEARCHABLE_ENTITIES; - static { - SEARCHABLE_ENTITIES = Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) - .map(EntityTypeMapper::getName) - .distinct() - .collect(Collectors.toList()); - } - - public static SearchResult searchAcrossEntities(SearchService searchService, String query) { - return searchAcrossEntities(searchService, query, null); - } - - public static SearchResult searchAcrossEntities(SearchService searchService, String query, @Nullable List<String> facets) { - return searchService.searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true), facets); - } - - public static SearchResult searchAcrossCustomEntities(SearchService searchService, String query, List<String> searchableEntities) { - return searchService.searchAcrossEntities(searchableEntities, query, null, null, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static SearchResult search(SearchService searchService, String query) { - return search(searchService, SEARCHABLE_ENTITIES, query); - } - - public static SearchResult search(SearchService searchService, List<String> entities, String query) { - return searchService.search(entities, query, null, null, 0, 100, - new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static ScrollResult scroll(SearchService searchService, String query, int batchSize, @Nullable String scrollId) { - return searchService.scrollAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, - scrollId, "3m", batchSize, new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static SearchResult searchStructured(SearchService searchService, String query) { - return searchService.searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, 0, - 100, new SearchFlags().setFulltext(false).setSkipCache(true)); - } - - public static LineageSearchResult lineage(LineageSearchService lineageSearchService, Urn root, int hops) { - String degree = hops >= 3 ? 
"3+" : String.valueOf(hops); - List<FacetFilterInput> filters = List.of(FacetFilterInput.builder() + private SearchTestUtils() {} + + public static void syncAfterWrite(ESBulkProcessor bulkProcessor) throws InterruptedException { + bulkProcessor.flush(); + Thread.sleep(1000); + } + + public static final List<String> SEARCHABLE_ENTITIES; + + static { + SEARCHABLE_ENTITIES = + Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + } + + public static SearchResult searchAcrossEntities(SearchService searchService, String query) { + return searchAcrossEntities(searchService, query, null); + } + + public static SearchResult searchAcrossEntities( + SearchService searchService, String query, @Nullable List<String> facets) { + return searchService.searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true), + facets); + } + + public static SearchResult searchAcrossCustomEntities( + SearchService searchService, String query, List<String> searchableEntities) { + return searchService.searchAcrossEntities( + searchableEntities, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static SearchResult search(SearchService searchService, String query) { + return search(searchService, SEARCHABLE_ENTITIES, query); + } + + public static SearchResult search( + SearchService searchService, List<String> entities, String query) { + return searchService.search( + entities, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static ScrollResult scroll( + SearchService searchService, String query, int batchSize, @Nullable String scrollId) { + return searchService.scrollAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + scrollId, + "3m", + batchSize, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static SearchResult searchStructured(SearchService searchService, String query) { + return searchService.searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(false).setSkipCache(true)); + } + + public static LineageSearchResult lineage( + LineageSearchService lineageSearchService, Urn root, int hops) { + String degree = hops >= 3 ? 
"3+" : String.valueOf(hops); + List<FacetFilterInput> filters = + List.of( + FacetFilterInput.builder() .setField("degree") .setCondition(FilterOperator.EQUAL) .setValues(List.of(degree)) .setNegated(false) .build()); - return lineageSearchService.searchAcrossLineage(root, LineageDirection.DOWNSTREAM, - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", hops, ResolverUtils.buildFilter(filters, List.of()), null, 0, 100, null, - null, new SearchFlags().setSkipCache(true)); - } - - public static AutoCompleteResults autocomplete(SearchableEntityType<?, String> searchableEntityType, String query) throws Exception { - return searchableEntityType.autoComplete(query, null, null, 100, new QueryContext() { - @Override - public boolean isAuthenticated() { - return true; - } - - @Override - public Authentication getAuthentication() { - return null; - } - - @Override - public Authorizer getAuthorizer() { - return null; - } + return lineageSearchService.searchAcrossLineage( + root, + LineageDirection.DOWNSTREAM, + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + hops, + ResolverUtils.buildFilter(filters, List.of()), + null, + 0, + 100, + null, + null, + new SearchFlags().setSkipCache(true)); + } + + public static AutoCompleteResults autocomplete( + SearchableEntityType<?, String> searchableEntityType, String query) throws Exception { + return searchableEntityType.autoComplete( + query, + null, + null, + 100, + new QueryContext() { + @Override + public boolean isAuthenticated() { + return true; + } + + @Override + public Authentication getAuthentication() { + return null; + } + + @Override + public Authorizer getAuthorizer() { + return null; + } }); - } - - public static RestClientBuilder environmentRestClientBuilder() { - Integer port = Integer.parseInt(Optional.ofNullable(System.getenv("ELASTICSEARCH_PORT")).orElse("9200")); - return RestClient.builder( - new HttpHost(Optional.ofNullable(System.getenv("ELASTICSEARCH_HOST")).orElse("localhost"), - port, port.equals(443) ? "https" : "http")) - .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient( - HttpAsyncClientBuilder httpClientBuilder) { - httpClientBuilder.disableAuthCaching(); - - if (System.getenv("ELASTICSEARCH_USERNAME") != null) { - final CredentialsProvider credentialsProvider = - new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials(System.getenv("ELASTICSEARCH_USERNAME"), - System.getenv("ELASTICSEARCH_PASSWORD"))); - httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); - } - - return httpClientBuilder; - } - }); - } + } + + public static RestClientBuilder environmentRestClientBuilder() { + Integer port = + Integer.parseInt(Optional.ofNullable(System.getenv("ELASTICSEARCH_PORT")).orElse("9200")); + return RestClient.builder( + new HttpHost( + Optional.ofNullable(System.getenv("ELASTICSEARCH_HOST")).orElse("localhost"), + port, + port.equals(443) ? 
"https" : "http")) + .setHttpClientConfigCallback( + new RestClientBuilder.HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + httpClientBuilder.disableAuthCaching(); + + if (System.getenv("ELASTICSEARCH_USERNAME") != null) { + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + AuthScope.ANY, + new UsernamePasswordCredentials( + System.getenv("ELASTICSEARCH_USERNAME"), + System.getenv("ELASTICSEARCH_PASSWORD"))); + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } + + return httpClientBuilder; + } + }); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java index 530d3f4d53625..17747d9ba1cc9 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java @@ -13,51 +13,50 @@ import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; -/** - * This is common configuration for search regardless of which - * test container implementation. - */ +/** This is common configuration for search regardless of which test container implementation. */ @TestConfiguration public class SearchCommonTestConfiguration { - @Bean - public SearchConfiguration searchConfiguration() { - SearchConfiguration searchConfiguration = new SearchConfiguration(); - searchConfiguration.setMaxTermBucketSize(20); - - ExactMatchConfiguration exactMatchConfiguration = new ExactMatchConfiguration(); - exactMatchConfiguration.setExclusive(false); - exactMatchConfiguration.setExactFactor(10.0f); - exactMatchConfiguration.setWithPrefix(true); - exactMatchConfiguration.setPrefixFactor(6.0f); - exactMatchConfiguration.setCaseSensitivityFactor(0.7f); - exactMatchConfiguration.setEnableStructured(true); - - WordGramConfiguration wordGramConfiguration = new WordGramConfiguration(); - wordGramConfiguration.setTwoGramFactor(1.2f); - wordGramConfiguration.setThreeGramFactor(1.5f); - wordGramConfiguration.setFourGramFactor(1.8f); - - PartialConfiguration partialConfiguration = new PartialConfiguration(); - partialConfiguration.setFactor(0.4f); - partialConfiguration.setUrnFactor(0.5f); - - searchConfiguration.setExactMatch(exactMatchConfiguration); - searchConfiguration.setWordGram(wordGramConfiguration); - searchConfiguration.setPartial(partialConfiguration); - return searchConfiguration; - } - - @Bean - public CustomSearchConfiguration customSearchConfiguration() throws Exception { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - customConfiguration.setFile("search_config_builder_test.yml"); - return customConfiguration.resolve(new YAMLMapper()); - } - - @Bean(name = "entityRegistry") - public EntityRegistry entityRegistry() throws EntityRegistryException { - return new ConfigEntityRegistry( - SearchCommonTestConfiguration.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - } + @Bean + public SearchConfiguration searchConfiguration() { + SearchConfiguration searchConfiguration = new SearchConfiguration(); + searchConfiguration.setMaxTermBucketSize(20); + + ExactMatchConfiguration exactMatchConfiguration = new 
ExactMatchConfiguration();
+    exactMatchConfiguration.setExclusive(false);
+    exactMatchConfiguration.setExactFactor(10.0f);
+    exactMatchConfiguration.setWithPrefix(true);
+    exactMatchConfiguration.setPrefixFactor(6.0f);
+    exactMatchConfiguration.setCaseSensitivityFactor(0.7f);
+    exactMatchConfiguration.setEnableStructured(true);
+
+    WordGramConfiguration wordGramConfiguration = new WordGramConfiguration();
+    wordGramConfiguration.setTwoGramFactor(1.2f);
+    wordGramConfiguration.setThreeGramFactor(1.5f);
+    wordGramConfiguration.setFourGramFactor(1.8f);
+
+    PartialConfiguration partialConfiguration = new PartialConfiguration();
+    partialConfiguration.setFactor(0.4f);
+    partialConfiguration.setUrnFactor(0.5f);
+
+    searchConfiguration.setExactMatch(exactMatchConfiguration);
+    searchConfiguration.setWordGram(wordGramConfiguration);
+    searchConfiguration.setPartial(partialConfiguration);
+    return searchConfiguration;
+  }
+
+  @Bean
+  public CustomSearchConfiguration customSearchConfiguration() throws Exception {
+    CustomConfiguration customConfiguration = new CustomConfiguration();
+    customConfiguration.setEnabled(true);
+    customConfiguration.setFile("search_config_builder_test.yml");
+    return customConfiguration.resolve(new YAMLMapper());
+  }
+
+  @Bean(name = "entityRegistry")
+  public EntityRegistry entityRegistry() throws EntityRegistryException {
+    return new ConfigEntityRegistry(
+        SearchCommonTestConfiguration.class
+            .getClassLoader()
+            .getResourceAsStream("entity-registry.yml"));
+  }
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
index 2cfa9f9187825..0ddfd77399325 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
@@ -4,8 +4,9 @@
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import com.linkedin.metadata.version.GitVersion;
+import java.util.Map;
 import java.util.Optional;
-
+import javax.annotation.Nonnull;
 import org.apache.http.HttpHost;
 import org.apache.http.impl.nio.reactor.IOReactorConfig;
 import org.opensearch.action.support.WriteRequest;
@@ -18,71 +19,81 @@
 import org.springframework.context.annotation.Primary;
 import org.testcontainers.containers.GenericContainer;
 
-import javax.annotation.Nonnull;
-
-import java.util.Map;
-
-
 /**
- * This configuration is for `test containers` it builds these objects tied to
- * the test container instantiated for tests. Could be ES or OpenSearch, etc.
+ * This configuration is for `test containers`; it builds these objects tied to the test container
+ * instantiated for tests. Could be ES or OpenSearch, etc.
  *
- * Does your test required a running instance? If no, {@link io.datahubproject.test.search.config.SearchCommonTestConfiguration} instead.
+ * <p>Does your test require a running instance? If not, use {@link
+ * io.datahubproject.test.search.config.SearchCommonTestConfiguration} instead.
*/ @TestConfiguration public class SearchTestContainerConfiguration { - // This port is overridden by the specific test container instance - private static final int HTTP_PORT = 9200; - public static final int REFRESH_INTERVAL_SECONDS = 5; + // This port is overridden by the specific test container instance + private static final int HTTP_PORT = 9200; + public static final int REFRESH_INTERVAL_SECONDS = 5; - @Primary - @Bean(name = "searchRestHighLevelClient") - @Nonnull - public RestHighLevelClient getElasticsearchClient(@Qualifier("testSearchContainer") GenericContainer<?> searchContainer) { - // A helper method to create a search test container defaulting to the current image and version, with the ability - // within firewalled environments to override with an environment variable to point to the offline repository. - // A helper method to construct a standard rest client for search. - final RestClientBuilder builder = - RestClient.builder(new HttpHost( - "localhost", - searchContainer.getMappedPort(HTTP_PORT), "http") - ).setHttpClientConfigCallback(httpAsyncClientBuilder -> - httpAsyncClientBuilder.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build())); + @Primary + @Bean(name = "searchRestHighLevelClient") + @Nonnull + public RestHighLevelClient getElasticsearchClient( + @Qualifier("testSearchContainer") GenericContainer<?> searchContainer) { + // A helper method to create a search test container defaulting to the current image and + // version, with the ability + // within firewalled environments to override with an environment variable to point to the + // offline repository. + // A helper method to construct a standard rest client for search. + final RestClientBuilder builder = + RestClient.builder( + new HttpHost("localhost", searchContainer.getMappedPort(HTTP_PORT), "http")) + .setHttpClientConfigCallback( + httpAsyncClientBuilder -> + httpAsyncClientBuilder.setDefaultIOReactorConfig( + IOReactorConfig.custom().setIoThreadCount(1).build())); - builder.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder. - setConnectionRequestTimeout(30000)); + builder.setRequestConfigCallback( + requestConfigBuilder -> requestConfigBuilder.setConnectionRequestTimeout(30000)); - return new RestHighLevelClient(builder); - } + return new RestHighLevelClient(builder); + } - /* - Cannot use the factory class without circular dependencies - */ - @Primary - @Bean(name = "searchBulkProcessor") - @Nonnull - public ESBulkProcessor getBulkProcessor(@Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) { - return ESBulkProcessor.builder(searchClient) - .async(true) - /* - * Force a refresh as part of this request. This refresh policy does not scale for high indexing or search throughput but is useful - * to present a consistent view to for indices with very low traffic. And it is wonderful for tests! - */ - .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .bulkRequestsLimit(10000) - .bulkFlushPeriod(REFRESH_INTERVAL_SECONDS - 1) - .retryInterval(1L) - .numRetries(1) - .build(); - } + /* + Cannot use the factory class without circular dependencies + */ + @Primary + @Bean(name = "searchBulkProcessor") + @Nonnull + public ESBulkProcessor getBulkProcessor( + @Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) { + return ESBulkProcessor.builder(searchClient) + .async(true) + /* + * Force a refresh as part of this request. 
This refresh policy does not scale for high indexing or search throughput but is useful
+         * to present a consistent view for indices with very low traffic. And it is wonderful for tests!
+         */
+        .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+        .bulkRequestsLimit(10000)
+        .bulkFlushPeriod(REFRESH_INTERVAL_SECONDS - 1)
+        .retryInterval(1L)
+        .numRetries(1)
+        .build();
+  }
 
-  @Primary
-  @Bean(name = "searchIndexBuilder")
-  @Nonnull
-  protected ESIndexBuilder getIndexBuilder(@Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
-    GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty());
-    return new ESIndexBuilder(searchClient, 1, 1, 3, 1, Map.of(),
-        false, false,
-        new ElasticSearchConfiguration(), gitVersion);
-  }
+  @Primary
+  @Bean(name = "searchIndexBuilder")
+  @Nonnull
+  protected ESIndexBuilder getIndexBuilder(
+      @Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
+    GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty());
+    return new ESIndexBuilder(
+        searchClient,
+        1,
+        1,
+        3,
+        1,
+        Map.of(),
+        false,
+        false,
+        new ElasticSearchConfiguration(),
+        gitVersion);
+  }
 }
diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
index 0e47202a9d237..f9684871f39e2 100644
--- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
+++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
@@ -10,26 +10,35 @@
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.FilterType;
 
-
 @SuppressWarnings("checkstyle:HideUtilityClassConstructor")
-@SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class, CassandraAutoConfiguration.class,
-    SolrHealthContributorAutoConfiguration.class})
-@ComponentScan(basePackages = {
-    //"com.linkedin.gms.factory.config",
-    //"com.linkedin.gms.factory.common",
-    "com.linkedin.gms.factory.kafka",
-    "com.linkedin.metadata.boot.kafka",
-    "com.linkedin.metadata.kafka",
-    "com.linkedin.metadata.dao.producer",
-    "com.linkedin.gms.factory.config",
-    "com.linkedin.gms.factory.entity.update.indices"
-},
-    excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class),
-        @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SiblingGraphServiceFactory.class)}
-    )
+@SpringBootApplication(
+    exclude = {
+      ElasticsearchRestClientAutoConfiguration.class,
+      CassandraAutoConfiguration.class,
+      SolrHealthContributorAutoConfiguration.class
+    })
+@ComponentScan(
+    basePackages = {
+      // "com.linkedin.gms.factory.config",
+      // "com.linkedin.gms.factory.common",
+      "com.linkedin.gms.factory.kafka",
+      "com.linkedin.metadata.boot.kafka",
+      "com.linkedin.metadata.kafka",
+      "com.linkedin.metadata.dao.producer",
+      "com.linkedin.gms.factory.config",
+      "com.linkedin.gms.factory.entity.update.indices"
+    },
+    excludeFilters = {
+      @ComponentScan.Filter(
+          type = FilterType.ASSIGNABLE_TYPE,
+          classes = ScheduledAnalyticsFactory.class),
+      @ComponentScan.Filter(
+          type = FilterType.ASSIGNABLE_TYPE,
+          classes = SiblingGraphServiceFactory.class)
+    })
 public class MaeConsumerApplication {
   public static void main(String[] args) {
     Class<?>[] primarySources = {MaeConsumerApplication.class,
MclConsumerConfig.class}; SpringApplication.run(primarySources, args); } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java index d0190279930fe..69288cec8220a 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static org.testng.AssertJUnit.*; + import com.linkedin.metadata.entity.EntityService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; @@ -7,15 +9,12 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - @ActiveProfiles("test") -@SpringBootTest(classes = {MaeConsumerApplication.class, MaeConsumerApplicationTestConfiguration.class}) +@SpringBootTest( + classes = {MaeConsumerApplication.class, MaeConsumerApplicationTestConfiguration.class}) public class MaeConsumerApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; @Test public void testMaeConsumerAutoWiring() { diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java index aa097a52c8fc6..7135e4e44d459 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java @@ -18,30 +18,21 @@ @Import(value = {SystemAuthenticationFactory.class}) public class MaeConsumerApplicationTestConfiguration { - @MockBean - private KafkaHealthChecker kafkaHealthChecker; + @MockBean private KafkaHealthChecker kafkaHealthChecker; - @MockBean - private EntityServiceImpl _entityServiceImpl; + @MockBean private EntityServiceImpl _entityServiceImpl; - @MockBean - private SystemRestliEntityClient restliEntityClient; + @MockBean private SystemRestliEntityClient restliEntityClient; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityRegistry entityRegistry; + @MockBean private EntityRegistry entityRegistry; - @MockBean - private GraphService _graphService; + @MockBean private GraphService _graphService; - @MockBean - private ElasticSearchSystemMetadataService _elasticSearchSystemMetadataService; + @MockBean private ElasticSearchSystemMetadataService _elasticSearchSystemMetadataService; - @MockBean - private ConfigEntityRegistry _configEntityRegistry; + @MockBean private ConfigEntityRegistry _configEntityRegistry; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java index fd15d36b109dd..1c7aa4fa22dd5 
100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java @@ -22,7 +22,6 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @EnableKafka @@ -34,17 +33,22 @@ public class DataHubUsageEventsProcessor { private final DataHubUsageEventTransformer dataHubUsageEventTransformer; private final String indexName; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - public DataHubUsageEventsProcessor(ElasticsearchConnector elasticSearchConnector, - DataHubUsageEventTransformer dataHubUsageEventTransformer, IndexConvention indexConvention) { + public DataHubUsageEventsProcessor( + ElasticsearchConnector elasticSearchConnector, + DataHubUsageEventTransformer dataHubUsageEventTransformer, + IndexConvention indexConvention) { this.elasticSearchConnector = elasticSearchConnector; this.dataHubUsageEventTransformer = dataHubUsageEventTransformer; this.indexName = indexConvention.getIndexName("datahub_usage_event"); } - @KafkaListener(id = "${DATAHUB_USAGE_EVENT_KAFKA_CONSUMER_GROUP_ID:datahub-usage-event-consumer-job-client}", topics = - "${DATAHUB_USAGE_EVENT_NAME:" + Topics.DATAHUB_USAGE_EVENT + "}", containerFactory = "simpleKafkaConsumer") + @KafkaListener( + id = "${DATAHUB_USAGE_EVENT_KAFKA_CONSUMER_GROUP_ID:datahub-usage-event-consumer-job-client}", + topics = "${DATAHUB_USAGE_EVENT_NAME:" + Topics.DATAHUB_USAGE_EVENT + "}", + containerFactory = "simpleKafkaConsumer") public void consume(final ConsumerRecord<String, String> consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final String record = consumerRecord.value(); @@ -64,16 +68,20 @@ public void consume(final ConsumerRecord<String, String> consumerRecord) { } /** - * DataHub Usage Event is written to an append-only index called a data stream. Due to circumstances - * it is possible that the event's id, even though it contains an epoch millisecond, results in duplicate ids - * in the index. The collisions will stall processing of the topic. To prevent the collisions we append - * the last 5 digits, padded with zeros, of the kafka offset to prevent the collision. + * DataHub Usage Event is written to an append-only index called a data stream. Due to + * circumstances it is possible that the event's id, even though it contains an epoch millisecond, + * results in duplicate ids in the index. The collisions will stall processing of the topic. To + * prevent the collisions we append the last 5 digits, padded with zeros, of the kafka offset to + * prevent the collision. 
+ * * @param eventId the event's id * @param kafkaOffset the kafka offset for the message * @return unique identifier for event */ private static String generateDocumentId(String eventId, long kafkaOffset) { - return URLEncoder.encode(String.format("%s_%05d", eventId, leastSignificant(kafkaOffset, 5)), StandardCharsets.UTF_8); + return URLEncoder.encode( + String.format("%s_%05d", eventId, leastSignificant(kafkaOffset, 5)), + StandardCharsets.UTF_8); } private static int leastSignificant(long kafkaOffset, int digits) { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java index d8cd49a736511..686e2a816ffb5 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -13,9 +15,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static com.linkedin.metadata.Constants.*; - - @Controller @Import(GitVersionFactory.class) public class MclConsumerConfig { @@ -24,10 +23,15 @@ public class MclConsumerConfig { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public MclConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java index 796f570a1732e..479617f0b6a82 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.Topics; - import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @@ -29,47 +28,56 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(MetadataChangeLogProcessorCondition.class) @Import({ - UpdateIndicesHook.class, - IngestionSchedulerHook.class, - EntityChangeEventGeneratorHook.class, - KafkaEventConsumerFactory.class, - SiblingAssociationHook.class + UpdateIndicesHook.class, + IngestionSchedulerHook.class, + EntityChangeEventGeneratorHook.class, + KafkaEventConsumerFactory.class, + SiblingAssociationHook.class }) 
 @EnableKafka
 public class MetadataChangeLogProcessor {

-  @Getter
-  private final List<MetadataChangeLogHook> hooks;
-  private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag"));
+  @Getter private final List<MetadataChangeLogHook> hooks;
+  private final Histogram kafkaLagStats =
+      MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag"));

   @Autowired
   public MetadataChangeLogProcessor(List<MetadataChangeLogHook> metadataChangeLogHooks) {
-    this.hooks = metadataChangeLogHooks.stream()
+    this.hooks =
+        metadataChangeLogHooks.stream()
             .filter(MetadataChangeLogHook::isEnabled)
             .sorted(Comparator.comparing(MetadataChangeLogHook::executionOrder))
             .collect(Collectors.toList());
     this.hooks.forEach(MetadataChangeLogHook::init);
   }

-  @KafkaListener(id = "${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}", topics = {
-      "${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}",
-      "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES
-          + "}"}, containerFactory = "kafkaEventConsumer")
+  @KafkaListener(
+      id = "${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}",
+      topics = {
+        "${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}",
+        "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}"
+      },
+      containerFactory = "kafkaEventConsumer")
   public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) {
     kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp());
     final GenericRecord record = consumerRecord.value();
-    log.debug("Got Generic MCL on topic: {}, partition: {}, offset: {}", consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset());
+    log.debug(
+        "Got Generic MCL on topic: {}, partition: {}, offset: {}",
+        consumerRecord.topic(),
+        consumerRecord.partition(),
+        consumerRecord.offset());
     MetricUtils.counter(this.getClass(), "received_mcl_count").inc();

     MetadataChangeLog event;
     try {
       event = EventUtils.avroToPegasusMCL(record);
-      log.debug("Successfully converted Avro MCL to Pegasus MCL. urn: {}, key: {}", event.getEntityUrn(),
+      log.debug(
+          "Successfully converted Avro MCL to Pegasus MCL. urn: {}, key: {}",
+          event.getEntityUrn(),
           event.getEntityKeyAspect());
     } catch (Exception e) {
       MetricUtils.counter(this.getClass(), "avro_to_pegasus_conversion_failure").inc();
@@ -78,15 +86,18 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord)
       return;
     }

-    log.debug("Invoking MCL hooks for urn: {}, key: {}", event.getEntityUrn(), event.getEntityKeyAspect());
+    log.debug(
+        "Invoking MCL hooks for urn: {}, key: {}",
+        event.getEntityUrn(),
+        event.getEntityKeyAspect());

     // Here - plug in additional "custom processor hooks"
     for (MetadataChangeLogHook hook : this.hooks) {
       if (!hook.isEnabled()) {
         continue;
       }
-      try (Timer.Context ignored = MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency")
-          .time()) {
+      try (Timer.Context ignored =
+          MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency").time()) {
         hook.invoke(event);
       } catch (Exception e) {
         // Just skip this hook and continue. - Note that this represents "at most once" processing.
@@ -96,7 +107,9 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord)
     }

     // TODO: Manually commit kafka offsets after full processing.
     MetricUtils.counter(this.getClass(), "consumed_mcl_count").inc();
-    log.debug("Successfully completed MCL hooks for urn: {}, key: {}", event.getEntityUrn(),
+    log.debug(
+        "Successfully completed MCL hooks for urn: {}, key: {}",
+        event.getEntityUrn(),
         event.getEntityKeyAspect());
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java
index b8334cd7fac27..f70eaf6084a00 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java
@@ -4,7 +4,6 @@
 import com.linkedin.metadata.boot.BootstrapManager;
 import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener;
 import com.linkedin.metadata.kafka.config.MetadataChangeLogProcessorCondition;
-
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -14,23 +13,22 @@
 import org.springframework.stereotype.Component;
 import org.springframework.web.context.WebApplicationContext;

-
-/**
- * Responsible for coordinating starting steps that happen before the application starts up.
- */
+/** Responsible for coordinating starting steps that happen before the application starts up. */
 @Slf4j
 @Component
 @Conditional(MetadataChangeLogProcessorCondition.class)
 public class ApplicationStartupListener implements ApplicationListener<ContextRefreshedEvent> {

-  private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName());
+  private static final String ROOT_WEB_APPLICATION_CONTEXT_ID =
+      String.format("%s:", WebApplicationContext.class.getName());

   private final DataHubUpgradeKafkaListener _dataHubUpgradeKafkaListener;
   private final ConfigurationProvider _configurationProvider;
   private final BootstrapManager _mclBootstrapManager;

   public ApplicationStartupListener(
-      @Qualifier("dataHubUpgradeKafkaListener") DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener,
+      @Qualifier("dataHubUpgradeKafkaListener")
+          DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener,
       ConfigurationProvider configurationProvider,
       @Qualifier("mclBootstrapManager") BootstrapManager bootstrapManager) {
     _dataHubUpgradeKafkaListener = dataHubUpgradeKafkaListener;
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java
index 9235a1d98014c..8ad1638115dae 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java
@@ -17,7 +17,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Scope;

-
 @Configuration
 @Conditional(MetadataChangeLogProcessorCondition.class)
 public class MCLBootstrapManagerFactory {
@@ -26,8 +25,7 @@ public class MCLBootstrapManagerFactory {
   @Qualifier("dataHubUpgradeKafkaListener")
   private BootstrapDependency _dataHubUpgradeKafkaListener;

-  @Autowired
-  private ConfigurationProvider _configurationProvider;
+  @Autowired private ConfigurationProvider _configurationProvider;

   @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}")
   private Boolean _upgradeDefaultBrowsePathsEnabled;
@@ -36,8 +34,8 @@ public class MCLBootstrapManagerFactory {
   @Scope("singleton")
   @Nonnull
   protected BootstrapManager createInstance() {
-    final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener,
-        _configurationProvider);
+    final WaitForSystemUpdateStep waitForSystemUpdateStep =
+        new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider);

     final List<BootstrapStep> finalSteps = ImmutableList.of(waitForSystemUpdateStep);
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java
index 0413cd09c36b7..90069f5a56c39 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java
@@ -5,15 +5,12 @@
 import org.springframework.core.env.Environment;
 import org.springframework.core.type.AnnotatedTypeMetadata;

-
 public class DataHubUsageEventsProcessorCondition implements Condition {
   @Override
-  public boolean matches(
-      ConditionContext context,
-      AnnotatedTypeMetadata metadata) {
+  public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
     Environment env = context.getEnvironment();
-    return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) && (
-        env.getProperty("DATAHUB_ANALYTICS_ENABLED") == null
+    return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED"))
+        && (env.getProperty("DATAHUB_ANALYTICS_ENABLED") == null
             || "true".equals(env.getProperty("DATAHUB_ANALYTICS_ENABLED")));
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java
index a9e54e5354b42..036968f9f6759 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java
@@ -11,7 +11,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;

-
 @Configuration
 @Import({RestliEntityClientFactory.class})
 public class EntityHydratorConfig {
@@ -20,13 +19,16 @@ public class EntityHydratorConfig {
   @Qualifier("systemRestliEntityClient")
   private SystemRestliEntityClient _entityClient;

-  @Autowired
-  private EntityRegistry _entityRegistry;
+  @Autowired private EntityRegistry _entityRegistry;

-  public final static ImmutableSet<String> EXCLUDED_ASPECTS = ImmutableSet.<String>builder()
+  public static final ImmutableSet<String> EXCLUDED_ASPECTS =
+      ImmutableSet.<String>builder()
           .add("datasetUpstreamLineage", "upstreamLineage")
           .add("dataJobInputOutput")
-          .add("dataProcessInstanceRelationships", "dataProcessInstanceInput", "dataProcessInstanceOutput")
+          .add(
+              "dataProcessInstanceRelationships",
+              "dataProcessInstanceInput",
+              "dataProcessInstanceOutput")
           .add("inputFields")
           .build();
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java
index 4d7e60b74c858..db1c0b1a87541 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java
@@ -5,12 +5,11 @@
 import org.springframework.core.env.Environment;
 import org.springframework.core.type.AnnotatedTypeMetadata;

-
 public class MetadataChangeLogProcessorCondition implements Condition {
   @Override
   public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
     Environment env = context.getEnvironment();
-    return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) || "true".equals(
-        env.getProperty("MCL_CONSUMER_ENABLED"));
+    return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED"))
+        || "true".equals(env.getProperty("MCL_CONSUMER_ENABLED"));
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java
index b0fade24e26ad..d757feef5aa95 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java
@@ -15,4 +15,4 @@ public abstract class ElasticEvent {
   public XContentBuilder buildJson() {
     return null;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java
index bea75f7b282ee..5b5a4ab072109 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java
@@ -1,9 +1,8 @@
 package com.linkedin.metadata.kafka.elasticsearch;

 import com.linkedin.events.metadata.ChangeType;
-import javax.annotation.Nonnull;
-
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
+import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 import org.opensearch.action.DocWriteRequest;
 import org.opensearch.action.delete.DeleteRequest;
@@ -11,7 +10,6 @@
 import org.opensearch.action.update.UpdateRequest;
 import org.opensearch.common.xcontent.XContentType;

-
 @Slf4j
 public class ElasticsearchConnector {
@@ -38,7 +36,8 @@ public void feedElasticEvent(@Nonnull ElasticEvent event) {

   @Nonnull
   private static IndexRequest createIndexRequest(@Nonnull ElasticEvent event) {
-    return new IndexRequest(event.getIndex()).id(event.getId())
+    return new IndexRequest(event.getIndex())
+        .id(event.getId())
         .source(event.buildJson())
         .opType(DocWriteRequest.OpType.CREATE);
   }
@@ -50,12 +49,10 @@ private static DeleteRequest createDeleteRequest(@Nonnull ElasticEvent event) {

   @Nonnull
   private UpdateRequest createUpsertRequest(@Nonnull ElasticEvent event) {
-    return new UpdateRequest(
-        event.getIndex(), event.getId())
-        .detectNoop(false)
-        .docAsUpsert(true)
-        .doc(event.buildJson(), XContentType.JSON)
-        .retryOnConflict(_numRetries);
+    return new UpdateRequest(event.getIndex(), event.getId())
+        .detectNoop(false)
+        .docAsUpsert(true)
+        .doc(event.buildJson(), XContentType.JSON)
+        .retryOnConflict(_numRetries);
   }
 }
-
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java
index a3672975e42e6..884d74d3cd647 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java
@@ -1,6 +1,7 @@
 package com.linkedin.metadata.kafka.elasticsearch;

 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
+import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -8,9 +9,6 @@
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

-
-import javax.annotation.Nonnull;
-
 // TODO: Move this factory.
 @Slf4j
 @Configuration
@@ -27,5 +25,4 @@ public class ElasticsearchConnectorFactory {
   public ElasticsearchConnector createInstance() {
     return new ElasticsearchConnector(bulkProcessor, numRetries);
   }
-
-}
\ No newline at end of file
+}
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java
index 230cd8433e6ff..d97290975ae26 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java
@@ -1,14 +1,13 @@
 package com.linkedin.metadata.kafka.elasticsearch;

+import java.io.IOException;
+import javax.annotation.Nullable;
+import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.core.xcontent.DeprecationHandler;
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentParser;
-import org.opensearch.common.xcontent.XContentType;
-
-import java.io.IOException;
-import javax.annotation.Nullable;

 public class JsonElasticEvent extends ElasticEvent {
   private final String _document;
@@ -23,8 +22,12 @@ public XContentBuilder buildJson() {
     XContentBuilder builder = null;
     try {
       builder = XContentFactory.jsonBuilder().prettyPrint();
-      XContentParser parser = XContentFactory.xContent(XContentType.JSON)
-          .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, _document);
+      XContentParser parser =
+          XContentFactory.xContent(XContentType.JSON)
+              .createParser(
+                  NamedXContentRegistry.EMPTY,
+                  DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+                  _document);
       builder.copyCurrentStructure(parser);
     } catch (IOException e) {
       e.printStackTrace();
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java
index a3d6dca75068b..83d44cf609a41 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java
@@ -1,17 +1,15 @@
 package com.linkedin.metadata.kafka.elasticsearch;

-import com.linkedin.data.template.RecordTemplate;
 import com.datahub.util.RecordUtils;
+import com.linkedin.data.template.RecordTemplate;
+import java.io.IOException;
+import javax.annotation.Nullable;
+import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.core.xcontent.DeprecationHandler;
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentParser;
-import org.opensearch.common.xcontent.XContentType;
-
-import java.io.IOException;
-import javax.annotation.Nullable;
-

 public class MCEElasticEvent extends ElasticEvent {

@@ -28,8 +26,12 @@ public XContentBuilder buildJson() {
     try {
       String jsonString = RecordUtils.toJsonString(this._doc);
       builder = XContentFactory.jsonBuilder().prettyPrint();
-      XContentParser parser = XContentFactory.xContent(XContentType.JSON)
-          .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, jsonString);
+      XContentParser parser =
+          XContentFactory.xContent(XContentType.JSON)
+              .createParser(
+                  NamedXContentRegistry.EMPTY,
+                  DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+                  jsonString);
       builder.copyCurrentStructure(parser);
     } catch (IOException e) {
       e.printStackTrace();
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java
index 39b47768a6dcf..f7e110f53a019 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java
@@ -3,35 +3,32 @@
 import com.linkedin.mxe.MetadataChangeLog;
 import javax.annotation.Nonnull;

-
 /**
  * Custom hook which is invoked on receiving a new {@link MetadataChangeLog} event.
  *
- * The semantics of this hook are currently "at most once". That is, the hook will not be called
+ * <p>The semantics of this hook are currently "at most once". That is, the hook will not be called
  * with the same message. In the future, we intend to migrate to "at least once" semantics, meaning
  * that the hook will be responsible for implementing idempotency.
  */
 public interface MetadataChangeLogHook {

-  /**
-   * Initialize the hook
-   */
-  default void init() { }
+  /** Initialize the hook */
+  default void init() {}

   /**
-   * Return whether the hook is enabled or not. If not enabled, the below invoke method is not triggered
+   * Return whether the hook is enabled or not. If not enabled, the below invoke method is not
+   * triggered
    */
   default boolean isEnabled() {
     return true;
   }

-  /**
-   * Invoke the hook when a MetadataChangeLog is received
-   */
+  /** Invoke the hook when a MetadataChangeLog is received */
   void invoke(@Nonnull MetadataChangeLog log) throws Exception;

   /**
    * Controls hook execution ordering
+   *
    * @return order to execute
    */
   default int executionOrder() {
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java
index 78c87ec8f4b3b..019d6b898ae6b 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hook;

+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.gms.factory.common.GraphServiceFactory;
 import com.linkedin.gms.factory.common.SystemMetadataServiceFactory;
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
@@ -14,14 +16,17 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.stereotype.Component;

-import static com.linkedin.metadata.Constants.*;
-
-
 // TODO: Backfill tests for this class in UpdateIndicesHookTest.java
 @Slf4j
 @Component
-@Import({GraphServiceFactory.class, EntitySearchServiceFactory.class, TimeseriesAspectServiceFactory.class,
-    EntityRegistryFactory.class, SystemMetadataServiceFactory.class, SearchDocumentTransformerFactory.class})
+@Import({
+  GraphServiceFactory.class,
+  EntitySearchServiceFactory.class,
+  TimeseriesAspectServiceFactory.class,
+  EntityRegistryFactory.class,
+  SystemMetadataServiceFactory.class,
+  SearchDocumentTransformerFactory.class
+})
 public class UpdateIndicesHook implements MetadataChangeLogHook {

   protected final UpdateIndicesService _updateIndicesService;
@@ -44,7 +49,8 @@ public void invoke(@Nonnull final MetadataChangeLog event) {
     if (event.getSystemMetadata() != null) {
       if (event.getSystemMetadata().getProperties() != null) {
         if (UI_SOURCE.equals(event.getSystemMetadata().getProperties().get(APP_SOURCE))) {
-          // If coming from the UI, we pre-process the Update Indices hook as a fast path to avoid Kafka lag
+          // If coming from the UI, we pre-process the Update Indices hook as a fast path to avoid
+          // Kafka lag
           return;
         }
       }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java
index 3b65ecccad336..08790b1be3319 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java
@@ -36,19 +36,20 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.stereotype.Component;

-
 /**
- * A {@link MetadataChangeLogHook} responsible for generating Entity Change Events
- * to the Platform Events topic.
+ * A {@link MetadataChangeLogHook} responsible for generating Entity Change Events to the Platform
+ * Events topic.
  */
 @Slf4j
 @Component
-@Import({EntityChangeEventGeneratorRegistry.class, EntityRegistryFactory.class, RestliEntityClientFactory.class})
+@Import({
+  EntityChangeEventGeneratorRegistry.class,
+  EntityRegistryFactory.class,
+  RestliEntityClientFactory.class
+})
 public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook {

-  /**
-   * The list of aspects that are supported for generating semantic change events.
-   */
+  /** The list of aspects that are supported for generating semantic change events. */
   private static final Set<String> SUPPORTED_ASPECT_NAMES =
       ImmutableSet.of(
           Constants.GLOBAL_TAGS_ASPECT_NAME,
@@ -74,10 +75,11 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook {
           Constants.DOMAIN_KEY_ASPECT_NAME,
           Constants.TAG_KEY_ASPECT_NAME,
           Constants.STATUS_ASPECT_NAME);
-  /**
-   * The list of change types that are supported for generating semantic change events.
-   */
-  private static final Set<String> SUPPORTED_OPERATIONS = ImmutableSet.of("CREATE", "UPSERT", "DELETE");
+
+  /** The list of change types that are supported for generating semantic change events. */
+  private static final Set<String> SUPPORTED_OPERATIONS =
+      ImmutableSet.of("CREATE", "UPSERT", "DELETE");
+
   private final EntityChangeEventGeneratorRegistry _entityChangeEventGeneratorRegistry;
   private final SystemRestliEntityClient _entityClient;
   private final EntityRegistry _entityRegistry;
@@ -89,7 +91,8 @@ public EntityChangeEventGeneratorHook(
       @Nonnull final SystemRestliEntityClient entityClient,
       @Nonnull final EntityRegistry entityRegistry,
       @Nonnull @Value("${entityChangeEvents.enabled:true}") Boolean isEnabled) {
-    _entityChangeEventGeneratorRegistry = Objects.requireNonNull(entityChangeEventGeneratorRegistry);
+    _entityChangeEventGeneratorRegistry =
+        Objects.requireNonNull(entityChangeEventGeneratorRegistry);
     _entityClient = Objects.requireNonNull(entityClient);
     _entityRegistry = Objects.requireNonNull(entityRegistry);
     _isEnabled = isEnabled;
@@ -108,41 +111,46 @@ public void invoke(@Nonnull final MetadataChangeLog logEvent) throws Exception {
     // 2. Find and invoke a EntityChangeEventGenerator.
     // 3. Sink the output of the EntityChangeEventGenerator to a specific PDL change event.
     final AspectSpec aspectSpec =
-        _entityRegistry.getEntitySpec(logEvent.getEntityType()).getAspectSpec(logEvent.getAspectName());
+        _entityRegistry
+            .getEntitySpec(logEvent.getEntityType())
+            .getAspectSpec(logEvent.getAspectName());
     assert aspectSpec != null;

-    final RecordTemplate fromAspect = logEvent.getPreviousAspectValue() != null
-        ? GenericRecordUtils.deserializeAspect(
-            logEvent.getPreviousAspectValue().getValue(),
-            logEvent.getPreviousAspectValue().getContentType(),
-            aspectSpec)
-        : null;
-
-    final RecordTemplate toAspect = logEvent.getAspect() != null
-        ? GenericRecordUtils.deserializeAspect(
-            logEvent.getAspect().getValue(),
-            logEvent.getAspect().getContentType(),
-            aspectSpec)
-        : null;
-
-    final List<ChangeEvent> changeEvents = generateChangeEvents(
-        logEvent.getEntityUrn(),
-        logEvent.getEntityType(),
-        logEvent.getAspectName(),
-        createAspect(fromAspect, logEvent.getPreviousSystemMetadata()),
-        createAspect(toAspect, logEvent.getSystemMetadata()),
-        logEvent.getCreated()
-    );
+    final RecordTemplate fromAspect =
+        logEvent.getPreviousAspectValue() != null
+            ? GenericRecordUtils.deserializeAspect(
+                logEvent.getPreviousAspectValue().getValue(),
+                logEvent.getPreviousAspectValue().getContentType(),
+                aspectSpec)
+            : null;
+
+    final RecordTemplate toAspect =
+        logEvent.getAspect() != null
+            ? GenericRecordUtils.deserializeAspect(
+                logEvent.getAspect().getValue(),
+                logEvent.getAspect().getContentType(),
+                aspectSpec)
+            : null;
+
+    final List<ChangeEvent> changeEvents =
+        generateChangeEvents(
+            logEvent.getEntityUrn(),
+            logEvent.getEntityType(),
+            logEvent.getAspectName(),
+            createAspect(fromAspect, logEvent.getPreviousSystemMetadata()),
+            createAspect(toAspect, logEvent.getSystemMetadata()),
+            logEvent.getCreated());

     // Iterate through each transaction, emit change events as platform events.
     for (final ChangeEvent event : changeEvents) {
       PlatformEvent platformEvent = buildPlatformEvent(event);
       emitPlatformEvent(
           platformEvent,
-          String.format("%s-%s", Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, event.getEntityUrn())
-      );
-      log.debug("Successfully emitted change event. category: {}, operation: {}, entity urn: {}",
+          String.format(
+              "%s-%s", Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, event.getEntityUrn()));
+      log.debug(
+          "Successfully emitted change event. category: {}, operation: {}, entity urn: {}",
           event.getCategory(),
           event.getOperation(),
           event.getEntityUrn());
@@ -156,35 +164,30 @@ private <T extends RecordTemplate> List<ChangeEvent> generateChangeEvents(
       @Nonnull final String aspectName,
       @Nonnull final Aspect from,
       @Nonnull final Aspect to,
-      @Nonnull AuditStamp auditStamp
-  ) {
+      @Nonnull AuditStamp auditStamp) {
     final List<EntityChangeEventGenerator<T>> entityChangeEventGenerators =
-        _entityChangeEventGeneratorRegistry
-            .getEntityChangeEventGenerators(aspectName)
-            .stream()
+        _entityChangeEventGeneratorRegistry.getEntityChangeEventGenerators(aspectName).stream()
             // Note: Assumes that correct types have been registered for the aspect.
             .map(changeEventGenerator -> (EntityChangeEventGenerator<T>) changeEventGenerator)
             .collect(Collectors.toList());
     final List<ChangeEvent> allChangeEvents = new ArrayList<>();
     for (EntityChangeEventGenerator<T> entityChangeEventGenerator : entityChangeEventGenerators) {
       allChangeEvents.addAll(
-          entityChangeEventGenerator.getChangeEvents(urn, entityName, aspectName, from, to, auditStamp));
+          entityChangeEventGenerator.getChangeEvents(
+              urn, entityName, aspectName, from, to, auditStamp));
     }
     return allChangeEvents;
   }

   private boolean isEligibleForProcessing(final MetadataChangeLog log) {
-    return SUPPORTED_OPERATIONS.contains(log.getChangeType().toString()) && SUPPORTED_ASPECT_NAMES.contains(
-        log.getAspectName());
+    return SUPPORTED_OPERATIONS.contains(log.getChangeType().toString())
+        && SUPPORTED_ASPECT_NAMES.contains(log.getAspectName());
   }

-  private void emitPlatformEvent(@Nonnull final PlatformEvent event, @Nonnull final String partitioningKey)
-      throws Exception {
+  private void emitPlatformEvent(
+      @Nonnull final PlatformEvent event, @Nonnull final String partitioningKey) throws Exception {
     _entityClient.producePlatformEvent(
-        Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME,
-        partitioningKey,
-        event
-    );
+        Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, partitioningKey, event);
   }

   private PlatformEvent buildPlatformEvent(final ChangeEvent rawChangeEvent) {
@@ -193,14 +196,15 @@ private PlatformEvent buildPlatformEvent(final ChangeEvent rawChangeEvent) {
     // 2. Build platform event
     PlatformEvent platformEvent = new PlatformEvent();
     platformEvent.setName(Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME);
-    platformEvent.setHeader(new PlatformEventHeader().setTimestampMillis(rawChangeEvent.getAuditStamp().getTime()));
+    platformEvent.setHeader(
+        new PlatformEventHeader().setTimestampMillis(rawChangeEvent.getAuditStamp().getTime()));
     platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent));
     return platformEvent;
   }

   /**
-   * Thin mapping from internal Timeline API {@link ChangeEvent} to Kafka Platform Event {@link ChangeEvent}, which serves as a public
-   * API for outbound consumption.
+   * Thin mapping from internal Timeline API {@link ChangeEvent} to Kafka Platform Event {@link
+   * ChangeEvent}, which serves as a public API for outbound consumption.
    */
   private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeEvent) {
     com.linkedin.platform.event.v1.EntityChangeEvent changeEvent =
@@ -216,7 +220,8 @@ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeE
     changeEvent.setAuditStamp(rawChangeEvent.getAuditStamp());
     changeEvent.setVersion(0);
     if (rawChangeEvent.getParameters() != null) {
-      // This map should ideally contain only primitives at the leaves - integers, floats, booleans, strings.
+      // This map should ideally contain only primitives at the leaves - integers, floats,
+      // booleans, strings.
       changeEvent.setParameters(new Parameters(new DataMap(rawChangeEvent.getParameters())));
     }
     return changeEvent;
@@ -225,7 +230,8 @@ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeE
     }
   }

-  private Aspect createAspect(@Nullable final RecordTemplate value, @Nullable final SystemMetadata systemMetadata) {
+  private Aspect createAspect(
+      @Nullable final RecordTemplate value, @Nullable final SystemMetadata systemMetadata) {
     return new Aspect(value, systemMetadata);
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java
index 1a3febb623314..82f1de0a889bf 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java
@@ -22,10 +22,9 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.stereotype.Component;

-
 /**
- * This hook updates a stateful {@link IngestionScheduler} of Ingestion Runs for Ingestion Sources defined
- * within DataHub.
+ * This hook updates a stateful {@link IngestionScheduler} of Ingestion Runs for Ingestion Sources
+ * defined within DataHub.
  */
 @Slf4j
 @Component
@@ -41,8 +40,7 @@ public class IngestionSchedulerHook implements MetadataChangeLogHook {
   public IngestionSchedulerHook(
       @Nonnull final EntityRegistry entityRegistry,
       @Nonnull final IngestionScheduler scheduler,
-      @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled
-  ) {
+      @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled) {
     _entityRegistry = entityRegistry;
     _scheduler = scheduler;
     _isEnabled = isEnabled;
@@ -62,7 +60,8 @@ public void init() {
   public void invoke(@Nonnull MetadataChangeLog event) {
     if (isEligibleForProcessing(event)) {

-      log.info("Received {} to Ingestion Source. Rescheduling the source (if applicable). urn: {}, key: {}.",
+      log.info(
+          "Received {} to Ingestion Source. Rescheduling the source (if applicable). urn: {}, key: {}.",
           event.getChangeType(),
           event.getEntityUrn(),
           event.getEntityKeyAspect());
@@ -80,8 +79,9 @@ public void invoke(@Nonnull MetadataChangeLog event) {
   }

   /**
-   * Returns true if the event should be processed, which is only true if the event represents a create, update, or delete
-   * of an Ingestion Source Info aspect, which in turn contains the schedule associated with the source.
+   * Returns true if the event should be processed, which is only true if the event represents a
+   * create, update, or delete of an Ingestion Source Info aspect, which in turn contains the
+   * schedule associated with the source.
    */
   private boolean isEligibleForProcessing(final MetadataChangeLog event) {
     return isIngestionSourceUpdate(event) || isIngestionSourceDeleted(event);
@@ -90,8 +90,8 @@ private boolean isEligibleForProcessing(final MetadataChangeLog event) {
   private boolean isIngestionSourceUpdate(final MetadataChangeLog event) {
     return Constants.INGESTION_INFO_ASPECT_NAME.equals(event.getAspectName())
         && (ChangeType.UPSERT.equals(event.getChangeType())
-        || ChangeType.CREATE.equals(event.getChangeType())
-        || ChangeType.DELETE.equals(event.getChangeType()));
+            || ChangeType.CREATE.equals(event.getChangeType())
+            || ChangeType.DELETE.equals(event.getChangeType()));
   }

   private boolean isIngestionSourceDeleted(final MetadataChangeLog event) {
@@ -100,8 +100,8 @@ private boolean isIngestionSourceDeleted(final MetadataChangeLog event) {
   }

   /**
-   * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an entityUrn
-   * or entityKey field, depending on which is present.
+   * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an
+   * entityUrn or entityKey field, depending on which is present.
    */
   private Urn getUrnFromEvent(final MetadataChangeLog event) {
     EntitySpec entitySpec;
@@ -109,15 +109,17 @@ private Urn getUrnFromEvent(final MetadataChangeLog event) {
       entitySpec = _entityRegistry.getEntitySpec(event.getEntityType());
     } catch (IllegalArgumentException e) {
       log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString());
-      throw new RuntimeException("Failed to get urn from MetadataChangeLog event. Skipping processing.", e);
+      throw new RuntimeException(
+          "Failed to get urn from MetadataChangeLog event. Skipping processing.", e);
     }
     // Extract an URN from the Log Event.
     return EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec());
   }

   /**
-   * Deserializes and returns an instance of {@link DataHubIngestionSourceInfo} extracted from a {@link MetadataChangeLog} event.
-   * The incoming event is expected to have a populated "aspect" field.
+   * Deserializes and returns an instance of {@link DataHubIngestionSourceInfo} extracted from a
+   * {@link MetadataChangeLog} event. The incoming event is expected to have a populated "aspect"
+   * field.
    */
   private DataHubIngestionSourceInfo getInfoFromEvent(final MetadataChangeLog event) {
     EntitySpec entitySpec;
@@ -125,12 +127,15 @@ private DataHubIngestionSourceInfo getInfoFromEvent(final MetadataChangeLog even
       entitySpec = _entityRegistry.getEntitySpec(event.getEntityType());
     } catch (IllegalArgumentException e) {
       log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString());
-      throw new RuntimeException("Failed to get Ingestion Source info from MetadataChangeLog event. Skipping processing.", e);
+      throw new RuntimeException(
+          "Failed to get Ingestion Source info from MetadataChangeLog event. Skipping processing.",
+          e);
     }
-    return (DataHubIngestionSourceInfo) GenericRecordUtils.deserializeAspect(
-        event.getAspect().getValue(),
-        event.getAspect().getContentType(),
-        entitySpec.getAspectSpec(Constants.INGESTION_INFO_ASPECT_NAME));
+    return (DataHubIngestionSourceInfo)
+        GenericRecordUtils.deserializeAspect(
+            event.getAspect().getValue(),
+            event.getAspect().getContentType(),
+            entitySpec.getAspectSpec(Constants.INGESTION_INFO_ASPECT_NAME));
   }

   @VisibleForTesting
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java
index 064f987ff1ba9..67198d13772a3 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hook.siblings;

+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
@@ -22,6 +24,12 @@
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Condition;
+import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
+import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
+import com.linkedin.metadata.query.filter.Criterion;
+import com.linkedin.metadata.query.filter.CriterionArray;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.EntitySearchService;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.metadata.utils.EntityKeyUtils;
@@ -42,26 +50,19 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.stereotype.Component;

-import com.linkedin.metadata.query.filter.Condition;
-import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
-import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
-import com.linkedin.metadata.query.filter.Criterion;
-import com.linkedin.metadata.query.filter.CriterionArray;
-import com.linkedin.metadata.query.filter.Filter;
-
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * This hook associates dbt datasets with their sibling entities
- */
+/** This hook associates dbt datasets with their sibling entities */
 @Slf4j
 @Component
 @Singleton
-@Import({EntityRegistryFactory.class, RestliEntityClientFactory.class, EntitySearchServiceFactory.class})
+@Import({
+  EntityRegistryFactory.class,
+  RestliEntityClientFactory.class,
+  EntitySearchServiceFactory.class
+})
 public class SiblingAssociationHook implements MetadataChangeLogHook {

-  public static final String SIBLING_ASSOCIATION_SYSTEM_ACTOR = "urn:li:corpuser:__datahub_system_sibling_hook";
+  public static final String SIBLING_ASSOCIATION_SYSTEM_ACTOR =
+      "urn:li:corpuser:__datahub_system_sibling_hook";
   public static final String DBT_PLATFORM_NAME = "dbt";

   // Older dbt sources produced lowercase subtypes, whereas we now
@@ -80,8 +81,7 @@ public SiblingAssociationHook(
       @Nonnull final EntityRegistry entityRegistry,
       @Nonnull final SystemRestliEntityClient entityClient,
       @Nonnull final EntitySearchService searchService,
-      @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled
-  ) {
+      @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled) {
     _entityRegistry = entityRegistry;
     _entityClient = entityClient;
     _searchService = searchService;
@@ -97,8 +97,7 @@ void setEnabled(Boolean newValue) {
   }

   @Override
-  public void init() {
-  }
+  public void init() {}

   @Override
   public boolean isEnabled() {
@@ -135,28 +134,38 @@ public void invoke(@Nonnull MetadataChangeLog event) {

   private void handleEntityKeyEvent(DatasetUrn datasetUrn) {
     Filter entitiesWithYouAsSiblingFilter = createFilterForEntitiesWithYouAsSibling(datasetUrn);
-    final SearchResult searchResult = _searchService.search(
-        List.of(DATASET_ENTITY_NAME),
-        "*",
-        entitiesWithYouAsSiblingFilter,
-        null,
-        0,
-        10,
+    final SearchResult searchResult =
+        _searchService.search(
+            List.of(DATASET_ENTITY_NAME),
+            "*",
+            entitiesWithYouAsSiblingFilter,
+            null,
+            0,
+            10,
             new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true));

     // we have a match of an entity with you as a sibling, associate yourself back
-    searchResult.getEntities().forEach(entity -> {
-      if (!entity.getEntity().equals(datasetUrn)) {
-        if (datasetUrn.getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) {
-          setSiblingsAndSoftDeleteSibling(datasetUrn, searchResult.getEntities().get(0).getEntity());
-        } else {
-          setSiblingsAndSoftDeleteSibling(searchResult.getEntities().get(0).getEntity(), datasetUrn);
-        }
-      }
-    });
+    searchResult
+        .getEntities()
+        .forEach(
+            entity -> {
+              if (!entity.getEntity().equals(datasetUrn)) {
+                if (datasetUrn
+                    .getPlatformEntity()
+                    .getPlatformNameEntity()
+                    .equals(DBT_PLATFORM_NAME)) {
+                  setSiblingsAndSoftDeleteSibling(
+                      datasetUrn, searchResult.getEntities().get(0).getEntity());
+                } else {
+                  setSiblingsAndSoftDeleteSibling(
+                      searchResult.getEntities().get(0).getEntity(), datasetUrn);
+                }
+              }
+            });
   }

-  // If the upstream is a single source system node & subtype is source, then associate the upstream as your sibling
+  // If the upstream is a single source system node & subtype is source, then associate the upstream
+  // as your sibling
   private void handleDbtDatasetEvent(MetadataChangeLog event, DatasetUrn datasetUrn) {
     // we need both UpstreamLineage & Subtypes to determine whether to associate
     UpstreamLineage upstreamLineage = null;
@@ -172,41 +181,54 @@ private void handleDbtDatasetEvent(MetadataChangeLog event, DatasetUrn datasetUr
       upstreamLineage = getUpstreamLineageFromEntityClient(datasetUrn);
     }

-    if (
-        upstreamLineage != null
-            && subTypesAspectOfEntity != null
-            && upstreamLineage.hasUpstreams()
-            && subTypesAspectOfEntity.hasTypeNames()
-            && (subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V1)
-            || subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V2))
-    ) {
+    if (upstreamLineage != null
+        && subTypesAspectOfEntity != null
+        && upstreamLineage.hasUpstreams()
+        && subTypesAspectOfEntity.hasTypeNames()
+        && (subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V1)
+            || subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V2))) {
       UpstreamArray upstreams = upstreamLineage.getUpstreams();
-      if (
-          upstreams.size() == 1
-              && !upstreams.get(0).getDataset().getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) {
+      if (upstreams.size() == 1
+          && !upstreams
+              .get(0)
+              .getDataset()
+              .getPlatformEntity()
+              .getPlatformNameEntity()
+              .equals(DBT_PLATFORM_NAME)) {
         setSiblingsAndSoftDeleteSibling(datasetUrn, upstreams.get(0).getDataset());
       }
     }
   }

-  // if the dataset is not dbt--- it may be produced by a dbt dataset. If so, associate them as siblings
+  // if the dataset is not dbt--- it may be produced by a dbt dataset. If so, associate them as
+  // siblings
   private void handleSourceDatasetEvent(MetadataChangeLog event, DatasetUrn sourceUrn) {
     if (event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME)) {
       UpstreamLineage upstreamLineage = getUpstreamLineageFromEvent(event);
       if (upstreamLineage != null && upstreamLineage.hasUpstreams()) {
         UpstreamArray upstreams = upstreamLineage.getUpstreams();
-        // an entity can have merged lineage (eg. dbt + snowflake), but by default siblings are only between dbt <> non-dbt
-        UpstreamArray dbtUpstreams = new UpstreamArray(
-            upstreams.stream()
-                .filter(obj -> obj.getDataset().getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME))
-                .collect(Collectors.toList())
-        );
-        // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt model
+        // an entity can have merged lineage (eg. dbt + snowflake), but by default siblings are only
+        // between dbt <> non-dbt
+        UpstreamArray dbtUpstreams =
+            new UpstreamArray(
+                upstreams.stream()
+                    .filter(
+                        obj ->
+                            obj.getDataset()
+                                .getPlatformEntity()
+                                .getPlatformNameEntity()
+                                .equals(DBT_PLATFORM_NAME))
+                    .collect(Collectors.toList()));
+        // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt
+        // model
         if (dbtUpstreams.size() == 1) {
           setSiblingsAndSoftDeleteSibling(dbtUpstreams.get(0).getDataset(), sourceUrn);
         } else if (dbtUpstreams.size() > 1) {
-          log.error("{} has an unexpected number of dbt upstreams: {}. Not adding any as siblings.", sourceUrn.toString(), dbtUpstreams.size());
+          log.error(
+              "{} has an unexpected number of dbt upstreams: {}. Not adding any as siblings.",
+              sourceUrn.toString(),
+              dbtUpstreams.size());
         }
       }
     }
@@ -218,12 +240,10 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) {

     log.info("Associating {} and {} as siblings.", dbtUrn.toString(), sourceUrn.toString());

-    if (
-        existingDbtSiblingAspect != null
-            && existingSourceSiblingAspect != null
-            && existingDbtSiblingAspect.getSiblings().contains(sourceUrn.toString())
-            && existingSourceSiblingAspect.getSiblings().contains(dbtUrn.toString())
-    ) {
+    if (existingDbtSiblingAspect != null
+        && existingSourceSiblingAspect != null
+        && existingDbtSiblingAspect.getSiblings().contains(sourceUrn.toString())
+        && existingSourceSiblingAspect.getSiblings().contains(dbtUrn.toString())) {
       // we have already connected them- we can abort here
       return;
     }
@@ -266,20 +286,24 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) {

     // clean up any references to stale siblings that have been deleted
     List<Urn> filteredNewSiblingsArray =
-        newSiblingsUrnArray.stream().filter(urn -> {
-          try {
-            return _entityClient.exists(urn);
-          } catch (RemoteInvocationException e) {
-            log.error("Error while checking existence of {}: {}", urn, e.toString());
-            throw new RuntimeException("Error checking existence. Skipping processing.", e);
-          }
-        }).collect(Collectors.toList());
+        newSiblingsUrnArray.stream()
+            .filter(
+                urn -> {
+                  try {
+                    return _entityClient.exists(urn);
+                  } catch (RemoteInvocationException e) {
+                    log.error("Error while checking existence of {}: {}", urn, e.toString());
+                    throw new RuntimeException("Error checking existence. Skipping processing.", e);
+                  }
+                })
+            .collect(Collectors.toList());

     sourceSiblingAspect.setSiblings(new UrnArray(filteredNewSiblingsArray));
     sourceSiblingAspect.setPrimary(false);

     MetadataChangeProposal sourceSiblingProposal = new MetadataChangeProposal();
-    GenericAspect sourceSiblingAspectSerialized = GenericRecordUtils.serializeAspect(sourceSiblingAspect);
+    GenericAspect sourceSiblingAspectSerialized =
+        GenericRecordUtils.serializeAspect(sourceSiblingAspect);
     sourceSiblingProposal.setAspect(sourceSiblingAspectSerialized);
     sourceSiblingProposal.setAspectName(SIBLINGS_ASPECT_NAME);
@@ -295,23 +319,21 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) {
     }
   }

-
   /**
-   * Returns true if the event should be processed, which is only true if the event represents a dataset for now
+   * Returns true if the event should be processed, which is only true if the event represents a
+   * dataset for now
    */
   private boolean isEligibleForProcessing(final MetadataChangeLog event) {
     return event.getEntityType().equals("dataset")
         && !event.getChangeType().equals(ChangeType.DELETE)
-        && (
-        event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME)
-            || event.getAspectName().equals(SUB_TYPES_ASPECT_NAME)
-            || event.getAspectName().equals(DATASET_KEY_ASPECT_NAME)
-    );
+        && (event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME)
+            || event.getAspectName().equals(SUB_TYPES_ASPECT_NAME)
+            || event.getAspectName().equals(DATASET_KEY_ASPECT_NAME));
   }

   /**
-   * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an entityUrn
-   * or entityKey field, depending on which is present.
+   * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an
+   * entityUrn or entityKey field, depending on which is present.
    */
   private Urn getUrnFromEvent(final MetadataChangeLog event) {
     EntitySpec entitySpec;
@@ -319,14 +341,16 @@ private Urn getUrnFromEvent(final MetadataChangeLog event) {
       entitySpec = _entityRegistry.getEntitySpec(event.getEntityType());
     } catch (IllegalArgumentException e) {
       log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString());
-      throw new RuntimeException("Failed to get urn from MetadataChangeLog event. Skipping processing.", e);
+      throw new RuntimeException(
+          "Failed to get urn from MetadataChangeLog event. Skipping processing.", e);
     }
     // Extract an URN from the Log Event.
     return EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec());
   }

   /**
-   * Deserializes and returns an instance of {@link UpstreamLineage} extracted from a {@link MetadataChangeLog} event.
+   * Deserializes and returns an instance of {@link UpstreamLineage} extracted from a {@link
+   * MetadataChangeLog} event.
    */
   private UpstreamLineage getUpstreamLineageFromEvent(final MetadataChangeLog event) {
     EntitySpec entitySpec;
@@ -338,16 +362,19 @@ private UpstreamLineage getUpstreamLineageFromEvent(final MetadataChangeLog even
       entitySpec = _entityRegistry.getEntitySpec(event.getEntityType());
     } catch (IllegalArgumentException e) {
       log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString());
-      throw new RuntimeException("Failed to get UpstreamLineage from MetadataChangeLog event. Skipping processing.", e);
+      throw new RuntimeException(
+          "Failed to get UpstreamLineage from MetadataChangeLog event. Skipping processing.", e);
     }
-    return (UpstreamLineage) GenericRecordUtils.deserializeAspect(
-        event.getAspect().getValue(),
-        event.getAspect().getContentType(),
-        entitySpec.getAspectSpec(UPSTREAM_LINEAGE_ASPECT_NAME));
+    return (UpstreamLineage)
+        GenericRecordUtils.deserializeAspect(
+            event.getAspect().getValue(),
+            event.getAspect().getContentType(),
+            entitySpec.getAspectSpec(UPSTREAM_LINEAGE_ASPECT_NAME));
   }

   /**
-   * Deserializes and returns an instance of {@link SubTypes} extracted from a {@link MetadataChangeLog} event.
+   * Deserializes and returns an instance of {@link SubTypes} extracted from a {@link
+   * MetadataChangeLog} event.
    */
   private SubTypes getSubtypesFromEvent(final MetadataChangeLog event) {
     EntitySpec entitySpec;
@@ -359,22 +386,24 @@ private SubTypes getSubtypesFromEvent(final MetadataChangeLog event) {
       entitySpec = _entityRegistry.getEntitySpec(event.getEntityType());
     } catch (IllegalArgumentException e) {
       log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString());
-      throw new RuntimeException("Failed to get SubTypes from MetadataChangeLog event. Skipping processing.", e);
+      throw new RuntimeException(
+          "Failed to get SubTypes from MetadataChangeLog event. Skipping processing.", e);
     }
-    return (SubTypes) GenericRecordUtils.deserializeAspect(
-        event.getAspect().getValue(),
-        event.getAspect().getContentType(),
-        entitySpec.getAspectSpec(SUB_TYPES_ASPECT_NAME));
+    return (SubTypes)
+        GenericRecordUtils.deserializeAspect(
+            event.getAspect().getValue(),
+            event.getAspect().getContentType(),
+            entitySpec.getAspectSpec(SUB_TYPES_ASPECT_NAME));
   }

   @SneakyThrows
   private AuditStamp getAuditStamp() {
-    return new AuditStamp().setActor(Urn.createFromString(SIBLING_ASSOCIATION_SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
+    return new AuditStamp()
+        .setActor(Urn.createFromString(SIBLING_ASSOCIATION_SYSTEM_ACTOR))
+        .setTime(System.currentTimeMillis());
   }

-  private Filter createFilterForEntitiesWithYouAsSibling(
-      final Urn entityUrn
-  ) {
+  private Filter createFilterForEntitiesWithYouAsSibling(final Urn entityUrn) {
     final Filter filter = new Filter();
     final ConjunctiveCriterionArray disjunction = new ConjunctiveCriterionArray();
@@ -395,16 +424,16 @@ private Filter createFilterForEntitiesWithYouAsSibling(
     return filter;
   }

-  private SubTypes getSubtypesFromEntityClient(
-      final Urn urn
-  ) {
+  private SubTypes getSubtypesFromEntityClient(final Urn urn) {
     try {
-      EntityResponse entityResponse = _entityClient.getV2(
-          urn,
-          ImmutableSet.of(SUB_TYPES_ASPECT_NAME));
-
-      if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.SUB_TYPES_ASPECT_NAME)) {
-        return new SubTypes(entityResponse.getAspects().get(Constants.SUB_TYPES_ASPECT_NAME).getValue().data());
+      EntityResponse entityResponse =
+          _entityClient.getV2(urn, ImmutableSet.of(SUB_TYPES_ASPECT_NAME));
+
+      if (entityResponse != null
+          && entityResponse.hasAspects()
+          && entityResponse.getAspects().containsKey(Constants.SUB_TYPES_ASPECT_NAME)) {
+        return new SubTypes(
+            entityResponse.getAspects().get(Constants.SUB_TYPES_ASPECT_NAME).getValue().data());
       } else {
         return null;
       }
@@ -413,17 +442,20 @@ private SubTypes getSubtypesFromEntityClient(
     }
   }

-  private UpstreamLineage getUpstreamLineageFromEntityClient(
-      final Urn urn
-  ) {
+  private UpstreamLineage getUpstreamLineageFromEntityClient(final Urn urn) {
     try {
-      EntityResponse entityResponse = _entityClient.getV2(
-          urn,
-          ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME)
-      );
-
-      if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) {
-        return new UpstreamLineage(entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data());
+      EntityResponse entityResponse =
+          _entityClient.getV2(urn, ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME));
+
+      if (entityResponse != null
+          && entityResponse.hasAspects()
+          && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) {
+        return new UpstreamLineage(
+            entityResponse
+                .getAspects()
+                .get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)
+                .getValue()
+                .data());
       } else {
         return null;
       }
@@ -432,17 +464,16 @@ private UpstreamLineage getUpstreamLineageFromEntityClient(
     }
   }

-  private Siblings getSiblingsFromEntityClient(
-      final Urn urn
-  ) {
+  private Siblings getSiblingsFromEntityClient(final Urn urn) {
     try {
-      EntityResponse entityResponse = _entityClient.getV2(
-          urn,
-          ImmutableSet.of(SIBLINGS_ASPECT_NAME)
-      );
-
-      if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.SIBLINGS_ASPECT_NAME)) {
-        return new Siblings(entityResponse.getAspects().get(Constants.SIBLINGS_ASPECT_NAME).getValue().data());
+      EntityResponse entityResponse =
+          _entityClient.getV2(urn, ImmutableSet.of(SIBLINGS_ASPECT_NAME));
+
+      if (entityResponse != null
+          && entityResponse.hasAspects()
+          && entityResponse.getAspects().containsKey(Constants.SIBLINGS_ASPECT_NAME)) {
+        return new Siblings(
+            entityResponse.getAspects().get(Constants.SIBLINGS_ASPECT_NAME).getValue().data());
       } else {
         return null;
       }
@@ -450,5 +481,4 @@ private Siblings getSiblingsFromEntityClient(
       throw new RuntimeException("Failed to retrieve UpstreamLineage", e);
     }
   }
-
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java
index 03303b7723b9c..5e0b10b3d5049 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java
@@ -4,13 +4,10 @@
 import com.linkedin.entity.EntityResponse;
 import lombok.extern.slf4j.Slf4j;

-
 @Slf4j
 public abstract class BaseHydrator {

-  /**
-   * Use values in the entity response to hydrate the document
-   */
-  protected abstract void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse);
-
+  /** Use values in the entity response to hydrate the document */
+  protected abstract void hydrateFromEntityResponse(
+      ObjectNode document, EntityResponse entityResponse);
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java
index 493f7424758cc..9dfbdb280b215 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hydrator;

+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.chart.ChartInfo;
 import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
@@ -8,9 +10,6 @@
 import com.linkedin.metadata.key.ChartKey;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 public class ChartHydrator extends BaseHydrator {

@@ -21,9 +20,12 @@ public class ChartHydrator extends BaseHydrator {
   protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) {
     EnvelopedAspectMap aspectMap = entityResponse.getAspects();
     MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document);
-    mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(TITLE, new ChartInfo(dataMap).getTitle()));
-    mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(DASHBOARD_TOOL, new ChartKey(dataMap).getDashboardTool()));
+    mappingHelper.mapToResult(
+        CHART_INFO_ASPECT_NAME,
+        (jsonNodes, dataMap) -> jsonNodes.put(TITLE, new ChartInfo(dataMap).getTitle()));
+    mappingHelper.mapToResult(
+        CHART_KEY_ASPECT_NAME,
+        (jsonNodes, dataMap) ->
+            jsonNodes.put(DASHBOARD_TOOL, new ChartKey(dataMap).getDashboardTool()));
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java
index 0b8735533ed06..8b7b63f1f3240 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hydrator;

+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
 import com.linkedin.entity.EntityResponse;
@@ -8,9 +10,6 @@
 import com.linkedin.metadata.key.CorpUserKey;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 public class CorpUserHydrator extends BaseHydrator {

@@ -21,9 +20,11 @@ public class CorpUserHydrator extends BaseHydrator {
   protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) {
     EnvelopedAspectMap aspectMap = entityResponse.getAspects();
     MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document);
-    mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(NAME, new CorpUserInfo(dataMap).getDisplayName()));
-    mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(USER_NAME, new CorpUserKey(dataMap).getUsername()));
+    mappingHelper.mapToResult(
+        CORP_USER_INFO_ASPECT_NAME,
+        (jsonNodes, dataMap) -> jsonNodes.put(NAME, new CorpUserInfo(dataMap).getDisplayName()));
+    mappingHelper.mapToResult(
+        CORP_USER_KEY_ASPECT_NAME,
+        (jsonNodes, dataMap) -> jsonNodes.put(USER_NAME, new CorpUserKey(dataMap).getUsername()));
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java
index 8b376128b7d11..fcafb3aabc860 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hydrator;

+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.dashboard.DashboardInfo;
 import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
@@ -8,9 +10,6 @@
 import com.linkedin.metadata.key.DashboardKey;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 public class DashboardHydrator extends BaseHydrator {
   private static final String DASHBOARD_TOOL = "dashboardTool";
@@ -20,9 +19,12 @@ public class DashboardHydrator extends BaseHydrator {
   protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) {
     EnvelopedAspectMap aspectMap = entityResponse.getAspects();
     MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document);
-    mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(TITLE, new DashboardInfo(dataMap).getTitle()));
-    mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(DASHBOARD_TOOL, new DashboardKey(dataMap).getDashboardTool()));
+    mappingHelper.mapToResult(
+        DASHBOARD_INFO_ASPECT_NAME,
+        (jsonNodes, dataMap) -> jsonNodes.put(TITLE, new DashboardInfo(dataMap).getTitle()));
+    mappingHelper.mapToResult(
+        DASHBOARD_KEY_ASPECT_NAME,
+        (jsonNodes, dataMap) ->
+            jsonNodes.put(DASHBOARD_TOOL, new DashboardKey(dataMap).getDashboardTool()));
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java
index d847168de7783..88efe53f5c53e 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hydrator;

+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
 import com.linkedin.datajob.DataFlowInfo;
@@ -8,9 +10,6 @@
 import com.linkedin.metadata.key.DataFlowKey;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 public class DataFlowHydrator extends BaseHydrator {

@@ -21,9 +20,12 @@ public class DataFlowHydrator extends BaseHydrator {
   protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) {
     EnvelopedAspectMap aspectMap = entityResponse.getAspects();
     MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document);
-    mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(NAME, new DataFlowInfo(dataMap).getName()));
-    mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(ORCHESTRATOR, new DataFlowKey(dataMap).getOrchestrator()));
+    mappingHelper.mapToResult(
+        DATA_FLOW_INFO_ASPECT_NAME,
+        (jsonNodes, dataMap) -> jsonNodes.put(NAME, new DataFlowInfo(dataMap).getName()));
+    mappingHelper.mapToResult(
+        CORP_USER_KEY_ASPECT_NAME,
+        (jsonNodes, dataMap) ->
+            jsonNodes.put(ORCHESTRATOR, new DataFlowKey(dataMap).getOrchestrator()));
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java
index d9c99e8570e68..d8ea57a467277 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hydrator;

+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.data.DataMap;
 import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
@@ -11,9 +13,6 @@
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 public class DataJobHydrator extends BaseHydrator {

@@ -24,8 +23,9 @@ public class DataJobHydrator extends BaseHydrator {
   protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) {
     EnvelopedAspectMap aspectMap = entityResponse.getAspects();
     MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document);
-    mappingHelper.mapToResult(DATA_JOB_INFO_ASPECT_NAME, (jsonNodes, dataMap) ->
-        jsonNodes.put(NAME, new DataJobInfo(dataMap).getName()));
+    mappingHelper.mapToResult(
+        DATA_JOB_INFO_ASPECT_NAME,
+        (jsonNodes, dataMap) -> jsonNodes.put(NAME, new DataJobInfo(dataMap).getName()));
     try {
       mappingHelper.mapToResult(DATA_JOB_KEY_ASPECT_NAME, this::mapKey);
     } catch (Exception e) {
@@ -35,8 +35,10 @@ protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse ent

   private void mapKey(ObjectNode jsonNodes, DataMap dataMap) {
     DataJobKey dataJobKey = new DataJobKey(dataMap);
-    DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils
-        .convertUrnToEntityKeyInternal(dataJobKey.getFlow(), new DataFlowKey().schema());
+    DataFlowKey dataFlowKey =
+        (DataFlowKey)
+            EntityKeyUtils.convertUrnToEntityKeyInternal(
+                dataJobKey.getFlow(), new DataFlowKey().schema());
     jsonNodes.put(ORCHESTRATOR, dataFlowKey.getOrchestrator());
   }
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java
index 715b23e48b5b9..d95faf4373521 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.kafka.hydrator;

+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.data.DataMap;
 import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
@@ -8,9 +10,6 @@
 import com.linkedin.metadata.key.DatasetKey;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 public class DatasetHydrator extends BaseHydrator {
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java
index 0a3b38517eaad..7a8fdd11fac43 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.kafka.hydrator;

+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.kafka.config.EntityHydratorConfig.EXCLUDED_ASPECTS;
+
 import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.common.urn.Urn;
@@ -13,14 +16,9 @@
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
-
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.kafka.config.EntityHydratorConfig.EXCLUDED_ASPECTS;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class EntityHydrator {
@@ -47,12 +45,17 @@ public Optional<ObjectNode> getHydratedEntity(String entityTypeName, String urn)
     // Hydrate fields from snapshot
     EntityResponse entityResponse;
     try {
-      Set<String> aspectNames = Optional.ofNullable(_entityRegistry.getEntitySpecs().get(urnObj.getEntityType()))
-          .map(spec -> spec.getAspectSpecs().stream().map(AspectSpec::getName)
-              .filter(aspectName -> !EXCLUDED_ASPECTS.contains(aspectName))
-              .collect(Collectors.toSet()))
+      Set<String> aspectNames =
+          Optional.ofNullable(_entityRegistry.getEntitySpecs().get(urnObj.getEntityType()))
+              .map(
+                  spec ->
+                      spec.getAspectSpecs().stream()
+                          .map(AspectSpec::getName)
+                          .filter(aspectName -> !EXCLUDED_ASPECTS.contains(aspectName))
+                          .collect(Collectors.toSet()))
               .orElse(Set.of());
-      entityResponse = _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj);
+      entityResponse =
+          _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj);
     } catch (RemoteInvocationException | URISyntaxException e) {
       log.error("Error while calling GMS to hydrate entity for urn {}", urn);
       return Optional.empty();
@@ -83,7 +86,10 @@ public Optional<ObjectNode> getHydratedEntity(String entityTypeName, String urn)
         _datasetHydrator.hydrateFromEntityResponse(document, entityResponse);
         break;
       default:
-        log.error("Unable to find valid hydrator for entity type: {} urn: {}", entityResponse.getEntityName(), urn);
+        log.error(
+            "Unable to find valid hydrator for entity type: {} urn: {}",
+            entityResponse.getEntityName(),
+            urn);
         return Optional.empty();
     }
     return Optional.of(document);
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java
index 5beb6bdd765a2..30250f14e93e5 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.kafka.transformer;

+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.datahubusage.DataHubUsageEventConstants.*;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -16,24 +19,28 @@
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Component;

-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.datahubusage.DataHubUsageEventConstants.*;
-
-
-/**
- * Transformer that transforms usage event (schema defined HERE) into a search document
- */
+/** Transformer that transforms usage event (schema defined HERE) into a search document */
 @Slf4j
 @Component
 public class
DataHubUsageEventTransformer { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final Set<DataHubUsageEventType> EVENTS_WITH_ENTITY_URN = - ImmutableSet.of(DataHubUsageEventType.SEARCH_RESULT_CLICK_EVENT, DataHubUsageEventType.BROWSE_RESULT_CLICK_EVENT, - DataHubUsageEventType.ENTITY_VIEW_EVENT, DataHubUsageEventType.ENTITY_SECTION_VIEW_EVENT, + ImmutableSet.of( + DataHubUsageEventType.SEARCH_RESULT_CLICK_EVENT, + DataHubUsageEventType.BROWSE_RESULT_CLICK_EVENT, + DataHubUsageEventType.ENTITY_VIEW_EVENT, + DataHubUsageEventType.ENTITY_SECTION_VIEW_EVENT, DataHubUsageEventType.ENTITY_ACTION_EVENT); private final EntityHydrator _entityHydrator; @@ -97,7 +104,8 @@ public Optional<TransformedDocument> transformDataHubUsageEvent(String dataHubUs try { return Optional.of( - new TransformedDocument(getId(eventDocument), OBJECT_MAPPER.writeValueAsString(eventDocument))); + new TransformedDocument( + getId(eventDocument), OBJECT_MAPPER.writeValueAsString(eventDocument))); } catch (JsonProcessingException e) { log.info("Failed to package document: {}", eventDocument); return Optional.empty(); @@ -128,13 +136,21 @@ private void setFieldsForEntity(EntityType entityType, String urn, ObjectNode se log.info("No matches for urn {}", urn); return; } - entityObject.get().fieldNames() + entityObject + .get() + .fieldNames() .forEachRemaining( - key -> searchObject.put(entityType.name().toLowerCase() + "_" + key, entityObject.get().get(key).asText())); + key -> + searchObject.put( + entityType.name().toLowerCase() + "_" + key, + entityObject.get().get(key).asText())); } private String getId(final ObjectNode eventDocument) { - return eventDocument.get(TYPE).asText() + "_" + eventDocument.get(ACTOR_URN).asText() + "_" + eventDocument.get( - TIMESTAMP).asText(); + return eventDocument.get(TYPE).asText() + + "_" + + eventDocument.get(ACTOR_URN).asText() + + "_" + + eventDocument.get(TIMESTAMP).asText(); } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java index a237e3e27f168..f2bb8a5fc9222 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java @@ -5,14 +5,18 @@ import com.linkedin.metadata.models.registry.EntityRegistry; public class EntityRegistryTestUtil { - private EntityRegistryTestUtil() { - } + private EntityRegistryTestUtil() {} - public static final EntityRegistry ENTITY_REGISTRY; + public static final EntityRegistry ENTITY_REGISTRY; - static { - EntityRegistryTestUtil.class.getClassLoader().setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - ENTITY_REGISTRY = new ConfigEntityRegistry( - EntityRegistryTestUtil.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); - 
} + static { + EntityRegistryTestUtil.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + ENTITY_REGISTRY = + new ConfigEntityRegistry( + EntityRegistryTestUtil.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); + } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java index 85b00e9ade6b8..a1a9bd4cd413a 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.kafka.hook; +import static com.linkedin.metadata.graph.GraphIndexUtils.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; +import static org.testng.Assert.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; @@ -29,11 +33,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.graph.GraphIndexUtils.*; -import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; -import static org.testng.Assert.*; - - public class GraphIndexUtilsTest { private static final String UPSTREAM_RELATIONSHIP_PATH = "/upstreams/*/dataset"; @@ -54,7 +53,9 @@ public class GraphIndexUtilsTest { public void setupTest() { _createdActorUrn = UrnUtils.getUrn(CREATED_ACTOR_URN); _updatedActorUrn = UrnUtils.getUrn(UPDATED_ACTOR_URN); - _datasetUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"); + _datasetUrn = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)"); _upstreamDataset1 = UrnUtils.toDatasetUrn("snowflake", "test", "DEV"); _upstreamDataset2 = UrnUtils.toDatasetUrn("snowflake", "test2", "DEV"); _mockRegistry = ENTITY_REGISTRY; @@ -74,29 +75,30 @@ public void testExtractGraphEdgesDefault() { for (Map.Entry<RelationshipFieldSpec, List<Object>> entry : extractedFields.entrySet()) { // check specifically for the upstreams relationship entry if (entry.getKey().getPath().toString().equals(UPSTREAM_RELATIONSHIP_PATH)) { - List<Edge> edgesToAdd = GraphIndexUtils.extractGraphEdges(entry, upstreamLineage, _datasetUrn, event, true); + List<Edge> edgesToAdd = + GraphIndexUtils.extractGraphEdges(entry, upstreamLineage, _datasetUrn, event, true); List<Edge> expectedEdgesToAdd = new ArrayList<>(); // edges contain default created event time and created actor from system metadata - Edge edge1 = new Edge( - _datasetUrn, - _upstreamDataset1, - entry.getKey().getRelationshipName(), - CREATED_EVENT_TIME, - _createdActorUrn, - UPDATED_EVENT_TIME_1, - _updatedActorUrn, - null - ); - Edge edge2 = new Edge( - _datasetUrn, - _upstreamDataset2, - entry.getKey().getRelationshipName(), - CREATED_EVENT_TIME, - _createdActorUrn, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - null - ); + Edge edge1 = + new Edge( + _datasetUrn, + _upstreamDataset1, + entry.getKey().getRelationshipName(), + CREATED_EVENT_TIME, + _createdActorUrn, + UPDATED_EVENT_TIME_1, + _updatedActorUrn, + null); + Edge edge2 = + new Edge( + _datasetUrn, + _upstreamDataset2, + entry.getKey().getRelationshipName(), + CREATED_EVENT_TIME, + _createdActorUrn, + 
UPDATED_EVENT_TIME_2, + _updatedActorUrn, + null); expectedEdgesToAdd.add(edge1); expectedEdgesToAdd.add(edge2); assertEquals(expectedEdgesToAdd.size(), edgesToAdd.size()); @@ -108,26 +110,26 @@ public void testExtractGraphEdgesDefault() { @Test public void testMergeEdges() { - final Edge edge1 = new Edge( - _datasetUrn, - _upstreamDataset1, - DOWNSTREAM_RELATIONSHIP_TYPE, - CREATED_EVENT_TIME, - _createdActorUrn, - UPDATED_EVENT_TIME_1, - _updatedActorUrn, - Collections.singletonMap("foo", "bar") - ); - final Edge edge2 = new Edge( - _datasetUrn, - _upstreamDataset1, - DOWNSTREAM_RELATIONSHIP_TYPE, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - Collections.singletonMap("foo", "baz") - ); + final Edge edge1 = + new Edge( + _datasetUrn, + _upstreamDataset1, + DOWNSTREAM_RELATIONSHIP_TYPE, + CREATED_EVENT_TIME, + _createdActorUrn, + UPDATED_EVENT_TIME_1, + _updatedActorUrn, + Collections.singletonMap("foo", "bar")); + final Edge edge2 = + new Edge( + _datasetUrn, + _upstreamDataset1, + DOWNSTREAM_RELATIONSHIP_TYPE, + UPDATED_EVENT_TIME_2, + _updatedActorUrn, + UPDATED_EVENT_TIME_2, + _updatedActorUrn, + Collections.singletonMap("foo", "baz")); final Edge edge3 = mergeEdges(edge1, edge2); assertEquals(edge3.getSource(), edge1.getSource()); assertEquals(edge3.getDestination(), edge1.getDestination()); @@ -144,11 +146,13 @@ private UpstreamLineage createUpstreamLineage() { UpstreamArray upstreams = new UpstreamArray(); Upstream upstream1 = new Upstream(); upstream1.setDataset(_upstreamDataset1); - upstream1.setAuditStamp(new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); + upstream1.setAuditStamp( + new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); upstream1.setType(DatasetLineageType.TRANSFORMED); Upstream upstream2 = new Upstream(); upstream2.setDataset(_upstreamDataset2); - upstream2.setAuditStamp(new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); + upstream2.setAuditStamp( + new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); upstream2.setType(DatasetLineageType.TRANSFORMED); upstreams.add(upstream1); upstreams.add(upstream2); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java index 0897cfa01084f..724b91edbf8a1 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hook; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.UpdateIndicesHookTest.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; @@ -18,15 +21,9 @@ import java.util.HashMap; import java.util.Map; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.UpdateIndicesHookTest.*; - - public class MCLProcessingTestDataGenerator { - private MCLProcessingTestDataGenerator() { - - } + private MCLProcessingTestDataGenerator() {} public static MetadataChangeLog createBaseChangeLog() throws URISyntaxException { MetadataChangeLog event = new MetadataChangeLog(); @@ -39,7 +36,8 @@ public static MetadataChangeLog 
createBaseChangeLog() throws URISyntaxException event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); - event.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(TEST_ACTOR_URN)).setTime(EVENT_TIME)); + event.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn(TEST_ACTOR_URN)).setTime(EVENT_TIME)); return event; } @@ -68,7 +66,8 @@ public static MetadataChangeLog setSystemMetadataWithForceIndexing(MetadataChang return changeLog.setSystemMetadata(systemMetadata); } - public static MetadataChangeLog setPreviousData(MetadataChangeLog changeLog, MetadataChangeLog previousState) { + public static MetadataChangeLog setPreviousData( + MetadataChangeLog changeLog, MetadataChangeLog previousState) { changeLog.setPreviousAspectValue(previousState.getAspect()); return changeLog.setPreviousSystemMetadata(previousState.getSystemMetadata()); } @@ -93,7 +92,8 @@ public static MetadataChangeLog modifySystemMetadata2(MetadataChangeLog changeLo return changeLog.setSystemMetadata(systemMetadata); } - public static MetadataChangeLog modifyAspect(MetadataChangeLog changeLog, UpstreamLineage upstreamLineage) { + public static MetadataChangeLog modifyAspect( + MetadataChangeLog changeLog, UpstreamLineage upstreamLineage) { return changeLog.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); } @@ -109,7 +109,8 @@ public static UpstreamLineage createBaseLineageAspect() throws URISyntaxExceptio return upstreamLineage; } - public static UpstreamLineage addLineageEdge(UpstreamLineage upstreamLineage) throws URISyntaxException { + public static UpstreamLineage addLineageEdge(UpstreamLineage upstreamLineage) + throws URISyntaxException { UpstreamArray upstreamArray = upstreamLineage.getUpstreams(); Upstream upstream = new Upstream(); upstream.setType(DatasetLineageType.TRANSFORMED); @@ -127,5 +128,4 @@ public static UpstreamLineage modifyNonSearchableField(UpstreamLineage upstreamL upstreamArray.set(0, upstream); return upstreamLineage.setUpstreams(upstreamArray); } - } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java index 90f8f208c4cb6..12c8ad7d0c69b 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java @@ -1,7 +1,10 @@ package com.linkedin.metadata.kafka.hook; -import com.linkedin.metadata.config.SystemUpdateConfiguration; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; +import static com.linkedin.metadata.kafka.hook.MCLProcessingTestDataGenerator.*; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; + import com.linkedin.common.AuditStamp; import com.linkedin.common.InputField; import com.linkedin.common.InputFieldArray; @@ -22,9 +25,11 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; +import com.linkedin.metadata.config.SystemUpdateConfiguration; +import 
com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.graph.Edge; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; import com.linkedin.metadata.key.ChartKey; import com.linkedin.metadata.models.AspectSpec; @@ -43,31 +48,28 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaField; - import java.net.URISyntaxException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Value; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Collections; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; -import static com.linkedin.metadata.kafka.hook.MCLProcessingTestDataGenerator.*; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; - public class UpdateIndicesHookTest { -// going to want a test where we have an upstreamLineage aspect with finegrained, check that we call _graphService.addEdge for each edge -// as well as _graphService.removeEdgesFromNode for each field and their relationships + // TODO: add a test with an upstreamLineage aspect that has fine-grained lineage; check that we + // call _graphService.addEdge for each edge, + // as well as _graphService.removeEdgesFromNode for each field and their relationships static final long EVENT_TIME = 123L; - static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)"; - static final String TEST_DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"; - static final String TEST_DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressKafkaDataset,PROD)"; + static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)"; + static final String TEST_DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"; + static final String TEST_DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressKafkaDataset,PROD)"; static final String TEST_CHART_URN = "urn:li:chart:(looker,dashboard_elements.1)"; static final String TEST_ACTOR_URN = "urn:li:corpuser:test"; static final String DOWNSTREAM_OF = "DownstreamOf"; @@ -107,88 +109,112 @@ public void setupTest() { ElasticSearchConfiguration elasticSearchConfiguration = new ElasticSearchConfiguration(); SystemUpdateConfiguration systemUpdateConfiguration = new SystemUpdateConfiguration(); systemUpdateConfiguration.setWaitForSystemUpdate(false); - Mockito.when(_mockConfigurationProvider.getElasticSearch()).thenReturn(elasticSearchConfiguration); - _updateIndicesService = new UpdateIndicesService( - _mockGraphService, - _mockEntitySearchService, - _mockTimeseriesAspectService, - _mockSystemMetadataService, - ENTITY_REGISTRY, - _searchDocumentTransformer, - _mockEntityIndexBuilders - ); - _updateIndicesHook = new UpdateIndicesHook( - _updateIndicesService, - true - ); + Mockito.when(_mockConfigurationProvider.getElasticSearch()) + .thenReturn(elasticSearchConfiguration); + _updateIndicesService = + new UpdateIndicesService( +
_mockGraphService, + _mockEntitySearchService, + _mockTimeseriesAspectService, + _mockSystemMetadataService, + ENTITY_REGISTRY, + _searchDocumentTransformer, + _mockEntityIndexBuilders); + _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); } @Test public void testFineGrainedLineageEdgesAreAdded() throws Exception { _updateIndicesService.setGraphDiffMode(false); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); MetadataChangeLog event = createUpstreamLineageMCL(upstreamUrn, downstreamUrn); _updateIndicesHook.invoke(event); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); } @Test public void testFineGrainedLineageEdgesAreAddedRestate() throws Exception { _updateIndicesService.setGraphDiffMode(false); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); - MetadataChangeLog event = createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.RESTATE); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + MetadataChangeLog event = + createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.RESTATE); _updateIndicesHook.invoke(event); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new 
ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); Mockito.verify(_mockEntitySearchService, Mockito.times(1)) - .upsertDocument(Mockito.eq(DATASET_ENTITY_NAME), Mockito.any(), - Mockito.eq(URLEncoder.encode("urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)", - StandardCharsets.UTF_8))); + .upsertDocument( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.any(), + Mockito.eq( + URLEncoder.encode( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)", + StandardCharsets.UTF_8))); } @Test public void testInputFieldsEdgesAreAdded() throws Exception { - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,thelook.explore.order_items,PROD),users.count)"); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,thelook.explore.order_items,PROD),users.count)"); String downstreamFieldPath = "users.count"; MetadataChangeLog event = createInputFieldsMCL(upstreamUrn, downstreamFieldPath); EntityRegistry mockEntityRegistry = createMockEntityRegistry(); - _updateIndicesService = new UpdateIndicesService( - _mockGraphService, - _mockEntitySearchService, - _mockTimeseriesAspectService, - _mockSystemMetadataService, - mockEntityRegistry, - _searchDocumentTransformer, - _mockEntityIndexBuilders - ); + _updateIndicesService = + new UpdateIndicesService( + _mockGraphService, + _mockEntitySearchService, + _mockTimeseriesAspectService, + _mockSystemMetadataService, + mockEntityRegistry, + _searchDocumentTransformer, + _mockEntityIndexBuilders); _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); _updateIndicesHook.invoke(event); - Urn downstreamUrn = UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", TEST_CHART_URN, downstreamFieldPath)); + Urn downstreamUrn = + UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", TEST_CHART_URN, downstreamFieldPath)); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); } @Test @@ -207,7 +233,9 @@ public void testMCLProcessExhaustive() throws URISyntaxException { Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.any()); // Update document Mockito.verify(_mockEntitySearchService, Mockito.times(1)) - .upsertDocument(Mockito.eq(DATASET_ENTITY_NAME), Mockito.any(), + .upsertDocument( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.any(), Mockito.eq(URLEncoder.encode(TEST_DATASET_URN, StandardCharsets.UTF_8))); /* @@ -261,7 +289,6 @@ public void testMCLProcessExhaustive() throws URISyntaxException { Mockito.verify(_mockEntitySearchService, Mockito.times(0)) .upsertDocument(Mockito.any(), Mockito.any(), Mockito.any()); - /* * noOpUpsert */ @@ -356,8 +383,8 @@ public void testMCLProcessExhaustive() throws URISyntaxException { _updateIndicesHook.invoke(changeLog); // 
Forced removal of all edges - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode(Mockito.any(), - Mockito.any(), Mockito.any()); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode(Mockito.any(), Mockito.any(), Mockito.any()); // Forced add of edges Mockito.verify(_mockGraphService, Mockito.times(2)).addEdge(Mockito.any()); // Forced document update @@ -369,45 +396,64 @@ public void testMCLProcessExhaustive() throws URISyntaxException { public void testMCLUIPreProcessed() throws Exception { _updateIndicesService.setGraphDiffMode(true); _updateIndicesService.setSearchDiffMode(true); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); - - MetadataChangeLog changeLog = createUpstreamLineageMCLUIPreProcessed(upstreamUrn, downstreamUrn, ChangeType.UPSERT); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + + MetadataChangeLog changeLog = + createUpstreamLineageMCLUIPreProcessed(upstreamUrn, downstreamUrn, ChangeType.UPSERT); _updateIndicesHook.invoke(changeLog); - Mockito.verifyNoInteractions(_mockEntitySearchService, _mockGraphService, _mockTimeseriesAspectService, _mockSystemMetadataService); + Mockito.verifyNoInteractions( + _mockEntitySearchService, + _mockGraphService, + _mockTimeseriesAspectService, + _mockSystemMetadataService); } private EntityRegistry createMockEntityRegistry() { - // need to mock this registry instead of using test-entity-registry.yml because inputFields does not work due to a known bug + // need to mock this registry instead of using test-entity-registry.yml because inputFields does + // not work due to a known bug EntityRegistry mockEntityRegistry = Mockito.mock(EntityRegistry.class); EntitySpec entitySpec = Mockito.mock(EntitySpec.class); AspectSpec aspectSpec = createMockAspectSpec(InputFields.class, InputFields.dataSchema()); - AspectSpec upstreamLineageAspectSpec = createMockAspectSpec(UpstreamLineage.class, UpstreamLineage.dataSchema()); - Mockito.when(mockEntityRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(mockEntityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); - Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)).thenReturn(upstreamLineageAspectSpec); + AspectSpec upstreamLineageAspectSpec = + createMockAspectSpec(UpstreamLineage.class, UpstreamLineage.dataSchema()); + Mockito.when(mockEntityRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(mockEntityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) + .thenReturn(upstreamLineageAspectSpec); Mockito.when(aspectSpec.isTimeseries()).thenReturn(false); Mockito.when(aspectSpec.getName()).thenReturn(Constants.INPUT_FIELDS_ASPECT_NAME); 
Mockito.when(upstreamLineageAspectSpec.isTimeseries()).thenReturn(false); - Mockito.when(upstreamLineageAspectSpec.getName()).thenReturn(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + Mockito.when(upstreamLineageAspectSpec.getName()) + .thenReturn(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); AspectSpec chartKeyAspectSpec = createMockAspectSpec(ChartKey.class, ChartKey.dataSchema()); Mockito.when(entitySpec.getKeyAspectSpec()).thenReturn(chartKeyAspectSpec); return mockEntityRegistry; } - private <T extends RecordTemplate> AspectSpec createMockAspectSpec(Class<T> clazz, RecordDataSchema schema) { + private <T extends RecordTemplate> AspectSpec createMockAspectSpec( + Class<T> clazz, RecordDataSchema schema) { AspectSpec mockSpec = Mockito.mock(AspectSpec.class); Mockito.when(mockSpec.getDataTemplateClass()).thenReturn((Class<RecordTemplate>) clazz); Mockito.when(mockSpec.getPegasusSchema()).thenReturn(schema); return mockSpec; } - private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn) throws Exception { + private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn) + throws Exception { return createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.UPSERT); } - private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { + private MetadataChangeLog createUpstreamLineageMCL( + Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(Constants.DATASET_ENTITY_NAME); event.setAspectName(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); @@ -427,7 +473,9 @@ private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstre final UpstreamArray upstreamArray = new UpstreamArray(); final Upstream upstream = new Upstream(); upstream.setType(DatasetLineageType.TRANSFORMED); - upstream.setDataset(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)")); + upstream.setDataset( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)")); upstreamArray.add(upstream); upstreamLineage.setUpstreams(upstreamArray); @@ -438,8 +486,10 @@ private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstre return event; } - private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed(Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { - final MetadataChangeLog metadataChangeLog = createUpstreamLineageMCL(upstreamUrn, downstreamUrn, changeType); + private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed( + Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception { + final MetadataChangeLog metadataChangeLog = + createUpstreamLineageMCL(upstreamUrn, downstreamUrn, changeType); final StringMap properties = new StringMap(); properties.put(APP_SOURCE, UI_SOURCE); final SystemMetadata systemMetadata = new SystemMetadata().setProperties(properties); @@ -447,7 +497,8 @@ private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed(Urn upstreamUrn return metadataChangeLog; } - private MetadataChangeLog createInputFieldsMCL(Urn upstreamUrn, String downstreamFieldPath) throws Exception { + private MetadataChangeLog createInputFieldsMCL(Urn upstreamUrn, String downstreamFieldPath) + throws Exception { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(Constants.CHART_ENTITY_NAME); 
event.setAspectName(Constants.INPUT_FIELDS_ASPECT_NAME); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java index 7d9619f3e2d1c..8400e19ce49a3 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.kafka.hook.event; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.assertion.AssertionResult; @@ -64,30 +68,27 @@ import com.linkedin.platform.event.v1.Parameters; import java.net.URISyntaxException; import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - - /** * Tests the {@link EntityChangeEventGeneratorHook}. * - * TODO: Include Schema Field Tests, description update tests. + * <p>TODO: Include Schema Field Tests, description update tests. */ public class EntityChangeEventGeneratorHookTest { private static final long EVENT_TIME = 123L; - private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleDataset,PROD)"; + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleDataset,PROD)"; private static final String TEST_ACTOR_URN = "urn:li:corpuser:test"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:123"; private static final String TEST_RUN_ID = "runId"; - private static final String TEST_DATA_PROCESS_INSTANCE_URN = "urn:li:dataProcessInstance:instance"; - private static final String TEST_DATA_PROCESS_INSTANCE_PARENT_URN = "urn:li:dataProcessInstance:parent"; + private static final String TEST_DATA_PROCESS_INSTANCE_URN = + "urn:li:dataProcessInstance:instance"; + private static final String TEST_DATA_PROCESS_INSTANCE_PARENT_URN = + "urn:li:dataProcessInstance:parent"; private static final String TEST_DATA_FLOW_URN = "urn:li:dataFlow:flow"; private static final String TEST_DATA_JOB_URN = "urn:li:dataJob:job"; private Urn actorUrn; @@ -101,9 +102,11 @@ public void setupTest() throws URISyntaxException { actorUrn = Urn.createFromString(TEST_ACTOR_URN); _mockClient = Mockito.mock(SystemRestliEntityClient.class); _mockEntityService = Mockito.mock(EntityService.class); - EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry = createEntityChangeEventGeneratorRegistry(); + EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry = + createEntityChangeEventGeneratorRegistry(); _entityChangeEventHook = - new EntityChangeEventGeneratorHook(entityChangeEventGeneratorRegistry, _mockClient, createMockEntityRegistry(), true); + new EntityChangeEventGeneratorHook( + entityChangeEventGeneratorRegistry, _mockClient, createMockEntityRegistry(), true); } @Test @@ -114,10 +117,8 @@ public void testInvokeEntityAddTagChange() throws Exception { event.setChangeType(ChangeType.UPSERT); final GlobalTags newTags = new GlobalTags(); final 
TagUrn newTagUrn = new TagUrn("Test"); - newTags.setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation() - .setTag(newTagUrn) - ))); + newTags.setTags( + new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(newTagUrn)))); event.setAspect(GenericRecordUtils.serializeAspect(newTags)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -128,8 +129,14 @@ public void testInvokeEntityAddTagChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.TAG, - ChangeOperation.ADD, newTagUrn.toString(), ImmutableMap.of("tagUrn", newTagUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.TAG, + ChangeOperation.ADD, + newTagUrn.toString(), + ImmutableMap.of("tagUrn", newTagUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -142,10 +149,8 @@ public void testInvokeEntityRemoveTagChange() throws Exception { event.setChangeType(ChangeType.UPSERT); final GlobalTags existingTags = new GlobalTags(); final TagUrn newTagUrn = new TagUrn("Test"); - existingTags.setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation() - .setTag(newTagUrn) - ))); + existingTags.setTags( + new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(newTagUrn)))); event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(existingTags)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -156,8 +161,14 @@ public void testInvokeEntityRemoveTagChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.TAG, - ChangeOperation.REMOVE, newTagUrn.toString(), ImmutableMap.of("tagUrn", newTagUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.TAG, + ChangeOperation.REMOVE, + newTagUrn.toString(), + ImmutableMap.of("tagUrn", newTagUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -170,11 +181,9 @@ public void testInvokeEntityAddTermChange() throws Exception { event.setChangeType(ChangeType.UPSERT); final GlossaryTerms newTerms = new GlossaryTerms(); final GlossaryTermUrn glossaryTermUrn = new GlossaryTermUrn("TestTerm"); - newTerms.setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation() - .setUrn(glossaryTermUrn) - ) - )); + newTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of(new GlossaryTermAssociation().setUrn(glossaryTermUrn)))); final GlossaryTerms previousTerms = new GlossaryTerms(); previousTerms.setTerms(new GlossaryTermAssociationArray()); event.setAspect(GenericRecordUtils.serializeAspect(newTerms)); @@ -188,8 +197,13 @@ public void testInvokeEntityAddTermChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.GLOSSARY_TERM, - ChangeOperation.ADD, glossaryTermUrn.toString(), ImmutableMap.of("termUrn", glossaryTermUrn.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.GLOSSARY_TERM, + ChangeOperation.ADD, + glossaryTermUrn.toString(), + 
ImmutableMap.of("termUrn", glossaryTermUrn.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); @@ -205,11 +219,9 @@ public void testInvokeEntityRemoveTermChange() throws Exception { newTerms.setTerms(new GlossaryTermAssociationArray()); final GlossaryTerms previousTerms = new GlossaryTerms(); final GlossaryTermUrn glossaryTermUrn = new GlossaryTermUrn("TestTerm"); - previousTerms.setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation() - .setUrn(glossaryTermUrn) - ) - )); + previousTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of(new GlossaryTermAssociation().setUrn(glossaryTermUrn)))); event.setAspect(GenericRecordUtils.serializeAspect(newTerms)); event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(previousTerms)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); @@ -221,8 +233,13 @@ public void testInvokeEntityRemoveTermChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.GLOSSARY_TERM, - ChangeOperation.REMOVE, glossaryTermUrn.toString(), ImmutableMap.of("termUrn", glossaryTermUrn.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.GLOSSARY_TERM, + ChangeOperation.REMOVE, + glossaryTermUrn.toString(), + ImmutableMap.of("termUrn", glossaryTermUrn.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); @@ -236,8 +253,7 @@ public void testInvokeEntitySetDomain() throws Exception { event.setChangeType(ChangeType.UPSERT); final Domains newDomains = new Domains(); final Urn domainUrn = Urn.createFromString("urn:li:domain:test"); - newDomains.setDomains(new UrnArray( - ImmutableList.of(domainUrn))); + newDomains.setDomains(new UrnArray(ImmutableList.of(domainUrn))); event.setAspect(GenericRecordUtils.serializeAspect(newDomains)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -248,8 +264,14 @@ public void testInvokeEntitySetDomain() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DOMAIN, - ChangeOperation.ADD, domainUrn.toString(), ImmutableMap.of("domainUrn", domainUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.DOMAIN, + ChangeOperation.ADD, + domainUrn.toString(), + ImmutableMap.of("domainUrn", domainUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -262,8 +284,7 @@ public void testInvokeEntityUnsetDomain() throws Exception { event.setChangeType(ChangeType.UPSERT); final Domains previousDomains = new Domains(); final Urn domainUrn = Urn.createFromString("urn:li:domain:test"); - previousDomains.setDomains(new UrnArray( - ImmutableList.of(domainUrn))); + previousDomains.setDomains(new UrnArray(ImmutableList.of(domainUrn))); event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(previousDomains)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -274,8 +295,14 @@ public void testInvokeEntityUnsetDomain() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DOMAIN, - ChangeOperation.REMOVE, 
domainUrn.toString(), ImmutableMap.of("domainUrn", domainUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.DOMAIN, + ChangeOperation.REMOVE, + domainUrn.toString(), + ImmutableMap.of("domainUrn", domainUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -289,12 +316,11 @@ public void testInvokeEntityOwnerChange() throws Exception { final Ownership newOwners = new Ownership(); final Urn ownerUrn1 = Urn.createFromString("urn:li:corpuser:test1"); final Urn ownerUrn2 = Urn.createFromString("urn:li:corpuser:test2"); - newOwners.setOwners(new OwnerArray( - ImmutableList.of( - new Owner().setOwner(ownerUrn1).setType(OwnershipType.TECHNICAL_OWNER), - new Owner().setOwner(ownerUrn2).setType(OwnershipType.BUSINESS_OWNER) - ) - )); + newOwners.setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setOwner(ownerUrn1).setType(OwnershipType.TECHNICAL_OWNER), + new Owner().setOwner(ownerUrn2).setType(OwnershipType.BUSINESS_OWNER)))); final Ownership prevOwners = new Ownership(); prevOwners.setOwners(new OwnerArray()); event.setAspect(GenericRecordUtils.serializeAspect(newOwners)); @@ -308,16 +334,32 @@ public void testInvokeEntityOwnerChange() throws Exception { // Create Platform Event PlatformEvent platformEvent1 = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.OWNER, - ChangeOperation.ADD, ownerUrn1.toString(), - ImmutableMap.of("ownerUrn", ownerUrn1.toString(), "ownerType", OwnershipType.TECHNICAL_OWNER.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.OWNER, + ChangeOperation.ADD, + ownerUrn1.toString(), + ImmutableMap.of( + "ownerUrn", + ownerUrn1.toString(), + "ownerType", + OwnershipType.TECHNICAL_OWNER.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent1, false); PlatformEvent platformEvent2 = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.OWNER, - ChangeOperation.ADD, ownerUrn2.toString(), - ImmutableMap.of("ownerUrn", ownerUrn2.toString(), "ownerType", OwnershipType.BUSINESS_OWNER.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.OWNER, + ChangeOperation.ADD, + ownerUrn2.toString(), + ImmutableMap.of( + "ownerUrn", + ownerUrn2.toString(), + "ownerType", + OwnershipType.BUSINESS_OWNER.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent2, true); } @@ -344,8 +386,14 @@ public void testInvokeEntityTermDeprecation() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DEPRECATION, - ChangeOperation.MODIFY, null, ImmutableMap.of("status", "DEPRECATED"), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.DEPRECATION, + ChangeOperation.MODIFY, + null, + ImmutableMap.of("status", "DEPRECATED"), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -372,8 +420,14 @@ public void testInvokeEntityCreate() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE, - ChangeOperation.CREATE, null, null, actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + 
Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.LIFECYCLE, + ChangeOperation.CREATE, + null, + null, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -400,8 +454,14 @@ public void testInvokeEntityHardDelete() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE, - ChangeOperation.HARD_DELETE, null, null, actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.LIFECYCLE, + ChangeOperation.HARD_DELETE, + null, + null, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -426,8 +486,14 @@ public void testInvokeEntitySoftDelete() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE, - ChangeOperation.SOFT_DELETE, null, null, actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.LIFECYCLE, + ChangeOperation.SOFT_DELETE, + null, + null, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -464,8 +530,14 @@ public void testInvokeAssertionRunEventCreate() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(ASSERTION_ENTITY_NAME, assertionUrn, ChangeCategory.RUN, ChangeOperation.COMPLETED, null, - paramsMap, actorUrn); + createChangeEvent( + ASSERTION_ENTITY_NAME, + assertionUrn, + ChangeCategory.RUN, + ChangeOperation.COMPLETED, + null, + paramsMap, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -487,25 +559,37 @@ public void testInvokeDataProcessInstanceRunEventStart() throws Exception { event.setCreated(new AuditStamp().setActor(actorUrn).setTime(EVENT_TIME)); DataProcessInstanceRelationships relationships = - new DataProcessInstanceRelationships().setParentInstance( - Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) + new DataProcessInstanceRelationships() + .setParentInstance(Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) .setParentTemplate(Urn.createFromString(TEST_DATA_JOB_URN)); final EntityResponse entityResponse = - buildEntityResponse(ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); + buildEntityResponse( + ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); Mockito.when(_mockClient.getV2(eq(dataProcessInstanceUrn), any())).thenReturn(entityResponse); _entityChangeEventHook.invoke(event); Map<String, Object> parameters = - ImmutableMap.of(ATTEMPT_KEY, 1, PARENT_INSTANCE_URN_KEY, TEST_DATA_PROCESS_INSTANCE_PARENT_URN, - DATA_JOB_URN_KEY, TEST_DATA_JOB_URN); + ImmutableMap.of( + ATTEMPT_KEY, + 1, + PARENT_INSTANCE_URN_KEY, + TEST_DATA_PROCESS_INSTANCE_PARENT_URN, + DATA_JOB_URN_KEY, + TEST_DATA_JOB_URN); // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATA_PROCESS_INSTANCE_ENTITY_NAME, dataProcessInstanceUrn, ChangeCategory.RUN, - ChangeOperation.STARTED, null, parameters, actorUrn); + createChangeEvent( + DATA_PROCESS_INSTANCE_ENTITY_NAME, + dataProcessInstanceUrn, + ChangeCategory.RUN, + ChangeOperation.STARTED, + null, + parameters, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent, false); } @@ -521,7 +605,8 @@ public void testInvokeDataProcessInstanceRunEventComplete() throws Exception { event.setChangeType(ChangeType.UPSERT); 
DataProcessInstanceRunEvent dataProcessInstanceRunEvent = - new DataProcessInstanceRunEvent().setStatus(DataProcessRunStatus.COMPLETE) + new DataProcessInstanceRunEvent() + .setStatus(DataProcessRunStatus.COMPLETE) .setAttempt(1) .setResult(new DataProcessInstanceRunResult().setType(RunResultType.SUCCESS)); @@ -529,24 +614,38 @@ public void testInvokeDataProcessInstanceRunEventComplete() throws Exception { event.setCreated(new AuditStamp().setActor(actorUrn).setTime(EVENT_TIME)); DataProcessInstanceRelationships relationships = - new DataProcessInstanceRelationships().setParentInstance( - Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) + new DataProcessInstanceRelationships() + .setParentInstance(Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) .setParentTemplate(Urn.createFromString(TEST_DATA_FLOW_URN)); final EntityResponse entityResponse = - buildEntityResponse(ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); + buildEntityResponse( + ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); Mockito.when(_mockClient.getV2(eq(dataProcessInstanceUrn), any())).thenReturn(entityResponse); _entityChangeEventHook.invoke(event); Map<String, Object> parameters = - ImmutableMap.of(ATTEMPT_KEY, 1, RUN_RESULT_KEY, RunResultType.SUCCESS.toString(), PARENT_INSTANCE_URN_KEY, - TEST_DATA_PROCESS_INSTANCE_PARENT_URN, DATA_FLOW_URN_KEY, TEST_DATA_FLOW_URN); + ImmutableMap.of( + ATTEMPT_KEY, + 1, + RUN_RESULT_KEY, + RunResultType.SUCCESS.toString(), + PARENT_INSTANCE_URN_KEY, + TEST_DATA_PROCESS_INSTANCE_PARENT_URN, + DATA_FLOW_URN_KEY, + TEST_DATA_FLOW_URN); // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATA_PROCESS_INSTANCE_ENTITY_NAME, dataProcessInstanceUrn, ChangeCategory.RUN, - ChangeOperation.COMPLETED, null, parameters, actorUrn); + createChangeEvent( + DATA_PROCESS_INSTANCE_ENTITY_NAME, + dataProcessInstanceUrn, + ChangeCategory.RUN, + ChangeOperation.COMPLETED, + null, + parameters, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent, false); } @@ -572,8 +671,14 @@ public void testInvokeIneligibleAspect() throws Exception { Mockito.verifyNoMoreInteractions(_mockClient); } - private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, ChangeCategory category, - ChangeOperation operation, String modifier, Map<String, Object> parameters, Urn actor) { + private PlatformEvent createChangeEvent( + String entityType, + Urn entityUrn, + ChangeCategory category, + ChangeOperation operation, + String modifier, + Map<String, Object> parameters, + Urn actor) { final EntityChangeEvent changeEvent = new EntityChangeEvent(); changeEvent.setEntityType(entityType); changeEvent.setEntityUrn(entityUrn); @@ -582,7 +687,8 @@ private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, Change if (modifier != null) { changeEvent.setModifier(modifier); } - changeEvent.setAuditStamp(new AuditStamp().setActor(actor).setTime(EntityChangeEventGeneratorHookTest.EVENT_TIME)); + changeEvent.setAuditStamp( + new AuditStamp().setActor(actor).setTime(EntityChangeEventGeneratorHookTest.EVENT_TIME)); changeEvent.setVersion(0); if (parameters != null) { changeEvent.setParameters(new Parameters(new DataMap(parameters))); @@ -590,7 +696,8 @@ private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, Change final PlatformEvent platformEvent = new PlatformEvent(); platformEvent.setName(CHANGE_EVENT_PLATFORM_EVENT_NAME); platformEvent.setHeader( - new 
PlatformEventHeader().setTimestampMillis(EntityChangeEventGeneratorHookTest.EVENT_TIME)); + new PlatformEventHeader() + .setTimestampMillis(EntityChangeEventGeneratorHookTest.EVENT_TIME)); platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent)); return platformEvent; } @@ -611,7 +718,8 @@ private EntityChangeEventGeneratorRegistry createEntityChangeEventGeneratorRegis // Run change event generators registry.register(ASSERTION_RUN_EVENT_ASPECT_NAME, new AssertionRunEventChangeEventGenerator()); - registry.register(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + registry.register( + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, new DataProcessInstanceRunEventChangeEventGenerator(_mockClient)); return registry; } @@ -637,7 +745,8 @@ private EntityRegistry createMockEntityRegistry() { Mockito.when(datasetSpec.getAspectSpec(eq(DOMAINS_ASPECT_NAME))).thenReturn(mockDomains); AspectSpec mockDeprecation = createMockAspectSpec(Deprecation.class); - Mockito.when(datasetSpec.getAspectSpec(eq(DEPRECATION_ASPECT_NAME))).thenReturn(mockDeprecation); + Mockito.when(datasetSpec.getAspectSpec(eq(DEPRECATION_ASPECT_NAME))) + .thenReturn(mockDeprecation); AspectSpec mockDatasetKey = createMockAspectSpec(DatasetKey.class); Mockito.when(datasetSpec.getAspectSpec(eq(DATASET_KEY_ASPECT_NAME))).thenReturn(mockDatasetKey); @@ -647,29 +756,39 @@ private EntityRegistry createMockEntityRegistry() { // Build Assertion Entity Spec EntitySpec assertionSpec = Mockito.mock(EntitySpec.class); AspectSpec mockAssertionRunEvent = createMockAspectSpec(AssertionRunEvent.class); - Mockito.when(assertionSpec.getAspectSpec(eq(ASSERTION_RUN_EVENT_ASPECT_NAME))).thenReturn(mockAssertionRunEvent); + Mockito.when(assertionSpec.getAspectSpec(eq(ASSERTION_RUN_EVENT_ASPECT_NAME))) + .thenReturn(mockAssertionRunEvent); Mockito.when(registry.getEntitySpec(eq(ASSERTION_ENTITY_NAME))).thenReturn(assertionSpec); // Build Data Process Instance Entity Spec EntitySpec dataProcessInstanceSpec = Mockito.mock(EntitySpec.class); - AspectSpec mockDataProcessInstanceRunEvent = createMockAspectSpec(DataProcessInstanceRunEvent.class); - Mockito.when(dataProcessInstanceSpec.getAspectSpec(eq(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME))) + AspectSpec mockDataProcessInstanceRunEvent = + createMockAspectSpec(DataProcessInstanceRunEvent.class); + Mockito.when( + dataProcessInstanceSpec.getAspectSpec(eq(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME))) .thenReturn(mockDataProcessInstanceRunEvent); - Mockito.when(registry.getEntitySpec(DATA_PROCESS_INSTANCE_ENTITY_NAME)).thenReturn(dataProcessInstanceSpec); + Mockito.when(registry.getEntitySpec(DATA_PROCESS_INSTANCE_ENTITY_NAME)) + .thenReturn(dataProcessInstanceSpec); return registry; } - private void verifyProducePlatformEvent(SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception { + private void verifyProducePlatformEvent( + SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception { verifyProducePlatformEvent(mockClient, platformEvent, true); } - private void verifyProducePlatformEvent(SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) throws Exception { + private void verifyProducePlatformEvent( + SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) + throws Exception { // Verify event has been emitted. 
- verify(mockClient, Mockito.times(1)).producePlatformEvent(eq(CHANGE_EVENT_PLATFORM_EVENT_NAME), Mockito.anyString(), - argThat(new PlatformEventMatcher(platformEvent))); + verify(mockClient, Mockito.times(1)) + .producePlatformEvent( + eq(CHANGE_EVENT_PLATFORM_EVENT_NAME), + Mockito.anyString(), + argThat(new PlatformEventMatcher(platformEvent))); if (noMoreInteractions) { Mockito.verifyNoMoreInteractions(_mockClient); @@ -686,9 +805,10 @@ private EntityResponse buildEntityResponse(Map<String, RecordTemplate> aspects) final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); for (Map.Entry<String, RecordTemplate> entry : aspects.entrySet()) { - aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data()))); + aspectMap.put( + entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data()))); } entityResponse.setAspects(aspectMap); return entityResponse; } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java index aafc87b2db5a2..8a3fb237e816f 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java @@ -6,7 +6,6 @@ import javax.annotation.Nonnull; import org.mockito.ArgumentMatcher; - public class PlatformEventMatcher implements ArgumentMatcher<PlatformEvent> { private final PlatformEvent _expected; @@ -16,26 +15,34 @@ public PlatformEventMatcher(@Nonnull final PlatformEvent expected) { @Override public boolean matches(@Nonnull final PlatformEvent actual) { - return _expected.getName().equals(actual.getName()) && _expected.getHeader() - .getTimestampMillis() - .equals(actual.getHeader().getTimestampMillis()) && payloadMatches(actual); + return _expected.getName().equals(actual.getName()) + && _expected + .getHeader() + .getTimestampMillis() + .equals(actual.getHeader().getTimestampMillis()) + && payloadMatches(actual); } public boolean payloadMatches(@Nonnull final PlatformEvent actual) { final EntityChangeEvent expectedChangeEvent = - GenericRecordUtils.deserializePayload(_expected.getPayload().getValue(), EntityChangeEvent.class); + GenericRecordUtils.deserializePayload( + _expected.getPayload().getValue(), EntityChangeEvent.class); final EntityChangeEvent actualChangeEvent = - GenericRecordUtils.deserializePayload(actual.getPayload().getValue(), EntityChangeEvent.class); - boolean requiredFieldsMatch = expectedChangeEvent.getEntityType().equals(actualChangeEvent.getEntityType()) - && expectedChangeEvent.getEntityUrn().equals(actualChangeEvent.getEntityUrn()) - && expectedChangeEvent.getCategory().equals(actualChangeEvent.getCategory()) - && expectedChangeEvent.getOperation().equals(actualChangeEvent.getOperation()) - && expectedChangeEvent.getAuditStamp().equals(actualChangeEvent.getAuditStamp()) - && expectedChangeEvent.getVersion().equals(actualChangeEvent.getVersion()); + GenericRecordUtils.deserializePayload( + actual.getPayload().getValue(), EntityChangeEvent.class); + boolean requiredFieldsMatch = + expectedChangeEvent.getEntityType().equals(actualChangeEvent.getEntityType()) + && expectedChangeEvent.getEntityUrn().equals(actualChangeEvent.getEntityUrn()) + && 
expectedChangeEvent.getCategory().equals(actualChangeEvent.getCategory()) + && expectedChangeEvent.getOperation().equals(actualChangeEvent.getOperation()) + && expectedChangeEvent.getAuditStamp().equals(actualChangeEvent.getAuditStamp()) + && expectedChangeEvent.getVersion().equals(actualChangeEvent.getVersion()); boolean modifierMatches = - !expectedChangeEvent.hasModifier() || expectedChangeEvent.getModifier().equals(actualChangeEvent.getModifier()); - boolean parametersMatch = !expectedChangeEvent.hasParameters() || expectedChangeEvent.getParameters() - .equals(actualChangeEvent.getParameters()); + !expectedChangeEvent.hasModifier() + || expectedChangeEvent.getModifier().equals(actualChangeEvent.getModifier()); + boolean parametersMatch = + !expectedChangeEvent.hasParameters() + || expectedChangeEvent.getParameters().equals(actualChangeEvent.getParameters()); return requiredFieldsMatch && modifierMatches && parametersMatch; } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java index a4aa00e228725..843502b2eefad 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hook.ingestion; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; + import com.datahub.metadata.ingestion.IngestionScheduler; import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; @@ -10,13 +13,8 @@ import com.linkedin.mxe.MetadataChangeLog; import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; - import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; - - public class IngestionSchedulerHookTest { private IngestionSchedulerHook _ingestionSchedulerHook; @@ -33,18 +31,22 @@ public void testInvoke() throws Exception { event.setAspectName(INGESTION_INFO_ASPECT_NAME); event.setChangeType(ChangeType.UPSERT); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Runs at 01:00 on day 1 of every month newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); event.setAspect(GenericRecordUtils.serializeAspect(newInfo)); event.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0")); _ingestionSchedulerHook.invoke(event); - Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).scheduleNextIngestionSourceExecution(Mockito.any(), Mockito.any()); + Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)) + .scheduleNextIngestionSourceExecution(Mockito.any(), Mockito.any()); } @Test @@ -55,7 
+57,8 @@ public void testInvokeDeleteKeyAspect() throws Exception { event2.setChangeType(ChangeType.DELETE); event2.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0")); _ingestionSchedulerHook.invoke(event2); - Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).unscheduleNextIngestionSourceExecution(Mockito.any()); + Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)) + .unscheduleNextIngestionSourceExecution(Mockito.any()); } @Test @@ -66,7 +69,8 @@ public void testInvokeDeleteInfoAspect() throws Exception { event2.setChangeType(ChangeType.DELETE); event2.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0")); _ingestionSchedulerHook.invoke(event2); - Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).unscheduleNextIngestionSourceExecution(Mockito.any()); + Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)) + .unscheduleNextIngestionSourceExecution(Mockito.any()); } @Test @@ -78,5 +82,3 @@ public void testInvokeWrongAspect() { Mockito.verifyNoInteractions(_ingestionSchedulerHook.scheduler()); } } - - diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index 93e98b7343cd4..d4c6d122a6689 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hook.siblings; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.FabricType; @@ -31,16 +34,11 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; +import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.net.URISyntaxException; - -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; - - public class SiblingAssociationHookTest { private SiblingAssociationHook _siblingAssociationHook; SystemRestliEntityClient _mockEntityClient; @@ -48,11 +46,15 @@ public class SiblingAssociationHookTest { @BeforeMethod public void setupTest() { - EntityRegistry registry = new ConfigEntityRegistry( - SiblingAssociationHookTest.class.getClassLoader().getResourceAsStream("test-entity-registry-siblings.yml")); + EntityRegistry registry = + new ConfigEntityRegistry( + SiblingAssociationHookTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry-siblings.yml")); _mockEntityClient = Mockito.mock(SystemRestliEntityClient.class); _mockSearchService = Mockito.mock(EntitySearchService.class); - _siblingAssociationHook = new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true); + _siblingAssociationHook = + new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true); _siblingAssociationHook.setEnabled(true); } @@ -61,23 +63,28 @@ public void testInvokeWhenThereIsAPairWithDbtSourceNode() throws Exception { SubTypes mockSourceSubtypesAspect = new SubTypes(); 
mockSourceSubtypesAspect.setTypeNames(new StringArray(ImmutableList.of("source"))); EnvelopedAspectMap mockResponseMap = new EnvelopedAspectMap(); - mockResponseMap.put(SUB_TYPES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); + mockResponseMap.put( + SUB_TYPES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); EntityResponse mockResponse = new EntityResponse(); mockResponse.setAspects(mockResponseMap); Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - Mockito.when( - _mockEntityClient.getV2( - Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), - ImmutableSet.of(SUB_TYPES_ASPECT_NAME) - )).thenReturn(mockResponse); - - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); - - Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + _mockEntityClient.getV2( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), + ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) + .thenReturn(mockResponse); + + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + + Upstream upstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); @@ -85,34 +92,52 @@ public void testInvokeWhenThereIsAPairWithDbtSourceNode() throws Exception { upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), eq(true)); - final Siblings sourceSiblingsAspect = new Siblings() - .setSiblings(new 
UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(false); + final Siblings sourceSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(false); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); - proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + proposal2.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); proposal2.setEntityType(DATASET_ENTITY_NAME); proposal2.setAspectName(SIBLINGS_ASPECT_NAME); proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal2), eq(true)); } @Test @@ -123,20 +148,27 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception { Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); EnvelopedAspectMap mockResponseMap = new EnvelopedAspectMap(); - mockResponseMap.put(SUB_TYPES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); + mockResponseMap.put( + SUB_TYPES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); EntityResponse mockResponse = new EntityResponse(); mockResponse.setAspects(mockResponseMap); Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - Mockito.when( - _mockEntityClient.getV2( - Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), - ImmutableSet.of(SUB_TYPES_ASPECT_NAME))).thenReturn(mockResponse); - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); - Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + _mockEntityClient.getV2( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), + ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) + .thenReturn(mockResponse); + + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + Upstream upstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); @@ -145,65 +177,96 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception { upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new 
UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(0)) + .ingestProposal(Mockito.eq(proposal), eq(true)); } @Test public void testInvokeWhenThereIsAPairWithBigqueryDownstreamNode() throws Exception { Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream upstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(upstream); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, 
Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), eq(true)); - final Siblings sourceSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(false); + final Siblings sourceSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(false); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); - proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + proposal2.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); proposal2.setEntityType(DATASET_ENTITY_NAME); proposal2.setAspectName(SIBLINGS_ASPECT_NAME); proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal2), eq(true)); } @Test @@ -214,126 +277,176 @@ public void testInvokeWhenThereIsAKeyBeingReingested() throws Exception { SearchEntityArray returnEntityArray = new SearchEntityArray(); SearchEntity returnArrayValue = new SearchEntity(); returnArrayValue.setEntity( - Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)") - ); + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); returnEntityArray.add(returnArrayValue); returnSearchResult.setEntities(returnEntityArray); Mockito.when( - _mockSearchService.search( - any(), anyString(), any(), any(), anyInt(), anyInt(), eq(new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)) - )).thenReturn(returnSearchResult); - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, DATASET_KEY_ASPECT_NAME, ChangeType.UPSERT); + _mockSearchService.search( + any(), + anyString(), + any(), + any(), + anyInt(), + anyInt(), + eq( + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true)))) + .thenReturn(returnSearchResult); + + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, DATASET_KEY_ASPECT_NAME, ChangeType.UPSERT); final DatasetKey datasetKey = new DatasetKey(); datasetKey.setName("my-proj.jaffle_shop.customers"); datasetKey.setOrigin(FabricType.PROD); datasetKey.setPlatform(DataPlatformUrn.createFromString("urn:li:dataPlatform:bigquery")); event.setAspect(GenericRecordUtils.serializeAspect(datasetKey)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + 
"urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), eq(true)); - final Siblings sourceSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(false); + final Siblings sourceSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(false); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); - proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + proposal2.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); proposal2.setEntityType(DATASET_ENTITY_NAME); proposal2.setAspectName(SIBLINGS_ASPECT_NAME); proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal2), eq(true)); } + @Test public void testInvokeWhenSourceUrnHasTwoDbtUpstreams() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream dbtUpstream1 = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", DatasetLineageType.TRANSFORMED); - Upstream dbtUpstream2 = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity2,PROD)", DatasetLineageType.TRANSFORMED); + Upstream dbtUpstream1 = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", + DatasetLineageType.TRANSFORMED); + Upstream dbtUpstream2 = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity2,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(dbtUpstream1); upstreamArray.add(dbtUpstream2); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); 
_siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); - - } @Test public void testInvokeWhenSourceUrnHasTwoUpstreamsOneDbt() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream dbtUpstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", DatasetLineageType.TRANSFORMED); + Upstream dbtUpstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", + DatasetLineageType.TRANSFORMED); Upstream snowflakeUpstream = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(dbtUpstream); upstreamArray.add(snowflakeUpstream); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(2)).ingestProposal(Mockito.any(), eq(true)); } @Test public void testInvokeWhenSourceUrnHasTwoUpstreamsNoDbt() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream snowflakeUpstream1 = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream snowflakeUpstream1 = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", + DatasetLineageType.TRANSFORMED); Upstream snowflakeUpstream2 = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", DatasetLineageType.TRANSFORMED); + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(snowflakeUpstream1); upstreamArray.add(snowflakeUpstream2); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); } - private MetadataChangeLog createEvent(String entityType, String aspectName, ChangeType changeType) { + private MetadataChangeLog 
createEvent( + String entityType, String aspectName, ChangeType changeType) { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(entityType); event.setAspectName(aspectName); event.setChangeType(changeType); return event; } + private Upstream createUpstream(String urn, DatasetLineageType upstreamType) { final Upstream upstream = new Upstream(); @@ -346,6 +459,4 @@ private Upstream createUpstream(String urn, DatasetLineageType upstreamType) { return upstream; } - - - } +} diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java index b46308873ca16..6d1bdca9c116f 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hook.spring; +import static org.testng.AssertJUnit.*; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.kafka.MetadataChangeLogProcessor; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; @@ -13,29 +15,34 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - -@SpringBootTest(classes = { - MCLSpringTestConfiguration.class, ConfigurationProvider.class - }, +@SpringBootTest( + classes = {MCLSpringTestConfiguration.class, ConfigurationProvider.class}, properties = { "ingestionScheduler.enabled=false", "configEntityRegistry.path=../../metadata-jobs/mae-consumer/src/test/resources/test-entity-registry.yml", "kafka.schemaRegistry.type=INTERNAL" - }) -@TestPropertySource(locations = "classpath:/application.yml", properties = { - "MCL_CONSUMER_ENABLED=true" -}) + }) +@TestPropertySource( + locations = "classpath:/application.yml", + properties = {"MCL_CONSUMER_ENABLED=true"}) @EnableAutoConfiguration(exclude = {CassandraAutoConfiguration.class}) public class MCLSpringTest extends AbstractTestNGSpringContextTests { @Test public void testHooks() { - MetadataChangeLogProcessor metadataChangeLogProcessor = applicationContext.getBean(MetadataChangeLogProcessor.class); - assertTrue(metadataChangeLogProcessor.getHooks().stream().noneMatch(hook -> hook instanceof IngestionSchedulerHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof UpdateIndicesHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof SiblingAssociationHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); + MetadataChangeLogProcessor metadataChangeLogProcessor = + applicationContext.getBean(MetadataChangeLogProcessor.class); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .noneMatch(hook -> hook instanceof IngestionSchedulerHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof UpdateIndicesHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof SiblingAssociationHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); } } diff --git 
a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java index 1d9c17c676990..9d646819932e9 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java @@ -19,40 +19,32 @@ import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; - @Configuration -@ComponentScan(basePackages = { - "com.linkedin.metadata.kafka", - "com.linkedin.gms.factory.entity.update.indices" -}) +@ComponentScan( + basePackages = { + "com.linkedin.metadata.kafka", + "com.linkedin.gms.factory.entity.update.indices" + }) public class MCLSpringTestConfiguration { - @MockBean - public EntityRegistry entityRegistry; + @MockBean public EntityRegistry entityRegistry; - @MockBean - public ElasticSearchGraphService graphService; + @MockBean public ElasticSearchGraphService graphService; - @MockBean - public TimeseriesAspectService timeseriesAspectService; + @MockBean public TimeseriesAspectService timeseriesAspectService; - @MockBean - public SystemMetadataService systemMetadataService; + @MockBean public SystemMetadataService systemMetadataService; - @MockBean - public SearchDocumentTransformer searchDocumentTransformer; + @MockBean public SearchDocumentTransformer searchDocumentTransformer; - @MockBean - public IngestionScheduler ingestionScheduler; + @MockBean public IngestionScheduler ingestionScheduler; @MockBean(name = "systemRestliEntityClient") public SystemRestliEntityClient entityClient; - @MockBean - public ElasticSearchService searchService; + @MockBean public ElasticSearchService searchService; - @MockBean - public Authentication systemAuthentication; + @MockBean public Authentication systemAuthentication; @MockBean(name = "dataHubUpgradeKafkaListener") public DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener; @@ -63,9 +55,7 @@ public class MCLSpringTestConfiguration { @MockBean(name = "duheKafkaConsumerFactory") public DefaultKafkaConsumerFactory<String, GenericRecord> defaultKafkaConsumerFactory; - @MockBean - public SchemaRegistryService schemaRegistryService; + @MockBean public SchemaRegistryService schemaRegistryService; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index f0c59240a9ba4..0d8192a823865 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -12,34 +12,35 @@ import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.PropertySource; - @SuppressWarnings("checkstyle:HideUtilityClassConstructor") -@SpringBootApplication(exclude = { - ElasticsearchRestClientAutoConfiguration.class, - CassandraAutoConfiguration.class, - SolrHealthContributorAutoConfiguration.class -}) -@ComponentScan(basePackages = { - "com.linkedin.metadata.boot.kafka", - "com.linkedin.gms.factory.auth", - 
"com.linkedin.gms.factory.common", - "com.linkedin.gms.factory.config", - "com.linkedin.gms.factory.entity", - "com.linkedin.gms.factory.entityregistry", - "com.linkedin.gms.factory.kafka", - "com.linkedin.gms.factory.search", - "com.linkedin.gms.factory.secret", - "com.linkedin.gms.factory.timeseries", - "com.linkedin.restli.server", - "com.linkedin.metadata.restli", - "com.linkedin.metadata.kafka", - "com.linkedin.metadata.dao.producer" -}, excludeFilters = { - @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = { - ScheduledAnalyticsFactory.class, - RestliEntityClientFactory.class - }) -}) +@SpringBootApplication( + exclude = { + ElasticsearchRestClientAutoConfiguration.class, + CassandraAutoConfiguration.class, + SolrHealthContributorAutoConfiguration.class + }) +@ComponentScan( + basePackages = { + "com.linkedin.metadata.boot.kafka", + "com.linkedin.gms.factory.auth", + "com.linkedin.gms.factory.common", + "com.linkedin.gms.factory.config", + "com.linkedin.gms.factory.entity", + "com.linkedin.gms.factory.entityregistry", + "com.linkedin.gms.factory.kafka", + "com.linkedin.gms.factory.search", + "com.linkedin.gms.factory.secret", + "com.linkedin.gms.factory.timeseries", + "com.linkedin.restli.server", + "com.linkedin.metadata.restli", + "com.linkedin.metadata.kafka", + "com.linkedin.metadata.dao.producer" + }, + excludeFilters = { + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = {ScheduledAnalyticsFactory.class, RestliEntityClientFactory.class}) + }) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class MceConsumerApplication { diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java index abd73d03a7b55..990e0df102d37 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java @@ -1,74 +1,71 @@ package com.linkedin.metadata.restli; +import static com.linkedin.gms.factory.common.LocalEbeanServerConfigFactory.getListenerToTrackCounts; + import io.ebean.datasource.DataSourceConfig; +import java.util.HashMap; +import java.util.Map; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; -import java.util.HashMap; -import java.util.Map; - -import static com.linkedin.gms.factory.common.LocalEbeanServerConfigFactory.getListenerToTrackCounts; - @Configuration public class EbeanServerConfig { - @Value("${ebean.username}") - private String ebeanDatasourceUsername; - - @Value("${ebean.password}") - private String ebeanDatasourcePassword; + @Value("${ebean.username}") + private String ebeanDatasourceUsername; - @Value("${ebean.driver}") - private String ebeanDatasourceDriver; + @Value("${ebean.password}") + private String ebeanDatasourcePassword; - @Value("${ebean.minConnections:1}") - private Integer ebeanMinConnections; + @Value("${ebean.driver}") + private String ebeanDatasourceDriver; - @Value("${ebean.maxInactiveTimeSeconds:120}") - private Integer ebeanMaxInactiveTimeSecs; + @Value("${ebean.minConnections:1}") + private Integer ebeanMinConnections; - 
@Value("${ebean.maxAgeMinutes:120}") - private Integer ebeanMaxAgeMinutes; + @Value("${ebean.maxInactiveTimeSeconds:120}") + private Integer ebeanMaxInactiveTimeSecs; - @Value("${ebean.leakTimeMinutes:15}") - private Integer ebeanLeakTimeMinutes; + @Value("${ebean.maxAgeMinutes:120}") + private Integer ebeanMaxAgeMinutes; - @Value("${ebean.waitTimeoutMillis:1000}") - private Integer ebeanWaitTimeoutMillis; + @Value("${ebean.leakTimeMinutes:15}") + private Integer ebeanLeakTimeMinutes; - @Value("${ebean.autoCreateDdl:false}") - private Boolean ebeanAutoCreate; + @Value("${ebean.waitTimeoutMillis:1000}") + private Integer ebeanWaitTimeoutMillis; - @Value("${ebean.postgresUseIamAuth:false}") - private Boolean postgresUseIamAuth; + @Value("${ebean.autoCreateDdl:false}") + private Boolean ebeanAutoCreate; + @Value("${ebean.postgresUseIamAuth:false}") + private Boolean postgresUseIamAuth; - @Bean("ebeanDataSourceConfig") - @Primary - public DataSourceConfig buildDataSourceConfig( - @Value("${ebean.url}") String dataSourceUrl, - @Qualifier("parseqEngineThreads") int ebeanMaxConnections - ) { - DataSourceConfig dataSourceConfig = new DataSourceConfig(); - dataSourceConfig.setUsername(ebeanDatasourceUsername); - dataSourceConfig.setPassword(ebeanDatasourcePassword); - dataSourceConfig.setUrl(dataSourceUrl); - dataSourceConfig.setDriver(ebeanDatasourceDriver); - dataSourceConfig.setMinConnections(ebeanMinConnections); - dataSourceConfig.setMaxConnections(ebeanMaxConnections); - dataSourceConfig.setMaxInactiveTimeSecs(ebeanMaxInactiveTimeSecs); - dataSourceConfig.setMaxAgeMinutes(ebeanMaxAgeMinutes); - dataSourceConfig.setLeakTimeMinutes(ebeanLeakTimeMinutes); - dataSourceConfig.setWaitTimeoutMillis(ebeanWaitTimeoutMillis); - dataSourceConfig.setListener(getListenerToTrackCounts("mce-consumer")); - // Adding IAM auth access for AWS Postgres - if (postgresUseIamAuth) { - Map<String, String> custom = new HashMap<>(); - custom.put("wrapperPlugins", "iam"); - dataSourceConfig.setCustomProperties(custom); - } - return dataSourceConfig; + @Bean("ebeanDataSourceConfig") + @Primary + public DataSourceConfig buildDataSourceConfig( + @Value("${ebean.url}") String dataSourceUrl, + @Qualifier("parseqEngineThreads") int ebeanMaxConnections) { + DataSourceConfig dataSourceConfig = new DataSourceConfig(); + dataSourceConfig.setUsername(ebeanDatasourceUsername); + dataSourceConfig.setPassword(ebeanDatasourcePassword); + dataSourceConfig.setUrl(dataSourceUrl); + dataSourceConfig.setDriver(ebeanDatasourceDriver); + dataSourceConfig.setMinConnections(ebeanMinConnections); + dataSourceConfig.setMaxConnections(ebeanMaxConnections); + dataSourceConfig.setMaxInactiveTimeSecs(ebeanMaxInactiveTimeSecs); + dataSourceConfig.setMaxAgeMinutes(ebeanMaxAgeMinutes); + dataSourceConfig.setLeakTimeMinutes(ebeanLeakTimeMinutes); + dataSourceConfig.setWaitTimeoutMillis(ebeanWaitTimeoutMillis); + dataSourceConfig.setListener(getListenerToTrackCounts("mce-consumer")); + // Adding IAM auth access for AWS Postgres + if (postgresUseIamAuth) { + Map<String, String> custom = new HashMap<>(); + custom.put("wrapperPlugins", "iam"); + dataSourceConfig.setCustomProperties(custom); } + return dataSourceConfig; + } } diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java index 563cc5ce04c66..4d7e10d694c4e 100644 --- 
a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java @@ -6,6 +6,7 @@ import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; import com.linkedin.restli.server.RestliHandlerServlet; +import java.net.URI; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.web.servlet.FilterRegistrationBean; @@ -15,54 +16,53 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; -import java.net.URI; - @Configuration @Import({SystemAuthenticationFactory.class}) public class RestliServletConfig { - @Value("${server.port}") - private int configuredPort; + @Value("${server.port}") + private int configuredPort; - @Value("${entityClient.retryInterval:2}") - private int retryInterval; + @Value("${entityClient.retryInterval:2}") + private int retryInterval; - @Value("${entityClient.numRetries:3}") - private int numRetries; + @Value("${entityClient.numRetries:3}") + private int numRetries; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = String.format("http://localhost:%s/gms/", configuredPort); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); - } + @Bean("restliEntityClient") + @Primary + public RestliEntityClient restliEntityClient() { + String selfUri = String.format("http://localhost:%s/gms/", configuredPort); + final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); + return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); + } - @Bean("restliServletRegistration") - public ServletRegistrationBean<RestliHandlerServlet> restliServletRegistration( - @Qualifier("restliHandlerServlet") RestliHandlerServlet servlet) { - return new ServletRegistrationBean<>(servlet, "/gms/*"); - } + @Bean("restliServletRegistration") + public ServletRegistrationBean<RestliHandlerServlet> restliServletRegistration( + @Qualifier("restliHandlerServlet") RestliHandlerServlet servlet) { + return new ServletRegistrationBean<>(servlet, "/gms/*"); + } - @Bean - public RestliHandlerServlet restliHandlerServlet() { - return new RestliHandlerServlet(); - } + @Bean + public RestliHandlerServlet restliHandlerServlet() { + return new RestliHandlerServlet(); + } - @Bean - public FilterRegistrationBean<AuthenticationFilter> authenticationFilterRegistrationBean( - @Qualifier("restliServletRegistration") ServletRegistrationBean<RestliHandlerServlet> servlet - ) { - FilterRegistrationBean<AuthenticationFilter> registrationBean = new FilterRegistrationBean<>(); - registrationBean.addServletRegistrationBeans(servlet); - registrationBean.setOrder(1); - return registrationBean; - } + @Bean + public FilterRegistrationBean<AuthenticationFilter> authenticationFilterRegistrationBean( + @Qualifier("restliServletRegistration") + ServletRegistrationBean<RestliHandlerServlet> servlet) { + FilterRegistrationBean<AuthenticationFilter> registrationBean = new FilterRegistrationBean<>(); + registrationBean.addServletRegistrationBeans(servlet); + registrationBean.setOrder(1); + return registrationBean; + } - @Bean - public 
AuthenticationFilter authenticationFilter(FilterRegistrationBean<AuthenticationFilter> filterReg) { - AuthenticationFilter filter = new AuthenticationFilter(); - filterReg.setFilter(filter); - return filter; - } + @Bean + public AuthenticationFilter authenticationFilter( + FilterRegistrationBean<AuthenticationFilter> filterReg) { + AuthenticationFilter filter = new AuthenticationFilter(); + filterReg.setFilter(filter); + return filter; + } } diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java index c23cf1ea3d165..714c7b899ff49 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.kafka; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; +import static org.testng.AssertJUnit.assertTrue; + import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import org.springframework.beans.factory.annotation.Autowired; @@ -9,30 +13,25 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.when; -import static org.testng.AssertJUnit.assertTrue; - @ActiveProfiles("test") -@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, - classes = {MceConsumerApplication.class, MceConsumerApplicationTestConfiguration.class}) +@SpringBootTest( + webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, + classes = {MceConsumerApplication.class, MceConsumerApplicationTestConfiguration.class}) public class MceConsumerApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - private TestRestTemplate restTemplate; + @Autowired private TestRestTemplate restTemplate; - @Autowired - private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; - @Test - public void testRestliServletConfig() { - RestoreIndicesResult mockResult = new RestoreIndicesResult(); - mockResult.setRowsMigrated(100); - when(_mockEntityService.restoreIndices(any(), any())).thenReturn(mockResult); + @Test + public void testRestliServletConfig() { + RestoreIndicesResult mockResult = new RestoreIndicesResult(); + mockResult.setRowsMigrated(100); + when(_mockEntityService.restoreIndices(any(), any())).thenReturn(mockResult); - String response = this.restTemplate - .postForObject("/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); - assertTrue(response.contains(mockResult.toString())); - } + String response = + this.restTemplate.postForObject( + "/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); + assertTrue(response.contains(mockResult.toString())); + } } diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java index bee1441b5aaf6..1a44265c7a92a 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java +++ 
b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java @@ -13,6 +13,7 @@ import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; import io.ebean.Database; +import java.net.URI; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; @@ -21,44 +22,33 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; -import java.net.URI; - @TestConfiguration @Import(value = {SystemAuthenticationFactory.class}) public class MceConsumerApplicationTestConfiguration { - @Autowired - private TestRestTemplate restTemplate; + @Autowired private TestRestTemplate restTemplate; - @MockBean - public KafkaHealthChecker kafkaHealthChecker; + @MockBean public KafkaHealthChecker kafkaHealthChecker; - @MockBean - public EntityService _entityService; + @MockBean public EntityService _entityService; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = restTemplate.getRootUri(); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); - } + @Bean("restliEntityClient") + @Primary + public RestliEntityClient restliEntityClient() { + String selfUri = restTemplate.getRootUri(); + final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); + return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); + } - @MockBean - public Database ebeanServer; + @MockBean public Database ebeanServer; - @MockBean - protected TimeseriesAspectService timeseriesAspectService; + @MockBean protected TimeseriesAspectService timeseriesAspectService; - @MockBean - protected EntityRegistry entityRegistry; + @MockBean protected EntityRegistry entityRegistry; - @MockBean - protected ConfigEntityRegistry configEntityRegistry; + @MockBean protected ConfigEntityRegistry configEntityRegistry; - @MockBean - protected SiblingGraphService siblingGraphService; + @MockBean protected SiblingGraphService siblingGraphService; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java index 76e13d5e4da23..b04ecc7761eb6 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -13,9 +15,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static com.linkedin.metadata.Constants.*; - - @Controller @Import(GitVersionFactory.class) public class McpConsumerConfig { @@ -24,10 +23,15 @@ public class McpConsumerConfig { private static final ObjectMapper OBJECT_MAPPER = new 
ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public McpConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java index c30dd6e6f96dc..217b826689c7c 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java @@ -6,8 +6,8 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; import com.linkedin.metadata.snapshot.Snapshot; @@ -18,7 +18,6 @@ import com.linkedin.r2.RemoteInvocationException; import java.io.IOException; import javax.annotation.Nonnull; - import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -35,27 +34,38 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(MetadataChangeProposalProcessorCondition.class) -@Import({RestliEntityClientFactory.class, KafkaEventConsumerFactory.class, DataHubKafkaProducerFactory.class}) +@Import({ + RestliEntityClientFactory.class, + KafkaEventConsumerFactory.class, + DataHubKafkaProducerFactory.class +}) @EnableKafka @RequiredArgsConstructor public class MetadataChangeEventsProcessor { - @NonNull - private final Authentication systemAuthentication; + @NonNull private final Authentication systemAuthentication; private final SystemRestliEntityClient entityClient; private final Producer<String, IndexedRecord> kafkaProducer; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - @Value("${FAILED_METADATA_CHANGE_EVENT_NAME:${KAFKA_FMCE_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_EVENT + "}}") + @Value( + "${FAILED_METADATA_CHANGE_EVENT_NAME:${KAFKA_FMCE_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_EVENT + + "}}") private String fmceTopicName; - @KafkaListener(id = "${METADATA_CHANGE_EVENT_KAFKA_CONSUMER_GROUP_ID:mce-consumer-job-client}", topics = - "${METADATA_CHANGE_EVENT_NAME:${KAFKA_MCE_TOPIC_NAME:" + Topics.METADATA_CHANGE_EVENT + "}}", containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = 
"${METADATA_CHANGE_EVENT_KAFKA_CONSUMER_GROUP_ID:mce-consumer-job-client}", + topics = + "${METADATA_CHANGE_EVENT_NAME:${KAFKA_MCE_TOPIC_NAME:" + + Topics.METADATA_CHANGE_EVENT + + "}}", + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); @@ -77,21 +87,26 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) } private void sendFailedMCE(@Nonnull MetadataChangeEvent event, @Nonnull Throwable throwable) { - final FailedMetadataChangeEvent failedMetadataChangeEvent = createFailedMCEEvent(event, throwable); + final FailedMetadataChangeEvent failedMetadataChangeEvent = + createFailedMCEEvent(event, throwable); try { - final GenericRecord genericFailedMCERecord = EventUtils.pegasusToAvroFailedMCE(failedMetadataChangeEvent); + final GenericRecord genericFailedMCERecord = + EventUtils.pegasusToAvroFailedMCE(failedMetadataChangeEvent); log.debug("Sending FailedMessages to topic - {}", fmceTopicName); - log.info("Error while processing MCE: FailedMetadataChangeEvent - {}", failedMetadataChangeEvent); + log.info( + "Error while processing MCE: FailedMetadataChangeEvent - {}", failedMetadataChangeEvent); kafkaProducer.send(new ProducerRecord<>(fmceTopicName, genericFailedMCERecord)); } catch (IOException e) { - log.error("Error while sending FailedMetadataChangeEvent: Exception - {}, FailedMetadataChangeEvent - {}", - e.getStackTrace(), failedMetadataChangeEvent); + log.error( + "Error while sending FailedMetadataChangeEvent: Exception - {}, FailedMetadataChangeEvent - {}", + e.getStackTrace(), + failedMetadataChangeEvent); } } @Nonnull - private FailedMetadataChangeEvent createFailedMCEEvent(@Nonnull MetadataChangeEvent event, - @Nonnull Throwable throwable) { + private FailedMetadataChangeEvent createFailedMCEEvent( + @Nonnull MetadataChangeEvent event, @Nonnull Throwable throwable) { final FailedMetadataChangeEvent fmce = new FailedMetadataChangeEvent(); fmce.setError(ExceptionUtils.getStackTrace(throwable)); fmce.setMetadataChangeEvent(event); @@ -103,6 +118,7 @@ private void processProposedSnapshot(@Nonnull MetadataChangeEvent metadataChange final Snapshot snapshotUnion = metadataChangeEvent.getProposedSnapshot(); final Entity entity = new Entity().setValue(snapshotUnion); // TODO: GMS Auth Part 2: Get the actor identity from the event header itself. 
- entityClient.updateWithSystemMetadata(entity, metadataChangeEvent.getSystemMetadata(), this.systemAuthentication); + entityClient.updateWithSystemMetadata( + entity, metadataChangeEvent.getSystemMetadata(), this.systemAuthentication); } } diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java index 79f8c90af8ec7..b487ded6a9439 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java @@ -4,8 +4,8 @@ import com.codahale.metrics.MetricRegistry; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -14,7 +14,6 @@ import com.linkedin.mxe.Topics; import java.io.IOException; import javax.annotation.Nonnull; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -30,10 +29,13 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component -@Import({RestliEntityClientFactory.class, KafkaEventConsumerFactory.class, DataHubKafkaProducerFactory.class}) +@Import({ + RestliEntityClientFactory.class, + KafkaEventConsumerFactory.class, + DataHubKafkaProducerFactory.class +}) @Conditional(MetadataChangeProposalProcessorCondition.class) @EnableKafka @RequiredArgsConstructor @@ -42,14 +44,19 @@ public class MetadataChangeProposalsProcessor { private final SystemRestliEntityClient entityClient; private final Producer<String, IndexedRecord> kafkaProducer; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - @Value("${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_PROPOSAL + "}") + @Value( + "${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_PROPOSAL + + "}") private String fmcpTopicName; - @KafkaListener(id = "${METADATA_CHANGE_PROPOSAL_KAFKA_CONSUMER_GROUP_ID:generic-mce-consumer-job-client}", topics = - "${METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.METADATA_CHANGE_PROPOSAL - + "}", containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = "${METADATA_CHANGE_PROPOSAL_KAFKA_CONSUMER_GROUP_ID:generic-mce-consumer-job-client}", + topics = "${METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.METADATA_CHANGE_PROPOSAL + "}", + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); @@ -69,21 +76,27 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) } private void sendFailedMCP(@Nonnull 
MetadataChangeProposal event, @Nonnull Throwable throwable) { - final FailedMetadataChangeProposal failedMetadataChangeProposal = createFailedMCPEvent(event, throwable); + final FailedMetadataChangeProposal failedMetadataChangeProposal = + createFailedMCPEvent(event, throwable); try { - final GenericRecord genericFailedMCERecord = EventUtils.pegasusToAvroFailedMCP(failedMetadataChangeProposal); + final GenericRecord genericFailedMCERecord = + EventUtils.pegasusToAvroFailedMCP(failedMetadataChangeProposal); log.debug("Sending FailedMessages to topic - {}", fmcpTopicName); - log.info("Error while processing FMCP: FailedMetadataChangeProposal - {}", failedMetadataChangeProposal); + log.info( + "Error while processing FMCP: FailedMetadataChangeProposal - {}", + failedMetadataChangeProposal); kafkaProducer.send(new ProducerRecord<>(fmcpTopicName, genericFailedMCERecord)); } catch (IOException e) { - log.error("Error while sending FailedMetadataChangeProposal: Exception - {}, FailedMetadataChangeProposal - {}", - e.getStackTrace(), failedMetadataChangeProposal); + log.error( + "Error while sending FailedMetadataChangeProposal: Exception - {}, FailedMetadataChangeProposal - {}", + e.getStackTrace(), + failedMetadataChangeProposal); } } @Nonnull - private FailedMetadataChangeProposal createFailedMCPEvent(@Nonnull MetadataChangeProposal event, - @Nonnull Throwable throwable) { + private FailedMetadataChangeProposal createFailedMCPEvent( + @Nonnull MetadataChangeProposal event, @Nonnull Throwable throwable) { final FailedMetadataChangeProposal fmcp = new FailedMetadataChangeProposal(); fmcp.setError(ExceptionUtils.getStackTrace(throwable)); fmcp.setMetadataChangeProposal(event); diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java index 1b69b1113bdb1..2bbc8304f2e27 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java @@ -4,6 +4,7 @@ import com.linkedin.metadata.boot.BootstrapManager; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationListener; @@ -12,25 +13,22 @@ import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; -import javax.annotation.Nonnull; - - -/** - * Responsible for coordinating starting steps that happen before the application starts up. - */ +/** Responsible for coordinating starting steps that happen before the application starts up. 
*/ @Slf4j @Component @Conditional(MetadataChangeProposalProcessorCondition.class) public class ApplicationStartupListener implements ApplicationListener<ContextRefreshedEvent> { - private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); + private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = + String.format("%s:", WebApplicationContext.class.getName()); private final DataHubUpgradeKafkaListener _dataHubUpgradeKafkaListener; private final ConfigurationProvider _configurationProvider; private final BootstrapManager _mcpBootstrapManager; public ApplicationStartupListener( - @Qualifier("dataHubUpgradeKafkaListener") DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, + @Qualifier("dataHubUpgradeKafkaListener") + DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, ConfigurationProvider configurationProvider, @Qualifier("mcpBootstrapManager") BootstrapManager bootstrapManager) { _dataHubUpgradeKafkaListener = dataHubUpgradeKafkaListener; diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java index 44e5c7cff8661..0220764cd99d6 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java @@ -7,6 +7,8 @@ import com.linkedin.metadata.boot.dependencies.BootstrapDependency; import com.linkedin.metadata.boot.steps.WaitForSystemUpdateStep; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; +import java.util.List; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -15,10 +17,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Scope; -import javax.annotation.Nonnull; -import java.util.List; - - @Configuration @Conditional(MetadataChangeProposalProcessorCondition.class) public class MCPBootstrapManagerFactory { @@ -27,8 +25,7 @@ public class MCPBootstrapManagerFactory { @Qualifier("dataHubUpgradeKafkaListener") private BootstrapDependency _dataHubUpgradeKafkaListener; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}") private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -37,8 +34,8 @@ public class MCPBootstrapManagerFactory { @Scope("singleton") @Nonnull protected BootstrapManager createInstance() { - final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, - _configurationProvider); + final WaitForSystemUpdateStep waitForSystemUpdateStep = + new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); final List<BootstrapStep> finalSteps = ImmutableList.of(waitForSystemUpdateStep); diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java index 9dd265736bfc2..1cdb05b04e0ac 100644 --- 
a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java @@ -5,12 +5,11 @@ import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotatedTypeMetadata; - public class MetadataChangeProposalProcessorCondition implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { Environment env = context.getEnvironment(); - return "true".equals(env.getProperty("MCE_CONSUMER_ENABLED")) || "true".equals( - env.getProperty("MCP_CONSUMER_ENABLED")); + return "true".equals(env.getProperty("MCE_CONSUMER_ENABLED")) + || "true".equals(env.getProperty("MCP_CONSUMER_ENABLED")); } } diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java index aa09679cb08a8..84d4f4ae4c095 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java @@ -21,7 +21,6 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(PlatformEventProcessorCondition.class) @@ -30,7 +29,8 @@ public class PlatformEventProcessor { private final List<PlatformEventHook> hooks; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); @Autowired public PlatformEventProcessor() { @@ -39,8 +39,9 @@ public PlatformEventProcessor() { this.hooks.forEach(PlatformEventHook::init); } - @KafkaListener(id = "${PLATFORM_EVENT_KAFKA_CONSUMER_GROUP_ID:generic-platform-event-job-client}", topics = { - "${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}" }, + @KafkaListener( + id = "${PLATFORM_EVENT_KAFKA_CONSUMER_GROUP_ID:generic-platform-event-job-client}", + topics = {"${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}"}, containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) { @@ -48,14 +49,17 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); - log.debug("Got Generic PE on topic: {}, partition: {}, offset: {}", consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset()); + log.debug( + "Got Generic PE on topic: {}, partition: {}, offset: {}", + consumerRecord.topic(), + consumerRecord.partition(), + consumerRecord.offset()); MetricUtils.counter(this.getClass(), "received_pe_count").inc(); PlatformEvent event; try { event = EventUtils.avroToPegasusPE(record); - log.debug("Successfully converted Avro PE to Pegasus PE. name: {}", - event.getName()); + log.debug("Successfully converted Avro PE to Pegasus PE. 
name: {}", event.getName()); } catch (Exception e) { MetricUtils.counter(this.getClass(), "avro_to_pegasus_conversion_failure").inc(); log.error("Error deserializing message due to: ", e); @@ -66,8 +70,8 @@ public void consume(final ConsumerRecord<String, GenericRecord> consumerRecord) log.debug("Invoking PE hooks for event name {}", event.getName()); for (PlatformEventHook hook : this.hooks) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency") - .time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency").time()) { hook.invoke(event); } catch (Exception e) { // Just skip this hook and continue. diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java index 878e4edd371bc..3083642c5bfb6 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java @@ -6,20 +6,15 @@ /** * Custom hook which is invoked on receiving a new {@link PlatformEvent} event. * - * The semantics of this hook are currently "at most once". That is, the hook will not be called + * <p>The semantics of this hook are currently "at most once". That is, the hook will not be called * with the same message. In the future, we intend to migrate to "at least once" semantics, meaning * that the hook will be responsible for implementing idempotency. */ public interface PlatformEventHook { - /** - * Initialize the hook - */ - default void init() { } + /** Initialize the hook */ + default void init() {} - /** - * Invoke the hook when a PlatformEvent is received - */ + /** Invoke the hook when a PlatformEvent is received */ void invoke(@Nonnull PlatformEvent event); - } diff --git a/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java b/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java index aa86568bed01f..56d909781fd51 100644 --- a/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java +++ b/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java @@ -4,25 +4,21 @@ import com.linkedin.pegasus.generator.DataSchemaParser; import java.io.IOException; - /** * Validates GMS PDL models by constructing a set of {@link EntitySpec}s from them. * - * The following validation rules are applied: - * - * 1. Each Entity Snapshot Model is annotated as @Entity with a common name - * 2. Each Aspect is annotated as @Aspect with a common name - * 3. Each @Searchable field is of primitive / list of primitive type - * 4. Each @Relationship field is of Urn / List of Urn type - * 5. Each Entity Snapshot includes a single Key Aspect + * <p>The following validation rules are applied: * + * <p>1. Each Entity Snapshot Model is annotated as @Entity with a common name 2. Each Aspect is + * annotated as @Aspect with a common name 3. Each @Searchable field is of primitive / list of + * primitive type 4. Each @Relationship field is of Urn / List of Urn type 5. 
Each Entity Snapshot + * includes a single Key Aspect */ public class ModelValidationTask { private static final String SNAPSHOT_SCHEMA_NAME = "com.linkedin.metadata.snapshot.Snapshot"; - private ModelValidationTask() { - } + private ModelValidationTask() {} public static void main(String[] args) throws IOException { if (args.length != 3) { @@ -34,21 +30,25 @@ public static void main(String[] args) throws IOException { final String modelPath = args[1]; final DataSchemaParser parser = new DataSchemaParser(resolverPath); - parser.parseSources(new String[]{modelPath}); + parser.parseSources(new String[] {modelPath}); - final DataSchema snapshotSchema = parser.getSchemaResolver().existingDataSchema(SNAPSHOT_SCHEMA_NAME); + final DataSchema snapshotSchema = + parser.getSchemaResolver().existingDataSchema(SNAPSHOT_SCHEMA_NAME); if (snapshotSchema == null) { throw new RuntimeException( - String.format("Failed to find Snapshot model with name %s in parsed schemas!", SNAPSHOT_SCHEMA_NAME)); + String.format( + "Failed to find Snapshot model with name %s in parsed schemas!", + SNAPSHOT_SCHEMA_NAME)); } - // TODO: Fix this so that aspects that are just in the entity registry don't fail because they aren't in the + // TODO: Fix this so that aspects that are just in the entity registry don't fail because they + // aren't in the // snapshot registry. -// try { -// new EntitySpecBuilder().buildEntitySpecs(snapshotSchema); -// } catch (Exception e) { -// throw new RuntimeException("Failed to validate DataHub PDL models", e); -// } + // try { + // new EntitySpecBuilder().buildEntitySpecs(snapshotSchema); + // } catch (Exception e) { + // throw new RuntimeException("Failed to validate DataHub PDL models", e); + // } } -} \ No newline at end of file +} diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index bd8052283e168..e90a4042c1921 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -75,11 +75,7 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: 'generateJsonSchema') } tasks.getByName("compileJava").dependsOn(openApiGenerate) -checkstyleMain.exclude '**/generated/**' - task cleanExtraDirs { delete "$projectDir/src/generatedJsonSchema" } clean.finalizedBy(cleanExtraDirs) - -checkstyleMain.exclude '**/generated/**' diff --git a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java index c3db318ece23e..29f58223a240a 100644 --- a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java +++ b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java @@ -1,5 +1,8 @@ package com.linkedin.metadata; +import static com.linkedin.metadata.ModelValidationConstants.*; +import static org.testng.AssertJUnit.*; + import com.datahub.util.validator.AspectValidator; import com.datahub.util.validator.DeltaValidator; import com.datahub.util.validator.SnapshotValidator; @@ -13,10 +16,6 @@ import javax.annotation.Nonnull; import org.testng.annotations.Test; -import static com.linkedin.metadata.ModelValidationConstants.*; -import static org.testng.AssertJUnit.*; - - public class ModelValidation { @Test @@ -39,23 +38,28 @@ public void validateSnapshots() throws Exception { @Test public void validateDeltas() throws Exception { - getRecordTemplatesInPackage("com.linkedin.metadata.delta", IGNORED_DELTA_CLASSES).forEach( - DeltaValidator::validateDeltaSchema); + getRecordTemplatesInPackage("com.linkedin.metadata.delta", IGNORED_DELTA_CLASSES) + 
.forEach(DeltaValidator::validateDeltaSchema); } - private List<? extends Class<? extends UnionTemplate>> getUnionTemplatesInPackage(@Nonnull String packageName, - @Nonnull Set<Class<? extends UnionTemplate>> ignoreClasses) throws IOException { + private List<? extends Class<? extends UnionTemplate>> getUnionTemplatesInPackage( + @Nonnull String packageName, @Nonnull Set<Class<? extends UnionTemplate>> ignoreClasses) + throws IOException { return getClassesInPackage(packageName, UnionTemplate.class, ignoreClasses); } - private List<? extends Class<? extends RecordTemplate>> getRecordTemplatesInPackage(@Nonnull String packageName, - @Nonnull Set<Class<? extends RecordTemplate>> ignoreClasses) throws IOException { + private List<? extends Class<? extends RecordTemplate>> getRecordTemplatesInPackage( + @Nonnull String packageName, @Nonnull Set<Class<? extends RecordTemplate>> ignoreClasses) + throws IOException { return getClassesInPackage(packageName, RecordTemplate.class, ignoreClasses); } @SuppressWarnings("unchecked") - private <T> List<? extends Class<? extends T>> getClassesInPackage(@Nonnull String packageName, - @Nonnull Class<T> parentClass, @Nonnull Set<Class<? extends T>> ignoreClasses) throws IOException { + private <T> List<? extends Class<? extends T>> getClassesInPackage( + @Nonnull String packageName, + @Nonnull Class<T> parentClass, + @Nonnull Set<Class<? extends T>> ignoreClasses) + throws IOException { return ClassPath.from(ClassLoader.getSystemClassLoader()) .getTopLevelClasses(packageName) .stream() diff --git a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java index 11fa8cdc965d4..f9e8fcc06bcbb 100644 --- a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java +++ b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.UnionTemplate; import java.util.Set; - public class ModelValidationConstants { private ModelValidationConstants() { diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java index d3c5ba822ac04..b8553235b3de7 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java @@ -3,31 +3,25 @@ import java.util.List; import lombok.Data; -/** - * POJO representing the "authentication" configuration block in application.yml. - */ +/** POJO representing the "authentication" configuration block in application.yml. 
*/ @Data public class AuthenticationConfiguration { - /** - * Whether authentication is enabled - */ + /** Whether authentication is enabled */ private boolean enabled; + /** - * List of configurations for {@link com.datahub.plugins.auth.authentication.Authenticator}s to be registered + * List of configurations for {@link com.datahub.plugins.auth.authentication.Authenticator}s to be + * registered */ private List<AuthenticatorConfiguration> authenticators; - /** - * Unique id to identify internal system callers - */ + + /** Unique id to identify internal system callers */ private String systemClientId; - /** - * Unique secret to authenticate internal system callers - */ + + /** Unique secret to authenticate internal system callers */ private String systemClientSecret; - /** - * The lifespan of a UI session token. - */ + /** The lifespan of a UI session token. */ private long sessionTokenDurationMs; private TokenServiceConfiguration tokenService; diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java index 96a3f1b8f56bd..31cfe1c057468 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java @@ -1,29 +1,21 @@ package com.datahub.authentication; -/** - * A set of shared constants related to Authentication. - */ +/** A set of shared constants related to Authentication. */ public class AuthenticationConstants { - /** - * Name of the header which carries authorization information - */ + /** Name of the header which carries authorization information */ public static final String AUTHORIZATION_HEADER_NAME = "Authorization"; /** - * A deprecated header that previously carried the urn of the authenticated actor. - * This has been replaced by the DELEGATED_FOR_ACTOR_ID and DELEGATED_FOR_ACTOR_TYPE headers. + * A deprecated header that previously carried the urn of the authenticated actor. This has been + * replaced by the DELEGATED_FOR_ACTOR_ID and DELEGATED_FOR_ACTOR_TYPE headers. */ public static final String LEGACY_X_DATAHUB_ACTOR_HEADER = "X-DataHub-Actor"; - /** - * A header capturing the unique Actor Id that is delegating a request. - */ + /** A header capturing the unique Actor Id that is delegating a request. */ public static final String DELEGATED_FOR_ACTOR_ID_HEADER_NAME = "X-DataHub-Delegated-For-Id"; - /** - * A header capturing the unique Actor Type that is delegating a request. - */ + /** A header capturing the unique Actor Type that is delegating a request. 
*/
   public static final String DELEGATED_FOR_ACTOR_TYPE_HEADER_NAME = "X-DataHub-Delegated-For-Type";
 
   public static final String SYSTEM_CLIENT_ID_CONFIG = "systemClientId";
@@ -32,6 +24,5 @@ public class AuthenticationConstants {
   public static final String ENTITY_SERVICE = "entityService";
   public static final String TOKEN_SERVICE = "tokenService";
 
-  private AuthenticationConstants() {
-  }
+  private AuthenticationConstants() {}
 }
diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java
index 2d3cf5f588d7d..36814ee380e2f 100644
--- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java
+++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java
@@ -3,18 +3,21 @@
 import java.util.Map;
 import lombok.Data;
 
-
 /**
- * POJO representing {@link com.datahub.plugins.auth.authentication.Authenticator} configurations provided in the application.yml.
+ * POJO representing {@link com.datahub.plugins.auth.authentication.Authenticator} configurations
+ * provided in the application.yml.
  */
 @Data
 public class AuthenticatorConfiguration {
   /**
-   * A fully-qualified class name for the {@link com.datahub.plugins.auth.authentication.Authenticator} implementation to be registered.
+   * A fully-qualified class name for the {@link
+   * com.datahub.plugins.auth.authentication.Authenticator} implementation to be registered.
    */
   private String type;
+
   /**
-   * A set of authenticator-specific configurations passed through during "init" of the authenticator.
+   * A set of authenticator-specific configurations passed through during "init" of the
+   * authenticator.
    */
   private Map<String, Object> configs;
 }
diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java
index 0a606f0f06d92..70b93544bebdf 100644
--- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java
+++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java
@@ -2,11 +2,8 @@
 
 import lombok.Data;
 
-
 @Data
-/**
- * Configurations for DataHub token service
- */
+/** Configurations for DataHub token service */
 public class TokenServiceConfiguration {
   private String signingKey;
   private String salt;
diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java
index 2770fc5c41aa0..5ed69d3e2ff8c 100644
--- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java
+++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java
@@ -4,18 +4,12 @@
 import java.util.List;
 import lombok.Data;
 
-
-/**
- * POJO representing the "authentication" configuration block in application.yml.
- */
+/** POJO representing the "authorization" configuration block in application.yml. */
 @Data
 public class AuthorizationConfiguration {
-  /**
-   * Configuration for the default DataHub Policies-based authorizer.
-   */
+  /** Configuration for the default DataHub Policies-based authorizer. 
*/ private DefaultAuthorizerConfiguration defaultAuthorizer; - /** - * List of configurations for {@link Authorizer}s to be registered - */ + + /** List of configurations for {@link Authorizer}s to be registered */ private List<AuthorizerConfiguration> authorizers; -} \ No newline at end of file +} diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java index 65cd6c17c739c..c4a26a1cd6276 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java @@ -4,22 +4,15 @@ import java.util.Map; import lombok.Data; - -/** - * POJO representing {@link Authorizer} configurations provided in the application.yml. - */ +/** POJO representing {@link Authorizer} configurations provided in the application.yml. */ @Data public class AuthorizerConfiguration { - /** - * Whether to enable this authorizer - */ + /** Whether to enable this authorizer */ private boolean enabled; - /** - * A fully-qualified class name for the {@link Authorizer} implementation to be registered. - */ + + /** A fully-qualified class name for the {@link Authorizer} implementation to be registered. */ private String type; - /** - * A set of authorizer-specific configurations passed through during "init" of the authorizer. - */ + + /** A set of authorizer-specific configurations passed through during "init" of the authorizer. */ private Map<String, Object> configs; } diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java index dfec06dedd147..c06e5b10b23f9 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java @@ -2,15 +2,11 @@ import lombok.Data; - @Data public class DefaultAuthorizerConfiguration { - /** - * Whether authorization via DataHub policies is enabled. - */ + /** Whether authorization via DataHub policies is enabled. */ private boolean enabled; - /** - * The duration between policies cache refreshes. - */ + + /** The duration between policies cache refreshes. 
*/ private int cacheRefreshIntervalSecs; } diff --git a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java index 8c7b3ac8b98f0..335a30280c3be 100644 --- a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java +++ b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java @@ -1,19 +1,21 @@ package com.datahub.auth.authentication.filter; -import com.datahub.authentication.authenticator.AuthenticatorChain; -import com.datahub.authentication.authenticator.DataHubSystemAuthenticator; -import com.datahub.authentication.authenticator.HealthStatusAuthenticator; -import com.datahub.authentication.authenticator.NoOpAuthenticator; -import com.datahub.authentication.token.StatefulTokenService; -import com.datahub.plugins.PluginConstant; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.AuthenticationContext; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationRequest; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticatorConfiguration; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.authentication.authenticator.AuthenticatorChain; +import com.datahub.authentication.authenticator.DataHubSystemAuthenticator; +import com.datahub.authentication.authenticator.HealthStatusAuthenticator; +import com.datahub.authentication.authenticator.NoOpAuthenticator; +import com.datahub.authentication.token.StatefulTokenService; +import com.datahub.plugins.PluginConstant; +import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.plugins.common.PluginConfig; import com.datahub.plugins.common.PluginPermissionManager; import com.datahub.plugins.common.PluginType; @@ -49,18 +51,14 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.web.context.support.SpringBeanAutowiringSupport; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * A servlet {@link Filter} for authenticating requests inbound to the Metadata Service. This filter is applied to the - * GraphQL Servlet, the Rest.li Servlet, and the Auth (token) Servlet. + * A servlet {@link Filter} for authenticating requests inbound to the Metadata Service. This filter + * is applied to the GraphQL Servlet, the Rest.li Servlet, and the Auth (token) Servlet. */ @Slf4j public class AuthenticationFilter implements Filter { - @Inject - private ConfigurationProvider configurationProvider; + @Inject private ConfigurationProvider configurationProvider; @Inject @Named("entityService") @@ -90,23 +88,28 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha authentication = this.authenticatorChain.authenticate(context, _logAuthenticatorExceptions); } catch (AuthenticationException e) { // For AuthenticationExpiredExceptions, terminate and provide that feedback to the user - log.debug("Failed to authenticate request. Received an AuthenticationExpiredException from authenticator chain.", + log.debug( + "Failed to authenticate request. 
Received an AuthenticationExpiredException from authenticator chain.",
           e);
-      ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage());
+      ((HttpServletResponse) response)
+          .sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage());
       return;
     }
     if (authentication != null) {
       // Successfully authenticated.
-      log.debug(String.format("Successfully authenticated request for Actor with type: %s, id: %s",
-          authentication.getActor().getType(), authentication.getActor().getId()));
+      log.debug(
+          String.format(
+              "Successfully authenticated request for Actor with type: %s, id: %s",
+              authentication.getActor().getType(), authentication.getActor().getId()));
       AuthenticationContext.setAuthentication(authentication);
       chain.doFilter(request, response);
     } else {
       // Reject request
-      log.debug("Failed to authenticate request. Received 'null' Authentication value from authenticator chain.");
-      ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED,
-          "Unauthorized to perform this action.");
+      log.debug(
+          "Failed to authenticate request. Received 'null' Authentication value from authenticator chain.");
+      ((HttpServletResponse) response)
+          .sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized to perform this action.");
       return;
     }
     AuthenticationContext.remove();
@@ -120,9 +123,10 @@ public void destroy() {
   /**
    * Constructs an {@link AuthenticatorChain} via the provided {@link AuthenticationConfiguration}.
    *
-   * The process is simple: For each configured {@link Authenticator}, attempt to instantiate the class using a default (zero-arg)
-   * constructor, then call it's initialize method passing in a freeform block of associated configurations as a {@link Map}. Finally,
-   * register the {@link Authenticator} in the authenticator chain.
+   * <p>The process is simple: For each configured {@link Authenticator}, attempt to instantiate the
+   * class using a default (zero-arg) constructor, then call its initialize method passing in a
+   * freeform block of associated configurations as a {@link Map}. Finally, register the {@link
+   * Authenticator} in the authenticator chain.
    */
   private void buildAuthenticatorChain() {
@@ -130,89 +134,123 @@ private void buildAuthenticatorChain() {
 
     boolean isAuthEnabled = this.configurationProvider.getAuthentication().isEnabled();
 
-    // Create authentication context object to pass to authenticator instances. They can use it as needed.
-    final AuthenticatorContext authenticatorContext = new AuthenticatorContext(
-        ImmutableMap.of(ENTITY_SERVICE, this._entityService, TOKEN_SERVICE, this._tokenService));
+    // Create authentication context object to pass to authenticator instances. They can use it as
+    // needed.
+    final AuthenticatorContext authenticatorContext =
+        new AuthenticatorContext(
+            ImmutableMap.of(
+                ENTITY_SERVICE, this._entityService, TOKEN_SERVICE, this._tokenService));
 
     if (isAuthEnabled) {
       log.info("Auth is enabled. Building authenticator chain...");
-      this.registerNativeAuthenticator(authenticatorChain, authenticatorContext); // Register native authenticators
+      this.registerNativeAuthenticator(
+          authenticatorChain, authenticatorContext); // Register native authenticators
       this.registerPlugins(authenticatorChain); // Register plugin authenticators
     } else {
-      // Authentication is not enabled. Populate authenticator chain with a purposely permissive Authenticator.
+      // Authentication is not enabled. Populate authenticator chain with a purposely permissive
+      // Authenticator.
       log.info("Auth is disabled. 
Building no-op authenticator chain..."); final NoOpAuthenticator noOpAuthenticator = new NoOpAuthenticator(); noOpAuthenticator.init( - ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, this.configurationProvider.getAuthentication().getSystemClientId()), + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId()), authenticatorContext); authenticatorChain.register(noOpAuthenticator); } } private AuthenticationRequest buildAuthContext(HttpServletRequest request) { - return new AuthenticationRequest(request.getServletPath(), request.getPathInfo(), Collections.list(request.getHeaderNames()) - .stream() - .collect(Collectors.toMap(headerName -> headerName, request::getHeader))); + return new AuthenticationRequest( + request.getServletPath(), + request.getPathInfo(), + Collections.list(request.getHeaderNames()).stream() + .collect(Collectors.toMap(headerName -> headerName, request::getHeader))); } private void registerPlugins(AuthenticatorChain authenticatorChain) { - // TODO: Introduce plugin factory to reduce duplicate code around authentication and authorization processing + // TODO: Introduce plugin factory to reduce duplicate code around authentication and + // authorization processing ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); - Path pluginBaseDirectory = Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); + Path pluginBaseDirectory = + Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); Optional<Config> optionalConfig = (new ConfigProvider(pluginBaseDirectory)).load(); - optionalConfig.ifPresent((config) -> { - log.info("Processing authenticator plugin from auth plugin directory {}", pluginBaseDirectory); - PluginConfigFactory authenticatorPluginPluginConfigFactory = - new PluginConfigFactory(config); + optionalConfig.ifPresent( + (config) -> { + log.info( + "Processing authenticator plugin from auth plugin directory {}", pluginBaseDirectory); + PluginConfigFactory authenticatorPluginPluginConfigFactory = + new PluginConfigFactory(config); - List<PluginConfig> authorizers = - authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - // Filter enabled authenticator plugins - List<PluginConfig> enabledAuthenticators = authorizers.stream().filter(pluginConfig -> { - if (!pluginConfig.getEnabled()) { - log.info(String.format("Authenticator %s is not enabled", pluginConfig.getName())); - } - return pluginConfig.getEnabled(); - }).collect(Collectors.toList()); + List<PluginConfig> authorizers = + authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); + // Filter enabled authenticator plugins + List<PluginConfig> enabledAuthenticators = + authorizers.stream() + .filter( + pluginConfig -> { + if (!pluginConfig.getEnabled()) { + log.info( + String.format( + "Authenticator %s is not enabled", pluginConfig.getName())); + } + return pluginConfig.getEnabled(); + }) + .collect(Collectors.toList()); - SecurityMode securityMode = - SecurityMode.valueOf(this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); - // Create permission manager with security mode - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); + SecurityMode securityMode = + SecurityMode.valueOf( + this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + // Create permission manager with security mode + PluginPermissionManager permissionManager = new 
PluginPermissionManagerImpl(securityMode); - // Initiate Authenticators - enabledAuthenticators.forEach((pluginConfig) -> { - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, pluginConfig); - // Create context - AuthenticatorContext context = new AuthenticatorContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString())); + // Initiate Authenticators + enabledAuthenticators.forEach( + (pluginConfig) -> { + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, pluginConfig); + // Create context + AuthenticatorContext context = + new AuthenticatorContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + pluginConfig.getPluginHomeDirectory().toString())); - try { - Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); - log.info("Initializing plugin {}", pluginConfig.getName()); - authenticator.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); - authenticatorChain.register(authenticator); - log.info("Plugin {} is initialized", pluginConfig.getName()); - } catch (ClassNotFoundException e) { - throw new RuntimeException(String.format("Plugin className %s not found", pluginConfig.getClassName()), e); - } finally { - Thread.currentThread().setContextClassLoader(contextClassLoader); - } - }); - }); + try { + Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + log.info("Initializing plugin {}", pluginConfig.getName()); + authenticator.init( + pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); + authenticatorChain.register(authenticator); + log.info("Plugin {} is initialized", pluginConfig.getName()); + } catch (ClassNotFoundException e) { + throw new RuntimeException( + String.format("Plugin className %s not found", pluginConfig.getClassName()), + e); + } finally { + Thread.currentThread().setContextClassLoader(contextClassLoader); + } + }); + }); } - private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, AuthenticatorContext authenticatorContext) { + private void registerNativeAuthenticator( + AuthenticatorChain authenticatorChain, AuthenticatorContext authenticatorContext) { log.info("Registering native authenticators"); // Register system authenticator DataHubSystemAuthenticator systemAuthenticator = new DataHubSystemAuthenticator(); systemAuthenticator.init( - ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, this.configurationProvider.getAuthentication().getSystemClientId(), - SYSTEM_CLIENT_SECRET_CONFIG, this.configurationProvider.getAuthentication().getSystemClientSecret()), + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId(), + SYSTEM_CLIENT_SECRET_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientSecret()), authenticatorContext); - authenticatorChain.register(systemAuthenticator); // Always register authenticator for internal system. + authenticatorChain.register( + systemAuthenticator); // Always register authenticator for internal system. 
// Register authenticator define in application.yml final List<AuthenticatorConfiguration> authenticatorConfigurations = @@ -229,14 +267,16 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, clazz = (Class<? extends Authenticator>) Class.forName(type); } catch (ClassNotFoundException e) { throw new RuntimeException( - String.format("Failed to find Authenticator class with name %s on the classpath.", type)); + String.format( + "Failed to find Authenticator class with name %s on the classpath.", type)); } // Ensure class conforms to the correct type. if (!Authenticator.class.isAssignableFrom(clazz)) { - throw new IllegalArgumentException(String.format( - "Failed to instantiate invalid Authenticator with class name %s. Class does not implement the 'Authenticator' interface", - clazz.getCanonicalName())); + throw new IllegalArgumentException( + String.format( + "Failed to instantiate invalid Authenticator with class name %s. Class does not implement the 'Authenticator' interface", + clazz.getCanonicalName())); } // Else construct an instance of the class, each class should have an empty constructor. @@ -245,9 +285,14 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, // Successfully created authenticator. Now init and register it. log.debug(String.format("Initializing Authenticator with name %s", type)); if (authenticator instanceof HealthStatusAuthenticator) { - Map<String, Object> authenticatorConfig = new HashMap<>(Map.of(SYSTEM_CLIENT_ID_CONFIG, - this.configurationProvider.getAuthentication().getSystemClientId())); - authenticatorConfig.putAll(Optional.ofNullable(internalAuthenticatorConfig.getConfigs()).orElse(Collections.emptyMap())); + Map<String, Object> authenticatorConfig = + new HashMap<>( + Map.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId())); + authenticatorConfig.putAll( + Optional.ofNullable(internalAuthenticatorConfig.getConfigs()) + .orElse(Collections.emptyMap())); authenticator.init(authenticatorConfig, authenticatorContext); } else { authenticator.init(configs, authenticatorContext); @@ -256,8 +301,10 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, authenticatorChain.register(authenticator); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to instantiate Authenticator with class name %s", clazz.getCanonicalName()), e); + String.format( + "Failed to instantiate Authenticator with class name %s", clazz.getCanonicalName()), + e); } } } -} \ No newline at end of file +} diff --git a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java index 05ca428283a6c..471fdf8c36903 100644 --- a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java +++ b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java @@ -1,5 +1,7 @@ package com.datahub.auth.authentication; +import static org.mockito.Mockito.*; + import com.datahub.auth.authentication.filter.AuthenticationFilter; import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.AuthenticatorConfiguration; @@ -17,27 +19,25 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static org.mockito.Mockito.*; - 
@Configuration public class AuthTestConfiguration { - @Bean public EntityService entityService() { return mock(EntityService.class); } @Bean("dataHubTokenService") - public StatefulTokenService statefulTokenService(ConfigurationProvider configurationProvider, EntityService entityService) { - TokenServiceConfiguration tokenServiceConfiguration = configurationProvider.getAuthentication().getTokenService(); + public StatefulTokenService statefulTokenService( + ConfigurationProvider configurationProvider, EntityService entityService) { + TokenServiceConfiguration tokenServiceConfiguration = + configurationProvider.getAuthentication().getTokenService(); return new StatefulTokenService( tokenServiceConfiguration.getSigningKey(), tokenServiceConfiguration.getSigningAlgorithm(), tokenServiceConfiguration.getIssuer(), entityService, - tokenServiceConfiguration.getSalt() - ); + tokenServiceConfiguration.getSalt()); } @Bean @@ -59,8 +59,12 @@ public ConfigurationProvider configurationProvider() { authenticationConfiguration.setTokenService(tokenServiceConfiguration); AuthenticatorConfiguration authenticator = new AuthenticatorConfiguration(); authenticator.setType("com.datahub.authentication.authenticator.DataHubTokenAuthenticator"); - authenticator.setConfigs(Map.of("signingKey", "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=", - "salt", "ohDVbJBvHHVJh9S/UA4BYF9COuNnqqVhr9MLKEGXk1O=")); + authenticator.setConfigs( + Map.of( + "signingKey", + "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=", + "salt", + "ohDVbJBvHHVJh9S/UA4BYF9COuNnqqVhr9MLKEGXk1O=")); List<AuthenticatorConfiguration> authenticators = List.of(authenticator); authenticationConfiguration.setAuthenticators(authenticators); authPluginConfiguration.setPath(""); diff --git a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java index 2ac65bf09c912..746138e4ee90f 100644 --- a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java +++ b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java @@ -1,5 +1,8 @@ package com.datahub.auth.authentication; +import static com.datahub.authentication.AuthenticationConstants.*; +import static org.mockito.Mockito.*; + import com.datahub.auth.authentication.filter.AuthenticationFilter; import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; @@ -17,18 +20,12 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static com.datahub.authentication.AuthenticationConstants.*; -import static org.mockito.Mockito.*; - - -@ContextConfiguration(classes = { AuthTestConfiguration.class }) +@ContextConfiguration(classes = {AuthTestConfiguration.class}) public class AuthenticationFilterTest extends AbstractTestNGSpringContextTests { - @Autowired - AuthenticationFilter _authenticationFilter; + @Autowired AuthenticationFilter _authenticationFilter; - @Autowired - StatefulTokenService _statefulTokenService; + @Autowired StatefulTokenService _statefulTokenService; @Test public void testExpiredToken() throws ServletException, IOException, TokenException { @@ -37,17 +34,20 @@ public void testExpiredToken() throws ServletException, IOException, TokenExcept HttpServletResponse servletResponse = mock(HttpServletResponse.class); FilterChain filterChain = mock(FilterChain.class); 
Actor actor = new Actor(ActorType.USER, "datahub"); -// String token = _statefulTokenService.generateAccessToken(TokenType.SESSION, actor, 0L, System.currentTimeMillis(), "token", -// "token", actor.toUrnStr()); + // String token = _statefulTokenService.generateAccessToken(TokenType.SESSION, actor, 0L, + // System.currentTimeMillis(), "token", + // "token", actor.toUrnStr()); // Token generated 9/11/23, invalid for all future dates - String token = "eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIZCI6ImRhdGFodWIiLCJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwian" - + "RpIjoiMmI0MzZkZDAtYjEwOS00N2UwLWJmYTEtMzM2ZmU4MTU4MDE1Iiwic3ViIjoiZGF0YWh1YiIsImV4cCI6MTY5NDU0NzA2OCwiaXNzIjoiZGF" - + "0YWh1Yi1tZXRhZGF0YS1zZXJ2aWNlIn0.giqx7J5a9mxuubG6rXdAMoaGlcII-fqY-W82Wm7OlLI"; - when(servletRequest.getHeaderNames()).thenReturn(Collections.enumeration(List.of(AUTHORIZATION_HEADER_NAME))); - when(servletRequest.getHeader(AUTHORIZATION_HEADER_NAME)) - .thenReturn("Bearer " + token); + String token = + "eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIZCI6ImRhdGFodWIiLCJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwian" + + "RpIjoiMmI0MzZkZDAtYjEwOS00N2UwLWJmYTEtMzM2ZmU4MTU4MDE1Iiwic3ViIjoiZGF0YWh1YiIsImV4cCI6MTY5NDU0NzA2OCwiaXNzIjoiZGF" + + "0YWh1Yi1tZXRhZGF0YS1zZXJ2aWNlIn0.giqx7J5a9mxuubG6rXdAMoaGlcII-fqY-W82Wm7OlLI"; + when(servletRequest.getHeaderNames()) + .thenReturn(Collections.enumeration(List.of(AUTHORIZATION_HEADER_NAME))); + when(servletRequest.getHeader(AUTHORIZATION_HEADER_NAME)).thenReturn("Bearer " + token); _authenticationFilter.doFilter(servletRequest, servletResponse, filterChain); - verify(servletResponse, times(1)).sendError(eq(HttpServletResponse.SC_UNAUTHORIZED), anyString()); + verify(servletResponse, times(1)) + .sendError(eq(HttpServletResponse.SC_UNAUTHORIZED), anyString()); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java index e72225e6ee990..b69a8a7818485 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java @@ -1,27 +1,26 @@ package com.datahub.authentication.authenticator; import com.datahub.authentication.Authentication; - import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.util.Pair; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * A configurable chain of {@link Authenticator}s executed in series to attempt to authenticate an inbound request. + * A configurable chain of {@link Authenticator}s executed in series to attempt to authenticate an + * inbound request. * - * Individual {@link Authenticator}s are registered with the chain using {@link #register(Authenticator)}. - * The chain can be executed by invoking {@link #authenticate(AuthenticationRequest)} with an instance of {@link AuthenticationRequest}. 
+ * <p>Individual {@link Authenticator}s are registered with the chain using {@link
+ * #register(Authenticator)}. The chain can be executed by invoking {@link
+ * #authenticate(AuthenticationRequest)} with an instance of {@link AuthenticationRequest}.
  */
 @Slf4j
 public class AuthenticatorChain {
@@ -39,21 +38,30 @@ public void register(@Nonnull final Authenticator authenticator) {
   }
 
   /**
-   * Executes a set of {@link Authenticator}s and returns the first successful authentication result.
+   * Executes a set of {@link Authenticator}s and returns the first successful authentication
+   * result.
    *
-   * Returns an instance of {@link Authentication} if the incoming request is successfully authenticated.
-   * Returns null if {@link Authentication} cannot be resolved for the incoming request.
+   * <p>Returns an instance of {@link Authentication} if the incoming request is successfully
+   * authenticated. Returns null if {@link Authentication} cannot be resolved for the incoming
+   * request.
    */
   @Nullable
-  public Authentication authenticate(@Nonnull final AuthenticationRequest context, boolean logExceptions) throws AuthenticationException {
+  public Authentication authenticate(
+      @Nonnull final AuthenticationRequest context, boolean logExceptions)
+      throws AuthenticationException {
     Objects.requireNonNull(context);
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    List<Pair<String, Exception>> authenticationFailures = new ArrayList<>();
    for (final Authenticator authenticator : this.authenticators) {
      try {
-        log.debug(String.format("Executing Authenticator with class name %s", authenticator.getClass().getCanonicalName()));
-        // The library came with plugin can use the contextClassLoader to load the classes. For example apache-ranger library does this.
-        // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class loading request from plugin's home directory,
+        log.debug(
+            String.format(
+                "Executing Authenticator with class name %s",
+                authenticator.getClass().getCanonicalName()));
+        // A library that ships with a plugin can use the contextClassLoader to load its classes;
+        // the apache-ranger library does this, for example.
+        // Here we set our IsolatedClassLoader as the contextClassLoader so that such class-loading
+        // requests resolve against the plugin's home directory,
        // otherwise plugin's internal library wouldn't be able to find their dependent classes
        Thread.currentThread().setContextClassLoader(authenticator.getClass().getClassLoader());
        Authentication result = authenticator.authenticate(context);
@@ -65,13 +73,19 @@ public Authentication authenticate(@Nonnull final AuthenticationRequest context,
        }
      } catch (AuthenticationExpiredException e) {
        // Throw if it's an AuthenticationException to propagate the error message to the end user
-        log.debug(String.format("Unable to authenticate request using Authenticator %s", authenticator.getClass().getCanonicalName()), e);
+        log.debug(
+            String.format(
+                "Unable to authenticate request using Authenticator %s",
+                authenticator.getClass().getCanonicalName()),
+            e);
        throw e;
      } catch (Exception e) {
        // Log as a normal error otherwise.
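        // (AuthenticationExpiredException is rethrown above so that expiry reaches the caller;
        // any other failure is collected into authenticationFailures and summarized after the
        // loop.)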
-        log.debug(String.format(
+        log.debug(
+            String.format(
                "Caught exception while attempting to authenticate request using Authenticator %s",
-            authenticator.getClass().getCanonicalName()), e);
+                authenticator.getClass().getCanonicalName()),
+            e);
        authenticationFailures.add(new Pair<>(authenticator.getClass().getCanonicalName(), e));
      } finally {
        Thread.currentThread().setContextClassLoader(contextClassLoader);
@@ -79,14 +93,19 @@ public Authentication authenticate(@Nonnull final AuthenticationRequest context,
      }
    }
    // No authentication resolved. Return null.
    if (!authenticationFailures.isEmpty()) {
-      List<Pair<String, String>> shortMessage = authenticationFailures.stream()
-          .peek(p -> {
-            if (logExceptions) {
-              log.error("Error during {} authentication: ", p.getFirst(), p.getSecond());
-            }
-          })
-          .map(p -> Pair.of(p.getFirst(), p.getSecond().getMessage())).collect(Collectors.toList());
-      log.warn("Authentication chain failed to resolve a valid authentication. Errors: {}", shortMessage);
+      List<Pair<String, String>> shortMessage =
+          authenticationFailures.stream()
+              .peek(
+                  p -> {
+                    if (logExceptions) {
+                      log.error("Error during {} authentication: ", p.getFirst(), p.getSecond());
+                    }
+                  })
+              .map(p -> Pair.of(p.getFirst(), p.getSecond().getMessage()))
+              .collect(Collectors.toList());
+      log.warn(
+          "Authentication chain failed to resolve a valid authentication. Errors: {}",
+          shortMessage);
    }
    return null;
  }
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java
index 524c12c56c266..635a87dc84c11 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java
@@ -1,29 +1,28 @@
 package com.datahub.authentication.authenticator;
 
+import static com.datahub.authentication.AuthenticationConstants.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.AuthenticationException;
 import com.datahub.authentication.AuthenticationRequest;
-import com.datahub.plugins.auth.authentication.Authenticator;
 import com.datahub.authentication.AuthenticatorContext;
 import com.datahub.authentication.token.DataHubJwtSigningKeyResolver;
+import com.datahub.plugins.auth.authentication.Authenticator;
 import io.jsonwebtoken.Claims;
-import io.jsonwebtoken.Jwts;
 import io.jsonwebtoken.Jws;
+import io.jsonwebtoken.Jwts;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Objects;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import java.util.HashSet;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.datahub.authentication.AuthenticationConstants.*;
-
-
 /**
- * This Authenticator verifies third party token and allows to pass claim for "id" part of resolved actor urn.
- * Supported algorithm at this moment RSA
+ * This Authenticator verifies a third-party token and allows a chosen claim to be used as the
+ * "id" part of the resolved actor urn.
+ * Supported signing algorithm at this moment: RSA.
  */
 @Slf4j
 public class DataHubJwtTokenAuthenticator implements Authenticator {
@@ -33,57 +32,67 @@ public class DataHubJwtTokenAuthenticator implements Authenticator {
   static final String DEFAULT_SIGNING_ALG = "RSA";
 
   /**
-   * idUserClaim allows you to select which claim will be used as the "id" part of the resolved actor urn, e.g. "urn:li:corpuser:"
-   * **/
+   * userIdClaim selects which claim will be used as the "id" part of the resolved actor urn,
+   * e.g. "urn:li:corpuser:"
+   */
  private String userIdClaim;

-  /**
-   * List of trusted issuers
-   * **/
+  /** List of trusted issuers. */
  private HashSet<String> trustedIssuers;

  /**
-   * This public key is optional and should be used if token public key is not available online or will not change for signed token.
-   * **/
+   * This public key is optional and should be used if the token's public key is not available
+   * online or will not change for a signed token.
+   */
  private String publicKey;

  /**
-   * Algorithm used to sign your token.
-   * This is optional and can be skiped if public key is available online.
-   * **/
+   * Algorithm used to sign your token. This is optional and can be skipped if the public key is
+   * available online.
+   */
  private String algorithm;

  @Override
-  public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) {
+  public void init(
+      @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) {
    Objects.requireNonNull(config, "Config parameter cannot be null");
-    this.userIdClaim = config.get("userIdClaim") == null ? DEFAULT_USER_CLAIM : (String) config.get("userIdClaim");
+    this.userIdClaim =
+        config.get("userIdClaim") == null ? DEFAULT_USER_CLAIM : (String) config.get("userIdClaim");

-    Map<String, String> issuers = Objects.requireNonNull((Map<String, String>) config.get("trustedIssuers"),
-        "Missing required config trusted issuers");
+    Map<String, String> issuers =
+        Objects.requireNonNull(
+            (Map<String, String>) config.get("trustedIssuers"),
+            "Missing required config trusted issuers");
    this.trustedIssuers = new HashSet<String>(issuers.values());

    this.publicKey = (String) config.get("publicKey");
-    this.algorithm = config.get("algorithm") == null ? DEFAULT_SIGNING_ALG : (String) config.get("algorithm");
+    this.algorithm =
+        config.get("algorithm") == null ?
DEFAULT_SIGNING_ALG : (String) config.get("algorithm"); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); try { String jwtToken = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); - if (jwtToken == null || (!jwtToken.startsWith("Bearer ") && !jwtToken.startsWith("bearer "))) { + if (jwtToken == null + || (!jwtToken.startsWith("Bearer ") && !jwtToken.startsWith("bearer "))) { throw new AuthenticationException("Invalid Authorization token"); } String token = getToken(jwtToken); - Jws<Claims> claims = Jwts.parserBuilder() - .setSigningKeyResolver(new DataHubJwtSigningKeyResolver(this.trustedIssuers, this.publicKey, this.algorithm)) - .build() - .parseClaimsJws(token); + Jws<Claims> claims = + Jwts.parserBuilder() + .setSigningKeyResolver( + new DataHubJwtSigningKeyResolver( + this.trustedIssuers, this.publicKey, this.algorithm)) + .build() + .parseClaimsJws(token); final String userClaim = claims.getBody().get(userIdClaim, String.class); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java index 70a4abc3fd18d..9a25a51b72622 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java @@ -1,34 +1,33 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; -import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; +import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; -import javax.annotation.Nonnull; +import com.datahub.plugins.auth.authentication.Authenticator; import java.util.Collections; import java.util.Map; import java.util.Objects; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * Authenticator that verifies system internal callers, such as the metadata-service itself OR datahub-frontend, - * using HTTP Basic Authentication. - * - * This makes use of a single "system client id" and "system shared secret" which each - * component in the system is configured to provide. + * Authenticator that verifies system internal callers, such as the metadata-service itself OR + * datahub-frontend, using HTTP Basic Authentication. * - * This authenticator requires the following configurations: + * <p>This makes use of a single "system client id" and "system shared secret" which each component + * in the system is configured to provide. * - * - systemClientId: an identifier for internal system callers, provided in the Authorization header via Basic Authentication. 
- * - systemClientSecret: a shared secret used to authenticate internal system callers + * <p>This authenticator requires the following configurations: * + * <p>- systemClientId: an identifier for internal system callers, provided in the Authorization + * header via Basic Authentication. - systemClientSecret: a shared secret used to authenticate + * internal system callers */ @Slf4j public class DataHubSystemAuthenticator implements Authenticator { @@ -37,16 +36,22 @@ public class DataHubSystemAuthenticator implements Authenticator { private String systemClientSecret; @Override - public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); - this.systemClientSecret = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_SECRET_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_SECRET_CONFIG)); + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); + this.systemClientSecret = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_SECRET_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_SECRET_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); final String authorizationHeader = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); if (authorizationHeader != null) { @@ -57,16 +62,18 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw if (splitCredentials.length == 2 && this.systemClientId.equals(splitCredentials[0]) - && this.systemClientSecret.equals(splitCredentials[1]) - ) { + && this.systemClientSecret.equals(splitCredentials[1])) { // If this request was made internally, there may be a delegated id. return new Authentication( - new Actor(ActorType.USER, this.systemClientId), // todo: replace this with service actor type once they exist. + new Actor( + ActorType.USER, + this.systemClientId), // todo: replace this with service actor type once they + // exist. authorizationHeader, - Collections.emptyMap() - ); + Collections.emptyMap()); } else { - throw new AuthenticationException("Provided credentials do not match known system client id & client secret. Check your configuration values..."); + throw new AuthenticationException( + "Provided credentials do not match known system client id & client secret. 
Check your configuration values..."); } } else { throw new AuthenticationException("Authorization header is missing 'Basic' prefix."); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java index e7e776999f34e..f1d1f5a80119c 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java @@ -1,34 +1,33 @@ package com.datahub.authentication.authenticator; -import com.datahub.authentication.token.StatefulTokenService; -import com.datahub.authentication.token.StatelessTokenService; -import com.datahub.authentication.token.TokenClaims; -import com.datahub.authentication.token.TokenExpiredException; -import com.datahub.authentication.Actor; +import static com.datahub.authentication.AuthenticationConstants.*; +import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConstants; +import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; import com.datahub.authentication.AuthenticationRequest; -import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.authentication.token.StatefulTokenService; +import com.datahub.authentication.token.StatelessTokenService; +import com.datahub.authentication.token.TokenClaims; +import com.datahub.authentication.token.TokenExpiredException; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; import java.util.Map; import java.util.Objects; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** * Authenticator that verifies DataHub-issued JSON web tokens. * - * This authenticator requires the following configurations: + * <p>This authenticator requires the following configurations: * - * - signingAlgorithm (optional): the algorithm used to verify JWT's. This should be THE SAME ONE used by the {@link StatelessTokenService}. Defaults to HS256. - * - signingKey: a key used to sign all JWT tokens using the provided signingAlgorithm + * <p>- signingAlgorithm (optional): the algorithm used to verify JWT's. This should be THE SAME ONE + * used by the {@link StatelessTokenService}. Defaults to HS256. 
- signingKey: a key used to sign + * all JWT tokens using the provided signingAlgorithm */ @Slf4j public class DataHubTokenAuthenticator implements Authenticator { @@ -47,28 +46,35 @@ public void init(@Nonnull final Map<String, Object> config, final AuthenticatorC Objects.requireNonNull(config, "Config parameter cannot be null"); Objects.requireNonNull(context, "Context parameter cannot be null"); final String signingKey = - Objects.requireNonNull((String) config.get(SIGNING_KEY_CONFIG_NAME), "signingKey is a required config"); + Objects.requireNonNull( + (String) config.get(SIGNING_KEY_CONFIG_NAME), "signingKey is a required config"); final String salt = Objects.requireNonNull((String) config.get(SALT_CONFIG_NAME), "salt is a required config"); - final String signingAlgorithm = (String) config.getOrDefault(SIGNING_ALG_CONFIG_NAME, DEFAULT_SIGNING_ALG); + final String signingAlgorithm = + (String) config.getOrDefault(SIGNING_ALG_CONFIG_NAME, DEFAULT_SIGNING_ALG); log.debug(String.format("Creating TokenService using signing algorithm %s", signingAlgorithm)); if (!context.data().containsKey(AuthenticationConstants.ENTITY_SERVICE)) { - throw new IllegalArgumentException("Unable to initialize DataHubTokenAuthenticator, entity service reference not" - + " found."); + throw new IllegalArgumentException( + "Unable to initialize DataHubTokenAuthenticator, entity service reference not" + + " found."); } final Object entityService = context.data().get(ENTITY_SERVICE); if (!(entityService instanceof EntityService)) { throw new RuntimeException( "Unable to initialize DataHubTokenAuthenticator, entity service reference is not of type: " - + "EntityService.class, found: " + entityService.getClass()); + + "EntityService.class, found: " + + entityService.getClass()); } - this._statefulTokenService = (StatefulTokenService) Objects.requireNonNull(context.data().get(TOKEN_SERVICE)); + this._statefulTokenService = + (StatefulTokenService) Objects.requireNonNull(context.data().get(TOKEN_SERVICE)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); - final String authorizationHeader = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); // Case insensitive + final String authorizationHeader = + context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); // Case insensitive if (authorizationHeader != null) { if (authorizationHeader.startsWith("Bearer ") || authorizationHeader.startsWith("bearer ")) { return validateAndExtract(authorizationHeader); @@ -79,12 +85,14 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw throw new AuthenticationException("Request is missing 'Authorization' header."); } - private Authentication validateAndExtract(final String credentials) throws AuthenticationException { + private Authentication validateAndExtract(final String credentials) + throws AuthenticationException { log.debug("Found authentication token. 
Verifying..."); final String token = credentials.substring(7); try { final TokenClaims claims = this._statefulTokenService.validateAccessToken(token); - return new Authentication(new Actor(claims.getActorType(), claims.getActorId()), credentials, claims.asMap()); + return new Authentication( + new Actor(claims.getActorType(), claims.getActorId()), credentials, claims.asMap()); } catch (TokenExpiredException e) { throw new AuthenticationExpiredException(e.getMessage(), e); } catch (Exception e) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java index 5749eacf5d25d..65581f1d5b635 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java @@ -1,5 +1,7 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.SYSTEM_CLIENT_ID_CONFIG; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -7,48 +9,45 @@ import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; import com.datahub.plugins.auth.authentication.Authenticator; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.Set; - -import static com.datahub.authentication.AuthenticationConstants.SYSTEM_CLIENT_ID_CONFIG; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; /** * This Authenticator is used for allowing access for unauthenticated health check endpoints * - * It exists to support load balancers, liveness/readiness checks - * + * <p>It exists to support load balancers, liveness/readiness checks */ @Slf4j public class HealthStatusAuthenticator implements Authenticator { - private static final Set<String> HEALTH_ENDPOINTS = Set.of( - "/openapi/check/", - "/openapi/up/" - ); + private static final Set<String> HEALTH_ENDPOINTS = Set.of("/openapi/check/", "/openapi/up/"); private String systemClientId; @Override - public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); - if (HEALTH_ENDPOINTS.stream().anyMatch(prefix -> String.join("", context.getServletInfo(), context.getPathInfo()).startsWith(prefix))) { + if (HEALTH_ENDPOINTS.stream() + .anyMatch( + prefix -> + String.join("", context.getServletInfo(), context.getPathInfo()) 
+ .startsWith(prefix))) { return new Authentication( - new Actor(ActorType.USER, systemClientId), - "", - Collections.emptyMap() - ); + new Actor(ActorType.USER, systemClientId), "", Collections.emptyMap()); } throw new AuthenticationException("Authorization not allowed. Non-health check endpoint."); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java index 4e1b3cf7f73aa..19f135debdae4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java @@ -1,12 +1,14 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.Constants; import java.util.Collections; @@ -16,16 +18,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * This Authenticator is used as a no-op to simply convert the X-DataHub-Actor header into a valid Authentication, or fall - * back to resolving a system {@link Actor} by default. + * This Authenticator is used as a no-op to simply convert the X-DataHub-Actor header into a valid + * Authentication, or fall back to resolving a system {@link Actor} by default. * - * It exists to support deployments that do not have Metadata Service Authentication enabled. + * <p>It exists to support deployments that do not have Metadata Service Authentication enabled. * - * Notice that this authenticator should generally be avoided in production. + * <p>Notice that this authenticator should generally be avoided in production. 
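+ *
+ * <p>Illustrative example (the user urn is hypothetical): a request carrying the header
+ * {@code X-DataHub-Actor: urn:li:corpuser:jdoe} resolves to a USER actor with id {@code jdoe};
+ * without the header, the configured system client identity is used instead.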
*/ @Slf4j public class NoOpAuthenticator implements Authenticator { @@ -33,24 +32,29 @@ public class NoOpAuthenticator implements Authenticator { private String systemClientId; @Override - public void init(@Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map<String, Object> config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); String actorUrn = context.getRequestHeaders().get(LEGACY_X_DATAHUB_ACTOR_HEADER); // For backwards compatibility, support pulling actor context from the deprecated // X-DataHub-Actor header. if (actorUrn == null || "".equals(actorUrn)) { - log.debug(String.format("Found no X-DataHub-Actor header provided with the request. Falling back to system creds %s", Constants.UNKNOWN_ACTOR)); - return new Authentication( - new Actor(ActorType.USER, this.systemClientId), "" - ); + log.debug( + String.format( + "Found no X-DataHub-Actor header provided with the request. Falling back to system creds %s", + Constants.UNKNOWN_ACTOR)); + return new Authentication(new Actor(ActorType.USER, this.systemClientId), ""); } // If not provided, fallback to system caller identity. @@ -58,8 +62,7 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw // When authentication is disabled, assume everyone is a normal user. new Actor(ActorType.USER, getActorIdFromUrn(actorUrn)), "", // No Credentials provided. 
- Collections.emptyMap() - ); + Collections.emptyMap()); } private String getActorIdFromUrn(final String urnStr) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java index 29ec2f73dc688..f33ae5de130da 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java @@ -1,5 +1,7 @@ package com.datahub.authentication.group; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -35,15 +37,14 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GroupService { private final EntityClient _entityClient; private final EntityService _entityService; private final GraphClient _graphClient; - public GroupService(@Nonnull EntityClient entityClient, @Nonnull EntityService entityService, + public GroupService( + @Nonnull EntityClient entityClient, + @Nonnull EntityService entityService, @Nonnull GraphClient graphClient) { Objects.requireNonNull(entityClient, "entityClient must not be null!"); Objects.requireNonNull(entityService, "entityService must not be null!"); @@ -64,7 +65,9 @@ public Origin getGroupOrigin(@Nonnull final Urn groupUrn) { return (Origin) _entityService.getLatestAspect(groupUrn, ORIGIN_ASPECT_NAME); } - public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn groupUrn, + public void addUserToNativeGroup( + @Nonnull final Urn userUrn, + @Nonnull final Urn groupUrn, final Authentication authentication) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(groupUrn, "groupUrn must not be null"); @@ -76,7 +79,8 @@ public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn try { // First, fetch user's group membership aspect. - NativeGroupMembership nativeGroupMembership = getExistingNativeGroupMembership(userUrn, authentication); + NativeGroupMembership nativeGroupMembership = + getExistingNativeGroupMembership(userUrn, authentication); // Handle the duplicate case. 
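      // Removing the group before re-adding it makes the update idempotent: the user ends up in
      // the group exactly once whether or not they were already a member.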
nativeGroupMembership.getNativeGroups().remove(groupUrn); nativeGroupMembership.getNativeGroups().add(groupUrn); @@ -94,13 +98,18 @@ public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn } } - public String createNativeGroup(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, - @Nonnull final String groupDescription, final Authentication authentication) throws Exception { + public String createNativeGroup( + @Nonnull final CorpGroupKey corpGroupKey, + @Nonnull final String groupName, + @Nonnull final String groupDescription, + final Authentication authentication) + throws Exception { Objects.requireNonNull(corpGroupKey, "corpGroupKey must not be null"); Objects.requireNonNull(groupName, "groupName must not be null"); Objects.requireNonNull(groupDescription, "groupDescription must not be null"); - Urn corpGroupUrn = EntityKeyUtils.convertEntityKeyToUrn(corpGroupKey, Constants.CORP_GROUP_ENTITY_NAME); + Urn corpGroupUrn = + EntityKeyUtils.convertEntityKeyToUrn(corpGroupKey, Constants.CORP_GROUP_ENTITY_NAME); if (groupExists(corpGroupUrn)) { throw new IllegalArgumentException("This Group already exists!"); } @@ -110,22 +119,34 @@ public String createNativeGroup(@Nonnull final CorpGroupKey corpGroupKey, @Nonnu return groupInfo; } - public void removeExistingNativeGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List<Urn> userUrnList, - final Authentication authentication) throws Exception { + public void removeExistingNativeGroupMembers( + @Nonnull final Urn groupUrn, + @Nonnull final List<Urn> userUrnList, + final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); Objects.requireNonNull(userUrnList, "userUrnList must not be null"); final Set<Urn> userUrns = new HashSet<>(userUrnList); for (Urn userUrn : userUrns) { - final Map<Urn, EntityResponse> entityResponseMap = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, userUrns, - Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), authentication); + final Map<Urn, EntityResponse> entityResponseMap = + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + userUrns, + Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + authentication); EntityResponse entityResponse = entityResponseMap.get(userUrn); if (entityResponse == null) { continue; } - final NativeGroupMembership nativeGroupMembership = new NativeGroupMembership( - entityResponse.getAspects().get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + final NativeGroupMembership nativeGroupMembership = + new NativeGroupMembership( + entityResponse + .getAspects() + .get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME) + .getValue() + .data()); if (nativeGroupMembership.getNativeGroups().remove(groupUrn)) { // Finally, create the MetadataChangeProposal. 
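        // Only reached when remove(groupUrn) actually changed the membership; the updated aspect
        // is then written back through the proposal that follows.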
final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -139,8 +160,9 @@ public void removeExistingNativeGroupMembers(@Nonnull final Urn groupUrn, @Nonnu } } - public void migrateGroupMembershipToNativeGroupMembership(@Nonnull final Urn groupUrn, final String actorUrnStr, - final Authentication authentication) throws Exception { + public void migrateGroupMembershipToNativeGroupMembership( + @Nonnull final Urn groupUrn, final String actorUrnStr, final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Get the existing set of users @@ -153,26 +175,41 @@ public void migrateGroupMembershipToNativeGroupMembership(@Nonnull final Urn gro userUrnList.forEach(userUrn -> addUserToNativeGroup(userUrn, groupUrn, authentication)); } - NativeGroupMembership getExistingNativeGroupMembership(@Nonnull final Urn userUrn, - final Authentication authentication) throws Exception { + NativeGroupMembership getExistingNativeGroupMembership( + @Nonnull final Urn userUrn, final Authentication authentication) throws Exception { final EntityResponse entityResponse = - _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, Collections.singleton(userUrn), - Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), authentication).get(userUrn); + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + authentication) + .get(userUrn); NativeGroupMembership nativeGroupMembership; - if (entityResponse == null || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { + if (entityResponse == null + || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { // If the user doesn't have the NativeGroupMembership aspect, create one. nativeGroupMembership = new NativeGroupMembership(); nativeGroupMembership.setNativeGroups(new UrnArray()); } else { - nativeGroupMembership = new NativeGroupMembership( - entityResponse.getAspects().get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + nativeGroupMembership = + new NativeGroupMembership( + entityResponse + .getAspects() + .get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME) + .getValue() + .data()); } return nativeGroupMembership; } - String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, - @Nonnull final String groupDescription, final Authentication authentication) throws Exception { + String createGroupInfo( + @Nonnull final CorpGroupKey corpGroupKey, + @Nonnull final String groupName, + @Nonnull final String groupDescription, + final Authentication authentication) + throws Exception { Objects.requireNonNull(corpGroupKey, "corpGroupKey must not be null"); Objects.requireNonNull(groupName, "groupName must not be null"); Objects.requireNonNull(groupDescription, "groupDescription must not be null"); @@ -184,7 +221,10 @@ String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final corpGroupInfo.setGroups(new CorpGroupUrnArray()); corpGroupInfo.setMembers(new CorpuserUrnArray()); corpGroupInfo.setAdmins(new CorpuserUrnArray()); - corpGroupInfo.setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + corpGroupInfo.setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // Finally, create the MetadataChangeProposal. 
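    // The CorpGroupInfo aspect is wrapped in a proposal; ingestProposal below returns the
    // ingested group's urn as a string, which the createNativeGroup flow returns to the caller.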
final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -196,7 +236,8 @@ String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final return _entityClient.ingestProposal(proposal, authentication); } - void createNativeGroupOrigin(@Nonnull final Urn groupUrn, final Authentication authentication) throws Exception { + void createNativeGroupOrigin(@Nonnull final Urn groupUrn, final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Create the Group info. @@ -217,20 +258,33 @@ List<Urn> getExistingGroupMembers(@Nonnull final Urn groupUrn, final String acto Objects.requireNonNull(groupUrn, "groupUrn must not be null"); final EntityRelationships relationships = - _graphClient.getRelatedEntities(groupUrn.toString(), ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, 0, 500, actorUrnStr); - return relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + _graphClient.getRelatedEntities( + groupUrn.toString(), + ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + actorUrnStr); + return relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); } - void removeExistingGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List<Urn> userUrnList, - final Authentication authentication) throws Exception { + void removeExistingGroupMembers( + @Nonnull final Urn groupUrn, + @Nonnull final List<Urn> userUrnList, + final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); Objects.requireNonNull(userUrnList, "userUrnList must not be null"); final Set<Urn> userUrns = new HashSet<>(userUrnList); for (Urn userUrn : userUrns) { final Map<Urn, EntityResponse> entityResponseMap = - _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, userUrns, Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + userUrns, + Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), authentication); EntityResponse entityResponse = entityResponseMap.get(userUrn); if (entityResponse == null) { @@ -238,7 +292,8 @@ void removeExistingGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List } final GroupMembership groupMembership = - new GroupMembership(entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + new GroupMembership( + entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); if (groupMembership.getGroups().remove(groupUrn)) { // Finally, create the MetadataChangeProposal. 
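        // Mirrors the native-membership path above: only memberships that actually changed are
        // written back as proposals.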
final MetadataChangeProposal proposal = new MetadataChangeProposal(); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java index 35052810236a0..73add48958f60 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java @@ -1,5 +1,8 @@ package com.datahub.authentication.invite; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -24,10 +27,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class InviteTokenService { @@ -42,26 +41,33 @@ public Urn getInviteTokenUrn(@Nonnull final String inviteTokenStr) throws URISyn return Urn.createFromString(inviteTokenUrnStr); } - public boolean isInviteTokenValid(@Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + public boolean isInviteTokenValid( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) throws RemoteInvocationException { return _entityClient.exists(inviteTokenUrn, authentication); } @Nullable - public Urn getInviteTokenRole(@Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + public Urn getInviteTokenRole( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) throws URISyntaxException, RemoteInvocationException { - final com.linkedin.identity.InviteToken inviteToken = getInviteTokenEntity(inviteTokenUrn, authentication); + final com.linkedin.identity.InviteToken inviteToken = + getInviteTokenEntity(inviteTokenUrn, authentication); return inviteToken.hasRole() ? inviteToken.getRole() : null; } @Nonnull - public String getInviteToken(@Nullable final String roleUrnStr, boolean regenerate, - @Nonnull final Authentication authentication) throws Exception { + public String getInviteToken( + @Nullable final String roleUrnStr, + boolean regenerate, + @Nonnull final Authentication authentication) + throws Exception { final Filter inviteTokenFilter = roleUrnStr == null ? 
createInviteTokenFilter() : createInviteTokenFilter(roleUrnStr); final SearchResult searchResult = - _entityClient.filter(INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10, authentication); + _entityClient.filter( + INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10, authentication); final int numEntities = searchResult.getEntities().size(); // If there is more than one invite token, wipe all of them and generate a fresh one @@ -78,14 +84,19 @@ public String getInviteToken(@Nullable final String roleUrnStr, boolean regenera final SearchEntity searchEntity = searchResult.getEntities().get(0); final Urn inviteTokenUrn = searchEntity.getEntity(); - com.linkedin.identity.InviteToken inviteToken = getInviteTokenEntity(inviteTokenUrn, authentication); + com.linkedin.identity.InviteToken inviteToken = + getInviteTokenEntity(inviteTokenUrn, authentication); return _secretService.decrypt(inviteToken.getToken()); } - private com.linkedin.identity.InviteToken getInviteTokenEntity(@Nonnull final Urn inviteTokenUrn, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { + private com.linkedin.identity.InviteToken getInviteTokenEntity( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { final EntityResponse inviteTokenEntity = - _entityClient.getV2(INVITE_TOKEN_ENTITY_NAME, inviteTokenUrn, Collections.singleton(INVITE_TOKEN_ASPECT_NAME), + _entityClient.getV2( + INVITE_TOKEN_ENTITY_NAME, + inviteTokenUrn, + Collections.singleton(INVITE_TOKEN_ASPECT_NAME), authentication); if (inviteTokenEntity == null) { @@ -96,9 +107,12 @@ private com.linkedin.identity.InviteToken getInviteTokenEntity(@Nonnull final Ur // If invite token aspect is not present, create a new one. Otherwise, return existing one. 
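    // (In practice a missing aspect is treated as an error: the check below throws rather than
    // creating a new aspect; fresh tokens are minted through createInviteToken instead.)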
if (!aspectMap.containsKey(INVITE_TOKEN_ASPECT_NAME)) { throw new RuntimeException( - String.format("Invite token %s does not contain aspect %s", inviteTokenUrn, INVITE_TOKEN_ASPECT_NAME)); + String.format( + "Invite token %s does not contain aspect %s", + inviteTokenUrn, INVITE_TOKEN_ASPECT_NAME)); } - return new com.linkedin.identity.InviteToken(aspectMap.get(INVITE_TOKEN_ASPECT_NAME).getValue().data()); + return new com.linkedin.identity.InviteToken( + aspectMap.get(INVITE_TOKEN_ASPECT_NAME).getValue().data()); } private Filter createInviteTokenFilter() { @@ -140,7 +154,8 @@ private Filter createInviteTokenFilter(@Nonnull final String roleUrnStr) { } @Nonnull - private String createInviteToken(@Nullable final String roleUrnStr, @Nonnull final Authentication authentication) + private String createInviteToken( + @Nullable final String roleUrnStr, @Nonnull final Authentication authentication) throws Exception { String inviteTokenStr = _secretService.generateUrlSafeToken(INVITE_TOKEN_LENGTH); String hashedInviteTokenStr = _secretService.hashString(inviteTokenStr); @@ -155,21 +170,26 @@ private String createInviteToken(@Nullable final String roleUrnStr, @Nonnull fin // Ingest new InviteToken aspect final MetadataChangeProposal proposal = - buildMetadataChangeProposal(INVITE_TOKEN_ENTITY_NAME, inviteTokenKey, INVITE_TOKEN_ASPECT_NAME, - inviteTokenAspect); + buildMetadataChangeProposal( + INVITE_TOKEN_ENTITY_NAME, inviteTokenKey, INVITE_TOKEN_ASPECT_NAME, inviteTokenAspect); _entityClient.ingestProposal(proposal, authentication); return inviteTokenStr; } - private void deleteExistingInviteTokens(@Nonnull final SearchResult searchResult, - @Nonnull final Authentication authentication) { - searchResult.getEntities().forEach(entity -> { - try { - _entityClient.deleteEntity(entity.getEntity(), authentication); - } catch (RemoteInvocationException e) { - log.error(String.format("Failed to delete invite token entity %s", entity.getEntity()), e); - } - }); + private void deleteExistingInviteTokens( + @Nonnull final SearchResult searchResult, @Nonnull final Authentication authentication) { + searchResult + .getEntities() + .forEach( + entity -> { + try { + _entityClient.deleteEntity(entity.getEntity(), authentication); + } catch (RemoteInvocationException e) { + log.error( + String.format("Failed to delete invite token entity %s", entity.getEntity()), + e); + } + }); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java index c3b7c4bcf3be7..ec5d5f1e436b7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java @@ -1,5 +1,8 @@ package com.datahub.authentication.post; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Media; import com.linkedin.common.MediaType; @@ -20,10 +23,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class PostService { @@ -38,9 +37,14 @@ public Media mapMedia(@Nonnull String type, @Nonnull String location) { } @Nonnull - public PostContent mapPostContent(@Nonnull String contentType, 
@Nonnull String title, @Nullable String description, @Nullable String link, + public PostContent mapPostContent( + @Nonnull String contentType, + @Nonnull String title, + @Nullable String description, + @Nullable String link, @Nullable Media media) { - final PostContent postContent = new PostContent().setType(PostContentType.valueOf(contentType)).setTitle(title); + final PostContent postContent = + new PostContent().setType(PostContentType.valueOf(contentType)).setTitle(title); if (description != null) { postContent.setDescription(description); } @@ -53,15 +57,20 @@ public PostContent mapPostContent(@Nonnull String contentType, @Nonnull String t return postContent; } - public boolean createPost(@Nonnull String postType, @Nonnull PostContent postContent, - @Nonnull Authentication authentication) throws RemoteInvocationException { + public boolean createPost( + @Nonnull String postType, + @Nonnull PostContent postContent, + @Nonnull Authentication authentication) + throws RemoteInvocationException { final String uuid = UUID.randomUUID().toString(); final PostKey postKey = new PostKey().setId(uuid); final long currentTimeMillis = Instant.now().toEpochMilli(); - final PostInfo postInfo = new PostInfo().setType(PostType.valueOf(postType)) - .setContent(postContent) - .setCreated(currentTimeMillis) - .setLastModified(currentTimeMillis); + final PostInfo postInfo = + new PostInfo() + .setType(PostType.valueOf(postType)) + .setContent(postContent) + .setCreated(currentTimeMillis) + .setLastModified(currentTimeMillis); final MetadataChangeProposal proposal = buildMetadataChangeProposal(POST_ENTITY_NAME, postKey, POST_INFO_ASPECT_NAME, postInfo); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java index ea6de3fc7dca0..bb2d5f0efd2c7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java @@ -12,16 +12,15 @@ import java.security.KeyFactory; import java.security.NoSuchAlgorithmException; import java.security.PublicKey; +import java.security.interfaces.RSAPublicKey; import java.security.spec.InvalidKeySpecException; +import java.security.spec.RSAPublicKeySpec; import java.security.spec.X509EncodedKeySpec; import java.util.Base64; import java.util.HashSet; -import java.security.spec.RSAPublicKeySpec; -import java.security.interfaces.RSAPublicKey; import org.json.JSONArray; import org.json.JSONObject; - public class DataHubJwtSigningKeyResolver extends SigningKeyResolverAdapter { public HttpClient client; @@ -38,9 +37,7 @@ public DataHubJwtSigningKeyResolver(HashSet<String> list, String publicKey, Stri client = HttpClient.newHttpClient(); } - /** - * inspect the header or claims, lookup and return the signing key - **/ + /** inspect the header or claims, lookup and return the signing key */ @Override public Key resolveSigningKey(JwsHeader jwsHeader, Claims claims) { @@ -66,12 +63,11 @@ public Key resolveSigningKey(JwsHeader jwsHeader, Claims claims) { return key; } - /** - * Get public keys from issuer and filter public key for token signature based on token keyId. - **/ + /** Get public keys from issuer and filter public key for token signature based on token keyId. 
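+   * The key set is fetched from {@code <issuer>/protocol/openid-connect/certs}, as the request
+   * construction just below shows.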
   */
  private PublicKey loadPublicKey(String issuer, String keyId) throws Exception {

-    HttpRequest request = HttpRequest.newBuilder().uri(URI.create(issuer + "/protocol/openid-connect/certs")).build();
+    HttpRequest request =
+        HttpRequest.newBuilder().uri(URI.create(issuer + "/protocol/openid-connect/certs")).build();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());

    var body = new JSONObject(response.body());
@@ -87,9 +83,9 @@ private PublicKey loadPublicKey(String issuer, String keyId) throws Exception {
  }

  /**
-   * Generate public key based on token algorithem and public token received from issuer.
-   * Supported algo RSA
-   **/
+   * Generate a public key based on the token algorithm and the public token received from the
+   * issuer. Supported algorithm: RSA.
+   */
  private PublicKey getPublicKey(JSONObject token) throws Exception {

    PublicKey publicKey = null;
@@ -97,8 +93,10 @@ private PublicKey getPublicKey(JSONObject token) throws Exception {
      case "RSA":
        try {
          KeyFactory kf = KeyFactory.getInstance("RSA");
-          BigInteger modulus = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString()));
-          BigInteger exponent = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString()));
+          BigInteger modulus =
+              new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString()));
+          BigInteger exponent =
+              new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString()));
          publicKey = kf.generatePublic(new RSAPublicKeySpec(modulus, exponent));
        } catch (InvalidKeySpecException e) {
          throw new InvalidKeySpecException("Invalid public key", e);
@@ -113,10 +111,7 @@ private PublicKey getPublicKey(JSONObject token) throws Exception {
    return publicKey;
  }

-  /**
-   * Generate public Key based on algorithem and 64 encoded public key.
-   * Supported algo RSA
-   **/
+  /** Generate a public key based on the algorithm and a Base64-encoded public key. Supported algorithm: RSA. */
  private PublicKey generatePublicKey(String alg, String key) throws Exception {

    PublicKey publicKey = null;
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
index 125bba7ec3280..2879f15784370 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java
@@ -32,10 +32,10 @@
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang.ArrayUtils;
 
-
 /**
- * Service responsible for generating JWT tokens & managing the associated metadata entities in GMS for use within
- * DataHub that are stored in the entity service so that we can list & revoke tokens as needed.
+ * Service responsible for generating JWT tokens & managing the associated metadata entities in GMS
+ * for use within DataHub that are stored in the entity service so that we can list & revoke tokens
+ * as needed.
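+ *
+ * <p>Revocation checks are answered from a loading cache (at most 10,000 entries with a
+ * five-minute expiry; see _revokedTokenCache below), so a freshly revoked token may still
+ * validate for a short window.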
*/ @Slf4j public class StatefulTokenService extends StatelessTokenService { @@ -44,47 +44,65 @@ public class StatefulTokenService extends StatelessTokenService { private final LoadingCache<String, Boolean> _revokedTokenCache; private final String salt; - public StatefulTokenService(@Nonnull final String signingKey, @Nonnull final String signingAlgorithm, - @Nullable final String iss, @Nonnull final EntityService entityService, @Nonnull final String salt) { + public StatefulTokenService( + @Nonnull final String signingKey, + @Nonnull final String signingAlgorithm, + @Nullable final String iss, + @Nonnull final EntityService entityService, + @Nonnull final String salt) { super(signingKey, signingAlgorithm, iss); this._entityService = entityService; - this._revokedTokenCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(5, TimeUnit.MINUTES) - .build(new CacheLoader<String, Boolean>() { - @Override - public Boolean load(final String key) { - final Urn accessUrn = Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); - return !_entityService.exists(accessUrn); - } - }); + this._revokedTokenCache = + CacheBuilder.newBuilder() + .maximumSize(10000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build( + new CacheLoader<String, Boolean>() { + @Override + public Boolean load(final String key) { + final Urn accessUrn = + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); + return !_entityService.exists(accessUrn); + } + }); this.salt = salt; } /** * Generates a JWT for an actor with a default expiration time. * - * Note that the caller of this method is expected to authorize the action of generating a token. - * + * <p>Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Override public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor) { - throw new UnsupportedOperationException("Please use generateToken(Token, Actor, String, String, String) endpoint " - + "instead. Reason: StatefulTokenService requires that all tokens have a name & ownerUrn specified."); + throw new UnsupportedOperationException( + "Please use generateToken(Token, Actor, String, String, String) endpoint " + + "instead. 
Reason: StatefulTokenService requires that all tokens have a name & ownerUrn specified."); } @Nonnull - public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor, - @Nonnull final String name, final String description, final String actorUrn) { + public String generateAccessToken( + @Nonnull final TokenType type, + @Nonnull final Actor actor, + @Nonnull final String name, + final String description, + final String actorUrn) { Date date = new Date(); long timeMilli = date.getTime(); - return generateAccessToken(type, actor, DEFAULT_EXPIRES_IN_MS, timeMilli, name, description, actorUrn); + return generateAccessToken( + type, actor, DEFAULT_EXPIRES_IN_MS, timeMilli, name, description, actorUrn); } @Nonnull - public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor, - @Nullable final Long expiresInMs, @Nonnull final long createdAtInMs, @Nonnull final String tokenName, - @Nullable final String tokenDescription, final String actorUrn) { + public String generateAccessToken( + @Nonnull final TokenType type, + @Nonnull final Actor actor, + @Nullable final Long expiresInMs, + @Nonnull final long createdAtInMs, + @Nonnull final String tokenName, + @Nullable final String tokenDescription, + final String actorUrn) { Objects.requireNonNull(type); Objects.requireNonNull(actor); @@ -101,7 +119,8 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final final MetadataChangeProposal proposal = new MetadataChangeProposal(); - // Create the access token key --> use a hashed access token value as a unique id to ensure it's not duplicated. + // Create the access token key --> use a hashed access token value as a unique id to ensure it's + // not duplicated. final DataHubAccessTokenKey key = new DataHubAccessTokenKey(); key.setId(tokenHash); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); @@ -124,14 +143,20 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final proposal.setChangeType(ChangeType.UPSERT); log.info("About to ingest access token metadata {}", proposal); - final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); + final AuditStamp auditStamp = + AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); - Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(proposal), + Stream<MetadataChangeProposal> proposalStream = + Stream.concat( + Stream.of(proposal), AspectUtils.getAdditionalChanges(proposal, _entityService).stream()); - _entityService.ingestProposal(AspectsBatchImpl.builder() + _entityService.ingestProposal( + AspectsBatchImpl.builder() .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(), auditStamp, false); + .build(), + auditStamp, + false); return accessToken; } @@ -153,7 +178,8 @@ public TokenClaims validateAccessToken(@Nonnull String accessToken) throws Token this.revokeAccessToken(hash(accessToken)); throw e; } catch (final ExecutionException e) { - throw new TokenException("Failed to validate DataHub token: Unable to load token information from store", e); + throw new TokenException( + "Failed to validate DataHub token: Unable to load token information from store", e); } } @@ -171,9 +197,7 @@ public void revokeAccessToken(@Nonnull String hashedToken) throws TokenException throw new TokenException("Access token no longer exists"); } - /** - * Hashes the input after salting it. 
- */ + /** Hashes the input after salting it. */ public String hash(String input) { final byte[] saltingKeyBytes = this.salt.getBytes(); final byte[] inputBytes = input.getBytes(); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java index fa8ec8c818734..71f12477a33b2 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java @@ -21,10 +21,9 @@ import javax.annotation.Nullable; import javax.crypto.spec.SecretKeySpec; - /** - * Service responsible for generating JWT tokens for use within DataHub in stateless way. - * This service is responsible only for generating tokens, it will not do anything else with them. + * Service responsible for generating JWT tokens for use within DataHub in stateless way. This + * service is responsible only for generating tokens, it will not do anything else with them. */ public class StatelessTokenService { @@ -40,17 +39,14 @@ public class StatelessTokenService { private final String iss; public StatelessTokenService( - @Nonnull final String signingKey, - @Nonnull final String signingAlgorithm - ) { + @Nonnull final String signingKey, @Nonnull final String signingAlgorithm) { this(signingKey, signingAlgorithm, null); } public StatelessTokenService( @Nonnull final String signingKey, @Nonnull final String signingAlgorithm, - @Nullable final String iss - ) { + @Nullable final String iss) { this.signingKey = Objects.requireNonNull(signingKey); this.signingAlgorithm = validateAlgorithm(Objects.requireNonNull(signingAlgorithm)); this.iss = iss; @@ -59,8 +55,8 @@ public StatelessTokenService( /** * Generates a JWT for an actor with a default expiration time. * - * Note that the caller of this method is expected to authorize the action of generating a token. - * + * <p>Note that the caller of this method is expected to authorize the action of generating a + * token. */ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor) { return generateAccessToken(type, actor, DEFAULT_EXPIRES_IN_MS); @@ -69,19 +65,19 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final /** * Generates a JWT for an actor with a specific duration in milliseconds. * - * Note that the caller of this method is expected to authorize the action of generating a token. - * + * <p>Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Nonnull public String generateAccessToken( - @Nonnull final TokenType type, - @Nonnull final Actor actor, - @Nullable final Long expiresInMs) { + @Nonnull final TokenType type, @Nonnull final Actor actor, @Nullable final Long expiresInMs) { Objects.requireNonNull(type); Objects.requireNonNull(actor); Map<String, Object> claims = new HashMap<>(); - claims.put(TokenClaims.TOKEN_VERSION_CLAIM_NAME, String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. + claims.put( + TokenClaims.TOKEN_VERSION_CLAIM_NAME, + String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. 
claims.put(TokenClaims.TOKEN_TYPE_CLAIM_NAME, type.toString()); claims.put(TokenClaims.ACTOR_TYPE_CLAIM_NAME, actor.getType()); claims.put(TokenClaims.ACTOR_ID_CLAIM_NAME, actor.getId()); @@ -91,7 +87,8 @@ public String generateAccessToken( /** * Generates a JWT for a custom set of claims. * - * Note that the caller of this method is expected to authorize the action of generating a token. + * <p>Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Nonnull public String generateAccessToken( @@ -100,10 +97,8 @@ public String generateAccessToken( @Nullable final Long expiresInMs) { Objects.requireNonNull(sub); Objects.requireNonNull(claims); - final JwtBuilder builder = Jwts.builder() - .addClaims(claims) - .setId(UUID.randomUUID().toString()) - .setSubject(sub); + final JwtBuilder builder = + Jwts.builder().addClaims(claims).setId(UUID.randomUUID().toString()).setSubject(sub); if (expiresInMs != null) { builder.setExpiration(new Date(System.currentTimeMillis() + expiresInMs)); @@ -111,7 +106,7 @@ public String generateAccessToken( if (this.iss != null) { builder.setIssuer(this.iss); } - byte [] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); + byte[] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); final Key signingKey = new SecretKeySpec(apiKeySecretBytes, this.signingAlgorithm.getJcaName()); return builder.signWith(signingKey, this.signingAlgorithm).compact(); } @@ -119,18 +114,16 @@ public String generateAccessToken( /** * Validates a JWT issued by this service. * - * Throws an {@link TokenException} in the case that the token cannot be verified. + * <p>Throws an {@link TokenException} in the case that the token cannot be verified. */ @Nonnull public TokenClaims validateAccessToken(@Nonnull final String accessToken) throws TokenException { Objects.requireNonNull(accessToken); try { - byte [] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); + byte[] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); final String base64Key = Base64.getEncoder().encodeToString(apiKeySecretBytes); - final Jws<Claims> jws = Jwts.parserBuilder() - .setSigningKey(base64Key) - .build() - .parseClaimsJws(accessToken); + final Jws<Claims> jws = + Jwts.parserBuilder().setSigningKey(base64Key).build().parseClaimsJws(accessToken); validateTokenAlgorithm(jws.getHeader().getAlgorithm()); final Claims claims = jws.getBody(); final String tokenVersion = claims.get(TokenClaims.TOKEN_VERSION_CLAIM_NAME, String.class); @@ -138,33 +131,37 @@ public TokenClaims validateAccessToken(@Nonnull final String accessToken) throws final String actorId = claims.get(TokenClaims.ACTOR_ID_CLAIM_NAME, String.class); final String actorType = claims.get(TokenClaims.ACTOR_TYPE_CLAIM_NAME, String.class); if (tokenType != null && actorId != null && actorType != null) { - return new TokenClaims( - TokenVersion.fromNumericStringValue(tokenVersion), - TokenType.valueOf(tokenType), - ActorType.valueOf(actorType), - actorId, - claims.getExpiration() == null ? null : claims.getExpiration().getTime()); + return new TokenClaims( + TokenVersion.fromNumericStringValue(tokenVersion), + TokenType.valueOf(tokenType), + ActorType.valueOf(actorType), + actorId, + claims.getExpiration() == null ? null : claims.getExpiration().getTime()); } } catch (io.jsonwebtoken.ExpiredJwtException e) { throw new TokenExpiredException("Failed to validate DataHub token. 
Token has expired.", e); } catch (Exception e) { throw new TokenException("Failed to validate DataHub token", e); } - throw new TokenException("Failed to validate DataHub token: Found malformed or missing 'actor' claim."); + throw new TokenException( + "Failed to validate DataHub token: Found malformed or missing 'actor' claim."); } private void validateTokenAlgorithm(final String algorithm) throws TokenException { try { validateAlgorithm(algorithm); } catch (UnsupportedOperationException e) { - throw new TokenException(String.format("Failed to validate signing algorithm for provided JWT! Found %s", algorithm)); + throw new TokenException( + String.format( + "Failed to validate signing algorithm for provided JWT! Found %s", algorithm)); } } private SignatureAlgorithm validateAlgorithm(final String algorithm) { if (!SUPPORTED_ALGORITHMS.contains(algorithm)) { throw new UnsupportedOperationException( - String.format("Failed to create Token Service. Unsupported algorithm %s provided", algorithm)); + String.format( + "Failed to create Token Service. Unsupported algorithm %s provided", algorithm)); } return SignatureAlgorithm.valueOf(algorithm); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java index 05890cd2181ab..83e23a07918e7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java @@ -8,10 +8,7 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Contains strongly-typed claims that appear in all DataHub granted access tokens. - */ +/** Contains strongly-typed claims that appear in all DataHub granted access tokens. */ public class TokenClaims { public static final String TOKEN_VERSION_CLAIM_NAME = "version"; @@ -21,42 +18,40 @@ public class TokenClaims { public static final String EXPIRATION_CLAIM = "exp"; /** - * The type of the access token, e.g. a session token issued by the frontend or a personal access token - * generated for programmatic use. + * The type of the access token, e.g. a session token issued by the frontend or a personal access + * token generated for programmatic use. */ private final TokenVersion tokenVersion; /** - * The type of the access token, e.g. a session token issued by the frontend or a personal access token - * generated for programmatic use. + * The type of the access token, e.g. a session token issued by the frontend or a personal access + * token generated for programmatic use. */ private final TokenType tokenType; /** * The type of an authenticated DataHub actor. * - * E.g. "urn:li:corpuser:johnsmith" is of type USER. + * <p>E.g. "urn:li:corpuser:johnsmith" is of type USER. */ private final ActorType actorType; /** * A unique identifier for an actor of a particular type. * - * E.g. "johnsmith" inside urn:li:corpuser:johnsmith. + * <p>E.g. "johnsmith" inside urn:li:corpuser:johnsmith. */ private final String actorId; - /** - * The expiration time in milliseconds if one exists, null otherwise. - */ + /** The expiration time in milliseconds if one exists, null otherwise. 
*/ private final Long expirationInMs; public TokenClaims( - @Nonnull TokenVersion tokenVersion, - @Nonnull TokenType tokenType, - @Nonnull final ActorType actorType, - @Nonnull final String actorId, - @Nullable Long expirationInMs) { + @Nonnull TokenVersion tokenVersion, + @Nonnull TokenType tokenType, + @Nonnull final ActorType actorType, + @Nonnull final String actorId, + @Nullable Long expirationInMs) { Objects.requireNonNull(tokenVersion); Objects.requireNonNull(tokenType); Objects.requireNonNull(actorType); @@ -68,51 +63,38 @@ public TokenClaims( this.expirationInMs = expirationInMs; } - /** - * Returns the version of the access token - */ + /** Returns the version of the access token */ public TokenVersion getTokenVersion() { return this.tokenVersion; } - /** - * Returns the type of an authenticated DataHub actor. - */ + /** Returns the type of an authenticated DataHub actor. */ public TokenType getTokenType() { return this.tokenType; } - /** - * Returns the type of an authenticated DataHub actor. - */ + /** Returns the type of an authenticated DataHub actor. */ public ActorType getActorType() { return this.actorType; } - /** - * Returns the expiration time in milliseconds if one exists, null otherwise. - */ + /** Returns the expiration time in milliseconds if one exists, null otherwise. */ public Long getExpirationInMs() { return this.expirationInMs; } - /** - * Returns a unique id associated with a DataHub actor of a particular type. - */ + /** Returns a unique id associated with a DataHub actor of a particular type. */ public String getActorId() { return this.actorId; } - /** - * Returns the claims in the DataHub Access token as a map. - */ + /** Returns the claims in the DataHub Access token as a map. */ public Map<String, Object> asMap() { return ImmutableMap.of( TOKEN_VERSION_CLAIM_NAME, this.tokenVersion.numericValue, TOKEN_TYPE_CLAIM_NAME, this.tokenType.toString(), ACTOR_TYPE_CLAIM_NAME, this.actorType.toString(), ACTOR_ID_CLAIM_NAME, this.actorId, - EXPIRATION_CLAIM, Optional.ofNullable(this.expirationInMs) - ); + EXPIRATION_CLAIM, Optional.ofNullable(this.expirationInMs)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java index 24b6daa830f47..9d239482f85f8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java @@ -1,8 +1,6 @@ package com.datahub.authentication.token; -/** - * A checked exception that is thrown when a DataHub-issued access token cannot be verified. - */ +/** A checked exception that is thrown when a DataHub-issued access token cannot be verified. 
*/ public class TokenException extends Exception { public TokenException(final String message) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java index 6c4e5e037d4da..ae5d2daddcc0e 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java @@ -1,8 +1,6 @@ package com.datahub.authentication.token; -/** - * A checked exception that is thrown when a DataHub-issued access token cannot be verified. - */ +/** A checked exception that is thrown when a DataHub-issued access token cannot be verified. */ public class TokenExpiredException extends TokenException { public TokenExpiredException(final String message) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java index ca5de37b0fad4..475f79da3805c 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java @@ -1,16 +1,10 @@ package com.datahub.authentication.token; -/** - * Represents a type of JWT access token granted by the {@link StatelessTokenService}. - */ +/** Represents a type of JWT access token granted by the {@link StatelessTokenService}. */ public enum TokenType { - /** - * A UI-initiated session token - */ + /** A UI-initiated session token */ SESSION, - /** - * A personal token for programmatic use - */ + /** A personal token for programmatic use */ PERSONAL; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java index 8f9189bf17b95..f1b362b71dfb4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java @@ -4,19 +4,14 @@ import java.util.Objects; import java.util.Optional; - -/** - * Represents a type of JWT access token granted by the {@link StatelessTokenService}. - */ +/** Represents a type of JWT access token granted by the {@link StatelessTokenService}. */ public enum TokenVersion { - /** - * The first version of the DataHub access token. - */ + /** The first version of the DataHub access token. */ ONE(1), /** - * The second version of the DataHub access token (latest). - * Used to represent tokens that are stateful and are stored within DataHub. + * The second version of the DataHub access token (latest). Used to represent tokens that are + * stateful and are stored within DataHub. */ TWO(2); @@ -26,37 +21,35 @@ public enum TokenVersion { this.numericValue = numericValue; } - /** - * Returns the numeric representation of the version - */ + /** Returns the numeric representation of the version */ public int getNumericValue() { return this.numericValue; } - /** - * Returns a {@link TokenVersion} provided a numeric token version. - */ + /** Returns a {@link TokenVersion} provided a numeric token version. 
*/ public static TokenVersion fromNumericValue(int num) { - Optional<TokenVersion> maybeVersion = Arrays.stream(TokenVersion.values()) - .filter(version -> num == version.getNumericValue()) - .findFirst(); + Optional<TokenVersion> maybeVersion = + Arrays.stream(TokenVersion.values()) + .filter(version -> num == version.getNumericValue()) + .findFirst(); if (maybeVersion.isPresent()) { return maybeVersion.get(); } - throw new IllegalArgumentException(String.format("Failed to find DataHubAccessTokenVersion %s", num)); + throw new IllegalArgumentException( + String.format("Failed to find DataHubAccessTokenVersion %s", num)); } - /** - * Returns a {@link TokenVersion} provided a stringified numeric token version. - */ + /** Returns a {@link TokenVersion} provided a stringified numeric token version. */ public static TokenVersion fromNumericStringValue(String num) { Objects.requireNonNull(num); - Optional<TokenVersion> maybeVersion = Arrays.stream(TokenVersion.values()) - .filter(version -> Integer.parseInt(num) == version.getNumericValue()) - .findFirst(); + Optional<TokenVersion> maybeVersion = + Arrays.stream(TokenVersion.values()) + .filter(version -> Integer.parseInt(num) == version.getNumericValue()) + .findFirst(); if (maybeVersion.isPresent()) { return maybeVersion.get(); } - throw new IllegalArgumentException(String.format("Failed to find DataHubAccessTokenVersion %s", num)); + throw new IllegalArgumentException( + String.format("Failed to find DataHubAccessTokenVersion %s", num)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java index bff675ddd9cb2..741d176f98c1b 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java @@ -1,5 +1,7 @@ package com.datahub.authentication.user; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConfiguration; import com.linkedin.common.AuditStamp; @@ -22,12 +24,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - -/** - * Service responsible for creating, updating and authenticating native DataHub users. - */ +/** Service responsible for creating, updating and authenticating native DataHub users. 
*/ @Slf4j @RequiredArgsConstructor public class NativeUserService { @@ -38,8 +35,14 @@ public class NativeUserService { private final SecretService _secretService; private final AuthenticationConfiguration _authConfig; - public void createNativeUser(@Nonnull String userUrnString, @Nonnull String fullName, @Nonnull String email, - @Nonnull String title, @Nonnull String password, @Nonnull Authentication authentication) throws Exception { + public void createNativeUser( + @Nonnull String userUrnString, + @Nonnull String fullName, + @Nonnull String email, + @Nonnull String title, + @Nonnull String password, + @Nonnull Authentication authentication) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnSting must not be null!"); Objects.requireNonNull(fullName, "fullName must not be null!"); Objects.requireNonNull(email, "email must not be null!"); @@ -49,7 +52,8 @@ public void createNativeUser(@Nonnull String userUrnString, @Nonnull String full final Urn userUrn = Urn.createFromString(userUrnString); if (_entityService.exists(userUrn) - // Should never fail these due to Controller level check, but just in case more usages get put in + // Should never fail these due to Controller level check, but just in case more usages get + // put in || userUrn.toString().equals(SYSTEM_ACTOR) || userUrn.toString().equals(new CorpuserUrn(_authConfig.getSystemClientId()).toString()) || userUrn.toString().equals(DATAHUB_ACTOR) @@ -61,8 +65,13 @@ public void createNativeUser(@Nonnull String userUrnString, @Nonnull String full updateCorpUserCredentials(userUrn, password, authentication); } - void updateCorpUserInfo(@Nonnull Urn userUrn, @Nonnull String fullName, @Nonnull String email, @Nonnull String title, - Authentication authentication) throws Exception { + void updateCorpUserInfo( + @Nonnull Urn userUrn, + @Nonnull String fullName, + @Nonnull String email, + @Nonnull String title, + Authentication authentication) + throws Exception { // Construct corpUserInfo final CorpUserInfo corpUserInfo = new CorpUserInfo(); corpUserInfo.setFullName(fullName); @@ -86,7 +95,9 @@ void updateCorpUserStatus(@Nonnull Urn userUrn, Authentication authentication) t CorpUserStatus corpUserStatus = new CorpUserStatus(); corpUserStatus.setStatus(CORP_USER_STATUS_ACTIVE); corpUserStatus.setLastModified( - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis())); // Ingest corpUserStatus MCP final MetadataChangeProposal corpUserStatusProposal = new MetadataChangeProposal(); @@ -98,7 +109,8 @@ void updateCorpUserStatus(@Nonnull Urn userUrn, Authentication authentication) t _entityClient.ingestProposal(corpUserStatusProposal, authentication); } - void updateCorpUserCredentials(@Nonnull Urn userUrn, @Nonnull String password, @Nonnull Authentication authentication) + void updateCorpUserCredentials( + @Nonnull Urn userUrn, @Nonnull String password, @Nonnull Authentication authentication) throws Exception { // Construct corpUserCredentials CorpUserCredentials corpUserCredentials = new CorpUserCredentials(); @@ -118,15 +130,18 @@ void updateCorpUserCredentials(@Nonnull Urn userUrn, @Nonnull String password, @ _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); } - public String generateNativeUserPasswordResetToken(@Nonnull String userUrnString, Authentication authentication) - throws Exception { + public String generateNativeUserPasswordResetToken( + @Nonnull 
String userUrnString, Authentication authentication) throws Exception { Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { throw new RuntimeException("User does not exist or is a non-native user!"); } // Add reset token to CorpUserCredentials @@ -148,8 +163,12 @@ public String generateNativeUserPasswordResetToken(@Nonnull String userUrnString return passwordResetToken; } - public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull String password, - @Nonnull String resetToken, Authentication authentication) throws Exception { + public void resetCorpUserCredentials( + @Nonnull String userUrnString, + @Nonnull String password, + @Nonnull String resetToken, + Authentication authentication) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); Objects.requireNonNull(password, "password must not be null!"); Objects.requireNonNull(resetToken, "resetToken must not be null!"); @@ -157,24 +176,30 @@ public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull Str Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { throw new RuntimeException("User does not exist!"); } - if (!corpUserCredentials.hasPasswordResetToken() || !corpUserCredentials.hasPasswordResetTokenExpirationTimeMillis() + if (!corpUserCredentials.hasPasswordResetToken() + || !corpUserCredentials.hasPasswordResetTokenExpirationTimeMillis() || corpUserCredentials.getPasswordResetTokenExpirationTimeMillis() == null) { throw new RuntimeException("User has not generated a password reset token!"); } if (!_secretService.decrypt(corpUserCredentials.getPasswordResetToken()).equals(resetToken)) { - throw new RuntimeException("Invalid reset token. Please ask your administrator to send you an updated link!"); + throw new RuntimeException( + "Invalid reset token. Please ask your administrator to send you an updated link!"); } long currentTimeMillis = Instant.now().toEpochMilli(); if (currentTimeMillis > corpUserCredentials.getPasswordResetTokenExpirationTimeMillis()) { - throw new RuntimeException("Reset token has expired! Please ask your administrator to create a new one"); + throw new RuntimeException( + "Reset token has expired! 
Please ask your administrator to create a new one"); } // Construct corpUserCredentials @@ -194,14 +219,18 @@ public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull Str _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); } - public boolean doesPasswordMatch(@Nonnull String userUrnString, @Nonnull String password) throws Exception { + public boolean doesPasswordMatch(@Nonnull String userUrnString, @Nonnull String password) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnSting must not be null!"); Objects.requireNonNull(password, "Password must not be null!"); Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { return false; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java index 7e7a1de176f06..9e8c1928c9de0 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java @@ -14,12 +14,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * A configurable chain of {@link Authorizer}s executed in series to attempt to authenticate an inbound request. + * A configurable chain of {@link Authorizer}s executed in series to attempt to authenticate an + * inbound request. * - * Individual {@link Authorizer}s are registered with the chain using {@link #register(Authorizer)}. - * The chain can be executed by invoking {@link #authorize(AuthorizationRequest)}. + * <p>Individual {@link Authorizer}s are registered with the chain using {@link + * #register(Authorizer)}. The chain can be executed by invoking {@link + * #authorize(AuthorizationRequest)}. */ @Slf4j public class AuthorizerChain implements Authorizer { @@ -41,7 +42,7 @@ public void init(@Nonnull Map<String, Object> authorizerConfig, @Nonnull Authori /** * Executes a set of {@link Authorizer}s and returns the first successful authentication result. * - * Returns an instance of {@link AuthorizationResult}. + * <p>Returns an instance of {@link AuthorizationResult}. */ @Nullable public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request) { @@ -51,10 +52,13 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request for (final Authorizer authorizer : this.authorizers) { try { - log.debug("Executing Authorizer with class name {}", authorizer.getClass().getCanonicalName()); + log.debug( + "Executing Authorizer with class name {}", authorizer.getClass().getCanonicalName()); log.debug("Authorization Request: {}", request.toString()); - // The library came with plugin can use the contextClassLoader to load the classes. For example apache-ranger library does this. - // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class loading request from plugin's home directory, + // The library came with plugin can use the contextClassLoader to load the classes. 
For + // example apache-ranger library does this. + // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class + // loading request from plugin's home directory, // otherwise plugin's internal library wouldn't be able to find their dependent classes Thread.currentThread().setContextClassLoader(authorizer.getClass().getClassLoader()); AuthorizationResult result = authorizer.authorize(request); @@ -67,12 +71,16 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request return result; } else { - log.debug("Received DENY result from Authorizer with class name {}. message: {}", - authorizer.getClass().getCanonicalName(), result.getMessage()); + log.debug( + "Received DENY result from Authorizer with class name {}. message: {}", + authorizer.getClass().getCanonicalName(), + result.getMessage()); } } catch (Exception e) { - log.error("Caught exception while attempting to authorize request using Authorizer {}. Skipping authorizer.", - authorizer.getClass().getCanonicalName(), e); + log.error( + "Caught exception while attempting to authorize request using Authorizer {}. Skipping authorizer.", + authorizer.getClass().getCanonicalName(), + e); } finally { Thread.currentThread().setContextClassLoader(contextClassLoader); } @@ -87,16 +95,19 @@ public AuthorizedActors authorizedActors(String privilege, Optional<EntitySpec> return null; } - AuthorizedActors finalAuthorizedActors = this.authorizers.get(0).authorizedActors(privilege, resourceSpec); + AuthorizedActors finalAuthorizedActors = + this.authorizers.get(0).authorizedActors(privilege, resourceSpec); for (int i = 1; i < this.authorizers.size(); i++) { - finalAuthorizedActors = mergeAuthorizedActors(finalAuthorizedActors, - this.authorizers.get(i).authorizedActors(privilege, resourceSpec)); + finalAuthorizedActors = + mergeAuthorizedActors( + finalAuthorizedActors, + this.authorizers.get(i).authorizedActors(privilege, resourceSpec)); } return finalAuthorizedActors; } - private AuthorizedActors mergeAuthorizedActors(@Nullable AuthorizedActors original, - @Nullable AuthorizedActors other) { + private AuthorizedActors mergeAuthorizedActors( + @Nullable AuthorizedActors original, @Nullable AuthorizedActors other) { if (original == null) { return other; } @@ -139,10 +150,8 @@ private AuthorizedActors mergeAuthorizedActors(@Nullable AuthorizedActors origin .build(); } - /** - * Returns an instance of default {@link DataHubAuthorizer} - */ + /** Returns an instance of default {@link DataHubAuthorizer} */ public DataHubAuthorizer getDefaultAuthorizer() { return (DataHubAuthorizer) defaultAuthorizer; } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index 956d635c7901a..9ae95bd4e92b6 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -8,7 +8,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.policy.DataHubPolicyInfo; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; @@ -26,26 +25,23 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * The Authorizer is a singleton class responsible for authorizing - * operations on the 
DataHub platform via DataHub Policies. + * The Authorizer is a singleton class responsible for authorizing operations on the DataHub + * platform via DataHub Policies. * - * Currently, the authorizer is implemented as a spring-instantiated Singleton - * which manages its own thread-pool used for resolving policy predicates. + * <p>Currently, the authorizer is implemented as a spring-instantiated Singleton which manages its + * own thread-pool used for resolving policy predicates. */ // TODO: Decouple this from all Rest.li objects if possible. @Slf4j public class DataHubAuthorizer implements Authorizer { public enum AuthorizationMode { - /** - * Default mode simply means that authorization is enforced, with a DENY result returned - */ + /** Default mode simply means that authorization is enforced, with a DENY result returned */ DEFAULT, /** - * Allow all means that the DataHubAuthorizer will allow all actions. This is used as an override to disable the - * policies feature. + * Allow all means that the DataHubAuthorizer will allow all actions. This is used as an + * override to disable the policies feature. */ ALLOW_ALL } @@ -55,11 +51,13 @@ public enum AuthorizationMode { // Maps privilege name to the associated set of policies for fast access. // Not concurrent data structure because writes are always against the entire thing. - private final Map<String, List<DataHubPolicyInfo>> _policyCache = new HashMap<>(); // Shared Policy Cache. + private final Map<String, List<DataHubPolicyInfo>> _policyCache = + new HashMap<>(); // Shared Policy Cache. private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); private final Lock readLock = readWriteLock.readLock(); - private final ScheduledExecutorService _refreshExecutorService = Executors.newScheduledThreadPool(1); + private final ScheduledExecutorService _refreshExecutorService = + Executors.newScheduledThreadPool(1); private final PolicyRefreshRunnable _policyRefreshRunnable; private final PolicyEngine _policyEngine; private EntitySpecResolver _entitySpecResolver; @@ -77,9 +75,15 @@ public DataHubAuthorizer( _systemAuthentication = Objects.requireNonNull(systemAuthentication); _mode = Objects.requireNonNull(mode); _policyEngine = new PolicyEngine(systemAuthentication, Objects.requireNonNull(entityClient)); - _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, - readWriteLock.writeLock(), policyFetchSize); - _refreshExecutorService.scheduleAtFixedRate(_policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); + _policyRefreshRunnable = + new PolicyRefreshRunnable( + systemAuthentication, + new PolicyFetcher(entityClient), + _policyCache, + readWriteLock.writeLock(), + policyFetchSize); + _refreshExecutorService.scheduleAtFixedRate( + _policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); } @Override @@ -95,41 +99,48 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, null); } - Optional<ResolvedEntitySpec> resolvedResourceSpec = request.getResourceSpec().map(_entitySpecResolver::resolve); + Optional<ResolvedEntitySpec> resolvedResourceSpec = + request.getResourceSpec().map(_entitySpecResolver::resolve); // 1. Fetch the policies relevant to the requested privilege. 
- final List<DataHubPolicyInfo> policiesToEvaluate = getOrDefault(request.getPrivilege(), new ArrayList<>()); + final List<DataHubPolicyInfo> policiesToEvaluate = + getOrDefault(request.getPrivilege(), new ArrayList<>()); // 2. Evaluate each policy. for (DataHubPolicyInfo policy : policiesToEvaluate) { if (isRequestGranted(policy, request, resolvedResourceSpec)) { // Short circuit if policy has granted privileges to this actor. - return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, + return new AuthorizationResult( + request, + AuthorizationResult.Type.ALLOW, String.format("Granted by policy with type: %s", policy.getType())); } } - return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null); + return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null); } - public List<String> getGrantedPrivileges(final String actor, final Optional<EntitySpec> resourceSpec) { + public List<String> getGrantedPrivileges( + final String actor, final Optional<EntitySpec> resourceSpec) { // 1. Fetch all policies final List<DataHubPolicyInfo> policiesToEvaluate = getOrDefault(ALL, new ArrayList<>()); Urn actorUrn = UrnUtils.getUrn(actor); - final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); + final ResolvedEntitySpec resolvedActorSpec = + _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); - Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); + Optional<ResolvedEntitySpec> resolvedResourceSpec = + resourceSpec.map(_entitySpecResolver::resolve); - return _policyEngine.getGrantedPrivileges(policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); + return _policyEngine.getGrantedPrivileges( + policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); } /** - * Retrieves the current list of actors authorized to for a particular privilege against - * an optional resource + * Retrieves the current list of actors authorized for a particular privilege against an + * optional resource */ public AuthorizedActors authorizedActors( - final String privilege, - final Optional<EntitySpec> resourceSpec) { + final String privilege, final Optional<EntitySpec> resourceSpec) { final List<Urn> authorizedUsers = new ArrayList<>(); final List<Urn> authorizedGroups = new ArrayList<>(); @@ -140,7 +151,8 @@ public AuthorizedActors authorizedActors( // Step 1: Find policies granting the privilege. final List<DataHubPolicyInfo> policiesToEvaluate = getOrDefault(privilege, new ArrayList<>()); - Optional<ResolvedEntitySpec> resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); + Optional<ResolvedEntitySpec> resolvedResourceSpec = + resourceSpec.map(_entitySpecResolver::resolve); // Step 2: For each policy, determine whether the resource is a match. for (DataHubPolicyInfo policy : policiesToEvaluate) { @@ -149,7 +161,8 @@ public AuthorizedActors authorizedActors( continue; } - final PolicyEngine.PolicyActors matchingActors = _policyEngine.getMatchingActors(policy, resolvedResourceSpec); + final PolicyEngine.PolicyActors matchingActors = + _policyEngine.getMatchingActors(policy, resolvedResourceSpec); // Step 3: For each matching policy, add actors that are authorized. authorizedUsers.addAll(matchingActors.getUsers()); @@ -164,12 +177,13 @@ public AuthorizedActors authorizedActors( } // Step 4: Return all authorized users and groups. 
- return new AuthorizedActors(privilege, authorizedUsers, authorizedGroups, authorizedRoles, allUsers, allGroups); + return new AuthorizedActors( + privilege, authorizedUsers, authorizedGroups, authorizedRoles, allUsers, allGroups); } /** - * Invalidates the policy cache and fires off a refresh thread. Should be invoked - * when a policy is created, modified, or deleted. + * Invalidates the policy cache and fires off a refresh thread. Should be invoked when a policy is + * created, modified, or deleted. */ public void invalidateCache() { _refreshExecutorService.execute(_policyRefreshRunnable); @@ -184,17 +198,19 @@ public void setMode(final AuthorizationMode mode) { } /** - * Returns true if the request's is coming from the system itself, in which cases - * the action is always authorized. + * Returns true if the request is coming from the system itself, in which case the action is + * always authorized. */ - private boolean isSystemRequest(final AuthorizationRequest request, final Authentication systemAuthentication) { + private boolean isSystemRequest( + final AuthorizationRequest request, final Authentication systemAuthentication) { return systemAuthentication.getActor().toUrnStr().equals(request.getActorUrn()); } - /** - * Returns true if a policy grants the requested privilege for a given actor and resource. - */ + /** Returns true if a policy grants the requested privilege for a given actor and resource. */ - private boolean isRequestGranted(final DataHubPolicyInfo policy, final AuthorizationRequest request, final Optional<ResolvedEntitySpec> resourceSpec) { + private boolean isRequestGranted( + final DataHubPolicyInfo policy, + final AuthorizationRequest request, + final Optional<ResolvedEntitySpec> resourceSpec) { if (AuthorizationMode.ALLOW_ALL.equals(mode())) { return true; } @@ -204,14 +220,12 @@ private boolean isRequestGranted(final DataHubPolicyInfo policy, final Authoriza return false; } - final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve( + final ResolvedEntitySpec resolvedActorSpec = + _entitySpecResolver.resolve( new EntitySpec(actorUrn.get().getEntityType(), request.getActorUrn())); - final PolicyEngine.PolicyEvaluationResult result = _policyEngine.evaluatePolicy( - policy, - resolvedActorSpec, - request.getPrivilege(), - resourceSpec - ); + final PolicyEngine.PolicyEvaluationResult result = + _policyEngine.evaluatePolicy( + policy, resolvedActorSpec, request.getPrivilege(), resourceSpec); return result.isGranted(); } @@ -219,7 +233,10 @@ private Optional<Urn> getUrnFromRequestActor(String actor) { try { return Optional.of(Urn.createFromString(actor)); } catch (URISyntaxException e) { - log.error(String.format("Failed to bind actor %s to an URN. Actors must be URNs. Denying the authorization request", actor)); + log.error( + String.format( + "Failed to bind actor %s to an URN. Actors must be URNs. Denying the authorization request", + actor)); return Optional.empty(); } } @@ -237,8 +254,8 @@ private List<DataHubPolicyInfo> getOrDefault(String key, List<DataHubPolicyInfo> /** * A {@link Runnable} used to periodically fetch a new instance of the policies Cache. * - * Currently, the refresh logic is not very smart. When the cache is invalidated, we simply re-fetch the - * entire cache using Policies stored in the backend. + * <p>Currently, the refresh logic is not very smart. When the cache is invalidated, we simply + * re-fetch the entire cache using Policies stored in the backend. 
*/ @VisibleForTesting @RequiredArgsConstructor @@ -260,8 +277,8 @@ public void run() { while (total == null || scrollId != null) { try { - final PolicyFetcher.PolicyFetchResult - policyFetchResult = _policyFetcher.fetchPolicies(count, scrollId, _systemAuthentication); + final PolicyFetcher.PolicyFetchResult policyFetchResult = + _policyFetcher.fetchPolicies(count, scrollId, _systemAuthentication); addPoliciesToCache(newCache, policyFetchResult.getPolicies()); @@ -269,7 +286,10 @@ public void run() { scrollId = policyFetchResult.getScrollId(); } catch (Exception e) { log.error( - "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. count: {}, scrollId: {}", count, scrollId, e); + "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. count: {}, scrollId: {}", + count, + scrollId, + e); return; } } @@ -285,23 +305,31 @@ public void run() { log.debug(String.format("Successfully fetched %s policies.", total)); } catch (Exception e) { - log.error("Caught exception while loading Policy cache. Will retry on next scheduled attempt.", e); + log.error( + "Caught exception while loading Policy cache. Will retry on next scheduled attempt.", + e); } } - private void addPoliciesToCache(final Map<String, List<DataHubPolicyInfo>> cache, + private void addPoliciesToCache( + final Map<String, List<DataHubPolicyInfo>> cache, final List<PolicyFetcher.Policy> policies) { policies.forEach(policy -> addPolicyToCache(cache, policy.getPolicyInfo())); } - private void addPolicyToCache(final Map<String, List<DataHubPolicyInfo>> cache, final DataHubPolicyInfo policy) { + private void addPolicyToCache( + final Map<String, List<DataHubPolicyInfo>> cache, final DataHubPolicyInfo policy) { final List<String> privileges = policy.getPrivileges(); for (String privilege : privileges) { - List<DataHubPolicyInfo> existingPolicies = cache.containsKey(privilege) ? new ArrayList<>(cache.get(privilege)) : new ArrayList<>(); + List<DataHubPolicyInfo> existingPolicies = + cache.containsKey(privilege) + ? new ArrayList<>(cache.get(privilege)) + : new ArrayList<>(); existingPolicies.add(policy); cache.put(privilege, existingPolicies); } - List<DataHubPolicyInfo> existingPolicies = cache.containsKey(ALL) ? new ArrayList<>(cache.get(ALL)) : new ArrayList<>(); + List<DataHubPolicyInfo> existingPolicies = + cache.containsKey(ALL) ? 
new ArrayList<>(cache.get(ALL)) : new ArrayList<>(); existingPolicies.add(policy); cache.put(ALL, existingPolicies); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java index 65b0329a9c4f2..c2d9c42693311 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java @@ -15,13 +15,14 @@ import java.util.Map; import java.util.stream.Collectors; - public class DefaultEntitySpecResolver implements EntitySpecResolver { private final List<EntityFieldResolverProvider> _entityFieldResolverProviders; public DefaultEntitySpecResolver(Authentication systemAuthentication, EntityClient entityClient) { _entityFieldResolverProviders = - ImmutableList.of(new EntityTypeFieldResolverProvider(), new EntityUrnFieldResolverProvider(), + ImmutableList.of( + new EntityTypeFieldResolverProvider(), + new EntityUrnFieldResolverProvider(), new DomainFieldResolverProvider(entityClient, systemAuthentication), new OwnerFieldResolverProvider(entityClient, systemAuthentication), new DataPlatformInstanceFieldResolverProvider(entityClient, systemAuthentication), @@ -35,7 +36,10 @@ public ResolvedEntitySpec resolve(EntitySpec entitySpec) { private Map<EntityFieldType, FieldResolver> getFieldResolvers(EntitySpec entitySpec) { return _entityFieldResolverProviders.stream() - .flatMap(resolver -> resolver.getFieldTypes().stream().map(fieldType -> Pair.of(fieldType, resolver))) - .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().getFieldResolver(entitySpec))); + .flatMap( + resolver -> + resolver.getFieldTypes().stream().map(fieldType -> Pair.of(fieldType, resolver))) + .collect( + Collectors.toMap(Pair::getKey, pair -> pair.getValue().getFieldResolver(entitySpec))); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java index 0dbb9cd132f8a..e4f6b483e09f8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java @@ -13,55 +13,56 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class FilterUtils { public static final PolicyMatchFilter EMPTY_FILTER = new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray()); - private FilterUtils() { - } + private FilterUtils() {} - /** - * Creates new PolicyMatchCriterion with field and value, using EQUAL PolicyMatchCondition. - */ + /** Creates new PolicyMatchCriterion with field and value, using EQUAL PolicyMatchCondition. */ @Nonnull - public static PolicyMatchCriterion newCriterion(@Nonnull EntityFieldType field, @Nonnull List<String> values) { + public static PolicyMatchCriterion newCriterion( + @Nonnull EntityFieldType field, @Nonnull List<String> values) { return newCriterion(field, values, PolicyMatchCondition.EQUALS); } - /** - * Creates new PolicyMatchCriterion with field, value and PolicyMatchCondition. - */ + /** Creates new PolicyMatchCriterion with field, value and PolicyMatchCondition. 
*/ @Nonnull - public static PolicyMatchCriterion newCriterion(@Nonnull EntityFieldType field, @Nonnull List<String> values, + public static PolicyMatchCriterion newCriterion( + @Nonnull EntityFieldType field, + @Nonnull List<String> values, @Nonnull PolicyMatchCondition policyMatchCondition) { - return new PolicyMatchCriterion().setField(field.name()) + return new PolicyMatchCriterion() + .setField(field.name()) .setValues(new StringArray(values)) .setCondition(policyMatchCondition); } /** - * Creates new PolicyMatchFilter from a map of Criteria by removing null-valued Criteria and using EQUAL PolicyMatchCondition (default). + * Creates new PolicyMatchFilter from a map of Criteria by removing null-valued Criteria and using + * EQUAL PolicyMatchCondition (default). */ @Nonnull public static PolicyMatchFilter newFilter(@Nullable Map<EntityFieldType, List<String>> params) { if (params == null) { return EMPTY_FILTER; } - PolicyMatchCriterionArray criteria = params.entrySet() - .stream() - .filter(e -> Objects.nonNull(e.getValue())) - .map(e -> newCriterion(e.getKey(), e.getValue())) - .collect(Collectors.toCollection(PolicyMatchCriterionArray::new)); + PolicyMatchCriterionArray criteria = + params.entrySet().stream() + .filter(e -> Objects.nonNull(e.getValue())) + .map(e -> newCriterion(e.getKey(), e.getValue())) + .collect(Collectors.toCollection(PolicyMatchCriterionArray::new)); return new PolicyMatchFilter().setCriteria(criteria); } /** - * Creates new PolicyMatchFilter from a single PolicyMatchCriterion with EQUAL PolicyMatchCondition (default). + * Creates new PolicyMatchFilter from a single PolicyMatchCriterion with EQUAL + * PolicyMatchCondition (default). */ @Nonnull - public static PolicyMatchFilter newFilter(@Nonnull EntityFieldType field, @Nonnull List<String> values) { + public static PolicyMatchFilter newFilter( + @Nonnull EntityFieldType field, @Nonnull List<String> values) { return newFilter(Collections.singletonMap(field, values)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java index da0ae26f2b1da..123e5f3c55932 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java @@ -1,5 +1,7 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Owner; import com.linkedin.common.Ownership; @@ -20,7 +22,6 @@ import com.linkedin.policy.PolicyMatchCriterion; import com.linkedin.policy.PolicyMatchCriterionArray; import com.linkedin.policy.PolicyMatchFilter; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -31,16 +32,12 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nullable; - import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class PolicyEngine { @@ -59,13 +56,19 @@ public PolicyEvaluationResult evaluatePolicy( // If the privilege is not in scope, deny the request. 
if (!isPrivilegeMatch(privilege, policy.getPrivileges())) { - log.debug("Policy denied based on irrelevant privileges {} for {}", policy.getPrivileges(), privilege); + log.debug( + "Policy denied based on irrelevant privileges {} for {}", + policy.getPrivileges(), + privilege); return PolicyEvaluationResult.DENIED; } // If policy is not applicable, deny the request if (!isPolicyApplicable(policy, resolvedActorSpec, resource, context)) { - log.debug("Policy is not applicable for actor {} and resource {}", resolvedActorSpec.getSpec().getEntity(), resource); + log.debug( + "Policy is not applicable for actor {} and resource {}", + resolvedActorSpec.getSpec().getEntity(), + resource); return PolicyEvaluationResult.DENIED; } @@ -74,8 +77,7 @@ public PolicyEvaluationResult evaluatePolicy( } public PolicyActors getMatchingActors( - final DataHubPolicyInfo policy, - final Optional<ResolvedEntitySpec> resource) { + final DataHubPolicyInfo policy, final Optional<ResolvedEntitySpec> resource) { final List<Urn> users = new ArrayList<>(); final List<Urn> groups = new ArrayList<>(); final List<Urn> roles = new ArrayList<>(); @@ -118,8 +120,7 @@ private boolean isPolicyApplicable( final DataHubPolicyInfo policy, final ResolvedEntitySpec resolvedActorSpec, final Optional<ResolvedEntitySpec> resource, - final PolicyEvaluationContext context - ) { + final PolicyEvaluationContext context) { // If policy is inactive, simply return DENY. if (PoliciesConfig.INACTIVE_POLICY_STATE.equals(policy.getState())) { @@ -150,25 +151,27 @@ public List<String> getGrantedPrivileges( /** * Returns true if the policy matches the resource spec, false otherwise. * - * If the policy is of type "PLATFORM", the resource will always match (since there's no resource). - * If the policy is of type "METADATA", the resourceSpec parameter will be matched against the - * resource filter defined on the policy. + * <p>If the policy is of type "PLATFORM", the resource will always match (since there's no + * resource). If the policy is of type "METADATA", the resourceSpec parameter will be matched + * against the resource filter defined on the policy. */ - public Boolean policyMatchesResource(final DataHubPolicyInfo policy, final Optional<ResolvedEntitySpec> resourceSpec) { + public Boolean policyMatchesResource( + final DataHubPolicyInfo policy, final Optional<ResolvedEntitySpec> resourceSpec) { return isResourceMatch(policy.getType(), policy.getResources(), resourceSpec); } /** - * Returns true if the privilege portion of a DataHub policy matches the privilege being evaluated, false otherwise. + * Returns true if the privilege portion of a DataHub policy matches the privilege being + * evaluated, false otherwise. */ private boolean isPrivilegeMatch( - final String requestPrivilege, - final List<String> policyPrivileges) { + final String requestPrivilege, final List<String> policyPrivileges) { return policyPrivileges.contains(requestPrivilege); } /** - * Returns true if the resource portion of a DataHub policy matches the resource being evaluated, false otherwise. + * Returns true if the resource portion of a DataHub policy matches the resource being + * evaluated, false otherwise. */ private boolean isResourceMatch( final String policyType, @@ -192,8 +195,8 @@ private boolean isResourceMatch( } /** - * Get filter object from policy resource filter. Make sure it is backward compatible by constructing PolicyMatchFilter object - * from other fields if the filter field is not set + * Get filter object from policy resource filter.
Make sure it is backward compatible by + constructing PolicyMatchFilter object from other fields if the filter field is not set */ private PolicyMatchFilter getFilter(DataHubResourceFilter policyResourceFilter) { if (policyResourceFilter.hasFilter()) { @@ -201,13 +204,19 @@ private PolicyMatchFilter getFilter(DataHubResourceFilter policyResourceFilter) } PolicyMatchCriterionArray criteria = new PolicyMatchCriterionArray(); if (policyResourceFilter.hasType()) { - criteria.add(new PolicyMatchCriterion().setField(EntityFieldType.TYPE.name()) - .setValues(new StringArray(Collections.singletonList(policyResourceFilter.getType())))); + criteria.add( + new PolicyMatchCriterion() + .setField(EntityFieldType.TYPE.name()) + .setValues( + new StringArray(Collections.singletonList(policyResourceFilter.getType())))); } - if (policyResourceFilter.hasType() && policyResourceFilter.hasResources() + if (policyResourceFilter.hasType() + && policyResourceFilter.hasResources() && !policyResourceFilter.isAllResources()) { criteria.add( - new PolicyMatchCriterion().setField(EntityFieldType.URN.name()).setValues(policyResourceFilter.getResources())); + new PolicyMatchCriterion() + .setField(EntityFieldType.URN.name()) + .setValues(policyResourceFilter.getResources())); } return new PolicyMatchFilter().setCriteria(criteria); } @@ -216,7 +225,8 @@ private boolean checkFilter(final PolicyMatchFilter filter, final ResolvedEntity return filter.getCriteria().stream().allMatch(criterion -> checkCriterion(criterion, resource)); } - private boolean checkCriterion(final PolicyMatchCriterion criterion, final ResolvedEntitySpec resource) { + private boolean checkCriterion( + final PolicyMatchCriterion criterion, final ResolvedEntitySpec resource) { EntityFieldType entityFieldType; try { entityFieldType = EntityFieldType.valueOf(criterion.getField().toUpperCase()); @@ -226,12 +236,13 @@ private boolean checkCriterion(final PolicyMatchCriterion criterion, final Resol } Set<String> fieldValues = resource.getFieldValues(entityFieldType); - return criterion.getValues() - .stream() - .anyMatch(filterValue -> checkCondition(fieldValues, filterValue, criterion.getCondition())); + return criterion.getValues().stream() + .anyMatch( + filterValue -> checkCondition(fieldValues, filterValue, criterion.getCondition())); } - private boolean checkCondition(Set<String> fieldValues, String filterValue, PolicyMatchCondition condition) { + private boolean checkCondition( + Set<String> fieldValues, String filterValue, PolicyMatchCondition condition) { if (condition == PolicyMatchCondition.EQUALS) { return fieldValues.contains(filterValue); } @@ -240,7 +251,8 @@ private boolean checkCondition(Set<String> fieldValues, String filterValue, Poli } /** - * Returns true if the actor portion of a DataHub policy matches the actor being evaluated, false otherwise. + * Returns true if the actor portion of a DataHub policy matches the actor being + * evaluated, false otherwise. */ private boolean isActorMatch( final ResolvedEntitySpec resolvedActorSpec, @@ -259,7 +271,8 @@ private boolean isActorMatch( return true; } - // 3. If the actor is the owner, either directly or indirectly via a group, return true immediately. + // 3. If the actor is the owner, either directly or indirectly via a group, return true + // immediately.
if (isOwnerMatch(resolvedActorSpec, actorFilter, resourceSpec, context)) { return true; } @@ -268,11 +281,14 @@ private boolean isActorMatch( return isRoleMatch(resolvedActorSpec, actorFilter, context); } - private boolean isUserMatch(final ResolvedEntitySpec resolvedActorSpec, final DataHubActorFilter actorFilter) { + private boolean isUserMatch( + final ResolvedEntitySpec resolvedActorSpec, final DataHubActorFilter actorFilter) { // If the actor is a matching "User" in the actor filter, return true immediately. - return actorFilter.isAllUsers() || (actorFilter.hasUsers() && Objects.requireNonNull(actorFilter.getUsers()) - .stream().map(Urn::toString) - .anyMatch(user -> user.equals(resolvedActorSpec.getSpec().getEntity()))); + return actorFilter.isAllUsers() + || (actorFilter.hasUsers() + && Objects.requireNonNull(actorFilter.getUsers()).stream() + .map(Urn::toString) + .anyMatch(user -> user.equals(resolvedActorSpec.getSpec().getEntity()))); } private boolean isGroupMatch( @@ -283,9 +299,10 @@ private boolean isGroupMatch( if (actorFilter.isAllGroups() || actorFilter.hasGroups()) { final Set<String> groups = resolveGroups(resolvedActorSpec, context); return (actorFilter.isAllGroups() && !groups.isEmpty()) - || (actorFilter.hasGroups() && Objects.requireNonNull(actorFilter.getGroups()) - .stream().map(Urn::toString) - .anyMatch(groups::contains)); + || (actorFilter.hasGroups() + && Objects.requireNonNull(actorFilter.getGroups()).stream() + .map(Urn::toString) + .anyMatch(groups::contains)); } // If there are no groups on the policy, return false for the group match. return false; @@ -296,7 +313,8 @@ private boolean isOwnerMatch( final DataHubActorFilter actorFilter, final Optional<ResolvedEntitySpec> requestResource, final PolicyEvaluationContext context) { - // If the policy does not apply to owners, or there is no resource to own, return false immediately. + // If the policy does not apply to owners, or there is no resource to own, return false + // immediately. if (!actorFilter.isResourceOwners() || requestResource.isEmpty()) { return false; } @@ -308,8 +326,12 @@ private Set<String> getOwnersForType(EntitySpec resourceSpec, List<Urn> ownershi Urn entityUrn = UrnUtils.getUrn(resourceSpec.getEntity()); EnvelopedAspect ownershipAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { return Collections.emptySet(); } @@ -328,7 +350,8 @@ private Set<String> getOwnersForType(EntitySpec resourceSpec, List<Urn> ownershi private boolean isActorOwner( final ResolvedEntitySpec resolvedActorSpec, - ResolvedEntitySpec resourceSpec, List<Urn> ownershipTypes, + ResolvedEntitySpec resourceSpec, + List<Urn> ownershipTypes, PolicyEvaluationContext context) { Set<String> owners = this.getOwnersForType(resourceSpec.getSpec(), ownershipTypes); if (isUserOwner(resolvedActorSpec, owners)) { @@ -357,12 +380,11 @@ private boolean isRoleMatch( } // If the actor has a matching "Role" in the actor filter, return true immediately. 
Set<Urn> actorRoles = resolveRoles(resolvedActorSpec, context); - return Objects.requireNonNull(actorFilter.getRoles()) - .stream() - .anyMatch(actorRoles::contains); + return Objects.requireNonNull(actorFilter.getRoles()).stream().anyMatch(actorRoles::contains); } - private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { + private Set<Urn> resolveRoles( + final ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { if (context.roles != null) { return context.roles; } @@ -374,14 +396,21 @@ private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy try { Urn actorUrn = Urn.createFromString(actor); - final EntityResponse corpUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, Collections.singleton(actorUrn), - Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME), _systemAuthentication).get(actorUrn); + final EntityResponse corpUser = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(actorUrn), + Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME), + _systemAuthentication) + .get(actorUrn); if (corpUser == null || !corpUser.hasAspects()) { return roles; } aspectMap = corpUser.getAspects(); } catch (Exception e) { - log.error(String.format("Failed to fetch %s for urn %s", ROLE_MEMBERSHIP_ASPECT_NAME, actor), e); + log.error( + String.format("Failed to fetch %s for urn %s", ROLE_MEMBERSHIP_ASPECT_NAME, actor), e); return roles; } @@ -389,7 +418,8 @@ private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy return roles; } - RoleMembership roleMembership = new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); + RoleMembership roleMembership = + new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); if (roleMembership.hasRoles()) { roles.addAll(roleMembership.getRoles()); context.setRoles(roles); @@ -397,7 +427,8 @@ private Set<Urn> resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy return roles; } - private Set<String> resolveGroups(ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { + private Set<String> resolveGroups( + ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { if (context.groups != null) { return context.groups; } @@ -408,9 +439,7 @@ private Set<String> resolveGroups(ResolvedEntitySpec resolvedActorSpec, PolicyEv return groups; } - /** - * Class used to store state across a single Policy evaluation. - */ + /** Class used to store state across a single Policy evaluation. */ static class PolicyEvaluationContext { private Set<String> groups; private Set<Urn> roles; @@ -424,9 +453,7 @@ public void setRoles(Set<Urn> roles) { } } - /** - * Class used to represent the result of a Policy evaluation - */ + /** Class used to represent the result of a Policy evaluation */ static class PolicyEvaluationResult { public static final PolicyEvaluationResult GRANTED = new PolicyEvaluationResult(true); public static final PolicyEvaluationResult DENIED = new PolicyEvaluationResult(false); @@ -442,9 +469,7 @@ public boolean isGranted() { } } - /** - * Class used to represent all valid users of a policy. - */ + /** Class used to represent all valid users of a policy. 
*/ @Value @AllArgsConstructor(access = AccessLevel.PUBLIC) public static class PolicyActors { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java index c06da4d245f91..9c5950985eea4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java @@ -1,5 +1,8 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -20,19 +23,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nullable; - -import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; - - -/** - * Wrapper around entity client to fetch policies in a paged manner - */ +/** Wrapper around entity client to fetch policies in a paged manner */ @Slf4j @RequiredArgsConstructor public class PolicyFetcher { @@ -42,49 +38,66 @@ public class PolicyFetcher { new SortCriterion().setField("lastUpdatedTimestamp").setOrder(SortOrder.DESCENDING); /** - * This is to provide a scroll implementation using the start/count api. It is not efficient - * and the native scroll functions should be used instead. This does fix a failure to fetch - * policies when deep pagination happens where there are >10k policies. - * Exists primarily to prevent a breaking change to the graphql api. + * This is to provide a scroll implementation using the start/count api. It is not efficient and + * the native scroll functions should be used instead. This does fix a failure to fetch policies + * when deep pagination happens where there are >10k policies. Exists primarily to prevent a + * breaking change to the graphql api. */ @Deprecated - public CompletableFuture<PolicyFetchResult> fetchPolicies(int start, String query, int count, Authentication authentication) { - return CompletableFuture.supplyAsync(() -> { - try { - PolicyFetchResult result = PolicyFetchResult.EMPTY; - String scrollId = ""; - int fetchedResults = 0; - - while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) { - PolicyFetchResult tmpResult = fetchPolicies(query, count, scrollId.isEmpty() ? null : scrollId, authentication); - fetchedResults += tmpResult.getPolicies().size(); - scrollId = tmpResult.getScrollId(); - if (fetchedResults > start) { - result = tmpResult; + public CompletableFuture<PolicyFetchResult> fetchPolicies( + int start, String query, int count, Authentication authentication) { + return CompletableFuture.supplyAsync( + () -> { + try { + PolicyFetchResult result = PolicyFetchResult.EMPTY; + String scrollId = ""; + int fetchedResults = 0; + + while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) { + PolicyFetchResult tmpResult = + fetchPolicies(query, count, scrollId.isEmpty() ?
null : scrollId, authentication); + fetchedResults += tmpResult.getPolicies().size(); + scrollId = tmpResult.getScrollId(); + if (fetchedResults > start) { + result = tmpResult; + } + } + + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list policies", e); } - } - - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list policies", e); - } - }); + }); } - public PolicyFetchResult fetchPolicies(int count, @Nullable String scrollId, Authentication authentication) - throws RemoteInvocationException, URISyntaxException { + public PolicyFetchResult fetchPolicies( + int count, @Nullable String scrollId, Authentication authentication) + throws RemoteInvocationException, URISyntaxException { return fetchPolicies("", count, scrollId, authentication); } - public PolicyFetchResult fetchPolicies(String query, int count, @Nullable String scrollId, Authentication authentication) + public PolicyFetchResult fetchPolicies( + String query, int count, @Nullable String scrollId, Authentication authentication) throws RemoteInvocationException, URISyntaxException { log.debug(String.format("Batch fetching policies. count: %s, scroll: %s", count, scrollId)); // First fetch all policy urns - ScrollResult result = _entityClient.scrollAcrossEntities(List.of(POLICY_ENTITY_NAME), query, null, scrollId, - null, count, new SearchFlags().setSkipCache(true).setSkipAggregates(true) - .setSkipHighlighting(true).setFulltext(true), authentication); - List<Urn> policyUrns = result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); + ScrollResult result = + _entityClient.scrollAcrossEntities( + List.of(POLICY_ENTITY_NAME), + query, + null, + scrollId, + null, + count, + new SearchFlags() + .setSkipCache(true) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setFulltext(true), + authentication); + List<Urn> policyUrns = + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); if (policyUrns.isEmpty()) { return PolicyFetchResult.EMPTY; @@ -92,23 +105,29 @@ null, count, new SearchFlags().setSkipCache(true).setSkipAggregates(true) // Fetch DataHubPolicyInfo aspects for each urn final Map<Urn, EntityResponse> policyEntities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, authentication); - return new PolicyFetchResult(policyUrns.stream() - .map(policyEntities::get) - .filter(Objects::nonNull) - .map(this::extractPolicy) - .filter(Objects::nonNull) - .collect(Collectors.toList()), result.getNumEntities(), result.getScrollId()); + _entityClient.batchGetV2( + POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, authentication); + return new PolicyFetchResult( + policyUrns.stream() + .map(policyEntities::get) + .filter(Objects::nonNull) + .map(this::extractPolicy) + .filter(Objects::nonNull) + .collect(Collectors.toList()), + result.getNumEntities(), + result.getScrollId()); } private Policy extractPolicy(EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); if (!aspectMap.containsKey(DATAHUB_POLICY_INFO_ASPECT_NAME)) { - // Right after deleting the policy, there could be a small time frame where search and local db are not consistent. + // Right after deleting the policy, there could be a small time frame where search and local + // db are not consistent.
// Simply return null in that case return null; } - return new Policy(entityResponse.getUrn(), + return new Policy( + entityResponse.getUrn(), new DataHubPolicyInfo(aspectMap.get(DATAHUB_POLICY_INFO_ASPECT_NAME).getValue().data())); } @@ -116,10 +135,10 @@ private Policy extractPolicy(EntityResponse entityResponse) { public static class PolicyFetchResult { List<Policy> policies; int total; - @Nullable - String scrollId; + @Nullable String scrollId; - public static final PolicyFetchResult EMPTY = new PolicyFetchResult(Collections.emptyList(), 0, null); + public static final PolicyFetchResult EMPTY = + new PolicyFetchResult(Collections.emptyList(), 0, null); } @Value diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java index cbb237654e969..c24c65725830f 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java @@ -1,5 +1,7 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; @@ -16,11 +18,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - -/** - * Provides field resolver for data platform instance given resourceSpec - */ +/** Provides field resolver for data platform instance given resourceSpec */ @Slf4j @RequiredArgsConstructor public class DataPlatformInstanceFieldResolverProvider implements EntityFieldResolverProvider { @@ -40,7 +38,8 @@ public FieldResolver getFieldResolver(EntitySpec entitySpec) { private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec) { Urn entityUrn = UrnUtils.getUrn(entitySpec.getEntity()); - // In the case that the entity is a platform instance, the associated platform instance entity is the instance itself + // In the case that the entity is a platform instance, the associated platform instance entity + // is the instance itself if (entityUrn.getEntityType().equals(DATA_PLATFORM_INSTANCE_ENTITY_NAME)) { return FieldResolver.FieldValue.builder() .values(Collections.singleton(entityUrn.toString())) @@ -49,9 +48,14 @@ private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec) EnvelopedAspect dataPlatformInstanceAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME), _systemAuthentication); - if (response == null || !response.getAspects().containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME), + _systemAuthentication); + if (response == null + || !response.getAspects().containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } dataPlatformInstanceAspect = response.getAspects().get(DATA_PLATFORM_INSTANCE_ASPECT_NAME); @@ -59,12 +63,15 @@ private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec) log.error("Error while
retrieving platform instance aspect for urn {}", entityUrn, e); return FieldResolver.emptyFieldValue(); } - DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataPlatformInstanceAspect.getValue().data()); + DataPlatformInstance dataPlatformInstance = + new DataPlatformInstance(dataPlatformInstanceAspect.getValue().data()); if (dataPlatformInstance.getInstance() == null) { return FieldResolver.emptyFieldValue(); } return FieldResolver.FieldValue.builder() - .values(Collections.singleton(Objects.requireNonNull(dataPlatformInstance.getInstance()).toString())) + .values( + Collections.singleton( + Objects.requireNonNull(dataPlatformInstance.getInstance()).toString())) .build(); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java index 15d821b75c0bd..e99e13ce00145 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java @@ -1,9 +1,11 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.domain.DomainProperties; @@ -11,25 +13,17 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; - import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - - -/** - * Provides field resolver for domain given entitySpec - */ +/** Provides field resolver for domain given entitySpec */ @Slf4j @RequiredArgsConstructor public class DomainFieldResolverProvider implements EntityFieldResolverProvider { @@ -51,29 +45,35 @@ private Set<Urn> getBatchedParentDomains(@Nonnull final Set<Urn> urns) { final Set<Urn> parentUrns = new HashSet<>(); try { - final Map<Urn, EntityResponse> batchResponse = _entityClient.batchGetV2( - DOMAIN_ENTITY_NAME, - urns, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - _systemAuthentication - ); - - batchResponse.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); - if (properties.hasParentDomain()) { - parentUrns.add(properties.getParentDomain()); - } - } - }); + final Map<Urn, EntityResponse> batchResponse = + _entityClient.batchGetV2( + DOMAIN_ENTITY_NAME, + urns, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), + _systemAuthentication); + + batchResponse.forEach( + (urn, entityResponse) -> { + if 
(entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties = + new DomainProperties( + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data()); + if (properties.hasParentDomain()) { + parentUrns.add(properties.getParentDomain()); + } + } + }); } catch (Exception e) { log.error( "Error while retrieving parent domains for {} urns including \"{}\"", urns.size(), urns.stream().findFirst().map(Urn::toString).orElse(""), - e - ); + e); } return parentUrns; @@ -90,8 +90,12 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { final EnvelopedAspect domainsAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(DOMAINS_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(DOMAINS_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(DOMAINS_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } @@ -106,7 +110,8 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { * To avoid cycles we remove any parents we've already visited to prevent an infinite loop cycle. */ - final Set<Urn> domainUrns = new HashSet<>(new Domains(domainsAspect.getValue().data()).getDomains()); + final Set<Urn> domainUrns = + new HashSet<>(new Domains(domainsAspect.getValue().data()).getDomains()); Set<Urn> batchedParentUrns = getBatchedParentDomains(domainUrns); batchedParentUrns.removeAll(domainUrns); @@ -116,9 +121,8 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { batchedParentUrns.removeAll(domainUrns); } - return FieldResolver.FieldValue.builder().values(domainUrns - .stream() - .map(Object::toString) - .collect(Collectors.toSet())).build(); + return FieldResolver.FieldValue.builder() + .values(domainUrns.stream().map(Object::toString).collect(Collectors.toSet())) + .build(); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java index 227d403a9cd1d..8cb612515e626 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java @@ -1,24 +1,20 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import java.util.List; - -/** - * Base class for defining a class that provides the field resolver for the given field type - */ +/** Base class for defining a class that provides the field resolver for the given field type */ public interface EntityFieldResolverProvider { /** * List of fields that this hydrator is hydrating. 
+ * * @return */ List<EntityFieldType> getFieldTypes(); - /** - * Return resolver for fetching the field values given the entity - */ + /** Return resolver for fetching the field values given the entity */ FieldResolver getFieldResolver(EntitySpec entitySpec); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java index addac84c68b18..d4dbf86172954 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java @@ -1,16 +1,13 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collections; import java.util.List; - -/** - * Provides field resolver for entity type given entitySpec - */ +/** Provides field resolver for entity type given entitySpec */ public class EntityTypeFieldResolverProvider implements EntityFieldResolverProvider { @Override diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java index 32960de687839..c4d27d959e023 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java @@ -1,16 +1,13 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collections; import java.util.List; - -/** - * Provides field resolver for entity urn given entitySpec - */ +/** Provides field resolver for entity urn given entitySpec */ public class EntityUrnFieldResolverProvider implements EntityFieldResolverProvider { @Override diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java index b1202d9f4bbd3..a64dc3a8b5db8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java @@ -1,33 +1,29 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME; + import com.datahub.authentication.Authentication; -import 
com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; +import com.linkedin.identity.GroupMembership; import com.linkedin.identity.NativeGroupMembership; import com.linkedin.metadata.Constants; -import com.linkedin.identity.GroupMembership; -import java.util.Collections; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; -import static com.linkedin.metadata.Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME; - - -/** - * Provides field resolver for group membership given entitySpec - */ +/** Provides field resolver for group membership given entitySpec */ @Slf4j @RequiredArgsConstructor public class GroupMembershipFieldResolverProvider implements EntityFieldResolverProvider { @@ -51,21 +47,30 @@ private FieldResolver.FieldValue getGroupMembership(EntitySpec entitySpec) { EnvelopedAspect nativeGroupMembershipAspect; List<Urn> groups = new ArrayList<>(); try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null - || !(response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME) - || response.getAspects().containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME))) { + || !(response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME) + || response + .getAspects() + .containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME))) { return FieldResolver.emptyFieldValue(); } if (response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME)) { groupMembershipAspect = response.getAspects().get(Constants.GROUP_MEMBERSHIP_ASPECT_NAME); - GroupMembership groupMembership = new GroupMembership(groupMembershipAspect.getValue().data()); + GroupMembership groupMembership = + new GroupMembership(groupMembershipAspect.getValue().data()); groups.addAll(groupMembership.getGroups()); } if (response.getAspects().containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { - nativeGroupMembershipAspect = response.getAspects().get(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME); - NativeGroupMembership nativeGroupMembership = new NativeGroupMembership(nativeGroupMembershipAspect.getValue().data()); + nativeGroupMembershipAspect = + response.getAspects().get(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME); + NativeGroupMembership nativeGroupMembership = + new NativeGroupMembership(nativeGroupMembershipAspect.getValue().data()); groups.addAll(nativeGroupMembership.getNativeGroups()); } } catch (Exception e) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java
b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java index 3c27f9e6ce8d7..d26082bab6d63 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java @@ -1,9 +1,9 @@ package com.datahub.authorization.fieldresolverprovider; import com.datahub.authentication.Authentication; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -17,10 +17,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - -/** - * Provides field resolver for owners given entitySpec - */ +/** Provides field resolver for owners given entitySpec */ @Slf4j @RequiredArgsConstructor public class OwnerFieldResolverProvider implements EntityFieldResolverProvider { @@ -42,8 +39,12 @@ private FieldResolver.FieldValue getOwners(EntitySpec entitySpec) { Urn entityUrn = UrnUtils.getUrn(entitySpec.getEntity()); EnvelopedAspect ownershipAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } @@ -54,7 +55,10 @@ private FieldResolver.FieldValue getOwners(EntitySpec entitySpec) { } Ownership ownership = new Ownership(ownershipAspect.getValue().data()); return FieldResolver.FieldValue.builder() - .values(ownership.getOwners().stream().map(owner -> owner.getOwner().toString()).collect(Collectors.toSet())) + .values( + ownership.getOwners().stream() + .map(owner -> owner.getOwner().toString()) + .collect(Collectors.toSet())) .build(); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java index cd7ae5c3bffc4..51a700a935274 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java @@ -1,5 +1,8 @@ package com.datahub.authorization.role; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; @@ -14,35 +17,45 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class RoleService { private final EntityClient _entityClient; - public void batchAssignRoleToActors(@Nonnull final List<String> actors, @Nullable final Urn roleUrn, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public void batchAssignRoleToActors( + 
@Nonnull final List<String> actors, + @Nullable final Urn roleUrn, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { if (roleUrn != null && !_entityClient.exists(roleUrn, authentication)) { - throw new RuntimeException(String.format("Role %s does not exist. Skipping batch role assignment", roleUrn)); + throw new RuntimeException( + String.format("Role %s does not exist. Skipping batch role assignment", roleUrn)); } - actors.forEach(actor -> { - try { - assignRoleToActor(actor, roleUrn, authentication); - } catch (Exception e) { - log.warn(String.format("Failed to assign role %s to actor %s. Skipping actor assignment", roleUrn, actor), e); - } - }); + actors.forEach( + actor -> { + try { + assignRoleToActor(actor, roleUrn, authentication); + } catch (Exception e) { + log.warn( + String.format( + "Failed to assign role %s to actor %s. Skipping actor assignment", + roleUrn, actor), + e); + } + }); } - private void assignRoleToActor(@Nonnull final String actor, @Nullable final Urn roleUrn, - @Nonnull final Authentication authentication) throws URISyntaxException, RemoteInvocationException { + private void assignRoleToActor( + @Nonnull final String actor, + @Nullable final Urn roleUrn, + @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { final Urn actorUrn = Urn.createFromString(actor); if (!_entityClient.exists(actorUrn, authentication)) { - log.warn(String.format("Failed to assign role %s to actor %s, actor does not exist. Skipping actor assignment", - roleUrn, actor)); + log.warn( + String.format( + "Failed to assign role %s to actor %s, actor does not exist. Skipping actor assignment", + roleUrn, actor)); return; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java index ac27e1a16c8b7..dc63b5e4a2897 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java @@ -1,5 +1,7 @@ package com.datahub.telemetry; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; @@ -27,9 +29,6 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class TrackingService { @@ -56,11 +55,29 @@ public class TrackingService { private static final String INTERVAL_FIELD = "interval"; private static final String VIEW_TYPE_FIELD = "viewType"; - private static final Set<String> ALLOWED_EVENT_FIELDS = new HashSet<>( - ImmutableList.of(EVENT_TYPE_FIELD, ENTITY_TYPE_FIELD, ENTITY_TYPE_FILTER_FIELD, - PAGE_NUMBER_FIELD, PAGE_FIELD, TOTAL_FIELD, INDEX_FIELD, RESULT_TYPE_FIELD, RENDER_ID_FIELD, MODULE_ID_FIELD, - RENDER_TYPE_FIELD, SCENARIO_TYPE_FIELD, SECTION_FIELD, ACCESS_TOKEN_TYPE_FIELD, DURATION_FIELD, - ROLE_URN_FIELD, POLICY_URN_FIELD, SOURCE_TYPE_FIELD, INTERVAL_FIELD, VIEW_TYPE_FIELD)); + private static final Set<String> ALLOWED_EVENT_FIELDS = + new HashSet<>( + ImmutableList.of( + EVENT_TYPE_FIELD, + ENTITY_TYPE_FIELD, + ENTITY_TYPE_FILTER_FIELD, + PAGE_NUMBER_FIELD, + PAGE_FIELD, + TOTAL_FIELD, + INDEX_FIELD, + RESULT_TYPE_FIELD, + RENDER_ID_FIELD, + MODULE_ID_FIELD, + RENDER_TYPE_FIELD, + SCENARIO_TYPE_FIELD, + SECTION_FIELD, + 
ACCESS_TOKEN_TYPE_FIELD, + DURATION_FIELD, + ROLE_URN_FIELD, + POLICY_URN_FIELD, + SOURCE_TYPE_FIELD, + INTERVAL_FIELD, + VIEW_TYPE_FIELD)); private static final String ACTOR_URN_FIELD = "actorUrn"; private static final String ORIGIN_FIELD = "origin"; @@ -72,9 +89,20 @@ public class TrackingService { private static final String USER_URN_FIELD = "userUrn"; private static final String USER_URNS_FIELD = "userUrns"; private static final String PARENT_NODE_URN_FIELD = "parentNodeUrn"; - private static final Set<String> ALLOWED_OBFUSCATED_EVENT_FIELDS = new HashSet<>( - ImmutableList.of(ACTOR_URN_FIELD, ORIGIN_FIELD, ENTITY_URN_FIELD, ENTITY_URNS_FIELD, GROUP_NAME_FIELD, - SECTION_FIELD, ENTITY_PAGE_FILTER_FIELD, PATH_FIELD, USER_URN_FIELD, USER_URNS_FIELD, PARENT_NODE_URN_FIELD)); + private static final Set<String> ALLOWED_OBFUSCATED_EVENT_FIELDS = + new HashSet<>( + ImmutableList.of( + ACTOR_URN_FIELD, + ORIGIN_FIELD, + ENTITY_URN_FIELD, + ENTITY_URNS_FIELD, + GROUP_NAME_FIELD, + SECTION_FIELD, + ENTITY_PAGE_FILTER_FIELD, + PATH_FIELD, + USER_URN_FIELD, + USER_URNS_FIELD, + PARENT_NODE_URN_FIELD)); private final MixpanelAPI _mixpanelAPI; private final MessageBuilder _mixpanelMessageBuilder; @@ -100,9 +128,11 @@ public void emitAnalyticsEvent(@Nonnull final JsonNode event) { } try { - _mixpanelAPI.sendMessage(_mixpanelMessageBuilder.event(getClientId(), eventType, sanitizedEvent)); + _mixpanelAPI.sendMessage( + _mixpanelMessageBuilder.event(getClientId(), eventType, sanitizedEvent)); } catch (IOException e) { - log.info("Failed to send event to Mixpanel; this does not affect the functionality of the application"); + log.info( + "Failed to send event to Mixpanel; this does not affect the functionality of the application"); log.debug("Failed to send event to Mixpanel", e); } } @@ -134,7 +164,8 @@ JSONObject sanitizeEvent(@Nonnull final JsonNode event) { final JSONObject unsanitizedEventObj; try { - unsanitizedEventObj = new JSONObject(_objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(event)); + unsanitizedEventObj = + new JSONObject(_objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(event)); } catch (Exception e) { log.warn("Failed to serialize event", e); return createFailedEvent(); @@ -145,18 +176,25 @@ JSONObject sanitizeEvent(@Nonnull final JsonNode event) { return createFailedEvent(); } - unsanitizedEventObj.keys().forEachRemaining(key -> { - String keyString = (String) key; - try { - if (ALLOWED_EVENT_FIELDS.contains(keyString)) { - sanitizedEventObj.put(keyString, unsanitizedEventObj.get(keyString).toString()); - } else if (ALLOWED_OBFUSCATED_EVENT_FIELDS.contains(keyString)) { - sanitizedEventObj.put(keyString, _secretService.hashString(unsanitizedEventObj.get(keyString).toString())); - } - } catch (JSONException e) { - log.warn(String.format("Failed to sanitize field %s. Skipping this field.", keyString), e); - } - }); + unsanitizedEventObj + .keys() + .forEachRemaining( + key -> { + String keyString = (String) key; + try { + if (ALLOWED_EVENT_FIELDS.contains(keyString)) { + sanitizedEventObj.put(keyString, unsanitizedEventObj.get(keyString).toString()); + } else if (ALLOWED_OBFUSCATED_EVENT_FIELDS.contains(keyString)) { + sanitizedEventObj.put( + keyString, + _secretService.hashString(unsanitizedEventObj.get(keyString).toString())); + } + } catch (JSONException e) { + log.warn( + String.format("Failed to sanitize field %s. 
Skipping this field.", keyString), + e); + } + }); return transformObjectNodeToJSONObject(sanitizedEventObj); } @@ -189,8 +227,8 @@ private static String createClientIdIfNotPresent(@Nonnull final EntityService en final AuditStamp clientIdStamp = new AuditStamp(); clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)); clientIdStamp.setTime(System.currentTimeMillis()); - entityService.ingestAspectIfNotPresent(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, - null); + entityService.ingestAspectIfNotPresent( + UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null); return uuid; } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java index 2e25493133b43..5b5702de4381a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java @@ -1,18 +1,17 @@ package com.datahub.authentication.authenticator; -import com.datahub.authentication.Authentication; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; +import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; +import com.datahub.plugins.auth.authentication.Authenticator; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - public class AuthenticatorChainTest { @Test @@ -23,7 +22,8 @@ public void testAuthenticateSuccess() throws Exception { final Authentication mockAuthentication = Mockito.mock(Authentication.class); Mockito.when(mockAuthenticator1.authenticate(Mockito.any())).thenReturn(mockAuthentication); - Mockito.when(mockAuthenticator2.authenticate(Mockito.any())).thenThrow(new AuthenticationException("Failed to authenticate")); + Mockito.when(mockAuthenticator2.authenticate(Mockito.any())) + .thenThrow(new AuthenticationException("Failed to authenticate")); authenticatorChain.register(mockAuthenticator1); authenticatorChain.register(mockAuthenticator2); @@ -40,13 +40,13 @@ public void testAuthenticateSuccess() throws Exception { verify(mockAuthenticator2, times(0)).authenticate(any()); } - @Test public void testAuthenticateFailure() throws Exception { final AuthenticatorChain authenticatorChain = new AuthenticatorChain(); final Authenticator mockAuthenticator = Mockito.mock(Authenticator.class); final Authentication mockAuthentication = Mockito.mock(Authentication.class); - Mockito.when(mockAuthenticator.authenticate(Mockito.any())).thenThrow(new AuthenticationException("Failed to authenticate")); + Mockito.when(mockAuthenticator.authenticate(Mockito.any())) + .thenThrow(new AuthenticationException("Failed to authenticate")); authenticatorChain.register(mockAuthenticator); @@ -55,7 +55,8 @@ public void testAuthenticateFailure() throws Exception { Authentication result = authenticatorChain.authenticate(mockContext, false); - // If the authenticator throws, verify that null is returned to 
indicate failure to authenticate. + // If the authenticator throws, verify that null is returned to indicate failure to + // authenticate. assertNull(result); } @@ -64,13 +65,16 @@ public void testAuthenticateThrows() throws Exception { final AuthenticatorChain authenticatorChain = new AuthenticatorChain(); final Authenticator mockAuthenticator = Mockito.mock(Authenticator.class); final Authentication mockAuthentication = Mockito.mock(Authentication.class); - Mockito.when(mockAuthenticator.authenticate(Mockito.any())).thenThrow(new AuthenticationExpiredException("Failed to authenticate, token has expired")); + Mockito.when(mockAuthenticator.authenticate(Mockito.any())) + .thenThrow(new AuthenticationExpiredException("Failed to authenticate, token has expired")); authenticatorChain.register(mockAuthenticator); // Verify that the mock authentication is returned on Authenticate. final AuthenticationRequest mockContext = Mockito.mock(AuthenticationRequest.class); - assertThrows(AuthenticationExpiredException.class, () -> authenticatorChain.authenticate(mockContext, false)); + assertThrows( + AuthenticationExpiredException.class, + () -> authenticatorChain.authenticate(mockContext, false)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java index 759ecaa8f3a4d..62395c77e3847 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java @@ -1,9 +1,12 @@ package com.datahub.authentication.authenticator; +import static org.mockito.Mockito.*; +import static org.testng.AssertJUnit.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; -import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticationException; +import com.datahub.authentication.AuthenticationRequest; import com.google.common.collect.ImmutableMap; import java.util.HashMap; import java.util.HashSet; @@ -11,10 +14,6 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.AssertJUnit.*; - - public class DataHubJwtTokenAuthenticatorTest { @Test @@ -28,14 +27,16 @@ void testPublicAuthentication() throws Exception { HashSet<String> set = new HashSet<>(); set.add("https://test.com/realm/domain"); - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); Map<String, Object> config = new HashMap<>(); config.put("userIdClaim", "username"); config.put("trustedIssuers", getTrustedIssuer()); - config.put("publicKey", + config.put( + "publicKey", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAu1SU1LfVLPHCozMxH2Mo4lgOEePzNm0tRgeLezV6ffAt0gunVTLw7onLRnrq0/" + "IzW7yWR7QkrmBL7jTKEn5u+qKhbwKfBstIs+bMY2Zkp18gnTxKLxoS2tFczGkPLPgizskuemMghRniWaoLcyehkd3qqGElvW/VDL5AaWTg0nLVkjRo9z+40RQzuVaE" + "8AkAFmxZzow3x+VJYKdjykkJ0iT9wCS0DRTXu269V264Vf/3jvredZiKRkgwlL9xNAwxXFg0x/XFw005UWVRIkdgcKWTjpBP2dPwVZ4WWC+9aGVd+Gyn1o0CLelf" @@ 
-59,7 +60,8 @@ void testInvalidToken() throws Exception { + "L5lrwEO-rTXYNamy8gJOBoM8n7gHDOo6JDd25go4MsLbjHbQ-WNq5SErgaNOMfZdkg2jqKVldZvjW33v8aupx08fzONnuzaYIJBQpONhGzDkYZKkk" + "rewdrYYVl_naNRWsKt8uSVu83G3mLhMPazkxNT5CWfNR7sdXfladz8U6ruLFOGUJJ5KDjEVAReRpEbxaKOIY6oFio1TeUQsi" + "6vppLXB0RupTBmE5dr7rxdL4j9eDY94M2uowBDuOsEGA"; - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); @@ -84,14 +86,16 @@ void testUserClaim() throws Exception { HashSet<String> set = new HashSet<>(); set.add("https://test.com/realm/domain"); - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); Map<String, Object> config = new HashMap<>(); config.put("userId", "username"); config.put("trustedIssuers", getTrustedIssuer()); - config.put("publicKey", + config.put( + "publicKey", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAu1SU1LfVLPHCozMxH2Mo4lgOEePzNm0tRgeLezV6" + "ffAt0gunVTLw7onLRnrq0/IzW7yWR7QkrmBL7jTKEn5u+qKhbwKfBstIs+bMY2Zkp18gnTxKLxoS2tFczGkPLPgizskuemM" + "ghRniWaoLcyehkd3qqGElvW/VDL5AaWTg0nLVkjRo9z+40RQzuVaE8AkAFmxZzow3x+VJYKdjykkJ0iT9wCS0DRTXu269V26" diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java index 72b2fd5769715..819caa80d3417 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; @@ -8,9 +11,6 @@ import java.util.Collections; import org.testng.annotations.Test; -import static com.datahub.authentication.AuthenticationConstants.*; -import static org.testng.Assert.*; - public class DataHubSystemAuthenticatorTest { private static final String TEST_CLIENT_ID = "clientId"; @@ -21,17 +21,33 @@ public void testInit() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); assertThrows(() -> authenticator.init(null, null)); assertThrows(() -> authenticator.init(Collections.emptyMap(), null)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID), null)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null)); + assertThrows( + () -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID), null)); + assertThrows( + () -> + authenticator.init( + ImmutableMap.of(SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null)); // Correct configs provided. 
- authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); } @Test public void testAuthenticateFailureMissingAuthorizationHeader() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap()); assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); @@ -40,22 +56,39 @@ public void testAuthenticateFailureMissingAuthorizationHeader() { @Test public void testAuthenticateFailureMissingBasicCredentials() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer something") // Missing basic authentication. - ); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of( + AUTHORIZATION_HEADER_NAME, "Bearer something") // Missing basic authentication. + ); assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); } @Test public void testAuthenticateFailureMismatchingCredentials() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic incorrectId:incorrectSecret") // Incorrect authentication - ); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of( + AUTHORIZATION_HEADER_NAME, + "Basic incorrectId:incorrectSecret") // Incorrect authentication + ); assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); } @@ -63,12 +96,19 @@ public void testAuthenticateFailureMismatchingCredentials() { public void testAuthenticateSuccessNoDelegatedActor() throws Exception { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final String authorizationHeaderValue = String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue) - ); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final String 
authorizationHeaderValue = + String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue)); final Authentication authentication = authenticator.authenticate(context); @@ -84,13 +124,23 @@ public void testAuthenticateSuccessNoDelegatedActor() throws Exception { public void testAuthenticateSuccessDelegatedActor() throws Exception { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final String authorizationHeaderValue = String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); - final AuthenticationRequest context = new AuthenticationRequest( + authenticator.init( ImmutableMap.of( - AUTHORIZATION_HEADER_NAME, authorizationHeaderValue, LEGACY_X_DATAHUB_ACTOR_HEADER, "urn:li:corpuser:datahub") - ); + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final String authorizationHeaderValue = + String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of( + AUTHORIZATION_HEADER_NAME, + authorizationHeaderValue, + LEGACY_X_DATAHUB_ACTOR_HEADER, + "urn:li:corpuser:datahub")); final Authentication authentication = authenticator.authenticate(context); @@ -101,4 +151,4 @@ public void testAuthenticateSuccessDelegatedActor() throws Exception { assertEquals(authentication.getCredentials(), authorizationHeaderValue); assertEquals(authentication.getClaims(), Collections.emptyMap()); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java index f5ce938c411c6..5bd273f3bacf8 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java @@ -1,5 +1,17 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; +import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SALT_CONFIG_NAME; +import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_ALG_CONFIG_NAME; +import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_KEY_CONFIG_NAME; +import static com.datahub.authentication.token.TokenClaims.ACTOR_ID_CLAIM_NAME; +import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME; +import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME; +import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,130 +27,167 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.AspectSpec; import 
com.linkedin.metadata.models.registry.ConfigEntityRegistry; -import org.mockito.Mockito; -import org.testng.annotations.Test; import java.util.Collections; import java.util.Map; - -import static com.datahub.authentication.AuthenticationConstants.*; -import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SALT_CONFIG_NAME; -import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_ALG_CONFIG_NAME; -import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_KEY_CONFIG_NAME; -import static com.datahub.authentication.token.TokenClaims.ACTOR_ID_CLAIM_NAME; -import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME; -import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME; -import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertThrows; - +import org.mockito.Mockito; +import org.testng.annotations.Test; public class DataHubTokenAuthenticatorTest { - private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; - private static final String TEST_SALT = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI93="; - - final EntityService mockService = Mockito.mock(EntityService.class); - final StatefulTokenService statefulTokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALT); - - @Test - public void testInit() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - AuthenticatorContext authenticatorContext = - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)); - assertThrows(() -> authenticator.init(null, authenticatorContext)); - assertThrows(() -> authenticator.init(Collections.emptyMap(), authenticatorContext)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, - SIGNING_ALG_CONFIG_NAME, "UNSUPPORTED_ALG"), authenticatorContext)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, - SIGNING_ALG_CONFIG_NAME, "HS256"), null)); - - // Correct configs provided. 
- authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), authenticatorContext); - } - - @Test - public void testAuthenticateFailureMissingAuthorizationHeader() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap()); - assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); - } - - @Test - public void testAuthenticateFailureMissingBearerCredentials() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic username:password") - ); - assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); - } - - @Test - public void testAuthenticateFailureInvalidToken() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer someRandomToken") - ); - assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); - } - - @Test - public void testAuthenticateSuccess() throws Exception { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - final ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); - final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); - Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec); - Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); - Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); - - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final Actor datahub = new Actor(ActorType.USER, "datahub"); - final String validToken = authenticator._statefulTokenService.generateAccessToken( - TokenType.PERSONAL, - datahub, - "some token", - "A token description", - datahub.toUrnStr() - ); - - final String authorizationHeaderValue = String.format("Bearer 
%s", validToken); - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue) - ); - - final Authentication authentication = authenticator.authenticate(context); - - // Validate the resulting authentication object - assertNotNull(authentication); - assertEquals(authentication.getActor().getType(), ActorType.USER); - assertEquals(authentication.getActor().getId(), "datahub"); - assertEquals(authentication.getCredentials(), authorizationHeaderValue); - - Map<String, Object> claimsMap = authentication.getClaims(); - assertEquals(claimsMap.get(TOKEN_VERSION_CLAIM_NAME), 2); - assertEquals(claimsMap.get(TOKEN_TYPE_CLAIM_NAME), "PERSONAL"); - assertEquals(claimsMap.get(ACTOR_TYPE_CLAIM_NAME), "USER"); - assertEquals(claimsMap.get(ACTOR_ID_CLAIM_NAME), "datahub"); - } + private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; + private static final String TEST_SALT = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI93="; + + final EntityService mockService = Mockito.mock(EntityService.class); + final StatefulTokenService statefulTokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALT); + + @Test + public void testInit() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + AuthenticatorContext authenticatorContext = + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)); + assertThrows(() -> authenticator.init(null, authenticatorContext)); + assertThrows(() -> authenticator.init(Collections.emptyMap(), authenticatorContext)); + assertThrows( + () -> + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SIGNING_ALG_CONFIG_NAME, + "UNSUPPORTED_ALG"), + authenticatorContext)); + assertThrows( + () -> + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SIGNING_ALG_CONFIG_NAME, "HS256"), + null)); + + // Correct configs provided. 
+ authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + authenticatorContext); + } + + @Test + public void testAuthenticateFailureMissingAuthorizationHeader() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap()); + assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); + } + + @Test + public void testAuthenticateFailureMissingBearerCredentials() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic username:password")); + assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); + } + + @Test + public void testAuthenticateFailureInvalidToken() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer someRandomToken")); + assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); + } + + @Test + public void testAuthenticateSuccess() throws Exception { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + final ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + DataHubTokenAuthenticatorTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); + final AspectSpec keyAspectSpec = + configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); + Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) + .thenReturn(keyAspectSpec); + Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); + Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); + + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final Actor datahub = new Actor(ActorType.USER, "datahub"); + final String validToken = + authenticator._statefulTokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", 
datahub.toUrnStr()); + + final String authorizationHeaderValue = String.format("Bearer %s", validToken); + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue)); + + final Authentication authentication = authenticator.authenticate(context); + + // Validate the resulting authentication object + assertNotNull(authentication); + assertEquals(authentication.getActor().getType(), ActorType.USER); + assertEquals(authentication.getActor().getId(), "datahub"); + assertEquals(authentication.getCredentials(), authorizationHeaderValue); + + Map<String, Object> claimsMap = authentication.getClaims(); + assertEquals(claimsMap.get(TOKEN_VERSION_CLAIM_NAME), 2); + assertEquals(claimsMap.get(TOKEN_TYPE_CLAIM_NAME), "PERSONAL"); + assertEquals(claimsMap.get(ACTOR_TYPE_CLAIM_NAME), "USER"); + assertEquals(claimsMap.get(ACTOR_ID_CLAIM_NAME), "datahub"); + } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java index 81cf94d3bfe02..6d0678d4f3558 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.authentication.group; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -30,11 +34,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class GroupServiceTest { private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; @@ -67,20 +66,36 @@ public void setupTest() throws Exception { _groupKey.setName(GROUP_ID); NativeGroupMembership nativeGroupMembership = new NativeGroupMembership(); - nativeGroupMembership.setNativeGroups(new UrnArray(Urn.createFromString(NATIVE_GROUP_URN_STRING))); + nativeGroupMembership.setNativeGroups( + new UrnArray(Urn.createFromString(NATIVE_GROUP_URN_STRING))); GroupMembership groupMembership = new GroupMembership(); groupMembership.setGroups(new UrnArray(Urn.createFromString(EXTERNAL_GROUP_URN_STRING))); - _entityResponseMap = ImmutableMap.of(USER_URN, new EntityResponse().setEntityName(CORP_USER_ENTITY_NAME) - .setUrn(USER_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data())), GROUP_MEMBERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(groupMembership.data())))))); - - _entityRelationships = new EntityRelationships().setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray(ImmutableList.of( - new EntityRelationship().setEntity(USER_URN).setType(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)))); + _entityResponseMap = + ImmutableMap.of( + USER_URN, + new EntityResponse() + .setEntityName(CORP_USER_ENTITY_NAME) + .setUrn(USER_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(nativeGroupMembership.data())), + 
GROUP_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(groupMembership.data())))))); + + _entityRelationships = + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(USER_URN) + .setType(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)))); _entityClient = mock(EntityClient.class); _entityService = mock(EntityService.class); @@ -118,7 +133,8 @@ public void testGetGroupOriginNullArguments() { @Test public void testGetGroupOriginPasses() { Origin groupOrigin = mock(Origin.class); - when(_entityService.getLatestAspect(eq(_groupUrn), eq(ORIGIN_ASPECT_NAME))).thenReturn(groupOrigin); + when(_entityService.getLatestAspect(eq(_groupUrn), eq(ORIGIN_ASPECT_NAME))) + .thenReturn(groupOrigin); assertEquals(groupOrigin, _groupService.getGroupOrigin(_groupUrn)); } @@ -132,8 +148,9 @@ public void testAddUserToNativeGroupNullArguments() { @Test public void testAddUserToNativeGroupPasses() throws Exception { when(_entityService.exists(USER_URN)).thenReturn(true); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); _groupService.addUserToNativeGroup(USER_URN, _groupUrn, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); @@ -141,68 +158,101 @@ public void testAddUserToNativeGroupPasses() throws Exception { @Test public void testCreateNativeGroupNullArguments() { - assertThrows(() -> _groupService.createNativeGroup(null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createNativeGroup(_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createNativeGroup(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createNativeGroup( + null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createNativeGroup( + _groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.createNativeGroup(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); } @Test public void testCreateNativeGroupPasses() throws Exception { - _groupService.createNativeGroup(_groupKey, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION); + _groupService.createNativeGroup( + _groupKey, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION); verify(_entityClient, times(2)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingNativeGroupMembersNullArguments() { - assertThrows(() -> _groupService.removeExistingNativeGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.removeExistingNativeGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.removeExistingNativeGroupMembers( + null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.removeExistingNativeGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); } @Test - public void testRemoveExistingNativeGroupMembersGroupNotInNativeGroupMembership() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); - - 
_groupService.removeExistingNativeGroupMembers(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + public void testRemoveExistingNativeGroupMembersGroupNotInNativeGroupMembership() + throws Exception { + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); + + _groupService.removeExistingNativeGroupMembers( + Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingNativeGroupMembersPasses() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); - _groupService.removeExistingNativeGroupMembers(Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + _groupService.removeExistingNativeGroupMembers( + Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testMigrateGroupMembershipToNativeGroupMembershipNullArguments() { - assertThrows(() -> _groupService.migrateGroupMembershipToNativeGroupMembership(null, USER_URN.toString(), - SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.migrateGroupMembershipToNativeGroupMembership( + null, USER_URN.toString(), SYSTEM_AUTHENTICATION)); } @Test public void testMigrateGroupMembershipToNativeGroupMembershipPasses() throws Exception { - when(_graphClient.getRelatedEntities(eq(EXTERNAL_GROUP_URN_STRING), - eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), eq(RelationshipDirection.INCOMING), anyInt(), - anyInt(), any())).thenReturn(_entityRelationships); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_graphClient.getRelatedEntities( + eq(EXTERNAL_GROUP_URN_STRING), + eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), + eq(RelationshipDirection.INCOMING), + anyInt(), + anyInt(), + any())) + .thenReturn(_entityRelationships); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); when(_entityService.exists(USER_URN)).thenReturn(true); - _groupService.migrateGroupMembershipToNativeGroupMembership(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), - USER_URN.toString(), SYSTEM_AUTHENTICATION); + _groupService.migrateGroupMembershipToNativeGroupMembership( + Urn.createFromString(EXTERNAL_GROUP_URN_STRING), + USER_URN.toString(), + SYSTEM_AUTHENTICATION); verify(_entityClient, times(3)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testCreateGroupInfoNullArguments() { - assertThrows(() -> _groupService.createGroupInfo(null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createGroupInfo(_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createGroupInfo(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createGroupInfo( + null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createGroupInfo( + 
_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.createGroupInfo(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); } @Test @@ -229,36 +279,46 @@ public void testGetExistingGroupMembersNullArguments() { @Test public void testGetExistingGroupMembersPasses() { - when(_graphClient.getRelatedEntities(eq(GROUP_URN_STRING), - eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), eq(RelationshipDirection.INCOMING), anyInt(), - anyInt(), any())).thenReturn(_entityRelationships); - - assertEquals(USER_URN_LIST, _groupService.getExistingGroupMembers(_groupUrn, USER_URN.toString())); + when(_graphClient.getRelatedEntities( + eq(GROUP_URN_STRING), + eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), + eq(RelationshipDirection.INCOMING), + anyInt(), + anyInt(), + any())) + .thenReturn(_entityRelationships); + + assertEquals( + USER_URN_LIST, _groupService.getExistingGroupMembers(_groupUrn, USER_URN.toString())); } @Test public void testRemoveExistingGroupMembersNullArguments() { - assertThrows(() -> _groupService.removeExistingGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.removeExistingGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.removeExistingGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.removeExistingGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingGroupMembersGroupNotInGroupMembership() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); - _groupService.removeExistingGroupMembers(Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + _groupService.removeExistingGroupMembers( + Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingGroupMembersPasses() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); - _groupService.removeExistingGroupMembers(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + _groupService.removeExistingGroupMembers( + Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java index 2eed108b40300..cd9d5972103c1 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.authentication.invite; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; 
+ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -17,11 +21,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class InviteTokenServiceTest { private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token"; private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; @@ -68,20 +67,24 @@ public void testIsInviteTokenValidTrue() throws Exception { @Test public void testGetInviteTokenRoleNullEntity() throws Exception { - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(null); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(null); - assertThrows(() -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); } @Test public void testGetInviteTokenRoleEmptyAspectMap() throws Exception { final EntityResponse entityResponse = new EntityResponse().setAspects(new EnvelopedAspectMap()); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); - assertThrows(() -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); } @Test @@ -89,11 +92,14 @@ public void testGetInviteTokenRoleNoRole() throws Exception { final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING); - aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); + aspectMap.put( + INVITE_TOKEN_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION); assertNull(roleUrn); @@ -103,12 +109,16 @@ public void testGetInviteTokenRoleNoRole() throws Exception { public void testGetInviteTokenRole() throws Exception { final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); - aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); + final InviteToken inviteTokenAspect = + new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); + aspectMap.put( + INVITE_TOKEN_ASPECT_NAME, + new EnvelopedAspect().setValue(new 
Aspect(inviteTokenAspect.data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION); assertNotNull(roleUrn); @@ -119,15 +129,22 @@ public void testGetInviteTokenRole() throws Exception { public void getInviteTokenRoleUrnDoesNotExist() throws Exception { when(_entityClient.exists(eq(roleUrn), eq(SYSTEM_AUTHENTICATION))).thenReturn(false); - assertThrows(() -> _inviteTokenService.getInviteToken(roleUrn.toString(), false, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _inviteTokenService.getInviteToken(roleUrn.toString(), false, SYSTEM_AUTHENTICATION)); } @Test public void getInviteTokenRegenerate() throws Exception { final SearchResult searchResult = new SearchResult(); searchResult.setEntities(new SearchEntityArray()); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING); when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); @@ -140,8 +157,14 @@ public void getInviteTokenRegenerate() throws Exception { public void getInviteTokenEmptySearchResult() throws Exception { final SearchResult searchResult = new SearchResult(); searchResult.setEntities(new SearchEntityArray()); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING); when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); @@ -157,10 +180,17 @@ public void getInviteTokenNullEntity() throws Exception { final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(null); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(null); assertThrows(() -> _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION)); } @@ -172,12 +202,19 @@ public void getInviteTokenNoInviteTokenAspect() throws Exception { final SearchEntity searchEntity = new 
SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); final EntityResponse entityResponse = new EntityResponse().setAspects(new EnvelopedAspectMap()); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); @@ -191,19 +228,31 @@ public void getInviteToken() throws Exception { final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); - aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); + final InviteToken inviteTokenAspect = + new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); + aspectMap.put( + INVITE_TOKEN_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); when(_secretService.decrypt(eq(ENCRYPTED_INVITE_TOKEN_STRING))).thenReturn(INVITE_TOKEN_STRING); - assertEquals(_inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION), INVITE_TOKEN_STRING); + assertEquals( + _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION), + INVITE_TOKEN_STRING); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java index 4c78ab13c9cda..d8a0716937525 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.post; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -16,10 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static 
org.testng.Assert.*; - - public class PostServiceTest { private static final Urn POST_URN = UrnUtils.getUrn("urn:li:post:123"); private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; @@ -29,12 +28,15 @@ public class PostServiceTest { private static final String POST_TITLE = "title"; private static final String POST_DESCRIPTION = "description"; private static final String POST_LINK = "https://datahubproject.io"; - private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); - private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(MEDIA); + private static final Media MEDIA = + new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); + private static final PostContent POST_CONTENT = + new PostContent() + .setType(POST_CONTENT_TYPE) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(MEDIA); private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT; private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; private static final Authentication SYSTEM_AUTHENTICATION = @@ -57,7 +59,8 @@ public void testMapMedia() { @Test public void testMapPostContent() { PostContent postContent = - _postService.mapPostContent(POST_CONTENT_TYPE.toString(), POST_TITLE, POST_DESCRIPTION, POST_LINK, MEDIA); + _postService.mapPostContent( + POST_CONTENT_TYPE.toString(), POST_TITLE, POST_DESCRIPTION, POST_LINK, MEDIA); assertEquals(POST_CONTENT, postContent); } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java index 811bdaaa0fcf8..155f1314a0190 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java @@ -1,5 +1,7 @@ package com.datahub.authentication.token; +import static org.testng.AssertJUnit.*; + import io.jsonwebtoken.Claims; import io.jsonwebtoken.JwsHeader; import java.math.BigInteger; @@ -20,13 +22,9 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - public class DataHubJwtSigningKeyResolverTest { - @InjectMocks - private DataHubJwtSigningKeyResolver resolver; + @InjectMocks private DataHubJwtSigningKeyResolver resolver; @Test public void testResolveSigningKeyWithPublicKey() throws Exception { @@ -55,11 +53,12 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { HttpResponse<String> httpResponse = Mockito.mock(HttpResponse.class); Mockito.when(httpResponse.statusCode()).thenReturn(200); - JSONObject token = new JSONObject( - "{\"kty\": \"RSA\", \"kid\": \"test_key\", \"n\": \"ueXyoaxgWhMTLwkowaskhiV85rbN9n_nLft8CxFUY3nbMpNybAWsWuhJ4SYLT4U-GbKdL-h-NYgBXKn" - + "GK1ieG6qSC25T3hWXTb3cNe73ZQUcZSivAV2tZouPYcb1XKSyKd-PsK8NsCpq1NHsJsrXSKq-7YCaf4MxIUaFXSZTE7ZNC0fPVqYH71jnyOU9FA_KJm0IC-x_Bs2g" - + "Ak3Eq1_6pZ_0VeYpczv82LACAUzi1vuU1gbbZLNHHl4DHwWb98eI1aCbWHNMux70Ba4aREOdKOWrxZ066W_NKUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH" - + "0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}"); + JSONObject token = + new JSONObject( + "{\"kty\": \"RSA\", \"kid\": \"test_key\", \"n\": 
\"ueXyoaxgWhMTLwkowaskhiV85rbN9n_nLft8CxFUY3nbMpNybAWsWuhJ4SYLT4U-GbKdL-h-NYgBXKn" + + "GK1ieG6qSC25T3hWXTb3cNe73ZQUcZSivAV2tZouPYcb1XKSyKd-PsK8NsCpq1NHsJsrXSKq-7YCaf4MxIUaFXSZTE7ZNC0fPVqYH71jnyOU9FA_KJm0IC-x_Bs2g" + + "Ak3Eq1_6pZ_0VeYpczv82LACAUzi1vuU1gbbZLNHHl4DHwWb98eI1aCbWHNMux70Ba4aREOdKOWrxZ066W_NKUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH" + + "0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}"); PublicKey expectedKey = getPublicKey(token); String responseJson = @@ -69,11 +68,14 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { + "KUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}]}"; Mockito.when(httpResponse.body()).thenReturn(responseJson); - Mockito.when(httpClient.send(Mockito.any(HttpRequest.class), Mockito.any(HttpResponse.BodyHandler.class))) + Mockito.when( + httpClient.send( + Mockito.any(HttpRequest.class), Mockito.any(HttpResponse.BodyHandler.class))) .thenReturn(httpResponse); HashSet<String> trustedIssuers = new HashSet<>(); trustedIssuers.add("https://example.com"); - DataHubJwtSigningKeyResolver resolver = new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); + DataHubJwtSigningKeyResolver resolver = + new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); resolver.client = httpClient; JwsHeader mockJwsHeader = Mockito.mock(JwsHeader.class); Mockito.when(mockJwsHeader.getKeyId()).thenReturn("test_key"); @@ -88,7 +90,8 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { void testInvalidIssuer() throws Exception { HashSet<String> trustedIssuers = new HashSet<>(); - DataHubJwtSigningKeyResolver resolver = new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); + DataHubJwtSigningKeyResolver resolver = + new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); JwsHeader mockJwsHeader = Mockito.mock(JwsHeader.class); Claims mockClaims = Mockito.mock(Claims.class); resolver.resolveSigningKey(mockJwsHeader, mockClaims); @@ -120,8 +123,10 @@ private PublicKey getPublicKey(JSONObject token) throws Exception { if (token.get("kty").toString().equals("RSA")) { try { KeyFactory kf = KeyFactory.getInstance("RSA"); - BigInteger modulus = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); - BigInteger exponent = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); + BigInteger modulus = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); + BigInteger exponent = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); publicKey = kf.generatePublic(new RSAPublicKeySpec(modulus, exponent)); } catch (InvalidKeySpecException e) { throw new InvalidKeySpecException("Invalid public key", e); diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java index 1c46e864a559e..ed10022632a56 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.token; +import static com.datahub.authentication.token.TokenClaims.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.authenticator.DataHubTokenAuthenticatorTest; @@ -13,14 +16,9 @@ import 
com.linkedin.metadata.models.registry.ConfigEntityRegistry; import java.util.Date; import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.datahub.authentication.token.TokenClaims.*; -import static org.testng.Assert.*; - - public class StatefulTokenServiceTest { private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; @@ -32,7 +30,8 @@ public class StatefulTokenServiceTest { public void testConstructor() { assertThrows(() -> new StatefulTokenService(null, null, null, null, null)); assertThrows(() -> new StatefulTokenService(TEST_SIGNING_KEY, null, null, null, null)); - assertThrows(() -> new StatefulTokenService(TEST_SIGNING_KEY, "UNSUPPORTED_ALG", null, null, null)); + assertThrows( + () -> new StatefulTokenService(TEST_SIGNING_KEY, "UNSUPPORTED_ALG", null, null, null)); // Succeeds: new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); @@ -40,11 +39,12 @@ public void testConstructor() { @Test public void testGenerateAccessTokenPersonalToken() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); // Verify token claims @@ -65,10 +65,15 @@ public void testGenerateAccessTokenPersonalToken() throws Exception { @Test public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, - null, System.currentTimeMillis(), + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, + datahub, + null, + System.currentTimeMillis(), "some token", "A token description", datahub.toUrnStr()); @@ -92,11 +97,12 @@ public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { @Test public void testGenerateAccessTokenSessionToken() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.SESSION, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.SESSION, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); @@ -118,14 +124,21 @@ public void testGenerateAccessTokenSessionToken() throws Exception { @Test public void testValidateAccessTokenFailsDueToExpiration() { - StatefulTokenService - tokenService = new StatefulTokenService(TEST_SIGNING_KEY, 
"HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); // Generate token that expires immediately. Date date = new Date(); - //This method returns the time in millis + // This method returns the time in millis long createdAtInMs = date.getTime(); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L, - createdAtInMs, "token", "", "urn:li:corpuser:datahub"); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, + new Actor(ActorType.USER, "datahub"), + 0L, + createdAtInMs, + "token", + "", + "urn:li:corpuser:datahub"); assertNotNull(token); // Validation should fail. @@ -134,12 +147,13 @@ public void testValidateAccessTokenFailsDueToExpiration() { @Test public void testValidateAccessTokenFailsDueToManipulation() { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); // Change single character @@ -152,23 +166,30 @@ public void testValidateAccessTokenFailsDueToManipulation() { @Test public void generateRevokeToken() throws TokenException { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - final ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); - final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + final ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + DataHubTokenAuthenticatorTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); + final AspectSpec keyAspectSpec = + configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); - Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec); + Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) + .thenReturn(keyAspectSpec); Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); final RollbackRunResult result = new RollbackRunResult(ImmutableList.of(), 0); Mockito.when(mockService.deleteUrn(Mockito.any(Urn.class))).thenReturn(result); - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = 
tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); // Revoke token tokenService.revokeAccessToken(tokenService.hash(token)); @@ -177,7 +198,5 @@ public void generateRevokeToken() throws TokenException { assertThrows(TokenException.class, () -> tokenService.validateAccessToken(token)); } - private void mockStateful() { - - } + private void mockStateful() {} } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java index 4268521a07c0c..841308441569d 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.token; +import static com.datahub.authentication.token.TokenClaims.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.authenticator.DataHubTokenAuthenticator; @@ -15,10 +18,6 @@ import javax.crypto.spec.SecretKeySpec; import org.testng.annotations.Test; -import static com.datahub.authentication.token.TokenClaims.*; -import static org.testng.Assert.*; - - public class StatelessTokenServiceTest { private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; @@ -37,8 +36,11 @@ public void testConstructor() { @Test public void testGenerateAccessTokenPersonalToken() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); assertNotNull(token); // Verify token claims @@ -59,10 +61,11 @@ public void testGenerateAccessTokenPersonalToken() throws Exception { @Test public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, - new Actor(ActorType.USER, "datahub"), - null); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), null); assertNotNull(token); // Verify token claims @@ -83,8 +86,11 @@ public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { @Test public void testGenerateAccessTokenSessionToken() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.SESSION, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + 
statelessTokenService.generateAccessToken( + TokenType.SESSION, new Actor(ActorType.USER, "datahub")); assertNotNull(token); // Verify token claims @@ -105,26 +111,34 @@ public void testGenerateAccessTokenSessionToken() throws Exception { @Test public void testValidateAccessTokenFailsDueToExpiration() { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); // Generate token that expires immediately. - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L); assertNotNull(token); // Validation should fail. - assertThrows(TokenExpiredException.class, () -> statelessTokenService.validateAccessToken(token)); + assertThrows( + TokenExpiredException.class, () -> statelessTokenService.validateAccessToken(token)); } @Test public void testValidateAccessTokenFailsDueToManipulation() { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); assertNotNull(token); // Change single character String changedToken = token.substring(1); // Validation should fail. - assertThrows(TokenException.class, () -> statelessTokenService.validateAccessToken(changedToken)); + assertThrows( + TokenException.class, () -> statelessTokenService.validateAccessToken(changedToken)); } @Test @@ -134,31 +148,37 @@ public void testValidateAccessTokenFailsDueToNoneAlgorithm() { "eyJhbGciOiJub25lIn0.eyJhY3RvclR5cGUiOiJVU0VSIiwiYWN0b3JJZCI6Il9fZGF0YWh1Yl9zeXN0ZW0iL" + "CJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwianRpIjoiN2VmOTkzYjQtMjBiOC00Y2Y5LTljNm" + "YtMTE2NjNjZWVmOTQzIiwic3ViIjoiZGF0YWh1YiIsImlzcyI6ImRhdGFodWItbWV0YWRhdGEtc2VydmljZSJ9."; - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); // Validation should fail. assertThrows(TokenException.class, () -> statelessTokenService.validateAccessToken(badToken)); } @Test public void testValidateAccessTokenFailsDueToUnsupportedSigningAlgorithm() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); Map<String, Object> claims = new HashMap<>(); - claims.put(TOKEN_VERSION_CLAIM_NAME, String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. + claims.put( + TOKEN_VERSION_CLAIM_NAME, + String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. 
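    // A minimal sketch, not part of the original change set: assuming the claim keys match
    // the TokenClaims constants used here, the payload assembled by these claims.put(...)
    // calls (plus the issuer/subject set on the builder below) would decode to roughly
    //   {"version":"1","type":"SESSION","actorType":"USER","actorId":"__datahub_system",
    //    "sub":"datahub","iss":"datahub-metadata-service"}
    // i.e. the same shape as the hard-coded base64 payload in
    // testValidateAccessTokenFailsDueToNoneAlgorithm above.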
claims.put(TOKEN_TYPE_CLAIM_NAME, "SESSION"); claims.put(ACTOR_TYPE_CLAIM_NAME, "USER"); claims.put(ACTOR_ID_CLAIM_NAME, "__datahub_system"); - final JwtBuilder builder = Jwts.builder() - .addClaims(claims) - .setId(UUID.randomUUID().toString()) - .setIssuer("datahub-metadata-service") - .setSubject("datahub"); - builder.setExpiration(new Date(System.currentTimeMillis() + 60)); + final JwtBuilder builder = + Jwts.builder() + .addClaims(claims) + .setId(UUID.randomUUID().toString()) + .setIssuer("datahub-metadata-service") + .setSubject("datahub"); + builder.setExpiration(new Date(System.currentTimeMillis() + 60)); final String testSigningKey = "TLHLdPSivAwIjXP4MT4TtlitsEGkOKjQGNnqsprisfghpU8g"; - byte [] apiKeySecretBytes = testSigningKey.getBytes(StandardCharsets.UTF_8); - final Key signingKey = new SecretKeySpec(apiKeySecretBytes, SignatureAlgorithm.HS384.getJcaName()); + byte[] apiKeySecretBytes = testSigningKey.getBytes(StandardCharsets.UTF_8); + final Key signingKey = + new SecretKeySpec(apiKeySecretBytes, SignatureAlgorithm.HS384.getJcaName()); final String badToken = builder.signWith(signingKey, SignatureAlgorithm.HS384).compact(); // Validation should fail. diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java index 0102311ff3b61..9cb5d5cb697cc 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.authentication.user; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,11 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class NativeUserServiceTest { private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; @@ -52,39 +51,60 @@ public void setupTest() throws Exception { AuthenticationConfiguration authenticationConfiguration = new AuthenticationConfiguration(); authenticationConfiguration.setSystemClientId("someCustomId"); - _nativeUserService = new NativeUserService(_entityService, _entityClient, _secretService, authenticationConfiguration); + _nativeUserService = + new NativeUserService( + _entityService, _entityClient, _secretService, authenticationConfiguration); } @Test public void testCreateNativeUserNullArguments() { assertThrows( - () -> _nativeUserService.createNativeUser(null, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, null, EMAIL, TITLE, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, null, TITLE, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, null, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, null, - SYSTEM_AUTHENTICATION)); - } - - @Test(expectedExceptions = RuntimeException.class, 
expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + () -> + _nativeUserService.createNativeUser( + null, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, null, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, null, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, null, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, null, SYSTEM_AUTHENTICATION)); + } + + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") public void testCreateNativeUserUserAlreadyExists() throws Exception { // The user already exists when(_entityService.exists(any())).thenReturn(true); - _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") public void testCreateNativeUserUserDatahub() throws Exception { - _nativeUserService.createNativeUser(DATAHUB_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + DATAHUB_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! 
Cannot create a new user.") public void testCreateNativeUserUserSystemUser() throws Exception { - _nativeUserService.createNativeUser(SYSTEM_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + SYSTEM_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } @Test @@ -94,7 +114,8 @@ public void testCreateNativeUserPasses() throws Exception { when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); when(_secretService.getHashedPassword(any(), any())).thenReturn(HASHED_PASSWORD); - _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } @Test @@ -121,13 +142,17 @@ public void testUpdateCorpUserCredentialsPasses() throws Exception { @Test public void testGenerateNativeUserResetTokenNullArguments() { - assertThrows(() -> _nativeUserService.generateNativeUserPasswordResetToken(null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _nativeUserService.generateNativeUserPasswordResetToken(null, SYSTEM_AUTHENTICATION)); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User does not exist or is a non-native user!") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "User does not exist or is a non-native user!") public void testGenerateNativeUserResetTokenNotNativeUser() throws Exception { // Nonexistent corpUserCredentials - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(null); _nativeUserService.generateNativeUserPasswordResetToken(USER_URN_STRING, SYSTEM_AUTHENTICATION); } @@ -135,8 +160,8 @@ public void testGenerateNativeUserResetTokenNotNativeUser() throws Exception { @Test public void testGenerateNativeUserResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); @@ -148,80 +173,101 @@ public void testGenerateNativeUserResetToken() throws Exception { @Test public void testResetCorpUserCredentialsNullArguments() { - assertThrows(() -> _nativeUserService.resetCorpUserCredentials(null, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION)); assertThrows( - () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, null, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + () -> + _nativeUserService.resetCorpUserCredentials( + null, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, null, RESET_TOKEN, SYSTEM_AUTHENTICATION)); assertThrows( - () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, null, SYSTEM_AUTHENTICATION)); + () -> + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, null, SYSTEM_AUTHENTICATION)); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User has not generated a password reset token!") + 
@Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "User has not generated a password reset token!") public void testResetCorpUserCredentialsNoPasswordResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); // No password reset token when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(false); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, - expectedExceptionsMessageRegExp = "Invalid reset token. Please ask your administrator to send you an updated link!") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = + "Invalid reset token. Please ask your administrator to send you an updated link!") public void testResetCorpUserCredentialsBadResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( - Instant.now().toEpochMilli()); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()) + .thenReturn(Instant.now().toEpochMilli()); // Reset token won't match when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn("badResetToken"); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, - expectedExceptionsMessageRegExp = "Reset token has expired! Please ask your administrator to create a new one") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = + "Reset token has expired! 
Please ask your administrator to create a new one") public void testResetCorpUserCredentialsExpiredResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - // Reset token expiration time will be before the system time when we run resetCorpUserCredentials + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + // Reset token expiration time will be before the system time when we run + // resetCorpUserCredentials when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn(0L); when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } @Test public void testResetCorpUserCredentialsPasses() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( - Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli()); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()) + .thenReturn(Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli()); when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); when(_secretService.generateSalt(anyInt())).thenReturn(SALT); when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), any()); } @@ -233,7 +279,8 @@ public void testDoesPasswordMatchNullArguments() { @Test public void testDoesPasswordMatchNoCorpUserCredentials() throws Exception { - when(_entityService.getLatestAspect(any(), 
eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(null); assertFalse(_nativeUserService.doesPasswordMatch(USER_URN_STRING, PASSWORD)); } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index b0b206001209c..ffee378a363c7 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -1,4 +1,19 @@ package com.datahub.authorization; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.PoliciesConfig.ACTIVE_POLICY_STATE; +import static com.linkedin.metadata.authorization.PoliciesConfig.INACTIVE_POLICY_STATE; +import static com.linkedin.metadata.authorization.PoliciesConfig.METADATA_POLICY_TYPE; +import static org.mockito.ArgumentMatchers.isNull; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -30,7 +45,6 @@ import com.linkedin.policy.DataHubActorFilter; import com.linkedin.policy.DataHubPolicyInfo; import com.linkedin.policy.DataHubResourceFilter; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -38,34 +52,18 @@ import java.util.Map; import java.util.Optional; import java.util.Set; - +import javax.annotation.Nullable; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nullable; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.authorization.PoliciesConfig.ACTIVE_POLICY_STATE; -import static com.linkedin.metadata.authorization.PoliciesConfig.INACTIVE_POLICY_STATE; -import static com.linkedin.metadata.authorization.PoliciesConfig.METADATA_POLICY_TYPE; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.assertFalse; - - public class DataHubAuthorizerTest { public static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; private static final Urn PARENT_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:parent"); private static final Urn CHILD_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:child"); - private static final Urn USER_WITH_ADMIN_ROLE = UrnUtils.getUrn("urn:li:corpuser:user-with-admin"); + private static final Urn USER_WITH_ADMIN_ROLE = + UrnUtils.getUrn("urn:li:corpuser:user-with-admin"); private EntityClient _entityClient; private DataHubAuthorizer _dataHubAuthorizer; @@ -76,102 +74,158 @@ public void setupTest() throws Exception { // Init mocks. 
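    // A rough map of the fixtures below (a sketch, not part of the original change set):
    // five policies -- active, inactive, parent-domain, child-domain, and admin-role --
    // are served one per scroll page (policySearchResult1..5) so that the authorizer's
    // pagination logic is exercised, and each policy urn is then hydrated through the
    // mocked batchGetV2 switch further down.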
final Urn activePolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:0"); - final DataHubPolicyInfo activePolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_TAGS"), null); + final DataHubPolicyInfo activePolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_TAGS"), null); final EnvelopedAspectMap activeAspectMap = new EnvelopedAspectMap(); - activeAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(activePolicy.data()))); + activeAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(activePolicy.data()))); final Urn inactivePolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:1"); - final DataHubPolicyInfo inactivePolicy = createDataHubPolicyInfo(false, ImmutableList.of("EDIT_ENTITY_OWNERS"), null); + final DataHubPolicyInfo inactivePolicy = + createDataHubPolicyInfo(false, ImmutableList.of("EDIT_ENTITY_OWNERS"), null); final EnvelopedAspectMap inactiveAspectMap = new EnvelopedAspectMap(); - inactiveAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inactivePolicy.data()))); + inactiveAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(inactivePolicy.data()))); final Urn parentDomainPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:2"); - final DataHubPolicyInfo parentDomainPolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_DOCS"), PARENT_DOMAIN_URN); + final DataHubPolicyInfo parentDomainPolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_DOCS"), PARENT_DOMAIN_URN); final EnvelopedAspectMap parentDomainPolicyAspectMap = new EnvelopedAspectMap(); - parentDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomainPolicy.data()))); + parentDomainPolicyAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentDomainPolicy.data()))); final Urn childDomainPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:3"); - final DataHubPolicyInfo childDomainPolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_STATUS"), CHILD_DOMAIN_URN); + final DataHubPolicyInfo childDomainPolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_STATUS"), CHILD_DOMAIN_URN); final EnvelopedAspectMap childDomainPolicyAspectMap = new EnvelopedAspectMap(); - childDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); + childDomainPolicyAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); final Urn adminPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:4"); final DataHubActorFilter actorFilter = new DataHubActorFilter(); - actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Admin")))); - final DataHubPolicyInfo adminPolicy = createDataHubPolicyInfoFor(true, ImmutableList.of("EDIT_USER_PROFILE"), null, actorFilter); + actorFilter.setRoles( + new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Admin")))); + final DataHubPolicyInfo adminPolicy = + createDataHubPolicyInfoFor(true, ImmutableList.of("EDIT_USER_PROFILE"), null, actorFilter); final EnvelopedAspectMap adminPolicyAspectMap = new EnvelopedAspectMap(); - adminPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(adminPolicy.data()))); + adminPolicyAspectMap.put( + 
DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(adminPolicy.data()))); - final ScrollResult policySearchResult1 = new ScrollResult() + final ScrollResult policySearchResult1 = + new ScrollResult() .setScrollId("1") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); - final ScrollResult policySearchResult2 = new ScrollResult() + final ScrollResult policySearchResult2 = + new ScrollResult() .setScrollId("2") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); - final ScrollResult policySearchResult3 = new ScrollResult() + final ScrollResult policySearchResult3 = + new ScrollResult() .setScrollId("3") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); - final ScrollResult policySearchResult4 = new ScrollResult() - .setScrollId("4") - .setNumEntities(5) + final ScrollResult policySearchResult4 = + new ScrollResult() + .setScrollId("4") + .setNumEntities(5) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(childDomainPolicyUrn)))); + + final ScrollResult policySearchResult5 = + new ScrollResult() + .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of( - new SearchEntity().setEntity(childDomainPolicyUrn)))); - - final ScrollResult policySearchResult5 = new ScrollResult() - .setNumEntities(5) - .setEntities( - new SearchEntityArray( - ImmutableList.of( - new SearchEntity().setEntity(adminPolicyUrn)))); - - when(_entityClient.scrollAcrossEntities(eq(List.of("dataHubPolicy")), eq(""), isNull(), any(), isNull(), - anyInt(), eq(new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipHighlighting(true).setSkipCache(true)), any())) - .thenReturn(policySearchResult1) - .thenReturn(policySearchResult2) - .thenReturn(policySearchResult3) - .thenReturn(policySearchResult4) - .thenReturn(policySearchResult5); - - when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())).thenAnswer(args -> { - Set<Urn> inputUrns = args.getArgument(1); - Urn urn = inputUrns.stream().findFirst().get(); - - switch (urn.toString()) { - case "urn:li:dataHubPolicy:0": - return Map.of(activePolicyUrn, new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap)); - case "urn:li:dataHubPolicy:1": - return Map.of(inactivePolicyUrn, new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap)); - case "urn:li:dataHubPolicy:2": - return Map.of(parentDomainPolicyUrn, new EntityResponse().setUrn(parentDomainPolicyUrn).setAspects(parentDomainPolicyAspectMap)); - case "urn:li:dataHubPolicy:3": - return Map.of(childDomainPolicyUrn, new EntityResponse().setUrn(childDomainPolicyUrn).setAspects(childDomainPolicyAspectMap)); - case "urn:li:dataHubPolicy:4": - return Map.of(adminPolicyUrn, new EntityResponse().setUrn(adminPolicyUrn).setAspects(adminPolicyAspectMap)); - default: - throw new IllegalStateException(); - } - }); - - final List<Urn> userUrns = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user3"), Urn.createFromString("urn:li:corpuser:user4")); - final List<Urn> 
groupUrns = ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group3"), Urn.createFromString("urn:li:corpGroup:group4")); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(adminPolicyUrn)))); + + when(_entityClient.scrollAcrossEntities( + eq(List.of("dataHubPolicy")), + eq(""), + isNull(), + any(), + isNull(), + anyInt(), + eq( + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setSkipCache(true)), + any())) + .thenReturn(policySearchResult1) + .thenReturn(policySearchResult2) + .thenReturn(policySearchResult3) + .thenReturn(policySearchResult4) + .thenReturn(policySearchResult5); + + when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())) + .thenAnswer( + args -> { + Set<Urn> inputUrns = args.getArgument(1); + Urn urn = inputUrns.stream().findFirst().get(); + + switch (urn.toString()) { + case "urn:li:dataHubPolicy:0": + return Map.of( + activePolicyUrn, + new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap)); + case "urn:li:dataHubPolicy:1": + return Map.of( + inactivePolicyUrn, + new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap)); + case "urn:li:dataHubPolicy:2": + return Map.of( + parentDomainPolicyUrn, + new EntityResponse() + .setUrn(parentDomainPolicyUrn) + .setAspects(parentDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:3": + return Map.of( + childDomainPolicyUrn, + new EntityResponse() + .setUrn(childDomainPolicyUrn) + .setAspects(childDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:4": + return Map.of( + adminPolicyUrn, + new EntityResponse().setUrn(adminPolicyUrn).setAspects(adminPolicyAspectMap)); + default: + throw new IllegalStateException(); + } + }); + + final List<Urn> userUrns = + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user3"), + Urn.createFromString("urn:li:corpuser:user4")); + final List<Urn> groupUrns = + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group3"), + Urn.createFromString("urn:li:corpGroup:group4")); EntityResponse ownershipResponse = new EntityResponse(); EnvelopedAspectMap ownershipAspectMap = new EnvelopedAspectMap(); - ownershipAspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(userUrns, groupUrns).data()))); + ownershipAspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new com.linkedin.entity.Aspect(createOwnershipAspect(userUrns, groupUrns).data()))); ownershipResponse.setAspects(ownershipAspectMap); when(_entityClient.getV2(any(), any(), eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), any())) .thenReturn(ownershipResponse); @@ -181,31 +235,45 @@ public void setupTest() throws Exception { .thenReturn(createDomainsResponse(CHILD_DOMAIN_URN)); // Mocks to get parent domains on a domain - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(CHILD_DOMAIN_URN)), eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), any())) + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(CHILD_DOMAIN_URN)), + eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + any())) .thenReturn(createDomainPropertiesBatchResponse(PARENT_DOMAIN_URN)); // Mocks to reach the stopping point on domain parents - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(PARENT_DOMAIN_URN)), eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), any())) + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(PARENT_DOMAIN_URN)), + 
eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + any())) .thenReturn(createDomainPropertiesBatchResponse(null)); // Mocks to reach role membership for a user urn - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any()) - ).thenReturn(createUserRoleMembershipBatchResponse(USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); - - final Authentication systemAuthentication = new Authentication( - new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), - "" - ); - - _dataHubAuthorizer = new DataHubAuthorizer( - systemAuthentication, - _entityClient, - 10, - 10, - DataHubAuthorizer.AuthorizationMode.DEFAULT, - 1 // force pagination logic - ); - _dataHubAuthorizer.init(Collections.emptyMap(), createAuthorizerContext(systemAuthentication, _entityClient)); + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn( + createUserRoleMembershipBatchResponse( + USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + + final Authentication systemAuthentication = + new Authentication(new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), ""); + + _dataHubAuthorizer = + new DataHubAuthorizer( + systemAuthentication, + _entityClient, + 10, + 10, + DataHubAuthorizer.AuthorizationMode.DEFAULT, + 1 // force pagination logic + ); + _dataHubAuthorizer.init( + Collections.emptyMap(), createAuthorizerContext(systemAuthentication, _entityClient)); _dataHubAuthorizer.invalidateCache(); Thread.sleep(500); // Sleep so the runnable can execute. (not ideal) } @@ -217,11 +285,11 @@ public void testSystemAuthentication() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID).toUrnStr(), - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID).toUrnStr(), + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -231,11 +299,9 @@ public void testAuthorizeGranted() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -246,11 +312,9 @@ public void testAuthorizeNotGranted() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); // Policy for this privilege is inactive. 
- AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_OWNERS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } @@ -263,11 +327,9 @@ public void testAllowAllMode() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); // Policy for this privilege is inactive. - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_OWNERS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -278,11 +340,9 @@ public void testInvalidateCache() throws Exception { // First make sure that the default policies are as expected. EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); @@ -291,64 +351,78 @@ public void testInvalidateCache() throws Exception { emptyResult.setNumEntities(0); emptyResult.setEntities(new SearchEntityArray()); - when(_entityClient.search(eq("dataHubPolicy"), eq(""), isNull(), any(), anyInt(), anyInt(), any(), - eq(new SearchFlags().setFulltext(true)))).thenReturn(emptyResult); - when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), eq(Collections.emptySet()), eq(null), any())).thenReturn( - Collections.emptyMap()); + when(_entityClient.search( + eq("dataHubPolicy"), + eq(""), + isNull(), + any(), + anyInt(), + anyInt(), + any(), + eq(new SearchFlags().setFulltext(true)))) + .thenReturn(emptyResult); + when(_entityClient.batchGetV2( + eq(POLICY_ENTITY_NAME), eq(Collections.emptySet()), eq(null), any())) + .thenReturn(Collections.emptyMap()); // Invalidate Cache. _dataHubAuthorizer.invalidateCache(); Thread.sleep(500); // Sleep so the runnable can execute. (not ideal) - // Now verify that invalidating the cache updates the policies by running the same authorization request. + // Now verify that invalidating the cache updates the policies by running the same authorization + // request. assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } @Test public void testAuthorizedActorsActivePolicy() throws Exception { final AuthorizedActors actors = - _dataHubAuthorizer.authorizedActors("EDIT_ENTITY_TAGS", // Should be inside the active policy. + _dataHubAuthorizer.authorizedActors( + "EDIT_ENTITY_TAGS", // Should be inside the active policy. 
Optional.of(new EntitySpec("dataset", "urn:li:dataset:1"))); assertTrue(actors.isAllUsers()); assertTrue(actors.isAllGroups()); - assertEquals(new HashSet<>(actors.getUsers()), ImmutableSet.of( - Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2"), - Urn.createFromString("urn:li:corpuser:user3"), - Urn.createFromString("urn:li:corpuser:user4") - )); - - assertEquals(new HashSet<>(actors.getGroups()), ImmutableSet.of( - Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2"), - Urn.createFromString("urn:li:corpGroup:group3"), - Urn.createFromString("urn:li:corpGroup:group4") - )); + assertEquals( + new HashSet<>(actors.getUsers()), + ImmutableSet.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2"), + Urn.createFromString("urn:li:corpuser:user3"), + Urn.createFromString("urn:li:corpuser:user4"))); + + assertEquals( + new HashSet<>(actors.getGroups()), + ImmutableSet.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2"), + Urn.createFromString("urn:li:corpGroup:group3"), + Urn.createFromString("urn:li:corpGroup:group4"))); } @Test public void testAuthorizedRoleActivePolicy() throws Exception { final AuthorizedActors actors = - _dataHubAuthorizer.authorizedActors("EDIT_USER_PROFILE", // Should be inside the active policy. + _dataHubAuthorizer.authorizedActors( + "EDIT_USER_PROFILE", // Should be inside the active policy. Optional.of(new EntitySpec("dataset", "urn:li:dataset:1"))); assertFalse(actors.isAllUsers()); assertFalse(actors.isAllGroups()); assertEquals(new HashSet<>(actors.getUsers()), ImmutableSet.of()); assertEquals(new HashSet<>(actors.getGroups()), ImmutableSet.of()); - assertEquals(new HashSet<>(actors.getRoles()), ImmutableSet.of(UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + assertEquals( + new HashSet<>(actors.getRoles()), + ImmutableSet.of(UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); } @Test public void testAuthorizationBasedOnRoleIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - USER_WITH_ADMIN_ROLE.toString(), - "EDIT_USER_PROFILE", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + USER_WITH_ADMIN_ROLE.toString(), "EDIT_USER_PROFILE", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -357,11 +431,9 @@ public void testAuthorizationBasedOnRoleIsAllowed() { public void testAuthorizationOnDomainWithPrivilegeIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_STATUS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_STATUS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -370,11 +442,9 @@ public void testAuthorizationOnDomainWithPrivilegeIsAllowed() { public void testAuthorizationOnDomainWithParentPrivilegeIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_DOCS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new 
AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_DOCS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -383,19 +453,24 @@ public void testAuthorizationOnDomainWithParentPrivilegeIsAllowed() { public void testAuthorizationOnDomainWithoutPrivilegeIsDenied() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_DOC_LINKS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_DOC_LINKS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } - private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception { + private DataHubPolicyInfo createDataHubPolicyInfo( + boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception { - List<Urn> users = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2")); - List<Urn> groups = ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), Urn.createFromString("urn:li:corpGroup:group2")); + List<Urn> users = + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")); + List<Urn> groups = + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")); final DataHubActorFilter actorFilter = new DataHubActorFilter(); actorFilter.setResourceOwners(true); @@ -407,8 +482,12 @@ private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List<String> p return createDataHubPolicyInfoFor(active, privileges, domain, actorFilter); } - private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String> privileges, - @Nullable final Urn domain, DataHubActorFilter actorFilter) throws Exception { + private DataHubPolicyInfo createDataHubPolicyInfoFor( + boolean active, + List<String> privileges, + @Nullable final Urn domain, + DataHubActorFilter actorFilter) + throws Exception { final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo(); dataHubPolicyInfo.setType(METADATA_POLICY_TYPE); dataHubPolicyInfo.setState(active ? 
ACTIVE_POLICY_STATE : INACTIVE_POLICY_STATE); @@ -424,7 +503,10 @@ private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String resourceFilter.setType("dataset"); if (domain != null) { - resourceFilter.setFilter(FilterUtils.newFilter(ImmutableMap.of(EntityFieldType.DOMAIN, Collections.singletonList(domain.toString())))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.DOMAIN, Collections.singletonList(domain.toString())))); } dataHubPolicyInfo.setResources(resourceFilter); @@ -432,31 +514,34 @@ private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String return dataHubPolicyInfo; } - private Ownership createOwnershipAspect(final List<Urn> userOwners, final List<Urn> groupOwners) throws Exception { + private Ownership createOwnershipAspect(final List<Urn> userOwners, final List<Urn> groupOwners) + throws Exception { final Ownership ownershipAspect = new Ownership(); final OwnerArray owners = new OwnerArray(); if (userOwners != null) { - userOwners.forEach(userUrn -> { - final Owner userOwner = new Owner(); - userOwner.setOwner(userUrn); - userOwner.setType(OwnershipType.DATAOWNER); - owners.add(userOwner); - } - ); + userOwners.forEach( + userUrn -> { + final Owner userOwner = new Owner(); + userOwner.setOwner(userUrn); + userOwner.setType(OwnershipType.DATAOWNER); + owners.add(userOwner); + }); } if (groupOwners != null) { - groupOwners.forEach(groupUrn -> { - final Owner groupOwner = new Owner(); - groupOwner.setOwner(groupUrn); - groupOwner.setType(OwnershipType.DATAOWNER); - owners.add(groupOwner); - }); + groupOwners.forEach( + groupUrn -> { + final Owner groupOwner = new Owner(); + groupOwner.setOwner(groupUrn); + groupOwner.setType(OwnershipType.DATAOWNER); + owners.add(groupOwner); + }); } ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -466,13 +551,15 @@ private EntityResponse createDomainsResponse(final Urn domainUrn) { EnvelopedAspectMap domainsAspectMap = new EnvelopedAspectMap(); final Domains domains = new Domains(); domains.setDomains(new UrnArray(domainUrns)); - domainsAspectMap.put(DOMAINS_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(domains.data()))); + domainsAspectMap.put( + DOMAINS_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(domains.data()))); domainsResponse.setAspects(domainsAspectMap); return domainsResponse; } - private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(@Nullable final Urn parentDomainUrn) { + private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse( + @Nullable final Urn parentDomainUrn) { final Map<Urn, EntityResponse> batchResponse = new HashMap<>(); final EntityResponse response = new EntityResponse(); EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); @@ -480,14 +567,16 @@ private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(@Nullable f if (parentDomainUrn != null) { properties.setParentDomain(parentDomainUrn); } - aspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(properties.data()))); + aspectMap.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(properties.data()))); 
response.setAspects(aspectMap); batchResponse.put(parentDomainUrn, response); return batchResponse; } - private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(final Urn userUrn, @Nullable final Urn roleUrn) { + private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse( + final Urn userUrn, @Nullable final Urn roleUrn) { final Map<Urn, EntityResponse> batchResponse = new HashMap<>(); final EntityResponse response = new EntityResponse(); EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); @@ -495,14 +584,17 @@ private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(final Urn if (roleUrn != null) { membership.setRoles(new UrnArray(roleUrn)); } - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(membership.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(membership.data()))); response.setAspects(aspectMap); batchResponse.put(userUrn, response); return batchResponse; } - private AuthorizerContext createAuthorizerContext(final Authentication systemAuthentication, final EntityClient entityClient) { - return new AuthorizerContext(Collections.emptyMap(), new DefaultEntitySpecResolver(systemAuthentication, entityClient)); + private AuthorizerContext createAuthorizerContext( + final Authentication systemAuthentication, final EntityClient entityClient) { + return new AuthorizerContext( + Collections.emptyMap(), new DefaultEntitySpecResolver(systemAuthentication, entityClient)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java index 2790c16ba75e6..08ec91d5287dc 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java @@ -1,5 +1,10 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.PoliciesConfig.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -32,12 +37,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.authorization.PoliciesConfig.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class PolicyEngineTest { private static final String AUTHORIZED_PRINCIPAL = "urn:li:corpuser:datahub"; @@ -46,7 +45,8 @@ public class PolicyEngineTest { private static final String RESOURCE_URN = "urn:li:dataset:test"; private static final String DOMAIN_URN = "urn:li:domain:domain1"; private static final String OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__technical_owner"; - private static final String OTHER_OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__data_steward"; + private static final String OTHER_OWNERSHIP_TYPE_URN = + "urn:li:ownershipType:__system__data_steward"; private EntityClient _entityClient; private PolicyEngine _policyEngine; @@ -63,10 +63,16 @@ public void setupTest() throws Exception { _policyEngine = new PolicyEngine(Mockito.mock(Authentication.class), _entityClient); authorizedUserUrn = 
Urn.createFromString(AUTHORIZED_PRINCIPAL); - resolvedAuthorizedUserSpec = buildEntityResolvers(CORP_USER_ENTITY_NAME, AUTHORIZED_PRINCIPAL, - Collections.emptySet(), Collections.emptySet(), Collections.singleton(AUTHORIZED_GROUP)); + resolvedAuthorizedUserSpec = + buildEntityResolvers( + CORP_USER_ENTITY_NAME, + AUTHORIZED_PRINCIPAL, + Collections.emptySet(), + Collections.emptySet(), + Collections.singleton(AUTHORIZED_GROUP)); unauthorizedUserUrn = Urn.createFromString(UNAUTHORIZED_PRINCIPAL); - resolvedUnauthorizedUserSpec = buildEntityResolvers(CORP_USER_ENTITY_NAME, UNAUTHORIZED_PRINCIPAL); + resolvedUnauthorizedUserSpec = + buildEntityResolvers(CORP_USER_ENTITY_NAME, UNAUTHORIZED_PRINCIPAL); resourceUrn = Urn.createFromString(RESOURCE_URN); // Init role membership mocks. @@ -74,25 +80,39 @@ public void setupTest() throws Exception { authorizedEntityResponse.setUrn(authorizedUserUrn); Map<Urn, EntityResponse> authorizedEntityResponseMap = Collections.singletonMap(authorizedUserUrn, authorizedEntityResponse); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any())).thenReturn(authorizedEntityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn(authorizedEntityResponseMap); EntityResponse unauthorizedEntityResponse = createUnauthorizedEntityResponse(); unauthorizedEntityResponse.setUrn(unauthorizedUserUrn); Map<Urn, EntityResponse> unauthorizedEntityResponseMap = Collections.singletonMap(unauthorizedUserUrn, unauthorizedEntityResponse); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(unauthorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any())).thenReturn(unauthorizedEntityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(unauthorizedUserUrn)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn(unauthorizedEntityResponseMap); // Init ownership type mocks. 
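    // Sketch of the stub built below (not part of the original change set): batchGetV2
    // for resourceUrn returns a mocked Map whose get(resourceUrn) yields an
    // EntityResponse wrapping createOwnershipAspect(true, true) -- presumably meaning
    // both the authorized user and the authorized group own RESOURCE_URN, so the
    // owner-based actor filters in the tests below can match.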
EntityResponse entityResponse = new EntityResponse(); EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(true, true).data()))); + envelopedAspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(true, true).data()))); entityResponse.setAspects(envelopedAspectMap); Map<Urn, EntityResponse> mockMap = mock(Map.class); - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(resourceUrn)), - eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), any())).thenReturn(mockMap); + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(resourceUrn)), + eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(mockMap); when(mockMap.get(eq(resourceUrn))).thenReturn(entityResponse); } @@ -121,7 +141,10 @@ public void testEvaluatePolicyInactivePolicyState() { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -150,7 +173,10 @@ public void testEvaluatePolicyPrivilegeFilterNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_OWNERS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -175,8 +201,8 @@ public void testEvaluatePlatformPolicyPrivilegeFilterMatch() throws Exception { dataHubPolicyInfo.setActors(actorFilter); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "MANAGE_POLICIES", - Optional.empty()); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, resolvedAuthorizedUserSpec, "MANAGE_POLICIES", Optional.empty()); assertTrue(result.isGranted()); // Verify no network calls @@ -211,7 +237,10 @@ public void testEvaluatePolicyActorFilterUserMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert Authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -248,7 +277,10 @@ public void testEvaluatePolicyActorFilterUserNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. 
PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, buildEntityResolvers(CORP_USER_ENTITY_NAME, "urn:li:corpuser:test"), "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + buildEntityResolvers(CORP_USER_ENTITY_NAME, "urn:li:corpuser:test"), + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -285,7 +317,10 @@ public void testEvaluatePolicyActorFilterGroupMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -321,7 +356,10 @@ public void testEvaluatePolicyActorFilterGroupNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -358,14 +396,18 @@ public void testEvaluatePolicyActorFilterRoleMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult authorizedResult = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(authorizedResult.isGranted()); // Verify we are only calling for roles during these requests. - verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), - eq(Collections.singleton(authorizedUserUrn)), any(), any()); + verify(_entityClient, times(1)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), any(), any()); } @Test @@ -397,14 +439,21 @@ public void testEvaluatePolicyActorFilterNoRoleMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult unauthorizedResult = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(unauthorizedResult.isGranted()); // Verify we are only calling for roles during these requests. 
- verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), - eq(Collections.singleton(unauthorizedUserUrn)), any(), any()); + verify(_entityClient, times(1)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(unauthorizedUserUrn)), + any(), + any()); } @Test @@ -432,13 +481,19 @@ public void testEvaluatePolicyActorFilterAllUsersMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result2.isGranted()); @@ -471,13 +526,19 @@ public void testEvaluatePolicyActorFilterAllGroupsMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -509,17 +570,30 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersMatch() throws Except final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a user owner. 
PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -542,7 +616,8 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeMatch() throws Ex actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setResourceOwnersTypes(new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); + actorFilter.setResourceOwnersTypes( + new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -552,18 +627,32 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeMatch() throws Ex final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspectWithTypeUrn(OWNERSHIP_TYPE_URN).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(createOwnershipAspectWithTypeUrn(OWNERSHIP_TYPE_URN).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), - Collections.emptySet()); - + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), + Collections.emptySet()); + PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec)); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Verify no network calls @@ -585,7 +674,8 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeNoMatch() throws actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setResourceOwnersTypes(new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); + actorFilter.setResourceOwnersTypes( + new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -595,17 +685,33 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeNoMatch() throws final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspectWithTypeUrn(OTHER_OWNERSHIP_TYPE_URN).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(createOwnershipAspectWithTypeUrn(OTHER_OWNERSHIP_TYPE_URN).data()))); 
entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), Collections.emptySet()); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), + Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec)); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertFalse(result1.isGranted()); // Verify no network calls @@ -636,17 +742,30 @@ public void testEvaluatePolicyActorFilterGroupResourceOwnersMatch() throws Excep final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(false, true).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(false, true).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_GROUP), Collections.emptySet(), + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_GROUP), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a user owner. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -678,7 +797,10 @@ public void testEvaluatePolicyActorFilterGroupResourceOwnersNoMatch() throws Exc ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -708,9 +830,13 @@ public void testEvaluatePolicyResourceFilterAllResourcesMatch() throws Exception dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A dataset Authorized principal _does not own_. 
+ buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A dataset Authorized principal _does not own_. PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -739,9 +865,13 @@ public void testEvaluatePolicyResourceFilterAllResourcesNoMatch() throws Excepti resourceFilter.setType("dataset"); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("chart", RESOURCE_URN); // Notice: Not a dataset. + ResolvedEntitySpec resourceSpec = + buildEntityResolvers("chart", RESOURCE_URN); // Notice: Not a dataset. PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -776,7 +906,10 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatchLegacy() throws ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -801,14 +934,21 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatch() throws Excep dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -833,15 +973,23 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatch() throws Exc dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy. 
+ buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A resource not covered by the policy. PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -866,15 +1014,27 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatchDomain() throws dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), Collections.singleton(DOMAIN_URN), Collections.emptySet()); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -899,15 +1059,27 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatchDomain() thro dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton("urn:li:domain:domain2"), Collections.emptySet()); // Domain doesn't match + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet()); // Domain doesn't match PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -933,9 +1105,13 @@ public void testGetGrantedPrivileges() throws Exception { dataHubPolicyInfo1.setActors(actorFilter1); final DataHubResourceFilter resourceFilter1 = new DataHubResourceFilter(); - resourceFilter1.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter1.setFilter( + FilterUtils.newFilter( + 
ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo1.setResources(resourceFilter1); // Policy 2, match dataset type and resource @@ -954,9 +1130,13 @@ public void testGetGrantedPrivileges() throws Exception { dataHubPolicyInfo2.setActors(actorFilter2); final DataHubResourceFilter resourceFilter2 = new DataHubResourceFilter(); - resourceFilter2.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter2.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo2.setResources(resourceFilter2); // Policy 3, match dataset type and owner (legacy resource filter) @@ -982,43 +1162,80 @@ public void testGetGrantedPrivileges() throws Exception { final List<DataHubPolicyInfo> policies = ImmutableList.of(dataHubPolicyInfo1, dataHubPolicyInfo2, dataHubPolicyInfo3); - assertEquals(_policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.empty()), + assertEquals( + _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.empty()), Collections.emptyList()); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Everything matches + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Everything matches assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2")); - resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton("urn:li:domain:domain2"), Collections.emptySet()); // Domain doesn't match + resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet()); // Domain doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_2_1", "PRIVILEGE_2_2")); - resourceSpec = buildEntityResolvers("dataset", "urn:li:dataset:random", Collections.emptySet(), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Resource doesn't match + resourceSpec = + buildEntityResolvers( + "dataset", + "urn:li:dataset:random", + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Resource doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1")); final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new 
EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); - resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Is owner + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); + resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.singleton(AUTHORIZED_PRINCIPAL), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Is owner assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2", "PRIVILEGE_3")); - resourceSpec = buildEntityResolvers("chart", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Resource type doesn't match + resourceSpec = + buildEntityResolvers( + "chart", + RESOURCE_URN, + Collections.singleton(AUTHORIZED_PRINCIPAL), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Resource type doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), Collections.emptyList()); } @@ -1037,10 +1254,16 @@ public void testGetMatchingActorsResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(true); actorFilter.setAllGroups(true); - actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2")))); - actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2")))); + actorFilter.setUsers( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")))); + actorFilter.setGroups( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")))); actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:role:Admin")))); dataHubPolicyInfo.setActors(actorFilter); @@ -1053,27 +1276,43 @@ public void testGetMatchingActorsResourceMatch() throws Exception { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP), - Collections.emptySet(), Collections.emptySet()); - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP), + Collections.emptySet(), + Collections.emptySet()); + 
PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertTrue(actors.getAllUsers()); assertTrue(actors.getAllGroups()); - assertEquals(actors.getUsers(), - ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2"), + assertEquals( + actors.getUsers(), + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2"), Urn.createFromString(AUTHORIZED_PRINCIPAL) // Resource Owner - )); + )); - assertEquals(actors.getGroups(), ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2"), Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner - )); + assertEquals( + actors.getGroups(), + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2"), + Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner + )); assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:role:Admin"))); // Verify aspect client called, entity client not called. - verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(null), any()); + verify(_entityClient, times(0)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(null), + any()); } @Test @@ -1091,10 +1330,16 @@ public void testGetMatchingActorsNoResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(true); actorFilter.setAllGroups(true); - actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2")))); - actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2")))); + actorFilter.setUsers( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")))); + actorFilter.setGroups( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -1106,14 +1351,16 @@ public void testGetMatchingActorsNoResourceMatch() throws Exception { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy. - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A resource not covered by the policy. 
+ PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertFalse(actors.getAllUsers()); assertFalse(actors.getAllGroups()); assertEquals(actors.getUsers(), Collections.emptyList()); assertEquals(actors.getGroups(), Collections.emptyList()); - //assertEquals(actors.getRoles(), Collections.emptyList()); + // assertEquals(actors.getRoles(), Collections.emptyList()); // Verify no network calls verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any()); @@ -1133,7 +1380,8 @@ public void testGetMatchingActorsByRoleResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor")))); + actorFilter.setRoles( + new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor")))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -1144,24 +1392,36 @@ public void testGetMatchingActorsByRoleResourceMatch() throws Exception { resourceFilter.setResources(resourceUrns); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(), - Collections.emptySet(), Collections.emptySet()); + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(), + Collections.emptySet(), + Collections.emptySet()); - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertFalse(actors.getAllUsers()); assertFalse(actors.getAllGroups()); assertEquals(actors.getUsers(), ImmutableList.of()); assertEquals(actors.getGroups(), ImmutableList.of()); - assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor"))); + assertEquals( + actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor"))); // Verify aspect client called, entity client not called. 
- verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(null), any()); + verify(_entityClient, times(0)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(null), + any()); } - private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) throws Exception { + private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) + throws Exception { final Ownership ownershipAspect = new Ownership(); final OwnerArray owners = new OwnerArray(); @@ -1180,7 +1440,8 @@ private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolea } ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -1194,7 +1455,8 @@ private Ownership createOwnershipAspectWithTypeUrn(final String typeUrn) throws owners.add(userOwner); ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -1206,7 +1468,9 @@ private EntityResponse createAuthorizedEntityResponse() throws URISyntaxExceptio final UrnArray roles = new UrnArray(); roles.add(Urn.createFromString("urn:li:dataHubRole:admin")); rolesAspect.setRoles(roles); - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); entityResponse.setAspects(aspectMap); return entityResponse; @@ -1220,24 +1484,41 @@ private EntityResponse createUnauthorizedEntityResponse() throws URISyntaxExcept final UrnArray roles = new UrnArray(); roles.add(Urn.createFromString("urn:li:dataHubRole:reader")); rolesAspect.setRoles(roles); - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); entityResponse.setAspects(aspectMap); return entityResponse; } public static ResolvedEntitySpec buildEntityResolvers(String entityType, String entityUrn) { - return buildEntityResolvers(entityType, entityUrn, Collections.emptySet(), Collections.emptySet(), Collections.emptySet()); + return buildEntityResolvers( + entityType, + entityUrn, + Collections.emptySet(), + Collections.emptySet(), + Collections.emptySet()); } - public static ResolvedEntitySpec buildEntityResolvers(String entityType, String entityUrn, Set<String> owners, - Set<String> domains, Set<String> groups) { - return new ResolvedEntitySpec(new EntitySpec(entityType, entityUrn), - ImmutableMap.of(EntityFieldType.TYPE, - FieldResolver.getResolverFromValues(Collections.singleton(entityType)), EntityFieldType.URN, - FieldResolver.getResolverFromValues(Collections.singleton(entityUrn)), EntityFieldType.OWNER, - FieldResolver.getResolverFromValues(owners), EntityFieldType.DOMAIN, - FieldResolver.getResolverFromValues(domains), EntityFieldType.GROUP_MEMBERSHIP, + public static ResolvedEntitySpec buildEntityResolvers( + String entityType, + String 
entityUrn, + Set<String> owners, + Set<String> domains, + Set<String> groups) { + return new ResolvedEntitySpec( + new EntitySpec(entityType, entityUrn), + ImmutableMap.of( + EntityFieldType.TYPE, + FieldResolver.getResolverFromValues(Collections.singleton(entityType)), + EntityFieldType.URN, + FieldResolver.getResolverFromValues(Collections.singleton(entityUrn)), + EntityFieldType.OWNER, + FieldResolver.getResolverFromValues(owners), + EntityFieldType.DOMAIN, + FieldResolver.getResolverFromValues(domains), + EntityFieldType.GROUP_MEMBERSHIP, FieldResolver.getResolverFromValues(groups))); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java index 52a8d2454ffba..ca9ee92b77a5a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java @@ -1,5 +1,7 @@ package com.datahub.authorization; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -10,9 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class RoleServiceTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; @@ -35,21 +34,23 @@ public void setupTest() throws Exception { @Test public void testBatchAssignRoleNoActorExists() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - false); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(false); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING), - roleUrn, - SYSTEM_AUTHENTICATION); + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); } @Test public void testBatchAssignRoleSomeActorExists() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); @@ -57,12 +58,15 @@ public void testBatchAssignRoleSomeActorExists() throws Exception { @Test public void testBatchAssignRoleAllActorsExist() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); - when(_entityClient.exists(eq(Urn.createFromString(SECOND_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); + when(_entityClient.exists( + 
eq(Urn.createFromString(SECOND_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, times(2)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); @@ -70,10 +74,12 @@ public void testBatchAssignRoleAllActorsExist() throws Exception { @Test public void testAssignNullRoleToActorAllActorsExist() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING), null, SYSTEM_AUTHENTICATION); + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING), null, SYSTEM_AUTHENTICATION); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java index 5c7d87f1c05a9..d5d5b0c4e6c71 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java @@ -40,10 +40,8 @@ public class DataPlatformInstanceFieldResolverProviderTest { "urn:li:dataset:(urn:li:dataPlatform:s3,test-platform-instance.testDataset,PROD)"; private static final EntitySpec RESOURCE_SPEC = new EntitySpec(DATASET_ENTITY_NAME, RESOURCE_URN); - @Mock - private EntityClient entityClientMock; - @Mock - private Authentication systemAuthenticationMock; + @Mock private EntityClient entityClientMock; + @Mock private Authentication systemAuthenticationMock; private DataPlatformInstanceFieldResolverProvider dataPlatformInstanceFieldResolverProvider; @@ -56,37 +54,42 @@ public void setup() { @Test public void shouldReturnDataPlatformInstanceType() { - assertEquals(EntityFieldType.DATA_PLATFORM_INSTANCE, dataPlatformInstanceFieldResolverProvider.getFieldTypes().get(0)); + assertEquals( + EntityFieldType.DATA_PLATFORM_INSTANCE, + dataPlatformInstanceFieldResolverProvider.getFieldTypes().get(0)); } @Test public void shouldReturnFieldValueWithResourceSpecIfTypeIsDataPlatformInstance() { - var resourceSpec = new EntitySpec(DATA_PLATFORM_INSTANCE_ENTITY_NAME, DATA_PLATFORM_INSTANCE_URN); + var resourceSpec = + new EntitySpec(DATA_PLATFORM_INSTANCE_ENTITY_NAME, DATA_PLATFORM_INSTANCE_URN); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(resourceSpec); - assertEquals(Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); + assertEquals( + Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); verifyZeroInteractions(entityClientMock); } @Test - public void shouldReturnEmptyFieldValueWhenResponseIsNull() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenResponseIsNull() + 
throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(null); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(null); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test @@ -95,99 +98,104 @@ public void shouldReturnEmptyFieldValueWhenResourceHasNoDataPlatformInstance() var entityResponseMock = mock(EntityResponse.class); when(entityResponseMock.getAspects()).thenReturn(new EnvelopedAspectMap()); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test - public void shouldReturnEmptyFieldValueWhenThereIsAnException() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenThereIsAnException() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenThrow(new RemoteInvocationException()); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenThrow(new RemoteInvocationException()); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnEmptyFieldValueWhenDataPlatformInstanceHasNoInstance() throws RemoteInvocationException, URISyntaxException { - var dataPlatform = new DataPlatformInstance() - 
.setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); + var dataPlatform = + new DataPlatformInstance().setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, + envelopedAspectMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(dataPlatform.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithDataPlatformInstanceOfTheResource() throws RemoteInvocationException, URISyntaxException { - var dataPlatformInstance = new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")) - .setInstance(Urn.createFromString(DATA_PLATFORM_INSTANCE_URN)); + var dataPlatformInstance = + new DataPlatformInstance() + .setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")) + .setInstance(Urn.createFromString(DATA_PLATFORM_INSTANCE_URN)); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, + envelopedAspectMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(dataPlatformInstance.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); - assertEquals(Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + assertEquals( + Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } } diff --git 
a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java index af547f14cd3fc..542f6c9f8bc79 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java @@ -1,5 +1,12 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; @@ -15,32 +22,23 @@ import com.linkedin.identity.GroupMembership; import com.linkedin.identity.NativeGroupMembership; import com.linkedin.r2.RemoteInvocationException; +import java.net.URISyntaxException; +import java.util.Set; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.net.URISyntaxException; -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - public class GroupMembershipFieldResolverProviderTest { private static final String CORPGROUP_URN = "urn:li:corpGroup:groupname"; private static final String NATIVE_CORPGROUP_URN = "urn:li:corpGroup:nativegroupname"; - private static final String RESOURCE_URN = "urn:li:dataset:(urn:li:dataPlatform:testPlatform,testDataset,PROD)"; + private static final String RESOURCE_URN = + "urn:li:dataset:(urn:li:dataPlatform:testPlatform,testDataset,PROD)"; private static final EntitySpec RESOURCE_SPEC = new EntitySpec(DATASET_ENTITY_NAME, RESOURCE_URN); - @Mock - private EntityClient entityClientMock; - @Mock - private Authentication systemAuthenticationMock; + @Mock private EntityClient entityClientMock; + @Mock private Authentication systemAuthenticationMock; private GroupMembershipFieldResolverProvider groupMembershipFieldResolverProvider; @@ -53,27 +51,30 @@ public void setup() { @Test public void shouldReturnGroupsMembershipType() { - assertEquals(EntityFieldType.GROUP_MEMBERSHIP, groupMembershipFieldResolverProvider.getFieldTypes().get(0)); + assertEquals( + EntityFieldType.GROUP_MEMBERSHIP, + groupMembershipFieldResolverProvider.getFieldTypes().get(0)); } @Test - public void shouldReturnEmptyFieldValueWhenResponseIsNull() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenResponseIsNull() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(null); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, 
NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(null); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test @@ -82,131 +83,144 @@ public void shouldReturnEmptyFieldValueWhenResourceDoesNotBelongToAnyGroup() var entityResponseMock = mock(EntityResponse.class); when(entityResponseMock.getAspects()).thenReturn(new EnvelopedAspectMap()); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test - public void shouldReturnEmptyFieldValueWhenThereIsAnException() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenThereIsAnException() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenThrow(new RemoteInvocationException()); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenThrow(new RemoteInvocationException()); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithOnlyGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var groupMembership = new GroupMembership().setGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); + var groupMembership = + new GroupMembership() + .setGroups(new 
UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertEquals(Set.of(CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithOnlyNativeGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var nativeGroupMembership = new NativeGroupMembership().setNativeGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); + var nativeGroupMembership = + new NativeGroupMembership() + .setNativeGroups( + new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertEquals(Set.of(NATIVE_CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithGroupsAndNativeGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var groupMembership = new GroupMembership().setGroups( - new 
UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); - var nativeGroupMembership = new NativeGroupMembership().setNativeGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); + var groupMembership = + new GroupMembership() + .setGroups(new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); + var nativeGroupMembership = + new NativeGroupMembership() + .setNativeGroups( + new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))); - envelopedAspectMap.put(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); - assertEquals(Set.of(CORPGROUP_URN, NATIVE_CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + assertEquals( + Set.of(CORPGROUP_URN, NATIVE_CORPGROUP_URN), + result.getFieldValuesFuture().join().getValues()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java index 2e974d309f127..a0bbe69691db4 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.telemetry; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -19,16 +23,12 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class TrackingServiceTest { private static final String APP_VERSION_FIELD = "appVersion"; private static final String APP_VERSION = "1.0.0"; private static final String CLIENT_ID = "testClientId"; - private static final TelemetryClientId TELEMETRY_CLIENT_ID = new 
TelemetryClientId().setClientId(CLIENT_ID); + private static final TelemetryClientId TELEMETRY_CLIENT_ID = + new TelemetryClientId().setClientId(CLIENT_ID); private static final String NOT_ALLOWED_FIELD = "browserId"; private static final String NOT_ALLOWED_FIELD_VALUE = "testBrowserId"; private static final String EVENT_TYPE_FIELD = "type"; @@ -38,10 +38,17 @@ public class TrackingServiceTest { private static final String ACTOR_URN_STRING = "urn:li:corpuser:user"; private static final String HASHED_ACTOR_URN_STRING = "hashedActorUrn"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private Urn _clientIdUrn; private JSONObject _mixpanelMessage; private MixpanelAPI _mixpanelAPI; @@ -62,19 +69,28 @@ public void setupTest() { GitVersion gitVersion = new GitVersion(APP_VERSION, "", Optional.empty()); _trackingService = - new TrackingService(_mixpanelAPI, _mixpanelMessageBuilder, _secretService, _entityService, gitVersion); + new TrackingService( + _mixpanelAPI, _mixpanelMessageBuilder, _secretService, _entityService, gitVersion); } @Test public void testEmitAnalyticsEvent() throws IOException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); when(_entityService.exists(_clientIdUrn)).thenReturn(true); - when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))).thenReturn(TELEMETRY_CLIENT_ID); - when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())).thenReturn(_mixpanelMessage); + when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) + .thenReturn(TELEMETRY_CLIENT_ID); + when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())) + .thenReturn(_mixpanelMessage); final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, ACTOR_URN_FIELD, - ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, + EVENT_TYPE, + ACTOR_URN_FIELD, + ACTOR_URN_STRING, + NOT_ALLOWED_FIELD, + NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); _trackingService.emitAnalyticsEvent(event); @@ -84,7 +100,8 @@ public void testEmitAnalyticsEvent() throws IOException { @Test public void testGetClientIdAlreadyExists() { when(_entityService.exists(_clientIdUrn)).thenReturn(true); - when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))).thenReturn(TELEMETRY_CLIENT_ID); + when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) + .thenReturn(TELEMETRY_CLIENT_ID); assertEquals(CLIENT_ID, _trackingService.getClientId()); } @@ -94,15 +111,17 @@ public void testGetClientIdDoesNotExist() { when(_entityService.exists(_clientIdUrn)).thenReturn(false); assertNotNull(_trackingService.getClientId()); - verify(_entityService, times(1)).ingestAspectIfNotPresent(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT), - any(TelemetryClientId.class), any(), eq(null)); + 
verify(_entityService, times(1)) + .ingestAspectIfNotPresent( + eq(_clientIdUrn), eq(CLIENT_ID_ASPECT), any(TelemetryClientId.class), any(), eq(null)); } @Test public void testSanitizeEventNoEventType() throws JsonProcessingException, JSONException { final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\"}", ACTOR_URN_FIELD, ACTOR_URN_STRING, NOT_ALLOWED_FIELD, - NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\"}", + ACTOR_URN_FIELD, ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); @@ -116,8 +135,9 @@ public void testSanitizeEventNoEventType() throws JsonProcessingException, JSONE @Test public void testSanitizeEventNoActorUrn() throws JsonProcessingException, JSONException { final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, NOT_ALLOWED_FIELD, - NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, EVENT_TYPE, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); @@ -133,8 +153,14 @@ public void testSanitizeEvent() throws JsonProcessingException, JSONException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, ACTOR_URN_FIELD, - ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, + EVENT_TYPE, + ACTOR_URN_FIELD, + ACTOR_URN_STRING, + NOT_ALLOWED_FIELD, + NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java index 34354a47b7f04..4e9fe3e335dc3 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java @@ -1,13 +1,15 @@ package com.datahub.auth.authentication; -import com.datahub.authentication.invite.InviteTokenService; -import com.datahub.authentication.token.StatelessTokenService; -import com.datahub.authentication.token.TokenType; -import com.datahub.authentication.user.NativeUserService; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.token.StatelessTokenService; +import com.datahub.authentication.token.TokenType; +import com.datahub.authentication.user.NativeUserService; import com.datahub.telemetry.TrackingService; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -29,9 +31,6 @@ import org.springframework.web.bind.annotation.RestController; import 
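The three sanitizeEvent tests above fix the sanitizer's behavior; summarized as a worked example, with concrete JSON values assumed from the test constants rather than asserted verbatim in this hunk:

// in:  {"type": "<event type>", "actorUrn": "urn:li:corpuser:user", "browserId": "testBrowserId"}
// out: {"type": "<event type>", "actorUrn": "hashedActorUrn"}
// The event type is kept, the actor URN is replaced by the value returned from
// SecretService#hashString (stubbed in these tests), and fields outside the
// allow-list (browserId here) are dropped.
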
org.springframework.web.client.HttpClientErrorException; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RestController public class AuthServiceController { @@ -46,55 +45,49 @@ public class AuthServiceController { private static final String INVITE_TOKEN_FIELD_NAME = "inviteToken"; private static final String RESET_TOKEN_FIELD_NAME = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD_NAME = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD_NAME = "doesPasswordMatch"; - @Inject - StatelessTokenService _statelessTokenService; + @Inject StatelessTokenService _statelessTokenService; - @Inject - Authentication _systemAuthentication; + @Inject Authentication _systemAuthentication; @Inject @Qualifier("configurationProvider") ConfigurationProvider _configProvider; - @Inject - NativeUserService _nativeUserService; + @Inject NativeUserService _nativeUserService; - @Inject - InviteTokenService _inviteTokenService; + @Inject InviteTokenService _inviteTokenService; - @Inject - @Nullable - TrackingService _trackingService; + @Inject @Nullable TrackingService _trackingService; /** - * Generates a JWT access token for as user UI session, provided a unique "user id" to generate the token for inside a JSON - * POST body. + * Generates a JWT access token for as user UI session, provided a unique "user id" to generate + * the token for inside a JSON POST body. * - * Example Request: + * <p>Example Request: * - * POST /generateSessionTokenForUser -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "userId": "datahub" - * } + * <p>POST /generateSessionTokenForUser -H "Authorization: Basic + * <system-client-id>:<system-client-secret>" { "userId": "datahub" } * - * Example Response: + * <p>Example Response: * - * { - * "accessToken": "<the access token>" - * } + * <p>{ "accessToken": "<the access token>" } */ @PostMapping(value = "/generateSessionTokenForUser", produces = "application/json;charset=utf-8") - CompletableFuture<ResponseEntity<String>> generateSessionTokenForUser(final HttpEntity<String> httpEntity) { + CompletableFuture<ResponseEntity<String>> generateSessionTokenForUser( + final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson = null; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to generate session token %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to generate session token %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -110,46 +103,45 @@ CompletableFuture<ResponseEntity<String>> generateSessionTokenForUser(final Http log.debug(String.format("Attempting to generate session token for user %s", userId.asText())); final String actorId = AuthenticationContext.getAuthentication().getActor().getId(); - return CompletableFuture.supplyAsync(() -> { - // 1. Verify that only those authorized to generate a token (datahub system) are able to. - if (isAuthorizedToGenerateSessionToken(actorId)) { - try { - // 2. 
Generate a new DataHub JWT - final String token = _statelessTokenService.generateAccessToken( - TokenType.SESSION, - new Actor(ActorType.USER, userId.asText()), - _configProvider.getAuthentication().getSessionTokenDurationMs()); - return new ResponseEntity<>(buildTokenResponse(token), HttpStatus.OK); - } catch (Exception e) { - log.error("Failed to generate session token for user", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - throw HttpClientErrorException.create(HttpStatus.UNAUTHORIZED, "Unauthorized to perform this action.", new HttpHeaders(), null, null); - }); + return CompletableFuture.supplyAsync( + () -> { + // 1. Verify that only those authorized to generate a token (datahub system) are able to. + if (isAuthorizedToGenerateSessionToken(actorId)) { + try { + // 2. Generate a new DataHub JWT + final String token = + _statelessTokenService.generateAccessToken( + TokenType.SESSION, + new Actor(ActorType.USER, userId.asText()), + _configProvider.getAuthentication().getSessionTokenDurationMs()); + return new ResponseEntity<>(buildTokenResponse(token), HttpStatus.OK); + } catch (Exception e) { + log.error("Failed to generate session token for user", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + } + throw HttpClientErrorException.create( + HttpStatus.UNAUTHORIZED, + "Unauthorized to perform this action.", + new HttpHeaders(), + null, + null); + }); } /** - * Creates a native DataHub user using the provided full name, email and password. The provided invite token must - * be current otherwise a new user will not be created. + * Creates a native DataHub user using the provided full name, email and password. The provided + * invite token must be current otherwise a new user will not be created. 
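For reference, a hypothetical client for the session-token endpoint documented above; the host, port, and basic-auth credentials are placeholders, and the request and response bodies follow the javadoc. A sketch, not an official client:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// POST the user id with system-client basic auth; on success the body
// carries {"accessToken": "<the access token>"}.
HttpRequest request =
    HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/generateSessionTokenForUser"))
        .header("Authorization", "Basic <system-client-id>:<system-client-secret>")
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString("{\"userId\": \"datahub\"}"))
        .build();
HttpResponse<String> response =
    HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());

The /signUp, /resetNativeUserCredentials, and /verifyNativeUserCredentials endpoints that follow take the same POST-a-JSON-body shape, differing only in the fields they expect.
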
* - * Example Request: + * <p>Example Request: * - * POST /signUp -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "fullName": "Full Name" - * "userUrn": "urn:li:corpuser:test" - * "email": "email@test.com" - * "title": "Data Scientist" - * "password": "password123" - * "inviteToken": "abcd" - * } + * <p>POST /signUp -H "Authorization: Basic <system-client-id>:<system-client-secret>" { + * "fullName": "Full Name" "userUrn": "urn:li:corpuser:test" "email": "email@test.com" "title": + * "Data Scientist" "password": "password123" "inviteToken": "abcd" } * - * Example Response: + * <p>Example Response: * - * { - * "isNativeUserCreated": true - * } + * <p>{ "isNativeUserCreated": true } */ @PostMapping(value = "/signUp", produces = "application/json;charset=utf-8") CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEntity) { @@ -159,7 +151,8 @@ CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEn try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to create native user %s", jsonStr)); + log.error( + String.format("Failed to parse json while attempting to create native user %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -174,15 +167,22 @@ CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEn JsonNode title = bodyJson.get(TITLE_FIELD_NAME); JsonNode password = bodyJson.get(PASSWORD_FIELD_NAME); JsonNode inviteToken = bodyJson.get(INVITE_TOKEN_FIELD_NAME); - if (fullName == null || userUrn == null || email == null || title == null || password == null + if (fullName == null + || userUrn == null + || email == null + || title == null + || password == null || inviteToken == null) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } String userUrnString = userUrn.asText(); - String systemClientUser = new CorpuserUrn(_configProvider.getAuthentication().getSystemClientId()).toString(); + String systemClientUser = + new CorpuserUrn(_configProvider.getAuthentication().getSystemClientId()).toString(); - if (userUrnString.equals(systemClientUser) || userUrnString.equals(DATAHUB_ACTOR) || userUrnString.equals(UNKNOWN_ACTOR)) { + if (userUrnString.equals(systemClientUser) + || userUrnString.equals(DATAHUB_ACTOR) + || userUrnString.equals(UNKNOWN_ACTOR)) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } String fullNameString = fullName.asText(); @@ -192,53 +192,52 @@ CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEn String inviteTokenString = inviteToken.asText(); Authentication auth = AuthenticationContext.getAuthentication(); log.debug(String.format("Attempting to create native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenString); - if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, auth)) { - log.error(String.format("Invalid invite token %s", inviteTokenString)); - return new ResponseEntity<>(HttpStatus.BAD_REQUEST); - } + return CompletableFuture.supplyAsync( + () -> { + try { + Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenString); + if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, auth)) { + log.error(String.format("Invalid invite token %s", 
inviteTokenString)); + return new ResponseEntity<>(HttpStatus.BAD_REQUEST); + } - _nativeUserService.createNativeUser(userUrnString, fullNameString, emailString, titleString, passwordString, - auth); - String response = buildSignUpResponse(); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to create credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + _nativeUserService.createNativeUser( + userUrnString, fullNameString, emailString, titleString, passwordString, auth); + String response = buildSignUpResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to create credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } /** - * Resets the credentials for a native DataHub user using the provided email and new password. The provided reset - * token must be current otherwise the credentials will not be updated + * Resets the credentials for a native DataHub user using the provided email and new password. The + * provided reset token must be current otherwise the credentials will not be updated * - * Example Request: + * <p>Example Request: * - * POST /resetNativeUserCredentials -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "userUrn": "urn:li:corpuser:test" - * "password": "password123" - * "resetToken": "abcd" - * } + * <p>POST /resetNativeUserCredentials -H "Authorization: Basic + * <system-client-id>:<system-client-secret>" { "userUrn": "urn:li:corpuser:test" "password": + * "password123" "resetToken": "abcd" } * - * Example Response: + * <p>Example Response: * - * { - * "areNativeUserCredentialsReset": true - * } + * <p>{ "areNativeUserCredentialsReset": true } */ @PostMapping(value = "/resetNativeUserCredentials", produces = "application/json;charset=utf-8") - CompletableFuture<ResponseEntity<String>> resetNativeUserCredentials(final HttpEntity<String> httpEntity) { + CompletableFuture<ResponseEntity<String>> resetNativeUserCredentials( + final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to create native user %s", jsonStr)); + log.error( + String.format("Failed to parse json while attempting to create native user %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -259,45 +258,46 @@ CompletableFuture<ResponseEntity<String>> resetNativeUserCredentials(final HttpE String resetTokenString = resetToken.asText(); Authentication auth = AuthenticationContext.getAuthentication(); log.debug(String.format("Attempting to reset credentials for native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - _nativeUserService.resetCorpUserCredentials(userUrnString, passwordString, resetTokenString, - auth); - String response = buildResetNativeUserCredentialsResponse(); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to reset credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return 
CompletableFuture.supplyAsync( + () -> { + try { + _nativeUserService.resetCorpUserCredentials( + userUrnString, passwordString, resetTokenString, auth); + String response = buildResetNativeUserCredentialsResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to reset credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } /** * Verifies the credentials for a native DataHub user. * - * Example Request: + * <p>Example Request: * - * POST /verifyNativeUserCredentials -H "Authorization: Basic <system-client-id>:<system-client-secret>" - * { - * "userUrn": "urn:li:corpuser:test" - * "password": "password123" - * } + * <p>POST /verifyNativeUserCredentials -H "Authorization: Basic + * <system-client-id>:<system-client-secret>" { "userUrn": "urn:li:corpuser:test" "password": + * "password123" } * - * Example Response: + * <p>Example Response: * - * { - * "passwordMatches": true - * } + * <p>{ "passwordMatches": true } */ @PostMapping(value = "/verifyNativeUserCredentials", produces = "application/json;charset=utf-8") - CompletableFuture<ResponseEntity<String>> verifyNativeUserCredentials(final HttpEntity<String> httpEntity) { + CompletableFuture<ResponseEntity<String>> verifyNativeUserCredentials( + final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to verify native user password %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to verify native user password %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -315,21 +315,22 @@ CompletableFuture<ResponseEntity<String>> verifyNativeUserCredentials(final Http String userUrnString = userUrn.asText(); String passwordString = password.asText(); log.debug(String.format("Attempting to verify credentials for native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - boolean doesPasswordMatch = _nativeUserService.doesPasswordMatch(userUrnString, passwordString); - String response = buildVerifyNativeUserPasswordResponse(doesPasswordMatch); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to verify credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + boolean doesPasswordMatch = + _nativeUserService.doesPasswordMatch(userUrnString, passwordString); + String response = buildVerifyNativeUserPasswordResponse(doesPasswordMatch); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to verify credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } - /** - * Tracking endpoint - */ + /** Tracking endpoint */ @PostMapping(value = "/track", produces = "application/json;charset=utf-8") CompletableFuture<ResponseEntity<String>> track(final HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); @@ -338,23 +339,26 @@ CompletableFuture<ResponseEntity<String>> track(final HttpEntity<String> 
httpEnt try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to track analytics event %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to track analytics event %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } - return CompletableFuture.supplyAsync(() -> { - try { - if (_trackingService != null) { - _trackingService.emitAnalyticsEvent(bodyJson); - } - return new ResponseEntity<>(HttpStatus.OK); - } catch (Exception e) { - log.error("Failed to track event", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (_trackingService != null) { + _trackingService.emitAnalyticsEvent(bodyJson); + } + return new ResponseEntity<>(HttpStatus.OK); + } catch (Exception e) { + log.error("Failed to track event", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } // Currently, only internal system is authorized to generate a token on behalf of a user! diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java index cf882f6ce6813..9f1aefd4cf17a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java @@ -2,12 +2,10 @@ import lombok.Data; - @Data public class AssetsConfiguration { - /** - * The url of the logo to render in the DataHub Application. - */ + /** The url of the logo to render in the DataHub Application. */ public String logoUrl; + public String faviconUrl; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java index 6a5c13970517a..eacbe7816f75c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data public class AuthPluginConfiguration { - /** - * Plugin base directory path, default to /etc/datahub/plugins/auth - */ + /** Plugin base directory path, default to /etc/datahub/plugins/auth */ String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java index 2374686b76d01..72dfbf84a4a00 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java @@ -1,15 +1,15 @@ package com.linkedin.metadata.config; import lombok.Data; -/** - * POJO representing the "datahub" configuration block in application.yml. - */ + +/** POJO representing the "datahub" configuration block in application.yml. 
*/ @Data public class DataHubConfiguration { /** - * Indicates the type of server that has been deployed: quickstart, prod, or a custom configuration + * Indicates the type of server that has been deployed: quickstart, prod, or a custom + * configuration */ public String serverType; private PluginConfiguration plugin; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java index 7c4394d07bf9c..8ebea29a32659 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java @@ -2,11 +2,11 @@ import lombok.Data; - @Data public class EntityProfileConfig { /** - * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not present. + * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not + * present. */ public String domainDefaultTab; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java index 4b00346a469c3..4fcbca3527d2a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java @@ -2,8 +2,7 @@ import lombok.Data; - @Data public class EntityRegistryPluginConfiguration { String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java index 5b10b59ff0c20..2f3c3dc3bd546 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java @@ -2,18 +2,12 @@ import lombok.Data; -/** - * POJO representing the "ingestion" configuration block in application.yml. - */ +/** POJO representing the "ingestion" configuration block in application.yml. 
*/ @Data public class IngestionConfiguration { - /** - * Whether managed ingestion is enabled - */ + /** Whether managed ingestion is enabled */ public boolean enabled; - /** - * The default CLI version to use in managed ingestion - */ + /** The default CLI version to use in managed ingestion */ public String defaultCliVersion; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java index 0645c1d7ea96c..5f34a6a5d4f05 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java @@ -2,28 +2,24 @@ import lombok.Data; - @Data public class PluginConfiguration { /** * Plugin security mode, either RESTRICTED or LENIENT * - * Note: Ideally the pluginSecurityMode should be of type com.datahub.plugin.common.SecurityMode from metadata-service/plugin, - * However avoiding to include metadata-service/plugin as dependency in this module (i.e. metadata-io) as some modules - * from metadata-service/ are dependent on metadata-io, so it might create a circular dependency + * <p>Note: Ideally the pluginSecurityMode should be of type + * com.datahub.plugin.common.SecurityMode from metadata-service/plugin, However avoiding to + * include metadata-service/plugin as dependency in this module (i.e. metadata-io) as some modules + * from metadata-service/ are dependent on metadata-io, so it might create a circular dependency */ private String pluginSecurityMode; - /** - * Directory path of entity registry, default to /etc/datahub/plugins/models - */ + /** Directory path of entity registry, default to /etc/datahub/plugins/models */ private EntityRegistryPluginConfiguration entityRegistry; - /** - * The location where the Retention config files live - */ + + /** The location where the Retention config files live */ private RetentionPluginConfiguration retention; - /** - * Plugin framework's plugin base directory path, default to /etc/datahub/plugins/auth - */ + + /** Plugin framework's plugin base directory path, default to /etc/datahub/plugins/auth */ private AuthPluginConfiguration auth; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java index 912abf525631b..987df7f307d39 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PreProcessHooks { private boolean uiEnabled; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java index ef03206996823..cc80954afd27e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data public class QueriesTabConfig { - /** - * The number of queries to show on the queries tab. 
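Putting the plugin block documented above together, an illustrative assembly of PluginConfiguration (Lombok's @Data supplies the setters; the path values simply restate the defaults named in the javadocs):

PluginConfiguration plugins = new PluginConfiguration();
plugins.setPluginSecurityMode("RESTRICTED"); // RESTRICTED or LENIENT

EntityRegistryPluginConfiguration registry = new EntityRegistryPluginConfiguration();
registry.setPath("/etc/datahub/plugins/models"); // documented default
plugins.setEntityRegistry(registry);

AuthPluginConfiguration auth = new AuthPluginConfiguration();
auth.setPath("/etc/datahub/plugins/auth"); // documented default
plugins.setAuth(auth);
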
- */ + /** The number of queries to show on the queries tab. */ public Integer queriesTabResultSize; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java index dde7ede34c659..d553f2e268509 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java @@ -2,8 +2,7 @@ import lombok.Data; - @Data public class RetentionPluginConfiguration { String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java index 7094bbd710f75..1901c433e82c8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java @@ -4,8 +4,9 @@ @Data public class SearchResultVisualConfig { - /** - * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not present. - */ - public Boolean enableNameHighlight; + /** + * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not + * present. + */ + public Boolean enableNameHighlight; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java index 96e4a1716974e..738a9684cc764 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java @@ -2,13 +2,9 @@ import lombok.Data; -/** - * POJO representing the "tests" configuration block in application.yml.on.yml - */ +/** POJO representing the "tests" configuration block in application.yml.on.yml */ @Data public class TestsConfiguration { - /** - * Whether tests are enabled - */ + /** Whether tests are enabled */ public boolean enabled; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java index 89c7376dfd110..670a412ec285e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java @@ -2,13 +2,9 @@ import lombok.Data; -/** - * POJO representing the "views" configuration block in application.yml.on.yml - */ +/** POJO representing the "views" configuration block in application.yml.on.yml */ @Data public class ViewsConfiguration { - /** - * Whether Views are enabled - */ + /** Whether Views are enabled */ public boolean enabled; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java index 14ac2406c2256..bc749a373c5b0 100644 
--- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java @@ -2,29 +2,18 @@ import lombok.Data; - -/** - * POJO representing visualConfig block in the application.yml. - */ +/** POJO representing visualConfig block in the application.yml. */ @Data public class VisualConfiguration { - /** - * Asset related configurations - */ + /** Asset related configurations */ public AssetsConfiguration assets; - /** - * Queries tab related configurations - */ + /** Queries tab related configurations */ public QueriesTabConfig queriesTab; - /** - * Queries tab related configurations - */ + /** Queries tab related configurations */ public EntityProfileConfig entityProfile; - /** - * Search result related configurations - */ + /** Search result related configurations */ public SearchResultVisualConfig searchResult; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java index aff0e23e3b337..70601b8a69fe6 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.cache.client.ClientCacheConfiguration; import lombok.Data; - @Data public class CacheConfiguration { PrimaryCacheConfiguration primary; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java index 3bd7ea1758b2b..9a684ee92f3f8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class EntityDocCountCacheConfiguration { long ttlSeconds; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java index 3e60c4bf587e1..be39e71bb1b52 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class HomepageCacheConfiguration { EntityDocCountCacheConfiguration entityCounts; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java index a1b08695e4089..001eb41f05cb7 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PrimaryCacheConfiguration { long ttlSeconds; 
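The cache POJOs above form a small tree: CacheConfiguration holds the primary block, with the homepage block nesting the entity-count settings. A hand-wired sketch with illustrative TTLs (the setHomepage accessor is assumed from the class names in this diff; only the primary field is visible in the hunk):

CacheConfiguration cache = new CacheConfiguration();

PrimaryCacheConfiguration primary = new PrimaryCacheConfiguration();
primary.setTtlSeconds(600); // illustrative TTL
cache.setPrimary(primary);

EntityDocCountCacheConfiguration entityCounts = new EntityDocCountCacheConfiguration();
entityCounts.setTtlSeconds(3600); // illustrative TTL
HomepageCacheConfiguration homepage = new HomepageCacheConfiguration();
homepage.setEntityCounts(entityCounts);
cache.setHomepage(homepage); // assumed accessor
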
diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java index 290b566caf962..ab686cc266b9f 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchCacheConfiguration { SearchLineageCacheConfiguration lineage; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java index a121900435b1f..b8fb371dfa13c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchLineageCacheConfiguration { long ttlSeconds; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java index 3cf7ef20797bb..32136929d4875 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.config.cache.client; - public interface ClientCacheConfig { - boolean isEnabled(); - boolean isStatsEnabled(); - int getStatsIntervalSeconds(); - int getDefaultTTLSeconds(); - int getMaxBytes(); + boolean isEnabled(); + + boolean isStatsEnabled(); + + int getStatsIntervalSeconds(); + + int getDefaultTTLSeconds(); + + int getMaxBytes(); } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java index d940bbe135e55..7564ee978e2bd 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java @@ -4,6 +4,6 @@ @Data public class ClientCacheConfiguration { - EntityClientCacheConfig entityClient; - UsageClientCacheConfig usageClient; + EntityClientCacheConfig entityClient; + UsageClientCacheConfig usageClient; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java index 595b614f2f599..3bb37373db0eb 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java @@ -1,17 +1,16 @@ package com.linkedin.metadata.config.cache.client; -import 
lombok.Data; - import java.util.Map; +import lombok.Data; @Data public class EntityClientCacheConfig implements ClientCacheConfig { - private boolean enabled; - private boolean statsEnabled; - private int statsIntervalSeconds; - private int defaultTTLSeconds; - private int maxBytes; + private boolean enabled; + private boolean statsEnabled; + private int statsIntervalSeconds; + private int defaultTTLSeconds; + private int maxBytes; - // entityName -> aspectName -> cache ttl override - private Map<String, Map<String, Integer>> entityAspectTTLSeconds; + // entityName -> aspectName -> cache ttl override + private Map<String, Map<String, Integer>> entityAspectTTLSeconds; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java index 3aebec9422ed8..f5a9c24c4b188 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java @@ -4,9 +4,9 @@ @Data public class UsageClientCacheConfig implements ClientCacheConfig { - private boolean enabled; - private boolean statsEnabled; - private int statsIntervalSeconds; - private int defaultTTLSeconds; - private int maxBytes; + private boolean enabled; + private boolean statsEnabled; + private int statsIntervalSeconds; + private int defaultTTLSeconds; + private int maxBytes; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java index 7a93119226a2d..b505674f2ed9c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ConsumerConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java index 26a8c6b649133..960baa9cd9172 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ProducerConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java index 7a8594853e0d0..ac0c248f5e559 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SchemaRegistryConfiguration { private String type; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java 
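The per-entity TTL override map on EntityClientCacheConfig above is keyed entityName -> aspectName -> seconds, per its inline comment. An illustrative population (the entity and aspect names are examples, not taken from this diff):

import java.util.Map;

EntityClientCacheConfig clientCache = new EntityClientCacheConfig();
clientCache.setEnabled(true);
clientCache.setDefaultTTLSeconds(60); // fallback TTL for uncached overrides
clientCache.setEntityAspectTTLSeconds(
    Map.of("dataset", Map.of("datasetProfile", 300))); // override one aspect
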
b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java index 74db6fb9719d4..70ffa59ea40e2 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class BuildIndicesConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java index 7a0292c2adec1..82e3868fa3974 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java @@ -1,17 +1,15 @@ package com.linkedin.metadata.config.search; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; import lombok.Data; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.FileSystemResource; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; - - @Data @Slf4j public class CustomConfiguration { @@ -20,6 +18,7 @@ public class CustomConfiguration { /** * Materialize the search configuration from a location external to main application.yml + * * @param mapper yaml enabled jackson mapper * @return search configuration class * @throws IOException diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java index 30679bbaab9ce..130620a9ab918 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ElasticSearchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java index 89636ee3c47c5..b471116ebe349 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ExactMatchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java index 816a7e41470f5..6f3e1cb278f5f 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java +++ 
b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class GraphQueryConfiguration { @@ -11,6 +10,7 @@ public class GraphQueryConfiguration { private int maxResult; public static GraphQueryConfiguration testDefaults; + static { testDefaults = new GraphQueryConfiguration(); testDefaults.setBatchSize(1000); diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java index 5d24248be3650..5dbdcfb269a77 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PartialConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java index b2b5260dc5e70..befce024fbc1a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java index 624d2a4c63c4c..fcce110a56d9c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class WordGramConfiguration { private float twoGramFactor; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java index 460501cc91075..652aa0cc8842e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java @@ -7,21 +7,19 @@ import lombok.Getter; import lombok.ToString; - @Builder(toBuilder = true) @Getter @ToString @EqualsAndHashCode @JsonDeserialize(builder = BoolQueryConfiguration.BoolQueryConfigurationBuilder.class) public class BoolQueryConfiguration { - private Object must; - private Object should; - //CHECKSTYLE:OFF - private Object must_not; - //CHECKSTYLE:ON - private Object filter; + private Object must; + private Object should; + // CHECKSTYLE:OFF + private Object must_not; + // CHECKSTYLE:ON + private Object filter; - @JsonPOJOBuilder(withPrefix = "") - public static class BoolQueryConfigurationBuilder { - } + @JsonPOJOBuilder(withPrefix = "") + public static class BoolQueryConfigurationBuilder {} } diff --git 
a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java index 15deea7620e3d..e6756ca8f0da8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java @@ -2,13 +2,11 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import java.util.List; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; -import java.util.List; - - @Builder(toBuilder = true) @Getter @EqualsAndHashCode @@ -18,6 +16,5 @@ public class CustomSearchConfiguration { private List<QueryConfiguration> queryConfigurations; @JsonPOJOBuilder(withPrefix = "") - public static class CustomSearchConfigurationBuilder { - } + public static class CustomSearchConfigurationBuilder {} } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java index cd4364a64a0c5..901bf803d2bca 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java @@ -2,15 +2,13 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import java.util.Map; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; import lombok.extern.slf4j.Slf4j; -import java.util.Map; - - @Slf4j @Builder(toBuilder = true) @Getter @@ -19,17 +17,13 @@ @JsonDeserialize(builder = QueryConfiguration.QueryConfigurationBuilder.class) public class QueryConfiguration { - private String queryRegex; - @Builder.Default - private boolean simpleQuery = true; - @Builder.Default - private boolean exactMatchQuery = true; - @Builder.Default - private boolean prefixMatchQuery = true; - private BoolQueryConfiguration boolQuery; - private Map<String, Object> functionScore; + private String queryRegex; + @Builder.Default private boolean simpleQuery = true; + @Builder.Default private boolean exactMatchQuery = true; + @Builder.Default private boolean prefixMatchQuery = true; + private BoolQueryConfiguration boolQuery; + private Map<String, Object> functionScore; - @JsonPOJOBuilder(withPrefix = "") - public static class QueryConfigurationBuilder { - } + @JsonPOJOBuilder(withPrefix = "") + public static class QueryConfigurationBuilder {} } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java index 3821cbbed83e8..aa6825360a2df 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java @@ -1,26 +1,19 @@ package com.linkedin.metadata.config.telemetry; import lombok.Data; -/** - * POJO 
representing the "telemetry" configuration block in application.yml. - */ + +/** POJO representing the "telemetry" configuration block in application.yml. */ @Data public class TelemetryConfiguration { - /** - * Whether cli telemetry is enabled - */ - public boolean enabledCli; - /** - * Whether reporting telemetry is enabled - */ - public boolean enabledIngestion; - /** - * Whether or not third party logging should be enabled for this instance - */ - public boolean enableThirdPartyLogging; + /** Whether cli telemetry is enabled */ + public boolean enabledCli; + + /** Whether reporting telemetry is enabled */ + public boolean enabledIngestion; + + /** Whether or not third party logging should be enabled for this instance */ + public boolean enableThirdPartyLogging; - /** - * Whether or not server telemetry should be enabled - */ - public boolean enabledServer; -} \ No newline at end of file + /** Whether or not server telemetry should be enabled */ + public boolean enabledServer; +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java index c10399c4f3e70..f84ac9ec8bfe3 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java @@ -1,18 +1,14 @@ package com.linkedin.metadata.spring; +import java.io.IOException; +import java.util.Properties; import org.springframework.beans.factory.config.YamlPropertiesFactoryBean; import org.springframework.core.env.PropertiesPropertySource; import org.springframework.core.env.PropertySource; import org.springframework.core.io.support.EncodedResource; import org.springframework.core.io.support.PropertySourceFactory; -import java.io.IOException; -import java.util.Properties; - - -/** - * Required for Spring to parse the application.yml provided by this module - */ +/** Required for Spring to parse the application.yml provided by this module */ public class YamlPropertySourceFactory implements PropertySourceFactory { @Override @@ -25,4 +21,4 @@ public PropertySource<?> createPropertySource(String name, EncodedResource encod return new PropertiesPropertySource(encodedResource.getResource().getFilename(), properties); } -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java index d506cf972c255..576969a1032dd 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class BatchWriteOperationsOptions { private int batchSize; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java index b90257870a8b2..ab6990dcf0603 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java @@ 
-1,13 +1,13 @@ package com.linkedin.gms.factory.auth; +import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizerChain; +import com.datahub.authorization.AuthorizerContext; import com.datahub.authorization.DataHubAuthorizer; import com.datahub.authorization.DefaultEntitySpecResolver; +import com.datahub.authorization.EntitySpecResolver; import com.datahub.plugins.PluginConstant; -import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; -import com.datahub.authorization.AuthorizerContext; -import com.datahub.authorization.EntitySpecResolver; import com.datahub.plugins.common.PluginConfig; import com.datahub.plugins.common.PluginPermissionManager; import com.datahub.plugins.common.PluginType; @@ -19,8 +19,8 @@ import com.datahub.plugins.loader.PluginPermissionManagerImpl; import com.google.common.collect.ImmutableMap; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; @@ -38,7 +38,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -86,61 +85,75 @@ private EntitySpecResolver initResolver() { private List<Authorizer> initCustomAuthorizers(EntitySpecResolver resolver) { final List<Authorizer> customAuthorizers = new ArrayList<>(); - Path pluginBaseDirectory = Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); - ConfigProvider configProvider = - new ConfigProvider(pluginBaseDirectory); + Path pluginBaseDirectory = + Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); + ConfigProvider configProvider = new ConfigProvider(pluginBaseDirectory); Optional<Config> optionalConfig = configProvider.load(); // Register authorizer plugins if present - optionalConfig.ifPresent((config) -> { - registerAuthorizer(customAuthorizers, resolver, config); - }); + optionalConfig.ifPresent( + (config) -> { + registerAuthorizer(customAuthorizers, resolver, config); + }); return customAuthorizers; } - private void registerAuthorizer(List<Authorizer> customAuthorizers, EntitySpecResolver resolver, Config config) { + private void registerAuthorizer( + List<Authorizer> customAuthorizers, EntitySpecResolver resolver, Config config) { PluginConfigFactory authorizerPluginPluginConfigFactory = new PluginConfigFactory(config); // Load only Authorizer configuration from plugin config factory List<PluginConfig> authorizers = authorizerPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); // Select only enabled authorizer for instantiation - List<PluginConfig> enabledAuthorizers = authorizers.stream().filter(pluginConfig -> { - if (!pluginConfig.getEnabled()) { - log.info(String.format("Authorizer %s is not enabled", pluginConfig.getName())); - } - return pluginConfig.getEnabled(); - }).collect(Collectors.toList()); + List<PluginConfig> enabledAuthorizers = + authorizers.stream() + .filter( + pluginConfig -> { + if (!pluginConfig.getEnabled()) { + log.info(String.format("Authorizer %s is not enabled", pluginConfig.getName())); + } + return pluginConfig.getEnabled(); + }) + .collect(Collectors.toList()); // Get 
security mode set by user SecurityMode securityMode = - SecurityMode.valueOf(this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + SecurityMode.valueOf( + this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); // Create permission manager with security mode PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); - // Save ContextClassLoader. As some plugins are directly using context classloader from current thread to load libraries + // Save ContextClassLoader. As some plugins are directly using context classloader from current + // thread to load libraries // This will break plugin as their dependencies are inside plugin directory only ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); // Instantiate Authorizer plugins - enabledAuthorizers.forEach((pluginConfig) -> { - // Create context - AuthorizerContext context = new AuthorizerContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString()), resolver); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, pluginConfig); - try { - Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); - Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); - log.info("Initializing plugin {}", pluginConfig.getName()); - authorizer.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); - customAuthorizers.add(authorizer); - log.info("Plugin {} is initialized", pluginConfig.getName()); - } catch (ClassNotFoundException e) { - log.debug(String.format("Failed to init the plugin", pluginConfig.getName())); - throw new RuntimeException(e); - } finally { - Thread.currentThread().setContextClassLoader(contextClassLoader); - } - }); + enabledAuthorizers.forEach( + (pluginConfig) -> { + // Create context + AuthorizerContext context = + new AuthorizerContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString()), + resolver); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, pluginConfig); + try { + Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); + Authorizer authorizer = + (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); + log.info("Initializing plugin {}", pluginConfig.getName()); + authorizer.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); + customAuthorizers.add(authorizer); + log.info("Plugin {} is initialized", pluginConfig.getName()); + } catch (ClassNotFoundException e) { + log.debug(String.format("Failed to init the plugin", pluginConfig.getName())); + throw new RuntimeException(e); + } finally { + Thread.currentThread().setContextClassLoader(contextClassLoader); + } + }); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java index 363c657453b49..296aab95ae427 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java @@ -34,38 +34,30 @@ import software.amazon.awssdk.regions.Region; /** - * An {@link HttpRequestInterceptor} that signs 
requests using any AWS {@link Signer} - * and {@link AwsCredentialsProvider}. + * An {@link HttpRequestInterceptor} that signs requests using any AWS {@link Signer} and {@link + * AwsCredentialsProvider}. */ public class AwsRequestSigningApacheInterceptor implements HttpRequestInterceptor { - /** - * The service that we're connecting to. - */ + /** The service that we're connecting to. */ private final String service; - /** - * The particular signer implementation. - */ + /** The particular signer implementation. */ private final Signer signer; - /** - * The source of AWS credentials for signing. - */ + /** The source of AWS credentials for signing. */ private final AwsCredentialsProvider awsCredentialsProvider; - /** - * The region signing region. - */ + /** The region signing region. */ private final Region region; /** - * * @param service service that we're connecting to * @param signer particular signer implementation * @param awsCredentialsProvider source of AWS credentials for signing * @param region signing region */ - public AwsRequestSigningApacheInterceptor(final String service, + public AwsRequestSigningApacheInterceptor( + final String service, final Signer signer, final AwsCredentialsProvider awsCredentialsProvider, final Region region) { @@ -76,22 +68,20 @@ public AwsRequestSigningApacheInterceptor(final String service, } /** - * * @param service service that we're connecting to * @param signer particular signer implementation * @param awsCredentialsProvider source of AWS credentials for signing * @param region signing region */ - public AwsRequestSigningApacheInterceptor(final String service, + public AwsRequestSigningApacheInterceptor( + final String service, final Signer signer, final AwsCredentialsProvider awsCredentialsProvider, final String region) { this(service, signer, awsCredentialsProvider, Region.of(region)); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public void process(final HttpRequest request, final HttpContext context) throws HttpException, IOException { @@ -103,13 +93,13 @@ public void process(final HttpRequest request, final HttpContext context) } // Copy Apache HttpRequest to AWS Request - SdkHttpFullRequest.Builder requestBuilder = SdkHttpFullRequest.builder() - .method(SdkHttpMethod.fromValue(request.getRequestLine().getMethod())) - .uri(buildUri(context, uriBuilder)); + SdkHttpFullRequest.Builder requestBuilder = + SdkHttpFullRequest.builder() + .method(SdkHttpMethod.fromValue(request.getRequestLine().getMethod())) + .uri(buildUri(context, uriBuilder)); if (request instanceof HttpEntityEnclosingRequest) { - HttpEntityEnclosingRequest httpEntityEnclosingRequest = - (HttpEntityEnclosingRequest) request; + HttpEntityEnclosingRequest httpEntityEnclosingRequest = (HttpEntityEnclosingRequest) request; if (httpEntityEnclosingRequest.getEntity() != null) { InputStream content = httpEntityEnclosingRequest.getEntity().getContent(); requestBuilder.contentStreamProvider(() -> content); @@ -119,7 +109,8 @@ public void process(final HttpRequest request, final HttpContext context) requestBuilder.headers(headerArrayToMap(request.getAllHeaders())); ExecutionAttributes attributes = new ExecutionAttributes(); - attributes.putAttribute(AwsSignerExecutionAttribute.AWS_CREDENTIALS, awsCredentialsProvider.resolveCredentials()); + attributes.putAttribute( + AwsSignerExecutionAttribute.AWS_CREDENTIALS, awsCredentialsProvider.resolveCredentials()); attributes.putAttribute(AwsSignerExecutionAttribute.SERVICE_SIGNING_NAME, service); 
attributes.putAttribute(AwsSignerExecutionAttribute.SIGNING_REGION, region); @@ -129,13 +120,14 @@ public void process(final HttpRequest request, final HttpContext context) // Now copy everything back request.setHeaders(mapToHeaderArray(signedRequest.headers())); if (request instanceof HttpEntityEnclosingRequest) { - HttpEntityEnclosingRequest httpEntityEnclosingRequest = - (HttpEntityEnclosingRequest) request; + HttpEntityEnclosingRequest httpEntityEnclosingRequest = (HttpEntityEnclosingRequest) request; if (httpEntityEnclosingRequest.getEntity() != null) { BasicHttpEntity basicHttpEntity = new BasicHttpEntity(); - basicHttpEntity.setContent(signedRequest.contentStreamProvider() - .orElseThrow(() -> new IllegalStateException("There must be content")) - .newStream()); + basicHttpEntity.setContent( + signedRequest + .contentStreamProvider() + .orElseThrow(() -> new IllegalStateException("There must be content")) + .newStream()); // wrap into repeatable entity to support retries httpEntityEnclosingRequest.setEntity(new BufferedHttpEntity(basicHttpEntity)); } @@ -159,15 +151,13 @@ private URI buildUri(final HttpContext context, URIBuilder uriBuilder) throws IO } /** - * * @param params list of HTTP query params as NameValuePairs * @return a multimap of HTTP query params */ private static Map<String, List<String>> nvpToMapParams(final List<NameValuePair> params) { Map<String, List<String>> parameterMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (NameValuePair nvp : params) { - List<String> argsList = - parameterMap.computeIfAbsent(nvp.getName(), k -> new ArrayList<>()); + List<String> argsList = parameterMap.computeIfAbsent(nvp.getName(), k -> new ArrayList<>()); argsList.add(nvp.getValue()); } return parameterMap; @@ -181,9 +171,10 @@ private static Map<String, List<String>> headerArrayToMap(final Header[] headers Map<String, List<String>> headersMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (Header header : headers) { if (!skipHeader(header)) { - headersMap.put(header.getName(), headersMap - .getOrDefault(header.getName(), - new LinkedList<>(Collections.singletonList(header.getValue())))); + headersMap.put( + header.getName(), + headersMap.getOrDefault( + header.getName(), new LinkedList<>(Collections.singletonList(header.getValue())))); } } return headersMap; @@ -195,7 +186,7 @@ private static Map<String, List<String>> headerArrayToMap(final Header[] headers */ private static boolean skipHeader(final Header header) { return ("content-length".equalsIgnoreCase(header.getName()) - && "0".equals(header.getValue())) // Strip Content-Length: 0 + && "0".equals(header.getValue())) // Strip Content-Length: 0 || "host".equalsIgnoreCase(header.getName()); // Host comes from endpoint } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java index 663234e2519fa..3b23243f76742 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java @@ -2,8 +2,8 @@ import com.datahub.authentication.Authentication; import com.datahub.authorization.DataHubAuthorizer; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.metadata.client.JavaEntityClient; import 
com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({RestliEntityClientFactory.class}) @@ -43,10 +42,17 @@ public class DataHubAuthorizerFactory { @Nonnull protected DataHubAuthorizer getInstance() { - final DataHubAuthorizer.AuthorizationMode mode = policiesEnabled ? DataHubAuthorizer.AuthorizationMode.DEFAULT - : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; - - return new DataHubAuthorizer(systemAuthentication, entityClient, 10, - policyCacheRefreshIntervalSeconds, mode, policyCacheFetchSize); + final DataHubAuthorizer.AuthorizationMode mode = + policiesEnabled + ? DataHubAuthorizer.AuthorizationMode.DEFAULT + : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; + + return new DataHubAuthorizer( + systemAuthentication, + entityClient, + 10, + policyCacheRefreshIntervalSeconds, + mode, + policyCacheFetchSize); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java index d47e1a0a73401..83544e4165ae3 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java @@ -2,9 +2,8 @@ import com.datahub.authentication.token.StatefulTokenService; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -29,12 +28,7 @@ public class DataHubTokenServiceFactory { @Value("${authentication.tokenService.issuer:datahub-metadata-service}") private String issuer; - /** - * + @Inject - * + @Named("entityService") - * + private EntityService _entityService; - * + - */ + /** + @Inject + @Named("entityService") + private EntityService _entityService; + */ @Autowired @Qualifier("entityService") private EntityService _entityService; @@ -44,11 +38,6 @@ public class DataHubTokenServiceFactory { @Nonnull protected StatefulTokenService getInstance() { return new StatefulTokenService( - this.signingKey, - this.signingAlgorithm, - this.issuer, - this._entityService, - this.saltingKey - ); + this.signingKey, this.signingAlgorithm, this.issuer, this._entityService, this.saltingKey); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java index 57598abf8095d..7c6c4384d7343 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java @@ -1,12 +1,10 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authentication.group.GroupService; import com.linkedin.metadata.client.JavaEntityClient; -import 
com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +13,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class GroupServiceFactory { @@ -37,4 +34,4 @@ public class GroupServiceFactory { protected GroupService getInstance() throws Exception { return new GroupService(this._javaEntityClient, this._entityService, this._graphClient); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java index 105f4c677a9e4..c44eada46794d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.invite.InviteTokenService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class InviteTokenServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java index a0df661852935..844f3a094b6b7 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -1,13 +1,11 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authentication.user.NativeUserService; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.client.JavaEntityClient; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -16,7 +14,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class NativeUserServiceFactory { @@ 
-32,14 +29,16 @@ public class NativeUserServiceFactory { @Qualifier("dataHubSecretService") private SecretService _secretService; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Bean(name = "nativeUserService") @Scope("singleton") @Nonnull protected NativeUserService getInstance() throws Exception { - return new NativeUserService(_entityService, _javaEntityClient, _secretService, + return new NativeUserService( + _entityService, + _javaEntityClient, + _secretService, _configurationProvider.getAuthentication()); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java index cc6f5c8272f9d..a6ae703576a3e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.post.PostService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -11,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PostServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java index 8a85f63cdd66d..7696d5201493a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java @@ -1,10 +1,8 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authorization.role.RoleService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -13,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RoleServiceFactory { @@ -28,4 +25,4 @@ public class RoleServiceFactory { protected RoleService getInstance() throws Exception { return new RoleService(this._javaEntityClient); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java index 5bdd8cbf83c65..52d13b05a654d 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java @@ -13,10 +13,9 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - /** - * Factory responsible for instantiating an instance of {@link Authentication} used to authenticate requests - * made by the internal system. + * Factory responsible for instantiating an instance of {@link Authentication} used to authenticate + * requests made by the internal system. */ @Configuration @ConfigurationProperties @@ -37,7 +36,6 @@ protected Authentication getInstance() { // TODO: Change to service final Actor systemActor = new Actor(ActorType.USER, this.systemClientId); return new Authentication( - systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret) - ); + systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret)); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java index 820b272bedb67..44f1669546e33 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java @@ -17,7 +17,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class CacheConfig { @@ -50,25 +49,30 @@ private Caffeine<Object, Object> caffeineCacheBuilder() { @ConditionalOnProperty(name = "searchService.cacheImplementation", havingValue = "hazelcast") public CacheManager hazelcastCacheManager() { Config config = new Config(); - // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, containsKey etc. + // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, + // containsKey etc. // is this behavior what we actually desire? Should we change it now? 
MapConfig mapConfig = new MapConfig().setMaxIdleSeconds(cacheTtlSeconds); - EvictionConfig evictionConfig = new EvictionConfig() - .setMaxSizePolicy(MaxSizePolicy.PER_NODE) - .setSize(cacheMaxSize) - .setEvictionPolicy(EvictionPolicy.LFU); + EvictionConfig evictionConfig = + new EvictionConfig() + .setMaxSizePolicy(MaxSizePolicy.PER_NODE) + .setSize(cacheMaxSize) + .setEvictionPolicy(EvictionPolicy.LFU); mapConfig.setEvictionConfig(evictionConfig); mapConfig.setName("default"); config.addMapConfig(mapConfig); config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false); - config.getNetworkConfig().getJoin().getKubernetesConfig().setEnabled(true) + config + .getNetworkConfig() + .getJoin() + .getKubernetesConfig() + .setEnabled(true) .setProperty("service-dns", hazelcastServiceName); - HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance(config); return new HazelcastCacheManager(hazelcastInstance); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java index d80d57799ee4d..ddfce627b56cd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java @@ -4,7 +4,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class DatasetUrnNameCasingFactory { @Nonnull @@ -13,4 +12,4 @@ protected Boolean getInstance() { String datasetUrnNameCasingEnv = System.getenv("DATAHUB_DATASET_URN_TO_LOWER"); return Boolean.parseBoolean(datasetUrnNameCasingEnv); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java index 51c7db5e37366..aa47a35f3d38a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java @@ -3,12 +3,12 @@ import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; -import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.graph.elastic.ESGraphQueryDAO; import com.linkedin.metadata.graph.elastic.ESGraphWriteDAO; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.models.registry.LineageRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -17,7 +17,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) 
@Import({BaseElasticSearchComponentsFactory.class, EntityRegistryFactory.class}) @@ -30,17 +29,25 @@ public class ElasticSearchGraphServiceFactory { @Qualifier("entityRegistry") private EntityRegistry entityRegistry; - @Autowired - private ConfigurationProvider configurationProvider; + @Autowired private ConfigurationProvider configurationProvider; @Bean(name = "elasticSearchGraphService") @Nonnull protected ElasticSearchGraphService getInstance() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); - return new ElasticSearchGraphService(lineageRegistry, components.getBulkProcessor(), components.getIndexConvention(), - new ESGraphWriteDAO(components.getIndexConvention(), components.getBulkProcessor(), components.getNumRetries()), - new ESGraphQueryDAO(components.getSearchClient(), lineageRegistry, components.getIndexConvention(), - configurationProvider.getElasticSearch().getSearch().getGraph()), + return new ElasticSearchGraphService( + lineageRegistry, + components.getBulkProcessor(), + components.getIndexConvention(), + new ESGraphWriteDAO( + components.getIndexConvention(), + components.getBulkProcessor(), + components.getNumRetries()), + new ESGraphQueryDAO( + components.getSearchClient(), + lineageRegistry, + components.getIndexConvention(), + configurationProvider.getElasticSearch().getSearch().getGraph()), components.getIndexBuilder()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java index 504618ba9cc6a..20c3e92767ce4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({BaseElasticSearchComponentsFactory.class}) @@ -24,8 +23,14 @@ public class ElasticSearchSystemMetadataServiceFactory { @Bean(name = "elasticSearchSystemMetadataService") @Nonnull protected ElasticSearchSystemMetadataService getInstance() { - return new ElasticSearchSystemMetadataService(components.getBulkProcessor(), components.getIndexConvention(), - new ESSystemMetadataDAO(components.getSearchClient(), components.getIndexConvention(), - components.getBulkProcessor(), components.getNumRetries()), components.getIndexBuilder()); + return new ElasticSearchSystemMetadataService( + components.getBulkProcessor(), + components.getIndexConvention(), + new ESSystemMetadataDAO( + components.getSearchClient(), + components.getIndexConvention(), + components.getBulkProcessor(), + components.getNumRetries()), + components.getIndexBuilder()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java index 0dce80b98964b..483251644b6c0 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java @@ 
-1,103 +1,117 @@ package com.linkedin.gms.factory.common; import com.linkedin.metadata.spring.YamlPropertySourceFactory; -import org.apache.http.ssl.SSLContextBuilder; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import javax.annotation.Nonnull; -import javax.net.ssl.SSLContext; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; +import javax.annotation.Nonnull; +import javax.net.ssl.SSLContext; +import org.apache.http.ssl.SSLContextBuilder; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ElasticsearchSSLContextFactory { - @Value("${elasticsearch.sslContext.protocol}") - private String sslProtocol; - - @Value("${elasticsearch.sslContext.secureRandomImplementation}") - private String sslSecureRandomImplementation; + @Value("${elasticsearch.sslContext.protocol}") + private String sslProtocol; - @Value("${elasticsearch.sslContext.trustStoreFile}") - private String sslTrustStoreFile; + @Value("${elasticsearch.sslContext.secureRandomImplementation}") + private String sslSecureRandomImplementation; - @Value("${elasticsearch.sslContext.trustStoreType}") - private String sslTrustStoreType; + @Value("${elasticsearch.sslContext.trustStoreFile}") + private String sslTrustStoreFile; - @Value("${elasticsearch.sslContext.trustStorePassword}") - private String sslTrustStorePassword; + @Value("${elasticsearch.sslContext.trustStoreType}") + private String sslTrustStoreType; - @Value("${elasticsearch.sslContext.keyStoreFile}") - private String sslKeyStoreFile; + @Value("${elasticsearch.sslContext.trustStorePassword}") + private String sslTrustStorePassword; - @Value("${elasticsearch.sslContext.keyStoreType}") - private String sslKeyStoreType; + @Value("${elasticsearch.sslContext.keyStoreFile}") + private String sslKeyStoreFile; - @Value("${elasticsearch.sslContext.keyStorePassword}") - private String sslKeyStorePassword; + @Value("${elasticsearch.sslContext.keyStoreType}") + private String sslKeyStoreType; - @Value("${elasticsearch.sslContext.keyPassword}") - private String sslKeyPassword; + @Value("${elasticsearch.sslContext.keyStorePassword}") + private String sslKeyStorePassword; - @Bean(name = "elasticSearchSSLContext") - public SSLContext createInstance() { - final SSLContextBuilder sslContextBuilder = new SSLContextBuilder(); - if (sslProtocol != null) { - sslContextBuilder.useProtocol(sslProtocol); - } + @Value("${elasticsearch.sslContext.keyPassword}") + private String sslKeyPassword; - if (sslTrustStoreFile != null && sslTrustStoreType != null && sslTrustStorePassword != null) { - loadTrustStore(sslContextBuilder, sslTrustStoreFile, sslTrustStoreType, sslTrustStorePassword); - } - - if (sslKeyStoreFile != null && sslKeyStoreType != null && 
sslKeyStorePassword != null && sslKeyPassword != null) { - loadKeyStore(sslContextBuilder, sslKeyStoreFile, sslKeyStoreType, sslKeyStorePassword, sslKeyPassword); - } - - final SSLContext sslContext; - try { - if (sslSecureRandomImplementation != null) { - sslContextBuilder.setSecureRandom(SecureRandom.getInstance(sslSecureRandomImplementation)); - } - sslContext = sslContextBuilder.build(); - } catch (NoSuchAlgorithmException | KeyManagementException e) { - throw new RuntimeException("Failed to build SSL Context", e); - } - return sslContext; + @Bean(name = "elasticSearchSSLContext") + public SSLContext createInstance() { + final SSLContextBuilder sslContextBuilder = new SSLContextBuilder(); + if (sslProtocol != null) { + sslContextBuilder.useProtocol(sslProtocol); } - private void loadKeyStore(@Nonnull SSLContextBuilder sslContextBuilder, @Nonnull String path, - @Nonnull String type, @Nonnull String password, @Nonnull String keyPassword) { - try (InputStream identityFile = new FileInputStream(path)) { - final KeyStore keystore = KeyStore.getInstance(type); - keystore.load(identityFile, password.toCharArray()); - sslContextBuilder.loadKeyMaterial(keystore, keyPassword.toCharArray()); - } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException | UnrecoverableKeyException e) { - throw new RuntimeException("Failed to load key store: " + path, e); - } + if (sslTrustStoreFile != null && sslTrustStoreType != null && sslTrustStorePassword != null) { + loadTrustStore( + sslContextBuilder, sslTrustStoreFile, sslTrustStoreType, sslTrustStorePassword); } - private void loadTrustStore(@Nonnull SSLContextBuilder sslContextBuilder, @Nonnull String path, - @Nonnull String type, @Nonnull String password) { - try (InputStream identityFile = new FileInputStream(path)) { - final KeyStore keystore = KeyStore.getInstance(type); - keystore.load(identityFile, password.toCharArray()); - sslContextBuilder.loadTrustMaterial(keystore, null); - } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException e) { - throw new RuntimeException("Failed to load key store: " + path, e); - } + if (sslKeyStoreFile != null + && sslKeyStoreType != null + && sslKeyStorePassword != null + && sslKeyPassword != null) { + loadKeyStore( + sslContextBuilder, sslKeyStoreFile, sslKeyStoreType, sslKeyStorePassword, sslKeyPassword); } + final SSLContext sslContext; + try { + if (sslSecureRandomImplementation != null) { + sslContextBuilder.setSecureRandom(SecureRandom.getInstance(sslSecureRandomImplementation)); + } + sslContext = sslContextBuilder.build(); + } catch (NoSuchAlgorithmException | KeyManagementException e) { + throw new RuntimeException("Failed to build SSL Context", e); + } + return sslContext; + } + + private void loadKeyStore( + @Nonnull SSLContextBuilder sslContextBuilder, + @Nonnull String path, + @Nonnull String type, + @Nonnull String password, + @Nonnull String keyPassword) { + try (InputStream identityFile = new FileInputStream(path)) { + final KeyStore keystore = KeyStore.getInstance(type); + keystore.load(identityFile, password.toCharArray()); + sslContextBuilder.loadKeyMaterial(keystore, keyPassword.toCharArray()); + } catch (IOException + | CertificateException + | NoSuchAlgorithmException + | KeyStoreException + | UnrecoverableKeyException e) { + throw new RuntimeException("Failed to load key store: " + path, e); + } + } + + private void loadTrustStore( + @Nonnull SSLContextBuilder sslContextBuilder, + @Nonnull String path, + @Nonnull String type, + 
@Nonnull String password) { + try (InputStream identityFile = new FileInputStream(path)) { + final KeyStore keystore = KeyStore.getInstance(type); + keystore.load(identityFile, password.toCharArray()); + sslContextBuilder.loadTrustMaterial(keystore, null); + } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException e) { + throw new RuntimeException("Failed to load trust store: " + path, e); + } + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java index ba66b678d82b9..63a2e42de1d1a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java @@ -7,7 +7,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource("classpath:git.properties") public class GitVersionFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java index 1e37c735b5bd4..db4928cfe3764 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({GraphServiceFactory.class}) public class GraphClientFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java index 94593eb1fb84c..d98dfcb617f84 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.common; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; +import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({Neo4jGraphServiceFactory.class, ElasticSearchGraphServiceFactory.class}) @@ -42,7 +41,8 @@ protected GraphService createInstance() { return _elasticSearchGraphService; } else { throw new RuntimeException( - "Error: Failed to initialize graph service. Graph Service provided: " + graphServiceImpl + "Error: Failed to initialize graph service. Graph Service provided: " + + graphServiceImpl +
Valid options: [neo4j, elasticsearch]."); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java index ada8466d302e6..b268bb0937035 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java @@ -8,7 +8,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - /** * Creates a {@link IndexConvention} to generate search index names. * diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java index 9805d554d5941..62d4beddd1ab1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java @@ -1,10 +1,8 @@ package com.linkedin.gms.factory.common; -import lombok.extern.slf4j.Slf4j; - import java.util.HashMap; import java.util.Map; - +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -36,14 +34,16 @@ public class LocalCassandraSessionConfigFactory { @Bean(name = "gmsCassandraServiceConfig") protected Map<String, String> createInstance() { - return new HashMap<String, String>() {{ - put("username", datasourceUsername); - put("password", datasourcePassword); - put("hosts", hosts); - put("port", port); - put("datacenter", datacenter); - put("keyspace", keyspace); - put("useSsl", useSsl); - }}; + return new HashMap<String, String>() { + { + put("username", datasourceUsername); + put("password", datasourcePassword); + put("hosts", hosts); + put("port", port); + put("datacenter", datacenter); + put("keyspace", keyspace); + put("useSsl", useSsl); + } + }; } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java index 6bf8ff123b221..08787cdb89aba 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java @@ -15,7 +15,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -93,7 +92,8 @@ public DataSourceConfig buildDataSourceConfig(@Value("${ebean.url}") String data } @Bean(name = "gmsEbeanServiceConfig") - protected ServerConfig createInstance(@Qualifier("ebeanDataSourceConfig") DataSourceConfig config) { + protected ServerConfig createInstance( + @Qualifier("ebeanDataSourceConfig") DataSourceConfig config) { ServerConfig serverConfig = new ServerConfig(); serverConfig.setName("gmsEbeanServiceConfig"); serverConfig.setDataSourceConfig(config); diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java index 65b6115d6638e..04ed29407518d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java @@ -2,7 +2,6 @@ import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.util.concurrent.TimeUnit; - import org.neo4j.driver.AuthTokens; import org.neo4j.driver.Config; import org.neo4j.driver.Driver; @@ -12,7 +11,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class Neo4jDriverFactory { @@ -49,10 +47,12 @@ protected Driver createInstance() { Config.ConfigBuilder builder = Config.builder(); builder.withMaxConnectionPoolSize(neo4jMaxConnectionPoolSize); - builder.withConnectionAcquisitionTimeout(neo4jMaxConnectionAcquisitionTimeout, TimeUnit.SECONDS); + builder.withConnectionAcquisitionTimeout( + neo4jMaxConnectionAcquisitionTimeout, TimeUnit.SECONDS); builder.withMaxConnectionLifetime(neo4jMaxConnectionLifetime(), TimeUnit.SECONDS); builder.withMaxTransactionRetryTime(neo4jMaxTransactionRetryTime, TimeUnit.SECONDS); - builder.withConnectionLivenessCheckTimeout(neo4jConnectionLivenessCheckTimeout, TimeUnit.SECONDS); + builder.withConnectionLivenessCheckTimeout( + neo4jConnectionLivenessCheckTimeout, TimeUnit.SECONDS); return GraphDatabase.driver(uri, AuthTokens.basic(username, password), builder.build()); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java index 87670ce10f481..d3b0cd8aa6d92 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.common; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; -import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.models.registry.LineageRegistry; import javax.annotation.Nonnull; import org.neo4j.driver.Driver; import org.neo4j.driver.SessionConfig; @@ -14,7 +14,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({Neo4jDriverFactory.class, EntityRegistryFactory.class}) public class Neo4jGraphServiceFactory { @@ -33,6 +32,7 @@ public class Neo4jGraphServiceFactory { @Nonnull protected Neo4jGraphService getInstance() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); - return new Neo4jGraphService(lineageRegistry, neo4jDriver, SessionConfig.forDatabase(neo4jDatabase)); + return new Neo4jGraphService( + lineageRegistry, neo4jDriver, SessionConfig.forDatabase(neo4jDatabase)); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java 
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java index 3c40b30bfc7d1..ddd31f2692934 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java @@ -10,10 +10,15 @@ import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpHost; import org.apache.http.HttpRequestInterceptor; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.ssl.DefaultHostnameVerifier; import org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.conn.util.PublicSuffixMatcherLoader; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; @@ -33,11 +38,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; -import org.apache.http.auth.AuthScope; import org.springframework.context.annotation.PropertySource; import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; import software.amazon.awssdk.auth.signer.Aws4Signer; @@ -45,7 +45,7 @@ @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) -@Import({ ElasticsearchSSLContextFactory.class }) +@Import({ElasticsearchSSLContextFactory.class}) public class RestHighLevelClientFactory { @Value("${elasticsearch.host}") @@ -93,21 +93,26 @@ public RestHighLevelClient createInstance(RestClientBuilder restClientBuilder) { public RestClientBuilder loadRestClient() { final RestClientBuilder builder = createBuilder(useSSL ? "https" : "http"); - builder.setHttpClientConfigCallback(httpAsyncClientBuilder -> { - if (useSSL) { - httpAsyncClientBuilder.setSSLContext(sslContext).setSSLHostnameVerifier(new NoopHostnameVerifier()); - } - try { - httpAsyncClientBuilder.setConnectionManager(createConnectionManager()); - } catch (IOReactorException e) { - throw new IllegalStateException("Unable to start ElasticSearch client. Please verify connection configuration."); - } - httpAsyncClientBuilder.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(threadCount).build()); - - setCredentials(httpAsyncClientBuilder); - - return httpAsyncClientBuilder; - }); + builder.setHttpClientConfigCallback( + httpAsyncClientBuilder -> { + if (useSSL) { + httpAsyncClientBuilder + .setSSLContext(sslContext) + .setSSLHostnameVerifier(new NoopHostnameVerifier()); + } + try { + httpAsyncClientBuilder.setConnectionManager(createConnectionManager()); + } catch (IOReactorException e) { + throw new IllegalStateException( + "Unable to start ElasticSearch client. 
Please verify connection configuration."); + } + httpAsyncClientBuilder.setDefaultIOReactorConfig( + IOReactorConfig.custom().setIoThreadCount(threadCount).build()); + + setCredentials(httpAsyncClientBuilder); + + return httpAsyncClientBuilder; + }); return builder; } @@ -121,41 +126,47 @@ private RestClientBuilder createBuilder(String scheme) { } builder.setRequestConfigCallback( - requestConfigBuilder -> requestConfigBuilder.setConnectionRequestTimeout(connectionRequestTimeout)); + requestConfigBuilder -> + requestConfigBuilder.setConnectionRequestTimeout(connectionRequestTimeout)); return builder; } /** - * Needed to override ExceptionHandler behavior for cases where IO error would have put client in unrecoverable state - * We don't utilize system properties in the client builder, so setting defaults pulled from - * {@link HttpAsyncClientBuilder#build()}. + * Needed to override ExceptionHandler behavior for cases where IO error would have put client in + * unrecoverable state We don't utilize system properties in the client builder, so setting + * defaults pulled from {@link HttpAsyncClientBuilder#build()}. + * * @return */ private NHttpClientConnectionManager createConnectionManager() throws IOReactorException { SSLContext sslContext = SSLContexts.createDefault(); - HostnameVerifier hostnameVerifier = new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()); + HostnameVerifier hostnameVerifier = + new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()); SchemeIOSessionStrategy sslStrategy = new SSLIOSessionStrategy(sslContext, null, null, hostnameVerifier); - IOReactorConfig ioReactorConfig = IOReactorConfig.custom().setIoThreadCount(threadCount).build(); + IOReactorConfig ioReactorConfig = + IOReactorConfig.custom().setIoThreadCount(threadCount).build(); DefaultConnectingIOReactor ioReactor = new DefaultConnectingIOReactor(ioReactorConfig); - IOReactorExceptionHandler ioReactorExceptionHandler = new IOReactorExceptionHandler() { - @Override - public boolean handle(IOException ex) { - log.error("IO Exception caught during ElasticSearch connection.", ex); - return true; - } - - @Override - public boolean handle(RuntimeException ex) { - log.error("Runtime Exception caught during ElasticSearch connection.", ex); - return true; - } - }; + IOReactorExceptionHandler ioReactorExceptionHandler = + new IOReactorExceptionHandler() { + @Override + public boolean handle(IOException ex) { + log.error("IO Exception caught during ElasticSearch connection.", ex); + return true; + } + + @Override + public boolean handle(RuntimeException ex) { + log.error("Runtime Exception caught during ElasticSearch connection.", ex); + return true; + } + }; ioReactor.setExceptionHandler(ioReactorExceptionHandler); - return new PoolingNHttpClientConnectionManager(ioReactor, + return new PoolingNHttpClientConnectionManager( + ioReactor, RegistryBuilder.<SchemeIOSessionStrategy>create() .register("http", NoopIOSessionStrategy.INSTANCE) .register("https", sslStrategy) @@ -165,7 +176,8 @@ public boolean handle(RuntimeException ex) { private void setCredentials(HttpAsyncClientBuilder httpAsyncClientBuilder) { if (username != null && password != null) { final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password)); + credentialsProvider.setCredentials( + AuthScope.ANY, new UsernamePasswordCredentials(username, password)); 
httpAsyncClientBuilder.setDefaultCredentialsProvider(credentialsProvider); } if (opensearchUseAwsIamAuth) { @@ -177,11 +189,12 @@ private void setCredentials(HttpAsyncClientBuilder httpAsyncClientBuilder) { private HttpRequestInterceptor getAwsRequestSigningInterceptor(String region) { if (region == null) { - throw new IllegalArgumentException("Region must not be null when opensearchUseAwsIamAuth is enabled"); + throw new IllegalArgumentException( + "Region must not be null when opensearchUseAwsIamAuth is enabled"); } Aws4Signer signer = Aws4Signer.create(); // Uses default AWS credentials - return new AwsRequestSigningApacheInterceptor("es", signer, - DefaultCredentialsProvider.create(), region); + return new AwsRequestSigningApacheInterceptor( + "es", signer, DefaultCredentialsProvider.create(), region); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java index 3ba6965577204..5663162186b83 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({GraphServiceFactory.class, EntityServiceFactory.class}) public class SiblingGraphServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java index 241c93f438bf1..1c17e433d5507 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java @@ -11,7 +11,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({ElasticSearchSystemMetadataServiceFactory.class}) public class SystemMetadataServiceFactory { @@ -24,6 +23,6 @@ public class SystemMetadataServiceFactory { @Bean(name = "systemMetadataService") @Primary protected SystemMetadataService createInstance() { - return _elasticSearchSystemMetadataService; + return _elasticSearchSystemMetadataService; } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java index c7df8b1cde6ec..fac0bf0c46685 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java @@ -7,7 +7,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - /** * Creates a {@link TopicConvention} to generate kafka metadata event topic names. 
* @@ -32,10 +31,14 @@ public class TopicConventionFactory { @Value("${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}") private String metadataChangeLogVersionedTopicName; - @Value("${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}") + @Value( + "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}") private String metadataChangeLogTimeseriesTopicName; - @Value("${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_PROPOSAL + "}") + @Value( + "${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_PROPOSAL + + "}") private String failedMetadataChangeProposalName; @Value("${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}") @@ -46,10 +49,17 @@ public class TopicConventionFactory { @Bean(name = TOPIC_CONVENTION_BEAN) protected TopicConvention createInstance() { - return new TopicConventionImpl(metadataChangeEventName, metadataAuditEventName, failedMetadataChangeEventName, - metadataChangeProposalName, metadataChangeLogVersionedTopicName, metadataChangeLogTimeseriesTopicName, - failedMetadataChangeProposalName, platformEventTopicName, + return new TopicConventionImpl( + metadataChangeEventName, + metadataAuditEventName, + failedMetadataChangeEventName, + metadataChangeProposalName, + metadataChangeLogVersionedTopicName, + metadataChangeLogTimeseriesTopicName, + failedMetadataChangeProposalName, + platformEventTopicName, // TODO once we start rolling out v5 add support for changing the new event names. - TopicConventionImpl.DEFAULT_EVENT_PATTERN, dataHubUpgradeHistoryTopicName); + TopicConventionImpl.DEFAULT_EVENT_PATTERN, + dataHubUpgradeHistoryTopicName); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java index 465480be344c7..5c7c2370ab337 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java @@ -2,6 +2,7 @@ import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authorization.AuthorizationConfiguration; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.metadata.config.DataHubConfiguration; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.config.SystemUpdateConfiguration; @@ -11,76 +12,57 @@ import com.linkedin.metadata.config.cache.CacheConfiguration; import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; -import com.linkedin.datahub.graphql.featureflags.FeatureFlags; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.config.telemetry.TelemetryConfiguration; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import lombok.Data; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @ConfigurationProperties @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Data public class ConfigurationProvider { - /** - * Authentication related configs - */ + /** 
Authentication related configs */ private AuthenticationConfiguration authentication; - /** - * Authorizer related configs - */ + + /** Authorizer related configs */ private AuthorizationConfiguration authorization; - /** - * Ingestion related configs - */ + + /** Ingestion related configs */ private IngestionConfiguration ingestion; - /** - * Telemetry related configs - */ + + /** Telemetry related configs */ private TelemetryConfiguration telemetry; - /** - * Viz related configs - */ + + /** Viz related configs */ private VisualConfiguration visualConfig; - /** - * Tests related configs - */ + + /** Tests related configs */ private TestsConfiguration metadataTests; - /** - * DataHub top-level server configurations - */ + + /** DataHub top-level server configurations */ private DataHubConfiguration datahub; - /** - * Views feature related configs - */ + + /** Views feature related configs */ private ViewsConfiguration views; - /** - * Feature flags indicating what is turned on vs turned off - */ + + /** Feature flags indicating what is turned on vs turned off */ private FeatureFlags featureFlags; - /** - * Kafka related configs. - */ + + /** Kafka related configs. */ private KafkaConfiguration kafka; - /** - * ElasticSearch configurations - */ + + /** ElasticSearch configurations */ private ElasticSearchConfiguration elasticSearch; - /** - * System Update configurations - */ + + /** System Update configurations */ private SystemUpdateConfiguration systemUpdate; - /** - * Configuration for caching - */ + /** Configuration for caching */ private CacheConfiguration cache; - /** - * Configuration for the health check server - */ + /** Configuration for the health check server */ private HealthCheckConfiguration healthCheck; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java index 6eadf06288d29..23b7ec9edd306 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class HealthCheckConfiguration { private int cacheDurationSeconds; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java index 6eab711603c52..739211855cacd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java @@ -1,9 +1,10 @@ package com.linkedin.gms.factory.dataproduct; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; @@ -11,8 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; 
-import javax.annotation.Nonnull; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class DataProductServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java index ae20f7e96ba40..326537ee07cbd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java @@ -2,6 +2,13 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.CqlSessionBuilder; +import java.net.InetSocketAddress; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.net.ssl.SSLContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -9,14 +16,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; -import javax.net.ssl.SSLContext; -import java.net.InetSocketAddress; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - @Configuration public class CassandraSessionFactory { @@ -30,20 +29,22 @@ public class CassandraSessionFactory { @Nonnull protected CqlSession createSession() { int port = Integer.parseInt(sessionConfig.get("port")); - List<InetSocketAddress> addresses = Arrays.stream(sessionConfig.get("hosts").split(",")) - .map(host -> new InetSocketAddress(host, port)) - .collect(Collectors.toList()); + List<InetSocketAddress> addresses = + Arrays.stream(sessionConfig.get("hosts").split(",")) + .map(host -> new InetSocketAddress(host, port)) + .collect(Collectors.toList()); String dc = sessionConfig.get("datacenter"); String ks = sessionConfig.get("keyspace"); String username = sessionConfig.get("username"); String password = sessionConfig.get("password"); - CqlSessionBuilder csb = CqlSession.builder() - .addContactPoints(addresses) - .withLocalDatacenter(dc) - .withKeyspace(ks) - .withAuthCredentials(username, password); + CqlSessionBuilder csb = + CqlSession.builder() + .addContactPoints(addresses) + .withLocalDatacenter(dc) + .withKeyspace(ks) + .withAuthCredentials(username, password); if (sessionConfig.containsKey("useSsl") && sessionConfig.get("useSsl").equals("true")) { try { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java index 9feb7e469d018..2bfe7bff1b45a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java @@ -12,14 +12,12 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration @Slf4j public class EbeanServerFactory { public static final String EBEAN_MODEL_PACKAGE = EbeanAspectV2.class.getPackage().getName(); - @Autowired - ApplicationContext applicationContext; + 
@Autowired ApplicationContext applicationContext; @Bean(name = "ebeanServer") @DependsOn({"gmsEbeanServiceConfig"}) diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java index 925689c8609db..94aebb2a39efa 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java @@ -5,13 +5,12 @@ import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import io.ebean.Database; +import javax.annotation.Nonnull; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; - @Configuration public class EntityAspectDaoFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java index 4000f7d6ed058..9123714de5bc8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java @@ -5,13 +5,12 @@ import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import io.ebean.Database; +import javax.annotation.Nonnull; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; - @Configuration public class EntityAspectMigrationsDaoFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java index f1c1a7b743714..e75ec0c0dc44a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java @@ -11,6 +11,7 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.mxe.TopicConvention; +import javax.annotation.Nonnull; import org.apache.avro.generic.IndexedRecord; import org.apache.kafka.clients.producer.Producer; import org.springframework.beans.factory.annotation.Qualifier; @@ -19,9 +20,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; - - @Configuration public class EntityServiceFactory { @@ -29,22 +27,35 @@ public class EntityServiceFactory { private Integer _ebeanMaxTransactionRetry; @Bean(name = "entityService") - @DependsOn({"entityAspectDao", "kafkaEventProducer", "kafkaHealthChecker", - TopicConventionFactory.TOPIC_CONVENTION_BEAN, "entityRegistry"}) + @DependsOn({ + 
"entityAspectDao", + "kafkaEventProducer", + "kafkaHealthChecker", + TopicConventionFactory.TOPIC_CONVENTION_BEAN, + "entityRegistry" + }) @Nonnull protected EntityService createInstance( - Producer<String, ? extends IndexedRecord> producer, - TopicConvention convention, - KafkaHealthChecker kafkaHealthChecker, - @Qualifier("entityAspectDao") AspectDao aspectDao, - EntityRegistry entityRegistry, - ConfigurationProvider configurationProvider, - UpdateIndicesService updateIndicesService) { - - final KafkaEventProducer eventProducer = new KafkaEventProducer(producer, convention, kafkaHealthChecker); + Producer<String, ? extends IndexedRecord> producer, + TopicConvention convention, + KafkaHealthChecker kafkaHealthChecker, + @Qualifier("entityAspectDao") AspectDao aspectDao, + EntityRegistry entityRegistry, + ConfigurationProvider configurationProvider, + UpdateIndicesService updateIndicesService) { + + final KafkaEventProducer eventProducer = + new KafkaEventProducer(producer, convention, kafkaHealthChecker); FeatureFlags featureFlags = configurationProvider.getFeatureFlags(); - EntityService entityService = new EntityServiceImpl(aspectDao, eventProducer, entityRegistry, - featureFlags.isAlwaysEmitChangeLog(), updateIndicesService, featureFlags.getPreProcessHooks(), _ebeanMaxTransactionRetry); + EntityService entityService = + new EntityServiceImpl( + aspectDao, + eventProducer, + entityRegistry, + featureFlags.isAlwaysEmitChangeLog(), + updateIndicesService, + featureFlags.getPreProcessHooks(), + _ebeanMaxTransactionRetry); return entityService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java index 3f2388f4829e3..080845147766f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java @@ -1,10 +1,10 @@ package com.linkedin.gms.factory.entity; import com.datahub.authentication.Authentication; -import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.entity.client.RestliEntityClient; +import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.client.SystemJavaEntityClient; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; @@ -21,7 +21,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @ConditionalOnExpression("'${entityClient.preferredImpl:java}'.equals('java')") @Import({DataHubKafkaProducerFactory.class}) @@ -60,7 +59,8 @@ public class JavaEntityClientFactory { private EventProducer _eventProducer; @Bean("javaEntityClient") - public JavaEntityClient getJavaEntityClient(@Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { + public JavaEntityClient getJavaEntityClient( + @Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { return new JavaEntityClient( _entityService, _deleteEntityService, @@ -74,10 +74,12 @@ public JavaEntityClient getJavaEntityClient(@Qualifier("restliEntityClient") fin } @Bean("systemJavaEntityClient") - public 
SystemJavaEntityClient systemJavaEntityClient(@Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, - @Qualifier("systemAuthentication") final Authentication systemAuthentication, - @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) { - SystemJavaEntityClient systemJavaEntityClient = new SystemJavaEntityClient( + public SystemJavaEntityClient systemJavaEntityClient( + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, + @Qualifier("systemAuthentication") final Authentication systemAuthentication, + @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) { + SystemJavaEntityClient systemJavaEntityClient = + new SystemJavaEntityClient( _entityService, _deleteEntityService, _entitySearchService, diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java index dfc5e835392df..1dee8c4aa4d27 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java @@ -4,19 +4,17 @@ import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.restli.DefaultRestliClientFactory; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; +import java.net.URI; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; -import java.net.URI; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RestliEntityClientFactory { @@ -48,21 +46,28 @@ public RestliEntityClient getRestliEntityClient() { if (gmsUri != null) { restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol); } else { - restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); + restClient = + DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); } return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); } @Bean("systemRestliEntityClient") - public SystemRestliEntityClient systemRestliEntityClient(@Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, - @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + public SystemRestliEntityClient systemRestliEntityClient( + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, + @Qualifier("systemAuthentication") final Authentication systemAuthentication) { final Client restClient; if (gmsUri != null) { restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol); } else { - restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); 
+ restClient = + DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); } - return new SystemRestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries, - systemAuthentication, configurationProvider.getCache().getClient().getEntityClient()); + return new SystemRestliEntityClient( + restClient, + new ExponentialBackoff(retryInterval), + numRetries, + systemAuthentication, + configurationProvider.getCache().getClient().getEntityClient()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java index ff56f19e4f8fd..b02541586de49 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java @@ -1,12 +1,13 @@ package com.linkedin.gms.factory.entity; import com.datastax.oss.driver.api.core.CqlSession; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import io.ebean.Database; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -16,9 +17,6 @@ import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.PropertySource; -import javax.annotation.Nonnull; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RetentionServiceFactory { @@ -30,24 +28,24 @@ public class RetentionServiceFactory { @Value("${RETENTION_APPLICATION_BATCH_SIZE:1000}") private Integer _batchSize; - @Bean(name = "retentionService") @DependsOn({"cassandraSession", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") @Nonnull protected RetentionService createCassandraInstance(CqlSession session) { - RetentionService retentionService = new CassandraRetentionService(_entityService, session, _batchSize); + RetentionService retentionService = + new CassandraRetentionService(_entityService, session, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } - @Bean(name = "retentionService") @DependsOn({"ebeanServer", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull protected RetentionService createEbeanInstance(Database server) { - RetentionService retentionService = new EbeanRetentionService(_entityService, server, _batchSize); + RetentionService retentionService = + new EbeanRetentionService(_entityService, server, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java index a4ea02af94bad..d8c1422f988c2 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java @@ -17,23 +17,32 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import(EntityIndexBuildersFactory.class) public class UpdateIndicesServiceFactory { - @Autowired - private ApplicationContext context; + @Autowired private ApplicationContext context; + @Value("${entityClient.preferredImpl:java}") private String entityClientImpl; @Bean - public UpdateIndicesService updateIndicesService(GraphService graphService, EntitySearchService entitySearchService, - TimeseriesAspectService timeseriesAspectService, - SystemMetadataService systemMetadataService, - EntityRegistry entityRegistry, SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { - UpdateIndicesService updateIndicesService = new UpdateIndicesService(graphService, entitySearchService, timeseriesAspectService, - systemMetadataService, entityRegistry, searchDocumentTransformer, entityIndexBuilders); + public UpdateIndicesService updateIndicesService( + GraphService graphService, + EntitySearchService entitySearchService, + TimeseriesAspectService timeseriesAspectService, + SystemMetadataService systemMetadataService, + EntityRegistry entityRegistry, + SearchDocumentTransformer searchDocumentTransformer, + EntityIndexBuilders entityIndexBuilders) { + UpdateIndicesService updateIndicesService = + new UpdateIndicesService( + graphService, + entitySearchService, + timeseriesAspectService, + systemMetadataService, + entityRegistry, + searchDocumentTransformer, + entityIndexBuilders); if ("restli".equals(entityClientImpl)) { updateIndicesService.setSystemEntityClient(context.getBean(SystemRestliEntityClient.class)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java index cda21f8907867..356fb226937dd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.entityregistry; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistryException; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.io.IOException; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Value; @@ -11,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.core.io.Resource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ConfigEntityRegistryFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java index 962bab56cbbf5..2c65eeafe063b 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java @@ -32,7 +32,8 @@ public class EntityRegistryFactory { @Primary @Nonnull protected EntityRegistry getInstance() throws EntityRegistryException { - MergedEntityRegistry baseEntityRegistry = new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(configEntityRegistry); + MergedEntityRegistry baseEntityRegistry = + new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(configEntityRegistry); pluginEntityRegistryLoader.withBaseRegistry(baseEntityRegistry).start(true); return baseEntityRegistry; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java index 6dbb07309c7cc..8c6a4ad998aff 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java @@ -1,7 +1,7 @@ package com.linkedin.gms.factory.entityregistry; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.PluginEntityRegistryLoader; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.io.FileNotFoundException; import java.net.MalformedURLException; import javax.annotation.Nonnull; @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PluginEntityRegistryFactory { @@ -20,7 +19,8 @@ public class PluginEntityRegistryFactory { @Bean(name = "pluginEntityRegistry") @Nonnull - protected PluginEntityRegistryLoader getInstance() throws FileNotFoundException, MalformedURLException { + protected PluginEntityRegistryLoader getInstance() + throws FileNotFoundException, MalformedURLException { return new PluginEntityRegistryLoader(pluginRegistryPath); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java index c50b4c9088bc2..723715a13b1c1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java @@ -2,24 +2,24 @@ import com.datahub.authentication.group.GroupService; import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.post.PostService; import com.datahub.authentication.token.StatefulTokenService; import com.datahub.authentication.user.NativeUserService; import com.datahub.authorization.role.RoleService; -import com.datahub.authentication.post.PostService; import com.linkedin.datahub.graphql.GmsGraphQLEngine; import com.linkedin.datahub.graphql.GmsGraphQLEngineArgs; import com.linkedin.datahub.graphql.GraphQLEngine; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; -import com.linkedin.metadata.client.JavaEntityClient; import 
com.linkedin.gms.factory.auth.DataHubTokenServiceFactory; import com.linkedin.gms.factory.common.GitVersionFactory; import com.linkedin.gms.factory.common.IndexConventionFactory; import com.linkedin.gms.factory.common.RestHighLevelClientFactory; import com.linkedin.gms.factory.common.SiblingGraphServiceFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.recommendation.RecommendationServiceFactory; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.client.SystemJavaEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; @@ -29,11 +29,11 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; -import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -48,11 +48,17 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration -@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, RestliEntityClientFactory.class, - RecommendationServiceFactory.class, EntityRegistryFactory.class, DataHubTokenServiceFactory.class, - GitVersionFactory.class, SiblingGraphServiceFactory.class}) +@Import({ + RestHighLevelClientFactory.class, + IndexConventionFactory.class, + RestliEntityClientFactory.class, + RecommendationServiceFactory.class, + EntityRegistryFactory.class, + DataHubTokenServiceFactory.class, + GitVersionFactory.class, + SiblingGraphServiceFactory.class +}) public class GraphQLEngineFactory { @Autowired @Qualifier("elasticSearchRestHighLevelClient") @@ -169,7 +175,6 @@ public class GraphQLEngineFactory { @Value("${platformAnalytics.enabled}") // TODO: Migrate to DATAHUB_ANALYTICS_ENABLED private Boolean isAnalyticsEnabled; - @Bean(name = "graphQLEngine") @Nonnull protected GraphQLEngine getInstance() { @@ -211,8 +216,6 @@ protected GraphQLEngine getInstance() { args.setQueryService(_queryService); args.setFeatureFlags(_configProvider.getFeatureFlags()); args.setDataProductService(_dataProductService); - return new GmsGraphQLEngine( - args - ).builder().build(); + return new GmsGraphQLEngine(args).builder().build(); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java index 9beb617c4f6e8..78b9c5d52efdd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java @@ -16,7 +16,6 @@ import org.springframework.context.annotation.PropertySource; import 
org.springframework.context.annotation.Scope;
-
 @Import({SystemAuthenticationFactory.class, RestliEntityClientFactory.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class IngestionSchedulerFactory {
@@ -33,17 +32,23 @@ public class IngestionSchedulerFactory {
   @Qualifier("configurationProvider")
   private ConfigurationProvider _configProvider;
 
-  @Value("${ingestion.scheduler.delayIntervalSeconds:45}") // Boot up ingestion source cache after waiting 45 seconds for startup.
+  @Value("${ingestion.scheduler.delayIntervalSeconds:45}") // Boot up ingestion source cache after
+  // waiting 45 seconds for startup.
   private Integer _delayIntervalSeconds;
 
-  @Value("${ingestion.scheduler.refreshIntervalSeconds:43200}") // By default, refresh ingestion sources 2 times per day.
+  @Value("${ingestion.scheduler.refreshIntervalSeconds:43200}") // By default, refresh ingestion
+  // sources 2 times per day.
   private Integer _refreshIntervalSeconds;
 
   @Bean(name = "ingestionScheduler")
   @Scope("singleton")
   @Nonnull
   protected IngestionScheduler getInstance() {
-    return new IngestionScheduler(_systemAuthentication, _entityClient, _configProvider.getIngestion(),
-        _delayIntervalSeconds, _refreshIntervalSeconds);
+    return new IngestionScheduler(
+        _systemAuthentication,
+        _entityClient,
+        _configProvider.getIngestion(),
+        _delayIntervalSeconds,
+        _refreshIntervalSeconds);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java
index 675f015d9e378..41807d0daaa72 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.kafka;
 
 import com.linkedin.gms.factory.common.TopicConventionFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.dao.producer.KafkaEventProducer;
 import com.linkedin.metadata.dao.producer.KafkaHealthChecker;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.mxe.TopicConvention;
 import org.apache.avro.generic.IndexedRecord;
 import org.apache.kafka.clients.producer.Producer;
@@ -14,7 +14,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @Import({DataHubKafkaProducerFactory.class, TopicConventionFactory.class, KafkaHealthChecker.class})
@@ -28,14 +27,10 @@ public class DataHubKafkaEventProducerFactory {
   @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN)
   private TopicConvention topicConvention;
 
-  @Autowired
-  private KafkaHealthChecker kafkaHealthChecker;
+  @Autowired private KafkaHealthChecker kafkaHealthChecker;
 
   @Bean(name = "kafkaEventProducer")
   protected KafkaEventProducer createInstance() {
-    return new KafkaEventProducer(
-        kafkaProducer,
-        topicConvention,
-        kafkaHealthChecker);
+    return new KafkaEventProducer(kafkaProducer, topicConvention, kafkaHealthChecker);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
index 78b3de501e0e5..0b331ffc40be4 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
@@ -1,11 +1,11 @@
 package com.linkedin.gms.factory.kafka;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import java.util.Arrays;
 import java.util.Map;
@@ -23,11 +23,14 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @EnableConfigurationProperties({KafkaProperties.class})
-@Import({KafkaSchemaRegistryFactory.class, AwsGlueSchemaRegistryFactory.class, InternalSchemaRegistryFactory.class})
+@Import({
+  KafkaSchemaRegistryFactory.class,
+  AwsGlueSchemaRegistryFactory.class,
+  InternalSchemaRegistryFactory.class
+})
 public class DataHubKafkaProducerFactory {
 
   @Autowired
@@ -35,20 +38,26 @@ public class DataHubKafkaProducerFactory {
   private SchemaRegistryConfig _schemaRegistryConfig;
 
   @Bean(name = "kafkaProducer")
-  protected Producer<String, IndexedRecord> createInstance(@Qualifier("configurationProvider") ConfigurationProvider
-      provider, KafkaProperties properties) {
+  protected Producer<String, IndexedRecord> createInstance(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties properties) {
     KafkaConfiguration kafkaConfiguration = provider.getKafka();
-    return new KafkaProducer<>(buildProducerProperties(_schemaRegistryConfig, kafkaConfiguration, properties));
+    return new KafkaProducer<>(
+        buildProducerProperties(_schemaRegistryConfig, kafkaConfiguration, properties));
   }
 
-  public static Map<String, Object> buildProducerProperties(SchemaRegistryConfig schemaRegistryConfig,
-      KafkaConfiguration kafkaConfiguration, KafkaProperties properties) {
+  public static Map<String, Object> buildProducerProperties(
+      SchemaRegistryConfig schemaRegistryConfig,
+      KafkaConfiguration kafkaConfiguration,
+      KafkaProperties properties) {
     KafkaProperties.Producer producerProps = properties.getProducer();
     producerProps.setKeySerializer(StringSerializer.class);
     // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
-    if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) {
-      producerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
+    if (kafkaConfiguration.getBootstrapServers() != null
+        && kafkaConfiguration.getBootstrapServers().length() > 0) {
+      producerProps.setBootstrapServers(
+          Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
     } // else we rely on KafkaProperties which defaults to localhost:9092
 
     Map<String, Object> props = properties.buildProducerProperties();
@@ -56,18 +65,27 @@ public static Map<String, Object> buildProducerProperties(SchemaRegistryConfig s
     props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getSerializer());
 
     props.put(ProducerConfig.RETRIES_CONFIG, kafkaConfiguration.getProducer().getRetryCount());
-    props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getDeliveryTimeout());
-    props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getRequestTimeout());
-    props.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, kafkaConfiguration.getProducer().getBackoffTimeout());
-    props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, kafkaConfiguration.getProducer().getCompressionType());
-    props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, kafkaConfiguration.getProducer().getMaxRequestSize());
+    props.put(
+        ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG,
+        kafkaConfiguration.getProducer().getDeliveryTimeout());
+    props.put(
+        ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG,
+        kafkaConfiguration.getProducer().getRequestTimeout());
+    props.put(
+        ProducerConfig.RETRY_BACKOFF_MS_CONFIG,
+        kafkaConfiguration.getProducer().getBackoffTimeout());
+    props.put(
+        ProducerConfig.COMPRESSION_TYPE_CONFIG,
+        kafkaConfiguration.getProducer().getCompressionType());
+    props.put(
+        ProducerConfig.MAX_REQUEST_SIZE_CONFIG,
+        kafkaConfiguration.getProducer().getMaxRequestSize());
 
     // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
-    schemaRegistryConfig.getProperties().entrySet()
-        .stream()
-        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
-        .forEach(entry -> props.put(entry.getKey(), entry.getValue()));
+    schemaRegistryConfig.getProperties().entrySet().stream()
+        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
+        .forEach(entry -> props.put(entry.getKey(), entry.getValue()));
 
     return props;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
index 7a9e80781d639..2a6338ac15e93 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
@@ -1,16 +1,14 @@
 package com.linkedin.gms.factory.kafka;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
-
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import java.time.Duration;
 import java.util.Arrays;
 import java.util.Map;
-
 import lombok.extern.slf4j.Slf4j;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -26,98 +24,109 @@
 
 @Slf4j
 @Configuration
-@Import({KafkaSchemaRegistryFactory.class, AwsGlueSchemaRegistryFactory.class, InternalSchemaRegistryFactory.class})
+@Import({
+  KafkaSchemaRegistryFactory.class,
+  AwsGlueSchemaRegistryFactory.class,
+  InternalSchemaRegistryFactory.class
+})
 public class KafkaEventConsumerFactory {
-
-  private int kafkaEventConsumerConcurrency;
-
-  @Bean(name = "kafkaConsumerFactory")
-  protected DefaultKafkaConsumerFactory<String, GenericRecord> createConsumerFactory(
-      @Qualifier("configurationProvider") ConfigurationProvider provider,
-      KafkaProperties baseKafkaProperties,
-      SchemaRegistryConfig schemaRegistryConfig) {
-    kafkaEventConsumerConcurrency = provider.getKafka().getListener().getConcurrency();
-
-    KafkaConfiguration kafkaConfiguration = provider.getKafka();
-    Map<String, Object> customizedProperties = buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration,
-        schemaRegistryConfig);
-
-    return new DefaultKafkaConsumerFactory<>(customizedProperties);
-  }
-
-  @Bean(name = "duheKafkaConsumerFactory")
-  protected DefaultKafkaConsumerFactory<String, GenericRecord> duheKafkaConsumerFactory(
-      @Qualifier("configurationProvider") ConfigurationProvider provider,
-      KafkaProperties baseKafkaProperties,
-      @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig schemaRegistryConfig) {
-
-    KafkaConfiguration kafkaConfiguration = provider.getKafka();
-    Map<String, Object> customizedProperties = buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration,
-        schemaRegistryConfig);
-
-    return new DefaultKafkaConsumerFactory<>(customizedProperties);
-  }
-
-  private static Map<String, Object> buildCustomizedProperties(KafkaProperties baseKafkaProperties,
-      KafkaConfiguration kafkaConfiguration,
-      SchemaRegistryConfig schemaRegistryConfig) {
-    KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer();
-
-    // Specify (de)serializers for record keys and for record values.
-    consumerProps.setKeyDeserializer(StringDeserializer.class);
-    // Records will be flushed every 10 seconds.
-    consumerProps.setEnableAutoCommit(true);
-    consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
-
-
-    // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
-    if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) {
-      consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
-    } // else we rely on KafkaProperties which defaults to localhost:9092
-
-    Map<String, Object> customizedProperties = baseKafkaProperties.buildConsumerProperties();
-    customizedProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer());
-
-    // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
-    schemaRegistryConfig.getProperties().entrySet()
-        .stream()
-        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
-        .forEach(entry -> customizedProperties.put(entry.getKey(), entry.getValue()));
-
-    customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
-        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
-
-    return customizedProperties;
-  }
-
-  @Bean(name = "kafkaEventConsumer")
-  protected KafkaListenerContainerFactory<?> createInstance(
-      @Qualifier("kafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
-
-    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
-        new ConcurrentKafkaListenerContainerFactory<>();
-    factory.setConsumerFactory(kafkaConsumerFactory);
-    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
-    factory.setConcurrency(kafkaEventConsumerConcurrency);
-
-    log.info(String.format("Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s",
-        kafkaEventConsumerConcurrency));
-
-    return factory;
-  }
-
-  @Bean(name = "duheKafkaEventConsumer")
-  protected KafkaListenerContainerFactory<?> duheKafkaEventConsumer(
-      @Qualifier("duheKafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
-
-    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
-        new ConcurrentKafkaListenerContainerFactory<>();
-    factory.setConsumerFactory(kafkaConsumerFactory);
-    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
-    factory.setConcurrency(1);
-
-    log.info("Event-based DUHE KafkaListenerContainerFactory built successfully. Consumer concurrency = 1");
-    return factory;
-  }
-}
\ No newline at end of file
+  private int kafkaEventConsumerConcurrency;
+
+  @Bean(name = "kafkaConsumerFactory")
+  protected DefaultKafkaConsumerFactory<String, GenericRecord> createConsumerFactory(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties baseKafkaProperties,
+      SchemaRegistryConfig schemaRegistryConfig) {
+    kafkaEventConsumerConcurrency = provider.getKafka().getListener().getConcurrency();
+
+    KafkaConfiguration kafkaConfiguration = provider.getKafka();
+    Map<String, Object> customizedProperties =
+        buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, schemaRegistryConfig);
+
+    return new DefaultKafkaConsumerFactory<>(customizedProperties);
+  }
+
+  @Bean(name = "duheKafkaConsumerFactory")
+  protected DefaultKafkaConsumerFactory<String, GenericRecord> duheKafkaConsumerFactory(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties baseKafkaProperties,
+      @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig schemaRegistryConfig) {
+
+    KafkaConfiguration kafkaConfiguration = provider.getKafka();
+    Map<String, Object> customizedProperties =
+        buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, schemaRegistryConfig);
+
+    return new DefaultKafkaConsumerFactory<>(customizedProperties);
+  }
+
+  private static Map<String, Object> buildCustomizedProperties(
+      KafkaProperties baseKafkaProperties,
+      KafkaConfiguration kafkaConfiguration,
+      SchemaRegistryConfig schemaRegistryConfig) {
+    KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer();
+
+    // Specify (de)serializers for record keys and for record values.
+    consumerProps.setKeyDeserializer(StringDeserializer.class);
+    // Records will be flushed every 10 seconds.
+    consumerProps.setEnableAutoCommit(true);
+    consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
+
+    // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
+    if (kafkaConfiguration.getBootstrapServers() != null
+        && kafkaConfiguration.getBootstrapServers().length() > 0) {
+      consumerProps.setBootstrapServers(
+          Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
+    } // else we rely on KafkaProperties which defaults to localhost:9092
+
+    Map<String, Object> customizedProperties = baseKafkaProperties.buildConsumerProperties();
+    customizedProperties.put(
+        ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer());
+
+    // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
+    schemaRegistryConfig.getProperties().entrySet().stream()
+        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
+        .forEach(entry -> customizedProperties.put(entry.getKey(), entry.getValue()));
+
+    customizedProperties.put(
+        ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
+        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
+
+    return customizedProperties;
+  }
+
+  @Bean(name = "kafkaEventConsumer")
+  protected KafkaListenerContainerFactory<?> createInstance(
+      @Qualifier("kafkaConsumerFactory")
+          DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
+
+    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
+        new ConcurrentKafkaListenerContainerFactory<>();
+    factory.setConsumerFactory(kafkaConsumerFactory);
+    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
+    factory.setConcurrency(kafkaEventConsumerConcurrency);
+
+    log.info(
+        String.format(
+            "Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s",
+            kafkaEventConsumerConcurrency));
+
+    return factory;
+  }
+
+  @Bean(name = "duheKafkaEventConsumer")
+  protected KafkaListenerContainerFactory<?> duheKafkaEventConsumer(
+      @Qualifier("duheKafkaConsumerFactory")
+          DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
+
+    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
+        new ConcurrentKafkaListenerContainerFactory<>();
+    factory.setConsumerFactory(kafkaConsumerFactory);
+    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
+    factory.setConcurrency(1);
+
+    log.info(
+        "Event-based DUHE KafkaListenerContainerFactory built successfully. Consumer concurrency = 1");
+    return factory;
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
index 14ffc01d75781..58cb311c526bc 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
@@ -1,11 +1,10 @@
 package com.linkedin.gms.factory.kafka;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import java.time.Duration;
 import java.util.Arrays;
 import java.util.Map;
-
 import lombok.extern.slf4j.Slf4j;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -19,15 +18,15 @@
 import org.springframework.kafka.config.KafkaListenerContainerFactory;
 import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
 
-
 @Slf4j
 @Configuration
 @EnableConfigurationProperties({KafkaProperties.class})
 public class SimpleKafkaConsumerFactory {
 
   @Bean(name = "simpleKafkaConsumer")
-  protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurationProvider") ConfigurationProvider
-      provider, KafkaProperties properties) {
+  protected KafkaListenerContainerFactory<?> createInstance(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties properties) {
 
     KafkaConfiguration kafkaConfiguration = provider.getKafka();
     KafkaProperties.Consumer consumerProps = properties.getConsumer();
@@ -39,13 +38,16 @@ protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurat
     consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
 
     // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
-    if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) {
-      consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
+    if (kafkaConfiguration.getBootstrapServers() != null
+        && kafkaConfiguration.getBootstrapServers().length() > 0) {
+      consumerProps.setBootstrapServers(
+          Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
     } // else we rely on KafkaProperties which defaults to localhost:9092
 
     Map<String, Object> customizedProperties = properties.buildConsumerProperties();
-    customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
-        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
+    customizedProperties.put(
+        ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
+        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
 
     ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
         new ConcurrentKafkaListenerContainerFactory<>();
@@ -56,4 +58,4 @@ protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurat
 
     return factory;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java
index 07cbccd93c595..f79026c8ee337 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java
@@ -5,13 +5,14 @@
 import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
 import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
 
-
 public class ThreadPoolContainerCustomizer
-    implements ContainerCustomizer<String, GenericRecord, ConcurrentMessageListenerContainer<String, GenericRecord>> {
+    implements ContainerCustomizer<
+        String, GenericRecord, ConcurrentMessageListenerContainer<String, GenericRecord>> {
   @Override
   public void configure(ConcurrentMessageListenerContainer<String, GenericRecord> container) {
     ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
-    // Default Queue Capacity is set to max, so we want to allow the thread pool to add concurrent threads up to configured value
+    // Default Queue Capacity is set to max, so we want to allow the thread pool to add concurrent
+    // threads up to configured value
     threadPoolTaskExecutor.setCorePoolSize(container.getConcurrency());
     threadPoolTaskExecutor.setMaxPoolSize(container.getConcurrency());
     threadPoolTaskExecutor.initialize();
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
index ac1cbbc5cc5ff..a88e1d971973b 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
@@ -17,17 +17,19 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = AwsGlueSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = AwsGlueSchemaRegistryFactory.TYPE)
 public class AwsGlueSchemaRegistryFactory {
 
   public static final String TYPE = "AWS_GLUE";
 
   @Value("${kafka.schemaRegistry.awsGlue.region}")
   private String awsRegion;
+
   @Value("${kafka.schemaRegistry.awsGlue.registryName}")
   private Optional<String> registryName;
 
@@ -35,7 +37,8 @@ public class AwsGlueSchemaRegistryFactory {
   @Nonnull
   protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) {
     Map<String, Object> props = new HashMap<>();
-    // FIXME: Properties for this factory should come from ConfigurationProvider object, specifically under the
+    // FIXME: Properties for this factory should come from ConfigurationProvider object,
+    // specifically under the
     // KafkaConfiguration class. See InternalSchemaRegistryFactory as an example.
     props.put(AWSSchemaRegistryConstants.AWS_REGION, awsRegion);
     props.put(AWSSchemaRegistryConstants.DATA_FORMAT, "AVRO");
@@ -43,7 +46,7 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr
     props.put(AWSSchemaRegistryConstants.AVRO_RECORD_TYPE, AvroRecordType.GENERIC_RECORD.getName());
     registryName.ifPresent(s -> props.put(AWSSchemaRegistryConstants.REGISTRY_NAME, s));
     log.info("Creating AWS Glue registry");
-    return new SchemaRegistryConfig(GlueSchemaRegistryKafkaSerializer.class, GlueSchemaRegistryKafkaDeserializer.class,
-        props);
+    return new SchemaRegistryConfig(
+        GlueSchemaRegistryKafkaSerializer.class, GlueSchemaRegistryKafkaDeserializer.class, props);
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java
index aeef166a077c7..4819984307af9 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/DUHESchemaRegistryFactory.java
@@ -1,42 +1,40 @@
 package com.linkedin.gms.factory.kafka.schemaregistry;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
+import static com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener.TOPIC_NAME;
+
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.metadata.boot.kafka.MockDUHEDeserializer;
 import com.linkedin.metadata.boot.kafka.MockDUHESerializer;
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
+import java.util.HashMap;
+import java.util.Map;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.HashMap;
-import java.util.Map;
-
-import static com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener.TOPIC_NAME;
-
 @Slf4j
 @Configuration
 public class DUHESchemaRegistryFactory {
 
-    public static final String DUHE_SCHEMA_REGISTRY_TOPIC_KEY = "duheTopicName";
+  public static final String DUHE_SCHEMA_REGISTRY_TOPIC_KEY = "duheTopicName";
 
-    @Value(TOPIC_NAME)
-    private String duheTopicName;
+  @Value(TOPIC_NAME)
+  private String duheTopicName;
 
-    /**
-     * Configure Kafka Producer/Consumer processes with a custom schema registry.
-     */
-    @Bean("duheSchemaRegistryConfig")
-    protected SchemaRegistryConfig duheSchemaRegistryConfig(ConfigurationProvider provider) {
-        Map<String, Object> props = new HashMap<>();
-        KafkaConfiguration kafkaConfiguration = provider.getKafka();
+  /** Configure Kafka Producer/Consumer processes with a custom schema registry. */
+  @Bean("duheSchemaRegistryConfig")
+  protected SchemaRegistryConfig duheSchemaRegistryConfig(ConfigurationProvider provider) {
+    Map<String, Object> props = new HashMap<>();
+    KafkaConfiguration kafkaConfiguration = provider.getKafka();
 
-        props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaConfiguration
-            .getSchemaRegistry().getUrl());
-        props.put(DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName);
+    props.put(
+        AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
+        kafkaConfiguration.getSchemaRegistry().getUrl());
+    props.put(DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName);
 
-        log.info("DataHub System Update Registry");
-        return new SchemaRegistryConfig(MockDUHESerializer.class, MockDUHEDeserializer.class, props);
-    }
+    log.info("DataHub System Update Registry");
+    return new SchemaRegistryConfig(MockDUHESerializer.class, MockDUHEDeserializer.class, props);
+  }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java
index 217dc15bbc3e8..8c814e5054758 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java
@@ -1,8 +1,8 @@
 package com.linkedin.gms.factory.kafka.schemaregistry;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.common.TopicConventionFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.registry.SchemaRegistryService;
 import com.linkedin.metadata.registry.SchemaRegistryServiceImpl;
 import com.linkedin.mxe.TopicConvention;
@@ -19,27 +19,30 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;
 
-
 @Slf4j
 @Configuration
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = InternalSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = InternalSchemaRegistryFactory.TYPE)
 public class InternalSchemaRegistryFactory {
 
   public static final String TYPE = "INTERNAL";
 
-  /**
-   * Configure Kafka Producer/Consumer processes with a custom schema registry.
-   */
+  /** Configure Kafka Producer/Consumer processes with a custom schema registry. */
   @Bean("schemaRegistryConfig")
   @Nonnull
-  protected SchemaRegistryConfig getInstance(@Qualifier("configurationProvider") ConfigurationProvider provider) {
+  protected SchemaRegistryConfig getInstance(
+      @Qualifier("configurationProvider") ConfigurationProvider provider) {
     Map<String, Object> props = new HashMap<>();
     KafkaConfiguration kafkaConfiguration = provider.getKafka();
 
-    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaConfiguration
-        .getSchemaRegistry().getUrl());
+    props.put(
+        AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
+        kafkaConfiguration.getSchemaRegistry().getUrl());
 
-    log.info("Creating internal registry configuration for url {}", kafkaConfiguration.getSchemaRegistry().getUrl());
+    log.info(
+        "Creating internal registry configuration for url {}",
+        kafkaConfiguration.getSchemaRegistry().getUrl());
     return new SchemaRegistryConfig(KafkaAvroSerializer.class, KafkaAvroDeserializer.class, props);
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java
index 7b72ba3f3bb88..e6c255b99a9ff 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java
@@ -21,7 +21,9 @@
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = KafkaSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = KafkaSchemaRegistryFactory.TYPE)
 public class KafkaSchemaRegistryFactory {
 
   public static final String TYPE = "KAFKA";
@@ -48,7 +50,8 @@ public class KafkaSchemaRegistryFactory {
   @Nonnull
   protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) {
     Map<String, Object> props = new HashMap<>();
-    // FIXME: Properties for this factory should come from ConfigurationProvider object, specifically under the
+    // FIXME: Properties for this factory should come from ConfigurationProvider object,
+    // specifically under the
     // KafkaConfiguration class. See InternalSchemaRegistryFactory as an example.
     props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaSchemaRegistryUrl);
     props.put(withNamespace(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), sslTruststoreLocation);
@@ -60,8 +63,11 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr
     if (sslKeystoreLocation.isEmpty()) {
       log.info("creating schema registry config using url: {}", kafkaSchemaRegistryUrl);
     } else {
-      log.info("creating schema registry config using url: {}, keystore location: {} and truststore location: {}",
-          kafkaSchemaRegistryUrl, sslTruststoreLocation, sslKeystoreLocation);
+      log.info(
+          "creating schema registry config using url: {}, keystore location: {} and truststore location: {}",
+          kafkaSchemaRegistryUrl,
+          sslTruststoreLocation,
+          sslKeystoreLocation);
     }
 
     return new SchemaRegistryConfig(KafkaAvroSerializer.class, KafkaAvroDeserializer.class, props);
@@ -70,4 +76,4 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr
   private String withNamespace(String configKey) {
     return SchemaRegistryClientConfig.CLIENT_NAMESPACE + configKey;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java
index 1e2962bbda7c8..004a7abb88489 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java
@@ -3,7 +3,6 @@
 import java.util.Map;
 import lombok.Data;
 
-
 @Data
 public class SchemaRegistryConfig {
   private final Class<?> serializer;
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
index 8596a14b7fc24..1589b33862bfe 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
@@ -1,10 +1,9 @@
 package com.linkedin.gms.factory.lineage;
 
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.client.JavaEntityClient;
-import javax.annotation.Nonnull;
-
 import com.linkedin.metadata.service.LineageService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
@@ -12,7 +11,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class LineageServiceFactory {
@@ -26,4 +24,4 @@ public class LineageServiceFactory {
   protected LineageService getInstance() throws Exception {
     return new LineageService(this._javaEntityClient);
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java
index 3a1f18692fdc6..ff48a922adf22 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.ownership;
 
 import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.client.JavaEntityClient;
 import com.linkedin.metadata.service.OwnershipTypeService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class OwnershipTypeServiceFactory {
@@ -30,4 +29,4 @@ public class OwnershipTypeServiceFactory {
   protected OwnershipTypeService getInstance() throws Exception {
     return new OwnershipTypeService(_javaEntityClient, _authentication);
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java
index f98c5bd50467d..cf81cbf70d5eb 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.query;
 
 import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.client.JavaEntityClient;
 import com.linkedin.metadata.service.QueryService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class QueryServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java
index 36b203f677c9c..dc68451c6fce1 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java
@@ -10,9 +10,9 @@
 import com.linkedin.metadata.recommendation.RecommendationsService;
 import com.linkedin.metadata.recommendation.candidatesource.DomainsCandidateSource;
 import com.linkedin.metadata.recommendation.candidatesource.MostPopularSource;
+import com.linkedin.metadata.recommendation.candidatesource.RecentlyEditedSource;
 import com.linkedin.metadata.recommendation.candidatesource.RecentlySearchedSource;
 import com.linkedin.metadata.recommendation.candidatesource.RecentlyViewedSource;
-import com.linkedin.metadata.recommendation.candidatesource.RecentlyEditedSource;
 import com.linkedin.metadata.recommendation.candidatesource.RecommendationSource;
 import com.linkedin.metadata.recommendation.candidatesource.TopPlatformsSource;
 import com.linkedin.metadata.recommendation.candidatesource.TopTagsSource;
@@ -26,10 +26,15 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({TopPlatformsCandidateSourceFactory.class, RecentlyEditedCandidateSourceFactory.class,
-    MostPopularCandidateSourceFactory.class, TopTagsCandidateSourceFactory.class, TopTermsCandidateSourceFactory.class, DomainsCandidateSourceFactory.class})
+@Import({
+  TopPlatformsCandidateSourceFactory.class,
+  RecentlyEditedCandidateSourceFactory.class,
+  MostPopularCandidateSourceFactory.class,
+  TopTagsCandidateSourceFactory.class,
+  TopTermsCandidateSourceFactory.class,
+  DomainsCandidateSourceFactory.class
+})
 public class RecommendationServiceFactory {
 
   @Autowired
@@ -69,11 +74,16 @@ public class RecommendationServiceFactory {
   protected RecommendationsService getInstance() {
     // TODO: Make this class-name pluggable to minimize merge conflict potential.
     // This is where you can add new recommendation modules.
-    final List<RecommendationSource> candidateSources = ImmutableList.of(
-        topPlatformsCandidateSource,
-        domainsCandidateSource,
-        recentlyViewedCandidateSource, recentlyEditedCandidateSource, _mostPopularCandidateSource,
-        topTagsCandidateSource, topTermsCandidateSource, recentlySearchedCandidateSource);
+    final List<RecommendationSource> candidateSources =
+        ImmutableList.of(
+            topPlatformsCandidateSource,
+            domainsCandidateSource,
+            recentlyViewedCandidateSource,
+            recentlyEditedCandidateSource,
+            _mostPopularCandidateSource,
+            topTagsCandidateSource,
+            topTermsCandidateSource,
+            recentlySearchedCandidateSource);
     return new RecommendationsService(candidateSources, new SimpleRecommendationRanker());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
index c266b3635b16f..f3be4db147399 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
@@ -14,9 +14,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  EntityServiceFactory.class
+})
 public class MostPopularCandidateSourceFactory {
   @Autowired
   @Qualifier("elasticSearchRestHighLevelClient")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
index 109cc8dbc82d1..ac227faf06c4c 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
@@ -14,9 +14,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  EntityServiceFactory.class
+})
 public class RecentlyEditedCandidateSourceFactory {
   @Autowired
   @Qualifier("elasticSearchRestHighLevelClient")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java
index 5209f65a2ec63..05b6f974eedca 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({RestHighLevelClientFactory.class, IndexConventionFactory.class})
 public class RecentlySearchedCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
index aea40b4d8eb46..6f17846efc1cd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java
@@ -14,9 +14,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  EntityServiceFactory.class
+})
 public class RecentlyViewedCandidateSourceFactory {
   @Autowired
   @Qualifier("elasticSearchRestHighLevelClient")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
index fc04bbcce31ee..ad241e7717545 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java
@@ -12,7 +12,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({EntityServiceFactory.class, EntitySearchServiceFactory.class})
 public class TopPlatformsCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java
index 857a788454c34..fe5c2d03d1907 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java
@@ -10,7 +10,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({EntitySearchServiceFactory.class})
 public class TopTagsCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java
index b8d50169e49ab..36c53936094ff 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java
@@ -10,7 +10,6 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
 @Import({EntitySearchServiceFactory.class})
 public class TopTermsCandidateSourceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java
index c99d429e986b6..e4e7d04e311da 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java
@@ -2,27 +2,28 @@
 
 import com.linkedin.gms.factory.common.IndexConventionFactory;
 import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import javax.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Value;
 import org.opensearch.client.RestHighLevelClient;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
-/**
- * Factory for components required for any services using elasticsearch
- */
+/** Factory for components required for any services using elasticsearch */
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, ElasticSearchBulkProcessorFactory.class,
-    ElasticSearchIndexBuilderFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  ElasticSearchBulkProcessorFactory.class,
+  ElasticSearchIndexBuilderFactory.class
+})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class BaseElasticSearchComponentsFactory {
   @lombok.Value
@@ -56,6 +57,7 @@ public static class BaseElasticSearchComponents {
   @Bean(name = "baseElasticSearchComponents")
   @Nonnull
   protected BaseElasticSearchComponents getInstance() {
-    return new BaseElasticSearchComponents(searchClient, indexConvention, bulkProcessor, indexBuilder, numRetries);
+    return new BaseElasticSearchComponents(
+        searchClient, indexConvention, bulkProcessor, indexBuilder, numRetries);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java
index 845c63c32e0fd..d2292b215e62a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java
@@ -1,8 +1,8 @@
 package com.linkedin.gms.factory.search;
 
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.search.EntitySearchService;
 import com.linkedin.metadata.search.client.CachingEntitySearchService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -13,7 +13,6 @@
 import org.springframework.context.annotation.Primary;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class CachingEntitySearchServiceFactory {
@@ -22,8 +21,7 @@ public class CachingEntitySearchServiceFactory {
   @Qualifier("entitySearchService")
   private EntitySearchService entitySearchService;
 
-  @Autowired
-  private CacheManager cacheManager;
+  @Autowired private CacheManager cacheManager;
 
   @Value("${searchService.resultBatchSize}")
   private Integer batchSize;
@@ -36,9 +34,6 @@ public class CachingEntitySearchServiceFactory {
   @Nonnull
   protected CachingEntitySearchService getInstance() {
     return new CachingEntitySearchService(
-        cacheManager,
-        entitySearchService,
-        batchSize,
-        enableCache);
+        cacheManager, entitySearchService, batchSize, enableCache);
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java
index 5deffdb01d247..64b1fcc2f5695 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java
@@ -1,10 +1,9 @@
 package com.linkedin.gms.factory.search;
 
 import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
+import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
-
-import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import lombok.extern.slf4j.Slf4j;
 import org.opensearch.action.support.WriteRequest;
 import org.opensearch.client.RestHighLevelClient;
@@ -16,7 +15,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Slf4j
 @Configuration
 @Import({RestHighLevelClientFactory.class})
@@ -51,13 +49,13 @@ public class ElasticSearchBulkProcessorFactory {
   @Nonnull
   protected ESBulkProcessor getInstance() {
     return ESBulkProcessor.builder(searchClient)
-      .async(async)
-      .bulkFlushPeriod(bulkFlushPeriod)
-      .bulkRequestsLimit(bulkRequestsLimit)
-      .retryInterval(retryInterval)
-      .numRetries(numRetries)
-      .batchDelete(enableBatchDelete)
-      .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.valueOf(refreshPolicy))
-      .build();
+        .async(async)
+        .bulkFlushPeriod(bulkFlushPeriod)
+        .bulkRequestsLimit(bulkRequestsLimit)
+        .retryInterval(retryInterval)
+        .numRetries(numRetries)
+        .batchDelete(enableBatchDelete)
+        .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.valueOf(refreshPolicy))
+        .build();
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java
index b619ee9516dce..7bf04b467d205 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java
@@ -1,18 +1,23 @@
 package com.linkedin.gms.factory.search;
 
+import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN;
+
 import com.google.gson.Gson;
 import com.google.gson.reflect.TypeToken;
 import com.linkedin.gms.factory.common.GitVersionFactory;
 import com.linkedin.gms.factory.common.IndexConventionFactory;
 import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import com.linkedin.metadata.version.GitVersion;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-
-import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import org.opensearch.client.RestHighLevelClient;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -22,14 +27,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-import java.util.Map;
-import java.util.Optional;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN;
-
-
 @Configuration
 @Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, GitVersionFactory.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@@ -66,30 +63,41 @@ public class ElasticSearchIndexBuilderFactory {
   @Bean(name = "elasticSearchIndexSettingsOverrides")
   @Nonnull
   protected Map<String, Map<String, String>> getIndexSettingsOverrides(
-          @Qualifier(INDEX_CONVENTION_BEAN) IndexConvention indexConvention) {
+      @Qualifier(INDEX_CONVENTION_BEAN) IndexConvention indexConvention) {
     return Stream.concat(
             parseIndexSettingsMap(indexSettingOverrides).entrySet().stream()
-            .map(e -> Map.entry(indexConvention.getIndexName(e.getKey()), e.getValue())),
-        parseIndexSettingsMap(entityIndexSettingOverrides).entrySet().stream()
-            .map(e -> Map.entry(indexConvention.getEntityIndexName(e.getKey()), e.getValue())))
-        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+                .map(e -> Map.entry(indexConvention.getIndexName(e.getKey()), e.getValue())),
+            parseIndexSettingsMap(entityIndexSettingOverrides).entrySet().stream()
+                .map(e -> Map.entry(indexConvention.getEntityIndexName(e.getKey()), e.getValue())))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
   }
 
   @Bean(name = "elasticSearchIndexBuilder")
   @Nonnull
   protected ESIndexBuilder getInstance(
-      @Qualifier("elasticSearchIndexSettingsOverrides") Map<String, Map<String, String>> overrides,
-      final ConfigurationProvider configurationProvider, final GitVersion gitVersion) {
-    return new ESIndexBuilder(searchClient, numShards, numReplicas, numRetries, refreshIntervalSeconds, overrides,
-        enableSettingsReindex, enableMappingsReindex, configurationProvider.getElasticSearch(), gitVersion);
+      @Qualifier("elasticSearchIndexSettingsOverrides") Map<String, Map<String, String>> overrides,
+      final ConfigurationProvider configurationProvider,
+      final GitVersion gitVersion) {
+    return new ESIndexBuilder(
+        searchClient,
+        numShards,
+        numReplicas,
+        numRetries,
+        refreshIntervalSeconds,
+        overrides,
+        enableSettingsReindex,
+        enableMappingsReindex,
+        configurationProvider.getElasticSearch(),
+        gitVersion);
   }
 
   @Nonnull
   private static Map<String, Map<String, String>> parseIndexSettingsMap(@Nullable String json) {
-    Optional<Map<String, Map<String, String>>> parseOpt = Optional.ofNullable(
-        new Gson().fromJson(json,
-            new TypeToken<Map<String, Map<String, String>>>() { }.getType()));
+    Optional<Map<String, Map<String, String>>> parseOpt =
+        Optional.ofNullable(
+            new Gson()
+                .fromJson(json, new TypeToken<Map<String, Map<String, String>>>() {}.getType()));
     return parseOpt.orElse(Map.of());
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
index 6d8a62ac1fd18..2b6d495e4fe33 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
@@ -1,13 +1,12 @@
 package com.linkedin.gms.factory.search;
 
-import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
-import com.linkedin.metadata.config.search.SearchConfiguration;
-import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
+import com.linkedin.metadata.config.search.SearchConfiguration;
+import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.elasticsearch.ElasticSearchService;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
@@ -15,8 +14,9 @@
 import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO;
 import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO;
 import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import java.io.IOException;
 import javax.annotation.Nonnull;
-
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -25,9 +25,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-import java.io.IOException;
-
-
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@@ -47,30 +44,47 @@ public class ElasticSearchServiceFactory {
   @Qualifier("settingsBuilder")
   private SettingsBuilder settingsBuilder;
 
-  @Autowired
-  private EntityIndexBuilders entityIndexBuilders;
+  @Autowired private EntityIndexBuilders entityIndexBuilders;
 
-  @Autowired
-  private ConfigurationProvider configurationProvider;
+  @Autowired private ConfigurationProvider configurationProvider;
 
   @Bean(name = "elasticSearchService")
   @Nonnull
-  protected ElasticSearchService getInstance(ConfigurationProvider configurationProvider) throws IOException {
+  protected ElasticSearchService getInstance(ConfigurationProvider configurationProvider)
+      throws IOException {
     log.info("Search configuration: {}", configurationProvider.getElasticSearch().getSearch());
 
-    ElasticSearchConfiguration elasticSearchConfiguration = configurationProvider.getElasticSearch();
+    ElasticSearchConfiguration elasticSearchConfiguration =
+        configurationProvider.getElasticSearch();
     SearchConfiguration searchConfiguration = elasticSearchConfiguration.getSearch();
-    CustomSearchConfiguration customSearchConfiguration = searchConfiguration.getCustom() == null ? null
+    CustomSearchConfiguration customSearchConfiguration =
+        searchConfiguration.getCustom() == null
+            ? null
             : searchConfiguration.getCustom().resolve(YAML_MAPPER);
 
     ESSearchDAO esSearchDAO =
-        new ESSearchDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
-            configurationProvider.getFeatureFlags().isPointInTimeCreationEnabled(),
-            elasticSearchConfiguration.getImplementation(), searchConfiguration, customSearchConfiguration);
-    return new ElasticSearchService(entityIndexBuilders, esSearchDAO,
-        new ESBrowseDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
-            searchConfiguration, customSearchConfiguration),
-        new ESWriteDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
-            components.getBulkProcessor(), components.getNumRetries()));
+        new ESSearchDAO(
+            entityRegistry,
+            components.getSearchClient(),
+            components.getIndexConvention(),
+            configurationProvider.getFeatureFlags().isPointInTimeCreationEnabled(),
+            elasticSearchConfiguration.getImplementation(),
+            searchConfiguration,
+            customSearchConfiguration);
+    return new ElasticSearchService(
+        entityIndexBuilders,
+        esSearchDAO,
+        new ESBrowseDAO(
+            entityRegistry,
+            components.getSearchClient(),
+            components.getIndexConvention(),
+            searchConfiguration,
+            customSearchConfiguration),
+        new ESWriteDAO(
+            entityRegistry,
+            components.getSearchClient(),
+            components.getIndexConvention(),
+            components.getBulkProcessor(),
+            components.getNumRetries()));
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java
index 6bb206ee3ad61..334194b95c162 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java
@@ -10,26 +10,28 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class EntityIndexBuildersFactory {
 
-    @Autowired
-    @Qualifier("baseElasticSearchComponents")
-    private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components;
-
-    @Autowired
-    @Qualifier("entityRegistry")
-    private EntityRegistry entityRegistry;
-
-    @Autowired
-    @Qualifier("settingsBuilder")
-    private SettingsBuilder settingsBuilder;
-
-
-    @Bean
-    protected EntityIndexBuilders entityIndexBuilders() {
-        return new EntityIndexBuilders(components.getIndexBuilder(), entityRegistry, components.getIndexConvention(), settingsBuilder);
-    }
-}
\ No newline at end of file
+  @Autowired
+  @Qualifier("baseElasticSearchComponents")
+  private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components;
+
+  @Autowired
+  @Qualifier("entityRegistry")
+  private EntityRegistry entityRegistry;
+
+  @Autowired
+  @Qualifier("settingsBuilder")
+  private SettingsBuilder settingsBuilder;
+
+  @Bean
+  protected EntityIndexBuilders entityIndexBuilders() {
+    return new EntityIndexBuilders(
+        components.getIndexBuilder(),
+        entityRegistry,
+        components.getIndexConvention(),
+        settingsBuilder);
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java
index 49dab31cca1d0..38fd27fb44024 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java
@@ -10,7 +10,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;
 
-
 @Configuration
 @Import({ElasticSearchServiceFactory.class})
 public class EntitySearchServiceFactory {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
index e2eef83bc6e3f..17103240c938b 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java
@@ -2,10 +2,10 @@
 
 import com.linkedin.gms.factory.common.GraphServiceFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.search.LineageSearchService;
 import com.linkedin.metadata.search.SearchService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import javax.annotation.Nonnull;
 import org.springframework.cache.CacheManager;
 import org.springframework.context.annotation.Bean;
@@ -14,7 +14,6 @@
 import org.springframework.context.annotation.Primary;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @Import({GraphServiceFactory.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@@ -23,11 +22,17 @@ public class LineageSearchServiceFactory {
   @Bean(name = "relationshipSearchService")
   @Primary
   @Nonnull
-  protected LineageSearchService getInstance(CacheManager cacheManager, GraphService graphService,
-      SearchService searchService, ConfigurationProvider configurationProvider) {
+  protected LineageSearchService getInstance(
+      CacheManager cacheManager,
+      GraphService graphService,
+      SearchService searchService,
+      ConfigurationProvider configurationProvider) {
     boolean cacheEnabled = configurationProvider.getFeatureFlags().isLineageSearchCacheEnabled();
-    return new LineageSearchService(searchService, graphService,
-        cacheEnabled ? cacheManager.getCache("relationshipSearchService") : null, cacheEnabled,
-        configurationProvider.getCache().getSearch().getLineage());
+    return new LineageSearchService(
+        searchService,
+        graphService,
+        cacheEnabled ?
cacheManager.getCache("relationshipSearchService") : null, + cacheEnabled, + configurationProvider.getCache().getSearch().getLineage()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java index a186d2de770f3..9d9018bd31f07 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java @@ -1,13 +1,12 @@ package com.linkedin.gms.factory.search; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.search.transformer.SearchDocumentTransformer; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class SearchDocumentTransformerFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java index 64bb0218a0d71..1cb905665e489 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java @@ -1,13 +1,13 @@ package com.linkedin.gms.factory.search; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.cache.EntityDocCountCache; import com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.search.ranker.SearchRanker; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -16,7 +16,6 @@ import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class SearchServiceFactory { @@ -42,8 +41,10 @@ public class SearchServiceFactory { @Nonnull protected SearchService getInstance(ConfigurationProvider configurationProvider) { return new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, configurationProvider.getCache() - .getHomepage().getEntityCounts()), + new EntityDocCountCache( + entityRegistry, + entitySearchService, + configurationProvider.getCache().getHomepage().getEntityCounts()), cachingEntitySearchService, searchRanker); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java index 
840a370957706..ce1d6f12c58b4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.search; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @Import(EntityRegistryFactory.class) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java index 1040edca30bfb..b010358bad81c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java @@ -7,7 +7,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; - @Configuration public class SearchRankerFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java index 60bcd9ea22be6..32ad2175c9052 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.search.views; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.ViewService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ViewServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java index a1cac07e3fb03..64093c54d0410 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java @@ -7,7 +7,6 @@ import 
org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; - @Configuration public class SecretServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java index 2e22d43913493..f0d09a815628d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.settings; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.SettingsService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class SettingsServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java index 2610ebd3528cd..b735e490f583e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java @@ -1,5 +1,7 @@ package com.linkedin.gms.factory.telemetry; +import static com.linkedin.gms.factory.telemetry.TelemetryUtils.*; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; import com.linkedin.datahub.graphql.generated.DateRange; @@ -12,13 +14,11 @@ import java.io.IOException; import java.util.Optional; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RestHighLevelClient; import org.joda.time.DateTime; import org.json.JSONObject; +import org.opensearch.client.RestHighLevelClient; import org.springframework.scheduling.annotation.Scheduled; -import static com.linkedin.gms.factory.telemetry.TelemetryUtils.*; - @Slf4j public class DailyReport { @@ -32,8 +32,12 @@ public class DailyReport { private MixpanelAPI mixpanel; private MessageBuilder mixpanelBuilder; - public DailyReport(IndexConvention indexConvention, RestHighLevelClient elasticClient, - ConfigurationProvider configurationProvider, EntityService entityService, GitVersion gitVersion) { + public DailyReport( + IndexConvention indexConvention, + RestHighLevelClient elasticClient, + ConfigurationProvider configurationProvider, + EntityService entityService, + GitVersion gitVersion) { this._indexConvention = indexConvention; this._elasticClient = elasticClient; this._configurationProvider = configurationProvider; @@ -43,7 +47,10 @@ public DailyReport(IndexConvention indexConvention, RestHighLevelClient elasticC String clientId = getClientId(entityService); // initialize MixPanel instance and message builder - mixpanel = new MixpanelAPI("https://track.datahubproject.io/mp/track", 
"https://track.datahubproject.io/mp/engage"); + mixpanel = + new MixpanelAPI( + "https://track.datahubproject.io/mp/track", + "https://track.datahubproject.io/mp/engage"); mixpanelBuilder = new MessageBuilder(MIXPANEL_TOKEN); // set user-level properties @@ -72,24 +79,48 @@ public void dailyReport() { DateTime lastWeek = endDate.minusWeeks(1); DateTime lastMonth = endDate.minusMonths(1); - DateRange dayRange = new DateRange(String.valueOf(yesterday.getMillis()), String.valueOf(endDate.getMillis())); - DateRange weekRange = new DateRange(String.valueOf(lastWeek.getMillis()), String.valueOf(endDate.getMillis())); - DateRange monthRange = new DateRange(String.valueOf(lastMonth.getMillis()), String.valueOf(endDate.getMillis())); + DateRange dayRange = + new DateRange(String.valueOf(yesterday.getMillis()), String.valueOf(endDate.getMillis())); + DateRange weekRange = + new DateRange(String.valueOf(lastWeek.getMillis()), String.valueOf(endDate.getMillis())); + DateRange monthRange = + new DateRange(String.valueOf(lastMonth.getMillis()), String.valueOf(endDate.getMillis())); int dailyActiveUsers = - analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(dayRange), - ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId")); + analyticsService.getHighlights( + analyticsService.getUsageIndexName(), + Optional.of(dayRange), + ImmutableMap.of(), + ImmutableMap.of(), + Optional.of("browserId")); int weeklyActiveUsers = - analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(weekRange), - ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId")); + analyticsService.getHighlights( + analyticsService.getUsageIndexName(), + Optional.of(weekRange), + ImmutableMap.of(), + ImmutableMap.of(), + Optional.of("browserId")); int monthlyActiveUsers = - analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(monthRange), - ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId")); + analyticsService.getHighlights( + analyticsService.getUsageIndexName(), + Optional.of(monthRange), + ImmutableMap.of(), + ImmutableMap.of(), + Optional.of("browserId")); // floor to nearest power of 10 - dailyActiveUsers = dailyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(dailyActiveUsers) / Math.log(2))); - weeklyActiveUsers = weeklyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(weeklyActiveUsers) / Math.log(2))); - monthlyActiveUsers = monthlyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(monthlyActiveUsers) / Math.log(2))); + dailyActiveUsers = + dailyActiveUsers <= 0 + ? 0 + : (int) Math.pow(2, (int) (Math.log(dailyActiveUsers) / Math.log(2))); + weeklyActiveUsers = + weeklyActiveUsers <= 0 + ? 0 + : (int) Math.pow(2, (int) (Math.log(weeklyActiveUsers) / Math.log(2))); + monthlyActiveUsers = + monthlyActiveUsers <= 0 + ? 
0 + : (int) Math.pow(2, (int) (Math.log(monthlyActiveUsers) / Math.log(2))); // set user-level properties JSONObject report = new JSONObject(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java index 8178ce1399aa3..b9330d5827419 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java @@ -8,7 +8,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class MixpanelApiFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java index 5385c5e81f804..f64766534469d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java @@ -8,10 +8,8 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) - public class MixpanelMessageBuilderFactory { private static final String MIXPANEL_TOKEN = "5ee83d940754d63cacbf7d34daa6f44a"; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java index 7cdca996a8131..4986e705fd7b4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java @@ -13,17 +13,20 @@ import org.springframework.context.annotation.Configuration; import org.springframework.scheduling.annotation.EnableScheduling; - @Slf4j @Configuration @EnableScheduling public class ScheduledAnalyticsFactory { - @Bean - @ConditionalOnProperty("telemetry.enabledServer") - public DailyReport dailyReport(@Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient, - @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention, - ConfigurationProvider configurationProvider, EntityService entityService, GitVersion gitVersion) { - return new DailyReport(indexConvention, elasticClient, configurationProvider, entityService, gitVersion); - } + @Bean + @ConditionalOnProperty("telemetry.enabledServer") + public DailyReport dailyReport( + @Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient, + @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention, + ConfigurationProvider configurationProvider, + EntityService entityService, + GitVersion gitVersion) { + return new DailyReport( + indexConvention, elasticClient, configurationProvider, entityService, gitVersion); + } } diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java index 3bbb542b2cf5a..748acb4a9499e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java @@ -1,45 +1,44 @@ package com.linkedin.gms.factory.telemetry; import com.linkedin.common.AuditStamp; -import com.linkedin.telemetry.TelemetryClientId; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.telemetry.TelemetryClientId; import java.util.UUID; - import lombok.extern.slf4j.Slf4j; - @Slf4j public final class TelemetryUtils { - public static final String CLIENT_ID_URN = "urn:li:telemetry:clientId"; - public static final String CLIENT_ID_ASPECT = "telemetryClientId"; - - private static String _clientId; + public static final String CLIENT_ID_URN = "urn:li:telemetry:clientId"; + public static final String CLIENT_ID_ASPECT = "telemetryClientId"; + private static String _clientId; - public static String getClientId(EntityService entityService) { - if (_clientId == null) { - createClientIdIfNotPresent(entityService); - RecordTemplate clientIdTemplate = entityService.getLatestAspect(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT); - // Should always be present here from above, so no need for null check - _clientId = ((TelemetryClientId) clientIdTemplate).getClientId(); - } - return _clientId; + public static String getClientId(EntityService entityService) { + if (_clientId == null) { + createClientIdIfNotPresent(entityService); + RecordTemplate clientIdTemplate = + entityService.getLatestAspect(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT); + // Should always be present here from above, so no need for null check + _clientId = ((TelemetryClientId) clientIdTemplate).getClientId(); } - - private static void createClientIdIfNotPresent(EntityService entityService) { - String uuid = UUID.randomUUID().toString(); - TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid); - final AuditStamp clientIdStamp = new AuditStamp(); - clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)); - clientIdStamp.setTime(System.currentTimeMillis()); - entityService.ingestAspectIfNotPresent(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null); - } - private TelemetryUtils() { - throw new UnsupportedOperationException(); - } - + return _clientId; + } + + private static void createClientIdIfNotPresent(EntityService entityService) { + String uuid = UUID.randomUUID().toString(); + TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid); + final AuditStamp clientIdStamp = new AuditStamp(); + clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)); + clientIdStamp.setTime(System.currentTimeMillis()); + entityService.ingestAspectIfNotPresent( + UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null); + } + + private TelemetryUtils() { + throw new UnsupportedOperationException(); + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java index 
bb166af5501b3..4e858fb5cdefd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.telemetry; import com.datahub.telemetry.TrackingService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.version.GitVersion; import com.mixpanel.mixpanelapi.MessageBuilder; import com.mixpanel.mixpanelapi.MixpanelAPI; @@ -15,19 +15,21 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class TrackingServiceFactory { @Autowired(required = false) @Qualifier("mixpanelApi") private MixpanelAPI _mixpanelAPI; + @Autowired(required = false) @Qualifier("mixpanelMessageBuilder") private MessageBuilder _mixpanelMessageBuilder; + @Autowired @Qualifier("dataHubSecretService") private SecretService _secretService; + @Autowired @Qualifier("entityService") private EntityService _entityService; @@ -40,7 +42,11 @@ public class TrackingServiceFactory { @ConditionalOnProperty("telemetry.enabledServer") @Scope("singleton") protected TrackingService getInstance() throws Exception { - return new TrackingService(this._mixpanelAPI, this._mixpanelMessageBuilder, this._secretService, - this._entityService, this._gitVersion); + return new TrackingService( + this._mixpanelAPI, + this._mixpanelMessageBuilder, + this._secretService, + this._entityService, + this._gitVersion); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java index 89a7e7dd8d71a..f1b040ed78f86 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java @@ -1,16 +1,18 @@ package com.linkedin.gms.factory.timeline; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.metadata.timeline.eventgenerator.AssertionRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DataProcessInstanceRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DatasetPropertiesChangeEventGenerator; -import com.linkedin.metadata.timeline.eventgenerator.GlossaryTermInfoChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DeprecationChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.EditableSchemaMetadataChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.EntityKeyChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.GlobalTagsChangeEventGenerator; +import 
com.linkedin.metadata.timeline.eventgenerator.GlossaryTermInfoChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.GlossaryTermsChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.InstitutionalMemoryChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.OwnershipChangeEventGenerator; @@ -25,35 +27,38 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static com.linkedin.metadata.Constants.*; - - @Configuration public class EntityChangeEventGeneratorRegistryFactory { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "entityChangeEventGeneratorRegistry") @DependsOn({"restliEntityClient", "systemAuthentication"}) @Singleton @Nonnull - protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry() { - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry + entityChangeEventGeneratorRegistry() { + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final Authentication systemAuthentication = applicationContext.getBean(Authentication.class); - final com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry registry = - new com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry(); + final com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry + registry = + new com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry(); registry.register(SCHEMA_METADATA_ASPECT_NAME, new SchemaMetadataChangeEventGenerator()); - registry.register(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataChangeEventGenerator()); + registry.register( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataChangeEventGenerator()); registry.register(GLOBAL_TAGS_ASPECT_NAME, new GlobalTagsChangeEventGenerator()); registry.register(GLOSSARY_TERMS_ASPECT_NAME, new GlossaryTermsChangeEventGenerator()); registry.register(OWNERSHIP_ASPECT_NAME, new OwnershipChangeEventGenerator()); - registry.register(INSTITUTIONAL_MEMORY_ASPECT_NAME, new InstitutionalMemoryChangeEventGenerator()); + registry.register( + INSTITUTIONAL_MEMORY_ASPECT_NAME, new InstitutionalMemoryChangeEventGenerator()); registry.register(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesChangeEventGenerator()); registry.register(GLOSSARY_TERM_INFO_ASPECT_NAME, new GlossaryTermInfoChangeEventGenerator()); registry.register(DOMAINS_ASPECT_NAME, new SingleDomainChangeEventGenerator()); registry.register(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesChangeEventGenerator()); - registry.register(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, new EditableDatasetPropertiesChangeEventGenerator()); + registry.register( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + new EditableDatasetPropertiesChangeEventGenerator()); // Entity Lifecycle Differs registry.register(DATASET_KEY_ASPECT_NAME, new EntityKeyChangeEventGenerator<>()); @@ -73,7 +78,8 @@ protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGenerat registry.register(ASSERTION_RUN_EVENT_ASPECT_NAME, new AssertionRunEventChangeEventGenerator()); // Data Process Instance differs - 
registry.register(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + registry.register( + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, new DataProcessInstanceRunEventChangeEventGenerator(entityClient)); // TODO: Add ML models. diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java index baa22d401387f..bc121da4e43dd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java @@ -1,19 +1,17 @@ package com.linkedin.gms.factory.timeline; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeline.TimelineServiceImpl; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.PropertySource; -import javax.annotation.Nonnull; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class TimelineServiceFactory { @@ -21,7 +19,8 @@ public class TimelineServiceFactory { @Bean(name = "timelineService") @DependsOn({"entityAspectDao", "entityService", "entityRegistry"}) @Nonnull - protected TimelineService timelineService(@Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry) { + protected TimelineService timelineService( + @Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry) { return new TimelineServiceImpl(aspectDao, entityRegistry); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java index e3cc772f21c40..bba82bb5d0569 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java @@ -2,8 +2,8 @@ import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.timeseries.elastic.ElasticSearchTimeseriesAspectService; import com.linkedin.metadata.timeseries.elastic.indexbuilder.TimeseriesAspectIndexBuilders; import javax.annotation.Nonnull; @@ -14,7 +14,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({BaseElasticSearchComponentsFactory.class, 
EntityRegistryFactory.class}) @@ -30,8 +29,13 @@ public class ElasticSearchTimeseriesAspectServiceFactory { @Bean(name = "elasticSearchTimeseriesAspectService") @Nonnull protected ElasticSearchTimeseriesAspectService getInstance() { - return new ElasticSearchTimeseriesAspectService(components.getSearchClient(), components.getIndexConvention(), - new TimeseriesAspectIndexBuilders(components.getIndexBuilder(), entityRegistry, - components.getIndexConvention()), entityRegistry, components.getBulkProcessor(), components.getNumRetries()); + return new ElasticSearchTimeseriesAspectService( + components.getSearchClient(), + components.getIndexConvention(), + new TimeseriesAspectIndexBuilders( + components.getIndexBuilder(), entityRegistry, components.getIndexConvention()), + entityRegistry, + components.getBulkProcessor(), + components.getNumRetries()); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java index 76090770ace11..7d4afa661aba0 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({ElasticSearchTimeseriesAspectServiceFactory.class}) public class TimeseriesAspectServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java index d2bd89de8767a..03e066a912e44 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java @@ -2,12 +2,14 @@ import com.datahub.authentication.Authentication; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.restli.DefaultRestliClientFactory; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.r2.transport.http.client.HttpClientFactory; import com.linkedin.restli.client.Client; import com.linkedin.usage.UsageClient; +import java.util.HashMap; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -15,10 +17,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; -import java.util.HashMap; -import java.util.Map; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class UsageClientFactory { @@ -49,13 +47,19 @@ public class UsageClientFactory { private ConfigurationProvider configurationProvider; @Bean("usageClient") - public UsageClient getUsageClient(@Qualifier("systemAuthentication") final Authentication systemAuthentication) { + public UsageClient getUsageClient( + @Qualifier("systemAuthentication") final 
Authentication systemAuthentication) { Map<String, String> params = new HashMap<>(); params.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, String.valueOf(timeoutMs)); - Client restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params); - return new UsageClient(restClient, new ExponentialBackoff(retryInterval), numRetries, systemAuthentication, - configurationProvider.getCache().getClient().getUsageClient()); + Client restClient = + DefaultRestliClientFactory.getRestLiClient( + gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params); + return new UsageClient( + restClient, + new ExponentialBackoff(retryInterval), + numRetries, + systemAuthentication, + configurationProvider.getCache().getClient().getUsageClient()); } } - diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java index 811ea84bc7240..2d1b79fdace48 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java @@ -7,10 +7,7 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; - -/** - * Responsible for coordinating boot-time logic. - */ +/** Responsible for coordinating boot-time logic. */ @Slf4j @Component public class BootstrapManager { @@ -30,22 +27,39 @@ public void start() { for (int i = 0; i < stepsToExecute.size(); i++) { final BootstrapStep step = stepsToExecute.get(i); if (step.getExecutionMode() == BootstrapStep.ExecutionMode.BLOCKING) { - log.info("Executing bootstrap step {}/{} with name {}...", i + 1, stepsToExecute.size(), step.name()); + log.info( + "Executing bootstrap step {}/{} with name {}...", + i + 1, + stepsToExecute.size(), + step.name()); try { step.execute(); } catch (Exception e) { - log.error(String.format("Caught exception while executing bootstrap step %s. Exiting...", step.name()), e); + log.error( + String.format( + "Caught exception while executing bootstrap step %s. Exiting...", step.name()), + e); System.exit(1); } } else { // Async - log.info("Starting asynchronous bootstrap step {}/{} with name {}...", i + 1, stepsToExecute.size(), step.name()); - CompletableFuture.runAsync(() -> { - try { - step.execute(); - } catch (Exception e) { - log.error(String.format("Caught exception while executing bootstrap step %s. Continuing...", step.name()), e); - } - }, _asyncExecutor); + log.info( + "Starting asynchronous bootstrap step {}/{} with name {}...", + i + 1, + stepsToExecute.size(), + step.name()); + CompletableFuture.runAsync( + () -> { + try { + step.execute(); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while executing bootstrap step %s. 
Continuing...", + step.name()), + e); + } + }, + _asyncExecutor); } } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java index 876a0871fa4cb..dc82fc4907edc 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java @@ -10,29 +10,19 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.upgrade.DataHubUpgradeResult; - -import javax.annotation.Nonnull; import java.net.URISyntaxException; +import javax.annotation.Nonnull; - -/** - * A single step in the Bootstrap process. - */ +/** A single step in the Bootstrap process. */ public interface BootstrapStep { - /** - * A human-readable name for the boot step. - */ + /** A human-readable name for the boot step. */ String name(); - /** - * Execute a boot-time step, or throw an exception on failure. - */ + /** Execute a boot-time step, or throw an exception on failure. */ void execute() throws Exception; - /** - * Return the execution mode of this step - */ + /** Return the execution mode of this step */ @Nonnull default ExecutionMode getExecutionMode() { return ExecutionMode.BLOCKING; @@ -46,16 +36,17 @@ enum ExecutionMode { } static Urn getUpgradeUrn(String upgradeId) { - return EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(upgradeId), - Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + return EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } static void setUpgradeResult(Urn urn, EntityService entityService) throws URISyntaxException { - final AuditStamp auditStamp = new AuditStamp() + final AuditStamp auditStamp = + new AuditStamp() .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) .setTime(System.currentTimeMillis()); - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult() - .setTimestampMs(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(urn); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java index 032b934a7ba87..801a902b7f835 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java @@ -16,24 +16,27 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Configuration; import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.EventListener; import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; -import org.springframework.context.annotation.Configuration; - -/** - * Responsible for coordinating starting steps that happen before the application starts up. 
- */ +/** Responsible for coordinating starting steps that happen before the application starts up. */ @Configuration @Slf4j @Component public class OnBootApplicationListener { - private static final Set<Integer> ACCEPTED_HTTP_CODES = Set.of(HttpStatus.SC_OK, HttpStatus.SC_MOVED_PERMANENTLY, - HttpStatus.SC_MOVED_TEMPORARILY, HttpStatus.SC_FORBIDDEN, HttpStatus.SC_UNAUTHORIZED); + private static final Set<Integer> ACCEPTED_HTTP_CODES = + Set.of( + HttpStatus.SC_OK, + HttpStatus.SC_MOVED_PERMANENTLY, + HttpStatus.SC_MOVED_TEMPORARILY, + HttpStatus.SC_FORBIDDEN, + HttpStatus.SC_UNAUTHORIZED); - private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); + private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = + String.format("%s:", WebApplicationContext.class.getName()); private final CloseableHttpClient httpClient = HttpClients.createDefault(); @@ -52,8 +55,10 @@ public class OnBootApplicationListener { @EventListener(ContextRefreshedEvent.class) public void onApplicationEvent(@Nonnull ContextRefreshedEvent event) { - log.warn("OnBootApplicationListener context refreshed! {} event: {}", - ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId()), event); + log.warn( + "OnBootApplicationListener context refreshed! {} event: {}", + ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId()), + event); String schemaRegistryType = provider.getKafka().getSchemaRegistry().getType(); if (ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId())) { if (InternalSchemaRegistryFactory.TYPE.equals(schemaRegistryType)) { @@ -66,29 +71,31 @@ public void onApplicationEvent(@Nonnull ContextRefreshedEvent event) { public Runnable isSchemaRegistryAPIServletReady() { return () -> { - final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); - int timeouts = _servletsWaitTimeout; - boolean openAPIServeletReady = false; - while (!openAPIServeletReady && timeouts > 0) { - try { - log.info("Sleeping for 1 second"); - Thread.sleep(1000); - StatusLine statusLine = httpClient.execute(request).getStatusLine(); - if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { - log.info("Connected! Authentication not tested."); - openAPIServeletReady = true; - } - } catch (IOException | InterruptedException e) { - log.info("Failed to connect to open servlet: {}", e.getMessage()); + final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); + int timeouts = _servletsWaitTimeout; + boolean openAPIServeletReady = false; + while (!openAPIServeletReady && timeouts > 0) { + try { + log.info("Sleeping for 1 second"); + Thread.sleep(1000); + StatusLine statusLine = httpClient.execute(request).getStatusLine(); + if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { + log.info("Connected! 
Authentication not tested."); + openAPIServeletReady = true; } - timeouts--; + } catch (IOException | InterruptedException e) { + log.info("Failed to connect to open servlet: {}", e.getMessage()); } - if (!openAPIServeletReady) { - log.error("Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", timeouts); - System.exit(1); - } else { + timeouts--; + } + if (!openAPIServeletReady) { + log.error( + "Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", + timeouts); + System.exit(1); + } else { _bootstrapManager.start(); - } + } }; } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java index dbbcf3a139bf1..9ccb2c3f650bd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java @@ -17,7 +17,6 @@ import java.util.Collections; import lombok.extern.slf4j.Slf4j; - @Slf4j public abstract class UpgradeStep implements BootstrapStep { @@ -30,8 +29,9 @@ public UpgradeStep(EntityService entityService, String version, String upgradeId this._entityService = entityService; this._version = version; this._upgradeId = upgradeId; - this._upgradeUrn = EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(upgradeId), - Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + this._upgradeUrn = + EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } @Override @@ -47,7 +47,8 @@ public void execute() throws Exception { upgrade(); ingestUpgradeResultAspect(); } catch (Exception e) { - String errorMessage = String.format("Error when running %s for version %s", _upgradeId, _version); + String errorMessage = + String.format("Error when running %s for version %s", _upgradeId, _version); cleanUpgradeAfterError(e, errorMessage); throw new RuntimeException(errorMessage, e); } @@ -62,18 +63,29 @@ public String name() { private boolean hasUpgradeRan() { try { - EntityResponse response = _entityService.getEntityV2(Constants.DATA_HUB_UPGRADE_ENTITY_NAME, _upgradeUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); - - if (response != null && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { - DataMap dataMap = response.getAspects().get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME).getValue().data(); + EntityResponse response = + _entityService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + _upgradeUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); + + if (response != null + && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { + DataMap dataMap = + response + .getAspects() + .get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) + .getValue() + .data(); DataHubUpgradeRequest request = new DataHubUpgradeRequest(dataMap); if (request.hasVersion() && request.getVersion().equals(_version)) { return true; } } } catch (Exception e) { - log.error("Error when checking to see if datahubUpgrade entity exists. Commencing with upgrade...", e); + log.error( + "Error when checking to see if datahubUpgrade entity exists. 
Commencing with upgrade...", + e); return false; } return false; @@ -81,7 +93,9 @@ private boolean hasUpgradeRan() { private void ingestUpgradeRequestAspect() throws URISyntaxException { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final DataHubUpgradeRequest upgradeRequest = new DataHubUpgradeRequest().setTimestampMs(System.currentTimeMillis()).setVersion(_version); @@ -97,8 +111,11 @@ private void ingestUpgradeRequestAspect() throws URISyntaxException { private void ingestUpgradeResultAspect() throws URISyntaxException { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(_upgradeUrn); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java index 4aed7791470da..8b0c72c4c91d5 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.boot.dependencies; -/** - * Empty interface for passing named bean references to bootstrap steps - */ +/** Empty interface for passing named bean references to bootstrap steps */ public interface BootstrapDependency { /** * Execute any dependent methods, avoids increasing module dependencies + * * @return true if the dependency has successfully executed its expected methods, false otherwise */ boolean waitForBootstrap(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java index c4e6c941303c8..70fa91ae61861 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java @@ -31,7 +31,6 @@ import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.transformer.SearchDocumentTransformer; - import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; @@ -44,10 +43,13 @@ import org.springframework.context.annotation.Scope; import org.springframework.core.io.Resource; - @Configuration -@Import({EntityServiceFactory.class, EntityRegistryFactory.class, EntitySearchServiceFactory.class, - SearchDocumentTransformerFactory.class}) +@Import({ + EntityServiceFactory.class, + EntityRegistryFactory.class, + EntitySearchServiceFactory.class, + SearchDocumentTransformerFactory.class +}) public class 
BootstrapManagerFactory { @Autowired @@ -82,8 +84,7 @@ public class BootstrapManagerFactory { @Qualifier("dataHubUpgradeKafkaListener") private BootstrapDependency _dataHubUpgradeKafkaListener; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}") private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -103,9 +104,15 @@ public class BootstrapManagerFactory { protected BootstrapManager createInstance() { final IngestRootUserStep ingestRootUserStep = new IngestRootUserStep(_entityService); final IngestPoliciesStep ingestPoliciesStep = - new IngestPoliciesStep(_entityRegistry, _entityService, _entitySearchService, _searchDocumentTransformer, _policiesResource); + new IngestPoliciesStep( + _entityRegistry, + _entityService, + _entitySearchService, + _searchDocumentTransformer, + _policiesResource); final IngestRolesStep ingestRolesStep = new IngestRolesStep(_entityService, _entityRegistry); - final IngestDataPlatformsStep ingestDataPlatformsStep = new IngestDataPlatformsStep(_entityService); + final IngestDataPlatformsStep ingestDataPlatformsStep = + new IngestDataPlatformsStep(_entityService); final IngestDataPlatformInstancesStep ingestDataPlatformInstancesStep = new IngestDataPlatformInstancesStep(_entityService, _migrationsDao); final RestoreGlossaryIndices restoreGlossaryIndicesStep = @@ -114,28 +121,34 @@ protected BootstrapManager createInstance() { new IndexDataPlatformsStep(_entityService, _entitySearchService, _entityRegistry); final RestoreDbtSiblingsIndices restoreDbtSiblingsIndices = new RestoreDbtSiblingsIndices(_entityService, _entityRegistry); - final RemoveClientIdAspectStep removeClientIdAspectStep = new RemoveClientIdAspectStep(_entityService); - final RestoreColumnLineageIndices restoreColumnLineageIndices = new RestoreColumnLineageIndices(_entityService, _entityRegistry); - final IngestDefaultGlobalSettingsStep ingestSettingsStep = new IngestDefaultGlobalSettingsStep(_entityService); - final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, - _configurationProvider); - final IngestOwnershipTypesStep ingestOwnershipTypesStep = new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); - - final List<BootstrapStep> finalSteps = new ArrayList<>(ImmutableList.of( - waitForSystemUpdateStep, - ingestRootUserStep, - ingestPoliciesStep, - ingestRolesStep, - ingestDataPlatformsStep, - ingestDataPlatformInstancesStep, - _ingestRetentionPoliciesStep, - ingestOwnershipTypesStep, - ingestSettingsStep, - restoreGlossaryIndicesStep, - removeClientIdAspectStep, - restoreDbtSiblingsIndices, - indexDataPlatformsStep, - restoreColumnLineageIndices)); + final RemoveClientIdAspectStep removeClientIdAspectStep = + new RemoveClientIdAspectStep(_entityService); + final RestoreColumnLineageIndices restoreColumnLineageIndices = + new RestoreColumnLineageIndices(_entityService, _entityRegistry); + final IngestDefaultGlobalSettingsStep ingestSettingsStep = + new IngestDefaultGlobalSettingsStep(_entityService); + final WaitForSystemUpdateStep waitForSystemUpdateStep = + new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); + final IngestOwnershipTypesStep ingestOwnershipTypesStep = + new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); + + final List<BootstrapStep> finalSteps = + new ArrayList<>( + ImmutableList.of( + waitForSystemUpdateStep, + 
ingestRootUserStep, + ingestPoliciesStep, + ingestRolesStep, + ingestDataPlatformsStep, + ingestDataPlatformInstancesStep, + _ingestRetentionPoliciesStep, + ingestOwnershipTypesStep, + ingestSettingsStep, + restoreGlossaryIndicesStep, + removeClientIdAspectStep, + restoreDbtSiblingsIndices, + indexDataPlatformsStep, + restoreColumnLineageIndices)); if (_upgradeDefaultBrowsePathsEnabled) { finalSteps.add(new UpgradeDefaultBrowsePathsStep(_entityService)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java index e038cb230c458..2436938c6c026 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java @@ -1,10 +1,10 @@ package com.linkedin.metadata.boot.factories; import com.linkedin.gms.factory.entity.RetentionServiceFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.boot.steps.IngestRetentionPoliciesStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @Import({RetentionServiceFactory.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -42,6 +41,11 @@ public class IngestRetentionPoliciesStepFactory { @Scope("singleton") @Nonnull protected IngestRetentionPoliciesStep createInstance() { - return new IngestRetentionPoliciesStep(_retentionService, _entityService, _enableRetention, _applyOnBootstrap, _pluginRegistryPath); + return new IngestRetentionPoliciesStep( + _retentionService, + _entityService, + _enableRetention, + _applyOnBootstrap, + _pluginRegistryPath); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java index 11d12072e12b7..263cc335a8a40 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java @@ -7,11 +7,9 @@ import com.linkedin.metadata.version.GitVersion; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; import com.linkedin.mxe.Topics; - import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; - import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.consumer.Consumer; @@ -27,7 +25,8 @@ import org.springframework.kafka.listener.MessageListenerContainer; import org.springframework.stereotype.Component; -// We don't disable this on GMS since we want GMS to also wait until the system is ready to read in case of +// We don't disable this on GMS since we want GMS to also wait until 
the system is ready to read in +
// case of
 // backwards incompatible query logic dependent on system updates.
 @Component("dataHubUpgradeKafkaListener")
 @Slf4j
@@ -36,14 +35,17 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap
 private final KafkaListenerEndpointRegistry registry;
- private static final String CONSUMER_GROUP = "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}";
+ private static final String CONSUMER_GROUP =
+ "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}";
 private static final String SUFFIX = "temp";
- public static final String TOPIC_NAME = "${DATAHUB_UPGRADE_HISTORY_TOPIC_NAME:" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME + "}";
+ public static final String TOPIC_NAME =
+ "${DATAHUB_UPGRADE_HISTORY_TOPIC_NAME:" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME + "}";
 private final DefaultKafkaConsumerFactory<String, GenericRecord> _defaultKafkaConsumerFactory;
 @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}")
 private String revision;
+
 private final GitVersion _gitVersion;
 private final ConfigurationProvider _configurationProvider;
@@ -53,35 +55,48 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap
 @Value(TOPIC_NAME)
 private String topicName;
- private final static AtomicBoolean IS_UPDATED = new AtomicBoolean(false);
+ private static final AtomicBoolean IS_UPDATED = new AtomicBoolean(false);
- public DataHubUpgradeKafkaListener(KafkaListenerEndpointRegistry registry,
- @Qualifier("duheKafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> defaultKafkaConsumerFactory,
- GitVersion gitVersion,
- ConfigurationProvider configurationProvider) {
+ public DataHubUpgradeKafkaListener(
+ KafkaListenerEndpointRegistry registry,
+ @Qualifier("duheKafkaConsumerFactory")
+ DefaultKafkaConsumerFactory<String, GenericRecord> defaultKafkaConsumerFactory,
+ GitVersion gitVersion,
+ ConfigurationProvider configurationProvider) {
 this.registry = registry;
 this._defaultKafkaConsumerFactory = defaultKafkaConsumerFactory;
 this._gitVersion = gitVersion;
 this._configurationProvider = configurationProvider;
 }
- // Constructs a consumer to read determine final offset to assign, prevents re-reading whole topic to get the latest version
+ // Constructs a consumer to determine the final offset to assign; prevents re-reading the whole
+ // topic to get the latest version
 @Override
- public void onPartitionsAssigned(Map<TopicPartition, Long> assignments, ConsumerSeekCallback callback) {
+ public void onPartitionsAssigned(
+ Map<TopicPartition, Long> assignments, ConsumerSeekCallback callback) {
 try (Consumer<String, GenericRecord> kafkaConsumer =
 _defaultKafkaConsumerFactory.createConsumer(consumerGroup, SUFFIX)) {
 final Map<TopicPartition, Long> offsetMap = kafkaConsumer.endOffsets(assignments.keySet());
 assignments.entrySet().stream()
 .filter(entry -> topicName.equals(entry.getKey().topic()))
- .forEach(entry -> {
- log.info("Partition: {} Current Offset: {}", entry.getKey(), offsetMap.get(entry.getKey()));
- long newOffset = offsetMap.get(entry.getKey()) - 1;
- callback.seek(entry.getKey().topic(), entry.getKey().partition(), Math.max(0, newOffset));
- });
+ .forEach(
+ entry -> {
+ log.info(
+ "Partition: {} Current Offset: {}",
+ entry.getKey(),
+ offsetMap.get(entry.getKey()));
+ long newOffset = offsetMap.get(entry.getKey()) - 1;
+ callback.seek(
+ entry.getKey().topic(), entry.getKey().partition(), Math.max(0, newOffset));
+ });
 }
 }
- 
@KafkaListener(id = CONSUMER_GROUP, topics = {TOPIC_NAME}, containerFactory = "duheKafkaEventConsumer", concurrency = "1") + @KafkaListener( + id = CONSUMER_GROUP, + topics = {TOPIC_NAME}, + containerFactory = "duheKafkaEventConsumer", + concurrency = "1") public void checkSystemVersion(final ConsumerRecord<String, GenericRecord> consumerRecord) { final GenericRecord record = consumerRecord.value(); final String expectedVersion = String.format("%s-%s", _gitVersion.getVersion(), revision); @@ -96,7 +111,9 @@ public void checkSystemVersion(final ConsumerRecord<String, GenericRecord> consu log.warn("Wait for system update is disabled. Proceeding with startup."); IS_UPDATED.getAndSet(true); } else { - log.warn("System version is not up to date: {}. Waiting for datahub-upgrade to complete...", expectedVersion); + log.warn( + "System version is not up to date: {}. Waiting for datahub-upgrade to complete...", + expectedVersion); } } catch (Exception e) { @@ -113,15 +130,19 @@ public void waitForUpdate() { IS_UPDATED.getAndSet(true); } int maxBackOffs = Integer.parseInt(_configurationProvider.getSystemUpdate().getMaxBackOffs()); - long initialBackOffMs = Long.parseLong(_configurationProvider.getSystemUpdate().getInitialBackOffMs()); - int backOffFactor = Integer.parseInt(_configurationProvider.getSystemUpdate().getBackOffFactor()); + long initialBackOffMs = + Long.parseLong(_configurationProvider.getSystemUpdate().getInitialBackOffMs()); + int backOffFactor = + Integer.parseInt(_configurationProvider.getSystemUpdate().getBackOffFactor()); long backOffMs = initialBackOffMs; for (int i = 0; i < maxBackOffs; i++) { if (IS_UPDATED.get()) { log.debug("Finished waiting for updated indices."); try { - log.info("Containers: {}", registry.getListenerContainers().stream() + log.info( + "Containers: {}", + registry.getListenerContainers().stream() .map(MessageListenerContainer::getListenerId) .collect(Collectors.toList())); registry.getListenerContainer(consumerGroup).stop(); @@ -142,8 +163,9 @@ public void waitForUpdate() { if (!IS_UPDATED.get()) { - throw new IllegalStateException("Indices are not updated after exponential backoff." - + " Please try restarting and consider increasing back off settings."); + throw new IllegalStateException( + "Indices are not updated after exponential backoff." 
+ + " Please try restarting and consider increasing back off settings."); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java index 408b212d52f48..e631f776abd08 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.kafka; +import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; +import static com.linkedin.metadata.boot.kafka.MockDUHESerializer.topicToSubjectName; + import com.linkedin.metadata.EventUtils; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; @@ -7,61 +10,55 @@ import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.confluent.kafka.serializers.KafkaAvroDeserializer; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.util.Map; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; -import static com.linkedin.metadata.boot.kafka.MockDUHESerializer.topicToSubjectName; - -/** - * Used for early bootstrap to avoid contact with not yet existing schema registry - */ +/** Used for early bootstrap to avoid contact with not yet existing schema registry */ @Slf4j public class MockDUHEDeserializer extends KafkaAvroDeserializer { - private String topicName; + private String topicName; - public MockDUHEDeserializer() { - this.schemaRegistry = buildMockSchemaRegistryClient(); - } + public MockDUHEDeserializer() { + this.schemaRegistry = buildMockSchemaRegistryClient(); + } - public MockDUHEDeserializer(SchemaRegistryClient client) { - super(client); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } + public MockDUHEDeserializer(SchemaRegistryClient client) { + super(client); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } - public MockDUHEDeserializer(SchemaRegistryClient client, Map<String, ?> props) { - super(client, props); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } + public MockDUHEDeserializer(SchemaRegistryClient client, Map<String, ?> props) { + super(client, props); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } - @Override - public void configure(Map<String, ?> configs, boolean isKey) { - super.configure(configs, isKey); - topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); - } + @Override + public void configure(Map<String, ?> configs, boolean isKey) { + super.configure(configs, isKey); + topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); + } - private MockSchemaRegistryClient buildMockSchemaRegistryClient() { - MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(); - try { - schemaRegistry.register(topicToSubjectName(topicName), - new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); - return schemaRegistry; - } catch (IOException | RestClientException e) { - throw new RuntimeException(e); - } + private MockSchemaRegistryClient buildMockSchemaRegistryClient() { + MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(); + try { + 
schemaRegistry.register( + topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); + return schemaRegistry; + } catch (IOException | RestClientException e) { + throw new RuntimeException(e); } + } - public static class MockSchemaRegistryClient2 extends MockSchemaRegistryClient { - /** - * Previously used topics can have schema ids > 1 which fully match - * however we are replacing that registry so force schema id to 1 - */ - @Override - public synchronized ParsedSchema getSchemaById(int id) throws IOException, RestClientException { - return super.getSchemaById(1); - } + public static class MockSchemaRegistryClient2 extends MockSchemaRegistryClient { + /** + * Previously used topics can have schema ids > 1 which fully match however we are replacing + * that registry so force schema id to 1 + */ + @Override + public synchronized ParsedSchema getSchemaById(int id) throws IOException, RestClientException { + return super.getSchemaById(1); } + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java index 1421f952289b3..36fe514d5536f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java @@ -1,60 +1,57 @@ package com.linkedin.metadata.boot.kafka; +import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; + import com.linkedin.metadata.EventUtils; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.confluent.kafka.serializers.KafkaAvroSerializer; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.util.Map; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; - -/** - * Used for early bootstrap to avoid contact with not yet existing schema registry - */ +/** Used for early bootstrap to avoid contact with not yet existing schema registry */ @Slf4j public class MockDUHESerializer extends KafkaAvroSerializer { - private static final String DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX = "-value"; - - private String topicName; - - public MockDUHESerializer() { - this.schemaRegistry = buildMockSchemaRegistryClient(); + private static final String DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX = "-value"; + + private String topicName; + + public MockDUHESerializer() { + this.schemaRegistry = buildMockSchemaRegistryClient(); + } + + public MockDUHESerializer(SchemaRegistryClient client) { + super(client); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } + + public MockDUHESerializer(SchemaRegistryClient client, Map<String, ?> props) { + super(client, props); + this.schemaRegistry = buildMockSchemaRegistryClient(); + } + + @Override + public void configure(Map<String, ?> configs, boolean isKey) { + super.configure(configs, isKey); + topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); + } + + private MockSchemaRegistryClient buildMockSchemaRegistryClient() { + MockSchemaRegistryClient schemaRegistry = new 
MockSchemaRegistryClient(); + try { + schemaRegistry.register( + topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); + return schemaRegistry; + } catch (IOException | RestClientException e) { + throw new RuntimeException(e); } + } - public MockDUHESerializer(SchemaRegistryClient client) { - super(client); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - public MockDUHESerializer(SchemaRegistryClient client, Map<String, ?> props) { - super(client, props); - this.schemaRegistry = buildMockSchemaRegistryClient(); - } - - @Override - public void configure(Map<String, ?> configs, boolean isKey) { - super.configure(configs, isKey); - topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); - } - - private MockSchemaRegistryClient buildMockSchemaRegistryClient() { - MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient(); - try { - schemaRegistry.register(topicToSubjectName(topicName), - new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); - return schemaRegistry; - } catch (IOException | RestClientException e) { - throw new RuntimeException(e); - } - } - - public static String topicToSubjectName(String topicName) { - return topicName + DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX; - } + public static String topicToSubjectName(String topicName) { + return topicName + DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX; + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java index ea9ac57778550..770c0d2840fe8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -21,28 +23,24 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class BackfillBrowsePathsV2Step extends UpgradeStep { - private static final Set<String> ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); + private static final Set<String> ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); private static final String VERSION = "2"; private static final String UPGRADE_ID = "backfill-default-browse-paths-v2-step"; private static final Integer 
BATCH_SIZE = 5000; @@ -63,14 +61,18 @@ public ExecutionMode getExecutionMode() { @Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); String scrollId = null; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", - migratedCount, migratedCount + BATCH_SIZE, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); migratedCount += BATCH_SIZE; } while (scrollId != null); @@ -78,7 +80,7 @@ public void upgrade() throws Exception { } private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, String scrollId) - throws Exception { + throws Exception { // Condition: has `browsePaths` AND does NOT have `browsePathV2` Criterion missingBrowsePathV2 = new Criterion(); @@ -102,16 +104,9 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S Filter filter = new Filter(); filter.setOr(conjunctiveCriterionArray); - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - "5m", - BATCH_SIZE, - null - ); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + ImmutableList.of(entityType), "*", filter, null, scrollId, "5m", BATCH_SIZE, null); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -121,7 +116,11 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); } catch (Exception e) { // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); } } @@ -136,12 +135,9 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - false - ); + _entityService.ingestProposal(proposal, auditStamp, false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java index b26eb67465c0d..c46cfdd61158d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java @@ -13,7 +13,6 @@ import 
com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.search.EntitySearchService; - import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; @@ -25,7 +24,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IndexDataPlatformsStep extends UpgradeStep { private static final String VERSION = "1"; @@ -35,7 +33,9 @@ public class IndexDataPlatformsStep extends UpgradeStep { private final EntitySearchService _entitySearchService; private final EntityRegistry _entityRegistry; - public IndexDataPlatformsStep(EntityService entityService, EntitySearchService entitySearchService, + public IndexDataPlatformsStep( + EntityService entityService, + EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entitySearchService = entitySearchService; @@ -44,11 +44,15 @@ public IndexDataPlatformsStep(EntityService entityService, EntitySearchService e @Override public void upgrade() throws Exception { - final AspectSpec dataPlatformSpec = _entityRegistry.getEntitySpec(Constants.DATA_PLATFORM_ENTITY_NAME) - .getAspectSpec(Constants.DATA_PLATFORM_INFO_ASPECT_NAME); + final AspectSpec dataPlatformSpec = + _entityRegistry + .getEntitySpec(Constants.DATA_PLATFORM_ENTITY_NAME) + .getAspectSpec(Constants.DATA_PLATFORM_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); getAndReIndexDataPlatforms(auditStamp, dataPlatformSpec); @@ -61,8 +65,8 @@ public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPlatformInfoAspectSpec) - throws Exception { + private int getAndReIndexDataPlatforms( + AuditStamp auditStamp, AspectSpec dataPlatformInfoAspectSpec) throws Exception { ListUrnsResult listResult = _entityService.listUrns(Constants.DATA_PLATFORM_ENTITY_NAME, 0, BATCH_SIZE); @@ -73,9 +77,10 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla } final Map<Urn, EntityResponse> dataPlatformInfoResponses = - _entityService.getEntitiesV2(Constants.DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), - Collections.singleton(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) - ); + _entityService.getEntitiesV2( + Constants.DATA_PLATFORM_ENTITY_NAME, + new HashSet<>(dataPlatformUrns), + Collections.singleton(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)); // Loop over Data platforms and produce changelog List<Future<?>> futures = new LinkedList<>(); @@ -92,26 +97,32 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - dpUrn, - Constants.DATA_PLATFORM_ENTITY_NAME, - Constants.DATA_PLATFORM_INFO_ASPECT_NAME, - dataPlatformInfoAspectSpec, - null, - dpInfo, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + dpUrn, + Constants.DATA_PLATFORM_ENTITY_NAME, + Constants.DATA_PLATFORM_INFO_ASPECT_NAME, + dataPlatformInfoAspectSpec, + null, + dpInfo, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | 
ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return listResult.getTotal(); } @@ -122,6 +133,7 @@ private DataPlatformInfo mapDpInfo(EntityResponse entityResponse) { return null; } - return new DataPlatformInfo(aspectMap.get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); + return new DataPlatformInfo( + aspectMap.get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java index 30608e984a0f2..ae4baee37c822 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; @@ -13,16 +15,12 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.utils.DataPlatformInstanceUtils; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.LinkedList; import java.util.List; import java.util.Optional; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -62,7 +60,9 @@ public void execute() throws Exception { int start = 0; while (start < numEntities) { - log.info("Reading urns {} to {} from the aspects table to generate dataplatform instance aspects", start, + log.info( + "Reading urns {} to {} from the aspects table to generate dataplatform instance aspects", + start, start + BATCH_SIZE); List<UpsertBatchItem> items = new LinkedList<>(); @@ -71,7 +71,8 @@ public void execute() throws Exception { Urn urn = Urn.createFromString(urnStr); Optional<DataPlatformInstance> dataPlatformInstance = getDataPlatformInstance(urn); if (dataPlatformInstance.isPresent()) { - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(DATA_PLATFORM_INSTANCE_ASPECT_NAME) .aspect(dataPlatformInstance.get()) @@ -80,10 +81,14 @@ public void execute() throws Exception { } final AuditStamp aspectAuditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - _entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), aspectAuditStamp, true, true); - - log.info("Finished ingesting DataPlatformInstance for urn {} to {}", start, start + BATCH_SIZE); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + _entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), aspectAuditStamp, true, true); + + log.info( + "Finished ingesting DataPlatformInstance for urn {} to {}", start, start + BATCH_SIZE); start += BATCH_SIZE; } log.info("Finished ingesting DataPlatformInstance for all 
entities"); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java index e4ad215eec864..db8cad65caa8a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -10,6 +12,8 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; @@ -17,16 +21,10 @@ import java.util.Spliterators; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestDataPlatformsStep implements BootstrapStep { @@ -44,45 +42,60 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 1. Read from the file into JSON. - final JsonNode dataPlatforms = mapper.readTree(new ClassPathResource("./boot/data_platforms.json").getFile()); + final JsonNode dataPlatforms = + mapper.readTree(new ClassPathResource("./boot/data_platforms.json").getFile()); if (!dataPlatforms.isArray()) { - throw new RuntimeException(String.format("Found malformed data platforms file, expected an Array but found %s", - dataPlatforms.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed data platforms file, expected an Array but found %s", + dataPlatforms.getNodeType())); } // 2. For each JSON object, cast into a DataPlatformSnapshot object. 
- List<UpsertBatchItem> dataPlatformAspects = StreamSupport.stream( - Spliterators.spliteratorUnknownSize(dataPlatforms.iterator(), Spliterator.ORDERED), false) - .map(dataPlatform -> { - final String urnString; - final Urn urn; - try { - urnString = dataPlatform.get("urn").asText(); - urn = Urn.createFromString(urnString); - } catch (URISyntaxException e) { - log.error("Malformed urn: {}", dataPlatform.get("urn").asText()); - throw new RuntimeException("Malformed urn", e); - } - - final DataPlatformInfo info = - RecordUtils.toRecordTemplate(DataPlatformInfo.class, dataPlatform.get("aspect").toString()); - - return UpsertBatchItem.builder() + List<UpsertBatchItem> dataPlatformAspects = + StreamSupport.stream( + Spliterators.spliteratorUnknownSize(dataPlatforms.iterator(), Spliterator.ORDERED), + false) + .map( + dataPlatform -> { + final String urnString; + final Urn urn; + try { + urnString = dataPlatform.get("urn").asText(); + urn = Urn.createFromString(urnString); + } catch (URISyntaxException e) { + log.error("Malformed urn: {}", dataPlatform.get("urn").asText()); + throw new RuntimeException("Malformed urn", e); + } + + final DataPlatformInfo info = + RecordUtils.toRecordTemplate( + DataPlatformInfo.class, dataPlatform.get("aspect").toString()); + + return UpsertBatchItem.builder() .urn(urn) .aspectName(PLATFORM_ASPECT_NAME) .aspect(info) .build(_entityService.getEntityRegistry()); - }).collect(Collectors.toList()); - - _entityService.ingestAspects(AspectsBatchImpl.builder().items(dataPlatformAspects).build(), - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - true, - false); + }) + .collect(Collectors.toList()); + + _entityService.ingestAspects( + AspectsBatchImpl.builder().items(dataPlatformAspects).build(), + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + true, + false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java index 5bc80f46e6478..0b812a6f818f4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -28,14 +30,12 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - /** - * This bootstrap step is responsible for ingesting a default Global Settings object if it does not already exist. + * This bootstrap step is responsible for ingesting a default Global Settings object if it does not + * already exist. * - * If settings already exist, we merge the defaults and the existing settings such that the container will also - * get new settings when they are added. + * <p>If settings already exist, we merge the defaults and the existing settings such that the + * container will also get new settings when they are added. 
*/ @Slf4j public class IngestDefaultGlobalSettingsStep implements BootstrapStep { @@ -49,8 +49,7 @@ public IngestDefaultGlobalSettingsStep(@Nonnull final EntityService entityServic } public IngestDefaultGlobalSettingsStep( - @Nonnull final EntityService entityService, - @Nonnull final String resourcePath) { + @Nonnull final EntityService entityService, @Nonnull final String resourcePath) { _entityService = Objects.requireNonNull(entityService); _resourcePath = Objects.requireNonNull(resourcePath); } @@ -64,9 +63,13 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); log.info("Ingesting default global settings..."); @@ -76,37 +79,45 @@ public void execute() throws IOException, URISyntaxException { defaultSettingsObj = mapper.readTree(new ClassPathResource(_resourcePath).getFile()); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to parse global settings file. Could not parse valid json at resource path %s", - _resourcePath), - e); + String.format( + "Failed to parse global settings file. Could not parse valid json at resource path %s", + _resourcePath), + e); } if (!defaultSettingsObj.isObject()) { - throw new RuntimeException(String.format("Found malformed global settings info file, expected an Object but found %s", - defaultSettingsObj.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed global settings info file, expected an Object but found %s", + defaultSettingsObj.getNodeType())); } // 2. Bind the global settings json into a GlobalSettingsInfo aspect. GlobalSettingsInfo defaultSettings; - defaultSettings = RecordUtils.toRecordTemplate(GlobalSettingsInfo.class, defaultSettingsObj.toString()); - ValidationResult result = ValidateDataAgainstSchema.validate( - defaultSettings, - new ValidationOptions( - RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT, - CoercionMode.NORMAL, - UnrecognizedFieldMode.DISALLOW - )); + defaultSettings = + RecordUtils.toRecordTemplate(GlobalSettingsInfo.class, defaultSettingsObj.toString()); + ValidationResult result = + ValidateDataAgainstSchema.validate( + defaultSettings, + new ValidationOptions( + RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT, + CoercionMode.NORMAL, + UnrecognizedFieldMode.DISALLOW)); if (!result.isValid()) { - throw new RuntimeException(String.format( - "Failed to parse global settings file. Provided JSON does not match GlobalSettingsInfo.pdl model. %s", result.getMessages())); + throw new RuntimeException( + String.format( + "Failed to parse global settings file. Provided JSON does not match GlobalSettingsInfo.pdl model. %s", + result.getMessages())); } // 3. Get existing settings or empty settings object final GlobalSettingsInfo existingSettings = getExistingGlobalSettingsOrEmpty(); - // 4. Merge existing settings onto previous settings. Be careful - if we change the settings schema dramatically in future we may need to account for that. 
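The mergeDataMaps helper called just below is not shown anywhere in this patch. Purely as a hedged sketch of the merge semantics the surrounding comments describe (defaults contribute newly added keys, existing values win on conflict, nested sections merge recursively), written against plain java.util.Map rather than Pegasus DataMap:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public final class MergeSketch {
      private MergeSketch() {}

      /** Overlays 'existing' on top of 'defaults'; nested maps are merged recursively. */
      @SuppressWarnings("unchecked")
      public static Map<String, Object> merge(
          Map<String, Object> defaults, Map<String, Object> existing) {
        Map<String, Object> out = new LinkedHashMap<>(defaults);
        for (Map.Entry<String, Object> entry : existing.entrySet()) {
          Object base = out.get(entry.getKey());
          if (base instanceof Map && entry.getValue() instanceof Map) {
            out.put(
                entry.getKey(),
                merge((Map<String, Object>) base, (Map<String, Object>) entry.getValue()));
          } else {
            out.put(entry.getKey(), entry.getValue()); // existing value wins over the default
          }
        }
        return out;
      }
    }

Recursing only when both sides are maps keeps scalar overrides intact while still letting nested settings sections pick up newly added default fields.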
- final GlobalSettingsInfo newSettings = new GlobalSettingsInfo(mergeDataMaps(defaultSettings.data(), existingSettings.data()));
+ // 4. Merge existing settings onto previous settings. Be careful - if we change the settings
+ // schema dramatically in future we may need to account for that.
+ final GlobalSettingsInfo newSettings =
+ new GlobalSettingsInfo(mergeDataMaps(defaultSettings.data(), existingSettings.data()));
 // 5. Ingest into DataHub.
 final MetadataChangeProposal proposal = new MetadataChangeProposal();
@@ -118,12 +129,15 @@ public void execute() throws IOException, URISyntaxException {
 _entityService.ingestProposal(
 proposal,
- new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()),
+ new AuditStamp()
+ .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
+ .setTime(System.currentTimeMillis()),
 false);
 }
- private GlobalSettingsInfo getExistingGlobalSettingsOrEmpty() {
- RecordTemplate aspect = _entityService.getAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, 0);
+ private GlobalSettingsInfo getExistingGlobalSettingsOrEmpty() {
+ RecordTemplate aspect =
+ _entityService.getAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, 0);
 return aspect != null ? (GlobalSettingsInfo) aspect : new GlobalSettingsInfo();
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
index 6d64ceea32339..f5a76b5f75778 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.boot.steps;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -16,22 +18,16 @@
 import com.linkedin.mxe.GenericAspect;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.ownership.OwnershipTypeInfo;
-
+import java.util.List;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.Resource;
-import java.util.List;
-
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
 * This bootstrap step is responsible for ingesting default ownership types.
- * <p></p>
- * If system has never bootstrapped this step will:
- * For each ownership type defined in the yaml file, it checks whether the urn exists.
- * If not, it ingests the ownership type into DataHub.
+ *
+ * <p>If the system has never been bootstrapped, this step will, for each ownership type defined in
+ * the yaml file, check whether the urn exists; if not, it ingests the ownership type into DataHub.
*/ @Slf4j @RequiredArgsConstructor @@ -54,19 +50,23 @@ public void execute() throws Exception { final JsonNode ownershipTypesObj = JSON_MAPPER.readTree(_ownershipTypesResource.getFile()); if (!ownershipTypesObj.isArray()) { - throw new RuntimeException(String.format("Found malformed ownership file, expected an Array but found %s", - ownershipTypesObj.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed ownership file, expected an Array but found %s", + ownershipTypesObj.getNodeType())); } final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); log.info("Ingesting {} ownership types", ownershipTypesObj.size()); int numIngested = 0; for (final JsonNode roleObj : ownershipTypesObj) { final Urn urn = Urn.createFromString(roleObj.get("urn").asText()); - final OwnershipTypeInfo info = RecordUtils.toRecordTemplate(OwnershipTypeInfo.class, roleObj.get("info") - .toString()); + final OwnershipTypeInfo info = + RecordUtils.toRecordTemplate(OwnershipTypeInfo.class, roleObj.get("info").toString()); log.info(String.format("Ingesting default ownership type with urn %s", urn)); ingestOwnershipType(urn, info, auditStamp); numIngested++; @@ -74,13 +74,15 @@ public void execute() throws Exception { log.info("Ingested {} new ownership types", numIngested); } - private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp) { + private void ingestOwnershipType( + final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp) { // 3. Write key & aspect MCPs. final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(ownershipTypeUrn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(ownershipTypeUrn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(ownershipTypeUrn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(OWNERSHIP_TYPE_ENTITY_NAME); @@ -96,8 +98,11 @@ private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipType proposal.setAspect(GenericRecordUtils.serializeAspect(info)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()).build(), auditStamp, - false); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()) + .build(), + auditStamp, + false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java index cf29645214466..2aa5fe4f46b65 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import 
com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -25,7 +27,6 @@ import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.policy.DataHubPolicyInfo; - import java.io.IOException; import java.net.URISyntaxException; import java.util.Collections; @@ -33,15 +34,10 @@ import java.util.List; import java.util.Map; import java.util.Optional; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.Resource; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestPoliciesStep implements BootstrapStep { @@ -65,9 +61,13 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 0. Execute preflight check to see whether we need to ingest policies log.info("Ingesting default access policies from: {}...", _policiesResource); @@ -77,14 +77,17 @@ public void execute() throws IOException, URISyntaxException { if (!policiesObj.isArray()) { throw new RuntimeException( - String.format("Found malformed policies file, expected an Array but found %s", policiesObj.getNodeType())); + String.format( + "Found malformed policies file, expected an Array but found %s", + policiesObj.getNodeType())); } // 2. For each JSON object, cast into a DataHub Policy Info object. for (final JsonNode policyObj : policiesObj) { final Urn urn = Urn.createFromString(policyObj.get("urn").asText()); - // If the info is not there, it means that the policy was there before, but must now be removed + // If the info is not there, it means that the policy was there before, but must now be + // removed if (!policyObj.has("info")) { _entityService.deleteUrn(urn); continue; @@ -107,7 +110,8 @@ public void execute() throws IOException, URISyntaxException { } } } - // If search index for policies is empty, update the policy index with the ingested policies from previous step. + // If search index for policies is empty, update the policy index with the ingested policies + // from previous step. 
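The updatePolicyIndex method below pages through policies with a start/count/total loop; the same shape appears in IndexDataPlatformsStep and IngestDataPlatformInstancesStep earlier in this patch. A hedged, self-contained sketch of that paging pattern (Page is a hypothetical stand-in for the ListUrnsResult type, which this diff only reformats):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.BiFunction;

    public final class PagingSketch {
      /** Minimal stand-in for a paged listing result: one page of urns plus the overall total. */
      public static final class Page {
        final List<String> urns;
        final int total;

        Page(List<String> urns, int total) {
          this.urns = urns;
          this.total = total;
        }
      }

      private PagingSketch() {}

      /** Drains every page; 'lister' maps (start, count) to a Page, like listUrns does. */
      public static List<String> listAll(BiFunction<Integer, Integer, Page> lister, int count) {
        List<String> all = new ArrayList<>();
        int start = 0;
        int total = count; // guarantees at least one fetch before the real total is known
        while (start < total) {
          Page page = lister.apply(start, count);
          total = page.total; // refresh the total from the latest response
          all.addAll(page.urns);
          start += count;
        }
        return all;
      }
    }

Refreshing total from each response keeps the loop honest if the entity count changes while paging.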
// Directly update the ES index, does not produce MCLs if (_entitySearchService.docCount(Constants.POLICY_ENTITY_NAME) == 0) { updatePolicyIndex(); @@ -115,31 +119,37 @@ public void execute() throws IOException, URISyntaxException { log.info("Successfully ingested default access policies."); } - /** - * Update policy index and push in the relevant search documents into the search index - */ + /** Update policy index and push in the relevant search documents into the search index */ private void updatePolicyIndex() throws URISyntaxException { log.info("Pushing documents to the policy index"); - AspectSpec policyInfoAspectSpec = _entityRegistry.getEntitySpec(Constants.POLICY_ENTITY_NAME) - .getAspectSpec(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); + AspectSpec policyInfoAspectSpec = + _entityRegistry + .getEntitySpec(Constants.POLICY_ENTITY_NAME) + .getAspectSpec(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); int start = 0; int count = 30; int total = 100; while (start < total) { - ListUrnsResult listUrnsResult = _entityService.listUrns(Constants.POLICY_ENTITY_NAME, start, count); + ListUrnsResult listUrnsResult = + _entityService.listUrns(Constants.POLICY_ENTITY_NAME, start, count); total = listUrnsResult.getTotal(); start = start + count; final Map<Urn, EntityResponse> policyEntities = - _entityService.getEntitiesV2(POLICY_ENTITY_NAME, new HashSet<>(listUrnsResult.getEntities()), + _entityService.getEntitiesV2( + POLICY_ENTITY_NAME, + new HashSet<>(listUrnsResult.getEntities()), Collections.singleton(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME)); - policyEntities.values().forEach(entityResponse -> insertPolicyDocument(entityResponse, policyInfoAspectSpec)); + policyEntities + .values() + .forEach(entityResponse -> insertPolicyDocument(entityResponse, policyInfoAspectSpec)); } log.info("Successfully updated the policy index"); } private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspectSpec) { - EnvelopedAspect aspect = entityResponse.getAspects().get(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); + EnvelopedAspect aspect = + entityResponse.getAspects().get(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); if (aspect == null) { log.info("Missing policy info aspect for urn {}", entityResponse.getUrn()); return; @@ -147,10 +157,15 @@ private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspe Optional<String> searchDocument; try { - searchDocument = _searchDocumentTransformer.transformAspect(entityResponse.getUrn(), - new DataHubPolicyInfo(aspect.getValue().data()), aspectSpec, false); + searchDocument = + _searchDocumentTransformer.transformAspect( + entityResponse.getUrn(), + new DataHubPolicyInfo(aspect.getValue().data()), + aspectSpec, + false); } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -164,7 +179,8 @@ private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspe return; } - _entitySearchService.upsertDocument(Constants.POLICY_ENTITY_NAME, searchDocument.get(), docId.get()); + _entitySearchService.upsertDocument( + Constants.POLICY_ENTITY_NAME, searchDocument.get(), docId.get()); } private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws URISyntaxException { @@ -172,7 +188,8 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR final MetadataChangeProposal keyAspectProposal = new 
MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(urn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(POLICY_ENTITY_NAME); @@ -186,11 +203,14 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR proposal.setAspect(GenericRecordUtils.serializeAspect(info)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) - .build(), - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - false); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) + .build(), + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); } private boolean hasPolicy(Urn policyUrn) { diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java index 9aed445a967b3..b24acc61ff6c1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java @@ -1,12 +1,14 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.boot.BootstrapStep; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.key.DataHubRetentionKey; @@ -22,9 +24,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestRetentionPoliciesStep implements BootstrapStep { @@ -36,10 +35,17 @@ public class IngestRetentionPoliciesStep implements BootstrapStep { private final String pluginPath; private static final ObjectMapper YAML_MAPPER = new ObjectMapper(new YAMLFactory()); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - YAML_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + YAML_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final String UPGRADE_ID = "ingest-retention-policies"; private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); @@ -80,7 +86,8 @@ public void 
execute() throws IOException, URISyntaxException { log.info("Setting {} policies", retentionPolicyMap.size()); boolean hasUpdate = false; for (DataHubRetentionKey key : retentionPolicyMap.keySet()) { - if (_retentionService.setRetention(key.getEntityName(), key.getAspectName(), retentionPolicyMap.get(key))) { + if (_retentionService.setRetention( + key.getEntityName(), key.getAspectName(), retentionPolicyMap.get(key))) { hasUpdate = true; } } @@ -95,7 +102,8 @@ public void execute() throws IOException, URISyntaxException { } // Parse input yaml file or yaml files in the input directory to generate a retention policy map - private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File retentionFileOrDir) throws IOException { + private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File retentionFileOrDir) + throws IOException { // If path does not exist return empty if (!retentionFileOrDir.exists()) { return Collections.emptyMap(); @@ -107,7 +115,9 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret for (File retentionFile : retentionFileOrDir.listFiles()) { if (!retentionFile.isFile()) { - log.info("Element {} in plugin directory {} is not a file. Skipping", retentionFile.getPath(), + log.info( + "Element {} in plugin directory {} is not a file. Skipping", + retentionFile.getPath(), retentionFileOrDir.getPath()); continue; } @@ -116,7 +126,8 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret return result; } // If file, parse the yaml file and return result; - if (!retentionFileOrDir.getPath().endsWith(".yaml") && retentionFileOrDir.getPath().endsWith(".yml")) { + if (!retentionFileOrDir.getPath().endsWith(".yaml") + && retentionFileOrDir.getPath().endsWith(".yml")) { log.info("File {} is not a YAML file. Skipping", retentionFileOrDir.getPath()); return Collections.emptyMap(); } @@ -126,15 +137,16 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret /** * Parse yaml retention config * - * The structure of yaml must be a list of retention policies where each element specifies the entity, aspect - * to apply the policy to and the policy definition. The policy definition is converted into the - * {@link com.linkedin.retention.DataHubRetentionConfig} class. + * <p>The structure of yaml must be a list of retention policies where each element specifies the + * entity, aspect to apply the policy to and the policy definition. The policy definition is + * converted into the {@link com.linkedin.retention.DataHubRetentionConfig} class. 
*/ - private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfig(File retentionConfigFile) - throws IOException { + private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfig( + File retentionConfigFile) throws IOException { final JsonNode retentionPolicies = YAML_MAPPER.readTree(retentionConfigFile); if (!retentionPolicies.isArray()) { - throw new IllegalArgumentException("Retention config file must contain an array of retention policies"); + throw new IllegalArgumentException( + "Retention config file must contain an array of retention policies"); } Map<DataHubRetentionKey, DataHubRetentionConfig> retentionPolicyMap = new HashMap<>(); @@ -158,9 +170,11 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfi DataHubRetentionConfig retentionInfo; if (retentionPolicy.has("config")) { retentionInfo = - RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, retentionPolicy.get("config").toString()); + RecordUtils.toRecordTemplate( + DataHubRetentionConfig.class, retentionPolicy.get("config").toString()); } else { - throw new IllegalArgumentException("Each element in the retention config must contain field config"); + throw new IllegalArgumentException( + "Each element in the retention config must contain field config"); } retentionPolicyMap.put(key, retentionInfo); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java index 99be185113968..f3c395abdfc3a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -25,9 +27,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestRolesStep implements BootstrapStep { @@ -49,9 +48,13 @@ public ExecutionMode getExecutionMode() { @Override public void execute() throws Exception { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // Sleep to ensure deployment process finishes. 
    Thread.sleep(SLEEP_SECONDS * 1000);
@@ -64,13 +67,19 @@ public void execute() throws Exception {
 
     if (!rolesObj.isArray()) {
       throw new RuntimeException(
-          String.format("Found malformed roles file, expected an Array but found %s", rolesObj.getNodeType()));
+          String.format(
+              "Found malformed roles file, expected an Array but found %s",
+              rolesObj.getNodeType()));
     }
 
     final AspectSpec roleInfoAspectSpec =
-        _entityRegistry.getEntitySpec(DATAHUB_ROLE_ENTITY_NAME).getAspectSpec(DATAHUB_ROLE_INFO_ASPECT_NAME);
+        _entityRegistry
+            .getEntitySpec(DATAHUB_ROLE_ENTITY_NAME)
+            .getAspectSpec(DATAHUB_ROLE_INFO_ASPECT_NAME);
     final AuditStamp auditStamp =
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
+        new AuditStamp()
+            .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis());
 
     for (final JsonNode roleObj : rolesObj) {
       final Urn urn = Urn.createFromString(roleObj.get("urn").asText());
@@ -81,20 +90,26 @@ public void execute() throws Exception {
         continue;
       }
 
-      final DataHubRoleInfo info = RecordUtils.toRecordTemplate(DataHubRoleInfo.class, roleObj.get("info").toString());
+      final DataHubRoleInfo info =
+          RecordUtils.toRecordTemplate(DataHubRoleInfo.class, roleObj.get("info").toString());
 
       ingestRole(urn, info, auditStamp, roleInfoAspectSpec);
     }
 
     log.info("Successfully ingested default Roles.");
   }
 
-  private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo, final AuditStamp auditStamp,
-      final AspectSpec roleInfoAspectSpec) throws URISyntaxException {
+  private void ingestRole(
+      final Urn roleUrn,
+      final DataHubRoleInfo dataHubRoleInfo,
+      final AuditStamp auditStamp,
+      final AspectSpec roleInfoAspectSpec)
+      throws URISyntaxException {
     // 3. Write key & aspect
     final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal();
     final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(roleUrn);
     GenericAspect aspect =
-        GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(roleUrn, keyAspectSpec));
+        GenericRecordUtils.serializeAspect(
+            EntityKeyUtils.convertUrnToEntityKey(roleUrn, keyAspectSpec));
     keyAspectProposal.setAspect(aspect);
     keyAspectProposal.setAspectName(keyAspectSpec.getName());
     keyAspectProposal.setEntityType(DATAHUB_ROLE_ENTITY_NAME);
@@ -108,12 +123,25 @@ private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo
     proposal.setAspect(GenericRecordUtils.serializeAspect(dataHubRoleInfo));
     proposal.setChangeType(ChangeType.UPSERT);
 
-    _entityService.ingestProposal(AspectsBatchImpl.builder()
-        .mcps(List.of(keyAspectProposal, proposal), _entityRegistry).build(),
-        new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()),
-        false);
-
-    _entityService.alwaysProduceMCLAsync(roleUrn, DATAHUB_ROLE_ENTITY_NAME, DATAHUB_ROLE_INFO_ASPECT_NAME,
-        roleInfoAspectSpec, null, dataHubRoleInfo, null, null, auditStamp, ChangeType.RESTATE);
+    _entityService.ingestProposal(
+        AspectsBatchImpl.builder()
+            .mcps(List.of(keyAspectProposal, proposal), _entityRegistry)
+            .build(),
+        new AuditStamp()
+            .setActor(Urn.createFromString(SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis()),
+        false);
+
+    _entityService.alwaysProduceMCLAsync(
+        roleUrn,
+        DATAHUB_ROLE_ENTITY_NAME,
+        DATAHUB_ROLE_INFO_ASPECT_NAME,
+        roleInfoAspectSpec,
+        null,
+        dataHubRoleInfo,
+        null,
+        null,
+        auditStamp,
+        ChangeType.RESTATE);
   }
 }
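Note, purely illustrative and not part of this patch: IngestPoliciesStep, IngestRolesStep, and IngestRootUserStep all bootstrap an entity the same way. They serialize the key aspect derived from the urn, pair it with the info aspect, and upsert both as MetadataChangeProposals in a single batch. A minimal sketch of that pair, built only from calls that already appear in the hunks above; the class and method names here are invented, and the import paths follow the surrounding files:

// Sketch only: the key-aspect + info-aspect proposal pair the bootstrap steps upsert.
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.utils.EntityKeyUtils;
import com.linkedin.metadata.utils.GenericRecordUtils;
import com.linkedin.mxe.MetadataChangeProposal;
import java.util.List;

final class BootstrapProposalSketch {
  /** Builds the (key, info) proposal pair that ingestPolicy/ingestRole hand to ingestProposal. */
  static List<MetadataChangeProposal> keyAndInfoProposals(
      Urn urn, AspectSpec keyAspectSpec, String infoAspectName, RecordTemplate info) {
    MetadataChangeProposal keyProposal = new MetadataChangeProposal();
    keyProposal.setEntityUrn(urn);
    keyProposal.setEntityType(urn.getEntityType());
    keyProposal.setAspectName(keyAspectSpec.getName());
    // The key aspect is recovered from the urn itself, exactly as in the hunks above.
    keyProposal.setAspect(
        GenericRecordUtils.serializeAspect(
            EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec)));
    keyProposal.setChangeType(ChangeType.UPSERT);

    MetadataChangeProposal infoProposal = new MetadataChangeProposal();
    infoProposal.setEntityUrn(urn);
    infoProposal.setEntityType(urn.getEntityType());
    infoProposal.setAspectName(infoAspectName);
    infoProposal.setAspect(GenericRecordUtils.serializeAspect(info));
    infoProposal.setChangeType(ChangeType.UPSERT);

    return List.of(keyProposal, infoProposal);
  }
}

The steps then wrap the pair via AspectsBatchImpl.builder().mcps(...) and submit it through _entityService.ingestProposal, as the hunk above shows.

diff --git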
a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java index febcb9d4ec8a4..9e00b960482c5 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -7,25 +10,19 @@ import com.linkedin.common.urn.Urn; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.boot.BootstrapStep; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.EntityService; - import com.linkedin.metadata.key.CorpUserKey; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.util.Pair; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; - -import com.linkedin.util.Pair; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestRootUserStep implements BootstrapStep { @@ -43,16 +40,23 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 1. Read from the file into JSON. - final JsonNode userObj = mapper.readTree(new ClassPathResource("./boot/root_user.json").getFile()); + final JsonNode userObj = + mapper.readTree(new ClassPathResource("./boot/root_user.json").getFile()); if (!userObj.isObject()) { - throw new RuntimeException(String.format("Found malformed root user file, expected an Object but found %s", - userObj.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed root user file, expected an Object but found %s", + userObj.getNodeType())); } // 2. 
Ingest the user info @@ -66,18 +70,22 @@ public void execute() throws IOException, URISyntaxException { final CorpUserInfo info = RecordUtils.toRecordTemplate(CorpUserInfo.class, userObj.get("info").toString()); - final CorpUserKey key = (CorpUserKey) EntityKeyUtils.convertUrnToEntityKey(urn, getUserKeyAspectSpec()); + final CorpUserKey key = + (CorpUserKey) EntityKeyUtils.convertUrnToEntityKey(urn, getUserKeyAspectSpec()); final AuditStamp aspectAuditStamp = - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - _entityService.ingestAspects(urn, List.of( - Pair.of(CORP_USER_KEY_ASPECT_NAME, key), - Pair.of(USER_INFO_ASPECT_NAME, info) - ), aspectAuditStamp, null); + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + _entityService.ingestAspects( + urn, + List.of(Pair.of(CORP_USER_KEY_ASPECT_NAME, key), Pair.of(USER_INFO_ASPECT_NAME, info)), + aspectAuditStamp, + null); } private AspectSpec getUserKeyAspectSpec() { final EntitySpec spec = _entityService.getEntityRegistry().getEntitySpec(CORP_USER_ENTITY_NAME); return spec.getKeyAspectSpec(); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java index 34147b166ecd7..3c62f695ddd5f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java @@ -9,7 +9,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements BootstrapStep { @@ -33,7 +32,8 @@ public void execute() throws Exception { return; } // Remove invalid telemetry aspect - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_TELEMETRY_ASPECT_NAME, new HashMap<>(), true); + _entityService.deleteAspect( + TelemetryUtils.CLIENT_ID_URN, INVALID_TELEMETRY_ASPECT_NAME, new HashMap<>(), true); BootstrapStep.setUpgradeResult(REMOVE_UNKNOWN_ASPECTS_URN, _entityService); } catch (Exception e) { @@ -48,5 +48,4 @@ public void execute() throws Exception { public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java index 1f5f7f26ed89b..333928999f453 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java @@ -13,14 +13,13 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ExtraInfo; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.LinkedList; import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class RestoreColumnLineageIndices extends UpgradeStep { @@ -30,7 +29,8 @@ public class RestoreColumnLineageIndices 
extends UpgradeStep { private final EntityRegistry _entityRegistry; - public RestoreColumnLineageIndices(@Nonnull final EntityService entityService, @Nonnull final EntityRegistry entityRegistry) { + public RestoreColumnLineageIndices( + @Nonnull final EntityService entityService, @Nonnull final EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entityRegistry = Objects.requireNonNull(entityRegistry, "entityRegistry must not be null"); } @@ -38,7 +38,9 @@ public RestoreColumnLineageIndices(@Nonnull final EntityService entityService, @ @Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final int totalUpstreamLineageCount = getAndRestoreUpstreamLineageIndices(0, auditStamp); int upstreamLineageCount = BATCH_SIZE; @@ -47,17 +49,21 @@ public void upgrade() throws Exception { upstreamLineageCount += BATCH_SIZE; } - final int totalChartInputFieldsCount = getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, 0, auditStamp); + final int totalChartInputFieldsCount = + getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, 0, auditStamp); int chartInputFieldsCount = BATCH_SIZE; while (chartInputFieldsCount < totalChartInputFieldsCount) { - getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, chartInputFieldsCount, auditStamp); + getAndRestoreInputFieldsIndices( + Constants.CHART_ENTITY_NAME, chartInputFieldsCount, auditStamp); chartInputFieldsCount += BATCH_SIZE; } - final int totalDashboardInputFieldsCount = getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, 0, auditStamp); + final int totalDashboardInputFieldsCount = + getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, 0, auditStamp); int dashboardInputFieldsCount = BATCH_SIZE; while (dashboardInputFieldsCount < totalDashboardInputFieldsCount) { - getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, dashboardInputFieldsCount, auditStamp); + getAndRestoreInputFieldsIndices( + Constants.DASHBOARD_ENTITY_NAME, dashboardInputFieldsCount, auditStamp); dashboardInputFieldsCount += BATCH_SIZE; } } @@ -69,23 +75,29 @@ public ExecutionMode getExecutionMode() { } private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp) { - final AspectSpec upstreamLineageAspectSpec = _entityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME) - .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); - - final ListResult<RecordTemplate> latestAspects = _entityService.listLatestAspects( - Constants.DATASET_ENTITY_NAME, - Constants.UPSTREAM_LINEAGE_ASPECT_NAME, - start, - BATCH_SIZE); - - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { + final AspectSpec upstreamLineageAspectSpec = + _entityRegistry + .getEntitySpec(Constants.DATASET_ENTITY_NAME) + .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + + final ListResult<RecordTemplate> latestAspects = + _entityService.listLatestAspects( + Constants.DATASET_ENTITY_NAME, + Constants.UPSTREAM_LINEAGE_ASPECT_NAME, + start, + BATCH_SIZE); + + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { log.debug("Found 0 upstreamLineage aspects for datasets. 
Skipping migration."); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of upstreamLineages. - log.warn("Failed to match upstreamLineage aspects with corresponding urns. Found mismatched length between aspects ({})" + log.warn( + "Failed to match upstreamLineage aspects with corresponding urns. Found mismatched length between aspects ({})" + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), @@ -104,48 +116,56 @@ private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - urn, - Constants.DATASET_ENTITY_NAME, - Constants.UPSTREAM_LINEAGE_ASPECT_NAME, - upstreamLineageAspectSpec, - null, - upstreamLineage, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + urn, + Constants.DATASET_ENTITY_NAME, + Constants.UPSTREAM_LINEAGE_ASPECT_NAME, + upstreamLineageAspectSpec, + null, + upstreamLineage, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return latestAspects.getTotalCount(); } - private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditStamp auditStamp) throws Exception { - final AspectSpec inputFieldsAspectSpec = _entityRegistry.getEntitySpec(entityName) - .getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME); + private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditStamp auditStamp) + throws Exception { + final AspectSpec inputFieldsAspectSpec = + _entityRegistry.getEntitySpec(entityName).getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME); - final ListResult<RecordTemplate> latestAspects = _entityService.listLatestAspects( - entityName, - Constants.INPUT_FIELDS_ASPECT_NAME, - start, - BATCH_SIZE); + final ListResult<RecordTemplate> latestAspects = + _entityService.listLatestAspects( + entityName, Constants.INPUT_FIELDS_ASPECT_NAME, start, BATCH_SIZE); - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { log.debug("Found 0 inputFields aspects. Skipping migration."); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of inputFields. - log.warn("Failed to match inputFields aspects with corresponding urns. Found mismatched length between aspects ({})" + log.warn( + "Failed to match inputFields aspects with corresponding urns. 
Found mismatched length between aspects ({})" + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), @@ -164,26 +184,32 @@ private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditS continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - urn, - entityName, - Constants.INPUT_FIELDS_ASPECT_NAME, - inputFieldsAspectSpec, - null, - inputFields, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + urn, + entityName, + Constants.INPUT_FIELDS_ASPECT_NAME, + inputFieldsAspectSpec, + null, + inputFields, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return latestAspects.getTotalCount(); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java index 355936fe1994c..bb7ad80ef73d2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -33,16 +35,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class RestoreDbtSiblingsIndices implements BootstrapStep { private static final String VERSION = "0"; private static final String UPGRADE_ID = "restore-dbt-siblings-indices"; private static final Urn SIBLING_UPGRADE_URN = - EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(UPGRADE_ID), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(UPGRADE_ID), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); private static final Integer BATCH_SIZE = 1000; private static final Integer SLEEP_SECONDS = 120; @@ -65,12 +65,19 @@ public void execute() throws Exception { log.info("Attempting to run RestoreDbtSiblingsIndices upgrade.."); log.info(String.format("Waiting %s seconds..", SLEEP_SECONDS)); - EntityResponse response = _entityService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, SIBLING_UPGRADE_URN, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - ); - if (response != null && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { - DataMap dataMap = response.getAspects().get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME).getValue().data(); + EntityResponse response = + _entityService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + SIBLING_UPGRADE_URN, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); + if (response != null + && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { + DataMap dataMap 
= + response + .getAspects() + .get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) + .getValue() + .data(); DataHubUpgradeRequest request = new DataHubUpgradeRequest(dataMap); if (request.hasVersion() && request.getVersion().equals(VERSION)) { log.info("RestoreDbtSiblingsIndices has run before with this version. Skipping"); @@ -89,11 +96,20 @@ public void execute() throws Exception { log.info("Found {} dataset entities to attempt to bootstrap", rowCount); final AspectSpec datasetAspectSpec = - _entityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME).getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); - final AuditStamp auditStamp = new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - final DataHubUpgradeRequest upgradeRequest = new DataHubUpgradeRequest().setTimestampMs(System.currentTimeMillis()).setVersion(VERSION); - ingestUpgradeAspect(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, upgradeRequest, auditStamp); + _entityRegistry + .getEntitySpec(Constants.DATASET_ENTITY_NAME) + .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + final DataHubUpgradeRequest upgradeRequest = + new DataHubUpgradeRequest() + .setTimestampMs(System.currentTimeMillis()) + .setVersion(VERSION); + ingestUpgradeAspect( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, upgradeRequest, auditStamp); int indexedCount = 0; while (indexedCount < rowCount) { @@ -101,19 +117,23 @@ public void execute() throws Exception { indexedCount += BATCH_SIZE; } - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); ingestUpgradeAspect(Constants.DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, upgradeResult, auditStamp); log.info("Successfully restored sibling aspects"); } catch (Exception e) { log.error("Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); _entityService.deleteUrn(SIBLING_UPGRADE_URN); - throw new RuntimeException("Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); + throw new RuntimeException( + "Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); } } - private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp, AspectSpec upstreamAspectSpec) { - ListUrnsResult datasetUrnsResult = _entityService.listUrns(DATASET_ENTITY_NAME, start, BATCH_SIZE); + private void getAndRestoreUpstreamLineageIndices( + int start, AuditStamp auditStamp, AspectSpec upstreamAspectSpec) { + ListUrnsResult datasetUrnsResult = + _entityService.listUrns(DATASET_ENTITY_NAME, start, BATCH_SIZE); List<Urn> datasetUrns = datasetUrnsResult.getEntities(); log.info("Re-indexing upstreamLineage aspect from {} with batch size {}", start, BATCH_SIZE); @@ -121,12 +141,16 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam return; } - final Map<Urn, EntityResponse> upstreamLineageResponse; + final Map<Urn, EntityResponse> upstreamLineageResponse; try { upstreamLineageResponse = - _entityService.getEntitiesV2(DATASET_ENTITY_NAME, new HashSet<>(datasetUrns), Collections.singleton(UPSTREAM_LINEAGE_ASPECT_NAME)); + _entityService.getEntitiesV2( + DATASET_ENTITY_NAME, + new HashSet<>(datasetUrns), + Collections.singleton(UPSTREAM_LINEAGE_ASPECT_NAME)); } catch 
(URISyntaxException e) { - throw new RuntimeException(String.format("Error fetching upstream lineage history: %s", e.toString())); + throw new RuntimeException( + String.format("Error fetching upstream lineage history: %s", e.toString())); } // Loop over datasets and produce changelog @@ -142,26 +166,32 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - datasetUrn, - DATASET_ENTITY_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - upstreamAspectSpec, - null, - upstreamLineage, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + datasetUrn, + DATASET_ENTITY_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + upstreamAspectSpec, + null, + upstreamLineage, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); } private UpstreamLineage getUpstreamLineage(EntityResponse entityResponse) { @@ -170,10 +200,12 @@ private UpstreamLineage getUpstreamLineage(EntityResponse entityResponse) { return null; } - return new UpstreamLineage(aspectMap.get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); + return new UpstreamLineage( + aspectMap.get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); } - private void ingestUpgradeAspect(String aspectName, RecordTemplate aspect, AuditStamp auditStamp) { + private void ingestUpgradeAspect( + String aspectName, RecordTemplate aspect, AuditStamp auditStamp) { final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(SIBLING_UPGRADE_URN); upgradeProposal.setEntityType(Constants.DATA_HUB_UPGRADE_ENTITY_NAME); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java index 4de2bea9a76a9..319bbd084e05c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java @@ -16,7 +16,6 @@ import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; - import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; @@ -29,7 +28,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class RestoreGlossaryIndices extends UpgradeStep { private static final String VERSION = "1"; @@ -39,7 +37,9 @@ public class RestoreGlossaryIndices extends UpgradeStep { private final EntitySearchService _entitySearchService; private final EntityRegistry _entityRegistry; - public RestoreGlossaryIndices(EntityService entityService, EntitySearchService entitySearchService, + public RestoreGlossaryIndices( + EntityService entityService, + EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entitySearchService = entitySearchService; @@ -48,12 +48,18 @@ public 
RestoreGlossaryIndices(EntityService entityService, EntitySearchService e @Override public void upgrade() throws Exception { - final AspectSpec termAspectSpec = _entityRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME) - .getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); - final AspectSpec nodeAspectSpec = _entityRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME) - .getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME); + final AspectSpec termAspectSpec = + _entityRegistry + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME) + .getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); + final AspectSpec nodeAspectSpec = + _entityRegistry + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME) + .getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final int totalTermsCount = getAndRestoreTermAspectIndices(0, auditStamp, termAspectSpec); int termsCount = BATCH_SIZE; @@ -76,20 +82,29 @@ public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - private int getAndRestoreTermAspectIndices(int start, AuditStamp auditStamp, AspectSpec termAspectSpec) - throws Exception { + private int getAndRestoreTermAspectIndices( + int start, AuditStamp auditStamp, AspectSpec termAspectSpec) throws Exception { SearchResult termsResult = - _entitySearchService.search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), "", null, - null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - List<Urn> termUrns = termsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); + _entitySearchService.search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + start, + BATCH_SIZE, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + List<Urn> termUrns = + termsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); if (termUrns.size() == 0) { return 0; } final Map<Urn, EntityResponse> termInfoResponses = - _entityService.getEntitiesV2(Constants.GLOSSARY_TERM_ENTITY_NAME, new HashSet<>(termUrns), - Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) - ); + _entityService.getEntitiesV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(termUrns), + Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)); // Loop over Terms and produce changelog List<Future<?>> futures = new LinkedList<>(); @@ -105,43 +120,59 @@ null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - termUrn, - Constants.GLOSSARY_TERM_ENTITY_NAME, - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - termAspectSpec, - null, - termInfo, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + termUrn, + Constants.GLOSSARY_TERM_ENTITY_NAME, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + termAspectSpec, + null, + termInfo, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + 
f -> {
+              try {
+                f.get();
+              } catch (InterruptedException | ExecutionException e) {
+                throw new RuntimeException(e);
+              }
+            });
 
     return termsResult.getNumEntities();
   }
 
-  private int getAndRestoreNodeAspectIndices(int start, AuditStamp auditStamp, AspectSpec nodeAspectSpec) throws Exception {
-    SearchResult nodesResult = _entitySearchService.search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), "",
-        null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
-            .setSkipAggregates(true).setSkipHighlighting(true));
-    List<Urn> nodeUrns = nodesResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList());
+  private int getAndRestoreNodeAspectIndices(
+      int start, AuditStamp auditStamp, AspectSpec nodeAspectSpec) throws Exception {
+    SearchResult nodesResult =
+        _entitySearchService.search(
+            List.of(Constants.GLOSSARY_NODE_ENTITY_NAME),
+            "",
+            null,
+            null,
+            start,
+            BATCH_SIZE,
+            new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true));
+    List<Urn> nodeUrns =
+        nodesResult.getEntities().stream()
+            .map(SearchEntity::getEntity)
+            .collect(Collectors.toList());
     if (nodeUrns.size() == 0) {
       return 0;
     }
 
-    final Map<Urn, EntityResponse> nodeInfoResponses = _entityService.getEntitiesV2(
-        Constants.GLOSSARY_NODE_ENTITY_NAME,
-        new HashSet<>(nodeUrns),
-        Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)
-    );
+    final Map<Urn, EntityResponse> nodeInfoResponses =
+        _entityService.getEntitiesV2(
+            Constants.GLOSSARY_NODE_ENTITY_NAME,
+            new HashSet<>(nodeUrns),
+            Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME));
 
     // Loop over Nodes and produce changelog
     List<Future<?>> futures = new LinkedList<>();
@@ -157,26 +188,32 @@ null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false)
         continue;
       }
 
-      futures.add(_entityService.alwaysProduceMCLAsync(
-          nodeUrn,
-          Constants.GLOSSARY_NODE_ENTITY_NAME,
-          Constants.GLOSSARY_NODE_INFO_ASPECT_NAME,
-          nodeAspectSpec,
-          null,
-          nodeInfo,
-          null,
-          null,
-          auditStamp,
-          ChangeType.RESTATE).getFirst());
+      futures.add(
+          _entityService
+              .alwaysProduceMCLAsync(
+                  nodeUrn,
+                  Constants.GLOSSARY_NODE_ENTITY_NAME,
+                  Constants.GLOSSARY_NODE_INFO_ASPECT_NAME,
+                  nodeAspectSpec,
+                  null,
+                  nodeInfo,
+                  null,
+                  null,
+                  auditStamp,
+                  ChangeType.RESTATE)
+              .getFirst());
     }
 
-    futures.stream().filter(Objects::nonNull).forEach(f -> {
-      try {
-        f.get();
-      } catch (InterruptedException | ExecutionException e) {
-        throw new RuntimeException(e);
-      }
-    });
+    futures.stream()
+        .filter(Objects::nonNull)
+        .forEach(
+            f -> {
+              try {
+                f.get();
+              } catch (InterruptedException | ExecutionException e) {
+                throw new RuntimeException(e);
+              }
+            });
 
     return nodesResult.getNumEntities();
   }
@@ -187,7 +224,8 @@ private GlossaryTermInfo mapTermInfo(EntityResponse entityResponse) {
       return null;
     }
 
-    return new GlossaryTermInfo(aspectMap.get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data());
+    return new GlossaryTermInfo(
+        aspectMap.get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data());
   }
 
   private GlossaryNodeInfo mapNodeInfo(EntityResponse entityResponse) {
@@ -196,6 +234,7 @@ private GlossaryNodeInfo mapNodeInfo(EntityResponse entityResponse) {
       return null;
     }
 
-    return new GlossaryNodeInfo(aspectMap.get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data());
+    return new GlossaryNodeInfo(
+        aspectMap.get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data());
   }
-}
\ No newline at end of file
+}
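Note, purely illustrative and not part of this patch: every Restore* step above ends a batch with the same drain loop, blocking on each Future returned by alwaysProduceMCLAsync so that a failed RESTATE changelog emission fails the step instead of vanishing silently. The idiom on its own, plain JDK, with invented names:

// Sketch only: the future-draining idiom repeated throughout the Restore* steps above.
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

final class FutureDrainSketch {
  /** Blocks until every non-null future completes, rethrowing the first failure unchecked. */
  static void drain(List<Future<?>> futures) {
    futures.stream()
        .filter(Objects::nonNull)
        .forEach(
            f -> {
              try {
                f.get();
              } catch (InterruptedException | ExecutionException e) {
                // Mirrors the steps above; a stricter variant would also re-interrupt the thread.
                throw new RuntimeException(e);
              }
            });
  }
}

diff --git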
a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java index 7fcafa24d7b45..e2d59b505a568 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePaths; @@ -19,9 +21,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * This is an opt-in optional upgrade step to migrate your browse paths to the new truncated form. * It is idempotent, can be retried as many times as necessary. @@ -29,13 +28,13 @@ @Slf4j public class UpgradeDefaultBrowsePathsStep extends UpgradeStep { - private static final Set<String> ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME - ); + private static final Set<String> ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME); private static final String VERSION = "1"; private static final String UPGRADE_ID = "upgrade-default-browse-paths-step"; private static final Integer BATCH_SIZE = 5000; @@ -47,14 +46,18 @@ public UpgradeDefaultBrowsePathsStep(EntityService entityService) { @Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); int total = 0; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s out of %s of browse paths for entity type %s", - migratedCount, migratedCount + BATCH_SIZE, total, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s out of %s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, total, entityType)); total = getAndMigrateBrowsePaths(entityType, migratedCount, auditStamp); migratedCount += BATCH_SIZE; } while (migratedCount < total); @@ -71,21 +74,24 @@ public ExecutionMode getExecutionMode() { private int getAndMigrateBrowsePaths(String entityType, int start, AuditStamp auditStamp) throws Exception { - final ListResult<RecordTemplate> latestAspects = _entityService.listLatestAspects( - entityType, - Constants.BROWSE_PATHS_ASPECT_NAME, - start, - BATCH_SIZE); + final ListResult<RecordTemplate> latestAspects = + _entityService.listLatestAspects( + entityType, Constants.BROWSE_PATHS_ASPECT_NAME, start, BATCH_SIZE); - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { - log.debug(String.format("Found 0 browse paths for entity with type %s. 
Skipping migration!", entityType)); + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { + log.debug( + String.format( + "Found 0 browse paths for entity with type %s. Skipping migration!", entityType)); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of paths. - log.warn("Failed to match browse path aspects with corresponding urns. Found mismatched length between aspects ({})" - + "and metadata ({}) for metadata {}", + log.warn( + "Failed to match browse path aspects with corresponding urns. Found mismatched length between aspects ({})" + + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), latestAspects.getMetadata()); @@ -107,7 +113,8 @@ private int getAndMigrateBrowsePaths(String entityType, int start, AuditStamp au log.debug(String.format("Inspecting browse path for urn %s, value %s", urn, browsePaths)); if (browsePaths.hasPaths() && browsePaths.getPaths().size() == 1) { - String legacyBrowsePath = BrowsePathUtils.getLegacyDefaultBrowsePath(urn, _entityService.getEntityRegistry()); + String legacyBrowsePath = + BrowsePathUtils.getLegacyDefaultBrowsePath(urn, _entityService.getEntityRegistry()); log.debug(String.format("Legacy browse path for urn %s, value %s", urn, legacyBrowsePath)); if (legacyBrowsePath.equals(browsePaths.getPaths().get(0))) { migrateBrowsePath(urn, auditStamp); @@ -126,13 +133,9 @@ private void migrateBrowsePath(Urn urn, AuditStamp auditStamp) throws Exception proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(newPaths)); - _entityService.ingestProposal( - proposal, - auditStamp, - false - ); + _entityService.ingestProposal(proposal, auditStamp, false); } - -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java index 5cac32cfe1a42..409285fc8f1e9 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java @@ -19,7 +19,8 @@ public String name() { @Override public void execute() throws Exception { if (!_dataHubUpgradeKafkaListener.waitForBootstrap()) { - throw new IllegalStateException("Build indices was unsuccessful, stopping bootstrap process."); + throw new IllegalStateException( + "Build indices was unsuccessful, stopping bootstrap process."); } } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java index 52fee1342755c..67d0976a1b0a8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java +++ 
b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java @@ -1,5 +1,7 @@ package com.linkedin.restli.server; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.linkedin.data.codec.AbstractJacksonDataCodec; import com.linkedin.metadata.filter.RestliLoggingFilter; @@ -10,59 +12,62 @@ import com.linkedin.r2.transport.http.server.RAPServlet; import com.linkedin.restli.docgen.DefaultDocumentationRequestHandler; import com.linkedin.restli.server.spring.SpringInjectResourceFactory; +import java.util.concurrent.Executors; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.util.concurrent.Executors; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j @Configuration public class RAPServletFactory { - @Value("#{systemEnvironment['RESTLI_SERVLET_THREADS']}") - private Integer environmentThreads; + @Value("#{systemEnvironment['RESTLI_SERVLET_THREADS']}") + private Integer environmentThreads; + + @Value("${" + INGESTION_MAX_SERIALIZED_STRING_LENGTH + ":16000000}") + private int maxSerializedStringLength; - @Value("${" + INGESTION_MAX_SERIALIZED_STRING_LENGTH + ":16000000}") - private int maxSerializedStringLength; + @Bean(name = "restliSpringInjectResourceFactory") + public SpringInjectResourceFactory springInjectResourceFactory() { + return new SpringInjectResourceFactory(); + } - @Bean(name = "restliSpringInjectResourceFactory") - public SpringInjectResourceFactory springInjectResourceFactory() { - return new SpringInjectResourceFactory(); - } + @Bean("parseqEngineThreads") + public int parseqEngineThreads() { + return environmentThreads != null + ? environmentThreads + : (Runtime.getRuntime().availableProcessors() + 1); + } - @Bean("parseqEngineThreads") - public int parseqEngineThreads() { - return environmentThreads != null ? environmentThreads : (Runtime.getRuntime().availableProcessors() + 1); - } - @Bean - public RAPServlet rapServlet( - @Qualifier("restliSpringInjectResourceFactory") SpringInjectResourceFactory springInjectResourceFactory, - @Qualifier("parseqEngineThreads") int threads) { - log.info("Starting restli servlet with {} threads.", threads); - Engine parseqEngine = new EngineBuilder() - .setTaskExecutor(Executors.newFixedThreadPool(threads)) - .setTimerScheduler(Executors.newSingleThreadScheduledExecutor()) - .build(); + @Bean + public RAPServlet rapServlet( + @Qualifier("restliSpringInjectResourceFactory") + SpringInjectResourceFactory springInjectResourceFactory, + @Qualifier("parseqEngineThreads") int threads) { + log.info("Starting restli servlet with {} threads.", threads); + Engine parseqEngine = + new EngineBuilder() + .setTaskExecutor(Executors.newFixedThreadPool(threads)) + .setTimerScheduler(Executors.newSingleThreadScheduledExecutor()) + .build(); - // !!!!!!! IMPORTANT !!!!!!! - // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. Without this the limit is - // whatever Jackson is defaulting to (5 MB currently). - AbstractJacksonDataCodec.JSON_FACTORY.setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSerializedStringLength).build()); - // !!!!!!! IMPORTANT !!!!!!! + // !!!!!!! IMPORTANT !!!!!!! + // This effectively sets the max aspect size to 16 MB. 
Used in deserialization of messages.
+    // Without this the limit is
+    // whatever Jackson is defaulting to (5 MB currently).
+    AbstractJacksonDataCodec.JSON_FACTORY.setStreamReadConstraints(
+        StreamReadConstraints.builder().maxStringLength(maxSerializedStringLength).build());
+    // !!!!!!! IMPORTANT !!!!!!!
 
-        RestLiConfig config = new RestLiConfig();
-        config.setDocumentationRequestHandler(new DefaultDocumentationRequestHandler());
-        config.setResourcePackageNames("com.linkedin.metadata.resources");
-        config.addFilter(new RestliLoggingFilter());
+    RestLiConfig config = new RestLiConfig();
+    config.setDocumentationRequestHandler(new DefaultDocumentationRequestHandler());
+    config.setResourcePackageNames("com.linkedin.metadata.resources");
+    config.addFilter(new RestliLoggingFilter());
 
-        RestLiServer restLiServer = new RestLiServer(config, springInjectResourceFactory, parseqEngine);
-        return new RAPServlet(new FilterChainDispatcher(new DelegatingTransportDispatcher(restLiServer, restLiServer),
-                FilterChains.empty()));
-    }
+    RestLiServer restLiServer = new RestLiServer(config, springInjectResourceFactory, parseqEngine);
+    return new RAPServlet(
+        new FilterChainDispatcher(
+            new DelegatingTransportDispatcher(restLiServer, restLiServer), FilterChains.empty()));
+  }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java
index 723f0333999dd..29211d295a2a1 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java
@@ -1,28 +1,28 @@
 package com.linkedin.restli.server;
 
 import com.linkedin.r2.transport.http.server.RAPServlet;
+import java.io.IOException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 import org.springframework.web.HttpRequestHandler;
 import org.springframework.web.context.support.HttpRequestHandlerServlet;
 
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-
 @Component
 public class RestliHandlerServlet extends HttpRequestHandlerServlet implements HttpRequestHandler {
-    @Autowired
-    private RAPServlet _r2Servlet;
+  @Autowired private RAPServlet _r2Servlet;
 
-    @Override
-    public void service(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
-        _r2Servlet.service(req, res);
-    }
+  @Override
+  public void service(HttpServletRequest req, HttpServletResponse res)
+      throws ServletException, IOException {
+    _r2Servlet.service(req, res);
+  }
 
-    @Override
-    public void handleRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
-        service(request, response);
-    }
+  @Override
+  public void handleRequest(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+    service(request, response);
+  }
 }
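Note, purely illustrative and not part of this patch: the StreamReadConstraints override in RAPServletFactory above is the same move the bootstrap steps apply to their ObjectMappers, raising Jackson's default maximum string length (about 5 MB) so that large serialized aspects still deserialize. A standalone sketch; the helper name is invented, and 16_000_000 mirrors the 16 MB default shown above:

// Sketch only: raising Jackson's string-length limit, as the hunks above do per mapper.
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;

final class JacksonLimitSketch {
  /** Returns a mapper whose parser accepts strings up to maxLength chars, e.g. 16_000_000. */
  static ObjectMapper withMaxStringLength(int maxLength) {
    ObjectMapper mapper = new ObjectMapper();
    mapper
        .getFactory()
        .setStreamReadConstraints(
            StreamReadConstraints.builder().maxStringLength(maxLength).build());
    return mapper;
  }
}

diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java
index 408c7b67b25f0..19a2a19fcaa4c 100644
---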
a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.kafka; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -8,25 +11,18 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @SpringBootTest( - properties = { - "spring.kafka.properties.security.protocol=SSL" - }, - classes = { - SimpleKafkaConsumerFactory.class, - ConfigurationProvider.class - }) + properties = {"spring.kafka.properties.security.protocol=SSL"}, + classes = {SimpleKafkaConsumerFactory.class, ConfigurationProvider.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class SimpleKafkaConsumerFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ConcurrentKafkaListenerContainerFactory<?, ?> testFactory; + @Autowired ConcurrentKafkaListenerContainerFactory<?, ?> testFactory; - @Test - void testInitialization() { - assertNotNull(testFactory); - assertEquals(testFactory.getConsumerFactory().getConfigurationProperties().get("security.protocol"), "SSL"); - } + @Test + void testInitialization() { + assertNotNull(testFactory); + assertEquals( + testFactory.getConsumerFactory().getConfigurationProperties().get("security.protocol"), + "SSL"); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java index 017e8f32886af..6cc1d293e24e6 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.search; +import static com.datahub.util.RecordUtils.*; +import static com.linkedin.metadata.search.client.CachingEntitySearchService.*; + import com.google.common.collect.ImmutableList; import com.hazelcast.config.Config; import com.hazelcast.core.HazelcastInstance; @@ -34,136 +37,163 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.datahub.util.RecordUtils.*; -import static com.linkedin.metadata.search.client.CachingEntitySearchService.*; - - public class CacheTest extends JetTestSupport { - HazelcastCacheManager cacheManager1; - HazelcastCacheManager cacheManager2; - HazelcastInstance instance1; - HazelcastInstance instance2; + HazelcastCacheManager cacheManager1; + HazelcastCacheManager cacheManager2; + HazelcastInstance instance1; + HazelcastInstance instance2; - public CacheTest() { - Config config = new Config(); + public CacheTest() { + Config config = new Config(); - instance1 = createHazelcastInstance(config); - instance2 = createHazelcastInstance(config); + instance1 = createHazelcastInstance(config); + instance2 = createHazelcastInstance(config); - cacheManager1 = new HazelcastCacheManager(instance1); - cacheManager2 = new HazelcastCacheManager(instance2); - } + cacheManager1 = new HazelcastCacheManager(instance1); + 
cacheManager2 = new HazelcastCacheManager(instance2); + } - @Test - public void hazelcastTest() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); - SearchResult searchResult = new SearchResult() + @Test + public void hazelcastTest() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); + SearchResult searchResult = + new SearchResult() .setEntities(new SearchEntityArray(List.of(searchEntity))) .setNumEntities(1) .setFrom(0) .setPageSize(1) .setMetadata(new SearchResultMetadata()); - Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination> - quintet = Quintet.with(List.of(corpuserUrn.toString()), "*", null, null, - new CacheableSearcher.QueryPagination(0, 1)); - - CacheableSearcher<Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> cacheableSearcher1 = - new CacheableSearcher<>(cacheManager1.getCache("test"), 10, - querySize -> searchResult, - querySize -> quintet, null, true); - - CacheableSearcher<Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> cacheableSearcher2 = - new CacheableSearcher<>(cacheManager2.getCache("test"), 10, + Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination> + quintet = + Quintet.with( + List.of(corpuserUrn.toString()), + "*", + null, + null, + new CacheableSearcher.QueryPagination(0, 1)); + + CacheableSearcher< + Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> + cacheableSearcher1 = + new CacheableSearcher<>( + cacheManager1.getCache("test"), + 10, querySize -> searchResult, - querySize -> quintet, null, true); - - // Cache result - SearchResult result = cacheableSearcher1.getSearchResults(0, 1); - Assert.assertNotEquals(result, null); - - Assert.assertEquals(instance1.getMap("test").get(quintet), instance2.getMap("test").get(quintet)); - Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), searchResult); - Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), cacheableSearcher2.getSearchResults(0, 1)); - } - - @Test - public void hazelcastTestScroll() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); - ScrollResult scrollResult = new ScrollResult() + querySize -> quintet, + null, + true); + + CacheableSearcher< + Quintet<List<String>, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> + cacheableSearcher2 = + new CacheableSearcher<>( + cacheManager2.getCache("test"), + 10, + querySize -> searchResult, + querySize -> quintet, + null, + true); + + // Cache result + SearchResult result = cacheableSearcher1.getSearchResults(0, 1); + Assert.assertNotEquals(result, null); + + Assert.assertEquals( + instance1.getMap("test").get(quintet), instance2.getMap("test").get(quintet)); + Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), searchResult); + Assert.assertEquals( + cacheableSearcher1.getSearchResults(0, 1), cacheableSearcher2.getSearchResults(0, 1)); + } + + @Test + public void hazelcastTestScroll() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); + ScrollResult scrollResult = + new ScrollResult() .setEntities(new SearchEntityArray(List.of(searchEntity))) .setNumEntities(1) .setPageSize(1) .setMetadata(new SearchResultMetadata()); - 
final Criterion filterCriterion = new Criterion() + final Criterion filterCriterion = + new Criterion() .setField("platform") .setCondition(Condition.EQUAL) .setValue("hive") .setValues(new StringArray(ImmutableList.of("hive"))); - final Criterion subtypeCriterion = new Criterion() + final Criterion subtypeCriterion = + new Criterion() .setField("subtypes") .setCondition(Condition.EQUAL) .setValue("") .setValues(new StringArray(ImmutableList.of("view"))); - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(subtypeCriterion))) - )); - - Sextet<List<String>, String, String, String, String, Integer> - sextet = Sextet.with(List.of(corpuserUrn.toString()), "*", toJsonString(filterWithCondition), null, null, 1); - - Cache cache1 = cacheManager1.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); - Cache cache2 = cacheManager2.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); - - // Cache result - String json = toJsonString(scrollResult); - cache1.put(sextet, json); - Assert.assertEquals(instance1.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet), - instance2.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet)); - String cachedResult1 = cache1.get(sextet, String.class); - String cachedResult2 = cache2.get(sextet, String.class); - Assert.assertEquals(cachedResult1, cachedResult2); - Assert.assertEquals(cache1.get(sextet, String.class), json); - Assert.assertEquals(cache2.get(sextet, String.class), json); - } - - @Test - public void testLineageCaching() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - EntityLineageResult lineageResult = new EntityLineageResult(); - LineageRelationshipArray array = new LineageRelationshipArray(); - LineageRelationship lineageRelationship = new LineageRelationship().setEntity(corpuserUrn).setType("type"); - for (int i = 0; i < 10000; i++) { - array.add(lineageRelationship); - } - lineageResult.setRelationships(array).setCount(1).setStart(0).setTotal(1); - CachedEntityLineageResult cachedEntityLineageResult = new CachedEntityLineageResult(lineageResult, - System.currentTimeMillis()); - - Cache cache1 = cacheManager1.getCache("relationshipSearchService"); - Cache cache2 = cacheManager2.getCache("relationshipSearchService"); - - EntityLineageResultCacheKey key = new EntityLineageResultCacheKey(corpuserUrn, LineageDirection.DOWNSTREAM, - 0L, 1L, 1, ChronoUnit.DAYS); - - cache1.put(key, cachedEntityLineageResult); - - Assert.assertEquals(instance1.getMap("relationshipSearchService").get(key), - instance2.getMap("relationshipSearchService").get(key)); - CachedEntityLineageResult cachedResult1 = cache1.get(key, CachedEntityLineageResult.class); - CachedEntityLineageResult cachedResult2 = cache2.get(key, CachedEntityLineageResult.class); - Assert.assertEquals(cachedResult1, cachedResult2); - Assert.assertEquals(cache1.get(key, CachedEntityLineageResult.class), cachedEntityLineageResult); - Assert.assertEquals(cache2.get(key, CachedEntityLineageResult.class).getEntityLineageResult(), lineageResult); + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))), + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(subtypeCriterion))))); + + Sextet<List<String>, String, String, String, String, Integer> 
sextet = + Sextet.with( + List.of(corpuserUrn.toString()), "*", toJsonString(filterWithCondition), null, null, 1); + + Cache cache1 = cacheManager1.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); + Cache cache2 = cacheManager2.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); + + // Cache result + String json = toJsonString(scrollResult); + cache1.put(sextet, json); + Assert.assertEquals( + instance1.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet), + instance2.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet)); + String cachedResult1 = cache1.get(sextet, String.class); + String cachedResult2 = cache2.get(sextet, String.class); + Assert.assertEquals(cachedResult1, cachedResult2); + Assert.assertEquals(cache1.get(sextet, String.class), json); + Assert.assertEquals(cache2.get(sextet, String.class), json); + } + + @Test + public void testLineageCaching() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + EntityLineageResult lineageResult = new EntityLineageResult(); + LineageRelationshipArray array = new LineageRelationshipArray(); + LineageRelationship lineageRelationship = + new LineageRelationship().setEntity(corpuserUrn).setType("type"); + for (int i = 0; i < 10000; i++) { + array.add(lineageRelationship); } + lineageResult.setRelationships(array).setCount(1).setStart(0).setTotal(1); + CachedEntityLineageResult cachedEntityLineageResult = + new CachedEntityLineageResult(lineageResult, System.currentTimeMillis()); + + Cache cache1 = cacheManager1.getCache("relationshipSearchService"); + Cache cache2 = cacheManager2.getCache("relationshipSearchService"); + + EntityLineageResultCacheKey key = + new EntityLineageResultCacheKey( + corpuserUrn, LineageDirection.DOWNSTREAM, 0L, 1L, 1, ChronoUnit.DAYS); + + cache1.put(key, cachedEntityLineageResult); + + Assert.assertEquals( + instance1.getMap("relationshipSearchService").get(key), + instance2.getMap("relationshipSearchService").get(key)); + CachedEntityLineageResult cachedResult1 = cache1.get(key, CachedEntityLineageResult.class); + CachedEntityLineageResult cachedResult2 = cache2.get(key, CachedEntityLineageResult.class); + Assert.assertEquals(cachedResult1, cachedResult2); + Assert.assertEquals( + cache1.get(key, CachedEntityLineageResult.class), cachedEntityLineageResult); + Assert.assertEquals( + cache2.get(key, CachedEntityLineageResult.class).getEntityLineageResult(), lineageResult); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java index 266039afb45d5..f910f7981b138 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import org.opensearch.action.support.WriteRequest; @@ -10,19 +13,15 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @TestPropertySource(locations = 
"classpath:/application.yml") @SpringBootTest(classes = {ElasticSearchBulkProcessorFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchBulkProcessorFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ESBulkProcessor test; + @Autowired ESBulkProcessor test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(WriteRequest.RefreshPolicy.NONE, test.getWriteRequestRefreshPolicy()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(WriteRequest.RefreshPolicy.NONE, test.getWriteRequestRefreshPolicy()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java index 6ef623648640a..a3f3f469ea611 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java @@ -1,33 +1,31 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.util.Map; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @SpringBootTest( - properties = { - "elasticsearch.index.settingsOverrides=", - "elasticsearch.index.entitySettingsOverrides=", - "elasticsearch.index.prefix=test_prefix" - }, - classes = {ElasticSearchIndexBuilderFactory.class}) + properties = { + "elasticsearch.index.settingsOverrides=", + "elasticsearch.index.entitySettingsOverrides=", + "elasticsearch.index.prefix=test_prefix" + }, + classes = {ElasticSearchIndexBuilderFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchIndexBuilderFactoryEmptyTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(Map.of(), test.getIndexSettingOverrides()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(Map.of(), test.getIndexSettingOverrides()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java index 21c3265753ac5..fa4575c1e4142 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java @@ -1,31 +1,36 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.*; + import 
com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import org.springframework.beans.factory.annotation.Autowired; - -import static org.testng.Assert.*; @SpringBootTest( - properties = { - "elasticsearch.index.settingsOverrides={\"my_index\":{\"number_of_shards\":\"10\"}}", - "elasticsearch.index.entitySettingsOverrides={\"my_entity\":{\"number_of_shards\":\"5\"}}", - "elasticsearch.index.prefix=test_prefix" - }, - classes = {ElasticSearchIndexBuilderFactory.class}) + properties = { + "elasticsearch.index.settingsOverrides={\"my_index\":{\"number_of_shards\":\"10\"}}", + "elasticsearch.index.entitySettingsOverrides={\"my_entity\":{\"number_of_shards\":\"5\"}}", + "elasticsearch.index.prefix=test_prefix" + }, + classes = {ElasticSearchIndexBuilderFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) -public class ElasticSearchIndexBuilderFactoryOverridesTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; +public class ElasticSearchIndexBuilderFactoryOverridesTest + extends AbstractTestNGSpringContextTests { + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals("10", test.getIndexSettingOverrides().get("test_prefix_my_index").get("number_of_shards")); - assertEquals("5", test.getIndexSettingOverrides().get("test_prefix_my_entityindex_v2").get("number_of_shards")); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals( + "10", test.getIndexSettingOverrides().get("test_prefix_my_index").get("number_of_shards")); + assertEquals( + "5", + test.getIndexSettingOverrides() + .get("test_prefix_my_entityindex_v2") + .get("number_of_shards")); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java index 4d63d18f370eb..2c309cb44b04e 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java @@ -1,7 +1,11 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; @@ -9,21 +13,15 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.util.Map; - -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertEquals; - @TestPropertySource(locations = "classpath:/application.yml") @SpringBootTest(classes = {ElasticSearchIndexBuilderFactory.class}) 
@EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchIndexBuilderFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(Map.of(), test.getIndexSettingOverrides()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(Map.of(), test.getIndexSettingOverrides()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java index 6461df2894326..a8e6b50089602 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java @@ -1,7 +1,12 @@ package com.linkedin.gms.factory.secret; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.secret.SecretService; +import java.io.IOException; +import java.nio.charset.StandardCharsets; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -10,28 +15,22 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @TestPropertySource(locations = "classpath:/application.yml") @SpringBootTest(classes = {SecretServiceFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class SecretServiceFactoryTest extends AbstractTestNGSpringContextTests { - @Value("${secretService.encryptionKey}") - private String encryptionKey; + @Value("${secretService.encryptionKey}") + private String encryptionKey; - @Autowired - SecretService test; + @Autowired SecretService test; - @Test - void testInjection() throws IOException { - assertEquals(encryptionKey, "ENCRYPTION_KEY"); - assertNotNull(test); - assertEquals(test.getHashedPassword("".getBytes(StandardCharsets.UTF_8), "password"), - "XohImNooBHFR0OVvjcYpJ3NgPQ1qq73WKhHvch0VQtg="); - } + @Test + void testInjection() throws IOException { + assertEquals(encryptionKey, "ENCRYPTION_KEY"); + assertNotNull(test); + assertEquals( + test.getHashedPassword("".getBytes(StandardCharsets.UTF_8), "password"), + "XohImNooBHFR0OVvjcYpJ3NgPQ1qq73WKhHvch0VQtg="); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java index 49fce75ab7c61..8268eeff48c5e 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import 
com.linkedin.common.BrowsePathEntry; @@ -20,56 +22,57 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchService; import com.linkedin.mxe.MetadataChangeProposal; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class BackfillBrowsePathsV2StepTest { private static final String VERSION = "2"; - private static final String UPGRADE_URN = String.format( - "urn:li:%s:%s", - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - "backfill-default-browse-paths-v2-step"); + private static final String UPGRADE_URN = + String.format( + "urn:li:%s:%s", + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "backfill-default-browse-paths-v2-step"); - private static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; + private static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; - private static final String DATA_JOB_URN = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; + private static final String DATA_JOB_URN = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; private static final String DATA_FLOW_URN = "urn:li:dataFlow:(orchestrator,flowId,cluster)"; - private static final String ML_MODEL_URN = "urn:li:mlModel:(urn:li:dataPlatform:sagemaker,trustmodel,PROD)"; - private static final String ML_MODEL_GROUP_URN = "urn:li:mlModelGroup:(urn:li:dataPlatform:sagemaker,a-model-package-group,PROD)"; - private static final String ML_FEATURE_TABLE_URN = "urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,user_features)"; + private static final String ML_MODEL_URN = + "urn:li:mlModel:(urn:li:dataPlatform:sagemaker,trustmodel,PROD)"; + private static final String ML_MODEL_GROUP_URN = + "urn:li:mlModelGroup:(urn:li:dataPlatform:sagemaker,a-model-package-group,PROD)"; + private static final String ML_FEATURE_TABLE_URN = + "urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,user_features)"; private static final String ML_FEATURE_URN = "urn:li:mlFeature:(test,feature_1)"; - private static final List<String> ENTITY_TYPES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); - private static final List<Urn> ENTITY_URNS = ImmutableList.of( - UrnUtils.getUrn(DATASET_URN), - UrnUtils.getUrn(DASHBOARD_URN), - UrnUtils.getUrn(CHART_URN), - UrnUtils.getUrn(DATA_JOB_URN), - UrnUtils.getUrn(DATA_FLOW_URN), - UrnUtils.getUrn(ML_MODEL_URN), - UrnUtils.getUrn(ML_MODEL_GROUP_URN), - UrnUtils.getUrn(ML_FEATURE_TABLE_URN), - UrnUtils.getUrn(ML_FEATURE_URN) - ); - + private static final List<String> ENTITY_TYPES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + 
Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); + private static final List<Urn> ENTITY_URNS = + ImmutableList.of( + UrnUtils.getUrn(DATASET_URN), + UrnUtils.getUrn(DASHBOARD_URN), + UrnUtils.getUrn(CHART_URN), + UrnUtils.getUrn(DATA_JOB_URN), + UrnUtils.getUrn(DATA_FLOW_URN), + UrnUtils.getUrn(ML_MODEL_URN), + UrnUtils.getUrn(ML_MODEL_GROUP_URN), + UrnUtils.getUrn(ML_FEATURE_TABLE_URN), + UrnUtils.getUrn(ML_FEATURE_URN)); @Test public void testExecuteNoExistingBrowsePaths() throws Exception { @@ -77,31 +80,32 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); - - BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = new BackfillBrowsePathsV2Step(mockService, mockSearchService); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); + + BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = + new BackfillBrowsePathsV2Step(mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); - Mockito.verify(mockSearchService, Mockito.times(9)).scrollAcrossEntities( - Mockito.any(), - Mockito.eq("*"), - Mockito.any(Filter.class), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null) - ); - // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of each entity type - Mockito.verify(mockService, Mockito.times(11)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(), - Mockito.eq(false) - ); + Mockito.verify(mockSearchService, Mockito.times(9)) + .scrollAcrossEntities( + Mockito.any(), + Mockito.eq("*"), + Mockito.any(Filter.class), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq("5m"), + Mockito.eq(5000), + Mockito.eq(null)); + // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of + // each entity type + Mockito.verify(mockService, Mockito.times(11)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); } @Test @@ -110,42 +114,51 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - 
Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(response); - - BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = new BackfillBrowsePathsV2Step(mockService, mockSearchService); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(response); + + BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = + new BackfillBrowsePathsV2Step(mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } - private EntityService initMockService() throws URISyntaxException { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new UpgradeDefaultBrowsePathsStepTest.TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); for (int i = 0; i < ENTITY_TYPES.size(); i++) { - Mockito.when(mockService.buildDefaultBrowsePathV2(Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(true))).thenReturn( - new BrowsePathsV2().setPath(new BrowsePathEntryArray(new BrowsePathEntry().setId("test")))); - - Mockito.when(mockService.getEntityV2( - Mockito.any(), - Mockito.eq(ENTITY_URNS.get(i)), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)) - )).thenReturn(null); + Mockito.when( + mockService.buildDefaultBrowsePathV2( + Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(true))) + .thenReturn( + new BrowsePathsV2() + .setPath(new BrowsePathEntryArray(new BrowsePathEntry().setId("test")))); + + Mockito.when( + mockService.getEntityV2( + Mockito.any(), + Mockito.eq(ENTITY_URNS.get(i)), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(null); } return mockService; @@ -155,16 +168,21 @@ private SearchService initMockSearchService() { final SearchService mockSearchService = Mockito.mock(SearchService.class); for (int i = 0; i < ENTITY_TYPES.size(); i++) { - Mockito.when(mockSearchService.scrollAcrossEntities( - Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), - Mockito.eq("*"), - Mockito.any(Filter.class), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null) - )).thenReturn(new ScrollResult().setNumEntities(1).setEntities(new SearchEntityArray(new SearchEntity().setEntity(ENTITY_URNS.get(i))))); + Mockito.when( + mockSearchService.scrollAcrossEntities( + Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), + Mockito.eq("*"), + Mockito.any(Filter.class), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq("5m"), + Mockito.eq(5000), + Mockito.eq(null))) + .thenReturn( + new ScrollResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray(new SearchEntity().setEntity(ENTITY_URNS.get(i))))); } return mockSearchService; diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java 
b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
index 0ae8eb2cba808..976698f3032d2 100644
--- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
+++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.boot.steps;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+
 import com.linkedin.common.DataPlatformInstance;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -18,31 +21,31 @@
 import org.jetbrains.annotations.NotNull;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-
-
 /**
  * Test the behavior of IngestDataPlatformInstancesStep.
  *
- * We expect it to check if any data platform instance aspects already exist in the database and if none are found,
- * to go through all the stored entities and ingest a data platform instance aspect for any that are compatible with it.
+ * <p>We expect it to check if any data platform instance aspects already exist in the database and
+ * if none are found, to go through all the stored entities and ingest a data platform instance
+ * aspect for any that are compatible with it.
 *
- * CorpUser is used as an example of an entity that is not compatible with data platform instance and therefore should be ignored.
- * Char is used as an example of an entity that should get adorned with a data platform instance.
+ * <p>CorpUser is used as an example of an entity that is not compatible with a data platform
+ * instance and therefore should be ignored. Chart is used as an example of an entity that should
+ * get adorned with a data platform instance.
 *
- * See {@link DataPlatformInstanceUtils} for the compatibility rules.
+ * <p>See {@link DataPlatformInstanceUtils} for the compatibility rules.
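+ *
+ * <p>As a rough sketch of the flow under test (simplified pseudocode inferred from the mocks and
+ * verifications below, not the step's actual implementation; the loop bounds and the in-line
+ * compatibility check are assumptions):
+ *
+ * <pre>
+ * if (!migrationsDao.checkIfAspectExists(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) {
+ *   for (String urn : migrationsDao.listAllUrns(start, count)) {
+ *     AspectSpec keySpec = entityService.getKeyAspectSpec(UrnUtils.getUrn(urn));
+ *     // ingest a DataPlatformInstance aspect, but only for compatible entities (e.g. chart)
+ *   }
+ * }
+ * </pre>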
*/ public class IngestDataPlatformInstancesStepTest { @Test - public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() throws Exception { + public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() + throws Exception { final EntityService entityService = mock(EntityService.class); final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class); mockDBWithDataPlatformInstanceAspects(migrationsDao); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(migrationsDao, times(1)).checkIfAspectExists(anyString()); @@ -57,7 +60,8 @@ public void testExecuteCopesWithEmptyDB() throws Exception { mockEmptyDB(migrationsDao); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(migrationsDao, times(1)).checkIfAspectExists(anyString()); @@ -75,9 +79,15 @@ public void testExecuteChecksKeySpecForAllUrns() throws Exception { final int countOfChartEntities = 4; final int totalUrnsInDB = countOfCorpUserEntities + countOfChartEntities; - mockDBWithWorkToDo(entityRegistry, entityService, migrationsDao, countOfCorpUserEntities, countOfChartEntities); + mockDBWithWorkToDo( + entityRegistry, + entityService, + migrationsDao, + countOfCorpUserEntities, + countOfChartEntities); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(entityService, times(totalUrnsInDB)).getKeyAspectSpec(any(Urn.class)); @@ -91,35 +101,55 @@ public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throw final int countOfCorpUserEntities = 5; final int countOfChartEntities = 7; - mockDBWithWorkToDo(entityRegistry, entityService, migrationsDao, countOfCorpUserEntities, countOfChartEntities); + mockDBWithWorkToDo( + entityRegistry, + entityService, + migrationsDao, + countOfCorpUserEntities, + countOfChartEntities); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(entityService, times(1)) .ingestAspects( - argThat(arg -> - arg.getItems().stream() - .allMatch(item -> item.getUrn().getEntityType().equals("chart") - && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((UpsertBatchItem) item).getAspect() instanceof DataPlatformInstance) - ), + argThat( + arg -> + arg.getItems().stream() + .allMatch( + item -> + item.getUrn().getEntityType().equals("chart") + && item.getAspectName() + .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) + && ((UpsertBatchItem) item).getAspect() + instanceof DataPlatformInstance)), any(), anyBoolean(), anyBoolean()); verify(entityService, times(0)) - .ingestAspects(argThat(arg -> - !arg.getItems().stream() - .allMatch(item -> item.getUrn().getEntityType().equals("chart") - && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((UpsertBatchItem) item).getAspect() instanceof DataPlatformInstance) - ), any(), anyBoolean(), 
anyBoolean()); + .ingestAspects( + argThat( + arg -> + !arg.getItems().stream() + .allMatch( + item -> + item.getUrn().getEntityType().equals("chart") + && item.getAspectName() + .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) + && ((UpsertBatchItem) item).getAspect() + instanceof DataPlatformInstance)), + any(), + anyBoolean(), + anyBoolean()); } @NotNull private ConfigEntityRegistry getTestEntityRegistry() { return new ConfigEntityRegistry( - IngestDataPlatformInstancesStepTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); + IngestDataPlatformInstancesStepTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); } private void mockDBWithDataPlatformInstanceAspects(AspectMigrationsDao migrationsDao) { @@ -137,18 +167,36 @@ private void mockDBWithWorkToDo( AspectMigrationsDao migrationsDao, int countOfCorpUserEntities, int countOfChartEntities) { - List<Urn> corpUserUrns = insertMockEntities(countOfCorpUserEntities, "corpuser", "urn:li:corpuser:test%d", entityRegistry, - entityService); - List<Urn> charUrns = insertMockEntities(countOfChartEntities, "chart", "urn:li:chart:(looker,test%d)", entityRegistry, - entityService); - List<String> allUrnsInDB = Stream.concat(corpUserUrns.stream(), charUrns.stream()).map(Urn::toString).collect(Collectors.toList()); + List<Urn> corpUserUrns = + insertMockEntities( + countOfCorpUserEntities, + "corpuser", + "urn:li:corpuser:test%d", + entityRegistry, + entityService); + List<Urn> charUrns = + insertMockEntities( + countOfChartEntities, + "chart", + "urn:li:chart:(looker,test%d)", + entityRegistry, + entityService); + List<String> allUrnsInDB = + Stream.concat(corpUserUrns.stream(), charUrns.stream()) + .map(Urn::toString) + .collect(Collectors.toList()); when(migrationsDao.checkIfAspectExists(DATA_PLATFORM_INSTANCE_ASPECT_NAME)).thenReturn(false); when(migrationsDao.countEntities()).thenReturn((long) allUrnsInDB.size()); when(migrationsDao.listAllUrns(anyInt(), anyInt())).thenReturn(allUrnsInDB); when(entityService.getEntityRegistry()).thenReturn(entityRegistry); } - private List<Urn> insertMockEntities(int count, String entity, String urnTemplate, EntityRegistry entityRegistry, EntityService entityService) { + private List<Urn> insertMockEntities( + int count, + String entity, + String urnTemplate, + EntityRegistry entityRegistry, + EntityService entityService) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entity); AspectSpec keySpec = entitySpec.getKeyAspectSpec(); List<Urn> urns = new ArrayList<>(); diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java index 24bdd193a39c8..b28a6e9f5cc5b 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.events.metadata.ChangeType; @@ -12,15 +15,11 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; - - /** * Test the behavior of 
IngestDefaultGlobalSettingsStep. * - * We expect it to ingest a JSON file, throwing if the JSON file - * is malformed or does not match the PDL model for GlobalSettings.pdl. + * <p>We expect it to ingest a JSON file, throwing if the JSON file is malformed or does not match + * the PDL model for GlobalSettings.pdl. */ public class IngestDefaultGlobalSettingsStepTest { @@ -29,20 +28,21 @@ public void testExecuteValidSettingsNoExistingSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_valid.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_valid.json"); step.execute(); GlobalSettingsInfo expectedResult = new GlobalSettingsInfo(); - expectedResult.setViews(new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:test"))); - - Mockito.verify(entityService, times(1)).ingestProposal( - Mockito.eq(buildUpdateSettingsProposal(expectedResult)), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + expectedResult.setViews( + new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:test"))); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateSettingsProposal(expectedResult)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -50,26 +50,29 @@ public void testExecuteValidSettingsExistingSettings() throws Exception { // Verify that the user provided settings overrides are NOT overwritten. final EntityService entityService = mock(EntityService.class); - final GlobalSettingsInfo existingSettings = new GlobalSettingsInfo() - .setViews(new GlobalViewsSettings() - .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); + final GlobalSettingsInfo existingSettings = + new GlobalSettingsInfo() + .setViews( + new GlobalViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); configureEntityServiceMock(entityService, existingSettings); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_valid.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_valid.json"); step.execute(); // Verify that the merge preserves the user settings. 
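     // (A reading inferred from this test's fixtures rather than from the step's source: the
     // bootstrap defaults only fill in settings the user has not already set, so the pre-existing
     // urn:li:dataHubView:custom default view must win over the file's default.)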
GlobalSettingsInfo expectedResult = new GlobalSettingsInfo(); - expectedResult.setViews(new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); - - Mockito.verify(entityService, times(1)).ingestProposal( - Mockito.eq(buildUpdateSettingsProposal(expectedResult)), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + expectedResult.setViews( + new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateSettingsProposal(expectedResult)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -77,9 +80,9 @@ public void testExecuteInvalidJsonSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_invalid_json.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_invalid_json.json"); Assert.assertThrows(RuntimeException.class, step::execute); @@ -92,9 +95,9 @@ public void testExecuteInvalidModelSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_invalid_model.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_invalid_model.json"); Assert.assertThrows(RuntimeException.class, step::execute); @@ -102,15 +105,18 @@ public void testExecuteInvalidModelSettings() throws Exception { verifyNoInteractions(entityService); } - private static void configureEntityServiceMock(final EntityService mockService, final GlobalSettingsInfo settingsInfo) { - Mockito.when(mockService.getAspect( - Mockito.eq(GLOBAL_SETTINGS_URN), - Mockito.eq(GLOBAL_SETTINGS_INFO_ASPECT_NAME), - Mockito.eq(0L) - )).thenReturn(settingsInfo); + private static void configureEntityServiceMock( + final EntityService mockService, final GlobalSettingsInfo settingsInfo) { + Mockito.when( + mockService.getAspect( + Mockito.eq(GLOBAL_SETTINGS_URN), + Mockito.eq(GLOBAL_SETTINGS_INFO_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(settingsInfo); } - private static MetadataChangeProposal buildUpdateSettingsProposal(final GlobalSettingsInfo settings) { + private static MetadataChangeProposal buildUpdateSettingsProposal( + final GlobalSettingsInfo settings) { final MetadataChangeProposal mcp = new MetadataChangeProposal(); mcp.setEntityUrn(GLOBAL_SETTINGS_URN); mcp.setEntityType(GLOBAL_SETTINGS_ENTITY_NAME); @@ -119,4 +125,4 @@ private static MetadataChangeProposal buildUpdateSettingsProposal(final GlobalSe mcp.setAspect(GenericRecordUtils.serializeAspect(settings)); return mcp; } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java index aca5e322567d8..5a9e93f70c952 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java +++ 
b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java @@ -22,25 +22,27 @@ import com.linkedin.metadata.query.ListResultMetadata; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; -import org.mockito.Mockito; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Future; +import javax.annotation.Nonnull; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RestoreColumnLineageIndicesTest { private static final String VERSION_1 = "1"; private static final String VERSION_2 = "2"; private static final String COLUMN_LINEAGE_UPGRADE_URN = - String.format("urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-column-lineage-indices"); - private final Urn datasetUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + String.format( + "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-column-lineage-indices"); + private final Urn datasetUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); private final Urn chartUrn = UrnUtils.getUrn("urn:li:chart:(looker,dashboard_elements.1)"); - private final Urn dashboardUrn = UrnUtils.getUrn("urn:li:dashboard:(looker,dashboards.thelook::web_analytics_overview)"); + private final Urn dashboardUrn = + UrnUtils.getUrn("urn:li:dashboard:(looker,dashboards.thelook::web_analytics_overview)"); @Test public void testExecuteFirstTime() throws Exception { @@ -54,54 +56,55 @@ public void testExecuteFirstTime() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), 
- Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test @@ -116,54 +119,55 @@ public void testExecuteWithNewVersion() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + 
Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test @@ -178,106 +182,126 @@ public void testDoesNotExecuteWithSameVersion() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + 
Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityService mockService) { - final List<ExtraInfo> extraInfos = ImmutableList.of( - new ExtraInfo() - .setUrn(datasetUrn) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)) - ); + final List<ExtraInfo> extraInfos = + ImmutableList.of( + new ExtraInfo() + .setUrn(datasetUrn) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); - Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(1000) - )).thenReturn(new ListResult<>( - ImmutableList.of(new UpstreamLineage()), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), - 1, - false, - 1, - 1, - 1)); + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn( + new ListResult<>( + ImmutableList.of(new UpstreamLineage()), + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), + 1, + false, + 1, + 1, + 1)); } - private void mockGetInputFields(@Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { - final List<ExtraInfo> extraInfos = ImmutableList.of( - new ExtraInfo() - .setUrn(entityUrn) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)) - ); + private void mockGetInputFields( + @Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { + final List<ExtraInfo> extraInfos = + ImmutableList.of( + new ExtraInfo() + .setUrn(entityUrn) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); - Mockito.when(mockService.listLatestAspects( - Mockito.eq(entityName), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(1000) - )).thenReturn(new ListResult<>( - ImmutableList.of(new InputFields()), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), - 1, - false, - 1, - 1, - 1)); + Mockito.when( + 
mockService.listLatestAspects( + Mockito.eq(entityName), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn( + new ListResult<>( + ImmutableList.of(new InputFields()), + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), + 1, + false, + 1, + 1, + 1)); } private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { @@ -285,28 +309,39 @@ private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { final AspectSpec aspectSpec = Mockito.mock(AspectSpec.class); // Mock for upstreamLineage Mockito.when(mockRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock inputFields for charts Mockito.when(mockRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock inputFields for dashboards - Mockito.when(mockRegistry.getEntitySpec(Constants.DASHBOARD_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.DASHBOARD_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); return aspectSpec; } - private void mockGetUpgradeStep(boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) throws Exception { + private void mockGetUpgradeStep( + boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) + throws Exception { final Urn upgradeEntityUrn = UrnUtils.getUrn(COLUMN_LINEAGE_UPGRADE_URN); - final com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(version); + final com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(version); final Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - final EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(shouldReturnResponse ? response : null); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + final EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(shouldReturnResponse ? 
response : null); } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java index 3753904053256..a4f0c5e0aaba0 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java @@ -13,59 +13,107 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.models.EntitySpec; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; -import java.util.List; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.concurrent.Future; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RestoreGlossaryIndicesTest { private static final String VERSION_1 = "1"; private static final String VERSION_2 = "2"; - private static final String GLOSSARY_UPGRADE_URN = String.format("urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); + private static final String GLOSSARY_UPGRADE_URN = + String.format( + "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); - private void mockGetTermInfo(Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { + private void mockGetTermInfo( + Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) + throws Exception { Map<String, EnvelopedAspect> termInfoAspects = new HashMap<>(); - termInfoAspects.put(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(new GlossaryTermInfo().setName("test").data()))); + termInfoAspects.put( + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(new GlossaryTermInfo().setName("test").data()))); Map<Urn, EntityResponse> termInfoResponses = new HashMap<>(); - termInfoResponses.put(glossaryTermUrn, new EntityResponse().setUrn(glossaryTermUrn).setAspects(new EnvelopedAspectMap(termInfoAspects))); - Mockito.when(mockSearchService.search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), "", null, null, 0, 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true))) - .thenReturn(new SearchResult().setNumEntities(1).setEntities(new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(glossaryTermUrn))))); - Mockito.when(mockService.getEntitiesV2( - Constants.GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(Collections.singleton(glossaryTermUrn)), - Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME))) + termInfoResponses.put( + glossaryTermUrn, + new EntityResponse() + .setUrn(glossaryTermUrn) + .setAspects(new EnvelopedAspectMap(termInfoAspects))); + Mockito.when( + 
mockSearchService.search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true))) + .thenReturn( + new SearchResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(glossaryTermUrn))))); + Mockito.when( + mockService.getEntitiesV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(Collections.singleton(glossaryTermUrn)), + Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME))) .thenReturn(termInfoResponses); } - private void mockGetNodeInfo(Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { + private void mockGetNodeInfo( + Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) + throws Exception { Map<String, EnvelopedAspect> nodeInfoAspects = new HashMap<>(); - nodeInfoAspects.put(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(new GlossaryNodeInfo().setName("test").data()))); + nodeInfoAspects.put( + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(new GlossaryNodeInfo().setName("test").data()))); Map<Urn, EntityResponse> nodeInfoResponses = new HashMap<>(); - nodeInfoResponses.put(glossaryNodeUrn, new EntityResponse().setUrn(glossaryNodeUrn).setAspects(new EnvelopedAspectMap(nodeInfoAspects))); - Mockito.when(mockSearchService.search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), "", null, null, 0, 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true))) - .thenReturn(new SearchResult().setNumEntities(1).setEntities(new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(glossaryNodeUrn))))); - Mockito.when(mockService.getEntitiesV2( - Constants.GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(Collections.singleton(glossaryNodeUrn)), - Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) - )) + nodeInfoResponses.put( + glossaryNodeUrn, + new EntityResponse() + .setUrn(glossaryNodeUrn) + .setAspects(new EnvelopedAspectMap(nodeInfoAspects))); + Mockito.when( + mockSearchService.search( + List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true))) + .thenReturn( + new SearchResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(glossaryNodeUrn))))); + Mockito.when( + mockService.getEntitiesV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(Collections.singleton(glossaryNodeUrn)), + Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME))) .thenReturn(nodeInfoResponses); } @@ -73,200 +121,257 @@ private AspectSpec mockGlossaryAspectSpecs(EntityRegistry mockRegistry) { EntitySpec entitySpec = Mockito.mock(EntitySpec.class); AspectSpec aspectSpec = Mockito.mock(AspectSpec.class); // Mock for Terms - Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock for Nodes - 
Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)) + .thenReturn(aspectSpec); return aspectSpec; } @Test public void testExecuteFirstTime() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(null); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(null); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService); mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService); AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, 
Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test public void testExecutesWithNewVersion() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_2); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_2); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(response); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + EntityResponse response = + new 
EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(response); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService); mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService); AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test public void testDoesNotRunWhenAlreadyExecuted() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = 
Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(response); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(response); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockSearchService, Mockito.times(0)).search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), - "", null, null, 0, 1000, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - Mockito.verify(mockSearchService, Mockito.times(0)).search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), - "", null, null, 0, 1000, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.any(), - Mockito.eq(null), 
- Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(0)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(0)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockSearchService, Mockito.times(0)) + .search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.verify(mockSearchService, Mockito.times(0)) + .search( + List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java index 5e4ad6e7fe880..17159ba1baf53 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java @@ -37,14 +37,13 @@ import org.mockito.Mockito; import org.testng.annotations.Test; - public class UpgradeDefaultBrowsePathsStepTest { private static final String VERSION_1 = "1"; - private static final String UPGRADE_URN = String.format( - "urn:li:%s:%s", - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - "upgrade-default-browse-paths-step"); + private static final String UPGRADE_URN = + String.format( + "urn:li:%s:%s", + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "upgrade-default-browse-paths-step"); @Test public void testExecuteNoExistingBrowsePaths() throws Exception { @@ -54,180 +53,218 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); final List<RecordTemplate> browsePaths1 = Collections.emptyList(); - 
Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - )).thenReturn(new ListResult<>( - browsePaths1, - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())), - 0, - false, - 0, - 0, - 2)); + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000))) + .thenReturn( + new ListResult<>( + browsePaths1, + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())), + 0, + false, + 0, + 0, + 2)); initMockServiceOtherEntities(mockService); - UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService); + UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = + new UpgradeDefaultBrowsePathsStep(mockService); upgradeDefaultBrowsePathsStep.execute(); - Mockito.verify(mockService, Mockito.times(1)).listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - ); - // Verify that 4 aspects are ingested, 2 for the upgrade request / result, but none for ingesting - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(1)) + .listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000)); + // Verify that 4 aspects are ingested, 2 for the upgrade request / result, but none for + // ingesting + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); } @Test public void testExecuteFirstTime() throws Exception { - Urn testUrn1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset1,PROD)"); - Urn testUrn2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)"); + Urn testUrn1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset1,PROD)"); + Urn testUrn2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)"); final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); - Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1))).thenReturn( - new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka")))); - Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn2))).thenReturn( - new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka")))); + Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1))) + .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka")))); + Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn2))) + .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka")))); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); 
-    final List<RecordTemplate> browsePaths1 = ImmutableList.of(
-        new BrowsePaths().setPaths(new StringArray(ImmutableList.of(BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn1, registry)))),
-        new BrowsePaths().setPaths(new StringArray(ImmutableList.of(BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn2, registry))))
-    );
-
-    final List<ExtraInfo> extraInfos1 = ImmutableList.of(
-        new ExtraInfo()
-            .setUrn(testUrn1)
-            .setVersion(0L)
-            .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)),
-        new ExtraInfo()
-            .setUrn(testUrn2)
-            .setVersion(0L)
-            .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L))
-    );
-
-    Mockito.when(mockService.listLatestAspects(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
-        Mockito.eq(0),
-        Mockito.eq(5000)
-    )).thenReturn(new ListResult<>(
-        browsePaths1,
-        new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos1)),
-        2,
-        false,
-        2,
-        2,
-        2));
+    Mockito.when(
+            mockService.getEntityV2(
+                Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME),
+                Mockito.eq(upgradeEntityUrn),
+                Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))))
+        .thenReturn(null);
+    final List<RecordTemplate> browsePaths1 =
+        ImmutableList.of(
+            new BrowsePaths()
+                .setPaths(
+                    new StringArray(
+                        ImmutableList.of(
+                            BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn1, registry)))),
+            new BrowsePaths()
+                .setPaths(
+                    new StringArray(
+                        ImmutableList.of(
+                            BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn2, registry)))));
+
+    final List<ExtraInfo> extraInfos1 =
+        ImmutableList.of(
+            new ExtraInfo()
+                .setUrn(testUrn1)
+                .setVersion(0L)
+                .setAudit(
+                    new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)),
+            new ExtraInfo()
+                .setUrn(testUrn2)
+                .setVersion(0L)
+                .setAudit(
+                    new AuditStamp()
+                        .setActor(UrnUtils.getUrn("urn:li:corpuser:test"))
+                        .setTime(0L)));
+
+    Mockito.when(
+            mockService.listLatestAspects(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+                Mockito.eq(0),
+                Mockito.eq(5000)))
+        .thenReturn(
+            new ListResult<>(
+                browsePaths1,
+                new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos1)),
+                2,
+                false,
+                2,
+                2,
+                2));
     initMockServiceOtherEntities(mockService);
 
-    UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService);
+    UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep =
+        new UpgradeDefaultBrowsePathsStep(mockService);
     upgradeDefaultBrowsePathsStep.execute();
 
-    Mockito.verify(mockService, Mockito.times(1)).listLatestAspects(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
-        Mockito.eq(0),
-        Mockito.eq(5000)
-    );
-    // Verify that 4 aspects are ingested, 2 for the upgrade request / result and 2 for the browse paths
-    Mockito.verify(mockService, Mockito.times(4)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .listLatestAspects(
+            Mockito.eq(Constants.DATASET_ENTITY_NAME),
+            Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+            Mockito.eq(0),
+            Mockito.eq(5000));
+    // Verify that 4 aspects are ingested, 2 for the upgrade request / result and 2 for the browse
+    // paths
+    Mockito.verify(mockService, Mockito.times(4))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false));
   }
 
   @Test
   public 
void testDoesNotRunWhenBrowsePathIsNotQualified() throws Exception { // Test for browse paths that are not ingested - Urn testUrn3 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset3,PROD)"); // Do not migrate - Urn testUrn4 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not migrate + Urn testUrn3 = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset3,PROD)"); // Do not + // migrate + Urn testUrn4 = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not + // migrate final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); - - final List<RecordTemplate> browsePaths2 = ImmutableList.of( - new BrowsePaths().setPaths(new StringArray(ImmutableList.of( - BrowsePathUtils.getDefaultBrowsePath(testUrn3, registry, '.')))), - new BrowsePaths().setPaths(new StringArray(ImmutableList.of( - BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn4, registry), - BrowsePathUtils.getDefaultBrowsePath(testUrn4, registry, '.')))) - ); - - final List<ExtraInfo> extraInfos2 = ImmutableList.of( - new ExtraInfo() - .setUrn(testUrn3) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)), - new ExtraInfo() - .setUrn(testUrn4) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L))); - - - Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - )).thenReturn(new ListResult<>( - browsePaths2, - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos2)), - 2, - false, - 2, - 2, - 2)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); + + final List<RecordTemplate> browsePaths2 = + ImmutableList.of( + new BrowsePaths() + .setPaths( + new StringArray( + ImmutableList.of( + BrowsePathUtils.getDefaultBrowsePath(testUrn3, registry, '.')))), + new BrowsePaths() + .setPaths( + new StringArray( + ImmutableList.of( + BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn4, registry), + BrowsePathUtils.getDefaultBrowsePath(testUrn4, registry, '.'))))); + + final List<ExtraInfo> extraInfos2 = + ImmutableList.of( + new ExtraInfo() + .setUrn(testUrn3) + .setVersion(0L) + .setAudit( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)), + new ExtraInfo() + .setUrn(testUrn4) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); + + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000))) + .thenReturn( + new ListResult<>( + browsePaths2, + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos2)), + 2, + false, 
+ 2, + 2, + 2)); initMockServiceOtherEntities(mockService); - UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService); + UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = + new UpgradeDefaultBrowsePathsStep(mockService); upgradeDefaultBrowsePathsStep.execute(); - Mockito.verify(mockService, Mockito.times(1)).listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - ); + Mockito.verify(mockService, Mockito.times(1)) + .listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000)); // Verify that 2 aspects are ingested, only those for the upgrade step - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); } @Test @@ -235,48 +272,55 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { final EntityService mockService = Mockito.mock(EntityService.class); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); Map<String, EnvelopedAspect> upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(response); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(response); UpgradeDefaultBrowsePathsStep step = new UpgradeDefaultBrowsePathsStep(mockService); step.execute(); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } private void initMockServiceOtherEntities(EntityService mockService) { - List<String> skippedEntityTypes = ImmutableList.of( - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME - ); + List<String> skippedEntityTypes = + ImmutableList.of( + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME); for (String entityType : skippedEntityTypes) { - 
Mockito.when(mockService.listLatestAspects( - Mockito.eq(entityType), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - )).thenReturn(new ListResult<>( - Collections.emptyList(), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())), - 0, - false, - 0, - 0, - 0)); + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(entityType), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000))) + .thenReturn( + new ListResult<>( + Collections.emptyList(), + new ListResultMetadata() + .setExtraInfos(new ExtraInfoArray(Collections.emptyList())), + 0, + false, + 0, + 0, + 0)); } } @@ -285,10 +329,10 @@ public static class TestEntityRegistry implements EntityRegistry { private final Map<String, EntitySpec> entityNameToSpec; public TestEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) - .buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); } @Nonnull diff --git a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java index fe0d61986b4a6..9931f044931b6 100644 --- a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java +++ b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java @@ -1,5 +1,8 @@ package io.datahubproject.telemetry; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.gms.factory.telemetry.TelemetryUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.telemetry.TelemetryClientId; @@ -7,10 +10,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.*; -import static org.testng.AssertJUnit.assertEquals; - - public class TelemetryUtilsTest { EntityService _entityService; @@ -18,7 +17,8 @@ public class TelemetryUtilsTest { @BeforeMethod public void init() { _entityService = Mockito.mock(EntityService.class); - Mockito.when(_entityService.getLatestAspect(any(), anyString())).thenReturn(new TelemetryClientId().setClientId("1234")); + Mockito.when(_entityService.getLatestAspect(any(), anyString())) + .thenReturn(new TelemetryClientId().setClientId("1234")); } @Test diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index 043c142da8323..692208c42f90c 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -1,5 +1,7 @@ package com.datahub.graphql; +import static com.linkedin.metadata.Constants.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -31,9 +33,6 @@ import org.springframework.web.bind.annotation.PostMapping; 
import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RestController public class GraphQLController { @@ -43,20 +42,22 @@ public GraphQLController() { MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "call")); } - @Inject - GraphQLEngine _engine; + @Inject GraphQLEngine _engine; - @Inject - AuthorizerChain _authorizerChain; + @Inject AuthorizerChain _authorizerChain; @PostMapping(value = "/graphql", produces = "application/json;charset=utf-8") CompletableFuture<ResponseEntity<String>> postGraphQL(HttpEntity<String> httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); JsonNode bodyJson = null; try { bodyJson = mapper.readTree(jsonStr); @@ -81,9 +82,11 @@ CompletableFuture<ResponseEntity<String>> postGraphQL(HttpEntity<String> httpEnt * Extract "variables" map */ JsonNode variablesJson = bodyJson.get("variables"); - final Map<String, Object> variables = (variablesJson != null && !variablesJson.isNull()) - ? new ObjectMapper().convertValue(variablesJson, new TypeReference<Map<String, Object>>() { }) - : Collections.emptyMap(); + final Map<String, Object> variables = + (variablesJson != null && !variablesJson.isNull()) + ? new ObjectMapper() + .convertValue(variablesJson, new TypeReference<Map<String, Object>>() {}) + : Collections.emptyMap(); log.debug(String.format("Executing graphQL query: %s, variables: %s", queryJson, variables)); @@ -91,61 +94,76 @@ CompletableFuture<ResponseEntity<String>> postGraphQL(HttpEntity<String> httpEnt * Init QueryContext */ Authentication authentication = AuthenticationContext.getAuthentication(); - SpringQueryContext context = new SpringQueryContext( - true, - authentication, - _authorizerChain); - - return CompletableFuture.supplyAsync(() -> { - /* - * Execute GraphQL Query - */ - ExecutionResult executionResult = _engine.execute(queryJson.asText(), variables, context); - - if (executionResult.getErrors().size() != 0) { - // There were GraphQL errors. Report in error logs. 
- log.error(String.format("Errors while executing graphQL query: %s, result: %s, errors: %s", - queryJson, - executionResult.toSpecification(), - executionResult.getErrors())); - } else { - log.debug(String.format("Executed graphQL query: %s, result: %s", - queryJson, - executionResult.toSpecification())); - } - - /* - * Format & Return Response - */ - try { - submitMetrics(executionResult); - // Remove tracing from response to reduce bulk, not used by the frontend - executionResult.getExtensions().remove("tracing"); - String responseBodyStr = new ObjectMapper().writeValueAsString(executionResult.toSpecification()); - return new ResponseEntity<>(responseBodyStr, HttpStatus.OK); - } catch (IllegalArgumentException | JsonProcessingException e) { - log.error(String.format("Failed to convert execution result %s into a JsonNode", executionResult.toSpecification())); - return new ResponseEntity<>(HttpStatus.SERVICE_UNAVAILABLE); - } - }); + SpringQueryContext context = new SpringQueryContext(true, authentication, _authorizerChain); + + return CompletableFuture.supplyAsync( + () -> { + /* + * Execute GraphQL Query + */ + ExecutionResult executionResult = _engine.execute(queryJson.asText(), variables, context); + + if (executionResult.getErrors().size() != 0) { + // There were GraphQL errors. Report in error logs. + log.error( + String.format( + "Errors while executing graphQL query: %s, result: %s, errors: %s", + queryJson, executionResult.toSpecification(), executionResult.getErrors())); + } else { + log.debug( + String.format( + "Executed graphQL query: %s, result: %s", + queryJson, executionResult.toSpecification())); + } + + /* + * Format & Return Response + */ + try { + submitMetrics(executionResult); + // Remove tracing from response to reduce bulk, not used by the frontend + executionResult.getExtensions().remove("tracing"); + String responseBodyStr = + new ObjectMapper().writeValueAsString(executionResult.toSpecification()); + return new ResponseEntity<>(responseBodyStr, HttpStatus.OK); + } catch (IllegalArgumentException | JsonProcessingException e) { + log.error( + String.format( + "Failed to convert execution result %s into a JsonNode", + executionResult.toSpecification())); + return new ResponseEntity<>(HttpStatus.SERVICE_UNAVAILABLE); + } + }); } @GetMapping("/graphql") - void getGraphQL(HttpServletRequest request, HttpServletResponse response) throws HttpRequestMethodNotSupportedException { + void getGraphQL(HttpServletRequest request, HttpServletResponse response) + throws HttpRequestMethodNotSupportedException { log.info("GET on GraphQL API is not supported"); throw new HttpRequestMethodNotSupportedException("GET"); } private void observeErrors(ExecutionResult executionResult) { - executionResult.getErrors().forEach(graphQLError -> { - if (graphQLError instanceof DataHubGraphQLError) { - DataHubGraphQLError dhGraphQLError = (DataHubGraphQLError) graphQLError; - int errorCode = dhGraphQLError.getErrorCode(); - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "errorCode", Integer.toString(errorCode))).inc(); - } else { - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "errorType", graphQLError.getErrorType().toString())).inc(); - } - }); + executionResult + .getErrors() + .forEach( + graphQLError -> { + if (graphQLError instanceof DataHubGraphQLError) { + DataHubGraphQLError dhGraphQLError = (DataHubGraphQLError) graphQLError; + int errorCode = dhGraphQLError.getErrorCode(); + MetricUtils.get() + .counter( + MetricRegistry.name( + this.getClass(), 
"errorCode", Integer.toString(errorCode))) + .inc(); + } else { + MetricUtils.get() + .counter( + MetricRegistry.name( + this.getClass(), "errorType", graphQLError.getErrorType().toString())) + .inc(); + } + }); if (executionResult.getErrors().size() != 0) { MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "error")).inc(); } @@ -162,14 +180,22 @@ private void submitMetrics(ExecutionResult executionResult) { long totalDuration = TimeUnit.NANOSECONDS.toMillis((long) tracingMap.get("duration")); Map<String, Object> executionData = (Map<String, Object>) tracingMap.get("execution"); // Extract top level resolver, parent is top level query. Assumes single query per call. - List<Map<String, Object>> resolvers = (List<Map<String, Object>>) executionData.get("resolvers"); - Optional<Map<String, Object>> - parentResolver = resolvers.stream().filter(resolver -> resolver.get("parentType").equals("Query")).findFirst(); - String fieldName = parentResolver.isPresent() ? (String) parentResolver.get().get("fieldName") : "UNKNOWN"; - MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), fieldName)).update(totalDuration); + List<Map<String, Object>> resolvers = + (List<Map<String, Object>>) executionData.get("resolvers"); + Optional<Map<String, Object>> parentResolver = + resolvers.stream() + .filter(resolver -> resolver.get("parentType").equals("Query")) + .findFirst(); + String fieldName = + parentResolver.isPresent() ? (String) parentResolver.get().get("fieldName") : "UNKNOWN"; + MetricUtils.get() + .histogram(MetricRegistry.name(this.getClass(), fieldName)) + .update(totalDuration); } } catch (Exception e) { - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "submitMetrics", "exception")).inc(); + MetricUtils.get() + .counter(MetricRegistry.name(this.getClass(), "submitMetrics", "exception")) + .inc(); log.error("Unable to submit metrics for GraphQL call.", e); } } diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java index 6dd71d84d6dc3..35636bf07eb10 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java @@ -1,5 +1,7 @@ package com.datahub.graphql; +import static java.nio.charset.StandardCharsets.*; + import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; @@ -14,9 +16,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static java.nio.charset.StandardCharsets.*; - - @Slf4j @Controller public class GraphiQLController { @@ -37,4 +36,4 @@ public GraphiQLController() { CompletableFuture<String> graphiQL() { return CompletableFuture.supplyAsync(() -> this.graphiqlHtml); } -} \ No newline at end of file +} diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java index a1ddc5a013f7d..379521eda0c1a 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java @@ -4,14 +4,16 @@ import com.datahub.plugins.auth.authorization.Authorizer; import 
com.linkedin.datahub.graphql.QueryContext; - public class SpringQueryContext implements QueryContext { private final boolean isAuthenticated; private final Authentication authentication; private final Authorizer authorizer; - public SpringQueryContext(final boolean isAuthenticated, final Authentication authentication, final Authorizer authorizer) { + public SpringQueryContext( + final boolean isAuthenticated, + final Authentication authentication, + final Authorizer authorizer) { this.isAuthenticated = isAuthenticated; this.authentication = authentication; this.authorizer = authorizer; diff --git a/metadata-service/openapi-analytics-servlet/build.gradle b/metadata-service/openapi-analytics-servlet/build.gradle index 6475d215db5f5..8ecd48a03e09d 100644 --- a/metadata-service/openapi-analytics-servlet/build.gradle +++ b/metadata-service/openapi-analytics-servlet/build.gradle @@ -63,5 +63,3 @@ task openApiGenerate(type: GenerateSwaggerCode) { ] } tasks.getByName("compileJava").dependsOn(openApiGenerate) - -checkstyleMain.exclude '**/generated/**' \ No newline at end of file diff --git a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java index 7816e81fe4a6d..4322dc08887a5 100644 --- a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java +++ b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java @@ -7,8 +7,8 @@ @Configuration public class OpenapiAnalyticsConfig { - @Bean - public DatahubUsageEventsApiDelegate datahubUsageEventsApiDelegate() { - return new DatahubUsageEventsImpl(); - } + @Bean + public DatahubUsageEventsApiDelegate datahubUsageEventsApiDelegate() { + return new DatahubUsageEventsImpl(); + } } diff --git a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java index 99e47f32555df..0cedfc22ded6b 100644 --- a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java +++ b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java @@ -1,48 +1,50 @@ package io.datahubproject.openapi.delegates; -import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; -import io.datahubproject.openapi.generated.controller.DatahubUsageEventsApiDelegate; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.ResponseEntity; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.AuthorizerChain; -import org.springframework.beans.factory.annotation.Value; import com.google.common.collect.ImmutableList; -import io.datahubproject.openapi.exception.UnauthorizedException; -import com.datahub.authorization.AuthUtil; import com.linkedin.metadata.authorization.PoliciesConfig; - -import java.util.Optional; +import 
com.linkedin.metadata.search.elasticsearch.ElasticSearchService; +import io.datahubproject.openapi.exception.UnauthorizedException; +import io.datahubproject.openapi.generated.controller.DatahubUsageEventsApiDelegate; import java.util.Objects; +import java.util.Optional; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.ResponseEntity; public class DatahubUsageEventsImpl implements DatahubUsageEventsApiDelegate { - @Autowired - private ElasticSearchService _searchService; - @Autowired - private AuthorizerChain _authorizationChain; - @Value("${authorization.restApiAuthorization:false}") - private boolean _restApiAuthorizationEnabled; + @Autowired private ElasticSearchService _searchService; + @Autowired private AuthorizerChain _authorizationChain; - final public static String DATAHUB_USAGE_INDEX = "datahub_usage_event"; + @Value("${authorization.restApiAuthorization:false}") + private boolean _restApiAuthorizationEnabled; - @Override - public ResponseEntity<String> raw(String body) { - Authentication authentication = AuthenticationContext.getAuthentication(); - checkAnalyticsAuthorized(authentication); - return ResponseEntity.of(_searchService.raw(DATAHUB_USAGE_INDEX, body).map(Objects::toString)); - } + public static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; + + @Override + public ResponseEntity<String> raw(String body) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAnalyticsAuthorized(authentication); + return ResponseEntity.of(_searchService.raw(DATAHUB_USAGE_INDEX, body).map(Objects::toString)); + } - private void checkAnalyticsAuthorized(Authentication authentication) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ANALYTICS_PRIVILEGE.getType())))); + private void checkAnalyticsAuthorized(Authentication authentication) { + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ANALYTICS_PRIVILEGE.getType())))); - if (_restApiAuthorizationEnabled && !AuthUtil.isAuthorized(_authorizationChain, actorUrnStr, Optional.empty(), orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get analytics."); - } + if (_restApiAuthorizationEnabled + && !AuthUtil.isAuthorized(_authorizationChain, actorUrnStr, Optional.empty(), orGroup)) { + throw new UnauthorizedException(actorUrnStr + " is unauthorized to get analytics."); } + } } diff --git a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java index 83b1b3f87c724..eebef4c07f7b2 100644 --- a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java +++ b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java @@ -1,5 +1,10 @@ package io.datahubproject.openapi.config; +import static io.datahubproject.openapi.delegates.DatahubUsageEventsImpl.DATAHUB_USAGE_INDEX; +import static 
org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -7,42 +12,36 @@ import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.AuthorizerChain; import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; -import org.opensearch.action.search.SearchResponse; +import java.io.IOException; +import java.util.Optional; import org.mockito.Mockito; +import org.opensearch.action.search.SearchResponse; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Primary; -import java.io.IOException; -import java.util.Optional; - -import static io.datahubproject.openapi.delegates.DatahubUsageEventsImpl.DATAHUB_USAGE_INDEX; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration public class OpenAPIAnalyticsTestConfiguration { - @Bean - @Primary - public ElasticSearchService datahubUsageEventsApiDelegate() throws IOException { - ElasticSearchService elasticSearchService = mock(ElasticSearchService.class); - SearchResponse mockResp = mock(SearchResponse.class); - when(elasticSearchService.raw(eq(DATAHUB_USAGE_INDEX), anyString())) - .thenReturn(Optional.of(mockResp)); - return elasticSearchService; - } - - @Bean - public AuthorizerChain authorizerChain() { - AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - - Authentication authentication = Mockito.mock(Authentication.class); - when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); - AuthenticationContext.setAuthentication(authentication); - - return authorizerChain; - } + @Bean + @Primary + public ElasticSearchService datahubUsageEventsApiDelegate() throws IOException { + ElasticSearchService elasticSearchService = mock(ElasticSearchService.class); + SearchResponse mockResp = mock(SearchResponse.class); + when(elasticSearchService.raw(eq(DATAHUB_USAGE_INDEX), anyString())) + .thenReturn(Optional.of(mockResp)); + return elasticSearchService; + } + + @Bean + public AuthorizerChain authorizerChain() { + AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); + + Authentication authentication = Mockito.mock(Authentication.class); + when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); + when(authorizerChain.authorize(any())) + .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); + AuthenticationContext.setAuthentication(authentication); + + return authorizerChain; + } } diff --git a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java index af2a24391fea8..d445f321132ef 100644 --- a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java +++ b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.delegates; 
+import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import io.datahubproject.openapi.config.OpenAPIAnalyticsTestConfiguration; import io.datahubproject.openapi.config.SpringWebConfig; @@ -14,31 +17,27 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - - @SpringBootTest(classes = {SpringWebConfig.class}) @ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"}) @Import({DatahubUsageEventsImpl.class, OpenAPIAnalyticsTestConfiguration.class}) public class DatahubUsageEventsImplTest extends AbstractTestNGSpringContextTests { - @BeforeTest - public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - } + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } - @Autowired - private DatahubUsageEventsApiController analyticsController; + @Autowired private DatahubUsageEventsApiController analyticsController; - @Test - public void initTest() { - assertNotNull(analyticsController); - } + @Test + public void initTest() { + assertNotNull(analyticsController); + } - @Test - public void analyticsControllerTest() { - ResponseEntity<String> resp = analyticsController.raw(""); - assertEquals(resp.getStatusCode(), HttpStatus.OK); - } + @Test + public void analyticsControllerTest() { + ResponseEntity<String> resp = analyticsController.raw(""); + assertEquals(resp.getStatusCode(), HttpStatus.OK); + } } diff --git a/metadata-service/openapi-entity-servlet/build.gradle b/metadata-service/openapi-entity-servlet/build.gradle index 7f9c472b91fac..dbec469085b07 100644 --- a/metadata-service/openapi-entity-servlet/build.gradle +++ b/metadata-service/openapi-entity-servlet/build.gradle @@ -77,6 +77,4 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: [mergeApiComponents, 'delegatePattern' : "false" ] } -tasks.getByName("compileJava").dependsOn(openApiGenerate) - -checkstyleMain.exclude '**/generated/**' \ No newline at end of file +tasks.getByName("compileJava").dependsOn(openApiGenerate) \ No newline at end of file diff --git a/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java b/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java index ef36d8aa38785..2cd2935496898 100644 --- a/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java +++ b/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java @@ -1,43 +1,41 @@ package io.datahubproject; import io.swagger.codegen.v3.generators.java.SpringCodegen; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.Map; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class CustomSpringCodegen extends SpringCodegen { - public CustomSpringCodegen() { - super(); - } + public CustomSpringCodegen() { + super(); + } - @Override - public String getName() { - return "custom-spring"; - } + @Override + public String getName() { + return "custom-spring"; + } - @Override - public Map<String, Object> 
postProcessOperations(Map<String, Object> objs) { - Map<String, Object> result = super.postProcessOperations(objs); - List<Map<String, String>> imports = (List) objs.get("imports"); + @Override + public Map<String, Object> postProcessOperations(Map<String, Object> objs) { + Map<String, Object> result = super.postProcessOperations(objs); + List<Map<String, String>> imports = (List) objs.get("imports"); - for (Map<String, String> importMap : imports) { - for (String type : importMap.values()) { - if (type.contains("EntityRequest") && !type.contains(".Scroll")) { - additionalProperties.put("requestClass", type); - } - if (type.contains("EntityResponse") && !type.contains(".Scroll")) { - additionalProperties.put("responseClass", type); - } - if (type.contains("EntityResponse") && type.contains(".Scroll")) { - additionalProperties.put("scrollResponseClass", type); - } - } + for (Map<String, String> importMap : imports) { + for (String type : importMap.values()) { + if (type.contains("EntityRequest") && !type.contains(".Scroll")) { + additionalProperties.put("requestClass", type); } - - return result; + if (type.contains("EntityResponse") && !type.contains(".Scroll")) { + additionalProperties.put("responseClass", type); + } + if (type.contains("EntityResponse") && type.contains(".Scroll")) { + additionalProperties.put("scrollResponseClass", type); + } + } } + + return result; + } } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java index 207c2284e2673..31cd3e6c69e50 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java @@ -1,9 +1,18 @@ package io.datahubproject.openapi.delegates; -import com.linkedin.common.urn.Urn; -import com.linkedin.metadata.entity.EntityService; +import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.AuthorizerChain; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.authorization.EntitySpec; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; @@ -13,7 +22,6 @@ import io.datahubproject.openapi.dto.UpsertAspectRequest; import io.datahubproject.openapi.dto.UrnResponseMap; import io.datahubproject.openapi.entities.EntitiesController; -import com.datahub.authorization.AuthorizerChain; import io.datahubproject.openapi.exception.UnauthorizedException; import io.datahubproject.openapi.generated.BrowsePathsV2AspectRequestV2; import io.datahubproject.openapi.generated.BrowsePathsV2AspectResponseV2; @@ -43,18 +51,6 @@ import io.datahubproject.openapi.generated.StatusAspectRequestV2; import io.datahubproject.openapi.generated.StatusAspectResponseV2; import 
io.datahubproject.openapi.util.OpenApiEntitiesUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.linkedin.metadata.authorization.PoliciesConfig; -import com.google.common.collect.ImmutableList; -import com.datahub.authorization.AuthUtil; -import org.springframework.http.HttpEntity; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; - -import javax.validation.Valid; -import javax.validation.constraints.Min; import java.net.URISyntaxException; import java.util.List; import java.util.Map; @@ -62,544 +58,678 @@ import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; +import javax.validation.Valid; +import javax.validation.constraints.Min; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; public class EntityApiDelegateImpl<I, O, S> { - final private EntityRegistry _entityRegistry; - final private EntityService _entityService; - final private SearchService _searchService; - final private EntitiesController _v1Controller; - final private AuthorizerChain _authorizationChain; - - final private boolean _restApiAuthorizationEnabled; - final private Class<I> _reqClazz; - final private Class<O> _respClazz; - final private Class<S> _scrollRespClazz; - - final private StackWalker walker = StackWalker.getInstance(); - - public EntityApiDelegateImpl(EntityService entityService, SearchService searchService, EntitiesController entitiesController, - boolean restApiAuthorizationEnabled, AuthorizerChain authorizationChain, - Class<I> reqClazz, Class<O> respClazz, Class<S> scrollRespClazz) { - this._entityService = entityService; - this._searchService = searchService; - this._entityRegistry = entityService.getEntityRegistry(); - this._v1Controller = entitiesController; - this._authorizationChain = authorizationChain; - this._restApiAuthorizationEnabled = restApiAuthorizationEnabled; - this._reqClazz = reqClazz; - this._respClazz = respClazz; - this._scrollRespClazz = scrollRespClazz; - } - - public ResponseEntity<O> get(String urn, Boolean systemMetadata, List<String> aspects) { - String[] requestedAspects = Optional.ofNullable(aspects).map(asp -> asp.stream().distinct().toArray(String[]::new)).orElse(null); - ResponseEntity<UrnResponseMap> result = _v1Controller.getEntities(new String[]{urn}, requestedAspects); - return ResponseEntity.of(OpenApiEntitiesUtil.convertEntity(Optional.ofNullable(result) - .map(HttpEntity::getBody).orElse(null), _respClazz, systemMetadata)); - } - - public ResponseEntity<List<O>> create(List<I> body) { - List<UpsertAspectRequest> aspects = body.stream() - .flatMap(b -> OpenApiEntitiesUtil.convertEntityToUpsert(b, _reqClazz, _entityRegistry).stream()) - .collect(Collectors.toList()); - _v1Controller.postEntities(aspects); - List<O> responses = body.stream() - .map(req -> OpenApiEntitiesUtil.convertToResponse(req, _respClazz, _entityRegistry)) - .collect(Collectors.toList()); - return ResponseEntity.ok(responses); - } - - public ResponseEntity<Void> delete(String urn) { - _v1Controller.deleteEntities(new String[]{urn}, false); - return new ResponseEntity<>(HttpStatus.OK); - } - - public ResponseEntity<Void> head(String urn) { - try { - Urn entityUrn = Urn.createFromString(urn); - if 
(_entityService.exists(entityUrn)) { - return new ResponseEntity<>(HttpStatus.NO_CONTENT); - } else { - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - } - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - - public <A> ResponseEntity<A> getAspect(String urn, Boolean systemMetadata, String aspect, Class<O> entityRespClass, - Class<A> aspectRespClazz) { - String[] requestedAspects = new String[]{aspect}; - ResponseEntity<UrnResponseMap> result = _v1Controller.getEntities(new String[]{urn}, requestedAspects); - return ResponseEntity.of(OpenApiEntitiesUtil.convertAspect(result.getBody(), aspect, entityRespClass, aspectRespClazz, - systemMetadata)); - } - - public <AQ, AR> ResponseEntity<AR> createAspect(String urn, String aspectName, AQ body, Class<AQ> reqClazz, Class<AR> respClazz) { - UpsertAspectRequest aspectUpsert = OpenApiEntitiesUtil.convertAspectToUpsert(urn, body, reqClazz); - _v1Controller.postEntities(Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList())); - AR response = OpenApiEntitiesUtil.convertToResponseAspect(body, respClazz); - return ResponseEntity.ok(response); - } - - public ResponseEntity<Void> headAspect(String urn, String aspect) { - try { - Urn entityUrn = Urn.createFromString(urn); - if (_entityService.exists(entityUrn, aspect)) { - return new ResponseEntity<>(HttpStatus.NO_CONTENT); - } else { - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - } - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - - public ResponseEntity<Void> deleteAspect(String urn, String aspect) { - _entityService.deleteAspect(urn, aspect, Map.of(), false); - _v1Controller.deleteEntities(new String[]{urn}, false); - return new ResponseEntity<>(HttpStatus.OK); - } - - public ResponseEntity<DomainsAspectResponseV2> createDomains(DomainsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DomainsAspectRequestV2.class, DomainsAspectResponseV2.class); - } - - public ResponseEntity<GlobalTagsAspectResponseV2> createGlobalTags(GlobalTagsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, GlobalTagsAspectRequestV2.class, GlobalTagsAspectResponseV2.class); - } - - public ResponseEntity<GlossaryTermsAspectResponseV2> createGlossaryTerms(GlossaryTermsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, GlossaryTermsAspectRequestV2.class, GlossaryTermsAspectResponseV2.class); - } - - public ResponseEntity<OwnershipAspectResponseV2> createOwnership(OwnershipAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, OwnershipAspectRequestV2.class, OwnershipAspectResponseV2.class); - } - - public ResponseEntity<StatusAspectResponseV2> createStatus(StatusAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, 
methodNameToAspectName(methodName), body, StatusAspectRequestV2.class, StatusAspectResponseV2.class); - } - - public ResponseEntity<Void> deleteDomains(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteGlobalTags(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteGlossaryTerms(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteOwnership(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteStatus(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DomainsAspectResponseV2> getDomains(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DomainsAspectResponseV2.class); - } - - public ResponseEntity<GlobalTagsAspectResponseV2> getGlobalTags(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - GlobalTagsAspectResponseV2.class); - } - - public ResponseEntity<GlossaryTermsAspectResponseV2> getGlossaryTerms(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - GlossaryTermsAspectResponseV2.class); - } - - public ResponseEntity<OwnershipAspectResponseV2> getOwnership(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - OwnershipAspectResponseV2.class); - } - - public ResponseEntity<StatusAspectResponseV2> getStatus(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - StatusAspectResponseV2.class); - } - - public ResponseEntity<Void> headDomains(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headGlobalTags(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return 
headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headGlossaryTerms(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headOwnership(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headStatus(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - protected static String methodNameToAspectName(String methodName) { - return toLowerFirst(methodName.replaceFirst("^(get|head|delete|create)", "")); - } - - public ResponseEntity<Void> deleteDeprecation(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteBrowsePathsV2(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DeprecationAspectResponseV2> getDeprecation(String urn, @Valid Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DeprecationAspectResponseV2.class); - } - - public ResponseEntity<Void> headDeprecation(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DeprecationAspectResponseV2> createDeprecation(@Valid DeprecationAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DeprecationAspectRequestV2.class, - DeprecationAspectResponseV2.class); - } - - public ResponseEntity<Void> headBrowsePathsV2(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<BrowsePathsV2AspectResponseV2> getBrowsePathsV2(String urn, @Valid Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - BrowsePathsV2AspectResponseV2.class); - } - - public ResponseEntity<BrowsePathsV2AspectResponseV2> createBrowsePathsV2(@Valid BrowsePathsV2AspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, BrowsePathsV2AspectRequestV2.class, - BrowsePathsV2AspectResponseV2.class); - } - - public ResponseEntity<S> scroll(@Valid Boolean systemMetadata, 
@Valid List<String> aspects, @Min(1) @Valid Integer count, - @Valid String scrollId, @Valid List<String> sort, @Valid SortOrder sortOrder, @Valid String query) { - - Authentication authentication = AuthenticationContext.getAuthentication(); - com.linkedin.metadata.models.EntitySpec entitySpec = OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); - checkScrollAuthorized(authentication, entitySpec); - - // TODO multi-field sort - SortCriterion sortCriterion = new SortCriterion(); - sortCriterion.setField(Optional.ofNullable(sort).map(s -> s.get(0)).orElse("urn")); - sortCriterion.setOrder(com.linkedin.metadata.query.filter.SortOrder.valueOf(Optional.ofNullable(sortOrder) - .map(Enum::name).orElse("ASCENDING"))); - - SearchFlags searchFlags = new SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true); - - ScrollResult result = _searchService.scrollAcrossEntities( - List.of(entitySpec.getName()), - query, null, sortCriterion, scrollId, null, count, searchFlags); - - String[] urns = result.getEntities().stream() - .map(SearchEntity::getEntity) - .map(Urn::toString) - .toArray(String[]::new); - String[] requestedAspects = Optional.ofNullable(aspects) - .map(asp -> asp.stream().distinct().toArray(String[]::new)) - .orElse(null); - List<O> entities = Optional.ofNullable(_v1Controller.getEntities(urns, requestedAspects).getBody()) - .map(body -> body.getResponses().entrySet()) - .map(entries -> OpenApiEntitiesUtil.convertEntities(entries, _respClazz, systemMetadata)) - .orElse(List.of()); - - return ResponseEntity.of(OpenApiEntitiesUtil.convertToScrollResponse(_scrollRespClazz, result.getScrollId(), entities)); - } - - private void checkScrollAuthorized(Authentication authentication, com.linkedin.metadata.models.EntitySpec entitySpec) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); - - List<Optional<EntitySpec>> resourceSpecs = List.of(Optional.of(new EntitySpec(entitySpec.getName(), ""))); - if (_restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizationChain, actorUrnStr, resourceSpecs, orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); - } - } - - public ResponseEntity<DatasetPropertiesAspectResponseV2> createDatasetProperties(@Valid DatasetPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DatasetPropertiesAspectRequestV2.class, - DatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> createEditableDatasetProperties( - @Valid EditableDatasetPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, EditableDatasetPropertiesAspectRequestV2.class, - EditableDatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<InstitutionalMemoryAspectResponseV2> createInstitutionalMemory( - @Valid InstitutionalMemoryAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - 
.map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, InstitutionalMemoryAspectRequestV2.class, - InstitutionalMemoryAspectResponseV2.class); - } - - public ResponseEntity<ChartInfoAspectResponseV2> createChartInfo(@Valid ChartInfoAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, ChartInfoAspectRequestV2.class, - ChartInfoAspectResponseV2.class); - } - - public ResponseEntity<EditableChartPropertiesAspectResponseV2> createEditableChartProperties( - @Valid EditableChartPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, EditableChartPropertiesAspectRequestV2.class, - EditableChartPropertiesAspectResponseV2.class); - } - - public ResponseEntity<DataProductPropertiesAspectResponseV2> createDataProductProperties( - @Valid DataProductPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DataProductPropertiesAspectRequestV2.class, - DataProductPropertiesAspectResponseV2.class); - } - - public ResponseEntity<Void> deleteDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteEditableDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteInstitutionalMemory(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteChartInfo(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<DatasetPropertiesAspectResponseV2> getDatasetProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> getEditableDatasetProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - EditableDatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity<InstitutionalMemoryAspectResponseV2> getInstitutionalMemory(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - 
return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - InstitutionalMemoryAspectResponseV2.class); - } - - public ResponseEntity<EditableChartPropertiesAspectResponseV2> getEditableChartProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, EditableChartPropertiesAspectResponseV2.class); - } - - public ResponseEntity<ChartInfoAspectResponseV2> getChartInfo(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - ChartInfoAspectResponseV2.class); - } - - public ResponseEntity<DataProductPropertiesAspectResponseV2> getDataProductProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DataProductPropertiesAspectResponseV2.class); - } - - public ResponseEntity<Void> headDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headEditableDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headInstitutionalMemory(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headDataProductProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headEditableChartProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> headChartInfo(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteEditableChartProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity<Void> deleteDataProductProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } + private final EntityRegistry _entityRegistry; + private final EntityService _entityService; + private final SearchService _searchService; + private final EntitiesController _v1Controller; + private final AuthorizerChain 
_authorizationChain; + + private final boolean _restApiAuthorizationEnabled; + private final Class<I> _reqClazz; + private final Class<O> _respClazz; + private final Class<S> _scrollRespClazz; + + private final StackWalker walker = StackWalker.getInstance(); + + public EntityApiDelegateImpl( + EntityService entityService, + SearchService searchService, + EntitiesController entitiesController, + boolean restApiAuthorizationEnabled, + AuthorizerChain authorizationChain, + Class<I> reqClazz, + Class<O> respClazz, + Class<S> scrollRespClazz) { + this._entityService = entityService; + this._searchService = searchService; + this._entityRegistry = entityService.getEntityRegistry(); + this._v1Controller = entitiesController; + this._authorizationChain = authorizationChain; + this._restApiAuthorizationEnabled = restApiAuthorizationEnabled; + this._reqClazz = reqClazz; + this._respClazz = respClazz; + this._scrollRespClazz = scrollRespClazz; + } + + public ResponseEntity<O> get(String urn, Boolean systemMetadata, List<String> aspects) { + String[] requestedAspects = + Optional.ofNullable(aspects) + .map(asp -> asp.stream().distinct().toArray(String[]::new)) + .orElse(null); + ResponseEntity<UrnResponseMap> result = + _v1Controller.getEntities(new String[] {urn}, requestedAspects); + return ResponseEntity.of( + OpenApiEntitiesUtil.convertEntity( + Optional.ofNullable(result).map(HttpEntity::getBody).orElse(null), + _respClazz, + systemMetadata)); + } + + public ResponseEntity<List<O>> create(List<I> body) { + List<UpsertAspectRequest> aspects = + body.stream() + .flatMap( + b -> + OpenApiEntitiesUtil.convertEntityToUpsert(b, _reqClazz, _entityRegistry) + .stream()) + .collect(Collectors.toList()); + _v1Controller.postEntities(aspects); + List<O> responses = + body.stream() + .map(req -> OpenApiEntitiesUtil.convertToResponse(req, _respClazz, _entityRegistry)) + .collect(Collectors.toList()); + return ResponseEntity.ok(responses); + } + + public ResponseEntity<Void> delete(String urn) { + _v1Controller.deleteEntities(new String[] {urn}, false); + return new ResponseEntity<>(HttpStatus.OK); + } + + public ResponseEntity<Void> head(String urn) { + try { + Urn entityUrn = Urn.createFromString(urn); + if (_entityService.exists(entityUrn)) { + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } else { + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + } + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public <A> ResponseEntity<A> getAspect( + String urn, + Boolean systemMetadata, + String aspect, + Class<O> entityRespClass, + Class<A> aspectRespClazz) { + String[] requestedAspects = new String[] {aspect}; + ResponseEntity<UrnResponseMap> result = + _v1Controller.getEntities(new String[] {urn}, requestedAspects); + return ResponseEntity.of( + OpenApiEntitiesUtil.convertAspect( + result.getBody(), aspect, entityRespClass, aspectRespClazz, systemMetadata)); + } + + public <AQ, AR> ResponseEntity<AR> createAspect( + String urn, String aspectName, AQ body, Class<AQ> reqClazz, Class<AR> respClazz) { + UpsertAspectRequest aspectUpsert = + OpenApiEntitiesUtil.convertAspectToUpsert(urn, body, reqClazz); + _v1Controller.postEntities( + Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList())); + AR response = OpenApiEntitiesUtil.convertToResponseAspect(body, respClazz); + return ResponseEntity.ok(response); + } + + public ResponseEntity<Void> headAspect(String urn, String aspect) { + try { + Urn entityUrn = Urn.createFromString(urn); + if 
(_entityService.exists(entityUrn, aspect)) { + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } else { + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + } + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public ResponseEntity<Void> deleteAspect(String urn, String aspect) { + _entityService.deleteAspect(urn, aspect, Map.of(), false); + _v1Controller.deleteEntities(new String[] {urn}, false); + return new ResponseEntity<>(HttpStatus.OK); + } + + public ResponseEntity<DomainsAspectResponseV2> createDomains( + DomainsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DomainsAspectRequestV2.class, + DomainsAspectResponseV2.class); + } + + public ResponseEntity<GlobalTagsAspectResponseV2> createGlobalTags( + GlobalTagsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + GlobalTagsAspectRequestV2.class, + GlobalTagsAspectResponseV2.class); + } + + public ResponseEntity<GlossaryTermsAspectResponseV2> createGlossaryTerms( + GlossaryTermsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + GlossaryTermsAspectRequestV2.class, + GlossaryTermsAspectResponseV2.class); + } + + public ResponseEntity<OwnershipAspectResponseV2> createOwnership( + OwnershipAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + OwnershipAspectRequestV2.class, + OwnershipAspectResponseV2.class); + } + + public ResponseEntity<StatusAspectResponseV2> createStatus( + StatusAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + StatusAspectRequestV2.class, + StatusAspectResponseV2.class); + } + + public ResponseEntity<Void> deleteDomains(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteGlobalTags(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteGlossaryTerms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteOwnership(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> deleteStatus(String urn) { + String methodName = + walker.walk(frames -> 
frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DomainsAspectResponseV2> getDomains(String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DomainsAspectResponseV2.class); + } + + public ResponseEntity<GlobalTagsAspectResponseV2> getGlobalTags( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + GlobalTagsAspectResponseV2.class); + } + + public ResponseEntity<GlossaryTermsAspectResponseV2> getGlossaryTerms( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + GlossaryTermsAspectResponseV2.class); + } + + public ResponseEntity<OwnershipAspectResponseV2> getOwnership( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + OwnershipAspectResponseV2.class); + } + + public ResponseEntity<StatusAspectResponseV2> getStatus(String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + StatusAspectResponseV2.class); + } + + public ResponseEntity<Void> headDomains(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headGlobalTags(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headGlossaryTerms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headOwnership(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headStatus(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + protected static String methodNameToAspectName(String methodName) { + return toLowerFirst(methodName.replaceFirst("^(get|head|delete|create)", "")); + } + + public ResponseEntity<Void> deleteDeprecation(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + 
public ResponseEntity<Void> deleteBrowsePathsV2(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DeprecationAspectResponseV2> getDeprecation( + String urn, @Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DeprecationAspectResponseV2.class); + } + + public ResponseEntity<Void> headDeprecation(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DeprecationAspectResponseV2> createDeprecation( + @Valid DeprecationAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DeprecationAspectRequestV2.class, + DeprecationAspectResponseV2.class); + } + + public ResponseEntity<Void> headBrowsePathsV2(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<BrowsePathsV2AspectResponseV2> getBrowsePathsV2( + String urn, @Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + BrowsePathsV2AspectResponseV2.class); + } + + public ResponseEntity<BrowsePathsV2AspectResponseV2> createBrowsePathsV2( + @Valid BrowsePathsV2AspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + BrowsePathsV2AspectRequestV2.class, + BrowsePathsV2AspectResponseV2.class); + } + + public ResponseEntity<S> scroll( + @Valid Boolean systemMetadata, + @Valid List<String> aspects, + @Min(1) @Valid Integer count, + @Valid String scrollId, + @Valid List<String> sort, + @Valid SortOrder sortOrder, + @Valid String query) { + + Authentication authentication = AuthenticationContext.getAuthentication(); + com.linkedin.metadata.models.EntitySpec entitySpec = + OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); + checkScrollAuthorized(authentication, entitySpec); + + // TODO multi-field sort + SortCriterion sortCriterion = new SortCriterion(); + sortCriterion.setField(Optional.ofNullable(sort).map(s -> s.get(0)).orElse("urn")); + sortCriterion.setOrder( + com.linkedin.metadata.query.filter.SortOrder.valueOf( + Optional.ofNullable(sortOrder).map(Enum::name).orElse("ASCENDING"))); + + SearchFlags searchFlags = + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); + + ScrollResult result = + _searchService.scrollAcrossEntities( + List.of(entitySpec.getName()), + query, + null, + sortCriterion, + scrollId, + null, + count, + searchFlags); + + String[] urns = + result.getEntities().stream() + .map(SearchEntity::getEntity) + .map(Urn::toString) + 
.toArray(String[]::new); + String[] requestedAspects = + Optional.ofNullable(aspects) + .map(asp -> asp.stream().distinct().toArray(String[]::new)) + .orElse(null); + List<O> entities = + Optional.ofNullable(_v1Controller.getEntities(urns, requestedAspects).getBody()) + .map(body -> body.getResponses().entrySet()) + .map( + entries -> OpenApiEntitiesUtil.convertEntities(entries, _respClazz, systemMetadata)) + .orElse(List.of()); + + return ResponseEntity.of( + OpenApiEntitiesUtil.convertToScrollResponse( + _scrollRespClazz, result.getScrollId(), entities)); + } + + private void checkScrollAuthorized( + Authentication authentication, com.linkedin.metadata.models.EntitySpec entitySpec) { + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); + + List<Optional<EntitySpec>> resourceSpecs = + List.of(Optional.of(new EntitySpec(entitySpec.getName(), ""))); + if (_restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizationChain, actorUrnStr, resourceSpecs, orGroup)) { + throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); + } + } + + public ResponseEntity<DatasetPropertiesAspectResponseV2> createDatasetProperties( + @Valid DatasetPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DatasetPropertiesAspectRequestV2.class, + DatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> createEditableDatasetProperties( + @Valid EditableDatasetPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + EditableDatasetPropertiesAspectRequestV2.class, + EditableDatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity<InstitutionalMemoryAspectResponseV2> createInstitutionalMemory( + @Valid InstitutionalMemoryAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + InstitutionalMemoryAspectRequestV2.class, + InstitutionalMemoryAspectResponseV2.class); + } + + public ResponseEntity<ChartInfoAspectResponseV2> createChartInfo( + @Valid ChartInfoAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + ChartInfoAspectRequestV2.class, + ChartInfoAspectResponseV2.class); + } + + public ResponseEntity<EditableChartPropertiesAspectResponseV2> createEditableChartProperties( + @Valid EditableChartPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + EditableChartPropertiesAspectRequestV2.class, + EditableChartPropertiesAspectResponseV2.class); + } + + public ResponseEntity<DataProductPropertiesAspectResponseV2> 
+      @Valid DataProductPropertiesAspectRequestV2 body, String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return createAspect(
+        urn,
+        methodNameToAspectName(methodName),
+        body,
+        DataProductPropertiesAspectRequestV2.class,
+        DataProductPropertiesAspectResponseV2.class);
+  }
+
+  public ResponseEntity<Void> deleteDatasetProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> deleteEditableDatasetProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> deleteInstitutionalMemory(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> deleteChartInfo(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<DatasetPropertiesAspectResponseV2> getDatasetProperties(
+      String urn, Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        DatasetPropertiesAspectResponseV2.class);
+  }
+
+  public ResponseEntity<EditableDatasetPropertiesAspectResponseV2> getEditableDatasetProperties(
+      String urn, Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        EditableDatasetPropertiesAspectResponseV2.class);
+  }
+
+  public ResponseEntity<InstitutionalMemoryAspectResponseV2> getInstitutionalMemory(
+      String urn, Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        InstitutionalMemoryAspectResponseV2.class);
+  }
+
+  public ResponseEntity<EditableChartPropertiesAspectResponseV2> getEditableChartProperties(
+      String urn, Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        EditableChartPropertiesAspectResponseV2.class);
+  }
+
+  public ResponseEntity<ChartInfoAspectResponseV2> getChartInfo(
+      String urn, Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        ChartInfoAspectResponseV2.class);
+  }
+
+  public ResponseEntity<DataProductPropertiesAspectResponseV2> getDataProductProperties(
+      String urn, Boolean systemMetadata) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return getAspect(
+        urn,
+        systemMetadata,
+        methodNameToAspectName(methodName),
+        _respClazz,
+        DataProductPropertiesAspectResponseV2.class);
+  }
+
+  public ResponseEntity<Void> headDatasetProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> headEditableDatasetProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> headInstitutionalMemory(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> headDataProductProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> headEditableChartProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> headChartInfo(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return headAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> deleteEditableChartProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
+
+  public ResponseEntity<Void> deleteDataProductProperties(String urn) {
+    String methodName =
+        walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get();
+    return deleteAspect(urn, methodNameToAspectName(methodName));
+  }
 }
diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java
index 205d401dd956d..317f9311003e5 100644
--- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java
+++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java
@@ -1,5 +1,8 @@
 package io.datahubproject.openapi.util;
+import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst;
+import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
@@ -9,8 +12,6 @@ import io.datahubproject.openapi.generated.EntityResponse;
 import io.datahubproject.openapi.generated.OneOfGenericAspectValue;
 import io.datahubproject.openapi.generated.SystemMetadata;
-import lombok.extern.slf4j.Slf4j;
-
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Arrays;
@@ -20,260 +21,338 @@ import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst;
-import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst;
-
+import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class OpenApiEntitiesUtil {
-    private final static String MODEL_VERSION = "V2";
-    private final static String REQUEST_SUFFIX = "Request" + MODEL_VERSION;
-    private final static String RESPONSE_SUFFIX = "Response" + MODEL_VERSION;
-
-    private final static String ASPECT_REQUEST_SUFFIX = "Aspect" + REQUEST_SUFFIX;
-    private final static String ASPECT_RESPONSE_SUFFIX = "Aspect" + RESPONSE_SUFFIX;
-    private final static String ENTITY_REQUEST_SUFFIX = "Entity" + REQUEST_SUFFIX;
-    private final static String ENTITY_RESPONSE_SUFFIX = "Entity" + RESPONSE_SUFFIX;
+  private static final String MODEL_VERSION = "V2";
+  private static final String REQUEST_SUFFIX = "Request" + MODEL_VERSION;
+  private static final String RESPONSE_SUFFIX = "Response" + MODEL_VERSION;
+
+  private static final String ASPECT_REQUEST_SUFFIX = "Aspect" + REQUEST_SUFFIX;
+  private static final String ASPECT_RESPONSE_SUFFIX = "Aspect" + RESPONSE_SUFFIX;
+  private static final String ENTITY_REQUEST_SUFFIX = "Entity" + REQUEST_SUFFIX;
+  private static final String ENTITY_RESPONSE_SUFFIX = "Entity" + RESPONSE_SUFFIX;
+
+  private OpenApiEntitiesUtil() {}
+
+  private static final ReflectionCache REFLECT =
+      ReflectionCache.builder().basePackage("io.datahubproject.openapi.generated").build();
+
+  public static <T> UpsertAspectRequest convertAspectToUpsert(
+      String entityUrn, Object aspectRequest, Class<T> aspectRequestClazz) {
+    try {
+      UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder();
+      builder.entityType(Urn.createFromString(entityUrn).getEntityType());
+      builder.entityUrn(entityUrn);
+
+      // i.e. GlobalTagsAspectRequestV2
+      if (aspectRequest != null) {
+        // i.e. GlobalTags
+        Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue");
+        Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest);
+
+        if (aspect != null) {
+          builder.aspect((OneOfGenericAspectValue) aspect);
+          return builder.build();
+        }
+      }
-    private OpenApiEntitiesUtil() {
+      return null;
+    } catch (Exception e) {
+      log.error("Error reflecting urn: {} aspect: {}", entityUrn, aspectRequestClazz.getName());
+      throw new RuntimeException(e);
     }
-
-    private final static ReflectionCache REFLECT = ReflectionCache.builder()
-            .basePackage("io.datahubproject.openapi.generated")
-            .build();
-
-
-    public static <T> UpsertAspectRequest convertAspectToUpsert(String entityUrn, Object aspectRequest, Class<T> aspectRequestClazz) {
-        try {
-            UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder();
-            builder.entityType(Urn.createFromString(entityUrn).getEntityType());
-            builder.entityUrn(entityUrn);
-
-            // i.e. GlobalTagsAspectRequestV2
-            if (aspectRequest != null) {
-                // i.e. GlobalTags
-                Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue");
-                Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest);
-
-                if (aspect != null) {
+  }
+
+  public static <T> List<UpsertAspectRequest> convertEntityToUpsert(
+      Object openapiEntity, Class<T> fromClazz, EntityRegistry entityRegistry) {
+    final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, fromClazz);
+
+    return entitySpec.getAspectSpecs().stream()
+        .map(
+            aspectSpec -> {
+              try {
+                UpsertAspectRequest.UpsertAspectRequestBuilder builder =
+                    UpsertAspectRequest.builder();
+                builder.entityType(entitySpec.getName());
+                builder.entityUrn(
+                    (String) REFLECT.lookupMethod(fromClazz, "getUrn").invoke(openapiEntity));
+
+                String upperAspectName = toUpperFirst(aspectSpec.getName());
+                Method aspectMethod = REFLECT.lookupMethod(fromClazz, "get" + upperAspectName);
+
+                // i.e. GlobalTagsAspectRequestV2
+                Object aspectRequest =
+                    aspectMethod == null ? null : aspectMethod.invoke(openapiEntity);
+                if (aspectRequest != null) {
+                  Class<?> aspectRequestClazz =
+                      REFLECT.lookupClass(upperAspectName + ASPECT_REQUEST_SUFFIX);
+
+                  // i.e. GlobalTags
+                  Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue");
+                  Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest);
+
+                  if (aspect != null) {
+                    builder.aspect((OneOfGenericAspectValue) aspect);
+                    return builder.build();
+                  }
+                }
-                Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest);
-
-                if (aspect != null) {
-                    builder.aspect((OneOfGenericAspectValue) aspect);
-                    return builder.build();
-                }
-            }
-
-            return null;
-        } catch (Exception e) {
-            log.error("Error reflecting entity: {} aspect: {}", entitySpec.getName(), aspectSpec.getName());
-            throw new RuntimeException(e);
-        }
-    }).filter(Objects::nonNull).collect(Collectors.toList());
-    }
-    public static <E, A> Optional<A> convertAspect(UrnResponseMap urnResponseMap, String aspectName, Class<E> entityClazz,
-                                                   Class<A> aspectClazz, boolean withSystemMetadata) {
-        return convertEntity(urnResponseMap, entityClazz, withSystemMetadata).map(entity -> {
-            try {
-                Method aspectMethod = REFLECT.lookupMethod(entityClazz, "get" + toUpperFirst(aspectName));
+                return null;
+              } catch (Exception e) {
+                log.error(
+                    "Error reflecting entity: {} aspect: {}",
+                    entitySpec.getName(),
+                    aspectSpec.getName());
+                throw new RuntimeException(e);
+              }
+            })
+        .filter(Objects::nonNull)
+        .collect(Collectors.toList());
+  }
+
+  public static <E, A> Optional<A> convertAspect(
+      UrnResponseMap urnResponseMap,
+      String aspectName,
+      Class<E> entityClazz,
+      Class<A> aspectClazz,
+      boolean withSystemMetadata) {
+    return convertEntity(urnResponseMap, entityClazz, withSystemMetadata)
+        .map(
+            entity -> {
+              try {
+                Method aspectMethod =
+                    REFLECT.lookupMethod(entityClazz, "get" + toUpperFirst(aspectName));
 return aspectMethod == null ? null : aspectClazz.cast(aspectMethod.invoke(entity));
-            } catch (IllegalAccessException | InvocationTargetException e) {
+              } catch (IllegalAccessException | InvocationTargetException e) {
 throw new RuntimeException(e);
-            }
-        });
-
-    }
-
-    public static <T> Optional<T> convertEntity(UrnResponseMap urnResponseMap, Class<T> toClazz, boolean withSystemMetadata) {
-        return Optional.ofNullable(urnResponseMap)
-                .flatMap(respMap -> respMap.getResponses().entrySet().stream().findFirst())
-                .flatMap(entry -> convertEntities(Set.of(entry), toClazz, withSystemMetadata).stream().findFirst());
-    }
-
-    public static <T> List<T> convertEntities(Set<Map.Entry<String, EntityResponse>> entityResponseSet, Class<T> toClazz, boolean withSystemMetadata) {
-        if (entityResponseSet != null) {
-            return entityResponseSet.stream().map(entry -> {
+              }
+            });
+  }
+
+  public static <T> Optional<T> convertEntity(
+      UrnResponseMap urnResponseMap, Class<T> toClazz, boolean withSystemMetadata) {
+    return Optional.ofNullable(urnResponseMap)
+        .flatMap(respMap -> respMap.getResponses().entrySet().stream().findFirst())
+        .flatMap(
+            entry ->
+                convertEntities(Set.of(entry), toClazz, withSystemMetadata).stream().findFirst());
+  }
+
+  public static <T> List<T> convertEntities(
+      Set<Map.Entry<String, EntityResponse>> entityResponseSet,
+      Class<T> toClazz,
+      boolean withSystemMetadata) {
+    if (entityResponseSet != null) {
+      return entityResponseSet.stream()
+          .map(
+              entry -> {
 try {
-                // i.e. DataContractEntityResponseV2.Builder
+                // i.e. DataContractEntityResponseV2.Builder
+                Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(toClazz);
+                Set<String> builderMethods =
+                    Arrays.stream(builderPair.getFirst().getMethods())
+                        .map(Method::getName)
+                        .collect(Collectors.toSet());
+
+                REFLECT
+                    .lookupMethod(builderPair, "urn", String.class)
+                    .invoke(builderPair.getSecond(), entry.getKey());
+
+                entry
+                    .getValue()
+                    .getAspects()
+                    .entrySet()
+                    .forEach(
+                        aspectEntry -> {
+                          try {
+                            if (builderMethods.contains(aspectEntry.getKey())) {
 String upperFirstAspect = toUpperFirst(aspectEntry.getKey());
 Class<?> aspectClazz = REFLECT.lookupClass(upperFirstAspect);
-                Class<?> aspectRespClazz = REFLECT.lookupClass(upperFirstAspect + ASPECT_RESPONSE_SUFFIX);
-                Class<?> aspectRespClazzBuilder = REFLECT.lookupClass(String.join("",
-                        upperFirstAspect, ASPECT_RESPONSE_SUFFIX,
-                        "$", upperFirstAspect, ASPECT_RESPONSE_SUFFIX, "Builder"));
-                Object aspectBuilder = REFLECT.lookupMethod(aspectRespClazz, "builder").invoke(null);
-
-                REFLECT.lookupMethod(aspectRespClazzBuilder, "value", aspectClazz).invoke(aspectBuilder, aspectEntry.getValue().getValue());
+                              Class<?> aspectRespClazz =
+                                  REFLECT.lookupClass(upperFirstAspect + ASPECT_RESPONSE_SUFFIX);
+                              Class<?> aspectRespClazzBuilder =
+                                  REFLECT.lookupClass(
+                                      String.join(
+                                          "",
+                                          upperFirstAspect,
+                                          ASPECT_RESPONSE_SUFFIX,
+                                          "$",
+                                          upperFirstAspect,
+                                          ASPECT_RESPONSE_SUFFIX,
+                                          "Builder"));
+                              Object aspectBuilder =
+                                  REFLECT.lookupMethod(aspectRespClazz, "builder").invoke(null);
+
+                              REFLECT
+                                  .lookupMethod(aspectRespClazzBuilder, "value", aspectClazz)
+                                  .invoke(aspectBuilder, aspectEntry.getValue().getValue());
 if (withSystemMetadata) {
-                    REFLECT.lookupMethod(aspectRespClazzBuilder, "systemMetadata", SystemMetadata.class)
-                            .invoke(aspectBuilder, aspectEntry.getValue().getSystemMetadata());
+                                REFLECT
+                                    .lookupMethod(
+                                        aspectRespClazzBuilder,
+                                        "systemMetadata",
+                                        SystemMetadata.class)
+                                    .invoke(
+                                        aspectBuilder,
+                                        aspectEntry.getValue().getSystemMetadata());
 }
-                REFLECT.lookupMethod(builderPair, aspectEntry.getKey(), aspectRespClazz).invoke(builderPair.getSecond(),
-                        REFLECT.lookupMethod(aspectRespClazzBuilder, "build").invoke(aspectBuilder));
+                              REFLECT
+                                  .lookupMethod(
+                                      builderPair, aspectEntry.getKey(), aspectRespClazz)
+                                  .invoke(
+                                      builderPair.getSecond(),
+                                      REFLECT
+                                          .lookupMethod(aspectRespClazzBuilder, "build")
+                                          .invoke(aspectBuilder));
+                            }
+                          } catch (IllegalAccessException | InvocationTargetException e) {
+                            throw new RuntimeException(e);
 }
-            } catch (IllegalAccessException | InvocationTargetException e) {
-                throw new RuntimeException(e);
-            }
-        });
+                        });
-        return toClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()));
+                return toClazz.cast(
+                    REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()));
 } catch (IllegalAccessException | InvocationTargetException e) {
-                throw new RuntimeException(e);
+                  throw new RuntimeException(e);
 }
-        }).collect(Collectors.toList());
-    }
-    return List.of();
+              })
+          .collect(Collectors.toList());
 }
-
-    public static <I, T> T convertToResponseAspect(I source, Class<T> targetClazz) {
-        if (source != null) {
-            try {
-                Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName());
-                Method valueMethod = REFLECT.lookupMethod(sourceClazz, "getValue");
-                Object aspect = valueMethod.invoke(source);
-
-                Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz);
-                REFLECT.lookupMethod(builderPair, "value", valueMethod.getReturnType()).invoke(builderPair.getSecond(), aspect);
-
-                return
-                targetClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()));
-            } catch (InvocationTargetException | IllegalAccessException e) {
-                throw new RuntimeException(e);
-            }
-        }
-        return null;
+    return List.of();
+  }
+
+  public static <I, T> T convertToResponseAspect(I source, Class<T> targetClazz) {
+    if (source != null) {
+      try {
+        Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName());
+        Method valueMethod = REFLECT.lookupMethod(sourceClazz, "getValue");
+        Object aspect = valueMethod.invoke(source);
+
+        Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz);
+        REFLECT
+            .lookupMethod(builderPair, "value", valueMethod.getReturnType())
+            .invoke(builderPair.getSecond(), aspect);
+
+        return targetClazz.cast(
+            REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()));
+      } catch (InvocationTargetException | IllegalAccessException e) {
+        throw new RuntimeException(e);
+      }
     }
-
-    public static <I, T> T convertToResponse(I source, Class<T> targetClazz, EntityRegistry entityRegistry) {
-        if (source != null) {
-            try {
-                Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName());
-                Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz);
-                copy(Pair.of(sourceClazz, source), builderPair, "urn");
-
-                final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, sourceClazz);
-                entitySpec.getAspectSpecs().stream()
-                        .forEach(aspectSpec -> {
-                            try {
-                                copy(Pair.of(sourceClazz, source), builderPair, aspectSpec.getName());
-                            } catch (InvocationTargetException | IllegalAccessException e) {
-                                throw new RuntimeException(e);
-                            }
-                        });
-
-                return targetClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()));
-            } catch (InvocationTargetException | IllegalAccessException e) {
-                throw new RuntimeException(e);
-            }
-        }
-        return null;
+    return null;
+  }
+
+  public static <I, T> T convertToResponse(
+      I source, Class<T> targetClazz, EntityRegistry entityRegistry) {
+    if (source != null) {
+      try {
+        Class<?> sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName());
+        Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(targetClazz);
+        copy(Pair.of(sourceClazz, source), builderPair, "urn");
+
+        final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, sourceClazz);
+        entitySpec.getAspectSpecs().stream()
+            .forEach(
+                aspectSpec -> {
+                  try {
+                    copy(Pair.of(sourceClazz, source), builderPair, aspectSpec.getName());
+                  } catch (InvocationTargetException | IllegalAccessException e) {
+                    throw new RuntimeException(e);
+                  }
+                });
+
+        return targetClazz.cast(
+            REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()));
+      } catch (InvocationTargetException | IllegalAccessException e) {
+        throw new RuntimeException(e);
+      }
     }
-
-    public static <T, S> Optional<S> convertToScrollResponse(Class<S> scrollRespClazz, String scrollId, List<T> entityResults) {
-        if (entityResults != null) {
-            try {
-                Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(scrollRespClazz);
-                REFLECT.lookupMethod(builderPair.getFirst(), "scrollId", String.class).invoke(builderPair.getSecond(), scrollId);
-                REFLECT.lookupMethod(builderPair.getFirst(), "entities", List.class).invoke(builderPair.getSecond(), entityResults);
-
-                return Optional.of(scrollRespClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())));
-
-            } catch (InvocationTargetException | IllegalAccessException e) {
-                throw new RuntimeException(e);
-            }
-        }
-        return
-        Optional.empty();
+    return null;
+  }
+
+  public static <T, S> Optional<S> convertToScrollResponse(
+      Class<S> scrollRespClazz, String scrollId, List<T> entityResults) {
+    if (entityResults != null) {
+      try {
+        Pair<Class<?>, Object> builderPair = REFLECT.getBuilder(scrollRespClazz);
+        REFLECT
+            .lookupMethod(builderPair.getFirst(), "scrollId", String.class)
+            .invoke(builderPair.getSecond(), scrollId);
+        REFLECT
+            .lookupMethod(builderPair.getFirst(), "entities", List.class)
+            .invoke(builderPair.getSecond(), entityResults);
+
+        return Optional.of(
+            scrollRespClazz.cast(
+                REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())));
+
+      } catch (InvocationTargetException | IllegalAccessException e) {
+        throw new RuntimeException(e);
+      }
     }
-
-
-
-    private static void copy(Pair<Class<?>, Object> sourcePair, Pair<Class<?>, Object> builderPair, String method)
-            throws InvocationTargetException, IllegalAccessException {
-        Method sourceMethod = REFLECT.lookupMethod(sourcePair, String.format("get%s", toUpperFirst(method)));
-        if (sourceMethod != null) {
-            Class<?> paramClazz = null;
-            Object param = null;
-            if (sourceMethod.getReturnType().getSimpleName().contains("Request")) {
-                Object sourceParam = sourceMethod.invoke(sourcePair.getSecond());
-                if (sourceParam != null) {
-                    paramClazz = REFLECT.lookupClass(sourceMethod.getReturnType().getSimpleName().replace("Request", "Response"));
-                    Pair<Class<?>, Object> aspectBuilder = REFLECT.getBuilder(paramClazz);
-
-                    for (Method m : sourceMethod.getReturnType().getMethods()) {
-                        if (m.getName().startsWith("get") && !Objects.equals("getClass", m.getName())) {
-                            String getterMethod = m.getName().replaceFirst("^get", "");
-                            copy(Pair.of(sourceMethod.getReturnType(), sourceMethod.invoke(sourcePair.getSecond())),
-                                    aspectBuilder, getterMethod);
-                        }
-                    }
-
-                    param = REFLECT.lookupMethod(aspectBuilder, "build").invoke(aspectBuilder.getSecond());
-                }
-            } else {
-                paramClazz = sourceMethod.getReturnType();
-                param = sourceMethod.invoke(sourcePair.getSecond());
+    return Optional.empty();
+  }
+
+  private static void copy(
+      Pair<Class<?>, Object> sourcePair, Pair<Class<?>, Object> builderPair, String method)
+      throws InvocationTargetException, IllegalAccessException {
+    Method sourceMethod =
+        REFLECT.lookupMethod(sourcePair, String.format("get%s", toUpperFirst(method)));
+    if (sourceMethod != null) {
+      Class<?> paramClazz = null;
+      Object param = null;
+      if (sourceMethod.getReturnType().getSimpleName().contains("Request")) {
+        Object sourceParam = sourceMethod.invoke(sourcePair.getSecond());
+        if (sourceParam != null) {
+          paramClazz =
+              REFLECT.lookupClass(
+                  sourceMethod.getReturnType().getSimpleName().replace("Request", "Response"));
+          Pair<Class<?>, Object> aspectBuilder = REFLECT.getBuilder(paramClazz);
+
+          for (Method m : sourceMethod.getReturnType().getMethods()) {
+            if (m.getName().startsWith("get") && !Objects.equals("getClass", m.getName())) {
+              String getterMethod = m.getName().replaceFirst("^get", "");
+              copy(
+                  Pair.of(
+                      sourceMethod.getReturnType(), sourceMethod.invoke(sourcePair.getSecond())),
+                  aspectBuilder,
+                  getterMethod);
            }
          }

+          param = REFLECT.lookupMethod(aspectBuilder, "build").invoke(aspectBuilder.getSecond());
        }
      }
+      else {
+        paramClazz = sourceMethod.getReturnType();
+        param = sourceMethod.invoke(sourcePair.getSecond());
+      }
+
+      if (param != null) {
+        Method targetMethod = REFLECT.lookupMethod(builderPair, toLowerFirst(method), paramClazz);
+        targetMethod.invoke(builderPair.getSecond(), param);
+      }
+    } else {
+      log.info(
+          "Class {} doesn't container method {}",
+          sourcePair.getFirst(),
+          String.format("get%s", toUpperFirst(method)));
     }
-
-    public static <T> EntitySpec requestClassToEntitySpec(EntityRegistry entityRegistry, Class<T> reqClazz) {
-        final String entityType = toLowerFirst(reqClazz.getSimpleName().replace(ENTITY_REQUEST_SUFFIX, ""));
-        return entityRegistry.getEntitySpec(entityType);
-    }
-
-    public static <T> EntitySpec responseClassToEntitySpec(EntityRegistry entityRegistry, Class<T> respClazz) {
-        String entityType = toLowerFirst(respClazz.getSimpleName().replace(ENTITY_RESPONSE_SUFFIX, ""));
-        return entityRegistry.getEntitySpec(entityType);
-    }
+  }
+
+  public static <T> EntitySpec requestClassToEntitySpec(
+      EntityRegistry entityRegistry, Class<T> reqClazz) {
+    final String entityType =
+        toLowerFirst(reqClazz.getSimpleName().replace(ENTITY_REQUEST_SUFFIX, ""));
+    return entityRegistry.getEntitySpec(entityType);
+  }
+
+  public static <T> EntitySpec responseClassToEntitySpec(
+      EntityRegistry entityRegistry, Class<T> respClazz) {
+    String entityType = toLowerFirst(respClazz.getSimpleName().replace(ENTITY_RESPONSE_SUFFIX, ""));
+    return entityRegistry.getEntitySpec(entityType);
+  }
 }
diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java
index cabaa2cbd75e6..920a13d998985 100644
--- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java
+++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java
@@ -1,5 +1,11 @@
 package io.datahubproject.openapi.config;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyList;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -26,6 +32,9 @@ import io.datahubproject.openapi.generated.EntityResponse;
 import io.datahubproject.openapi.relationships.RelationshipsController;
 import io.datahubproject.openapi.timeline.TimelineController;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.stream.Collectors;
 import org.mockito.Mockito;
 import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.test.mock.mockito.MockBean;
@@ -33,102 +42,96 @@ import org.springframework.context.annotation.Primary;
 import org.springframework.http.ResponseEntity;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyInt;
-import static org.mockito.ArgumentMatchers.anyList;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-
 @TestConfiguration
 public class OpenAPIEntityTestConfiguration {
-    @Bean
-    public ObjectMapper objectMapper() {
-        return new ObjectMapper(new YAMLFactory());
-    }
-
-    @Bean
-    @Primary
-    public EntityService entityService(final EntityRegistry mockRegistry) {
-        EntityService entityService = mock(EntityServiceImpl.class);
-        when(entityService.getEntityRegistry()).thenReturn(mockRegistry);
-        return entityService;
-    }
-
-    @Bean
-    @Primary
-    public SearchService searchService() {
-        SearchService searchService = mock(SearchService.class);
-        when(searchService.scrollAcrossEntities(anyList(), any(), any(), any(),
-                any(), any(), anyInt(), any()))
-                .thenReturn(new ScrollResult().setEntities(new SearchEntityArray()));
-
-        return searchService;
-    }
-
-    @Bean
-    public AuthorizerChain authorizerChain() {
-        AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class);
-
-        Authentication authentication = Mockito.mock(Authentication.class);
-        when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub"));
-        when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, ""));
-        AuthenticationContext.setAuthentication(authentication);
-
-        return authorizerChain;
-    }
-
-    @MockBean(name = "elasticSearchSystemMetadataService")
-    public SystemMetadataService systemMetadataService;
-
-    @MockBean
-    public TimelineService timelineService;
-
-    @Bean("entityRegistry")
-    @Primary
-    public EntityRegistry entityRegistry() throws EntityRegistryException, InterruptedException {
-        /*
-            Considered a few different approach to loading a custom model. Chose this method
-            to as closely match a production configuration rather than direct project to project
-            dependency.
-         */
-        PluginEntityRegistryLoader custom = new PluginEntityRegistryLoader(
-                getClass().getResource("/custom-model").getFile());
-
-        ConfigEntityRegistry standard = new ConfigEntityRegistry(
-                OpenAPIEntityTestConfiguration.class.getClassLoader().getResourceAsStream("entity-registry.yml"));
-        MergedEntityRegistry entityRegistry = new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(standard);
-        custom.withBaseRegistry(entityRegistry).start(true);
-
-        return entityRegistry;
-    }
-
-    /* Controllers not under this module */
-    @Bean
-    @Primary
-    public EntitiesController entitiesController() {
-        EntitiesController entitiesController = mock(EntitiesController.class);
-        when(entitiesController.getEntities(any(), any()))
-                .thenAnswer(params -> {
-                    String[] urns = params.getArgument(0);
-                    String[] aspects = params.getArgument(1);
-                    return ResponseEntity.ok(UrnResponseMap.builder()
-                            .responses(Arrays.stream(urns)
-                                    .map(urn -> Map.entry(urn, EntityResponse.builder().urn(urn).build()))
-                                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
-                            .build());
-                });
-
-        return entitiesController;
-    }
-
-    @MockBean
-    public TimelineController timelineController;
-
-    @MockBean
-    public RelationshipsController relationshipsController;
+  @Bean
+  public ObjectMapper objectMapper() {
+    return new ObjectMapper(new YAMLFactory());
+  }
+
+  @Bean
+  @Primary
+  public EntityService entityService(final EntityRegistry mockRegistry) {
+    EntityService entityService = mock(EntityServiceImpl.class);
+    when(entityService.getEntityRegistry()).thenReturn(mockRegistry);
+    return entityService;
+  }
+
+  @Bean
+  @Primary
+  public SearchService searchService() {
+    SearchService searchService = mock(SearchService.class);
+    when(searchService.scrollAcrossEntities(
+            anyList(), any(), any(), any(), any(), any(), anyInt(), any()))
+        .thenReturn(new ScrollResult().setEntities(new SearchEntityArray()));
+
+    return searchService;
+  }
+
+  @Bean
+  public AuthorizerChain authorizerChain() {
+    AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class);
+
+    Authentication authentication = Mockito.mock(Authentication.class);
+    when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub"));
+    when(authorizerChain.authorize(any()))
+        .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, ""));
+    AuthenticationContext.setAuthentication(authentication);
+
+    return authorizerChain;
+  }
+
+  @MockBean(name = "elasticSearchSystemMetadataService")
+  public SystemMetadataService systemMetadataService;
+
+  @MockBean public TimelineService timelineService;
+
+  @Bean("entityRegistry")
+  @Primary
+  public EntityRegistry entityRegistry() throws EntityRegistryException, InterruptedException {
+    /*
+    Considered a few different approach to loading a custom model. Chose this method
+    to as closely match a production configuration rather than direct project to project
+    dependency.
+    */
+    PluginEntityRegistryLoader custom =
+        new PluginEntityRegistryLoader(getClass().getResource("/custom-model").getFile());
+
+    ConfigEntityRegistry standard =
+        new ConfigEntityRegistry(
+            OpenAPIEntityTestConfiguration.class
+                .getClassLoader()
+                .getResourceAsStream("entity-registry.yml"));
+    MergedEntityRegistry entityRegistry =
+        new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(standard);
+    custom.withBaseRegistry(entityRegistry).start(true);
+
+    return entityRegistry;
+  }
+
+  /* Controllers not under this module */
+  @Bean
+  @Primary
+  public EntitiesController entitiesController() {
+    EntitiesController entitiesController = mock(EntitiesController.class);
+    when(entitiesController.getEntities(any(), any()))
+        .thenAnswer(
+            params -> {
+              String[] urns = params.getArgument(0);
+              String[] aspects = params.getArgument(1);
+              return ResponseEntity.ok(
+                  UrnResponseMap.builder()
+                      .responses(
+                          Arrays.stream(urns)
+                              .map(urn -> Map.entry(urn, EntityResponse.builder().urn(urn).build()))
+                              .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
+                      .build());
+            });
+
+    return entitiesController;
+  }
+
+  @MockBean public TimelineController timelineController;
+
+  @MockBean public RelationshipsController relationshipsController;
 }
diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
index 57803ac904a93..1f8f0a5023513 100644
--- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
+++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
@@ -1,5 +1,8 @@
 package io.datahubproject.openapi.delegates;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+import static org.testng.Assert.*;
+
 import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import io.datahubproject.openapi.config.OpenAPIEntityTestConfiguration;
@@ -31,6 +34,7 @@ import io.datahubproject.openapi.generated.TagAssociation;
 import io.datahubproject.openapi.generated.controller.ChartApiController;
 import io.datahubproject.openapi.generated.controller.DatasetApiController;
+import java.util.List;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
 import org.springframework.boot.test.context.SpringBootTest;
@@ -46,208 +50,245 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
-import java.util.List;
-
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-import static org.testng.Assert.*;
-
-
 @SpringBootTest(classes = {SpringWebConfig.class})
 @ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"})
 @Import({OpenAPIEntityTestConfiguration.class})
 @AutoConfigureMockMvc
 public class EntityApiDelegateImplTest extends AbstractTestNGSpringContextTests {
-    @BeforeTest
-    public void disableAssert() {
-        PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader()
-                .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false);
-    }
-
-    @Autowired
-    private ChartApiController chartApiController;
-    @Autowired
-    private DatasetApiController datasetApiController;
-    @Autowired
-    private EntityRegistry entityRegistry;
-    @Autowired
-    private MockMvc mockMvc;
-
-    @Test
-    public void initTest() {
-        assertNotNull(chartApiController);
-        assertNotNull(datasetApiController);
-
-        assertTrue(entityRegistry.getEntitySpec("dataset").getAspectSpecMap().containsKey("customDataQualityRules"),
-                "Failed to load custom model from custom registry");
-    }
-
-    @Test
-    public void chartApiControllerTest() {
-        final String testUrn = "urn:li:chart:(looker,baz1)";
-
-        ChartEntityRequestV2 req = ChartEntityRequestV2.builder()
-                .urn(testUrn)
-                .build();
-        ChartEntityResponseV2 resp = chartApiController.create(List.of(req)).getBody().get(0);
-        assertEquals(resp.getUrn(), testUrn);
-
-        resp = chartApiController.get(testUrn, false, List.of()).getBody();
-        assertEquals(resp.getUrn(), testUrn);
-
-        ResponseEntity<Void> deleteResp = chartApiController.delete(testUrn);
-        assertEquals(deleteResp.getStatusCode(), HttpStatus.OK);
-
-        ResponseEntity<Void> headResp = chartApiController.head(testUrn);
-        assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND);
-
-        ResponseEntity<ScrollChartEntityResponseV2> scrollResp = chartApiController.scroll(
-                false, List.of(), 10, null, null, null, null);
-        assertEquals(scrollResp.getStatusCode(), HttpStatus.OK);
-        assertNotNull(scrollResp.getBody().getEntities());
-    }
-
-    @Test
-    public void datasetApiControllerTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        DatasetEntityRequestV2 req = DatasetEntityRequestV2.builder()
-                .urn(testUrn)
-                .build();
-        DatasetEntityResponseV2 resp = datasetApiController.create(List.of(req)).getBody().get(0);
-        assertEquals(resp.getUrn(), testUrn);
-
-        resp = datasetApiController.get(testUrn, false, List.of()).getBody();
-        assertEquals(resp.getUrn(), testUrn);
-
-        ResponseEntity<Void> deleteResp = datasetApiController.delete(testUrn);
-        assertEquals(deleteResp.getStatusCode(), HttpStatus.OK);
-
-        ResponseEntity<Void> headResp = datasetApiController.head(testUrn);
-        assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND);
-
-        ResponseEntity<ScrollDatasetEntityResponseV2> scrollResp = datasetApiController.scroll(
-                false, List.of(), 10, null, null, null, null);
-        assertEquals(scrollResp.getStatusCode(), HttpStatus.OK);
-        assertNotNull(scrollResp.getBody().getEntities());
-    }
-
-    @Test
-    public void browsePathsTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        BrowsePathsV2AspectRequestV2 req =
-        BrowsePathsV2AspectRequestV2.builder()
-                .value(BrowsePathsV2.builder().path(List.of(BrowsePathEntry.builder().urn(testUrn)
-                        .id("path").build())).build()).build();
-        assertEquals(datasetApiController.createBrowsePathsV2(testUrn, req).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.deleteBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.getBrowsePathsV2(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
-        assertEquals(datasetApiController.headBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
-    }
-
-    @Test
-    public void deprecationTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        DeprecationAspectRequestV2 req = DeprecationAspectRequestV2.builder()
-                .value(Deprecation.builder().deprecated(true).build()).build();
-        assertEquals(datasetApiController.createDeprecation(testUrn, req).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.deleteDeprecation(testUrn).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.getDeprecation(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
-        assertEquals(datasetApiController.headDeprecation(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
-    }
-
-    @Test
-    public void domainsTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        DomainsAspectRequestV2 req = DomainsAspectRequestV2.builder()
-                .value(Domains.builder().domains(List.of("my_domain")).build()).build();
-        assertEquals(datasetApiController.createDomains(testUrn, req).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.deleteDomains(testUrn).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.getDomains(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
-        assertEquals(datasetApiController.headDomains(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
-    }
-
-    @Test
-    public void ownershipTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        OwnershipAspectRequestV2 req = OwnershipAspectRequestV2.builder()
-                .value(Ownership.builder().owners(List.of(Owner.builder().owner("me").type(OwnershipType.BUSINESS_OWNER).build())).build()).build();
-        assertEquals(datasetApiController.createOwnership(testUrn, req).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.deleteOwnership(testUrn).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.getOwnership(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
-        assertEquals(datasetApiController.headOwnership(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
-    }
-
-    @Test
-    public void statusTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        StatusAspectRequestV2 req = StatusAspectRequestV2.builder().value(Status.builder().removed(true).build()).build();
-        assertEquals(datasetApiController.createStatus(testUrn, req).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.deleteStatus(testUrn).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.getStatus(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
-        assertEquals(datasetApiController.headStatus(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
-    }
-
-    @Test
-    public void globalTagsTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        GlobalTagsAspectRequestV2 req = GlobalTagsAspectRequestV2.builder()
-                .value(GlobalTags.builder().tags(List.of(TagAssociation.builder().tag("tag").build())).build()).build();
-        assertEquals(datasetApiController.createGlobalTags(testUrn, req).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.deleteGlobalTags(testUrn).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.getGlobalTags(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
-        assertEquals(datasetApiController.headGlobalTags(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
-    }
-
-    @Test
-    public void glossaryTermsTest() {
-        final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
-
-        GlossaryTermsAspectRequestV2 req = GlossaryTermsAspectRequestV2.builder()
-                .value(GlossaryTerms.builder().terms(List.of(GlossaryTermAssociation.builder().urn("term urn").build())).build()).build();
-        assertEquals(datasetApiController.createGlossaryTerms(testUrn, req).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.deleteGlossaryTerms(testUrn).getStatusCode(), HttpStatus.OK);
-        assertEquals(datasetApiController.getGlossaryTerms(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
-        assertEquals(datasetApiController.headGlossaryTerms(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
-    }
-
-
-    /**
-     * The purpose of this test is to ensure no errors when a custom aspect is encountered,
-     * not that the custom aspect is processed. The missing piece to support custom
-     * aspects is the openapi generated classes for the custom aspects and related request/responses.
-     */
-    @Test
-    public void customModelTest() throws Exception {
-        String expectedUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)";
-
-        //CHECKSTYLE:OFF
-        String body = "[\n" +
-                "    {\n" +
-                "        \"urn\": \"" + expectedUrn + "\",\n" +
-                "        \"customDataQualityRules\": [\n" +
-                "            {\n" +
-                "                \"field\": \"my_event_data\",\n" +
-                "                \"isFieldLevel\": false,\n" +
-                "                \"type\": \"isNull\",\n" +
-                "                \"checkDefinition\": \"n/a\",\n" +
-                "                \"url\": \"https://github.com/datahub-project/datahub/blob/master/checks/nonNull.sql\"\n" +
-                "            }\n" +
-                "        ]\n" +
-                "    }\n" +
-                "]";
-        //CHECKSTYLE:ON
-
-        mockMvc.perform(MockMvcRequestBuilders
-                        .post("/v2/entity/dataset")
-                        .content(body)
-                        .contentType(MediaType.APPLICATION_JSON)
-                        .accept(MediaType.APPLICATION_JSON))
-                .andExpect(status().is2xxSuccessful())
-                .andExpect(MockMvcResultMatchers.jsonPath("$.[0].urn").value(expectedUrn));
-    }
+  @BeforeTest
+  public void disableAssert() {
+    PathSpecBasedSchemaAnnotationVisitor.class
+        .getClassLoader()
+        .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false);
+  }
+
+  @Autowired private ChartApiController chartApiController;
+  @Autowired private DatasetApiController datasetApiController;
+  @Autowired private EntityRegistry entityRegistry;
+  @Autowired private MockMvc mockMvc;
+
+  @Test
+  public void initTest() {
+    assertNotNull(chartApiController);
+    assertNotNull(datasetApiController);
+
+    assertTrue(
+        entityRegistry
+            .getEntitySpec("dataset")
+            .getAspectSpecMap()
+            .containsKey("customDataQualityRules"),
+        "Failed to load custom model from custom registry");
+  }
+
+  @Test
+  public void chartApiControllerTest() {
+    final String testUrn = "urn:li:chart:(looker,baz1)";
+
+    ChartEntityRequestV2 req = ChartEntityRequestV2.builder().urn(testUrn).build();
+    ChartEntityResponseV2 resp = chartApiController.create(List.of(req)).getBody().get(0);
+    assertEquals(resp.getUrn(), testUrn);
+
+    resp = chartApiController.get(testUrn, false, List.of()).getBody();
+    assertEquals(resp.getUrn(), testUrn);
+
+    ResponseEntity<Void> deleteResp = chartApiController.delete(testUrn);
+    assertEquals(deleteResp.getStatusCode(), HttpStatus.OK);
+
+    ResponseEntity<Void> headResp = chartApiController.head(testUrn);
+    assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND);
+
+    ResponseEntity<ScrollChartEntityResponseV2> scrollResp =
+        chartApiController.scroll(false, List.of(), 10, null, null, null, null);
+    assertEquals(scrollResp.getStatusCode(), HttpStatus.OK);
+    assertNotNull(scrollResp.getBody().getEntities());
+  }
+
+  @Test
+  public void datasetApiControllerTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    DatasetEntityRequestV2 req = DatasetEntityRequestV2.builder().urn(testUrn).build();
+    DatasetEntityResponseV2 resp = datasetApiController.create(List.of(req)).getBody().get(0);
+    assertEquals(resp.getUrn(), testUrn);
+
+    resp = datasetApiController.get(testUrn, false, List.of()).getBody();
+    assertEquals(resp.getUrn(), testUrn);
+
+    ResponseEntity<Void> deleteResp = datasetApiController.delete(testUrn);
+    assertEquals(deleteResp.getStatusCode(), HttpStatus.OK);
+
+    ResponseEntity<Void> headResp = datasetApiController.head(testUrn);
+    assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND);
+
+    ResponseEntity<ScrollDatasetEntityResponseV2> scrollResp =
+        datasetApiController.scroll(false, List.of(), 10, null, null, null, null);
+    assertEquals(scrollResp.getStatusCode(), HttpStatus.OK);
+    assertNotNull(scrollResp.getBody().getEntities());
+  }
+
+  @Test
+  public void browsePathsTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    BrowsePathsV2AspectRequestV2 req =
+        BrowsePathsV2AspectRequestV2.builder()
+            .value(
+                BrowsePathsV2.builder()
+                    .path(List.of(BrowsePathEntry.builder().urn(testUrn).id("path").build()))
+                    .build())
+            .build();
+    assertEquals(
+        datasetApiController.createBrowsePathsV2(testUrn, req).getStatusCode(), HttpStatus.OK);
+    assertEquals(datasetApiController.deleteBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.OK);
+    assertEquals(
+        datasetApiController.getBrowsePathsV2(testUrn, false).getStatusCode(),
+        HttpStatus.NOT_FOUND);
+    assertEquals(
+        datasetApiController.headBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
+  }
+
+  @Test
+  public void deprecationTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    DeprecationAspectRequestV2 req =
+        DeprecationAspectRequestV2.builder()
+            .value(Deprecation.builder().deprecated(true).build())
+            .build();
+    assertEquals(
+        datasetApiController.createDeprecation(testUrn, req).getStatusCode(), HttpStatus.OK);
+    assertEquals(datasetApiController.deleteDeprecation(testUrn).getStatusCode(), HttpStatus.OK);
+    assertEquals(
+        datasetApiController.getDeprecation(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
+    assertEquals(
+        datasetApiController.headDeprecation(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
+  }
+
+  @Test
+  public void domainsTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    DomainsAspectRequestV2 req =
+        DomainsAspectRequestV2.builder()
+            .value(Domains.builder().domains(List.of("my_domain")).build())
+            .build();
+    assertEquals(datasetApiController.createDomains(testUrn, req).getStatusCode(), HttpStatus.OK);
+    assertEquals(datasetApiController.deleteDomains(testUrn).getStatusCode(), HttpStatus.OK);
+    assertEquals(
+        datasetApiController.getDomains(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
+    assertEquals(datasetApiController.headDomains(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
+  }
+
+  @Test
+  public void ownershipTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    OwnershipAspectRequestV2 req =
+        OwnershipAspectRequestV2.builder()
+            .value(
+                Ownership.builder()
+                    .owners(
+                        List.of(
+                            Owner.builder().owner("me").type(OwnershipType.BUSINESS_OWNER).build()))
+                    .build())
+            .build();
+    assertEquals(datasetApiController.createOwnership(testUrn, req).getStatusCode(), HttpStatus.OK);
+    assertEquals(datasetApiController.deleteOwnership(testUrn).getStatusCode(), HttpStatus.OK);
+    assertEquals(
+        datasetApiController.getOwnership(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
+    assertEquals(datasetApiController.headOwnership(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
+  }
+
+  @Test
+  public void statusTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    StatusAspectRequestV2 req =
+        StatusAspectRequestV2.builder().value(Status.builder().removed(true).build()).build();
+    assertEquals(datasetApiController.createStatus(testUrn, req).getStatusCode(), HttpStatus.OK);
+    assertEquals(datasetApiController.deleteStatus(testUrn).getStatusCode(), HttpStatus.OK);
+    assertEquals(
+        datasetApiController.getStatus(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
+    assertEquals(datasetApiController.headStatus(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
+  }
+
+  @Test
+  public void globalTagsTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    GlobalTagsAspectRequestV2 req =
+        GlobalTagsAspectRequestV2.builder()
+            .value(
+                GlobalTags.builder()
+                    .tags(List.of(TagAssociation.builder().tag("tag").build()))
+                    .build())
+            .build();
+    assertEquals(
+        datasetApiController.createGlobalTags(testUrn, req).getStatusCode(), HttpStatus.OK);
+    assertEquals(datasetApiController.deleteGlobalTags(testUrn).getStatusCode(), HttpStatus.OK);
+    assertEquals(
+        datasetApiController.getGlobalTags(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND);
+    assertEquals(
+        datasetApiController.headGlobalTags(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
+  }
+
+  @Test
+  public void glossaryTermsTest() {
+    final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)";
+
+    GlossaryTermsAspectRequestV2 req =
+        GlossaryTermsAspectRequestV2.builder()
+            .value(
+                GlossaryTerms.builder()
+                    .terms(List.of(GlossaryTermAssociation.builder().urn("term urn").build()))
+                    .build())
+            .build();
+    assertEquals(
+        datasetApiController.createGlossaryTerms(testUrn, req).getStatusCode(), HttpStatus.OK);
+    assertEquals(datasetApiController.deleteGlossaryTerms(testUrn).getStatusCode(), HttpStatus.OK);
+    assertEquals(
+        datasetApiController.getGlossaryTerms(testUrn, false).getStatusCode(),
+        HttpStatus.NOT_FOUND);
+    assertEquals(
+        datasetApiController.headGlossaryTerms(testUrn).getStatusCode(), HttpStatus.NOT_FOUND);
+  }
+
+  /**
+   * The purpose of this test is to ensure no errors when a custom aspect is encountered, not that
+   * the custom aspect is processed. The missing piece to support custom aspects is the openapi
+   * generated classes for the custom aspects and related request/responses.
+   */
+  @Test
+  public void customModelTest() throws Exception {
+    String expectedUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)";
+
+    // CHECKSTYLE:OFF
+    String body =
+        "[\n"
+            + "    {\n"
+            + "        \"urn\": \""
+            + expectedUrn
+            + "\",\n"
+            + "        \"customDataQualityRules\": [\n"
+            + "            {\n"
+            + "                \"field\": \"my_event_data\",\n"
+            + "                \"isFieldLevel\": false,\n"
+            + "                \"type\": \"isNull\",\n"
+            + "                \"checkDefinition\": \"n/a\",\n"
+            + "                \"url\": \"https://github.com/datahub-project/datahub/blob/master/checks/nonNull.sql\"\n"
+            + "            }\n"
+            + "        ]\n"
+            + "    }\n"
+            + "]";
+    // CHECKSTYLE:ON
+
+    mockMvc
+        .perform(
+            MockMvcRequestBuilders.post("/v2/entity/dataset")
+                .content(body)
+                .contentType(MediaType.APPLICATION_JSON)
+                .accept(MediaType.APPLICATION_JSON))
+        .andExpect(status().is2xxSuccessful())
+        .andExpect(MockMvcResultMatchers.jsonPath("$.[0].urn").value(expectedUrn));
+  }
 }
diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java
index b4e87eedea542..12596d9410874 100644
--- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java
+++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java
@@ -1,13 +1,17 @@
 package io.datahubproject.openapi.util;
+import static org.testng.AssertJUnit.assertEquals;
+import static org.testng.AssertJUnit.assertNotNull;
+
 import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import io.datahubproject.openapi.config.OpenAPIEntityTestConfiguration;
 import io.datahubproject.openapi.dto.UpsertAspectRequest;
 import io.datahubproject.openapi.generated.ContainerEntityRequestV2;
 import io.datahubproject.openapi.generated.ContainerKey;
 import io.datahubproject.openapi.generated.ContainerKeyAspectRequestV2;
+import java.util.List;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
@@ -15,41 +19,44 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
-import java.util.List;
-
-import static org.testng.AssertJUnit.assertEquals;
-import static org.testng.AssertJUnit.assertNotNull;
-
-
 @Import({OpenAPIEntityTestConfiguration.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class OpenApiEntitiesUtilTest extends AbstractTestNGSpringContextTests {
-    @Autowired
-    private EntityRegistry entityRegistry;
-
-    @BeforeTest
-    public void disableAssert() {
-        PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader()
-                .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false);
-    }
-
-    @Test
-    public void testInitialization() {
-        assertNotNull(entityRegistry);
-    }
-
-    @Test
-    public void containerConversionTest() {
-        ContainerEntityRequestV2 test = ContainerEntityRequestV2.builder()
-                .urn("urn:li:container:123")
-                .containerKey(ContainerKeyAspectRequestV2.builder().value(ContainerKey.builder().guid("123").build()).build())
-                .build();
-        List<UpsertAspectRequest> expected = List.of(UpsertAspectRequest.builder()
@Autowired private EntityRegistry entityRegistry; + + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } + + @Test + public void testInitialization() { + assertNotNull(entityRegistry); + } + + @Test + public void containerConversionTest() { + ContainerEntityRequestV2 test = + ContainerEntityRequestV2.builder() + .urn("urn:li:container:123") + .containerKey( + ContainerKeyAspectRequestV2.builder() + .value(ContainerKey.builder().guid("123").build()) + .build()) + .build(); + List<UpsertAspectRequest> expected = + List.of( + UpsertAspectRequest.builder() .entityType("container") .entityUrn("urn:li:container:123") .aspect(ContainerKey.builder().guid("123").build()) .build()); - assertEquals(expected, OpenApiEntitiesUtil.convertEntityToUpsert(test, ContainerEntityRequestV2.class, entityRegistry)); - } + assertEquals( + expected, + OpenApiEntitiesUtil.convertEntityToUpsert( + test, ContainerEntityRequestV2.class, entityRegistry)); + } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java index 47e2cfec3a9c0..cc040d29657b2 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java @@ -6,7 +6,6 @@ import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; - @ControllerAdvice public class GlobalControllerExceptionHandler { @ExceptionHandler(ConversionFailedException.class) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java index e4f49df90c392..ed98cf3ef4ce9 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java @@ -5,7 +5,6 @@ import io.swagger.v3.oas.annotations.info.Info; import io.swagger.v3.oas.annotations.servers.Server; import java.util.List; - import org.springdoc.core.GroupedOpenApi; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -18,10 +17,10 @@ import org.springframework.web.servlet.config.annotation.EnableWebMvc; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - @EnableWebMvc -@OpenAPIDefinition(info = @Info(title = "DataHub OpenAPI", version = "2.0.0"), - servers = {@Server(url = "/openapi/", description = "Default Server URL")}) +@OpenAPIDefinition( + info = @Info(title = "DataHub OpenAPI", version = "2.0.0"), + servers = {@Server(url = "/openapi/", description = "Default Server URL")}) @Configuration public class SpringWebConfig implements WebMvcConfigurer { @@ -41,20 +40,17 @@ public void addFormatters(FormatterRegistry registry) { @Bean public GroupedOpenApi defaultOpenApiGroup() { return GroupedOpenApi.builder() - .group("default") - .packagesToExclude( - "io.datahubproject.openapi.operations", - "io.datahubproject.openapi.health" - ).build(); + .group("default") + 
.packagesToExclude( + "io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .build(); } @Bean public GroupedOpenApi operationsOpenApiGroup() { return GroupedOpenApi.builder() - .group("operations") - .packagesToScan( - "io.datahubproject.openapi.operations", - "io.datahubproject.openapi.health" - ).build(); + .group("operations") + .packagesToScan("io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .build(); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java index e88f499208af8..c092a2423fdf5 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.converter; +import static com.linkedin.metadata.timeline.data.ChangeCategory.*; + import com.linkedin.metadata.timeline.data.ChangeCategory; import java.util.List; import java.util.Optional; @@ -8,28 +10,29 @@ import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.Converter; -import static com.linkedin.metadata.timeline.data.ChangeCategory.*; - - public class StringToChangeCategoryConverter implements Converter<String, ChangeCategory> { @Override public ChangeCategory convert(String source) { try { String upperCase = source.toUpperCase(); - // For compound enums, want to support different cases i.e. technical_schema, technical schema, technical-schema, etc. - Optional<ChangeCategory> compoundCategory = COMPOUND_CATEGORIES.keySet().stream() - .filter(compoundCategoryKey -> matchCompound(compoundCategoryKey, upperCase)) - .map(COMPOUND_CATEGORIES::get) - .findFirst(); + // For compound enums, want to support different cases i.e. technical_schema, technical + // schema, technical-schema, etc. 
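
An aside on the compound-category matching in this hunk: a minimal standalone sketch of the token-containment check, assuming hypothetically that COMPOUND_CATEGORIES keys are token lists such as ["TECHNICAL", "SCHEMA"] (the real keys are not shown in this patch):

    import java.util.List;

    class CompoundCategorySketch {
      public static void main(String[] args) {
        // Hypothetical compound key; the real ones live in COMPOUND_CATEGORIES.
        List<String> compoundKey = List.of("TECHNICAL", "SCHEMA");
        // Upper-casing first means "technical_schema", "technical schema" and
        // "technical-schema" all contain every token of the key.
        String source = "technical-schema".toUpperCase();
        boolean matches = compoundKey.stream().allMatch(source::contains);
        System.out.println(matches); // true
      }
    }
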
+ Optional<ChangeCategory> compoundCategory = + COMPOUND_CATEGORIES.keySet().stream() + .filter(compoundCategoryKey -> matchCompound(compoundCategoryKey, upperCase)) + .map(COMPOUND_CATEGORIES::get) + .findFirst(); return compoundCategory.orElseGet(() -> ChangeCategory.valueOf(upperCase)); } catch (Exception e) { - throw new ConversionFailedException(TypeDescriptor.valueOf(String.class), - TypeDescriptor.valueOf(ChangeCategory.class), source, e); + throw new ConversionFailedException( + TypeDescriptor.valueOf(String.class), + TypeDescriptor.valueOf(ChangeCategory.class), + source, + e); } } private boolean matchCompound(@Nonnull List<String> compoundCategoryKey, @Nonnull String source) { - return compoundCategoryKey.stream() - .allMatch(source::contains); + return compoundCategoryKey.stream().allMatch(source::contains); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java index 0be69e3264957..07a501885f1aa 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java @@ -6,7 +6,6 @@ import lombok.Builder; import lombok.Value; - @Value @Builder @JsonInclude(JsonInclude.Include.NON_NULL) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java index 67858581ba97a..d185e01804c24 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java @@ -9,7 +9,6 @@ import lombok.Builder; import lombok.Value; - @JsonInclude(JsonInclude.Include.NON_NULL) @Value @Builder @@ -17,15 +16,21 @@ public class UpsertAspectRequest { @JsonProperty("entityType") - @Schema(required = true, description = "The name of the entity matching with its definition in the entity registry") + @Schema( + required = true, + description = "The name of the entity matching with its definition in the entity registry") String entityType; @JsonProperty("entityUrn") - @Schema(description = "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") + @Schema( + description = + "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") String entityUrn; @JsonProperty("entityKeyAspect") - @Schema(description = "A key aspect referencing the entity to be updated, required if entityUrn is null") + @Schema( + description = + "A key aspect referencing the entity to be updated, required if entityUrn is null") OneOfGenericAspectValue entityKeyAspect; @JsonProperty("aspect") @@ -33,7 +38,5 @@ public class UpsertAspectRequest { OneOfGenericAspectValue aspect; @JsonPOJOBuilder(withPrefix = "") - public static class UpsertAspectRequestBuilder { - - } + public static class UpsertAspectRequestBuilder {} } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java index 02be0cc93eb1c..60062823a7d82 100644 --- 
a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java @@ -7,7 +7,6 @@ import lombok.Builder; import lombok.Value; - @Value @Builder @JsonInclude(JsonInclude.Include.NON_NULL) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java index 898f768cf999a..6e0fc5deb0b3c 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.entities; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; @@ -52,14 +54,13 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; - - @RestController @RequiredArgsConstructor @RequestMapping("/entities/v1") @Slf4j -@Tag(name = "Entities", description = "APIs for ingesting and accessing entities and their constituent aspects") +@Tag( + name = "Entities", + description = "APIs for ingesting and accessing entities and their constituent aspects") public class EntitiesController { private final EntityService _entityService; @@ -76,27 +77,42 @@ public void initBinder(WebDataBinder binder) { @GetMapping(value = "/latest", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<UrnResponseMap> getEntities( - @Parameter(name = "urns", required = true, description = "A list of raw urn strings, only supports a single entity type per request.") - @RequestParam("urns") @Nonnull String[] urns, + @Parameter( + name = "urns", + required = true, + description = + "A list of raw urn strings, only supports a single entity type per request.") + @RequestParam("urns") + @Nonnull + String[] urns, @Parameter(name = "aspectNames", description = "The list of aspect names to retrieve") - @RequestParam(name = "aspectNames", required = false) @Nullable String[] aspectNames) { + @RequestParam(name = "aspectNames", required = false) + @Nullable + String[] aspectNames) { Timer.Context context = MetricUtils.timer("getEntities").time(); final Set<Urn> entityUrns = Arrays.stream(urns) - // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access + // Have to decode here because of frontend routing, does No-op for already unencoded + // through direct API access .map(URLDecoder::decode) - .map(UrnUtils::getUrn).collect(Collectors.toSet()); + .map(UrnUtils::getUrn) + .collect(Collectors.toSet()); log.debug("GET ENTITIES {}", entityUrns); Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) - ))); - - List<Optional<EntitySpec>> resourceSpecs = entityUrns.stream() - .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - 
.collect(Collectors.toList()); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); + + List<Optional<EntitySpec>> resourceSpecs = + entityUrns.stream() + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); } if (entityUrns.size() <= 0) { @@ -104,19 +120,26 @@ public ResponseEntity<UrnResponseMap> getEntities( } // TODO: Only supports one entity type at a time, may cause confusion final String entityName = urnToEntityName(entityUrns.iterator().next()); - final Set<String> projectedAspects = aspectNames == null ? _entityService.getEntityAspectNames(entityName) - : new HashSet<>(Arrays.asList(aspectNames)); + final Set<String> projectedAspects = + aspectNames == null + ? _entityService.getEntityAspectNames(entityName) + : new HashSet<>(Arrays.asList(aspectNames)); Throwable exceptionally = null; try { - return ResponseEntity.ok(UrnResponseMap.builder() - .responses(MappingUtil.mapServiceResponse(_entityService - .getEntitiesV2(entityName, entityUrns, projectedAspects), _objectMapper)) - .build()); + return ResponseEntity.ok( + UrnResponseMap.builder() + .responses( + MappingUtil.mapServiceResponse( + _entityService.getEntitiesV2(entityName, entityUrns, projectedAspects), + _objectMapper)) + .build()); } catch (Exception e) { exceptionally = e; throw new RuntimeException( - String.format("Failed to batch get entities with urns: %s, projectedAspects: %s", entityUrns, - projectedAspects), e); + String.format( + "Failed to batch get entities with urns: %s, projectedAspects: %s", + entityUrns, projectedAspects), + e); } finally { if (exceptionally != null) { MetricUtils.counter(MetricRegistry.name("getEntities", "failed")).inc(); @@ -134,24 +157,34 @@ public ResponseEntity<List<String>> postEntities( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()) - ))); - List<com.linkedin.mxe.MetadataChangeProposal> proposals = aspectRequests.stream() - .map(MappingUtil::mapToProposal) - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .collect(Collectors.toList()); - - if (restApiAuthorizationEnabled && !MappingUtil.authorizeProposals(proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); + List<com.linkedin.mxe.MetadataChangeProposal> proposals = + aspectRequests.stream() + .map(MappingUtil::mapToProposal) + .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .collect(Collectors.toList()); + + if (restApiAuthorizationEnabled + && !MappingUtil.authorizeProposals( + proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { throw new 
UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - List<Pair<String, Boolean>> responses = proposals.stream() - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .collect(Collectors.toList()); + List<Pair<String, Boolean>> responses = + proposals.stream() + .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) + .collect(Collectors.toList()); if (responses.stream().anyMatch(Pair::getSecond)) { return ResponseEntity.status(HttpStatus.CREATED) - .body(responses.stream().filter(Pair::getSecond).map(Pair::getFirst).collect(Collectors.toList())); + .body( + responses.stream() + .filter(Pair::getSecond) + .map(Pair::getFirst) + .collect(Collectors.toList())); } else { return ResponseEntity.ok(Collections.emptyList()); } @@ -159,52 +192,83 @@ public ResponseEntity<List<String>> postEntities( @DeleteMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<List<RollbackRunResultDto>> deleteEntities( - @Parameter(name = "urns", required = true, description = "A list of raw urn strings, only supports a single entity type per request.") - @RequestParam("urns") @Nonnull String[] urns, - @Parameter(name = "soft", description = "Determines whether the delete will be soft or hard, defaults to true for soft delete") - @RequestParam(value = "soft", defaultValue = "true") boolean soft) { + @Parameter( + name = "urns", + required = true, + description = + "A list of raw urn strings, only supports a single entity type per request.") + @RequestParam("urns") + @Nonnull + String[] urns, + @Parameter( + name = "soft", + description = + "Determines whether the delete will be soft or hard, defaults to true for soft delete") + @RequestParam(value = "soft", defaultValue = "true") + boolean soft) { Throwable exceptionally = null; try (Timer.Context context = MetricUtils.timer("deleteEntities").time()) { - Authentication authentication = AuthenticationContext.getAuthentication(); - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType()) - ))); - final Set<Urn> entityUrns = Arrays.stream(urns) - // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access - .map(URLDecoder::decode) - .map(UrnUtils::getUrn).collect(Collectors.toSet()); - - List<Optional<EntitySpec>> resourceSpecs = entityUrns.stream() - .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { - UnauthorizedException unauthorizedException = new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities."); - exceptionally = unauthorizedException; - throw unauthorizedException; - } + Authentication authentication = AuthenticationContext.getAuthentication(); + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); + final Set<Urn> entityUrns = + Arrays.stream(urns) + // Have to decode here because of frontend routing, does No-op for already unencoded + // through direct API access + .map(URLDecoder::decode) + 
.map(UrnUtils::getUrn) + .collect(Collectors.toSet()); - if (!soft) { - return ResponseEntity.ok(entityUrns.stream() - .map(_entityService::deleteUrn) - .map(rollbackRunResult -> MappingUtil.mapRollbackRunResult(rollbackRunResult, _objectMapper)) - .collect(Collectors.toList())); - } else { - List<UpsertAspectRequest> deleteRequests = entityUrns.stream() - .map(entityUrn -> MappingUtil.createStatusRemoval(entityUrn, _entityService)) - .collect(Collectors.toList()); - - return ResponseEntity.ok(Collections.singletonList(RollbackRunResultDto.builder() - .rowsRolledBack(deleteRequests.stream() - .map(MappingUtil::mapToProposal) - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .filter(Pair::getSecond) - .map(Pair::getFirst) - .map(urnString -> AspectRowSummary.builder().urn(urnString).build()) - .collect(Collectors.toList())) - .rowsDeletedFromEntityDeletion(deleteRequests.size()) - .build())); + List<Optional<EntitySpec>> resourceSpecs = + entityUrns.stream() + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { + UnauthorizedException unauthorizedException = + new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities."); + exceptionally = unauthorizedException; + throw unauthorizedException; + } + + if (!soft) { + return ResponseEntity.ok( + entityUrns.stream() + .map(_entityService::deleteUrn) + .map( + rollbackRunResult -> + MappingUtil.mapRollbackRunResult(rollbackRunResult, _objectMapper)) + .collect(Collectors.toList())); + } else { + List<UpsertAspectRequest> deleteRequests = + entityUrns.stream() + .map(entityUrn -> MappingUtil.createStatusRemoval(entityUrn, _entityService)) + .collect(Collectors.toList()); + + return ResponseEntity.ok( + Collections.singletonList( + RollbackRunResultDto.builder() + .rowsRolledBack( + deleteRequests.stream() + .map(MappingUtil::mapToProposal) + .map( + proposal -> + MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .map( + proposal -> + MappingUtil.ingestProposal( + proposal, actorUrnStr, _entityService)) + .filter(Pair::getSecond) + .map(Pair::getFirst) + .map(urnString -> AspectRowSummary.builder().urn(urnString).build()) + .collect(Collectors.toList())) + .rowsDeletedFromEntityDeletion(deleteRequests.size()) + .build())); } } catch (Exception e) { exceptionally = e; diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java index c90603bf88c31..79a219f891fc9 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java @@ -10,7 +10,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.client.RequestOptions; @@ -25,7 +24,6 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController 
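
An aside on the soft-delete branch of deleteEntities above: rather than rolling back rows, it builds one status-removal upsert per urn via MappingUtil.createStatusRemoval. A minimal sketch of the shape such a request plausibly takes, reusing the builder pattern shown earlier in this patch; the builder on the generated Status model is an assumption:

    // Sketch only, assuming the generated classes are importable:
    // import io.datahubproject.openapi.dto.UpsertAspectRequest;
    // import io.datahubproject.openapi.generated.Status;
    UpsertAspectRequest softDelete =
        UpsertAspectRequest.builder()
            .entityType("dataset") // hypothetical entity type
            .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")
            .aspect(Status.builder().removed(true).build()) // assumes a generated builder
            .build();
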
@RequestMapping("/") @Tag(name = "HealthCheck", description = "An API for checking health of GMS and its clients.") @@ -33,26 +31,31 @@ public class HealthCheckController { @Autowired @Qualifier("elasticSearchRestHighLevelClient") private RestHighLevelClient elasticClient; + private final Supplier<ResponseEntity<String>> memoizedSupplier; public HealthCheckController(ConfigurationProvider config) { - this.memoizedSupplier = Suppliers.memoizeWithExpiration( - this::getElasticHealth, config.getHealthCheck().getCacheDurationSeconds(), TimeUnit.SECONDS); + this.memoizedSupplier = + Suppliers.memoizeWithExpiration( + this::getElasticHealth, + config.getHealthCheck().getCacheDurationSeconds(), + TimeUnit.SECONDS); } @GetMapping(path = "/check/ready", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<Boolean> getCombinedHealthCheck(String... checks) { return ResponseEntity.status(getCombinedDebug(checks).getStatusCode()) - .body(getCombinedDebug(checks).getStatusCode().is2xxSuccessful()); + .body(getCombinedDebug(checks).getStatusCode().is2xxSuccessful()); } /** - * Combined health check endpoint for checking GMS clients. - * For now, just checks the health of the ElasticSearch client - * @return A ResponseEntity with a Map of String (component name) to ResponseEntity (the health check status of - * that component). The status code will be 200 if all components are okay, and 500 if one or more components are not - * healthy. + * Combined health check endpoint for checking GMS clients. For now, just checks the health of the + * ElasticSearch client + * + * @return A ResponseEntity with a Map of String (component name) to ResponseEntity (the health + * check status of that component). The status code will be 200 if all components are okay, + * and 500 if one or more components are not healthy. */ @GetMapping(path = "/debug/ready", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(String... checks) { @@ -60,19 +63,26 @@ public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(Stri healthChecks.put("elasticsearch", this::getElasticDebugWithCache); // Add new components here - List<String> componentsToCheck = checks != null && checks.length > 0 - ? Arrays.asList(checks) - : new ArrayList<>(healthChecks.keySet()); + List<String> componentsToCheck = + checks != null && checks.length > 0 + ? 
Arrays.asList(checks) + : new ArrayList<>(healthChecks.keySet()); Map<String, ResponseEntity<String>> componentHealth = new HashMap<>(); for (String check : componentsToCheck) { - componentHealth.put(check, - healthChecks.getOrDefault(check, - () -> ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE).body("Unrecognized component " + check)) + componentHealth.put( + check, + healthChecks + .getOrDefault( + check, + () -> + ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE) + .body("Unrecognized component " + check)) .get()); } - boolean isHealthy = componentHealth.values().stream().allMatch(resp -> resp.getStatusCode() == HttpStatus.OK); + boolean isHealthy = + componentHealth.values().stream().allMatch(resp -> resp.getStatusCode() == HttpStatus.OK); if (isHealthy) { return ResponseEntity.ok(componentHealth); } @@ -82,11 +92,12 @@ public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(Stri @GetMapping(path = "/check/elastic", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<Boolean> getElasticHealthWithCache() { return ResponseEntity.status(getElasticDebugWithCache().getStatusCode()) - .body(getElasticDebugWithCache().getStatusCode().is2xxSuccessful()); + .body(getElasticDebugWithCache().getStatusCode().is2xxSuccessful()); } /** * Checks the memoized cache for the latest elastic health check result + * * @return The ResponseEntity containing the health check result */ @GetMapping(path = "/debug/elastic", produces = MediaType.APPLICATION_JSON_VALUE) @@ -96,13 +107,15 @@ public ResponseEntity<String> getElasticDebugWithCache() { /** * Query ElasticSearch health endpoint + * * @return A response including the result from ElasticSearch */ private ResponseEntity<String> getElasticHealth() { String responseString = null; try { ClusterHealthRequest request = new ClusterHealthRequest(); - ClusterHealthResponse response = elasticClient.cluster().health(request, RequestOptions.DEFAULT); + ClusterHealthResponse response = + elasticClient.cluster().health(request, RequestOptions.DEFAULT); boolean isHealthy = !response.isTimedOut() && response.getStatus() != ClusterHealthStatus.RED; responseString = response.toString(); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java index 2e243f4c8df9e..3fa926924aabe 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java @@ -9,7 +9,6 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @Slf4j @RestController @RequestMapping("/up") diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java index f29461734ebfc..f7c848f91a64c 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java @@ -6,15 +6,15 @@ import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.ConjunctivePrivilegeGroup; import 
com.datahub.authorization.DisjunctivePrivilegeGroup; -import io.datahubproject.openapi.util.ElasticsearchUtils; import com.google.common.collect.ImmutableList; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.systemmetadata.SystemMetadataService; +import io.datahubproject.openapi.util.ElasticsearchUtils; import io.swagger.v3.oas.annotations.tags.Tag; import java.util.List; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.tasks.GetTaskResponse; import org.json.JSONObject; +import org.opensearch.client.tasks.GetTaskResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -28,11 +28,12 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController @RequestMapping("/operations/elasticSearch") @Slf4j -@Tag(name = "ElasticSearchOperations", description = "An API for managing your elasticsearch instance") +@Tag( + name = "ElasticSearchOperations", + description = "An API for managing your elasticsearch instance") public class OperationsController { private final AuthorizerChain _authorizerChain; @@ -51,26 +52,36 @@ public OperationsController(AuthorizerChain authorizerChain) { public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); } + @GetMapping(path = "/getTaskStatus", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity<String> getTaskStatus(String task) { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE.getType()) - ))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) { - return ResponseEntity.status(HttpStatus.FORBIDDEN).body( - String.format(actorUrnStr + " is not authorized to get ElasticSearch task status")); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE.getType())))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) { + return ResponseEntity.status(HttpStatus.FORBIDDEN) + .body(String.format(actorUrnStr + " is not authorized to get ElasticSearch task status")); } if (!ElasticsearchUtils.isTaskIdValid(task)) { - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body( - String.format("Task ID should be in the form nodeId:taskId e.g. aB1cdEf2GHI-JKLMnoPQr3:123456 (got %s)", task)); + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body( + String.format( + "Task ID should be in the form nodeId:taskId e.g. 
aB1cdEf2GHI-JKLMnoPQr3:123456 (got %s)", + task)); } - String nodeIdToQuery = task.split(":")[0]; + String nodeIdToQuery = task.split(":")[0]; long taskIdToQuery = Long.parseLong(task.split(":")[1]); - java.util.Optional<GetTaskResponse> res = _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); + java.util.Optional<GetTaskResponse> res = + _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); if (res.isEmpty()) { - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); } GetTaskResponse resp = res.get(); JSONObject j = new JSONObject(); @@ -80,4 +91,4 @@ public ResponseEntity<String> getTaskStatus(String task) { j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos()); return ResponseEntity.ok(j.toString()); } -} \ No newline at end of file +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java index cfb516913eb09..370f2019a42dd 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java @@ -32,12 +32,13 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController @RequiredArgsConstructor @RequestMapping("/platform/entities/v1") @Slf4j -@Tag(name = "Platform Entities", description = "Platform level APIs intended for lower level access to entities") +@Tag( + name = "Platform Entities", + description = "Platform level APIs intended for lower level access to entities") public class PlatformEntitiesController { private final EntityService _entityService; @@ -60,24 +61,33 @@ public ResponseEntity<List<String>> postEntities( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - List<com.linkedin.mxe.MetadataChangeProposal> proposals = metadataChangeProposals.stream() - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .collect(Collectors.toList()); - DisjunctivePrivilegeGroup - orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()) - ))); + List<com.linkedin.mxe.MetadataChangeProposal> proposals = + metadataChangeProposals.stream() + .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .collect(Collectors.toList()); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled && !MappingUtil.authorizeProposals(proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { + if (restApiAuthorizationEnabled + && !MappingUtil.authorizeProposals( + proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - 
List<Pair<String, Boolean>> responses = proposals.stream() - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .collect(Collectors.toList()); + List<Pair<String, Boolean>> responses = + proposals.stream() + .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) + .collect(Collectors.toList()); if (responses.stream().anyMatch(Pair::getSecond)) { return ResponseEntity.status(HttpStatus.CREATED) - .body(responses.stream().filter(Pair::getSecond).map(Pair::getFirst).collect(Collectors.toList())); + .body( + responses.stream() + .filter(Pair::getSecond) + .map(Pair::getFirst) + .collect(Collectors.toList())); } else { return ResponseEntity.ok(Collections.emptyList()); } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java index 4641fed3a8610..4ceed6a11b973 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.relationships; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; @@ -45,9 +47,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - - @RestController @RequiredArgsConstructor @RequestMapping("/relationships/v1") @@ -59,6 +58,7 @@ public enum RelationshipDirection { INCOMING, OUTGOING } + private static final int MAX_DOWNSTREAM_CNT = 200; private final GraphService _graphService; private final AuthorizerChain _authorizerChain; @@ -71,83 +71,127 @@ public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); } - private RelatedEntitiesResult getRelatedEntities(String rawUrn, List<String> relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count) { + private RelatedEntitiesResult getRelatedEntities( + String rawUrn, + List<String> relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count) { start = start == null ? 0 : start; count = count == null ? 
MAX_DOWNSTREAM_CNT : count; com.linkedin.metadata.query.filter.RelationshipDirection restLiDirection; switch (direction) { - case INCOMING: { - restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING; - break; - } - case OUTGOING: { - restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.OUTGOING; - break; - } - default: { - throw new RuntimeException("Unexpected relationship direction " + direction); - } + case INCOMING: + { + restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING; + break; + } + case OUTGOING: + { + restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.OUTGOING; + break; + } + default: + { + throw new RuntimeException("Unexpected relationship direction " + direction); + } } - return _graphService.findRelatedEntities(null, newFilter("urn", rawUrn), null, QueryUtils.EMPTY_FILTER, - relationshipTypes, newRelationshipFilter(QueryUtils.EMPTY_FILTER, restLiDirection), start, count); + return _graphService.findRelatedEntities( + null, + newFilter("urn", rawUrn), + null, + QueryUtils.EMPTY_FILTER, + relationshipTypes, + newRelationshipFilter(QueryUtils.EMPTY_FILTER, restLiDirection), + start, + count); } @GetMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE) - @Operation(responses = { @ApiResponse(responseCode = "0", description = "", - content = @Content(schema = @Schema(implementation = RelatedEntitiesResult.class)))}) + @Operation( + responses = { + @ApiResponse( + responseCode = "0", + description = "", + content = @Content(schema = @Schema(implementation = RelatedEntitiesResult.class))) + }) public ResponseEntity<RelatedEntitiesResult> getRelationships( - @Parameter(name = "urn", required = true, - description = "The urn for the entity whose relationships are being queried") - @RequestParam("urn") - @Nonnull String urn, - @Parameter(name = "relationshipTypes", required = true, - description = "The list of relationship types to traverse") - @RequestParam(name = "relationshipTypes") - @Nonnull String[] relationshipTypes, - @Parameter(name = "direction", required = true, - description = "The directionality of the relationship") - @RequestParam(name = "direction") - @Nonnull RelationshipsController.RelationshipDirection direction, - @Parameter(name = "start", description = "An offset for the relationships to return from. " - + "Useful for pagination.") - @RequestParam(name = "start", defaultValue = "0") - @Nullable Integer start, - @Parameter(name = "count", description = "A count of relationships that will be returned " - + "starting from the offset. Useful for pagination.") - @RequestParam(name = "count", defaultValue = "200") - @Nullable Integer count) { + @Parameter( + name = "urn", + required = true, + description = "The urn for the entity whose relationships are being queried") + @RequestParam("urn") + @Nonnull + String urn, + @Parameter( + name = "relationshipTypes", + required = true, + description = "The list of relationship types to traverse") + @RequestParam(name = "relationshipTypes") + @Nonnull + String[] relationshipTypes, + @Parameter( + name = "direction", + required = true, + description = "The directionality of the relationship") + @RequestParam(name = "direction") + @Nonnull + RelationshipsController.RelationshipDirection direction, + @Parameter( + name = "start", + description = + "An offset for the relationships to return from. 
" + "Useful for pagination.") + @RequestParam(name = "start", defaultValue = "0") + @Nullable + Integer start, + @Parameter( + name = "count", + description = + "A count of relationships that will be returned " + + "starting from the offset. Useful for pagination.") + @RequestParam(name = "count", defaultValue = "200") + @Nullable + Integer count) { Timer.Context context = MetricUtils.timer("getRelationships").time(); - // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access + // Have to decode here because of frontend routing, does No-op for already unencoded through + // direct API access final Urn entityUrn = UrnUtils.getUrn(URLDecoder.decode(urn, Charset.forName("UTF-8"))); log.debug("GET Relationships {}", entityUrn); Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) - // Re-using GET_ENTITY_PRIVILEGE here as it doesn't make sense to split the privileges between these APIs. - ))); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) + // Re-using GET_ENTITY_PRIVILEGE here as it doesn't make sense to split the + // privileges between these APIs. + ))); List<Optional<EntitySpec>> resourceSpecs = - Collections.singletonList(Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString()))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, - orGroup)) { + Collections.singletonList( + Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString()))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get relationships."); } Throwable exceptionally = null; try { return ResponseEntity.ok( - getRelatedEntities(entityUrn.toString(), Arrays.asList(relationshipTypes), direction, start, - count)); + getRelatedEntities( + entityUrn.toString(), Arrays.asList(relationshipTypes), direction, start, count)); } catch (Exception e) { exceptionally = e; throw new RuntimeException( - String.format("Failed to batch get relationships with urn: %s, relationshipTypes: %s", urn, - Arrays.toString(relationshipTypes)), e); + String.format( + "Failed to batch get relationships with urn: %s, relationshipTypes: %s", + urn, Arrays.toString(relationshipTypes)), + e); } finally { if (exceptionally != null) { MetricUtils.counter(MetricRegistry.name("getRelationships", "failed")).inc(); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java index fbde9e8072002..a84c50e74baf2 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java @@ -30,11 +30,13 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; - @RestController 
@AllArgsConstructor @RequestMapping("/timeline/v1") -@Tag(name = "Timeline", description = "An API for retrieving historical updates to entities and their related documentation.") +@Tag( + name = "Timeline", + description = + "An API for retrieving historical updates to entities and their related documentation.") public class TimelineController { private final TimelineService _timelineService; @@ -44,7 +46,6 @@ public class TimelineController { private Boolean restApiAuthorizationEnabled; /** - * * @param rawUrn * @param startTime * @param endTime @@ -60,7 +61,8 @@ public ResponseEntity<List<ChangeTransaction>> getTimeline( @RequestParam(defaultValue = "-1") long startTime, @RequestParam(defaultValue = "0") long endTime, @RequestParam(defaultValue = "false") boolean raw, - @RequestParam Set<ChangeCategory> categories) throws URISyntaxException, JsonProcessingException { + @RequestParam Set<ChangeCategory> categories) + throws URISyntaxException, JsonProcessingException { // Make request params when implemented String startVersionStamp = null; String endVersionStamp = null; @@ -68,11 +70,18 @@ public ResponseEntity<List<ChangeTransaction>> getTimeline( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); EntitySpec resourceSpec = new EntitySpec(urn.getEntityType(), rawUrn); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.GET_TIMELINE_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorized(_authorizerChain, actorUrnStr, Optional.of(resourceSpec), orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_TIMELINE_PRIVILEGE.getType())))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorized( + _authorizerChain, actorUrnStr, Optional.of(resourceSpec), orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - return ResponseEntity.ok(_timelineService.getTimeline(urn, categories, startTime, endTime, startVersionStamp, endVersionStamp, raw)); + return ResponseEntity.ok( + _timelineService.getTimeline( + urn, categories, startTime, endTime, startVersionStamp, endVersionStamp, raw)); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java index 9ef14eefc429b..7b13191bc1b38 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java @@ -1,7 +1,8 @@ package io.datahubproject.openapi.util; public class ElasticsearchUtils { - private ElasticsearchUtils() { } + private ElasticsearchUtils() {} + public static boolean isTaskIdValid(String task) { if (task.matches("^[a-zA-Z0-9-_]+:[0-9]+$")) { try { diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java index 21dc5a4c8a0d6..0eb3e2d6b8c6e 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java +++ 
b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java @@ -1,11 +1,15 @@ package io.datahubproject.openapi.util; +import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; +import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; +import static java.nio.charset.StandardCharsets.UTF_8; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authorization.AuthUtil; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -21,13 +25,13 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.Aspect; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.RollbackRunResult; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.entity.validation.ValidationException; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.GenericAspect; @@ -35,7 +39,15 @@ import com.linkedin.util.Pair; import io.datahubproject.openapi.dto.RollbackRunResultDto; import io.datahubproject.openapi.dto.UpsertAspectRequest; - +import io.datahubproject.openapi.generated.AspectRowSummary; +import io.datahubproject.openapi.generated.AspectType; +import io.datahubproject.openapi.generated.AuditStamp; +import io.datahubproject.openapi.generated.EntityResponse; +import io.datahubproject.openapi.generated.EnvelopedAspect; +import io.datahubproject.openapi.generated.MetadataChangeProposal; +import io.datahubproject.openapi.generated.OneOfEnvelopedAspectValue; +import io.datahubproject.openapi.generated.OneOfGenericAspectValue; +import io.datahubproject.openapi.generated.Status; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.HashMap; @@ -51,16 +63,6 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import io.datahubproject.openapi.generated.AspectRowSummary; -import io.datahubproject.openapi.generated.AspectType; -import io.datahubproject.openapi.generated.AuditStamp; -import io.datahubproject.openapi.generated.EntityResponse; -import io.datahubproject.openapi.generated.EnvelopedAspect; -import io.datahubproject.openapi.generated.MetadataChangeProposal; -import io.datahubproject.openapi.generated.OneOfEnvelopedAspectValue; -import io.datahubproject.openapi.generated.OneOfGenericAspectValue; -import io.datahubproject.openapi.generated.Status; import lombok.extern.slf4j.Slf4j; import org.apache.avro.Schema; import org.reflections.Reflections; @@ -72,36 +74,33 @@ import org.springframework.http.MediaType; import org.springframework.web.client.HttpClientErrorException; -import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; -import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; -import static 
java.nio.charset.StandardCharsets.UTF_8; - @Slf4j public class MappingUtil { - private MappingUtil() { - - } + private MappingUtil() {} private static final JsonNodeFactory NODE_FACTORY = JsonNodeFactory.instance; - private static final Map<String, Class<? extends OneOfEnvelopedAspectValue>> ENVELOPED_ASPECT_TYPE_MAP = - new HashMap<>(); + private static final Map<String, Class<? extends OneOfEnvelopedAspectValue>> + ENVELOPED_ASPECT_TYPE_MAP = new HashMap<>(); private static final Map<Class<? extends OneOfGenericAspectValue>, String> ASPECT_NAME_MAP = new HashMap<>(); - private static final Map<String, Class<? extends RecordTemplate>> PEGASUS_TYPE_MAP = new HashMap<>(); + private static final Map<String, Class<? extends RecordTemplate>> PEGASUS_TYPE_MAP = + new HashMap<>(); private static final String DISCRIMINATOR = "__type"; private static final String PEGASUS_PACKAGE = "com.linkedin"; private static final String OPENAPI_PACKAGE = "io.datahubproject.openapi.generated"; - private static final ReflectionCache REFLECT_AVRO = ReflectionCache.builder() - .basePackage("com.linkedin.pegasus2avro").build(); - private static final ReflectionCache REFLECT_OPENAPI = ReflectionCache.builder() - .basePackage(OPENAPI_PACKAGE).build(); + private static final ReflectionCache REFLECT_AVRO = + ReflectionCache.builder().basePackage("com.linkedin.pegasus2avro").build(); + private static final ReflectionCache REFLECT_OPENAPI = + ReflectionCache.builder().basePackage(OPENAPI_PACKAGE).build(); static { // Build a map from __type name to generated class - ClassPathScanningCandidateComponentProvider provider = new ClassPathScanningCandidateComponentProvider(false); + ClassPathScanningCandidateComponentProvider provider = + new ClassPathScanningCandidateComponentProvider(false); provider.addIncludeFilter(new AssignableTypeFilter(OneOfEnvelopedAspectValue.class)); - Set<BeanDefinition> components = provider.findCandidateComponents("io/datahubproject/openapi/generated"); + Set<BeanDefinition> components = + provider.findCandidateComponents("io/datahubproject/openapi/generated"); components.forEach(MappingUtil::putEnvelopedAspectEntry); provider = new ClassPathScanningCandidateComponentProvider(false); @@ -111,36 +110,43 @@ private MappingUtil() { // Build a map from fully qualified Pegasus generated class name to class new Reflections(PEGASUS_PACKAGE, new SubTypesScanner(false)) - .getSubTypesOf(RecordTemplate.class) - .forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass)); + .getSubTypesOf(RecordTemplate.class) + .forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass)); } - public static Map<String, EntityResponse> mapServiceResponse(Map<Urn, com.linkedin.entity.EntityResponse> serviceResponse, - ObjectMapper objectMapper) { - return serviceResponse.entrySet() - .stream() - .collect(Collectors.toMap(entry -> entry.getKey().toString(), entry -> mapEntityResponse(entry.getValue(), objectMapper))); + public static Map<String, EntityResponse> mapServiceResponse( + Map<Urn, com.linkedin.entity.EntityResponse> serviceResponse, ObjectMapper objectMapper) { + return serviceResponse.entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), + entry -> mapEntityResponse(entry.getValue(), objectMapper))); } - public static EntityResponse mapEntityResponse(com.linkedin.entity.EntityResponse entityResponse, ObjectMapper objectMapper) { + public static EntityResponse mapEntityResponse( + com.linkedin.entity.EntityResponse entityResponse, ObjectMapper 
objectMapper) { return EntityResponse.builder() - .entityName(entityResponse.getEntityName()) - .urn(entityResponse.getUrn().toString()) - .aspects(entityResponse.getAspects() - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> mapEnvelopedAspect(entry.getValue(), objectMapper)))).build(); + .entityName(entityResponse.getEntityName()) + .urn(entityResponse.getUrn().toString()) + .aspects( + entityResponse.getAspects().entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> mapEnvelopedAspect(entry.getValue(), objectMapper)))) + .build(); } - public static EnvelopedAspect mapEnvelopedAspect(com.linkedin.entity.EnvelopedAspect envelopedAspect, - ObjectMapper objectMapper) { + public static EnvelopedAspect mapEnvelopedAspect( + com.linkedin.entity.EnvelopedAspect envelopedAspect, ObjectMapper objectMapper) { return EnvelopedAspect.builder() - .name(envelopedAspect.getName()) - .timestamp(envelopedAspect.getTimestamp()) - .version(envelopedAspect.getVersion()) - .type(AspectType.fromValue(envelopedAspect.getType().name().toUpperCase(Locale.ROOT))) - .created(objectMapper.convertValue(envelopedAspect.getCreated().data(), AuditStamp.class)) - .value(mapAspectValue(envelopedAspect.getName(), envelopedAspect.getValue(), objectMapper)).build(); + .name(envelopedAspect.getName()) + .timestamp(envelopedAspect.getTimestamp()) + .version(envelopedAspect.getVersion()) + .type(AspectType.fromValue(envelopedAspect.getType().name().toUpperCase(Locale.ROOT))) + .created(objectMapper.convertValue(envelopedAspect.getCreated().data(), AuditStamp.class)) + .value(mapAspectValue(envelopedAspect.getName(), envelopedAspect.getValue(), objectMapper)) + .build(); } private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataMap dataMap) { @@ -148,20 +154,23 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM dataMap.put(DISCRIMINATOR, parentClazz.getSimpleName()); } - Set<Map.Entry<String, DataMap>> requiresDiscriminator = dataMap.entrySet().stream() + Set<Map.Entry<String, DataMap>> requiresDiscriminator = + dataMap.entrySet().stream() .filter(e -> e.getValue() instanceof DataMap) .filter(e -> shouldCollapseClassToDiscriminator(e.getKey())) .map(e -> Map.entry(e.getKey(), (DataMap) e.getValue())) .collect(Collectors.toSet()); // DataMap doesn't support concurrent access - requiresDiscriminator.forEach(e -> { - dataMap.remove(e.getKey()); - dataMap.put(DISCRIMINATOR, e.getKey().substring(e.getKey().lastIndexOf(".") + 1)); - dataMap.putAll(e.getValue()); - }); + requiresDiscriminator.forEach( + e -> { + dataMap.remove(e.getKey()); + dataMap.put(DISCRIMINATOR, e.getKey().substring(e.getKey().lastIndexOf(".") + 1)); + dataMap.putAll(e.getValue()); + }); // Look through all the nested classes for possible discriminator requirements - Set<Pair<List<String>, DataMap>> nestedDataMaps = getDataMapPaths(new LinkedList<>(), dataMap).collect(Collectors.toSet()); + Set<Pair<List<String>, DataMap>> nestedDataMaps = + getDataMapPaths(new LinkedList<>(), dataMap).collect(Collectors.toSet()); // DataMap doesn't support concurrent access for (Pair<List<String>, DataMap> nestedDataMapPath : nestedDataMaps) { List<String> nestedPath = nestedDataMapPath.getFirst(); @@ -178,7 +187,10 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM nextClazz = getMethod != null ? 
getMethod.getReturnType() : null; if (nextClazz != null && "List".equals(nextClazz.getSimpleName())) { - String listElemClassName = getMethod.getGenericReturnType().getTypeName() + String listElemClassName = + getMethod + .getGenericReturnType() + .getTypeName() .replace("java.util.List<", "") .replace(">", ""); try { @@ -192,7 +204,7 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM } if ((nextClazz != parentClazz && shouldCheckTypeMethod(nextClazz)) - || nested.keySet().stream().anyMatch(MappingUtil::shouldCollapseClassToDiscriminator)) { + || nested.keySet().stream().anyMatch(MappingUtil::shouldCollapseClassToDiscriminator)) { insertDiscriminator(nextClazz, nested); } } @@ -201,42 +213,49 @@ private static DataMap insertDiscriminator(@Nullable Class<?> parentClazz, DataM return dataMap; } - /** * Stream paths to DataMaps + * * @param paths current path * @param data current DataMap or DataList * @return path to all nested DataMaps */ - private static Stream<Pair<List<String>, DataMap>> getDataMapPaths(List<String> paths, Object data) { + private static Stream<Pair<List<String>, DataMap>> getDataMapPaths( + List<String> paths, Object data) { if (data instanceof DataMap) { - return ((DataMap) data).entrySet().stream() + return ((DataMap) data) + .entrySet().stream() .filter(e -> e.getValue() instanceof DataMap || e.getValue() instanceof DataList) - .flatMap(entry -> { - List<String> thisPath = new LinkedList<>(paths); - thisPath.add(entry.getKey()); - if (entry.getValue() instanceof DataMap) { - return Stream.concat( + .flatMap( + entry -> { + List<String> thisPath = new LinkedList<>(paths); + thisPath.add(entry.getKey()); + if (entry.getValue() instanceof DataMap) { + return Stream.concat( Stream.of(Pair.of(thisPath, (DataMap) entry.getValue())), - getDataMapPaths(thisPath, entry.getValue()) - ); - } else { - // DataList - return getDataMapPaths(thisPath, entry.getValue()); - } - }); + getDataMapPaths(thisPath, entry.getValue())); + } else { + // DataList + return getDataMapPaths(thisPath, entry.getValue()); + } + }); } else if (data instanceof DataList) { DataList dataList = (DataList) data; return IntStream.range(0, dataList.size()) - .mapToObj(idx -> Pair.of(idx, dataList.get(idx))) - .filter(idxObject -> idxObject.getValue() instanceof DataMap || idxObject.getValue() instanceof DataList) - .flatMap(idxObject -> { + .mapToObj(idx -> Pair.of(idx, dataList.get(idx))) + .filter( + idxObject -> + idxObject.getValue() instanceof DataMap + || idxObject.getValue() instanceof DataList) + .flatMap( + idxObject -> { Object item = idxObject.getValue(); List<String> thisPath = new LinkedList<>(paths); thisPath.add("[" + idxObject.getKey() + "]"); if (item instanceof DataMap) { - return Stream.concat(Stream.of(Pair.of(thisPath, (DataMap) item)), - getDataMapPaths(thisPath, item)); + return Stream.concat( + Stream.of(Pair.of(thisPath, (DataMap) item)), + getDataMapPaths(thisPath, item)); } else { // DataList return getDataMapPaths(thisPath, item); @@ -246,8 +265,10 @@ private static Stream<Pair<List<String>, DataMap>> getDataMapPaths(List<String> return Stream.empty(); } - public static OneOfEnvelopedAspectValue mapAspectValue(String aspectName, Aspect aspect, ObjectMapper objectMapper) { - Class<? extends OneOfEnvelopedAspectValue> aspectClass = ENVELOPED_ASPECT_TYPE_MAP.get(aspectName); + public static OneOfEnvelopedAspectValue mapAspectValue( + String aspectName, Aspect aspect, ObjectMapper objectMapper) { + Class<? 
extends OneOfEnvelopedAspectValue> aspectClass = + ENVELOPED_ASPECT_TYPE_MAP.get(aspectName); DataMap wrapper = insertDiscriminator(aspectClass, aspect.data()); try { String dataMapAsJson = objectMapper.writeValueAsString(wrapper); @@ -261,7 +282,8 @@ public static OneOfEnvelopedAspectValue mapAspectValue(String aspectName, Aspect private static void putEnvelopedAspectEntry(BeanDefinition beanDefinition) { try { Class<? extends OneOfEnvelopedAspectValue> cls = - (Class<? extends OneOfEnvelopedAspectValue>) Class.forName(beanDefinition.getBeanClassName()); + (Class<? extends OneOfEnvelopedAspectValue>) + Class.forName(beanDefinition.getBeanClassName()); String aspectName = getAspectName(cls); ENVELOPED_ASPECT_TYPE_MAP.put(aspectName, cls); } catch (ClassNotFoundException e) { @@ -273,7 +295,8 @@ private static void putEnvelopedAspectEntry(BeanDefinition beanDefinition) { private static void putGenericAspectEntry(BeanDefinition beanDefinition) { try { Class<? extends OneOfGenericAspectValue> cls = - (Class<? extends OneOfGenericAspectValue>) Class.forName(beanDefinition.getBeanClassName()); + (Class<? extends OneOfGenericAspectValue>) + Class.forName(beanDefinition.getBeanClassName()); String aspectName = getAspectName(cls); ASPECT_NAME_MAP.put(cls, aspectName); } catch (ClassNotFoundException e) { @@ -288,14 +311,17 @@ private static String getAspectName(Class<?> cls) { } private static boolean shouldCheckTypeMethod(@Nullable Class<?> parentClazz) { - return Optional.ofNullable(parentClazz).map(cls -> cls.getName().startsWith(OPENAPI_PACKAGE + ".")).orElse(false); + return Optional.ofNullable(parentClazz) + .map(cls -> cls.getName().startsWith(OPENAPI_PACKAGE + ".")) + .orElse(false); } private static boolean shouldCollapseClassToDiscriminator(String className) { return className.startsWith(PEGASUS_PACKAGE + "."); } - private static Optional<String> shouldDiscriminate(String parentShortClass, String fieldName, ObjectNode node) { + private static Optional<String> shouldDiscriminate( + String parentShortClass, String fieldName, ObjectNode node) { try { if (parentShortClass != null) { Class<?> pegasus2AvroClazz = REFLECT_AVRO.lookupClass(parentShortClass, true); @@ -304,7 +330,8 @@ private static Optional<String> shouldDiscriminate(String parentShortClass, Stri Schema.Field avroField = avroSchema.getField(fieldName); if (avroField.schema().isUnion()) { - Class<?> discriminatedClazz = REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); + Class<?> discriminatedClazz = + REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); return Optional.of(discriminatedClazz.getName().replace(".pegasus2avro", "")); } } @@ -313,7 +340,8 @@ private static Optional<String> shouldDiscriminate(String parentShortClass, Stri Iterator<String> itr = node.fieldNames(); itr.next(); if (!itr.hasNext()) { // only contains discriminator - Class<?> discriminatedClazz = REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); + Class<?> discriminatedClazz = + REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); return Optional.of(discriminatedClazz.getName().replace(".pegasus2avro", "")); } @@ -326,16 +354,22 @@ private static Optional<String> shouldDiscriminate(String parentShortClass, Stri private static void replaceDiscriminator(ObjectNode node) { replaceDiscriminator(null, null, null, node); } - private static void replaceDiscriminator(@Nullable ObjectNode parentNode, @Nullable String parentDiscriminator, - @Nullable String propertyName, @Nonnull ObjectNode node) { + + 
private static void replaceDiscriminator( + @Nullable ObjectNode parentNode, + @Nullable String parentDiscriminator, + @Nullable String propertyName, + @Nonnull ObjectNode node) { final String discriminator; if (node.isObject() && node.has(DISCRIMINATOR)) { - Optional<String> discriminatorClassName = shouldDiscriminate(parentDiscriminator, propertyName, node); + Optional<String> discriminatorClassName = + shouldDiscriminate(parentDiscriminator, propertyName, node); if (parentNode != null && discriminatorClassName.isPresent()) { discriminator = node.remove(DISCRIMINATOR).asText(); parentNode.remove(propertyName); - parentNode.set(propertyName, NODE_FACTORY.objectNode().set(discriminatorClassName.get(), node)); + parentNode.set( + propertyName, NODE_FACTORY.objectNode().set(discriminatorClassName.get(), node)); } else { discriminator = node.remove(DISCRIMINATOR).asText(); } @@ -344,55 +378,75 @@ private static void replaceDiscriminator(@Nullable ObjectNode parentNode, @Nulla } List<Map.Entry<String, JsonNode>> objectChildren = new LinkedList<>(); - node.fields().forEachRemaining(entry -> { - if (entry.getValue().isObject()) { - objectChildren.add(entry); - } else if (entry.getValue().isArray()) { - entry.getValue().forEach(i -> { - if (i.isObject()) { - objectChildren.add(Map.entry(entry.getKey(), i)); - } - }); - } - }); + node.fields() + .forEachRemaining( + entry -> { + if (entry.getValue().isObject()) { + objectChildren.add(entry); + } else if (entry.getValue().isArray()) { + entry + .getValue() + .forEach( + i -> { + if (i.isObject()) { + objectChildren.add(Map.entry(entry.getKey(), i)); + } + }); + } + }); - objectChildren.forEach(entry -> - replaceDiscriminator(node, discriminator, entry.getKey(), (ObjectNode) entry.getValue()) - ); + objectChildren.forEach( + entry -> + replaceDiscriminator( + node, discriminator, entry.getKey(), (ObjectNode) entry.getValue())); } @Nonnull - public static GenericAspect convertGenericAspect(@Nonnull io.datahubproject.openapi.generated.GenericAspect genericAspect, + public static GenericAspect convertGenericAspect( + @Nonnull io.datahubproject.openapi.generated.GenericAspect genericAspect, ObjectMapper objectMapper) { try { ObjectNode jsonTree = (ObjectNode) objectMapper.valueToTree(genericAspect).get("value"); replaceDiscriminator(jsonTree); String pretty = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonTree); - return new GenericAspect().setContentType(genericAspect.getContentType()) + return new GenericAspect() + .setContentType(genericAspect.getContentType()) .setValue(ByteString.copyString(pretty, UTF_8)); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } - public static boolean authorizeProposals(List<com.linkedin.mxe.MetadataChangeProposal> proposals, EntityService entityService, - Authorizer authorizer, String actorUrnStr, DisjunctivePrivilegeGroup orGroup) { - List<Optional<EntitySpec>> resourceSpecs = proposals.stream() - .map(proposal -> { - com.linkedin.metadata.models.EntitySpec entitySpec = entityService.getEntityRegistry().getEntitySpec(proposal.getEntityType()); - Urn entityUrn = EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()); - return Optional.of(new EntitySpec(proposal.getEntityType(), entityUrn.toString())); - }) - .collect(Collectors.toList()); + public static boolean authorizeProposals( + List<com.linkedin.mxe.MetadataChangeProposal> proposals, + EntityService entityService, + Authorizer authorizer, + String actorUrnStr, + DisjunctivePrivilegeGroup orGroup) { 
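+    // Resolve each proposal to an EntitySpec of (entityType, entityUrn), recovering the URN
+    // from the proposal's key aspect, so the whole batch can be authorized with a single
+    // AuthUtil.isAuthorizedForResources call against the supplied privilege group.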
+ List<Optional<EntitySpec>> resourceSpecs = + proposals.stream() + .map( + proposal -> { + com.linkedin.metadata.models.EntitySpec entitySpec = + entityService.getEntityRegistry().getEntitySpec(proposal.getEntityType()); + Urn entityUrn = + EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()); + return Optional.of( + new EntitySpec(proposal.getEntityType(), entityUrn.toString())); + }) + .collect(Collectors.toList()); return AuthUtil.isAuthorizedForResources(authorizer, actorUrnStr, resourceSpecs, orGroup); } - public static Pair<String, Boolean> ingestProposal(com.linkedin.mxe.MetadataChangeProposal serviceProposal, String actorUrn, + public static Pair<String, Boolean> ingestProposal( + com.linkedin.mxe.MetadataChangeProposal serviceProposal, + String actorUrn, EntityService entityService) { // TODO: Use the actor present in the IC. Timer.Context context = MetricUtils.timer("postEntity").time(); final com.linkedin.common.AuditStamp auditStamp = - new com.linkedin.common.AuditStamp().setTime(System.currentTimeMillis()) + new com.linkedin.common.AuditStamp() + .setTime(System.currentTimeMillis()) .setActor(UrnUtils.getUrn(actorUrn)); final List<com.linkedin.mxe.MetadataChangeProposal> additionalChanges = @@ -401,20 +455,25 @@ public static Pair<String, Boolean> ingestProposal(com.linkedin.mxe.MetadataChan log.info("Proposal: {}", serviceProposal); Throwable exceptionally = null; try { - Stream<com.linkedin.mxe.MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(serviceProposal), + Stream<com.linkedin.mxe.MetadataChangeProposal> proposalStream = + Stream.concat( + Stream.of(serviceProposal), AspectUtils.getAdditionalChanges(serviceProposal, entityService).stream()); - AspectsBatch batch = AspectsBatchImpl.builder().mcps(proposalStream.collect(Collectors.toList()), - entityService.getEntityRegistry()).build(); + AspectsBatch batch = + AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), entityService.getEntityRegistry()) + .build(); - Set<IngestResult> proposalResult = - entityService.ingestProposal(batch, auditStamp, false); + Set<IngestResult> proposalResult = entityService.ingestProposal(batch, auditStamp, false); Urn urn = proposalResult.stream().findFirst().get().getUrn(); - return new Pair<>(urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted)); + return new Pair<>( + urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted)); } catch (ValidationException ve) { exceptionally = ve; - throw HttpClientErrorException.create(HttpStatus.UNPROCESSABLE_ENTITY, ve.getMessage(), null, null, null); + throw HttpClientErrorException.create( + HttpStatus.UNPROCESSABLE_ENTITY, ve.getMessage(), null, null, null); } catch (Exception e) { exceptionally = e; throw e; @@ -429,18 +488,23 @@ public static Pair<String, Boolean> ingestProposal(com.linkedin.mxe.MetadataChan } public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectRequest) { - MetadataChangeProposal.MetadataChangeProposalBuilder metadataChangeProposal = MetadataChangeProposal.builder(); - io.datahubproject.openapi.generated.GenericAspect - genericAspect = io.datahubproject.openapi.generated.GenericAspect.builder() - .value(aspectRequest.getAspect()) - .contentType(MediaType.APPLICATION_JSON_VALUE).build(); + MetadataChangeProposal.MetadataChangeProposalBuilder metadataChangeProposal = + MetadataChangeProposal.builder(); + io.datahubproject.openapi.generated.GenericAspect genericAspect = + 
io.datahubproject.openapi.generated.GenericAspect.builder() + .value(aspectRequest.getAspect()) + .contentType(MediaType.APPLICATION_JSON_VALUE) + .build(); io.datahubproject.openapi.generated.GenericAspect keyAspect = null; if (aspectRequest.getEntityKeyAspect() != null) { - keyAspect = io.datahubproject.openapi.generated.GenericAspect.builder() - .contentType(MediaType.APPLICATION_JSON_VALUE) - .value(aspectRequest.getEntityKeyAspect()).build(); + keyAspect = + io.datahubproject.openapi.generated.GenericAspect.builder() + .contentType(MediaType.APPLICATION_JSON_VALUE) + .value(aspectRequest.getEntityKeyAspect()) + .build(); } - metadataChangeProposal.aspect(genericAspect) + metadataChangeProposal + .aspect(genericAspect) .changeType(io.datahubproject.openapi.generated.ChangeType.UPSERT) .aspectName(ASPECT_NAME_MAP.get(aspectRequest.getAspect().getClass())) .entityKeyAspect(keyAspect) @@ -450,9 +514,10 @@ public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectReq return metadataChangeProposal.build(); } - public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(MetadataChangeProposal metadataChangeProposal, - ObjectMapper objectMapper) { - io.datahubproject.openapi.generated.KafkaAuditHeader auditHeader = metadataChangeProposal.getAuditHeader(); + public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal( + MetadataChangeProposal metadataChangeProposal, ObjectMapper objectMapper) { + io.datahubproject.openapi.generated.KafkaAuditHeader auditHeader = + metadataChangeProposal.getAuditHeader(); com.linkedin.mxe.MetadataChangeProposal serviceProposal = new com.linkedin.mxe.MetadataChangeProposal() @@ -463,7 +528,8 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad } if (metadataChangeProposal.getSystemMetadata() != null) { serviceProposal.setSystemMetadata( - objectMapper.convertValue(metadataChangeProposal.getSystemMetadata(), SystemMetadata.class)); + objectMapper.convertValue( + metadataChangeProposal.getSystemMetadata(), SystemMetadata.class)); } if (metadataChangeProposal.getAspectName() != null) { serviceProposal.setAspectName(metadataChangeProposal.getAspectName()); @@ -471,7 +537,8 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad if (auditHeader != null) { KafkaAuditHeader kafkaAuditHeader = new KafkaAuditHeader(); - kafkaAuditHeader.setAuditVersion(auditHeader.getAuditVersion()) + kafkaAuditHeader + .setAuditVersion(auditHeader.getAuditVersion()) .setTime(auditHeader.getTime()) .setAppName(auditHeader.getAppName()) .setMessageId(new UUID(ByteString.copyString(auditHeader.getMessageId(), UTF_8))) @@ -491,30 +558,40 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad serviceProposal.setAuditHeader(kafkaAuditHeader); } - serviceProposal = metadataChangeProposal.getEntityKeyAspect() != null - ? serviceProposal.setEntityKeyAspect( - MappingUtil.convertGenericAspect(metadataChangeProposal.getEntityKeyAspect(), objectMapper)) - : serviceProposal; - serviceProposal = metadataChangeProposal.getAspect() != null - ? serviceProposal.setAspect( - MappingUtil.convertGenericAspect(metadataChangeProposal.getAspect(), objectMapper)) - : serviceProposal; + serviceProposal = + metadataChangeProposal.getEntityKeyAspect() != null + ? 
serviceProposal.setEntityKeyAspect( + MappingUtil.convertGenericAspect( + metadataChangeProposal.getEntityKeyAspect(), objectMapper)) + : serviceProposal; + serviceProposal = + metadataChangeProposal.getAspect() != null + ? serviceProposal.setAspect( + MappingUtil.convertGenericAspect(metadataChangeProposal.getAspect(), objectMapper)) + : serviceProposal; return serviceProposal; } - public static RollbackRunResultDto mapRollbackRunResult(RollbackRunResult rollbackRunResult, ObjectMapper objectMapper) { - List<AspectRowSummary> aspectRowSummaries = rollbackRunResult.getRowsRolledBack().stream() - .map(aspectRowSummary -> objectMapper.convertValue(aspectRowSummary.data(), AspectRowSummary.class)) - .collect(Collectors.toList()); + public static RollbackRunResultDto mapRollbackRunResult( + RollbackRunResult rollbackRunResult, ObjectMapper objectMapper) { + List<AspectRowSummary> aspectRowSummaries = + rollbackRunResult.getRowsRolledBack().stream() + .map( + aspectRowSummary -> + objectMapper.convertValue(aspectRowSummary.data(), AspectRowSummary.class)) + .collect(Collectors.toList()); return RollbackRunResultDto.builder() .rowsRolledBack(aspectRowSummaries) - .rowsDeletedFromEntityDeletion(rollbackRunResult.getRowsDeletedFromEntityDeletion()).build(); + .rowsDeletedFromEntityDeletion(rollbackRunResult.getRowsDeletedFromEntityDeletion()) + .build(); } public static UpsertAspectRequest createStatusRemoval(Urn urn, EntityService entityService) { - com.linkedin.metadata.models.EntitySpec entitySpec = entityService.getEntityRegistry().getEntitySpec(urn.getEntityType()); + com.linkedin.metadata.models.EntitySpec entitySpec = + entityService.getEntityRegistry().getEntitySpec(urn.getEntityType()); if (entitySpec == null || !entitySpec.getAspectSpecMap().containsKey(STATUS_ASPECT_NAME)) { - throw new IllegalArgumentException("Entity type is not valid for soft deletes: " + urn.getEntityType()); + throw new IllegalArgumentException( + "Entity type is not valid for soft deletes: " + urn.getEntityType()); } return UpsertAspectRequest.builder() .aspect(Status.builder().removed(true).build()) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java index 12f7652aff587..31577429df72d 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java @@ -2,9 +2,6 @@ import com.google.common.reflect.ClassPath; import com.linkedin.util.Pair; -import lombok.Builder; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -16,123 +13,132 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; +import lombok.Builder; +import lombok.extern.slf4j.Slf4j; @Slf4j @Builder public class ReflectionCache { - private static final ConcurrentHashMap<String, Method> METHOD_CACHE = new ConcurrentHashMap<>(); - private static final ConcurrentHashMap<String, Class<?>> CLASS_CACHE = new ConcurrentHashMap<>(); - - private final String basePackage; - private final Set<String> subPackages; - @Builder.Default // appropriate for lombok - private final Function<Class<?>, String> getBuilderName = clazz -> - String.join("", clazz.getSimpleName(), "$", clazz.getSimpleName(), 
"Builder"); - - public static class ReflectionCacheBuilder { - public ReflectionCacheBuilder basePackage(String basePackage) { - return basePackage(basePackage, Set.of()); - } - - public ReflectionCacheBuilder basePackage(String basePackage, Set<String> packageExclusions) { - this.basePackage = basePackage; - return subPackages(findSubPackages(basePackage, Optional.ofNullable(packageExclusions).orElse(Set.of()))); - } - - private ReflectionCacheBuilder subPackages(Set<String> subPackages) { - this.subPackages = subPackages; - return this; - } - - private Set<String> findSubPackages(String packageName, Set<String> exclusions) { - try { - return ClassPath.from(getClass().getClassLoader()) - .getAllClasses() - .stream() - .filter(clazz -> exclusions.stream().noneMatch(excl -> clazz.getPackageName().startsWith(excl)) - && !clazz.getName().contains("$") && clazz.getName().startsWith(packageName)) - .map(ClassPath.ClassInfo::getPackageName) - .collect(Collectors.toSet()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - - public Method lookupMethod(Class<?> clazz, String method, Class<?>... parameters) { - if (clazz == null) { - return null; - } else { - return METHOD_CACHE.computeIfAbsent( - String.join("_", clazz.getName(), method), - key -> { - try { - log.debug("Lookup: " + clazz.getName() + " Method: " + method + " Parameters: " + Arrays.toString(parameters)); - return clazz.getDeclaredMethod(method, parameters); - } catch (NoSuchMethodException e) { - return null; - } - } - ); - } - } - - public Class<?> lookupClass(String className, boolean searchSubclass) { - if (!searchSubclass) { - return lookupClass(className); - } else { - List<String> subclasses = new LinkedList<>(); - subclasses.add(basePackage); - if (subPackages != null) { - subclasses.addAll(subPackages); - } - - for (String packageName : subclasses) { - try { - return cachedClassLookup(packageName, className); - } catch (Exception e) { - log.debug("Class not found {}.{} ... 
continuing search", packageName, className); - } - } - } - throw new ClassCastException(String.format("Could not locate %s in package %s", className, basePackage)); + private static final ConcurrentHashMap<String, Method> METHOD_CACHE = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap<String, Class<?>> CLASS_CACHE = new ConcurrentHashMap<>(); + + private final String basePackage; + private final Set<String> subPackages; + @Builder.Default // appropriate for lombok + private final Function<Class<?>, String> getBuilderName = + clazz -> String.join("", clazz.getSimpleName(), "$", clazz.getSimpleName(), "Builder"); + + public static class ReflectionCacheBuilder { + public ReflectionCacheBuilder basePackage(String basePackage) { + return basePackage(basePackage, Set.of()); } - public Class<?> lookupClass(String className) { - return cachedClassLookup(basePackage, className); + public ReflectionCacheBuilder basePackage(String basePackage, Set<String> packageExclusions) { + this.basePackage = basePackage; + return subPackages( + findSubPackages(basePackage, Optional.ofNullable(packageExclusions).orElse(Set.of()))); } - private Class<?> cachedClassLookup(String packageName, String className) { - return CLASS_CACHE.computeIfAbsent( - String.format("%s.%s", packageName, className), - key -> { - try { - log.debug("Lookup: " + key); - return Class.forName(key); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - ); + private ReflectionCacheBuilder subPackages(Set<String> subPackages) { + this.subPackages = subPackages; + return this; } - /** - * Get builder instance and class - */ - public Pair<Class<?>, Object> getBuilder(Class<?> toClazz) throws InvocationTargetException, IllegalAccessException { - Class<?> toClazzBuilder = lookupClass(getBuilderName.apply(toClazz)); - return Pair.of(toClazzBuilder, lookupMethod(toClazz, "builder").invoke(null)); + private Set<String> findSubPackages(String packageName, Set<String> exclusions) { + try { + return ClassPath.from(getClass().getClassLoader()).getAllClasses().stream() + .filter( + clazz -> + exclusions.stream().noneMatch(excl -> clazz.getPackageName().startsWith(excl)) + && !clazz.getName().contains("$") + && clazz.getName().startsWith(packageName)) + .map(ClassPath.ClassInfo::getPackageName) + .collect(Collectors.toSet()); + } catch (IOException e) { + throw new RuntimeException(e); + } } - - public Method lookupMethod(Pair<Class<?>, Object> builderPair, String method, Class<?>... parameters) { - return lookupMethod(builderPair.getFirst(), method, parameters); - } - - public static String toLowerFirst(String s) { - return s.substring(0, 1).toLowerCase() + s.substring(1); + } + + public Method lookupMethod(Class<?> clazz, String method, Class<?>... 
parameters) { + if (clazz == null) { + return null; + } else { + return METHOD_CACHE.computeIfAbsent( + String.join("_", clazz.getName(), method), + key -> { + try { + log.debug( + "Lookup: " + + clazz.getName() + + " Method: " + + method + + " Parameters: " + + Arrays.toString(parameters)); + return clazz.getDeclaredMethod(method, parameters); + } catch (NoSuchMethodException e) { + return null; + } + }); } - - public static String toUpperFirst(String s) { - return s.substring(0, 1).toUpperCase() + s.substring(1); + } + + public Class<?> lookupClass(String className, boolean searchSubclass) { + if (!searchSubclass) { + return lookupClass(className); + } else { + List<String> subclasses = new LinkedList<>(); + subclasses.add(basePackage); + if (subPackages != null) { + subclasses.addAll(subPackages); + } + + for (String packageName : subclasses) { + try { + return cachedClassLookup(packageName, className); + } catch (Exception e) { + log.debug("Class not found {}.{} ... continuing search", packageName, className); + } + } } + throw new ClassCastException( + String.format("Could not locate %s in package %s", className, basePackage)); + } + + public Class<?> lookupClass(String className) { + return cachedClassLookup(basePackage, className); + } + + private Class<?> cachedClassLookup(String packageName, String className) { + return CLASS_CACHE.computeIfAbsent( + String.format("%s.%s", packageName, className), + key -> { + try { + log.debug("Lookup: " + key); + return Class.forName(key); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + }); + } + + /** Get builder instance and class */ + public Pair<Class<?>, Object> getBuilder(Class<?> toClazz) + throws InvocationTargetException, IllegalAccessException { + Class<?> toClazzBuilder = lookupClass(getBuilderName.apply(toClazz)); + return Pair.of(toClazzBuilder, lookupMethod(toClazz, "builder").invoke(null)); + } + + public Method lookupMethod( + Pair<Class<?>, Object> builderPair, String method, Class<?>... 
parameters) { + return lookupMethod(builderPair.getFirst(), method, parameters); + } + + public static String toLowerFirst(String s) { + return s.substring(0, 1).toLowerCase() + s.substring(1); + } + + public static String toUpperFirst(String s) { + return s.substring(0, 1).toUpperCase() + s.substring(1); + } } diff --git a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java index 6c2ec108fe493..06640ba13fb8b 100644 --- a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java +++ b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java @@ -1,13 +1,17 @@ package entities; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.AuthorizerChain; -import com.linkedin.metadata.config.PreProcessHooks; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.UpdateAspectResult; import com.linkedin.metadata.event.EventProducer; @@ -32,13 +36,12 @@ import io.datahubproject.openapi.generated.SubTypes; import io.datahubproject.openapi.generated.TagAssociation; import io.datahubproject.openapi.generated.ViewProperties; +import io.ebean.Transaction; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.function.Function; - -import io.ebean.Transaction; import mock.MockEntityRegistry; import mock.MockEntityService; import org.mockito.ArgumentMatchers; @@ -46,15 +49,11 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - - public class EntitiesControllerTest { public static final String S = "somerandomstring"; - public static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; + public static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; public static final String CORPUSER_URN = "urn:li:corpuser:datahub"; public static final String GLOSSARY_TERM_URN = "urn:li:glossaryTerm:SavingAccount"; public static final String DATA_PLATFORM_URN = "urn:li:dataPlatform:platform"; @@ -62,25 +61,38 @@ public class EntitiesControllerTest { @BeforeMethod public void setup() - throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException { + throws NoSuchMethodException, + InvocationTargetException, + InstantiationException, + IllegalAccessException { EntityRegistry mockEntityRegistry = new MockEntityRegistry(); AspectDao aspectDao = Mockito.mock(AspectDao.class); - Mockito.when(aspectDao.runInTransactionWithRetry( - ArgumentMatchers.<Function<Transaction, UpdateAspectResult>>any(), any(), anyInt())).thenAnswer(i -> - ((Function<Transaction, UpdateAspectResult>) i.getArgument(0)).apply(Mockito.mock(Transaction.class)) - ); + Mockito.when( + aspectDao.runInTransactionWithRetry( + 
ArgumentMatchers.<Function<Transaction, UpdateAspectResult>>any(), any(), anyInt())) + .thenAnswer( + i -> + ((Function<Transaction, UpdateAspectResult>) i.getArgument(0)) + .apply(Mockito.mock(Transaction.class))); EventProducer mockEntityEventProducer = Mockito.mock(EventProducer.class); UpdateIndicesService mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - MockEntityService mockEntityService = new MockEntityService(aspectDao, mockEntityEventProducer, mockEntityRegistry, - mockUpdateIndicesService, preProcessHooks); + MockEntityService mockEntityService = + new MockEntityService( + aspectDao, + mockEntityEventProducer, + mockEntityRegistry, + mockUpdateIndicesService, + preProcessHooks); AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - _entitiesController = new EntitiesController(mockEntityService, new ObjectMapper(), authorizerChain); + _entitiesController = + new EntitiesController(mockEntityService, new ObjectMapper(), authorizerChain); Authentication authentication = Mockito.mock(Authentication.class); when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); + when(authorizerChain.authorize(any())) + .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); AuthenticationContext.setAuthentication(authentication); } @@ -89,98 +101,130 @@ public void setup() @Test public void testIngestDataset() { List<UpsertAspectRequest> datasetAspects = new ArrayList<>(); - UpsertAspectRequest viewProperties = UpsertAspectRequest.builder() - .aspect(ViewProperties.builder() - .viewLogic(S) - .viewLanguage(S) - .materialized(true).build()) - .entityType(DATASET_ENTITY_NAME) - .entityUrn(DATASET_URN) - .build(); + UpsertAspectRequest viewProperties = + UpsertAspectRequest.builder() + .aspect( + ViewProperties.builder().viewLogic(S).viewLanguage(S).materialized(true).build()) + .entityType(DATASET_ENTITY_NAME) + .entityUrn(DATASET_URN) + .build(); datasetAspects.add(viewProperties); - UpsertAspectRequest subTypes = UpsertAspectRequest.builder() - .aspect(SubTypes.builder() - .typeNames(Collections.singletonList(S)).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest subTypes = + UpsertAspectRequest.builder() + .aspect(SubTypes.builder().typeNames(Collections.singletonList(S)).build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(subTypes); - UpsertAspectRequest datasetProfile = UpsertAspectRequest.builder() - .aspect(DatasetProfile.builder().build().timestampMillis(0L).addFieldProfilesItem( - DatasetFieldProfile.builder() - .fieldPath(S) - .histogram(Histogram.builder() - .boundaries(Collections.singletonList(S)).build()).build() - ) - ) + UpsertAspectRequest datasetProfile = + UpsertAspectRequest.builder() + .aspect( + DatasetProfile.builder() + .build() + .timestampMillis(0L) + .addFieldProfilesItem( + DatasetFieldProfile.builder() + .fieldPath(S) + .histogram( + Histogram.builder() + .boundaries(Collections.singletonList(S)) + .build()) + .build())) .entityType(DATASET_ENTITY_NAME) - 
.entityKeyAspect(DatasetKey.builder() + .entityKeyAspect( + DatasetKey.builder() .name("name") .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) + .origin(FabricType.PROD) + .build()) .build(); datasetAspects.add(datasetProfile); - UpsertAspectRequest schemaMetadata = UpsertAspectRequest.builder() - .aspect(SchemaMetadata.builder() - .schemaName(S) - .dataset(DATASET_URN) - .platform(DATA_PLATFORM_URN) - .hash(S) - .version(0L) - .platformSchema(MySqlDDL.builder().tableSchema(S).build()) - .fields(Collections.singletonList(SchemaField.builder() - .fieldPath(S) - .nativeDataType(S) - .type(SchemaFieldDataType.builder().type(StringType.builder().build()).build()) - .description(S) - .globalTags(GlobalTags.builder() - .tags(Collections.singletonList(TagAssociation.builder() - .tag(TAG_URN).build())).build()) - .glossaryTerms(GlossaryTerms.builder() - .terms(Collections.singletonList(GlossaryTermAssociation.builder() - .urn(GLOSSARY_TERM_URN).build())) - .auditStamp(AuditStamp.builder() - .time(0L) - .actor(CORPUSER_URN).build()).build()).build() - ) - ).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest schemaMetadata = + UpsertAspectRequest.builder() + .aspect( + SchemaMetadata.builder() + .schemaName(S) + .dataset(DATASET_URN) + .platform(DATA_PLATFORM_URN) + .hash(S) + .version(0L) + .platformSchema(MySqlDDL.builder().tableSchema(S).build()) + .fields( + Collections.singletonList( + SchemaField.builder() + .fieldPath(S) + .nativeDataType(S) + .type( + SchemaFieldDataType.builder() + .type(StringType.builder().build()) + .build()) + .description(S) + .globalTags( + GlobalTags.builder() + .tags( + Collections.singletonList( + TagAssociation.builder().tag(TAG_URN).build())) + .build()) + .glossaryTerms( + GlossaryTerms.builder() + .terms( + Collections.singletonList( + GlossaryTermAssociation.builder() + .urn(GLOSSARY_TERM_URN) + .build())) + .auditStamp( + AuditStamp.builder() + .time(0L) + .actor(CORPUSER_URN) + .build()) + .build()) + .build())) + .build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(schemaMetadata); - UpsertAspectRequest glossaryTerms = UpsertAspectRequest.builder() - .aspect(GlossaryTerms.builder() - .terms(Collections.singletonList(GlossaryTermAssociation.builder() - .urn(GLOSSARY_TERM_URN).build())) - .auditStamp(AuditStamp.builder() - .time(0L) - .actor(CORPUSER_URN).build()).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest glossaryTerms = + UpsertAspectRequest.builder() + .aspect( + GlossaryTerms.builder() + .terms( + Collections.singletonList( + GlossaryTermAssociation.builder().urn(GLOSSARY_TERM_URN).build())) + .auditStamp(AuditStamp.builder().time(0L).actor(CORPUSER_URN).build()) + .build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(glossaryTerms); _entitiesController.postEntities(datasetAspects); } -// @Test -// public void testGetDataset() { -// _entitiesController.getEntities(new String[] {DATASET_URN}, -// new String[] { -// 
SCHEMA_METADATA_ASPECT_NAME -// }); -// } + // @Test + // public void testGetDataset() { + // _entitiesController.getEntities(new String[] {DATASET_URN}, + // new String[] { + // SCHEMA_METADATA_ASPECT_NAME + // }); + // } } diff --git a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java index 852b6cfcb4b22..91e9e4fd4671e 100644 --- a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java +++ b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java @@ -1,6 +1,7 @@ package mock; -import com.linkedin.metadata.config.PreProcessHooks; +import static entities.EntitiesControllerTest.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -20,6 +21,7 @@ import com.linkedin.entity.AspectType; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.aspect.VersionedAspect; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.ListResult; @@ -49,22 +51,25 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; -import static entities.EntitiesControllerTest.*; - - public class MockEntityService extends EntityServiceImpl { - public MockEntityService(@Nonnull AspectDao aspectDao, @Nonnull EventProducer producer, @Nonnull EntityRegistry entityRegistry, @Nonnull - UpdateIndicesService updateIndicesService, PreProcessHooks preProcessHooks) { + public MockEntityService( + @Nonnull AspectDao aspectDao, + @Nonnull EventProducer producer, + @Nonnull EntityRegistry entityRegistry, + @Nonnull UpdateIndicesService updateIndicesService, + PreProcessHooks preProcessHooks) { super(aspectDao, producer, entityRegistry, true, updateIndicesService, preProcessHooks); } @Override - public Map<Urn, List<RecordTemplate>> getLatestAspects(@Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) { + public Map<Urn, List<RecordTemplate>> getLatestAspects( + @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) { return null; } @Override - public Map<String, RecordTemplate> getLatestAspectsForUrn(@Nonnull Urn urn, @Nonnull Set<String> aspectNames) { + public Map<String, RecordTemplate> getLatestAspectsForUrn( + @Nonnull Urn urn, @Nonnull Set<String> aspectNames) { return Collections.emptyMap(); } @@ -74,42 +79,58 @@ public RecordTemplate getAspect(@Nonnull Urn urn, @Nonnull String aspectName, lo } @Override - public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects(@Nonnull String entityName, @Nonnull Set<Urn> urns, - @Nonnull Set<String> aspectNames) throws URISyntaxException { + public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects( + @Nonnull String entityName, @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws URISyntaxException { Urn urn = UrnUtils.getUrn(DATASET_URN); Map<Urn, List<EnvelopedAspect>> envelopedAspectMap = new HashMap<>(); List<EnvelopedAspect> aspects = new ArrayList<>(); EnvelopedAspect schemaMetadata = new EnvelopedAspect(); SchemaMetadata pegasusSchemaMetadata = new SchemaMetadata(); - pegasusSchemaMetadata.setDataset(DatasetUrn.createFromUrn(UrnUtils.getUrn(DATASET_URN))) + pegasusSchemaMetadata + .setDataset(DatasetUrn.createFromUrn(UrnUtils.getUrn(DATASET_URN))) .setVersion(0L) - .setCreated(new 
AuditStamp().setActor(UrnUtils.getUrn(CORPUSER_URN)).setTime(System.currentTimeMillis())) + .setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(CORPUSER_URN)) + .setTime(System.currentTimeMillis())) .setHash(S) .setCluster(S) .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema(S))) - .setForeignKeys(new ForeignKeyConstraintArray(Collections.singletonList( - new ForeignKeyConstraint() - .setForeignDataset(urn) - .setName(S) - .setForeignFields(new UrnArray(Collections.singletonList(urn)))))) - .setFields(new SchemaFieldArray(Collections.singletonList( - new SchemaField() - .setDescription(S) - .setFieldPath(S) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags( - new GlobalTags() - .setTags(new TagAssociationArray(Collections.singletonList( - new TagAssociation().setTag(TagUrn.createFromUrn(UrnUtils.getUrn(TAG_URN))) - )))) - .setGlossaryTerms(new GlossaryTerms().setTerms( - new GlossaryTermAssociationArray(Collections.singletonList( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(UrnUtils.getUrn(GLOSSARY_TERM_URN))) - ))) - ) - )) - ); + .setForeignKeys( + new ForeignKeyConstraintArray( + Collections.singletonList( + new ForeignKeyConstraint() + .setForeignDataset(urn) + .setName(S) + .setForeignFields(new UrnArray(Collections.singletonList(urn)))))) + .setFields( + new SchemaFieldArray( + Collections.singletonList( + new SchemaField() + .setDescription(S) + .setFieldPath(S) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + Collections.singletonList( + new TagAssociation() + .setTag( + TagUrn.createFromUrn( + UrnUtils.getUrn(TAG_URN))))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + Collections.singletonList( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + UrnUtils.getUrn(GLOSSARY_TERM_URN)))))))))); schemaMetadata .setType(AspectType.VERSIONED) .setName("schemaMetadata") @@ -120,29 +141,31 @@ public Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects(@Nonnull String } @Override - public Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects(@Nonnull Set<VersionedUrn> versionedUrns, - @Nonnull Set<String> aspectNames) throws URISyntaxException { + public Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects( + @Nonnull Set<VersionedUrn> versionedUrns, @Nonnull Set<String> aspectNames) + throws URISyntaxException { return null; } @Override - public EnvelopedAspect getLatestEnvelopedAspect(@Nonnull String entityName, @Nonnull Urn urn, - @Nonnull String aspectName) throws Exception { + public EnvelopedAspect getLatestEnvelopedAspect( + @Nonnull String entityName, @Nonnull Urn urn, @Nonnull String aspectName) throws Exception { return null; } @Override - public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version) { + public VersionedAspect getVersionedAspect( + @Nonnull Urn urn, @Nonnull String aspectName, long version) { return null; } @Override - public ListResult<RecordTemplate> listLatestAspects(@Nonnull String entityName, @Nonnull String aspectName, int start, - int count) { + public ListResult<RecordTemplate> listLatestAspects( + @Nonnull String entityName, @Nonnull String aspectName, int start, int count) { return null; } -/* @Nonnull + /* @Nonnull @Override protected UpdateAspectResult 
ingestAspectToLocalDB(@Nonnull Urn urn, @Nonnull String aspectName,
      @Nonnull Function<Optional<RecordTemplate>, RecordTemplate> updateLambda, @Nonnull AuditStamp auditStamp,
@@ -161,8 +184,12 @@ protected List<Pair<String, UpdateAspectResult>> ingestAspectsToLocalDB(@Nonnull

   @Nullable
   @Override
-  public RecordTemplate ingestAspectIfNotPresent(@NotNull Urn urn, @NotNull String aspectName,
-      @NotNull RecordTemplate newValue, @NotNull AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata) {
+  public RecordTemplate ingestAspectIfNotPresent(
+      @NotNull Urn urn,
+      @NotNull String aspectName,
+      @NotNull RecordTemplate newValue,
+      @NotNull AuditStamp auditStamp,
+      @Nullable SystemMetadata systemMetadata) {
     return null;
   }

@@ -172,13 +199,11 @@ public ListUrnsResult listUrns(@Nonnull String entityName, int start, int count)
   }

   @Override
-  public void setWritable(boolean canWrite) {
-
-  }
+  public void setWritable(boolean canWrite) {}

   @Override
-  public RollbackRunResult rollbackWithConditions(List<AspectRowSummary> aspectRows, Map<String, String> conditions,
-      boolean hardDelete) {
+  public RollbackRunResult rollbackWithConditions(
+      List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete) {
     return null;
   }

diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java
index e632aa7eadff0..17163b937f417 100644
--- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java
+++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java
@@ -4,32 +4,29 @@
 import java.util.Optional;
 import lombok.Data;
 
-
 /**
  * POJO for YAML section presents in config.yml at location plugins[].params.
  *
- * These parameters are same for Authenticator and Authorizer plugins.
+ * <p>These parameters are the same for Authenticator and Authorizer plugins.
  *
- * {@link com.datahub.plugins.auth.provider.AuthPluginConfigProvider} uses this AuthParam to create instance of
- * either {@link AuthenticatorPluginConfig} or {@link AuthorizerPluginConfig}
+ * <p>{@link com.datahub.plugins.auth.provider.AuthPluginConfigProvider} uses this AuthParam to
+ * create an instance of either {@link AuthenticatorPluginConfig} or {@link AuthorizerPluginConfig}
  */
 @Data
 public class AuthParam {
-  /**
-   * Fully-qualified class-name of plugin
-   */
+  /** Fully-qualified class-name of plugin */
   private String className;
 
   /**
-   * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default value then set
-   * this property.
+   * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default
+   * value then set this property.
    */
   private Optional<String> jarFileName = Optional.empty();
 
   /**
-   * These configs are specific to plugin. GMS pass this map as is to plugin
-   * {@link com.datahub.plugins.auth.authentication.Authenticator} or
-   * {@link com.datahub.plugins.auth.authorization.Authorizer} init method
+   * These configs are specific to the plugin. 
GMS passes this map as-is to the plugin {@link
+   * com.datahub.plugins.auth.authentication.Authenticator} or {@link
+   * com.datahub.plugins.auth.authorization.Authorizer} init method
    */
   private Optional<Map<String, Object>> configs = Optional.empty();
 }
diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java
index b4546d9f5af16..8bc06c73a9439 100644
--- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java
+++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java
@@ -9,16 +9,19 @@
 import lombok.EqualsAndHashCode;
 import lombok.NoArgsConstructor;
 
-
-/**
- * Superclass for {@link AuthenticatorPluginConfig} and {@link AuthorizerPluginConfig}
- */
+/** Superclass for {@link AuthenticatorPluginConfig} and {@link AuthorizerPluginConfig} */
 @Data
 @NoArgsConstructor
 @EqualsAndHashCode(callSuper = false)
 public class AuthPluginConfig extends PluginConfig {
 
-  public AuthPluginConfig(PluginType type, String name, Boolean enabled, String className, Path pluginHomeDirectory,
-      Path pluginJarPath, Optional<Map<String, Object>> configs) {
+  public AuthPluginConfig(
+      PluginType type,
+      String name,
+      Boolean enabled,
+      String className,
+      Path pluginHomeDirectory,
+      Path pluginJarPath,
+      Optional<Map<String, Object>> configs) {
     super(type, name, enabled, className, pluginHomeDirectory, pluginJarPath, configs);
   }
 }
diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java
index 276faed56f7ab..b10a178caa9fa 100644
--- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java
+++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java
@@ -8,16 +8,20 @@
 import lombok.EqualsAndHashCode;
 import lombok.NoArgsConstructor;
 
-
 /**
- * Authenticator plugin configuration provided by user.
- * {@link com.datahub.plugins.auth.provider.AuthenticatorPluginConfigProvider} instantiate this class
+ * Authenticator plugin configuration provided by user. 
{@link
+ * com.datahub.plugins.auth.provider.AuthenticatorPluginConfigProvider} instantiates this class
  */
 @Data
 @NoArgsConstructor
 @EqualsAndHashCode(callSuper = false)
 public class AuthenticatorPluginConfig extends AuthPluginConfig {
 
-  public AuthenticatorPluginConfig(String name, Boolean enabled, String className, Path pluginDirectory, Path pluginJar,
+  public AuthenticatorPluginConfig(
+      String name,
+      Boolean enabled,
+      String className,
+      Path pluginDirectory,
+      Path pluginJar,
       Optional<Map<String, Object>> configs) {
     super(PluginType.AUTHENTICATOR, name, enabled, className, pluginDirectory, pluginJar, configs);
   }
diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java
index 1a4bd1ea07906..de8c3d7ecaaa4 100644
--- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java
+++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java
@@ -8,16 +8,20 @@
 import lombok.EqualsAndHashCode;
 import lombok.NoArgsConstructor;
 
-
 /**
- * Authorizer plugin configuration provided by user.
- * {@link com.datahub.plugins.auth.provider.AuthorizerPluginConfigProvider} instantiate this class
+ * Authorizer plugin configuration provided by user. {@link
+ * com.datahub.plugins.auth.provider.AuthorizerPluginConfigProvider} instantiates this class
  */
 @Data
 @NoArgsConstructor
 @EqualsAndHashCode(callSuper = false)
 public class AuthorizerPluginConfig extends AuthPluginConfig {
 
-  public AuthorizerPluginConfig(String name, Boolean enabled, String className, Path pluginDirectory, Path pluginJar,
+  public AuthorizerPluginConfig(
+      String name,
+      Boolean enabled,
+      String className,
+      Path pluginDirectory,
+      Path pluginJar,
       Optional<Map<String, Object>> configs) {
     super(PluginType.AUTHORIZER, name, enabled, className, pluginDirectory, pluginJar, configs);
   }
diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java
index b970258aa3ea0..4e62d03620f7f 100644
--- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java
+++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java
@@ -11,9 +11,9 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-
 /**
- * Base class for {@link AuthenticatorPluginConfigProvider} and {@link AuthorizerPluginConfigProvider}.
+ * Base class for {@link AuthenticatorPluginConfigProvider} and {@link
+ * AuthorizerPluginConfigProvider}. 
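+ * Subclasses report their {@link PluginType} and build the concrete plugin configuration; the
+ * jar-path resolution shared by both (formPluginJar) is implemented once in this base class.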
 */
public abstract class AuthPluginConfigProvider implements PluginConfigProvider<AuthPluginConfig> {
  public abstract PluginType getType();
diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java
index 546cee04d05a0..71563e79ef787 100644
--- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java
+++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java
@@ -9,10 +9,9 @@
 import java.nio.file.Path;
 import javax.annotation.Nonnull;
 
-
 /**
- * Responsible for creating {@link AuthenticatorPluginConfig} instance.
- * This provider is register in {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authenticator
+ * Responsible for creating {@link AuthenticatorPluginConfig} instance. This provider is registered in
+ * {@link com.datahub.plugins.factory.PluginConfigFactory} as the provider of Authenticator
  * configuration
  */
 public class AuthenticatorPluginConfigProvider extends AuthPluginConfigProvider {
@@ -24,13 +23,19 @@ public PluginType getType() {
   @Override
   public AuthPluginConfig createAuthPluginConfig(@Nonnull PluginConfig pluginConfig) {
     // Map Yaml section present in config.yml at plugins[].params to AuthParam
-    AuthParam authParam = (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class);
-    // Make the pluginJar file path either from name of plugin or explicitly from plugins[].params.jarFileName
+    AuthParam authParam =
+        (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class);
+    // Make the pluginJar file path either from the plugin name or explicitly from
+    // plugins[].params.jarFileName
     // This logic is common for authenticator and authorizer plugin and hence define in superclass
     Path pluginJar = formPluginJar(pluginConfig, authParam);
-    return new AuthenticatorPluginConfig(pluginConfig.getName(), pluginConfig.getEnabled(), authParam.getClassName(),
-        pluginConfig.getPluginHomeDirectory(), pluginJar, authParam.getConfigs());
+    return new AuthenticatorPluginConfig(
+        pluginConfig.getName(),
+        pluginConfig.getEnabled(),
+        authParam.getClassName(),
+        pluginConfig.getPluginHomeDirectory(),
+        pluginJar,
+        authParam.getConfigs());
   }
 }
-
diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java
index 397dc3fd93b36..7899f55523595 100644
--- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java
+++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java
@@ -9,11 +9,9 @@
 import java.nio.file.Path;
 import javax.annotation.Nonnull;
 
-
 /**
- * Responsible for creating {@link AuthorizerPluginConfig} instance.
- * This provider is register in {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authorizer
- * configuration
+ * Responsible for creating {@link AuthorizerPluginConfig} instance. 
This provider is register in + * {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authorizer configuration */ public class AuthorizerPluginConfigProvider extends AuthPluginConfigProvider { @Override @@ -24,13 +22,20 @@ public PluginType getType() { @Override public AuthPluginConfig createAuthPluginConfig(@Nonnull PluginConfig pluginConfig) { // Map Yaml section present in config.yml at plugins[].params to AuthParam - AuthParam authParam = (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class); + AuthParam authParam = + (new YamlMapper<AuthParam>()).fromMap(pluginConfig.getParams(), AuthParam.class); - // Make the pluginJar file path either from name of plugin or explicitly from plugins[].params.jarFileName + // Make the pluginJar file path either from name of plugin or explicitly from + // plugins[].params.jarFileName // This logic is common for authenticator and authorizer plugin and hence define in superclass Path pluginJar = formPluginJar(pluginConfig, authParam); - return new AuthorizerPluginConfig(pluginConfig.getName(), pluginConfig.getEnabled(), authParam.getClassName(), - pluginConfig.getPluginHomeDirectory(), pluginJar, authParam.getConfigs()); + return new AuthorizerPluginConfig( + pluginConfig.getName(), + pluginConfig.getEnabled(), + authParam.getClassName(), + pluginConfig.getPluginHomeDirectory(), + pluginJar, + authParam.getConfigs()); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java index c4dc94b7c73d5..ba15fea2ccd50 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java @@ -7,15 +7,10 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.StringUtils; - -/** - * Common validations. - * Used in {@link com.datahub.plugins.configuration.PluginConfig} - */ +/** Common validations. Used in {@link com.datahub.plugins.configuration.PluginConfig} */ public class ConfigValidationUtils { - private ConfigValidationUtils() { - } + private ConfigValidationUtils() {} public static void whiteSpacesValidation(@Nonnull String fieldName, @Nonnull String value) throws IllegalArgumentException { @@ -25,7 +20,8 @@ public static void whiteSpacesValidation(@Nonnull String fieldName, @Nonnull Str } } - public static void mapShouldNotBeEmpty(@Nonnull String fieldName, @Nonnull Map<String, Object> attributeMap) + public static void mapShouldNotBeEmpty( + @Nonnull String fieldName, @Nonnull Map<String, Object> attributeMap) throws IllegalArgumentException { if (attributeMap.isEmpty()) { throw new IllegalArgumentException(String.format("%s should not be empty", fieldName)); @@ -39,15 +35,18 @@ public static void listShouldNotBeEmpty(@Nonnull String fieldName, @Nonnull List } } - public static void listShouldNotHaveDuplicate(@Nonnull String fieldName, @Nonnull List<String> list) { + public static void listShouldNotHaveDuplicate( + @Nonnull String fieldName, @Nonnull List<String> list) { Set<String> set = new HashSet<>(); - list.forEach((input) -> { - if (set.contains(input)) { - throw new IllegalArgumentException( - String.format("Duplicate entry of %s is found in %s. 
%s should not contain duplicate", input, fieldName, - fieldName)); - } - set.add(input); - }); + list.forEach( + (input) -> { + if (set.contains(input)) { + throw new IllegalArgumentException( + String.format( + "Duplicate entry of %s is found in %s. %s should not contain duplicate", + input, fieldName, fieldName)); + } + set.add(input); + }); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java index 02b3b4566d705..dfc26041ee627 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java @@ -7,52 +7,43 @@ import lombok.Data; import lombok.NoArgsConstructor; - -/** - * Flat form of plugin configuration configured in config.yaml at plugins[] and plugins[].params - */ +/** Flat form of plugin configuration configured in config.yaml at plugins[] and plugins[].params */ @Data @NoArgsConstructor @AllArgsConstructor public class PluginConfig { - /** - * Type of plugin. Supported types are {@link PluginType} - */ + /** Type of plugin. Supported types are {@link PluginType} */ private PluginType type; - /** - * name of the plugin. It should be unique in plugins[] list - */ + /** name of the plugin. It should be unique in plugins[] list */ private String name; /** - * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take authentication/authorization - * decisions. + * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take + * authentication/authorization decisions. */ private Boolean enabled; - /** - * Fully-qualified class-name of plugin - */ + /** Fully-qualified class-name of plugin */ private String className; /** - * It is always set to <plugin-base-directory>/<plugin-name>. - * For example if plugin-name is ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then - * pluginDirectory would be /etc/datahub/plugins/auth/ranger-authorizer + * It is always set to <plugin-base-directory>/<plugin-name>. For example if plugin-name is + * ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then pluginDirectory + * would be /etc/datahub/plugins/auth/ranger-authorizer */ private Path pluginHomeDirectory; /** - * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default value then set - * this property. + * Default jarFileName is "<plugin-name>.jar". If plugin's jar file name is different from default + * value then set this property. */ private Path pluginJarPath; /** - * These configs are specific to plugin. GMS pass this map as is to plugin - * {@link com.datahub.plugins.auth.authentication.Authenticator} or - * {@link com.datahub.plugins.auth.authorization.Authorizer} init method + * These configs are specific to plugin. 
GMS pass this map as is to plugin {@link + * com.datahub.plugins.auth.authentication.Authenticator} or {@link + * com.datahub.plugins.auth.authorization.Authorizer} init method */ private Optional<Map<String, Object>> configs; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java index b1b0844f428b7..b068a009528d3 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java @@ -2,7 +2,6 @@ import java.util.List; - public interface PluginConfigProvider<T extends PluginConfig> { List<T> processConfig(List<com.datahub.plugins.configuration.PluginConfig> pluginConfigConfigs); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java index 0a46be21155b6..713f5683a82a1 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java @@ -3,13 +3,12 @@ import java.nio.file.Path; import java.security.ProtectionDomain; - -/** - * Implement this interface to create Java SecurityManager's ProtectionDomain for the plugin. - */ +/** Implement this interface to create Java SecurityManager's ProtectionDomain for the plugin. */ public interface PluginPermissionManager { /** - * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to the plugin code + * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to + * the plugin code + * * @param pluginHome * @return ProtectionDomain */ diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java index ed3bf0a4f4473..7db9b7d40276e 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java @@ -1,17 +1,11 @@ package com.datahub.plugins.common; -/** - * Supported plugin types - */ +/** Supported plugin types */ public enum PluginType { - /** - * PluginType for Authenticator plugin - */ + /** PluginType for Authenticator plugin */ AUTHENTICATOR, - /** - * PluginType for Authorizer plugin - */ + /** PluginType for Authorizer plugin */ AUTHORIZER; @Override diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java index 7ab0032b86497..3eb01659eb99f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java @@ -7,28 +7,22 @@ import java.security.Permissions; import java.util.function.Function; - -/** - * Supported security modes - */ +/** Supported security modes */ public enum SecurityMode { /** * In this mode plugins has limited access. * - * Plugins are allowed to connect on below ports only - * 1) port greater than 1024 - * 2) port 80 - * 3) port 443 - * All other ports connection are disallowed. 
+ * <p>Plugins are allowed to connect on below ports only 1) port greater than 1024 2) port 80 3) + * port 443 All other ports connection are disallowed. * - * Plugins are allowed to read and write files on PLUGIN_HOME directory only and all other read/write access are - * denied. + * <p>Plugins are allowed to read and write files on PLUGIN_HOME directory only and all other + * read/write access are denied. */ RESTRICTED(SecurityMode::restrictModePermissionSupplier), /** - * Plugins has full access. - * In this mode plugin can read/write to any directory, can connect to any port and can read environment variables. + * Plugins has full access. In this mode plugin can read/write to any directory, can connect to + * any port and can read environment variables. */ LENIENT(SecurityMode::lenientModePermissionSupplier); @@ -43,9 +37,12 @@ private static Permissions restrictModePermissionSupplier(Path sourceCodeDirecto permissions.add(new FilePermission(sourceCodeDirectory.toString() + "/*", "read,write,delete")); permissions.add( - new SocketPermission("*:1024-", "connect,resolve")); // Allow to connect access to all socket above 1024 - permissions.add(new SocketPermission("*:80", "connect,resolve")); // Allow to connect access to HTTP port - permissions.add(new SocketPermission("*:443", "connect,resolve")); // Allow to connect access to HTTPS port + new SocketPermission( + "*:1024-", "connect,resolve")); // Allow to connect access to all socket above 1024 + permissions.add( + new SocketPermission("*:80", "connect,resolve")); // Allow to connect access to HTTP port + permissions.add( + new SocketPermission("*:443", "connect,resolve")); // Allow to connect access to HTTPS port return permissions; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java index c4a79e9434923..309bbfb1b6485 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java @@ -9,16 +9,13 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * - * A mapper to map plugin configuration to java Pojo classes - */ +/** A mapper to map plugin configuration to java Pojo classes */ public class YamlMapper<T> { private final ObjectMapper objectMapper; public YamlMapper() { - this.objectMapper = YAMLMapper.builder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS).build(); + this.objectMapper = + YAMLMapper.builder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS).build(); objectMapper.registerModule(new Jdk8Module()); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java index 6cf1966787875..ff87176ebbd7f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java @@ -10,17 +10,13 @@ import lombok.Builder; import lombok.Getter; - -/** - * {@link Config} is getting loaded from /etc/datahub/plugins/auth/config.yaml - */ +/** {@link Config} is getting loaded from /etc/datahub/plugins/auth/config.yaml */ @Getter @Builder @JsonDeserialize(builder = Config.CustomBuilder.class) public class Config { - public static final String FIELD_PLUGINS = "plugins"; // for validation error messages - @Nonnull - private List<PluginConfig> 
plugins; + public static final String FIELD_PLUGINS = "plugins"; // for validation error messages + @Nonnull private List<PluginConfig> plugins; public static CustomBuilder builder() { return new CustomBuilder(); @@ -29,12 +25,14 @@ public static CustomBuilder builder() { @JsonPOJOBuilder(withPrefix = "") public static class CustomBuilder extends ConfigBuilder { public Config build() { - ConfigValidationUtils.listShouldNotBeEmpty(FIELD_PLUGINS, Collections.singletonList(super.plugins)); + ConfigValidationUtils.listShouldNotBeEmpty( + FIELD_PLUGINS, Collections.singletonList(super.plugins)); List<String> list = new ArrayList<>(super.plugins.size()); - super.plugins.forEach((pluginConfig) -> { - list.add(pluginConfig.getName()); - }); + super.plugins.forEach( + (pluginConfig) -> { + list.add(pluginConfig.getName()); + }); ConfigValidationUtils.listShouldNotHaveDuplicate(FIELD_PLUGINS, list); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java index ac2590209f4db..0c371263eea5f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java @@ -7,19 +7,19 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class ConfigProvider { public static final String CONFIG_FILE_NAME = "config.yml"; /** - * Yaml file path of plugin configuration file. Content of this file should match with {@link Config} + * Yaml file path of plugin configuration file. Content of this file should match with {@link + * Config} */ private final Path configFilePath; /** - * Directory where all plugins are mounted in DataHub GMS. - * Default pluginBaseDir is /etc/datahub/plugins/auth. + * Directory where all plugins are mounted in DataHub GMS. Default pluginBaseDir is + * /etc/datahub/plugins/auth. 
*/ private final Path pluginBaseDir; @@ -36,7 +36,8 @@ private void setPluginDir(@Nonnull PluginConfig pluginConfig) { public Optional<Config> load() { // Check config file should exist if (!this.configFilePath.toFile().exists()) { - log.warn("Configuration {} file not found at location {}", CONFIG_FILE_NAME, this.pluginBaseDir); + log.warn( + "Configuration {} file not found at location {}", CONFIG_FILE_NAME, this.pluginBaseDir); return Optional.empty(); } @@ -45,4 +46,4 @@ public Optional<Config> load() { config.getPlugins().forEach(this::setPluginDir); return Optional.of(config); } -} \ No newline at end of file +} diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java index faeeabbf955eb..5280f520109fd 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java @@ -1,7 +1,7 @@ package com.datahub.plugins.configuration; -import com.datahub.plugins.common.PluginType; import com.datahub.plugins.common.ConfigValidationUtils; +import com.datahub.plugins.common.PluginType; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; @@ -13,10 +13,7 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - -/** - * POJO to map YAML section present in config.yml at plugins[] - */ +/** POJO to map YAML section present in config.yml at plugins[] */ @Data @NoArgsConstructor @AllArgsConstructor @@ -24,35 +21,30 @@ @JsonDeserialize(builder = PluginConfig.CustomBuilder.class) @EqualsAndHashCode(onlyExplicitlyIncluded = true) public class PluginConfig { - /** - * name of the plugin. It should be unique in plugins[] list - */ - @EqualsAndHashCode.Include - private String name; // In list of plugin, the name should be unique + /** name of the plugin. It should be unique in plugins[] list */ + @EqualsAndHashCode.Include private String name; // In list of plugin, the name should be unique - /** - * Type of plugin. Supported types are {@link PluginType} - */ + /** Type of plugin. Supported types are {@link PluginType} */ private PluginType type; /** - * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take authentication/authorization - * decisions. + * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take + * authentication/authorization decisions. */ private Boolean enabled; /** - * Attributes in params should be as per POJO {@link com.datahub.plugins.auth.configuration.AuthParam} + * Attributes in params should be as per POJO {@link + * com.datahub.plugins.auth.configuration.AuthParam} */ private Map<String, Object> params; /** - * It is always set to <plugin-base-directory>/<plugin-name>. - * For example if plugin-name is ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then - * pluginDirectory would be /etc/datahub/plugins/auth/ranger-authorizer + * It is always set to <plugin-base-directory>/<plugin-name>. 
For example if plugin-name is + * ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then pluginDirectory + * would be /etc/datahub/plugins/auth/ranger-authorizer */ - @JsonIgnore - private Path pluginHomeDirectory; + @JsonIgnore private Path pluginHomeDirectory; public static CustomBuilder builder() { return new CustomBuilder(); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java index b0a59a1656c8d..80837b966ba58 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java @@ -11,12 +11,9 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * Create instance of config provider as per type mentioned in {@link Config} - */ +/** Create instance of config provider as per type mentioned in {@link Config} */ public class PluginConfigFactory { - private final static Map<PluginType, PluginConfigProvider> CONFIG_PROVIDER_REGISTRY; + private static final Map<PluginType, PluginConfigProvider> CONFIG_PROVIDER_REGISTRY; static { CONFIG_PROVIDER_REGISTRY = new HashMap<>(2); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java index 92a7cae0647c5..1529df3ede676 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java @@ -30,10 +30,9 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; - /** - * IsolatedClassLoader to load custom implementation of DataHub Plugins. - * Override methods behave as per Java ClassLoader documentation. + * IsolatedClassLoader to load custom implementation of DataHub Plugins. Override methods behave as + * per Java ClassLoader documentation. */ @Slf4j public class IsolatedClassLoader extends ClassLoader { @@ -50,22 +49,30 @@ public class IsolatedClassLoader extends ClassLoader { private final Path _executionDirectory; - public IsolatedClassLoader(@Nonnull PluginPermissionManager pluginPermissionManager, - @Nonnull PluginConfig pluginToLoad, @Nonnull ClassLoader... applicationClassLoaders) { + public IsolatedClassLoader( + @Nonnull PluginPermissionManager pluginPermissionManager, + @Nonnull PluginConfig pluginToLoad, + @Nonnull ClassLoader... applicationClassLoaders) { this._pluginPermissionManager = pluginPermissionManager; this._pluginConfig = pluginToLoad; this._classLoaders.add(this.getClass().getClassLoader()); // then application class-loader this._classLoaders.addAll(Arrays.asList(applicationClassLoaders)); // if any extra class loaders this._executionDirectory = - Paths.get("/tmp", pluginToLoad.getPluginHomeDirectory().toString(), EXECUTION_DIR); // to store .so files i.e. libraries + Paths.get( + "/tmp", + pluginToLoad.getPluginHomeDirectory().toString(), + EXECUTION_DIR); // to store .so files i.e. 
libraries try { this.createJarEntryMap(); } catch (IOException e) { - // This would occur if we don't have permission on directory and chances of this is close to zero, hence catching + // This would occur if we don't have permission on directory and chances of this is close to + // zero, hence catching // this checked exception and throwing runtime exception // to make caller code more readable - String message = String.format("Unable to load jar file %s for plugin %s", pluginToLoad.getPluginJarPath(), - pluginToLoad.getName()); + String message = + String.format( + "Unable to load jar file %s for plugin %s", + pluginToLoad.getPluginJarPath(), pluginToLoad.getName()); throw new RuntimeException(message, e); } } @@ -85,15 +92,18 @@ private void createJarEntryMap() throws IOException { } /** - * Load plugin class from jar given in pluginToLoad parameter and return instance of class which implements Plugin - * interface. - * This method verifies whether loaded plugin is assignable to expectedInstanceOf class + * Load plugin class from jar given in pluginToLoad parameter and return instance of class which + * implements Plugin interface. This method verifies whether loaded plugin is assignable to + * expectedInstanceOf class + * * @param expectedInstanceOf class instance of interface caller is expecting * @return Instance of Plugin - * @throws ClassNotFoundException className parameter available in Plugin configuration is not found + * @throws ClassNotFoundException className parameter available in Plugin configuration is not + * found */ @Nonnull - public Plugin instantiatePlugin(@Nonnull Class<? extends Plugin> expectedInstanceOf) throws ClassNotFoundException { + public Plugin instantiatePlugin(@Nonnull Class<? extends Plugin> expectedInstanceOf) + throws ClassNotFoundException { Class<?> clazz = this.loadClass(this._pluginConfig.getClassName(), true); try { @@ -102,14 +112,17 @@ public Plugin instantiatePlugin(@Nonnull Class<? 
extends Plugin> expectedInstanc // Check loaded plugin has implemented the proper implementation of child interface if (!expectedInstanceOf.isAssignableFrom(clazz)) { throw new InstantiationException( - String.format("In plugin %s, the class %s has not implemented the interface %s", - this._pluginConfig.getName(), plugin.getClass().getCanonicalName(), + String.format( + "In plugin %s, the class %s has not implemented the interface %s", + this._pluginConfig.getName(), + plugin.getClass().getCanonicalName(), expectedInstanceOf.getCanonicalName())); } log.debug("Successfully created instance of plugin {}", this._pluginConfig.getClassName()); return plugin; } catch (InstantiationException | IllegalAccessException e) { - throw new RuntimeException(String.format("Failed to instantiate the plugin %s", this._pluginConfig.getName()), e); + throw new RuntimeException( + String.format("Failed to instantiate the plugin %s", this._pluginConfig.getName()), e); } } @@ -157,7 +170,8 @@ protected Class<?> loadClass(String s, boolean b) throws ClassNotFoundException byte[] classBytes = getClassData(this._classPathVsZipEntry.get(path)); ProtectionDomain protectionDomain = - this._pluginPermissionManager.createProtectionDomain(this._pluginConfig.getPluginHomeDirectory()); + this._pluginPermissionManager.createProtectionDomain( + this._pluginConfig.getPluginHomeDirectory()); return defineClass(s, classBytes, 0, classBytes.length, protectionDomain); } @@ -210,8 +224,11 @@ private Optional<URL> findResourceInPluginJar(String resource) { private Optional<URL> findResourceInPluginHome(String resource) { try { - try (Stream<Path> stream = Files.find(this._pluginConfig.getPluginHomeDirectory(), 1, - ((path, basicFileAttributes) -> path.toFile().getName().equals(resource)))) { + try (Stream<Path> stream = + Files.find( + this._pluginConfig.getPluginHomeDirectory(), + 1, + ((path, basicFileAttributes) -> path.toFile().getName().equals(resource)))) { List<Path> resources = stream.collect(Collectors.toList()); if (resources.size() > 0) { log.debug("Number of resources found {}", resources.size()); @@ -227,9 +244,9 @@ private Optional<URL> findResourceInPluginHome(String resource) { } /** - * Look for resource in below order - * - First search in plugin jar if not found - * - then search in plugin directory if not found then return null + * Look for resource in below order - First search in plugin jar if not found - then search in + * plugin directory if not found then return null + * * @param resource Resource to find * @return URL of the resource */ diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java index a20e9d0760968..f27a2e2551d58 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java @@ -8,15 +8,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j class JarExtractor { - private JarExtractor() { - } + private JarExtractor() {} /** * Write url content to destinationFilePath + * * @param url * @param destinationFilePath * @throws IOException @@ -30,4 +29,4 @@ public static void write(@Nonnull URL url, @Nonnull Path destinationFilePath) th } } } -} \ No newline at end of file +} diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java 
b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java index 0596f8abcea74..7107787fdec3b 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java @@ -11,7 +11,6 @@ import java.security.cert.Certificate; import javax.annotation.Nonnull; - public class PluginPermissionManagerImpl implements PluginPermissionManager { private final SecurityMode _securityMode; @@ -21,7 +20,9 @@ public PluginPermissionManagerImpl(@Nonnull SecurityMode securityMode) { } /** - * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to the plugin code + * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to + * the plugin code + * * @param pluginHome * @return ProtectionDomain */ diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java index 64c53f1cb6db3..ccc95e4941ad0 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java @@ -8,7 +8,6 @@ import java.util.Map; import org.testng.annotations.Test; - @Test public class TestConfig { @Test @@ -16,8 +15,12 @@ public void testConfig() { PluginConfig authorizerConfig = new PluginConfig(); authorizerConfig.setName("apache-ranger-authorizer"); authorizerConfig.setType(PluginType.AUTHORIZER); - authorizerConfig.setParams(Map.of("className", "com.datahub.authorization.ranger.RangerAuthorizer", "configs", - Map.of("username", "foo", "password", "root123"))); + authorizerConfig.setParams( + Map.of( + "className", + "com.datahub.authorization.ranger.RangerAuthorizer", + "configs", + Map.of("username", "foo", "password", "root123"))); PluginConfig authenticatorConfig = new PluginConfig(); authorizerConfig.setName("sample-authenticator"); diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java index e311aae258109..bfb83f0ddfb24 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java @@ -11,7 +11,6 @@ import java.util.function.Consumer; import org.testng.annotations.Test; - @Test public class TestConfigProvider { @Test @@ -26,45 +25,57 @@ public void testConfigurationLoading() throws Exception { List<PluginConfig> authenticators = authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - List<PluginConfig> authorizers = authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); + List<PluginConfig> authorizers = + authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); assert authenticators.size() != 0; assert authorizers.size() != 0; - Consumer<PluginConfig> validateAuthenticationPlugin = (plugin) -> { - assert plugin.getName().equals("apache-ranger-authenticator"); + Consumer<PluginConfig> validateAuthenticationPlugin = + (plugin) -> { + assert plugin.getName().equals("apache-ranger-authenticator"); - assert "com.datahub.ranger.Authenticator".equals(plugin.getClassName()); + assert 
"com.datahub.ranger.Authenticator".equals(plugin.getClassName()); - assert plugin.getEnabled(); + assert plugin.getEnabled(); - String pluginJarPath = - Paths.get(pluginBaseDirectory.toString(), "apache-ranger-authenticator", "apache-ranger-authenticator.jar") - .toAbsolutePath() - .toString(); - assert pluginJarPath.equals(plugin.getPluginJarPath().toString()); + String pluginJarPath = + Paths.get( + pluginBaseDirectory.toString(), + "apache-ranger-authenticator", + "apache-ranger-authenticator.jar") + .toAbsolutePath() + .toString(); + assert pluginJarPath.equals(plugin.getPluginJarPath().toString()); - String pluginDirectory = Paths.get(pluginBaseDirectory.toString(), plugin.getName()).toAbsolutePath().toString(); - assert pluginDirectory.equals(plugin.getPluginHomeDirectory().toString()); - }; + String pluginDirectory = + Paths.get(pluginBaseDirectory.toString(), plugin.getName()) + .toAbsolutePath() + .toString(); + assert pluginDirectory.equals(plugin.getPluginHomeDirectory().toString()); + }; - Consumer<PluginConfig> validateAuthorizationPlugin = (plugin) -> { - assert plugin.getName().equals("apache-ranger-authorizer"); + Consumer<PluginConfig> validateAuthorizationPlugin = + (plugin) -> { + assert plugin.getName().equals("apache-ranger-authorizer"); - assert "com.datahub.ranger.Authorizer".equals(plugin.getClassName()); + assert "com.datahub.ranger.Authorizer".equals(plugin.getClassName()); - assert plugin.getEnabled(); + assert plugin.getEnabled(); - assert Paths.get(pluginBaseDirectory.toString(), "apache-ranger-authorizer", "apache-ranger-authorizer.jar") - .toAbsolutePath() - .toString() - .equals(plugin.getPluginJarPath().toString()); + assert Paths.get( + pluginBaseDirectory.toString(), + "apache-ranger-authorizer", + "apache-ranger-authorizer.jar") + .toAbsolutePath() + .toString() + .equals(plugin.getPluginJarPath().toString()); - assert Paths.get(pluginBaseDirectory.toString(), plugin.getName()) - .toAbsolutePath() - .toString() - .equals(plugin.getPluginHomeDirectory().toString()); - }; + assert Paths.get(pluginBaseDirectory.toString(), plugin.getName()) + .toAbsolutePath() + .toString() + .equals(plugin.getPluginHomeDirectory().toString()); + }; authenticators.forEach(validateAuthenticationPlugin); authorizers.forEach(validateAuthorizationPlugin); diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java index d85bfc0379d17..6596ca0c83f33 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java @@ -6,7 +6,6 @@ import java.util.List; import org.testng.annotations.Test; - @Test public class TestConfigValidationUtils { diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java index 314849e8ebea5..5e447caa292e2 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java @@ -30,35 +30,32 @@ import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; - /** - * This test case covers below scenarios - * 1. 
Loading plugin configuration and validating the loaded configuration against the expected configuration. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testConfigurationLoading()} - * test + * This test case covers below scenarios 1. Loading plugin configuration and validating the loaded + * configuration against the expected configuration. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testConfigurationLoading()} test * - * 2. Plugin name should be unique in config.yaml. The plugin framework should raise error if more than one plugin - * has the same name. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testDuplicatePluginName()} - * test + * <p>2. Plugin name should be unique in config.yaml. The plugin framework should raise error if + * more than one plugin has the same name. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testDuplicatePluginName()} test * - * 3. Developer can provide plugin jar file name in config.yaml. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testJarFileName()} test + * <p>3. Developer can provide plugin jar file name in config.yaml. This scenario is covered + * in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testJarFileName()} test * - * 4. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthenticatorPlugin()} covers the valid - * authenticator plugin execution. - * Plugin used in this test-case is metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub - * /plugins/test/TestAuthenticator.java + * <p>4. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthenticatorPlugin()} + * covers the valid authenticator plugin execution. Plugin used in this test-case is + * metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub + * /plugins/test/TestAuthenticator.java * - * 5. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthorizerPlugin()} covers the valid - * authorizer plugin execution - * Plugin used in this test-case is metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub - * /plugins/test/TestAuthorizer.java + * <p>5. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthorizerPlugin()} covers + * the valid authorizer plugin execution Plugin used in this test-case is + * metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub + * /plugins/test/TestAuthorizer.java * - * 6. The plugin framework should raise error if authenticator plugin is configured as authorizer plugin or vice-versa. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testIncorrectImplementation - * ()}. - * The test case tries to load authorizer plugin as authenticator plugin + * <p>6. The plugin framework should raise error if authenticator plugin is configured as authorizer + * plugin or vice-versa. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testIncorrectImplementation ()}. 
The test case + * tries to load authorizer plugin as authenticator plugin */ class TestIsolatedClassLoader { @@ -84,22 +81,34 @@ public void testDuplicatePluginName() { public void testJarFileName() throws Exception { Path configPath = Paths.get("src", "test", "resources", "plugin-jar-from-jarFileName"); - Path authenticatorPluginJarPath = Paths.get(configPath.toAbsolutePath().toString(), "apache-ranger-authenticator", - "apache-ranger-authenticator-v1.0.1.jar"); - Config config = (new ConfigProvider(configPath)).load().orElseThrow(() -> new Exception("Should not be empty")); - List<PluginConfig> pluginConfig = (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHENTICATOR); - pluginConfig.forEach((pluginConfigWithJar) -> { - assert pluginConfigWithJar.getPluginJarPath().equals(authenticatorPluginJarPath); - }); - - Path authorizerPluginJarPath = Paths.get(configPath.toAbsolutePath().toString(), "apache-ranger-authorizer", - "apache-ranger-authorizer-v2.0.1.jar"); + Path authenticatorPluginJarPath = + Paths.get( + configPath.toAbsolutePath().toString(), + "apache-ranger-authenticator", + "apache-ranger-authenticator-v1.0.1.jar"); + Config config = + (new ConfigProvider(configPath)) + .load() + .orElseThrow(() -> new Exception("Should not be empty")); + List<PluginConfig> pluginConfig = + (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHENTICATOR); + pluginConfig.forEach( + (pluginConfigWithJar) -> { + assert pluginConfigWithJar.getPluginJarPath().equals(authenticatorPluginJarPath); + }); + + Path authorizerPluginJarPath = + Paths.get( + configPath.toAbsolutePath().toString(), + "apache-ranger-authorizer", + "apache-ranger-authorizer-v2.0.1.jar"); List<PluginConfig> authorizerPluginConfigs = (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHORIZER); - authorizerPluginConfigs.forEach((pluginConfigWithJar) -> { - assert pluginConfigWithJar.getPluginJarPath().equals(authorizerPluginJarPath); - }); + authorizerPluginConfigs.forEach( + (pluginConfigWithJar) -> { + assert pluginConfigWithJar.getPluginJarPath().equals(authorizerPluginJarPath); + }); } public static Path getSamplePluginDirectory() { @@ -145,14 +154,21 @@ public void testAuthenticatorPlugin() throws ClassNotFoundException, Authenticat // authenticator plugin config instance AuthenticatorPluginConfig authenticatorPluginConfig = getAuthenticatorPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); // initiate and invoke the init and authenticate methods - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); - AuthenticatorContext authenticatorContext = new AuthenticatorContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, authenticatorPluginConfig.getPluginHomeDirectory().toString())); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + AuthenticatorContext authenticatorContext = + new AuthenticatorContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + authenticatorPluginConfig.getPluginHomeDirectory().toString())); AuthenticationRequest 
request = new AuthenticationRequest(ImmutableMap.of("foo", "bar")); - authenticator.init(authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), authenticatorContext); + authenticator.init( + authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), authenticatorContext); Authentication authentication = authenticator.authenticate(request); assert authentication.getActor().getId().equals("fake"); @@ -163,13 +179,20 @@ public void testAuthorizerPlugin() throws ClassNotFoundException, Authentication // authenticator plugin config instance AuthorizerPluginConfig authorizerPluginConfig = getAuthorizerPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authorizerPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authorizerPluginConfig); // initiate and invoke the init and authenticate methods Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); - AuthorizerContext authorizerContext = new AuthorizerContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, authorizerPluginConfig.getPluginHomeDirectory().toString()), null); - AuthorizationRequest authorizationRequest = new AuthorizationRequest("urn:li:user:fake", "test", Optional.empty()); + AuthorizerContext authorizerContext = + new AuthorizerContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + authorizerPluginConfig.getPluginHomeDirectory().toString()), + null); + AuthorizationRequest authorizationRequest = + new AuthorizationRequest("urn:li:user:fake", "test", Optional.empty()); authorizer.init(authorizerPluginConfig.getConfigs().orElse(new HashMap<>()), authorizerContext); assert authorizer.authorize(authorizationRequest).getMessage().equals("fake message"); } @@ -178,13 +201,17 @@ public void testAuthorizerPlugin() throws ClassNotFoundException, Authentication public void testIncorrectImplementation() { AuthorizerPluginConfig authorizerPluginConfig = getAuthorizerPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authorizerPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authorizerPluginConfig); // initiate and invoke the init and authenticate methods try { - // Authorizer configuration is provided, however here we were expecting that plugin should be of type + // Authorizer configuration is provided, however here we were expecting that plugin should be + // of type // Authenticator.class - Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authenticator.class); + Authorizer authorizer = + (Authorizer) isolatedClassLoader.instantiatePlugin(Authenticator.class); assert authorizer != null; } catch (RuntimeException | ClassNotFoundException e) { assert e.getCause() instanceof java.lang.InstantiationException; @@ -197,10 +224,13 @@ public void testLenientMode() throws ClassNotFoundException, AuthenticationExcep AuthenticatorPluginConfig authenticatorPluginConfig = 
getAuthenticatorPluginConfig(); authenticatorPluginConfig.setClassName("com.datahub.plugins.test.TestLenientModeAuthenticator"); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.LENIENT); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.LENIENT); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); // initiate and invoke the init and authenticate methods - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); authenticator.init(authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), null); AuthenticationRequest request = new AuthenticationRequest(ImmutableMap.of("foo", "bar")); assert authenticator.authenticate(request) != null; diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java index 1d182f5fa8ea7..f620a1687064c 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java @@ -10,7 +10,6 @@ import java.util.List; import org.testng.annotations.Test; - public class TestPluginConfigFactory { @Test @@ -26,14 +25,20 @@ public void authConfig() throws Exception { // Load authenticator plugin configuration List<PluginConfig> authenticatorConfigs = authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - authenticatorConfigs.forEach(c -> { - assert c.getClassName().equals("com.datahub.ranger.Authenticator"); // className should match to Authenticator - }); + authenticatorConfigs.forEach( + c -> { + assert c.getClassName() + .equals( + "com.datahub.ranger.Authenticator"); // className should match to Authenticator + }); // Load authorizer plugin configuration - List<PluginConfig> authorizerConfigs = authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); - authorizerConfigs.forEach(c -> { - assert c.getClassName().equals("com.datahub.ranger.Authorizer"); // className should match to Authorizer - }); + List<PluginConfig> authorizerConfigs = + authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); + authorizerConfigs.forEach( + c -> { + assert c.getClassName() + .equals("com.datahub.ranger.Authorizer"); // className should match to Authorizer + }); } } diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java index e6882e7de3120..56e4c150b100c 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java @@ -14,19 +14,23 @@ import java.util.Map; import org.testng.annotations.Test; - @Test public class TestPluginPermissionManager { @Test public void testRestrictedMode() throws MalformedURLException { - PluginPermissionManagerImpl pluginPermissionManager = new 
PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + PluginPermissionManagerImpl pluginPermissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - Path pluginHome = Paths.get("src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); + Path pluginHome = + Paths.get( + "src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); - ProtectionDomain protectionDomain = pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); + ProtectionDomain protectionDomain = + pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); // provided pluginHome and codeSource in protection domain should be equal - assert pluginHome.toUri() + assert pluginHome + .toUri() .toURL() .toExternalForm() .equals(protectionDomain.getCodeSource().getLocation().toExternalForm()); @@ -43,21 +47,27 @@ public void testRestrictedMode() throws MalformedURLException { map.put(pluginHome.toAbsolutePath() + "/*", "read,write,delete"); // Compare actual with expected - permissions.forEach(permission -> { - assert map.keySet().contains(permission.getName()); - assert map.values().contains(permission.getActions()); - }); + permissions.forEach( + permission -> { + assert map.keySet().contains(permission.getName()); + assert map.values().contains(permission.getActions()); + }); } public void testLenientMode() throws MalformedURLException { - PluginPermissionManagerImpl pluginPermissionManager = new PluginPermissionManagerImpl(SecurityMode.LENIENT); + PluginPermissionManagerImpl pluginPermissionManager = + new PluginPermissionManagerImpl(SecurityMode.LENIENT); - Path pluginHome = Paths.get("src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); + Path pluginHome = + Paths.get( + "src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); - ProtectionDomain protectionDomain = pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); + ProtectionDomain protectionDomain = + pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); // provided pluginHome and codeSource in protection domain should be equal - assert pluginHome.toUri() + assert pluginHome + .toUri() .toURL() .toExternalForm() .equals(protectionDomain.getCodeSource().getLocation().toExternalForm()); @@ -68,8 +78,9 @@ public void testLenientMode() throws MalformedURLException { // It should have 1 permission assert permissions.size() == 1; - permissions.forEach(permission -> { - assert permission.getName().equals("<all permissions>"); - }); + permissions.forEach( + permission -> { + assert permission.getName().equals("<all permissions>"); + }); } } diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java index 4fb958de2edd6..e234a150ccd73 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java @@ -25,13 +25,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TestAuthenticator implements Authenticator { private AuthenticatorContext _authenticatorContext; @Override - public void init(@Nonnull Map<String, Object> authenticatorConfig, @Nullable 
AuthenticatorContext context) { + public void init( + @Nonnull Map<String, Object> authenticatorConfig, @Nullable AuthenticatorContext context) { /* * authenticatorConfig contains key, value pairs set in plugins[].params.configs of config.yml */ @@ -48,7 +48,8 @@ public void init(@Nonnull Map<String, Object> authenticatorConfig, @Nullable Aut private void readInputStream() { // Test resource as stream is working - try (InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { + try (InputStream inputStream = + this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { assert inputStream != null; BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); assert reader.readLine() != null; @@ -59,9 +60,12 @@ private void readInputStream() { } private void accessFile() { - // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write on plugin directory + // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write + // on plugin directory Path pluginDirectory = - Paths.get((String) this._authenticatorContext.data().get(PluginConstant.PLUGIN_HOME), "tmp_file1.txt"); + Paths.get( + (String) this._authenticatorContext.data().get(PluginConstant.PLUGIN_HOME), + "tmp_file1.txt"); try { try (BufferedWriter writer = new BufferedWriter(new FileWriter(pluginDirectory.toString()))) { @@ -79,7 +83,8 @@ private void accessFile() { public void accessSystemProperty() { try { System.getProperty("user.home"); - throw new RuntimeException("Plugin is able to access system properties"); // we should not reach here + throw new RuntimeException( + "Plugin is able to access system properties"); // we should not reach here } catch (AccessControlException accessControlException) { log.info("Expected: Don't have permission to read system properties"); } diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java index e5f3e223ff505..4dcace841205a 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java @@ -21,7 +21,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TestAuthorizer implements Authorizer { private AuthorizerContext _authorizerContext; @@ -45,9 +44,12 @@ public AuthorizationResult authorize(@Nonnull AuthorizationRequest request) { URL url = this.getClass().getClassLoader().getResource("foo_bar.json"); assert url != null; - // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write on plugin directory + // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write + // on plugin directory Path pluginDirectory = - Paths.get((String) this._authorizerContext.data().get(PluginConstant.PLUGIN_HOME), "tmp_file1.txt"); + Paths.get( + (String) this._authorizerContext.data().get(PluginConstant.PLUGIN_HOME), + "tmp_file1.txt"); try { try (BufferedWriter writer = new BufferedWriter(new FileWriter(pluginDirectory.toString()))) { @@ -62,7 +64,8 @@ public AuthorizationResult authorize(@Nonnull AuthorizationRequest request) { } // Test resource as stream is working - try (InputStream inputStream = 
this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { + try (InputStream inputStream = + this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { assert inputStream != null; BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); assert reader.readLine() != null; @@ -78,4 +81,3 @@ public AuthorizedActors authorizedActors(String privilege, Optional<EntitySpec> return new AuthorizedActors("ALL", null, null, null, true, true); } } - diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java index 2cc27f11a6254..d143b3803ca34 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java @@ -11,18 +11,17 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class TestLenientModeAuthenticator implements Authenticator { @Override - public void init(@Nonnull Map<String, Object> authenticatorConfig, @Nullable AuthenticatorContext context) { - - } + public void init( + @Nonnull Map<String, Object> authenticatorConfig, @Nullable AuthenticatorContext context) {} @Nullable @Override public Authentication authenticate(@Nonnull AuthenticationRequest authenticationRequest) throws AuthenticationException { - // We should be able to access user directory as we are going to be loaded with Lenient mode IsolatedClassLoader + // We should be able to access user directory as we are going to be loaded with Lenient mode + // IsolatedClassLoader String userHome = System.getProperty("user.home"); assert userHome != null; return new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json index 3e1b975311b11..27581334814ce 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json @@ -4,10 +4,12 @@ "path" : "/analytics", "schema" : "com.linkedin.analytics.GetTimeseriesAggregatedStatsResponse", "doc" : "Rest.li entry point: /analytics\n\ngenerated from: com.linkedin.metadata.resources.analytics.Analytics", + "resourceClass" : "com.linkedin.metadata.resources.analytics.Analytics", "simple" : { "supports" : [ ], "actions" : [ { "name" : "getTimeseriesStats", + "javaMethodName" : "getTimeseriesStats", "parameters" : [ { "name" : "entityName", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json index 3a0df137a0469..917540aca8728 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json @@ -4,6 +4,7 @@ "path" : "/aspects", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: 
com.linkedin.metadata.resources.entity.AspectResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.AspectResource", "collection" : { "identifier" : { "name" : "aspectsId", @@ -12,6 +13,7 @@ "supports" : [ "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.\n TODO: Get rid of this and migrate to getAspect.", "parameters" : [ { "name" : "aspect", @@ -25,6 +27,7 @@ } ], "actions" : [ { "name" : "getCount", + "javaMethodName" : "getCount", "parameters" : [ { "name" : "aspect", "type" : "string" @@ -36,6 +39,7 @@ "returns" : "int" }, { "name" : "getTimeseriesAspectValues", + "javaMethodName" : "getTimeseriesAspectValues", "parameters" : [ { "name" : "urn", "type" : "string" @@ -73,6 +77,7 @@ "returns" : "com.linkedin.aspect.GetTimeseriesAspectValuesResponse" }, { "name" : "ingestProposal", + "javaMethodName" : "ingestProposal", "parameters" : [ { "name" : "proposal", "type" : "com.linkedin.mxe.MetadataChangeProposal" @@ -84,6 +89,7 @@ "returns" : "string" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json index a9de21d08aedc..eac1cc690a60d 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json @@ -4,6 +4,7 @@ "path" : "/entities", "schema" : "com.linkedin.entity.Entity", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityResource", "collection" : { "identifier" : { "name" : "entitiesId", @@ -12,6 +13,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -20,6 +22,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -28,6 +31,7 @@ } ], "actions" : [ { "name" : "applyRetention", + "javaMethodName" : "applyRetention", "parameters" : [ { "name" : "start", "type" : "int", @@ -52,6 +56,7 @@ "returns" : "string" }, { "name" : "autocomplete", + "javaMethodName" : "autocomplete", "parameters" : [ { "name" : "entity", "type" : "string" @@ -73,6 +78,7 @@ "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { "name" : "batchGetTotalEntityCount", + "javaMethodName" : "batchGetTotalEntityCount", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }" @@ -80,6 +86,7 @@ "returns" : "{ \"type\" : \"map\", \"values\" : \"long\" }" }, { "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.entity.Entity\" }" @@ -90,6 +97,7 @@ } ] }, { "name" : "browse", + "javaMethodName" : "browse", "parameters" : [ { "name" : "entity", "type" : "string" @@ -110,6 +118,7 @@ "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { "name" : "delete", + 
"javaMethodName" : "deleteEntity", "doc" : "Deletes all data related to an individual urn(entity).\nService Returns: - a DeleteEntityResponse object.", "parameters" : [ { "name" : "urn", @@ -119,7 +128,7 @@ "name" : "aspectName", "type" : "string", "optional" : true, - "doc" : "- the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects)." + "doc" : "- the optional aspect name if only want to delete the aspect (applicable only\n for timeseries aspects)." }, { "name" : "startTimeMillis", "type" : "long", @@ -134,6 +143,7 @@ "returns" : "com.linkedin.metadata.run.DeleteEntityResponse" }, { "name" : "deleteAll", + "javaMethodName" : "deleteEntities", "parameters" : [ { "name" : "registryId", "type" : "string", @@ -146,6 +156,7 @@ "returns" : "com.linkedin.metadata.run.RollbackResponse" }, { "name" : "deleteReferences", + "javaMethodName" : "deleteReferencesTo", "parameters" : [ { "name" : "urn", "type" : "string" @@ -157,6 +168,7 @@ "returns" : "com.linkedin.metadata.run.DeleteReferencesResponse" }, { "name" : "exists", + "javaMethodName" : "exists", "parameters" : [ { "name" : "urn", "type" : "string" @@ -164,6 +176,7 @@ "returns" : "boolean" }, { "name" : "filter", + "javaMethodName" : "filter", "parameters" : [ { "name" : "entity", "type" : "string" @@ -184,6 +197,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "getBrowsePaths", + "javaMethodName" : "getBrowsePaths", "parameters" : [ { "name" : "urn", "type" : "com.linkedin.common.Urn" @@ -191,6 +205,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"string\" }" }, { "name" : "getTotalEntityCount", + "javaMethodName" : "getTotalEntityCount", "parameters" : [ { "name" : "entity", "type" : "string" @@ -198,6 +213,7 @@ "returns" : "long" }, { "name" : "ingest", + "javaMethodName" : "ingest", "parameters" : [ { "name" : "entity", "type" : "com.linkedin.entity.Entity" @@ -208,6 +224,7 @@ } ] }, { "name" : "list", + "javaMethodName" : "list", "parameters" : [ { "name" : "entity", "type" : "string" @@ -229,6 +246,7 @@ "returns" : "com.linkedin.metadata.query.ListResult" }, { "name" : "listUrns", + "javaMethodName" : "listUrns", "parameters" : [ { "name" : "entity", "type" : "string" @@ -242,6 +260,7 @@ "returns" : "com.linkedin.metadata.query.ListUrnsResult" }, { "name" : "scrollAcrossEntities", + "javaMethodName" : "scrollAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -274,6 +293,7 @@ "returns" : "com.linkedin.metadata.search.ScrollResult" }, { "name" : "scrollAcrossLineage", + "javaMethodName" : "scrollAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -325,6 +345,7 @@ "returns" : "com.linkedin.metadata.search.LineageScrollResult" }, { "name" : "search", + "javaMethodName" : "search", "parameters" : [ { "name" : "entity", "type" : "string" @@ -360,6 +381,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossEntities", + "javaMethodName" : "searchAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -389,6 +411,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossLineage", + "javaMethodName" : "searchAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -437,6 +460,7 @@ "returns" : "com.linkedin.metadata.search.LineageSearchResult" }, { "name" : "setWritable", + "javaMethodName" : "setWriteable", "parameters" : [ { "name" : 
"value", "type" : "boolean", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json index 0c92a981c7356..33cfba0f27802 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json @@ -4,6 +4,7 @@ "path" : "/entitiesV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityV2Resource", "collection" : { "identifier" : { "name" : "entitiesV2Id", @@ -12,6 +13,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -20,6 +22,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json index 579f1d7c7dddc..f3eb9d38dc6ae 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json @@ -4,6 +4,7 @@ "path" : "/entitiesVersionedV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", "collection" : { "identifier" : { "name" : "entitiesVersionedV2Id", @@ -12,6 +13,7 @@ "supports" : [ "batch_get" ], "methods" : [ { "method" : "batch_get", + "javaMethodName" : "batchGetVersioned", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json index 5eaa34bc7a2e9..7284cd2bac48f 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json @@ -4,6 +4,7 @@ "path" : "/runs", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "resource for showing information and rolling back runs\n\ngenerated from: com.linkedin.metadata.resources.entity.BatchIngestionRunResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.BatchIngestionRunResource", "collection" : { "identifier" : { "name" : "runsId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "describe", + "javaMethodName" : "describe", "parameters" : [ { "name" : "runId", "type" : "string" @@ -33,6 +35,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.AspectRowSummary\" }" }, { "name" : "list", + "javaMethodName" : "list", "doc" : "Retrieves the value for an entity that is made up of latest versions of 
specified aspects.", "parameters" : [ { "name" : "pageOffset", @@ -50,6 +53,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.IngestionRunSummary\" }" }, { "name" : "rollback", + "javaMethodName" : "rollback", "doc" : "Rolls back an ingestion run", "parameters" : [ { "name" : "runId", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json index 68f9fe8ae152e..7056368d82c7d 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json @@ -4,10 +4,12 @@ "path" : "/relationships", "schema" : "com.linkedin.common.EntityRelationships", "doc" : "Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types}\n\ngenerated from: com.linkedin.metadata.resources.lineage.Relationships", + "resourceClass" : "com.linkedin.metadata.resources.lineage.Relationships", "simple" : { "supports" : [ "delete", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "parameters" : [ { "name" : "urn", "type" : "string" @@ -28,6 +30,7 @@ } ] }, { "method" : "delete", + "javaMethodName" : "delete", "parameters" : [ { "name" : "urn", "type" : "string" @@ -35,6 +38,7 @@ } ], "actions" : [ { "name" : "getLineage", + "javaMethodName" : "getLineage", "parameters" : [ { "name" : "urn", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json index 958ec13b37fca..0fb6a18a7974b 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json @@ -4,6 +4,7 @@ "path" : "/operations", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Endpoints for performing maintenance operations\n\ngenerated from: com.linkedin.metadata.resources.operations.OperationsResource", + "resourceClass" : "com.linkedin.metadata.resources.operations.OperationsResource", "collection" : { "identifier" : { "name" : "operationsId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "getEsTaskStatus", + "javaMethodName" : "getTaskStatus", "parameters" : [ { "name" : "nodeId", "type" : "string", @@ -28,9 +30,11 @@ "returns" : "string" }, { "name" : "getIndexSizes", + "javaMethodName" : "getIndexSizes", "returns" : "com.linkedin.timeseries.TimeseriesIndicesSizesResult" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", @@ -55,6 +59,7 @@ "returns" : "string" }, { "name" : "truncateTimeseriesAspect", + "javaMethodName" : "truncateTimeseriesAspect", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json index 3346ddd23e3ba..9fbb3e9b6698e 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json @@ -4,6 +4,7 @@ "path" : "/platform", "schema" : "com.linkedin.entity.Entity", "doc" : "DataHub Platform 
Actions\n\ngenerated from: com.linkedin.metadata.resources.platform.PlatformResource", + "resourceClass" : "com.linkedin.metadata.resources.platform.PlatformResource", "collection" : { "identifier" : { "name" : "platformId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "producePlatformEvent", + "javaMethodName" : "producePlatformEvent", "parameters" : [ { "name" : "name", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json index 2a4cf40b58412..42f0894fbb7a6 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json @@ -7,6 +7,7 @@ "path" : "/usageStats", "schema" : "com.linkedin.usage.UsageAggregation", "doc" : "Rest.li entry point: /usageStats\n\ngenerated from: com.linkedin.metadata.resources.usage.UsageStats", + "resourceClass" : "com.linkedin.metadata.resources.usage.UsageStats", "simple" : { "supports" : [ ], "actions" : [ { @@ -14,12 +15,14 @@ "deprecated" : { } }, "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "buckets", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.usage.UsageAggregation\" }" } ] }, { "name" : "query", + "javaMethodName" : "query", "parameters" : [ { "name" : "resource", "type" : "string" @@ -42,6 +45,7 @@ "returns" : "com.linkedin.usage.UsageQueryResult" }, { "name" : "queryRange", + "javaMethodName" : "queryRange", "parameters" : [ { "name" : "resource", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json index d75ec58546465..c4532cba9e6be 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json @@ -222,10 +222,12 @@ "path" : "/analytics", "schema" : "com.linkedin.analytics.GetTimeseriesAggregatedStatsResponse", "doc" : "Rest.li entry point: /analytics\n\ngenerated from: com.linkedin.metadata.resources.analytics.Analytics", + "resourceClass" : "com.linkedin.metadata.resources.analytics.Analytics", "simple" : { "supports" : [ ], "actions" : [ { "name" : "getTimeseriesStats", + "javaMethodName" : "getTimeseriesStats", "parameters" : [ { "name" : "entityName", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index 0403fa2ceea6f..bca3e7161c8b8 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -3993,6 +3993,7 @@ "path" : "/aspects", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.AspectResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.AspectResource", "collection" : { "identifier" : { "name" : "aspectsId", @@ -4001,6 +4002,7 @@ "supports" : [ "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : 
"Retrieves the value for an entity that is made up of latest versions of specified aspects.\n TODO: Get rid of this and migrate to getAspect.", "parameters" : [ { "name" : "aspect", @@ -4014,6 +4016,7 @@ } ], "actions" : [ { "name" : "getCount", + "javaMethodName" : "getCount", "parameters" : [ { "name" : "aspect", "type" : "string" @@ -4025,6 +4028,7 @@ "returns" : "int" }, { "name" : "getTimeseriesAspectValues", + "javaMethodName" : "getTimeseriesAspectValues", "parameters" : [ { "name" : "urn", "type" : "string" @@ -4062,6 +4066,7 @@ "returns" : "com.linkedin.aspect.GetTimeseriesAspectValuesResponse" }, { "name" : "ingestProposal", + "javaMethodName" : "ingestProposal", "parameters" : [ { "name" : "proposal", "type" : "com.linkedin.mxe.MetadataChangeProposal" @@ -4073,6 +4078,7 @@ "returns" : "string" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index d79a4a1919af9..69184856e4f9e 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -6289,6 +6289,7 @@ "path" : "/entities", "schema" : "com.linkedin.entity.Entity", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityResource", "collection" : { "identifier" : { "name" : "entitiesId", @@ -6297,6 +6298,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -6305,6 +6307,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6313,6 +6316,7 @@ } ], "actions" : [ { "name" : "applyRetention", + "javaMethodName" : "applyRetention", "parameters" : [ { "name" : "start", "type" : "int", @@ -6337,6 +6341,7 @@ "returns" : "string" }, { "name" : "autocomplete", + "javaMethodName" : "autocomplete", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6358,6 +6363,7 @@ "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { "name" : "batchGetTotalEntityCount", + "javaMethodName" : "batchGetTotalEntityCount", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }" @@ -6365,6 +6371,7 @@ "returns" : "{ \"type\" : \"map\", \"values\" : \"long\" }" }, { "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.entity.Entity\" }" @@ -6375,6 +6382,7 @@ } ] }, { "name" : "browse", + "javaMethodName" : "browse", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6395,6 +6403,7 @@ "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { "name" : "delete", + "javaMethodName" : "deleteEntity", "doc" : "Deletes all data related to an individual urn(entity).\nService Returns: - a DeleteEntityResponse object.", "parameters" : [ { "name" : "urn", @@ -6404,7 +6413,7 @@ "name" : "aspectName", 
"type" : "string", "optional" : true, - "doc" : "- the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects)." + "doc" : "- the optional aspect name if only want to delete the aspect (applicable only\n for timeseries aspects)." }, { "name" : "startTimeMillis", "type" : "long", @@ -6419,6 +6428,7 @@ "returns" : "com.linkedin.metadata.run.DeleteEntityResponse" }, { "name" : "deleteAll", + "javaMethodName" : "deleteEntities", "parameters" : [ { "name" : "registryId", "type" : "string", @@ -6431,6 +6441,7 @@ "returns" : "com.linkedin.metadata.run.RollbackResponse" }, { "name" : "deleteReferences", + "javaMethodName" : "deleteReferencesTo", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6442,6 +6453,7 @@ "returns" : "com.linkedin.metadata.run.DeleteReferencesResponse" }, { "name" : "exists", + "javaMethodName" : "exists", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6449,6 +6461,7 @@ "returns" : "boolean" }, { "name" : "filter", + "javaMethodName" : "filter", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6469,6 +6482,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "getBrowsePaths", + "javaMethodName" : "getBrowsePaths", "parameters" : [ { "name" : "urn", "type" : "com.linkedin.common.Urn" @@ -6476,6 +6490,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"string\" }" }, { "name" : "getTotalEntityCount", + "javaMethodName" : "getTotalEntityCount", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6483,6 +6498,7 @@ "returns" : "long" }, { "name" : "ingest", + "javaMethodName" : "ingest", "parameters" : [ { "name" : "entity", "type" : "com.linkedin.entity.Entity" @@ -6493,6 +6509,7 @@ } ] }, { "name" : "list", + "javaMethodName" : "list", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6514,6 +6531,7 @@ "returns" : "com.linkedin.metadata.query.ListResult" }, { "name" : "listUrns", + "javaMethodName" : "listUrns", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6527,6 +6545,7 @@ "returns" : "com.linkedin.metadata.query.ListUrnsResult" }, { "name" : "scrollAcrossEntities", + "javaMethodName" : "scrollAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6559,6 +6578,7 @@ "returns" : "com.linkedin.metadata.search.ScrollResult" }, { "name" : "scrollAcrossLineage", + "javaMethodName" : "scrollAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6610,6 +6630,7 @@ "returns" : "com.linkedin.metadata.search.LineageScrollResult" }, { "name" : "search", + "javaMethodName" : "search", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6645,6 +6666,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossEntities", + "javaMethodName" : "searchAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6674,6 +6696,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossLineage", + "javaMethodName" : "searchAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6722,6 +6745,7 @@ "returns" : "com.linkedin.metadata.search.LineageSearchResult" }, { "name" : "setWritable", + "javaMethodName" : "setWriteable", "parameters" : [ { "name" : "value", "type" : "boolean", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json 
b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json index c7618e5d3c5a1..3eac87e268f5d 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json @@ -162,6 +162,7 @@ "path" : "/entitiesV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityV2Resource", "collection" : { "identifier" : { "name" : "entitiesV2Id", @@ -170,6 +171,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -178,6 +180,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json index 45e542883b723..1733537e68f30 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json @@ -171,6 +171,7 @@ "path" : "/entitiesVersionedV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", "collection" : { "identifier" : { "name" : "entitiesVersionedV2Id", @@ -179,6 +180,7 @@ "supports" : [ "batch_get" ], "methods" : [ { "method" : "batch_get", + "javaMethodName" : "batchGetVersioned", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index b20953749ac35..09c0185f74f3a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -3748,6 +3748,7 @@ "path" : "/runs", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "resource for showing information and rolling back runs\n\ngenerated from: com.linkedin.metadata.resources.entity.BatchIngestionRunResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.BatchIngestionRunResource", "collection" : { "identifier" : { "name" : "runsId", @@ -3756,6 +3757,7 @@ "supports" : [ ], "actions" : [ { "name" : "describe", + "javaMethodName" : "describe", "parameters" : [ { "name" : "runId", "type" : "string" @@ -3777,6 +3779,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.AspectRowSummary\" }" }, { "name" : "list", + "javaMethodName" : "list", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" 
: "pageOffset", @@ -3794,6 +3797,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.IngestionRunSummary\" }" }, { "name" : "rollback", + "javaMethodName" : "rollback", "doc" : "Rolls back an ingestion run", "parameters" : [ { "name" : "runId", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json index 6febf225ad77d..9aa40edd0b118 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json @@ -180,10 +180,12 @@ "path" : "/relationships", "schema" : "com.linkedin.common.EntityRelationships", "doc" : "Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types}\n\ngenerated from: com.linkedin.metadata.resources.lineage.Relationships", + "resourceClass" : "com.linkedin.metadata.resources.lineage.Relationships", "simple" : { "supports" : [ "delete", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "parameters" : [ { "name" : "urn", "type" : "string" @@ -204,6 +206,7 @@ } ] }, { "method" : "delete", + "javaMethodName" : "delete", "parameters" : [ { "name" : "urn", "type" : "string" @@ -211,6 +214,7 @@ } ], "actions" : [ { "name" : "getLineage", + "javaMethodName" : "getLineage", "parameters" : [ { "name" : "urn", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index e29dd6809b968..339ce62de6298 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -3690,6 +3690,7 @@ "path" : "/operations", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Endpoints for performing maintenance operations\n\ngenerated from: com.linkedin.metadata.resources.operations.OperationsResource", + "resourceClass" : "com.linkedin.metadata.resources.operations.OperationsResource", "collection" : { "identifier" : { "name" : "operationsId", @@ -3698,6 +3699,7 @@ "supports" : [ ], "actions" : [ { "name" : "getEsTaskStatus", + "javaMethodName" : "getTaskStatus", "parameters" : [ { "name" : "nodeId", "type" : "string", @@ -3714,9 +3716,11 @@ "returns" : "string" }, { "name" : "getIndexSizes", + "javaMethodName" : "getIndexSizes", "returns" : "com.linkedin.timeseries.TimeseriesIndicesSizesResult" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", @@ -3741,6 +3745,7 @@ "returns" : "string" }, { "name" : "truncateTimeseriesAspect", + "javaMethodName" : "truncateTimeseriesAspect", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index 8391af60f8ece..cb253c458e6c4 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -5542,6 +5542,7 @@ "path" : "/platform", "schema" : 
"com.linkedin.entity.Entity", "doc" : "DataHub Platform Actions\n\ngenerated from: com.linkedin.metadata.resources.platform.PlatformResource", + "resourceClass" : "com.linkedin.metadata.resources.platform.PlatformResource", "collection" : { "identifier" : { "name" : "platformId", @@ -5550,6 +5551,7 @@ "supports" : [ ], "actions" : [ { "name" : "producePlatformEvent", + "javaMethodName" : "producePlatformEvent", "parameters" : [ { "name" : "name", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json index a21b0c1cd30be..e8e68dae4c368 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json @@ -164,6 +164,7 @@ "path" : "/usageStats", "schema" : "com.linkedin.usage.UsageAggregation", "doc" : "Rest.li entry point: /usageStats\n\ngenerated from: com.linkedin.metadata.resources.usage.UsageStats", + "resourceClass" : "com.linkedin.metadata.resources.usage.UsageStats", "simple" : { "supports" : [ ], "actions" : [ { @@ -171,12 +172,14 @@ "deprecated" : { } }, "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "buckets", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.usage.UsageAggregation\" }" } ] }, { "name" : "query", + "javaMethodName" : "query", "parameters" : [ { "name" : "resource", "type" : "string" @@ -199,6 +202,7 @@ "returns" : "com.linkedin.usage.UsageQueryResult" }, { "name" : "queryRange", + "javaMethodName" : "queryRange", "parameters" : [ { "name" : "resource", "type" : "string" diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java b/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java index a61c6e53ab814..eb04382dda45c 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java @@ -8,8 +8,6 @@ import com.linkedin.restli.client.base.BatchGetEntityRequestBuilderBase; import com.linkedin.restli.common.ComplexResourceKey; import com.linkedin.restli.common.EmptyRecord; - -import javax.annotation.Nonnull; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -18,47 +16,52 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public final class BatchGetUtils { - private BatchGetUtils() { - // not called - } - - private static int batchSize = 25; + private BatchGetUtils() { + // not called + } - public static < - U extends Urn, - T extends RecordTemplate, - CRK extends ComplexResourceKey<K, EmptyRecord>, - RB extends BatchGetEntityRequestBuilderBase<CRK, T, RB>, - K extends RecordTemplate> Map<U, T> batchGet( - @Nonnull Set<U> urns, - Function<Void, BatchGetEntityRequestBuilderBase<CRK, T, RB>> requestBuilders, - Function<U, CRK> getKeyFromUrn, - Function<CRK, U> getUrnFromKey, - Client client - ) throws RemoteInvocationException { - AtomicInteger index = new AtomicInteger(0); + private static int batchSize = 25; - final Collection<List<U>> entityUrnBatches = urns.stream() - .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) - .values(); + public static < + U extends Urn, + T extends RecordTemplate, + CRK extends 
ComplexResourceKey<K, EmptyRecord>, + RB extends BatchGetEntityRequestBuilderBase<CRK, T, RB>, + K extends RecordTemplate> + Map<U, T> batchGet( + @Nonnull Set<U> urns, + Function<Void, BatchGetEntityRequestBuilderBase<CRK, T, RB>> requestBuilders, + Function<U, CRK> getKeyFromUrn, + Function<CRK, U> getUrnFromKey, + Client client) + throws RemoteInvocationException { + AtomicInteger index = new AtomicInteger(0); - final Map<U, T> response = new HashMap<>(); + final Collection<List<U>> entityUrnBatches = + urns.stream() + .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) + .values(); - for (List<U> urnsInBatch : entityUrnBatches) { - BatchGetEntityRequest<CRK, T> batchGetRequest = - requestBuilders.apply(null) - .ids(urnsInBatch.stream().map(getKeyFromUrn).collect(Collectors.toSet())) - .build(); - final Map<U, T> batchResponse = client.sendRequest(batchGetRequest).getResponseEntity().getResults() - .entrySet().stream().collect(Collectors.toMap( - entry -> getUrnFromKey.apply(entry.getKey()), - entry -> entry.getValue().getEntity()) - ); - response.putAll(batchResponse); - } + final Map<U, T> response = new HashMap<>(); - return response; + for (List<U> urnsInBatch : entityUrnBatches) { + BatchGetEntityRequest<CRK, T> batchGetRequest = + requestBuilders + .apply(null) + .ids(urnsInBatch.stream().map(getKeyFromUrn).collect(Collectors.toSet())) + .build(); + final Map<U, T> batchResponse = + client.sendRequest(batchGetRequest).getResponseEntity().getResults().entrySet().stream() + .collect( + Collectors.toMap( + entry -> getUrnFromKey.apply(entry.getKey()), + entry -> entry.getValue().getEntity())); + response.putAll(batchResponse); } + + return response; + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java index 1ba0e5c3d555a..4474fd5ce67ec 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java @@ -5,20 +5,17 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; - -import java.util.Objects; -import java.util.Set; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - import com.linkedin.restli.client.AbstractRequestBuilder; import com.linkedin.restli.client.Client; import com.linkedin.restli.client.Request; import com.linkedin.restli.client.Response; +import java.util.Objects; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpHeaders; - @Slf4j public abstract class BaseClient implements AutoCloseable { @@ -26,7 +23,8 @@ public abstract class BaseClient implements AutoCloseable { protected final BackoffPolicy _backoffPolicy; protected final int _retryCount; - protected final static Set<String> NON_RETRYABLE = Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); + protected static final Set<String> NON_RETRYABLE = + Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); protected BaseClient(@Nonnull Client restliClient, BackoffPolicy backoffPolicy, int retryCount) { _client = Objects.requireNonNull(restliClient); @@ -34,16 +32,20 @@ protected BaseClient(@Nonnull Client restliClient, BackoffPolicy backoffPolicy, _retryCount = 
retryCount; } - protected <T> Response<T> sendClientRequest(final AbstractRequestBuilder<?, ?, ? extends Request<T>> requestBuilder) throws RemoteInvocationException { + protected <T> Response<T> sendClientRequest( + final AbstractRequestBuilder<?, ?, ? extends Request<T>> requestBuilder) + throws RemoteInvocationException { return sendClientRequest(requestBuilder, null); } /** - * TODO: Remove unused "actor" parameter. Actor is now implied by the systemClientId + systemClientSecret. + * TODO: Remove unused "actor" parameter. Actor is now implied by the systemClientId + + * systemClientSecret. */ protected <T> Response<T> sendClientRequest( final AbstractRequestBuilder<?, ?, ? extends Request<T>> requestBuilder, - @Nullable final Authentication authentication) throws RemoteInvocationException { + @Nullable final Authentication authentication) + throws RemoteInvocationException { if (authentication != null) { requestBuilder.addHeader(HttpHeaders.AUTHORIZATION, authentication.getCredentials()); } @@ -54,10 +56,15 @@ protected <T> Response<T> sendClientRequest( try { return _client.sendRequest(requestBuilder.build()).getResponse(); } catch (Throwable ex) { - MetricUtils.counter(BaseClient.class, "exception" + MetricUtils.DELIMITER + ex.getClass().getName().toLowerCase()).inc(); - - final boolean skipRetry = NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) - || (ex.getCause() != null && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); + MetricUtils.counter( + BaseClient.class, + "exception" + MetricUtils.DELIMITER + ex.getClass().getName().toLowerCase()) + .inc(); + + final boolean skipRetry = + NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) + || (ex.getCause() != null + && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); if (attemptCount == _retryCount || skipRetry) { throw ex; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java index 79d473d1b0090..56565819afc30 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java @@ -9,126 +9,138 @@ import com.github.benmanes.caffeine.cache.stats.CacheStats; import com.linkedin.metadata.config.cache.client.ClientCacheConfig; import com.linkedin.metadata.utils.metrics.MetricUtils; -import lombok.Builder; -import lombok.NonNull; -import lombok.extern.slf4j.Slf4j; -import org.checkerframework.checker.nullness.qual.Nullable; - import java.util.Map; import java.util.Set; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Function; +import lombok.Builder; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +import org.checkerframework.checker.nullness.qual.Nullable; /** * Generic cache with common configuration for limited weight, per item expiry, and batch loading + * * @param <K> key * @param <V> value */ @Slf4j @Builder public class ClientCache<K, V, C extends ClientCacheConfig> { - @NonNull - protected final C config; - @NonNull - protected final LoadingCache<K, V> cache; - @NonNull - private final Function<Iterable<? 
extends K>, Map<K, V>> loadFunction; - @NonNull - private final Weigher<K, V> weigher; - @NonNull - private final BiFunction<C, K, Integer> ttlSecondsFunction; - - public @Nullable V get(@NonNull K key) { - return cache.get(key); - } + @NonNull protected final C config; + @NonNull protected final LoadingCache<K, V> cache; + @NonNull private final Function<Iterable<? extends K>, Map<K, V>> loadFunction; + @NonNull private final Weigher<K, V> weigher; + @NonNull private final BiFunction<C, K, Integer> ttlSecondsFunction; + + public @Nullable V get(@NonNull K key) { + return cache.get(key); + } + + public @NonNull Map<@NonNull K, @NonNull V> getAll(@NonNull Iterable<? extends @NonNull K> keys) { + return cache.getAll(keys); + } - public @NonNull Map<@NonNull K, @NonNull V> getAll(@NonNull Iterable<? extends @NonNull K> keys) { - return cache.getAll(keys); + public void refresh(@NonNull K key) { + cache.refresh(key); + } + + public static class ClientCacheBuilder<K, V, C extends ClientCacheConfig> { + + private ClientCacheBuilder<K, V, C> cache(LoadingCache<K, V> cache) { + return null; } - public void refresh(@NonNull K key) { - cache.refresh(key); + private ClientCache<K, V, C> build() { + return null; } - public static class ClientCacheBuilder<K, V, C extends ClientCacheConfig> { - - private ClientCacheBuilder<K, V, C> cache(LoadingCache<K, V> cache) { - return null; - } - private ClientCache<K, V, C> build() { - return null; - } - - public ClientCache<K, V, C> build(Class<?> metricClazz) { - // loads data from entity client - CacheLoader<K, V> loader = new CacheLoader<K, V>() { - @Override - public V load(@NonNull K key) { - return loadAll(Set.of(key)).get(key); - } - - @Override - @NonNull - public Map<K, V> loadAll(@NonNull Set<? extends K> keys) { - return loadFunction.apply(keys); - } - }; - - // build cache - Caffeine<K, V> caffeine = Caffeine.newBuilder() - .maximumWeight(config.getMaxBytes()) - // limit total size - .weigher(weigher) - .softValues() - // define per entity/aspect ttls - .expireAfter(new Expiry<K, V>() { - public long expireAfterCreate(@NonNull K key, @NonNull V aspect, long currentTime) { - int ttlSeconds = ttlSecondsFunction.apply(config, key); - if (ttlSeconds < 0) { - ttlSeconds = Integer.MAX_VALUE; - } - return TimeUnit.SECONDS.toNanos(ttlSeconds); - } - public long expireAfterUpdate(@NonNull K key, @NonNull V aspect, - long currentTime, long currentDuration) { - return currentDuration; - } - public long expireAfterRead(@NonNull K key, @NonNull V aspect, - long currentTime, long currentDuration) { - return currentDuration; - } - }); - - if (config.isStatsEnabled()) { - caffeine.recordStats(); + public ClientCache<K, V, C> build(Class<?> metricClazz) { + // loads data from entity client + CacheLoader<K, V> loader = + new CacheLoader<K, V>() { + @Override + public V load(@NonNull K key) { + return loadAll(Set.of(key)).get(key); } - LoadingCache<K, V> cache = caffeine.build(loader); - - if (config.isStatsEnabled()) { - ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1); - executor.scheduleAtFixedRate(() -> { - CacheStats cacheStats = cache.stats(); - - MetricUtils.gauge(metricClazz, "hitRate", () -> (Gauge<Double>) cacheStats::hitRate); - MetricUtils.gauge(metricClazz, "loadFailureRate", () -> - (Gauge<Double>) cacheStats::loadFailureRate); - MetricUtils.gauge(metricClazz, "evictionCount", () -> - (Gauge<Long>) cacheStats::evictionCount); - MetricUtils.gauge(metricClazz, "loadFailureCount", () -> - (Gauge<Long>) cacheStats::loadFailureCount); 
- MetricUtils.gauge(metricClazz, "averageLoadPenalty", () -> - (Gauge<Double>) cacheStats::averageLoadPenalty); - MetricUtils.gauge(metricClazz, "evictionWeight", () -> - (Gauge<Long>) cacheStats::evictionWeight); - - log.debug(metricClazz.getSimpleName() + ": " + cacheStats); - }, 0, config.getStatsIntervalSeconds(), TimeUnit.SECONDS); + @Override + @NonNull + public Map<K, V> loadAll(@NonNull Set<? extends K> keys) { + return loadFunction.apply(keys); } - - return new ClientCache<>(config, cache, loadFunction, weigher, ttlSecondsFunction); - } + }; + + // build cache + Caffeine<K, V> caffeine = + Caffeine.newBuilder() + .maximumWeight(config.getMaxBytes()) + // limit total size + .weigher(weigher) + .softValues() + // define per entity/aspect ttls + .expireAfter( + new Expiry<K, V>() { + public long expireAfterCreate( + @NonNull K key, @NonNull V aspect, long currentTime) { + int ttlSeconds = ttlSecondsFunction.apply(config, key); + if (ttlSeconds < 0) { + ttlSeconds = Integer.MAX_VALUE; + } + return TimeUnit.SECONDS.toNanos(ttlSeconds); + } + + public long expireAfterUpdate( + @NonNull K key, @NonNull V aspect, long currentTime, long currentDuration) { + return currentDuration; + } + + public long expireAfterRead( + @NonNull K key, @NonNull V aspect, long currentTime, long currentDuration) { + return currentDuration; + } + }); + + if (config.isStatsEnabled()) { + caffeine.recordStats(); + } + + LoadingCache<K, V> cache = caffeine.build(loader); + + if (config.isStatsEnabled()) { + ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1); + executor.scheduleAtFixedRate( + () -> { + CacheStats cacheStats = cache.stats(); + + MetricUtils.gauge(metricClazz, "hitRate", () -> (Gauge<Double>) cacheStats::hitRate); + MetricUtils.gauge( + metricClazz, + "loadFailureRate", + () -> (Gauge<Double>) cacheStats::loadFailureRate); + MetricUtils.gauge( + metricClazz, "evictionCount", () -> (Gauge<Long>) cacheStats::evictionCount); + MetricUtils.gauge( + metricClazz, + "loadFailureCount", + () -> (Gauge<Long>) cacheStats::loadFailureCount); + MetricUtils.gauge( + metricClazz, + "averageLoadPenalty", + () -> (Gauge<Double>) cacheStats::averageLoadPenalty); + MetricUtils.gauge( + metricClazz, "evictionWeight", () -> (Gauge<Long>) cacheStats::evictionWeight); + + log.debug(metricClazz.getSimpleName() + ": " + cacheStats); + }, + 0, + config.getStatsIntervalSeconds(), + TimeUnit.SECONDS); + } + + return new ClientCache<>(config, cache, loadFunction, weigher, ttlSecondsFunction); } + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 84d0ed6b9594d..7bc50a8f3dc7e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -45,7 +45,8 @@ public EntityResponse getV2( @Nonnull String entityName, @Nonnull final Urn urn, @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull @Deprecated @@ -57,18 +58,21 @@ public Map<Urn, EntityResponse> batchGetV2( @Nonnull String entityName, @Nonnull final Set<Urn> urns, @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws 
RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull Map<Urn, EntityResponse> batchGetVersionedV2( @Nonnull String entityName, @Nonnull final Set<VersionedUrn> versionedUrns, @Nullable final Set<String> aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull @Deprecated - public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) + public Map<Urn, Entity> batchGet( + @Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -81,9 +85,14 @@ public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Au * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nullable String field, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull Authentication authentication) + throws RemoteInvocationException; /** * Gets browse snapshot of a given path @@ -94,8 +103,12 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nonnull Authentication authentication) + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -109,8 +122,13 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, - @Nullable Map<String, String> requestFilters, int start, int limit, @Nonnull Authentication authentication) + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map<String, String> requestFilters, + int start, + int limit, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -125,8 +143,14 @@ public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, * @throws RemoteInvocationException */ @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) throws RemoteInvocationException; @Deprecated @@ -134,11 +158,15 @@ public void update(@Nonnull final Entity entity, @Nonnull final Authentication a throws RemoteInvocationException; @Deprecated - public void updateWithSystemMetadata(@Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication 
authentication) throws RemoteInvocationException;
+  public void updateWithSystemMetadata(
+      @Nonnull final Entity entity,
+      @Nullable final SystemMetadata systemMetadata,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException;
 
   @Deprecated
-  public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication)
+  public void batchUpdate(
+      @Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication)
       throws RemoteInvocationException;
 
   /**
@@ -153,15 +181,20 @@ public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Auth
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public SearchResult search(@Nonnull String entity, @Nonnull String input,
-      @Nullable Map<String, String> requestFilters, int start, int count, @Nonnull Authentication authentication,
+  public SearchResult search(
+      @Nonnull String entity,
+      @Nonnull String input,
+      @Nullable Map<String, String> requestFilters,
+      int start,
+      int count,
+      @Nonnull Authentication authentication,
       @Nullable SearchFlags searchFlags)
       throws RemoteInvocationException;
 
   /**
    * Filters for entities matching to a given query and filters
    *
-   * TODO: This no longer has any usages, can we deprecate/remove?
+   * <p>TODO: This no longer has any usages, can we deprecate/remove?
    *
    * @param requestFilters search filters
    * @param start start offset for search results
    *
@@ -170,8 +203,13 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input,
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public ListResult list(@Nonnull String entity, @Nullable Map<String, String> requestFilters, int start, int count,
-      @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public ListResult list(
+      @Nonnull String entity,
+      @Nullable Map<String, String> requestFilters,
+      int start,
+      int count,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
   /**
    * Searches for datasets matching to a given query and filters
   *
@@ -186,9 +224,16 @@ public ListResult list(@Nonnull String entity, @Nullable Map<String, String> req
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Filter filter,
-      SortCriterion sortCriterion, int start, int count, @Nonnull Authentication authentication,
-      @Nullable SearchFlags searchFlags) throws RemoteInvocationException;
+  public SearchResult search(
+      @Nonnull String entity,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nonnull Authentication authentication,
+      @Nullable SearchFlags searchFlags)
+      throws RemoteInvocationException;
 
   /**
    * Searches for entities matching to a given query and filters across multiple entity types
    *
@@ -203,9 +248,15 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nulla
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags,
-      @Nullable SortCriterion sortCriterion, @Nonnull Authentication authentication)
+  public SearchResult searchAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      int start,
+      int count,
+      @Nullable SearchFlags searchFlags,
+      @Nullable SortCriterion sortCriterion,
+      @Nonnull Authentication authentication)
       throws RemoteInvocationException;
 
   /**
@@ -222,9 +273,16 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags,
-      @Nullable SortCriterion sortCriterion, @Nonnull Authentication authentication, List<String> facets)
+  public SearchResult searchAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      int start,
+      int count,
+      @Nullable SearchFlags searchFlags,
+      @Nullable SortCriterion sortCriterion,
+      @Nonnull Authentication authentication,
+      List<String> facets)
       throws RemoteInvocationException;
 
   /**
@@ -240,8 +298,14 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul
    * @throws RemoteInvocationException
    */
   @Nonnull
-  ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, @Nullable SearchFlags searchFlags,
+  ScrollResult scrollAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      @Nullable String scrollId,
+      @Nullable String keepAlive,
+      int count,
+      @Nullable SearchFlags searchFlags,
       @Nonnull Authentication authentication)
       throws RemoteInvocationException;
 
@@ -258,43 +322,57 @@ ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull Strin
    * @param start index to start the search from
    * @param count the number of search hits to return
    * @param searchFlags configuration flags for the search request
-   * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link SearchResult} that contains a list of matched documents and related search
+   *     result metadata
    */
   @Nonnull
-  public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter,
-      @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags,
+  public LineageSearchResult searchAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nullable SearchFlags searchFlags,
       @Nonnull final Authentication authentication)
       throws RemoteInvocationException;
 
   /**
-   * Gets a list of documents that match given search request that is related to
-   * the input entity
+   * Gets a list of documents that match given search request that is related to the input entity
    *
-   * @param sourceUrn Urn of the source entity
-   * @param direction Direction of the relationship
-   * @param entities list of entities to search (If empty, searches
-   * across all entities)
-   * @param input the search input text
-   * @param maxHops the max number of hops away to search for. If null,
-   * searches all hops.
-   * @param filter the request map with fields and values as filters
-   * to be applied to search hits
-   * @param sortCriterion {@link SortCriterion} to be applied to search
-   * results
-   * @param start index to start the search from
-   * @param count the number of search hits to return
-   * @param endTimeMillis end time to filter to
+   * @param sourceUrn Urn of the source entity
+   * @param direction Direction of the relationship
+   * @param entities list of entities to search (If empty, searches across all entities)
+   * @param input the search input text
+   * @param maxHops the max number of hops away to search for. If null, searches all hops.
+   * @param filter the request map with fields and values as filters to be applied to search hits
+   * @param sortCriterion {@link SortCriterion} to be applied to search results
+   * @param start index to start the search from
+   * @param count the number of search hits to return
+   * @param endTimeMillis end time to filter to
    * @param startTimeMillis start time to filter from
    * @param searchFlags configuration flags for the search request
-   * @return a {@link SearchResult} that contains a list of matched documents and
-   * related search result metadata
+   * @return a {@link SearchResult} that contains a list of matched documents and related search
+   *     result metadata
    */
   @Nonnull
-  public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter,
-      @Nullable SortCriterion sortCriterion, int start, int count, @Nullable final Long startTimeMillis,
-      @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication)
+  public LineageSearchResult searchAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nullable final Long startTimeMillis,
+      @Nullable final Long endTimeMillis,
+      @Nullable SearchFlags searchFlags,
+      @Nonnull final Authentication authentication)
       throws RemoteInvocationException;
 
   /**
@@ -309,16 +387,27 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param scrollId opaque scroll ID indicating offset
    * @param keepAlive string representation of time to keep point in time alive, ex: 5m
-   * @param endTimeMillis end time to filter to
+   * @param endTimeMillis end time to filter to
    * @param startTimeMillis start time to filter from
    * @param count the number of search hits to return
-   * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link SearchResult} that contains a list of matched documents and related search
+   *     result metadata
    */
   @Nonnull
-  LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter,
-      @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count,
-      @Nullable final Long startTimeMillis, @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags,
+  LineageScrollResult scrollAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      @Nullable String scrollId,
+      @Nonnull String keepAlive,
+      int count,
+      @Nullable final Long startTimeMillis,
+      @Nullable final Long endTimeMillis,
+      @Nullable SearchFlags searchFlags,
       @Nonnull final Authentication authentication)
       throws RemoteInvocationException;
 
@@ -333,28 +422,29 @@ LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull Lineage
   public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull Authentication authentication)
       throws RemoteInvocationException;
 
-  public void setWritable(boolean canWrite, @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public void setWritable(boolean canWrite, @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
   @Nonnull
-  public Map<String, Long> batchGetTotalEntityCount(@Nonnull List<String> entityName,
-      @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public Map<String, Long> batchGetTotalEntityCount(
+      @Nonnull List<String> entityName, @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
-  /**
-   * List all urns existing for a particular Entity type.
-   */
-  public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException;
+  /** List all urns existing for a particular Entity type. */
+  public ListUrnsResult listUrns(
+      @Nonnull final String entityName,
+      final int start,
+      final int count,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException;
 
-  /**
-   * Hard delete an entity with a particular urn.
-   */
+  /** Hard delete an entity with a particular urn. */
   public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication)
       throws RemoteInvocationException;
 
-  /**
-   * Delete all references to an entity with a particular urn.
-   */
-  public void deleteEntityReferences(@Nonnull final Urn urn, @Nonnull final Authentication authentication)
+  /** Delete all references to an entity with a particular urn. */
+  public void deleteEntityReferences(
+      @Nonnull final Urn urn, @Nonnull final Authentication authentication)
       throws RemoteInvocationException;
 
   /**
@@ -369,68 +459,96 @@ public void deleteEntityReferences(@Nonnull final Urn urn, @Nonnull final Authen
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion,
-      int start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public SearchResult filter(
+      @Nonnull String entity,
+      @Nonnull Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
   /**
    * Checks whether an entity with a given urn exists
    *
    * @param urn the urn of the entity
-   * @return true if an entity exists, i.e. there are > 0 aspects in the DB for the entity. This means that the entity
-   * has not been hard-deleted.
+   * @return true if an entity exists, i.e. there are > 0 aspects in the DB for the entity. This
+   *     means that the entity has not been hard-deleted.
    * @throws RemoteInvocationException
   */
   @Nonnull
-  public boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
   @Nullable
   @Deprecated
-  public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version,
-      @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public VersionedAspect getAspect(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
   @Nullable
   @Deprecated
-  public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version,
-      @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public VersionedAspect getAspectOrNull(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
-  default List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity,
-      @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit,
-      @Nullable Filter filter, @Nonnull Authentication authentication)
+  default List<EnvelopedAspect> getTimeseriesAspectValues(
+      @Nonnull String urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis,
+      @Nullable Integer limit,
+      @Nullable Filter filter,
+      @Nonnull Authentication authentication)
       throws RemoteInvocationException {
     return getTimeseriesAspectValues(
-        urn,
-        entity,
-        aspect,
-        startTimeMillis,
-        endTimeMillis,
-        limit,
-        filter,
-        null,
-        authentication);
+        urn, entity, aspect, startTimeMillis, endTimeMillis, limit, filter, null, authentication);
   }
 
-  public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity,
-      @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit,
-      @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull Authentication authentication)
+  public List<EnvelopedAspect> getTimeseriesAspectValues(
+      @Nonnull String urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis,
+      @Nullable Integer limit,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sort,
+      @Nonnull Authentication authentication)
      throws RemoteInvocationException;
 
   @Deprecated
-  default String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
+  default String ingestProposal(
+      @Nonnull final MetadataChangeProposal metadataChangeProposal,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
    return ingestProposal(metadataChangeProposal, authentication, false);
  }
 
-  String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal,
-      @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException;
+  String ingestProposal(
+      @Nonnull final MetadataChangeProposal metadataChangeProposal,
+      @Nonnull final Authentication authentication,
+      final boolean async)
+      throws RemoteInvocationException;
 
   @Deprecated
-  default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataChangeProposal,
+  default String wrappedIngestProposal(
+      @Nonnull MetadataChangeProposal metadataChangeProposal,
       @Nonnull final Authentication authentication) {
     return wrappedIngestProposal(metadataChangeProposal, authentication, false);
   }
 
-  default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataChangeProposal,
-      @Nonnull final Authentication authentication, final boolean async) {
+  default String wrappedIngestProposal(
+      @Nonnull MetadataChangeProposal metadataChangeProposal,
+      @Nonnull final Authentication authentication,
+      final boolean async) {
     try {
       return ingestProposal(metadataChangeProposal, authentication, async);
     } catch (RemoteInvocationException e) {
@@ -439,13 +557,18 @@ default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataCha
   }
 
   @Deprecated
-  default List<String> batchIngestProposals(@Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
+  default List<String> batchIngestProposals(
+      @Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
     return batchIngestProposals(metadataChangeProposals, authentication, false);
   }
 
-  default List<String> batchIngestProposals(@Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals,
-      @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException {
+  default List<String> batchIngestProposals(
+      @Nonnull final Collection<MetadataChangeProposal> metadataChangeProposals,
+      @Nonnull final Authentication authentication,
+      final boolean async)
+      throws RemoteInvocationException {
     return metadataChangeProposals.stream()
         .map(proposal -> wrappedIngestProposal(proposal, authentication, async))
         .collect(Collectors.toList());
@@ -453,16 +576,29 @@ default List<String> batchIngestProposals(@Nonnull final Collection<MetadataChan
 
   @Nonnull
   @Deprecated
-  public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String urn, @Nonnull String aspect,
-      @Nonnull Long version, @Nonnull Class<T> aspectClass, @Nonnull Authentication authentication)
+  public <T extends RecordTemplate> Optional<T> getVersionedAspect(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull Class<T> aspectClass,
+      @Nonnull Authentication authentication)
       throws RemoteInvocationException;
 
   @Deprecated
-  public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version,
-      @Nonnull Authentication authentication) throws RemoteInvocationException;
+  public DataMap getRawAspect(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
 
-  public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event,
-      @Nonnull Authentication authentication) throws Exception;
+  public void producePlatformEvent(
+      @Nonnull String name,
+      @Nullable String key,
+      @Nonnull PlatformEvent event,
+      @Nonnull Authentication authentication)
+      throws Exception;
 
-  public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) throws Exception;
+  public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication)
+      throws Exception;
 }
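For context while reviewing: the EntityClient.java hunks above only reflow signatures and javadoc; no behavior changes and no call-site changes. A minimal caller-side sketch of the first search() overload, assuming the DataHub package paths and a pre-built client and credentials (the helper name, entity type, and query string are illustrative, not part of this patch):

    import com.datahub.authentication.Authentication;
    import com.linkedin.entity.client.EntityClient;
    import com.linkedin.metadata.query.SearchFlags;
    import com.linkedin.metadata.search.SearchResult;
    import com.linkedin.r2.RemoteInvocationException;
    import java.util.Collections;

    class SearchCallSketch {
      // Hypothetical helper; 'client' would be any EntityClient implementation,
      // e.g. RestliEntityClient, and 'auth' an already-obtained Authentication.
      static SearchResult firstPage(EntityClient client, Authentication auth)
          throws RemoteInvocationException {
        return client.search(
            "dataset",              // entity type to search (assumed example)
            "fct_users_created",    // free-text query (assumed example)
            Collections.emptyMap(), // no request filters
            0,                      // start offset
            10,                     // page size
            auth,
            new SearchFlags().setFulltext(true)); // the flags parameter is nullable
      }
    }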
diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java
index 8e103cff283ea..453eecab7b446 100644
--- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java
+++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java
@@ -1,5 +1,7 @@
 package com.linkedin.entity.client;
 
+import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
+
 import com.github.benmanes.caffeine.cache.LoadingCache;
 import com.github.benmanes.caffeine.cache.Weigher;
 import com.linkedin.common.client.ClientCache;
@@ -9,11 +11,6 @@ import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig;
 import com.linkedin.util.Pair;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NonNull;
-
-import javax.annotation.Nonnull;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Optional;
@@ -22,116 +19,144 @@ import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
-
-import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
+import javax.annotation.Nonnull;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NonNull;
 
 @Builder
 public class EntityClientCache {
-  @NonNull
-  private EntityClientCacheConfig config;
-  @NonNull
-  private final ClientCache<Key, EnvelopedAspect, EntityClientCacheConfig> cache;
-  @NonNull
-  private BiFunction<Set<Urn>, Set<String>, Map<Urn, EntityResponse>> loadFunction;
-
-  public EntityResponse getV2(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) {
-    return batchGetV2(Set.of(urn), aspectNames).get(urn);
-  }
-
-  public Map<Urn, EntityResponse> batchGetV2(@Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) {
-    final Map<Urn, EntityResponse> response;
-
-    if (config.isEnabled()) {
-      Set<Key> keys = urns.stream()
-          .flatMap(urn -> aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build()))
-          .collect(Collectors.toSet());
-      Map<Key, EnvelopedAspect> envelopedAspects = cache.getAll(keys);
-
-      Set<EntityResponse> responses = envelopedAspects.entrySet().stream()
-          .map(entry -> Pair.of(entry.getKey().getUrn(), entry.getValue()))
-          .collect(Collectors.groupingBy(Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toSet())))
-          .entrySet().stream().map(e -> toEntityResponse(e.getKey(), e.getValue()))
-          .collect(Collectors.toSet());
-
-      response = responses.stream().collect(Collectors.toMap(EntityResponse::getUrn, Function.identity()));
-    } else {
-      response = loadFunction.apply(urns, aspectNames);
-    }
-
-    return response;
+  @NonNull private EntityClientCacheConfig config;
+  @NonNull private final ClientCache<Key, EnvelopedAspect, EntityClientCacheConfig> cache;
+  @NonNull private BiFunction<Set<Urn>, Set<String>, Map<Urn, EntityResponse>> loadFunction;
+
+  public EntityResponse getV2(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames) {
+    return batchGetV2(Set.of(urn), aspectNames).get(urn);
+  }
+
+  public Map<Urn, EntityResponse> batchGetV2(
+      @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) {
+    final Map<Urn, EntityResponse> response;
+
+    if (config.isEnabled()) {
+      Set<Key> keys =
+          urns.stream()
+              .flatMap(
+                  urn ->
+                      aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build()))
+              .collect(Collectors.toSet());
+      Map<Key, EnvelopedAspect> envelopedAspects = cache.getAll(keys);
+
+      Set<EntityResponse> responses =
+          envelopedAspects.entrySet().stream()
+              .map(entry -> Pair.of(entry.getKey().getUrn(), entry.getValue()))
+              .collect(
+                  Collectors.groupingBy(
+                      Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toSet())))
+              .entrySet()
+              .stream()
+              .map(e -> toEntityResponse(e.getKey(), e.getValue()))
+              .collect(Collectors.toSet());
+
+      response =
+          responses.stream().collect(Collectors.toMap(EntityResponse::getUrn, Function.identity()));
+    } else {
+      response = loadFunction.apply(urns, aspectNames);
     }
 
-  private static EntityResponse toEntityResponse(Urn urn, Collection<EnvelopedAspect> envelopedAspects) {
-    final EntityResponse response = new EntityResponse();
-    response.setUrn(urn);
-    response.setEntityName(urnToEntityName(urn));
-    response.setAspects(new EnvelopedAspectMap(
-        envelopedAspects.stream()
-            .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect))
-    ));
-    return response;
+    return response;
+  }
+
+  private static EntityResponse toEntityResponse(
+      Urn urn, Collection<EnvelopedAspect> envelopedAspects) {
+    final EntityResponse response = new EntityResponse();
+    response.setUrn(urn);
+    response.setEntityName(urnToEntityName(urn));
+    response.setAspects(
+        new EnvelopedAspectMap(
+            envelopedAspects.stream()
+                .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect))));
+    return response;
+  }
+
+  public static class EntityClientCacheBuilder {
+
+    private EntityClientCacheBuilder cache(LoadingCache<Key, EnvelopedAspect> cache) {
+      return this;
     }
 
-  public static class EntityClientCacheBuilder {
-
-    private EntityClientCacheBuilder cache(LoadingCache<Key, EnvelopedAspect> cache) {
-      return this;
-    }
-
-    public EntityClientCache build(Class<?> metricClazz) {
-      // estimate size
-      Weigher<Key, EnvelopedAspect> weighByEstimatedSize = (key, value) ->
-          value.getValue().data().toString().getBytes().length;
-
-      // batch loads data from entity client (restli or java)
-      Function<Iterable<? extends Key>, Map<Key, EnvelopedAspect>> loader = (Iterable<? extends Key> keys) -> {
-        Map<String, Set<Key>> keysByEntity = StreamSupport.stream(keys.spliterator(), true)
-            .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet()));
-
-        Map<Key, EnvelopedAspect> results = keysByEntity.entrySet().stream()
-            .flatMap(entry -> {
-              Set<Urn> urns = entry.getValue().stream()
-                  .map(Key::getUrn)
-                  .collect(Collectors.toSet());
-              Set<String> aspects = entry.getValue().stream()
-                  .map(Key::getAspectName)
-                  .collect(Collectors.toSet());
-              return loadFunction.apply(urns, aspects).entrySet().stream();
+    public EntityClientCache build(Class<?> metricClazz) {
+      // estimate size
+      Weigher<Key, EnvelopedAspect> weighByEstimatedSize =
+          (key, value) -> value.getValue().data().toString().getBytes().length;
+
+      // batch loads data from entity client (restli or java)
+      Function<Iterable<? extends Key>, Map<Key, EnvelopedAspect>> loader =
+          (Iterable<? extends Key> keys) -> {
+            Map<String, Set<Key>> keysByEntity =
+                StreamSupport.stream(keys.spliterator(), true)
+                    .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet()));
+
+            Map<Key, EnvelopedAspect> results =
+                keysByEntity.entrySet().stream()
+                    .flatMap(
+                        entry -> {
+                          Set<Urn> urns =
+                              entry.getValue().stream()
+                                  .map(Key::getUrn)
+                                  .collect(Collectors.toSet());
+                          Set<String> aspects =
+                              entry.getValue().stream()
+                                  .map(Key::getAspectName)
+                                  .collect(Collectors.toSet());
+                          return loadFunction.apply(urns, aspects).entrySet().stream();
            })
-            .flatMap(resp -> resp.getValue().getAspects().values().stream()
-                .map(envAspect -> {
-                  Key key = Key.builder().urn(resp.getKey()).aspectName(envAspect.getName()).build();
-                  return Map.entry(key, envAspect);
-                })).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-
-        return results;
-      };
-
-      // ideally the cache time comes from caching headers from service, but configuration driven for now
-      BiFunction<EntityClientCacheConfig, Key, Integer> ttlSeconds = (config, key) ->
-          Optional.ofNullable(config.getEntityAspectTTLSeconds()).orElse(Map.of())
-              .getOrDefault(key.getEntityName(), Map.of())
-              .getOrDefault(key.getAspectName(), config.getDefaultTTLSeconds());
-
-      cache = ClientCache.<Key, EnvelopedAspect, EntityClientCacheConfig>builder()
-          .weigher(weighByEstimatedSize)
-          .config(config)
-          .loadFunction(loader)
-          .ttlSecondsFunction(ttlSeconds)
-          .build(metricClazz);
-
-      return new EntityClientCache(config, cache, loadFunction);
-    }
+                    .flatMap(
+                        resp ->
+                            resp.getValue().getAspects().values().stream()
+                                .map(
+                                    envAspect -> {
+                                      Key key =
+                                          Key.builder()
+                                              .urn(resp.getKey())
+                                              .aspectName(envAspect.getName())
+                                              .build();
+                                      return Map.entry(key, envAspect);
+                                    }))
+                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+
+            return results;
+          };
+
+      // ideally the cache time comes from caching headers from service, but configuration driven
+      // for now
+      BiFunction<EntityClientCacheConfig, Key, Integer> ttlSeconds =
+          (config, key) ->
+              Optional.ofNullable(config.getEntityAspectTTLSeconds())
+                  .orElse(Map.of())
+                  .getOrDefault(key.getEntityName(), Map.of())
+                  .getOrDefault(key.getAspectName(), config.getDefaultTTLSeconds());
+
+      cache =
+          ClientCache.<Key, EnvelopedAspect, EntityClientCacheConfig>builder()
+              .weigher(weighByEstimatedSize)
+              .config(config)
+              .loadFunction(loader)
+              .ttlSecondsFunction(ttlSeconds)
+              .build(metricClazz);
+
+      return new EntityClientCache(config, cache, loadFunction);
    }
+  }
 
-  @Data
-  @Builder
-  protected static class Key {
-    private final Urn urn;
-    private final String aspectName;
+  @Data
+  @Builder
+  protected static class Key {
+    private final Urn urn;
+    private final String aspectName;
 
-    public String getEntityName() {
-      return urn.getEntityType();
-    }
+    public String getEntityName() {
+      return urn.getEntityType();
    }
+  }
 }
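The TTL resolution reflowed in the builder above is behavior-preserving: a per-entity, per-aspect override map falls back to the configured default. A self-contained sketch of just that lookup on plain JDK types (the class name, DEFAULT_TTL_SECONDS value, and example entity/aspect names are illustrative assumptions, not taken from the patch):

    import java.util.Map;
    import java.util.Optional;

    class TtlLookupSketch {
      static final int DEFAULT_TTL_SECONDS = 600; // assumed default, for illustration only

      // Mirrors the EntityClientCacheConfig shape: entityName -> (aspectName -> ttlSeconds).
      static int ttlFor(
          Map<String, Map<String, Integer>> overrides, String entityName, String aspectName) {
        return Optional.ofNullable(overrides)
            .orElse(Map.of())
            .getOrDefault(entityName, Map.of())
            .getOrDefault(aspectName, DEFAULT_TTL_SECONDS);
      }

      public static void main(String[] args) {
        Map<String, Map<String, Integer>> overrides =
            Map.of("corpuser", Map.of("corpUserInfo", 20));
        System.out.println(ttlFor(overrides, "corpuser", "corpUserInfo")); // 20: override hit
        System.out.println(ttlFor(overrides, "dataset", "status")); // 600: falls back to default
      }
    }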
diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java
index 2716e27518fcc..c854cb9dd279e 100644
--- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java
+++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java
@@ -91,74 +91,95 @@ import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.NotImplementedException;
-
 @Slf4j
 public class RestliEntityClient extends BaseClient implements EntityClient {
-  private static final EntitiesRequestBuilders ENTITIES_REQUEST_BUILDERS = new EntitiesRequestBuilders();
-  private static final EntitiesV2RequestBuilders ENTITIES_V2_REQUEST_BUILDERS = new EntitiesV2RequestBuilders();
+  private static final EntitiesRequestBuilders ENTITIES_REQUEST_BUILDERS =
+      new EntitiesRequestBuilders();
+  private static final EntitiesV2RequestBuilders ENTITIES_V2_REQUEST_BUILDERS =
+      new EntitiesV2RequestBuilders();
   private static final EntitiesVersionedV2RequestBuilders ENTITIES_VERSIONED_V2_REQUEST_BUILDERS =
       new EntitiesVersionedV2RequestBuilders();
-  private static final AspectsRequestBuilders ASPECTS_REQUEST_BUILDERS = new AspectsRequestBuilders();
-  private static final PlatformRequestBuilders PLATFORM_REQUEST_BUILDERS = new PlatformRequestBuilders();
+  private static final AspectsRequestBuilders ASPECTS_REQUEST_BUILDERS =
+      new AspectsRequestBuilders();
+  private static final PlatformRequestBuilders PLATFORM_REQUEST_BUILDERS =
+      new PlatformRequestBuilders();
   private static final RunsRequestBuilders RUNS_REQUEST_BUILDERS = new RunsRequestBuilders();
 
-  public RestliEntityClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount) {
+  public RestliEntityClient(
+      @Nonnull final Client restliClient,
+      @Nonnull final BackoffPolicy backoffPolicy,
+      int retryCount) {
     super(restliClient, backoffPolicy, retryCount);
   }
 
   @Nullable
-  public EntityResponse getV2(@Nonnull String entityName, @Nonnull final Urn urn,
-      @Nullable final Set<String> aspectNames, @Nonnull final Authentication authentication)
+  public EntityResponse getV2(
+      @Nonnull String entityName,
+      @Nonnull final Urn urn,
+      @Nullable final Set<String> aspectNames,
+      @Nonnull final Authentication authentication)
       throws RemoteInvocationException, URISyntaxException {
-    final EntitiesV2GetRequestBuilder requestBuilder = ENTITIES_V2_REQUEST_BUILDERS.get()
-        .aspectsParam(aspectNames)
-        .id(urn.toString());
+    final EntitiesV2GetRequestBuilder requestBuilder =
+        ENTITIES_V2_REQUEST_BUILDERS.get().aspectsParam(aspectNames).id(urn.toString());
 
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
   @Nonnull
   public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
-    return sendClientRequest(ENTITIES_REQUEST_BUILDERS.get().id(urn.toString()), authentication).getEntity();
+    return sendClientRequest(ENTITIES_REQUEST_BUILDERS.get().id(urn.toString()), authentication)
+        .getEntity();
   }
 
   /**
    * Legacy! Use {#batchGetV2} instead, as this method leverages Snapshot models, and will not work
    * for fetching entities + aspects added by Entity Registry configuration.
    *
-   * Batch get a set of {@link Entity} objects by urn.
+   * <p>Batch get a set of {@link Entity} objects by urn.
    *
    * @param urns the urns of the entities to batch get
    * @param authentication the authentication to include in the request to the Metadata Service
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication)
+  public Map<Urn, Entity> batchGet(
+      @Nonnull final Set<Urn> urns, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
     final Integer batchSize = 25;
     final AtomicInteger index = new AtomicInteger(0);
 
     final Collection<List<Urn>> entityUrnBatches =
-        urns.stream().collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)).values();
+        urns.stream()
+            .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize))
+            .values();
 
     final Map<Urn, Entity> response = new HashMap<>();
 
     for (List<Urn> urnsInBatch : entityUrnBatches) {
       EntitiesBatchGetRequestBuilder batchGetRequestBuilder =
-          ENTITIES_REQUEST_BUILDERS.batchGet().ids(urnsInBatch.stream().map(Urn::toString).collect(Collectors.toSet()));
-      final Map<Urn, Entity> batchResponse = sendClientRequest(batchGetRequestBuilder, authentication).getEntity()
-          .getResults()
-          .entrySet()
-          .stream()
-          .collect(Collectors.toMap(entry -> {
-            try {
-              return Urn.createFromString(entry.getKey());
-            } catch (URISyntaxException e) {
-              throw new RuntimeException(String.format("Failed to create Urn from key string %s", entry.getKey()));
-            }
-          }, entry -> entry.getValue().getEntity()));
+          ENTITIES_REQUEST_BUILDERS
+              .batchGet()
+              .ids(urnsInBatch.stream().map(Urn::toString).collect(Collectors.toSet()));
+      final Map<Urn, Entity> batchResponse =
+          sendClientRequest(batchGetRequestBuilder, authentication)
+              .getEntity()
+              .getResults()
+              .entrySet()
+              .stream()
+              .collect(
+                  Collectors.toMap(
+                      entry -> {
+                        try {
+                          return Urn.createFromString(entry.getKey());
+                        } catch (URISyntaxException e) {
+                          throw new RuntimeException(
+                              String.format(
+                                  "Failed to create Urn from key string %s", entry.getKey()));
+                        }
+                      },
+                      entry -> entry.getValue().getEntity()));
       response.putAll(batchResponse);
     }
     return response;
@@ -174,25 +195,36 @@ public Map<Urn, Entity> batchGet(@Nonnull final Set<Urn> urns, @Nonnull final Au
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public Map<Urn, EntityResponse> batchGetV2(@Nonnull String entityName, @Nonnull final Set<Urn> urns,
-      @Nullable final Set<String> aspectNames, @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException {
+  public Map<Urn, EntityResponse> batchGetV2(
+      @Nonnull String entityName,
+      @Nonnull final Set<Urn> urns,
+      @Nullable final Set<String> aspectNames,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException, URISyntaxException {
 
-    final EntitiesV2BatchGetRequestBuilder requestBuilder = ENTITIES_V2_REQUEST_BUILDERS.batchGet()
-        .aspectsParam(aspectNames)
-        .ids(urns.stream().map(Urn::toString).collect(Collectors.toList()));
+    final EntitiesV2BatchGetRequestBuilder requestBuilder =
+        ENTITIES_V2_REQUEST_BUILDERS
+            .batchGet()
+            .aspectsParam(aspectNames)
+            .ids(urns.stream().map(Urn::toString).collect(Collectors.toList()));
 
-    return sendClientRequest(requestBuilder, authentication).getEntity()
+    return sendClientRequest(requestBuilder, authentication)
+        .getEntity()
         .getResults()
        .entrySet()
        .stream()
-        .collect(Collectors.toMap(entry -> {
-          try {
-            return Urn.createFromString(entry.getKey());
-          } catch (URISyntaxException e) {
-            throw new RuntimeException(
-                String.format("Failed to bind urn string with value %s into urn", entry.getKey()));
-          }
-        }, entry -> entry.getValue().getEntity()));
+        .collect(
+            Collectors.toMap(
+                entry -> {
+                  try {
+                    return Urn.createFromString(entry.getKey());
+                  } catch (URISyntaxException e) {
+                    throw new RuntimeException(
+                        String.format(
+                            "Failed to bind urn string with value %s into urn", entry.getKey()));
+                  }
+                },
+                entry -> entry.getValue().getEntity()));
   }
 
   /**
@@ -209,21 +241,31 @@ public Map<Urn, EntityResponse> batchGetVersionedV2(
       @Nonnull String entityName,
       @Nonnull final Set<VersionedUrn> versionedUrns,
       @Nullable final Set<String> aspectNames,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException {
-
-    final EntitiesVersionedV2BatchGetRequestBuilder requestBuilder = ENTITIES_VERSIONED_V2_REQUEST_BUILDERS.batchGet()
-        .aspectsParam(aspectNames)
-        .entityTypeParam(entityName)
-        .ids(versionedUrns.stream()
-            .map(versionedUrn -> com.linkedin.common.urn.VersionedUrn.of(versionedUrn.getUrn().toString(), versionedUrn.getVersionStamp()))
-            .collect(Collectors.toSet()));
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException, URISyntaxException {
 
-    return sendClientRequest(requestBuilder, authentication).getEntity()
+    final EntitiesVersionedV2BatchGetRequestBuilder requestBuilder =
+        ENTITIES_VERSIONED_V2_REQUEST_BUILDERS
+            .batchGet()
+            .aspectsParam(aspectNames)
+            .entityTypeParam(entityName)
+            .ids(
+                versionedUrns.stream()
+                    .map(
+                        versionedUrn ->
+                            com.linkedin.common.urn.VersionedUrn.of(
+                                versionedUrn.getUrn().toString(), versionedUrn.getVersionStamp()))
+                    .collect(Collectors.toSet()));
+
+    return sendClientRequest(requestBuilder, authentication)
+        .getEntity()
        .getResults()
        .entrySet()
        .stream()
-        .collect(Collectors.toMap(entry ->
-            UrnUtils.getUrn(entry.getKey().getUrn()), entry -> entry.getValue().getEntity()));
+        .collect(
+            Collectors.toMap(
+                entry -> UrnUtils.getUrn(entry.getKey().getUrn()),
+                entry -> entry.getValue().getEntity()));
   }
 
   /**
@@ -238,15 +280,22 @@ public Map<Urn, EntityResponse> batchGetVersionedV2(
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query,
-      @Nullable Filter requestFilters, @Nonnull int limit, @Nullable String field,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
-    EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionAutocomplete()
-        .entityParam(entityType)
-        .queryParam(query)
-        .fieldParam(field)
-        .filterParam(filterOrDefaultEmptyFilter(requestFilters))
-        .limitParam(limit);
+  public AutoCompleteResult autoComplete(
+      @Nonnull String entityType,
+      @Nonnull String query,
+      @Nullable Filter requestFilters,
+      @Nonnull int limit,
+      @Nullable String field,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
+    EntitiesDoAutocompleteRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS
+            .actionAutocomplete()
+            .entityParam(entityType)
+            .queryParam(query)
+            .fieldParam(field)
+            .filterParam(filterOrDefaultEmptyFilter(requestFilters))
+            .limitParam(limit);
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
@@ -260,14 +309,20 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query,
-      @Nullable Filter requestFilters, @Nonnull int limit, @Nonnull final Authentication authentication)
+  public AutoCompleteResult autoComplete(
+      @Nonnull String entityType,
+      @Nonnull String query,
+      @Nullable Filter requestFilters,
+      @Nonnull int limit,
+      @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
-    EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionAutocomplete()
-        .entityParam(entityType)
-        .queryParam(query)
-        .filterParam(filterOrDefaultEmptyFilter(requestFilters))
-        .limitParam(limit);
+    EntitiesDoAutocompleteRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS
+            .actionAutocomplete()
+            .entityParam(entityType)
+            .queryParam(query)
+            .filterParam(filterOrDefaultEmptyFilter(requestFilters))
+            .limitParam(limit);
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
@@ -282,14 +337,21 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public BrowseResult browse(@Nonnull String entityType, @Nonnull String path,
-      @Nullable Map<String, String> requestFilters, int start, int limit, @Nonnull final Authentication authentication)
+  public BrowseResult browse(
+      @Nonnull String entityType,
+      @Nonnull String path,
+      @Nullable Map<String, String> requestFilters,
+      int start,
+      int limit,
+      @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
-    EntitiesDoBrowseRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionBrowse()
-        .pathParam(path)
-        .entityParam(entityType)
-        .startParam(start)
-        .limitParam(limit);
+    EntitiesDoBrowseRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS
+            .actionBrowse()
+            .pathParam(path)
+            .entityParam(entityType)
+            .startParam(start)
+            .limitParam(limit);
     if (requestFilters != null) {
       requestBuilder.filterParam(newFilter(requestFilters));
     }
@@ -308,31 +370,45 @@ public BrowseResult browse(@Nonnull String entityType, @Nonnull String path,
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter,
-      @Nonnull String input, int start, int count, @Nonnull Authentication authentication) {
+  public BrowseResultV2 browseV2(
+      @Nonnull String entityName,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count,
+      @Nonnull Authentication authentication) {
     throw new NotImplementedException("BrowseV2 is not implemented in Restli yet");
   }
 
   public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
-    EntitiesDoIngestRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity);
+    EntitiesDoIngestRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity);
 
     sendClientRequest(requestBuilder, authentication);
   }
 
-  public void updateWithSystemMetadata(@Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
+  public void updateWithSystemMetadata(
+      @Nonnull final Entity entity,
+      @Nullable final SystemMetadata systemMetadata,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
     if (systemMetadata == null) {
       update(entity, authentication);
       return;
     }
 
     EntitiesDoIngestRequestBuilder requestBuilder =
-        ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity).systemMetadataParam(systemMetadata);
+        ENTITIES_REQUEST_BUILDERS
+            .actionIngest()
+            .entityParam(entity)
+            .systemMetadataParam(systemMetadata);
 
     sendClientRequest(requestBuilder, authentication);
   }
 
-  public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication)
+  public void batchUpdate(
+      @Nonnull final Set<Entity> entities, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
     EntitiesDoBatchIngestRequestBuilder requestBuilder =
         ENTITIES_REQUEST_BUILDERS.actionBatchIngest().entitiesParam(new EntityArray(entities));
@@ -353,18 +429,25 @@ public void batchUpdate(@Nonnull final Set<Entity> entities, @Nonnull final Auth
    */
   @Nonnull
   @Override
-  public SearchResult search(@Nonnull String entity, @Nonnull String input,
-      @Nullable Map<String, String> requestFilters, int start, int count, @Nonnull final Authentication authentication,
+  public SearchResult search(
+      @Nonnull String entity,
+      @Nonnull String input,
+      @Nullable Map<String, String> requestFilters,
+      int start,
+      int count,
+      @Nonnull final Authentication authentication,
       @Nullable SearchFlags searchFlags)
       throws RemoteInvocationException {
 
-    final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionSearch()
-        .entityParam(entity)
-        .inputParam(input)
-        .filterParam(newFilter(requestFilters))
-        .startParam(start)
-        .fulltextParam(searchFlags != null ? searchFlags.isFulltext() : null)
-        .countParam(count);
+    final EntitiesDoSearchRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS
+            .actionSearch()
+            .entityParam(entity)
+            .inputParam(input)
+            .filterParam(newFilter(requestFilters))
+            .startParam(start)
+            .fulltextParam(searchFlags != null ? searchFlags.isFulltext() : null)
+            .countParam(count);
     if (searchFlags != null) {
       requestBuilder.searchFlagsParam(searchFlags);
     }
@@ -382,13 +465,20 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input,
    * @throws RemoteInvocationException
    */
   @Nonnull
-  public ListResult list(@Nonnull String entity, @Nullable Map<String, String> requestFilters, int start, int count,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
-    final EntitiesDoListRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionList()
-        .entityParam(entity)
-        .filterParam(newFilter(requestFilters))
-        .startParam(start)
-        .countParam(count);
+  public ListResult list(
+      @Nonnull String entity,
+      @Nullable Map<String, String> requestFilters,
+      int start,
+      int count,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
+    final EntitiesDoListRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS
+            .actionList()
+            .entityParam(entity)
+            .filterParam(newFilter(requestFilters))
+            .startParam(start)
+            .countParam(count);
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
@@ -406,16 +496,24 @@ public ListResult list(@Nonnull String entity, @Nullable Map<String, String> req
    */
   @Nonnull
   @Override
-  public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Filter filter,
-      SortCriterion sortCriterion, int start, int count, @Nonnull final Authentication authentication,
+  public SearchResult search(
+      @Nonnull String entity,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nonnull final Authentication authentication,
       @Nullable SearchFlags searchFlags)
       throws RemoteInvocationException {
-    final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionSearch()
-        .entityParam(entity)
-        .inputParam(input)
-        .startParam(start)
-        .countParam(count);
+    final EntitiesDoSearchRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS
+            .actionSearch()
+            .entityParam(entity)
+            .inputParam(input)
+            .startParam(start)
+            .countParam(count);
 
     if (filter != null) {
       requestBuilder.filterParam(filter);
@@ -434,11 +532,18 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nulla
   }
 
   @Nonnull
-  public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags,
-      @Nullable SortCriterion sortCriterion, @Nonnull final Authentication authentication)
+  public SearchResult searchAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      int start,
+      int count,
+      @Nullable SearchFlags searchFlags,
+      @Nullable SortCriterion sortCriterion,
+      @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
-    return searchAcrossEntities(entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null);
+    return searchAcrossEntities(
+        entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null);
   }
 
   /**
@@ -454,13 +559,24 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul
    * @throws RemoteInvocationException
   */
   @Nonnull
-  public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags,
-      @Nullable SortCriterion sortCriterion, @Nonnull final Authentication authentication, @Nullable List<String> facets)
+  public SearchResult searchAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      int start,
+      int count,
+      @Nullable SearchFlags searchFlags,
+      @Nullable SortCriterion sortCriterion,
+      @Nonnull final Authentication authentication,
+      @Nullable List<String> facets)
       throws RemoteInvocationException {
 
     final EntitiesDoSearchAcrossEntitiesRequestBuilder requestBuilder =
-        ENTITIES_REQUEST_BUILDERS.actionSearchAcrossEntities().inputParam(input).startParam(start).countParam(count);
+        ENTITIES_REQUEST_BUILDERS
+            .actionSearchAcrossEntities()
+            .inputParam(input)
+            .startParam(start)
+            .countParam(count);
 
     if (entities != null) {
       requestBuilder.entitiesParam(new StringArray(entities));
@@ -481,9 +597,15 @@ public SearchResult searchAcrossEntities(@Nonnull List<String> entities, @Nonnul
 
   @Nonnull
   @Override
-  public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnull String input,
-      @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count,
-      @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication)
+  public ScrollResult scrollAcrossEntities(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter filter,
+      @Nullable String scrollId,
+      @Nullable String keepAlive,
+      int count,
+      @Nullable SearchFlags searchFlags,
+      @Nonnull Authentication authentication)
       throws RemoteInvocationException {
     final EntitiesDoScrollAcrossEntitiesRequestBuilder requestBuilder =
         ENTITIES_REQUEST_BUILDERS.actionScrollAcrossEntities().inputParam(input).countParam(count);
@@ -509,14 +631,23 @@ public ScrollResult scrollAcrossEntities(@Nonnull List<String> entities, @Nonnul
 
   @Nonnull
   @Override
-  public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter,
-      @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags,
+  public LineageSearchResult searchAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nullable SearchFlags searchFlags,
       @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
 
     final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder =
-        ENTITIES_REQUEST_BUILDERS.actionSearchAcrossLineage()
+        ENTITIES_REQUEST_BUILDERS
+            .actionSearchAcrossLineage()
            .urnParam(sourceUrn.toString())
            .directionParam(direction.name())
            .inputParam(input)
@@ -538,15 +669,25 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull
 
   @Nonnull
   @Override
-  public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter,
-      @Nullable SortCriterion sortCriterion, int start, int count, @Nullable final Long startTimeMillis,
-      @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags,
+  public LineageSearchResult searchAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nullable final Long startTimeMillis,
+      @Nullable final Long endTimeMillis,
+      @Nullable SearchFlags searchFlags,
       @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
 
     final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder =
-        ENTITIES_REQUEST_BUILDERS.actionSearchAcrossLineage()
+        ENTITIES_REQUEST_BUILDERS
+            .actionSearchAcrossLineage()
            .urnParam(sourceUrn.toString())
            .directionParam(direction.name())
            .inputParam(input)
@@ -572,16 +713,26 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
-  @Override
-  public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction,
-      @Nonnull List<String> entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter,
-      @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count,
-      @Nullable final Long startTimeMillis, @Nullable final Long endTimeMillis, @Nullable final SearchFlags searchFlags,
+  public LineageScrollResult scrollAcrossLineage(
+      @Nonnull Urn sourceUrn,
+      @Nonnull LineageDirection direction,
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Integer maxHops,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      @Nullable String scrollId,
+      @Nonnull String keepAlive,
+      int count,
+      @Nullable final Long startTimeMillis,
+      @Nullable final Long endTimeMillis,
+      @Nullable final SearchFlags searchFlags,
       @Nonnull final Authentication authentication)
      throws RemoteInvocationException {
 
     final EntitiesDoScrollAcrossLineageRequestBuilder requestBuilder =
-        ENTITIES_REQUEST_BUILDERS.actionScrollAcrossLineage()
+        ENTITIES_REQUEST_BUILDERS
+            .actionScrollAcrossLineage()
            .urnParam(sourceUrn.toString())
            .directionParam(direction.name())
            .inputParam(input)
@@ -633,51 +784,66 @@ public void setWritable(boolean canWrite, @Nonnull final Authentication authenti
   }
 
   @Nonnull
-  public Map<String, Long> batchGetTotalEntityCount(@Nonnull List<String> entityName,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
+  public Map<String, Long> batchGetTotalEntityCount(
+      @Nonnull List<String> entityName, @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
     EntitiesDoBatchGetTotalEntityCountRequestBuilder requestBuilder =
-        ENTITIES_REQUEST_BUILDERS.actionBatchGetTotalEntityCount().entitiesParam(new StringArray(entityName));
+        ENTITIES_REQUEST_BUILDERS
+            .actionBatchGetTotalEntityCount()
+            .entitiesParam(new StringArray(entityName));
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
-  /**
-   * List all urns existing for a particular Entity type.
-   */
-  public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
+  /** List all urns existing for a particular Entity type. */
+  public ListUrnsResult listUrns(
+      @Nonnull final String entityName,
+      final int start,
+      final int count,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
     EntitiesDoListUrnsRequestBuilder requestBuilder =
-        ENTITIES_REQUEST_BUILDERS.actionListUrns().entityParam(entityName).startParam(start).countParam(count);
+        ENTITIES_REQUEST_BUILDERS
+            .actionListUrns()
+            .entityParam(entityName)
+            .startParam(start)
+            .countParam(count);
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
-  /**
-   * Hard delete an entity with a particular urn.
-   */
+  /** Hard delete an entity with a particular urn. */
   public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
-    EntitiesDoDeleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionDelete().urnParam(urn.toString());
+    EntitiesDoDeleteRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS.actionDelete().urnParam(urn.toString());
     sendClientRequest(requestBuilder, authentication);
   }
 
-  /**
-   * Delete all references to a particular entity.
-   */
+  /** Delete all references to a particular entity. */
   @Override
   public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication)
       throws RemoteInvocationException {
-    EntitiesDoDeleteReferencesRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionDeleteReferences().urnParam(urn.toString());
+    EntitiesDoDeleteReferencesRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS.actionDeleteReferences().urnParam(urn.toString());
     sendClientRequest(requestBuilder, authentication);
   }
 
   @Nonnull
   @Override
-  public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion,
-      int start, int count, @Nonnull final Authentication authentication) throws RemoteInvocationException {
-    EntitiesDoFilterRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionFilter()
-        .entityParam(entity)
-        .filterParam(filter)
-        .startParam(start)
-        .countParam(count);
+  public SearchResult filter(
+      @Nonnull String entity,
+      @Nonnull Filter filter,
+      @Nullable SortCriterion sortCriterion,
+      int start,
+      int count,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
+    EntitiesDoFilterRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS
+            .actionFilter()
+            .entityParam(entity)
+            .filterParam(filter)
+            .startParam(start)
+            .countParam(count);
     if (sortCriterion != null) {
       requestBuilder.sortParam(sortCriterion);
     }
@@ -686,9 +852,10 @@ public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Null
 
   @Nonnull
   @Override
-  public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException {
-    EntitiesDoExistsRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionExists()
-        .urnParam(urn.toString());
+  public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
+    EntitiesDoExistsRequestBuilder requestBuilder =
+        ENTITIES_REQUEST_BUILDERS.actionExists().urnParam(urn.toString());
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
@@ -700,8 +867,12 @@ public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentica
    * @throws RemoteInvocationException on remote request error.
   */
   @Nonnull
-  public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
+  public VersionedAspect getAspect(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
 
     AspectsGetRequestBuilder requestBuilder =
         ASPECTS_REQUEST_BUILDERS.get().id(urn).aspectParam(aspect).versionParam(version);
@@ -717,8 +888,12 @@ public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @N
    * @throws RemoteInvocationException on remote request error.
   */
   @Nullable
-  public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version,
-      @Nonnull final Authentication authentication) throws RemoteInvocationException {
+  public VersionedAspect getAspectOrNull(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull final Authentication authentication)
+      throws RemoteInvocationException {
 
     AspectsGetRequestBuilder requestBuilder =
         ASPECTS_REQUEST_BUILDERS.get().id(urn).aspectParam(aspect).versionParam(version);
@@ -747,13 +922,21 @@ public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspe
    * @throws RemoteInvocationException on remote request error.
   */
   @Nonnull
-  public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity,
-      @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit,
-      @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull final Authentication authentication)
+  public List<EnvelopedAspect> getTimeseriesAspectValues(
+      @Nonnull String urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis,
+      @Nullable Integer limit,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sort,
+      @Nonnull final Authentication authentication)
      throws RemoteInvocationException {
 
     AspectsDoGetTimeseriesAspectValuesRequestBuilder requestBuilder =
-        ASPECTS_REQUEST_BUILDERS.actionGetTimeseriesAspectValues()
+        ASPECTS_REQUEST_BUILDERS
+            .actionGetTimeseriesAspectValues()
            .urnParam(urn)
            .entityParam(entity)
            .aspectParam(aspect);
@@ -783,19 +966,29 @@ public List<EnvelopedAspect> getTimeseriesAspectValues(@Nonnull String urn, @Non
 
   /**
    * Ingest a MetadataChangeProposal event.
+   *
    * @return
   */
   @Override
-  public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal,
-      @Nonnull final Authentication authentication,
-      final boolean async) throws RemoteInvocationException {
+  public String ingestProposal(
+      @Nonnull final MetadataChangeProposal metadataChangeProposal,
+      @Nonnull final Authentication authentication,
+      final boolean async)
+      throws RemoteInvocationException {
     final AspectsDoIngestProposalRequestBuilder requestBuilder =
-        ASPECTS_REQUEST_BUILDERS.actionIngestProposal().proposalParam(metadataChangeProposal).asyncParam(String.valueOf(async));
+        ASPECTS_REQUEST_BUILDERS
+            .actionIngestProposal()
+            .proposalParam(metadataChangeProposal)
+            .asyncParam(String.valueOf(async));
     return sendClientRequest(requestBuilder, authentication).getEntity();
   }
 
-  public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String urn, @Nonnull String aspect,
-      @Nonnull Long version, @Nonnull Class<T> aspectClass, @Nonnull final Authentication authentication)
+  public <T extends RecordTemplate> Optional<T> getVersionedAspect(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull Class<T> aspectClass,
+      @Nonnull final Authentication authentication)
      throws RemoteInvocationException {
 
     AspectsGetRequestBuilder requestBuilder =
@@ -825,18 +1018,24 @@ public <T extends RecordTemplate> Optional<T> getVersionedAspect(@Nonnull String
 
   @SneakyThrows
   @Override
-  public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version,
-      @Nonnull Authentication authentication) throws RemoteInvocationException {
+  public DataMap getRawAspect(
+      @Nonnull String urn,
+      @Nonnull String aspect,
+      @Nonnull Long version,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException {
     throw new MethodNotSupportedException();
   }
 
   @Override
-  public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, @Nonnull final Authentication authentication)
+  public void producePlatformEvent(
+      @Nonnull String name,
+      @Nullable String key,
+      @Nonnull PlatformEvent event,
+      @Nonnull final Authentication authentication)
       throws Exception {
     final PlatformDoProducePlatformEventRequestBuilder requestBuilder =
-        PLATFORM_REQUEST_BUILDERS.actionProducePlatformEvent()
-            .nameParam(name)
-            .eventParam(event);
+        PLATFORM_REQUEST_BUILDERS.actionProducePlatformEvent().nameParam(name).eventParam(event);
     if (key != null) {
       requestBuilder.keyParam(key);
     }
@@ -846,28 +1045,34 @@ public void producePlatformEvent(@Nonnull String name, @Nullable String key, @No
   @Override
   public void rollbackIngestion(@Nonnull String runId, @Nonnull final Authentication authentication)
       throws Exception {
-    final RunsDoRollbackRequestBuilder requestBuilder = RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false);
+    final RunsDoRollbackRequestBuilder requestBuilder =
+        RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false);
     sendClientRequest(requestBuilder, authentication);
   }
 
-  // TODO: Refactor QueryUtils inside of metadata-io to extract these methods into a single shared library location.
-  // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL condition (default).
+  // TODO: Refactor QueryUtils inside of metadata-io to extract these methods into a single shared
+  // library location.
+  // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL
  // condition (default).
   @Nonnull
   public static Filter newFilter(@Nullable Map<String, String> params) {
     if (params == null) {
       return new Filter().setOr(new ConjunctiveCriterionArray());
     }
-    CriterionArray criteria = params.entrySet()
-        .stream()
-        .filter(e -> Objects.nonNull(e.getValue()))
-        .map(e -> newCriterion(e.getKey(), e.getValue(), Condition.EQUAL))
-        .collect(Collectors.toCollection(CriterionArray::new));
-    return new Filter().setOr(
-        new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria))));
+    CriterionArray criteria =
+        params.entrySet().stream()
+            .filter(e -> Objects.nonNull(e.getValue()))
+            .map(e -> newCriterion(e.getKey(), e.getValue(), Condition.EQUAL))
+            .collect(Collectors.toCollection(CriterionArray::new));
+    return new Filter()
+        .setOr(
+            new ConjunctiveCriterionArray(
+                ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria))));
   }
 
   @Nonnull
-  public static Criterion newCriterion(@Nonnull String field, @Nonnull String value, @Nonnull Condition condition) {
+  public static Criterion newCriterion(
+      @Nonnull String field, @Nonnull String value, @Nonnull Condition condition) {
     return new Criterion().setField(field).setValue(value).setCondition(condition);
   }
IllegalArgumentException("Urns must be of the same entity type. RestliEntityClient API limitation."); - } + if (urns.stream().anyMatch(urn -> !urn.getEntityType().equals(entityName))) { + throw new IllegalArgumentException( + "Urns must be of the same entity type. RestliEntityClient API limitation."); + } - return batchGetV2(entityName, urns, aspectNames, systemAuthentication); - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException(e); - } - }).build(metricClazz); - } + return batchGetV2(entityName, urns, aspectNames, systemAuthentication); + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .build(metricClazz); + } - /** - * Get an entity by urn with the given aspects - * @param urn the id of the entity - * @param aspectNames aspects of the entity - * @return response object - * @throws RemoteInvocationException - * @throws URISyntaxException - */ - @Nullable - default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set<String> aspectNames) - throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().getV2(urn, aspectNames); - } + /** + * Get an entity by urn with the given aspects + * + * @param urn the id of the entity + * @param aspectNames aspects of the entity + * @return response object + * @throws RemoteInvocationException + * @throws URISyntaxException + */ + @Nullable + default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set<String> aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getEntityClientCache().getV2(urn, aspectNames); + } - /** - * Batch get a set of aspects for a single entity type, multiple ids with the given aspects. - * - * @param urns the urns of the entities to batch get - * @param aspectNames the aspect names to batch get - * @throws RemoteInvocationException - */ - @Nonnull - default Map<Urn, EntityResponse> batchGetV2(@Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) - throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().batchGetV2(urns, aspectNames); - } + /** + * Batch get a set of aspects for a single entity type, multiple ids with the given aspects. 
+ * + * @param urns the urns of the entities to batch get + * @param aspectNames the aspect names to batch get + * @throws RemoteInvocationException + */ + @Nonnull + default Map<Urn, EntityResponse> batchGetV2( + @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getEntityClientCache().batchGetV2(urns, aspectNames); + } - default void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) throws Exception { - producePlatformEvent(name, key, event, getSystemAuthentication()); - } + default void producePlatformEvent( + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) throws Exception { + producePlatformEvent(name, key, event, getSystemAuthentication()); + } - default boolean exists(@Nonnull Urn urn) throws RemoteInvocationException { - return exists(urn, getSystemAuthentication()); - } + default boolean exists(@Nonnull Urn urn) throws RemoteInvocationException { + return exists(urn, getSystemAuthentication()); + } - default String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, final boolean async) throws RemoteInvocationException { - return ingestProposal(metadataChangeProposal, getSystemAuthentication(), async); - } + default String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, final boolean async) + throws RemoteInvocationException { + return ingestProposal(metadataChangeProposal, getSystemAuthentication(), async); + } - default void setWritable(boolean canWrite) throws RemoteInvocationException { - setWritable(canWrite, getSystemAuthentication()); - } + default void setWritable(boolean canWrite) throws RemoteInvocationException { + setWritable(canWrite, getSystemAuthentication()); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java index f3c343534209c..a2f5596af9f4e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java @@ -4,22 +4,24 @@ import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.restli.client.Client; -import lombok.Getter; - import javax.annotation.Nonnull; +import lombok.Getter; -/** - * Restli backed SystemEntityClient - */ +/** Restli backed SystemEntityClient */ @Getter public class SystemRestliEntityClient extends RestliEntityClient implements SystemEntityClient { - private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final EntityClientCache entityClientCache; + private final Authentication systemAuthentication; - public SystemRestliEntityClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { - super(restliClient, backoffPolicy, retryCount); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = buildEntityClientCache(SystemRestliEntityClient.class, systemAuthentication, cacheConfig); - } + public SystemRestliEntityClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount, + 
Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + super(restliClient, backoffPolicy, retryCount); + this.systemAuthentication = systemAuthentication; + this.entityClientCache = + buildEntityClientCache(SystemRestliEntityClient.class, systemAuthentication, cacheConfig); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java index 850847bfd262a..747e1e0e1a288 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java @@ -2,62 +2,66 @@ import com.datahub.authentication.Authentication; import com.linkedin.common.EntityRelationships; - import com.linkedin.common.WindowDuration; import com.linkedin.common.client.BaseClient; import com.linkedin.metadata.config.cache.client.UsageClientCacheConfig; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.restli.client.Client; - import java.net.URISyntaxException; import javax.annotation.Nonnull; - public class UsageClient extends BaseClient { - private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = - new UsageStatsRequestBuilders(); - - private final UsageClientCache usageClientCache; - - public UsageClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, UsageClientCacheConfig cacheConfig) { - super(restliClient, backoffPolicy, retryCount); - this.usageClientCache = UsageClientCache.builder() - .config(cacheConfig) - .loadFunction((String resource, UsageTimeRange range) -> { - try { - return getUsageStats(resource, range, systemAuthentication); - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException(e); - } - }).build(); - } - - /** - * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. - * Using cache and system authentication. - * Validate permissions before use! - */ - @Nonnull - public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { - return usageClientCache.getUsageStats(resource, range); - } - - /** - * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. 
- */ - @Nonnull - private UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range, - @Nonnull Authentication authentication) - throws RemoteInvocationException, URISyntaxException { - - final UsageStatsDoQueryRangeRequestBuilder requestBuilder = USAGE_STATS_REQUEST_BUILDERS - .actionQueryRange() - .resourceParam(resource) - .durationParam(WindowDuration.DAY) - .rangeFromEndParam(range); - return sendClientRequest(requestBuilder, authentication).getEntity(); - } + private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = + new UsageStatsRequestBuilders(); + + private final UsageClientCache usageClientCache; + + public UsageClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount, + Authentication systemAuthentication, + UsageClientCacheConfig cacheConfig) { + super(restliClient, backoffPolicy, retryCount); + this.usageClientCache = + UsageClientCache.builder() + .config(cacheConfig) + .loadFunction( + (String resource, UsageTimeRange range) -> { + try { + return getUsageStats(resource, range, systemAuthentication); + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .build(); + } + + /** + * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. Using + * cache and system authentication. Validate permissions before use! + */ + @Nonnull + public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { + return usageClientCache.getUsageStats(resource, range); + } + + /** Gets a specific version of downstream {@link EntityRelationships} for the given dataset. */ + @Nonnull + private UsageQueryResult getUsageStats( + @Nonnull String resource, + @Nonnull UsageTimeRange range, + @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + + final UsageStatsDoQueryRangeRequestBuilder requestBuilder = + USAGE_STATS_REQUEST_BUILDERS + .actionQueryRange() + .resourceParam(resource) + .durationParam(WindowDuration.DAY) + .rangeFromEndParam(range); + return sendClientRequest(requestBuilder, authentication).getEntity(); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java index 10a1ebb6dcccb..e4c7ed0b674c0 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java @@ -4,70 +4,68 @@ import com.github.benmanes.caffeine.cache.Weigher; import com.linkedin.common.client.ClientCache; import com.linkedin.metadata.config.cache.client.UsageClientCacheConfig; -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - -import javax.annotation.Nonnull; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; @Builder public class UsageClientCache { - @NonNull - private UsageClientCacheConfig config; - @NonNull - private final ClientCache<Key, UsageQueryResult, UsageClientCacheConfig> cache; - @NonNull - private BiFunction<String, UsageTimeRange, UsageQueryResult> loadFunction; + @NonNull private UsageClientCacheConfig config; + @NonNull private final 
ClientCache<Key, UsageQueryResult, UsageClientCacheConfig> cache; + @NonNull private BiFunction<String, UsageTimeRange, UsageQueryResult> loadFunction; - public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { - if (config.isEnabled()) { - return cache.get(Key.builder().resource(resource).range(range).build()); - } else { - return loadFunction.apply(resource, range); - } + public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { + if (config.isEnabled()) { + return cache.get(Key.builder().resource(resource).range(range).build()); + } else { + return loadFunction.apply(resource, range); } + } - public static class UsageClientCacheBuilder { + public static class UsageClientCacheBuilder { - private UsageClientCacheBuilder cache(LoadingCache<Key, UsageQueryResult> cache) { - return this; - } + private UsageClientCacheBuilder cache(LoadingCache<Key, UsageQueryResult> cache) { + return this; + } - public UsageClientCache build() { - // estimate size - Weigher<Key, UsageQueryResult> weighByEstimatedSize = (key, value) -> - value.data().toString().getBytes().length; + public UsageClientCache build() { + // estimate size + Weigher<Key, UsageQueryResult> weighByEstimatedSize = + (key, value) -> value.data().toString().getBytes().length; - // batch loads data from usage client - Function<Iterable<? extends Key>, Map<Key, UsageQueryResult>> loader = (Iterable<? extends Key> keys) -> - StreamSupport.stream(keys.spliterator(), true) - .map(k -> Map.entry(k, loadFunction.apply(k.getResource(), k.getRange()))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + // batch loads data from usage client + Function<Iterable<? extends Key>, Map<Key, UsageQueryResult>> loader = + (Iterable<? 
extends Key> keys) -> + StreamSupport.stream(keys.spliterator(), true) + .map(k -> Map.entry(k, loadFunction.apply(k.getResource(), k.getRange()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - // default ttl only - BiFunction<UsageClientCacheConfig, Key, Integer> ttlSeconds = (config, key) -> config.getDefaultTTLSeconds(); + // default ttl only + BiFunction<UsageClientCacheConfig, Key, Integer> ttlSeconds = + (config, key) -> config.getDefaultTTLSeconds(); - cache = ClientCache.<Key, UsageQueryResult, UsageClientCacheConfig>builder() - .weigher(weighByEstimatedSize) - .config(config) - .loadFunction(loader) - .ttlSecondsFunction(ttlSeconds) - .build(UsageClientCache.class); + cache = + ClientCache.<Key, UsageQueryResult, UsageClientCacheConfig>builder() + .weigher(weighByEstimatedSize) + .config(config) + .loadFunction(loader) + .ttlSecondsFunction(ttlSeconds) + .build(UsageClientCache.class); - return new UsageClientCache(config, cache, loadFunction); - } + return new UsageClientCache(config, cache, loadFunction); } + } - @Data - @Builder - protected static class Key { - private final String resource; - private final UsageTimeRange range; - } + @Data + @Builder + protected static class Key { + private final String resource; + private final UsageTimeRange range; + } } diff --git a/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java b/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java index c4109f1daedb3..1f8342170a2ff 100644 --- a/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java +++ b/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java @@ -1,5 +1,12 @@ package com.linkedin.common.client; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,62 +22,59 @@ import com.linkedin.restli.client.ResponseFuture; import org.testng.annotations.Test; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertThrows; - public class BaseClientTest { - final static Authentication AUTH = new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); - - @Test - public void testZeroRetry() throws RemoteInvocationException { - MetadataChangeProposal mcp = new MetadataChangeProposal(); + static final Authentication AUTH = + new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); - ResponseFuture<String> mockFuture = mock(ResponseFuture.class); - when(mockRestliClient.sendRequest(any(ActionRequest.class))).thenReturn(mockFuture); + @Test + public void testZeroRetry() throws RemoteInvocationException { + MetadataChangeProposal mcp = new MetadataChangeProposal(); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 0); - 
testClient.sendClientRequest(testRequestBuilder, AUTH); - // Expected 1 actual try and 0 retries - verify(mockRestliClient).sendRequest(any(ActionRequest.class)); - } + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); + ResponseFuture<String> mockFuture = mock(ResponseFuture.class); + when(mockRestliClient.sendRequest(any(ActionRequest.class))).thenReturn(mockFuture); - @Test - public void testMultipleRetries() throws RemoteInvocationException { - MetadataChangeProposal mcp = new MetadataChangeProposal(); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); - ResponseFuture<String> mockFuture = mock(ResponseFuture.class); + RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 0); + testClient.sendClientRequest(testRequestBuilder, AUTH); + // Expected 1 actual try and 0 retries + verify(mockRestliClient).sendRequest(any(ActionRequest.class)); + } - when(mockRestliClient.sendRequest(any(ActionRequest.class))) - .thenThrow(new RuntimeException()) - .thenReturn(mockFuture); + @Test + public void testMultipleRetries() throws RemoteInvocationException { + MetadataChangeProposal mcp = new MetadataChangeProposal(); + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); + ResponseFuture<String> mockFuture = mock(ResponseFuture.class); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); - testClient.sendClientRequest(testRequestBuilder, AUTH); - // Expected 1 actual try and 1 retries - verify(mockRestliClient, times(2)).sendRequest(any(ActionRequest.class)); - } + when(mockRestliClient.sendRequest(any(ActionRequest.class))) + .thenThrow(new RuntimeException()) + .thenReturn(mockFuture); - @Test - public void testNonRetry() { - MetadataChangeProposal mcp = new MetadataChangeProposal(); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); + RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); + testClient.sendClientRequest(testRequestBuilder, AUTH); + // Expected 1 actual try and 1 retries + verify(mockRestliClient, times(2)).sendRequest(any(ActionRequest.class)); + } - when(mockRestliClient.sendRequest(any(ActionRequest.class))) - .thenThrow(new RuntimeException(new RequiredFieldNotPresentException("value"))); + @Test + public void testNonRetry() { + MetadataChangeProposal mcp = new MetadataChangeProposal(); + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); - assertThrows(RuntimeException.class, () -> testClient.sendClientRequest(testRequestBuilder, AUTH)); - } + when(mockRestliClient.sendRequest(any(ActionRequest.class))) + .thenThrow(new RuntimeException(new RequiredFieldNotPresentException("value"))); 
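+    // RequiredFieldNotPresentException is treated as a non-retriable cause here, so the request
+    // is expected to fail immediately rather than consume the retry budget.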
+ RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); + assertThrows( + RuntimeException.class, () -> testClient.sendClientRequest(testRequestBuilder, AUTH)); + } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java index 98ecf6142ef2c..edd8270e87210 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java @@ -20,11 +20,9 @@ public CompletableFuture<Void> onRequest(final FilterRequestContext requestConte return CompletableFuture.completedFuture(null); } - @Override public CompletableFuture<Void> onResponse( - final FilterRequestContext requestContext, - final FilterResponseContext responseContext) { + final FilterRequestContext requestContext, final FilterResponseContext responseContext) { logResponse(requestContext, responseContext); return CompletableFuture.completedFuture(null); } @@ -40,8 +38,7 @@ public CompletableFuture<Void> onError( } private void logResponse( - final FilterRequestContext requestContext, - final FilterResponseContext responseContext) { + final FilterRequestContext requestContext, final FilterResponseContext responseContext) { long startTime = (long) requestContext.getFilterScratchpad().get(START_TIME); long endTime = System.currentTimeMillis(); long duration = endTime - startTime; @@ -54,5 +51,4 @@ private void logResponse( log.info("{} {} - {} - {} - {}ms", httpMethod, uri, method, status.getCode(), duration); } - } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java index 1f19094a74654..b8cbf1ceb6794 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java @@ -22,10 +22,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; - -/** - * Rest.li entry point: /analytics - */ +/** Rest.li entry point: /analytics */ @Slf4j @RestLiSimpleResource(name = "analytics", namespace = "com.linkedin.analytics") public class Analytics extends SimpleResourceTemplate<GetTimeseriesAggregatedStatsResponse> { @@ -35,6 +32,7 @@ public class Analytics extends SimpleResourceTemplate<GetTimeseriesAggregatedSta private static final String PARAM_FILTER = "filter"; private static final String PARAM_METRICS = "metrics"; private static final String PARAM_BUCKETS = "buckets"; + @Inject @Named("timeseriesAspectService") private TimeseriesAspectService _timeseriesAspectService; @@ -47,24 +45,25 @@ public Task<GetTimeseriesAggregatedStatsResponse> getTimeseriesStats( @ActionParam(PARAM_METRICS) @Nonnull AggregationSpec[] aggregationSpecs, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_BUCKETS) @Optional @Nullable GroupingBucket[] groupingBuckets) { - return RestliUtils.toTask(() -> { - log.info("Attempting to query timeseries stats"); - GetTimeseriesAggregatedStatsResponse resp = new GetTimeseriesAggregatedStatsResponse(); - resp.setEntityName(entityName); - resp.setAspectName(aspectName); - 
resp.setAggregationSpecs(new AggregationSpecArray(Arrays.asList(aggregationSpecs))); - if (filter != null) { - resp.setFilter(filter); - } - if (groupingBuckets != null) { - resp.setGroupingBuckets(new GroupingBucketArray(Arrays.asList(groupingBuckets))); - } + return RestliUtils.toTask( + () -> { + log.info("Attempting to query timeseries stats"); + GetTimeseriesAggregatedStatsResponse resp = new GetTimeseriesAggregatedStatsResponse(); + resp.setEntityName(entityName); + resp.setAspectName(aspectName); + resp.setAggregationSpecs(new AggregationSpecArray(Arrays.asList(aggregationSpecs))); + if (filter != null) { + resp.setFilter(filter); + } + if (groupingBuckets != null) { + resp.setGroupingBuckets(new GroupingBucketArray(Arrays.asList(groupingBuckets))); + } - GenericTable aggregatedStatsTable = - _timeseriesAspectService.getAggregatedStats(entityName, aspectName, aggregationSpecs, filter, - groupingBuckets); - resp.setTable(aggregatedStatsTable); - return resp; - }); + GenericTable aggregatedStatsTable = + _timeseriesAspectService.getAggregatedStats( + entityName, aspectName, aggregationSpecs, filter, groupingBuckets); + resp.setTable(aggregatedStatsTable); + return resp; + }); } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java index af76af90ce77f..f14dc2e8b2918 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.operations.OperationsResource.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -8,10 +13,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.linkedin.aspect.GetTimeseriesAspectValuesResponse; -import com.linkedin.metadata.entity.IngestResult; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.transactions.AspectsBatch; -import com.linkedin.metadata.resources.operations.Utils; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.aspect.EnvelopedAspectArray; @@ -19,9 +20,13 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.IngestResult; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.entity.validation.ValidationException; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.resources.operations.Utils; import com.linkedin.metadata.restli.RestliUtil; import com.linkedin.metadata.search.EntitySearchService; import 
com.linkedin.metadata.timeseries.TimeseriesAspectService; @@ -52,15 +57,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.operations.OperationsResource.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "aspects", namespace = "com.linkedin.entity") public class AspectResource extends CollectionResourceTaskTemplate<String, VersionedAspect> { @@ -114,66 +111,101 @@ void setAuthorizer(Authorizer authorizer) { @RestMethod.Get @Nonnull @WithSpan - public Task<AnyRecord> get(@Nonnull String urnStr, @QueryParam("aspect") @Optional @Nullable String aspectName, - @QueryParam("version") @Optional @Nullable Long version) throws URISyntaxException { + public Task<AnyRecord> get( + @Nonnull String urnStr, + @QueryParam("aspect") @Optional @Nullable String aspectName, + @QueryParam("version") @Optional @Nullable Long version) + throws URISyntaxException { log.info("GET ASPECT urn: {} aspect: {} version: {}", urnStr, aspectName, version); final Urn urn = Urn.createFromString(urnStr); - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect for " + urn); - } - final VersionedAspect aspect = _entityService.getVersionedAspect(urn, aspectName, version); - if (aspect == null) { - throw RestliUtil.resourceNotFoundException(String.format("Did not find urn: %s aspect: %s version: %s", urn, aspectName, version)); - } - return new AnyRecord(aspect.data()); - }, MetricRegistry.name(this.getClass(), "get")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect for " + urn); + } + final VersionedAspect aspect = + _entityService.getVersionedAspect(urn, aspectName, version); + if (aspect == null) { + throw RestliUtil.resourceNotFoundException( + String.format( + "Did not find urn: %s aspect: %s version: %s", urn, aspectName, version)); + } + return new AnyRecord(aspect.data()); + }, + MetricRegistry.name(this.getClass(), "get")); } @Action(name = ACTION_GET_TIMESERIES_ASPECT) @Nonnull @WithSpan public Task<GetTimeseriesAspectValuesResponse> getTimeseriesAspectValues( - @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_URN) @Nonnull String urnStr, + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_ASPECT) @Nonnull String aspectName, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long 
startTimeMillis, @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, @ActionParam(PARAM_LIMIT) @Optional("10000") int limit, - @ActionParam(PARAM_LATEST_VALUE) @Optional("false") boolean latestValue, // This field is deprecated. + @ActionParam(PARAM_LATEST_VALUE) @Optional("false") + boolean latestValue, // This field is deprecated. @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sort) throws URISyntaxException { + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sort) + throws URISyntaxException { log.info( "Get Timeseries Aspect values for aspect {} for entity {} with startTimeMillis {}, endTimeMillis {} and limit {}.", - aspectName, entityName, startTimeMillis, endTimeMillis, limit); + aspectName, + entityName, + startTimeMillis, + endTimeMillis, + limit); final Urn urn = Urn.createFromString(urnStr); - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_TIMESERIES_ASPECT_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get timeseries aspect for " + urn); - } - GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); - response.setEntityName(entityName); - response.setAspectName(aspectName); - if (startTimeMillis != null) { - response.setStartTimeMillis(startTimeMillis); - } - if (endTimeMillis != null) { - response.setEndTimeMillis(endTimeMillis); - } - if (latestValue) { - response.setLimit(1); - } else { - response.setLimit(limit); - } - response.setValues(new EnvelopedAspectArray( - _timeseriesAspectService.getAspectValues(urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, sort))); - return response; - }, MetricRegistry.name(this.getClass(), "getTimeseriesAspectValues")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_TIMESERIES_ASPECT_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, + "User is unauthorized to get timeseries aspect for " + urn); + } + GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); + response.setEntityName(entityName); + response.setAspectName(aspectName); + if (startTimeMillis != null) { + response.setStartTimeMillis(startTimeMillis); + } + if (endTimeMillis != null) { + response.setEndTimeMillis(endTimeMillis); + } + if (latestValue) { + response.setLimit(1); + } else { + response.setLimit(limit); + } + response.setValues( + new EnvelopedAspectArray( + _timeseriesAspectService.getAspectValues( + urn, + entityName, + aspectName, + startTimeMillis, + endTimeMillis, + limit, + filter, + sort))); + return response; + }, + MetricRegistry.name(this.getClass(), "getTimeseriesAspectValues")); } @Action(name = ACTION_INGEST_PROPOSAL) @@ -181,7 +213,8 @@ public Task<GetTimeseriesAspectValuesResponse> getTimeseriesAspectValues( @WithSpan public Task<String> ingestProposal( @ActionParam(PARAM_PROPOSAL) @Nonnull 
MetadataChangeProposal metadataChangeProposal, - @ActionParam(PARAM_ASYNC) @Optional(UNSET) String async) throws URISyntaxException { + @ActionParam(PARAM_ASYNC) @Optional(UNSET) String async) + throws URISyntaxException { log.info("INGEST PROPOSAL proposal: {}", metadataChangeProposal); final boolean asyncBool; @@ -192,85 +225,111 @@ public Task<String> ingestProposal( } Authentication authentication = AuthenticationContext.getAuthentication(); - com.linkedin.metadata.models.EntitySpec entitySpec = _entityService.getEntityRegistry().getEntitySpec(metadataChangeProposal.getEntityType()); - Urn urn = EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); + com.linkedin.metadata.models.EntitySpec entitySpec = + _entityService.getEntityRegistry().getEntitySpec(metadataChangeProposal.getEntityType()); + Urn urn = + EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to modify entity " + urn); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to modify entity " + urn); } String actorUrnStr = authentication.getActor().toUrnStr(); - final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); - return RestliUtil.toTask(() -> { - log.debug("Proposal: {}", metadataChangeProposal); - try { - final AspectsBatch batch; - if (asyncBool) { - // if async we'll expand the getAdditionalChanges later, no need to do this early - batch = AspectsBatchImpl.builder() - .mcps(List.of(metadataChangeProposal), _entityService.getEntityRegistry()) - .build(); - } else { - Stream<MetadataChangeProposal> proposalStream = Stream.concat(Stream.of(metadataChangeProposal), - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService).stream()); + return RestliUtil.toTask( + () -> { + log.debug("Proposal: {}", metadataChangeProposal); + try { + final AspectsBatch batch; + if (asyncBool) { + // if async we'll expand the getAdditionalChanges later, no need to do this early + batch = + AspectsBatchImpl.builder() + .mcps(List.of(metadataChangeProposal), _entityService.getEntityRegistry()) + .build(); + } else { + Stream<MetadataChangeProposal> proposalStream = + Stream.concat( + Stream.of(metadataChangeProposal), + AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService) + .stream()); - batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(); - } + batch = + AspectsBatchImpl.builder() + .mcps( + proposalStream.collect(Collectors.toList()), + _entityService.getEntityRegistry()) + .build(); + } - Set<IngestResult> results = - _entityService.ingestProposal(batch, auditStamp, asyncBool); + Set<IngestResult> results = _entityService.ingestProposal(batch, auditStamp, asyncBool); - IngestResult one = results.stream() - .findFirst() - .get(); + IngestResult 
one = results.stream().findFirst().get(); - // Update runIds, only works for existing documents, so ES document must exist - Urn resultUrn = one.getUrn(); - if (one.isProcessedMCL() || one.isUpdate()) { - tryIndexRunId(resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService); - } - return resultUrn.toString(); - } catch (ValidationException e) { - throw new RestLiServiceException(HttpStatus.S_422_UNPROCESSABLE_ENTITY, e.getMessage()); - } - }, MetricRegistry.name(this.getClass(), "ingestProposal")); + // Update runIds, only works for existing documents, so ES document must exist + Urn resultUrn = one.getUrn(); + if (one.isProcessedMCL() || one.isUpdate()) { + tryIndexRunId( + resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService); + } + return resultUrn.toString(); + } catch (ValidationException e) { + throw new RestLiServiceException(HttpStatus.S_422_UNPROCESSABLE_ENTITY, e.getMessage()); + } + }, + MetricRegistry.name(this.getClass(), "ingestProposal")); } @Action(name = ACTION_GET_COUNT) @Nonnull @WithSpan - public Task<Integer> getCount(@ActionParam(PARAM_ASPECT) @Nonnull String aspectName, - @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike) { - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), - (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect counts."); - } - return _entityService.getCountAspect(aspectName, urnLike); - }, MetricRegistry.name(this.getClass(), "getCount")); + public Task<Integer> getCount( + @ActionParam(PARAM_ASPECT) @Nonnull String aspectName, + @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike) { + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect counts."); + } + return _entityService.getCountAspect(aspectName, urnLike); + }, + MetricRegistry.name(this.getClass(), "getCount")); } @Action(name = ACTION_RESTORE_INDICES) @Nonnull @WithSpan - public Task<String> restoreIndices(@ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, - @ActionParam(PARAM_URN) @Optional @Nullable String urn, - @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike, - @ActionParam("start") @Optional @Nullable Integer start, - @ActionParam("batchSize") @Optional @Nullable Integer batchSize - ) { - return RestliUtil.toTask(() -> { - return Utils.restoreIndices(aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); - }, MetricRegistry.name(this.getClass(), "restoreIndices")); + public Task<String> restoreIndices( + @ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, + @ActionParam(PARAM_URN) @Optional @Nullable String urn, + @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike, + @ActionParam("start") @Optional @Nullable Integer start, + @ActionParam("batchSize") @Optional @Nullable Integer batchSize) { + return RestliUtil.toTask( + () -> { + return Utils.restoreIndices( + 
aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); + }, + MetricRegistry.name(this.getClass(), "restoreIndices")); } - private static void tryIndexRunId(final Urn urn, final @Nullable SystemMetadata systemMetadata, - final EntitySearchService entitySearchService) { + private static void tryIndexRunId( + final Urn urn, + final @Nullable SystemMetadata systemMetadata, + final EntitySearchService entitySearchService) { if (systemMetadata != null && systemMetadata.hasRunId()) { entitySearchService.appendRunId(urn.getEntityType(), urn, systemMetadata.getRunId()); } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java index 9bab846d1bdcc..294ded8a1e255 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -53,16 +56,11 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * resource for showing information and rolling back runs - */ +/** resource for showing information and rolling back runs */ @Slf4j @RestLiCollection(name = "runs", namespace = "com.linkedin.entity") -public class BatchIngestionRunResource extends CollectionResourceTaskTemplate<String, VersionedAspect> { +public class BatchIngestionRunResource + extends CollectionResourceTaskTemplate<String, VersionedAspect> { private static final Integer DEFAULT_OFFSET = 0; private static final Integer DEFAULT_PAGE_SIZE = 100; @@ -91,173 +89,239 @@ public class BatchIngestionRunResource extends CollectionResourceTaskTemplate<St @Named("authorizerChain") private Authorizer _authorizer; - /** - * Rolls back an ingestion run - */ + /** Rolls back an ingestion run */ @Action(name = "rollback") @Nonnull @WithSpan - public Task<RollbackResponse> rollback(@ActionParam("runId") @Nonnull String runId, + public Task<RollbackResponse> rollback( + @ActionParam("runId") @Nonnull String runId, @ActionParam("dryRun") @Optional Boolean dryRun, @Deprecated @ActionParam("hardDelete") @Optional Boolean hardDelete, - @ActionParam("safe") @Optional Boolean safe) throws Exception { + @ActionParam("safe") @Optional Boolean safe) + throws Exception { log.info("ROLLBACK RUN runId: {} dry run: {}", runId, dryRun); - boolean doHardDelete = safe != null ? !safe : hardDelete != null ? hardDelete : DEFAULT_HARD_DELETE; + boolean doHardDelete = + safe != null ? !safe : hardDelete != null ? 
hardDelete : DEFAULT_HARD_DELETE; if (safe != null && hardDelete != null) { - log.warn("Both Safe & hardDelete flags were defined, honouring safe flag as hardDelete is deprecated"); + log.warn( + "Both Safe & hardDelete flags were defined, honouring safe flag as hardDelete is deprecated"); } try { - return RestliUtil.toTask(() -> { - if (runId.equals(DEFAULT_RUN_ID)) { - throw new IllegalArgumentException(String.format( - "%s is a default run-id provided for non labeled ingestion runs. You cannot delete using this reserved run-id", - runId)); - } - if (!dryRun) { - updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS); - } - - RollbackResponse response = new RollbackResponse(); - List<AspectRowSummary> aspectRowsToDelete; - aspectRowsToDelete = _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - Set<String> urns = aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet(); - List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream() - .map(UrnUtils::getUrn) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entities."); - } - log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - if (dryRun) { - - final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream().collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true); - - long entitiesDeleted = keyAspects.size(); - long aspectsReverted = aspectRowsToDelete.size(); - - final long affectedEntities = - aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - // If we are soft deleting, remove key aspects from count of aspects being deleted - if (!doHardDelete) { - aspectsReverted -= keyAspects.size(); - rowSummaries.removeIf(AspectRowSummary::isKeyAspect); - } - // Compute the aspects that exist referencing the key aspects we are deleting - final List<AspectRowSummary> affectedAspectsList = keyAspects.stream() - .map((AspectRowSummary urn) -> _systemMetadataService.findByUrn(urn.getUrn(), false, 0, - ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter(row -> !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName() - .equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final List<UnsafeEntityInfo> unsafeEntityInfos = - affectedAspectsList.stream().map(AspectRowSummary::getUrn).distinct().map(urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - // Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE).collect(Collectors.toList()); - - return 
response.setAspectsAffected(affectedAspects)
-          .setAspectsReverted(aspectsReverted)
-          .setEntitiesAffected(affectedEntities)
-          .setEntitiesDeleted(entitiesDeleted)
-          .setUnsafeEntitiesCount(unsafeEntitiesCount)
-          .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos))
-          .setAspectRowSummaries(rowSummaries);
-      }
-
-      RollbackRunResult rollbackRunResult = _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete);
-      final List<AspectRowSummary> deletedRows = rollbackRunResult.getRowsRolledBack();
-      int rowsDeletedFromEntityDeletion = rollbackRunResult.getRowsDeletedFromEntityDeletion();
-
-      // since elastic limits how many rows we can access at once, we need to iteratively delete
-      while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) {
-        sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC);
-        aspectRowsToDelete = _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE);
-        log.info("{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
-        log.info("deleting...");
-        rollbackRunResult = _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete);
-        deletedRows.addAll(rollbackRunResult.getRowsRolledBack());
-        rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion();
-      }
-
-      // Rollback timeseries aspects
-      DeleteAspectValuesResult timeseriesRollbackResult = _timeseriesAspectService.rollbackTimeseriesAspects(runId);
-      rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted();
-
-      log.info("finished deleting {} rows", deletedRows.size());
-      int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion;
-
-      final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects =
-          aspectRowsToDelete.stream().collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect));
-
-      final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true);
-
-      final long entitiesDeleted = keyAspects.size();
-      final long affectedEntities =
-          deletedRows.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size();
-
-      final AspectRowSummaryArray rowSummaries =
-          new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size())));
-
-      log.info("computing aspects affected by this rollback...");
-      // Compute the aspects that exist referencing the key aspects we are deleting
-      final List<AspectRowSummary> affectedAspectsList = keyAspects.stream()
-          .map((AspectRowSummary urn) -> _systemMetadataService.findByUrn(urn.getUrn(), false, 0,
-              ESUtils.MAX_RESULT_SIZE))
-          .flatMap(List::stream)
-          .filter(row -> !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName()
-              .equals(Constants.STATUS_ASPECT_NAME))
-          .collect(Collectors.toList());
-
-      long affectedAspects = affectedAspectsList.size();
-      long unsafeEntitiesCount =
-          affectedAspectsList.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size();
-
-      final List<UnsafeEntityInfo> unsafeEntityInfos =
-          affectedAspectsList.stream().map(AspectRowSummary::getUrn).distinct().map(urn -> {
-            UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo();
-            unsafeEntityInfo.setUrn(urn);
-            return unsafeEntityInfo;
-          })
-          // Return at most 1 million rows
-          .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE).collect(Collectors.toList());
-
-      log.info("calculation done.");
-
-      updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS);
-
-      return response.setAspectsAffected(affectedAspects)
-          .setAspectsReverted(aspectsReverted)
-          .setEntitiesAffected(affectedEntities)
-          .setEntitiesDeleted(entitiesDeleted)
-          .setUnsafeEntitiesCount(unsafeEntitiesCount)
-          .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos))
-          .setAspectRowSummaries(rowSummaries);
-    }, MetricRegistry.name(this.getClass(), "rollback"));
+    return RestliUtil.toTask(
+        () -> {
+          if (runId.equals(DEFAULT_RUN_ID)) {
+            throw new IllegalArgumentException(
+                String.format(
+                    "%s is a default run-id provided for non labeled ingestion runs. You cannot delete using this reserved run-id",
+                    runId));
+          }
+          if (!dryRun) {
+            updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS);
+          }
+
+          RollbackResponse response = new RollbackResponse();
+          List<AspectRowSummary> aspectRowsToDelete;
+          aspectRowsToDelete =
+              _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE);
+          Set<String> urns =
+              aspectRowsToDelete.stream()
+                  .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                  .keySet();
+          List<java.util.Optional<EntitySpec>> resourceSpecs =
+              urns.stream()
+                  .map(UrnUtils::getUrn)
+                  .map(
+                      urn ->
+                          java.util.Optional.of(
+                              new EntitySpec(urn.getEntityType(), urn.toString())))
+                  .collect(Collectors.toList());
+          Authentication auth = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  auth,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+                  resourceSpecs)) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities.");
+          }
+          log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
+          if (dryRun) {
+
+            final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects =
+                aspectRowsToDelete.stream()
+                    .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect));
+
+            final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true);
+
+            long entitiesDeleted = keyAspects.size();
+            long aspectsReverted = aspectRowsToDelete.size();
+
+            final long affectedEntities =
+                aspectRowsToDelete.stream()
+                    .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                    .keySet()
+                    .size();
+
+            final AspectRowSummaryArray rowSummaries =
+                new AspectRowSummaryArray(
+                    aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size())));
+
+            // If we are soft deleting, remove key aspects from count of aspects being deleted
+            if (!doHardDelete) {
+              aspectsReverted -= keyAspects.size();
+              rowSummaries.removeIf(AspectRowSummary::isKeyAspect);
+            }
+            // Compute the aspects that exist referencing the key aspects we are deleting
+            final List<AspectRowSummary> affectedAspectsList =
+                keyAspects.stream()
+                    .map(
+                        (AspectRowSummary urn) ->
+                            _systemMetadataService.findByUrn(
+                                urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE))
+                    .flatMap(List::stream)
+                    .filter(
+                        row ->
+                            !row.getRunId().equals(runId)
+                                && !row.isKeyAspect()
+                                && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME))
+                    .collect(Collectors.toList());
+
+            long affectedAspects = affectedAspectsList.size();
+            long unsafeEntitiesCount =
+                affectedAspectsList.stream()
+                    .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                    .keySet()
+                    .size();
+
+            final List<UnsafeEntityInfo> unsafeEntityInfos =
+                affectedAspectsList.stream()
+                    .map(AspectRowSummary::getUrn)
+                    .distinct()
+                    .map(
+                        urn -> {
+                          UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo();
+                          unsafeEntityInfo.setUrn(urn);
+                          return unsafeEntityInfo;
+                        })
+                    // Return at most 1 million rows
+                    .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE)
+                    .collect(Collectors.toList());
+
+            return response
+                .setAspectsAffected(affectedAspects)
+                .setAspectsReverted(aspectsReverted)
+                .setEntitiesAffected(affectedEntities)
+                .setEntitiesDeleted(entitiesDeleted)
+                .setUnsafeEntitiesCount(unsafeEntitiesCount)
+                .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos))
+                .setAspectRowSummaries(rowSummaries);
+          }
+
+          RollbackRunResult rollbackRunResult =
+              _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete);
+          final List<AspectRowSummary> deletedRows = rollbackRunResult.getRowsRolledBack();
+          int rowsDeletedFromEntityDeletion =
+              rollbackRunResult.getRowsDeletedFromEntityDeletion();
+
+          // since elastic limits how many rows we can access at once, we need to iteratively
+          // delete
+          while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) {
+            sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC);
+            aspectRowsToDelete =
+                _systemMetadataService.findByRunId(
+                    runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE);
+            log.info(
+                "{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
+            log.info("deleting...");
+            rollbackRunResult =
+                _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete);
+            deletedRows.addAll(rollbackRunResult.getRowsRolledBack());
+            rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion();
+          }
+
+          // Rollback timeseries aspects
+          DeleteAspectValuesResult timeseriesRollbackResult =
+              _timeseriesAspectService.rollbackTimeseriesAspects(runId);
+          rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted();
+
+          log.info("finished deleting {} rows", deletedRows.size());
+          int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion;
+
+          final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects =
+              aspectRowsToDelete.stream()
+                  .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect));
+
+          final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true);
+
+          final long entitiesDeleted = keyAspects.size();
+          final long affectedEntities =
+              deletedRows.stream()
+                  .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                  .keySet()
+                  .size();
+
+          final AspectRowSummaryArray rowSummaries =
+              new AspectRowSummaryArray(
+                  aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size())));
+
+          log.info("computing aspects affected by this rollback...");
+          // Compute the aspects that exist referencing the key aspects we are deleting
+          final List<AspectRowSummary> affectedAspectsList =
+              keyAspects.stream()
+                  .map(
+                      (AspectRowSummary urn) ->
+                          _systemMetadataService.findByUrn(
+                              urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE))
+                  .flatMap(List::stream)
+                  .filter(
+                      row ->
+                          !row.getRunId().equals(runId)
+                              && !row.isKeyAspect()
+                              && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME))
+                  .collect(Collectors.toList());
+
+          long affectedAspects = affectedAspectsList.size();
+          long unsafeEntitiesCount =
+              affectedAspectsList.stream()
+                  .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                  .keySet()
+                  .size();
+
+          final List<UnsafeEntityInfo> unsafeEntityInfos =
+              affectedAspectsList.stream()
+                  .map(AspectRowSummary::getUrn)
+                  .distinct()
+                  .map(
+                      urn -> {
+                        UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo();
+                        unsafeEntityInfo.setUrn(urn);
+                        return unsafeEntityInfo;
+                      })
+                  // Return at most 1 million rows
+                  .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE)
+                  .collect(Collectors.toList());
+
+          log.info("calculation done.");
+
+          updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS);
+
+          return response
+              .setAspectsAffected(affectedAspects)
+              .setAspectsReverted(aspectsReverted)
+              .setEntitiesAffected(affectedEntities)
+              .setEntitiesDeleted(entitiesDeleted)
+              .setUnsafeEntitiesCount(unsafeEntitiesCount)
+              .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos))
+              .setAspectRowSummaries(rowSummaries);
+        },
+        MetricRegistry.name(this.getClass(), "rollback"));
     } catch (Exception e) {
       updateExecutionRequestStatus(runId, ROLLBACK_FAILED_STATUS);
-      throw new RuntimeException(String.format("There was an issue rolling back ingestion run with runId %s", runId), e);
+      throw new RuntimeException(
+          String.format("There was an issue rolling back ingestion run with runId %s", runId), e);
     }
   }
@@ -279,9 +343,14 @@ private void sleep(Integer seconds) {
   private void updateExecutionRequestStatus(String runId, String status) {
     try {
-      final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME);
+      final Urn executionRequestUrn =
+          EntityKeyUtils.convertEntityKeyToUrn(
+              new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME);
       EnvelopedAspect aspect =
-          _entityService.getLatestEnvelopedAspect(executionRequestUrn.getEntityType(), executionRequestUrn, Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME);
+          _entityService.getLatestEnvelopedAspect(
+              executionRequestUrn.getEntityType(),
+              executionRequestUrn,
+              Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME);
       if (aspect == null) {
         log.warn("Aspect for execution request with runId {} not found", runId);
       } else {
@@ -294,65 +363,82 @@ private void updateExecutionRequestStatus(String runId, String status) {
         proposal.setAspect(GenericRecordUtils.serializeAspect(requestResult));
         proposal.setChangeType(ChangeType.UPSERT);
 
-        _entityService.ingestProposal(proposal,
-            new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), false);
+        _entityService.ingestProposal(
+            proposal,
+            new AuditStamp()
+                .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR))
+                .setTime(System.currentTimeMillis()),
+            false);
       }
     } catch (Exception e) {
-      log.error(String.format("Not able to update execution result aspect with runId %s and new status %s.", runId, status), e);
+      log.error(
+          String.format(
+              "Not able to update execution result aspect with runId %s and new status %s.",
+              runId, status),
+          e);
     }
   }
 
-  /**
-   * Retrieves the value for an entity that is made up of latest versions of specified aspects.
-   */
+  /** Retrieves the value for an entity that is made up of latest versions of specified aspects. */
   @Action(name = "list")
   @Nonnull
   @WithSpan
-  public Task<IngestionRunSummaryArray> list(@ActionParam("pageOffset") @Optional @Nullable Integer pageOffset,
+  public Task<IngestionRunSummaryArray> list(
+      @ActionParam("pageOffset") @Optional @Nullable Integer pageOffset,
       @ActionParam("pageSize") @Optional @Nullable Integer pageSize,
       @ActionParam("includeSoft") @Optional @Nullable Boolean includeSoft) {
     log.info("LIST RUNS offset: {} size: {}", pageOffset, pageSize);
 
-    return RestliUtil.toTask(() -> {
-      List<IngestionRunSummary> summaries =
-          _systemMetadataService.listRuns(pageOffset != null ? pageOffset : DEFAULT_OFFSET,
-              pageSize != null ? pageSize : DEFAULT_PAGE_SIZE,
-              includeSoft != null ? includeSoft : DEFAULT_INCLUDE_SOFT_DELETED);
-
-      return new IngestionRunSummaryArray(summaries);
-    }, MetricRegistry.name(this.getClass(), "list"));
+    return RestliUtil.toTask(
+        () -> {
+          List<IngestionRunSummary> summaries =
+              _systemMetadataService.listRuns(
+                  pageOffset != null ? pageOffset : DEFAULT_OFFSET,
+                  pageSize != null ? pageSize : DEFAULT_PAGE_SIZE,
+                  includeSoft != null ? includeSoft : DEFAULT_INCLUDE_SOFT_DELETED);
+
+          return new IngestionRunSummaryArray(summaries);
+        },
+        MetricRegistry.name(this.getClass(), "list"));
   }
 
   @Action(name = "describe")
   @Nonnull
   @WithSpan
-  public Task<AspectRowSummaryArray> describe(@ActionParam("runId") @Nonnull String runId,
-      @ActionParam("start") Integer start, @ActionParam("count") Integer count,
+  public Task<AspectRowSummaryArray> describe(
+      @ActionParam("runId") @Nonnull String runId,
+      @ActionParam("start") Integer start,
+      @ActionParam("count") Integer count,
       @ActionParam("includeSoft") @Optional @Nullable Boolean includeSoft,
       @ActionParam("includeAspect") @Optional @Nullable Boolean includeAspect) {
     log.info("DESCRIBE RUN runId: {}, start: {}, count: {}", runId, start, count);
 
-    return RestliUtil.toTask(() -> {
-      List<AspectRowSummary> summaries =
-          _systemMetadataService.findByRunId(runId, includeSoft != null && includeSoft, start, count);
-
-      if (includeAspect != null && includeAspect) {
-        summaries.forEach(summary -> {
-          Urn urn = UrnUtils.getUrn(summary.getUrn());
-          try {
-            EnvelopedAspect aspect =
-                _entityService.getLatestEnvelopedAspect(urn.getEntityType(), urn, summary.getAspectName());
-            if (aspect == null) {
-              log.error("Aspect for summary {} not found", summary);
-            } else {
-              summary.setAspect(aspect.getValue());
-            }
-          } catch (Exception e) {
            log.error("Error while fetching aspect for summary {}", summary, e);
+    return RestliUtil.toTask(
+        () -> {
+          List<AspectRowSummary> summaries =
+              _systemMetadataService.findByRunId(
+                  runId, includeSoft != null && includeSoft, start, count);
+
+          if (includeAspect != null && includeAspect) {
+            summaries.forEach(
+                summary -> {
+                  Urn urn = UrnUtils.getUrn(summary.getUrn());
+                  try {
+                    EnvelopedAspect aspect =
+                        _entityService.getLatestEnvelopedAspect(
+                            urn.getEntityType(), urn, summary.getAspectName());
+                    if (aspect == null) {
+                      log.error("Aspect for summary {} not found", summary);
+                    } else {
+                      summary.setAspect(aspect.getValue());
+                    }
+                  } catch (Exception e) {
                    log.error("Error while fetching aspect for summary {}", summary, e);
+                  }
+                });
          }
-        });
-      }
-      return new AspectRowSummaryArray(summaries);
-    }, MetricRegistry.name(this.getClass(), "describe"));
+          return new AspectRowSummaryArray(summaries);
+        },
+        MetricRegistry.name(this.getClass(), "describe"));
  }
}
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java
index 3ee98b3244718..ddf5efa5027ca 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java
@@ -1,5 +1,13 @@
 package com.linkedin.metadata.resources.entity;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.entity.validation.ValidationUtils.*;
+import static com.linkedin.metadata.resources.restli.RestliConstants.*;
+import static com.linkedin.metadata.resources.restli.RestliUtils.*;
+import static com.linkedin.metadata.search.utils.SearchUtils.*;
+import static com.linkedin.metadata.shared.ValidationUtils.*;
+import static com.linkedin.metadata.utils.PegasusUtils.*;
+
 import com.codahale.metrics.MetricRegistry;
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.AuthenticationContext;
@@ -80,18 +88,7 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.maven.artifact.versioning.ComparableVersion;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.entity.validation.ValidationUtils.*;
-import static com.linkedin.metadata.resources.restli.RestliConstants.*;
-import static com.linkedin.metadata.resources.restli.RestliUtils.*;
-import static com.linkedin.metadata.search.utils.SearchUtils.*;
-import static com.linkedin.metadata.shared.ValidationUtils.*;
-import static com.linkedin.metadata.utils.PegasusUtils.*;
-
-
-/**
- * Single unified resource for fetching, updating, searching, & browsing DataHub entities
- */
+/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */
 @Slf4j
 @RestLiCollection(name = "entities", namespace = "com.linkedin.entity")
 public class EntityResource extends CollectionResourceTaskTemplate<String, Entity> {
@@ -121,6 +118,7 @@ public class EntityResource extends CollectionResourceTaskTemplate<String, Entit
   private static final String ES_FIELD_TIMESTAMP = "timestampMillis";
   private static final Integer ELASTIC_MAX_PAGE_SIZE = 10000;
   private final Clock _clock = Clock.systemUTC();
+
   @Inject
   @Named("entityService")
   private EntityService _entityService;
@@ -161,61 +159,79 @@ public class EntityResource extends CollectionResourceTaskTemplate<String, Entit
   @Named("authorizerChain")
   private Authorizer _authorizer;
 
-  /**
-   * Retrieves the value for an entity that is made up of latest versions of specified aspects.
-   */
+  /** Retrieves the value for an entity that is made up of latest versions of specified aspects. */
   @RestMethod.Get
   @Nonnull
   @WithSpan
-  public Task<AnyRecord> get(@Nonnull String urnStr,
-      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException {
+  public Task<AnyRecord> get(
+      @Nonnull String urnStr, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames)
+      throws URISyntaxException {
     log.info("GET {}", urnStr);
     final Urn urn = Urn.createFromString(urnStr);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), new EntitySpec(urn.getEntityType(), urnStr))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to get entity " + urn);
-    }
-    return RestliUtil.toTask(() -> {
-      final Set<String> projectedAspects =
-          aspectNames == null ? Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames));
-      final Entity entity = _entityService.getEntity(urn, projectedAspects);
-      if (entity == null) {
-        throw RestliUtil.resourceNotFoundException(String.format("Did not find %s", urnStr));
-      }
-      return new AnyRecord(entity.data());
-    }, MetricRegistry.name(this.getClass(), "get"));
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
+            new EntitySpec(urn.getEntityType(), urnStr))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn);
+    }
+    return RestliUtil.toTask(
+        () -> {
+          final Set<String> projectedAspects =
+              aspectNames == null
                  ? Collections.emptySet()
+                  : new HashSet<>(Arrays.asList(aspectNames));
+          final Entity entity = _entityService.getEntity(urn, projectedAspects);
+          if (entity == null) {
+            throw RestliUtil.resourceNotFoundException(String.format("Did not find %s", urnStr));
+          }
+          return new AnyRecord(entity.data());
+        },
+        MetricRegistry.name(this.getClass(), "get"));
   }
 
   @RestMethod.BatchGet
   @Nonnull
   @WithSpan
-  public Task<Map<String, AnyRecord>> batchGet(@Nonnull Set<String> urnStrs,
-      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException {
+  public Task<Map<String, AnyRecord>> batchGet(
+      @Nonnull Set<String> urnStrs,
+      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames)
+      throws URISyntaxException {
     log.info("BATCH GET {}", urnStrs);
     final Set<Urn> urns = new HashSet<>();
     for (final String urnStr : urnStrs) {
       urns.add(Urn.createFromString(urnStr));
     }
-    List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream()
-        .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-        .collect(Collectors.toList());
+    List<java.util.Optional<EntitySpec>> resourceSpecs =
+        urns.stream()
+            .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
+            .collect(Collectors.toList());
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to get entities: " + urnStrs);
-    }
-    return RestliUtil.toTask(() -> {
-      final Set<String> projectedAspects =
-          aspectNames == null ? Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames));
-      return _entityService.getEntities(urns, projectedAspects)
-          .entrySet()
-          .stream()
-          .collect(
-              Collectors.toMap(entry -> entry.getKey().toString(), entry -> new AnyRecord(entry.getValue().data())));
-    }, MetricRegistry.name(this.getClass(), "batchGet"));
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
+            resourceSpecs)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities: " + urnStrs);
+    }
+    return RestliUtil.toTask(
+        () -> {
+          final Set<String> projectedAspects =
+              aspectNames == null
                  ? Collections.emptySet()
+                  : new HashSet<>(Arrays.asList(aspectNames));
+          return _entityService.getEntities(urns, projectedAspects).entrySet().stream()
+              .collect(
+                  Collectors.toMap(
+                      entry -> entry.getKey().toString(),
+                      entry -> new AnyRecord(entry.getValue().data())));
+        },
+        MetricRegistry.name(this.getClass(), "batchGet"));
   }
 
   private SystemMetadata populateDefaultFieldsIfEmpty(@Nullable SystemMetadata systemMetadata) {
@@ -234,17 +250,21 @@ private SystemMetadata populateDefaultFieldsIfEmpty(@Nullable SystemMetadata sys
   @Action(name = ACTION_INGEST)
   @Nonnull
   @WithSpan
-  public Task<Void> ingest(@ActionParam(PARAM_ENTITY) @Nonnull Entity entity,
+  public Task<Void> ingest(
+      @ActionParam(PARAM_ENTITY) @Nonnull Entity entity,
       @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata providedSystemMetadata)
       throws URISyntaxException {
     Authentication authentication = AuthenticationContext.getAuthentication();
     String actorUrnStr = authentication.getActor().toUrnStr();
     final Urn urn = com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue());
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE),
-        new EntitySpec(urn.getEntityType(), urn.toString()))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to edit entity " + urn);
+        && !isAuthorized(
+            authentication,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE),
+            new EntitySpec(urn.getEntityType(), urn.toString()))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entity " + urn);
     }
 
     try {
@@ -255,33 +275,43 @@ public Task<Void> ingest(@ActionParam(PARAM_ENTITY) @Nonnull Entity entity,
 
     SystemMetadata systemMetadata = populateDefaultFieldsIfEmpty(providedSystemMetadata);
 
-    final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr));
+    final AuditStamp auditStamp =
+        new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr));
 
     // variables referenced in lambdas are required to be final
     final SystemMetadata finalSystemMetadata = systemMetadata;
-    return RestliUtil.toTask(() -> {
-      _entityService.ingestEntity(entity, auditStamp, finalSystemMetadata);
-      return null;
-    }, MetricRegistry.name(this.getClass(), "ingest"));
+    return RestliUtil.toTask(
+        () -> {
+          _entityService.ingestEntity(entity, auditStamp, finalSystemMetadata);
+          return null;
+        },
+        MetricRegistry.name(this.getClass(), "ingest"));
   }
 
   @Action(name = ACTION_BATCH_INGEST)
   @Nonnull
   @WithSpan
-  public Task<Void> batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] entities,
-      @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata[] systemMetadataList) throws URISyntaxException {
+  public Task<Void> batchIngest(
+      @ActionParam(PARAM_ENTITIES) @Nonnull Entity[] entities,
+      @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata[] systemMetadataList)
+      throws URISyntaxException {
     Authentication authentication = AuthenticationContext.getAuthentication();
     String actorUrnStr = authentication.getActor().toUrnStr();
 
-    List<java.util.Optional<EntitySpec>> resourceSpecs = Arrays.stream(entities)
-        .map(Entity::getValue)
-        .map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion)
-        .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-        .collect(Collectors.toList());
+    List<java.util.Optional<EntitySpec>> resourceSpecs =
+        Arrays.stream(entities)
+            .map(Entity::getValue)
+            .map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion)
+            .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
+            .collect(Collectors.toList());
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), resourceSpecs)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to edit entities.");
+        && !isAuthorized(
+            authentication,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE),
+            resourceSpecs)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities.");
     }
 
     for (Entity entity : entities) {
@@ -292,7 +322,8 @@ public Task<Void> batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] ent
       }
     }
 
-    final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr));
+    final AuditStamp auditStamp =
+        new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr));
 
     if (systemMetadataList == null) {
       systemMetadataList = new SystemMetadata[entities.length];
@@ -302,113 +333,186 @@ public Task<Void> batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] ent
       throw RestliUtil.invalidArgumentsException("entities and systemMetadata length must match");
     }
 
-    final List<SystemMetadata> finalSystemMetadataList = Arrays.stream(systemMetadataList)
-        .map(systemMetadata -> populateDefaultFieldsIfEmpty(systemMetadata))
-        .collect(Collectors.toList());
+    final List<SystemMetadata> finalSystemMetadataList =
+        Arrays.stream(systemMetadataList)
+            .map(systemMetadata -> populateDefaultFieldsIfEmpty(systemMetadata))
+            .collect(Collectors.toList());
 
-    return RestliUtil.toTask(() -> {
-      _entityService.ingestEntities(Arrays.asList(entities), auditStamp, finalSystemMetadataList);
-      return null;
-    }, MetricRegistry.name(this.getClass(), "batchIngest"));
+    return RestliUtil.toTask(
+        () -> {
+          _entityService.ingestEntities(
+              Arrays.asList(entities), auditStamp, finalSystemMetadataList);
+          return null;
+        },
+        MetricRegistry.name(this.getClass(), "batchIngest"));
   }
 
   @Action(name = ACTION_SEARCH)
   @Nonnull
   @WithSpan
-  public Task<SearchResult> search(@ActionParam(PARAM_ENTITY) @Nonnull String entityName,
-      @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
-      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start,
-      @ActionParam(PARAM_COUNT) int count, @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext,
+  public Task<SearchResult> search(
+      @ActionParam(PARAM_ENTITY) @Nonnull String entityName,
+      @ActionParam(PARAM_INPUT) @Nonnull String input,
+      @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
+      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion,
+      @ActionParam(PARAM_START) int start,
+      @ActionParam(PARAM_COUNT) int count,
+      @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext,
       @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
    }
     log.info("GET SEARCH RESULTS for {} with query {}", entityName, input);
     // TODO - change it to use _searchService once we are confident on it's latency
     return RestliUtil.toTask(
-        () -> {
-          final SearchResult result;
-          // This API is not used by the frontend for search bars so we default to structured
-          result = _entitySearchService.search(List.of(entityName), input, filter, sortCriterion, start, count, searchFlags);
-          return validateSearchResult(result, _entityService);
-        },
-        MetricRegistry.name(this.getClass(), "search"));
+        () -> {
+          final SearchResult result;
+          // This API is not used by the frontend for search bars so we default to structured
+          result =
+              _entitySearchService.search(
+                  List.of(entityName), input, filter, sortCriterion, start, count, searchFlags);
+          return validateSearchResult(result, _entityService);
+        },
+        MetricRegistry.name(this.getClass(), "search"));
   }
 
   @Action(name = ACTION_SEARCH_ACROSS_ENTITIES)
   @Nonnull
   @WithSpan
-  public Task<SearchResult> searchAcrossEntities(@ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities,
-      @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
-      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start,
-      @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) {
+  public Task<SearchResult> searchAcrossEntities(
+      @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities,
+      @ActionParam(PARAM_INPUT) @Nonnull String input,
+      @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
+      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion,
+      @ActionParam(PARAM_START) int start,
+      @ActionParam(PARAM_COUNT) int count,
+      @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
    }
     List<String> entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities);
     log.info("GET SEARCH RESULTS ACROSS ENTITIES for {} with query {}", entityList, input);
-    final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true);
-    return RestliUtil.toTask(() -> validateSearchResult(
-        _searchService.searchAcrossEntities(entityList, input, filter, sortCriterion, start, count, finalFlags),
-        _entityService), "searchAcrossEntities");
+    final SearchFlags finalFlags =
+        searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true);
+    return RestliUtil.toTask(
+        () ->
+            validateSearchResult(
+                _searchService.searchAcrossEntities(
+                    entityList, input, filter, sortCriterion, start, count, finalFlags),
+                _entityService),
+        "searchAcrossEntities");
   }
 
   @Action(name = ACTION_SCROLL_ACROSS_ENTITIES)
   @Nonnull
   @WithSpan
-  public Task<ScrollResult> scrollAcrossEntities(@ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities,
-      @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
-      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_SCROLL_ID) String scrollId,
-      @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, @ActionParam(PARAM_COUNT) int count,
+  public Task<ScrollResult> scrollAcrossEntities(
+      @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities,
+      @ActionParam(PARAM_INPUT) @Nonnull String input,
+      @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
+      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion,
+      @ActionParam(PARAM_SCROLL_ID) String scrollId,
+      @ActionParam(PARAM_KEEP_ALIVE) String keepAlive,
+      @ActionParam(PARAM_COUNT) int count,
       @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) {
     List<String> entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities);
-    log.info("GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", entityList, input, scrollId);
-    final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true);
-    return RestliUtil.toTask(() -> validateScrollResult(
-        _searchService.scrollAcrossEntities(entityList, input, filter, sortCriterion, scrollId, keepAlive, count, finalFlags),
-        _entityService), "scrollAcrossEntities");
+    log.info(
+        "GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}",
+        entityList,
+        input,
+        scrollId);
+    final SearchFlags finalFlags =
+        searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true);
+    return RestliUtil.toTask(
+        () ->
+            validateScrollResult(
+                _searchService.scrollAcrossEntities(
+                    entityList,
+                    input,
+                    filter,
+                    sortCriterion,
+                    scrollId,
+                    keepAlive,
+                    count,
+                    finalFlags),
+                _entityService),
+        "scrollAcrossEntities");
   }
 
   @Action(name = ACTION_SEARCH_ACROSS_LINEAGE)
   @Nonnull
   @WithSpan
-  public Task<LineageSearchResult> searchAcrossLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr,
+  public Task<LineageSearchResult> searchAcrossLineage(
+      @ActionParam(PARAM_URN) @Nonnull String urnStr,
       @ActionParam(PARAM_DIRECTION) String direction,
       @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities,
       @ActionParam(PARAM_INPUT) @Optional @Nullable String input,
       @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops,
       @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
-      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start,
+      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion,
+      @ActionParam(PARAM_START) int start,
       @ActionParam(PARAM_COUNT) int count,
       @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis,
       @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis,
-      @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) throws URISyntaxException {
+      @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags)
+      throws URISyntaxException {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
    }
     Urn urn = Urn.createFromString(urnStr);
     List<String> entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities);
-    log.info("GET SEARCH RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}",
-        urnStr, direction, entityList, input);
-    return RestliUtil.toTask(() -> validateLineageSearchResult(
-        _lineageSearchService.searchAcrossLineage(urn, LineageDirection.valueOf(direction), entityList, input, maxHops,
-            filter, sortCriterion, start, count, startTimeMillis, endTimeMillis, searchFlags), _entityService),
+    log.info(
+        "GET SEARCH RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}",
+        urnStr,
+        direction,
+        entityList,
+        input);
+    return RestliUtil.toTask(
+        () ->
+            validateLineageSearchResult(
+                _lineageSearchService.searchAcrossLineage(
+                    urn,
+                    LineageDirection.valueOf(direction),
+                    entityList,
+                    input,
+                    maxHops,
+                    filter,
+                    sortCriterion,
+                    start,
+                    count,
+                    startTimeMillis,
+                    endTimeMillis,
+                    searchFlags),
+                _entityService),
         "searchAcrossRelationships");
   }
 
   @Action(name = ACTION_SCROLL_ACROSS_LINEAGE)
   @Nonnull
   @WithSpan
-  public Task<LineageScrollResult> scrollAcrossLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr,
+  public Task<LineageScrollResult> scrollAcrossLineage(
+      @ActionParam(PARAM_URN) @Nonnull String urnStr,
       @ActionParam(PARAM_DIRECTION) String direction,
       @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities,
       @ActionParam(PARAM_INPUT) @Optional @Nullable String input,
@@ -420,72 +524,120 @@ public Task<LineageScrollResult> scrollAcrossLineage(@ActionParam(PARAM_URN) @No
       @ActionParam(PARAM_COUNT) int count,
       @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis,
       @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis,
-      @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) throws URISyntaxException {
+      @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags)
+      throws URISyntaxException {
     Urn urn = Urn.createFromString(urnStr);
     List<String> entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities);
-    log.info("GET SCROLL RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}",
-        urnStr, direction, entityList, input);
-    final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true);
-    return RestliUtil.toTask(() -> validateLineageScrollResult(
-        _lineageSearchService.scrollAcrossLineage(urn, LineageDirection.valueOf(direction), entityList, input, maxHops,
-            filter, sortCriterion, scrollId, keepAlive, count, startTimeMillis, endTimeMillis, finalFlags),
-        _entityService),
+    log.info(
+        "GET SCROLL RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}",
+        urnStr,
+        direction,
+        entityList,
+        input);
+    final SearchFlags finalFlags =
+        searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true);
+    return RestliUtil.toTask(
+        () ->
+            validateLineageScrollResult(
+                _lineageSearchService.scrollAcrossLineage(
+                    urn,
+                    LineageDirection.valueOf(direction),
+                    entityList,
+                    input,
+                    maxHops,
+                    filter,
+                    sortCriterion,
+                    scrollId,
+                    keepAlive,
+                    count,
+                    startTimeMillis,
+                    endTimeMillis,
+                    finalFlags),
+                _entityService),
         "scrollAcrossLineage");
   }
 
   @Action(name = ACTION_LIST)
   @Nonnull
   @WithSpan
-  public Task<ListResult> list(@ActionParam(PARAM_ENTITY) @Nonnull String entityName,
+  public Task<ListResult> list(
+      @ActionParam(PARAM_ENTITY) @Nonnull String entityName,
       @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
-      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start,
+      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion,
+      @ActionParam(PARAM_START) int start,
       @ActionParam(PARAM_COUNT) int count) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
    }
     log.info("GET LIST RESULTS for {} with filter {}", entityName, filter);
-    return RestliUtil.toTask(() -> validateListResult(
-        toListResult(_entitySearchService.filter(entityName, filter, sortCriterion, start, count)), _entityService),
+    return RestliUtil.toTask(
+        () ->
+            validateListResult(
+                toListResult(
+                    _entitySearchService.filter(entityName, filter, sortCriterion, start, count)),
+                _entityService),
         MetricRegistry.name(this.getClass(), "filter"));
   }
 
   @Action(name = ACTION_AUTOCOMPLETE)
   @Nonnull
   @WithSpan
-  public Task<AutoCompleteResult> autocomplete(@ActionParam(PARAM_ENTITY) @Nonnull String entityName,
-      @ActionParam(PARAM_QUERY) @Nonnull String query, @ActionParam(PARAM_FIELD) @Optional @Nullable String field,
-      @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_LIMIT) int limit) {
+  public Task<AutoCompleteResult> autocomplete(
+      @ActionParam(PARAM_ENTITY) @Nonnull String entityName,
+      @ActionParam(PARAM_QUERY) @Nonnull String query,
+      @ActionParam(PARAM_FIELD) @Optional @Nullable String field,
+      @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
+      @ActionParam(PARAM_LIMIT) int limit) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
    }
-    return RestliUtil.toTask(() -> _entitySearchService.autoComplete(entityName, query, field, filter, limit),
+    return RestliUtil.toTask(
+        () -> _entitySearchService.autoComplete(entityName, query, field, filter, limit),
         MetricRegistry.name(this.getClass(), "autocomplete"));
  }
 
   @Action(name = ACTION_BROWSE)
   @Nonnull
   @WithSpan
-  public Task<BrowseResult> browse(@ActionParam(PARAM_ENTITY) @Nonnull String entityName,
-      @ActionParam(PARAM_PATH) @Nonnull String path, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
-      @ActionParam(PARAM_START) int start, @ActionParam(PARAM_LIMIT) int limit) {
+  public Task<BrowseResult> browse(
+      @ActionParam(PARAM_ENTITY) @Nonnull String entityName,
+      @ActionParam(PARAM_PATH) @Nonnull String path,
+      @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter,
+      @ActionParam(PARAM_START) int start,
+      @ActionParam(PARAM_LIMIT) int limit) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
    }
     log.info("GET BROWSE RESULTS for {} at path {}", entityName, path);
     return RestliUtil.toTask(
-        () -> validateBrowseResult(_entitySearchService.browse(entityName, path, filter, start, limit), _entityService),
+        () ->
+            validateBrowseResult(
+                _entitySearchService.browse(entityName, path, filter, start, limit),
+                _entityService),
         MetricRegistry.name(this.getClass(), "browse"));
   }
 
@@ -496,13 +648,17 @@ public Task<StringArray> getBrowsePaths(
       @ActionParam(value = PARAM_URN, typeref = com.linkedin.common.Urn.class) @Nonnull Urn urn) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
-        new EntitySpec(urn.getEntityType(), urn.toString()))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to get entity: " + urn);
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
+            new EntitySpec(urn.getEntityType(), urn.toString()))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity: " + urn);
     }
     log.info("GET BROWSE PATHS for {}", urn);
-    return RestliUtil.toTask(() -> new StringArray(_entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)),
+    return RestliUtil.toTask(
+        () -> new StringArray(_entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)),
         MetricRegistry.name(this.getClass(), "getBrowsePaths"));
   }
 
@@ -515,12 +671,13 @@ private String stringifyRowCount(int size) {
   }
 
   /*
   Used to delete all data related to a filter criteria based on registryId, runId etc.
-   */
+  */
   @Action(name = "deleteAll")
   @Nonnull
   @WithSpan
-  public Task<RollbackResponse> deleteEntities(@ActionParam("registryId") @Optional String registryId,
+  public Task<RollbackResponse> deleteEntities(
+      @ActionParam("registryId") @Optional String registryId,
       @ActionParam("dryRun") @Optional Boolean dryRun) {
     String registryName = null;
     ComparableVersion registryVersion = new ComparableVersion("0.0.0-dev");
@@ -530,139 +687,188 @@ public Task<RollbackResponse> deleteEntities(@ActionParam("registryId") @Optiona
         registryName = registryId.split(":")[0];
         registryVersion = new ComparableVersion(registryId.split(":")[1]);
       } catch (Exception e) {
-        throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
-            "Failed to parse registry id: " + registryId, e);
+        throw new RestLiServiceException(
+            HttpStatus.S_500_INTERNAL_SERVER_ERROR,
+            "Failed to parse registry id: " + registryId,
+            e);
       }
     }
     String finalRegistryName = registryName;
     ComparableVersion finalRegistryVersion = registryVersion;
     String finalRegistryName1 = registryName;
     ComparableVersion finalRegistryVersion1 = registryVersion;
-    return RestliUtil.toTask(() -> {
-      RollbackResponse response = new RollbackResponse();
-      List<AspectRowSummary> aspectRowsToDelete =
-          _systemMetadataService.findByRegistry(finalRegistryName, finalRegistryVersion.toString(), false, 0,
-              ESUtils.MAX_RESULT_SIZE);
-      log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
-      response.setAspectsAffected(aspectRowsToDelete.size());
-      Set<String> urns = aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet();
-      List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream()
-          .map(UrnUtils::getUrn)
-          .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-          .collect(Collectors.toList());
-      Authentication auth = AuthenticationContext.getAuthentication();
-      if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-          && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), resourceSpecs)) {
-        throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-            "User is unauthorized to delete entities.");
-      }
-      response.setEntitiesAffected(urns.size());
-      response.setEntitiesDeleted(aspectRowsToDelete.stream().filter(AspectRowSummary::isKeyAspect).count());
-      response.setAspectRowSummaries(
-          new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))));
-      if ((dryRun == null) || (!dryRun)) {
-        Map<String, String> conditions = new HashMap();
-        conditions.put("registryName", finalRegistryName1);
-        conditions.put("registryVersion", finalRegistryVersion1.toString());
-        _entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false);
-      }
-      return response;
-    }, MetricRegistry.name(this.getClass(), "deleteAll"));
+    return RestliUtil.toTask(
+        () -> {
+          RollbackResponse response = new RollbackResponse();
+          List<AspectRowSummary> aspectRowsToDelete =
+              _systemMetadataService.findByRegistry(
+                  finalRegistryName,
+                  finalRegistryVersion.toString(),
+                  false,
+                  0,
+                  ESUtils.MAX_RESULT_SIZE);
+          log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
+          response.setAspectsAffected(aspectRowsToDelete.size());
+          Set<String> urns =
+              aspectRowsToDelete.stream()
+                  .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                  .keySet();
+          List<java.util.Optional<EntitySpec>> resourceSpecs =
+              urns.stream()
+                  .map(UrnUtils::getUrn)
+                  .map(
+                      urn ->
+                          java.util.Optional.of(
+                              new EntitySpec(urn.getEntityType(), urn.toString())))
+                  .collect(Collectors.toList());
+          Authentication auth = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  auth,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+                  resourceSpecs)) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities.");
+          }
+          response.setEntitiesAffected(urns.size());
+          response.setEntitiesDeleted(
+              aspectRowsToDelete.stream().filter(AspectRowSummary::isKeyAspect).count());
+          response.setAspectRowSummaries(
+              new AspectRowSummaryArray(
+                  aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))));
+          if ((dryRun == null) || (!dryRun)) {
+            Map<String, String> conditions = new HashMap();
+            conditions.put("registryName", finalRegistryName1);
+            conditions.put("registryVersion", finalRegistryVersion1.toString());
+            _entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false);
+          }
+          return response;
+        },
+        MetricRegistry.name(this.getClass(), "deleteAll"));
   }
 
   /**
    * Deletes all data related to an individual urn(entity).
+   *
    * @param urnStr - the urn of the entity.
-   * @param aspectName - the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects).
+   * @param aspectName - the optional aspect name if only want to delete the aspect (applicable only
+   *     for timeseries aspects).
    * @param startTimeMills - the optional start time (applicable only for timeseries aspects).
    * @param endTimeMillis - the optional end time (applicable only for the timeseries aspects).
-   * @return - a DeleteEntityResponse object.
+   * @return - a DeleteEntityResponse object.
    * @throws URISyntaxException
    */
   @Action(name = ACTION_DELETE)
   @Nonnull
   @WithSpan
-  public Task<DeleteEntityResponse> deleteEntity(@ActionParam(PARAM_URN) @Nonnull String urnStr,
-      @ActionParam(PARAM_ASPECT_NAME) @Optional String aspectName,
-      @ActionParam(PARAM_START_TIME_MILLIS) @Optional Long startTimeMills,
-      @ActionParam(PARAM_END_TIME_MILLIS) @Optional Long endTimeMillis) throws URISyntaxException {
+  public Task<DeleteEntityResponse> deleteEntity(
+      @ActionParam(PARAM_URN) @Nonnull String urnStr,
+      @ActionParam(PARAM_ASPECT_NAME) @Optional String aspectName,
+      @ActionParam(PARAM_START_TIME_MILLIS) @Optional Long startTimeMills,
+      @ActionParam(PARAM_END_TIME_MILLIS) @Optional Long endTimeMillis)
+      throws URISyntaxException {
     Urn urn = Urn.createFromString(urnStr);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
-        Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to delete entity: " + urnStr);
-    }
-    return RestliUtil.toTask(() -> {
-      // Find the timeseries aspects to delete. If aspectName is null, delete all.
-      List<String> timeseriesAspectNames =
-          EntitySpecUtils.getEntityTimeseriesAspectNames(_entityService.getEntityRegistry(), urn.getEntityType());
-      if (aspectName != null && !timeseriesAspectNames.contains(aspectName)) {
-        throw new UnsupportedOperationException(
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+            Collections.singletonList(
+                java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + urnStr);
    }
+    return RestliUtil.toTask(
+        () -> {
+          // Find the timeseries aspects to delete. If aspectName is null, delete all.
+          List<String> timeseriesAspectNames =
+              EntitySpecUtils.getEntityTimeseriesAspectNames(
+                  _entityService.getEntityRegistry(), urn.getEntityType());
+          if (aspectName != null && !timeseriesAspectNames.contains(aspectName)) {
+            throw new UnsupportedOperationException(
                String.format("Not supported for non-timeseries aspect '{}'.", aspectName));
-      }
-      List<String> timeseriesAspectsToDelete =
+          }
+          List<String> timeseriesAspectsToDelete =
              (aspectName == null) ? timeseriesAspectNames : ImmutableList.of(aspectName);
-      DeleteEntityResponse response = new DeleteEntityResponse();
-      if (aspectName == null) {
-        RollbackRunResult result = _entityService.deleteUrn(urn);
-        response.setRows(result.getRowsDeletedFromEntityDeletion());
-      }
-      Long numTimeseriesDocsDeleted =
-          deleteTimeseriesAspects(urn, startTimeMills, endTimeMillis, timeseriesAspectsToDelete);
-      log.info("Total number of timeseries aspect docs deleted: {}", numTimeseriesDocsDeleted);
-
-      response.setUrn(urnStr);
-      response.setTimeseriesRows(numTimeseriesDocsDeleted);
-
-      return response;
-    }, MetricRegistry.name(this.getClass(), "delete"));
+          DeleteEntityResponse response = new DeleteEntityResponse();
+          if (aspectName == null) {
+            RollbackRunResult result = _entityService.deleteUrn(urn);
+            response.setRows(result.getRowsDeletedFromEntityDeletion());
+          }
+          Long numTimeseriesDocsDeleted =
+              deleteTimeseriesAspects(
+                  urn, startTimeMills, endTimeMillis, timeseriesAspectsToDelete);
+          log.info("Total number of timeseries aspect docs deleted: {}", numTimeseriesDocsDeleted);
+
+          response.setUrn(urnStr);
+          response.setTimeseriesRows(numTimeseriesDocsDeleted);
+
+          return response;
+        },
+        MetricRegistry.name(this.getClass(), "delete"));
   }
 
   /**
-   * Deletes the set of timeseries aspect values for the specified aspects that are associated with the given
-   * entity urn between startTimeMillis and endTimeMillis.
+   * Deletes the set of timeseries aspect values for the specified aspects that are associated with
+   * the given entity urn between startTimeMillis and endTimeMillis.
+   *
    * @param urn The entity urn whose timeseries aspect values need to be deleted.
-   * @param startTimeMillis The start time in milliseconds from when the aspect values need to be deleted.
-   *                        If this is null, the deletion starts from the oldest value.
-   * @param endTimeMillis The end time in milliseconds up to when the aspect values need to be deleted.
-   *                      If this is null, the deletion will go till the most recent value.
+   * @param startTimeMillis The start time in milliseconds from when the aspect values need to be
+   *     deleted. If this is null, the deletion starts from the oldest value.
+   * @param endTimeMillis The end time in milliseconds up to when the aspect values need to be
+   *     deleted. If this is null, the deletion will go till the most recent value.
    * @param aspectsToDelete - The list of aspect names whose values need to be deleted.
   * @return The total number of documents deleted.
   */
-  private Long deleteTimeseriesAspects(@Nonnull Urn urn, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis,
+  private Long deleteTimeseriesAspects(
+      @Nonnull Urn urn,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis,
       @Nonnull List<String> aspectsToDelete) {
     long totalNumberOfDocsDeleted = 0;
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
-        new EntitySpec(urn.getEntityType(), urn.toString()))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to delete entity " + urn);
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+            new EntitySpec(urn.getEntityType(), urn.toString()))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urn);
    }
     // Construct the filter.
     List<Criterion> criteria = new ArrayList<>();
     criteria.add(QueryUtils.newCriterion("urn", urn.toString()));
     if (startTimeMillis != null) {
       criteria.add(
-          QueryUtils.newCriterion(ES_FIELD_TIMESTAMP, startTimeMillis.toString(), Condition.GREATER_THAN_OR_EQUAL_TO));
+          QueryUtils.newCriterion(
+              ES_FIELD_TIMESTAMP, startTimeMillis.toString(), Condition.GREATER_THAN_OR_EQUAL_TO));
     }
     if (endTimeMillis != null) {
       criteria.add(
-          QueryUtils.newCriterion(ES_FIELD_TIMESTAMP, endTimeMillis.toString(), Condition.LESS_THAN_OR_EQUAL_TO));
+          QueryUtils.newCriterion(
+              ES_FIELD_TIMESTAMP, endTimeMillis.toString(), Condition.LESS_THAN_OR_EQUAL_TO));
     }
     final Filter filter = QueryUtils.getFilterFromCriteria(criteria);
 
     // Delete all the timeseries aspects by the filter.
     final String entityType = urn.getEntityType();
     for (final String aspect : aspectsToDelete) {
-      DeleteAspectValuesResult result = _timeseriesAspectService.deleteAspectValues(entityType, aspect, filter);
+      DeleteAspectValuesResult result =
+          _timeseriesAspectService.deleteAspectValues(entityType, aspect, filter);
       totalNumberOfDocsDeleted += result.getNumDocsDeleted();
-      log.debug("Number of timeseries docs deleted for entity:{}, aspect:{}, urn:{}, startTime:{}, endTime:{}={}",
-          entityType, aspect, urn, startTimeMillis, endTimeMillis, result.getNumDocsDeleted());
+      log.debug(
+          "Number of timeseries docs deleted for entity:{}, aspect:{}, urn:{}, startTime:{}, endTime:{}={}",
+          entityType,
+          aspect,
+          urn,
+          startTimeMillis,
+          endTimeMillis,
+          result.getNumDocsDeleted());
     }
     return totalNumberOfDocsDeleted;
   }
@@ -670,19 +876,24 @@ private Long deleteTimeseriesAspects(@Nonnull Urn urn, @Nullable Long startTimeM
   @Action(name = "deleteReferences")
   @Nonnull
   @WithSpan
-  public Task<DeleteReferencesResponse> deleteReferencesTo(@ActionParam(PARAM_URN) @Nonnull String urnStr,
-      @ActionParam("dryRun") @Optional Boolean dry) throws URISyntaxException {
+  public Task<DeleteReferencesResponse> deleteReferencesTo(
+      @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam("dryRun") @Optional Boolean dry)
+      throws URISyntaxException {
     boolean dryRun = dry != null ? dry : false;
 
     Urn urn = Urn.createFromString(urnStr);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
-        new EntitySpec(urn.getEntityType(), urnStr))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to delete entity " + urnStr);
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+            new EntitySpec(urn.getEntityType(), urnStr))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urnStr);
    }
-    return RestliUtil.toTask(() -> _deleteEntityService.deleteReferencesTo(urn, dryRun),
+    return RestliUtil.toTask(
+        () -> _deleteEntityService.deleteReferencesTo(urn, dryRun),
         MetricRegistry.name(this.getClass(), "deleteReferences"));
   }
 
@@ -692,18 +903,24 @@ public Task<DeleteReferencesResponse> deleteReferencesTo(@ActionParam(PARAM_URN)
   @Action(name = "setWritable")
   @Nonnull
   @WithSpan
-  public Task<Void> setWriteable(@ActionParam(PARAM_VALUE) @Optional("true") @Nonnull Boolean value) {
+  public Task<Void> setWriteable(
+      @ActionParam(PARAM_VALUE) @Optional("true") @Nonnull Boolean value) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SET_WRITEABLE_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to enable and disable write mode.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SET_WRITEABLE_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to enable and disable write mode.");
    }
     log.info("setting entity resource to be writable");
-    return RestliUtil.toTask(() -> {
-      _entityService.setWritable(value);
-      return null;
-    });
+    return RestliUtil.toTask(
+        () -> {
+          _entityService.setWritable(value);
+          return null;
+        });
   }
 
   @Action(name = "getTotalEntityCount")
@@ -712,9 +929,13 @@ public Task<Void> setWriteable(@ActionParam(PARAM_VALUE) @Optional("true") @Nonn
   public Task<Long> getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String entityName) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to get entity counts.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts.");
    }
     return RestliUtil.toTask(() -> _entitySearchService.docCount(entityName));
   }
@@ -722,26 +943,39 @@ public Task<Long> getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String
   @Action(name = "batchGetTotalEntityCount")
   @Nonnull
   @WithSpan
-  public Task<LongMap> batchGetTotalEntityCount(@ActionParam(PARAM_ENTITIES) @Nonnull String[] entityNames) {
+  public Task<LongMap> batchGetTotalEntityCount(
+      @ActionParam(PARAM_ENTITIES) @Nonnull String[] entityNames) {
    Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to get entity counts.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts.");
    }
-    return RestliUtil.toTask(() -> new LongMap(_searchService.docCountPerEntity(Arrays.asList(entityNames))));
+    return RestliUtil.toTask(
+        () -> new LongMap(_searchService.docCountPerEntity(Arrays.asList(entityNames))));
   }
 
   @Action(name = ACTION_LIST_URNS)
   @Nonnull
   @WithSpan
-  public Task<ListUrnsResult> listUrns(@ActionParam(PARAM_ENTITY) @Nonnull String entityName,
-      @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) throws URISyntaxException {
+  public Task<ListUrnsResult> listUrns(
+      @ActionParam(PARAM_ENTITY) @Nonnull String entityName,
+      @ActionParam(PARAM_START) int start,
+      @ActionParam(PARAM_COUNT) int count)
+      throws URISyntaxException {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
    }
     log.info("LIST URNS for {} with start {} and count {}", entityName, start, count);
     return RestliUtil.toTask(() -> _entityService.listUrns(entityName, start, count), "listUrns");
@@ -750,12 +984,12 @@ public Task<ListUrnsResult> listUrns(@ActionParam(PARAM_ENTITY) @Nonnull String
   @Action(name = ACTION_APPLY_RETENTION)
   @Nonnull
   @WithSpan
-  public Task<String> applyRetention(@ActionParam(PARAM_START) @Optional @Nullable Integer start,
-      @ActionParam(PARAM_COUNT) @Optional @Nullable Integer count,
-      @ActionParam("attemptWithVersion") @Optional @Nullable Integer attemptWithVersion,
-      @ActionParam(PARAM_ASPECT_NAME) @Optional @Nullable String aspectName,
-      @ActionParam(PARAM_URN) @Optional @Nullable String urn
-  ) {
+  public Task<String> applyRetention(
+      @ActionParam(PARAM_START) @Optional @Nullable Integer start,
+      @ActionParam(PARAM_COUNT) @Optional @Nullable Integer count,
+      @ActionParam("attemptWithVersion") @Optional @Nullable Integer attemptWithVersion,
+      @ActionParam(PARAM_ASPECT_NAME) @Optional @Nullable String aspectName,
+      @ActionParam(PARAM_URN) @Optional @Nullable String urn) {
     Authentication auth = AuthenticationContext.getAuthentication();
     EntitySpec resourceSpec = null;
     if (StringUtils.isNotBlank(urn)) {
@@ -763,47 +997,66 @@ public Task<String> applyRetention(@ActionParam(PARAM_START) @Optional @Nullable
       resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString());
     }
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.APPLY_RETENTION_PRIVILEGE), resourceSpec)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to
apply retention."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.APPLY_RETENTION_PRIVILEGE), + resourceSpec)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to apply retention."); } - return RestliUtil.toTask(() -> _entityService.batchApplyRetention( - start, count, attemptWithVersion, aspectName, urn), ACTION_APPLY_RETENTION); + return RestliUtil.toTask( + () -> _entityService.batchApplyRetention(start, count, attemptWithVersion, aspectName, urn), + ACTION_APPLY_RETENTION); } @Action(name = ACTION_FILTER) @Nonnull @WithSpan - public Task<SearchResult> filter(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, + public Task<SearchResult> filter( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_FILTER) Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("FILTER RESULTS for {} with filter {}", entityName, filter); return RestliUtil.toTask( - () -> validateSearchResult(_entitySearchService.filter(entityName, filter, sortCriterion, start, count), - _entityService), MetricRegistry.name(this.getClass(), "search")); + () -> + validateSearchResult( + _entitySearchService.filter(entityName, filter, sortCriterion, start, count), + _entityService), + MetricRegistry.name(this.getClass(), "search")); } @Action(name = ACTION_EXISTS) @Nonnull @WithSpan - public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr) throws URISyntaxException { + public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr) + throws URISyntaxException { Urn urn = UrnUtils.getUrn(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized get entity: " + urnStr); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized get entity: " + urnStr); } log.info("EXISTS for {}", urnStr); - return RestliUtil.toTask(() -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists")); + return RestliUtil.toTask( + () -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java 
b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java index 0c3e93273b863..9653a20bd8785 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java @@ -1,10 +1,16 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -34,16 +40,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "entitiesV2", namespace = "com.linkedin.entity") public class EntityV2Resource extends CollectionResourceTaskTemplate<String, EntityResponse> { @@ -56,68 +53,90 @@ public class EntityV2Resource extends CollectionResourceTaskTemplate<String, Ent @Named("authorizerChain") private Authorizer _authorizer; - /** - * Retrieves the value for an entity that is made up of latest versions of specified aspects. - */ + /** Retrieves the value for an entity that is made up of latest versions of specified aspects. 
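The import churn in these hunks follows a single convention: static imports move to the top of the file, regular imports are alphabetized (which is why com.datahub.authorization.EntitySpec now sorts above com.datahub.plugins.auth.authorization.Authorizer), and one-sentence javadoc collapses onto a single line. The layout matches google-java-format defaults, though the formatter is an inference from the style rather than something this hunk names. A minimal sketch of the resulting file header, using a hypothetical Example class:

    package com.linkedin.metadata.resources.entity;

    // Static imports first, alphabetized.
    import static com.linkedin.metadata.Constants.*;
    import static com.linkedin.metadata.resources.restli.RestliUtils.*;

    // Then regular imports, also alphabetized; "authorization" sorts before
    // "plugins", which is why EntitySpec moves above Authorizer in these hunks.
    import com.datahub.authorization.EntitySpec;
    import com.datahub.plugins.auth.authorization.Authorizer;

    /** One-sentence javadoc collapses onto a single line. */
    public class Example {}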
*/ @RestMethod.Get @Nonnull @WithSpan - public Task<EntityResponse> get(@Nonnull String urnStr, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task<EntityResponse> get( + @Nonnull String urnStr, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.debug("GET V2 {}", urnStr); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity " + urn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn); } - return RestliUtil.toTask(() -> { - final String entityName = urnToEntityName(urn); - final Set<String> projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityName) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntityV2(entityName, urn, projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to get entity with urn: %s, aspects: %s", urn, projectedAspects), e); - } - }, MetricRegistry.name(this.getClass(), "get")); + return RestliUtil.toTask( + () -> { + final String entityName = urnToEntityName(urn); + final Set<String> projectedAspects = + aspectNames == null + ? getAllAspectNames(_entityService, entityName) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntityV2(entityName, urn, projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to get entity with urn: %s, aspects: %s", urn, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "get")); } @RestMethod.BatchGet @Nonnull @WithSpan - public Task<Map<Urn, EntityResponse>> batchGet(@Nonnull Set<String> urnStrs, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task<Map<Urn, EntityResponse>> batchGet( + @Nonnull Set<String> urnStrs, + @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.debug("BATCH GET V2 {}", urnStrs.toString()); final Set<Urn> urns = new HashSet<>(); for (final String urnStr : urnStrs) { urns.add(Urn.createFromString(urnStr)); } Authentication auth = AuthenticationContext.getAuthentication(); - List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream() - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List<java.util.Optional<EntitySpec>> resourceSpecs = + urns.stream() + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entities " + urnStrs); + && !isAuthorized( + auth, + _authorizer, + 
ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities " + urnStrs); } if (urns.size() <= 0) { return Task.value(Collections.emptyMap()); } final String entityName = urnToEntityName(urns.iterator().next()); - return RestliUtil.toTask(() -> { - final Set<String> projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityName) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntitiesV2(entityName, urns, projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to batch get entities with urns: %s, projectedAspects: %s", urns, projectedAspects), - e); - } - }, MetricRegistry.name(this.getClass(), "batchGet")); + return RestliUtil.toTask( + () -> { + final Set<String> projectedAspects = + aspectNames == null + ? getAllAspectNames(_entityService, entityName) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntitiesV2(entityName, urns, projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch get entities with urns: %s, projectedAspects: %s", + urns, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "batchGet")); } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java index 05b7e6b3ff24b..fc556d15342c2 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java @@ -1,10 +1,15 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -35,19 +40,16 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - /** * Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities */ @Slf4j -@RestLiCollection(name = "entitiesVersionedV2", namespace = "com.linkedin.entity", +@RestLiCollection( + name = "entitiesVersionedV2", + namespace = "com.linkedin.entity", keyTyperefClass = com.linkedin.common.versioned.VersionedUrn.class) -public class EntityVersionedV2Resource extends CollectionResourceTaskTemplate<com.linkedin.common.urn.VersionedUrn, EntityResponse> { 
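For batch endpoints the single EntitySpec becomes a list with one optional entry per requested URN, and isAuthorized authorizes the call against the whole collection. The construction from the batchGet hunk above, restated with a comment added:

    // One optional EntitySpec per URN; the isAuthorized overload that takes a
    // List<java.util.Optional<EntitySpec>> then checks the batch as a whole.
    List<java.util.Optional<EntitySpec>> resourceSpecs =
        urns.stream()
            .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
            .collect(Collectors.toList());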
+public class EntityVersionedV2Resource + extends CollectionResourceTaskTemplate<com.linkedin.common.urn.VersionedUrn, EntityResponse> { @Inject @Named("entityService") @@ -65,36 +67,54 @@ public Task<Map<Urn, EntityResponse>> batchGetVersioned( @QueryParam(PARAM_ENTITY_TYPE) @Nonnull String entityType, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) { Authentication auth = AuthenticationContext.getAuthentication(); - List<java.util.Optional<EntitySpec>> resourceSpecs = versionedUrnStrs.stream() - .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List<java.util.Optional<EntitySpec>> resourceSpecs = + versionedUrnStrs.stream() + .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())) + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities " + versionedUrnStrs); } log.debug("BATCH GET VERSIONED V2 {}", versionedUrnStrs); if (versionedUrnStrs.size() <= 0) { return Task.value(Collections.emptyMap()); } - return RestliUtil.toTask(() -> { - final Set<String> projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityType) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntitiesVersionedV2(versionedUrnStrs.stream() - .map(versionedUrnTyperef -> { - VersionedUrn versionedUrn = new VersionedUrn().setUrn(UrnUtils.getUrn(versionedUrnTyperef.getUrn())); - if (versionedUrnTyperef.getVersionStamp() != null) { - versionedUrn.setVersionStamp(versionedUrnTyperef.getVersionStamp()); - } - return versionedUrn; - }).collect(Collectors.toSet()), projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to batch get versioned entities: %s, projectedAspects: %s", versionedUrnStrs, projectedAspects), - e); - } - }, MetricRegistry.name(this.getClass(), "batchGet")); + return RestliUtil.toTask( + () -> { + final Set<String> projectedAspects = + aspectNames == null + ? 
getAllAspectNames(_entityService, entityType) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntitiesVersionedV2( + versionedUrnStrs.stream() + .map( + versionedUrnTyperef -> { + VersionedUrn versionedUrn = + new VersionedUrn() + .setUrn(UrnUtils.getUrn(versionedUrnTyperef.getUrn())); + if (versionedUrnTyperef.getVersionStamp() != null) { + versionedUrn.setVersionStamp(versionedUrnTyperef.getVersionStamp()); + } + return versionedUrn; + }) + .collect(Collectors.toSet()), + projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch get versioned entities: %s, projectedAspects: %s", + versionedUrnStrs, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "batchGet")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java index 82d29ea00663b..1b22cc135b037 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java @@ -3,14 +3,11 @@ import com.linkedin.metadata.entity.EntityService; import java.util.Set; - public class ResourceUtils { - private ResourceUtils() { - - } + private ResourceUtils() {} - public static Set<String> getAllAspectNames(final EntityService entityService, final String entityName) { + public static Set<String> getAllAspectNames( + final EntityService entityService, final String entityName) { return entityService.getEntityAspectNames(entityName); } - } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java index 4a8e74c89039a..3fdd1d804a83f 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java @@ -1,10 +1,19 @@ package com.linkedin.metadata.resources.lineage; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_COUNT; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_DIRECTION; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_START; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_URN; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationshipArray; @@ -42,19 +51,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static 
com.linkedin.metadata.resources.restli.RestliConstants.PARAM_COUNT; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_DIRECTION; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_START; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_URN; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; - - -/** - * Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types} - */ +/** Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types} */ @Slf4j @RestLiSimpleResource(name = "relationships", namespace = "com.linkedin.lineage") public final class Relationships extends SimpleResourceTemplate<EntityRelationships> { @@ -76,14 +73,25 @@ public Relationships() { super(); } - private RelatedEntitiesResult getRelatedEntities(String rawUrn, List<String> relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count) { + private RelatedEntitiesResult getRelatedEntities( + String rawUrn, + List<String> relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count) { start = start == null ? 0 : start; count = count == null ? MAX_DOWNSTREAM_CNT : count; - return _graphService.findRelatedEntities(null, newFilter("urn", rawUrn), null, QueryUtils.EMPTY_FILTER, - relationshipTypes, newRelationshipFilter(QueryUtils.EMPTY_FILTER, direction), start, count); + return _graphService.findRelatedEntities( + null, + newFilter("urn", rawUrn), + null, + QueryUtils.EMPTY_FILTER, + relationshipTypes, + newRelationshipFilter(QueryUtils.EMPTY_FILTER, direction), + start, + count); } static RelationshipDirection getOppositeDirection(RelationshipDirection direction) { @@ -99,40 +107,55 @@ static RelationshipDirection getOppositeDirection(RelationshipDirection directio @Nonnull @RestMethod.Get @WithSpan - public Task<EntityRelationships> get(@QueryParam("urn") @Nonnull String rawUrn, + public Task<EntityRelationships> get( + @QueryParam("urn") @Nonnull String rawUrn, @QueryParam("types") @Nonnull String[] relationshipTypesParam, - @QueryParam("direction") @Nonnull String rawDirection, @QueryParam("start") @Optional @Nullable Integer start, + @QueryParam("direction") @Nonnull String rawDirection, + @QueryParam("start") @Optional @Nullable Integer start, @QueryParam("count") @Optional @Nullable Integer count) { Urn urn = UrnUtils.getUrn(rawUrn); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity lineage: " + rawUrn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + Collections.singletonList( + java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + rawUrn); } RelationshipDirection direction = RelationshipDirection.valueOf(rawDirection); final List<String> 
relationshipTypes = Arrays.asList(relationshipTypesParam); - return RestliUtil.toTask(() -> { - - final RelatedEntitiesResult relatedEntitiesResult = - getRelatedEntities(rawUrn, relationshipTypes, direction, start, count); - final EntityRelationshipArray entityArray = - new EntityRelationshipArray(relatedEntitiesResult.getEntities().stream().map(entity -> { - try { - return new EntityRelationship().setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType()); - } catch (URISyntaxException e) { - throw new RuntimeException( - String.format("Failed to convert urnStr %s found in the Graph to an Urn object", entity.getUrn())); - } - }).collect(Collectors.toList())); - - return new EntityRelationships().setStart(relatedEntitiesResult.getStart()) - .setCount(relatedEntitiesResult.getCount()) - .setTotal(relatedEntitiesResult.getTotal()) - .setRelationships(entityArray); - }, MetricRegistry.name(this.getClass(), "getLineage")); + return RestliUtil.toTask( + () -> { + final RelatedEntitiesResult relatedEntitiesResult = + getRelatedEntities(rawUrn, relationshipTypes, direction, start, count); + final EntityRelationshipArray entityArray = + new EntityRelationshipArray( + relatedEntitiesResult.getEntities().stream() + .map( + entity -> { + try { + return new EntityRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to convert urnStr %s found in the Graph to an Urn object", + entity.getUrn())); + } + }) + .collect(Collectors.toList())); + + return new EntityRelationships() + .setStart(relatedEntitiesResult.getStart()) + .setCount(relatedEntitiesResult.getCount()) + .setTotal(relatedEntitiesResult.getTotal()) + .setRelationships(entityArray); + }, + MetricRegistry.name(this.getClass(), "getLineage")); } @Nonnull @@ -141,10 +164,14 @@ public UpdateResponse delete(@QueryParam("urn") @Nonnull String rawUrn) throws E Urn urn = Urn.createFromString(rawUrn); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entity: " + rawUrn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), + Collections.singletonList( + java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + rawUrn); } _graphService.removeNode(urn); return new UpdateResponse(HttpStatus.S_200_OK); @@ -153,22 +180,34 @@ public UpdateResponse delete(@QueryParam("urn") @Nonnull String rawUrn) throws E @Action(name = ACTION_GET_LINEAGE) @Nonnull @WithSpan - public Task<EntityLineageResult> getLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr, - @ActionParam(PARAM_DIRECTION) String direction, @ActionParam(PARAM_START) @Optional @Nullable Integer start, + public Task<EntityLineageResult> getLineage( + @ActionParam(PARAM_URN) @Nonnull String urnStr, + @ActionParam(PARAM_DIRECTION) String direction, + @ActionParam(PARAM_START) @Optional @Nullable Integer start, @ActionParam(PARAM_COUNT) @Optional @Nullable 
Integer count, - @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops) throws URISyntaxException { + @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops) + throws URISyntaxException { log.info("GET LINEAGE {} {} {} {} {}", urnStr, direction, start, count, maxHops); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity lineage: " + urnStr); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + Collections.singletonList( + java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + urnStr); } return RestliUtil.toTask( - () -> _graphService.getLineage(urn, LineageDirection.valueOf(direction), start != null ? start : 0, - count != null ? count : 100, maxHops != null ? maxHops : 1), + () -> + _graphService.getLineage( + urn, + LineageDirection.valueOf(direction), + start != null ? start : 0, + count != null ? count : 100, + maxHops != null ? maxHops : 1), MetricRegistry.name(this.getClass(), "getLineage")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java index 1e6523e774d66..499fc0f5221fe 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.resources.operations; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -35,17 +39,10 @@ import javax.inject.Inject; import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.tasks.GetTaskResponse; import org.json.JSONObject; +import org.opensearch.client.tasks.GetTaskResponse; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Endpoints for performing maintenance operations - */ +/** Endpoints for performing maintenance operations */ @Slf4j @RestLiCollection(name = "operations", namespace = "com.linkedin.operations") public class OperationsResource extends CollectionResourceTaskTemplate<String, VersionedAspect> { @@ -67,9 +64,11 @@ public class OperationsResource extends CollectionResourceTaskTemplate<String, V @Inject @Named("entityService") private EntityService _entityService; + @Inject @Named("timeseriesAspectService") private TimeseriesAspectService _timeseriesAspectService; + @Inject 
@Named("elasticSearchSystemMetadataService") private SystemMetadataService _systemMetadataService; @@ -78,7 +77,7 @@ public class OperationsResource extends CollectionResourceTaskTemplate<String, V @Named("authorizerChain") private Authorizer _authorizer; - public OperationsResource() { } + public OperationsResource() {} @VisibleForTesting OperationsResource(TimeseriesAspectService timeseriesAspectService) { @@ -88,15 +87,18 @@ public OperationsResource() { } @Action(name = ACTION_RESTORE_INDICES) @Nonnull @WithSpan - public Task<String> restoreIndices(@ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, + public Task<String> restoreIndices( + @ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, @ActionParam(PARAM_URN) @Optional @Nullable String urn, @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike, @ActionParam("start") @Optional @Nullable Integer start, - @ActionParam("batchSize") @Optional @Nullable Integer batchSize - ) { - return RestliUtil.toTask(() -> { - return Utils.restoreIndices(aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); - }, MetricRegistry.name(this.getClass(), "restoreIndices")); + @ActionParam("batchSize") @Optional @Nullable Integer batchSize) { + return RestliUtil.toTask( + () -> { + return Utils.restoreIndices( + aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); + }, + MetricRegistry.name(this.getClass(), "restoreIndices")); } @VisibleForTesting @@ -117,62 +119,86 @@ static boolean isTaskIdValid(String task) { public Task<String> getTaskStatus( @ActionParam(PARAM_NODE_ID) @Optional String nodeId, @ActionParam(PARAM_TASK_ID) @Optional("0") long taskId, - @ActionParam(PARAM_TASK) @Optional String task - ) { - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE), - List.of(java.util.Optional.empty()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get ES task status"); - } - boolean taskSpecified = task != null; - boolean nodeAndTaskIdSpecified = nodeId != null && taskId > 0; - if (!taskSpecified && !nodeAndTaskIdSpecified) { - throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Please specify either Node ID + task ID OR composite task parameters"); - } + @ActionParam(PARAM_TASK) @Optional String task) { + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE), + List.of(java.util.Optional.empty()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get ES task status"); + } + boolean taskSpecified = task != null; + boolean nodeAndTaskIdSpecified = nodeId != null && taskId > 0; + if (!taskSpecified && !nodeAndTaskIdSpecified) { + throw new RestLiServiceException( + HttpStatus.S_400_BAD_REQUEST, + "Please specify either Node ID + task ID OR composite task parameters"); + } - if (taskSpecified && nodeAndTaskIdSpecified && !task.equals(String.format("%s:%d", nodeId, taskId))) { - throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Please specify only one of Node ID + task ID OR 
composite task parameters"); - } + if (taskSpecified + && nodeAndTaskIdSpecified + && !task.equals(String.format("%s:%d", nodeId, taskId))) { + throw new RestLiServiceException( + HttpStatus.S_400_BAD_REQUEST, + "Please specify only one of Node ID + task ID OR composite task parameters"); + } - if (taskSpecified && !isTaskIdValid(task)) { - throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, - String.format("Task should be in the form nodeId:taskId e.g. aB1cdEf2GHIJKLMnoPQr3S:123456 (got %s)", task)); - } + if (taskSpecified && !isTaskIdValid(task)) { + throw new RestLiServiceException( + HttpStatus.S_400_BAD_REQUEST, + String.format( + "Task should be in the form nodeId:taskId e.g. aB1cdEf2GHIJKLMnoPQr3S:123456 (got %s)", + task)); + } - String nodeIdToQuery = nodeAndTaskIdSpecified ? nodeId : task.split(":")[0]; - long taskIdToQuery = nodeAndTaskIdSpecified ? taskId : Long.parseLong(task.split(":")[1]); - java.util.Optional<GetTaskResponse> res = _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); - JSONObject j = new JSONObject(); - if (res.isEmpty()) { - j.put("error", String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); - return j.toString(); - } - GetTaskResponse resp = res.get(); - j.put("completed", resp.isCompleted()); - j.put("taskId", res.get().getTaskInfo().getTaskId()); - j.put("status", res.get().getTaskInfo().getStatus()); - j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos()); - return j.toString(); - }, MetricRegistry.name(this.getClass(), "getTaskStatus")); + String nodeIdToQuery = nodeAndTaskIdSpecified ? nodeId : task.split(":")[0]; + long taskIdToQuery = nodeAndTaskIdSpecified ? taskId : Long.parseLong(task.split(":")[1]); + java.util.Optional<GetTaskResponse> res = + _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); + JSONObject j = new JSONObject(); + if (res.isEmpty()) { + j.put( + "error", + String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); + return j.toString(); + } + GetTaskResponse resp = res.get(); + j.put("completed", resp.isCompleted()); + j.put("taskId", res.get().getTaskInfo().getTaskId()); + j.put("status", res.get().getTaskInfo().getStatus()); + j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos()); + return j.toString(); + }, + MetricRegistry.name(this.getClass(), "getTaskStatus")); } @Action(name = ACTION_GET_INDEX_SIZES) @Nonnull @WithSpan public Task<TimeseriesIndicesSizesResult> getIndexSizes() { - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE), - List.of(java.util.Optional.empty()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get index sizes."); - } - TimeseriesIndicesSizesResult result = new TimeseriesIndicesSizesResult(); - result.setIndexSizes(new TimeseriesIndexSizeResultArray(_timeseriesAspectService.getIndexSizes())); - return result; - }, MetricRegistry.name(this.getClass(), "getIndexSizes")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + 
ImmutableList.of(PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE), + List.of(java.util.Optional.empty()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get index sizes."); + } + TimeseriesIndicesSizesResult result = new TimeseriesIndicesSizesResult(); + result.setIndexSizes( + new TimeseriesIndexSizeResultArray(_timeseriesAspectService.getIndexSizes())); + return result; + }, + MetricRegistry.name(this.getClass(), "getIndexSizes")); } @VisibleForTesting @@ -184,13 +210,16 @@ String executeTruncateTimeseriesAspect( @Nullable Integer batchSize, @Nullable Long timeoutSeconds, @Nullable Boolean forceDeleteByQuery, - @Nullable Boolean forceReindex - ) { + @Nullable Boolean forceReindex) { Authentication authentication = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE), - List.of(java.util.Optional.empty()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to truncate timeseries index"); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE), + List.of(java.util.Optional.empty()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to truncate timeseries index"); } if (forceDeleteByQuery != null && forceDeleteByQuery.equals(forceReindex)) { @@ -199,14 +228,20 @@ String executeTruncateTimeseriesAspect( List<Criterion> criteria = new ArrayList<>(); criteria.add( - QueryUtils.newCriterion("timestampMillis", String.valueOf(endTimeMillis), Condition.LESS_THAN_OR_EQUAL_TO)); + QueryUtils.newCriterion( + "timestampMillis", String.valueOf(endTimeMillis), Condition.LESS_THAN_OR_EQUAL_TO)); final Filter filter = QueryUtils.getFilterFromCriteria(criteria); long numToDelete = _timeseriesAspectService.countByFilter(entityType, aspectName, filter); long totalNum = _timeseriesAspectService.countByFilter(entityType, aspectName, new Filter()); - String deleteSummary = String.format("Delete %d out of %d rows (%.2f%%). ", numToDelete, totalNum, ((double) numToDelete) / totalNum * 100); - boolean reindex = !(forceDeleteByQuery != null && forceDeleteByQuery) && ((forceReindex != null && forceReindex) || numToDelete > (totalNum / 2)); + String deleteSummary = + String.format( + "Delete %d out of %d rows (%.2f%%). ", + numToDelete, totalNum, ((double) numToDelete) / totalNum * 100); + boolean reindex = + !(forceDeleteByQuery != null && forceDeleteByQuery) + && ((forceReindex != null && forceReindex) || numToDelete > (totalNum / 2)); if (reindex) { deleteSummary += "Reindexing the aspect without the deleted records. 
"; @@ -232,17 +267,22 @@ String executeTruncateTimeseriesAspect( } if (reindex) { - // need to invert query to retain only the ones that do NOT meet the criterion from the count + // need to invert query to retain only the ones that do NOT meet the criterion from the + // count List<Criterion> reindexCriteria = new ArrayList<>(); reindexCriteria.add( - QueryUtils.newCriterion("timestampMillis", String.valueOf(endTimeMillis), Condition.GREATER_THAN)); + QueryUtils.newCriterion( + "timestampMillis", String.valueOf(endTimeMillis), Condition.GREATER_THAN)); final Filter reindexFilter = QueryUtils.getFilterFromCriteria(reindexCriteria); - String taskId = _timeseriesAspectService.reindexAsync(entityType, aspectName, reindexFilter, options); + String taskId = + _timeseriesAspectService.reindexAsync(entityType, aspectName, reindexFilter, options); log.info("reindex request submitted with ID " + taskId); return taskId; } else { - String taskId = _timeseriesAspectService.deleteAspectValuesAsync(entityType, aspectName, filter, options); + String taskId = + _timeseriesAspectService.deleteAspectValuesAsync( + entityType, aspectName, filter, options); log.info("delete by query request submitted with ID " + taskId); return taskId; } @@ -260,10 +300,18 @@ public Task<String> truncateTimeseriesAspect( @ActionParam(PARAM_BATCH_SIZE) @Optional @Nullable Integer batchSize, @ActionParam(PARAM_TIMEOUT_SECONDS) @Optional @Nullable Long timeoutSeconds, @ActionParam(PARAM_FORCE_DELETE_BY_QUERY) @Optional @Nullable Boolean forceDeleteByQuery, - @ActionParam(PARAM_FORCE_REINDEX) @Optional @Nullable Boolean forceReindex - ) { - return RestliUtil.toTask(() -> - executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, dryRun, batchSize, timeoutSeconds, forceDeleteByQuery, forceReindex), + @ActionParam(PARAM_FORCE_REINDEX) @Optional @Nullable Boolean forceReindex) { + return RestliUtil.toTask( + () -> + executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + dryRun, + batchSize, + timeoutSeconds, + forceDeleteByQuery, + forceReindex), MetricRegistry.name(this.getClass(), "truncateTimeseriesAspect")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java index 12586b66495a9..bf07d0eb9dd5b 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.resources.operations; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; @@ -19,14 +22,11 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - @Slf4j public class Utils { - private Utils() { } + private Utils() {} + public static String restoreIndices( @Nonnull String aspectName, @Nullable String urn, @@ -34,8 +34,7 @@ public static String restoreIndices( @Nullable Integer start, @Nullable Integer batchSize, @Nonnull Authorizer authorizer, - @Nonnull EntityService entityService - ) { + 
@Nonnull EntityService entityService) { Authentication authentication = AuthenticationContext.getAuthentication(); EntitySpec resourceSpec = null; if (StringUtils.isNotBlank(urn)) { @@ -43,16 +42,21 @@ public static String restoreIndices( resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString()); } if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, authorizer, ImmutableList.of(PoliciesConfig.RESTORE_INDICES_PRIVILEGE), - resourceSpec)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to restore indices."); + && !isAuthorized( + authentication, + authorizer, + ImmutableList.of(PoliciesConfig.RESTORE_INDICES_PRIVILEGE), + resourceSpec)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to restore indices."); } - RestoreIndicesArgs args = new RestoreIndicesArgs() - .setAspectName(aspectName) - .setUrnLike(urnLike) - .setUrn(urn) - .setStart(start) - .setBatchSize(batchSize); + RestoreIndicesArgs args = + new RestoreIndicesArgs() + .setAspectName(aspectName) + .setUrnLike(urnLike) + .setUrn(urn) + .setStart(start) + .setBatchSize(batchSize); Map<String, Object> result = new HashMap<>(); result.put("args", args); result.put("result", entityService.restoreIndices(args, log::info)); diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java index a8018074497c4..f4bc0dd72e4c6 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java @@ -1,9 +1,12 @@ package com.linkedin.metadata.resources.platform; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.entity.Entity; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -24,13 +27,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * DataHub Platform Actions - */ +/** DataHub Platform Actions */ @Slf4j @RestLiCollection(name = "platform", namespace = "com.linkedin.platform") public class PlatformResource extends CollectionResourceTaskTemplate<String, Entity> { @@ -54,14 +51,19 @@ public Task<Void> producePlatformEvent( @ActionParam("event") @Nonnull PlatformEvent event) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to produce platform events."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE), + 
(EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to produce platform events."); } log.info(String.format("Emitting platform event. name: %s, key: %s", eventName, key)); - return RestliUtil.toTask(() -> { - _eventProducer.producePlatformEvent(eventName, key, event); - return null; - }); + return RestliUtil.toTask( + () -> { + _eventProducer.producePlatformEvent(eventName, key, event); + return null; + }); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java index 270c52f380356..af6efb1ad8093 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java @@ -1,7 +1,7 @@ package com.linkedin.metadata.resources.restli; public final class RestliConstants { - private RestliConstants() { } + private RestliConstants() {} public static final String FINDER_SEARCH = "search"; public static final String FINDER_FILTER = "filter"; diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java index 9949556c99b81..278cd48bc455e 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java @@ -18,7 +18,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class RestliUtils { private RestliUtils() { @@ -26,8 +25,9 @@ private RestliUtils() { } /** - * Executes the provided supplier and convert the results to a {@link Task}. - * Exceptions thrown during the execution will be properly wrapped in {@link RestLiServiceException}. + * Executes the provided supplier and convert the results to a {@link Task}. Exceptions thrown + * during the execution will be properly wrapped in {@link RestLiServiceException}. + * * @param supplier The supplier to execute * @return A parseq {@link Task} */ @@ -38,7 +38,8 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) { } catch (Throwable throwable) { // Convert IllegalArgumentException to BAD REQUEST - if (throwable instanceof IllegalArgumentException || throwable.getCause() instanceof IllegalArgumentException) { + if (throwable instanceof IllegalArgumentException + || throwable.getCause() instanceof IllegalArgumentException) { throwable = badRequestException(throwable.getMessage()); } @@ -51,8 +52,10 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) { } /** - * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} instead. - * A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the optional is emtpy. + * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} + * instead. A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the + * optional is emtpy. 
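Both toTask overloads funnel errors through the same conversion, so Rest.li callers see a 400 for bad input instead of a generic failure. The relevant branch from the hunk above, with a comment added:

    // Inside toTask's catch (Throwable throwable) block: an
    // IllegalArgumentException, whether thrown directly or wrapped as a cause,
    // is rewritten into a 400 BAD REQUEST before the task fails.
    if (throwable instanceof IllegalArgumentException
        || throwable.getCause() instanceof IllegalArgumentException) {
      throwable = badRequestException(throwable.getMessage());
    }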
+ * * @param supplier The supplier to execute * @return A parseq {@link Task} */ @@ -81,22 +84,36 @@ public static RestLiServiceException invalidArgumentsException(@Nullable String return new RestLiServiceException(HttpStatus.S_412_PRECONDITION_FAILED, message); } - public static boolean isAuthorized(@Nonnull Authentication authentication, @Nonnull Authorizer authorizer, - @Nonnull final List<PoliciesConfig.Privilege> privileges, @Nonnull final List<java.util.Optional<EntitySpec>> resources) { + public static boolean isAuthorized( + @Nonnull Authentication authentication, + @Nonnull Authorizer authorizer, + @Nonnull final List<PoliciesConfig.Privilege> privileges, + @Nonnull final List<java.util.Optional<EntitySpec>> resources) { DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges); - return AuthUtil.isAuthorizedForResources(authorizer, authentication.getActor().toUrnStr(), resources, orGroup); + return AuthUtil.isAuthorizedForResources( + authorizer, authentication.getActor().toUrnStr(), resources, orGroup); } - public static boolean isAuthorized(@Nonnull Authentication authentication, @Nonnull Authorizer authorizer, - @Nonnull final List<PoliciesConfig.Privilege> privileges, @Nullable final EntitySpec resource) { + public static boolean isAuthorized( + @Nonnull Authentication authentication, + @Nonnull Authorizer authorizer, + @Nonnull final List<PoliciesConfig.Privilege> privileges, + @Nullable final EntitySpec resource) { DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges); - return AuthUtil.isAuthorized(authorizer, authentication.getActor().toUrnStr(), java.util.Optional.ofNullable(resource), orGroup); + return AuthUtil.isAuthorized( + authorizer, + authentication.getActor().toUrnStr(), + java.util.Optional.ofNullable(resource), + orGroup); } - private static DisjunctivePrivilegeGroup convertPrivilegeGroup(@Nonnull final List<PoliciesConfig.Privilege> privileges) { + private static DisjunctivePrivilegeGroup convertPrivilegeGroup( + @Nonnull final List<PoliciesConfig.Privilege> privileges) { return new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(privileges.stream() - .map(PoliciesConfig.Privilege::getType) - .collect(Collectors.toList())))); + ImmutableList.of( + new ConjunctivePrivilegeGroup( + privileges.stream() + .map(PoliciesConfig.Privilege::getType) + .collect(Collectors.toList())))); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java index 02d413301f3b4..554b6e909e9e3 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.resources.usage; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import 
com.fasterxml.jackson.databind.JsonNode; @@ -67,22 +70,23 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Rest.li entry point: /usageStats - */ +/** Rest.li entry point: /usageStats */ @Slf4j @Deprecated @RestLiSimpleResource(name = "usageStats", namespace = "com.linkedin.usage") public class UsageStats extends SimpleResourceTemplate<UsageAggregation> { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final String ACTION_BATCH_INGEST = "batchIngest"; private static final String PARAM_BUCKETS = "buckets"; @@ -122,18 +126,24 @@ public class UsageStats extends SimpleResourceTemplate<UsageAggregation> { @WithSpan public Task<Void> batchIngest(@ActionParam(PARAM_BUCKETS) @Nonnull UsageAggregation[] buckets) { log.info("Ingesting {} usage stats aggregations", buckets.length); - return RestliUtil.toTask(() -> { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to edit entities."); - } - for (UsageAggregation agg : buckets) { - this.ingest(agg); - } - return null; - }, MetricRegistry.name(this.getClass(), "batchIngest")); + return RestliUtil.toTask( + () -> { + Authentication auth = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities."); + } + for (UsageAggregation agg : buckets) { + this.ingest(agg); + } + return null; + }, + MetricRegistry.name(this.getClass(), "batchIngest")); } private CalendarInterval windowToInterval(@Nonnull WindowDuration duration) { @@ -153,35 +163,50 @@ private CalendarInterval windowToInterval(@Nonnull WindowDuration duration) { } } - private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String resource, - @Nonnull WindowDuration duration) { - // NOTE: We will not populate the per-bucket userCounts and fieldCounts in this implementation because - // (a) it is very expensive to compute the un-explode equivalent queries for timeseries field collections, and - // (b) the equivalent data for the whole query will anyways be populated in the `aggregations` part of the results + private UsageAggregationArray getBuckets( + @Nonnull Filter filter, @Nonnull String resource, @Nonnull WindowDuration duration) { + // NOTE: We will not populate the per-bucket userCounts and fieldCounts in this implementation + // because + // (a) it is very expensive to compute the un-explode 
equivalent queries for timeseries field + // collections, and + // (b) the equivalent data for the whole query will anyways be populated in the `aggregations` + // part of the results // (see getAggregations). - // 1. Construct the aggregation specs for latest value of uniqueUserCount, totalSqlQueries & topSqlQueries. + // 1. Construct the aggregation specs for latest value of uniqueUserCount, totalSqlQueries & + // topSqlQueries. AggregationSpec uniqueUserCountAgg = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("uniqueUserCount"); AggregationSpec totalSqlQueriesAgg = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("totalSqlQueries"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("totalSqlQueries"); AggregationSpec topSqlQueriesAgg = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("topSqlQueries"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("topSqlQueries"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{uniqueUserCountAgg, totalSqlQueriesAgg, topSqlQueriesAgg}; + new AggregationSpec[] {uniqueUserCountAgg, totalSqlQueriesAgg, topSqlQueriesAgg}; // 2. Construct the Grouping buckets with just the ts bucket. GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setKey(ES_FIELD_TIMESTAMP) + timestampBucket + .setKey(ES_FIELD_TIMESTAMP) .setType(GroupingBucketType.DATE_GROUPING_BUCKET) .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(windowToInterval(duration))); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{timestampBucket}; + GroupingBucket[] groupingBuckets = new GroupingBucket[] {timestampBucket}; // 3. Query GenericTable result = - _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs, - filter, groupingBuckets); + _timeseriesAspectService.getAggregatedStats( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // 4. Populate buckets from the result. 
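    // Orientation note, assuming standard GenericTable string rows (the row
    // layout is not shown in this hunk): with the three LATEST aggregations
    // and the single date grouping bucket built above, each row should arrive
    // as
    //   [ bucketTimestampMillis, uniqueUserCount, totalSqlQueries, topSqlQueriesJson ]
    // e.g. a hypothetical row ["1700000000000", "12", "345", "[\"q1\",\"q2\"]"],
    // which is why the population loop reads columns by index, JSON-decodes
    // column 3 into a StringArray, and treats ES_NULL_VALUE entries as absent.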
UsageAggregationArray buckets = new UsageAggregationArray(); @@ -211,9 +236,11 @@ private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String } if (!row.get(3).equals(ES_NULL_VALUE)) { try { - usageAggregationMetrics.setTopSqlQueries(OBJECT_MAPPER.readValue(row.get(3), StringArray.class)); + usageAggregationMetrics.setTopSqlQueries( + OBJECT_MAPPER.readValue(row.get(3), StringArray.class)); } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Failed to convert topSqlQueries from ES to object", e); + throw new IllegalArgumentException( + "Failed to convert topSqlQueries from ES to object", e); } } usageAggregation.setMetrics(usageAggregationMetrics); @@ -226,20 +253,31 @@ private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String private List<UserUsageCounts> getUserUsageCounts(Filter filter) { // Sum aggregation on userCounts.count AggregationSpec sumUserCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.count"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.count"); AggregationSpec latestUserEmailAggSpec = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("userCounts.userEmail"); - AggregationSpec[] aggregationSpecs = new AggregationSpec[]{sumUserCountsCountAggSpec, latestUserEmailAggSpec}; + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("userCounts.userEmail"); + AggregationSpec[] aggregationSpecs = + new AggregationSpec[] {sumUserCountsCountAggSpec, latestUserEmailAggSpec}; // String grouping bucket on userCounts.user GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("userCounts.user") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend GenericTable result = - _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs, - filter, groupingBuckets); + _timeseriesAspectService.getAggregatedStats( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List<UserUsageCounts> userUsageCounts = new ArrayList<>(); for (StringArray row : result.getRows()) { @@ -253,7 +291,8 @@ private List<UserUsageCounts> getUserUsageCounts(Filter filter) { try { userUsageCount.setCount(Integer.valueOf(row.get(1))); } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user usage count from ES to int", e); } } if (!row.get(2).equals(ES_NULL_VALUE)) { @@ -267,18 +306,26 @@ private List<UserUsageCounts> getUserUsageCounts(Filter filter) { private List<FieldUsageCounts> getFieldUsageCounts(Filter filter) { // Sum aggregation on fieldCounts.count AggregationSpec sumFieldCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("fieldCounts.count"); - AggregationSpec[] aggregationSpecs = new AggregationSpec[]{sumFieldCountAggSpec}; + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("fieldCounts.count"); + AggregationSpec[] aggregationSpecs = new AggregationSpec[] 
{sumFieldCountAggSpec}; // String grouping bucket on fieldCounts.fieldName GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("fieldCounts.fieldPath").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("fieldCounts.fieldPath") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend GenericTable result = - _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs, - filter, groupingBuckets); + _timeseriesAspectService.getAggregatedStats( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List<FieldUsageCounts> fieldUsageCounts = new ArrayList<>(); @@ -289,7 +336,8 @@ private List<FieldUsageCounts> getFieldUsageCounts(Filter filter) { try { fieldUsageCount.setCount(Integer.valueOf(row.get(1))); } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert field usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert field usage count from ES to int", e); } } fieldUsageCounts.add(fieldUsageCount); @@ -312,80 +360,100 @@ private UsageQueryResultAggregations getAggregations(Filter filter) { @Action(name = ACTION_QUERY) @Nonnull @WithSpan - public Task<UsageQueryResult> query(@ActionParam(PARAM_RESOURCE) @Nonnull String resource, + public Task<UsageQueryResult> query( + @ActionParam(PARAM_RESOURCE) @Nonnull String resource, @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, - @ActionParam(PARAM_START_TIME) @com.linkedin.restli.server.annotations.Optional Long startTime, + @ActionParam(PARAM_START_TIME) @com.linkedin.restli.server.annotations.Optional + Long startTime, @ActionParam(PARAM_END_TIME) @com.linkedin.restli.server.annotations.Optional Long endTime, - @ActionParam(PARAM_MAX_BUCKETS) @com.linkedin.restli.server.annotations.Optional Integer maxBuckets) { + @ActionParam(PARAM_MAX_BUCKETS) @com.linkedin.restli.server.annotations.Optional + Integer maxBuckets) { log.info("Attempting to query usage stats"); - return RestliUtil.toTask(() -> { - Authentication auth = AuthenticationContext.getAuthentication(); - Urn resourceUrn = UrnUtils.getUrn(resource); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), - new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to query usage."); - } - // 1. Populate the filter. This is common for all queries. 
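          // Illustrative only: the filter assembled in this block is the
          // conjunction
          //   urn == <resource> AND timestamp >= startTime AND timestamp <= endTime,
          // wrapped as a single ConjunctiveCriterion inside the top-level "or"
          // array, i.e. roughly this shape once serialized (the timestamp
          // field name is assumed from ES_FIELD_TIMESTAMP, whose value is not
          // visible in this hunk):
          //   { "or": [ { "and": [
          //       { "field": "urn",             "condition": "EQUAL",                    "value": "<resource>" },
          //       { "field": "timestampMillis", "condition": "GREATER_THAN_OR_EQUAL_TO", "value": "<startTime>" },
          //       { "field": "timestampMillis", "condition": "LESS_THAN_OR_EQUAL_TO",    "value": "<endTime>" } ] } ] }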
- Filter filter = new Filter(); - ArrayList<Criterion> criteria = new ArrayList<>(); - Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource); - criteria.add(hasUrnCriterion); - if (startTime != null) { - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(startTime.toString()); - criteria.add(startTimeCriterion); - } - if (endTime != null) { - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(endTime.toString()); - criteria.add(endTimeCriterion); - } - - filter.setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); - - // 2. Get buckets. - UsageAggregationArray buckets = getBuckets(filter, resource, duration); - - // 3. Get aggregations. - UsageQueryResultAggregations aggregations = getAggregations(filter); - - // 4. Compute totalSqlQuery count from the buckets itself. - // We want to avoid issuing an additional query with a sum aggregation. - Integer totalQueryCount = null; - for (UsageAggregation bucket : buckets) { - if (bucket.getMetrics().getTotalSqlQueries() != null) { - if (totalQueryCount == null) { - totalQueryCount = 0; + return RestliUtil.toTask( + () -> { + Authentication auth = AuthenticationContext.getAuthentication(); + Urn resourceUrn = UrnUtils.getUrn(resource); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), + new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage."); + } + // 1. Populate the filter. This is common for all queries. + Filter filter = new Filter(); + ArrayList<Criterion> criteria = new ArrayList<>(); + Criterion hasUrnCriterion = + new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource); + criteria.add(hasUrnCriterion); + if (startTime != null) { + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(startTime.toString()); + criteria.add(startTimeCriterion); + } + if (endTime != null) { + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(endTime.toString()); + criteria.add(endTimeCriterion); } - totalQueryCount += bucket.getMetrics().getTotalSqlQueries(); - } - } - if (totalQueryCount != null) { - aggregations.setTotalSqlQueries(totalQueryCount); - } + filter.setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); + + // 2. Get buckets. + UsageAggregationArray buckets = getBuckets(filter, resource, duration); + + // 3. Get aggregations. + UsageQueryResultAggregations aggregations = getAggregations(filter); + + // 4. Compute totalSqlQuery count from the buckets itself. + // We want to avoid issuing an additional query with a sum aggregation. + Integer totalQueryCount = null; + for (UsageAggregation bucket : buckets) { + if (bucket.getMetrics().getTotalSqlQueries() != null) { + if (totalQueryCount == null) { + totalQueryCount = 0; + } + totalQueryCount += bucket.getMetrics().getTotalSqlQueries(); + } + } - // 5. Populate and return the result. 
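          // Equivalent, more compact form of the null-tolerant step-4
          // summation above; illustrative only (the patch itself keeps the
          // explicit loop), and assumes java.util.Objects is imported:
          Integer totalQueryCountViaStream =
              buckets.stream()
                  .map(b -> b.getMetrics().getTotalSqlQueries())
                  .filter(Objects::nonNull)
                  .reduce(Integer::sum)
                  .orElse(null);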
- return new UsageQueryResult().setBuckets(buckets).setAggregations(aggregations); - }, MetricRegistry.name(this.getClass(), "query")); + if (totalQueryCount != null) { + aggregations.setTotalSqlQueries(totalQueryCount); + } + + // 5. Populate and return the result. + return new UsageQueryResult().setBuckets(buckets).setAggregations(aggregations); + }, + MetricRegistry.name(this.getClass(), "query")); } @Action(name = ACTION_QUERY_RANGE) @Nonnull @WithSpan - public Task<UsageQueryResult> queryRange(@ActionParam(PARAM_RESOURCE) @Nonnull String resource, - @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, @ActionParam(PARAM_RANGE) UsageTimeRange range) { + public Task<UsageQueryResult> queryRange( + @ActionParam(PARAM_RESOURCE) @Nonnull String resource, + @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, + @ActionParam(PARAM_RANGE) UsageTimeRange range) { Authentication auth = AuthenticationContext.getAuthentication(); Urn resourceUrn = UrnUtils.getUrn(resource); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to query usage."); + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage."); } final long now = Instant.now().toEpochMilli(); return this.query(resource, duration, convertRangeToStartTime(range, now), now, null); @@ -418,7 +486,8 @@ private void ingest(@Nonnull UsageAggregation bucket) { datasetUsageStatistics.setUserCounts(datasetUserUsageCountsArray); } if (aggregationMetrics.hasFields()) { - DatasetFieldUsageCountsArray datasetFieldUsageCountsArray = new DatasetFieldUsageCountsArray(); + DatasetFieldUsageCountsArray datasetFieldUsageCountsArray = + new DatasetFieldUsageCountsArray(); for (FieldUsageCounts f : aggregationMetrics.getFields()) { DatasetFieldUsageCounts datasetFieldUsageCounts = new DatasetFieldUsageCounts(); datasetFieldUsageCounts.setFieldPath(f.getFieldName()); @@ -431,17 +500,23 @@ private void ingest(@Nonnull UsageAggregation bucket) { Map<String, JsonNode> documents; try { documents = - TimeseriesAspectTransformer.transform(bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(), - null); + TimeseriesAspectTransformer.transform( + bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(), null); } catch (JsonProcessingException e) { log.error("Failed to generate timeseries document from aspect: {}", e.toString()); return; } // 3. Upsert the exploded documents to timeseries aspect service. 
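    // Slightly simpler equivalent of the upsert loop in this hunk, since Map
    // exposes a BiConsumer-based forEach (illustrative; the patch keeps the
    // entrySet() form):
    documents.forEach(
        (docId, doc) ->
            _timeseriesAspectService.upsertDocument(
                USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, docId, doc));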
- documents.entrySet().forEach(document -> { - _timeseriesAspectService.upsertDocument(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, document.getKey(), - document.getValue()); - }); + documents + .entrySet() + .forEach( + document -> { + _timeseriesAspectService.upsertDocument( + USAGE_STATS_ENTITY_NAME, + USAGE_STATS_ASPECT_NAME, + document.getKey(), + document.getValue()); + }); } @Nonnull diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java index 351a3d8f24e36..d6eeb1a01ac15 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,9 +18,9 @@ import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.UpdateAspectResult; import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; -import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -27,15 +30,10 @@ import com.linkedin.mxe.MetadataChangeProposal; import java.net.URISyntaxException; import java.util.List; - import mock.MockEntityRegistry; import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; - - public class AspectResourceTest { private AspectResource _aspectResource; private EntityService _entityService; @@ -54,7 +52,9 @@ public void setup() { _entityRegistry = new MockEntityRegistry(); _updateIndicesService = mock(UpdateIndicesService.class); _preProcessHooks = mock(PreProcessHooks.class); - _entityService = new EntityServiceImpl(_aspectDao, _producer, _entityRegistry, false, _updateIndicesService, _preProcessHooks); + _entityService = + new EntityServiceImpl( + _aspectDao, _producer, _entityRegistry, false, _updateIndicesService, _preProcessHooks); _authorizer = mock(Authorizer.class); _aspectResource.setAuthorizer(_authorizer); _aspectResource.setEntityService(_entityService); @@ -82,36 +82,49 @@ public void testAsyncDefaultAspects() throws URISyntaxException { reset(_producer, _aspectDao); - UpsertBatchItem req = UpsertBatchItem.builder() + UpsertBatchItem req = + UpsertBatchItem.builder() .urn(urn) .aspectName(mcp.getAspectName()) .aspect(mcp.getAspect()) .metadataChangeProposal(mcp) .build(_entityRegistry); when(_aspectDao.runInTransactionWithRetry(any(), any(), anyInt())) - .thenReturn(List.of( - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name1")) - .auditStamp(new AuditStamp()) - .request(req).build(), - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name2")) - .auditStamp(new AuditStamp()) - .request(req).build(), - 
UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name3")) - .auditStamp(new AuditStamp()) - .request(req).build(), - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name4")) - .auditStamp(new AuditStamp()) - .request(req).build(), - UpdateAspectResult.builder().urn(urn) - .newValue(new DatasetProperties().setName("name5")) - .auditStamp(new AuditStamp()) - .request(req).build())); + .thenReturn( + List.of( + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name1")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name2")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name3")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name4")) + .auditStamp(new AuditStamp()) + .request(req) + .build(), + UpdateAspectResult.builder() + .urn(urn) + .newValue(new DatasetProperties().setName("name5")) + .auditStamp(new AuditStamp()) + .request(req) + .build())); _aspectResource.ingestProposal(mcp, "false"); - verify(_producer, times(5)).produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class)); + verify(_producer, times(5)) + .produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class)); verifyNoMoreInteractions(_producer); } } diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java index 470c6e87040ec..bdfe906f42af9 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java @@ -1,18 +1,16 @@ package com.linkedin.metadata.resources.operations; +import static org.testng.AssertJUnit.*; + import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.util.Pair; import java.util.List; import mock.MockTimeseriesAspectService; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - public class OperationsResourceTest { private static final String TASK_ID = "taskId123"; - @Test public void testDryRun() { TimeseriesAspectService mockTimeseriesAspectService = new MockTimeseriesAspectService(); @@ -20,11 +18,13 @@ public void testDryRun() { String aspectName = "datasetusagestatistics"; long endTimeMillis = 3000; OperationsResource testResource = new OperationsResource(mockTimeseriesAspectService); - String output = testResource.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, null, - null, null, null); + String output = + testResource.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, true, null, null, null, null); assertTrue(output.contains("This was a dry run")); - output = testResource.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, false, null, - null, null, null); + output = + testResource.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, false, null, null, null, null); assertEquals(TASK_ID, output); } @@ -42,59 
+42,113 @@ public void testIsTaskIdValid() { @Test public void testForceFlags() { final String reindexTaskId = "REINDEX_TASK_ID"; - TimeseriesAspectService mockTimeseriesAspectServiceWouldDeleteByQuery = new MockTimeseriesAspectService(); - TimeseriesAspectService mockTimeseriesAspectServiceWouldReindex = new MockTimeseriesAspectService(30, 20, reindexTaskId); + TimeseriesAspectService mockTimeseriesAspectServiceWouldDeleteByQuery = + new MockTimeseriesAspectService(); + TimeseriesAspectService mockTimeseriesAspectServiceWouldReindex = + new MockTimeseriesAspectService(30, 20, reindexTaskId); String entityType = "dataset"; String aspectName = "datasetusagestatistics"; long endTimeMillis = 3000; - OperationsResource testResourceWouldReindex = new OperationsResource(mockTimeseriesAspectServiceWouldReindex); - OperationsResource testResourceWouldDeleteByQuery = new OperationsResource(mockTimeseriesAspectServiceWouldDeleteByQuery); + OperationsResource testResourceWouldReindex = + new OperationsResource(mockTimeseriesAspectServiceWouldReindex); + OperationsResource testResourceWouldDeleteByQuery = + new OperationsResource(mockTimeseriesAspectServiceWouldDeleteByQuery); - String result = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, true, true); + String result = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, true, null, null, true, true); String errorIfFlagsAreIncompatable = "please only set forceReindex OR forceDeleteByQuery flags"; assertEquals(errorIfFlagsAreIncompatable, result); - - result = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, false, false); + result = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, aspectName, endTimeMillis, true, null, null, false, false); assertEquals(errorIfFlagsAreIncompatable, result); - - List<Pair<Boolean, Boolean>> - validOptionsNothingForced = List.of(Pair.of(null, null), Pair.of(null, false), Pair.of(false, null)); + List<Pair<Boolean, Boolean>> validOptionsNothingForced = + List.of(Pair.of(null, null), Pair.of(null, false), Pair.of(false, null)); for (Pair<Boolean, Boolean> values : validOptionsNothingForced) { - String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String reindexResult = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); assertNotSame(errorIfFlagsAreIncompatable, reindexResult); assertTrue(reindexResult.contains("Reindexing the aspect without the deleted records")); - String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String deleteResult = + testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); assertNotSame(errorIfFlagsAreIncompatable, deleteResult); assertTrue(deleteResult.contains("Issuing a delete by query request. 
")); } - List<Pair<Boolean, Boolean>> validOptionsForceDeleteByQuery = List.of(Pair.of(true, null), Pair.of(true, false)); + List<Pair<Boolean, Boolean>> validOptionsForceDeleteByQuery = + List.of(Pair.of(true, null), Pair.of(true, false)); for (Pair<Boolean, Boolean> values : validOptionsForceDeleteByQuery) { - String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); - String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String reindexResult = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); + String deleteResult = + testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); for (String res : List.of(reindexResult, deleteResult)) { assertNotSame(errorIfFlagsAreIncompatable, res); assertTrue(res.contains("Issuing a delete by query request. ")); } } - List<Pair<Boolean, Boolean>> validOptionsForceReindex = List.of(Pair.of(null, true), Pair.of(false, true)); + List<Pair<Boolean, Boolean>> validOptionsForceReindex = + List.of(Pair.of(null, true), Pair.of(false, true)); for (Pair<Boolean, Boolean> values : validOptionsForceReindex) { - String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); - String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, - null, null, values.getFirst(), values.getSecond()); + String reindexResult = + testResourceWouldReindex.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); + String deleteResult = + testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect( + entityType, + aspectName, + endTimeMillis, + true, + null, + null, + values.getFirst(), + values.getSecond()); for (String res : List.of(reindexResult, deleteResult)) { assertNotSame(errorIfFlagsAreIncompatable, res); assertTrue(res.contains("Reindexing the aspect without the deleted records")); } } } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java index 81d2bbd88b3e6..2a12ecf6866bb 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java +++ b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java @@ -16,7 +16,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class MockTimeseriesAspectService implements TimeseriesAspectService { public static final long DEFAULT_COUNT = 30; @@ -32,6 +31,7 @@ public MockTimeseriesAspectService() { this._filteredCount = DEFAULT_FILTERED_COUNT; this._taskId = DEFAULT_TASK_ID; } + public MockTimeseriesAspectService(long count, long filteredCount, String taskId) { this._count = count; this._filteredCount = filteredCount; @@ -39,12 +39,11 @@ public MockTimeseriesAspectService(long count, long filteredCount, String taskId 
} @Override - public void configure() { - - } + public void configure() {} @Override - public long countByFilter(@Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter) { + public long countByFilter( + @Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter) { if (filter != null && !filter.equals(new Filter())) { return _filteredCount; } @@ -53,36 +52,51 @@ public long countByFilter(@Nonnull String entityName, @Nonnull String aspectName @Nonnull @Override - public List<EnvelopedAspect> getAspectValues(@Nonnull Urn urn, @Nonnull String entityName, - @Nonnull String aspectName, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, - @Nullable Integer limit, @Nullable Filter filter, @Nullable SortCriterion sort) { + public List<EnvelopedAspect> getAspectValues( + @Nonnull Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort) { return List.of(); } @Nonnull @Override - public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter, + public GenericTable getAggregatedStats( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull AggregationSpec[] aggregationSpecs, + @Nullable Filter filter, @Nullable GroupingBucket[] groupingBuckets) { return new GenericTable(); } @Nonnull @Override - public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull Filter filter) { + public DeleteAspectValuesResult deleteAspectValues( + @Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter) { return new DeleteAspectValuesResult(); } @Nonnull @Override - public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { + public String deleteAspectValuesAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, + @Nonnull BatchWriteOperationsOptions options) { return _taskId; } @Override - public String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String reindexAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { return _taskId; } @@ -94,10 +108,11 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId) } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull String docId, - @Nonnull JsonNode document) { - - } + public void upsertDocument( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull String docId, + @Nonnull JsonNode document) {} @Override public List<TimeseriesIndexSizeResult> getIndexSizes() { diff --git a/metadata-service/schema-registry-api/build.gradle b/metadata-service/schema-registry-api/build.gradle index 290126836eb4a..077d7d4f2d6a4 100644 --- a/metadata-service/schema-registry-api/build.gradle +++ b/metadata-service/schema-registry-api/build.gradle @@ -45,5 +45,3 @@ tasks.register('generateOpenApiPojos', GenerateSwaggerCode) { sourceSets.main.java.srcDirs "${generateOpenApiPojos.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" -// Disable checkstyle 
for this module. -checkstyleMain.source = "${projectDir}/src/main/java" diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java index dc7fd5e20d9cd..58058dc3332b0 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java @@ -1,24 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Compatibility check response - */ +/** Compatibility check response */ @io.swagger.v3.oas.annotations.media.Schema(description = "Compatibility check response") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class CompatibilityCheckResponse { +public class CompatibilityCheckResponse { @JsonProperty("is_compatible") private Boolean isCompatible = null; @@ -34,11 +32,12 @@ public CompatibilityCheckResponse isCompatible(Boolean isCompatible) { /** * Whether the compared schemas are compatible + * * @return isCompatible - **/ - @io.swagger.v3.oas.annotations.media.Schema(description = "Whether the compared schemas are compatible") - - public Boolean isIsCompatible() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + description = "Whether the compared schemas are compatible") + public Boolean isIsCompatible() { return isCompatible; } @@ -61,11 +60,11 @@ public CompatibilityCheckResponse addMessagesItem(String messagesItem) { /** * Error messages + * * @return messages - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "[]", description = "Error messages") - - public List<String> getMessages() { + public List<String> getMessages() { return messages; } @@ -73,7 +72,6 @@ public void setMessages(List<String> messages) { this.messages = messages; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -83,8 +81,8 @@ public boolean equals(java.lang.Object o) { return false; } CompatibilityCheckResponse compatibilityCheckResponse = (CompatibilityCheckResponse) o; - return Objects.equals(this.isCompatible, compatibilityCheckResponse.isCompatible) && - Objects.equals(this.messages, compatibilityCheckResponse.messages); + return Objects.equals(this.isCompatible, compatibilityCheckResponse.isCompatible) + && Objects.equals(this.messages, compatibilityCheckResponse.messages); } @Override @@ -96,7 +94,7 @@ public 
int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class CompatibilityCheckResponse {\n"); - + sb.append(" isCompatible: ").append(toIndentedString(isCompatible)).append("\n"); sb.append(" messages: ").append(toIndentedString(messages)).append("\n"); sb.append("}"); @@ -104,8 +102,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java index 9e338b232e8da..0a223a88cfd33 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java @@ -1,40 +1,36 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Config - */ +/** Config */ @io.swagger.v3.oas.annotations.media.Schema(description = "Config") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Config { +public class Config { - /** - * Compatibility Level - */ + /** Compatibility Level */ public enum CompatibilityLevelEnum { BACKWARD("BACKWARD"), - + BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), - + FORWARD("FORWARD"), - + FORWARD_TRANSITIVE("FORWARD_TRANSITIVE"), - + FULL("FULL"), - + FULL_TRANSITIVE("FULL_TRANSITIVE"), - + NONE("NONE"); private String value; @@ -59,6 +55,7 @@ public static CompatibilityLevelEnum fromValue(String text) { return null; } } + @JsonProperty("compatibilityLevel") private CompatibilityLevelEnum compatibilityLevel = null; @@ -69,11 +66,13 @@ public Config compatibilityLevel(CompatibilityLevelEnum compatibilityLevel) { /** * Compatibility Level + * * @return compatibilityLevel - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "FULL_TRANSITIVE", description = "Compatibility Level") - - public CompatibilityLevelEnum getCompatibilityLevel() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "FULL_TRANSITIVE", + description = "Compatibility Level") + 
public CompatibilityLevelEnum getCompatibilityLevel() { return compatibilityLevel; } @@ -81,7 +80,6 @@ public void setCompatibilityLevel(CompatibilityLevelEnum compatibilityLevel) { this.compatibilityLevel = compatibilityLevel; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -103,15 +101,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Config {\n"); - + sb.append(" compatibilityLevel: ").append(toIndentedString(compatibilityLevel)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -119,4 +116,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java index 5b586e184c6ce..b179149b33d01 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java @@ -1,40 +1,36 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Config update request - */ +/** Config update request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Config update request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ConfigUpdateRequest { +public class ConfigUpdateRequest { - /** - * Compatibility Level - */ + /** Compatibility Level */ public enum CompatibilityEnum { BACKWARD("BACKWARD"), - + BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), - + FORWARD("FORWARD"), - + FORWARD_TRANSITIVE("FORWARD_TRANSITIVE"), - + FULL("FULL"), - + FULL_TRANSITIVE("FULL_TRANSITIVE"), - + NONE("NONE"); private String value; @@ -59,6 +55,7 @@ public static CompatibilityEnum fromValue(String text) { return null; } } + @JsonProperty("compatibility") private CompatibilityEnum compatibility = null; @@ -69,11 +66,13 @@ public ConfigUpdateRequest compatibility(CompatibilityEnum compatibility) { /** * Compatibility Level + * * @return 
compatibility - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "FULL_TRANSITIVE", description = "Compatibility Level") - - public CompatibilityEnum getCompatibility() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "FULL_TRANSITIVE", + description = "Compatibility Level") + public CompatibilityEnum getCompatibility() { return compatibility; } @@ -81,7 +80,6 @@ public void setCompatibility(CompatibilityEnum compatibility) { this.compatibility = compatibility; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -103,15 +101,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ConfigUpdateRequest {\n"); - + sb.append(" compatibility: ").append(toIndentedString(compatibility)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -119,4 +116,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java index f462d359bdea6..2f20d77b66137 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Error message - */ +/** Error message */ @io.swagger.v3.oas.annotations.media.Schema(description = "Error message") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ErrorMessage { +public class ErrorMessage { @JsonProperty("error_code") private Integer errorCode = null; @@ -31,11 +28,11 @@ public ErrorMessage errorCode(Integer errorCode) { /** * Error code + * * @return errorCode - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Error code") - - public Integer getErrorCode() { + public Integer getErrorCode() { return errorCode; } @@ -50,11 +47,11 @@ public ErrorMessage message(String message) { /** * Detailed error message + * * @return message - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Detailed 
error message") - - public String getMessage() { + public String getMessage() { return message; } @@ -62,7 +59,6 @@ public void setMessage(String message) { this.message = message; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -72,8 +68,8 @@ public boolean equals(java.lang.Object o) { return false; } ErrorMessage errorMessage = (ErrorMessage) o; - return Objects.equals(this.errorCode, errorMessage.errorCode) && - Objects.equals(this.message, errorMessage.message); + return Objects.equals(this.errorCode, errorMessage.errorCode) + && Objects.equals(this.message, errorMessage.message); } @Override @@ -85,7 +81,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ErrorMessage {\n"); - + sb.append(" errorCode: ").append(toIndentedString(errorCode)).append("\n"); sb.append(" message: ").append(toIndentedString(message)).append("\n"); sb.append("}"); @@ -93,8 +89,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -102,4 +97,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java index 2c6642c97c507..5a418401278d3 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java @@ -1,34 +1,30 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema Registry operating mode - */ +/** Schema Registry operating mode */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema Registry operating mode") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Mode { +public class Mode { - /** - * Schema Registry operating mode - */ + /** Schema Registry operating mode */ public enum ModeEnum { READWRITE("READWRITE"), - + READONLY("READONLY"), - + READONLY_OVERRIDE("READONLY_OVERRIDE"), - + IMPORT("IMPORT"); private String value; @@ -53,6 +49,7 @@ public static ModeEnum 
fromValue(String text) { return null; } } + @JsonProperty("mode") private ModeEnum mode = null; @@ -63,11 +60,13 @@ public Mode mode(ModeEnum mode) { /** * Schema Registry operating mode + * * @return mode - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "READWRITE", description = "Schema Registry operating mode") - - public ModeEnum getMode() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "READWRITE", + description = "Schema Registry operating mode") + public ModeEnum getMode() { return mode; } @@ -75,7 +74,6 @@ public void setMode(ModeEnum mode) { this.mode = mode; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -97,15 +95,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Mode {\n"); - + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java index c2fffea0034f9..2cbbe4d5351d8 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java @@ -1,34 +1,30 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Mode update request - */ +/** Mode update request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Mode update request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ModeUpdateRequest { +public class ModeUpdateRequest { - /** - * Schema Registry operating mode - */ + /** Schema Registry operating mode */ public enum ModeEnum { READWRITE("READWRITE"), - + READONLY("READONLY"), - + READONLY_OVERRIDE("READONLY_OVERRIDE"), - + IMPORT("IMPORT"); private String value; @@ -53,6 +49,7 @@ public static ModeEnum fromValue(String text) { 
return null; } } + @JsonProperty("mode") private ModeEnum mode = null; @@ -63,11 +60,13 @@ public ModeUpdateRequest mode(ModeEnum mode) { /** * Schema Registry operating mode + * * @return mode - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "READWRITE", description = "Schema Registry operating mode") - - public ModeEnum getMode() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "READWRITE", + description = "Schema Registry operating mode") + public ModeEnum getMode() { return mode; } @@ -75,7 +74,6 @@ public void setMode(ModeEnum mode) { this.mode = mode; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -97,15 +95,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ModeUpdateRequest {\n"); - + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java index 4f535f343f433..d7b2b28123b6f 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java @@ -1,25 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.datahubproject.schema_registry.openapi.generated.SchemaReference; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema register request - */ +/** Schema register request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema register request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class RegisterSchemaRequest { +public class RegisterSchemaRequest { @JsonProperty("version") private Integer version = null; @@ -44,11 +41,11 @@ public RegisterSchemaRequest version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Version 
number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -63,11 +60,12 @@ public RegisterSchemaRequest id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -82,11 +80,11 @@ public RegisterSchemaRequest schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -109,11 +107,12 @@ public RegisterSchemaRequest addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") - @Valid - public List<SchemaReference> getReferences() { + @Valid + public List<SchemaReference> getReferences() { return references; } @@ -128,11 +127,11 @@ public RegisterSchemaRequest schema(String schema) { /** * Schema definition string + * * @return schema - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema definition string") - - public String getSchema() { + public String getSchema() { return schema; } @@ -140,7 +139,6 @@ public void setSchema(String schema) { this.schema = schema; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -150,11 +148,11 @@ public boolean equals(java.lang.Object o) { return false; } RegisterSchemaRequest registerSchemaRequest = (RegisterSchemaRequest) o; - return Objects.equals(this.version, registerSchemaRequest.version) && - Objects.equals(this.id, registerSchemaRequest.id) && - Objects.equals(this.schemaType, registerSchemaRequest.schemaType) && - Objects.equals(this.references, registerSchemaRequest.references) && - Objects.equals(this.schema, registerSchemaRequest.schema); + return Objects.equals(this.version, registerSchemaRequest.version) + && Objects.equals(this.id, registerSchemaRequest.id) + && Objects.equals(this.schemaType, registerSchemaRequest.schemaType) + && Objects.equals(this.references, registerSchemaRequest.references) + && Objects.equals(this.schema, registerSchemaRequest.schema); } @Override @@ -166,7 +164,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class RegisterSchemaRequest {\n"); - + sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" schemaType: ").append(toIndentedString(schemaType)).append("\n"); @@ -177,8 +175,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -186,4 +183,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java index 7cdcb1093f34b..54e480078233b 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema register response - */ +/** Schema register response */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema register response") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class RegisterSchemaResponse { +public class RegisterSchemaResponse { @JsonProperty("id") private Integer id = null; @@ -28,11 +25,13 @@ public RegisterSchemaResponse id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "100001", description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "100001", + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -40,7 +39,6 @@ public void setId(Integer id) { this.id = id; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -62,15 +60,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class RegisterSchemaResponse {\n"); - + sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -78,4 +75,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java index b3ca087bdc5f3..cc4d5e7694976 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java @@ -8,14 +8,14 @@ import javax.validation.Valid; import org.springframework.validation.annotation.Validated; -/** - * Schema - */ +/** Schema */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Schema { +public class Schema { @JsonProperty("subject") private String subject = null; @@ -43,11 +43,11 @@ public Schema subject(String subject) { /** * Name of the subject + * * @return subject - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the subject") - - public String getSubject() { + public String getSubject() { return subject; } @@ -62,11 +62,11 @@ public Schema version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -81,11 +81,13 @@ public Schema id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "100001", description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "100001", + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -100,11 +102,11 @@ public Schema schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "AVRO", description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -127,11 +129,12 @@ public Schema addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") - @Valid - public List<SchemaReference> getReferences() { + @Valid + public List<SchemaReference> getReferences() { return references; } @@ -146,11 +149,13 @@ public Schema schema(String schema) { /** * Schema definition string + * * @return schema - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "{\"schema\": \"{\"type\": \"string\"}\"}", description = "Schema definition string") - - public String getSchema() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "{\"schema\": 
\"{\"type\": \"string\"}\"}", + description = "Schema definition string") + public String getSchema() { return schema; } @@ -158,7 +163,6 @@ public void setSchema(String schema) { this.schema = schema; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -168,12 +172,12 @@ public boolean equals(java.lang.Object o) { return false; } Schema schema = (Schema) o; - return Objects.equals(this.subject, schema.subject) && - Objects.equals(this.version, schema.version) && - Objects.equals(this.id, schema.id) && - Objects.equals(this.schemaType, schema.schemaType) && - Objects.equals(this.references, schema.references) && - Objects.equals(this.schema, schema.schema); + return Objects.equals(this.subject, schema.subject) + && Objects.equals(this.version, schema.version) + && Objects.equals(this.id, schema.id) + && Objects.equals(this.schemaType, schema.schemaType) + && Objects.equals(this.references, schema.references) + && Objects.equals(this.schema, schema.schema); } @Override @@ -185,7 +189,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Schema {\n"); - + sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); @@ -197,8 +201,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -206,4 +209,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java index 96fb685dc1bfc..a2dffa59778ed 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema reference - */ +/** Schema reference */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema reference") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class 
SchemaReference { +public class SchemaReference { @JsonProperty("name") private String name = null; @@ -34,11 +31,13 @@ public SchemaReference name(String name) { /** * Reference name + * * @return name - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "io.confluent.kafka.example.User", description = "Reference name") - - public String getName() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "io.confluent.kafka.example.User", + description = "Reference name") + public String getName() { return name; } @@ -53,11 +52,13 @@ public SchemaReference subject(String subject) { /** * Name of the referenced subject + * * @return subject - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the referenced subject") - - public String getSubject() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "User", + description = "Name of the referenced subject") + public String getSubject() { return subject; } @@ -72,11 +73,13 @@ public SchemaReference version(Integer version) { /** * Version number of the referenced subject + * * @return version - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number of the referenced subject") - - public Integer getVersion() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "1", + description = "Version number of the referenced subject") + public Integer getVersion() { return version; } @@ -84,7 +87,6 @@ public void setVersion(Integer version) { this.version = version; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -94,9 +96,9 @@ public boolean equals(java.lang.Object o) { return false; } SchemaReference schemaReference = (SchemaReference) o; - return Objects.equals(this.name, schemaReference.name) && - Objects.equals(this.subject, schemaReference.subject) && - Objects.equals(this.version, schemaReference.version); + return Objects.equals(this.name, schemaReference.name) + && Objects.equals(this.subject, schemaReference.subject) + && Objects.equals(this.version, schemaReference.version); } @Override @@ -108,7 +110,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaReference {\n"); - + sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); @@ -117,8 +119,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -126,4 +127,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java index d2832462a10c6..909416e6976b6 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java @@ -1,21 +1,18 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * SchemaRegistryServerVersion - */ +/** SchemaRegistryServerVersion */ @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SchemaRegistryServerVersion { +public class SchemaRegistryServerVersion { @JsonProperty("version") private String version = null; @@ -30,11 +27,11 @@ public SchemaRegistryServerVersion version(String version) { /** * Get version + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getVersion() { + public String getVersion() { return version; } @@ -49,11 +46,11 @@ public SchemaRegistryServerVersion commitId(String commitId) { /** * Get commitId + * * @return commitId - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getCommitId() { + public String getCommitId() { return commitId; } @@ -61,7 +58,6 @@ public void setCommitId(String commitId) { this.commitId = commitId; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -71,8 +67,8 @@ public boolean equals(java.lang.Object o) { return false; } SchemaRegistryServerVersion schemaRegistryServerVersion = (SchemaRegistryServerVersion) o; - return Objects.equals(this.version, schemaRegistryServerVersion.version) && - Objects.equals(this.commitId, schemaRegistryServerVersion.commitId); + return Objects.equals(this.version, schemaRegistryServerVersion.version) + && Objects.equals(this.commitId, schemaRegistryServerVersion.commitId); } @Override @@ -84,7 +80,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaRegistryServerVersion {\n"); - + sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" commitId: 
").append(toIndentedString(commitId)).append("\n"); sb.append("}"); @@ -92,8 +88,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -101,4 +96,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java index b2ea78e35ce22..977f5d410d667 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java @@ -1,25 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.datahubproject.schema_registry.openapi.generated.SchemaReference; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema definition - */ +/** Schema definition */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema definition") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SchemaString { +public class SchemaString { @JsonProperty("schemaType") private String schemaType = null; @@ -41,11 +38,11 @@ public SchemaString schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "AVRO", description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -60,11 +57,13 @@ public SchemaString schema(String schema) { /** * Schema string identified by the ID + * * @return schema - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "{\"schema\": \"{\"type\": \"string\"}\"}", description = "Schema string identified by the ID") - - public String getSchema() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "{\"schema\": \"{\"type\": \"string\"}\"}", + description = "Schema string identified by the ID") + public String getSchema() { return schema; } @@ -87,11 +86,12 @@ public SchemaString addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") 
- @Valid - public List<SchemaReference> getReferences() { + @Valid + public List<SchemaReference> getReferences() { return references; } @@ -106,11 +106,11 @@ public SchemaString maxId(Integer maxId) { /** * Maximum ID + * * @return maxId - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Maximum ID") - - public Integer getMaxId() { + public Integer getMaxId() { return maxId; } @@ -118,7 +118,6 @@ public void setMaxId(Integer maxId) { this.maxId = maxId; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -128,10 +127,10 @@ public boolean equals(java.lang.Object o) { return false; } SchemaString schemaString = (SchemaString) o; - return Objects.equals(this.schemaType, schemaString.schemaType) && - Objects.equals(this.schema, schemaString.schema) && - Objects.equals(this.references, schemaString.references) && - Objects.equals(this.maxId, schemaString.maxId); + return Objects.equals(this.schemaType, schemaString.schemaType) + && Objects.equals(this.schema, schemaString.schema) + && Objects.equals(this.references, schemaString.references) + && Objects.equals(this.maxId, schemaString.maxId); } @Override @@ -143,7 +142,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaString {\n"); - + sb.append(" schemaType: ").append(toIndentedString(schemaType)).append("\n"); sb.append(" schema: ").append(toIndentedString(schema)).append("\n"); sb.append(" references: ").append(toIndentedString(references)).append("\n"); @@ -153,8 +152,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -162,4 +160,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java index 2ae476b0c3efc..e215d324f536e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java @@ -1,24 +1,21 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * ServerClusterId - */ +/** ServerClusterId */ @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ServerClusterId { +public class ServerClusterId { @JsonProperty("scope") @Valid @@ -42,11 +39,11 @@ public ServerClusterId putScopeItem(String key, Object scopeItem) { /** * Get scope + * * @return scope - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public Map<String, Object> getScope() { + public Map<String, Object> getScope() { return scope; } @@ -61,11 +58,11 @@ public ServerClusterId id(String id) { /** * Get id + * * @return id - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getId() { + public String getId() { return id; } @@ -73,7 +70,6 @@ public void setId(String id) { this.id = id; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -83,8 +79,8 @@ public boolean equals(java.lang.Object o) { return false; } ServerClusterId serverClusterId = (ServerClusterId) o; - return Objects.equals(this.scope, serverClusterId.scope) && - Objects.equals(this.id, serverClusterId.id); + return Objects.equals(this.scope, serverClusterId.scope) + && Objects.equals(this.id, serverClusterId.id); } @Override @@ -96,7 +92,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ServerClusterId {\n"); - + sb.append(" scope: ").append(toIndentedString(scope)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append("}"); @@ -104,8 +100,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). 
+ * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +108,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java index 44379af934d5d..32b8979a0b71a 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Subject version pair - */ +/** Subject version pair */ @io.swagger.v3.oas.annotations.media.Schema(description = "Subject version pair") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SubjectVersion { +public class SubjectVersion { @JsonProperty("subject") private String subject = null; @@ -31,11 +28,11 @@ public SubjectVersion subject(String subject) { /** * Name of the subject + * * @return subject - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the subject") - - public String getSubject() { + public String getSubject() { return subject; } @@ -50,11 +47,11 @@ public SubjectVersion version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -62,7 +59,6 @@ public void setVersion(Integer version) { this.version = version; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -72,8 +68,8 @@ public boolean equals(java.lang.Object o) { return false; } SubjectVersion subjectVersion = (SubjectVersion) o; - return Objects.equals(this.subject, subjectVersion.subject) && - Objects.equals(this.version, subjectVersion.version); + return Objects.equals(this.subject, subjectVersion.subject) + && Objects.equals(this.version, subjectVersion.version); } @Override @@ -85,7 +81,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SubjectVersion {\n"); - + sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: 
").append(toIndentedString(version)).append("\n"); sb.append("}"); @@ -93,8 +89,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -102,4 +97,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java index e30376002ae7b..6049cb96e1e45 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java @@ -1,22 +1,24 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.CompatibilityCheckResponse; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import io.datahubproject.schema_registry.openapi.generated.RegisterSchemaRequest; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,96 +26,239 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = 
"io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface CompatibilityApi { - Logger log = LoggerFactory.getLogger(CompatibilityApi.class); + Logger log = LoggerFactory.getLogger(CompatibilityApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Test schema compatibility against a particular schema subject-version", description = "Test input schema against a particular version of a subject's schema for compatibility. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", tags={ "Compatibility (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Compatibility check result.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = CompatibilityCheckResponse.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/compatibility/subjects/{subject}/versions/{version}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName(@Parameter(in = ParameterIn.PATH, description = "Subject of the schema version against which compatibility is to be tested", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the subject's schema against which compatibility is to be tested. 
Valid values for versionId are between [1,2^31-1] or the string \"latest\".\"latest\" checks compatibility of the input schema with the last registered schema under the specified subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to return detailed error messages" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "verbose", required = false) Boolean verbose) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", CompatibilityCheckResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); + @Operation( + summary = "Test schema compatibility against a particular schema subject-version", + description = + "Test input schema against a particular version of a subject's schema for compatibility. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", + tags = {"Compatibility (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Compatibility check result.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = CompatibilityCheckResponse.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/compatibility/subjects/{subject}/versions/{version}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName( + @Parameter( + in = ParameterIn.PATH, + description = + "Subject of the schema version against which compatibility is to be tested", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the subject's schema against which compatibility is to be tested. Valid values for versionId are between [1,2^31-1] or the string \"latest\".\"latest\" checks compatibility of the input schema with the last registered schema under the specified subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return detailed error messages", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "verbose", required = false) + Boolean verbose) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", + CompatibilityCheckResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Test schema compatibility against all schemas under a subject", description = "Test input schema against a subject's schemas for compatibility, based on the configured compatibility level of the subject. In other words, it will perform the same compatibility check as register for that subject. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). 
If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", tags={ "Compatibility (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Compatibility check result.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = CompatibilityCheckResponse.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/compatibility/subjects/{subject}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject(@Parameter(in = ParameterIn.PATH, description = "Subject of the schema version against which compatibility is to be tested", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to return detailed error messages" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "verbose", required = false) Boolean verbose) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", CompatibilityCheckResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); + @Operation( + summary = "Test schema compatibility against all schemas under a subject", + description = + "Test input schema against a subject's schemas for compatibility, based on the configured compatibility level of the subject. In other words, it will perform the same compatibility check as register for that subject. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). 
If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", + tags = {"Compatibility (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Compatibility check result.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = CompatibilityCheckResponse.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/compatibility/subjects/{subject}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject( + @Parameter( + in = ParameterIn.PATH, + description = + "Subject of the schema version against which compatibility is to be tested", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return detailed error messages", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "verbose", required = false) + Boolean verbose) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", + CompatibilityCheckResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java index c30a01517d7d3..eac2fe8a3a02d 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java +++ 
b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class CompatibilityApiController implements CompatibilityApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public CompatibilityApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public CompatibilityApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java index f041211c6db4d..2e3df2f62fc32 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java @@ -1,22 +1,24 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. 
*/ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.Config; import io.datahubproject.schema_registry.openapi.generated.ConfigUpdateRequest; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,200 +26,462 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface ConfigApi { - Logger log = LoggerFactory.getLogger(ConfigApi.class); + Logger log = LoggerFactory.getLogger(ConfigApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Delete subject compatibility level", description = "Deletes the specified subject-level compatibility level config and reverts to the global default.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. 
Returns old compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<String> deleteSubjectConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"NONE\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Delete subject compatibility level", + description = + "Deletes the specified subject-level compatibility level config and reverts to the global default.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns old compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<String> deleteSubjectConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"NONE\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Delete global compatibility level", description = "Deletes the global compatibility level config and reverts to the default.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns old global compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<String> deleteTopLevelConfig() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"NONE\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Delete global compatibility level", + description = "Deletes the global compatibility level config and reverts to the default.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. 
Returns old global compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<String> deleteTopLevelConfig() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"NONE\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get subject compatibility level", description = "Retrieves compatibility level for a subject.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The subject compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Config.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Config> getSubjectLevelConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to return the global compatibility level if subject compatibility level not found" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "defaultToGlobal", required = false) Boolean defaultToGlobal) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Get subject compatibility level", + description = "Retrieves compatibility level for a subject.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The subject compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Config.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Config> getSubjectLevelConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to return the global compatibility level if subject compatibility level not found", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "defaultToGlobal", required = false) + Boolean defaultToGlobal) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get global compatibility level", description = "Retrieves the global compatibility level.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The global compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Config.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Config> getTopLevelConfig() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Get global compatibility level", + description = "Retrieves the global compatibility level.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The global compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Config.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Config> getTopLevelConfig() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update subject compatibility level", description = "Update compatibility level for the specified subject. On success, echoes the original request back to the client.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ConfigUpdateRequest.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Config Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ConfigUpdateRequest body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Update subject compatibility level", + description = + "Update compatibility level for the specified subject. On success, echoes the original request back to the client.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ConfigUpdateRequest.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Config Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ConfigUpdateRequest body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update global compatibility level", description = "Updates the global compatibility level. On success, echoes the original request back to the client.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ConfigUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ConfigUpdateRequest> updateTopLevelConfig(@Parameter(in = ParameterIn.DEFAULT, description = "Config Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ConfigUpdateRequest body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Update global compatibility level", + description = + "Updates the global compatibility level. On success, echoes the original request back to the client.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ConfigUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ConfigUpdateRequest> updateTopLevelConfig( + @Parameter( + in = ParameterIn.DEFAULT, + description = "Config Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ConfigUpdateRequest body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java index cd3dc84fb4588..4fd6963797de2 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ConfigApiController implements ConfigApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ConfigApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public ConfigApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - 
@Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java index 9ab0bc2388f7d..01b90a3c98c2d 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java @@ -1,84 +1,101 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; -import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import com.fasterxml.jackson.databind.ObjectMapper; +import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.Parameter; -import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.List; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.validation.annotation.Validated; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface ContextsApi { - Logger log = LoggerFactory.getLogger(ContextsApi.class); + Logger log = LoggerFactory.getLogger(ContextsApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default 
Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "List contexts", description = "Retrieves a list of contexts.", tags={ "Contexts (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The contexts.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/contexts", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> listContexts() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \".\", \".\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ContextsApi interface so no example is generated"); + @Operation( + summary = "List contexts", + description = "Retrieves a list of contexts.", + tags = {"Contexts (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The contexts.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/contexts", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> listContexts() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \".\", \".\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ContextsApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java index faead1a2b37b0..8b601e8fb0f2e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ContextsApiController implements ContextsApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ContextsApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public ContextsApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java 
b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java index 1d69c76c86122..711029371d583 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java @@ -1,7 +1,6 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; @@ -27,69 +26,122 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface DefaultApi { - Logger log = LoggerFactory.getLogger(DefaultApi.class); + Logger log = LoggerFactory.getLogger(DefaultApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Schema Registry Root Resource", description = "The Root resource is a no-op.", tags={ }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))) }) - @RequestMapping(value = "/", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<String> get() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); + @Operation( + summary = "Schema Registry Root Resource", + description = "The Root resource is a no-op.", + tags = {}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))) + }) + @RequestMapping( + value = "/", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default 
ResponseEntity<String> get() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"\"", String.class), HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "", description = "", tags={ }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "default response", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Map.class)))) }) - @RequestMapping(value = "/", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<Map<String, String>> post(@Parameter(in = ParameterIn.DEFAULT, description = "", schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody Map<String, String> body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"key\" : \"\"\n}", Map.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); + @Operation( + summary = "", + description = "", + tags = {}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "default response", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Map.class)))) + }) + @RequestMapping( + value = "/", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<Map<String, String>> post( + @Parameter( + in = ParameterIn.DEFAULT, + description = "", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + Map<String, String> body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"key\" : \"\"\n}", Map.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + 
return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java index 53e64d43d9572..90768b88e2f28 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class DefaultApiController implements DefaultApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public DefaultApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public DefaultApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java index f2857069d05c8..7fca1cb53cfba 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java @@ -1,22 +1,24 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. 
*/ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import io.datahubproject.schema_registry.openapi.generated.Mode; import io.datahubproject.schema_registry.openapi.generated.ModeUpdateRequest; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,173 +26,398 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; - -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface ModeApi { - Logger log = LoggerFactory.getLogger(ModeApi.class); + Logger log = LoggerFactory.getLogger(ModeApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Delete subject mode", description = "Deletes the specified subject-level mode and reverts to the global default.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns old mode.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<Mode> deleteSubjectMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Delete subject mode", + description = "Deletes the specified subject-level mode and reverts to the global default.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns old mode.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<Mode> deleteSubjectMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get subject mode", description = "Retrieves the subject mode.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The subject mode.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Mode> getMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to return the global mode if subject mode not found" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "defaultToGlobal", required = false) Boolean defaultToGlobal) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Get subject mode", + description = "Retrieves the subject mode.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The subject mode.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Mode> getMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return the global mode if subject mode not found", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "defaultToGlobal", required = false) + Boolean defaultToGlobal) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get global mode", description = "Retrieves global mode.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The global mode", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store") }) - @RequestMapping(value = "/mode", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Mode> getTopLevelMode() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Get global mode", + description = "Retrieves global mode.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The global mode", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "500", + description = "Error code 50001 -- Error in the backend data store") + }) + @RequestMapping( + 
value = "/mode", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Mode> getTopLevelMode() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update subject mode", description = "Update mode for the specified subject. On success, echoes the original request back to the client.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ModeUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ModeUpdateRequest> updateMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ModeUpdateRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to force update if setting mode to IMPORT and schemas currently exist" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "force", required = false) Boolean force) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Update subject mode", + description = + "Update mode for the specified subject. On success, echoes the original request back to the client.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ModeUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ModeUpdateRequest> updateMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ModeUpdateRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to force update if setting mode to IMPORT and schemas currently exist", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "force", required = false) + Boolean force) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update global mode", description = "Update global mode. On success, echoes the original request back to the client.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ModeUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity<ModeUpdateRequest> updateTopLevelMode(@Parameter(in = ParameterIn.DEFAULT, description = "Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ModeUpdateRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to force update if setting mode to IMPORT and schemas currently exist" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "force", required = false) Boolean force) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Update global mode", + description = + "Update global mode. On success, echoes the original request back to the client.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ModeUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity<ModeUpdateRequest> updateTopLevelMode( + @Parameter( + in = ParameterIn.DEFAULT, + description = "Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ModeUpdateRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to force update if setting mode to IMPORT and schemas currently exist", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "force", required = false) + Boolean force) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java index 97ae54ea6c9a2..28ad6fbdfbc12 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ModeApiController implements ModeApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ModeApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + 
@org.springframework.beans.factory.annotation.Autowired + public ModeApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java index cfb0fe183ee88..e01df38fca64a 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java @@ -1,7 +1,6 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; @@ -32,180 +31,526 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface SchemasApi { - Logger log = LoggerFactory.getLogger(SchemasApi.class); + Logger log = LoggerFactory.getLogger(SchemasApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Get schema string by ID", description = "Retrieves the schema string identified by the input ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = SchemaString.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<SchemaString> getSchema(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Name of the subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Desired output format, dependent on schema type" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "format", required = false) String format, @Parameter(in = ParameterIn.QUERY, description = "Whether to fetch the maximum schema identifier that exists" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "fetchMaxId", required = false, defaultValue="false") Boolean fetchMaxId) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"maxId\" : 1,\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"schemaType\" : \"AVRO\"\n}", SchemaString.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "Get schema string by ID", + description = "Retrieves the schema string identified by the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = SchemaString.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<SchemaString> getSchema( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Name of the subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Desired output format, dependent on schema type", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "format", required = false) + String format, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to fetch the maximum schema identifier that exists", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "fetchMaxId", required = false, defaultValue = "false") + Boolean fetchMaxId) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"maxId\" : 1,\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"schemaType\" : \"AVRO\"\n}", + SchemaString.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get schema by ID", description = "Retrieves the schema identified by the input ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Raw schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}/schema", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<String> getSchemaOnly(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Name of the subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Desired output format, dependent on schema type" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "format", required = false) String format) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "Get schema by ID", + description = "Retrieves the schema identified by the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Raw schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}/schema", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<String> getSchemaOnly( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Name of the subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Desired output format, dependent on schema type", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "format", required = false) + String format) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List supported schema types", description = "Retrieve the schema types supported by this registry.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of supported schema types.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/types", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> getSchemaTypes() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"AVRO\", \"AVRO\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List supported schema types", + description = "Retrieve the schema types supported by this registry.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of supported schema types.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/types", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> getSchemaTypes() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \"AVRO\", \"AVRO\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List schemas", description = "Get the schemas matching the specified parameters.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of schemas matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<Schema>> getSchemas(@Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject prefix" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subjectPrefix", required = false) String subjectPrefix, @Parameter(in = ParameterIn.QUERY, description = "Whether to return soft deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "deleted", required = false, defaultValue="false") Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return latest schema versions only for each matching subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "latestOnly", required = false, defaultValue="false") Boolean latestOnly, @Parameter(in = ParameterIn.QUERY, description = "Pagination offset for results" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="0")) @Valid @RequestParam(value = "offset", required = false, defaultValue="0") Integer offset, @Parameter(in = ParameterIn.QUERY, description = "Pagination size for results. Ignored if negative" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="-1")) @Valid @RequestParam(value = "limit", required = false, defaultValue="-1") Integer limit) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}, {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n} ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List schemas", + description = "Get the schemas matching the specified parameters.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of schemas matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + 
@io.swagger.v3.oas.annotations.media.Schema( + implementation = Schema.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<Schema>> getSchemas( + @Parameter( + in = ParameterIn.QUERY, + description = "Filters results by the respective subject prefix", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subjectPrefix", required = false) + String subjectPrefix, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return soft deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "deleted", required = false, defaultValue = "false") + Boolean deleted, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to return latest schema versions only for each matching subject", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "latestOnly", required = false, defaultValue = "false") + Boolean latestOnly, + @Parameter( + in = ParameterIn.QUERY, + description = "Pagination offset for results", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "0")) + @Valid + @RequestParam(value = "offset", required = false, defaultValue = "0") + Integer offset, + @Parameter( + in = ParameterIn.QUERY, + description = "Pagination size for results. 
Ignored if negative", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "-1")) + @Valid + @RequestParam(value = "limit", required = false, defaultValue = "-1") + Integer limit) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "[ {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}, {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n} ]", + List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List subjects associated to schema ID", description = "Retrieves all the subjects associated with a particular schema ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of subjects matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}/subjects", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> getSubjects(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include subjects where the schema was deleted" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List subjects associated to schema ID", + description = "Retrieves all the subjects associated with a particular schema ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of subjects matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}/subjects", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> getSubjects( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Filters results by the respective subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include subjects where the schema was deleted", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List subject-versions associated to schema ID", description = "Get all the subject-version pairs associated with the input ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of subject versions matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = SubjectVersion.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<SubjectVersion>> getVersions(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include subject versions where the schema was deleted" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ {\n \"subject\" : \"User\",\n \"version\" : 1\n}, {\n \"subject\" : \"User\",\n \"version\" : 1\n} ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List subject-versions associated to schema ID", + description = "Get all the subject-version pairs associated with the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of subject versions matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = SubjectVersion.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<SubjectVersion>> getVersions( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Filters results by the respective subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include subject versions where the schema was deleted", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "[ {\n \"subject\" : \"User\",\n \"version\" : 1\n}, {\n \"subject\" : \"User\",\n \"version\" : 1\n} ]", + List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java index 4131a93695f13..6581f5f39a647 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class SchemasApiController implements SchemasApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public SchemasApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - 
this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional<ObjectMapper> getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public SchemasApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional<HttpServletRequest> getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional<ObjectMapper> getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional<HttpServletRequest> getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java index 8977fc06387c2..544a1aff8008e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java @@ -1,7 +1,6 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; @@ -33,275 +32,847 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface SubjectsApi { - Logger log = LoggerFactory.getLogger(SubjectsApi.class); + Logger log = LoggerFactory.getLogger(SubjectsApi.class); - default Optional<ObjectMapper> getObjectMapper(){ - return Optional.empty(); - } + default Optional<ObjectMapper> getObjectMapper() { + return Optional.empty(); + } - default Optional<HttpServletRequest> getRequest(){ - return Optional.empty(); - } + default Optional<HttpServletRequest> getRequest() { + return Optional.empty(); + } - default Optional<String> getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional<String> getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Delete schema version", description = "Deletes a specific version of the schema registered under this subject. This only deletes the version and the schema ID remains intact making it still possible to decode data using the schema ID. This API is recommended to be used only in development environments or under extreme circumstances where-in, its required to delete a previously registered schema for compatibility purposes or re-register previously registered schema.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns the schema version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. 
Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<Integer> deleteSchemaVersion(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to perform a permanent delete" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "permanent", required = false) Boolean permanent) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("1", Integer.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Delete schema version", + description = + "Deletes a specific version of the schema registered under this subject. This only deletes the version and the schema ID remains intact making it still possible to decode data using the schema ID. This API is recommended to be used only in development environments or under extreme circumstances where-in, its required to delete a previously registered schema for compatibility purposes or re-register previously registered schema.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns the schema version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. 
Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<Integer> deleteSchemaVersion( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to perform a permanent delete", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "permanent", required = false) + Boolean permanent) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("1", Integer.class), HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Delete subject", description = "Deletes the specified subject and its associated compatibility level if registered. It is recommended to use this API only when a topic needs to be recycled or in development environment.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns list of schema versions deleted", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity<List<Integer>> deleteSubject(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to perform a permanent delete" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "permanent", required = false) Boolean permanent) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ 1, 1 ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Delete subject", + description = + "Deletes the specified subject and its associated compatibility level if registered. It is recommended to use this API only when a topic needs to be recycled or in development environment.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns list of schema versions deleted", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity<List<Integer>> deleteSubject( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to perform a permanent delete", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "permanent", required = false) + Boolean permanent) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ 1, 1 ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List schemas referencing a schema", description = "Retrieves the IDs of schemas that reference the specified schema.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of IDs for schemas that reference the specified schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}/referencedby", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<Integer>> getReferencedBy(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ 100001, 100001 ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "List schemas referencing a schema", + description = "Retrieves the IDs of schemas that reference the specified schema.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of IDs for schemas that reference the specified schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class)))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}/referencedby", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<Integer>> getReferencedBy( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ 100001, 100001 ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get schema by version", description = "Retrieves a specific version of the schema registered under this subject.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<Schema> getSchemaByVersion(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", Schema.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Get schema by version", + description = "Retrieves a specific version of the schema registered under this subject.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Schema.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<Schema> getSchemaByVersion( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include deleted schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", + Schema.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get schema string by version", description = "Retrieves the schema for the specified version of this subject. Only the unescaped schema string is returned.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. 
Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions/{version}/schema", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<String> getSchemaOnly2(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Get schema string by version", + description = + "Retrieves the schema for the specified version of this subject. Only the unescaped schema string is returned.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions/{version}/schema", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<String> getSchemaOnly2( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include deleted schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List subjects", description = "Retrieves a list of registered subjects matching specified parameters.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of subjects matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<String>> list(@Parameter(in = ParameterIn.QUERY, description = "Subject name prefix" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue=":*:")) @Valid @RequestParam(value = "subjectPrefix", required = false, defaultValue=":*:") String subjectPrefix, @Parameter(in = ParameterIn.QUERY, description = "Whether to look up deleted subjects" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return deleted subjects only" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deletedOnly", required = false) Boolean deletedOnly) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "List subjects", + description = "Retrieves a list of registered subjects matching specified parameters.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of subjects matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<String>> list( + @Parameter( + in = ParameterIn.QUERY, + description = "Subject name prefix", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = ":*:")) + @Valid + @RequestParam(value = "subjectPrefix", required = false, defaultValue = ":*:") + String subjectPrefix, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to look up deleted subjects", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return deleted subjects only", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deletedOnly", required = false) + Boolean deletedOnly) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List versions under subject", description = "Retrieves a list of versions registered under the specified subject.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of version numbers matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity<List<Integer>> listVersions(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return deleted schemas only" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deletedOnly", required = false) Boolean deletedOnly) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ 1, 1 ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "List versions under subject", + description = "Retrieves a list of versions registered under the specified subject.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of version numbers matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Integer.class)))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found. ", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity<List<Integer>> listVersions( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to include deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return deleted schemas only", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deletedOnly", required = false) + Boolean deletedOnly) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ 1, 1 ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Lookup schema under subject", description = "Check if a schema has already been registered under the specified subject. If so, this returns the schema string along with its globally unique identifier, its version under this subject and the subject name.", tags={ "Subjects (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. 
Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity<Schema> lookUpSchemaUnderSubject(@Parameter(in = ParameterIn.PATH, description = "Subject under which the schema will be registered", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to lookup the normalized schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "normalize", required = false) Boolean normalize, @Parameter(in = ParameterIn.QUERY, description = "Whether to lookup deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", Schema.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Lookup schema under subject", + description = + "Check if a schema has already been registered under the specified subject. If so, this returns the schema string along with its globally unique identifier, its version under this subject and the subject name.", + tags = {"Subjects (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Schema.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. 
Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = "Internal Server Error.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity<Schema> lookUpSchemaUnderSubject( + @Parameter( + in = ParameterIn.PATH, + description = "Subject under which the schema will be registered", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to lookup the normalized schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "normalize", required = false) + Boolean normalize, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to lookup deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", + Schema.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Register schema under a subject", description = "Register a new schema under the specified subject. If successfully registered, this returns the unique identifier of this schema in the registry. The returned identifier should be used to retrieve this schema from the schemas resource and is different from the schema's version which is associated with the subject. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects. 
A schema should be compatible with the previously registered schema or schemas (if there are any) as per the configured compatibility level. The configured compatibility level can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config When there are multiple instances of Schema Registry running in the same cluster, the schema registration request will be forwarded to one of the instances designated as the primary. If the primary is not available, the client will get an error code indicating that the forwarding has failed.", tags={ "Subjects (v1)"}, hidden = true)
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "200", description = "Schema successfully registered.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = RegisterSchemaResponse.class))),
-
-        @ApiResponse(responseCode = "409", description = "Conflict. Incompatible schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
-        @ApiResponse(responseCode = "422", description = "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
-        @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.Error code 50002 indicates operation timed out. Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
-    @RequestMapping(value = "/subjects/{subject}/versions",
-        produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
-        consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" },
-        method = RequestMethod.POST)
-    default ResponseEntity<RegisterSchemaResponse> register(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to register the normalized schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "normalize", required = false) Boolean normalize) {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-            if (getAcceptHeader().get().contains("application/json")) {
-                try {
-                    return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"id\" : 100001\n}", RegisterSchemaResponse.class), HttpStatus.NOT_IMPLEMENTED);
-                } catch (IOException e) {
-                    log.error("Couldn't serialize response for content type application/json", e);
-                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
-                }
-            }
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+  @Operation(
+      summary = "Register schema under a subject",
+      description =
+          "Register a new schema under the specified subject. If successfully registered, this returns the unique identifier of this schema in the registry. The returned identifier should be used to retrieve this schema from the schemas resource and is different from the schema's version which is associated with the subject. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects. A schema should be compatible with the previously registered schema or schemas (if there are any) as per the configured compatibility level. The configured compatibility level can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config When there are multiple instances of Schema Registry running in the same cluster, the schema registration request will be forwarded to one of the instances designated as the primary. If the primary is not available, the client will get an error code indicating that the forwarding has failed.",
+      tags = {"Subjects (v1)"},
+      hidden = true)
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "200",
+            description = "Schema successfully registered.",
+            content =
+                @Content(
+                    mediaType = "application/vnd.schemaregistry.v1+json",
+                    schema =
+                        @io.swagger.v3.oas.annotations.media.Schema(
+                            implementation = RegisterSchemaResponse.class))),
+        @ApiResponse(
+            responseCode = "409",
+            description = "Conflict. Incompatible schema.",
+            content =
+                @Content(
+                    mediaType = "application/vnd.schemaregistry.v1+json",
+                    schema =
+                        @io.swagger.v3.oas.annotations.media.Schema(
+                            implementation = ErrorMessage.class))),
+        @ApiResponse(
+            responseCode = "422",
+            description =
+                "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. ",
+            content =
+                @Content(
+                    mediaType = "application/vnd.schemaregistry.v1+json",
+                    schema =
+                        @io.swagger.v3.oas.annotations.media.Schema(
+                            implementation = ErrorMessage.class))),
+        @ApiResponse(
+            responseCode = "500",
+            description =
+                "Internal Server Error. Error code 50001 indicates a failure in the backend data store.Error code 50002 indicates operation timed out. Error code 50003 indicates a failure forwarding the request to the primary.",
+            content =
+                @Content(
+                    mediaType = "application/vnd.schemaregistry.v1+json",
+                    schema =
+                        @io.swagger.v3.oas.annotations.media.Schema(
+                            implementation = ErrorMessage.class)))
+      })
+  @RequestMapping(
+      value = "/subjects/{subject}/versions",
+      produces = {
+        "application/vnd.schemaregistry.v1+json",
+        "application/vnd.schemaregistry+json; qs=0.9",
+        "application/json; qs=0.5"
+      },
+      consumes = {
+        "application/vnd.schemaregistry.v1+json",
+        "application/vnd.schemaregistry+json",
+        "application/json",
+        "application/octet-stream"
+      },
+      method = RequestMethod.POST)
+  default ResponseEntity<RegisterSchemaResponse> register(
+      @Parameter(
+              in = ParameterIn.PATH,
+              description = "Name of the subject",
+              required = true,
+              schema = @io.swagger.v3.oas.annotations.media.Schema())
+          @PathVariable("subject")
+          String subject,
+      @Parameter(
+              in = ParameterIn.DEFAULT,
+              description = "Schema",
+              required = true,
+              schema = @io.swagger.v3.oas.annotations.media.Schema())
+          @Valid
+          @RequestBody
+          RegisterSchemaRequest body,
+      @Parameter(
+              in = ParameterIn.QUERY,
+              description = "Whether to register the normalized schema",
+              schema = @io.swagger.v3.oas.annotations.media.Schema())
+          @Valid
+          @RequestParam(value = "normalize", required = false)
+          Boolean normalize) {
+    if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+      if (getAcceptHeader().get().contains("application/json")) {
+        try {
+          return new ResponseEntity<>(
+              getObjectMapper()
+                  .get()
+                  .readValue("{\n \"id\" : 100001\n}", RegisterSchemaResponse.class),
+              HttpStatus.NOT_IMPLEMENTED);
+        } catch (IOException e) {
+          log.error("Couldn't serialize response for content type application/json", e);
+          return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
         }
-        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+      }
+    } else {
+      log.warn(
+          "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
     }
-
+    return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  }
 }
-
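The register stub reformatted above is the write path of the Confluent-compatible API: POST /subjects/{subject}/versions, with an optional normalize query parameter, returning {"id": ...} on success (the generated default method only returns 501 until a concrete controller overrides it). As a rough illustration only — the host and port are assumptions, and DataHub's own setup mounts this servlet under /schema-registry/api — a client call could look like this in plain Java 11+:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class RegisterSchemaExample {
      public static void main(String[] args) throws Exception {
        // Hypothetical endpoint; adjust host/port and base path to your deployment.
        String url = "http://localhost:8080/schema-registry/api/subjects/my-topic-value/versions";
        String payload = "{\"schemaType\": \"AVRO\", \"schema\": \"{\\\"type\\\": \\\"string\\\"}\"}";
        HttpRequest request = HttpRequest.newBuilder(URI.create(url))
            .header("Content-Type", "application/vnd.schemaregistry.v1+json")
            .POST(HttpRequest.BodyPublishers.ofString(payload))
            .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
            .send(request, HttpResponse.BodyHandlers.ofString());
        // Expect {"id": <schema id>} from a real implementation, or 501 from the default stub.
        System.out.println(response.statusCode() + " " + response.body());
      }
    }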
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java
index f9b634add7b2a..779a56d6de540 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java
@@ -1,31 +1,33 @@
 package io.swagger.api;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.stereotype.Controller;
-import javax.servlet.http.HttpServletRequest;
 import java.util.Optional;
-
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.stereotype.Controller;
+
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Controller
 public class SubjectsApiController implements SubjectsApi {
 
-    private final ObjectMapper objectMapper;
-
-    private final HttpServletRequest request;
+  private final ObjectMapper objectMapper;
 
-    @org.springframework.beans.factory.annotation.Autowired
-    public SubjectsApiController(ObjectMapper objectMapper, HttpServletRequest request) {
-        this.objectMapper = objectMapper;
-        this.request = request;
-    }
+  private final HttpServletRequest request;
 
-    @Override
-    public Optional<ObjectMapper> getObjectMapper() {
-        return Optional.ofNullable(objectMapper);
-    }
+  @org.springframework.beans.factory.annotation.Autowired
+  public SubjectsApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+    this.objectMapper = objectMapper;
+    this.request = request;
+  }
 
-    @Override
-    public Optional<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
 
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java
index 268d50aa3a68a..65961426ec364 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java
@@ -1,86 +1,82 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
  */
 package io.swagger.api;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import io.swagger.v3.oas.annotations.Operation;
-import io.swagger.v3.oas.annotations.Parameter;
-import io.swagger.v3.oas.annotations.enums.ParameterIn;
-import io.swagger.v3.oas.annotations.responses.ApiResponses;
 import io.swagger.v3.oas.annotations.responses.ApiResponse;
-import io.swagger.v3.oas.annotations.media.ArraySchema;
-import io.swagger.v3.oas.annotations.media.Content;
-import io.swagger.v3.oas.annotations.security.SecurityRequirement;
+import io.swagger.v3.oas.annotations.responses.ApiResponses;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.validation.constraints.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
 import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RequestPart;
-import org.springframework.web.multipart.MultipartFile;
-import org.springframework.web.bind.annotation.CookieValue;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.Valid;
-import javax.validation.constraints.*;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Validated
 public interface V1Api {
 
-    Logger log = LoggerFactory.getLogger(V1Api.class);
+  Logger log = LoggerFactory.getLogger(V1Api.class);
 
-    default Optional<ObjectMapper> getObjectMapper(){
-        return Optional.empty();
-    }
+  default Optional<ObjectMapper> getObjectMapper() {
+    return Optional.empty();
+  }
 
-    default Optional<HttpServletRequest> getRequest(){
-        return Optional.empty();
-    }
+  default Optional<HttpServletRequest> getRequest() {
+    return Optional.empty();
+  }
 
-    default Optional<String> getAcceptHeader() {
-        return getRequest().map(r -> r.getHeader("Accept"));
-    }
+  default Optional<String> getAcceptHeader() {
+    return getRequest().map(r -> r.getHeader("Accept"));
+  }
 
-    @Operation(summary = "Get the server metadata", description = "", tags={ "Server Metadata (v1)" }, hidden = true)
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store ") })
-    @RequestMapping(value = "/v1/metadata/id",
-        method = RequestMethod.GET)
-    default ResponseEntity<Void> getClusterId() {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
-        }
-        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  @Operation(
+      summary = "Get the server metadata",
+      description = "",
+      tags = {"Server Metadata (v1)"},
+      hidden = true)
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "500",
+            description = "Error code 50001 -- Error in the backend data store ")
+      })
+  @RequestMapping(value = "/v1/metadata/id", method = RequestMethod.GET)
+  default ResponseEntity<Void> getClusterId() {
+    if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+    } else {
+      log.warn(
+          "ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
     }
+    return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  }
 
-
-    @Operation(summary = "Get Schema Registry server version", description = "", tags={ "Server Metadata (v1)" })
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store ") })
-    @RequestMapping(value = "/v1/metadata/version",
-        method = RequestMethod.GET)
-    default ResponseEntity<Void> getSchemaRegistryVersion() {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
-        }
-        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  @Operation(
+      summary = "Get Schema Registry server version",
+      description = "",
+      tags = {"Server Metadata (v1)"})
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "500",
+            description = "Error code 50001 -- Error in the backend data store ")
+      })
+  @RequestMapping(value = "/v1/metadata/version", method = RequestMethod.GET)
+  default ResponseEntity<Void> getSchemaRegistryVersion() {
+    if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+    } else {
+      log.warn(
+          "ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
     }
-
+    return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  }
 }
-
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java
index d65db3be11231..90e56a914652e 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java
@@ -1,31 +1,33 @@
 package io.swagger.api;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.stereotype.Controller;
-import javax.servlet.http.HttpServletRequest;
 import java.util.Optional;
-
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.stereotype.Controller;
+
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Controller
 public class V1ApiController implements V1Api {
 
-    private final ObjectMapper objectMapper;
-
-    private final HttpServletRequest request;
+  private final ObjectMapper objectMapper;
 
-    @org.springframework.beans.factory.annotation.Autowired
-    public V1ApiController(ObjectMapper objectMapper, HttpServletRequest request) {
-        this.objectMapper = objectMapper;
-        this.request = request;
-    }
+  private final HttpServletRequest request;
 
-    @Override
-    public Optional<ObjectMapper> getObjectMapper() {
-        return Optional.ofNullable(objectMapper);
-    }
+  @org.springframework.beans.factory.annotation.Autowired
+  public V1ApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+    this.objectMapper = objectMapper;
+    this.request = request;
+  }
 
-    @Override
-    public Optional<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
 
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
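The generated controllers above carry no logic of their own: they hold an ObjectMapper and the current request, and the API interfaces' default methods do the rest via overridable Optional accessors. A minimal sketch of that same pattern outside Spring, with illustrative names that are not part of this codebase:

    import java.util.Optional;

    // Interface ships overridable accessors plus default behavior; a thin class supplies state.
    interface GreetingApi {
      default Optional<String> getName() {
        return Optional.empty();
      }

      // Mirrors getAcceptHeader(): derive a value via Optional.map with a fallback.
      default String greet() {
        return getName().map(n -> "Hello, " + n).orElse("Hello, nobody (not implemented)");
      }
    }

    class GreetingController implements GreetingApi {
      private final String name;

      GreetingController(String name) {
        this.name = name;
      }

      @Override
      public Optional<String> getName() {
        return Optional.ofNullable(name);
      }

      public static void main(String[] args) {
        System.out.println(new GreetingController("DataHub").greet()); // Hello, DataHub
      }
    }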
diff --git a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java
index 0cf57361e58f8..3790bbde8e39f 100644
--- a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java
+++ b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java
@@ -35,17 +35,23 @@
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
-
-/**
- * DataHub Rest Controller implementation for Confluent's Schema Registry OpenAPI spec.
- */
+/** DataHub Rest Controller implementation for Confluent's Schema Registry OpenAPI spec. */
 @Slf4j
 @RestController
 @RequestMapping("/api")
 @RequiredArgsConstructor
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = InternalSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = InternalSchemaRegistryFactory.TYPE)
 public class SchemaRegistryController
-    implements CompatibilityApi, ConfigApi, ContextsApi, DefaultApi, ModeApi, SchemasApi, SubjectsApi, V1Api {
+    implements CompatibilityApi,
+        ConfigApi,
+        ContextsApi,
+        DefaultApi,
+        ModeApi,
+        SchemasApi,
+        SubjectsApi,
+        V1Api {
 
   private final ObjectMapper objectMapper;
 
@@ -82,7 +88,8 @@ public ResponseEntity<Void> getSchemaRegistryVersion() {
   }
 
   @Override
-  public ResponseEntity<Integer> deleteSchemaVersion(String subject, String version, Boolean permanent) {
+  public ResponseEntity<Integer> deleteSchemaVersion(
+      String subject, String version, Boolean permanent) {
     log.error("[SubjectsApi] deleteSchemaVersion method not implemented");
     return SubjectsApi.super.deleteSchemaVersion(subject, version, permanent);
   }
@@ -100,7 +107,8 @@ public ResponseEntity<List<Integer>> getReferencedBy(String subject, String vers
   }
 
   @Override
-  public ResponseEntity<Schema> getSchemaByVersion(String subject, String version, Boolean deleted) {
+  public ResponseEntity<Schema> getSchemaByVersion(
+      String subject, String version, Boolean deleted) {
     log.error("[SubjectsApi] getSchemaByVersion method not implemented");
     return SubjectsApi.super.getSchemaByVersion(subject, version, deleted);
   }
@@ -112,20 +120,22 @@ public ResponseEntity<String> getSchemaOnly2(String subject, String version, Boo
   }
 
   @Override
-  public ResponseEntity<List<String>> list(String subjectPrefix, Boolean deleted, Boolean deletedOnly) {
+  public ResponseEntity<List<String>> list(
+      String subjectPrefix, Boolean deleted, Boolean deletedOnly) {
     log.error("[SubjectsApi] list method not implemented");
     return SubjectsApi.super.list(subjectPrefix, deleted, deletedOnly);
   }
 
   @Override
-  public ResponseEntity<List<Integer>> listVersions(String subject, Boolean deleted, Boolean deletedOnly) {
+  public ResponseEntity<List<Integer>> listVersions(
+      String subject, Boolean deleted, Boolean deletedOnly) {
     log.error("[SubjectsApi] listVersions method not implemented");
     return SubjectsApi.super.listVersions(subject, deleted, deletedOnly);
   }
 
   @Override
-  public ResponseEntity<Schema> lookUpSchemaUnderSubject(String subject, RegisterSchemaRequest body, Boolean normalize,
-      Boolean deleted) {
+  public ResponseEntity<Schema> lookUpSchemaUnderSubject(
+      String subject, RegisterSchemaRequest body, Boolean normalize, Boolean deleted) {
     log.error("[SubjectsApi] lookUpSchemaUnderSubject method not implemented");
     return SubjectsApi.super.lookUpSchemaUnderSubject(subject, body, normalize, deleted);
   }
@@ -149,26 +159,33 @@ public ResponseEntity<Mode> getTopLevelMode() {
   }
 
   @Override
-  public ResponseEntity<ModeUpdateRequest> updateMode(String subject, ModeUpdateRequest body, Boolean force) {
+  public ResponseEntity<ModeUpdateRequest> updateMode(
+      String subject, ModeUpdateRequest body, Boolean force) {
     log.error("[ModeApi] updateMode method not implemented");
     return ModeApi.super.updateMode(subject, body, force);
   }
 
   @Override
-  public ResponseEntity<ModeUpdateRequest> updateTopLevelMode(ModeUpdateRequest body, Boolean force) {
+  public ResponseEntity<ModeUpdateRequest> updateTopLevelMode(
+      ModeUpdateRequest body, Boolean force) {
     log.error("[ModeApi] updateTopLevelMode method not implemented");
     return ModeApi.super.updateTopLevelMode(body, force);
   }
 
   @Override
-  @Operation(summary = "Schema Registry Root Resource", description = "The Root resource is a no-op, only used to "
-      + "validate endpoint is ready.", tags = { "Schema Registry Base" })
+  @Operation(
+      summary = "Schema Registry Root Resource",
+      description = "The Root resource is a no-op, only used to " + "validate endpoint is ready.",
+      tags = {"Schema Registry Base"})
   public ResponseEntity<String> get() {
     return new ResponseEntity<>(HttpStatus.OK);
   }
 
   @Override
-  @Operation(summary = "", description = "", tags = { "Schema Registry Base" })
+  @Operation(
+      summary = "",
+      description = "",
+      tags = {"Schema Registry Base"})
   public ResponseEntity<Map<String, String>> post(Map<String, String> body) {
     log.error("[DefaultApi] post method not implemented");
     return DefaultApi.super.post(body);
@@ -205,7 +222,8 @@ public ResponseEntity<Config> getTopLevelConfig() {
   }
 
   @Override
-  public ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig(String subject, ConfigUpdateRequest body) {
+  public ResponseEntity<ConfigUpdateRequest> updateSubjectLevelConfig(
+      String subject, ConfigUpdateRequest body) {
     log.error("[ConfigApi] updateSubjectLevelConfig method not implemented");
     return ConfigApi.super.updateSubjectLevelConfig(subject, body);
   }
@@ -217,44 +235,55 @@ public ResponseEntity<ConfigUpdateRequest> updateTopLevelConfig(ConfigUpdateRequ
   }
 
   @Override
-  public ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName(String subject, String version,
-      RegisterSchemaRequest body, Boolean verbose) {
+  public ResponseEntity<CompatibilityCheckResponse> testCompatibilityBySubjectName(
+      String subject, String version, RegisterSchemaRequest body, Boolean verbose) {
     log.error("[CompatibilityApi] testCompatibilityBySubjectName method not implemented");
     return CompatibilityApi.super.testCompatibilityBySubjectName(subject, version, body, verbose);
   }
 
   @Override
-  public ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject(String subject,
-      RegisterSchemaRequest body, Boolean verbose) {
+  public ResponseEntity<CompatibilityCheckResponse> testCompatibilityForSubject(
+      String subject, RegisterSchemaRequest body, Boolean verbose) {
     log.error("[CompatibilityApi] testCompatibilityForSubject method not implemented");
     return CompatibilityApi.super.testCompatibilityForSubject(subject, body, verbose);
   }
 
   @Override
-  public ResponseEntity<RegisterSchemaResponse> register(String subject, RegisterSchemaRequest body,
-      Boolean normalize) {
+  public ResponseEntity<RegisterSchemaResponse> register(
+      String subject, RegisterSchemaRequest body, Boolean normalize) {
     final String topicName = subject.replaceFirst("-value", "");
-    return _schemaRegistryService.getSchemaIdForTopic(topicName).map(id -> {
-      final RegisterSchemaResponse response = new RegisterSchemaResponse();
-      return new ResponseEntity<>(response.id(id), HttpStatus.OK);
-    }).orElseGet(() -> {
-      log.error("Couldn't find topic with name {}.", topicName);
-      return new ResponseEntity<>(HttpStatus.NOT_FOUND);
-    });
-  }
-
-  @Override
-  public ResponseEntity<SchemaString> getSchema(Integer id, String subject, String format, Boolean fetchMaxId) {
-    return _schemaRegistryService.getSchemaForId(id).map(schema -> {
-      SchemaString result = new SchemaString();
-      result.setMaxId(id);
-      result.setSchemaType("AVRO");
-      result.setSchema(schema.toString());
-      return new ResponseEntity<>(result, HttpStatus.OK);
-    }).orElseGet(() -> {
-      log.error("Couldn't find topic with id {}.", id);
-      return new ResponseEntity<>(HttpStatus.NOT_FOUND);
-    });
+    return _schemaRegistryService
+        .getSchemaIdForTopic(topicName)
+        .map(
+            id -> {
+              final RegisterSchemaResponse response = new RegisterSchemaResponse();
+              return new ResponseEntity<>(response.id(id), HttpStatus.OK);
+            })
+        .orElseGet(
+            () -> {
+              log.error("Couldn't find topic with name {}.", topicName);
+              return new ResponseEntity<>(HttpStatus.NOT_FOUND);
+            });
+  }
+
+  @Override
+  public ResponseEntity<SchemaString> getSchema(
+      Integer id, String subject, String format, Boolean fetchMaxId) {
+    return _schemaRegistryService
+        .getSchemaForId(id)
+        .map(
+            schema -> {
+              SchemaString result = new SchemaString();
+              result.setMaxId(id);
+              result.setSchemaType("AVRO");
+              result.setSchema(schema.toString());
+              return new ResponseEntity<>(result, HttpStatus.OK);
+            })
+        .orElseGet(
+            () -> {
+              log.error("Couldn't find topic with id {}.", id);
+              return new ResponseEntity<>(HttpStatus.NOT_FOUND);
+            });
   }
 
   @Override
@@ -270,8 +299,8 @@ public ResponseEntity<List<String>> getSchemaTypes() {
   }
 
   @Override
-  public ResponseEntity<List<Schema>> getSchemas(String subjectPrefix, Boolean deleted, Boolean latestOnly,
-      Integer offset, Integer limit) {
+  public ResponseEntity<List<Schema>> getSchemas(
+      String subjectPrefix, Boolean deleted, Boolean latestOnly, Integer offset, Integer limit) {
     log.error("[SchemasApi] getSchemas method not implemented");
     return SchemasApi.super.getSchemas(subjectPrefix, deleted, latestOnly, offset, limit);
   }
@@ -283,7 +312,8 @@ public ResponseEntity<List<String>> getSubjects(Integer id, String subject, Bool
   }
 
   @Override
-  public ResponseEntity<List<SubjectVersion>> getVersions(Integer id, String subject, Boolean deleted) {
+  public ResponseEntity<List<SubjectVersion>> getVersions(
+      Integer id, String subject, Boolean deleted) {
     log.error("[SchemasApi] getVersions method not implemented");
     return SchemasApi.super.getVersions(id, subject, deleted);
   }
diff --git a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java
index d217d501630e3..98163a7d91420 100644
--- a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java
+++ b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java
@@ -13,10 +13,10 @@
 import org.springframework.web.servlet.config.annotation.EnableWebMvc;
 import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 
-
 @EnableWebMvc
-@OpenAPIDefinition(info = @Info(title = "DataHub OpenAPI", version = "1.0.0"),
-    servers = {@Server(url = "/schema-registry/", description = "Schema Registry Server URL")})
+@OpenAPIDefinition(
+    info = @Info(title = "DataHub OpenAPI", version = "1.0.0"),
+    servers = {@Server(url = "/schema-registry/", description = "Schema Registry Server URL")})
 @Configuration
 public class SpringWebSchemaRegistryConfig implements WebMvcConfigurer {
 
@@ -27,4 +27,4 @@ public void configureMessageConverters(List<HttpMessageConverter<?>> messageConv
     messageConverters.add(new FormHttpMessageConverter());
     messageConverters.add(new MappingJackson2HttpMessageConverter());
   }
-}
\ No newline at end of file
+}
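The one piece of real logic in the controller above is the subject-to-topic mapping in register(): Confluent's default TopicNameStrategy names value-schema subjects "<topic>-value", so stripping the first "-value" recovers the topic name. A self-contained sketch of just that mapping, with a caveat worth noting:

    public class SubjectNameDemo {
      static String topicForSubject(String subject) {
        // Same expression as register() above; "-value" contains no regex metacharacters.
        return subject.replaceFirst("-value", "");
      }

      public static void main(String[] args) {
        System.out.println(topicForSubject("MetadataChangeProposal_v1-value")); // MetadataChangeProposal_v1
        // replaceFirst matches anywhere, not only the suffix, so a topic name that
        // itself contains "-value" would be rewritten at its first occurrence:
        System.out.println(topicForSubject("my-value-topic-value")); // my-topic-value
      }
    }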
diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java
index 1d34008ebf1be..4dffe1e633c6c 100644
--- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java
+++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java
@@ -12,5 +12,4 @@ public class OpenAPISpringTestServer {
   public static void main(String[] args) {
     SpringApplication.run(OpenAPISpringTestServer.class, args);
   }
-
 }
diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java
index 4e31dea6dee1f..1aa0361117c18 100644
--- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java
+++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java
@@ -6,9 +6,12 @@
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.web.servlet.DispatcherServlet;
 
-
 @TestConfiguration
-@ComponentScan(basePackages = {"io.datahubproject.openapi.schema.registry", "com.linkedin.metadata.schema.registry"})
+@ComponentScan(
+    basePackages = {
+      "io.datahubproject.openapi.schema.registry",
+      "com.linkedin.metadata.schema.registry"
+    })
 public class OpenAPISpringTestServerConfiguration {
 
   @Bean
@@ -17,7 +20,8 @@ public DispatcherServlet dispatcherServlet() {
   }
 
   @Bean
-  public ServletRegistrationBean<DispatcherServlet> servletRegistrationBean(DispatcherServlet dispatcherServlet) {
+  public ServletRegistrationBean<DispatcherServlet> servletRegistrationBean(
+      DispatcherServlet dispatcherServlet) {
     return new ServletRegistrationBean<>(dispatcherServlet, "/");
   }
 }
diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java
index 4e1bb09ab205b..664766f204e46 100644
--- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java
+++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java
@@ -1,5 +1,8 @@
 package io.datahubproject.openapi.test;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.ByteString;
@@ -45,25 +48,25 @@
 import org.testcontainers.utility.DockerImageName;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 @ActiveProfiles("test")
 @ContextConfiguration
 @SpringBootTest(
     webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT,
-    classes = {OpenAPISpringTestServer.class, OpenAPISpringTestServerConfiguration.class,
-        SchemaRegistryControllerTestConfiguration.class})
+    classes = {
+      OpenAPISpringTestServer.class,
+      OpenAPISpringTestServerConfiguration.class,
+      SchemaRegistryControllerTestConfiguration.class
+    })
 @EnableKafka
 public class SchemaRegistryControllerTest extends AbstractTestNGSpringContextTests {
   private static final String CONFLUENT_PLATFORM_VERSION = "7.2.2";
 
-  static KafkaContainer kafka = new KafkaContainer(
-      DockerImageName.parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION))
-      .withReuse(true)
-      .withStartupAttempts(5)
-      .withStartupTimeout(Duration.of(30, ChronoUnit.SECONDS));
+  static KafkaContainer kafka =
+      new KafkaContainer(
+              DockerImageName.parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION))
+          .withReuse(true)
+          .withStartupAttempts(5)
+          .withStartupTimeout(Duration.of(30, ChronoUnit.SECONDS));
 
   @DynamicPropertySource
   static void kafkaProperties(DynamicPropertyRegistry registry) {
@@ -73,8 +76,7 @@ static void kafkaProperties(DynamicPropertyRegistry registry) {
     registry.add("kafka.schemaRegistry.url", () -> "http://localhost:53222/api/");
   }
 
-  @Autowired
-  EventProducer _producer;
+  @Autowired EventProducer _producer;
 
   private final CountDownLatch mcpLatch = new CountDownLatch(1);
 
@@ -89,7 +91,8 @@ static void kafkaProperties(DynamicPropertyRegistry registry) {
   private final AtomicReference<PlatformEvent> peRef = new AtomicReference<>();
 
   @Test
-  public void testMCPConsumption() throws IOException, InterruptedException, ExecutionException, TimeoutException {
+  public void testMCPConsumption()
+      throws IOException, InterruptedException, ExecutionException, TimeoutException {
     final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)");
     final DatasetProperties datasetProperties = new DatasetProperties();
     datasetProperties.setName("Foo Bar");
@@ -101,7 +104,8 @@ public void testMCPConsumption() throws IOException, InterruptedException, Execu
     gmce.setAspectName("datasetProperties");
 
     final JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec();
-    final byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties);
+    final byte[] datasetPropertiesSerialized =
+        dataTemplateCodec.dataTemplateToBytes(datasetProperties);
     final GenericAspect genericAspect = new GenericAspect();
     genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized));
     genericAspect.setContentType("application/json");
@@ -115,7 +119,8 @@ public void testMCPConsumption() throws IOException, InterruptedException, Execu
   }
 
   @Test
-  public void testMCLConsumption() throws IOException, InterruptedException, ExecutionException, TimeoutException {
+  public void testMCLConsumption()
+      throws IOException, InterruptedException, ExecutionException, TimeoutException {
     final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)");
     final DatasetProperties datasetProperties = new DatasetProperties();
     datasetProperties.setName("Foo Bar");
@@ -130,7 +135,8 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu
 
     // Set old aspect
     final GenericAspect oldAspect = new GenericAspect();
-    final byte[] oldDatasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties);
+    final byte[] oldDatasetPropertiesSerialized =
+        dataTemplateCodec.dataTemplateToBytes(datasetProperties);
     oldAspect.setValue(ByteString.unsafeWrap(oldDatasetPropertiesSerialized));
     oldAspect.setContentType("application/json");
     metadataChangeLog.setPreviousAspectValue(GenericRecordUtils.serializeAspect(oldAspect));
@@ -139,16 +145,20 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu
 
     // Set new aspect
     final GenericAspect newAspectValue = new GenericAspect();
     datasetProperties.setDescription("Updated data");
-    final byte[] newDatasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties);
+    final byte[] newDatasetPropertiesSerialized =
+        dataTemplateCodec.dataTemplateToBytes(datasetProperties);
     newAspectValue.setValue(ByteString.unsafeWrap(newDatasetPropertiesSerialized));
     newAspectValue.setContentType("application/json");
     metadataChangeLog.setAspect(GenericRecordUtils.serializeAspect(newAspectValue));
     metadataChangeLog.setSystemMetadata(SystemMetadataUtils.createDefaultSystemMetadata());
 
     final MockEntitySpec entitySpec = new MockEntitySpec("dataset");
-    final AspectSpec aspectSpec = entitySpec.createAspectSpec(datasetProperties, DATASET_PROPERTIES_ASPECT_NAME);
+    final AspectSpec aspectSpec =
+        entitySpec.createAspectSpec(datasetProperties, DATASET_PROPERTIES_ASPECT_NAME);
 
-    _producer.produceMetadataChangeLog(entityUrn, aspectSpec, metadataChangeLog).get(10, TimeUnit.SECONDS);
+    _producer
+        .produceMetadataChangeLog(entityUrn, aspectSpec, metadataChangeLog)
+        .get(10, TimeUnit.SECONDS);
     final boolean messageConsumed = mclLatch.await(10, TimeUnit.SECONDS);
     assertTrue(messageConsumed);
     assertEquals(mclLatch.getCount(), 0);
@@ -156,7 +166,8 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu
   }
 
   @Test
-  public void testPEConsumption() throws InterruptedException, ExecutionException, TimeoutException {
+  public void testPEConsumption()
+      throws InterruptedException, ExecutionException, TimeoutException {
     final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)");
 
     final EntityChangeEvent changeEvent = new EntityChangeEvent();
@@ -172,11 +183,11 @@ public void testPEConsumption() throws InterruptedException, ExecutionException,
 
     final PlatformEvent platformEvent = new PlatformEvent();
     platformEvent.setName(CHANGE_EVENT_PLATFORM_EVENT_NAME);
-    platformEvent.setHeader(
-        new PlatformEventHeader().setTimestampMillis(123L));
+    platformEvent.setHeader(new PlatformEventHeader().setTimestampMillis(123L));
     platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent));
 
-    _producer.producePlatformEvent(CHANGE_EVENT_PLATFORM_EVENT_NAME, "Some key", platformEvent)
+    _producer
+        .producePlatformEvent(CHANGE_EVENT_PLATFORM_EVENT_NAME, "Some key", platformEvent)
         .get(10, TimeUnit.SECONDS);
 
     final boolean messageConsumed = peLatch.await(10, TimeUnit.SECONDS);
@@ -185,8 +196,11 @@ public void testPEConsumption() throws InterruptedException, ExecutionException,
     assertEquals(peRef.get(), platformEvent);
   }
 
-  @KafkaListener(id = "test-mcp-consumer", topics = Topics.METADATA_CHANGE_PROPOSAL,
-      containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"})
+  @KafkaListener(
+      id = "test-mcp-consumer",
+      topics = Topics.METADATA_CHANGE_PROPOSAL,
+      containerFactory = "kafkaEventConsumer",
+      properties = {"auto.offset.reset:earliest"})
   public void receiveMCP(ConsumerRecord<String, GenericRecord> consumerRecord) {
 
     final GenericRecord value = consumerRecord.value();
@@ -199,8 +213,11 @@ public void receiveMCP(ConsumerRecord<String, GenericRecord> consumerRecord) {
     }
   }
 
-  @KafkaListener(id = "test-mcl-consumer", topics = Topics.METADATA_CHANGE_LOG_VERSIONED,
-      containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"})
+  @KafkaListener(
+      id = "test-mcl-consumer",
+      topics = Topics.METADATA_CHANGE_LOG_VERSIONED,
+      containerFactory = "kafkaEventConsumer",
+      properties = {"auto.offset.reset:earliest"})
   public void receiveMCL(ConsumerRecord<String, GenericRecord> consumerRecord) {
 
     final GenericRecord value = consumerRecord.value();
@@ -212,8 +229,11 @@ public void receiveMCL(ConsumerRecord<String, GenericRecord> consumerRecord) {
     }
   }
 
-  @KafkaListener(id = "test-pe-consumer", topics = Topics.PLATFORM_EVENT,
-      containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"})
+  @KafkaListener(
+      id = "test-pe-consumer",
+      topics = Topics.PLATFORM_EVENT,
+      containerFactory = "kafkaEventConsumer",
+      properties = {"auto.offset.reset:earliest"})
   public void receivePE(ConsumerRecord<String, GenericRecord> consumerRecord) {
 
     final GenericRecord value = consumerRecord.value();
@@ -224,4 +244,4 @@ public void receivePE(ConsumerRecord<String, GenericRecord> consumerRecord) {
       throw new RuntimeException(e);
     }
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java
index e9fb5887e29cc..ff5b951092070 100644
--- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java
+++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java
@@ -4,10 +4,7 @@
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.test.context.TestPropertySource;
 
-
 @TestConfiguration
 @TestPropertySource(value = "classpath:/application.properties")
 @ComponentScan(basePackages = {"com.linkedin.gms.factory.kafka", "com.linkedin.gms.factory.config"})
-public class SchemaRegistryControllerTestConfiguration {
-
-}
+public class SchemaRegistryControllerTestConfiguration {}
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java
index b622fc5bb6af2..caebc6a334e72 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java
@@ -1,8 +1,7 @@
 package com.linkedin.metadata.datahubusage;
 
 public class DataHubUsageEventConstants {
-  private DataHubUsageEventConstants() {
-  }
+  private DataHubUsageEventConstants() {}
 
   // Common fields
   public static final String TYPE = "type";
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java
index c1018e2031b17..518b5f28a5b99 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java
@@ -2,7 +2,6 @@
 
 import lombok.Getter;
 
-
 @Getter
 public enum DataHubUsageEventType {
   PAGE_VIEW_EVENT("PageViewEvent"),
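The test above relies on a latch handshake between threads: the producer publishes, the @KafkaListener callback counts a CountDownLatch down, and the assertion awaits with a timeout instead of sleeping. A minimal, Kafka-free sketch of that pattern — the names here are illustrative, not DataHub APIs:

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    public class LatchPatternDemo {
      private static final CountDownLatch latch = new CountDownLatch(1);

      public static void main(String[] args) throws InterruptedException {
        // Stand-in for the listener callback invoked on the consumer thread.
        new Thread(latch::countDown).start();

        // Stand-in for assertTrue(messageConsumed): false would mean a 10s timeout.
        boolean messageConsumed = latch.await(10, TimeUnit.SECONDS);
        System.out.println("consumed = " + messageConsumed + ", remaining = " + latch.getCount());
      }
    }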
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java
index 40a5e3a07ae6d..eab482c7bab27 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java
@@ -25,57 +25,67 @@
 import lombok.extern.slf4j.Slf4j;
 import org.joda.time.DateTimeUtils;
 
-
 @Slf4j
 public class AspectUtils {
 
-  private AspectUtils() {
-  }
+  private AspectUtils() {}
 
-  public static final Set<ChangeType> SUPPORTED_TYPES = Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH);
+  public static final Set<ChangeType> SUPPORTED_TYPES =
+      Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH);
 
   public static List<MetadataChangeProposal> getAdditionalChanges(
-      @Nonnull MetadataChangeProposal metadataChangeProposal,
-      @Nonnull EntityService entityService,
-      boolean onPrimaryKeyInsertOnly) {
+      @Nonnull MetadataChangeProposal metadataChangeProposal,
+      @Nonnull EntityService entityService,
+      boolean onPrimaryKeyInsertOnly) {
 
     // No additional changes for unsupported operations
     if (!SUPPORTED_TYPES.contains(metadataChangeProposal.getChangeType())) {
       return Collections.emptyList();
     }
 
-    final Urn urn = EntityKeyUtils.getUrnFromProposal(metadataChangeProposal,
+    final Urn urn =
+        EntityKeyUtils.getUrnFromProposal(
+            metadataChangeProposal,
             entityService.getKeyAspectSpec(metadataChangeProposal.getEntityType()));
 
     final Map<String, RecordTemplate> includedAspects;
     if (metadataChangeProposal.getChangeType() != ChangeType.PATCH) {
-      RecordTemplate aspectRecord = GenericRecordUtils.deserializeAspect(metadataChangeProposal.getAspect().getValue(),
-          metadataChangeProposal.getAspect().getContentType(), entityService.getEntityRegistry()
-              .getEntitySpec(urn.getEntityType()).getAspectSpec(metadataChangeProposal.getAspectName()));
+      RecordTemplate aspectRecord =
+          GenericRecordUtils.deserializeAspect(
+              metadataChangeProposal.getAspect().getValue(),
+              metadataChangeProposal.getAspect().getContentType(),
+              entityService
+                  .getEntityRegistry()
+                  .getEntitySpec(urn.getEntityType())
+                  .getAspectSpec(metadataChangeProposal.getAspectName()));
       includedAspects = ImmutableMap.of(metadataChangeProposal.getAspectName(), aspectRecord);
     } else {
       includedAspects = ImmutableMap.of();
     }
 
     if (onPrimaryKeyInsertOnly) {
-      return entityService.generateDefaultAspectsOnFirstWrite(urn, includedAspects)
-          .getValue()
-          .stream()
-          .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal))
-          .filter(Objects::nonNull)
-          .collect(Collectors.toList());
+      return entityService
+          .generateDefaultAspectsOnFirstWrite(urn, includedAspects)
+          .getValue()
+          .stream()
+          .map(
+              entry ->
+                  getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal))
+          .filter(Objects::nonNull)
+          .collect(Collectors.toList());
     } else {
-      return entityService.generateDefaultAspectsIfMissing(urn, includedAspects)
-          .stream()
-          .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal))
-          .filter(Objects::nonNull)
-          .collect(Collectors.toList());
+      return entityService.generateDefaultAspectsIfMissing(urn, includedAspects).stream()
+          .map(
+              entry ->
+                  getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal))
+          .filter(Objects::nonNull)
+          .collect(Collectors.toList());
     }
   }
 
   public static List<MetadataChangeProposal> getAdditionalChanges(
-      @Nonnull MetadataChangeProposal metadataChangeProposal,
-      @Nonnull EntityService entityService) {
+      @Nonnull MetadataChangeProposal metadataChangeProposal,
+      @Nonnull EntityService entityService) {
 
     return getAdditionalChanges(metadataChangeProposal, entityService, false);
   }
 
@@ -85,12 +95,10 @@ public static Map<Urn, Aspect> batchGetLatestAspect(
       Set<Urn> urns,
       String aspectName,
       EntityClient entityClient,
-      Authentication authentication) throws Exception {
-    final Map<Urn, EntityResponse> gmsResponse = entityClient.batchGetV2(
-        entity,
-        urns,
-        ImmutableSet.of(aspectName),
-        authentication);
+      Authentication authentication)
+      throws Exception {
+    final Map<Urn, EntityResponse> gmsResponse =
+        entityClient.batchGetV2(entity, urns, ImmutableSet.of(aspectName), authentication);
     final Map<Urn, Aspect> finalResult = new HashMap<>();
     for (Urn urn : urns) {
       EntityResponse response = gmsResponse.get(urn);
@@ -101,8 +109,8 @@ public static Map<Urn, Aspect> batchGetLatestAspect(
     return finalResult;
   }
 
-  private static MetadataChangeProposal getProposalFromAspect(String aspectName, RecordTemplate aspect,
-      MetadataChangeProposal original) {
+  private static MetadataChangeProposal getProposalFromAspect(
+      String aspectName, RecordTemplate aspect, MetadataChangeProposal original) {
     MetadataChangeProposal proposal = new MetadataChangeProposal();
     GenericAspect genericAspect = GenericRecordUtils.serializeAspect(aspect);
 
     // Set net new fields
@@ -110,7 +118,8 @@ private static MetadataChangeProposal getProposalFromAspect(String aspectName, R
     proposal.setAspectName(aspectName);
 
     // Set fields determined from original
-    // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an UPSERT
+    // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an
+    // UPSERT
     proposal.setChangeType(original.getChangeType());
     if (ChangeType.PATCH.equals(proposal.getChangeType())) {
       proposal.setChangeType(ChangeType.UPSERT);
@@ -128,7 +137,7 @@ private static MetadataChangeProposal getProposalFromAspect(String aspectName, R
     if (original.getAuditHeader() != null) {
       proposal.setAuditHeader(original.getAuditHeader());
     }
-
+
     proposal.setEntityType(original.getEntityType());
 
     return proposal;
@@ -145,8 +154,11 @@ public static MetadataChangeProposal buildMetadataChangeProposal(
     return proposal;
   }
 
-  public static MetadataChangeProposal buildMetadataChangeProposal(@Nonnull String entityType,
-      @Nonnull RecordTemplate keyAspect, @Nonnull String aspectName, @Nonnull RecordTemplate aspect) {
+  public static MetadataChangeProposal buildMetadataChangeProposal(
+      @Nonnull String entityType,
+      @Nonnull RecordTemplate keyAspect,
+      @Nonnull String aspectName,
+      @Nonnull RecordTemplate aspect) {
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityType(entityType);
     proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(keyAspect));
@@ -162,4 +174,4 @@ public static AuditStamp getAuditStamp(Urn actor) {
     auditStamp.setActor(actor);
     return auditStamp;
   }
-}
\ No newline at end of file
+}
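One rule worth calling out from getProposalFromAspect() above: the auto-generated "additional changes" copy the original proposal's change type, except that PATCH is downgraded to UPSERT, because default aspects are always written whole. A toy, self-contained illustration of that rule and the map/filter/collect shape used around it (local enum, not the DataHub one):

    import java.util.List;
    import java.util.Objects;
    import java.util.stream.Collectors;

    public class ChangeTypeDemo {
      enum ChangeType { UPSERT, CREATE, PATCH }

      // Mirrors the PATCH-to-UPSERT rule applied to additional changes.
      static ChangeType forAdditionalChange(ChangeType original) {
        return ChangeType.PATCH.equals(original) ? ChangeType.UPSERT : original;
      }

      public static void main(String[] args) {
        List<ChangeType> results =
            List.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH).stream()
                .map(ChangeTypeDemo::forAdditionalChange)
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
        System.out.println(results); // [UPSERT, CREATE, UPSERT]
      }
    }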
com.google.common.collect.ImmutableMap; @@ -26,7 +28,6 @@ import com.linkedin.metadata.run.RelatedAspectArray; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; - import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; @@ -36,375 +37,465 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; - import lombok.AllArgsConstructor; import lombok.Data; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - - @Slf4j @RequiredArgsConstructor public class DeleteEntityService { - private final EntityService _entityService; - private final GraphService _graphService; - - private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; - - /** - * Public endpoint that deletes references to a given urn across DataHub's metadata graph. This is the entrypoint for - * addressing dangling pointers whenever a user deletes some entity. - * - * @param urn The urn for which to delete references in DataHub's metadata graph. - * @param dryRun Specifies if the delete logic should be executed to conclusion or if the caller simply wants a - * preview of the response. - * @return A {@link DeleteReferencesResponse} instance detailing the response of deleting references to the provided - * urn. - */ - public DeleteReferencesResponse deleteReferencesTo(final Urn urn, final boolean dryRun) { - final DeleteReferencesResponse result = new DeleteReferencesResponse(); - RelatedEntitiesResult relatedEntities = - _graphService.findRelatedEntities(null, newFilter("urn", urn.toString()), null, - EMPTY_FILTER, - ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000); - - final List<RelatedAspect> relatedAspects = relatedEntities.getEntities().stream() - .flatMap(relatedEntity -> getRelatedAspectStream(urn, UrnUtils.getUrn(relatedEntity.getUrn()), + private final EntityService _entityService; + private final GraphService _graphService; + + private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; + + /** + * Public endpoint that deletes references to a given urn across DataHub's metadata graph. This is + * the entrypoint for addressing dangling pointers whenever a user deletes some entity. + * + * @param urn The urn for which to delete references in DataHub's metadata graph. + * @param dryRun Specifies if the delete logic should be executed to conclusion or if the caller + * simply wants a preview of the response. + * @return A {@link DeleteReferencesResponse} instance detailing the response of deleting + * references to the provided urn. 
+ */ + public DeleteReferencesResponse deleteReferencesTo(final Urn urn, final boolean dryRun) { + final DeleteReferencesResponse result = new DeleteReferencesResponse(); + RelatedEntitiesResult relatedEntities = + _graphService.findRelatedEntities( + null, + newFilter("urn", urn.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000); + + final List<RelatedAspect> relatedAspects = + relatedEntities.getEntities().stream() + .flatMap( + relatedEntity -> + getRelatedAspectStream( + urn, + UrnUtils.getUrn(relatedEntity.getUrn()), relatedEntity.getRelationshipType())) - .limit(10) - .collect(Collectors.toList()); - - result.setRelatedAspects(new RelatedAspectArray(relatedAspects)); - result.setTotal(relatedEntities.getTotal()); - - if (dryRun) { - return result; - } - - for (int processedEntities = 0; processedEntities < relatedEntities.getTotal(); processedEntities += relatedEntities.getCount()) { - log.info("Processing batch {} of {} aspects", processedEntities, relatedEntities.getTotal()); - relatedEntities.getEntities().forEach(entity -> deleteReference(urn, entity)); - if (processedEntities + relatedEntities.getEntities().size() < relatedEntities.getTotal()) { - sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); - relatedEntities = _graphService.findRelatedEntities(null, newFilter("urn", urn.toString()), - null, EMPTY_FILTER, ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000); - } - } - - return result; - } + .limit(10) + .collect(Collectors.toList()); - /** - * Gets a stream of relatedAspects Pojos (high-level, trimmed information) that relate an entity with urn `urn` to - * another entity of urn `relatedUrn` via a concrete relationship type. Used to give users of this API a summary of - * what aspects are related to a given urn and how. - * - * @param urn The identifier of the source entity. - * @param relatedUrn The identifier of the destination entity. - * @param relationshipType The name of the relationship type that links urn to relatedUrn. - * @return A stream of {@link RelatedAspect} instances that have the relationship from urn to relatedUrn. - */ - private Stream<RelatedAspect> getRelatedAspectStream(Urn urn, Urn relatedUrn, String relationshipType) { - return getAspects(urn, relatedUrn, relationshipType).map(enrichedAspect -> { - final RelatedAspect relatedAspect = new RelatedAspect(); - relatedAspect.setEntity(relatedUrn); - relatedAspect.setRelationship(relationshipType); - relatedAspect.setAspect(enrichedAspect.getName()); - return relatedAspect; - }); - } + result.setRelatedAspects(new RelatedAspectArray(relatedAspects)); + result.setTotal(relatedEntities.getTotal()); - /** - * Gets a stream of Enriched Aspect Pojos (Aspect + aspect spec tuple) that relate an entity with urn `urn` to - * another entity of urn `relatedUrn` via a concrete relationship type. - * - * @param urn The identifier of the source entity. - * @param relatedUrn The identifier of the destination entity. - * @param relationshipType The name of the relationship type that links urn to relatedUrn. - * @return A stream of {@link EnrichedAspect} instances that have the relationship from urn to relatedUrn. 
- */ - private Stream<EnrichedAspect> getAspects(Urn urn, Urn relatedUrn, String relationshipType) { - final String relatedEntityName = relatedUrn.getEntityType(); - final EntitySpec relatedEntitySpec = _entityService.getEntityRegistry().getEntitySpec(relatedEntityName); - final Map<String, AspectSpec> aspectSpecs = getAspectSpecsReferringTo(urn.getEntityType(), relationshipType, relatedEntitySpec); - - // If we have an empty map it means that we have a graph edge that points to some aspect spec that we can't find in - // the entity registry. It would be a corrupted edge in the graph index or backwards incompatible change in the - // entity registry (I.e: deleting the aspect from the metadata model without being consistent in the graph index). - if (aspectSpecs.isEmpty()) { - log.error("Unable to find any aspect spec that has a {} relationship to {} entities. This means that the entity " - + "registry does not have relationships that the graph index has stored.", - relationshipType, relatedEntityName); - handleError(new DeleteEntityServiceError("Unable to find aspect spec in entity registry", - DeleteEntityServiceErrorReason.ENTITY_REGISTRY_SPEC_NOT_FOUND, - ImmutableMap.of("relatedEntityName", relatedEntityName, "relationshipType", relationshipType, - "relatedEntitySpec", relatedEntitySpec))); - return Stream.empty(); - } - - final List<EnvelopedAspect> aspectList = getAspectsReferringTo(relatedUrn, aspectSpecs) - .collect(Collectors.toList()); - - // If we have an empty list it means that we have a graph edge that points to some aspect that we can't find in the - // entity service. It would be a corrupted edge in the graph index or corrupted record in the entity DB. - if (aspectList.isEmpty()) { - log.error("Unable to find an aspect instance that relates {} {} via relationship {} in the entity service. " - + "This is potentially a lack of consistency between the graph and entity DBs.", - urn, relatedUrn, relationshipType); - handleError(new DeleteEntityServiceError("Unable to find aspect instance in entity service", - DeleteEntityServiceErrorReason.ENTITY_SERVICE_ASPECT_NOT_FOUND, - ImmutableMap.of("urn", urn, "relatedUrn", relatedUrn, "relationship", relationshipType, - "aspectSpecs", aspectSpecs))); - return Stream.empty(); - } - - return aspectList.stream() - .filter(envelopedAspect -> hasRelationshipInstanceTo(envelopedAspect.getValue(), urn.getEntityType(), - relationshipType, aspectSpecs.get(envelopedAspect.getName()))) - .map(envelopedAspect -> new EnrichedAspect( - envelopedAspect.getName(), - envelopedAspect.getValue(), - aspectSpecs.get(envelopedAspect.getName())) - ); + if (dryRun) { + return result; } - /** - * Utility method to sleep the thread. - * - * @param seconds The number of seconds to sleep. 
- */ - private void sleep(final Integer seconds) { - try { - TimeUnit.SECONDS.sleep(seconds); - } catch (InterruptedException e) { - log.error("Interrupted sleep", e); - } + for (int processedEntities = 0; + processedEntities < relatedEntities.getTotal(); + processedEntities += relatedEntities.getCount()) { + log.info("Processing batch {} of {} aspects", processedEntities, relatedEntities.getTotal()); + relatedEntities.getEntities().forEach(entity -> deleteReference(urn, entity)); + if (processedEntities + relatedEntities.getEntities().size() < relatedEntities.getTotal()) { + sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); + relatedEntities = + _graphService.findRelatedEntities( + null, + newFilter("urn", urn.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000); + } } - /** - * Processes an aspect of a given {@link RelatedEntity} instance that references a given {@link Urn}, removes said - * urn from the aspects and submits an MCP with the updated aspects. - * - * @param urn The urn to be found. - * @param relatedEntity The entity to be modified. - */ - private void deleteReference(final Urn urn, final RelatedEntity relatedEntity) { - final Urn relatedUrn = UrnUtils.getUrn(relatedEntity.getUrn()); - final String relationshipType = relatedEntity.getRelationshipType(); - getAspects(urn, relatedUrn, relationshipType) - .forEach(enrichedAspect -> { - final String aspectName = enrichedAspect.getName(); - final Aspect aspect = enrichedAspect.getAspect(); - final AspectSpec aspectSpec = enrichedAspect.getSpec(); - - final AtomicReference<Aspect> updatedAspect; - try { - updatedAspect = new AtomicReference<>(aspect.copy()); - } catch (CloneNotSupportedException e) { - log.error("Failed to clone aspect {}", aspect); - handleError(new DeleteEntityServiceError("Failed to clone aspect", - DeleteEntityServiceErrorReason.CLONE_FAILED, - ImmutableMap.of("aspect", aspect))); - return; - } - - aspectSpec.getRelationshipFieldSpecs().stream() - .filter(relationshipFieldSpec -> relationshipFieldSpec.getRelationshipAnnotation().getName().equals(relationshipType)) - .forEach(relationshipFieldSpec -> { - final PathSpec path = relationshipFieldSpec.getPath(); - updatedAspect.set(DeleteEntityUtils.getAspectWithReferenceRemoved(urn.toString(), - updatedAspect.get(), aspectSpec.getPegasusSchema(), path)); - }); - - // If there has been an update, then we produce an MCE. - if (!aspect.equals(updatedAspect.get())) { - if (updatedAspect.get() == null) { - // Then we should remove the aspect. - deleteAspect(relatedUrn, aspectName, aspect); - } else { - // Then we should update the aspect. - updateAspect(relatedUrn, aspectName, aspect, updatedAspect.get()); - } - } - }); + return result; + } + + /** + * Gets a stream of relatedAspects Pojos (high-level, trimmed information) that relate an entity + * with urn `urn` to another entity of urn `relatedUrn` via a concrete relationship type. Used to + * give users of this API a summary of what aspects are related to a given urn and how. + * + * @param urn The identifier of the source entity. + * @param relatedUrn The identifier of the destination entity. + * @param relationshipType The name of the relationship type that links urn to relatedUrn. + * @return A stream of {@link RelatedAspect} instances that have the relationship from urn to + * relatedUrn. 
+ */ + private Stream<RelatedAspect> getRelatedAspectStream( + Urn urn, Urn relatedUrn, String relationshipType) { + return getAspects(urn, relatedUrn, relationshipType) + .map( + enrichedAspect -> { + final RelatedAspect relatedAspect = new RelatedAspect(); + relatedAspect.setEntity(relatedUrn); + relatedAspect.setRelationship(relationshipType); + relatedAspect.setAspect(enrichedAspect.getName()); + return relatedAspect; + }); + } + + /** + * Gets a stream of Enriched Aspect Pojos (Aspect + aspect spec tuple) that relate an entity with + * urn `urn` to another entity of urn `relatedUrn` via a concrete relationship type. + * + * @param urn The identifier of the source entity. + * @param relatedUrn The identifier of the destination entity. + * @param relationshipType The name of the relationship type that links urn to relatedUrn. + * @return A stream of {@link EnrichedAspect} instances that have the relationship from urn to + * relatedUrn. + */ + private Stream<EnrichedAspect> getAspects(Urn urn, Urn relatedUrn, String relationshipType) { + final String relatedEntityName = relatedUrn.getEntityType(); + final EntitySpec relatedEntitySpec = + _entityService.getEntityRegistry().getEntitySpec(relatedEntityName); + final Map<String, AspectSpec> aspectSpecs = + getAspectSpecsReferringTo(urn.getEntityType(), relationshipType, relatedEntitySpec); + + // If we have an empty map it means that we have a graph edge that points to some aspect spec + // that we can't find in + // the entity registry. It would be a corrupted edge in the graph index or backwards + // incompatible change in the + // entity registry (I.e: deleting the aspect from the metadata model without being consistent in + // the graph index). + if (aspectSpecs.isEmpty()) { + log.error( + "Unable to find any aspect spec that has a {} relationship to {} entities. This means that the entity " + + "registry does not have relationships that the graph index has stored.", + relationshipType, + relatedEntityName); + handleError( + new DeleteEntityServiceError( + "Unable to find aspect spec in entity registry", + DeleteEntityServiceErrorReason.ENTITY_REGISTRY_SPEC_NOT_FOUND, + ImmutableMap.of( + "relatedEntityName", + relatedEntityName, + "relationshipType", + relationshipType, + "relatedEntitySpec", + relatedEntitySpec))); + return Stream.empty(); } - /** - * Delete an existing aspect for an urn. - * - * @param urn the urn of the entity to remove the aspect for - * @param aspectName the aspect to remove - * @param prevAspect the old value for the aspect - */ - private void deleteAspect(Urn urn, String aspectName, RecordTemplate prevAspect) { - final RollbackResult rollbackResult = _entityService.deleteAspect(urn.toString(), aspectName, - new HashMap<>(), true); - if (rollbackResult == null || rollbackResult.getNewValue() != null) { - log.error("Failed to delete aspect with references. Before {}, after: null, please check GMS logs" - + " logs for more information", prevAspect); - handleError(new DeleteEntityServiceError("Failed to ingest new aspect", - DeleteEntityServiceErrorReason.ASPECT_DELETE_FAILED, - ImmutableMap.of("urn", urn, "aspectName", aspectName))); - } + final List<EnvelopedAspect> aspectList = + getAspectsReferringTo(relatedUrn, aspectSpecs).collect(Collectors.toList()); + + // If we have an empty list it means that we have a graph edge that points to some aspect that + // we can't find in the + // entity service. It would be a corrupted edge in the graph index or corrupted record in the + // entity DB. 
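+    // (Hypothetical illustration of such an inconsistency: the graph index still holds an edge
+    // from a tag to a dataset, but the dataset's globalTags aspect record is gone from the entity
+    // DB, so nothing can be fetched for it.)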
+ if (aspectList.isEmpty()) { + log.error( + "Unable to find an aspect instance that relates {} {} via relationship {} in the entity service. " + + "This is potentially a lack of consistency between the graph and entity DBs.", + urn, + relatedUrn, + relationshipType); + handleError( + new DeleteEntityServiceError( + "Unable to find aspect instance in entity service", + DeleteEntityServiceErrorReason.ENTITY_SERVICE_ASPECT_NOT_FOUND, + ImmutableMap.of( + "urn", + urn, + "relatedUrn", + relatedUrn, + "relationship", + relationshipType, + "aspectSpecs", + aspectSpecs))); + return Stream.empty(); } - /** - * Update an aspect for an urn. - * - * @param urn the urn of the entity to remove the aspect for - * @param aspectName the aspect to remove - * @param prevAspect the old value for the aspect - * @param newAspect the new value for the aspect - */ - private void updateAspect(Urn urn, String aspectName, RecordTemplate prevAspect, RecordTemplate newAspect) { - final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(urn); - proposal.setChangeType(ChangeType.UPSERT); - proposal.setEntityType(urn.getEntityType()); - proposal.setAspectName(aspectName); - proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect)); - - final AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - final IngestResult ingestProposalResult = _entityService.ingestProposal(proposal, auditStamp, false); - - if (!ingestProposalResult.isSqlCommitted()) { - log.error("Failed to ingest aspect with references removed. Before {}, after: {}, please check MCP processor" - + " logs for more information", prevAspect, newAspect); - handleError(new DeleteEntityServiceError("Failed to ingest new aspect", - DeleteEntityServiceErrorReason.MCP_PROCESSOR_FAILED, - ImmutableMap.of("proposal", proposal))); - } + return aspectList.stream() + .filter( + envelopedAspect -> + hasRelationshipInstanceTo( + envelopedAspect.getValue(), + urn.getEntityType(), + relationshipType, + aspectSpecs.get(envelopedAspect.getName()))) + .map( + envelopedAspect -> + new EnrichedAspect( + envelopedAspect.getName(), + envelopedAspect.getValue(), + aspectSpecs.get(envelopedAspect.getName()))); + } + + /** + * Utility method to sleep the thread. + * + * @param seconds The number of seconds to sleep. + */ + private void sleep(final Integer seconds) { + try { + TimeUnit.SECONDS.sleep(seconds); + } catch (InterruptedException e) { + log.error("Interrupted sleep", e); } - - - /** - * Utility method that attempts to find Aspect information as well as the associated path spec for a given urn that - * has a relationship of type `relationType` to another urn. - * - * @param relatedUrn The urn of the related entity in which we want to find the aspect that has a relationship - * to `urn`. - * @param aspectSpecs The entity spec of the related entity. - * @return A {@link Stream} of {@link EnvelopedAspect} instances that contain relationships between `urn` & `relatedUrn`. - */ - private Stream<EnvelopedAspect> getAspectsReferringTo(final Urn relatedUrn, - final Map<String, AspectSpec> aspectSpecs) { - - // FIXME: Can we not depend on entity service? 
- final EntityResponse entityResponse; - try { - entityResponse = _entityService.getEntityV2(relatedUrn.getEntityType(), relatedUrn, aspectSpecs.keySet()); - } catch (URISyntaxException e) { - log.error("Unable to retrieve entity data for relatedUrn " + relatedUrn, e); - return Stream.empty(); - } - // Find aspect which contains the relationship with the value we are looking for - return entityResponse - .getAspects() - .values() - .stream() - // Get aspects which contain the relationship field specs found above - .filter(Objects::nonNull) - .filter(aspect -> aspectSpecs.containsKey(aspect.getName())); + } + + /** + * Processes an aspect of a given {@link RelatedEntity} instance that references a given {@link + * Urn}, removes said urn from the aspects and submits an MCP with the updated aspects. + * + * @param urn The urn to be found. + * @param relatedEntity The entity to be modified. + */ + private void deleteReference(final Urn urn, final RelatedEntity relatedEntity) { + final Urn relatedUrn = UrnUtils.getUrn(relatedEntity.getUrn()); + final String relationshipType = relatedEntity.getRelationshipType(); + getAspects(urn, relatedUrn, relationshipType) + .forEach( + enrichedAspect -> { + final String aspectName = enrichedAspect.getName(); + final Aspect aspect = enrichedAspect.getAspect(); + final AspectSpec aspectSpec = enrichedAspect.getSpec(); + + final AtomicReference<Aspect> updatedAspect; + try { + updatedAspect = new AtomicReference<>(aspect.copy()); + } catch (CloneNotSupportedException e) { + log.error("Failed to clone aspect {}", aspect); + handleError( + new DeleteEntityServiceError( + "Failed to clone aspect", + DeleteEntityServiceErrorReason.CLONE_FAILED, + ImmutableMap.of("aspect", aspect))); + return; + } + + aspectSpec.getRelationshipFieldSpecs().stream() + .filter( + relationshipFieldSpec -> + relationshipFieldSpec + .getRelationshipAnnotation() + .getName() + .equals(relationshipType)) + .forEach( + relationshipFieldSpec -> { + final PathSpec path = relationshipFieldSpec.getPath(); + updatedAspect.set( + DeleteEntityUtils.getAspectWithReferenceRemoved( + urn.toString(), + updatedAspect.get(), + aspectSpec.getPegasusSchema(), + path)); + }); + + // If there has been an update, then we produce an MCE. + if (!aspect.equals(updatedAspect.get())) { + if (updatedAspect.get() == null) { + // Then we should remove the aspect. + deleteAspect(relatedUrn, aspectName, aspect); + } else { + // Then we should update the aspect. + updateAspect(relatedUrn, aspectName, aspect, updatedAspect.get()); + } + } + }); + } + + /** + * Delete an existing aspect for an urn. + * + * @param urn the urn of the entity to remove the aspect for + * @param aspectName the aspect to remove + * @param prevAspect the old value for the aspect + */ + private void deleteAspect(Urn urn, String aspectName, RecordTemplate prevAspect) { + final RollbackResult rollbackResult = + _entityService.deleteAspect(urn.toString(), aspectName, new HashMap<>(), true); + if (rollbackResult == null || rollbackResult.getNewValue() != null) { + log.error( + "Failed to delete aspect with references. 
Before {}, after: null, please check GMS logs" + + " logs for more information", + prevAspect); + handleError( + new DeleteEntityServiceError( + "Failed to ingest new aspect", + DeleteEntityServiceErrorReason.ASPECT_DELETE_FAILED, + ImmutableMap.of("urn", urn, "aspectName", aspectName))); } - - /** - * Utility method that determines whether a given aspect has an instance of a relationship of type relationType - * to a given entity type. - * - * @param aspect The aspect in which to search for the relationship. - * @param entityType The name of the entity the method checks against. - * @param relationType The name of the relationship to search for. - * @param aspectSpec The aspect spec in which to search for a concrete relationship with name=relationType - * and that targets the entityType passed by parameter. - * @return {@code True} if the aspect has a relationship with the intended conditions, {@code False} otherwise. - */ - private boolean hasRelationshipInstanceTo(final Aspect aspect, final String entityType, final String relationType, - final AspectSpec aspectSpec) { - - final RecordTemplate recordTemplate = RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), - aspect.data()); - - final Map<RelationshipFieldSpec, List<Object>> extractFields = FieldExtractor.extractFields(recordTemplate, - aspectSpec.getRelationshipFieldSpecs()); - - // Is there is any instance of the relationship specs defined in the aspect's spec extracted from the - // aspect record instance? - return findRelationshipFor(aspectSpec, relationType, entityType) - .map(extractFields::get) - .filter(Objects::nonNull) - .anyMatch(list -> !list.isEmpty()); + } + + /** + * Update an aspect for an urn. + * + * @param urn the urn of the entity to remove the aspect for + * @param aspectName the aspect to remove + * @param prevAspect the old value for the aspect + * @param newAspect the new value for the aspect + */ + private void updateAspect( + Urn urn, String aspectName, RecordTemplate prevAspect, RecordTemplate newAspect) { + final MetadataChangeProposal proposal = new MetadataChangeProposal(); + proposal.setEntityUrn(urn); + proposal.setChangeType(ChangeType.UPSERT); + proposal.setEntityType(urn.getEntityType()); + proposal.setAspectName(aspectName); + proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect)); + + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + final IngestResult ingestProposalResult = + _entityService.ingestProposal(proposal, auditStamp, false); + + if (!ingestProposalResult.isSqlCommitted()) { + log.error( + "Failed to ingest aspect with references removed. Before {}, after: {}, please check MCP processor" + + " logs for more information", + prevAspect, + newAspect); + handleError( + new DeleteEntityServiceError( + "Failed to ingest new aspect", + DeleteEntityServiceErrorReason.MCP_PROCESSOR_FAILED, + ImmutableMap.of("proposal", proposal))); } - - /** - * Computes the set of aspect specs of an entity that contain a relationship of a given name to a specific entity type. - * - * @param relatedEntityType The name of the entity. - * @param relationshipType The name of the relationship. - * @param entitySpec The entity spec from which to retrieve the aspect specs, if any. - * @return A filtered dictionary of aspect name to aspect specs containing only aspects that have a relationship of - * name relationshipType to the given relatedEntityType. 
- */ - private Map<String, AspectSpec> getAspectSpecsReferringTo(final String relatedEntityType, final String relationshipType, - final EntitySpec entitySpec) { - return entitySpec - .getAspectSpecMap() - .entrySet() - .stream() - .filter(entry -> findRelationshipFor(entry.getValue(), relationshipType, relatedEntityType).findAny().isPresent()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - /** - * Utility method to find the relationship specs within an AspectSpec with name relationshipName and which has - * relatedEntity name as a valid destination type. - * - * @param spec The aspect spec from which to extract relationship field specs. - * @param relationshipType The name of the relationship to find. - * @param entityType The name of the entity type (i.e: dataset, chart, usergroup, etc...) which the relationship - * is valid for. - * @return The list of relationship field specs which match the criteria. - */ - private Stream<RelationshipFieldSpec> findRelationshipFor(final AspectSpec spec, final String relationshipType, - final String entityType) { - return spec.getRelationshipFieldSpecs().stream() - .filter(relationship -> relationship.getRelationshipName().equals(relationshipType) - && relationship.getValidDestinationTypes().contains(entityType)); - } - - /** - * Entrypoint to handle the various errors that may occur during the execution of the delete entity service. - * @param error The error instance that provides context on what issue occured. - */ - private void handleError(final DeleteEntityServiceError error) { - // NO-OP for now. - } - - @AllArgsConstructor - @Data - private static class DeleteEntityServiceError { - String message; - DeleteEntityServiceErrorReason reason; - Map<String, Object> context; - } - - private enum DeleteEntityServiceErrorReason { - ENTITY_SERVICE_ASPECT_NOT_FOUND, - ENTITY_REGISTRY_SPEC_NOT_FOUND, - MCP_PROCESSOR_FAILED, - ASPECT_DELETE_FAILED, - CLONE_FAILED, - } - - @AllArgsConstructor - @Data - private static class EnrichedAspect { - String name; - Aspect aspect; - AspectSpec spec; + } + + /** + * Utility method that attempts to find Aspect information as well as the associated path spec for + * a given urn that has a relationship of type `relationType` to another urn. + * + * @param relatedUrn The urn of the related entity in which we want to find the aspect that has a + * relationship to `urn`. + * @param aspectSpecs The entity spec of the related entity. + * @return A {@link Stream} of {@link EnvelopedAspect} instances that contain relationships + * between `urn` & `relatedUrn`. + */ + private Stream<EnvelopedAspect> getAspectsReferringTo( + final Urn relatedUrn, final Map<String, AspectSpec> aspectSpecs) { + + // FIXME: Can we not depend on entity service? + final EntityResponse entityResponse; + try { + entityResponse = + _entityService.getEntityV2(relatedUrn.getEntityType(), relatedUrn, aspectSpecs.keySet()); + } catch (URISyntaxException e) { + log.error("Unable to retrieve entity data for relatedUrn " + relatedUrn, e); + return Stream.empty(); } + // Find aspect which contains the relationship with the value we are looking for + return entityResponse.getAspects().values().stream() + // Get aspects which contain the relationship field specs found above + .filter(Objects::nonNull) + .filter(aspect -> aspectSpecs.containsKey(aspect.getName())); + } + + /** + * Utility method that determines whether a given aspect has an instance of a relationship of type + * relationType to a given entity type. 
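+   * For example (illustrative), an upstreamLineage aspect only counts as having a "DownstreamOf"
+   * relationship instance to dataset entities when its upstream array actually contains a value.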
+   *
+   * @param aspect The aspect in which to search for the relationship.
+   * @param entityType The name of the entity the method checks against.
+   * @param relationType The name of the relationship to search for.
+   * @param aspectSpec The aspect spec in which to search for a concrete relationship with
+   *     name=relationType and that targets the entityType passed by parameter.
+   * @return {@code True} if the aspect has a relationship with the intended conditions, {@code
+   *     False} otherwise.
+   */
+  private boolean hasRelationshipInstanceTo(
+      final Aspect aspect,
+      final String entityType,
+      final String relationType,
+      final AspectSpec aspectSpec) {
+
+    final RecordTemplate recordTemplate =
+        RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), aspect.data());
+
+    final Map<RelationshipFieldSpec, List<Object>> extractFields =
+        FieldExtractor.extractFields(recordTemplate, aspectSpec.getRelationshipFieldSpecs());
+
+    // Is there any instance of the relationship specs defined in the aspect's spec extracted from
+    // the aspect record instance?
+    return findRelationshipFor(aspectSpec, relationType, entityType)
+        .map(extractFields::get)
+        .filter(Objects::nonNull)
+        .anyMatch(list -> !list.isEmpty());
+  }
+
+  /**
+   * Computes the set of aspect specs of an entity that contain a relationship of a given name to a
+   * specific entity type.
+   *
+   * @param relatedEntityType The name of the entity.
+   * @param relationshipType The name of the relationship.
+   * @param entitySpec The entity spec from which to retrieve the aspect specs, if any.
+   * @return A filtered dictionary of aspect name to aspect specs containing only aspects that have
+   *     a relationship of name relationshipType to the given relatedEntityType.
+   */
+  private Map<String, AspectSpec> getAspectSpecsReferringTo(
+      final String relatedEntityType, final String relationshipType, final EntitySpec entitySpec) {
+    return entitySpec.getAspectSpecMap().entrySet().stream()
+        .filter(
+            entry ->
+                findRelationshipFor(entry.getValue(), relationshipType, relatedEntityType)
+                    .findAny()
+                    .isPresent())
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+  }
+
+  /**
+   * Utility method to find the relationship specs within an AspectSpec with name relationshipName
+   * and which has relatedEntity name as a valid destination type.
+   *
+   * @param spec The aspect spec from which to extract relationship field specs.
+   * @param relationshipType The name of the relationship to find.
+   * @param entityType The name of the entity type (i.e., dataset, chart, usergroup, etc.) which
+   *     the relationship is valid for.
+   * @return The list of relationship field specs which match the criteria.
+   */
+  private Stream<RelationshipFieldSpec> findRelationshipFor(
+      final AspectSpec spec, final String relationshipType, final String entityType) {
+    return spec.getRelationshipFieldSpecs().stream()
+        .filter(
+            relationship ->
+                relationship.getRelationshipName().equals(relationshipType)
+                    && relationship.getValidDestinationTypes().contains(entityType));
+  }
+
+  /**
+   * Entrypoint to handle the various errors that may occur during the execution of the delete
+   * entity service.
+   *
+   * @param error The error instance that provides context on what issue occurred.
+   */
+  private void handleError(final DeleteEntityServiceError error) {
+    // NO-OP for now.
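+    // A sketch of what a non-NO-OP handler could eventually do (hypothetical, not current
+    // behavior):
+    //   log.warn("Reference removal issue {}: {}", error.getReason(), error.getMessage());
+    //   if (error.getReason() == DeleteEntityServiceErrorReason.CLONE_FAILED) {
+    //     throw new IllegalStateException(error.getMessage());
+    //   }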
+  }
+
+  @AllArgsConstructor
+  @Data
+  private static class DeleteEntityServiceError {
+    String message;
+    DeleteEntityServiceErrorReason reason;
+    Map<String, Object> context;
+  }
+
+  private enum DeleteEntityServiceErrorReason {
+    ENTITY_SERVICE_ASPECT_NOT_FOUND,
+    ENTITY_REGISTRY_SPEC_NOT_FOUND,
+    MCP_PROCESSOR_FAILED,
+    ASPECT_DELETE_FAILED,
+    CLONE_FAILED,
+  }
+
+  @AllArgsConstructor
+  @Data
+  private static class EnrichedAspect {
+    String name;
+    Aspect aspect;
+    AspectSpec spec;
+  }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java
index 58b5341c4ae0c..0a8b5880e5bce 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java
@@ -13,43 +13,45 @@
 import java.util.ListIterator;
 import lombok.extern.slf4j.Slf4j;
 
-
 /**
- * Utility class that encapsulates the logic on how to modify a {@link RecordTemplate} in place to remove a single value
- * following a concrete set of rules.
+ * Utility class that encapsulates the logic on how to modify a {@link RecordTemplate} in place to
+ * remove a single value following a concrete set of rules.
  *
- * It does this by a recursive tree traversal method, based on an aspect path provided for a given aspect.
- * This so that we don't have to scan the entire aspect for the value to be removed and then figure out how to apply
- * logical rules based on upstream optionality definitions.
+ * <p>It does this by a recursive tree traversal method, based on an aspect path provided for a
+ * given aspect. This is so that we don't have to scan the entire aspect for the value to be removed
+ * and then figure out how to apply logical rules based on upstream optionality definitions.
  *
- * For more information see {@link #getAspectWithReferenceRemoved(String, RecordTemplate, DataSchema, PathSpec)}
+ * <p>For more information see {@link #getAspectWithReferenceRemoved(String, RecordTemplate,
+ * DataSchema, PathSpec)}
 */
@Slf4j
public class DeleteEntityUtils {

-  private DeleteEntityUtils() { }
+  private DeleteEntityUtils() {}

  /**
-   * Utility method that removes fields from a given aspect based on its aspect spec that follows the following logic:
-   *
-   * 1. If field is optional and not part of an array → remove the field.
-   * 2. If is a field that is part of an array (has an `*` in the path spec)
-   *     → go up to the nearest array and remove the element.
-   *     Extra → If array only has 1 element which is being deleted→ optional rules (if optional set null, otherwise delete)
-   * 3. If field is non-optional and does not belong to an array delete if and only if aspect becomes empty.
+   * Utility method that removes fields from a given aspect based on its aspect spec that follows
+   * the following logic:
    *
-   * @param value Value to be removed from Aspect.
-   * @param aspect Aspect in which the value property exists.
-   * @param schema {@link DataSchema} of the aspect being processed.
-   * @param aspectPath Path within the aspect to where the value can be found.
+   * <p>1. If field is optional and not part of an array → remove the field. 2. If it is a field
+   * that is part of an array (has an `*` in the path spec) → go up to the nearest array and remove
+   * the element. Extra → If array only has 1 element which is being deleted → optional rules (if
+   * optional set null, otherwise delete) 3. 
If field is non-optional and does not belong to an
+   * array, delete if and only if the aspect becomes empty.
    *
+   * @param value Value to be removed from Aspect.
+   * @param aspect Aspect in which the value property exists.
+   * @param schema {@link DataSchema} of the aspect being processed.
+   * @param aspectPath Path within the aspect to where the value can be found.
-   * @return A deep copy of the aspect. Modified if the value was found and according to the logic specified above.
-   *     Otherwise, a copy of the original aspect is returned.
+   * @return A deep copy of the aspect. Modified if the value was found and according to the logic
+   *     specified above. Otherwise, a copy of the original aspect is returned.
    */
-  public static Aspect getAspectWithReferenceRemoved(String value, RecordTemplate aspect, DataSchema schema, PathSpec aspectPath) {
+  public static Aspect getAspectWithReferenceRemoved(
+      String value, RecordTemplate aspect, DataSchema schema, PathSpec aspectPath) {
     try {
       final DataMap copy = aspect.copy().data();
-      final DataComplex newValue = removeValueBasedOnPath(value, schema, copy, aspectPath.getPathComponents(), 0);
+      final DataComplex newValue =
+          removeValueBasedOnPath(value, schema, copy, aspectPath.getPathComponents(), 0);
       if (newValue == null) {
         // If the new value is null, we should remove the aspect.
         return null;
@@ -63,49 +65,54 @@ public static Aspect getAspectWithReferenceRemoved(String value, RecordTemplate

  /**
   * This method chooses which sub method to invoke based on the path being iterated on.
   *
-   * @param value The value to be removed from the data complex object. Merely propagated down in this method.
-   * @param schema The schema of the data complex being visited. Merely propagated down in this method.
-   * @param o The data complex instance with the actual data being visited.
-   * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value
-   *     to be removed.
-   * @param index The current index in the list of path components the method is meant to process.
-   * @return The updated DataComplex object without the value given as input.
+   * @param value The value to be removed from the data complex object. Merely propagated down in
+   *     this method.
+   * @param schema The schema of the data complex being visited. Merely propagated down in this
+   *     method.
+   * @param o The data complex instance with the actual data being visited.
+   * @param pathComponents The list of sub-strings representing the path from the root of the data
+   *     complex to the value to be removed.
+   * @param index The current index in the list of path components the method is meant to process.
+   * @return The updated DataComplex object without the value given as input. 
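+   *     (Illustrative walk-through: for the path /upstreams/*/dataset the components are
+   *     ["upstreams", "*", "dataset"]; indices 0 and 2 are resolved as map lookups while index 1
+   *     dispatches to the array branch.)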
*/
-  private static DataComplex removeValueBasedOnPath(String value, DataSchema schema, DataComplex o, List<String> pathComponents,
-      int index) {
+  private static DataComplex removeValueBasedOnPath(
+      String value, DataSchema schema, DataComplex o, List<String> pathComponents, int index) {
     final String subPath = pathComponents.get(index);

     // Processing an array
     if (subPath.equals("*")) {
       // Process each entry
-      return removeValueFromArray(value, (ArrayDataSchema) schema, (DataList) o, pathComponents, index);
+      return removeValueFromArray(
+          value, (ArrayDataSchema) schema, (DataList) o, pathComponents, index);
     } else {
       // Processing a map
-      return removeValueFromMap(value, (RecordDataSchema) schema, (DataMap) o, pathComponents, index);
+      return removeValueFromMap(
+          value, (RecordDataSchema) schema, (DataMap) o, pathComponents, index);
     }
   }

  /**
-   * This method is used to visit and remove values from DataMap instances if they are the leaf nodes of the original
-   * data complex object.
+   * This method is used to visit and remove values from DataMap instances if they are the leaf
+   * nodes of the original data complex object.
    *
-   * Note that this method has side effects and mutates the provided DataMap instance.
+   * <p>Note that this method has side effects and mutates the provided DataMap instance.
    *
-   * @param value The value to be removed from the data map object.
-   * @param spec The schema of the data complex being visited. Used to get information of the optionallity of
-   *     the data map being processed.
-   * @param record The data list instance with the actual data being visited.
-   * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value
-   *     to be removed.
-   * @param index The current index in the list of path components the method is meant to process.
-   * @return The updated DataComplex object without the value given as input.
+   * @param value The value to be removed from the data map object.
+   * @param spec The schema of the data complex being visited. Used to get information of the
+   *     optionality of the data map being processed.
+   * @param record The data map instance with the actual data being visited.
+   * @param pathComponents The list of sub-strings representing the path from the root of the data
+   *     complex to the value to be removed.
+   * @param index The current index in the list of path components the method is meant to process.
+   * @return The updated DataComplex object without the value given as input. 
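+   *     (Illustrative: when the final path component names an optional field whose value matches,
+   *     the key is simply removed; for a required field the method returns null instead, so that
+   *     the caller can decide whether deleting the enclosing record is legal.)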
*/
-  private static DataComplex removeValueFromMap(String value, RecordDataSchema spec, DataMap record, List<String> pathComponents,
-      int index) {
+  private static DataComplex removeValueFromMap(
+      String value, RecordDataSchema spec, DataMap record, List<String> pathComponents, int index) {

     // If in the last component of the path spec
     if (index == pathComponents.size() - 1) {
       boolean canDelete = spec.getField(pathComponents.get(index)).getOptional();
-      boolean valueExistsInRecord = record.getOrDefault(pathComponents.get(index), "").equals(value);
+      boolean valueExistsInRecord =
+          record.getOrDefault(pathComponents.get(index), "").equals(value);
       if (valueExistsInRecord) {
         if (canDelete) {
           record.remove(pathComponents.get(index));
@@ -114,17 +121,26 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe
           return null;
         }
       } else {
-        log.error("[Reference removal logic] Unable to find value {} in data map {} at path {}", value, record,
-            pathComponents.subList(0, index));
+        log.error(
+            "[Reference removal logic] Unable to find value {} in data map {} at path {}",
+            value,
+            record,
+            pathComponents.subList(0, index));
       }
     } else { // else traverse further down the tree.
       final String key = pathComponents.get(index);
       final boolean optionalField = spec.getField(key).getOptional();
-      // Check if key exists, this may not exist because you are in wrong branch of the tree (i.e: iterating for an array)
+      // Check if key exists, this may not exist because you are in the wrong branch of the tree
+      // (i.e., iterating for an array)
       if (record.containsKey(key)) {
-        final DataComplex result = removeValueBasedOnPath(value, spec.getField(key).getType(), (DataComplex) record.get(key), pathComponents,
-            index + 1);
+        final DataComplex result =
+            removeValueBasedOnPath(
+                value,
+                spec.getField(key).getType(),
+                (DataComplex) record.get(key),
+                pathComponents,
+                index + 1);

         if (result == null) {
           if (optionalField) {
@@ -134,7 +150,9 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe
           } else {
             // Not optional and not the only field, then this is a bad delete. Need to throw.
             throw new UnsupportedOperationException(
-                String.format("Delete failed! Failed to field with name %s from DataMap. The field is required!", key));
+                String.format(
+                    "Delete failed! Failed to delete field with name %s from DataMap. The field is required!",
+                    key));
           }
         } else {
           record.put(key, result);
@@ -145,32 +163,40 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe
   }

  /**
-   * This method is used to visit and remove values from DataList instances if they are the leaf nodes of the original
-   * data complex object.
+   * This method is used to visit and remove values from DataList instances if they are the leaf
+   * nodes of the original data complex object.
    *
-   * Note that this method has side effects and mutates the provided DataMap instance.
+   * <p>Note that this method has side effects and mutates the provided DataMap instance.
    *
-   * @param value The value to be removed from the data list object.
-   * @param record The data list instance with the actual data being visited.
-   * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value
-   *     to be removed.
-   * @param index The current index in the list of path components the method is meant to process.
-   * @return The updated DataComplex object without the value given as input.
+   * @param value The value to be removed from the data list object. 
+   * @param record The array schema describing the data list being visited.
+   * @param aspectList The data list instance with the actual data being visited.
+   * @param pathComponents The list of sub-strings representing the path from the root of the data
+   *     complex to the value to be removed.
+   * @param index The current index in the list of path components the method is meant to process.
+   * @return The updated DataComplex object without the value given as input.
    */
-  private static DataComplex removeValueFromArray(String value, ArrayDataSchema record, DataList aspectList,
-      List<String> pathComponents, int index) {
+  private static DataComplex removeValueFromArray(
+      String value,
+      ArrayDataSchema record,
+      DataList aspectList,
+      List<String> pathComponents,
+      int index) {
     // If in the last component of the path spec
     if (index == pathComponents.size() - 1) {
       final boolean found = aspectList.remove(value);
       if (!found) {
-        log.error(String.format("Unable to find value %s in aspect list %s at path %s", value, aspectList,
-            pathComponents.subList(0, index)));
+        log.error(
+            String.format(
+                "Unable to find value %s in aspect list %s at path %s",
+                value, aspectList, pathComponents.subList(0, index)));
       }
     } else { // else traverse further down the tree.
       final ListIterator<Object> it = aspectList.listIterator();
       while (it.hasNext()) {
         final Object aspect = it.next();
-        final DataComplex result = removeValueBasedOnPath(value, record.getItems(), (DataComplex) aspect, pathComponents, index + 1);
+        final DataComplex result =
+            removeValueBasedOnPath(
+                value, record.getItems(), (DataComplex) aspect, pathComponents, index + 1);

         if (result == null) {
           it.remove();
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java
index b7607053df8e3..8654df4435cd6 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java
@@ -33,11 +33,11 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 public interface EntityService {

  /**
   * Just whether the entity/aspect exists
+   *
   * @param urn urn for the entity
   * @param aspectName aspect for the entity
   * @return exists or not
@@ -45,35 +45,37 @@ public interface EntityService {
   Boolean exists(Urn urn, String aspectName);

  /**
-   * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided
-   * set of aspect names.
+   * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided set
+   * of aspect names.
    *
    * @param urns set of urns to fetch aspects for
    * @param aspectNames aspects to fetch for each urn in urns set
    * @return a map of provided {@link Urn} to a List containing the requested aspects.
    */
   Map<Urn, List<RecordTemplate>> getLatestAspects(
-      @Nonnull final Set<Urn> urns,
-      @Nonnull final Set<String> aspectNames);
+      @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames);

-  Map<String, RecordTemplate> getLatestAspectsForUrn(@Nonnull final Urn urn, @Nonnull final Set<String> aspectNames);
+  Map<String, RecordTemplate> getLatestAspectsForUrn(
+      @Nonnull final Urn urn, @Nonnull final Set<String> aspectNames);

  /**
   * Retrieves an aspect having a specific {@link Urn}, name, & version.
   *
-   * Note that once we drop support for legacy aspect-specific resources,
-   * we should make this a protected method. Only visible for backwards compatibility. 
+ * <p>Note that once we drop support for legacy aspect-specific resources, we should make this a + * protected method. Only visible for backwards compatibility. * * @param urn an urn associated with the requested aspect * @param aspectName name of the aspect requested * @param version specific version of the aspect being requests - * @return the {@link RecordTemplate} representation of the requested aspect object, or null if one cannot be found + * @return the {@link RecordTemplate} representation of the requested aspect object, or null if + * one cannot be found */ - RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version); + RecordTemplate getAspect( + @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version); /** - * Retrieves the latest aspects for the given urn as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given urn as dynamic aspect objects (Without having to + * define union objects) * * @param entityName name of the entity to fetch * @param urn urn of entity to fetch @@ -83,11 +85,12 @@ Map<Urn, List<RecordTemplate>> getLatestAspects( EntityResponse getEntityV2( @Nonnull final String entityName, @Nonnull final Urn urn, - @Nonnull final Set<String> aspectNames) throws URISyntaxException; + @Nonnull final Set<String> aspectNames) + throws URISyntaxException; /** - * Retrieves the latest aspects for the given set of urns as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given set of urns as dynamic aspect objects (Without + * having to define union objects) * * @param entityName name of the entity to fetch * @param urns set of urns to fetch @@ -97,19 +100,21 @@ EntityResponse getEntityV2( Map<Urn, EntityResponse> getEntitiesV2( @Nonnull final String entityName, @Nonnull final Set<Urn> urns, - @Nonnull final Set<String> aspectNames) throws URISyntaxException; + @Nonnull final Set<String> aspectNames) + throws URISyntaxException; /** - * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects (Without + * having to define union objects) * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link Entity} object */ Map<Urn, EntityResponse> getEntitiesVersionedV2( - @Nonnull final Set<VersionedUrn> versionedUrns, - @Nonnull final Set<String> aspectNames) throws URISyntaxException; + @Nonnull final Set<VersionedUrn> versionedUrns, @Nonnull final Set<String> aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects @@ -121,20 +126,20 @@ Map<Urn, EntityResponse> getEntitiesVersionedV2( */ Map<Urn, List<EnvelopedAspect>> getLatestEnvelopedAspects( // TODO: entityName is unused, can we remove this as a param? 
- @Nonnull String entityName, - @Nonnull Set<Urn> urns, - @Nonnull Set<String> aspectNames) throws URISyntaxException; + @Nonnull String entityName, @Nonnull Set<Urn> urns, @Nonnull Set<String> aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link EnvelopedAspect} object */ Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects( - @Nonnull Set<VersionedUrn> versionedUrns, - @Nonnull Set<String> aspectNames) throws URISyntaxException; + @Nonnull Set<VersionedUrn> versionedUrns, @Nonnull Set<String> aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspect for the given urn as a list of enveloped aspects @@ -145,9 +150,8 @@ Map<Urn, List<EnvelopedAspect>> getVersionedEnvelopedAspects( * @return {@link EnvelopedAspect} object, or null if one cannot be found */ EnvelopedAspect getLatestEnvelopedAspect( - @Nonnull final String entityName, - @Nonnull final Urn urn, - @Nonnull final String aspectName) throws Exception; + @Nonnull final String entityName, @Nonnull final Urn urn, @Nonnull final String aspectName) + throws Exception; @Deprecated VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version); @@ -158,19 +162,27 @@ ListResult<RecordTemplate> listLatestAspects( final int start, final int count); - List<UpdateAspectResult> ingestAspects(@Nonnull final Urn urn, @Nonnull List<Pair<String, RecordTemplate>> aspectRecordsToIngest, - @Nonnull final AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata); + List<UpdateAspectResult> ingestAspects( + @Nonnull final Urn urn, + @Nonnull List<Pair<String, RecordTemplate>> aspectRecordsToIngest, + @Nonnull final AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata); - List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspectsBatch, @Nonnull final AuditStamp auditStamp, - boolean emitMCL, boolean overwrite); + List<UpdateAspectResult> ingestAspects( + @Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean emitMCL, + boolean overwrite); /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}. + * Ingests (inserts) a new version of an entity aspect & emits a {@link + * com.linkedin.mxe.MetadataAuditEvent}. * - * This method runs a read -> write atomically in a single transaction, this is to prevent multiple IDs from being created. + * <p>This method runs a read -> write atomically in a single transaction, this is to prevent + * multiple IDs from being created. * - * Note that in general, this should not be used externally. It is currently serving upgrade scripts and - * is as such public. + * <p>Note that in general, this should not be used externally. It is currently serving upgrade + * scripts and is as such public. 
* * @param urn an urn associated with the new aspect * @param aspectName name of the aspect being inserted @@ -179,17 +191,22 @@ List<UpdateAspectResult> ingestAspects(@Nonnull final AspectsBatch aspectsBatch, * @param systemMetadata * @return the {@link RecordTemplate} representation of the written aspect object */ - RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, @Nonnull String aspectName, - @Nonnull RecordTemplate newValue, @Nonnull AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata); + RecordTemplate ingestAspectIfNotPresent( + @Nonnull Urn urn, + @Nonnull String aspectName, + @Nonnull RecordTemplate newValue, + @Nonnull AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata); // TODO: Why not in RetentionService? - String batchApplyRetention(Integer start, Integer count, Integer attemptWithVersion, String aspectName, - String urn); + String batchApplyRetention( + Integer start, Integer count, Integer attemptWithVersion, String aspectName, String urn); Integer getCountAspect(@Nonnull String aspectName, @Nullable String urnLike); // TODO: Extract this to a different service, doesn't need to be here - RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger); + RestoreIndicesResult restoreIndices( + @Nonnull RestoreIndicesArgs args, @Nonnull Consumer<String> logger); ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count); @@ -199,63 +216,76 @@ String batchApplyRetention(Integer start, Integer count, Integer attemptWithVers @Deprecated Map<Urn, Entity> getEntities(@Nonnull final Set<Urn> urns, @Nonnull Set<String> aspectNames); - Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, AspectSpec aspectSpec, + Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + AspectSpec aspectSpec, @Nonnull final MetadataChangeLog metadataChangeLog); - Pair<Future<?>, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName, - @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue, - @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata, - @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, - @Nonnull final ChangeType changeType); + Pair<Future<?>, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull final AspectSpec aspectSpec, + @Nullable final RecordTemplate oldAspectValue, + @Nullable final RecordTemplate newAspectValue, + @Nullable final SystemMetadata oldSystemMetadata, + @Nullable final SystemMetadata newSystemMetadata, + @Nonnull AuditStamp auditStamp, + @Nonnull final ChangeType changeType); RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName); @Deprecated - void ingestEntities(@Nonnull final List<Entity> entities, @Nonnull final AuditStamp auditStamp, + void ingestEntities( + @Nonnull final List<Entity> entities, + @Nonnull final AuditStamp auditStamp, @Nonnull final List<SystemMetadata> systemMetadata); @Deprecated SystemMetadata ingestEntity(Entity entity, AuditStamp auditStamp); @Deprecated - void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp, + void ingestEntity( + @Nonnull Entity entity, + @Nonnull AuditStamp auditStamp, @Nonnull SystemMetadata systemMetadata); void setRetentionService(RetentionService retentionService); AspectSpec 
getKeyAspectSpec(@Nonnull final Urn urn); - Optional<AspectSpec> getAspectSpec(@Nonnull final String entityName, @Nonnull final String aspectName); + Optional<AspectSpec> getAspectSpec( + @Nonnull final String entityName, @Nonnull final String aspectName); String getKeyAspectName(@Nonnull final Urn urn); /** * Generate default aspects if not present in the database. + * * @param urn entity urn * @param includedAspects aspects being written * @return additional aspects to be written */ - List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects); + List<Pair<String, RecordTemplate>> generateDefaultAspectsIfMissing( + @Nonnull final Urn urn, Map<String, RecordTemplate> includedAspects); /** - * Generate default aspects if the entity key aspect is NOT in the database **AND** - * the key aspect is being written, present in `includedAspects`. - * - * Does not automatically create key aspects. - * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration + * Generate default aspects if the entity key aspect is NOT in the database **AND** the key aspect + * is being written, present in `includedAspects`. * - * This version is more efficient in that it only generates additional writes - * when a new entity is being minted for the first time. The drawback is that it will not automatically - * add key aspects, in case the producer is not bothering to ensure that the entity exists - * before writing non-key aspects. + * <p>Does not automatically create key aspects. * + * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration + * <p>This version is more efficient in that it only generates additional writes when a new + * entity is being minted for the first time. The drawback is that it will not automatically + * add key aspects, in case the producer is not bothering to ensure that the entity exists + * before writing non-key aspects. 
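+   *     <p>(Illustrative: when a brand-new entity's key aspect is among the writes, the returned
+   *     pair flags that the key was absent and carries the default aspects to write alongside it;
+   *     for an already-minted entity the additional list is empty.)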
* @param urn entity urn * @param includedAspects aspects being written * @return whether key aspect exists in database and the additional aspects to be written */ - Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn, - Map<String, RecordTemplate> includedAspects); + Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstWrite( + @Nonnull final Urn urn, Map<String, RecordTemplate> includedAspects); AspectSpec getKeyAspectSpec(@Nonnull final String entityName); @@ -263,24 +293,30 @@ Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstW EntityRegistry getEntityRegistry(); - RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map<String, String> conditions, boolean hardDelete); + RollbackResult deleteAspect( + String urn, String aspectName, @Nonnull Map<String, String> conditions, boolean hardDelete); RollbackRunResult deleteUrn(Urn urn); - RollbackRunResult rollbackRun(List<AspectRowSummary> aspectRows, String runId, boolean hardDelete); + RollbackRunResult rollbackRun( + List<AspectRowSummary> aspectRows, String runId, boolean hardDelete); - RollbackRunResult rollbackWithConditions(List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete); + RollbackRunResult rollbackWithConditions( + List<AspectRowSummary> aspectRows, Map<String, String> conditions, boolean hardDelete); - Set<IngestResult> ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async); + Set<IngestResult> ingestProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async); /** * If you have more than 1 proposal use the {AspectsBatch} method + * * @param proposal the metadata proposal to ingest * @param auditStamp audit information * @param async async ingestion or sync ingestion * @return ingestion result */ - IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async); + IngestResult ingestProposal( + MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async); Boolean exists(Urn urn); @@ -293,15 +329,17 @@ Pair<Boolean, List<Pair<String, RecordTemplate>>> generateDefaultAspectsOnFirstW /** * Builds the default browse path V2 aspects for all entities. * - * This method currently supports datasets, charts, dashboards, and data jobs best. Everything else - * will have a basic "Default" folder added to their browsePathV2. + * <p>This method currently supports datasets, charts, dashboards, and data jobs best. Everything + * else will have a basic "Default" folder added to their browsePathV2. */ @Nonnull - BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException; + BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) + throws URISyntaxException; /** - * Allow internal use of the system entity client. Solves recursive dependencies between the EntityService - * and the SystemJavaEntityClient + * Allow internal use of the system entity client. 
Solves recursive dependencies between the + * EntityService and the SystemJavaEntityClient + * * @param systemEntityClient system entity client */ void setSystemEntityClient(SystemEntityClient systemEntityClient); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java index 27c51e050deff..1ef818559faae 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java @@ -3,7 +3,6 @@ import com.linkedin.common.urn.Urn; import lombok.Value; - @Value public class IngestProposalResult { Urn urn; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java index 5e4ed6259a7f7..3e72a763fb17c 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java @@ -8,11 +8,11 @@ @Builder(toBuilder = true) @Value public class IngestResult { - Urn urn; - AbstractBatchItem request; - boolean publishedMCL; - boolean processedMCL; - boolean publishedMCP; - boolean sqlCommitted; - boolean isUpdate; // update else insert + Urn urn; + AbstractBatchItem request; + boolean publishedMCL; + boolean processedMCL; + boolean publishedMCP; + boolean sqlCommitted; + boolean isUpdate; // update else insert } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java index 21b07e59a2bf0..e6bf82b764484 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java @@ -7,7 +7,8 @@ import lombok.Value; /** - * An immutable value class that holds the result of a list operation and other pagination information. + * An immutable value class that holds the result of a list operation and other pagination + * information. * * @param <T> the result type */ diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java index 1cdd9965c4bfc..51519f48bd975 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java @@ -25,18 +25,16 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.Builder; import lombok.SneakyThrows; import lombok.Value; - /** * Service coupled with an {@link EntityServiceImpl} to handle aspect record retention. * - * TODO: This class is abstract with storage-specific implementations. It'd be nice to pull storage and retention - * concerns apart, let (into {@link AspectDao}) deal with storage, and merge all retention concerns into a single - * class. + * <p>TODO: This class is abstract with storage-specific implementations. It'd be nice to pull + * storage and retention concerns apart, let (into {@link AspectDao}) deal with storage, and merge + * all retention concerns into a single class. 
*/
 public abstract class RetentionService {
   protected static final String ALL = "*";
@@ -44,8 +42,8 @@ public abstract class RetentionService {
   protected abstract EntityService getEntityService();

  /**
-   * Fetch retention policies given the entityName and aspectName
-   * Uses the entity service to fetch the latest retention policies set for the input entity and aspect
+   * Fetch retention policies given the entityName and aspectName. Uses the entity service to fetch
+   * the latest retention policies set for the input entity and aspect
    *
    * @param entityName Name of the entity
    * @param aspectName Name of the aspect
@@ -55,19 +53,24 @@ public Retention getRetention(@Nonnull String entityName, @Nonnull String aspect
     // Prioritized list of retention keys to fetch
     List<Urn> retentionUrns = getRetentionKeys(entityName, aspectName);
     Map<Urn, List<RecordTemplate>> fetchedAspects =
-        getEntityService().getLatestAspects(new HashSet<>(retentionUrns), ImmutableSet.of(Constants.DATAHUB_RETENTION_ASPECT));
+        getEntityService()
+            .getLatestAspects(
+                new HashSet<>(retentionUrns), ImmutableSet.of(Constants.DATAHUB_RETENTION_ASPECT));
     // Find the first retention info that is set among the prioritized list of retention keys above
-    Optional<DataHubRetentionConfig> retentionInfo = retentionUrns.stream()
-        .flatMap(urn -> fetchedAspects.getOrDefault(urn, Collections.emptyList())
-            .stream()
-            .filter(aspect -> aspect instanceof DataHubRetentionConfig))
-        .map(retention -> (DataHubRetentionConfig) retention)
-        .findFirst();
+    Optional<DataHubRetentionConfig> retentionInfo =
+        retentionUrns.stream()
+            .flatMap(
+                urn ->
+                    fetchedAspects.getOrDefault(urn, Collections.emptyList()).stream()
+                        .filter(aspect -> aspect instanceof DataHubRetentionConfig))
+            .map(retention -> (DataHubRetentionConfig) retention)
+            .findFirst();
     return retentionInfo.map(DataHubRetentionConfig::getRetention).orElse(new Retention());
   }

  // Get list of datahub retention keys that match the input entity name and aspect name
-  protected static List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull String aspectName) {
+  protected static List<Urn> getRetentionKeys(
+      @Nonnull String entityName, @Nonnull String aspectName) {
     return ImmutableList.of(
         new DataHubRetentionKey().setEntityName(entityName).setAspectName(aspectName),
         new DataHubRetentionKey().setEntityName(entityName).setAspectName(ALL),
@@ -79,22 +82,26 @@ protected static List<Urn> getRetentionKeys(@Nonnull String entityName, @Nonnull
   }

  /**
-   * Set retention policy for given entity and aspect. If entity or aspect names are null, the policy is set as default
+   * Set retention policy for given entity and aspect. If entity or aspect names are null, the
+   * policy is set as default
    *
-   * @param entityName Entity name to apply policy to. If null, set as "*",
-   *                   meaning it will be the default for any entities without specified policy
-   * @param aspectName Aspect name to apply policy to. If null, set as "*",
-   *                   meaning it will be the default for any aspects without specified policy
+   * @param entityName Entity name to apply policy to. If null, set as "*", meaning it will be the
+   *     default for any entities without specified policy
+   * @param aspectName Aspect name to apply policy to. 
If null, set as "*", meaning it will be the + * default for any aspects without specified policy * @param retentionConfig Retention policy */ @SneakyThrows - public boolean setRetention(@Nullable String entityName, @Nullable String aspectName, - @Nonnull DataHubRetentionConfig retentionConfig) { + public boolean setRetention( + @Nullable String entityName, + @Nullable String aspectName, + @Nonnull DataHubRetentionConfig retentionConfig) { validateRetention(retentionConfig.getRetention()); DataHubRetentionKey retentionKey = new DataHubRetentionKey(); retentionKey.setEntityName(entityName != null ? entityName : ALL); retentionKey.setAspectName(aspectName != null ? aspectName : ALL); - Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); + Urn retentionUrn = + EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); MetadataChangeProposal keyProposal = new MetadataChangeProposal(); GenericAspect keyAspect = GenericRecordUtils.serializeAspect(retentionKey); @@ -110,11 +117,13 @@ public boolean setRetention(@Nullable String entityName, @Nullable String aspect aspectProposal.setAspectName(Constants.DATAHUB_RETENTION_ASPECT); AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); AspectsBatch batch = buildAspectsBatch(List.of(keyProposal, aspectProposal)); return getEntityService().ingestProposal(batch, auditStamp, false).stream() - .anyMatch(IngestResult::isSqlCommitted); + .anyMatch(IngestResult::isSqlCommitted); } protected abstract AspectsBatch buildAspectsBatch(List<MetadataChangeProposal> mcps); @@ -122,28 +131,31 @@ public boolean setRetention(@Nullable String entityName, @Nullable String aspect /** * Delete the retention policy set for given entity and aspect. * - * @param entityName Entity name to apply policy to. If null, set as "*", - * meaning it will delete the default policy for any entities without specified policy - * @param aspectName Aspect name to apply policy to. If null, set as "*", - * meaning it will delete the default policy for any aspects without specified policy + * @param entityName Entity name to apply policy to. If null, set as "*", meaning it will delete + * the default policy for any entities without specified policy + * @param aspectName Aspect name to apply policy to. If null, set as "*", meaning it will delete + * the default policy for any aspects without specified policy */ public void deleteRetention(@Nullable String entityName, @Nullable String aspectName) { DataHubRetentionKey retentionKey = new DataHubRetentionKey(); retentionKey.setEntityName(entityName != null ? entityName : ALL); retentionKey.setAspectName(aspectName != null ? 
aspectName : ALL); - Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); + Urn retentionUrn = + EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); getEntityService().deleteUrn(retentionUrn); } private void validateRetention(Retention retention) { if (retention.hasVersion()) { if (retention.getVersion().getMaxVersions() <= 0) { - throw new IllegalArgumentException("Invalid maxVersions: " + retention.getVersion().getMaxVersions()); + throw new IllegalArgumentException( + "Invalid maxVersions: " + retention.getVersion().getMaxVersions()); } } if (retention.hasTime()) { if (retention.getTime().getMaxAgeInSeconds() <= 0) { - throw new IllegalArgumentException("Invalid maxAgeInSeconds: " + retention.getTime().getMaxAgeInSeconds()); + throw new IllegalArgumentException( + "Invalid maxAgeInSeconds: " + retention.getTime().getMaxAgeInSeconds()); } } } @@ -151,33 +163,39 @@ private void validateRetention(Retention retention) { /** * Apply retention policies given the urn and aspect name * - * @param retentionContexts urn, aspect name, and additional context that could be used to apply retention + * @param retentionContexts urn, aspect name, and additional context that could be used to apply + * retention */ public void applyRetentionWithPolicyDefaults(@Nonnull List<RetentionContext> retentionContexts) { - List<RetentionContext> withDefaults = retentionContexts.stream() - .map(context -> { - if (context.getRetentionPolicy().isEmpty()) { - Retention retentionPolicy = getRetention(context.getUrn().getEntityType(), context.getAspectName()); - return context.toBuilder() + List<RetentionContext> withDefaults = + retentionContexts.stream() + .map( + context -> { + if (context.getRetentionPolicy().isEmpty()) { + Retention retentionPolicy = + getRetention(context.getUrn().getEntityType(), context.getAspectName()); + return context.toBuilder() .retentionPolicy(Optional.of(retentionPolicy)) .build(); - } else { - return context; - } - }) - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()) + } else { + return context; + } + }) + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) .collect(Collectors.toList()); applyRetention(withDefaults); } /** - * Apply retention policies given the urn and aspect name and policies. This protected - * method assumes that the policy is provided, however we likely need to fetch these - * from system configuration. + * Apply retention policies given the urn and aspect name and policies. This protected method + * assumes that the policy is provided, however we likely need to fetch these from system + * configuration. * - * Users of this should use {@link #applyRetentionWithPolicyDefaults(List<RetentionContext>)}) + * <p>Users of this should use {@link #applyRetentionWithPolicyDefaults(List<RetentionContext>)}) * * @param retentionContexts Additional context that could be used to apply retention */ @@ -189,23 +207,19 @@ public void applyRetentionWithPolicyDefaults(@Nonnull List<RetentionContext> ret * @param entityName Name of the entity to apply retention to. If null, applies to all entities * @param aspectName Name of the aspect to apply retention to. 
If null, applies to all aspects */ - public abstract void batchApplyRetention(@Nullable String entityName, @Nullable String aspectName); + public abstract void batchApplyRetention( + @Nullable String entityName, @Nullable String aspectName); - /** - * Batch apply retention to all records within the start, end count - */ - public abstract BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args); + /** Batch apply retention to all records within the start, end count */ + public abstract BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args); @Value @Builder(toBuilder = true) public static class RetentionContext { - @Nonnull - Urn urn; - @Nonnull - String aspectName; - @Builder.Default - Optional<Retention> retentionPolicy = Optional.empty(); - @Builder.Default - Optional<Long> maxVersion = Optional.empty(); + @Nonnull Urn urn; + @Nonnull String aspectName; + @Builder.Default Optional<Retention> retentionPolicy = Optional.empty(); + @Builder.Default Optional<Long> maxVersion = Optional.empty(); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java index 76a12a67b3aaf..9955a58c65339 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java @@ -1,13 +1,11 @@ package com.linkedin.metadata.entity; import com.linkedin.common.urn.Urn; - import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.mxe.SystemMetadata; import lombok.Value; - @Value public class RollbackResult { public Urn urn; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java index 02776b7de4d09..a8c558df77e57 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java @@ -4,7 +4,6 @@ import java.util.List; import lombok.Value; - @Value public class RollbackRunResult { public List<AspectRowSummary> rowsRolledBack; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java index 06199814d30dd..a10c90bc45320 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java @@ -6,12 +6,10 @@ import com.linkedin.metadata.entity.transactions.AbstractBatchItem; import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.SystemMetadata; +import java.util.concurrent.Future; import lombok.Builder; import lombok.Value; -import java.util.concurrent.Future; - - @Builder(toBuilder = true) @Value public class UpdateAspectResult { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java index 64511325d96d2..d8fcbe0b7d44d 100644 --- 
a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java @@ -4,51 +4,51 @@ @Data public class RestoreIndicesArgs implements Cloneable { - public int start = 0; - public int batchSize = 10; - public int numThreads = 1; - public long batchDelayMs = 1; - public String aspectName; - public String urn; - public String urnLike; - - @Override - public RestoreIndicesArgs clone() { - try { - RestoreIndicesArgs clone = (RestoreIndicesArgs) super.clone(); - // TODO: copy mutable state here, so the clone can't change the internals of the original - return clone; - } catch (CloneNotSupportedException e) { - throw new AssertionError(); - } + public int start = 0; + public int batchSize = 10; + public int numThreads = 1; + public long batchDelayMs = 1; + public String aspectName; + public String urn; + public String urnLike; + + @Override + public RestoreIndicesArgs clone() { + try { + RestoreIndicesArgs clone = (RestoreIndicesArgs) super.clone(); + // TODO: copy mutable state here, so the clone can't change the internals of the original + return clone; + } catch (CloneNotSupportedException e) { + throw new AssertionError(); } - - public RestoreIndicesArgs setAspectName(String aspectName) { - this.aspectName = aspectName; - return this; - } - - public RestoreIndicesArgs setUrnLike(String urnLike) { - this.urnLike = urnLike; - return this; - } - - public RestoreIndicesArgs setUrn(String urn) { - this.urn = urn; - return this; - } - - public RestoreIndicesArgs setStart(Integer start) { - if (start != null) { - this.start = start; - } - return this; + } + + public RestoreIndicesArgs setAspectName(String aspectName) { + this.aspectName = aspectName; + return this; + } + + public RestoreIndicesArgs setUrnLike(String urnLike) { + this.urnLike = urnLike; + return this; + } + + public RestoreIndicesArgs setUrn(String urn) { + this.urn = urn; + return this; + } + + public RestoreIndicesArgs setStart(Integer start) { + if (start != null) { + this.start = start; } + return this; + } - public RestoreIndicesArgs setBatchSize(Integer batchSize) { - if (batchSize != null) { - this.batchSize = batchSize; - } - return this; + public RestoreIndicesArgs setBatchSize(Integer batchSize) { + if (batchSize != null) { + this.batchSize = batchSize; } + return this; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java index b7917d87f99fc..8479338660db0 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java @@ -4,13 +4,13 @@ @Data public class RestoreIndicesResult { - public int ignored = 0; - public int rowsMigrated = 0; - public long timeSqlQueryMs = 0; - public long timeGetRowMs = 0; - public long timeUrnMs = 0; - public long timeEntityRegistryCheckMs = 0; - public long aspectCheckMs = 0; - public long createRecordMs = 0; - public long sendMessageMs = 0; + public int ignored = 0; + public int rowsMigrated = 0; + public long timeSqlQueryMs = 0; + public long timeGetRowMs = 0; + public long timeUrnMs = 0; + public long timeEntityRegistryCheckMs = 0; + public long aspectCheckMs = 0; + public long 
createRecordMs = 0; + public long sendMessageMs = 0; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java index 0d9126026b9c8..89e337771752f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java @@ -4,9 +4,9 @@ @Data public class BulkApplyRetentionArgs { - public Integer start; - public Integer count; - public Integer attemptWithVersion; - public String aspectName; - public String urn; + public Integer start; + public Integer count; + public Integer attemptWithVersion; + public String aspectName; + public String urn; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java index ef032496c8451..c84c7364534fc 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java @@ -4,13 +4,13 @@ @Data public class BulkApplyRetentionResult { - public long argStart; - public long argCount; - public long argAttemptWithVersion; - public String argUrn; - public String argAspectName; - public long rowsHandled = 0; - public long timeRetentionPolicyMapMs; - public long timeRowMs; - public long timeApplyRetentionMs = 0; + public long argStart; + public long argCount; + public long argAttemptWithVersion; + public String argUrn; + public String argAspectName; + public long rowsHandled = 0; + public long timeRetentionPolicyMapMs; + public long timeRowMs; + public long timeApplyRetentionMs = 0; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java index 03a2b4e2a7f73..155385c62ecef 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.transactions; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.models.AspectSpec; @@ -8,85 +10,85 @@ import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; - import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public abstract class AbstractBatchItem { - // urn an urn associated with the new aspect - public abstract Urn getUrn(); + // urn an urn associated with the new aspect + public abstract Urn getUrn(); - // aspectName name of the aspect being inserted - public abstract String getAspectName(); + // aspectName name of the aspect being inserted + public abstract String getAspectName(); - public abstract SystemMetadata getSystemMetadata(); + public abstract SystemMetadata getSystemMetadata(); - public 
abstract ChangeType getChangeType(); + public abstract ChangeType getChangeType(); - public abstract EntitySpec getEntitySpec(); + public abstract EntitySpec getEntitySpec(); - public abstract AspectSpec getAspectSpec(); + public abstract AspectSpec getAspectSpec(); - public abstract MetadataChangeProposal getMetadataChangeProposal(); + public abstract MetadataChangeProposal getMetadataChangeProposal(); - public abstract void validateUrn(EntityRegistry entityRegistry, Urn urn); + public abstract void validateUrn(EntityRegistry entityRegistry, Urn urn); - @Nonnull - protected static SystemMetadata generateSystemMetadataIfEmpty(@Nullable SystemMetadata systemMetadata) { - if (systemMetadata == null) { - systemMetadata = new SystemMetadata(); - systemMetadata.setRunId(DEFAULT_RUN_ID); - systemMetadata.setLastObserved(System.currentTimeMillis()); - } - return systemMetadata; + @Nonnull + protected static SystemMetadata generateSystemMetadataIfEmpty( + @Nullable SystemMetadata systemMetadata) { + if (systemMetadata == null) { + systemMetadata = new SystemMetadata(); + systemMetadata.setRunId(DEFAULT_RUN_ID); + systemMetadata.setLastObserved(System.currentTimeMillis()); } + return systemMetadata; + } - protected static AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) { - if (!mcp.hasAspectName() || !mcp.hasAspect()) { - throw new UnsupportedOperationException("Aspect and aspect name is required for create and update operations"); - } - - AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName()); + protected static AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) { + if (!mcp.hasAspectName() || !mcp.hasAspect()) { + throw new UnsupportedOperationException( + "Aspect and aspect name is required for create and update operations"); + } - if (aspectSpec == null) { - throw new RuntimeException( - String.format("Unknown aspect %s for entity %s", mcp.getAspectName(), - mcp.getEntityType())); - } + AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName()); - return aspectSpec; + if (aspectSpec == null) { + throw new RuntimeException( + String.format( + "Unknown aspect %s for entity %s", mcp.getAspectName(), mcp.getEntityType())); } - /** - * Validates that a change type is valid for the given aspect - * @param changeType - * @param aspectSpec - * @return - */ - protected static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { - if (aspectSpec.isTimeseries()) { - // Timeseries aspects only support UPSERT - return ChangeType.UPSERT.equals(changeType); - } else { - if (ChangeType.PATCH.equals(changeType)) { - return supportsPatch(aspectSpec); - } else { - return ChangeType.UPSERT.equals(changeType); - } - } + return aspectSpec; + } + + /** + * Validates that a change type is valid for the given aspect + * + * @param changeType + * @param aspectSpec + * @return + */ + protected static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { + if (aspectSpec.isTimeseries()) { + // Timeseries aspects only support UPSERT + return ChangeType.UPSERT.equals(changeType); + } else { + if (ChangeType.PATCH.equals(changeType)) { + return supportsPatch(aspectSpec); + } else { + return ChangeType.UPSERT.equals(changeType); + } } - - protected static boolean supportsPatch(AspectSpec aspectSpec) { - // Limit initial support to defined templates - if (!AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName())) { - // Prevent unexpected behavior for aspects that do not currently have 1st 
class patch support, - // specifically having array based fields that require merging without specifying merge behavior can get into bad states - throw new UnsupportedOperationException("Aspect: " + aspectSpec.getName() + " does not currently support patch " - + "operations."); - } - return true; + } + + protected static boolean supportsPatch(AspectSpec aspectSpec) { + // Limit initial support to defined templates + if (!AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName())) { + // Prevent unexpected behavior for aspects that do not currently have 1st class patch support, + // specifically having array based fields that require merging without specifying merge + // behavior can get into bad states + throw new UnsupportedOperationException( + "Aspect: " + aspectSpec.getName() + " does not currently support patch " + "operations."); } + return true; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java index 1d3da08130071..4f2cf6073bdac 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java @@ -5,18 +5,22 @@ import java.util.Set; import java.util.stream.Collectors; - public interface AspectsBatch { - List<? extends AbstractBatchItem> getItems(); + List<? extends AbstractBatchItem> getItems(); - default boolean containsDuplicateAspects() { - return getItems().stream().map(i -> String.format("%s_%s", i.getClass().getName(), i.hashCode())) - .distinct().count() != getItems().size(); - } + default boolean containsDuplicateAspects() { + return getItems().stream() + .map(i -> String.format("%s_%s", i.getClass().getName(), i.hashCode())) + .distinct() + .count() + != getItems().size(); + } - default Map<String, Set<String>> getUrnAspectsMap() { - return getItems().stream() - .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName())) - .collect(Collectors.groupingBy(Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet()))); - } + default Map<String, Set<String>> getUrnAspectsMap() { + return getItems().stream() + .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName())) + .collect( + Collectors.groupingBy( + Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet()))); + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java index 56bddba5dc0fa..d27b0ed303972 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java @@ -1,30 +1,20 @@ package com.linkedin.metadata.graph; import com.linkedin.common.urn.Urn; +import java.util.Map; import lombok.AllArgsConstructor; import lombok.Data; - -import java.util.Map; import lombok.EqualsAndHashCode; - @Data @AllArgsConstructor public class Edge { - @EqualsAndHashCode.Include - private Urn source; - @EqualsAndHashCode.Include - private Urn destination; - @EqualsAndHashCode.Include - private String relationshipType; - @EqualsAndHashCode.Exclude - private Long createdOn; - @EqualsAndHashCode.Exclude - private Urn createdActor; - @EqualsAndHashCode.Exclude - private Long updatedOn; - @EqualsAndHashCode.Exclude 
- private Urn updatedActor; - @EqualsAndHashCode.Exclude - private Map<String, Object> properties; + @EqualsAndHashCode.Include private Urn source; + @EqualsAndHashCode.Include private Urn destination; + @EqualsAndHashCode.Include private String relationshipType; + @EqualsAndHashCode.Exclude private Long createdOn; + @EqualsAndHashCode.Exclude private Urn createdActor; + @EqualsAndHashCode.Exclude private Long updatedOn; + @EqualsAndHashCode.Exclude private Urn updatedActor; + @EqualsAndHashCode.Exclude private Map<String, Object> properties; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java index d47d1e12cceb0..96a711d3875b3 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java @@ -6,13 +6,12 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface GraphClient { public static final Integer DEFAULT_PAGE_SIZE = 100; /** - * Returns a list of related entities for a given entity, set of edge types, and direction relative to the - * source node + * Returns a list of related entities for a given entity, set of edge types, and direction + * relative to the source node */ @Nonnull EntityRelationships getRelatedEntities( @@ -24,10 +23,15 @@ EntityRelationships getRelatedEntities( String actor); /** - * Returns lineage relationships for given entity in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * Returns lineage relationships for given entity in the DataHub graph. Lineage relationship + * denotes whether an entity is directly upstream or downstream of another entity */ @Nonnull - EntityLineageResult getLineageEntities(String rawUrn, LineageDirection direction, @Nullable Integer start, - @Nullable Integer count, int maxHops, String actor); + EntityLineageResult getLineageEntities( + String rawUrn, + LineageDirection direction, + @Nullable Integer start, + @Nullable Integer count, + int maxHops, + String actor); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java index 3b47f244086a9..cb4eadb8824d5 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java @@ -5,7 +5,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class GraphFilters { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java index 2bbf2af1437d8..2afe907399745 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java @@ -18,10 +18,11 @@ @Slf4j public class GraphIndexUtils { - private GraphIndexUtils() { } + private GraphIndexUtils() {} @Nullable - private static List<Urn> getActorList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List<Urn> getActorList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) 
{ return null; } @@ -31,7 +32,8 @@ private static List<Urn> getActorList(@Nullable final String path, @Nonnull fina } @Nullable - private static List<Long> getTimestampList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List<Long> getTimestampList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) { return null; } @@ -41,7 +43,8 @@ private static List<Long> getTimestampList(@Nullable final String path, @Nonnull } @Nullable - private static List<Map<String, Object>> getPropertiesList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List<Map<String, Object>> getPropertiesList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) { return null; } @@ -50,10 +53,9 @@ private static List<Map<String, Object>> getPropertiesList(@Nullable final Strin return (List<Map<String, Object>>) value; } - - @Nullable - private static boolean isValueListValid(@Nullable final List<?> entryList, final int valueListSize) { + private static boolean isValueListValid( + @Nullable final List<?> entryList, final int valueListSize) { if (entryList == null) { return false; } @@ -64,7 +66,8 @@ private static boolean isValueListValid(@Nullable final List<?> entryList, final } @Nullable - private static Long getTimestamp(@Nullable final List<Long> timestampList, final int index, final int valueListSize) { + private static Long getTimestamp( + @Nullable final List<Long> timestampList, final int index, final int valueListSize) { if (isValueListValid(timestampList, valueListSize)) { return timestampList.get(index); } @@ -72,7 +75,8 @@ private static Long getTimestamp(@Nullable final List<Long> timestampList, final } @Nullable - private static Urn getActor(@Nullable final List<Urn> actorList, final int index, final int valueListSize) { + private static Urn getActor( + @Nullable final List<Urn> actorList, final int index, final int valueListSize) { if (isValueListValid(actorList, valueListSize)) { return actorList.get(index); } @@ -80,7 +84,10 @@ private static Urn getActor(@Nullable final List<Urn> actorList, final int index } @Nullable - private static Map<String, Object> getProperties(@Nullable final List<Map<String, Object>> propertiesList, final int index, final int valueListSize) { + private static Map<String, Object> getProperties( + @Nullable final List<Map<String, Object>> propertiesList, + final int index, + final int valueListSize) { if (isValueListValid(propertiesList, valueListSize)) { return propertiesList.get(index); } @@ -88,8 +95,8 @@ private static Map<String, Object> getProperties(@Nullable final List<Map<String } /** - * Used to create new edges for the graph db, adding all the metadata associated with each edge based on the aspect. - * Returns a list of Edges to be consumed by the graph service. + * Used to create new edges for the graph db, adding all the metadata associated with each edge + * based on the aspect. Returns a list of Edges to be consumed by the graph service. 
*/ @Nonnull public static List<Edge> extractGraphEdges( @@ -97,14 +104,18 @@ public static List<Edge> extractGraphEdges( @Nonnull final RecordTemplate aspect, @Nonnull final Urn urn, @Nonnull final MetadataChangeLog event, - @Nonnull final boolean isNewAspectVersion - ) { + @Nonnull final boolean isNewAspectVersion) { final List<Edge> edgesToAdd = new ArrayList<>(); - final String createdOnPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedOn(); - final String createdActorPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedActor(); - final String updatedOnPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedOn(); - final String updatedActorPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedActor(); - final String propertiesPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getProperties(); + final String createdOnPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedOn(); + final String createdActorPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedActor(); + final String updatedOnPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedOn(); + final String updatedActorPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedActor(); + final String propertiesPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getProperties(); final List<Long> createdOnList = getTimestampList(createdOnPath, aspect); final List<Urn> createdActorList = getActorList(createdActorPath, aspect); @@ -114,27 +125,33 @@ public static List<Edge> extractGraphEdges( int index = 0; for (Object fieldValue : extractedFieldsEntry.getValue()) { - Long createdOn = createdOnList != null - ? getTimestamp(createdOnList, index, extractedFieldsEntry.getValue().size()) - : null; - Urn createdActor = createdActorList != null - ? getActor(createdActorList, index, extractedFieldsEntry.getValue().size()) - : null; - Long updatedOn = updatedOnList != null - ? getTimestamp(updatedOnList, index, extractedFieldsEntry.getValue().size()) - : null; - Urn updatedActor = updatedActorList != null - ? getActor(updatedActorList, index, extractedFieldsEntry.getValue().size()) - : null; - final Map<String, Object> properties = propertiesList != null - ? getProperties(propertiesList, index, extractedFieldsEntry.getValue().size()) - : null; + Long createdOn = + createdOnList != null + ? getTimestamp(createdOnList, index, extractedFieldsEntry.getValue().size()) + : null; + Urn createdActor = + createdActorList != null + ? getActor(createdActorList, index, extractedFieldsEntry.getValue().size()) + : null; + Long updatedOn = + updatedOnList != null + ? getTimestamp(updatedOnList, index, extractedFieldsEntry.getValue().size()) + : null; + Urn updatedActor = + updatedActorList != null + ? getActor(updatedActorList, index, extractedFieldsEntry.getValue().size()) + : null; + final Map<String, Object> properties = + propertiesList != null + ? getProperties(propertiesList, index, extractedFieldsEntry.getValue().size()) + : null; SystemMetadata systemMetadata; if (isNewAspectVersion) { systemMetadata = event.hasSystemMetadata() ? event.getSystemMetadata() : null; } else { - systemMetadata = event.hasPreviousSystemMetadata() ? event.getPreviousSystemMetadata() : null; + systemMetadata = + event.hasPreviousSystemMetadata() ? 
event.getPreviousSystemMetadata() : null; } if ((createdOn == null || createdOn == 0) && systemMetadata != null) { @@ -160,9 +177,7 @@ public static List<Edge> extractGraphEdges( createdActor, updatedOn, updatedActor, - properties - ) - ); + properties)); } catch (URISyntaxException e) { log.error("Invalid destination urn: {}", fieldValue, e); } @@ -183,7 +198,6 @@ public static Edge mergeEdges(@Nonnull final Edge oldEdge, @Nonnull final Edge n null, newEdge.getUpdatedOn(), newEdge.getUpdatedActor(), - newEdge.getProperties() - ); + newEdge.getProperties()); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java index 6f0ac4bc2f904..b3e713a906d01 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java @@ -18,9 +18,7 @@ import org.apache.commons.collections.CollectionUtils; public interface GraphService { - /** - * Return lineage registry to construct graph index - */ + /** Return lineage registry to construct graph index */ LineageRegistry getLineageRegistry(); /** @@ -29,157 +27,207 @@ public interface GraphService { void addEdge(final Edge edge); /** - * Adds or updates an edge to the graph. This creates the source and destination nodes, if they do not exist. + * Adds or updates an edge to the graph. This creates the source and destination nodes, if they do + * not exist. */ void upsertEdge(final Edge edge); /** * Remove an edge from the graph. + * * @param edge the edge to delete */ void removeEdge(final Edge edge); /** - * Find related entities (nodes) connected to a source entity via edges of given relationship types. Related entities - * can be filtered by source and destination type (use `null` for any type), by source and destination entity filter - * and relationship filter. Pagination of the result is controlled via `offset` and `count`. + * Find related entities (nodes) connected to a source entity via edges of given relationship + * types. Related entities can be filtered by source and destination type (use `null` for any + * type), by source and destination entity filter and relationship filter. Pagination of the + * result is controlled via `offset` and `count`. * - * Starting from a node as the source entity, determined by `sourceType` and `sourceEntityFilter`, - * related entities are found along the direction of edges (`RelationshipDirection.OUTGOING`) or in opposite - * direction of edges (`RelationshipDirection.INCOMING`). The destination entities are further filtered by `destinationType` - * and `destinationEntityFilter`, and then returned as related entities. + * <p>Starting from a node as the source entity, determined by `sourceType` and + * `sourceEntityFilter`, related entities are found along the direction of edges + * (`RelationshipDirection.OUTGOING`) or in opposite direction of edges + * (`RelationshipDirection.INCOMING`). The destination entities are further filtered by + * `destinationType` and `destinationEntityFilter`, and then returned as related entities. * - * This does not return duplicate related entities, even if entities are connected to source entities via multiple edges. - * An empty list of relationship types returns an empty result. + * <p>This does not return duplicate related entities, even if entities are connected to source + * entities via multiple edges. 
An empty list of relationship types returns an empty result. * - * In other words, the source and destination entity is not to be understood as the source and destination of the edge, - * but as the source and destination of "finding related entities", where always the destination entities are returned. - * This understanding is important when it comes to `RelationshipDirection.INCOMING`. The origin of the edge becomes - * the destination entity and the source entity is where the edge points to. + * <p>In other words, the source and destination entity is not to be understood as the source and + * destination of the edge, but as the source and destination of "finding related entities", where + * always the destination entities are returned. This understanding is important when it comes to + * `RelationshipDirection.INCOMING`. The origin of the edge becomes the destination entity and the + * source entity is where the edge points to. * - * Example I: - * dataset one --DownstreamOf-> dataset two --DownstreamOf-> dataset three + * <p>Example I: dataset one --DownstreamOf-> dataset two --DownstreamOf-> dataset three * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset two") - * - RelatedEntity("DownstreamOf", "dataset three") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset two") - RelatedEntity("DownstreamOf", "dataset three") * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset one") - * - RelatedEntity("DownstreamOf", "dataset two") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset one") - RelatedEntity("DownstreamOf", "dataset two") * - * Example II: - * dataset one --HasOwner-> user one + * <p>Example II: dataset one --HasOwner-> user one * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - * - RelatedEntity("HasOwner", "user one") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], + * RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - + * RelatedEntity("HasOwner", "user one") * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - * - RelatedEntity("HasOwner", "dataset one") + * <p>findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], + * RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - + * RelatedEntity("HasOwner", "dataset one") * - * Calling this method with {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` + * <p>Calling this method with {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` * is equivalent to the union of `OUTGOING` and `INCOMING` (without duplicates). 
* - * Example III: - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.UNDIRECTED), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset one") - * - RelatedEntity("DownstreamOf", "dataset two") - * - RelatedEntity("DownstreamOf", "dataset three") + * <p>Example III: findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.UNDIRECTED), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset one") - RelatedEntity("DownstreamOf", "dataset two") - + * RelatedEntity("DownstreamOf", "dataset three") */ @Nonnull - RelatedEntitiesResult findRelatedEntities(@Nullable final List<String> sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List<String> destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter, - final int offset, final int count); - + RelatedEntitiesResult findRelatedEntities( + @Nullable final List<String> sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List<String> destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List<String> relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count); /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops) { return getLineage( entityUrn, direction, - new GraphFilters(new ArrayList(getLineageRegistry().getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), + new GraphFilters( + new ArrayList( + getLineageRegistry() + .getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), offset, count, - maxHops - ); + maxHops); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { return getLineage( entityUrn, direction, - new GraphFilters(new 
ArrayList(getLineageRegistry().getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), + new GraphFilters( + new ArrayList( + getLineageRegistry() + .getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), offset, count, maxHops, startTimeMillis, - endTimeMillis - ); + endTimeMillis); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If entityTypes is not empty, - * will only return edges to entities that are within the entity types set. - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If + * entityTypes is not empty, will only return edges to entities that are within the entity types + * set. Abstracts away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops) { return getLineage(entityUrn, direction, graphFilters, offset, count, maxHops, null, null); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If entityTypes is not empty, - * will only return edges to entities that are within the entity types set. - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If + * entityTypes is not empty, will only return edges to entities that are within the entity types + * set. 
Abstracts away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + * <p>Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops, @Nullable Long startTimeMillis, + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { if (maxHops > 1) { maxHops = 1; } List<LineageRegistry.EdgeInfo> edgesToFetch = getLineageRegistry().getLineageRelationships(entityUrn.getEntityType(), direction); - Map<Boolean, List<LineageRegistry.EdgeInfo>> edgesByDirection = edgesToFetch.stream() - .collect(Collectors.partitioningBy(edgeInfo -> edgeInfo.getDirection() == RelationshipDirection.OUTGOING)); - EntityLineageResult result = new EntityLineageResult().setStart(offset) - .setCount(count) - .setRelationships(new LineageRelationshipArray()) - .setTotal(0); + Map<Boolean, List<LineageRegistry.EdgeInfo>> edgesByDirection = + edgesToFetch.stream() + .collect( + Collectors.partitioningBy( + edgeInfo -> edgeInfo.getDirection() == RelationshipDirection.OUTGOING)); + EntityLineageResult result = + new EntityLineageResult() + .setStart(offset) + .setCount(count) + .setRelationships(new LineageRelationshipArray()) + .setTotal(0); Set<String> visitedUrns = new HashSet<>(); // Outgoing edges if (!CollectionUtils.isEmpty(edgesByDirection.get(true))) { List<String> relationshipTypes = new ArrayList( - edgesByDirection.get(true).stream().map(LineageRegistry.EdgeInfo::getType).collect(Collectors.toSet())); + edgesByDirection.get(true).stream() + .map(LineageRegistry.EdgeInfo::getType) + .collect(Collectors.toSet())); // Fetch outgoing edges RelatedEntitiesResult outgoingEdges = - findRelatedEntities(null, QueryUtils.newFilter("urn", entityUrn.toString()), graphFilters.getAllowedEntityTypes(), + findRelatedEntities( + null, + QueryUtils.newFilter("urn", entityUrn.toString()), + graphFilters.getAllowedEntityTypes(), QueryUtils.EMPTY_FILTER, - relationshipTypes, QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING), offset, + relationshipTypes, + QueryUtils.newRelationshipFilter( + QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING), + offset, count); // Update offset and count to fetch the correct number of incoming edges below @@ -187,39 +235,59 @@ default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageD count = Math.max(0, count - outgoingEdges.getEntities().size()); result.setTotal(result.getTotal() + outgoingEdges.getTotal()); - outgoingEdges.getEntities().forEach(entity -> { - visitedUrns.add(entity.getUrn()); - try { - result.getRelationships() - .add(new LineageRelationship().setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType())); - } catch (URISyntaxException ignored) { - } - }); + outgoingEdges + .getEntities() + .forEach( + entity -> { + visitedUrns.add(entity.getUrn()); + try { + result + .getRelationships() + .add( + new LineageRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType())); + } catch (URISyntaxException ignored) { + } + }); } // Incoming edges if 
(!CollectionUtils.isEmpty(edgesByDirection.get(false))) { List<String> relationshipTypes = - edgesByDirection.get(false).stream().map(LineageRegistry.EdgeInfo::getType).collect(Collectors.toList()); + edgesByDirection.get(false).stream() + .map(LineageRegistry.EdgeInfo::getType) + .collect(Collectors.toList()); RelatedEntitiesResult incomingEdges = - findRelatedEntities(null, QueryUtils.newFilter("urn", entityUrn.toString()), graphFilters.getAllowedEntityTypes(), + findRelatedEntities( + null, + QueryUtils.newFilter("urn", entityUrn.toString()), + graphFilters.getAllowedEntityTypes(), QueryUtils.EMPTY_FILTER, - relationshipTypes, QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.INCOMING), offset, + relationshipTypes, + QueryUtils.newRelationshipFilter( + QueryUtils.EMPTY_FILTER, RelationshipDirection.INCOMING), + offset, count); result.setTotal(result.getTotal() + incomingEdges.getTotal()); - incomingEdges.getEntities().forEach(entity -> { - if (visitedUrns.contains(entity.getUrn())) { - return; - } - visitedUrns.add(entity.getUrn()); - try { - result.getRelationships() - .add(new LineageRelationship().setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType())); - } catch (URISyntaxException ignored) { - } - }); + incomingEdges + .getEntities() + .forEach( + entity -> { + if (visitedUrns.contains(entity.getUrn())) { + return; + } + visitedUrns.add(entity.getUrn()); + try { + result + .getRelationships() + .add( + new LineageRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType())); + } catch (URISyntaxException ignored) { + } + }); } return result; @@ -231,26 +299,26 @@ default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageD void removeNode(@Nonnull final Urn urn); /** - * Removes edges of the given relationship types from the given node after applying the relationship filter. + * Removes edges of the given relationship types from the given node after applying the + * relationship filter. * - * An empty list of relationship types removes nothing from the node. + * <p>An empty list of relationship types removes nothing from the node. * - * Calling this method with a {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` - * is equivalent to the union of `OUTGOING` and `INCOMING` (without duplicates). + * <p>Calling this method with a {@link RelationshipDirection} `UNDIRECTED` in + * `relationshipFilter` is equivalent to the union of `OUTGOING` and `INCOMING` (without + * duplicates). */ - void removeEdgesFromNode(@Nonnull final Urn urn, @Nonnull final List<String> relationshipTypes, + void removeEdgesFromNode( + @Nonnull final Urn urn, + @Nonnull final List<String> relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter); void configure(); - /** - * Removes all edges and nodes from the graph. - */ + /** Removes all edges and nodes from the graph. 
*/ void clear(); - /** - * Whether or not this graph service supports multi-hop - */ + /** Whether or not this graph service supports multi-hop */ default boolean supportsMultiHop() { return false; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java index 2975d100933fd..be1b55655f671 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java @@ -6,13 +6,9 @@ @AllArgsConstructor @Data public class RelatedEntity { - /** - * How the entity is related, along which edge. - */ + /** How the entity is related, along which edge. */ String relationshipType; - /** - * Urn associated with the related entity. - */ + /** Urn associated with the related entity. */ String urn; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java index 27cb7fdec22d3..5676dc9ebac54 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java @@ -12,7 +12,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class RecommendationsService { @@ -28,16 +27,20 @@ public RecommendationsService( } private void validateRecommendationSources(final List<RecommendationSource> candidateSources) { - final Map<String, Long> moduleIdCount = candidateSources.stream() - .collect(Collectors.groupingBy(RecommendationSource::getModuleId, Collectors.counting())); - List<String> moduleIdsWithDuplicates = moduleIdCount.entrySet() - .stream() - .filter(entry -> entry.getValue() > 1) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); + final Map<String, Long> moduleIdCount = + candidateSources.stream() + .collect( + Collectors.groupingBy(RecommendationSource::getModuleId, Collectors.counting())); + List<String> moduleIdsWithDuplicates = + moduleIdCount.entrySet().stream() + .filter(entry -> entry.getValue() > 1) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); if (!moduleIdsWithDuplicates.isEmpty()) { throw new IllegalArgumentException( - String.format("Found recommendations candidate sources with duplicate module IDs: %s", moduleIdsWithDuplicates.toString())); + String.format( + "Found recommendations candidate sources with duplicate module IDs: %s", + moduleIdsWithDuplicates.toString())); } } @@ -52,16 +55,23 @@ private void validateRecommendationSources(final List<RecommendationSource> cand @Nonnull @WithSpan public List<RecommendationModule> listRecommendations( - @Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext, - int limit) { + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext, int limit) { // Get recommendation candidates from sources which are eligible, in parallel - final List<RecommendationModule> candidateModules = ConcurrencyUtils.transformAndCollectAsync(_candidateSources.stream() - .filter(source -> source.isEligible(userUrn, requestContext)) - .collect(Collectors.toList()), source -> source.getRecommendationModule(userUrn, requestContext), (source, exception) -> { - log.error("Error while fetching candidate modules from source 
{}", source, exception); - return Optional.<RecommendationModule>empty(); - }).stream().filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList()); + final List<RecommendationModule> candidateModules = + ConcurrencyUtils.transformAndCollectAsync( + _candidateSources.stream() + .filter(source -> source.isEligible(userUrn, requestContext)) + .collect(Collectors.toList()), + source -> source.getRecommendationModule(userUrn, requestContext), + (source, exception) -> { + log.error( + "Error while fetching candidate modules from source {}", source, exception); + return Optional.<RecommendationModule>empty(); + }) + .stream() + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); // Rank recommendation modules, which determines their ordering during rendering return _moduleRanker.rank(candidateModules, userUrn, requestContext, limit); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java index 5aa097ccbb497..9392f50b4749e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java @@ -8,7 +8,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DomainsCandidateSource extends EntitySearchAggregationSource { @@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME; } @@ -53,4 +53,3 @@ protected boolean isValueUrn() { return true; } } - diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java index e1ebc6d5e97be..a19909576d25b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java @@ -27,49 +27,36 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; - /** * Base class for search aggregation based candidate source (e.g. 
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java
index 5aa097ccbb497..9392f50b4749e 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java
@@ -8,7 +8,6 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 public class DomainsCandidateSource extends EntitySearchAggregationSource {
 
@@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() {
   }
 
   @Override
-  public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
+  public boolean isEligible(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     return requestContext.getScenario() == ScenarioType.HOME;
   }
 
@@ -53,4 +53,3 @@ protected boolean isValueUrn() {
     return true;
   }
 }
-
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java
index e1ebc6d5e97be..a19909576d25b 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java
@@ -27,49 +27,36 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.tuple.Pair;
 
-
 /**
  * Base class for search aggregation based candidate source (e.g. top platform, top tags, top terms)
- * Aggregates entities based on field value in the entity search index and gets the value with the most documents
+ * Aggregates entities based on field value in the entity search index and gets the value with the
+ * most documents
  */
 @Slf4j
 @RequiredArgsConstructor
 public abstract class EntitySearchAggregationSource implements RecommendationSource {
   private final EntitySearchService _entitySearchService;
 
-  /**
-   * Field to aggregate on
-   */
+  /** Field to aggregate on */
   protected abstract String getSearchFieldName();
 
-  /**
-   * Max number of contents in module
-   */
+  /** Max number of contents in module */
   protected abstract int getMaxContent();
 
-  /**
-   * Whether the aggregate value is an urn
-   */
+  /** Whether the aggregate value is an urn */
   protected abstract boolean isValueUrn();
 
-  /**
-   * Whether the urn candidate is valid
-   */
+  /** Whether the urn candidate is valid */
   protected boolean isValidCandidateUrn(Urn urn) {
     return true;
   }
 
-  /**
-   * Whether the string candidate is valid
-   */
+  /** Whether the string candidate is valid */
   protected boolean isValidCandidateValue(String candidateValue) {
     return true;
   }
 
-  /**
-   * Whether the candidate is valid
-   * Calls different functions if candidate is an Urn
-   */
+  /** Whether the candidate is valid. Calls different functions if candidate is an Urn. */
   protected <T> boolean isValidCandidate(T candidate) {
     if (candidate instanceof Urn) {
       return isValidCandidateUrn((Urn) candidate);
@@ -79,10 +66,11 @@ protected <T> boolean isValidCandidate(T candidate) {
 
   @Override
   @WithSpan
-  public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn,
-      @Nullable RecommendationRequestContext requestContext) {
+  public List<RecommendationContent> getRecommendations(
+      @Nonnull Urn userUrn, @Nullable RecommendationRequestContext requestContext) {
     Map<String, Long> aggregationResult =
-        _entitySearchService.aggregateByValue(getEntityNames(), getSearchFieldName(), null, getMaxContent());
+        _entitySearchService.aggregateByValue(
+            getEntityNames(), getSearchFieldName(), null, getMaxContent());
 
     if (aggregationResult.isEmpty()) {
       return Collections.emptyList();
@@ -96,15 +84,21 @@ public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn,
     }
 
     // If the aggregated values are urns, convert key into urns
-    Map<Urn, Long> urnCounts = aggregationResult.entrySet().stream().map(entry -> {
-      try {
-        Urn tagUrn = Urn.createFromString(entry.getKey());
-        return Optional.of(Pair.of(tagUrn, entry.getValue()));
-      } catch (URISyntaxException e) {
-        log.error("Invalid tag urn {}", entry.getKey(), e);
-        return Optional.<Pair<Urn, Long>>empty();
-      }
-    }).filter(Optional::isPresent).map(Optional::get).collect(Collectors.toMap(Pair::getKey, Pair::getValue));
+    Map<Urn, Long> urnCounts =
+        aggregationResult.entrySet().stream()
+            .map(
+                entry -> {
+                  try {
+                    Urn tagUrn = Urn.createFromString(entry.getKey());
+                    return Optional.of(Pair.of(tagUrn, entry.getValue()));
+                  } catch (URISyntaxException e) {
+                    log.error("Invalid tag urn {}", entry.getKey(), e);
+                    return Optional.<Pair<Urn, Long>>empty();
+                  }
+                })
+            .filter(Optional::isPresent)
+            .map(Optional::get)
+            .collect(Collectors.toMap(Pair::getKey, Pair::getValue));
 
     if (urnCounts.isEmpty()) {
       return Collections.emptyList();
@@ -128,13 +122,16 @@ private <T> List<Map.Entry<T, Long>> getTopKValues(Map<T, Long> countMap) {
     for (Map.Entry<T, Long> entry : countMap.entrySet()) {
       if (queue.size() < getMaxContent() && isValidCandidate(entry.getKey())) {
         queue.add(entry);
-      } else if (queue.size() > 0 && queue.peek().getValue() < entry.getValue() && isValidCandidate(entry.getKey())) {
+      } else if (queue.size() > 0
+          && queue.peek().getValue() < entry.getValue()
+          && isValidCandidate(entry.getKey())) {
         queue.poll();
         queue.add(entry);
       }
     }
 
-    // Since priority queue polls in reverse order (nature of heaps), need to reverse order before returning
+    // Since priority queue polls in reverse order (nature of heaps), need to reverse order before
+    // returning
     final LinkedList<Map.Entry<T, Long>> topK = new LinkedList<>();
     while (!queue.isEmpty()) {
       topK.addFirst(queue.poll());
@@ -149,15 +146,25 @@ private Map<String, Long> mergeAggregation(Map<String, Long> first, Map<String,
 
   private <T> RecommendationContent buildRecommendationContent(T candidate, long count) {
     // Set filters for platform
-    SearchParams searchParams = new SearchParams().setQuery("")
-        .setFilters(new CriterionArray(
-            ImmutableList.of(new Criterion().setField(getSearchFieldName()).setValue(candidate.toString()))));
+    SearchParams searchParams =
+        new SearchParams()
+            .setQuery("")
+            .setFilters(
+                new CriterionArray(
+                    ImmutableList.of(
+                        new Criterion()
+                            .setField(getSearchFieldName())
+                            .setValue(candidate.toString()))));
     ContentParams contentParams = new ContentParams().setCount(count);
     RecommendationContent content = new RecommendationContent();
     if (candidate instanceof Urn) {
       content.setEntity((Urn) candidate);
     }
-    return content.setValue(candidate.toString())
-        .setParams(new RecommendationParams().setSearchParams(searchParams).setContentParams(contentParams));
+    return content
+        .setValue(candidate.toString())
+        .setParams(
+            new RecommendationParams()
+                .setSearchParams(searchParams)
+                .setContentParams(contentParams));
   }
 }
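An aside on the hunk above: getTopKValues keeps a size-bounded min-heap, evicting the smallest count, then reverses on drain to produce descending order. A self-contained sketch of the same pattern (standalone illustration; names are hypothetical, not from the patch):

    import java.util.*;

    class TopK {
      static List<Map.Entry<String, Long>> topK(Map<String, Long> counts, int k) {
        // Min-heap ordered by count, so the smallest retained value sits at the head.
        PriorityQueue<Map.Entry<String, Long>> queue =
            new PriorityQueue<>(k, Map.Entry.comparingByValue());
        for (Map.Entry<String, Long> entry : counts.entrySet()) {
          if (queue.size() < k) {
            queue.add(entry);
          } else if (queue.peek().getValue() < entry.getValue()) {
            queue.poll(); // evict the current minimum
            queue.add(entry);
          }
        }
        // Drain smallest-first, prepending to obtain descending order.
        LinkedList<Map.Entry<String, Long>> result = new LinkedList<>();
        while (!queue.isEmpty()) {
          result.addFirst(queue.poll());
        }
        return result;
      }
    }
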
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java
index 357a5df2edd44..e133e3dc75ff3 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java
@@ -33,7 +33,6 @@
 import org.opensearch.search.aggregations.bucket.terms.ParsedTerms;
 import org.opensearch.search.builder.SearchSourceBuilder;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class RecentlySearchedSource implements RecommendationSource {
@@ -60,11 +59,16 @@ public RecommendationRenderType getRenderType() {
   }
 
   @Override
-  public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
+  public boolean isEligible(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     boolean analyticsEnabled = false;
     try {
-      analyticsEnabled = _searchClient.indices()
-          .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT);
+      analyticsEnabled =
+          _searchClient
+              .indices()
+              .exists(
+                  new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)),
+                  RequestOptions.DEFAULT);
     } catch (IOException e) {
       log.error("Failed to check whether DataHub usage index exists");
     }
@@ -72,15 +76,15 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo
   }
 
   @Override
-  public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn,
-      @Nonnull RecommendationRequestContext requestContext) {
+  public List<RecommendationContent> getRecommendations(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     SearchRequest searchRequest = buildSearchRequest(userUrn);
     try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlySearched").time()) {
-      final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT);
+      final SearchResponse searchResponse =
+          _searchClient.search(searchRequest, RequestOptions.DEFAULT);
       // extract results
       ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME);
-      return parsedTerms.getBuckets()
-          .stream()
+      return parsedTerms.getBuckets().stream()
           .map(bucket -> buildContent(bucket.getKeyAsString()))
           .filter(Optional::isPresent)
           .map(Optional::get)
@@ -97,20 +101,26 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
     SearchSourceBuilder source = new SearchSourceBuilder();
     BoolQueryBuilder query = QueryBuilders.boolQuery();
     // Filter for the entity view events of the user requesting recommendation
-    query.must(QueryBuilders.termQuery(DataHubUsageEventConstants.ACTOR_URN + ".keyword", userUrn.toString()));
-    query.must(QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE,
-        DataHubUsageEventType.SEARCH_RESULTS_VIEW_EVENT.getType()));
+    query.must(
+        QueryBuilders.termQuery(
+            DataHubUsageEventConstants.ACTOR_URN + ".keyword", userUrn.toString()));
+    query.must(
+        QueryBuilders.termQuery(
+            DataHubUsageEventConstants.TYPE,
+            DataHubUsageEventType.SEARCH_RESULTS_VIEW_EVENT.getType()));
     query.must(QueryBuilders.rangeQuery("total").gt(0));
     query.must(QueryBuilders.existsQuery(DataHubUsageEventConstants.QUERY));
     source.query(query);
 
     // Find the entity with the largest last viewed timestamp
     String lastSearched = "last_searched";
-    AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME)
-        .field(DataHubUsageEventConstants.QUERY + ".keyword")
-        .size(MAX_CONTENT * 2) // Fetch more than max to account for post-filtering
-        .order(BucketOrder.aggregation(lastSearched, false))
-        .subAggregation(AggregationBuilders.max(lastSearched).field(DataHubUsageEventConstants.TIMESTAMP));
+    AggregationBuilder aggregation =
+        AggregationBuilders.terms(ENTITY_AGG_NAME)
+            .field(DataHubUsageEventConstants.QUERY + ".keyword")
+            .size(MAX_CONTENT * 2) // Fetch more than max to account for post-filtering
+            .order(BucketOrder.aggregation(lastSearched, false))
+            .subAggregation(
+                AggregationBuilders.max(lastSearched).field(DataHubUsageEventConstants.TIMESTAMP));
     source.aggregation(aggregation);
     source.size(0);
 
@@ -127,7 +137,10 @@ private Optional<RecommendationContent> buildContent(@Nonnull String query) {
     if (isQueryInvalid(query)) {
       return Optional.empty();
     }
-    return Optional.of(new RecommendationContent().setValue(query)
-        .setParams(new RecommendationParams().setSearchParams(new SearchParams().setQuery(query))));
+    return Optional.of(
+        new RecommendationContent()
+            .setValue(query)
+            .setParams(
+                new RecommendationParams().setSearchParams(new SearchParams().setQuery(query))));
   }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java
index 7d43e3652b492..788ef728e294f 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java
@@ -11,25 +11,16 @@
 import java.util.Optional;
 import javax.annotation.Nonnull;
 
-
-/**
- * Base interface for defining a candidate source for recommendation module
- */
+/** Base interface for defining a candidate source for recommendation module */
 public interface RecommendationSource {
 
-  /**
-   * Returns the title of the module that is sourced (used in rendering)
-   */
+  /** Returns the title of the module that is sourced (used in rendering) */
   String getTitle();
 
-  /**
-   * Returns a unique module id associated with the module
-   */
+  /** Returns a unique module id associated with the module */
   String getModuleId();
 
-  /**
-   * Returns the template type used for rendering recommendations from this module
-   */
+  /** Returns the template type used for rendering recommendations from this module */
   RecommendationRenderType getRenderType();
 
   /**
@@ -49,7 +40,8 @@ public interface RecommendationSource {
    * @return list of recommendation candidates
    */
   @WithSpan
-  List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext);
+  List<RecommendationContent> getRecommendations(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext);
 
   /**
    * Get the full recommendations module itself provided the request context.
@@ -59,8 +51,7 @@ public interface RecommendationSource {
    * @return list of recommendation candidates
    */
   default Optional<RecommendationModule> getRecommendationModule(
-      @Nonnull Urn userUrn,
-      @Nonnull RecommendationRequestContext requestContext) {
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     if (!isEligible(userUrn, requestContext)) {
       return Optional.empty();
     }
@@ -70,9 +61,11 @@ default Optional<RecommendationModule> getRecommendationModule(
       return Optional.empty();
     }
 
-    return Optional.of(new RecommendationModule().setTitle(getTitle())
-        .setModuleId(getModuleId())
-        .setRenderType(getRenderType())
-        .setContent(new RecommendationContentArray(recommendations)));
+    return Optional.of(
+        new RecommendationModule()
+            .setTitle(getTitle())
+            .setModuleId(getModuleId())
+            .setRenderType(getRenderType())
+            .setContent(new RecommendationContentArray(recommendations)));
   }
 }
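For orientation, the interface above needs only five methods from an implementer; eligibility gating and module assembly come from the default getRecommendationModule. A minimal hedged sketch (hypothetical class, title, and module id; the render type value is assumed from the enum used elsewhere in this patch):

    public class StaticLinksSource implements RecommendationSource {
      @Override
      public String getTitle() {
        return "Useful Links";
      }

      @Override
      public String getModuleId() {
        return "StaticLinks"; // must be unique across sources (see validation earlier)
      }

      @Override
      public RecommendationRenderType getRenderType() {
        return RecommendationRenderType.SEARCH_QUERY_LIST;
      }

      @Override
      public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext ctx) {
        return ctx.getScenario() == ScenarioType.HOME;
      }

      @Override
      public List<RecommendationContent> getRecommendations(
          @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext ctx) {
        return java.util.Collections.singletonList(
            new RecommendationContent().setValue("getting-started"));
      }
    }
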
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java
index 3fd2b599b4d39..1fa47d1a13645 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java
@@ -4,7 +4,6 @@
 import java.util.Set;
 import javax.annotation.Nonnull;
 
-
 public class RecommendationUtils {
 
   /**
@@ -14,10 +13,11 @@ public class RecommendationUtils {
    * @param entityTypes the set of valid entity types
    * @return true if the type of the urn is in the set of valid entity types, false otherwise.
    */
-  public static boolean isSupportedEntityType(@Nonnull final Urn urn, @Nonnull final Set<String> entityTypes) {
+  public static boolean isSupportedEntityType(
+      @Nonnull final Urn urn, @Nonnull final Set<String> entityTypes) {
     final String entityType = urn.getEntityType();
     return entityTypes.contains(entityType);
   }
-
-  private RecommendationUtils() { }
+
+  private RecommendationUtils() {}
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java
index 9562440889f63..3012e35baa607 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java
@@ -14,29 +14,29 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 public class TopPlatformsSource extends EntitySearchAggregationSource {
 
   /**
-   * Set of entities that we want to consider for defining the top platform sources.
-   * This must match SearchUtils.SEARCHABLE_ENTITY_TYPES
+   * Set of entities that we want to consider for defining the top platform sources. This must match
+   * SearchUtils.SEARCHABLE_ENTITY_TYPES
    */
-  private static final List<String> SEARCHABLE_ENTITY_TYPES = ImmutableList.of(
-      Constants.DATASET_ENTITY_NAME,
-      Constants.DASHBOARD_ENTITY_NAME,
-      Constants.CHART_ENTITY_NAME,
-      Constants.ML_MODEL_ENTITY_NAME,
-      Constants.ML_MODEL_GROUP_ENTITY_NAME,
-      Constants.ML_FEATURE_TABLE_ENTITY_NAME,
-      Constants.ML_FEATURE_ENTITY_NAME,
-      Constants.ML_PRIMARY_KEY_ENTITY_NAME,
-      Constants.DATA_FLOW_ENTITY_NAME,
-      Constants.DATA_JOB_ENTITY_NAME,
-      Constants.TAG_ENTITY_NAME,
-      Constants.CONTAINER_ENTITY_NAME,
-      Constants.NOTEBOOK_ENTITY_NAME
-  );
+  private static final List<String> SEARCHABLE_ENTITY_TYPES =
+      ImmutableList.of(
+          Constants.DATASET_ENTITY_NAME,
+          Constants.DASHBOARD_ENTITY_NAME,
+          Constants.CHART_ENTITY_NAME,
+          Constants.ML_MODEL_ENTITY_NAME,
+          Constants.ML_MODEL_GROUP_ENTITY_NAME,
+          Constants.ML_FEATURE_TABLE_ENTITY_NAME,
+          Constants.ML_FEATURE_ENTITY_NAME,
+          Constants.ML_PRIMARY_KEY_ENTITY_NAME,
+          Constants.DATA_FLOW_ENTITY_NAME,
+          Constants.DATA_JOB_ENTITY_NAME,
+          Constants.TAG_ENTITY_NAME,
+          Constants.CONTAINER_ENTITY_NAME,
+          Constants.NOTEBOOK_ENTITY_NAME);
+
   private final EntityService _entityService;
 
   private static final String PLATFORM = "platform";
@@ -61,7 +61,8 @@ public RecommendationRenderType getRenderType() {
   }
 
   @Override
-  public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
+  public boolean isEligible(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     return requestContext.getScenario() == ScenarioType.HOME;
   }
 
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java
index 6563ea7dc4f91..317f956e1ca8a 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java
@@ -8,7 +8,6 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 public class TopTagsSource extends EntitySearchAggregationSource {
 
@@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() {
   }
 
   @Override
-  public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
+  public boolean isEligible(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     return requestContext.getScenario() == ScenarioType.HOME
         || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS;
   }
 
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java
index e885208a8b6db..6cdb5fdb65911 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java
@@ -8,7 +8,6 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 public class TopTermsSource extends EntitySearchAggregationSource {
 
@@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() {
   }
 
   @Override
-  public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
+  public boolean isEligible(
+      @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) {
     return requestContext.getScenario() == ScenarioType.HOME
         || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS;
   }
 
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java
index 7eae2e949d028..f09f83fd6ec25 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java
@@ -6,7 +6,6 @@
 import java.util.List;
 import javax.annotation.Nonnull;
 
-
 public interface RecommendationModuleRanker {
   /**
    * Rank and return the final list of modules
@@ -17,6 +16,9 @@ public interface RecommendationModuleRanker {
    * @param limit Max number of modules to return
    * @return ranked list of modules
    */
-  List<RecommendationModule> rank(@Nonnull List<RecommendationModule> candidates, @Nonnull Urn userUrn,
-      @Nonnull RecommendationRequestContext requestContext, int limit);
+  List<RecommendationModule> rank(
+      @Nonnull List<RecommendationModule> candidates,
+      @Nonnull Urn userUrn,
+      @Nonnull RecommendationRequestContext requestContext,
+      int limit);
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java
index cefb9aec5ac51..13bc5af91c9e9 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java
@@ -7,11 +7,13 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 public class SimpleRecommendationRanker implements RecommendationModuleRanker {
   @Override
-  public List<RecommendationModule> rank(@Nonnull List<RecommendationModule> candidates, @Nonnull Urn userUrn,
-      @Nullable RecommendationRequestContext requestContext, int limit) {
+  public List<RecommendationModule> rank(
+      @Nonnull List<RecommendationModule> candidates,
+      @Nonnull Urn userUrn,
+      @Nullable RecommendationRequestContext requestContext,
+      int limit) {
     return candidates.subList(0, Math.min(candidates.size(), limit));
   }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java
index 0a0be60969486..9b5630875cd15 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java
@@ -4,7 +4,8 @@
 import org.apache.avro.Schema;
 
 /**
- * Internal Service logic to be used to emulate Confluent's Schema Registry component within DataHub.
+ * Internal Service logic to be used to emulate Confluent's Schema Registry component within
+ * DataHub.
  */
 public interface SchemaRegistryService {
 
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java
index 8f7403c6aa428..6e6671c08242b 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java
@@ -18,7 +18,6 @@
 import lombok.Getter;
 import org.apache.avro.Schema;
 
-
 public class SchemaRegistryServiceImpl implements SchemaRegistryService {
 
   @AllArgsConstructor
@@ -33,8 +32,7 @@ private enum TopicOrdinal {
     MAE_TOPIC(MetadataAuditEvent.getClassSchema()),
     DUHE_TOPIC(DataHubUpgradeHistoryEvent.getClassSchema());
 
-    @Getter
-    private final Schema schema;
+    @Getter private final Schema schema;
   }
 
   private final Map<String, Schema> _schemaMap;
@@ -44,28 +42,45 @@ private enum TopicOrdinal {
   public SchemaRegistryServiceImpl(final TopicConvention convention) {
     this._schemaMap = new HashMap<>();
     this._subjectToIdMap = HashBiMap.create();
-    this._schemaMap.put(convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.ordinal());
-    this._schemaMap.put(convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.ordinal());
-    this._schemaMap.put(convention.getMetadataChangeLogTimeseriesTopicName(),
+    this._schemaMap.put(
+        convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.getSchema());
+    this._subjectToIdMap.put(
+        convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.ordinal());
+    this._schemaMap.put(
+        convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.getSchema());
+    this._subjectToIdMap.put(
+        convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.ordinal());
+    this._schemaMap.put(
+        convention.getMetadataChangeLogTimeseriesTopicName(),
         TopicOrdinal.MCL_TIMESERIES_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getMetadataChangeLogTimeseriesTopicName(),
+    this._subjectToIdMap.put(
+        convention.getMetadataChangeLogTimeseriesTopicName(),
         TopicOrdinal.MCL_TIMESERIES_TOPIC.ordinal());
-    this._schemaMap.put(convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.ordinal());
+    this._schemaMap.put(
+        convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.getSchema());
+    this._subjectToIdMap.put(
+        convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.ordinal());
     this._schemaMap.put(convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.ordinal());
-    this._schemaMap.put(convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.ordinal());
+    this._subjectToIdMap.put(
+        convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.ordinal());
+    this._schemaMap.put(
+        convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.getSchema());
+    this._subjectToIdMap.put(
+        convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.ordinal());
     // Adding legacy topics as they are still produced in the EntityService IngestAspect code path.
-    this._schemaMap.put(convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.ordinal());
-    this._schemaMap.put(convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.ordinal());
-    this._schemaMap.put(convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.getSchema());
-    this._subjectToIdMap.put(convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.ordinal());
+    this._schemaMap.put(
+        convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.getSchema());
+    this._subjectToIdMap.put(
+        convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.ordinal());
+    this._schemaMap.put(
+        convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.getSchema());
+    this._subjectToIdMap.put(
+        convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.ordinal());
+    this._schemaMap.put(
+        convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.getSchema());
+    this._subjectToIdMap.put(
+        convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.ordinal());
   }
 
   @Override
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java
index 9eb67ca25dd8b..e7a115d1a0518 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java
@@ -4,22 +4,15 @@
 import lombok.AllArgsConstructor;
 import lombok.Data;
 
-
 @Data
 @AllArgsConstructor
 public class ResourceReference {
 
-  /**
-   * The urn of an entity
-   */
+  /** The urn of an entity */
   Urn urn;
 
-  /**
-   * The type of the SubResource
-   */
+  /** The type of the SubResource */
   SubResourceType subResourceType;
 
-  /**
-   * The subresource being targeted
-   */
+  /** The subresource being targeted */
   String subResource;
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java
index 6a23158aa1fd9..042c6d1407a13 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java
@@ -1,8 +1,6 @@
 package com.linkedin.metadata.resource;
 
 public enum SubResourceType {
-  /**
-   * A field in a dataset
-   */
+  /** A field in a dataset */
   DATASET_FIELD
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java
index 9cd865bd888e2..09a63e769f025 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java
@@ -7,20 +7,16 @@
 import com.linkedin.metadata.query.SearchFlags;
 import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.filter.SortCriterion;
-
 import java.util.List;
 import java.util.Map;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 public interface EntitySearchService {
 
   void configure();
 
-  /**
-   * Clear all data within the service
-   */
+  /** Clear all data within the service */
   void clear();
 
   /**
@@ -30,7 +26,6 @@ public interface EntitySearchService {
    */
   long docCount(@Nonnull String entityName);
 
-
   /**
    * Updates or inserts the given search document.
    *
@@ -58,64 +53,90 @@ public interface EntitySearchService {
   void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable String runId);
 
   /**
-   * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the
-   * search hits and not the aggregation results.
+   * Gets a list of documents that match given search request. The results are aggregated and
+   * filters are applied to the search hits and not the aggregation results.
    *
-   * Safe for non-structured, user input, queries with an attempt to provide some advanced features
-   * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a>
+   * <p>Safe for non-structured, user input, queries with an attempt to provide some advanced
+   * features <a
+   * href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a>
    *
    * @param entityNames names of the entities
    * @param input the search input text
-   * @param postFilters the request map with fields and values as filters to be applied to search hits
+   * @param postFilters the request map with fields and values as filters to be applied to search
+   *     hits
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param from index to start the search from
    * @param size the number of search hits to return
    * @param searchFlags flags controlling search options
-   * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link SearchResult} that contains a list of matched documents and related search
+   *     result metadata
    */
  @Nonnull
-  SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters,
-      @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags);
+  SearchResult search(
+      @Nonnull List<String> entityNames,
+      @Nonnull String input,
+      @Nullable Filter postFilters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size,
+      @Nullable SearchFlags searchFlags);
 
   /**
-   * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the
-   * search hits and not the aggregation results.
+   * Gets a list of documents that match given search request. The results are aggregated and
+   * filters are applied to the search hits and not the aggregation results.
    *
-   * Safe for non-structured, user input, queries with an attempt to provide some advanced features
-   * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a>
+   * <p>Safe for non-structured, user input, queries with an attempt to provide some advanced
+   * features <a
+   * href="https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-simple-query-string-query.html">Impl</a>
    *
    * @param entityNames names of the entities
    * @param input the search input text
-   * @param postFilters the request map with fields and values as filters to be applied to search hits
+   * @param postFilters the request map with fields and values as filters to be applied to search
+   *     hits
   * @param sortCriterion {@link SortCriterion} to be applied to search results
   * @param from index to start the search from
   * @param size the number of search hits to return
   * @param searchFlags flags controlling search options
   * @param facets list of facets we want aggregations for
-   * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link SearchResult} that contains a list of matched documents and related search
+   *     result metadata
   */
  @Nonnull
-  SearchResult search(@Nonnull List<String> entityNames, @Nonnull String input, @Nullable Filter postFilters,
-      @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List<String> facets);
+  SearchResult search(
+      @Nonnull List<String> entityNames,
+      @Nonnull String input,
+      @Nullable Filter postFilters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size,
+      @Nullable SearchFlags searchFlags,
+      @Nullable List<String> facets);
 
   /**
    * Gets a list of documents after applying the input filters.
    *
    * @param entityName name of the entity
-   * @param filters the request map with fields and values to be applied as filters to the search query
+   * @param filters the request map with fields and values to be applied as filters to the search
+   *     query
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param from index to start the search from
    * @param size number of search hits to return
-   * @return a {@link SearchResult} that contains a list of filtered documents and related search result metadata
+   * @return a {@link SearchResult} that contains a list of filtered documents and related search
+   *     result metadata
   */
  @Nonnull
-  SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, @Nullable SortCriterion sortCriterion,
-      int from, int size);
+  SearchResult filter(
+      @Nonnull String entityName,
+      @Nullable Filter filters,
+      @Nullable SortCriterion sortCriterion,
+      int from,
+      int size);
 
   /**
    * Returns a list of suggestions given type ahead query.
    *
-   * <p>The advanced auto complete can take filters and provides suggestions based on filtered context.
+   * <p>The advanced auto complete can take filters and provides suggestions based on filtered
+   * context.
    *
    * @param entityName name of the entity
    * @param query the type ahead query text
@@ -125,21 +146,29 @@ SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, @Nulla
    * @return A list of suggestions as string
    */
   @Nonnull
-  AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field,
-      @Nullable Filter requestParams, int limit);
+  AutoCompleteResult autoComplete(
+      @Nonnull String entityName,
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter requestParams,
+      int limit);
 
   /**
    * Returns number of documents per field value given the field and filters
    *
-   * @param entityNames list of name of entities to aggregate across, if empty aggregate over all entities
+   * @param entityNames list of name of entities to aggregate across, if empty aggregate over all
+   *     entities
    * @param field the field name for aggregate
    * @param requestParams filters to apply before aggregating
    * @param limit the number of aggregations to return
    * @return
    */
   @Nonnull
-  Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull String field,
-      @Nullable Filter requestParams, int limit);
+  Map<String, Long> aggregateByValue(
+      @Nullable List<String> entityNames,
+      @Nonnull String field,
+      @Nullable Filter requestParams,
+      int limit);
 
   /**
    * Gets a list of groups/entities that match given browse request.
@@ -152,7 +181,11 @@ Map<String, Long> aggregateByValue(@Nullable List<String> entityNames, @Nonnull
    * @return a {@link BrowseResult} that contains a list of groups/entities
    */
   @Nonnull
-  BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter requestParams, int from,
+  BrowseResult browse(
+      @Nonnull String entityName,
+      @Nonnull String path,
+      @Nullable Filter requestParams,
+      int from,
       int size);
 
   /**
@@ -166,7 +199,13 @@ BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable
    * @param count max number of results requested
    */
   @Nonnull
-  public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count);
+  public BrowseResultV2 browseV2(
+      @Nonnull String entityName,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count);
 
   /**
    * Gets a list of paths for a given urn.
@@ -179,41 +218,57 @@ BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable
   List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn);
 
   /**
-   * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the
-   * search hits and not the aggregation results.
+   * Gets a list of documents that match given search request. The results are aggregated and
+   * filters are applied to the search hits and not the aggregation results.
    *
    * @param entities name of the entities to scroll across
    * @param input the search input text
-   * @param postFilters the request map with fields and values as filters to be applied to search hits
+   * @param postFilters the request map with fields and values as filters to be applied to search
+   *     hits
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param scrollId opaque scroll identifier to pass to search service
    * @param size the number of search hits to return
    * @param searchFlags flags controlling search options
-   * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link ScrollResult} that contains a list of matched documents and related search
+   *     result metadata
   */
  @Nonnull
-  ScrollResult fullTextScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters,
-      @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags);
+  ScrollResult fullTextScroll(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter postFilters,
+      @Nullable SortCriterion sortCriterion,
+      @Nullable String scrollId,
+      @Nullable String keepAlive,
+      int size,
+      @Nullable SearchFlags searchFlags);
 
   /**
-   * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the
-   * search hits and not the aggregation results.
+   * Gets a list of documents that match given search request. The results are aggregated and
+   * filters are applied to the search hits and not the aggregation results.
    *
    * @param entities name of the entities to scroll across
    * @param input the search input text
-   * @param postFilters the request map with fields and values as filters to be applied to search hits
+   * @param postFilters the request map with fields and values as filters to be applied to search
+   *     hits
    * @param sortCriterion {@link SortCriterion} to be applied to search results
    * @param scrollId opaque scroll identifier to pass to search service
    * @param size the number of search hits to return
    * @param searchFlags flags controlling search options
-   * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata
+   * @return a {@link ScrollResult} that contains a list of matched documents and related search
+   *     result metadata
   */
  @Nonnull
-  ScrollResult structuredScroll(@Nonnull List<String> entities, @Nonnull String input, @Nullable Filter postFilters,
-      @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags);
+  ScrollResult structuredScroll(
+      @Nonnull List<String> entities,
+      @Nonnull String input,
+      @Nullable Filter postFilters,
+      @Nullable SortCriterion sortCriterion,
+      @Nullable String scrollId,
+      @Nullable String keepAlive,
+      int size,
+      @Nullable SearchFlags searchFlags);
 
-  /**
-   * Max result size returned by the underlying search backend
-   */
+  /** Max result size returned by the underlying search backend */
   int maxResultSize();
 }
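To make the aggregation contract above concrete, here is a hedged usage sketch of aggregateByValue (variable names and the "dataset"/"tags" arguments are illustrative assumptions, not taken from the patch):

    // Top 10 tag buckets across dataset documents, with no pre-filter.
    Map<String, Long> tagCounts =
        entitySearchService.aggregateByValue(
            java.util.Collections.singletonList("dataset"), // entity names; null means all
            "tags", // field to bucket on
            null, // no filter applied before aggregating
            10); // max number of buckets
    tagCounts.forEach((tag, count) -> System.out.printf("%s -> %d docs%n", tag, count));
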
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java
index 31b94425d6815..842cc51e11777 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.search.utils;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.ModelUtils;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.data.template.RecordTemplate;
@@ -22,15 +24,11 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class QueryUtils {
 
   public static final Filter EMPTY_FILTER = new Filter().setOr(new ConjunctiveCriterionArray());
 
-  private QueryUtils() {
-  }
+  private QueryUtils() {}
 
   // Creates new Criterion with field and value, using EQUAL condition.
   @Nonnull
@@ -40,23 +38,31 @@ public static Criterion newCriterion(@Nonnull String field, @Nonnull String valu
 
   // Creates new Criterion with field, value and condition.
   @Nonnull
-  public static Criterion newCriterion(@Nonnull String field, @Nonnull String value, @Nonnull Condition condition) {
-    return new Criterion().setField(field).setValue(value).setValues(new StringArray(ImmutableList.of(value))).setCondition(condition);
+  public static Criterion newCriterion(
+      @Nonnull String field, @Nonnull String value, @Nonnull Condition condition) {
+    return new Criterion()
+        .setField(field)
+        .setValue(value)
+        .setValues(new StringArray(ImmutableList.of(value)))
+        .setCondition(condition);
   }
 
-  // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL condition (default).
+  // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL
+  // condition (default).
   @Nonnull
   public static Filter newFilter(@Nullable Map<String, String> params) {
     if (params == null) {
       return EMPTY_FILTER;
     }
-    CriterionArray criteria = params.entrySet()
-        .stream()
-        .filter(e -> Objects.nonNull(e.getValue()))
-        .map(e -> newCriterion(e.getKey(), e.getValue()))
-        .collect(Collectors.toCollection(CriterionArray::new));
-    return new Filter().setOr(
-        new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria))));
+    CriterionArray criteria =
+        params.entrySet().stream()
+            .filter(e -> Objects.nonNull(e.getValue()))
+            .map(e -> newCriterion(e.getKey(), e.getValue()))
+            .collect(Collectors.toCollection(CriterionArray::new));
+    return new Filter()
+        .setOr(
+            new ConjunctiveCriterionArray(
+                ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria))));
   }
 
   // Creates new Filter from a single Criterion with EQUAL condition (default).
@@ -68,8 +74,12 @@ public static Filter newFilter(@Nonnull String field, @Nonnull String value) {
   // Create singleton filter with one criterion
   @Nonnull
   public static Filter newFilter(@Nonnull Criterion criterion) {
-    return new Filter().setOr(new ConjunctiveCriterionArray(
-        ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(criterion))))));
+    return new Filter()
+        .setOr(
+            new ConjunctiveCriterionArray(
+                ImmutableList.of(
+                    new ConjunctiveCriterion()
+                        .setAnd(new CriterionArray(ImmutableList.of(criterion))))));
   }
 
   @Nonnull
@@ -78,13 +88,18 @@ public static Filter filterOrDefaultEmptyFilter(@Nullable Filter filter) {
   }
 
   /**
-   * Converts a set of aspect classes to a set of {@link AspectVersion} with the version all set to latest.
+   * Converts a set of aspect classes to a set of {@link AspectVersion} with the version all set to
+   * latest.
   */
  @Nonnull
-  public static Set<AspectVersion> latestAspectVersions(@Nonnull Set<Class<? extends RecordTemplate>> aspectClasses) {
+  public static Set<AspectVersion> latestAspectVersions(
+      @Nonnull Set<Class<? extends RecordTemplate>> aspectClasses) {
     return aspectClasses.stream()
-        .map(aspectClass -> new AspectVersion().setAspect(ModelUtils.getAspectName(aspectClass))
-            .setVersion(LATEST_VERSION))
+        .map(
+            aspectClass ->
+                new AspectVersion()
+                    .setAspect(ModelUtils.getAspectName(aspectClass))
+                    .setVersion(LATEST_VERSION))
         .collect(Collectors.toSet());
   }
 
@@ -97,7 +112,9 @@ public static Set<AspectVersion> latestAspectVersions(@Nonnull Set<Class<? exten
    * @return RelationshipFilter
    */
   @Nonnull
-  public static RelationshipFilter createRelationshipFilter(@Nonnull String field, @Nonnull String value,
+  public static RelationshipFilter createRelationshipFilter(
+      @Nonnull String field,
+      @Nonnull String value,
       @Nonnull RelationshipDirection relationshipDirection) {
     return createRelationshipFilter(newFilter(field, value), relationshipDirection);
   }
@@ -110,14 +127,14 @@ public static RelationshipFilter createRelationshipFilter(@Nonnull String field,
    * @return RelationshipFilter
    */
   @Nonnull
-  public static RelationshipFilter createRelationshipFilter(@Nonnull Filter filter,
-      @Nonnull RelationshipDirection relationshipDirection) {
+  public static RelationshipFilter createRelationshipFilter(
+      @Nonnull Filter filter, @Nonnull RelationshipDirection relationshipDirection) {
     return new RelationshipFilter().setOr(filter.getOr()).setDirection(relationshipDirection);
   }
 
   @Nonnull
-  public static RelationshipFilter newRelationshipFilter(@Nonnull Filter filter,
-      @Nonnull RelationshipDirection relationshipDirection) {
+  public static RelationshipFilter newRelationshipFilter(
+      @Nonnull Filter filter, @Nonnull RelationshipDirection relationshipDirection) {
     return new RelationshipFilter().setOr(filter.getOr()).setDirection(relationshipDirection);
   }
 
@@ -152,7 +169,9 @@ public static boolean hasMore(int from, int size, int totalPageCount) {
   @Nonnull
   public static Filter getFilterFromCriteria(List<Criterion> criteria) {
-    return new Filter().setOr(
-        new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))));
+    return new Filter()
+        .setOr(
+            new ConjunctiveCriterionArray(
+                new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))));
   }
 }
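A brief hedged sketch of the QueryUtils helpers above (the field name and urn are made-up illustrations): newFilter builds a single-criterion conjunction, and newRelationshipFilter attaches a traversal direction to it.

    Filter domainFilter = QueryUtils.newFilter("domains.keyword", "urn:li:domain:marketing");
    RelationshipFilter incoming =
        QueryUtils.newRelationshipFilter(domainFilter, RelationshipDirection.INCOMING);
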
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java
index 1995e3c1b80a1..a735374b54858 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java
@@ -13,7 +13,6 @@
 import javax.crypto.Cipher;
 import javax.crypto.spec.SecretKeySpec;
 
-
 public class SecretService {
   private static final int LOWERCASE_ASCII_START = 97;
   private static final int LOWERCASE_ASCII_END = 122;
@@ -82,7 +81,8 @@ public String decrypt(String encryptedValue) {
   }
 
   public String generateUrlSafeToken(int length) {
-    return _secureRandom.ints(length, LOWERCASE_ASCII_START, LOWERCASE_ASCII_END + 1)
+    return _secureRandom
+        .ints(length, LOWERCASE_ASCII_START, LOWERCASE_ASCII_END + 1)
         .mapToObj(i -> String.valueOf((char) i))
         .collect(Collectors.joining());
   }
@@ -98,7 +98,8 @@ public byte[] generateSalt(int length) {
     return randomBytes;
   }
 
-  public String getHashedPassword(@Nonnull byte[] salt, @Nonnull String password) throws IOException {
+  public String getHashedPassword(@Nonnull byte[] salt, @Nonnull String password)
+      throws IOException {
     byte[] saltedPassword = saltPassword(salt, password);
     byte[] hashedPassword = _messageDigest.digest(saltedPassword);
     return _encoder.encodeToString(hashedPassword);
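For readers unfamiliar with the salt-then-hash flow in the hunk above, a standalone hedged sketch follows. SHA-256 and Base64 are chosen here purely for illustration; the service's actual digest and encoder are configured elsewhere and are not shown in this hunk, and checked-exception handling is omitted.

    byte[] salt = new byte[16];
    new java.security.SecureRandom().nextBytes(salt); // random per-credential salt
    java.security.MessageDigest digest = java.security.MessageDigest.getInstance("SHA-256");
    digest.update(salt); // prepend the salt
    digest.update("hunter2".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    String hashed = java.util.Base64.getEncoder().encodeToString(digest.digest());
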
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java
index 7fac2e0124897..ce7473fb29dc4 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.entity.AspectUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.GlossaryTerms;
@@ -20,15 +22,14 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.entity.AspectUtils.*;
-
-
 @Slf4j
 public class BaseService {
 
   protected final EntityClient entityClient;
   protected final Authentication systemAuthentication;
 
-  public BaseService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
+  public BaseService(
+      @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
     this.entityClient = Objects.requireNonNull(entityClient);
     this.systemAuthentication = Objects.requireNonNull(systemAuthentication);
   }
@@ -44,13 +45,13 @@ protected Map<Urn, GlobalTags> getTagsAspects(
     }
 
     try {
-      Map<Urn, Aspect> aspects = batchGetLatestAspect(
-          entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
-          entityUrns,
-          Constants.GLOBAL_TAGS_ASPECT_NAME,
-          this.entityClient,
-          authentication
-      );
+      Map<Urn, Aspect> aspects =
+          batchGetLatestAspect(
+              entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
+              entityUrns,
+              Constants.GLOBAL_TAGS_ASPECT_NAME,
+              this.entityClient,
+              authentication);
 
       final Map<Urn, GlobalTags> finalResult = new HashMap<>();
       for (Urn entity : entityUrns) {
@@ -83,13 +84,13 @@ protected Map<Urn, EditableSchemaMetadata> getEditableSchemaMetadataAspects(
     }
 
     try {
-      Map<Urn, Aspect> aspects = batchGetLatestAspect(
-          entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
-          entityUrns,
-          Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
-          this.entityClient,
-          authentication
-      );
+      Map<Urn, Aspect> aspects =
+          batchGetLatestAspect(
+              entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
+              entityUrns,
+              Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
+              this.entityClient,
+              authentication);
 
       final Map<Urn, EditableSchemaMetadata> finalResult = new HashMap<>();
       for (Urn entity : entityUrns) {
@@ -122,13 +123,13 @@ protected Map<Urn, Ownership> getOwnershipAspects(
     }
 
     try {
-      Map<Urn, Aspect> aspects = batchGetLatestAspect(
-          entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
-          entityUrns,
-          Constants.OWNERSHIP_ASPECT_NAME,
-          this.entityClient,
-          authentication
-      );
+      Map<Urn, Aspect> aspects =
+          batchGetLatestAspect(
+              entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
+              entityUrns,
+              Constants.OWNERSHIP_ASPECT_NAME,
+              this.entityClient,
+              authentication);
 
       final Map<Urn, Ownership> finalResult = new HashMap<>();
       for (Urn entity : entityUrns) {
@@ -161,13 +162,13 @@ protected Map<Urn, GlossaryTerms> getGlossaryTermsAspects(
     }
 
     try {
-      Map<Urn, Aspect> aspects = batchGetLatestAspect(
-          entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
-          entityUrns,
-          Constants.GLOSSARY_TERMS_ASPECT_NAME,
-          this.entityClient,
-          authentication
-      );
+      Map<Urn, Aspect> aspects =
+          batchGetLatestAspect(
+              entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
+              entityUrns,
+              Constants.GLOSSARY_TERMS_ASPECT_NAME,
+              this.entityClient,
+              authentication);
 
       final Map<Urn, GlossaryTerms> finalResult = new HashMap<>();
       for (Urn entity : entityUrns) {
@@ -200,13 +201,13 @@ protected Map<Urn, Domains> getDomainsAspects(
     }
 
     try {
-      Map<Urn, Aspect> aspects = batchGetLatestAspect(
-          entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
-          entityUrns,
-          Constants.DOMAINS_ASPECT_NAME,
-          this.entityClient,
-          authentication
-      );
+      Map<Urn, Aspect> aspects =
+          batchGetLatestAspect(
+              entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this.
+              entityUrns,
+              Constants.DOMAINS_ASPECT_NAME,
+              this.entityClient,
+              authentication);
 
       final Map<Urn, Domains> finalResult = new HashMap<>();
       for (Urn entity : entityUrns) {
@@ -228,7 +229,9 @@ protected Map<Urn, Domains> getDomainsAspects(
     }
   }
 
-  protected void ingestChangeProposals(@Nonnull List<MetadataChangeProposal> changes, @Nonnull Authentication authentication) throws Exception {
+  protected void ingestChangeProposals(
+      @Nonnull List<MetadataChangeProposal> changes, @Nonnull Authentication authentication)
+      throws Exception {
     // TODO: Replace this with a batch ingest proposals endpoint.
     for (MetadataChangeProposal change : changes) {
       this.entityClient.ingestProposal(change, authentication);
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java
index 87b96e4cef498..10016ee89605b 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java
@@ -22,22 +22,20 @@
 import com.linkedin.metadata.graph.GraphClient;
 import com.linkedin.metadata.query.filter.RelationshipDirection;
 import com.linkedin.metadata.utils.EntityKeyUtils;
-import lombok.extern.slf4j.Slf4j;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import java.util.List;
 import java.util.Objects;
 import java.util.UUID;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * This class is used to permit easy CRUD operations on a DataProduct
  *
- * Note that no Authorization is performed within the service. The expectation
- * is that the caller has already verified the permissions of the active Actor.
- *
+ * <p>Note that no Authorization is performed within the service. The expectation is that the caller
+ * has already verified the permissions of the active Actor.
  */
 @Slf4j
 public class DataProductService {
@@ -52,18 +50,15 @@ public DataProductService(@Nonnull EntityClient entityClient, @Nonnull GraphClie
 
   /**
    * Creates a new Data Product.
    *
-   * Note that this method does not do authorization validation.
-   * It is assumed that users of this class have already authorized the operation.
+   * <p>Note that this method does not do authorization validation. It is assumed that users of this
+   * class have already authorized the operation.
    *
    * @param name optional name of the DataProduct
    * @param description optional description of the DataProduct
-   *
    * @return the urn of the newly created DataProduct
    */
   public Urn createDataProduct(
-      @Nullable String name,
-      @Nullable String description,
-      @Nonnull Authentication authentication) {
+      @Nullable String name, @Nullable String description, @Nonnull Authentication authentication) {
 
     // 1. Generate a unique id for the new DataProduct.
     final DataProductKey key = new DataProductKey();
@@ -76,10 +71,14 @@ public Urn createDataProduct(
 
     // 3. Write the new dataProduct to GMS, return the new URN.
     try {
-      final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATA_PRODUCT_ENTITY_NAME);
-      return UrnUtils.getUrn(_entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal(
-          entityUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), authentication,
-          false));
+      final Urn entityUrn =
+          EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATA_PRODUCT_ENTITY_NAME);
+      return UrnUtils.getUrn(
+          _entityClient.ingestProposal(
+              AspectUtils.buildMetadataChangeProposal(
+                  entityUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties),
+              authentication,
+              false));
     } catch (Exception e) {
       throw new RuntimeException("Failed to create DataProduct", e);
     }
@@ -88,8 +87,8 @@
   /**
    * Updates an existing DataProduct. If a provided field is null, the previous value will be kept.
    *
-   * Note that this method does not do authorization validation.
-   * It is assumed that users of this class have already authorized the operation.
+   * <p>Note that this method does not do authorization validation. It is assumed that users of this
+   * class have already authorized the operation.
    *
    * @param urn the urn of the DataProduct
    * @param name optional name of the DataProduct
@@ -108,7 +107,9 @@ public Urn updateDataProduct(
 
     DataProductProperties properties = getDataProductProperties(urn, authentication);
     if (properties == null) {
-      throw new IllegalArgumentException(String.format("Failed to update DataProduct. DataProduct with urn %s does not exist.", urn));
+      throw new IllegalArgumentException(
+          String.format(
+              "Failed to update DataProduct. DataProduct with urn %s does not exist.", urn));
     }
 
     // 2. Apply changes to existing DataProduct
@@ -121,9 +122,12 @@
 
     // 3. Write changes to GMS
     try {
-      return UrnUtils.getUrn(_entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal(
-          urn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), authentication,
-          false));
+      return UrnUtils.getUrn(
+          _entityClient.ingestProposal(
+              AspectUtils.buildMetadataChangeProposal(
+                  urn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties),
+              authentication,
+              false));
     } catch (Exception e) {
       throw new RuntimeException(String.format("Failed to update View with urn %s", urn), e);
     }
@@ -132,16 +136,23 @@
   /**
    * @param dataProductUrn the urn of the DataProduct
    * @param authentication the authentication to use
-   *
-   * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not exist.
+   * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not
+   *     exist.
   */
  @Nullable
-  public DataProductProperties getDataProductProperties(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) {
+  public DataProductProperties getDataProductProperties(
+      @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) {
     Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null");
     Objects.requireNonNull(authentication, "authentication must not be null");
     final EntityResponse response = getDataProductEntityResponse(dataProductUrn, authentication);
-    if (response != null && response.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) {
-      return new DataProductProperties(response.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data());
+    if (response != null
+        && response.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) {
+      return new DataProductProperties(
+          response
+              .getAspects()
+              .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)
+              .getValue()
+              .data());
     }
     // No aspect found
     return null;
@@ -150,41 +161,44 @@ public DataProductProperties getDataProductProperties(@Nonnull final Urn dataPro
   /**
    * @param dataProductUrn the urn of the DataProduct
    * @param authentication the authentication to use
-   *
-   * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not exist.
+   * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not
+   *     exist.
   */
  @Nullable
-  public Domains getDataProductDomains(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) {
+  public Domains getDataProductDomains(
+      @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) {
     Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null");
     Objects.requireNonNull(authentication, "authentication must not be null");
     try {
-      final EntityResponse response = _entityClient.getV2(
-          Constants.DATA_PRODUCT_ENTITY_NAME,
-          dataProductUrn,
-          ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME),
-          authentication
-      );
+      final EntityResponse response =
+          _entityClient.getV2(
+              Constants.DATA_PRODUCT_ENTITY_NAME,
+              dataProductUrn,
+              ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME),
+              authentication);
       if (response != null && response.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) {
-        return new Domains(response.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data());
+        return new Domains(
+            response.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data());
       }
       // No aspect found
       return null;
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e);
+      throw new RuntimeException(
+          String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e);
     }
   }
 
   /**
-   * Returns an instance of {@link EntityResponse} for the specified DataProduct urn,
-   * or null if one cannot be found.
+   * Returns an instance of {@link EntityResponse} for the specified DataProduct urn, or null if one
+   * cannot be found.
    *
    * @param dataProductUrn the urn of the DataProduct
    * @param authentication the authentication to use
-   *
    * @return an instance of {@link EntityResponse} for the DataProduct, null if it does not exist.
   */
  @Nullable
-  public EntityResponse getDataProductEntityResponse(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) {
+  public EntityResponse getDataProductEntityResponse(
+      @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) {
     Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null");
     Objects.requireNonNull(authentication, "authentication must not be null");
     try {
@@ -192,79 +206,92 @@ public EntityResponse getDataProductEntityResponse(@Nonnull final Urn dataProduc
           Constants.DATA_PRODUCT_ENTITY_NAME,
           dataProductUrn,
           ImmutableSet.of(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME),
-          authentication
-      );
+          authentication);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e);
+      throw new RuntimeException(
+          String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e);
     }
   }
 
-  /**
-   * Sets a given domain on a given Data Product.
-   */
-  public void setDomain(@Nonnull final Urn dataProductUrn, @Nonnull final Urn domainUrn, @Nonnull final Authentication authentication) {
+  /** Sets a given domain on a given Data Product. */
+  public void setDomain(
+      @Nonnull final Urn dataProductUrn,
+      @Nonnull final Urn domainUrn,
+      @Nonnull final Authentication authentication) {
     try {
       Domains domains = new Domains();
-      EntityResponse entityResponse = _entityClient.getV2(
-          Constants.DATA_PRODUCT_ENTITY_NAME,
-          dataProductUrn,
-          ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME),
-          authentication);
+      EntityResponse entityResponse =
+          _entityClient.getV2(
+              Constants.DATA_PRODUCT_ENTITY_NAME,
+              dataProductUrn,
+              ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME),
+              authentication);
 
-      if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) {
-        DataMap dataMap = entityResponse.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data();
+      if (entityResponse != null
+          && entityResponse.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) {
+        DataMap dataMap =
+            entityResponse.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data();
        domains = new Domains(dataMap);
       }
 
       final UrnArray newDomains = new UrnArray();
       newDomains.add(domainUrn);
       domains.setDomains(newDomains);
-      _entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal(
-          dataProductUrn, Constants.DOMAINS_ASPECT_NAME, domains), authentication, false);
+      _entityClient.ingestProposal(
+          AspectUtils.buildMetadataChangeProposal(
+              dataProductUrn, Constants.DOMAINS_ASPECT_NAME, domains),
+          authentication,
+          false);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to set domain for DataProduct with urn %s", dataProductUrn), e);
+      throw new RuntimeException(
+          String.format("Failed to set domain for DataProduct with urn %s", dataProductUrn), e);
     }
   }
 
   /**
    * Deletes an existing DataProduct with a specific urn.
    *
-   * Note that this method does not do authorization validation.
-   * It is assumed that users of this class have already authorized the operation
+   * <p>Note that this method does not do authorization validation. It is assumed that users of this
+   * class have already authorized the operation
    *
-   * If the DataProduct does not exist, no exception will be thrown.
+   * <p>If the DataProduct does not exist, no exception will be thrown.
* * @param dataProductUrn the urn of the DataProduct * @param authentication the current authentication */ public void deleteDataProduct( - @Nonnull Urn dataProductUrn, - @Nonnull Authentication authentication) { + @Nonnull Urn dataProductUrn, @Nonnull Authentication authentication) { try { _entityClient.deleteEntity( Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"), Objects.requireNonNull(authentication, "authentication must not be null")); // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(dataProductUrn, authentication); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for DataProduct with urn %s", dataProductUrn), e); - } - }); + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(dataProductUrn, authentication); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for DataProduct with urn %s", + dataProductUrn), + e); + } + }); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to delete DataProduct with urn %s", dataProductUrn), e); } } /** * Sets a Data Product for a given list of entities. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * * @param dataProductUrn the urn of the Data Product to set - null if removing Data Product * @param resourceUrns the urns of the entities to add the Data Product to @@ -276,9 +303,11 @@ public void batchSetDataProduct( @Nonnull Authentication authentication, @Nonnull Urn actorUrn) { try { - DataProductProperties dataProductProperties = getDataProductProperties(dataProductUrn, authentication); + DataProductProperties dataProductProperties = + getDataProductProperties(dataProductUrn, authentication); if (dataProductProperties == null) { - throw new RuntimeException("Failed to batch set data product as data product does not exist"); + throw new RuntimeException( + "Failed to batch set data product as data product does not exist"); } DataProductAssociationArray dataProductAssociations = new DataProductAssociationArray(); @@ -286,15 +315,23 @@ public void batchSetDataProduct( dataProductAssociations = dataProductProperties.getAssets(); } - List<Urn> existingResourceUrns = dataProductAssociations.stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList()); - List<Urn> newResourceUrns = resourceUrns.stream().filter(urn -> !existingResourceUrns.contains(urn)).collect(Collectors.toList()); - - // unset existing data product on resources first as we only allow one data product on an entity at a time + List<Urn> existingResourceUrns = + dataProductAssociations.stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList()); + List<Urn> newResourceUrns = + resourceUrns.stream() + .filter(urn -> !existingResourceUrns.contains(urn)) + .collect(Collectors.toList()); + + // unset existing data product on resources first as we only allow one data product on an + // entity at a time for (Urn resourceUrn : resourceUrns) { 
unsetDataProduct(resourceUrn, authentication, actorUrn); } - AuditStamp nowAuditStamp = new AuditStamp().setTime(System.currentTimeMillis()).setActor(actorUrn); + AuditStamp nowAuditStamp = + new AuditStamp().setTime(System.currentTimeMillis()).setActor(actorUrn); for (Urn resourceUrn : newResourceUrns) { DataProductAssociation association = new DataProductAssociation(); association.setDestinationUrn(resourceUrn); @@ -306,53 +343,59 @@ public void batchSetDataProduct( dataProductProperties.setAssets(dataProductAssociations); _entityClient.ingestProposal( AspectUtils.buildMetadataChangeProposal( - dataProductUrn, - Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - dataProductProperties), + dataProductUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties), authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update assets for %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to update assets for %s", dataProductUrn), e); } } /** * Unsets a Data Product for a given entity. Remove this entity from its data product(s). * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * * @param resourceUrn the urn of the entity to remove the Data Product from * @param authentication the current authentication */ public void unsetDataProduct( - @Nonnull Urn resourceUrn, - @Nonnull Authentication authentication, - @Nonnull Urn actorUrn) { + @Nonnull Urn resourceUrn, @Nonnull Authentication authentication, @Nonnull Urn actorUrn) { try { List<String> relationshipTypes = ImmutableList.of("DataProductContains"); - EntityRelationships relationships = _graphClient.getRelatedEntities( - resourceUrn.toString(), - relationshipTypes, - RelationshipDirection.INCOMING, - 0, - 10, // should never be more than 1 as long as we only allow one - actorUrn.toString()); + EntityRelationships relationships = + _graphClient.getRelatedEntities( + resourceUrn.toString(), + relationshipTypes, + RelationshipDirection.INCOMING, + 0, + 10, // should never be more than 1 as long as we only allow one + actorUrn.toString()); if (relationships.hasRelationships() && relationships.getRelationships().size() > 0) { - relationships.getRelationships().forEach(relationship -> { - Urn dataProductUrn = relationship.getEntity(); - removeEntityFromDataProduct(dataProductUrn, resourceUrn, authentication); - }); + relationships + .getRelationships() + .forEach( + relationship -> { + Urn dataProductUrn = relationship.getEntity(); + removeEntityFromDataProduct(dataProductUrn, resourceUrn, authentication); + }); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset data product for %s", resourceUrn), e); + throw new RuntimeException( + String.format("Failed to unset data product for %s", resourceUrn), e); } } - private void removeEntityFromDataProduct(@Nonnull Urn dataProductUrn, @Nonnull Urn resourceUrn, @Nonnull Authentication authentication) { + private void removeEntityFromDataProduct( + @Nonnull Urn dataProductUrn, + @Nonnull Urn resourceUrn, + @Nonnull Authentication authentication) { try { - DataProductProperties dataProductProperties = getDataProductProperties(dataProductUrn, authentication); + DataProductProperties dataProductProperties = + getDataProductProperties(dataProductUrn, 
authentication); if (dataProductProperties == null) { throw new RuntimeException("Failed to unset data product as data product does not exist"); } @@ -373,23 +416,22 @@ private void removeEntityFromDataProduct(@Nonnull Urn dataProductUrn, @Nonnull U dataProductProperties.setAssets(finalAssociations); _entityClient.ingestProposal( AspectUtils.buildMetadataChangeProposal( - dataProductUrn, - Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - dataProductProperties), + dataProductUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties), authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset data product for %s", resourceUrn), e); + throw new RuntimeException( + String.format("Failed to unset data product for %s", resourceUrn), e); } } public boolean verifyEntityExists( - @Nonnull Urn entityUrn, - @Nonnull Authentication authentication) { + @Nonnull Urn entityUrn, @Nonnull Authentication authentication) { try { return _entityClient.exists(entityUrn, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to determine if entity with urn %s exists", entityUrn), e); + throw new RuntimeException( + String.format("Failed to determine if entity with urn %s exists", entityUrn), e); } } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java index 782a261675add..c18122eb9bb31 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java @@ -1,10 +1,14 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.domain.Domains; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.mxe.MetadataChangeProposal; @@ -14,19 +18,15 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class DomainService extends BaseService { - public DomainService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public DomainService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -47,14 +47,19 @@ public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List<ResourceReferen * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { + public void batchSetDomain( + @Nonnull Urn domainUrn, + @Nonnull List<ResourceReference> resources, + @Nonnull Authentication authentication) { log.debug("Batch setting Domain to entities. 
domain: {}, resources: {}", resources, domainUrn); try { setDomainForResources(domainUrn, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - domainUrn, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + domainUrn, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -65,7 +70,8 @@ public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List<ResourceReferen * @param domainUrns the urns of the domain to set * @param resources references to the resources to change */ - public void batchAddDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { + public void batchAddDomains( + @Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { batchAddDomains(domainUrns, resources, this.systemAuthentication); } @@ -76,14 +82,20 @@ public void batchAddDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<Resourc * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchAddDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { - log.debug("Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); + public void batchAddDomains( + @Nonnull List<Urn> domainUrns, + @Nonnull List<ResourceReference> resources, + @Nonnull Authentication authentication) { + log.debug( + "Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); try { addDomainsToResources(domainUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Domains %s to resources with urns %s!", - domainUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Domains %s to resources with urns %s!", + domainUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -103,13 +115,16 @@ public void batchUnsetDomain(@Nonnull List<ResourceReference> resources) { * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchUnsetDomain(@Nonnull List<ResourceReference> resources, @Nullable Authentication authentication) { + public void batchUnsetDomain( + @Nonnull List<ResourceReference> resources, @Nullable Authentication authentication) { log.debug("Batch unsetting Domains to entities. 
resources: {}", resources); try { unsetDomainForResources(resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset add Domain for resources with urns %s!", - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to unset add Domain for resources with urns %s!", + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -120,7 +135,8 @@ public void batchUnsetDomain(@Nonnull List<ResourceReference> resources, @Nullab * @param domainUrns the urns of domains to remove * @param resources references to the resources to change */ - public void batchRemoveDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { + public void batchRemoveDomains( + @Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources) { batchRemoveDomains(domainUrns, resources, this.systemAuthentication); } @@ -131,23 +147,29 @@ public void batchRemoveDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<Reso * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchRemoveDomains(@Nonnull List<Urn> domainUrns, @Nonnull List<ResourceReference> resources, @Nullable Authentication authentication) { - log.debug("Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); + public void batchRemoveDomains( + @Nonnull List<Urn> domainUrns, + @Nonnull List<ResourceReference> resources, + @Nullable Authentication authentication) { + log.debug( + "Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); try { removeDomainsFromResources(domainUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Domains %s to resources with urns %s!", - domainUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), - e); + throw new RuntimeException( + String.format( + "Failed to batch add Domains %s to resources with urns %s!", + domainUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + e); } } private void setDomainForResources( com.linkedin.common.urn.Urn domainUrn, List<ResourceReference> resources, - @Nullable Authentication authentication - ) throws Exception { + @Nullable Authentication authentication) + throws Exception { final List<MetadataChangeProposal> changes = buildSetDomainProposals(domainUrn, resources); ingestChangeProposals(changes, authentication); } @@ -155,40 +177,37 @@ private void setDomainForResources( private void addDomainsToResources( List<com.linkedin.common.urn.Urn> domainUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildAddDomainsProposals(domainUrns, resources, authentication); + @Nonnull Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildAddDomainsProposals(domainUrns, resources, authentication); ingestChangeProposals(changes, authentication); } private void unsetDomainForResources( - List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { + List<ResourceReference> resources, @Nonnull Authentication authentication) throws Exception { final List<MetadataChangeProposal> changes = buildUnsetDomainProposals(resources); 
ingestChangeProposals(changes, authentication); } public void removeDomainsFromResources( - List<Urn> domains, - List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildRemoveDomainsProposals(domains, resources, authentication); + List<Urn> domains, List<ResourceReference> resources, @Nonnull Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildRemoveDomainsProposals(domains, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting @Nonnull List<MetadataChangeProposal> buildSetDomainProposals( - com.linkedin.common.urn.Urn domainUrn, - List<ResourceReference> resources - ) { + com.linkedin.common.urn.Urn domainUrn, List<ResourceReference> resources) { List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { Domains domains = new Domains(); domains.setDomains(new UrnArray(ImmutableList.of(domainUrn))); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + changes.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); } return changes; } @@ -198,40 +217,40 @@ List<MetadataChangeProposal> buildSetDomainProposals( List<MetadataChangeProposal> buildAddDomainsProposals( List<com.linkedin.common.urn.Urn> domainUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) throws URISyntaxException { + @Nonnull Authentication authentication) + throws URISyntaxException { - final Map<Urn, Domains> domainAspects = getDomainsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Domains(), - authentication - ); + final Map<Urn, Domains> domainAspects = + getDomainsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Domains(), + authentication); - final List<MetadataChangeProposal> proposals = new ArrayList<>(); - for (ResourceReference resource : resources) { - Domains domains = domainAspects.get(resource.getUrn()); - if (domains == null) { - continue; - } - if (!domains.hasDomains()) { - domains.setDomains(new UrnArray()); - } - addDomainsIfNotExists(domains, domainUrns); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); - } - return proposals; + final List<MetadataChangeProposal> proposals = new ArrayList<>(); + for (ResourceReference resource : resources) { + Domains domains = domainAspects.get(resource.getUrn()); + if (domains == null) { + continue; + } + if (!domains.hasDomains()) { + domains.setDomains(new UrnArray()); + } + addDomainsIfNotExists(domains, domainUrns); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + } + return proposals; } @VisibleForTesting @Nonnull - List<MetadataChangeProposal> buildUnsetDomainProposals( - List<ResourceReference> resources - ) { + List<MetadataChangeProposal> buildUnsetDomainProposals(List<ResourceReference> resources) { final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { Domains domains = new Domains(); domains.setDomains(new UrnArray(Collections.emptyList())); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + changes.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, 
domains)); } return changes; } @@ -241,13 +260,12 @@ List<MetadataChangeProposal> buildUnsetDomainProposals( List<MetadataChangeProposal> buildRemoveDomainsProposals( List<Urn> domainUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) { - final Map<Urn, Domains> domainAspects = getDomainsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Domains(), - authentication - ); + @Nonnull Authentication authentication) { + final Map<Urn, Domains> domainAspects = + getDomainsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Domains(), + authentication); final List<MetadataChangeProposal> proposals = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -259,7 +277,8 @@ List<MetadataChangeProposal> buildRemoveDomainsProposals( domains.setDomains(new UrnArray()); } removeDomainsIfExists(domains, domainUrns); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); } return proposals; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java index 36f2ba85ec98f..902ad07354d5e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java @@ -1,13 +1,17 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.AuditStamp; -import com.linkedin.common.GlossaryTerms; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; +import com.linkedin.common.GlossaryTerms; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.metadata.resource.SubResourceType; @@ -21,18 +25,14 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class GlossaryTermService extends BaseService { - public GlossaryTermService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public GlossaryTermService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -41,11 +41,9 @@ public GlossaryTermService(@Nonnull EntityClient entityClient, @Nonnull Authenti * * @param glossaryTermUrns the urns of the terms to add * @param resources references to the resources to change - * */ public void batchAddGlossaryTerms( - @Nonnull List<Urn> glossaryTermUrns, - @Nonnull List<ResourceReference> resources) { + @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources) { 
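The DomainService hunks above are pure reformatting, but the behavioural contract is easy to lose in the noise: batchSetDomain overwrites the Domains aspect with exactly one domain, batchAddDomains appends to whatever is already present, and batchUnsetDomain writes an empty array. A minimal sketch of the three calls follows; it is not part of the patch, the urns are placeholders, and the (urn, subResourceType, subResource) shape of the ResourceReference constructor is an assumption for illustration.

import com.datahub.authentication.Authentication;
import com.google.common.collect.ImmutableList;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.resource.ResourceReference;
import com.linkedin.metadata.service.DomainService;
import java.util.List;

public class DomainServiceExample {
  public static void run(EntityClient entityClient, Authentication systemAuth) {
    DomainService domains = new DomainService(entityClient, systemAuth);

    Urn marketing = UrnUtils.getUrn("urn:li:domain:marketing");
    // Assumed constructor shape: (urn, subResourceType, subResource).
    ResourceReference dataset =
        new ResourceReference(
            UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,db.orders,PROD)"),
            null,
            null);
    List<ResourceReference> resources = ImmutableList.of(dataset);

    // Replace whatever domain the dataset had with exactly one domain.
    domains.batchSetDomain(marketing, resources);

    // Append a further domain without touching the existing one.
    domains.batchAddDomains(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:sales")), resources);

    // Clear the aspect entirely (writes an empty domain array).
    domains.batchUnsetDomain(resources);
  }
}

As the hunks above show, the two-argument overloads delegate to the Authentication-taking variants using the service's system authentication.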
batchAddGlossaryTerms(glossaryTermUrns, resources, this.systemAuthentication); } @@ -55,19 +53,23 @@ public void batchAddGlossaryTerms( * @param glossaryTermUrns the urns of the terms to add * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ public void batchAddGlossaryTerms( @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { - log.debug("Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", resources, glossaryTermUrns); + log.debug( + "Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", + resources, + glossaryTermUrns); try { addGlossaryTermsToResources(glossaryTermUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add GlossaryTerms %s to resources with urns %s!", - glossaryTermUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add GlossaryTerms %s to resources with urns %s!", + glossaryTermUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -77,11 +79,9 @@ public void batchAddGlossaryTerms( * * @param glossaryTermUrns the urns of the terms to remove * @param resources references to the resources to change - * */ public void batchRemoveGlossaryTerms( - @Nonnull List<Urn> glossaryTermUrns, - @Nonnull List<ResourceReference> resources) { + @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources) { batchRemoveGlossaryTerms(glossaryTermUrns, resources, this.systemAuthentication); } @@ -91,59 +91,69 @@ public void batchRemoveGlossaryTerms( * @param glossaryTermUrns the urns of the terms to remove * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ public void batchRemoveGlossaryTerms( @Nonnull List<Urn> glossaryTermUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { - log.debug("Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", resources, glossaryTermUrns); + log.debug( + "Batch adding GlossaryTerms to entities. 
glossaryTerms: {}, resources: {}", + resources, + glossaryTermUrns); try { removeGlossaryTermsFromResources(glossaryTermUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add GlossaryTerms %s to resources with urns %s!", - glossaryTermUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add GlossaryTerms %s to resources with urns %s!", + glossaryTermUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } private void addGlossaryTermsToResources( - List<Urn> glossaryTerms, - List<ResourceReference> resources, - Authentication authentication - ) throws Exception { - List<MetadataChangeProposal> changes = buildAddGlossaryTermsProposals(glossaryTerms, resources, authentication); + List<Urn> glossaryTerms, List<ResourceReference> resources, Authentication authentication) + throws Exception { + List<MetadataChangeProposal> changes = + buildAddGlossaryTermsProposals(glossaryTerms, resources, authentication); ingestChangeProposals(changes, authentication); } private void removeGlossaryTermsFromResources( - List<Urn> glossaryTerms, - List<ResourceReference> resources, - Authentication authentication - ) throws Exception { - List<MetadataChangeProposal> changes = buildRemoveGlossaryTermsProposals(glossaryTerms, resources, authentication); + List<Urn> glossaryTerms, List<ResourceReference> resources, Authentication authentication) + throws Exception { + List<MetadataChangeProposal> changes = + buildRemoveGlossaryTermsProposals(glossaryTerms, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting List<MetadataChangeProposal> buildAddGlossaryTermsProposals( - List<Urn> glossaryTermUrns, - List<ResourceReference> resources, - Authentication authentication - ) throws URISyntaxException { + List<Urn> glossaryTermUrns, List<ResourceReference> resources, Authentication authentication) + throws URISyntaxException { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildAddGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildAddGlossaryTermsToSubResourceProposals(glossaryTermUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildAddGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = + 
buildAddGlossaryTermsToSubResourceProposals( + glossaryTermUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -155,20 +165,29 @@ List<MetadataChangeProposal> buildAddGlossaryTermsProposals( List<MetadataChangeProposal> buildRemoveGlossaryTermsProposals( List<Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) { + Authentication authentication) { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildRemoveGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildRemoveGlossaryTermsToSubResourceProposals(glossaryTermUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildRemoveGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = + buildRemoveGlossaryTermsToSubResourceProposals( + glossaryTermUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -180,14 +199,14 @@ List<MetadataChangeProposal> buildRemoveGlossaryTermsProposals( List<MetadataChangeProposal> buildAddGlossaryTermsToEntityProposals( List<com.linkedin.common.urn.Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) throws URISyntaxException { + Authentication authentication) + throws URISyntaxException { - final Map<Urn, GlossaryTerms> glossaryTermAspects = getGlossaryTermsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlossaryTerms(), - authentication - ); + final Map<Urn, GlossaryTerms> glossaryTermAspects = + getGlossaryTermsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlossaryTerms(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -199,10 +218,15 @@ List<MetadataChangeProposal> buildAddGlossaryTermsToEntityProposals( if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); - glossaryTerms.setAuditStamp(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + glossaryTerms.setAuditStamp( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } addGlossaryTermsIfNotExists(glossaryTerms, glossaryTermUrns); - 
changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms)); } return changes; } @@ -211,31 +235,36 @@ List<MetadataChangeProposal> buildAddGlossaryTermsToEntityProposals( List<MetadataChangeProposal> buildAddGlossaryTermsToSubResourceProposals( final List<Urn> glossaryTermUrns, final List<ResourceReference> resources, - final Authentication authentication - ) throws URISyntaxException { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Authentication authentication) + throws URISyntaxException { + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } addGlossaryTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), glossaryTermUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -245,14 +274,13 @@ List<MetadataChangeProposal> buildAddGlossaryTermsToSubResourceProposals( List<MetadataChangeProposal> buildRemoveGlossaryTermsToEntityProposals( List<Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) { + Authentication authentication) { - final Map<Urn, GlossaryTerms> glossaryTermAspects = getGlossaryTermsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlossaryTerms(), - authentication - ); + final Map<Urn, GlossaryTerms> glossaryTermAspects = + getGlossaryTermsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlossaryTerms(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -262,15 +290,15 @@ List<MetadataChangeProposal> buildRemoveGlossaryTermsToEntityProposals( } if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); - glossaryTerms.setAuditStamp(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + glossaryTerms.setAuditStamp( + new AuditStamp() + .setTime(System.currentTimeMillis()) + 
.setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } removeGlossaryTermsIfExists(glossaryTerms, glossaryTermUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms); changes.add(proposal); } @@ -281,37 +309,42 @@ List<MetadataChangeProposal> buildRemoveGlossaryTermsToEntityProposals( List<MetadataChangeProposal> buildRemoveGlossaryTermsToSubResourceProposals( List<Urn> glossaryTermUrns, List<ResourceReference> resources, - Authentication authentication - ) { + Authentication authentication) { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeGlossaryTermsIfExists(editableFieldInfo.getGlossaryTerms(), glossaryTermUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; } - private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> glossaryTermUrns) throws URISyntaxException { + private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> glossaryTermUrns) + throws URISyntaxException { if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); } @@ -320,7 +353,8 @@ private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> List<Urn> glossaryTermsToAdd = new ArrayList<>(); for (Urn glossaryTermUrn : glossaryTermUrns) { - if (glossaryTermAssociationArray.stream().anyMatch(association -> association.getUrn().equals(glossaryTermUrn))) { + if (glossaryTermAssociationArray.stream() + .anyMatch(association -> association.getUrn().equals(glossaryTermUrn))) { continue; } glossaryTermsToAdd.add(glossaryTermUrn); @@ -338,30 +372,30 @@ private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List<Urn> } } - private static GlossaryTermAssociationArray removeGlossaryTermsIfExists(GlossaryTerms glossaryTerms, List<Urn> glossaryTermUrns) { + private static GlossaryTermAssociationArray removeGlossaryTermsIfExists( + GlossaryTerms glossaryTerms, 
List<Urn> glossaryTermUrns) { if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); } GlossaryTermAssociationArray glossaryTermAssociationArray = glossaryTerms.getTerms(); for (Urn glossaryTermUrn : glossaryTermUrns) { - glossaryTermAssociationArray.removeIf(association -> association.getUrn().equals(glossaryTermUrn)); + glossaryTermAssociationArray.removeIf( + association -> association.getUrn().equals(glossaryTermUrn)); } return glossaryTermAssociationArray; } private static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional<EditableSchemaFieldInfo> fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional<EditableSchemaFieldInfo> fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java index 5649be0c701ca..cd5202ce75b64 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.chart.ChartDataSourceTypeArray; @@ -24,15 +26,12 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.mxe.MetadataChangeProposal; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.entity.AspectUtils.*; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -42,77 +41,96 @@ public class LineageService { private final EntityClient _entityClient; /** - * Validates that a given list of urns are all datasets and all exist. Throws error if either condition is false for any urn. + * Validates that a given list of urns are all datasets and all exist. Throws error if either + * condition is false for any urn. */ - public void validateDatasetUrns(@Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDatasetUrns( + @Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add lineage edge with non-dataset node when we expect a dataset. 
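GlossaryTermService is the one service in this patch whose proposal builders fan out by sub-resource: a ResourceReference with no subResource targets the entity-level glossaryTerms aspect, while one whose subResourceType is DATASET_FIELD is folded into editableSchemaMetadata under the given field path. A hedged sketch of both call shapes (not part of the patch; urns are placeholders and the ResourceReference constructor shape is assumed, as before):

import com.datahub.authentication.Authentication;
import com.google.common.collect.ImmutableList;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.resource.ResourceReference;
import com.linkedin.metadata.resource.SubResourceType;
import com.linkedin.metadata.service.GlossaryTermService;

public class GlossaryTermServiceExample {
  public static void run(EntityClient entityClient, Authentication systemAuth) {
    GlossaryTermService terms = new GlossaryTermService(entityClient, systemAuth);

    Urn piiTerm = UrnUtils.getUrn("urn:li:glossaryTerm:pii");
    Urn datasetUrn =
        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,db.users,PROD)");

    // Entity-level reference: lands in the dataset's glossaryTerms aspect.
    ResourceReference wholeDataset = new ResourceReference(datasetUrn, null, null);
    // Field-level reference: lands in editableSchemaMetadata for the "email" field path.
    ResourceReference emailField =
        new ResourceReference(datasetUrn, SubResourceType.DATASET_FIELD, "email");

    terms.batchAddGlossaryTerms(
        ImmutableList.of(piiTerm), ImmutableList.of(wholeDataset, emailField));

    // Removal follows the same routing; terms already absent are skipped.
    terms.batchRemoveGlossaryTerms(
        ImmutableList.of(piiTerm), ImmutableList.of(wholeDataset, emailField));
  }
}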
Upstream urn: %s", urn)); + throw new IllegalArgumentException( + String.format( + "Tried to add lineage edge with non-dataset node when we expect a dataset. Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } /** - * Validates that a given list of urns are all either datasets or charts and that they exist. Otherwise, throw an error. + * Validates that a given list of urns are all either datasets or charts and that they exist. + * Otherwise, throw an error. */ - public void validateDashboardUpstreamUrns(@Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDashboardUpstreamUrns( + @Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { - if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) && !urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add an upstream to a dashboard that isn't a chart or dataset. Upstream urn: %s", urn)); + if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) + && !urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Tried to add an upstream to a dashboard that isn't a chart or dataset. Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } - /** - * Validates that a given urn exists using the entityService - */ - public void validateUrnExists(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws Exception { + /** Validates that a given urn exists using the entityService */ + public void validateUrnExists( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) throws Exception { if (!_entityClient.exists(urn, authentication)) { throw new IllegalArgumentException(String.format("Error: urn does not exist: %s", urn)); } } /** - * Updates dataset lineage by taking in a list of upstreams to add and to remove and updating the existing - * upstreamLineage aspect. + * Updates dataset lineage by taking in a list of upstreams to add and to remove and updating the + * existing upstreamLineage aspect. */ public void updateDatasetLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDatasetUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildDatasetLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildDatasetLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update dataset lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update dataset lineage for urn %s", downstreamUrn), e); } } - /** - * Builds an MCP of UpstreamLineage for dataset entities. 
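updateDatasetLineage, shown in full above, validates that every urn to add is an existing dataset and then delegates to buildDatasetLineageProposal (continued below), which merges the additions into the current upstreamLineage aspect, skipping urns already present, and drops the removals before ingesting a single proposal. A short sketch of the call, with placeholder urns and caller-supplied context:

import com.datahub.authentication.Authentication;
import com.google.common.collect.ImmutableList;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.service.LineageService;

public class DatasetLineageExample {
  public static void run(EntityClient entityClient, Authentication auth, Urn actor)
      throws Exception {
    LineageService lineage = new LineageService(entityClient);

    Urn downstream =
        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:snowflake,analytics.orders,PROD)");
    Urn newUpstream =
        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:snowflake,raw.orders,PROD)");
    Urn staleUpstream =
        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:snowflake,raw.orders_v1,PROD)");

    // Throws IllegalArgumentException if an added urn is not an existing dataset.
    lineage.updateDatasetLineage(
        downstream, ImmutableList.of(newUpstream), ImmutableList.of(staleUpstream), actor, auth);
  }
}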
- */ + /** Builds an MCP of UpstreamLineage for dataset entities. */ @Nonnull public MetadataChangeProposal buildDatasetLineageProposal( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DATASET_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), authentication); + _entityClient.getV2( + Constants.DATASET_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + authentication); UpstreamLineage upstreamLineage = new UpstreamLineage(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data(); upstreamLineage = new UpstreamLineage(dataMap); } @@ -129,7 +147,6 @@ public MetadataChangeProposal buildDatasetLineageProposal( upstreamsToAdd.add(upstreamUrn); } - for (final Urn upstreamUrn : upstreamsToAdd) { final Upstream newUpstream = new Upstream(); newUpstream.setDataset(DatasetUrn.createFromUrn(upstreamUrn)); @@ -147,52 +164,59 @@ public MetadataChangeProposal buildDatasetLineageProposal( upstreamLineage.setUpstreams(upstreams); return buildMetadataChangeProposal( - downstreamUrn, Constants.UPSTREAM_LINEAGE_ASPECT_NAME, upstreamLineage - ); + downstreamUrn, Constants.UPSTREAM_LINEAGE_ASPECT_NAME, upstreamLineage); } - /** - * Updates Chart lineage by building and ingesting an MCP based on inputs. - */ + /** Updates Chart lineage by building and ingesting an MCP based on inputs. */ public void updateChartLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { // ensure all upstream urns are dataset urns and they exist validateDatasetUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildChartLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildChartLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); } } - /** - * Builds an MCP of ChartInfo for chart entities. - */ + /** Builds an MCP of ChartInfo for chart entities. 
*/ @Nonnull public MetadataChangeProposal buildChartLineageProposal( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.CHART_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME), authentication); - - if (entityResponse == null || !entityResponse.getAspects().containsKey(Constants.CHART_INFO_ASPECT_NAME)) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s as chart info doesn't exist", downstreamUrn)); - } - - DataMap dataMap = entityResponse.getAspects().get(Constants.CHART_INFO_ASPECT_NAME).getValue().data(); + _entityClient.getV2( + Constants.CHART_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME), + authentication); + + if (entityResponse == null + || !entityResponse.getAspects().containsKey(Constants.CHART_INFO_ASPECT_NAME)) { + throw new RuntimeException( + String.format( + "Failed to update chart lineage for urn %s as chart info doesn't exist", + downstreamUrn)); + } + + DataMap dataMap = + entityResponse.getAspects().get(Constants.CHART_INFO_ASPECT_NAME).getValue().data(); ChartInfo chartInfo = new ChartInfo(dataMap); if (!chartInfo.hasInputEdges()) { chartInfo.setInputEdges(new EdgeArray()); @@ -205,10 +229,9 @@ public MetadataChangeProposal buildChartLineageProposal( final EdgeArray inputEdges = chartInfo.getInputEdges(); final List<Urn> upstreamsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamUrnsToAdd) { - if ( - inputEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || inputs.stream().anyMatch(input -> input.equals(upstreamUrn)) - ) { + if (inputEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || inputs.stream().anyMatch(input -> input.equals(upstreamUrn))) { continue; } upstreamsToAdd.add(upstreamUrn); @@ -219,7 +242,7 @@ public MetadataChangeProposal buildChartLineageProposal( } inputEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); - inputs.removeIf(input -> upstreamUrnsToRemove.contains(input.getDatasetUrn())); + inputs.removeIf(input -> upstreamUrnsToRemove.contains(input.getDatasetUrn())); chartInfo.setInputEdges(inputEdges); chartInfo.setInputs(inputs); @@ -227,31 +250,33 @@ public MetadataChangeProposal buildChartLineageProposal( return buildMetadataChangeProposal(downstreamUrn, Constants.CHART_INFO_ASPECT_NAME, chartInfo); } - /** - * Updates Dashboard lineage by building and ingesting an MCP based on inputs. - */ + /** Updates Dashboard lineage by building and ingesting an MCP based on inputs. 
*/ public void updateDashboardLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDashboardUpstreamUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildDashboardLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildDashboardLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update dashboard lineage for urn %s", downstreamUrn), e); } } /** - * Builds an MCP of DashboardInfo for dashboard entities. DashboardInfo has a list of chart urns and dataset urns pointing upstream. - * We need to filter out the chart urns and dataset urns separately in upstreamUrnsToAdd to add them to the correct fields. + * Builds an MCP of DashboardInfo for dashboard entities. DashboardInfo has a list of chart urns + * and dataset urns pointing upstream. We need to filter out the chart urns and dataset urns + * separately in upstreamUrnsToAdd to add them to the correct fields. */ @Nonnull public MetadataChangeProposal buildDashboardLineageProposal( @@ -259,41 +284,62 @@ public MetadataChangeProposal buildDashboardLineageProposal( @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DASHBOARD_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME), authentication); - - if (entityResponse == null || !entityResponse.getAspects().containsKey(Constants.DASHBOARD_INFO_ASPECT_NAME)) { - throw new RuntimeException(String.format("Failed to update dashboard lineage for urn %s as dashboard info doesn't exist", downstreamUrn)); - } - - DataMap dataMap = entityResponse.getAspects().get(Constants.DASHBOARD_INFO_ASPECT_NAME).getValue().data(); + _entityClient.getV2( + Constants.DASHBOARD_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME), + authentication); + + if (entityResponse == null + || !entityResponse.getAspects().containsKey(Constants.DASHBOARD_INFO_ASPECT_NAME)) { + throw new RuntimeException( + String.format( + "Failed to update dashboard lineage for urn %s as dashboard info doesn't exist", + downstreamUrn)); + } + + DataMap dataMap = + entityResponse.getAspects().get(Constants.DASHBOARD_INFO_ASPECT_NAME).getValue().data(); DashboardInfo dashboardInfo = new DashboardInfo(dataMap); // first, deal with chart edges - updateUpstreamCharts(dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamCharts( + dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); // 
next, deal with dataset edges - updateUpstreamDatasets(dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDatasets( + dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); - return buildMetadataChangeProposal(downstreamUrn, Constants.DASHBOARD_INFO_ASPECT_NAME, dashboardInfo); + return buildMetadataChangeProposal( + downstreamUrn, Constants.DASHBOARD_INFO_ASPECT_NAME, dashboardInfo); } /** - * Updates the charts and chartEdges fields on the DashboardInfo aspect. First, add any new lineage edges not already represented - * in the existing fields to chartEdges. Then, remove all lineage edges from charts and chartEdges fields that are in upstreamUrnsToRemove. - * Then update the DashboardInfo aspect. + * Updates the charts and chartEdges fields on the DashboardInfo aspect. First, add any new + * lineage edges not already represented in the existing fields to chartEdges. Then, remove all + * lineage edges from charts and chartEdges fields that are in upstreamUrnsToRemove. Then update + * the DashboardInfo aspect. */ - private void updateUpstreamCharts(DashboardInfo dashboardInfo, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dashboardUrn, Urn actor) { + private void updateUpstreamCharts( + DashboardInfo dashboardInfo, + List<Urn> upstreamUrnsToAdd, + List<Urn> upstreamUrnsToRemove, + Urn dashboardUrn, + Urn actor) { initializeChartEdges(dashboardInfo); final List<Urn> upstreamChartUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) + .collect(Collectors.toList()); final ChartUrnArray charts = dashboardInfo.getCharts(); final EdgeArray chartEdges = dashboardInfo.getChartEdges(); - final List<Urn> upstreamsChartsToAdd = getUpstreamChartToAdd(upstreamChartUrnsToAdd, chartEdges, charts); + final List<Urn> upstreamsChartsToAdd = + getUpstreamChartToAdd(upstreamChartUrnsToAdd, chartEdges, charts); for (final Urn upstreamUrn : upstreamsChartsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, chartEdges); @@ -305,7 +351,6 @@ private void updateUpstreamCharts(DashboardInfo dashboardInfo, List<Urn> upstrea dashboardInfo.setCharts(charts); } - private void initializeChartEdges(DashboardInfo dashboardInfo) { if (!dashboardInfo.hasChartEdges()) { dashboardInfo.setChartEdges(new EdgeArray()); @@ -316,15 +361,16 @@ private void initializeChartEdges(DashboardInfo dashboardInfo) { } /** - * Need to filter out any existing upstream chart urns in order to get a list of net new chart urns to add to dashboard lineage + * Need to filter out any existing upstream chart urns in order to get a list of net new chart + * urns to add to dashboard lineage */ - private List<Urn> getUpstreamChartToAdd(List<Urn> upstreamChartUrnsToAdd, List<Edge> chartEdges, ChartUrnArray charts) { + private List<Urn> getUpstreamChartToAdd( + List<Urn> upstreamChartUrnsToAdd, List<Edge> chartEdges, ChartUrnArray charts) { final List<Urn> upstreamsChartsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamChartUrnsToAdd) { - if ( - chartEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || charts.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (chartEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || charts.stream().anyMatch(chart -> chart.equals(upstreamUrn))) 
{ continue; } upstreamsChartsToAdd.add(upstreamUrn); @@ -332,25 +378,35 @@ private List<Urn> getUpstreamChartToAdd(List<Urn> upstreamChartUrnsToAdd, List<E return upstreamsChartsToAdd; } - private void removeChartLineageEdges(List<Edge> chartEdges, ChartUrnArray charts, List<Urn> upstreamUrnsToRemove) { + private void removeChartLineageEdges( + List<Edge> chartEdges, ChartUrnArray charts, List<Urn> upstreamUrnsToRemove) { chartEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); charts.removeIf(upstreamUrnsToRemove::contains); } /** - * Updates the datasets and datasetEdges fields on the DashboardInfo aspect. First, add any new lineage edges not already represented - * in the existing fields to datasetEdges.Then, remove all lineage edges from datasets and datasetEdges fields that are in upstreamUrnsToRemove. - * Then update the DashboardInfo aspect. + * Updates the datasets and datasetEdges fields on the DashboardInfo aspect. First, add any new + * lineage edges not already represented in the existing fields to datasetEdges. Then, remove all + * lineage edges from datasets and datasetEdges fields that are in upstreamUrnsToRemove. Then + * update the DashboardInfo aspect. */ - private void updateUpstreamDatasets(DashboardInfo dashboardInfo, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dashboardUrn, Urn actor) { + private void updateUpstreamDatasets( + DashboardInfo dashboardInfo, + List<Urn> upstreamUrnsToAdd, + List<Urn> upstreamUrnsToRemove, + Urn dashboardUrn, + Urn actor) { initializeDatasetEdges(dashboardInfo); final List<Urn> upstreamDatasetUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) + .collect(Collectors.toList()); final UrnArray datasets = dashboardInfo.getDatasets(); final EdgeArray datasetEdges = dashboardInfo.getDatasetEdges(); - final List<Urn> upstreamDatasetsToAdd = getUpstreamDatasetsToAdd(upstreamDatasetUrnsToAdd, datasetEdges, datasets); + final List<Urn> upstreamDatasetsToAdd = + getUpstreamDatasetsToAdd(upstreamDatasetUrnsToAdd, datasetEdges, datasets); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, datasetEdges); @@ -371,13 +427,13 @@ private void initializeDatasetEdges(DashboardInfo dashboardInfo) { } } - private List<Urn> getUpstreamDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, UrnArray datasets) { + private List<Urn> getUpstreamDatasetsToAdd( + List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, UrnArray datasets) { final List<Urn> upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - datasetEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || datasets.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (datasetEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || datasets.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -385,49 +441,60 @@ private List<Urn> getUpstreamDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd, L return upstreamDatasetsToAdd; }
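The getUpstreamChartToAdd and getUpstreamDatasetsToAdd helpers above encode the same idempotency rule: a candidate urn is added only if it appears neither in the newer edge array nor in the legacy urn array. A minimal standalone sketch of that rule, assuming the com.linkedin.common package layout used by the imports in this patch (the EdgeDedupSketch class and newDestinations method are illustrative names, not DataHub code):

    import com.linkedin.common.Edge;
    import com.linkedin.common.urn.Urn;
    import java.util.List;
    import java.util.stream.Collectors;

    final class EdgeDedupSketch {
      // Keep only candidates not yet present as an edge destination or as a legacy urn entry.
      static List<Urn> newDestinations(List<Urn> candidates, List<Edge> edges, List<Urn> legacy) {
        return candidates.stream()
            .filter(urn -> edges.stream().noneMatch(e -> e.getDestinationUrn().equals(urn)))
            .filter(urn -> !legacy.contains(urn))
            .collect(Collectors.toList());
      }
    }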
- private void removeDatasetLineageEdges(List<Edge> datasetEdges, UrnArray datasets, List<Urn> upstreamUrnsToRemove) { - datasetEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeDatasetLineageEdges( + List<Edge> datasetEdges, UrnArray datasets, List<Urn> upstreamUrnsToRemove) { + datasetEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); datasets.removeIf(upstreamUrnsToRemove::contains); } /** - * Validates that a given list of urns are all either datasets or dataJobs and that they exist. Otherwise, throw an error. + * Validates that a given list of urns are all either datasets or dataJobs and that they exist. + * Otherwise, throw an error. */ - public void validateDataJobUpstreamUrns(@Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDataJobUpstreamUrns( + @Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { - if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) && !urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add an upstream to a dataJob that isn't a datJob or dataset. Upstream urn: %s", urn)); + if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) + && !urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Tried to add an upstream to a dataJob that isn't a dataJob or dataset. Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } - /** - * Updates DataJob lineage by building and ingesting an MCP based on inputs. - */ + /** Updates DataJob lineage by building and ingesting an MCP based on inputs. */ public void updateDataJobUpstreamLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDataJobUpstreamUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildDataJobUpstreamLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildDataJobUpstreamLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update dataJob lineage for urn %s", downstreamUrn), e); } } }
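Every update* method in this class follows the same validate / build / ingest sequence shown above. A hypothetical caller-side sketch of that flow (the urns, the enclosing method, and the lineageService instance are illustrative assumptions, not code from this patch):

    import com.datahub.authentication.Authentication;
    import com.linkedin.common.urn.Urn;
    import com.linkedin.common.urn.UrnUtils;
    import java.util.Collections;

    void addUpstreamToDataJob(
        LineageService lineageService, Urn actorUrn, Authentication authentication)
        throws Exception {
      Urn dataJobUrn =
          UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(airflow,example_dag,prod),task_1)");
      Urn upstreamDataset =
          UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,example.table,PROD)");
      // Validates the upstream urns, merges them into DataJobInputOutput, and ingests the MCP.
      lineageService.updateDataJobUpstreamLineage(
          dataJobUrn,
          Collections.singletonList(upstreamDataset),
          Collections.emptyList(),
          actorUrn,
          authentication);
    }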
/** - * Builds an MCP of DataJobInputOutput for datajob entities. DataJobInputOutput has a list of dataset urns and datajob urns pointing upstream. - * We need to filter out the chart dataset and datajob urns separately in upstreamUrnsToAdd to add them to the correct fields. We deal with downstream - * pointing datasets in outputDatasets separately. + * Builds an MCP of DataJobInputOutput for datajob entities. DataJobInputOutput has a list of + * dataset urns and datajob urns pointing upstream. We need to filter out the dataset and + * datajob urns separately in upstreamUrnsToAdd to add them to the correct fields. We deal with + * downstream pointing datasets in outputDatasets separately. */ @Nonnull public MetadataChangeProposal buildDataJobUpstreamLineageProposal( @@ -435,46 +502,62 @@ public MetadataChangeProposal buildDataJobUpstreamLineageProposal( @Nonnull final List<Urn> upstreamUrnsToAdd, @Nonnull final List<Urn> upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DATA_JOB_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), authentication); + _entityClient.getV2( + Constants.DATA_JOB_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), + authentication); DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .getValue() + .data(); dataJobInputOutput = new DataJobInputOutput(dataMap); } // first, deal with dataset edges - updateUpstreamDatasetsForDataJobs(dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDatasetsForDataJobs( + dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); // next, deal with dataJobs edges - updateUpstreamDataJobs(dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDataJobs( + dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); - return buildMetadataChangeProposal(downstreamUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); + return buildMetadataChangeProposal( + downstreamUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); } /** - * Updates the inputDatasets and inputDatasetEdges fields on the DataJobInputOutput aspect. First, add any new lineage - * edges not already represented in the existing fields to inputDatasetEdges. Then, remove all lineage edges from inputDatasets - * and inputDatasetEdges fields that are in upstreamUrnsToRemove. Then update the DataJobInputOutput aspect. + * Updates the inputDatasets and inputDatasetEdges fields on the DataJobInputOutput aspect. First, + * add any new lineage edges not already represented in the existing fields to inputDatasetEdges. + * Then, remove all lineage edges from inputDatasets and inputDatasetEdges fields that are in + * upstreamUrnsToRemove. Then update the DataJobInputOutput aspect. 
*/ private void updateUpstreamDatasetsForDataJobs( DataJobInputOutput dataJobInputOutput, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dashboardUrn, - Urn actor - ) { + Urn actor) { initializeInputDatasetEdges(dataJobInputOutput); final List<Urn> upstreamDatasetUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) + .collect(Collectors.toList()); final DatasetUrnArray inputDatasets = dataJobInputOutput.getInputDatasets(); final EdgeArray inputDatasetEdges = dataJobInputOutput.getInputDatasetEdges(); - final List<Urn> upstreamDatasetsToAdd = getInputOutputDatasetsToAdd(upstreamDatasetUrnsToAdd, inputDatasetEdges, inputDatasets); + final List<Urn> upstreamDatasetsToAdd = + getInputOutputDatasetsToAdd(upstreamDatasetUrnsToAdd, inputDatasetEdges, inputDatasets); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, inputDatasetEdges); @@ -495,14 +578,15 @@ private void initializeInputDatasetEdges(DataJobInputOutput dataJobInputOutput) } } - // get new dataset edges that we should be adding to inputDatasetEdges and outputDatasetEdges for the DataJobInputOutput aspect - private List<Urn> getInputOutputDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, DatasetUrnArray inputDatasets) { + // get new dataset edges that we should be adding to inputDatasetEdges and outputDatasetEdges for + // the DataJobInputOutput aspect + private List<Urn> getInputOutputDatasetsToAdd( + List<Urn> upstreamDatasetUrnsToAdd, List<Edge> datasetEdges, DatasetUrnArray inputDatasets) { final List<Urn> upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - datasetEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || inputDatasets.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (datasetEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || inputDatasets.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -510,31 +594,36 @@ private List<Urn> getInputOutputDatasetsToAdd(List<Urn> upstreamDatasetUrnsToAdd return upstreamDatasetsToAdd; } - private void removeDatasetEdges(List<Edge> datasetEdges, DatasetUrnArray datasets, List<Urn> upstreamUrnsToRemove) { - datasetEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeDatasetEdges( + List<Edge> datasetEdges, DatasetUrnArray datasets, List<Urn> upstreamUrnsToRemove) { + datasetEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); datasets.removeIf(upstreamUrnsToRemove::contains); } /** - * Updates the dataJobs and dataJobEdges fields on the DataJobInputOutput aspect. First, add any new lineage edges not already represented - * in the existing fields to dataJobEdges.Then, remove all lineage edges from dataJobs and dataJobEdges fields that are in upstreamUrnsToRemove. - * Then update the DataJobInputOutput aspect. + * Updates the dataJobs and dataJobEdges fields on the DataJobInputOutput aspect. First, add any + * new lineage edges not already represented in the existing fields to dataJobEdges. Then, remove + * all lineage edges from dataJobs and dataJobEdges fields that are in upstreamUrnsToRemove. 
Then + * update the DataJobInputOutput aspect. */ private void updateUpstreamDataJobs( DataJobInputOutput dataJobInputOutput, List<Urn> upstreamUrnsToAdd, List<Urn> upstreamUrnsToRemove, Urn dataJobUrn, - Urn actor - ) { + Urn actor) { initializeInputDatajobEdges(dataJobInputOutput); final List<Urn> upstreamDatajobUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); final DataJobUrnArray dataJobs = dataJobInputOutput.getInputDatajobs(); final EdgeArray dataJobEdges = dataJobInputOutput.getInputDatajobEdges(); - final List<Urn> upstreamDatasetsToAdd = getInputDatajobsToAdd(upstreamDatajobUrnsToAdd, dataJobEdges, dataJobs); + final List<Urn> upstreamDatasetsToAdd = + getInputDatajobsToAdd(upstreamDatajobUrnsToAdd, dataJobEdges, dataJobs); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dataJobUrn, actor, dataJobEdges); @@ -555,13 +644,13 @@ private void initializeInputDatajobEdges(DataJobInputOutput dataJobInputOutput) } } - private List<Urn> getInputDatajobsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List<Edge> dataJobEdges, DataJobUrnArray dataJobs) { + private List<Urn> getInputDatajobsToAdd( + List<Urn> upstreamDatasetUrnsToAdd, List<Edge> dataJobEdges, DataJobUrnArray dataJobs) { final List<Urn> upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - dataJobEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || dataJobs.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (dataJobEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || dataJobs.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -569,30 +658,33 @@ private List<Urn> getInputDatajobsToAdd(List<Urn> upstreamDatasetUrnsToAdd, List return upstreamDatasetsToAdd; } - private void removeInputDatajobEdges(List<Edge> dataJobEdges, DataJobUrnArray dataJobs, List<Urn> upstreamUrnsToRemove) { - dataJobEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeInputDatajobEdges( + List<Edge> dataJobEdges, DataJobUrnArray dataJobs, List<Urn> upstreamUrnsToRemove) { + dataJobEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); dataJobs.removeIf(upstreamUrnsToRemove::contains); } - /** - * Updates DataJob lineage in the downstream direction (outputDatasets and outputDatasetEdges) - */ + /** Updates DataJob lineage in the downstream direction (outputDatasets and outputDatasetEdges) */ public void updateDataJobDownstreamLineage( @Nonnull final Urn dataJobUrn, @Nonnull final List<Urn> downstreamUrnsToAdd, @Nonnull final List<Urn> downstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDatasetUrns(downstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - final MetadataChangeProposal changeProposal = buildDataJobDownstreamLineageProposal( - dataJobUrn, downstreamUrnsToAdd, 
downstreamUrnsToRemove, actor, authentication); + final MetadataChangeProposal changeProposal = + buildDataJobDownstreamLineageProposal( + dataJobUrn, downstreamUrnsToAdd, downstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", dataJobUrn), e); + throw new RuntimeException( + String.format("Failed to update dataJob lineage for urn %s", dataJobUrn), e); } } @@ -603,8 +695,9 @@ private void initializeOutputDatajobEdges(DataJobInputOutput dataJobInputOutput) } /** - * Builds an MCP of DataJobInputOutput for datajob entities. Specifically this is updating this aspect for lineage in the downstream - * direction. This includes the fields outputDatasets (deprecated) and outputDatasetEdges + * Builds an MCP of DataJobInputOutput for datajob entities. Specifically this is updating this + * aspect for lineage in the downstream direction. This includes the fields outputDatasets + * (deprecated) and outputDatasetEdges */ @Nonnull public MetadataChangeProposal buildDataJobDownstreamLineageProposal( @@ -612,14 +705,24 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal( @Nonnull final List<Urn> downstreamUrnsToAdd, @Nonnull final List<Urn> downstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { final EntityResponse entityResponse = - _entityClient.getV2(Constants.DATA_JOB_ENTITY_NAME, dataJobUrn, ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), authentication); DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).getValue().data(); + _entityClient.getV2( + Constants.DATA_JOB_ENTITY_NAME, + dataJobUrn, + ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), + authentication); DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .getValue() + .data(); dataJobInputOutput = new DataJobInputOutput(dataMap); } @@ -628,7 +731,8 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal( final DatasetUrnArray outputDatasets = dataJobInputOutput.getOutputDatasets(); final EdgeArray outputDatasetEdges = dataJobInputOutput.getOutputDatasetEdges(); - final List<Urn> downstreamDatasetsToAdd = getInputOutputDatasetsToAdd(downstreamUrnsToAdd, outputDatasetEdges, outputDatasets); + final List<Urn> downstreamDatasetsToAdd = + getInputOutputDatasetsToAdd(downstreamUrnsToAdd, outputDatasetEdges, outputDatasets); for (final Urn downstreamUrn : downstreamDatasetsToAdd) { addNewEdge(downstreamUrn, dataJobUrn, actor, outputDatasetEdges); @@ -639,15 +743,15 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal( dataJobInputOutput.setOutputDatasetEdges(outputDatasetEdges); dataJobInputOutput.setOutputDatasets(outputDatasets); - return buildMetadataChangeProposal(dataJobUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); + return buildMetadataChangeProposal( + dataJobUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); } private 
void addNewEdge( @Nonnull final Urn upstreamUrn, @Nonnull final Urn downstreamUrn, @Nonnull final Urn actor, - @Nonnull final EdgeArray edgeArray - ) { + @Nonnull final EdgeArray edgeArray) { final Edge newEdge = new Edge(); newEdge.setDestinationUrn(upstreamUrn); newEdge.setSourceUrn(downstreamUrn); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java index 7385e8aa6acae..e030404cd2607 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java @@ -1,13 +1,17 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; -import com.linkedin.common.Ownership; import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.mxe.MetadataChangeProposal; @@ -15,20 +19,16 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class OwnerService extends BaseService { public static final String SYSTEM_ID = "__system__"; - public OwnerService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public OwnerService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -39,7 +39,10 @@ public OwnerService(@Nonnull EntityClient entityClient, @Nonnull Authentication * @param resources references to the resources to change * @param ownershipType the ownership type to add */ - public void batchAddOwners(@Nonnull List<Urn> ownerUrns, @Nonnull List<ResourceReference> resources, @Nonnull OwnershipType ownershipType) { + public void batchAddOwners( + @Nonnull List<Urn> ownerUrns, + @Nonnull List<ResourceReference> resources, + @Nonnull OwnershipType ownershipType) { batchAddOwners(ownerUrns, resources, ownershipType, this.systemAuthentication); } @@ -60,9 +63,11 @@ public void batchAddOwners( try { addOwnersToResources(ownerUrns, resources, ownershipType, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + ownerUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -73,7 +78,8 @@ public void batchAddOwners( * @param ownerUrns the urns of the owners to remove * @param resources references to the resources to change */ - public void batchRemoveOwners(@Nonnull List<Urn> ownerUrns, @Nonnull List<ResourceReference> 
resources) { + public void batchRemoveOwners( + @Nonnull List<Urn> ownerUrns, @Nonnull List<ResourceReference> resources) { batchRemoveOwners(ownerUrns, resources, this.systemAuthentication); } @@ -92,9 +98,11 @@ public void batchRemoveOwners( try { removeOwnersFromResources(ownerUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch remove Owners %s from resources with urns %s!", + ownerUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -103,18 +111,18 @@ private void addOwnersToResources( List<com.linkedin.common.urn.Urn> ownerUrns, List<ResourceReference> resources, OwnershipType ownershipType, - Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildAddOwnersProposals(ownerUrns, resources, ownershipType, authentication); + Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildAddOwnersProposals(ownerUrns, resources, ownershipType, authentication); ingestChangeProposals(changes, authentication); } private void removeOwnersFromResources( - List<Urn> owners, - List<ResourceReference> resources, - Authentication authentication - ) throws Exception { - final List<MetadataChangeProposal> changes = buildRemoveOwnersProposals(owners, resources, authentication); + List<Urn> owners, List<ResourceReference> resources, Authentication authentication) + throws Exception { + final List<MetadataChangeProposal> changes = + buildRemoveOwnersProposals(owners, resources, authentication); ingestChangeProposals(changes, authentication); } @@ -123,14 +131,13 @@ List<MetadataChangeProposal> buildAddOwnersProposals( List<com.linkedin.common.urn.Urn> ownerUrns, List<ResourceReference> resources, OwnershipType ownershipType, - Authentication authentication - ) { + Authentication authentication) { - final Map<Urn, Ownership> ownershipAspects = getOwnershipAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Ownership(), - authentication - ); + final Map<Urn, Ownership> ownershipAspects = + getOwnershipAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Ownership(), + authentication); final List<MetadataChangeProposal> proposals = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -142,28 +149,26 @@ List<MetadataChangeProposal> buildAddOwnersProposals( if (!owners.hasOwners()) { owners.setOwners(new OwnerArray()); - owners.setLastModified(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) - ); + owners.setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } addOwnersIfNotExists(owners, ownerUrns, ownershipType); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners)); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners)); } return proposals; }
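Both proposal builders share DataHub's read-modify-write shape for aspects: fetch the current Ownership for each resource (falling back to an empty aspect), mutate it in memory, and wrap it in one MetadataChangeProposal per resource. Condensed to its core, assuming the surrounding fields and helpers shown in this patch are in scope:

    // Sketch of the per-resource loop shared by the add and remove paths.
    Ownership owners = ownershipAspects.getOrDefault(resource.getUrn(), new Ownership());
    if (!owners.hasOwners()) {
      owners.setOwners(new OwnerArray());
    }
    addOwnersIfNotExists(owners, ownerUrns, ownershipType); // removeOwnersIfExists(...) when removing
    proposals.add(
        buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners));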
@VisibleForTesting List<MetadataChangeProposal> buildRemoveOwnersProposals( - List<Urn> ownerUrns, - List<ResourceReference> resources, - Authentication authentication - ) { - final Map<Urn, Ownership> ownershipAspects = getOwnershipAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Ownership(), - authentication - ); + List<Urn> ownerUrns, List<ResourceReference> resources, Authentication authentication) { + final Map<Urn, Ownership> ownershipAspects = + getOwnershipAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Ownership(), + authentication); final List<MetadataChangeProposal> proposals = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -175,16 +180,15 @@ List<MetadataChangeProposal> buildRemoveOwnersProposals( owners.setOwners(new OwnerArray()); } removeOwnersIfExists(owners, ownerUrns); - proposals.add(buildMetadataChangeProposal( - resource.getUrn(), - Constants.OWNERSHIP_ASPECT_NAME, owners - )); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners)); } return proposals; } - private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) { + private void addOwnersIfNotExists( + Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) { if (!owners.hasOwners()) { owners.setOwners(new OwnerArray()); } @@ -193,7 +197,8 @@ private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, Ownersh List<Urn> ownersToAdd = new ArrayList<>(); for (Urn ownerUrn : ownerUrns) { - if (ownerAssociationArray.stream().anyMatch(association -> association.getOwner().equals(ownerUrn))) { + if (ownerAssociationArray.stream() + .anyMatch(association -> association.getOwner().equals(ownerUrn))) { continue; } ownersToAdd.add(ownerUrn); @@ -212,6 +217,7 @@ private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, Ownersh ownerAssociationArray.add(newOwner); } } + @VisibleForTesting static Urn mapOwnershipTypeToEntity(String type) { final String typeName = SYSTEM_ID + type.toLowerCase(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java index 821321b634881..f91f9fbfd93f8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java @@ -20,39 +20,41 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a DataHub Ownership Type. - * Currently it supports creating, updating, and removing a Ownership Type. + * This class is used to permit easy CRUD operations on a DataHub Ownership Type. Currently it + * supports creating, updating, and removing an Ownership Type. * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. * - * TODO: Ideally we have some basic caching of the view information inside of this class. + * <p>TODO: Ideally we have some basic caching of the ownership type information inside of this class. 
*/ @Slf4j public class OwnershipTypeService extends BaseService { public static final String SYSTEM_ID = "__system__"; - public OwnershipTypeService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public OwnershipTypeService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } /** * Creates a new Ownership Type. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param name optional name of the Ownership Type * @param description optional description of the Ownership Type * @param authentication the current authentication * @param currentTimeMs the current time in millis - * * @return the urn of the newly created Ownership Type */ - public Urn createOwnershipType(String name, @Nullable String description, @Nonnull Authentication authentication, + public Urn createOwnershipType( + String name, + @Nullable String description, + @Nonnull Authentication authentication, long currentTimeMs) { Objects.requireNonNull(name, "name must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); @@ -66,26 +68,33 @@ public Urn createOwnershipType(String name, @Nullable String description, @Nonnu ownershipTypeInfo.setName(name); ownershipTypeInfo.setDescription(description, SetMode.IGNORE_NULL); final AuditStamp auditStamp = - new AuditStamp().setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())).setTime(currentTimeMs); + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); ownershipTypeInfo.setCreated(auditStamp); ownershipTypeInfo.setLastModified(auditStamp); // 3. Write the new Ownership Type to GMS, return the new URN. try { - final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.OWNERSHIP_TYPE_ENTITY_NAME); - return UrnUtils.getUrn(this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(entityUrn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, - ownershipTypeInfo), authentication, false)); + final Urn entityUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.OWNERSHIP_TYPE_ENTITY_NAME); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, ownershipTypeInfo), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create Ownership Type", e); } } /** - * Updates an existing Ownership Type. If a provided field is null, the previous value will be kept. + * Updates an existing Ownership Type. If a provided field is null, the previous value will be + * kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. 
* * @param urn the urn of the Ownership Type * @param name optional name of the Ownership Type @@ -93,8 +102,12 @@ public Urn createOwnershipType(String name, @Nullable String description, @Nonnu * @param authentication the current authentication * @param currentTimeMs the current time in millis */ - public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullable String description, - @Nonnull Authentication authentication, long currentTimeMs) { + public void updateOwnershipType( + @Nonnull Urn urn, + @Nullable String name, + @Nullable String description, + @Nonnull Authentication authentication, + long currentTimeMs) { Objects.requireNonNull(urn, "urn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); @@ -103,7 +116,8 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab if (info == null) { throw new IllegalArgumentException( - String.format("Failed to update Ownership Type. Ownership Type with urn %s does not exist.", urn)); + String.format( + "Failed to update Ownership Type. Ownership Type with urn %s does not exist.", urn)); } // 2. Apply changes to existing Ownership Type @@ -115,12 +129,16 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab } info.setLastModified( - new AuditStamp().setTime(currentTimeMs).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. Write changes to GMS try { this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(urn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, info), authentication, + AspectUtils.buildMetadataChangeProposal( + urn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, info), + authentication, false); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", urn), e); @@ -130,15 +148,16 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab /** * Deletes an existing Ownership Type with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the Ownership Type does not exist, no exception will be thrown. + * <p>If the Ownership Type does not exist, no exception will be thrown. 
* * @param urn the urn of the Ownership Type * @param authentication the current authentication */ - public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Nonnull Authentication authentication) { + public void deleteOwnershipType( + @Nonnull Urn urn, boolean deleteReferences, @Nonnull Authentication authentication) { Objects.requireNonNull(urn, "Ownership TypeUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -146,8 +165,11 @@ public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Non log.info("Soft deleting ownership type: {}", urn); final Status statusAspect = new Status(); statusAspect.setRemoved(true); - this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal(urn, Constants.STATUS_ASPECT_NAME, - statusAspect), authentication, false); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.STATUS_ASPECT_NAME, statusAspect), + authentication, + false); } else { this.entityClient.deleteEntity(urn, authentication); if (deleteReferences) { @@ -155,12 +177,14 @@ public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Non } } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete Ownership Type with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to delete Ownership Type with urn %s", urn), e); } } /** * Return whether the provided urn is for a system provided ownership type. + * * @param urn the urn of the Ownership Type * @return true is the ownership type is a system default. */ @@ -169,21 +193,23 @@ private boolean isSystemOwnershipType(Urn urn) { } /** - * Returns an instance of {@link OwnershipTypeInfo} for the specified Ownership Type urn, - * or null if one cannot be found. + * Returns an instance of {@link OwnershipTypeInfo} for the specified Ownership Type urn, or null + * if one cannot be found. * * @param ownershipTypeUrn the urn of the Ownership Type * @param authentication the authentication to use - * - * @return an instance of {@link OwnershipTypeInfo} for the Ownership Type, null if it does not exist. + * @return an instance of {@link OwnershipTypeInfo} for the Ownership Type, null if it does not + * exist. */ @Nullable - public OwnershipTypeInfo getOwnershipTypeInfo(@Nonnull final Urn ownershipTypeUrn, - @Nonnull final Authentication authentication) { + public OwnershipTypeInfo getOwnershipTypeInfo( + @Nonnull final Urn ownershipTypeUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(ownershipTypeUrn, "ownershipTypeUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); - final EntityResponse response = getOwnershipTypeEntityResponse(ownershipTypeUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)) { + final EntityResponse response = + getOwnershipTypeEntityResponse(ownershipTypeUrn, authentication); + if (response != null + && response.getAspects().containsKey(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)) { return new OwnershipTypeInfo( response.getAspects().get(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME).getValue().data()); } @@ -192,24 +218,28 @@ public OwnershipTypeInfo getOwnershipTypeInfo(@Nonnull final Urn ownershipTypeUr } /** - * Returns an instance of {@link EntityResponse} for the specified Ownership Type urn, - * or null if one cannot be found. 
+ * Returns an instance of {@link EntityResponse} for the specified Ownership Type urn, or null if + * one cannot be found. * * @param ownershipTypeUrn the urn of the Ownership Type. * @param authentication the authentication to use - * - * @return an instance of {@link EntityResponse} for the Ownership Type, null if it does not exist. + * @return an instance of {@link EntityResponse} for the Ownership Type, null if it does not + * exist. */ @Nullable - public EntityResponse getOwnershipTypeEntityResponse(@Nonnull final Urn ownershipTypeUrn, - @Nonnull final Authentication authentication) { + public EntityResponse getOwnershipTypeEntityResponse( + @Nonnull final Urn ownershipTypeUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(ownershipTypeUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - return this.entityClient.getV2(Constants.OWNERSHIP_TYPE_ENTITY_NAME, ownershipTypeUrn, - ImmutableSet.of(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME), authentication); + return this.entityClient.getV2( + Constants.OWNERSHIP_TYPE_ENTITY_NAME, + ownershipTypeUrn, + ImmutableSet.of(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME), + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Ownership Type with urn %s", ownershipTypeUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve Ownership Type with urn %s", ownershipTypeUrn), e); } } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java index b3765d1d9a4e0..ae289c067a78f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java @@ -27,27 +27,26 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a Query - * Currently it supports creating and removing a Query. - * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + * This class is used to permit easy CRUD operations on a Query. Currently it supports creating and + * removing a Query. * + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. */ @Slf4j public class QueryService extends BaseService { - public QueryService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public QueryService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } /** * Creates a new Query. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. 
* * @param name optional name of the Query * @param description optional description of the Query @@ -56,7 +55,6 @@ public QueryService(@Nonnull EntityClient entityClient, @Nonnull Authentication * @param subjects the query subjects * @param authentication the current authentication * @param currentTimeMs the current time in millis - * * @return the urn of the newly created View */ public Urn createQuery( @@ -82,9 +80,10 @@ public Urn createQuery( queryProperties.setStatement(statement); queryProperties.setName(name, SetMode.IGNORE_NULL); queryProperties.setDescription(description, SetMode.IGNORE_NULL); - final AuditStamp auditStamp = new AuditStamp() - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) - .setTime(currentTimeMs); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); queryProperties.setCreated(auditStamp); queryProperties.setLastModified(auditStamp); @@ -95,12 +94,17 @@ public Urn createQuery( // 3. Write the new query to GMS, return the new URN. try { final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.QUERY_ENTITY_NAME); - this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.QUERY_PROPERTIES_ASPECT_NAME, queryProperties), authentication, - false); - return UrnUtils.getUrn(this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.QUERY_SUBJECTS_ASPECT_NAME, querySubjects), authentication, - false)); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.QUERY_PROPERTIES_ASPECT_NAME, queryProperties), + authentication, + false); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.QUERY_SUBJECTS_ASPECT_NAME, querySubjects), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create Query", e); } @@ -109,8 +113,8 @@ public Urn createQuery( /** * Updates an existing Query. If a provided field is null, the previous value will be kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param urn the urn of the query * @param name optional name of the Query @@ -135,7 +139,8 @@ public void updateQuery( QueryProperties properties = getQueryProperties(urn, authentication); if (properties == null) { - throw new IllegalArgumentException(String.format("Failed to update Query. Query with urn %s does not exist.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update Query. Query with urn %s does not exist.", urn)); } // 2. Apply changes to existing Query @@ -149,17 +154,23 @@ public void updateQuery( properties.setStatement(statement); } - properties.setLastModified(new AuditStamp() - .setTime(currentTimeMs) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + properties.setLastModified( + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. 
Write changes to GMS try { final List<MetadataChangeProposal> aspectsToIngest = new ArrayList<>(); - aspectsToIngest.add(AspectUtils.buildMetadataChangeProposal(urn, Constants.QUERY_PROPERTIES_ASPECT_NAME, properties)); + aspectsToIngest.add( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.QUERY_PROPERTIES_ASPECT_NAME, properties)); if (subjects != null) { - aspectsToIngest.add(AspectUtils.buildMetadataChangeProposal(urn, Constants.QUERY_SUBJECTS_ASPECT_NAME, new QuerySubjects() - .setSubjects(new QuerySubjectArray(subjects)))); + aspectsToIngest.add( + AspectUtils.buildMetadataChangeProposal( + urn, + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new QuerySubjects().setSubjects(new QuerySubjectArray(subjects)))); } this.entityClient.batchIngestProposals(aspectsToIngest, authentication, false); } catch (Exception e) { @@ -170,17 +181,15 @@ public void updateQuery( /** * Deletes an existing Query with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the Query does not exist, no exception will be thrown. + * <p>If the Query does not exist, no exception will be thrown. * * @param queryUrn the urn of the Query * @param authentication the current authentication */ - public void deleteQuery( - @Nonnull Urn queryUrn, - @Nonnull Authentication authentication) { + public void deleteQuery(@Nonnull Urn queryUrn, @Nonnull Authentication authentication) { try { this.entityClient.deleteEntity( Objects.requireNonNull(queryUrn, "queryUrn must not be null"), @@ -191,69 +200,74 @@ public void deleteQuery( } /** - * Returns an instance of {@link QueryProperties} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link QueryProperties} for the specified Query urn, or null if one + * cannot be found. * * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link QueryProperties} for the Query, null if it does not exist. */ @Nullable - public QueryProperties getQueryProperties(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public QueryProperties getQueryProperties( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getQueryEntityResponse(queryUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.QUERY_PROPERTIES_ASPECT_NAME)) { - return new QueryProperties(response.getAspects().get(Constants.QUERY_PROPERTIES_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.QUERY_PROPERTIES_ASPECT_NAME)) { + return new QueryProperties( + response.getAspects().get(Constants.QUERY_PROPERTIES_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link QuerySubjects} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link QuerySubjects} for the specified Query urn, or null if one cannot + * be found. 
* * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link QuerySubjects} for the Query, null if it does not exist. */ @Nullable - public QuerySubjects getQuerySubjects(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public QuerySubjects getQuerySubjects( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getQueryEntityResponse(queryUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.QUERY_SUBJECTS_ASPECT_NAME)) { - return new QuerySubjects(response.getAspects().get(Constants.QUERY_SUBJECTS_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.QUERY_SUBJECTS_ASPECT_NAME)) { + return new QuerySubjects( + response.getAspects().get(Constants.QUERY_SUBJECTS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link EntityResponse} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified Query urn, or null if one + * cannot be found. * * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the Query, null if it does not exist. */ @Nullable - public EntityResponse getQueryEntityResponse(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public EntityResponse getQueryEntityResponse( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { return this.entityClient.getV2( Constants.QUERY_ENTITY_NAME, queryUrn, - ImmutableSet.of(Constants.QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME), - authentication - ); + ImmutableSet.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME), + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Query with urn %s", queryUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve Query with urn %s", queryUrn), e); } } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java index 58645166a21ef..08b14fc84d7c8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -15,20 +17,19 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * This class is used to permit easy CRUD operations on both <b>Global</b> and <b>Personal</b> * DataHub settings. * - * Note that no Authorization is performed within the service. 
The expectation - * is that the caller has already verified the permissions of the active Actor. + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. */ @Slf4j public class SettingsService extends BaseService { - public SettingsService(@Nonnull final EntityClient entityClient, @Nonnull final Authentication systemAuthentication) { + public SettingsService( + @Nonnull final EntityClient entityClient, + @Nonnull final Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -37,37 +38,38 @@ public SettingsService(@Nonnull final EntityClient entityClient, @Nonnull final * * @param user the urn of the user to fetch settings for * @param authentication the current authentication - * * @return an instance of {@link CorpUserSettings} for the specified user, or null if none exists. */ @Nullable public CorpUserSettings getCorpUserSettings( - @Nonnull final Urn user, - @Nonnull final Authentication authentication) { + @Nonnull final Urn user, @Nonnull final Authentication authentication) { Objects.requireNonNull(user, "user must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - EntityResponse response = this.entityClient.getV2( - CORP_USER_ENTITY_NAME, - user, - ImmutableSet.of(CORP_USER_SETTINGS_ASPECT_NAME), - authentication - ); - if (response != null && response.getAspects().containsKey(Constants.CORP_USER_SETTINGS_ASPECT_NAME)) { - return new CorpUserSettings(response.getAspects().get(Constants.CORP_USER_SETTINGS_ASPECT_NAME).getValue().data()); + EntityResponse response = + this.entityClient.getV2( + CORP_USER_ENTITY_NAME, + user, + ImmutableSet.of(CORP_USER_SETTINGS_ASPECT_NAME), + authentication); + if (response != null + && response.getAspects().containsKey(Constants.CORP_USER_SETTINGS_ASPECT_NAME)) { + return new CorpUserSettings( + response.getAspects().get(Constants.CORP_USER_SETTINGS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Corp User settings for user with urn %s", user), e); + throw new RuntimeException( + String.format("Failed to retrieve Corp User settings for user with urn %s", user), e); } } /** * Updates the settings for a given user. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. 
* * @param user the urn of the user * @param authentication the current authentication @@ -80,13 +82,13 @@ public void updateCorpUserSettings( Objects.requireNonNull(newSettings, "newSettings must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - MetadataChangeProposal proposal = AspectUtils.buildMetadataChangeProposal( - user, - CORP_USER_SETTINGS_ASPECT_NAME, - newSettings); + MetadataChangeProposal proposal = + AspectUtils.buildMetadataChangeProposal( + user, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); this.entityClient.ingestProposal(proposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Corp User settings for user with urn %s", user), e); + throw new RuntimeException( + String.format("Failed to update Corp User settings for user with urn %s", user), e); } } @@ -99,17 +101,24 @@ public void updateCorpUserSettings( public GlobalSettingsInfo getGlobalSettings(@Nonnull final Authentication authentication) { Objects.requireNonNull(authentication, "authentication must not be null"); try { - EntityResponse response = this.entityClient.getV2( - GLOBAL_SETTINGS_ENTITY_NAME, - GLOBAL_SETTINGS_URN, - ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME), - authentication - ); - if (response != null && response.getAspects().containsKey(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)) { - return new GlobalSettingsInfo(response.getAspects().get(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + this.entityClient.getV2( + GLOBAL_SETTINGS_ENTITY_NAME, + GLOBAL_SETTINGS_URN, + ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME), + authentication); + if (response != null + && response.getAspects().containsKey(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)) { + return new GlobalSettingsInfo( + response + .getAspects() + .get(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME) + .getValue() + .data()); } // No aspect found - log.warn("Failed to retrieve Global Settings. No settings exist, but they should. Returning null"); + log.warn( + "Failed to retrieve Global Settings. No settings exist, but they should. Returning null"); return null; } catch (Exception e) { throw new RuntimeException("Failed to retrieve Global Settings!", e); @@ -119,27 +128,25 @@ public GlobalSettingsInfo getGlobalSettings(@Nonnull final Authentication authen /** * Updates the Global settings. * - * This performs a read-modify-write of the underlying GlobalSettingsInfo aspect. + * <p>This performs a read-modify-write of the underlying GlobalSettingsInfo aspect. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param newSettings the new value for the global settings. 
* @param authentication the current authentication */ public void updateGlobalSettings( - @Nonnull final GlobalSettingsInfo newSettings, - @Nonnull final Authentication authentication) { + @Nonnull final GlobalSettingsInfo newSettings, @Nonnull final Authentication authentication) { Objects.requireNonNull(newSettings, "newSettings must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - MetadataChangeProposal proposal = AspectUtils.buildMetadataChangeProposal( - GLOBAL_SETTINGS_URN, - GLOBAL_SETTINGS_INFO_ASPECT_NAME, - newSettings); + MetadataChangeProposal proposal = + AspectUtils.buildMetadataChangeProposal( + GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, newSettings); this.entityClient.ingestProposal(proposal, authentication, false); } catch (Exception e) { throw new RuntimeException("Failed to update Global settings", e); } } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java index 9e12fc80a3cdb..a03c98411cb6f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -7,6 +10,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.metadata.resource.SubResourceType; @@ -20,18 +24,14 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class TagService extends BaseService { - public TagService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public TagService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -45,23 +45,26 @@ public void batchAddTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRefer batchAddTags(tagUrns, resources, this.systemAuthentication); } - /** * Batch adds multiple tags for a set of resources. * * @param tagUrns the urns of the tags to add * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ - public void batchAddTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) { + public void batchAddTags( + @Nonnull List<Urn> tagUrns, + @Nonnull List<ResourceReference> resources, + @Nonnull Authentication authentication) { log.debug("Batch adding Tags to entities. 
tags: {}, resources: {}", resources, tagUrns);
    try {
      addTagsToResources(tagUrns, resources, authentication);
    } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!",
-          tagUrns,
-          resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())),
+      throw new RuntimeException(
+          String.format(
+              "Failed to batch add Tags %s to resources with urns %s!",
+              tagUrns,
+              resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())),
           e);
     }
   }
@@ -71,9 +74,9 @@ public void batchAddTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRefer
    *
    * @param tagUrns the urns of the tags to remove
    * @param resources references to the resources to change
-   *
    */
-  public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources) {
+  public void batchRemoveTags(
+      @Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources) {
     batchRemoveTags(tagUrns, resources, this.systemAuthentication);
   }
@@ -83,16 +86,20 @@ public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRe
    * @param tagUrns the urns of the tags to remove
    * @param resources references to the resources to change
    * @param authentication authentication to use when making the change
-   *
    */
-  public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceReference> resources, @Nonnull Authentication authentication) {
+  public void batchRemoveTags(
+      @Nonnull List<Urn> tagUrns,
+      @Nonnull List<ResourceReference> resources,
+      @Nonnull Authentication authentication) {
     log.debug("Batch adding Tags to entities. tags: {}, resources: {}", resources, tagUrns);
     try {
      removeTagsFromResources(tagUrns, resources, authentication);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!",
-          tagUrns,
-          resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())),
+      throw new RuntimeException(
+          String.format(
+              "Failed to batch remove Tags %s from resources with urns %s!",
+              tagUrns,
+              resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())),
           e);
     }
   }
@@ -100,39 +107,46 @@ public void batchRemoveTags(@Nonnull List<Urn> tagUrns, @Nonnull List<ResourceRe
   private void addTagsToResources(
       List<com.linkedin.common.urn.Urn> tagUrns,
       List<ResourceReference> resources,
-      @Nonnull Authentication authentication
-  ) throws Exception {
-    final List<MetadataChangeProposal> changes = buildAddTagsProposals(tagUrns, resources, authentication);
+      @Nonnull Authentication authentication)
+      throws Exception {
+    final List<MetadataChangeProposal> changes =
+        buildAddTagsProposals(tagUrns, resources, authentication);
     ingestChangeProposals(changes, authentication);
   }
 
   private void removeTagsFromResources(
-      List<Urn> tags,
-      List<ResourceReference> resources,
-      @Nonnull Authentication authentication
-  ) throws Exception {
-    final List<MetadataChangeProposal> changes = buildRemoveTagsProposals(tags, resources, authentication);
+      List<Urn> tags, List<ResourceReference> resources, @Nonnull Authentication authentication)
+      throws Exception {
+    final List<MetadataChangeProposal> changes =
+        buildRemoveTagsProposals(tags, resources, authentication);
     ingestChangeProposals(changes, authentication);
   }
 
   @VisibleForTesting
   List<MetadataChangeProposal> buildAddTagsProposals(
-      List<Urn> tagUrns,
List<ResourceReference> resources, Authentication authentication) + throws URISyntaxException { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildAddTagsToEntityProposals(tagUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildAddTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildAddTagsToEntityProposals(tagUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = + buildAddTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -142,21 +156,27 @@ List<MetadataChangeProposal> buildAddTagsProposals( @VisibleForTesting List<MetadataChangeProposal> buildRemoveTagsProposals( - List<Urn> tagUrns, - List<ResourceReference> resources, - Authentication authentication - ) { + List<Urn> tagUrns, List<ResourceReference> resources, Authentication authentication) { final List<MetadataChangeProposal> changes = new ArrayList<>(); - final List<ResourceReference> entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> entityProposals = buildRemoveTagsToEntityProposals(tagUrns, entityRefs, authentication); - - final List<ResourceReference> schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List<MetadataChangeProposal> schemaFieldProposals = buildRemoveTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); + final List<ResourceReference> entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> entityProposals = + buildRemoveTagsToEntityProposals(tagUrns, entityRefs, authentication); + + final List<ResourceReference> schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List<MetadataChangeProposal> schemaFieldProposals = + buildRemoveTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -166,15 +186,13 @@ 
List<MetadataChangeProposal> buildRemoveTagsProposals( @VisibleForTesting List<MetadataChangeProposal> buildAddTagsToEntityProposals( - List<Urn> tagUrns, - List<ResourceReference> resources, - Authentication authentication - ) throws URISyntaxException { - final Map<Urn, GlobalTags> tagsAspects = getTagsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlobalTags(), - authentication - ); + List<Urn> tagUrns, List<ResourceReference> resources, Authentication authentication) + throws URISyntaxException { + final Map<Urn, GlobalTags> tagsAspects = + getTagsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlobalTags(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -186,11 +204,9 @@ List<MetadataChangeProposal> buildAddTagsToEntityProposals( globalTags.setTags(new TagAssociationArray()); } addTagsIfNotExists(globalTags, tagUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOBAL_TAGS_ASPECT_NAME, - globalTags - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, globalTags); changes.add(proposal); } return changes; @@ -200,32 +216,37 @@ List<MetadataChangeProposal> buildAddTagsToEntityProposals( List<MetadataChangeProposal> buildAddTagsToSubResourceProposals( final List<Urn> tagUrns, final List<ResourceReference> resources, - final Authentication authentication - ) throws URISyntaxException { + final Authentication authentication) + throws URISyntaxException { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. 
} - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -233,15 +254,12 @@ List<MetadataChangeProposal> buildAddTagsToSubResourceProposals( @VisibleForTesting List<MetadataChangeProposal> buildRemoveTagsToEntityProposals( - List<Urn> tagUrns, - List<ResourceReference> resources, - Authentication authentication - ) { - final Map<Urn, GlobalTags> tagsAspects = getTagsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlobalTags(), - authentication - ); + List<Urn> tagUrns, List<ResourceReference> resources, Authentication authentication) { + final Map<Urn, GlobalTags> tagsAspects = + getTagsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlobalTags(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -253,11 +271,9 @@ List<MetadataChangeProposal> buildRemoveTagsToEntityProposals( globalTags.setTags(new TagAssociationArray()); } removeTagsIfExists(globalTags, tagUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOBAL_TAGS_ASPECT_NAME, - globalTags - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, globalTags); changes.add(proposal); } @@ -268,30 +284,34 @@ List<MetadataChangeProposal> buildRemoveTagsToEntityProposals( List<MetadataChangeProposal> buildRemoveTagsToSubResourceProposals( List<Urn> tagUrns, List<ResourceReference> resources, - @Nonnull Authentication authentication - ) { - final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + @Nonnull Authentication authentication) { + final Map<Urn, EditableSchemaMetadata> editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List<MetadataChangeProposal> changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. 
} - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -306,7 +326,8 @@ private void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns) throws URISy List<Urn> tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -336,18 +357,16 @@ private static TagAssociationArray removeTagsIfExists(GlobalTags tags, List<Urn> } private static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional<EditableSchemaFieldInfo> fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional<EditableSchemaFieldInfo> fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java index 026eb3cd61def..b4a683d2e2c68 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java @@ -21,35 +21,34 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a DataHub View. - * Currently it supports creating, updating, and removing a View. + * This class is used to permit easy CRUD operations on a DataHub View. Currently it supports + * creating, updating, and removing a View. * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + * <p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. * - * TODO: Ideally we have some basic caching of the view information inside of this class. + * <p>TODO: Ideally we have some basic caching of the view information inside of this class. 
 */
@Slf4j
public class ViewService extends BaseService {

-  public ViewService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
+  public ViewService(
+      @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
     super(entityClient, systemAuthentication);
   }
 
   /**
    * Creates a new DataHub View.
    *
-   * Note that this method does not do authorization validation.
-   * It is assumed that users of this class have already authorized the operation.
+   * <p>Note that this method does not do authorization validation. It is assumed that users of this
+   * class have already authorized the operation.
    *
    * @param type the type of the View
    * @param name the name of the View
    * @param description the description of the View
    * @param definition the view definition, a.k.a. the View definition
    * @param authentication the current authentication
-   *
-   * @return the urn of the newly created View
+   * @return the urn of the newly created View
    */
   public Urn createView(
@@ -74,43 +73,49 @@ public Urn createView(
     newView.setName(name);
     newView.setDescription(description, SetMode.IGNORE_NULL);
     newView.setDefinition(definition);
-    final AuditStamp auditStamp = new AuditStamp()
-        .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))
-        .setTime(currentTimeMs);
+    final AuditStamp auditStamp =
+        new AuditStamp()
+            .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))
+            .setTime(currentTimeMs);
     newView.setCreated(auditStamp);
     newView.setLastModified(auditStamp);
-
     // 3. Write the new view to GMS, return the new URN.
     try {
-      return UrnUtils.getUrn(this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal(
-          EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATAHUB_VIEW_ENTITY_NAME), Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, newView), authentication,
-          false));
+      return UrnUtils.getUrn(
+          this.entityClient.ingestProposal(
+              AspectUtils.buildMetadataChangeProposal(
+                  EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATAHUB_VIEW_ENTITY_NAME),
+                  Constants.DATAHUB_VIEW_INFO_ASPECT_NAME,
+                  newView),
+              authentication,
+              false));
     } catch (Exception e) {
       throw new RuntimeException("Failed to create View", e);
     }
   }
 
   /**
-   * Updates an existing DataHub View with a specific urn. The overwrites only the fields
-   * which are not null (provided).
+   * Updates an existing DataHub View with a specific urn. It overwrites only the fields which are
+   * not null (provided).
    *
-   * Note that this method does not do authorization validation.
-   * It is assumed that users of this class have already authorized the operation.
+   * <p>Note that this method does not do authorization validation. It is assumed that users of this
+   * class have already authorized the operation.
    *
-   * The View with the provided urn must exist, else an {@link IllegalArgumentException} will be
+   * <p>The View with the provided urn must exist, else an {@link IllegalArgumentException} will be
    * thrown.
    *
-   * This method will perform a read-modify-write. This can cause concurrent writes
-   * to conflict, and overwrite one another. The expected frequency of writes
-   * for views is very low, however. TODO: Convert this into a safer patch.
+   * <p>This method will perform a read-modify-write. This can cause concurrent writes to conflict,
+   * and overwrite one another. The expected frequency of writes for views is very low, however.
+   * TODO: Convert this into a safer patch.
* * @param viewUrn the urn of the View * @param name the name of the View * @param description the description of the View * @param definition the view definition itself * @param authentication the current authentication - * @param currentTimeMs the current time in milliseconds, used for populating the lastUpdatedAt field. + * @param currentTimeMs the current time in milliseconds, used for populating the lastUpdatedAt + * field. */ public void updateView( @Nonnull Urn viewUrn, @@ -126,7 +131,8 @@ public void updateView( DataHubViewInfo existingInfo = getViewInfo(viewUrn, authentication); if (existingInfo == null) { - throw new IllegalArgumentException(String.format("Failed to update View. View with urn %s does not exist.", viewUrn)); + throw new IllegalArgumentException( + String.format("Failed to update View. View with urn %s does not exist.", viewUrn)); } // 2. Apply changes to existing View @@ -140,15 +146,18 @@ public void updateView( existingInfo.setDefinition(definition); } - existingInfo.setLastModified(new AuditStamp() - .setTime(currentTimeMs) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + existingInfo.setLastModified( + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. Write changes to GMS try { this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(viewUrn, Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, existingInfo), - authentication, false); + AspectUtils.buildMetadataChangeProposal( + viewUrn, Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, existingInfo), + authentication, + false); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", viewUrn), e); } @@ -157,17 +166,15 @@ public void updateView( /** * Deletes an existing DataHub View with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + * <p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the View does not exist, no exception will be thrown. + * <p>If the View does not exist, no exception will be thrown. * * @param viewUrn the urn of the View * @param authentication the current authentication */ - public void deleteView( - @Nonnull Urn viewUrn, - @Nonnull Authentication authentication) { + public void deleteView(@Nonnull Urn viewUrn, @Nonnull Authentication authentication) { try { this.entityClient.deleteEntity( Objects.requireNonNull(viewUrn, "viewUrn must not be null"), @@ -178,37 +185,39 @@ public void deleteView( } /** - * Returns an instance of {@link DataHubViewInfo} for the specified View urn, - * or null if one cannot be found. + * Returns an instance of {@link DataHubViewInfo} for the specified View urn, or null if one + * cannot be found. * * @param viewUrn the urn of the View * @param authentication the authentication to use - * * @return an instance of {@link DataHubViewInfo} for the View, null if it does not exist. 
*/ @Nullable - public DataHubViewInfo getViewInfo(@Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { + public DataHubViewInfo getViewInfo( + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getViewEntityResponse(viewUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME)) { - return new DataHubViewInfo(response.getAspects().get(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME)) { + return new DataHubViewInfo( + response.getAspects().get(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link EntityResponse} for the specified View urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified View urn, or null if one cannot + * be found. * * @param viewUrn the urn of the View * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the View, null if it does not exist. */ @Nullable - public EntityResponse getViewEntityResponse(@Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { + public EntityResponse getViewEntityResponse( + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -216,8 +225,7 @@ public EntityResponse getViewEntityResponse(@Nonnull final Urn viewUrn, @Nonnull Constants.DATAHUB_VIEW_ENTITY_NAME, viewUrn, ImmutableSet.of(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME), - authentication - ); + authentication); } catch (Exception e) { throw new RuntimeException(String.format("Failed to retrieve View with urn %s", viewUrn), e); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java index 928c70a7b3de1..c618db801d9d6 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java @@ -11,154 +11,169 @@ import com.linkedin.metadata.search.LineageSearchEntityArray; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.ScrollResult; -import lombok.extern.slf4j.Slf4j; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import java.util.Objects; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class ValidationUtils { - - public static SearchResult validateSearchResult(final SearchResult searchResult, - @Nonnull final EntityService entityService) { + public static SearchResult validateSearchResult( + final SearchResult searchResult, @Nonnull final EntityService entityService) { if (searchResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - SearchResult validatedSearchResult = new SearchResult().setFrom(searchResult.getFrom()) - 
.setMetadata(searchResult.getMetadata()) - .setPageSize(searchResult.getPageSize()) - .setNumEntities(searchResult.getNumEntities()); - - SearchEntityArray validatedEntities = searchResult.getEntities() - .stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) - .collect(Collectors.toCollection(SearchEntityArray::new)); + SearchResult validatedSearchResult = + new SearchResult() + .setFrom(searchResult.getFrom()) + .setMetadata(searchResult.getMetadata()) + .setPageSize(searchResult.getPageSize()) + .setNumEntities(searchResult.getNumEntities()); + + SearchEntityArray validatedEntities = + searchResult.getEntities().stream() + .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + .collect(Collectors.toCollection(SearchEntityArray::new)); validatedSearchResult.setEntities(validatedEntities); return validatedSearchResult; } - public static ScrollResult validateScrollResult(final ScrollResult scrollResult, - @Nonnull final EntityService entityService) { + public static ScrollResult validateScrollResult( + final ScrollResult scrollResult, @Nonnull final EntityService entityService) { if (scrollResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - ScrollResult validatedScrollResult = new ScrollResult() - .setMetadata(scrollResult.getMetadata()) - .setPageSize(scrollResult.getPageSize()) - .setNumEntities(scrollResult.getNumEntities()); + ScrollResult validatedScrollResult = + new ScrollResult() + .setMetadata(scrollResult.getMetadata()) + .setPageSize(scrollResult.getPageSize()) + .setNumEntities(scrollResult.getNumEntities()); if (scrollResult.getScrollId() != null) { validatedScrollResult.setScrollId(scrollResult.getScrollId()); } - SearchEntityArray validatedEntities = scrollResult.getEntities() - .stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) - .collect(Collectors.toCollection(SearchEntityArray::new)); + SearchEntityArray validatedEntities = + scrollResult.getEntities().stream() + .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + .collect(Collectors.toCollection(SearchEntityArray::new)); validatedScrollResult.setEntities(validatedEntities); return validatedScrollResult; } - public static BrowseResult validateBrowseResult(final BrowseResult browseResult, - @Nonnull final EntityService entityService) { + public static BrowseResult validateBrowseResult( + final BrowseResult browseResult, @Nonnull final EntityService entityService) { if (browseResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - BrowseResult validatedBrowseResult = new BrowseResult().setGroups(browseResult.getGroups()) - .setMetadata(browseResult.getMetadata()) - .setFrom(browseResult.getFrom()) - .setPageSize(browseResult.getPageSize()) - .setNumGroups(browseResult.getNumGroups()) - .setNumEntities(browseResult.getNumEntities()) - .setNumElements(browseResult.getNumElements()); - - BrowseResultEntityArray validatedEntities = browseResult.getEntities() - .stream() - .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) - .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + BrowseResult validatedBrowseResult = + new BrowseResult() + .setGroups(browseResult.getGroups()) + .setMetadata(browseResult.getMetadata()) + .setFrom(browseResult.getFrom()) + .setPageSize(browseResult.getPageSize()) + .setNumGroups(browseResult.getNumGroups()) + 
.setNumEntities(browseResult.getNumEntities()) + .setNumElements(browseResult.getNumElements()); + + BrowseResultEntityArray validatedEntities = + browseResult.getEntities().stream() + .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) + .collect(Collectors.toCollection(BrowseResultEntityArray::new)); validatedBrowseResult.setEntities(validatedEntities); - return validatedBrowseResult; } - public static ListResult validateListResult(final ListResult listResult, @Nonnull final EntityService entityService) { + public static ListResult validateListResult( + final ListResult listResult, @Nonnull final EntityService entityService) { if (listResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - ListResult validatedListResult = new ListResult().setStart(listResult.getStart()) - .setCount(listResult.getCount()) - .setTotal(listResult.getTotal()); + ListResult validatedListResult = + new ListResult() + .setStart(listResult.getStart()) + .setCount(listResult.getCount()) + .setTotal(listResult.getTotal()); UrnArray validatedEntities = - listResult.getEntities().stream().filter(entityService::exists).collect(Collectors.toCollection(UrnArray::new)); + listResult.getEntities().stream() + .filter(entityService::exists) + .collect(Collectors.toCollection(UrnArray::new)); validatedListResult.setEntities(validatedEntities); return validatedListResult; } - public static LineageSearchResult validateLineageSearchResult(final LineageSearchResult lineageSearchResult, - @Nonnull final EntityService entityService) { + public static LineageSearchResult validateLineageSearchResult( + final LineageSearchResult lineageSearchResult, @Nonnull final EntityService entityService) { if (lineageSearchResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); LineageSearchResult validatedLineageSearchResult = - new LineageSearchResult().setMetadata(lineageSearchResult.getMetadata()) + new LineageSearchResult() + .setMetadata(lineageSearchResult.getMetadata()) .setFrom(lineageSearchResult.getFrom()) .setPageSize(lineageSearchResult.getPageSize()) .setNumEntities(lineageSearchResult.getNumEntities()); - LineageSearchEntityArray validatedEntities = lineageSearchResult.getEntities() - .stream() - .filter(entity -> entityService.exists(entity.getEntity())) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + LineageSearchEntityArray validatedEntities = + lineageSearchResult.getEntities().stream() + .filter(entity -> entityService.exists(entity.getEntity())) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); validatedLineageSearchResult.setEntities(validatedEntities); return validatedLineageSearchResult; } - public static EntityLineageResult validateEntityLineageResult(@Nullable final EntityLineageResult entityLineageResult, + public static EntityLineageResult validateEntityLineageResult( + @Nullable final EntityLineageResult entityLineageResult, @Nonnull final EntityService entityService) { if (entityLineageResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - final EntityLineageResult validatedEntityLineageResult = new EntityLineageResult() - .setStart(entityLineageResult.getStart()) - .setCount(entityLineageResult.getCount()) - .setTotal(entityLineageResult.getTotal()); + final EntityLineageResult validatedEntityLineageResult = + new EntityLineageResult() + .setStart(entityLineageResult.getStart()) + 
.setCount(entityLineageResult.getCount()) + .setTotal(entityLineageResult.getTotal()); - final LineageRelationshipArray validatedRelationships = entityLineageResult.getRelationships().stream() - .filter(relationship -> entityService.exists(relationship.getEntity())) - .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) - .collect(Collectors.toCollection(LineageRelationshipArray::new)); + final LineageRelationshipArray validatedRelationships = + entityLineageResult.getRelationships().stream() + .filter(relationship -> entityService.exists(relationship.getEntity())) + .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) + .collect(Collectors.toCollection(LineageRelationshipArray::new)); validatedEntityLineageResult.setFiltered( - (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null ? entityLineageResult.getFiltered() : 0) - + entityLineageResult.getRelationships().size() - validatedRelationships.size()); + (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null + ? entityLineageResult.getFiltered() + : 0) + + entityLineageResult.getRelationships().size() + - validatedRelationships.size()); validatedEntityLineageResult.setRelationships(validatedRelationships); return validatedEntityLineageResult; } - public static LineageScrollResult validateLineageScrollResult(final LineageScrollResult lineageScrollResult, - @Nonnull final EntityService entityService) { + public static LineageScrollResult validateLineageScrollResult( + final LineageScrollResult lineageScrollResult, @Nonnull final EntityService entityService) { if (lineageScrollResult == null) { return null; } @@ -173,15 +188,14 @@ public static LineageScrollResult validateLineageScrollResult(final LineageScrol validatedLineageScrollResult.setScrollId(lineageScrollResult.getScrollId()); } - LineageSearchEntityArray validatedEntities = lineageScrollResult.getEntities() - .stream() - .filter(entity -> entityService.exists(entity.getEntity())) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + LineageSearchEntityArray validatedEntities = + lineageScrollResult.getEntities().stream() + .filter(entity -> entityService.exists(entity.getEntity())) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); validatedLineageScrollResult.setEntities(validatedEntities); return validatedLineageScrollResult; } - private ValidationUtils() { - } + private ValidationUtils() {} } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java index ea59885e8b6d5..f06671ac3c314 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java @@ -10,7 +10,6 @@ import javax.annotation.Nullable; import org.opensearch.client.tasks.GetTaskResponse; - public interface SystemMetadataService { /** * Deletes a specific aspect from the system metadata service. 
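[Editor's aside on the ValidationUtils hunks above: the filtered-count bookkeeping in validateEntityLineageResult is easy to lose in diff form. The validated result carries forward any previously recorded filtered count and adds the relationships dropped because their entities no longer exist or are soft-deleted. A minimal standalone sketch of that arithmetic follows; the class name and the concrete counts are invented for illustration and are not part of this patch.]

// Illustrative sketch only; mirrors the bookkeeping in validateEntityLineageResult.
public class FilteredCountSketch {
  public static void main(String[] args) {
    int previouslyFiltered = 3; // prior getFiltered() value, treated as 0 when unset
    int returnedRelationships = 10; // relationships in the raw lineage result
    int survivingRelationships = 8; // entities that exist and are not soft-deleted

    // New filtered count = carried-forward count + relationships dropped by this pass.
    int filtered = previouslyFiltered + returnedRelationships - survivingRelationships;
    System.out.println(filtered); // 5
  }
}

[Carrying the prior count forward means repeated validation passes accumulate the total number of hidden edges, rather than reporting only the edges dropped by the most recent pass.]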
@@ -32,11 +31,14 @@ public interface SystemMetadataService {
 
   List<AspectRowSummary> findByUrn(String urn, boolean includeSoftDeleted, int from, int size);
 
-  List<AspectRowSummary> findByParams(Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, int size);
+  List<AspectRowSummary> findByParams(
+      Map<String, String> systemMetaParams, boolean includeSoftDeleted, int from, int size);
 
-  List<AspectRowSummary> findByRegistry(String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size);
+  List<AspectRowSummary> findByRegistry(
+      String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size);
 
-  List<IngestionRunSummary> listRuns(Integer pageOffset, Integer pageSize, boolean includeSoftDeleted);
+  List<IngestionRunSummary> listRuns(
+      Integer pageOffset, Integer pageSize, boolean includeSoftDeleted);
 
   void configure();
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java
index ce9134896779a..1f794157b9cb9 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java
@@ -3,19 +3,15 @@
 import lombok.Builder;
 import lombok.Getter;
 
-
 @Builder
 public class SemanticVersion {
-  @Getter
-  private int majorVersion;
-  @Getter
-  private int minorVersion;
-  @Getter
-  private int patchVersion;
-  @Getter
-  private String qualifier;
+  @Getter private int majorVersion;
+  @Getter private int minorVersion;
+  @Getter private int patchVersion;
+  @Getter private String qualifier;
 
   public String toString() {
-    return String.format(String.format("%d.%d.%d-%s", majorVersion, minorVersion, patchVersion, qualifier));
+    return String.format("%d.%d.%d-%s", majorVersion, minorVersion, patchVersion, qualifier);
   }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java
index ab0848c640e2a..949572359d754 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java
@@ -8,14 +8,15 @@
 import java.util.Set;
 import javax.annotation.Nonnull;
 
-
 public interface TimelineService {
 
-  List<ChangeTransaction> getTimeline(@Nonnull final Urn urn,
+  List<ChangeTransaction> getTimeline(
+      @Nonnull final Urn urn,
       @Nonnull Set<ChangeCategory> elements,
       long startMillis,
       long endMillis,
       String startVersionStamp,
       String endVersionStamp,
-      boolean rawDiffRequested) throws JsonProcessingException;
+      boolean rawDiffRequested)
+      throws JsonProcessingException;
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java
index 72218c37fe5ce..141a963c3e014 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java
@@ -5,17 +5,16 @@
 import java.util.List;
 import java.util.Map;
 
-
 public enum ChangeCategory {
-  //description, institutionalmemory, properties docs, field level docs/description etc.
+  // description, institutionalmemory, properties docs, field level docs/description etc.
   DOCUMENTATION,
-  //(field or top level) add term, remove term, etc.
+  // (field or top level) add term, remove term, etc.
   GLOSSARY_TERM,
-  //add new owner, remove owner, change ownership type etc.
+  // add new owner, remove owner, change ownership type etc.
   OWNER,
-  //new field, remove field, field type change,
+  // new field, remove field, field type change,
   TECHNICAL_SCHEMA,
-  //(field or top level) add tag, remove tag,
+  // (field or top level) add tag, remove tag,
   TAG,
   // Update the domain for an entity
   DOMAIN,
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java
index dcd5f9d7dc2da..372e855841a36 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java
@@ -7,52 +7,36 @@
 import lombok.Value;
 import lombok.experimental.NonFinal;
 
-
-/**
- * An event representing a high-level, semantic change to a DataHub entity.
- */
+/** An event representing a high-level, semantic change to a DataHub entity. */
 @Value
 @Builder
 @NonFinal
 @AllArgsConstructor
 public class ChangeEvent {
-  /**
-   * The urn of the entity being changed.
-   */
+  /** The urn of the entity being changed. */
   String entityUrn;
-  /**
-   * The category of the change.
-   */
+
+  /** The category of the change. */
   ChangeCategory category;
-  /**
-   * The operation of the change.
-   */
+
+  /** The operation of the change. */
   ChangeOperation operation;
-  /**
-   * An optional modifier associated with the change. For example, a tag urn.
-   */
+
+  /** An optional modifier associated with the change. For example, a tag urn. */
   String modifier;
-  /**
-   * Parameters that determined by the combination of category + operation.
-   */
+
+  /** Parameters that are determined by the combination of category + operation. */
   Map<String, Object> parameters;
-  /**
-   * An audit stamp detailing who made the change and when.
-   */
+
+  /** An audit stamp detailing who made the change and when. */
   AuditStamp auditStamp;
-  /**
-   * Optional: Semantic change version.
-   * TODO: Determine if this should be inside this structure.
-   */
+
+  /** Optional: Semantic change version. TODO: Determine if this should be inside this structure. */
   SemanticChangeType semVerChange;
+
   /**
-   * Optional: A human readable description of this change.
-   * TODO: Determine if this should be inside this structure.
+   * Optional: A human readable description of this change. TODO: Determine if this should be inside
+   * this structure.
    */
   String description;
 }
-
-
-
-
-
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java
index c9aafa6a0330d..2321165cca529 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java
@@ -1,40 +1,22 @@
 package com.linkedin.metadata.timeline.data;
 
 public enum ChangeOperation {
-  /**
-   * Something is added to an entity, e.g. tag, glossary term.
-   */
+  /** Something is added to an entity, e.g. tag, glossary term. */
   ADD,
-  /**
-   * An entity is modified. e.g. Domain, description is updated.
-   */
+  /** An entity is modified, e.g. its domain or description is updated. */
   MODIFY,
-  /**
-   * Something is removed from an entity. e.g. tag, glossary term.
-   */
+  /** Something is removed from an entity, e.g. tag, glossary term. */
   REMOVE,
-  /**
-   * Entity is created.
-   */
+  /** Entity is created. */
   CREATE,
-  /**
-   * Entity is hard-deleted.
-   */
+  /** Entity is hard-deleted. */
   HARD_DELETE,
-  /**
-   * Entity is soft-deleted.
-   */
+  /** Entity is soft-deleted. */
   SOFT_DELETE,
-  /**
-   * Entity is reinstated after being soft-deleted.
-   */
+  /** Entity is reinstated after being soft-deleted. */
   REINSTATE,
-  /**
-   * Run has STARTED
-   */
+  /** Run has started. */
   STARTED,
-  /**
-   * Run is completed
-   */
+  /** Run is completed. */
   COMPLETED
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java
index 5037b8dde9a8b..3e963dba0cdb4 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java
@@ -8,7 +8,6 @@
 import lombok.Getter;
 import lombok.Setter;
 
-
 @Getter
 @Builder
 public class ChangeTransaction {
@@ -19,10 +18,11 @@ public class ChangeTransaction {
   String semVer;
   SemanticChangeType semVerChange;
   List<ChangeEvent> changeEvents;
+
   @ArraySchema(schema = @Schema(implementation = PatchOperation.class))
   JsonPatch rawDiff;
-  @Setter
-  String versionStamp;
+
+  @Setter String versionStamp;
 
   public void setSemanticVersion(String semanticVersion) {
     this.semVer = semanticVersion;
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java
index 6b28664bcb0f6..abbbe1af37546 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java
@@ -3,7 +3,6 @@
 import lombok.Builder;
 import lombok.Value;
 
-
 @Value
 @Builder
 public class PatchOperation {
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java
index dfaa74a0656fe..b8ae83e34eacf 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java
@@ -1,5 +1,9 @@
 package com.linkedin.metadata.timeline.data;
 
 public enum SemanticChangeType {
-  NONE, PATCH, MINOR, MAJOR, EXCEPTIONAL
+  NONE,
+  PATCH,
+  MINOR,
+  MAJOR,
+  EXCEPTIONAL
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java
index 8dc1fdcba0cbf..32af2737756a8 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java
@@ -1,4 +1,3 @@
 package com.linkedin.metadata.timeline.data;
 
-public interface SemanticDifference {
-}
+public interface SemanticDifference {}
diff --git 
a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java index 0908d927cd40b..54480bb700398 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java @@ -14,16 +14,14 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface TimeseriesAspectService { - /** - * Configure the Time-Series aspect service one time at boot-up. - */ + /** Configure the Time-Series aspect service one time at boot-up. */ void configure(); /** * Count the number of entries using a filter + * * @param entityName the name of the entity to count entries for * @param aspectName the name of the timeseries aspect to count for that entity * @param filter the filter to apply to the count @@ -32,27 +30,30 @@ public interface TimeseriesAspectService { public long countByFilter( @Nonnull final String entityName, @Nonnull final String aspectName, - @Nullable final Filter filter - ); + @Nullable final Filter filter); /** - * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional filters, sorted by the timestampMillis - * field descending. + * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional + * filters, sorted by the timestampMillis field descending. * - * This method allows you to optionally filter for events that fall into a particular time window based on the timestampMillis - * field of the aspect, or simply retrieve the latest aspects sorted by time. + * <p>This method allows you to optionally filter for events that fall into a particular time + * window based on the timestampMillis field of the aspect, or simply retrieve the latest aspects + * sorted by time. * - * Note that this does not always indicate the event time, and is often used to reflect the reported - * time of a given event. + * <p>Note that this does not always indicate the event time, and is often used to reflect the + * reported time of a given event. * * @param urn the urn of the entity to retrieve aspects for * @param entityName the name of the entity to retrieve aspects for * @param aspectName the name of the timeseries aspect to retrieve for the entity - * @param startTimeMillis the start of a time window in milliseconds, compared against the standard timestampMillis field - * @param endTimeMillis the end of a time window in milliseconds, compared against the standard timestampMillis field + * @param startTimeMillis the start of a time window in milliseconds, compared against the + * standard timestampMillis field + * @param endTimeMillis the end of a time window in milliseconds, compared against the standard + * timestampMillis field * @param limit the maximum number of results to retrieve * @param filter a set of additional secondary filters to apply when finding the aspects - * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, or empty list if none were found. + * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, + * or empty list if none were found. 
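+   *     <p>Illustrative usage only, not part of this patch: the {@code timeseriesAspectService}
+   *     handle and {@code datasetUrn} below are hypothetical, and "datasetProfile" is just an
+   *     example aspect name.
+   *     <pre>{@code
+   *     long endMillis = System.currentTimeMillis();
+   *     long startMillis = endMillis - TimeUnit.DAYS.toMillis(1);
+   *     List<EnvelopedAspect> latest =
+   *         timeseriesAspectService.getAspectValues(
+   *             datasetUrn, "dataset", "datasetProfile", startMillis, endMillis, 10, null);
+   *     }</pre>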
   */
  @Nonnull
  default List<EnvelopedAspect> getAspectValues(
@@ -63,28 +64,34 @@ default List<EnvelopedAspect> getAspectValues(
       @Nullable final Long endTimeMillis,
       @Nullable final Integer limit,
       @Nullable final Filter filter) {
-    return getAspectValues(urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, null);
+    return getAspectValues(
+        urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, null);
   }
 
   /**
-   * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional filters, sorted by the timestampMillis
-   * field descending.
+   * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional
+   * filters, sorted by the timestampMillis field descending.
    *
-   * This method allows you to optionally filter for events that fall into a particular time window based on the timestampMillis
-   * field of the aspect, or simply retrieve the latest aspects sorted by time.
+   * <p>This method allows you to optionally filter for events that fall into a particular time
+   * window based on the timestampMillis field of the aspect, or simply retrieve the latest aspects
+   * sorted by time.
    *
-   * Note that this does not always indicate the event time, and is often used to reflect the reported
-   * time of a given event.
+   * <p>Note that this does not always indicate the event time, and is often used to reflect the
+   * reported time of a given event.
    *
    * @param urn the urn of the entity to retrieve aspects for
    * @param entityName the name of the entity to retrieve aspects for
    * @param aspectName the name of the timeseries aspect to retrieve for the entity
-   * @param startTimeMillis the start of a time window in milliseconds, compared against the standard timestampMillis field
-   * @param endTimeMillis the end of a time window in milliseconds, compared against the standard timestampMillis field
+   * @param startTimeMillis the start of a time window in milliseconds, compared against the
+   *     standard timestampMillis field
+   * @param endTimeMillis the end of a time window in milliseconds, compared against the standard
+   *     timestampMillis field
    * @param limit the maximum number of results to retrieve
    * @param filter a set of additional secondary filters to apply when finding the aspects
-   * @param sort the sort criterion for the result set. If not provided, defaults to sorting by timestampMillis descending.
-   * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, or empty list if none were found.
+   * @param sort the sort criterion for the result set. If not provided, defaults to sorting by
+   *     timestampMillis descending.
+   * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found,
+   *     or empty list if none were found.
    */
   @Nonnull
   List<EnvelopedAspect> getAspectValues(
@@ -98,16 +105,19 @@ List<EnvelopedAspect> getAspectValues(
       @Nullable final SortCriterion sort);
 
   /**
-   * Perform a arbitrary aggregation query over a set of Time-Series aspects.
-   * This is used to answer arbitrary questions about the Time-Series aspects that we have.
+   * Perform an arbitrary aggregation query over a set of Time-Series aspects. This is used to
+   * answer arbitrary questions about the Time-Series aspects that we have.
    *
    * @param entityName the name of the entity associated with the Time-Series aspect.
    * @param aspectName the name of the Time-Series aspect.
-   * @param aggregationSpecs a specification of the types of metric-value aggregations that should be performed
-   * @param filter an optional filter that should be applied prior to performing the requested aggregations.
-   * @param groupingBuckets an optional set of buckets to group the aggregations on the timeline -- For example, by a particular date or
-   * string value.
-   * @return a "table" representation of the results of performing the aggregation, with a row per group.
+   * @param aggregationSpecs a specification of the types of metric-value aggregations that should
+   *     be performed
+   * @param filter an optional filter that should be applied prior to performing the requested
+   *     aggregations.
+   * @param groupingBuckets an optional set of buckets to group the aggregations on the timeline --
+   *     For example, by a particular date or string value.
+   * @return a "table" representation of the results of performing the aggregation, with a row per
+   *     group.
    */
   @Nonnull
   GenericTable getAggregatedStats(
@@ -121,7 +131,7 @@ GenericTable getAggregatedStats(
    * Generic filter based deletion for Time-Series Aspects.
    *
    * @param entityName The name of the entity.
-   * @param aspectName The name of the aspect.
+   * @param aspectName  The name of the aspect.
    * @param filter A filter to be used for deletion of the documents on the index.
    * @return a summary of the aspects which were deleted
@@ -135,7 +145,7 @@ DeleteAspectValuesResult deleteAspectValues(
    * Generic filter based deletion for Time-Series Aspects.
    *
    * @param entityName The name of the entity.
-   * @param aspectName The name of the aspect.
+   * @param aspectName  The name of the aspect.
    * @param filter A filter to be used for deletion of the documents on the index.
    * @param options Options to control delete parameters
    * @return The Job ID of the deletion operation
@@ -149,18 +159,22 @@ String deleteAspectValuesAsync(
 
   /**
    * Reindex the index represented by entityName and aspect name, applying the filter
+   *
    * @param entityName The name of the entity.
-   * @param aspectName The name of the aspect.
+   * @param aspectName  The name of the aspect.
    * @param filter A filter to be used when reindexing
    * @param options Options to control reindex parameters
    * @return The Job ID of the reindex operation
    */
-  String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter,
+  String reindexAsync(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nonnull Filter filter,
       @Nonnull BatchWriteOperationsOptions options);
 
   /**
-   * Rollback the Time-Series aspects associated with a particular runId. This is invoked as a part of an
-   * ingestion rollback process.
+   * Roll back the Time-Series aspects associated with a particular runId. This is invoked as part
+   * of an ingestion rollback process.
    *
    * @param runId The runId that needs to be rolled back.
    * @return a summary of the aspects which were deleted
@@ -169,14 +183,15 @@ DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull final String runId);
 
   /**
-   * Upsert a raw timeseries aspect into a timeseries index. Note that this is a bit of a hack, and leaks
-   * too much implementation detail around Elasticsearch.
+   * Upsert a raw timeseries aspect into a timeseries index. Note that this is a bit of a hack, and
+   * leaks too much implementation detail around Elasticsearch.
    *
-   * TODO: Make this more general purpose.
+   * <p>TODO: Make this more general purpose.
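+   *
+   * <p>Illustrative call only, not part of this patch; the service handle, doc id, and the
+   * already-built {@code document} payload are hypothetical:
+   * <pre>{@code
+   * timeseriesAspectService.upsertDocument("dataset", "datasetProfile", "doc-123", document);
+   * }</pre>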
* * @param entityName the name of the entity * @param aspectName the name of an aspect - * @param docId the doc id for the elasticsearch document - this serves as the primary key for the document. + * @param docId the doc id for the elasticsearch document - this serves as the primary key for the + * document. * @param document the raw document to insert. */ void upsertDocument( diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java index 4338d883ece1d..4c5595d4d1468 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java @@ -32,8 +32,10 @@ public class DomainServiceTest { private static final Urn TEST_DOMAIN_URN_1 = UrnUtils.getUrn("urn:li:domain:test"); private static final Urn TEST_DOMAIN_URN_2 = UrnUtils.getUrn("urn:li:domain:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); @Test private void testSetDomainExistingDomain() throws Exception { @@ -41,64 +43,66 @@ private void testSetDomainExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); Urn newDomainUrn = UrnUtils.getUrn("urn:li:domain:newDomain"); - List<MetadataChangeProposal> events = service.buildSetDomainProposals(newDomainUrn, ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildSetDomainProposals( + newDomainUrn, + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - 
event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); } @Test private void testSetDomainNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); Urn newDomainUrn = UrnUtils.getUrn("urn:li:domain:newDomain"); - List<MetadataChangeProposal> events = service.buildSetDomainProposals(newDomainUrn, ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildSetDomainProposals( + newDomainUrn, + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); } @Test @@ -107,62 +111,62 @@ private void testUnsetDomainExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildUnsetDomainProposals(ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildUnsetDomainProposals( + ImmutableList.of( + new 
ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } @Test private void testUnsetDomainNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildUnsetDomainProposals(ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List<MetadataChangeProposal> events = + service.buildUnsetDomainProposals( + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + 
GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } @Test @@ -171,165 +175,178 @@ private void testAddDomainsExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildAddDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildAddDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains() + .setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains() + .setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); } @Test private void testAddDomainsNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildAddDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildAddDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_1), + ImmutableList.of( 
+ new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); } @Test private void testRemoveDomainsExistingDomain() throws Exception { Domains existingDomains = new Domains(); - existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2))); + existingDomains.setDomains( + new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); 
MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); } @Test private void testRemoveDomainsNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } - private static EntityClient createMockEntityClient(@Nullable Domains existingDomains) throws Exception { + private static EntityClient createMockEntityClient(@Nullable Domains existingDomains) + throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - 
Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(existingDomains != null ? ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingDomains.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingDomains.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + existingDomains != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(existingDomains.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(existingDomains.data())))))) + : Collections.emptyMap()); return mockClient; } diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java index 567a457efcf93..2048548f6cede 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java @@ -33,57 +33,64 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class GlossaryTermServiceTest { private static final Urn TEST_GLOSSARY_TERM_URN_1 = UrnUtils.getUrn("urn:li:glossaryTerm:test"); private static final Urn TEST_GLOSSARY_TERM_URN_2 = UrnUtils.getUrn("urn:li:glossaryTerm:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); - + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + @Test private void testAddGlossaryTermToEntityExistingGlossaryTerm() throws Exception { GlossaryTerms existingGlossaryTerms = new GlossaryTerms(); - existingGlossaryTerms.setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)) - ))); + existingGlossaryTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1))))); EntityClient mockClient = 
createMockGlossaryEntityClient(existingGlossaryTerms); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expected); } @@ -91,37 +98,42 @@ private void testAddGlossaryTermToEntityExistingGlossaryTerm() throws Exception private void testAddGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exception { EntityClient mockClient = createMockGlossaryEntityClient(null); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, 
null)), - mockAuthentication()); - - GlossaryTermAssociationArray expectedTermsArray = new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTermAssociationArray expectedTermsArray = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expectedTermsArray); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expectedTermsArray); } @@ -129,50 +141,72 @@ private void testAddGlossaryTermsToEntityNoExistingGlossaryTerm() throws Excepti private void testAddGlossaryTermToSchemaFieldExistingGlossaryTerm() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + TEST_GLOSSARY_TERM_URN_1))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, 
"myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test @@ -180,90 +214,114 @@ private void testAddGlossaryTermsToSchemaFieldNoExistingGlossaryTerm() throws Ex EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms(new GlossaryTerms())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - 
Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List<MetadataChangeProposal> events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn))) - ); + List<MetadataChangeProposal> events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test private void testRemoveGlossaryTermToEntityExistingGlossaryTerm() throws Exception { GlossaryTerms existingGlossaryTerms = new GlossaryTerms(); - existingGlossaryTerms.setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)) - ))); + 
existingGlossaryTerms.setTerms(
+        new GlossaryTermAssociationArray(
+            ImmutableList.of(
+                new GlossaryTermAssociation()
+                    .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)),
+                new GlossaryTermAssociation()
+                    .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)))));
 
     EntityClient mockClient = createMockGlossaryEntityClient(existingGlossaryTerms);
 
-    final GlossaryTermService service = new GlossaryTermService(
-        mockClient,
-        Mockito.mock(Authentication.class));
-
-    List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals(
-        ImmutableList.of(TEST_GLOSSARY_TERM_URN_1),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        mockAuthentication());
-
-    GlossaryTerms expected = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(
-        ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)))));
+    final GlossaryTermService service =
+        new GlossaryTermService(mockClient, Mockito.mock(Authentication.class));
+
+    List<MetadataChangeProposal> events =
+        service.buildRemoveGlossaryTermsProposals(
+            ImmutableList.of(TEST_GLOSSARY_TERM_URN_1),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            mockAuthentication());
+
+    GlossaryTerms expected =
+        new GlossaryTerms()
+            .setTerms(
+                new GlossaryTermAssociationArray(
+                    ImmutableList.of(
+                        new GlossaryTermAssociation()
+                            .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    RecordTemplate glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        GlossaryTerms.class);
+    RecordTemplate glossaryTermsAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(),
+            event1.getAspect().getContentType(),
+            GlossaryTerms.class);
     Assert.assertEquals(glossaryTermsAspect1, expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    RecordTemplate glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        GlossaryTerms.class);
+    RecordTemplate glossaryTermsAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(),
+            event2.getAspect().getContentType(),
+            GlossaryTerms.class);
     Assert.assertEquals(glossaryTermsAspect2, expected);
   }
 
@@ -271,36 +329,38 @@ private void testRemoveGlossaryTermToEntityExistingGlossaryTerm() throws Excepti
   private void testRemoveGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exception {
     EntityClient mockClient = createMockGlossaryEntityClient(null);
 
-    final GlossaryTermService service = new GlossaryTermService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final GlossaryTermService service =
+        new GlossaryTermService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm");
 
-    List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals(
-        ImmutableList.of(newGlossaryTermUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        mockAuthentication());
+    List<MetadataChangeProposal> events =
+        service.buildRemoveGlossaryTermsProposals(
+            ImmutableList.of(newGlossaryTermUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            mockAuthentication());
 
     GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of());
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        GlossaryTerms.class);
+    GlossaryTerms glossaryTermsAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(),
+            event1.getAspect().getContentType(),
+            GlossaryTerms.class);
     Assert.assertEquals(glossaryTermsAspect1.getTerms(), expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        GlossaryTerms.class);
+    GlossaryTerms glossaryTermsAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(),
+            event2.getAspect().getContentType(),
+            GlossaryTerms.class);
     Assert.assertEquals(glossaryTermsAspect2.getTerms(), expected);
   }
 
@@ -308,51 +368,73 @@ private void testRemoveGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exce
   private void testRemoveGlossaryTermToSchemaFieldExistingGlossaryTerm() throws Exception {
     EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata();
     existingMetadata.setEditableSchemaFieldInfo(
-        new EditableSchemaFieldInfoArray(ImmutableList.of(
-            new EditableSchemaFieldInfo()
-                .setFieldPath("myfield")
-                .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(
-                    ImmutableList.of(
-                        new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)),
-                        new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)))
-                )))
-        ))
-    );
+        new EditableSchemaFieldInfoArray(
+            ImmutableList.of(
+                new EditableSchemaFieldInfo()
+                    .setFieldPath("myfield")
+                    .setGlossaryTerms(
+                        new GlossaryTerms()
+                            .setTerms(
+                                new GlossaryTermAssociationArray(
+                                    ImmutableList.of(
+                                        new GlossaryTermAssociation()
+                                            .setUrn(
+                                                GlossaryTermUrn.createFromUrn(
+                                                    TEST_GLOSSARY_TERM_URN_1)),
+                                        new GlossaryTermAssociation()
+                                            .setUrn(
+                                                GlossaryTermUrn.createFromUrn(
+                                                    TEST_GLOSSARY_TERM_URN_2))))))))));
 
     EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata);
 
-    final GlossaryTermService service = new GlossaryTermService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final GlossaryTermService service =
+        new GlossaryTermService(mockClient, Mockito.mock(Authentication.class));
 
-    List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals(
-        ImmutableList.of(TEST_GLOSSARY_TERM_URN_1),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
-            new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
-        mockAuthentication());
+    List<MetadataChangeProposal> events =
+        service.buildRemoveGlossaryTermsProposals(
+            ImmutableList.of(TEST_GLOSSARY_TERM_URN_1),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
+                new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
+            mockAuthentication());
 
-    GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of(
-        new GlossaryTermAssociation()
-            .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))
-    ));
+    GlossaryTermAssociationArray expected =
+        new GlossaryTermAssociationArray(
+            ImmutableList.of(
+                new GlossaryTermAssociation()
+                    .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected);
+    EditableSchemaMetadata editableSchemaMetadataAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(),
+            event1.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect1
+            .getEditableSchemaFieldInfo()
+            .get(0)
+            .getGlossaryTerms()
+            .getTerms(),
+        expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected);
+    EditableSchemaMetadata editableSchemaMetadataAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(),
+            event2.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect2
+            .getEditableSchemaFieldInfo()
+            .get(0)
+            .getGlossaryTerms()
+            .getTerms(),
+        expected);
   }
 
   @Test
@@ -360,78 +442,99 @@ private void testRemoveGlossaryTermsToSchemaFieldNoExistingGlossaryTerm() throws
     EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata();
     existingMetadata.setEditableSchemaFieldInfo(
-        new EditableSchemaFieldInfoArray(ImmutableList.of(
-            new EditableSchemaFieldInfo()
-                .setFieldPath("myfield")
-                .setGlossaryTerms(new GlossaryTerms())))
-    );
+        new EditableSchemaFieldInfoArray(
+            ImmutableList.of(
+                new EditableSchemaFieldInfo()
+                    .setFieldPath("myfield")
+                    .setGlossaryTerms(new GlossaryTerms()))));
 
     EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata);
 
-    final GlossaryTermService service = new GlossaryTermService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final GlossaryTermService service =
+        new GlossaryTermService(mockClient, Mockito.mock(Authentication.class));
 
-    List<MetadataChangeProposal> events = service.buildRemoveGlossaryTermsProposals(
-        ImmutableList.of(TEST_ENTITY_URN_1),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
-            new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
-        mockAuthentication());
+    List<MetadataChangeProposal> events =
+        service.buildRemoveGlossaryTermsProposals(
+            ImmutableList.of(TEST_ENTITY_URN_1),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
+                new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
+            mockAuthentication());
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), Collections.emptyList());
+    EditableSchemaMetadata editableSchemaMetadataAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(),
+            event1.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect1
+            .getEditableSchemaFieldInfo()
+            .get(0)
+            .getGlossaryTerms()
+            .getTerms(),
+        Collections.emptyList());
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), Collections.emptyList());
-
+    EditableSchemaMetadata editableSchemaMetadataAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(),
+            event2.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect2
+            .getEditableSchemaFieldInfo()
+            .get(0)
+            .getGlossaryTerms()
+            .getTerms(),
+        Collections.emptyList());
   }
 
-  private static EntityClient createMockGlossaryEntityClient(@Nullable GlossaryTerms existingGlossaryTerms) throws Exception {
+  private static EntityClient createMockGlossaryEntityClient(
+      @Nullable GlossaryTerms existingGlossaryTerms) throws Exception {
     return createMockEntityClient(existingGlossaryTerms, Constants.GLOSSARY_TERMS_ASPECT_NAME);
   }
 
-  private static EntityClient createMockSchemaMetadataEntityClient(@Nullable EditableSchemaMetadata existingMetadata) throws Exception {
+  private static EntityClient createMockSchemaMetadataEntityClient(
+      @Nullable EditableSchemaMetadata existingMetadata) throws Exception {
     return createMockEntityClient(existingMetadata, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
   }
 
-  private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception {
+  private static EntityClient createMockEntityClient(
+      @Nullable RecordTemplate aspect, String aspectName) throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)),
-        Mockito.eq(ImmutableSet.of(aspectName)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(aspect != null ? ImmutableMap.of(
-            TEST_ENTITY_URN_1,
-            new EntityResponse()
-                .setUrn(TEST_ENTITY_URN_1)
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    aspectName,
-                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))
-                ))),
-            TEST_ENTITY_URN_2,
-            new EntityResponse()
-                .setUrn(TEST_ENTITY_URN_2)
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    aspectName,
-                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))
-                )))
-        ) : Collections.emptyMap());
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)),
+                Mockito.eq(ImmutableSet.of(aspectName)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            aspect != null
+                ? ImmutableMap.of(
+                    TEST_ENTITY_URN_1,
+                    new EntityResponse()
+                        .setUrn(TEST_ENTITY_URN_1)
+                        .setEntityName(Constants.DATASET_ENTITY_NAME)
+                        .setAspects(
+                            new EnvelopedAspectMap(
+                                ImmutableMap.of(
+                                    aspectName,
+                                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))))),
+                    TEST_ENTITY_URN_2,
+                    new EntityResponse()
+                        .setUrn(TEST_ENTITY_URN_2)
+                        .setEntityName(Constants.DATASET_ENTITY_NAME)
+                        .setAspects(
+                            new EnvelopedAspectMap(
+                                ImmutableMap.of(
+                                    aspectName,
+                                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))))))
+                : Collections.emptyMap());
     return mockClient;
   }
 
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java
index 9df8b9ecf46e8..5888067dbe268 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -46,8 +48,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
 public class LineageServiceTest {
   private static AuditStamp _auditStamp;
   private static EntityClient _mockClient;
@@ -57,18 +57,25 @@ public class LineageServiceTest {
   private static final String SOURCE_FIELD_NAME = "source";
   private static final String UI_SOURCE = "UI";
   private static final String ACTOR_URN = "urn:li:corpuser:test";
-  private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)";
-  private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)";
-  private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)";
-  private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)";
+  private static final String DATASET_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)";
+  private static final String DATASET_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)";
+  private static final String DATASET_URN_3 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)";
+  private static final String DATASET_URN_4 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)";
   private static final String CHART_URN_1 = "urn:li:chart:(looker,baz1)";
   private static final String CHART_URN_2 = "urn:li:chart:(looker,baz2)";
   private static final String CHART_URN_3 = "urn:li:chart:(looker,baz3)";
   private static final String DASHBOARD_URN_1 = "urn:li:dashboard:(airflow,id1)";
   private static final String DASHBOARD_URN_2 = "urn:li:dashboard:(airflow,id2)";
-  private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
-  private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
-  private static final String DATAJOB_URN_3 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test3)";
+  private static final String DATAJOB_URN_1 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
+  private static final String DATAJOB_URN_2 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
+  private static final String DATAJOB_URN_3 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test3)";
   private Urn actorUrn;
   private Urn datasetUrn1;
   private Urn datasetUrn2;
@@ -114,41 +121,41 @@ public void testUpdateDatasetLineage() throws Exception {
     Mockito.when(_mockClient.exists(datasetUrn2, AUTHENTICATION)).thenReturn(true);
     Mockito.when(_mockClient.exists(datasetUrn3, AUTHENTICATION)).thenReturn(true);
 
-    UpstreamLineage upstreamLineage = createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_3, DATASET_URN_4)));
+    UpstreamLineage upstreamLineage =
+        createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_3, DATASET_URN_4)));
 
-    Mockito.when(_mockClient.getV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(datasetUrn1),
-        Mockito.eq(ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)),
-        Mockito.eq(AUTHENTICATION)
-    ))
+    Mockito.when(
+            _mockClient.getV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(datasetUrn1),
+                Mockito.eq(ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)),
+                Mockito.eq(AUTHENTICATION)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(datasetUrn1)
                 .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.UPSTREAM_LINEAGE_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(upstreamLineage.data()))
-                )))
-        );
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            Constants.UPSTREAM_LINEAGE_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(upstreamLineage.data()))))));
 
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn3);
-    _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
+    _lineageService.updateDatasetLineage(
+        datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
 
     // upstreamLineage without dataset3, keep dataset4, add dataset2
-    final UpstreamLineage updatedDataset1UpstreamLineage = createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_4, DATASET_URN_2)));
+    final UpstreamLineage updatedDataset1UpstreamLineage =
+        createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_4, DATASET_URN_2)));
 
     final MetadataChangeProposal proposal1 = new MetadataChangeProposal();
     proposal1.setEntityUrn(UrnUtils.getUrn(DATASET_URN_1));
     proposal1.setEntityType(Constants.DATASET_ENTITY_NAME);
     proposal1.setAspectName(Constants.UPSTREAM_LINEAGE_ASPECT_NAME);
     proposal1.setAspect(GenericRecordUtils.serializeAspect(updatedDataset1UpstreamLineage));
     proposal1.setChangeType(ChangeType.UPSERT);
-    Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal1),
-        Mockito.eq(AUTHENTICATION),
-        Mockito.eq(false)
-    );
+    Mockito.verify(_mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal1), Mockito.eq(AUTHENTICATION), Mockito.eq(false));
   }
 
   @Test
@@ -157,8 +164,11 @@ public void testFailUpdateWithMissingDataset() throws Exception {
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn3);
 
-    assertThrows(IllegalArgumentException.class, () ->
-        _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            _lineageService.updateDatasetLineage(
+                datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }
 
   @Test
@@ -167,11 +177,15 @@ public void testFailUpdateDatasetWithInvalidEdge() throws Exception {
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(chartUrn1);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
 
-    assertThrows(RuntimeException.class, () ->
-        _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            _lineageService.updateDatasetLineage(
+                datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }
 
-  // Adds upstream for chart1 to dataset3 and removes edge to dataset1 while keeping edge to dataset2
+  // Adds upstream for chart1 to dataset3 and removes edge to dataset1 while keeping edge to
+  // dataset2
   @Test
   public void testUpdateChartLineage() throws Exception {
     Mockito.when(_mockClient.exists(chartUrn1, AUTHENTICATION)).thenReturn(true);
@@ -179,30 +193,37 @@ public void testUpdateChartLineage() throws Exception {
     Mockito.when(_mockClient.exists(datasetUrn2, AUTHENTICATION)).thenReturn(true);
     Mockito.when(_mockClient.exists(datasetUrn3, AUTHENTICATION)).thenReturn(true);
 
-    ChartInfo chartInfo = createChartInfo(chartUrn1, Arrays.asList(datasetUrn1, datasetUrn2), Collections.emptyList());
+    ChartInfo chartInfo =
+        createChartInfo(
+            chartUrn1, Arrays.asList(datasetUrn1, datasetUrn2), Collections.emptyList());
 
-    Mockito.when(_mockClient.getV2(
-        Mockito.eq(Constants.CHART_ENTITY_NAME),
-        Mockito.eq(chartUrn1),
-        Mockito.eq(ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME)),
-        Mockito.eq(AUTHENTICATION)
-    ))
+    Mockito.when(
+            _mockClient.getV2(
+                Mockito.eq(Constants.CHART_ENTITY_NAME),
+                Mockito.eq(chartUrn1),
+                Mockito.eq(ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME)),
+                Mockito.eq(AUTHENTICATION)))
         .thenReturn(
            new EntityResponse()
                .setUrn(chartUrn1)
                .setEntityName(Constants.CHART_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.CHART_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(chartInfo.data()))
-                )))
-        );
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            Constants.CHART_INFO_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(chartInfo.data()))))));
 
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn3);
     final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn2);
-    _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
+    _lineageService.updateChartLineage(
+        chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
 
     // chartInfo with dataset1 in inputs and dataset3 in inputEdges
-    ChartInfo updatedChartInfo = createChartInfo(chartUrn1, Collections.singletonList(datasetUrn1), Collections.singletonList(datasetUrn3));
+    ChartInfo updatedChartInfo =
+        createChartInfo(
+            chartUrn1,
+            Collections.singletonList(datasetUrn1),
+            Collections.singletonList(datasetUrn3));
 
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(chartUrn1);
@@ -210,11 +231,8 @@ public void testUpdateChartLineage() throws Exception {
     proposal.setAspectName(Constants.CHART_INFO_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(updatedChartInfo));
     proposal.setChangeType(ChangeType.UPSERT);
-    Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.eq(AUTHENTICATION),
-        Mockito.eq(false)
-    );
+    Mockito.verify(_mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false));
   }
 
   @Test
@@ -223,8 +241,11 @@ public void testFailUpdateChartWithMissingDataset() throws Exception {
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
 
-    assertThrows(IllegalArgumentException.class, () ->
-        _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            _lineageService.updateChartLineage(
+                chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }
 
   @Test
@@ -234,8 +255,11 @@ public void testFailUpdateChartWithInvalidEdge() throws Exception {
     // charts can't have charts upstream of them
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(chartUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
-    assertThrows(RuntimeException.class, () ->
-        _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            _lineageService.updateChartLineage(
+                chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }
 
   // Adds upstreams for dashboard to dataset2 and chart2 and removes edge to dataset1 and chart1
@@ -248,42 +272,44 @@ public void testUpdateDashboardLineage() throws Exception {
     Mockito.when(_mockClient.exists(chartUrn2, AUTHENTICATION)).thenReturn(true);
 
     // existing dashboardInfo has upstreams to dataset1, dataset3, chart1, chart3
-    DashboardInfo dashboardInfo = createDashboardInfo(
-        dashboardUrn1,
-        Arrays.asList(chartUrn1, chartUrn3),
-        Collections.emptyList(),
-        Arrays.asList(datasetUrn1, datasetUrn3),
-        Collections.emptyList()
-    );
-
-    Mockito.when(_mockClient.getV2(
-        Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
-        Mockito.eq(dashboardUrn1),
-        Mockito.eq(ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME)),
-        Mockito.eq(AUTHENTICATION)
-    ))
+    DashboardInfo dashboardInfo =
+        createDashboardInfo(
+            dashboardUrn1,
+            Arrays.asList(chartUrn1, chartUrn3),
+            Collections.emptyList(),
+            Arrays.asList(datasetUrn1, datasetUrn3),
+            Collections.emptyList());
+
+    Mockito.when(
+            _mockClient.getV2(
+                Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
+                Mockito.eq(dashboardUrn1),
+                Mockito.eq(ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME)),
+                Mockito.eq(AUTHENTICATION)))
         .thenReturn(
            new EntityResponse()
                .setUrn(dashboardUrn1)
                .setEntityName(Constants.DASHBOARD_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.DASHBOARD_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(dashboardInfo.data()))
-                )))
-        );
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            Constants.DASHBOARD_INFO_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(dashboardInfo.data()))))));
 
     final List<Urn> upstreamUrnsToAdd = Arrays.asList(datasetUrn2, chartUrn2);
     final List<Urn> upstreamUrnsToRemove = Arrays.asList(datasetUrn1, chartUrn1);
-    _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
-
-    // dashboardInfo with chartUrn3 in charts, chartUrn2 in chartEdges, datasetUrn3 in datasets, datasetUrn2 in datasetEdges
-    DashboardInfo updatedDashboardInfo = createDashboardInfo(
-        dashboardUrn1,
-        Collections.singletonList(chartUrn3),
-        Collections.singletonList(chartUrn2),
-        Arrays.asList(datasetUrn3),
-        Collections.singletonList(datasetUrn2)
-    );
+    _lineageService.updateDashboardLineage(
+        dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
+
+    // dashboardInfo with chartUrn3 in charts, chartUrn2 in chartEdges, datasetUrn3 in datasets,
+    // datasetUrn2 in datasetEdges
+    DashboardInfo updatedDashboardInfo =
+        createDashboardInfo(
+            dashboardUrn1,
+            Collections.singletonList(chartUrn3),
+            Collections.singletonList(chartUrn2),
+            Arrays.asList(datasetUrn3),
+            Collections.singletonList(datasetUrn2));
 
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(dashboardUrn1);
@@ -291,11 +317,8 @@ public void testUpdateDashboardLineage() throws Exception {
     proposal.setAspectName(Constants.DASHBOARD_INFO_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(updatedDashboardInfo));
     proposal.setChangeType(ChangeType.UPSERT);
-    Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.eq(AUTHENTICATION),
-        Mockito.eq(false)
-    );
+    Mockito.verify(_mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false));
   }
 
   @Test
@@ -304,8 +327,11 @@ public void testFailUpdateDashboardWithMissingDataset() throws Exception {
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
 
-    assertThrows(IllegalArgumentException.class, () ->
-        _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            _lineageService.updateDashboardLineage(
+                dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }
 
   @Test
@@ -315,11 +341,15 @@ public void testFailUpdateDashboardWithInvalidEdge() throws Exception {
     // dashboards can't have dashboards upstream of them
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(dashboardUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
-    assertThrows(RuntimeException.class, () ->
-        _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            _lineageService.updateDashboardLineage(
+                dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }
 
-  // Adds upstream datajob3, upstream dataset3, downstream dataset4, removes upstream datajob2, upstream dataset1, downstream dataset1
+  // Adds upstream datajob3, upstream dataset3, downstream dataset4, removes upstream datajob2,
+  // upstream dataset1, downstream dataset1
   // has existing upstream datajob2, upstream dataset1 and dataset2, downstream dataset4
   // Should result in upstream datajob3, upstream dataset3 and dataset2, downstream dataset5
   @Test
@@ -332,66 +362,71 @@ public void testUpdateDataJobLineage() throws Exception {
     Mockito.when(_mockClient.exists(datasetUrn4, AUTHENTICATION)).thenReturn(true);
     Mockito.when(_mockClient.exists(datasetUrn1, AUTHENTICATION)).thenReturn(true);
 
-    DataJobInputOutput firstDataJobInputOutput = createDataJobInputOutput(
-        datajobUrn1,
-        Arrays.asList(datasetUrn1, datasetUrn2),
-        Collections.emptyList(),
-        Collections.singletonList(datajobUrn2),
-        Collections.emptyList(),
-        Collections.singletonList(datasetUrn1),
-        Collections.emptyList()
-    );
-
-    DataJobInputOutput secondDataJobInputOutput = createDataJobInputOutput(
-        datajobUrn1,
-        Arrays.asList(datasetUrn1),
-        Arrays.asList(datasetUrn3),
-        Collections.emptyList(),
-        Arrays.asList(datajobUrn3),
-        Arrays.asList(datasetUrn1),
-        Collections.emptyList()
-    );
-
-    Mockito.when(_mockClient.getV2(
-        Mockito.eq(Constants.DATA_JOB_ENTITY_NAME),
-        Mockito.eq(datajobUrn1),
-        Mockito.eq(ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)),
-        Mockito.eq(AUTHENTICATION)
-    ))
+    DataJobInputOutput firstDataJobInputOutput =
+        createDataJobInputOutput(
+            datajobUrn1,
+            Arrays.asList(datasetUrn1, datasetUrn2),
+            Collections.emptyList(),
+            Collections.singletonList(datajobUrn2),
+            Collections.emptyList(),
+            Collections.singletonList(datasetUrn1),
+            Collections.emptyList());
+
+    DataJobInputOutput secondDataJobInputOutput =
+        createDataJobInputOutput(
+            datajobUrn1,
+            Arrays.asList(datasetUrn1),
+            Arrays.asList(datasetUrn3),
+            Collections.emptyList(),
+            Arrays.asList(datajobUrn3),
+            Arrays.asList(datasetUrn1),
+            Collections.emptyList());
+
+    Mockito.when(
+            _mockClient.getV2(
+                Mockito.eq(Constants.DATA_JOB_ENTITY_NAME),
+                Mockito.eq(datajobUrn1),
+                Mockito.eq(ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)),
+                Mockito.eq(AUTHENTICATION)))
         .thenReturn(
            new EntityResponse()
                .setUrn(datajobUrn1)
                .setEntityName(Constants.DATA_JOB_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(firstDataJobInputOutput.data()))
-                ))),
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME,
+                            new EnvelopedAspect()
+                                .setValue(new Aspect(firstDataJobInputOutput.data()))))),
            new EntityResponse()
                .setUrn(datajobUrn1)
                .setEntityName(Constants.DATA_JOB_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(secondDataJobInputOutput.data()))
-                )))
-        );
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME,
+                            new EnvelopedAspect()
+                                .setValue(new Aspect(secondDataJobInputOutput.data()))))));
 
     final List<Urn> upstreamUrnsToAdd = Arrays.asList(datajobUrn3, datasetUrn3);
     final List<Urn> upstreamUrnsToRemove = Arrays.asList(datajobUrn2, datasetUrn2);
-    _lineageService.updateDataJobUpstreamLineage(datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
+    _lineageService.updateDataJobUpstreamLineage(
+        datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
 
     final List<Urn> downstreamUrnsToAdd = Arrays.asList(datasetUrn4);
     final List<Urn> downstreamUrnsToRemove = Arrays.asList(datasetUrn1);
-    _lineageService.updateDataJobDownstreamLineage(datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION);
-
-    DataJobInputOutput updatedDataJobInputOutput = createDataJobInputOutput(
-        datajobUrn1,
-        Arrays.asList(datasetUrn1),
-        Arrays.asList(datasetUrn3),
-        Collections.emptyList(),
-        Arrays.asList(datajobUrn3),
-        Collections.emptyList(),
-        Collections.singletonList(datasetUrn4)
-    );
+    _lineageService.updateDataJobDownstreamLineage(
+        datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION);
+
+    DataJobInputOutput updatedDataJobInputOutput =
+        createDataJobInputOutput(
+            datajobUrn1,
+            Arrays.asList(datasetUrn1),
+            Arrays.asList(datasetUrn3),
+            Collections.emptyList(),
+            Arrays.asList(datajobUrn3),
+            Collections.emptyList(),
+            Collections.singletonList(datasetUrn4));
 
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(datajobUrn1);
@@ -399,11 +434,8 @@ public void testUpdateDataJobLineage() throws Exception {
     proposal.setAspectName(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(updatedDataJobInputOutput));
     proposal.setChangeType(ChangeType.UPSERT);
-    Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.eq(AUTHENTICATION),
-        Mockito.eq(false)
-    );
+    Mockito.verify(_mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false));
   }
 
   @Test
@@ -412,8 +444,11 @@ public void testFailUpdateUpstreamDataJobWithMissingUrnToAdd() throws Exception
     final List<Urn> upstreamUrnsToAdd = Arrays.asList(datajobUrn3);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
 
-    assertThrows(IllegalArgumentException.class, () ->
-        _lineageService.updateDataJobUpstreamLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            _lineageService.updateDataJobUpstreamLineage(
+                dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
  }
 
   @Test
@@ -423,8 +458,11 @@ public void testFailUpdateUpstreamDataJobWithInvalidEdge() throws Exception {
     // dataJobs can't have dashboards upstream of them
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(dashboardUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
-    assertThrows(RuntimeException.class, () ->
-        _lineageService.updateDataJobUpstreamLineage(datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            _lineageService.updateDataJobUpstreamLineage(
+                datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }
 
   @Test
@@ -433,8 +471,15 @@ public void testFailUpdateDownstreamDataJobWithMissingUrnToAdd() throws Exceptio
     final List<Urn> downstreamUrnsToAdd = Arrays.asList(datasetUrn1);
     final List<Urn> downstreamUrnsToRemove = Collections.emptyList();
 
-    assertThrows(IllegalArgumentException.class, () ->
-        _lineageService.updateDataJobDownstreamLineage(dashboardUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            _lineageService.updateDataJobDownstreamLineage(
+                dashboardUrn1,
+                downstreamUrnsToAdd,
+                downstreamUrnsToRemove,
+                actorUrn,
+                AUTHENTICATION));
   }
 
   @Test
@@ -444,8 +489,15 @@ public void testFailUpdateDownstreamDataJobWithInvalidEdge() throws Exception {
     // dataJobs can't have dashboards downstream of them
     final List<Urn> downstreamUrnsToAdd = Collections.singletonList(dashboardUrn2);
     final List<Urn> downstreamUrnsToRemove = Collections.emptyList();
-    assertThrows(RuntimeException.class, () ->
-        _lineageService.updateDataJobUpstreamLineage(datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            _lineageService.updateDataJobUpstreamLineage(
+                datajobUrn1,
+                downstreamUrnsToAdd,
+                downstreamUrnsToRemove,
+                actorUrn,
+                AUTHENTICATION));
   }
 
   private UpstreamLineage createUpstreamLineage(List<String> upstreamUrns) throws Exception {
@@ -466,7 +518,8 @@ private UpstreamLineage createUpstreamLineage(List<String> upstreamUrns) throws
     return upstreamLineage;
   }
 
-  private ChartInfo createChartInfo(Urn entityUrn, List<Urn> inputsToAdd, List<Urn> inputEdgesToAdd) throws Exception {
+  private ChartInfo createChartInfo(Urn entityUrn, List<Urn> inputsToAdd, List<Urn> inputEdgesToAdd)
+      throws Exception {
     ChartInfo chartInfo = new ChartInfo();
     ChartDataSourceTypeArray inputs = new ChartDataSourceTypeArray();
     for (Urn input : inputsToAdd) {
@@ -489,8 +542,8 @@ private DashboardInfo createDashboardInfo(
       List<Urn> chartsToAdd,
       List<Urn> chartEdgesToAdd,
       List<Urn> datasetsToAdd,
-      List<Urn> datasetEdgesToAdd
-  ) throws Exception {
+      List<Urn> datasetEdgesToAdd)
+      throws Exception {
     final DashboardInfo dashboardInfo = new DashboardInfo();
 
     final ChartUrnArray charts = new ChartUrnArray();
@@ -525,8 +578,8 @@ private DataJobInputOutput createDataJobInputOutput(
       List<Urn> inputDatajobsToAdd,
      List<Urn> inputDatajobEdgesToAdd,
      List<Urn> outputDatasetsToAdd,
-      List<Urn> outputDatasetEdgesToAdd
-  ) throws Exception {
+      List<Urn> outputDatasetEdgesToAdd)
+      throws Exception {
     final DataJobInputOutput dataJobInputOutput = new DataJobInputOutput();
 
     final DatasetUrnArray inputDatasets = new DatasetUrnArray();
@@ -571,8 +624,7 @@ private DataJobInputOutput createDataJobInputOutput(
   private void addNewEdge(
       @Nonnull final Urn upstreamUrn,
       @Nonnull final Urn downstreamUrn,
-      @Nonnull final EdgeArray edgeArray
-  ) {
+      @Nonnull final EdgeArray edgeArray) {
     final Edge newEdge = new Edge();
     newEdge.setDestinationUrn(upstreamUrn);
     newEdge.setSourceUrn(downstreamUrn);
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java
index c23a151e52734..fde1c32d53a92 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.service.OwnerService.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -29,61 +31,59 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.service.OwnerService.*;
-
-
 public class OwnerServiceTest {
 
   private static final Urn TEST_OWNER_URN_1 = UrnUtils.getUrn("urn:li:corpuser:test");
   private static final Urn TEST_OWNER_URN_2 = UrnUtils.getUrn("urn:li:corpuser:test2");
 
-  private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)");
-  private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)");
+  private static final Urn TEST_ENTITY_URN_1 =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)");
+  private static final Urn TEST_ENTITY_URN_2 =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)");
 
   @Test
   private void testAddOwnersExistingOwner() throws Exception {
     Ownership existingOwnership = new Ownership();
-    existingOwnership.setOwners(new OwnerArray(ImmutableList.of(
-        new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE)
-    )));
+    existingOwnership.setOwners(
+        new OwnerArray(
+            ImmutableList.of(new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE))));
     EntityClient mockClient = createMockOwnersClient(existingOwnership);
 
-    final OwnerService service = new OwnerService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newOwnerUrn = UrnUtils.getUrn("urn:li:corpuser:newTag");
 
-    List<MetadataChangeProposal> events = service.buildAddOwnersProposals(
-        ImmutableList.of(newOwnerUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        OwnershipType.NONE,
-        mockAuthentication());
-
-    OwnerArray expected = new OwnerArray(
-        ImmutableList.of(
-            new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE),
-            new Owner().setOwner(newOwnerUrn).setType(OwnershipType.NONE)
-                .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString()))
-        ));
+    List<MetadataChangeProposal> events =
+        service.buildAddOwnersProposals(
+            ImmutableList.of(newOwnerUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            OwnershipType.NONE,
+            mockAuthentication());
+
+    OwnerArray expected =
+        new OwnerArray(
+            ImmutableList.of(
+                new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE),
+                new Owner()
+                    .setOwner(newOwnerUrn)
+                    .setType(OwnershipType.NONE)
+                    .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString()))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    Ownership ownerAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        Ownership.class);
+    Ownership ownerAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownerAspect1.getOwners(), expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    Ownership ownerAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        Ownership.class);
+    Ownership ownerAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownerAspect2.getOwners(), expected);
   }
 
@@ -91,81 +91,86 @@ private void testAddOwnersExistingOwner() throws Exception {
   private void testAddOwnersNoExistingOwners() throws Exception {
     EntityClient mockClient = createMockOwnersClient(null);
 
-    final OwnerService service = new OwnerService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newOwnerUrn = UrnUtils.getUrn("urn:li:corpuser:newOwner");
 
-    List<MetadataChangeProposal> events = service.buildAddOwnersProposals(
-        ImmutableList.of(newOwnerUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        OwnershipType.NONE,
-        mockAuthentication());
-
-    OwnerArray expectedOwners = new OwnerArray(
-        ImmutableList.of(new Owner().setOwner(newOwnerUrn).setType(OwnershipType.NONE)
-            .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString()))));
+    List<MetadataChangeProposal> events =
+        service.buildAddOwnersProposals(
+            ImmutableList.of(newOwnerUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            OwnershipType.NONE,
+            mockAuthentication());
+
+    OwnerArray expectedOwners =
+        new OwnerArray(
+            ImmutableList.of(
+                new Owner()
+                    .setOwner(newOwnerUrn)
+                    .setType(OwnershipType.NONE)
+                    .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString()))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    Ownership ownerAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        Ownership.class);
+    Ownership ownerAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownerAspect1.getOwners(), expectedOwners);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    Ownership ownerAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        Ownership.class);
+    Ownership ownerAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownerAspect2.getOwners(), expectedOwners);
   }
 
   @Test
   private void testRemoveOwnerExistingOwners() throws Exception {
     Ownership existingOwnership = new Ownership();
-    existingOwnership.setOwners(new OwnerArray(ImmutableList.of(
-        new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.TECHNICAL_OWNER),
-        new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD)
-    )));
+    existingOwnership.setOwners(
+        new OwnerArray(
+            ImmutableList.of(
+                new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.TECHNICAL_OWNER),
+                new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD))));
     EntityClient mockClient = createMockOwnersClient(existingOwnership);
 
-    final OwnerService service = new OwnerService(
-        mockClient,
-        Mockito.mock(Authentication.class));
-
-    List<MetadataChangeProposal> events = service.buildRemoveOwnersProposals(
-        ImmutableList.of(TEST_OWNER_URN_1),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        mockAuthentication());
-
-    Ownership expected = new Ownership().setOwners(new OwnerArray(
-        ImmutableList.of(new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD))));
+    final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class));
+
+    List<MetadataChangeProposal> events =
+        service.buildRemoveOwnersProposals(
+            ImmutableList.of(TEST_OWNER_URN_1),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            mockAuthentication());
+
+    Ownership expected =
+        new Ownership()
+            .setOwners(
+                new OwnerArray(
+                    ImmutableList.of(
+                        new Owner()
+                            .setOwner(TEST_OWNER_URN_2)
+                            .setType(OwnershipType.DATA_STEWARD))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    RecordTemplate ownersAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        Ownership.class);
+    RecordTemplate ownersAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownersAspect1, expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    RecordTemplate ownersAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        Ownership.class);
+    RecordTemplate ownersAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownersAspect2, expected);
   }
 
@@ -173,68 +178,72 @@ private void testRemoveOwnerExistingOwners() throws Exception {
   private void testRemoveOwnerNoExistingOwners() throws Exception {
     EntityClient mockClient = createMockOwnersClient(null);
 
-    final OwnerService service = new OwnerService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newTagUrn = UrnUtils.getUrn("urn:li:corpuser:newOwner");
 
-    List<MetadataChangeProposal> events = service.buildRemoveOwnersProposals(
-        ImmutableList.of(newTagUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        mockAuthentication());
+    List<MetadataChangeProposal> events =
+        service.buildRemoveOwnersProposals(
+            ImmutableList.of(newTagUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            mockAuthentication());
 
     OwnerArray expected = new OwnerArray(ImmutableList.of());
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    Ownership ownersAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        Ownership.class);
+    Ownership ownersAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownersAspect1.getOwners(), expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    Ownership ownersAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        Ownership.class);
+    Ownership ownersAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class);
     Assert.assertEquals(ownersAspect2.getOwners(), expected);
   }
 
-  private static EntityClient createMockOwnersClient(@Nullable Ownership existingOwnership) throws Exception {
+  private static EntityClient createMockOwnersClient(@Nullable Ownership existingOwnership)
+      throws Exception {
     return createMockEntityClient(existingOwnership, Constants.OWNERSHIP_ASPECT_NAME);
   }
 
-  private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception {
+  private static EntityClient createMockEntityClient(
+      @Nullable RecordTemplate aspect, String aspectName) throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)),
-        Mockito.eq(ImmutableSet.of(aspectName)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(aspect != null ? ImmutableMap.of(
-            TEST_ENTITY_URN_1,
-            new EntityResponse()
-                .setUrn(TEST_ENTITY_URN_1)
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    aspectName,
-                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))
-                ))),
-            TEST_ENTITY_URN_2,
-            new EntityResponse()
-                .setUrn(TEST_ENTITY_URN_2)
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    aspectName,
-                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))
-                )))
-        ) : Collections.emptyMap());
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)),
+                Mockito.eq(ImmutableSet.of(aspectName)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            aspect != null
+                ? ImmutableMap.of(
+                    TEST_ENTITY_URN_1,
+                    new EntityResponse()
+                        .setUrn(TEST_ENTITY_URN_1)
+                        .setEntityName(Constants.DATASET_ENTITY_NAME)
+                        .setAspects(
+                            new EnvelopedAspectMap(
+                                ImmutableMap.of(
+                                    aspectName,
+                                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))))),
+                    TEST_ENTITY_URN_2,
+                    new EntityResponse()
+                        .setUrn(TEST_ENTITY_URN_2)
+                        .setEntityName(Constants.DATASET_ENTITY_NAME)
+                        .setAspects(
+                            new EnvelopedAspectMap(
+                                ImmutableMap.of(
+                                    aspectName,
+                                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))))))
+                : Collections.emptyMap());
     return mockClient;
   }
 
@@ -243,4 +252,4 @@ private static Authentication mockAuthentication() {
     Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, Constants.SYSTEM_ACTOR));
     return mockAuth;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java
index dcb4a745732b2..65ca25fc8524d 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -23,8 +25,6 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-
 public class OwnershipTypeServiceTest {
 
   private static final Urn TEST_OWNERSHIP_TYPE_URN = UrnUtils.getUrn("urn:li:ownershipType:test");
@@ -34,80 +34,62 @@ public class OwnershipTypeServiceTest {
   private void testCreateOwnershipTypeSuccess() throws Exception {
     final EntityClient mockClient = createOwnershipTypeMockEntityClient();
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     // Case 1: With description
-    Urn urn = service.createOwnershipType(
-        "test OwnershipType",
-        "my description",
-        mockAuthentication(),
-        0L
-    );
+    Urn urn =
+        service.createOwnershipType(
+            "test OwnershipType", "my description", mockAuthentication(), 0L);
 
     Assert.assertEquals(urn, TEST_OWNERSHIP_TYPE_URN);
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     // Case 2: Without description
-    urn = service.createOwnershipType(
-        "test OwnershipType",
-        null,
-        mockAuthentication(),
-        0L
-    );
+    urn = service.createOwnershipType("test OwnershipType", null, mockAuthentication(), 0L);
 
     Assert.assertEquals(urn, TEST_OWNERSHIP_TYPE_URN);
-    Mockito.verify(mockClient, Mockito.times(2)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(2))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
   private void testCreateOwnershipTypeErrorMissingInputs() throws Exception {
     final EntityClient mockClient = createOwnershipTypeMockEntityClient();
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     // Only case: missing OwnershipType Name
     Assert.assertThrows(
         RuntimeException.class,
-        () -> service.createOwnershipType(
-            null,
-            "my description",
-            mockAuthentication(),
-            0L
-        )
-    );
+        () -> service.createOwnershipType(null, "my description", mockAuthentication(), 0L));
   }
 
   @Test
   private void testCreateOwnershipTypeError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.createOwnershipType(
-        "new name",
-        "my description",
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.createOwnershipType("new name", "my description", mockAuthentication(), 1L));
   }
 
   @Test
@@ -117,174 +99,134 @@ private void testUpdateOwnershipTypeSuccess() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
     resetUpdateOwnershipTypeMockEntityClient(
-        mockClient,
-        TEST_OWNERSHIP_TYPE_URN,
-        oldName,
-        oldDescription,
-        TEST_USER_URN,
-        0L,
-        0L
-    );
-
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+        mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L);
+
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     final String newName = "new name";
     final String newDescription = "new description";
 
     // Case 1: Update name only
-    service.updateOwnershipType(
-        TEST_OWNERSHIP_TYPE_URN,
-        newName,
-        null,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, newName, oldDescription, 0L, 1L)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateOwnershipType(TEST_OWNERSHIP_TYPE_URN, newName, null, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                buildUpdateOwnershipTypeProposal(
+                    TEST_OWNERSHIP_TYPE_URN, newName, oldDescription, 0L, 1L)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetUpdateOwnershipTypeMockEntityClient(
-        mockClient,
-        TEST_OWNERSHIP_TYPE_URN,
-        oldName,
-        oldDescription,
-        TEST_USER_URN,
-        0L,
-        0L
-    );
+        mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L);
 
     // Case 2: Update description only
     service.updateOwnershipType(
-        TEST_OWNERSHIP_TYPE_URN,
-        null,
-        newDescription,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, oldName, newDescription, 0L, 1L)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+        TEST_OWNERSHIP_TYPE_URN, null, newDescription, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                buildUpdateOwnershipTypeProposal(
+                    TEST_OWNERSHIP_TYPE_URN, oldName, newDescription, 0L, 1L)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetUpdateOwnershipTypeMockEntityClient(
-        mockClient,
-        TEST_OWNERSHIP_TYPE_URN,
-        oldName,
-        oldDescription,
-        TEST_USER_URN,
-        0L,
-        0L
-    );
+        mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L);
 
     // Case 3: Update all fields at once
     service.updateOwnershipType(
-        TEST_OWNERSHIP_TYPE_URN,
-        newName,
-        newDescription,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, newName, newDescription, 0L, 1L)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+        TEST_OWNERSHIP_TYPE_URN, newName, newDescription, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                buildUpdateOwnershipTypeProposal(
+                    TEST_OWNERSHIP_TYPE_URN, newName, newDescription, 0L, 1L)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
   private void testUpdateOwnershipTypeMissingOwnershipType() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
-        Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
-        Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
+                Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
+                Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(null);
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     final String newName = "new name";
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.updateOwnershipType(
-        TEST_OWNERSHIP_TYPE_URN,
-        newName,
-        null,
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.updateOwnershipType(
+                TEST_OWNERSHIP_TYPE_URN, newName, null, mockAuthentication(), 1L));
   }
 
   @Test
   private void testUpdateOwnershipTypeError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
-        Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
-        Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
+            Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
+            Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.updateOwnershipType(
-        TEST_OWNERSHIP_TYPE_URN,
-        "new name",
-        null,
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.updateOwnershipType(
+                TEST_OWNERSHIP_TYPE_URN, "new name", null, mockAuthentication(), 1L));
   }
 
   @Test
   private void testDeleteOwnershipTypeSuccess() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     service.deleteOwnershipType(TEST_OWNERSHIP_TYPE_URN, true, mockAuthentication());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class));
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntityReferences(
-        Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntityReferences(
+            Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class));
   }
 
   @Test
   private void testDeleteOwnershipTypeError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity(
-        Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .deleteEntity(Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class,
+    Assert.assertThrows(
+        RuntimeException.class,
         () -> service.deleteOwnershipType(TEST_OWNERSHIP_TYPE_URN, false, mockAuthentication()));
   }
 
@@ -296,20 +238,13 @@ private void testGetOwnershipTypeInfoSuccess() throws Exception {
     final String description = "description";
 
     resetGetOwnershipTypeInfoMockEntityClient(
-        mockClient,
-        TEST_OWNERSHIP_TYPE_URN,
-        name,
-        description,
-        TEST_USER_URN,
-        0L,
-        1L
-    );
+        mockClient, TEST_OWNERSHIP_TYPE_URN, name, description, TEST_USER_URN, 0L, 1L);
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
-    final OwnershipTypeInfo info = service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication());
+    final OwnershipTypeInfo info =
+        service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication());
 
     // Assert that the info is correct.
     Assert.assertEquals((long) info.getCreated().getTime(), 0L);
@@ -323,37 +258,40 @@ private void testGetOwnershipTypeInfoSuccess() throws Exception {
   private void testGetOwnershipTypeInfoNoOwnershipTypeExists() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
-        Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
-        Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
+                Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
+                Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(null);
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     Assert.assertNull(service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication()));
-
   }
 
   @Test
   private void testGetOwnershipTypeInfoError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
-        Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
-        Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
+            Mockito.eq(TEST_OWNERSHIP_TYPE_URN),
+            Mockito.eq(
+                ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final OwnershipTypeService service = new OwnershipTypeService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final OwnershipTypeService service =
+        new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication()));
   }
 
   private static MetadataChangeProposal buildUpdateOwnershipTypeProposal(
@@ -380,10 +318,12 @@ private static MetadataChangeProposal buildUpdateOwnershipTypeProposal(
 
   private static EntityClient createOwnershipTypeMockEntityClient() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(TEST_OWNERSHIP_TYPE_URN.toString());
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(TEST_OWNERSHIP_TYPE_URN.toString());
     return mockClient;
   }
 
@@ -394,34 +334,40 @@ private static void resetUpdateOwnershipTypeMockEntityClient(
       final String existingDescription,
       final Urn existingOwner,
       final long existingCreatedAt,
-      final long existingUpdatedAt) throws Exception {
+      final long existingUpdatedAt)
+      throws Exception {
 
     Mockito.reset(mockClient);
 
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(ownershipTypeUrn.toString());
-
-
final OwnershipTypeInfo existingInfo = new OwnershipTypeInfo() - .setName(existingName) - .setDescription(existingDescription) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(ownershipTypeUrn), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(ownershipTypeUrn.toString()); + + final OwnershipTypeInfo existingInfo = + new OwnershipTypeInfo() + .setName(existingName) + .setDescription(existingDescription) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(ownershipTypeUrn), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(ownershipTypeUrn) .setEntityName(OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))); } private static void resetGetOwnershipTypeInfoMockEntityClient( @@ -431,29 +377,33 @@ private static void resetGetOwnershipTypeInfoMockEntityClient( final String existingDescription, final Urn existingOwner, final long existingCreatedAt, - final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - final OwnershipTypeInfo existingInfo = new OwnershipTypeInfo() - .setName(existingName) - .setDescription(existingDescription) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(ownershipTypeUrn), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), - Mockito.any(Authentication.class))) + final OwnershipTypeInfo existingInfo = + new OwnershipTypeInfo() + .setName(existingName) + .setDescription(existingDescription) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(ownershipTypeUrn), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(ownershipTypeUrn) .setEntityName(OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new 
Aspect(existingInfo.data())))))); } private static Authentication mockAuthentication() { @@ -461,5 +411,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId())); return mockAuth; } - -} \ No newline at end of file +} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java index 6ef0065b4d5db..5726dcc6cd17a 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -31,144 +33,138 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; - - public class QueryServiceTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); - private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @Test private void testCreateQuerySuccess() throws Exception { final EntityClient mockClient = createQueryMockEntityClient(); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Case 1: All fields provided - Urn urn = service.createQuery( - "test query", - "my description", - QuerySource.MANUAL, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 0L - ); + Urn urn = + service.createQuery( + "test query", + "my description", + QuerySource.MANUAL, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_QUERY_URN); // Ingests both aspects - properties and subjects - Mockito.verify(mockClient, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 2: Null fields provided - urn = service.createQuery( - null, - null, - QuerySource.MANUAL, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(), - mockAuthentication(), - 0L - ); + urn = + service.createQuery( + null, + null, + QuerySource.MANUAL, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT 
* FROM Table"), + ImmutableList.of(), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_QUERY_URN); - Mockito.verify(mockClient, Mockito.times(4)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(4)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testCreateQueryErrorMissingInputs() throws Exception { final EntityClient mockClient = createQueryMockEntityClient(); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Case 1: missing Query Source Assert.assertThrows( RuntimeException.class, - () -> service.createQuery( - null, - null, - null, // Cannot be null - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(), - mockAuthentication(), - 0L - ) - ); - + () -> + service.createQuery( + null, + null, + null, // Cannot be null + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(), + mockAuthentication(), + 0L)); // Case 2: missing Query Statement Assert.assertThrows( RuntimeException.class, - () -> service.createQuery( - null, - null, - QuerySource.MANUAL, // Cannot be null - null, - ImmutableList.of(), - mockAuthentication(), - 0L - ) - ); + () -> + service.createQuery( + null, + null, + QuerySource.MANUAL, // Cannot be null + null, + ImmutableList.of(), + mockAuthentication(), + 0L)); // Case 3: missing Query Subjects Assert.assertThrows( RuntimeException.class, - () -> service.createQuery( - null, - null, - QuerySource.MANUAL, // Cannot be null - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - null, - mockAuthentication(), - 0L - ) - ); + () -> + service.createQuery( + null, + null, + QuerySource.MANUAL, // Cannot be null + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + null, + mockAuthentication(), + 0L)); } @Test private void testCreateQueryError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.createQuery( - "test query", - "my description", - QuerySource.MANUAL, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 0L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.createQuery( + "test query", + "my description", + QuerySource.MANUAL, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + 
mockAuthentication(), + 0L)); } @Test private void testUpdateQuerySuccess() throws Exception { final String oldName = "old name"; final String oldDescription = "old description"; - final QueryStatement oldStatement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); + final QueryStatement oldStatement = + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); final EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -181,35 +177,34 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; final String newDescription = "new description"; - final QueryStatement newStatement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); - final List<QuerySubject> newSubjects = ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)); + final QueryStatement newStatement = + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"); + final List<QuerySubject> newSubjects = + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)); // Case 1: Update name only - service.updateQuery( - TEST_QUERY_URN, - newName, - null, - null, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, newName, oldDescription, QuerySource.MANUAL, oldStatement, - 0L, 1L))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, newName, null, null, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + newName, + oldDescription, + QuerySource.MANUAL, + oldStatement, + 0L, + 1L))), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetQueryPropertiesClient( mockClient, @@ -220,26 +215,25 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 2: Update description only - service.updateQuery( - TEST_QUERY_URN, - null, - newDescription, - null, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, newDescription, QuerySource.MANUAL, - oldStatement, 0L, 1L))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, null, newDescription, null, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + oldName, + newDescription, + QuerySource.MANUAL, + oldStatement, + 0L, + 1L))), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetQueryPropertiesClient( mockClient, @@ -250,26 +244,25 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 3: Update definition only - service.updateQuery( - TEST_QUERY_URN, - null, - null, - newStatement, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, 
Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, oldDescription, QuerySource.MANUAL, - newStatement, 0L, 1L))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, null, null, newStatement, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + oldName, + oldDescription, + QuerySource.MANUAL, + newStatement, + 0L, + 1L))), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetQueryPropertiesClient( mockClient, @@ -280,27 +273,26 @@ private void testUpdateQuerySuccess() throws Exception { oldStatement, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 4: Update subjects only - service.updateQuery( - TEST_QUERY_URN, - null, - null, - null, - newSubjects, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of( - buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, oldDescription, QuerySource.MANUAL, oldStatement, 0L, 1L), - buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateQuery(TEST_QUERY_URN, null, null, null, newSubjects, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + oldName, + oldDescription, + QuerySource.MANUAL, + oldStatement, + 0L, + 1L), + buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 5: Update all fields service.updateQuery( @@ -310,103 +302,106 @@ private void testUpdateQuerySuccess() throws Exception { newStatement, newSubjects, mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals( - Mockito.eq(ImmutableList.of( - buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, newName, newDescription, QuerySource.MANUAL, newStatement, 0L, 1L), - buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects) - )), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + Mockito.eq( + ImmutableList.of( + buildUpdateQueryPropertiesProposal( + TEST_QUERY_URN, + newName, + newDescription, + QuerySource.MANUAL, + newStatement, + 0L, + 1L), + buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testUpdateQueryMissingQuery() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> 
service.updateQuery( - TEST_QUERY_URN, - "new name", - null, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateQuery( + TEST_QUERY_URN, + "new name", + null, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + mockAuthentication(), + 1L)); } @Test private void testUpdateQueryError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(QUERY_ENTITY_NAME), - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(QUERY_ENTITY_NAME), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateQuery( - TEST_QUERY_URN, - "new name", - null, - new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateQuery( + TEST_QUERY_URN, + "new name", + null, + new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"), + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)), + mockAuthentication(), + 1L)); } @Test private void testDeleteQuerySuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); service.deleteQuery(TEST_QUERY_URN, mockAuthentication()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test private void testDeleteQueryError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final QueryService service = new QueryService( - mockClient, - Mockito.mock(Authentication.class)); + final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class)); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.deleteQuery(TEST_QUERY_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, () -> service.deleteQuery(TEST_QUERY_URN, mockAuthentication())); } @Test @@ 
@@ -415,7 +410,8 @@ private void testGetQueryPropertiesSuccess() throws Exception {
 
     final String name = "name";
     final String description = "description";
-    final QueryStatement statement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
+    final QueryStatement statement =
+        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
 
     resetQueryPropertiesClient(
         mockClient,
@@ -426,14 +422,12 @@ private void testGetQueryPropertiesSuccess() throws Exception {
         statement,
         TEST_USER_URN,
         0L,
-        1L
-    );
+        1L);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
-    final QueryProperties properties = service.getQueryProperties(TEST_QUERY_URN, mockAuthentication());
+    final QueryProperties properties =
+        service.getQueryProperties(TEST_QUERY_URN, mockAuthentication());
 
     // Assert that the info is correct.
     Assert.assertEquals((long) properties.getCreated().getTime(), 0L);
@@ -449,16 +443,17 @@ private void testGetQueryPropertiesSuccess() throws Exception {
   private void testGetQueryPropertiesNoQueryExists() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(TEST_QUERY_URN),
+                Mockito.eq(
+                    ImmutableSet.of(
+                        QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(null);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     Assert.assertNull(service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
   }
@@ -467,38 +462,40 @@ private void testGetQueryPropertiesNoQueryExists() throws Exception {
   private void testGetQueryPropertiesError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(QUERY_ENTITY_NAME),
+            Mockito.eq(TEST_QUERY_URN),
+            Mockito.eq(
+                ImmutableSet.of(
+                    QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
   }
 
   @Test
   private void testGetQuerySubjectsSuccess() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final QuerySubjects existingSubjects = new QuerySubjects()
-        .setSubjects(new QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))));
+    final QuerySubjects existingSubjects =
+        new QuerySubjects()
+            .setSubjects(
+                new QuerySubjectArray(
+                    ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))));
 
-    resetQuerySubjectsClient(
-        mockClient,
-        TEST_QUERY_URN,
-        existingSubjects
-    );
+    resetQuerySubjectsClient(mockClient, TEST_QUERY_URN, existingSubjects);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
-    final QuerySubjects querySubjects = service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication());
+    final QuerySubjects querySubjects =
+        service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication());
 
     Assert.assertEquals(querySubjects, existingSubjects);
   }
@@ -507,16 +504,16 @@ private void testGetQuerySubjectsSuccess() throws Exception {
   private void testGetQuerySubjectsNoQueryExists() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(TEST_QUERY_URN),
+                Mockito.eq(
+                    ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
        .thenReturn(null);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     Assert.assertNull(service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
   }
@@ -525,23 +522,24 @@ private void testGetQuerySubjectsNoQueryExists() throws Exception {
   private void testGetQuerySubjectsError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(QUERY_ENTITY_NAME),
+            Mockito.eq(TEST_QUERY_URN),
+            Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication()));
   }
 
   private static MetadataChangeProposal buildUpdateQuerySubjectsProposal(
-      final Urn urn,
-      final List<QuerySubject> querySubjects) {
+      final Urn urn, final List<QuerySubject> querySubjects) {
     QuerySubjects subjects = new QuerySubjects();
     subjects.setSubjects(new QuerySubjectArray(querySubjects));
 
@@ -583,10 +581,12 @@ private static MetadataChangeProposal buildUpdateQueryPropertiesProposal(
 
   private static EntityClient createQueryMockEntityClient() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(TEST_QUERY_URN.toString());
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(TEST_QUERY_URN.toString());
     return mockClient;
   }
 
@@ -599,63 +599,75 @@ private static void resetQueryPropertiesClient(
       final QueryStatement existingStatement,
       final Urn existingOwner,
       final long existingCreatedAt,
-      final long existingUpdatedAt) throws Exception {
+      final long existingUpdatedAt)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(queryUrn.toString());
-
-    final QueryProperties existingProperties = new QueryProperties()
-        .setSource(existingSource)
-        .setName(existingName)
-        .setDescription(existingDescription)
-        .setStatement(existingStatement)
-        .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
-        .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(queryUrn),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(queryUrn.toString());
+
+    final QueryProperties existingProperties =
+        new QueryProperties()
+            .setSource(existingSource)
+            .setName(existingName)
+            .setDescription(existingDescription)
+            .setStatement(existingStatement)
+            .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
+            .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(queryUrn),
+                Mockito.eq(
+                    ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
        .thenReturn(
            new EntityResponse()
                .setUrn(queryUrn)
                .setEntityName(QUERY_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    QUERY_PROPERTIES_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(existingProperties.data()))
-                ))));
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            QUERY_PROPERTIES_ASPECT_NAME,
+                            new EnvelopedAspect()
+                                .setValue(new Aspect(existingProperties.data())))))));
   }
 
   private static void resetQuerySubjectsClient(
-      final EntityClient mockClient,
-      final Urn queryUrn,
-      final QuerySubjects subjects) throws Exception {
+      final EntityClient mockClient, final Urn queryUrn, final QuerySubjects subjects)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(queryUrn.toString());
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(queryUrn),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(queryUrn.toString());
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(queryUrn),
+                Mockito.eq(
+                    ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
        .thenReturn(
            new EntityResponse()
                .setUrn(queryUrn)
                .setEntityName(QUERY_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    QUERY_SUBJECTS_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(subjects.data()))
-                ))));
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            QUERY_SUBJECTS_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(subjects.data())))))));
   }
 
   private static Authentication mockAuthentication() {
@@ -663,4 +675,4 @@ private static Authentication mockAuthentication() {
     Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId()));
     return mockAuth;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java
index 43ebc53385ad4..b034111e7825f 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -25,9 +27,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class SettingsServiceTest {
 
   private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test");
@@ -35,26 +34,28 @@ public class SettingsServiceTest {
 
   @Test
   private static void testGetCorpUserSettingsNullSettings() throws Exception {
-    final SettingsService service = new SettingsService(
-        getCorpUserSettingsEntityClientMock(null),
-        Mockito.mock(Authentication.class)
-    );
-    final CorpUserSettings res = service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
+    final SettingsService service =
+        new SettingsService(
+            getCorpUserSettingsEntityClientMock(null), Mockito.mock(Authentication.class));
+    final CorpUserSettings res =
+        service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
     Assert.assertNull(res);
   }
 
   @Test
   private static void testGetCorpUserSettingsValidSettings() throws Exception {
-    final CorpUserSettings existingSettings = new CorpUserSettings()
-        .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
-        .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
-
-    final SettingsService service = new SettingsService(
-        getCorpUserSettingsEntityClientMock(existingSettings),
-        Mockito.mock(Authentication.class)
-    );
-
-    final CorpUserSettings res = service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
+    final CorpUserSettings existingSettings =
+        new CorpUserSettings()
+            .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
+            .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
+
+    final SettingsService service =
+        new SettingsService(
+            getCorpUserSettingsEntityClientMock(existingSettings),
+            Mockito.mock(Authentication.class));
+
+    final CorpUserSettings res =
+        service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
     Assert.assertEquals(existingSettings, res);
   }
 
@@ -62,107 +63,94 @@ private static void testGetCorpUserSettingsValidSettings() throws Exception {
   private static void testGetCorpUserSettingsSettingsException() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
-        Mockito.eq(TEST_USER_URN),
-        Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenThrow(new RemoteInvocationException());
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
+                Mockito.eq(TEST_USER_URN),
+                Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenThrow(new RemoteInvocationException());
 
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
 
-    Assert.assertThrows(RuntimeException.class, () -> service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)));
   }
 
   @Test
   private static void testUpdateCorpUserSettingsValidSettings() throws Exception {
-    final CorpUserSettings newSettings = new CorpUserSettings()
-        .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
-        .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
+    final CorpUserSettings newSettings =
+        new CorpUserSettings()
+            .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
+            .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateCorpUserSettingsChangeProposal(
-        TEST_USER_URN,
-        newSettings
-    );
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateCorpUserSettingsChangeProposal(TEST_USER_URN, newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenReturn(TEST_USER_URN.toString());
-
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
-
-    service.updateCorpUserSettings(
-        TEST_USER_URN,
-        newSettings,
-        Mockito.mock(Authentication.class));
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenReturn(TEST_USER_URN.toString());
+
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
+
+    service.updateCorpUserSettings(TEST_USER_URN, newSettings, Mockito.mock(Authentication.class));
 
     Mockito.verify(mockClient, Mockito.times(1))
         .ingestProposal(
-            Mockito.eq(expectedProposal),
-            Mockito.any(Authentication.class),
-            Mockito.eq(false)
-        );
+            Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false));
   }
 
   @Test
   private static void testUpdateCorpUserSettingsSettingsException() throws Exception {
-    final CorpUserSettings newSettings = new CorpUserSettings()
-        .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
-        .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
+    final CorpUserSettings newSettings =
+        new CorpUserSettings()
+            .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
+            .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateCorpUserSettingsChangeProposal(
-        TEST_USER_URN,
-        newSettings
-    );
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateCorpUserSettingsChangeProposal(TEST_USER_URN, newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenThrow(new RemoteInvocationException());
-
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
-
-    Assert.assertThrows(RuntimeException.class, () -> service.updateCorpUserSettings(
-        TEST_USER_URN,
-        newSettings,
-        Mockito.mock(Authentication.class)));
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenThrow(new RemoteInvocationException());
+
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
+
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.updateCorpUserSettings(
+                TEST_USER_URN, newSettings, Mockito.mock(Authentication.class)));
   }
 
   @Test
   private static void testGetGlobalSettingsNullSettings() throws Exception {
-    final SettingsService service = new SettingsService(
-        getGlobalSettingsEntityClientMock(null),
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(
+            getGlobalSettingsEntityClientMock(null), Mockito.mock(Authentication.class));
     final GlobalSettingsInfo res = service.getGlobalSettings(Mockito.mock(Authentication.class));
     Assert.assertNull(res);
   }
 
   @Test
   private static void testGetGlobalSettingsValidSettings() throws Exception {
-    final GlobalSettingsInfo existingSettings = new GlobalSettingsInfo()
-        .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
+    final GlobalSettingsInfo existingSettings =
+        new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
 
-    final SettingsService service = new SettingsService(
-        getGlobalSettingsEntityClientMock(existingSettings),
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(
+            getGlobalSettingsEntityClientMock(existingSettings),
+            Mockito.mock(Authentication.class));
 
     final GlobalSettingsInfo res = service.getGlobalSettings(Mockito.mock(Authentication.class));
     Assert.assertEquals(existingSettings, res);
@@ -172,136 +160,131 @@ private static void testGetGlobalSettingsValidSettings() throws Exception {
   private static void testGetGlobalSettingsSettingsException() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
-        Mockito.eq(GLOBAL_SETTINGS_URN),
-        Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenThrow(new RemoteInvocationException());
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
+                Mockito.eq(GLOBAL_SETTINGS_URN),
+                Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenThrow(new RemoteInvocationException());
 
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
 
-    Assert.assertThrows(RuntimeException.class, () -> service.getGlobalSettings(Mockito.mock(Authentication.class)));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getGlobalSettings(Mockito.mock(Authentication.class)));
   }
 
   @Test
   private static void testUpdateGlobalSettingsValidSettings() throws Exception {
-    final GlobalSettingsInfo newSettings = new GlobalSettingsInfo()
-        .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
+    final GlobalSettingsInfo newSettings =
+        new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateGlobalSettingsChangeProposal(newSettings);
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateGlobalSettingsChangeProposal(newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenReturn(GLOBAL_SETTINGS_URN.toString());
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenReturn(GLOBAL_SETTINGS_URN.toString());
 
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
 
-    service.updateGlobalSettings(
-        newSettings,
-        Mockito.mock(Authentication.class));
+    service.updateGlobalSettings(newSettings, Mockito.mock(Authentication.class));
 
     Mockito.verify(mockClient, Mockito.times(1))
         .ingestProposal(
-            Mockito.eq(expectedProposal),
-            Mockito.any(Authentication.class),
-            Mockito.eq(false)
-        );
+            Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false));
   }
 
   @Test
   private static void testUpdateGlobalSettingsSettingsException() throws Exception {
-    final GlobalSettingsInfo newSettings = new GlobalSettingsInfo()
-        .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
+    final GlobalSettingsInfo newSettings =
+        new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateGlobalSettingsChangeProposal(
-        newSettings
-    );
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateGlobalSettingsChangeProposal(newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenThrow(new RemoteInvocationException());
-
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
-
-    Assert.assertThrows(RuntimeException.class, () -> service.updateGlobalSettings(
-        newSettings,
-        Mockito.mock(Authentication.class)));
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenThrow(new RemoteInvocationException());
+
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
+
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.updateGlobalSettings(newSettings, Mockito.mock(Authentication.class)));
  }
 
-  private static EntityClient getCorpUserSettingsEntityClientMock(@Nullable final CorpUserSettings settings)
-      throws Exception {
+  private static EntityClient getCorpUserSettingsEntityClientMock(
+      @Nullable final CorpUserSettings settings) throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    EnvelopedAspectMap aspectMap = settings != null ? new EnvelopedAspectMap(ImmutableMap.of(
-        Constants.CORP_USER_SETTINGS_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(settings.data()))
-    )) : new EnvelopedAspectMap();
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
-        Mockito.eq(TEST_USER_URN),
-        Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        new EntityResponse()
-            .setEntityName(Constants.CORP_USER_ENTITY_NAME)
-            .setUrn(TEST_USER_URN)
-            .setAspects(aspectMap)
-    );
+    EnvelopedAspectMap aspectMap =
+        settings != null
+            ? new EnvelopedAspectMap(
+                ImmutableMap.of(
+                    Constants.CORP_USER_SETTINGS_ASPECT_NAME,
+                    new EnvelopedAspect().setValue(new Aspect(settings.data()))))
+            : new EnvelopedAspectMap();
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
+                Mockito.eq(TEST_USER_URN),
+                Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse()
+                .setEntityName(Constants.CORP_USER_ENTITY_NAME)
+                .setUrn(TEST_USER_URN)
+                .setAspects(aspectMap));
     return mockClient;
   }
 
-  private static EntityClient getGlobalSettingsEntityClientMock(@Nullable final GlobalSettingsInfo settings)
-      throws Exception {
+  private static EntityClient getGlobalSettingsEntityClientMock(
+      @Nullable final GlobalSettingsInfo settings) throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    EnvelopedAspectMap aspectMap = settings != null ? new EnvelopedAspectMap(ImmutableMap.of(
-        GLOBAL_SETTINGS_INFO_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(settings.data()))
-    )) : new EnvelopedAspectMap();
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
-        Mockito.eq(GLOBAL_SETTINGS_URN),
-        Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        new EntityResponse()
-            .setEntityName(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)
-            .setUrn(GLOBAL_SETTINGS_URN)
-            .setAspects(aspectMap)
-    );
+    EnvelopedAspectMap aspectMap =
+        settings != null
+            ? new EnvelopedAspectMap(
+                ImmutableMap.of(
+                    GLOBAL_SETTINGS_INFO_ASPECT_NAME,
+                    new EnvelopedAspect().setValue(new Aspect(settings.data()))))
+            : new EnvelopedAspectMap();
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
+                Mockito.eq(GLOBAL_SETTINGS_URN),
+                Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse()
+                .setEntityName(GLOBAL_SETTINGS_ENTITY_NAME)
+                .setUrn(GLOBAL_SETTINGS_URN)
+                .setAspects(aspectMap));
     return mockClient;
   }
 
   private static MetadataChangeProposal buildUpdateCorpUserSettingsChangeProposal(
-      final Urn urn,
-      final CorpUserSettings newSettings) {
-    final MetadataChangeProposal mcp = new MetadataChangeProposal();
-    mcp.setEntityUrn(urn);
-    mcp.setEntityType(CORP_USER_ENTITY_NAME);
-    mcp.setAspectName(CORP_USER_SETTINGS_ASPECT_NAME);
-    mcp.setChangeType(ChangeType.UPSERT);
-    mcp.setAspect(GenericRecordUtils.serializeAspect(newSettings));
-    return mcp;
+      final Urn urn, final CorpUserSettings newSettings) {
+    final MetadataChangeProposal mcp = new MetadataChangeProposal();
+    mcp.setEntityUrn(urn);
+    mcp.setEntityType(CORP_USER_ENTITY_NAME);
+    mcp.setAspectName(CORP_USER_SETTINGS_ASPECT_NAME);
+    mcp.setChangeType(ChangeType.UPSERT);
+    mcp.setAspect(GenericRecordUtils.serializeAspect(newSettings));
+    return mcp;
   }
 
   private static MetadataChangeProposal buildUpdateGlobalSettingsChangeProposal(
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java
index 125265540dc77..e7ed3db82d0f2 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java
@@ -33,56 +33,55 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-
 public class TagServiceTest {
 
   private static final Urn TEST_TAG_URN_1 = UrnUtils.getUrn("urn:li:tag:test");
   private static final Urn TEST_TAG_URN_2 = UrnUtils.getUrn("urn:li:tag:test2");
 
-  private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)");
-  private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)");
+  private static final Urn TEST_ENTITY_URN_1 =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)");
+  private static final Urn TEST_ENTITY_URN_2 =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)");
 
   @Test
   private void testAddTagToEntityExistingTag() throws Exception {
     GlobalTags existingGlobalTags = new GlobalTags();
-    existingGlobalTags.setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1))
-    )));
+    existingGlobalTags.setTags(
+        new TagAssociationArray(
+            ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)))));
     EntityClient mockClient = createMockGlobalTagsClient(existingGlobalTags);
 
-    final TagService service = new TagService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag");
-    List<MetadataChangeProposal> events = service.buildAddTagsProposals(
-        ImmutableList.of(newTagUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - TagAssociationArray expected = new TagAssociationArray( - ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect1.getTags(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2.getTags(), expected); } @@ -90,37 +89,35 @@ private void testAddTagToEntityExistingTag() throws Exception { private void testAddGlobalTagsToEntityNoExistingTag() throws Exception { EntityClient mockClient = createMockGlobalTagsClient(null); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildAddTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - TagAssociationArray expectedTermsArray = new TagAssociationArray( - ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + TagAssociationArray expectedTermsArray = + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - 
event1.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect1.getTags(), expectedTermsArray); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2.getTags(), expectedTermsArray); } @@ -128,50 +125,59 @@ private void testAddGlobalTagsToEntityNoExistingTag() throws Exception { private void testAddTagToSchemaFieldExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation() + .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildAddTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - TagAssociationArray expected = new TagAssociationArray( - ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - 
Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); } @Test @@ -179,90 +185,95 @@ private void testAddGlobalTagsToSchemaFieldNoExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags(new GlobalTags())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildAddTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - TagAssociationArray expected = new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))) - ); + List<MetadataChangeProposal> events = + service.buildAddTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - 
Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); } @Test private void testRemoveTagToEntityExistingTag() throws Exception { GlobalTags existingGlobalTags = new GlobalTags(); - existingGlobalTags.setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation() - .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation() - .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)) - ))); + existingGlobalTags.setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))); EntityClient mockClient = createMockGlobalTagsClient(existingGlobalTags); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_TAG_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_TAG_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); - GlobalTags expected = new GlobalTags().setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))); + GlobalTags expected = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlobalTags.class); + RecordTemplate tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), 
GlobalTags.class); Assert.assertEquals(tagsAspect1, expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + RecordTemplate tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2, expected); } @@ -270,36 +281,33 @@ private void testRemoveTagToEntityExistingTag() throws Exception { private void testRemoveGlobalTagsToEntityNoExistingTag() throws Exception { EntityClient mockClient = createMockGlobalTagsClient(null); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); TagAssociationArray expected = new TagAssociationArray(ImmutableList.of()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect1.getTags(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2.getTags(), expected); } @@ -307,51 +315,58 @@ private void testRemoveGlobalTagsToEntityNoExistingTag() throws Exception { private void testRemoveTagToSchemaFieldExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new 
EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation() + .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation() + .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_TAG_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_TAG_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); - TagAssociationArray expected = new TagAssociationArray(ImmutableList.of( - new TagAssociation() - .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)) - )); + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); } @Test @@ -359,78 +374,90 @@ private void testRemoveGlobalTagsToSchemaFieldNoExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new 
EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags(new GlobalTags())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List<MetadataChangeProposal> events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_ENTITY_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List<MetadataChangeProposal> events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_ENTITY_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), Collections.emptyList()); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + Collections.emptyList()); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), Collections.emptyList()); - + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + Collections.emptyList()); } - private static EntityClient createMockGlobalTagsClient(@Nullable GlobalTags existingGlobalTags) throws Exception { + private static EntityClient createMockGlobalTagsClient(@Nullable GlobalTags existingGlobalTags) + throws Exception { return createMockEntityClient(existingGlobalTags, Constants.GLOBAL_TAGS_ASPECT_NAME); } - private static EntityClient createMockSchemaMetadataEntityClient(@Nullable EditableSchemaMetadata existingMetadata) throws Exception { + private static EntityClient 
createMockSchemaMetadataEntityClient( + @Nullable EditableSchemaMetadata existingMetadata) throws Exception { return createMockEntityClient(existingMetadata, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception { + private static EntityClient createMockEntityClient( + @Nullable RecordTemplate aspect, String aspectName) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - Mockito.eq(ImmutableSet.of(aspectName)), - Mockito.any(Authentication.class))) - .thenReturn(aspect != null ? ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(aspectName)), + Mockito.any(Authentication.class))) + .thenReturn( + aspect != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data())))))) + : Collections.emptyMap()); return mockClient; } @@ -439,4 +466,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, Constants.SYSTEM_ACTOR)); return mockAuth; } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java index 5841717e7db93..cd62cf3959103 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -33,9 +35,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; - - public class ViewServiceTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); @@ -45,150 +44,187 @@ public class ViewServiceTest { private void testCreateViewSuccess() throws Exception { final EntityClient mockClient = createViewMockEntityClient(); - final ViewService service = new 
ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Case 1: With description - Urn urn = service.createView(DataHubViewType.PERSONAL, - "test view", - "my description", - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ); + Urn urn = + service.createView( + DataHubViewType.PERSONAL, + "test view", + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_VIEW_URN); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 2: Without description - urn = service.createView(DataHubViewType.PERSONAL, - "test view", - null, - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ); + urn = + service.createView( + DataHubViewType.PERSONAL, + "test view", + null, + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L); Assert.assertEquals(urn, TEST_VIEW_URN); - Mockito.verify(mockClient, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testCreateViewErrorMissingInputs() throws Exception { final EntityClient mockClient = createViewMockEntityClient(); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Case 1: missing View Type Assert.assertThrows( RuntimeException.class, - () -> service.createView(null, - "test view", - "my description", - new DataHubViewDefinition() - 
.setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ) - ); - + () -> + service.createView( + null, + "test view", + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L)); // Case 2: missing View name Assert.assertThrows( RuntimeException.class, - () -> service.createView(DataHubViewType.PERSONAL, - null, - "my description", - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 0L - ) - ); + () -> + service.createView( + DataHubViewType.PERSONAL, + null, + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 0L)); // Case 3: missing View definition Assert.assertThrows( RuntimeException.class, - () -> service.createView(DataHubViewType.PERSONAL, - "My name", - "my description", - null, - mockAuthentication(), - 0L - ) - ); + () -> + service.createView( + DataHubViewType.PERSONAL, + "My name", + "my description", + null, + mockAuthentication(), + 0L)); } @Test private void testCreateViewError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.createView( - DataHubViewType.PERSONAL, - "new name", - "my description", - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - 
.setCondition(Condition.EQUAL) - .setValue("value") - ))))))), - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.createView( + DataHubViewType.PERSONAL, + "new name", + "my description", + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))), + mockAuthentication(), + 1L)); } @Test @@ -196,9 +232,10 @@ private void testUpdateViewSuccess() throws Exception { final DataHubViewType type = DataHubViewType.PERSONAL; final String oldName = "old name"; final String oldDescription = "old description"; - final DataHubViewDefinition oldDefinition = new DataHubViewDefinition() - .setEntityTypes(new StringArray()) - .setFilter(new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList()))); + final DataHubViewDefinition oldDefinition = + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList()))); final EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -211,39 +248,39 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; final String newDescription = "new description"; - final DataHubViewDefinition newDefinition = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))); + final DataHubViewDefinition newDefinition = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))); // Case 1: Update name only - service.updateView( - TEST_VIEW_URN, - newName, - null, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, newName, oldDescription, oldDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateView(TEST_VIEW_URN, newName, null, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, newName, oldDescription, oldDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateViewMockEntityClient( mockClient, @@ -254,24 +291,18 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 2: Update description 
only - service.updateView( - TEST_VIEW_URN, - null, - newDescription, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, oldName, newDescription, oldDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateView(TEST_VIEW_URN, null, newDescription, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, oldName, newDescription, oldDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateViewMockEntityClient( mockClient, @@ -282,23 +313,18 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 3: Update definition only - service.updateView(TEST_VIEW_URN, - null, - null, - newDefinition, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, oldName, oldDescription, newDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateView(TEST_VIEW_URN, null, null, newDefinition, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, oldName, oldDescription, newDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateViewMockEntityClient( mockClient, @@ -309,110 +335,88 @@ private void testUpdateViewSuccess() throws Exception { oldDefinition, TEST_USER_URN, 0L, - 0L - ); + 0L); // Case 4: Update all fields at once service.updateView( - TEST_VIEW_URN, - newName, - newDescription, - newDefinition, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, newName, newDescription, newDefinition, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + TEST_VIEW_URN, newName, newDescription, newDefinition, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateViewProposal( + TEST_VIEW_URN, type, newName, newDescription, newDefinition, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testUpdateViewMissingView() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateView( - TEST_VIEW_URN, - newName, - null, - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> service.updateView(TEST_VIEW_URN, newName, null, null, mockAuthentication(), 1L)); 
} @Test private void testUpdateViewError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateView( - TEST_VIEW_URN, - "new name", - null, - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> service.updateView(TEST_VIEW_URN, "new name", null, null, mockAuthentication(), 1L)); } @Test private void testDeleteViewSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); service.deleteView(TEST_VIEW_URN, mockAuthentication()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test private void testDeleteViewError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.deleteView(TEST_VIEW_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, () -> service.deleteView(TEST_VIEW_URN, mockAuthentication())); } @Test @@ -422,31 +426,28 @@ private void testGetViewInfoSuccess() throws Exception { final DataHubViewType type = DataHubViewType.PERSONAL; final String name = "name"; final String description = "description"; - final DataHubViewDefinition definition = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of(new Criterion() - .setField("field") - .setCondition(Condition.EQUAL) - .setValue("value") - ))))))); + final DataHubViewDefinition definition = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + 
ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setCondition(Condition.EQUAL) + .setValue("value")))))))); resetGetViewInfoMockEntityClient( - mockClient, - TEST_VIEW_URN, - type, - name, - description, - definition, - TEST_USER_URN, - 0L, - 1L - ); + mockClient, TEST_VIEW_URN, type, name, description, definition, TEST_USER_URN, 0L, 1L); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); final DataHubViewInfo info = service.getViewInfo(TEST_VIEW_URN, mockAuthentication()); @@ -464,37 +465,36 @@ private void testGetViewInfoSuccess() throws Exception { private void testGetViewInfoNoViewExists() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); Assert.assertNull(service.getViewInfo(TEST_VIEW_URN, mockAuthentication())); - } @Test private void testGetViewInfoError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(TEST_VIEW_URN), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(TEST_VIEW_URN), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final ViewService service = new ViewService( - mockClient, - Mockito.mock(Authentication.class)); + final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.getViewInfo(TEST_VIEW_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, () -> service.getViewInfo(TEST_VIEW_URN, mockAuthentication())); } private static MetadataChangeProposal buildUpdateViewProposal( @@ -525,10 +525,12 @@ private static MetadataChangeProposal buildUpdateViewProposal( private static EntityClient createViewMockEntityClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(TEST_VIEW_URN.toString()); + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(TEST_VIEW_URN.toString()); return mockClient; } @@ -541,36 +543,42 @@ private static void resetUpdateViewMockEntityClient( final DataHubViewDefinition existingDefinition, final Urn existingOwner, final long existingCreatedAt, - 
final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - Mockito.when(mockClient.ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false))).thenReturn(viewUrn.toString()); - - final DataHubViewInfo existingInfo = new DataHubViewInfo() - .setType(existingType) - .setName(existingName) - .setDescription(existingDescription) - .setDefinition(existingDefinition) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(viewUrn), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false))) + .thenReturn(viewUrn.toString()); + + final DataHubViewInfo existingInfo = + new DataHubViewInfo() + .setType(existingType) + .setName(existingName) + .setDescription(existingDescription) + .setDefinition(existingDefinition) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(viewUrn), + Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( - new EntityResponse() - .setUrn(viewUrn) - .setEntityName(DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + new EntityResponse() + .setUrn(viewUrn) + .setEntityName(DATAHUB_VIEW_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))); } private static void resetGetViewInfoMockEntityClient( @@ -582,31 +590,35 @@ private static void resetGetViewInfoMockEntityClient( final DataHubViewDefinition existingDefinition, final Urn existingOwner, final long existingCreatedAt, - final long existingUpdatedAt) throws Exception { + final long existingUpdatedAt) + throws Exception { Mockito.reset(mockClient); - final DataHubViewInfo existingInfo = new DataHubViewInfo() - .setType(existingType) - .setName(existingName) - .setDescription(existingDescription) - .setDefinition(existingDefinition) - .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) - .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); - - Mockito.when(mockClient.getV2( - Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(viewUrn), - Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + final DataHubViewInfo existingInfo = + new DataHubViewInfo() + .setType(existingType) + .setName(existingName) + .setDescription(existingDescription) + .setDefinition(existingDefinition) + .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt)) + .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt)); + + Mockito.when( + mockClient.getV2( + Mockito.eq(DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(viewUrn), + 
Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn( new EntityResponse() .setUrn(viewUrn) .setEntityName(DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingInfo.data())) - )))); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))); } private static Authentication mockAuthentication() { @@ -614,4 +626,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId())); return mockAuth; } -} \ No newline at end of file +} diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java index 3ea2b01c3e214..1b3ef20cff00a 100644 --- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java @@ -12,9 +12,9 @@ import com.linkedin.util.Pair; import java.io.IOException; import java.io.PrintWriter; +import java.time.ZoneId; import java.util.HashMap; import java.util.Map; -import java.time.ZoneId; import javax.servlet.ServletContext; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; @@ -27,30 +27,46 @@ public class Config extends HttpServlet { - Map<String, Object> config = new HashMap<String, Object>() {{ - put("noCode", "true"); - put("retention", "true"); - put("statefulIngestionCapable", true); - put("patchCapable", true); - put("timeZone", ZoneId.systemDefault().toString()); - }}; - ObjectMapper objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - - private Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> getPluginModels(ServletContext servletContext) { - WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext); + Map<String, Object> config = + new HashMap<String, Object>() { + { + put("noCode", "true"); + put("retention", "true"); + put("statefulIngestionCapable", true); + put("patchCapable", true); + put("timeZone", ZoneId.systemDefault().toString()); + } + }; + ObjectMapper objectMapper = + new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + + private Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> getPluginModels( + ServletContext servletContext) { + WebApplicationContext ctx = + WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext); PluginEntityRegistryLoader pluginEntityRegistryLoader = (PluginEntityRegistryLoader) ctx.getBean("pluginEntityRegistry"); - Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> patchRegistries = - pluginEntityRegistryLoader.getPatchRegistries(); - Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> patchDiagnostics = new HashMap<>(); + Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> + patchRegistries = pluginEntityRegistryLoader.getPatchRegistries(); + Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> patchDiagnostics = + new HashMap<>(); patchRegistries.keySet().forEach(name -> patchDiagnostics.put(name, new HashMap<>())); - patchRegistries.entrySet().forEach(entry -> { - entry.getValue() - .entrySet() - 
.forEach(versionLoadEntry -> patchDiagnostics.get(entry.getKey()) - .put(versionLoadEntry.getKey(), versionLoadEntry.getValue().getSecond())); - }); + patchRegistries + .entrySet() + .forEach( + entry -> { + entry + .getValue() + .entrySet() + .forEach( + versionLoadEntry -> + patchDiagnostics + .get(entry.getKey()) + .put( + versionLoadEntry.getKey(), + versionLoadEntry.getValue().getSecond())); + }); return patchDiagnostics; } @@ -74,7 +90,8 @@ private boolean checkImpactAnalysisSupport(WebApplicationContext ctx) { protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException { config.put("noCode", "true"); - WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext()); + WebApplicationContext ctx = + WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext()); config.put("supportsImpactAnalysis", checkImpactAnalysisSupport(ctx)); @@ -85,21 +102,30 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IO ConfigurationProvider configProvider = getConfigProvider(ctx); - Map<String, Object> telemetryConfig = new HashMap<String, Object>() {{ - put("enabledCli", configProvider.getTelemetry().enabledCli); - put("enabledIngestion", configProvider.getTelemetry().enabledIngestion); - }}; + Map<String, Object> telemetryConfig = + new HashMap<String, Object>() { + { + put("enabledCli", configProvider.getTelemetry().enabledCli); + put("enabledIngestion", configProvider.getTelemetry().enabledIngestion); + } + }; config.put("telemetry", telemetryConfig); - Map<String, Object> ingestionConfig = new HashMap<String, Object>() {{ - put("enabled", configProvider.getIngestion().enabled); - put("defaultCliVersion", configProvider.getIngestion().defaultCliVersion); - }}; + Map<String, Object> ingestionConfig = + new HashMap<String, Object>() { + { + put("enabled", configProvider.getIngestion().enabled); + put("defaultCliVersion", configProvider.getIngestion().defaultCliVersion); + } + }; config.put("managedIngestion", ingestionConfig); - Map<String, Object> datahubConfig = new HashMap<String, Object>() {{ - put("serverType", configProvider.getDatahub().serverType); - }}; + Map<String, Object> datahubConfig = + new HashMap<String, Object>() { + { + put("serverType", configProvider.getDatahub().serverType); + } + }; config.put("datahub", datahubConfig); resp.setContentType("application/json"); diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java index d788222c5d87b..ebcfaeca7059e 100644 --- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java @@ -1,13 +1,22 @@ package com.datahub.gms.servlet; -import com.linkedin.metadata.config.search.SearchConfiguration; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER; + import com.datahub.gms.util.CSVWriter; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import 
diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
index d788222c5d87b..ebcfaeca7059e 100644
--- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
+++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
@@ -1,13 +1,22 @@
 package com.datahub.gms.servlet;
 
-import com.linkedin.metadata.config.search.SearchConfiguration;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER;
+
 import com.datahub.gms.util.CSVWriter;
 import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.search.SearchConfiguration;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.query.SearchFlags;
 import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler;
+import java.io.PrintWriter;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import lombok.extern.slf4j.Slf4j;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.index.query.BoolQueryBuilder;
@@ -22,16 +31,6 @@
 import org.springframework.web.context.WebApplicationContext;
 import org.springframework.web.context.support.WebApplicationContextUtils;
 
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.PrintWriter;
-import java.util.Map;
-import java.util.Optional;
-
-import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
-
 @Slf4j
 public class ConfigSearchExport extends HttpServlet {
 
@@ -49,40 +48,73 @@ private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) {
 
     CSVWriter writer = CSVWriter.builder().printWriter(pw).build();
 
-    String[] header = {"entity", "query_category", "match_category", "query_type", "field_name",
-        "field_weight", "search_analyzer", "case_insensitive", "query_boost", "raw"};
+    String[] header = {
+      "entity",
+      "query_category",
+      "match_category",
+      "query_type",
+      "field_name",
+      "field_weight",
+      "search_analyzer",
+      "case_insensitive",
+      "query_boost",
+      "raw"
+    };
     writer.println(header);
 
     SEARCHABLE_ENTITY_TYPES.stream()
-        .map(entityType -> {
+        .map(
+            entityType -> {
              try {
-            EntitySpec entitySpec = entityRegistry.getEntitySpec(EntityTypeMapper.getName(entityType));
+                EntitySpec entitySpec =
+                    entityRegistry.getEntitySpec(EntityTypeMapper.getName(entityType));
                return Optional.of(entitySpec);
              } catch (IllegalArgumentException e) {
                log.warn("Failed to resolve entity `{}`", entityType.name());
                return Optional.<EntitySpec>empty();
              }
            })
-        .filter(Optional::isPresent)
-        .forEach(entitySpecOpt -> {
+        .filter(Optional::isPresent)
+        .forEach(
+            entitySpecOpt -> {
              EntitySpec entitySpec = entitySpecOpt.get();
-          SearchRequest searchRequest = SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, null)
-              .getSearchRequest("*", null, null, 0, 0, new SearchFlags()
-                  .setFulltext(true).setSkipHighlighting(true).setSkipAggregates(true), null);
+              SearchRequest searchRequest =
+                  SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, null)
+                      .getSearchRequest(
+                          "*",
+                          null,
+                          null,
+                          0,
+                          0,
+                          new SearchFlags()
+                              .setFulltext(true)
+                              .setSkipHighlighting(true)
+                              .setSkipAggregates(true),
+                          null);
 
-          FunctionScoreQueryBuilder rankingQuery = ((FunctionScoreQueryBuilder) ((BoolQueryBuilder)
-              searchRequest.source().query()).must().get(0));
+              FunctionScoreQueryBuilder rankingQuery =
+                  ((FunctionScoreQueryBuilder)
+                      ((BoolQueryBuilder) searchRequest.source().query()).must().get(0));
              BoolQueryBuilder relevancyQuery = (BoolQueryBuilder) rankingQuery.query();
-          BoolQueryBuilder simpleQueryString = (BoolQueryBuilder) relevancyQuery.should().get(0);
+              BoolQueryBuilder simpleQueryString =
+                  (BoolQueryBuilder) relevancyQuery.should().get(0);
              BoolQueryBuilder exactPrefixMatch = (BoolQueryBuilder) relevancyQuery.should().get(1);
 
              for (QueryBuilder simpBuilder : simpleQueryString.should()) {
                SimpleQueryStringBuilder sqsb = (SimpleQueryStringBuilder) simpBuilder;
                for (Map.Entry<String, Float> fieldWeight : sqsb.fields().entrySet()) {
-              String[] row = {entitySpec.getName(), "relevancy", "fulltext", sqsb.getClass().getSimpleName(),
-                  fieldWeight.getKey(),
-                  fieldWeight.getValue().toString(), sqsb.analyzer(), "true", String.valueOf(sqsb.boost()),
-                  sqsb.toString().replaceAll("\n", "")};
+                  String[] row = {
+                    entitySpec.getName(),
+                    "relevancy",
+                    "fulltext",
+                    sqsb.getClass().getSimpleName(),
+                    fieldWeight.getKey(),
+                    fieldWeight.getValue().toString(),
+                    sqsb.analyzer(),
+                    "true",
+                    String.valueOf(sqsb.boost()),
+                    sqsb.toString().replaceAll("\n", "")
+                  };
                  writer.println(row);
                }
              }
@@ -90,66 +122,119 @@ private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) {
              for (QueryBuilder builder : exactPrefixMatch.should()) {
                if (builder instanceof TermQueryBuilder) {
                  TermQueryBuilder tqb = (TermQueryBuilder) builder;
-              String[] row = {entitySpec.getName(), "relevancy", "exact_match", tqb.getClass().getSimpleName(),
-                  tqb.fieldName(),
-                  String.valueOf(tqb.boost()), KEYWORD_ANALYZER, String.valueOf(tqb.caseInsensitive()), "",
-                  tqb.toString().replaceAll("\n", "")};
+                  String[] row = {
+                    entitySpec.getName(),
+                    "relevancy",
+                    "exact_match",
+                    tqb.getClass().getSimpleName(),
+                    tqb.fieldName(),
+                    String.valueOf(tqb.boost()),
+                    KEYWORD_ANALYZER,
+                    String.valueOf(tqb.caseInsensitive()),
+                    "",
+                    tqb.toString().replaceAll("\n", "")
+                  };
                  writer.println(row);
                } else if (builder instanceof MatchPhrasePrefixQueryBuilder) {
                  MatchPhrasePrefixQueryBuilder mppqb = (MatchPhrasePrefixQueryBuilder) builder;
-              String[] row = {entitySpec.getName(), "relevancy", "prefix_match", mppqb.getClass().getSimpleName(),
-                  mppqb.fieldName(),
-                  String.valueOf(mppqb.boost()), "", "true", "", mppqb.toString().replaceAll("\n", "")};
+                  String[] row = {
+                    entitySpec.getName(),
+                    "relevancy",
+                    "prefix_match",
+                    mppqb.getClass().getSimpleName(),
+                    mppqb.fieldName(),
+                    String.valueOf(mppqb.boost()),
+                    "",
+                    "true",
+                    "",
+                    mppqb.toString().replaceAll("\n", "")
+                  };
                  writer.println(row);
                } else {
-              throw new IllegalStateException("Unhandled exact prefix builder: " + builder.getClass().getName());
+                  throw new IllegalStateException(
+                      "Unhandled exact prefix builder: " + builder.getClass().getName());
                }
              }
 
-          for (FunctionScoreQueryBuilder.FilterFunctionBuilder ffb : rankingQuery.filterFunctionBuilders()) {
+              for (FunctionScoreQueryBuilder.FilterFunctionBuilder ffb :
+                  rankingQuery.filterFunctionBuilders()) {
                if (ffb.getFilter() instanceof MatchAllQueryBuilder) {
                  MatchAllQueryBuilder filter = (MatchAllQueryBuilder) ffb.getFilter();
                  if (ffb.getScoreFunction() instanceof WeightBuilder) {
                    WeightBuilder scoreFunction = (WeightBuilder) ffb.getScoreFunction();
-                String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(),
-                    scoreFunction.getClass().getSimpleName(), "*",
-                    String.valueOf(scoreFunction.getWeight()), "", "true", String.valueOf(filter.boost()),
-                    String.format("{\"filter\":%s,\"scoreFunction\":%s", filter,
-                        CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")};
+                    String[] row = {
+                      entitySpec.getName(),
+                      "score",
+                      filter.getClass().getSimpleName(),
+                      scoreFunction.getClass().getSimpleName(),
+                      "*",
+                      String.valueOf(scoreFunction.getWeight()),
+                      "",
+                      "true",
+                      String.valueOf(filter.boost()),
+                      String.format(
+                              "{\"filter\":%s,\"scoreFunction\":%s",
+                              filter, CSVWriter.builderToString(scoreFunction))
+                          .replaceAll("\n", "")
+                    };
                    writer.println(row);
                  } else if (ffb.getScoreFunction() instanceof FieldValueFactorFunctionBuilder) {
-                FieldValueFactorFunctionBuilder scoreFunction = (FieldValueFactorFunctionBuilder) ffb.getScoreFunction();
-                String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(),
-                    scoreFunction.getClass().getSimpleName(), scoreFunction.fieldName(),
-                    String.valueOf(scoreFunction.factor()), "", "true", String.valueOf(filter.boost()),
-                    String.format("{\"filter\":%s,\"scoreFunction\":%s", filter, CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")};
+                    FieldValueFactorFunctionBuilder scoreFunction =
+                        (FieldValueFactorFunctionBuilder) ffb.getScoreFunction();
+                    String[] row = {
+                      entitySpec.getName(),
+                      "score",
+                      filter.getClass().getSimpleName(),
+                      scoreFunction.getClass().getSimpleName(),
+                      scoreFunction.fieldName(),
+                      String.valueOf(scoreFunction.factor()),
+                      "",
+                      "true",
+                      String.valueOf(filter.boost()),
+                      String.format(
+                              "{\"filter\":%s,\"scoreFunction\":%s",
+                              filter, CSVWriter.builderToString(scoreFunction))
+                          .replaceAll("\n", "")
+                    };
                    writer.println(row);
                  } else {
-                throw new IllegalStateException("Unhandled score function: " + ffb.getScoreFunction());
+                    throw new IllegalStateException(
+                        "Unhandled score function: " + ffb.getScoreFunction());
                  }
                } else if (ffb.getFilter() instanceof TermQueryBuilder) {
                  TermQueryBuilder filter = (TermQueryBuilder) ffb.getFilter();
                  if (ffb.getScoreFunction() instanceof WeightBuilder) {
                    WeightBuilder scoreFunction = (WeightBuilder) ffb.getScoreFunction();
-                String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(),
-                    scoreFunction.getClass().getSimpleName(), filter.fieldName() + "=" + filter.value().toString(),
-                    String.valueOf(scoreFunction.getWeight()), KEYWORD_ANALYZER, String.valueOf(filter.caseInsensitive()),
-                    String.valueOf(filter.boost()), String.format("{\"filter\":%s,\"scoreFunction\":%s", filter,
-                    CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")};
+                    String[] row = {
+                      entitySpec.getName(),
+                      "score",
+                      filter.getClass().getSimpleName(),
+                      scoreFunction.getClass().getSimpleName(),
+                      filter.fieldName() + "=" + filter.value().toString(),
+                      String.valueOf(scoreFunction.getWeight()),
+                      KEYWORD_ANALYZER,
+                      String.valueOf(filter.caseInsensitive()),
+                      String.valueOf(filter.boost()),
+                      String.format(
+                              "{\"filter\":%s,\"scoreFunction\":%s",
+                              filter, CSVWriter.builderToString(scoreFunction))
+                          .replaceAll("\n", "")
+                    };
                    writer.println(row);
                  } else {
-                throw new IllegalStateException("Unhandled score function: " + ffb.getScoreFunction());
+                    throw new IllegalStateException(
+                        "Unhandled score function: " + ffb.getScoreFunction());
                  }
                } else {
-              throw new IllegalStateException("Unhandled function score filter: " + ffb.getFilter());
+                  throw new IllegalStateException(
+                      "Unhandled function score filter: " + ffb.getFilter());
                }
              }
            });
   }
 
-
   @Override
   protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
     if (!"csv".equals(req.getParameter("format"))) {
@@ -157,7 +242,8 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
       return;
     }
 
-    WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext());
+    WebApplicationContext ctx =
+        WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext());
 
     try {
       resp.setContentType("text/csv");
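
The CSV emitted by writeSearchCsv always begins with the fixed header row below; the data row shown is purely illustrative, since the real entity names, field names, weights, and analyzers come from the entity specs and search configuration at runtime:

  entity,query_category,match_category,query_type,field_name,field_weight,search_analyzer,case_insensitive,query_boost,raw
  dataset,relevancy,fulltext,SimpleQueryStringBuilder,name,10.0,word_delimited,true,1.0,"{...}"
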
diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java b/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java
index 79d4f7077b797..da5f0b75efdae 100644
--- a/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java
+++ b/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java
@@ -1,45 +1,41 @@
 package com.datahub.gms.util;
 
-
-import lombok.Builder;
-import org.opensearch.index.query.functionscore.FieldValueFactorFunctionBuilder;
-import org.opensearch.index.query.functionscore.WeightBuilder;
-
 import java.io.PrintWriter;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import lombok.Builder;
+import org.opensearch.index.query.functionscore.FieldValueFactorFunctionBuilder;
+import org.opensearch.index.query.functionscore.WeightBuilder;
 
 @Builder
 public class CSVWriter {
-    private PrintWriter printWriter;
-
-    public CSVWriter println(String[] data) {
-        printWriter.println(convertToCSV(data));
-        return this;
-    }
-
-    private static String convertToCSV(String[] data) {
-        return Stream.of(data)
-            .map(CSVWriter::escapeSpecialCharacters)
-            .collect(Collectors.joining(","));
-    }
-
-    private static String escapeSpecialCharacters(String data) {
-        String escapedData = data.replaceAll("\\R", " ");
-        if (data.contains(",") || data.contains("\"") || data.contains("'")) {
-            data = data.replace("\"", "\"\"");
-            escapedData = "\"" + data + "\"";
-        }
-        return escapedData;
-    }
-
-    public static String builderToString(FieldValueFactorFunctionBuilder in) {
-        return String.format("{\"field\":\"%s\",\"factor\":%s,\"missing\":%s,\"modifier\":\"%s\"}",
-            in.fieldName(), in.factor(), in.missing(), in.modifier());
-    }
-
-    public static String builderToString(WeightBuilder in) {
-        return String.format("{\"weight\":%s}", in.getWeight());
+  private PrintWriter printWriter;
+
+  public CSVWriter println(String[] data) {
+    printWriter.println(convertToCSV(data));
+    return this;
+  }
+
+  private static String convertToCSV(String[] data) {
+    return Stream.of(data).map(CSVWriter::escapeSpecialCharacters).collect(Collectors.joining(","));
+  }
+
+  private static String escapeSpecialCharacters(String data) {
+    String escapedData = data.replaceAll("\\R", " ");
+    if (data.contains(",") || data.contains("\"") || data.contains("'")) {
+      data = data.replace("\"", "\"\"");
+      escapedData = "\"" + data + "\"";
     }
+    return escapedData;
+  }
+
+  public static String builderToString(FieldValueFactorFunctionBuilder in) {
+    return String.format(
+        "{\"field\":\"%s\",\"factor\":%s,\"missing\":%s,\"modifier\":\"%s\"}",
+        in.fieldName(), in.factor(), in.missing(), in.modifier());
+  }
+
+  public static String builderToString(WeightBuilder in) {
+    return String.format("{\"weight\":%s}", in.getWeight());
+  }
 }
-
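
A minimal usage sketch of the utility above, relying only on the builder and methods shown in this diff (the PrintWriter target is arbitrary); note how escapeSpecialCharacters doubles embedded quotes and wraps any field containing a comma or quote:

  PrintWriter pw = new PrintWriter(System.out, true);
  CSVWriter writer = CSVWriter.builder().printWriter(pw).build();
  writer.println(new String[] {"plain", "with,comma", "say \"hi\""});
  // Prints: plain,"with,comma","say ""hi"""
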
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
index df960808d8a41..8258a7d226ed6 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
@@ -9,10 +9,7 @@
 import lombok.Data;
 import lombok.Getter;
 
-
-/**
- * This policies config file defines the base set of privileges that DataHub supports.
- */
+/** This policies config file defines the base set of privileges that DataHub supports. */
 public class PoliciesConfig {
 
   public static final String PLATFORM_POLICY_TYPE = "PLATFORM";
@@ -22,547 +19,580 @@ public class PoliciesConfig {
   // Platform Privileges
   //
-  public static final Privilege MANAGE_POLICIES_PRIVILEGE = Privilege.of(
-      "MANAGE_POLICIES",
-      "Manage Policies",
-      "Create and remove access control policies. Be careful - Actors with this privilege are effectively super users.");
+  public static final Privilege MANAGE_POLICIES_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_POLICIES",
+          "Manage Policies",
+          "Create and remove access control policies. Be careful - Actors with this privilege are effectively super users.");
+
+  public static final Privilege MANAGE_INGESTION_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_INGESTION",
+          "Manage Metadata Ingestion",
+          "Create, remove, and update Metadata Ingestion sources.");
+
+  public static final Privilege MANAGE_SECRETS_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_SECRETS", "Manage Secrets", "Create & remove Secrets stored inside DataHub.");
+
+  public static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_USERS_AND_GROUPS",
+          "Manage Users & Groups",
+          "Create, remove, and update users and groups on DataHub.");
+
+  public static final Privilege VIEW_ANALYTICS_PRIVILEGE =
+      Privilege.of("VIEW_ANALYTICS", "View Analytics", "View the DataHub analytics dashboard.");
+
+  public static final Privilege GET_ANALYTICS_PRIVILEGE =
+      Privilege.of(
+          "GET_ANALYTICS_PRIVILEGE",
+          "Analytics API access",
+          "API read access to raw analytics data.");
 
-  public static final Privilege MANAGE_INGESTION_PRIVILEGE = Privilege.of(
-      "MANAGE_INGESTION",
-      "Manage Metadata Ingestion",
-      "Create, remove, and update Metadata Ingestion sources.");
+  public static final Privilege GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE =
+      Privilege.of(
+          "GENERATE_PERSONAL_ACCESS_TOKENS",
+          "Generate Personal Access Tokens",
+          "Generate personal access tokens for use with DataHub APIs.");
 
-  public static final Privilege MANAGE_SECRETS_PRIVILEGE = Privilege.of(
-      "MANAGE_SECRETS",
-      "Manage Secrets",
-      "Create & remove Secrets stored inside DataHub.");
+  public static final Privilege MANAGE_ACCESS_TOKENS =
+      Privilege.of(
+          "MANAGE_ACCESS_TOKENS",
+          "Manage All Access Tokens",
+          "Create, list and revoke access tokens on behalf of users in DataHub. Be careful - Actors with this "
+              + "privilege are effectively super users that can impersonate other users.");
 
-  public static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE = Privilege.of(
-      "MANAGE_USERS_AND_GROUPS",
-      "Manage Users & Groups",
-      "Create, remove, and update users and groups on DataHub.");
+  public static final Privilege MANAGE_DOMAINS_PRIVILEGE =
+      Privilege.of("MANAGE_DOMAINS", "Manage Domains", "Create and remove Asset Domains.");
 
-  public static final Privilege VIEW_ANALYTICS_PRIVILEGE = Privilege.of(
-      "VIEW_ANALYTICS",
-      "View Analytics",
-      "View the DataHub analytics dashboard.");
+  public static final Privilege MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_GLOBAL_ANNOUNCEMENTS",
+          "Manage Home Page Posts",
+          "Create and delete home page posts");
 
-  public static final Privilege GET_ANALYTICS_PRIVILEGE = Privilege.of(
-      "GET_ANALYTICS_PRIVILEGE",
-      "Analytics API access",
-      "API read access to raw analytics data.");
+  public static final Privilege MANAGE_TESTS_PRIVILEGE =
+      Privilege.of("MANAGE_TESTS", "Manage Tests", "Create and remove Asset Tests.");
 
-  public static final Privilege GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE = Privilege.of(
-      "GENERATE_PERSONAL_ACCESS_TOKENS",
-      "Generate Personal Access Tokens",
-      "Generate personal access tokens for use with DataHub APIs.");
-
-  public static final Privilege MANAGE_ACCESS_TOKENS = Privilege.of(
-      "MANAGE_ACCESS_TOKENS",
-      "Manage All Access Tokens",
-      "Create, list and revoke access tokens on behalf of users in DataHub. Be careful - Actors with this "
-          + "privilege are effectively super users that can impersonate other users."
-  );
-
-  public static final Privilege MANAGE_DOMAINS_PRIVILEGE = Privilege.of(
-      "MANAGE_DOMAINS",
-      "Manage Domains",
-      "Create and remove Asset Domains.");
-
-  public static final Privilege MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = Privilege.of(
-      "MANAGE_GLOBAL_ANNOUNCEMENTS",
-      "Manage Home Page Posts",
-      "Create and delete home page posts");
-
-  public static final Privilege MANAGE_TESTS_PRIVILEGE = Privilege.of(
-      "MANAGE_TESTS",
-      "Manage Tests",
-      "Create and remove Asset Tests.");
-
-  public static final Privilege MANAGE_GLOSSARIES_PRIVILEGE = Privilege.of(
-      "MANAGE_GLOSSARIES",
-      "Manage Glossaries",
-      "Create, edit, and remove Glossary Entities");
+  public static final Privilege MANAGE_GLOSSARIES_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_GLOSSARIES", "Manage Glossaries", "Create, edit, and remove Glossary Entities");
 
   public static final Privilege MANAGE_USER_CREDENTIALS_PRIVILEGE =
-      Privilege.of("MANAGE_USER_CREDENTIALS", "Manage User Credentials",
+      Privilege.of(
+          "MANAGE_USER_CREDENTIALS",
+          "Manage User Credentials",
           "Manage credentials for native DataHub users, including inviting new users and resetting passwords");
 
-  public static final Privilege MANAGE_TAGS_PRIVILEGE = Privilege.of(
-      "MANAGE_TAGS",
-      "Manage Tags",
-      "Create and remove Tags.");
-
-  public static final Privilege CREATE_TAGS_PRIVILEGE = Privilege.of(
-      "CREATE_TAGS",
-      "Create Tags",
-      "Create new Tags.");
-
-  public static final Privilege CREATE_DOMAINS_PRIVILEGE = Privilege.of(
-      "CREATE_DOMAINS",
-      "Create Domains",
-      "Create new Domains.");
-
-  public static final Privilege CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = Privilege.of(
-      "CREATE_GLOBAL_ANNOUNCEMENTS",
-      "Create Global Announcements",
-      "Create new Global Announcements.");
-
-  public static final Privilege MANAGE_GLOBAL_VIEWS = Privilege.of(
-      "MANAGE_GLOBAL_VIEWS",
-      "Manage Public Views",
-      "Create, update, and delete any Public (shared) Views.");
-
-  public static final Privilege MANAGE_GLOBAL_OWNERSHIP_TYPES = Privilege.of(
-      "MANAGE_GLOBAL_OWNERSHIP_TYPES",
-      "Manage Ownership Types",
-      "Create, update and delete Ownership Types.");
-
-  public static final List<Privilege> PLATFORM_PRIVILEGES = ImmutableList.of(
-      MANAGE_POLICIES_PRIVILEGE,
-      MANAGE_USERS_AND_GROUPS_PRIVILEGE,
-      VIEW_ANALYTICS_PRIVILEGE,
-      GET_ANALYTICS_PRIVILEGE,
-      MANAGE_DOMAINS_PRIVILEGE,
-      MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE,
-      MANAGE_INGESTION_PRIVILEGE,
-      MANAGE_SECRETS_PRIVILEGE,
-      GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE,
-      MANAGE_ACCESS_TOKENS,
-      MANAGE_TESTS_PRIVILEGE,
-      MANAGE_GLOSSARIES_PRIVILEGE,
-      MANAGE_USER_CREDENTIALS_PRIVILEGE,
-      MANAGE_TAGS_PRIVILEGE,
-      CREATE_TAGS_PRIVILEGE,
-      CREATE_DOMAINS_PRIVILEGE,
-      CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE,
-      MANAGE_GLOBAL_VIEWS,
-      MANAGE_GLOBAL_OWNERSHIP_TYPES
-  );
+  public static final Privilege MANAGE_TAGS_PRIVILEGE =
+      Privilege.of("MANAGE_TAGS", "Manage Tags", "Create and remove Tags.");
+
+  public static final Privilege CREATE_TAGS_PRIVILEGE =
+      Privilege.of("CREATE_TAGS", "Create Tags", "Create new Tags.");
+
+  public static final Privilege CREATE_DOMAINS_PRIVILEGE =
+      Privilege.of("CREATE_DOMAINS", "Create Domains", "Create new Domains.");
+
+  public static final Privilege CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE =
+      Privilege.of(
+          "CREATE_GLOBAL_ANNOUNCEMENTS",
+          "Create Global Announcements",
+          "Create new Global Announcements.");
+
+  public static final Privilege MANAGE_GLOBAL_VIEWS =
+      Privilege.of(
+          "MANAGE_GLOBAL_VIEWS",
+          "Manage Public Views",
+          "Create, update, and delete any Public (shared) Views.");
+
+  public static final Privilege MANAGE_GLOBAL_OWNERSHIP_TYPES =
+      Privilege.of(
+          "MANAGE_GLOBAL_OWNERSHIP_TYPES",
+          "Manage Ownership Types",
+          "Create, update and delete Ownership Types.");
+
+  public static final List<Privilege> PLATFORM_PRIVILEGES =
+      ImmutableList.of(
+          MANAGE_POLICIES_PRIVILEGE,
+          MANAGE_USERS_AND_GROUPS_PRIVILEGE,
+          VIEW_ANALYTICS_PRIVILEGE,
+          GET_ANALYTICS_PRIVILEGE,
+          MANAGE_DOMAINS_PRIVILEGE,
+          MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE,
+          MANAGE_INGESTION_PRIVILEGE,
+          MANAGE_SECRETS_PRIVILEGE,
+          GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE,
+          MANAGE_ACCESS_TOKENS,
+          MANAGE_TESTS_PRIVILEGE,
+          MANAGE_GLOSSARIES_PRIVILEGE,
+          MANAGE_USER_CREDENTIALS_PRIVILEGE,
+          MANAGE_TAGS_PRIVILEGE,
+          CREATE_TAGS_PRIVILEGE,
+          CREATE_DOMAINS_PRIVILEGE,
+          CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE,
+          MANAGE_GLOBAL_VIEWS,
+          MANAGE_GLOBAL_OWNERSHIP_TYPES);
 
   // Resource Privileges
   //
-  public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = Privilege.of(
-      "VIEW_ENTITY_PAGE",
-      "View Entity Page",
-      "The ability to view the entity page.");
-
-  public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_TAGS",
-      "Edit Tags",
-      "The ability to add and remove tags to an asset.");
-
-  public static final Privilege EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_GLOSSARY_TERMS",
-      "Edit Glossary Terms",
-      "The ability to add and remove glossary terms to an asset.");
-
-  public static final Privilege EDIT_ENTITY_OWNERS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_OWNERS",
-      "Edit Owners",
-      "The ability to add and remove owners of an entity.");
-
-  public static final Privilege EDIT_ENTITY_DOCS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_DOCS",
-      "Edit Description",
-      "The ability to edit the description (documentation) of an entity.");
-
-  public static final Privilege EDIT_ENTITY_DOC_LINKS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_DOC_LINKS",
-      "Edit Links",
-      "The ability to edit links associated with an entity.");
-
-  public static final Privilege EDIT_ENTITY_STATUS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_STATUS",
-      "Edit Status",
-      "The ability to edit the status of an entity (soft deleted or not).");
-
-  public static final Privilege EDIT_ENTITY_DOMAINS_PRIVILEGE = Privilege.of(
-      "EDIT_DOMAINS_PRIVILEGE",
-      "Edit Domain",
-      "The ability to edit the Domain of an entity.");
-
-  public static final Privilege EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_DATA_PRODUCTS",
-      "Edit Data Product",
-      "The ability to edit the Data Product of an entity.");
-
-  public static final Privilege EDIT_ENTITY_DEPRECATION_PRIVILEGE = Privilege.of(
-      "EDIT_DEPRECATION_PRIVILEGE",
-      "Edit Deprecation",
-      "The ability to edit the Deprecation status of an entity.");
-
-  public static final Privilege EDIT_ENTITY_ASSERTIONS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_ASSERTIONS",
-      "Edit Assertions",
-      "The ability to add and remove assertions from an entity.");
-
-  public static final Privilege EDIT_ENTITY_OPERATIONS_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_OPERATIONS",
-      "Edit Operations",
-      "The ability to report or edit operations information about an entity.");
-
-  public static final Privilege EDIT_ENTITY_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY",
-      "Edit Entity",
-      "The ability to edit any information about an entity. Super user privileges for the entity.");
-
-  public static final Privilege DELETE_ENTITY_PRIVILEGE = Privilege.of(
-      "DELETE_ENTITY",
-      "Delete",
-      "The ability to delete the delete this entity.");
-
-  public static final Privilege EDIT_LINEAGE_PRIVILEGE = Privilege.of(
-      "EDIT_LINEAGE",
-      "Edit Lineage",
-      "The ability to add and remove lineage edges for this entity.");
-
-  public static final Privilege EDIT_ENTITY_EMBED_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_EMBED",
-      "Edit Embedded Content",
-      "The ability to edit the embedded content for an entity.");
-
-  public static final List<Privilege> COMMON_ENTITY_PRIVILEGES = ImmutableList.of(
-      VIEW_ENTITY_PAGE_PRIVILEGE,
-      EDIT_ENTITY_TAGS_PRIVILEGE,
-      EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE,
-      EDIT_ENTITY_OWNERS_PRIVILEGE,
-      EDIT_ENTITY_DOCS_PRIVILEGE,
-      EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
-      EDIT_ENTITY_STATUS_PRIVILEGE,
-      EDIT_ENTITY_DOMAINS_PRIVILEGE,
-      EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE,
-      EDIT_ENTITY_DEPRECATION_PRIVILEGE,
-      EDIT_ENTITY_PRIVILEGE,
-      DELETE_ENTITY_PRIVILEGE
-  );
+  public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE =
+      Privilege.of("VIEW_ENTITY_PAGE", "View Entity Page", "The ability to view the entity page.");
+
+  public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_TAGS", "Edit Tags", "The ability to add and remove tags to an asset.");
+
+  public static final Privilege EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_GLOSSARY_TERMS",
+          "Edit Glossary Terms",
+          "The ability to add and remove glossary terms to an asset.");
+
+  public static final Privilege EDIT_ENTITY_OWNERS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_OWNERS",
+          "Edit Owners",
+          "The ability to add and remove owners of an entity.");
+
+  public static final Privilege EDIT_ENTITY_DOCS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_DOCS",
+          "Edit Description",
+          "The ability to edit the description (documentation) of an entity.");
+
+  public static final Privilege EDIT_ENTITY_DOC_LINKS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_DOC_LINKS",
+          "Edit Links",
+          "The ability to edit links associated with an entity.");
+
+  public static final Privilege EDIT_ENTITY_STATUS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_STATUS",
+          "Edit Status",
+          "The ability to edit the status of an entity (soft deleted or not).");
+
+  public static final Privilege EDIT_ENTITY_DOMAINS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_DOMAINS_PRIVILEGE", "Edit Domain", "The ability to edit the Domain of an entity.");
+
+  public static final Privilege EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_DATA_PRODUCTS",
+          "Edit Data Product",
+          "The ability to edit the Data Product of an entity.");
+
+  public static final Privilege EDIT_ENTITY_DEPRECATION_PRIVILEGE =
+      Privilege.of(
+          "EDIT_DEPRECATION_PRIVILEGE",
+          "Edit Deprecation",
+          "The ability to edit the Deprecation status of an entity.");
+
+  public static final Privilege EDIT_ENTITY_ASSERTIONS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_ASSERTIONS",
+          "Edit Assertions",
+          "The ability to add and remove assertions from an entity.");
+
+  public static final Privilege EDIT_ENTITY_OPERATIONS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_OPERATIONS",
+          "Edit Operations",
+          "The ability to report or edit operations information about an entity.");
+
+  public static final Privilege EDIT_ENTITY_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY",
+          "Edit Entity",
+          "The ability to edit any information about an entity. Super user privileges for the entity.");
+
+  public static final Privilege DELETE_ENTITY_PRIVILEGE =
+      Privilege.of("DELETE_ENTITY", "Delete", "The ability to delete the delete this entity.");
+
+  public static final Privilege EDIT_LINEAGE_PRIVILEGE =
+      Privilege.of(
+          "EDIT_LINEAGE",
+          "Edit Lineage",
+          "The ability to add and remove lineage edges for this entity.");
+
+  public static final Privilege EDIT_ENTITY_EMBED_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_EMBED",
+          "Edit Embedded Content",
+          "The ability to edit the embedded content for an entity.");
+
+  public static final List<Privilege> COMMON_ENTITY_PRIVILEGES =
+      ImmutableList.of(
+          VIEW_ENTITY_PAGE_PRIVILEGE,
+          EDIT_ENTITY_TAGS_PRIVILEGE,
+          EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE,
+          EDIT_ENTITY_OWNERS_PRIVILEGE,
+          EDIT_ENTITY_DOCS_PRIVILEGE,
+          EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
+          EDIT_ENTITY_STATUS_PRIVILEGE,
+          EDIT_ENTITY_DOMAINS_PRIVILEGE,
+          EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE,
+          EDIT_ENTITY_DEPRECATION_PRIVILEGE,
+          EDIT_ENTITY_PRIVILEGE,
+          DELETE_ENTITY_PRIVILEGE);
 
   // Dataset Privileges
-  public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE = Privilege.of(
-      "EDIT_DATASET_COL_TAGS",
-      "Edit Dataset Column Tags",
-      "The ability to edit the column (field) tags associated with a dataset schema."
-  );
-
-  public static final Privilege EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE = Privilege.of(
-      "EDIT_DATASET_COL_GLOSSARY_TERMS",
-      "Edit Dataset Column Glossary Terms",
-      "The ability to edit the column (field) glossary terms associated with a dataset schema."
-  );
-
-  public static final Privilege EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE = Privilege.of(
-      "EDIT_DATASET_COL_DESCRIPTION",
-      "Edit Dataset Column Descriptions",
-      "The ability to edit the column (field) descriptions associated with a dataset schema."
-  );
-
-  public static final Privilege VIEW_DATASET_USAGE_PRIVILEGE = Privilege.of(
-      "VIEW_DATASET_USAGE",
-      "View Dataset Usage",
-      "The ability to access dataset usage information (includes usage statistics and queries).");
-
-  public static final Privilege VIEW_DATASET_PROFILE_PRIVILEGE = Privilege.of(
-      "VIEW_DATASET_PROFILE",
-      "View Dataset Profile",
-      "The ability to access dataset profile (snapshot statistics)");
-
-  public static final Privilege EDIT_QUERIES_PRIVILEGE = Privilege.of(
-      "EDIT_ENTITY_QUERIES",
-      "Edit Dataset Queries",
-      "The ability to edit the Queries for a Dataset.");
+  public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_DATASET_COL_TAGS",
+          "Edit Dataset Column Tags",
+          "The ability to edit the column (field) tags associated with a dataset schema.");
+
+  public static final Privilege EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_DATASET_COL_GLOSSARY_TERMS",
+          "Edit Dataset Column Glossary Terms",
+          "The ability to edit the column (field) glossary terms associated with a dataset schema.");
+
+  public static final Privilege EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE =
+      Privilege.of(
+          "EDIT_DATASET_COL_DESCRIPTION",
+          "Edit Dataset Column Descriptions",
+          "The ability to edit the column (field) descriptions associated with a dataset schema.");
+
+  public static final Privilege VIEW_DATASET_USAGE_PRIVILEGE =
+      Privilege.of(
+          "VIEW_DATASET_USAGE",
+          "View Dataset Usage",
+          "The ability to access dataset usage information (includes usage statistics and queries).");
+
+  public static final Privilege VIEW_DATASET_PROFILE_PRIVILEGE =
+      Privilege.of(
+          "VIEW_DATASET_PROFILE",
+          "View Dataset Profile",
+          "The ability to access dataset profile (snapshot statistics)");
+
+  public static final Privilege EDIT_QUERIES_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_QUERIES",
+          "Edit Dataset Queries",
+          "The ability to edit the Queries for a Dataset.");
 
   // Tag Privileges
-  public static final Privilege EDIT_TAG_COLOR_PRIVILEGE = Privilege.of(
-      "EDIT_TAG_COLOR",
-      "Edit Tag Color",
-      "The ability to change the color of a Tag.");
+  public static final Privilege EDIT_TAG_COLOR_PRIVILEGE =
+      Privilege.of("EDIT_TAG_COLOR", "Edit Tag Color", "The ability to change the color of a Tag.");
 
   // Group Privileges
-  public static final Privilege EDIT_GROUP_MEMBERS_PRIVILEGE = Privilege.of(
-      "EDIT_GROUP_MEMBERS",
-      "Edit Group Members",
-      "The ability to add and remove members to a group.");
+  public static final Privilege EDIT_GROUP_MEMBERS_PRIVILEGE =
+      Privilege.of(
+          "EDIT_GROUP_MEMBERS",
+          "Edit Group Members",
+          "The ability to add and remove members to a group.");
 
   // User Privileges
-  public static final Privilege EDIT_USER_PROFILE_PRIVILEGE = Privilege.of(
-      "EDIT_USER_PROFILE",
-      "Edit User Profile",
-      "The ability to change the user's profile including display name, bio, title, profile image, etc.");
+  public static final Privilege EDIT_USER_PROFILE_PRIVILEGE =
+      Privilege.of(
+          "EDIT_USER_PROFILE",
+          "Edit User Profile",
+          "The ability to change the user's profile including display name, bio, title, profile image, etc.");
 
   // User + Group Privileges
-  public static final Privilege EDIT_CONTACT_INFO_PRIVILEGE = Privilege.of(
-      "EDIT_CONTACT_INFO",
-      "Edit Contact Information",
-      "The ability to change the contact information such as email & chat handles.");
+  public static final Privilege EDIT_CONTACT_INFO_PRIVILEGE =
+      Privilege.of(
+          "EDIT_CONTACT_INFO",
+          "Edit Contact Information",
+          "The ability to change the contact information such as email & chat handles.");
 
   // Glossary Node Privileges
-  public static final Privilege MANAGE_GLOSSARY_CHILDREN_PRIVILEGE = Privilege.of(
-      "MANAGE_GLOSSARY_CHILDREN",
-      "Manage Direct Glossary Children",
-      "The ability to create and delete the direct children of this entity.");
+  public static final Privilege MANAGE_GLOSSARY_CHILDREN_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_GLOSSARY_CHILDREN",
+          "Manage Direct Glossary Children",
+          "The ability to create and delete the direct children of this entity.");
 
   // Glossary Node Privileges
-  public static final Privilege MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE = Privilege.of(
-      "MANAGE_ALL_GLOSSARY_CHILDREN",
-      "Manage All Glossary Children",
-      "The ability to create and delete everything underneath this entity.");
-
-  // REST API Specific Privileges (not adding to lists of privileges above as those affect GraphQL as well)
-  public static final Privilege GET_TIMELINE_PRIVILEGE = Privilege.of(
-      "GET_TIMELINE_PRIVILEGE",
-      "Get Timeline API",
-      "The ability to use the GET Timeline API.");
-
-  public static final Privilege GET_ENTITY_PRIVILEGE = Privilege.of(
-      "GET_ENTITY_PRIVILEGE",
-      "Get Entity + Relationships API",
-      "The ability to use the GET Entity and Relationships API.");
-
-  public static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE = Privilege.of(
-      "GET_TIMESERIES_ASPECT_PRIVILEGE",
-      "Get Timeseries Aspect API",
-      "The ability to use the GET Timeseries Aspect API.");
-
-  public static final Privilege GET_COUNTS_PRIVILEGE = Privilege.of(
-      "GET_COUNTS_PRIVILEGE",
-      "Get Aspect/Entity Count APIs",
-      "The ability to use the GET Aspect/Entity Count APIs.");
-
-  public static final Privilege RESTORE_INDICES_PRIVILEGE = Privilege.of(
-      "RESTORE_INDICES_PRIVILEGE",
-      "Restore Indicies API",
-      "The ability to use the Restore Indices API.");
-
-  public static final Privilege GET_TIMESERIES_INDEX_SIZES_PRIVILEGE = Privilege.of(
-      "GET_TIMESERIES_INDEX_SIZES_PRIVILEGE",
-      "Get Timeseries index sizes API",
-      "The ability to use the get Timeseries indices size API.");
-
-  public static final Privilege TRUNCATE_TIMESERIES_INDEX_PRIVILEGE = Privilege.of(
-      "TRUNCATE_TIMESERIES_INDEX_PRIVILEGE",
-      "Truncate timeseries aspect index size API",
-      "The ability to use the API to truncate a timeseries index.");
-
-  public static final Privilege GET_ES_TASK_STATUS_PRIVILEGE = Privilege.of(
-      "GET_ES_TASK_STATUS_PRIVILEGE",
-      "Get ES task status API",
-      "The ability to use the get task status API for an ElasticSearch task.");
-
-  public static final Privilege SEARCH_PRIVILEGE = Privilege.of(
-      "SEARCH_PRIVILEGE",
-      "Search API",
-      "The ability to access search APIs.");
-
-  public static final Privilege SET_WRITEABLE_PRIVILEGE = Privilege.of(
-      "SET_WRITEABLE_PRIVILEGE",
-      "Enable/Disable Writeability API",
-      "The ability to enable or disable GMS writeability for data migrations.");
-
-  public static final Privilege APPLY_RETENTION_PRIVILEGE = Privilege.of(
-      "APPLY_RETENTION_PRIVILEGE",
-      "Apply Retention API",
-      "The ability to apply retention using the API.");
-
-  public static final Privilege PRODUCE_PLATFORM_EVENT_PRIVILEGE = Privilege.of(
-      "PRODUCE_PLATFORM_EVENT_PRIVILEGE",
-      "Produce Platform Event API",
-      "The ability to produce Platform Events using the API.");
-
-  public static final ResourcePrivileges DATASET_PRIVILEGES = ResourcePrivileges.of(
-      "dataset",
-      "Datasets",
-      "Datasets indexed by DataHub", Stream.of(
-          COMMON_ENTITY_PRIVILEGES,
-          ImmutableList.of(
-              VIEW_DATASET_USAGE_PRIVILEGE,
-              VIEW_DATASET_PROFILE_PRIVILEGE,
-              EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE,
-              EDIT_DATASET_COL_TAGS_PRIVILEGE,
-              EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE,
-              EDIT_ENTITY_ASSERTIONS_PRIVILEGE,
-              EDIT_LINEAGE_PRIVILEGE,
-              EDIT_ENTITY_EMBED_PRIVILEGE,
-              EDIT_QUERIES_PRIVILEGE))
-          .flatMap(Collection::stream)
-          .collect(Collectors.toList())
-  );
+  public static final Privilege MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_ALL_GLOSSARY_CHILDREN",
+          "Manage All Glossary Children",
+          "The ability to create and delete everything underneath this entity.");
+
+  // REST API Specific Privileges (not adding to lists of privileges above as those affect GraphQL
+  // as well)
+  public static final Privilege GET_TIMELINE_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMELINE_PRIVILEGE", "Get Timeline API", "The ability to use the GET Timeline API.");
+
+  public static final Privilege GET_ENTITY_PRIVILEGE =
+      Privilege.of(
+          "GET_ENTITY_PRIVILEGE",
+          "Get Entity + Relationships API",
+          "The ability to use the GET Entity and Relationships API.");
+
+  public static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMESERIES_ASPECT_PRIVILEGE",
+          "Get Timeseries Aspect API",
+          "The ability to use the GET Timeseries Aspect API.");
+
+  public static final Privilege GET_COUNTS_PRIVILEGE =
+      Privilege.of(
+          "GET_COUNTS_PRIVILEGE",
+          "Get Aspect/Entity Count APIs",
+          "The ability to use the GET Aspect/Entity Count APIs.");
+
+  public static final Privilege RESTORE_INDICES_PRIVILEGE =
+      Privilege.of(
+          "RESTORE_INDICES_PRIVILEGE",
+          "Restore Indicies API",
+          "The ability to use the Restore Indices API.");
+
+  public static final Privilege GET_TIMESERIES_INDEX_SIZES_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMESERIES_INDEX_SIZES_PRIVILEGE",
+          "Get Timeseries index sizes API",
+          "The ability to use the get Timeseries indices size API.");
+
+  public static final Privilege TRUNCATE_TIMESERIES_INDEX_PRIVILEGE =
+      Privilege.of(
+          "TRUNCATE_TIMESERIES_INDEX_PRIVILEGE",
+          "Truncate timeseries aspect index size API",
+          "The ability to use the API to truncate a timeseries index.");
+
+  public static final Privilege GET_ES_TASK_STATUS_PRIVILEGE =
+      Privilege.of(
+          "GET_ES_TASK_STATUS_PRIVILEGE",
+          "Get ES task status API",
+          "The ability to use the get task status API for an ElasticSearch task.");
+
+  public static final Privilege SEARCH_PRIVILEGE =
+      Privilege.of("SEARCH_PRIVILEGE", "Search API", "The ability to access search APIs.");
+
+  public static final Privilege SET_WRITEABLE_PRIVILEGE =
+      Privilege.of(
+          "SET_WRITEABLE_PRIVILEGE",
+          "Enable/Disable Writeability API",
+          "The ability to enable or disable GMS writeability for data migrations.");
+
+  public static final Privilege APPLY_RETENTION_PRIVILEGE =
+      Privilege.of(
+          "APPLY_RETENTION_PRIVILEGE",
+          "Apply Retention API",
+          "The ability to apply retention using the API.");
+
+  public static final Privilege PRODUCE_PLATFORM_EVENT_PRIVILEGE =
+      Privilege.of(
+          "PRODUCE_PLATFORM_EVENT_PRIVILEGE",
+          "Produce Platform Event API",
+          "The ability to produce Platform Events using the API.");
+
+  public static final ResourcePrivileges DATASET_PRIVILEGES =
+      ResourcePrivileges.of(
+          "dataset",
+          "Datasets",
+          "Datasets indexed by DataHub",
+          Stream.of(
+                  COMMON_ENTITY_PRIVILEGES,
+                  ImmutableList.of(
+                      VIEW_DATASET_USAGE_PRIVILEGE,
+                      VIEW_DATASET_PROFILE_PRIVILEGE,
+                      EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE,
+                      EDIT_DATASET_COL_TAGS_PRIVILEGE,
+                      EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE,
+                      EDIT_ENTITY_ASSERTIONS_PRIVILEGE,
+                      EDIT_LINEAGE_PRIVILEGE,
+                      EDIT_ENTITY_EMBED_PRIVILEGE,
+                      EDIT_QUERIES_PRIVILEGE))
+              .flatMap(Collection::stream)
+              .collect(Collectors.toList()));
 
   // Charts Privileges
-  public static final ResourcePrivileges CHART_PRIVILEGES = ResourcePrivileges.of(
-      "chart",
-      "Charts",
-      "Charts indexed by DataHub",
-      Stream.concat(
-          COMMON_ENTITY_PRIVILEGES.stream(),
-          ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream())
-          .collect(Collectors.toList())
-  );
+  public static final ResourcePrivileges CHART_PRIVILEGES =
+      ResourcePrivileges.of(
+          "chart",
+          "Charts",
+          "Charts indexed by DataHub",
+          Stream.concat(
+                  COMMON_ENTITY_PRIVILEGES.stream(),
+                  ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream())
+              .collect(Collectors.toList()));
 
   // Dashboard Privileges
-  public static final ResourcePrivileges DASHBOARD_PRIVILEGES = ResourcePrivileges.of(
-      "dashboard",
-      "Dashboards",
-      "Dashboards indexed by DataHub",
-      Stream.concat(
-          COMMON_ENTITY_PRIVILEGES.stream(),
-          ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream())
-          .collect(Collectors.toList())
-  );
+  public static final ResourcePrivileges DASHBOARD_PRIVILEGES =
+      ResourcePrivileges.of(
+          "dashboard",
+          "Dashboards",
+          "Dashboards indexed by DataHub",
+          Stream.concat(
+                  COMMON_ENTITY_PRIVILEGES.stream(),
+                  ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream())
+              .collect(Collectors.toList()));
 
   // Data Doc Privileges
-  public static final ResourcePrivileges NOTEBOOK_PRIVILEGES = ResourcePrivileges.of(
-      "notebook",
-      "Notebook",
-      "Notebook indexed by DataHub",
-      COMMON_ENTITY_PRIVILEGES
-  );
+  public static final ResourcePrivileges NOTEBOOK_PRIVILEGES =
+      ResourcePrivileges.of(
+          "notebook", "Notebook", "Notebook indexed by DataHub", COMMON_ENTITY_PRIVILEGES);
 
   // Data Flow Privileges
-  public static final ResourcePrivileges DATA_FLOW_PRIVILEGES = ResourcePrivileges.of(
-      "dataFlow",
-      "Data Pipelines",
-      "Data Pipelines indexed by DataHub",
-      COMMON_ENTITY_PRIVILEGES
-  );
+  public static final ResourcePrivileges DATA_FLOW_PRIVILEGES =
+      ResourcePrivileges.of(
+          "dataFlow",
+          "Data Pipelines",
+          "Data Pipelines indexed by DataHub",
+          COMMON_ENTITY_PRIVILEGES);
 
   // Data Job Privileges
-  public static final ResourcePrivileges DATA_JOB_PRIVILEGES = ResourcePrivileges.of(
-      "dataJob",
-      "Data Tasks",
-      "Data Tasks indexed by DataHub",
-      Stream.concat(
-          COMMON_ENTITY_PRIVILEGES.stream(),
-          ImmutableList.of(EDIT_LINEAGE_PRIVILEGE).stream())
-          .collect(Collectors.toList())
-  );
+  public static final ResourcePrivileges DATA_JOB_PRIVILEGES =
+      ResourcePrivileges.of(
+          "dataJob",
+          "Data Tasks",
+          "Data Tasks indexed by DataHub",
+          Stream.concat(
+                  COMMON_ENTITY_PRIVILEGES.stream(),
+                  ImmutableList.of(EDIT_LINEAGE_PRIVILEGE).stream())
+              .collect(Collectors.toList()));
 
   // Tag Privileges
-  public static final ResourcePrivileges TAG_PRIVILEGES = ResourcePrivileges.of(
-      "tag",
-      "Tags",
-      "Tags indexed by DataHub",
-      ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_TAG_COLOR_PRIVILEGE,
-          EDIT_ENTITY_DOCS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE)
-  );
+  public static final ResourcePrivileges TAG_PRIVILEGES =
+      ResourcePrivileges.of(
+          "tag",
+          "Tags",
+          "Tags indexed by DataHub",
+          ImmutableList.of(
+              VIEW_ENTITY_PAGE_PRIVILEGE,
+              EDIT_ENTITY_OWNERS_PRIVILEGE,
+              EDIT_TAG_COLOR_PRIVILEGE,
+              EDIT_ENTITY_DOCS_PRIVILEGE,
+              EDIT_ENTITY_PRIVILEGE,
+              DELETE_ENTITY_PRIVILEGE));
 
   // Container Privileges
-  public static final ResourcePrivileges CONTAINER_PRIVILEGES = ResourcePrivileges.of(
-      "container",
-      "Containers",
-      "Containers indexed by DataHub",
-      COMMON_ENTITY_PRIVILEGES
-  );
+  public static final ResourcePrivileges CONTAINER_PRIVILEGES =
+      ResourcePrivileges.of(
+          "container", "Containers", "Containers indexed by DataHub", COMMON_ENTITY_PRIVILEGES);
 
   // Domain Privileges
-  public static final Privilege MANAGE_DATA_PRODUCTS_PRIVILEGE = Privilege.of(
-      "MANAGE_DATA_PRODUCTS",
-      "Manage Data Products",
-      "The ability to create, edit, and delete Data Products within a Domain");
-
+  public static final Privilege MANAGE_DATA_PRODUCTS_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_DATA_PRODUCTS",
+          "Manage Data Products",
+          "The ability to create, edit, and delete Data Products within a Domain");
 
   // Domain Privileges
-  public static final ResourcePrivileges DOMAIN_PRIVILEGES = ResourcePrivileges.of(
-      "domain",
-      "Domains",
-      "Domains created on DataHub",
-      ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE,
-          EDIT_ENTITY_DOC_LINKS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, MANAGE_DATA_PRODUCTS_PRIVILEGE)
-  );
+  public static final ResourcePrivileges DOMAIN_PRIVILEGES =
+      ResourcePrivileges.of(
+          "domain",
+          "Domains",
+          "Domains created on DataHub",
+          ImmutableList.of(
+              VIEW_ENTITY_PAGE_PRIVILEGE,
+              EDIT_ENTITY_OWNERS_PRIVILEGE,
+              EDIT_ENTITY_DOCS_PRIVILEGE,
+              EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
+              EDIT_ENTITY_PRIVILEGE,
+              DELETE_ENTITY_PRIVILEGE,
+              MANAGE_DATA_PRODUCTS_PRIVILEGE));
 
   // Data Product Privileges
-  public static final ResourcePrivileges DATA_PRODUCT_PRIVILEGES = ResourcePrivileges.of(
-      "dataProduct",
-      "Data Products",
-      "Data Products created on DataHub",
-      ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE,
-          EDIT_ENTITY_DOC_LINKS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, EDIT_ENTITY_TAGS_PRIVILEGE,
-          EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, EDIT_ENTITY_DOMAINS_PRIVILEGE)
-  );
+  public static final ResourcePrivileges DATA_PRODUCT_PRIVILEGES =
+      ResourcePrivileges.of(
+          "dataProduct",
+          "Data Products",
+          "Data Products created on DataHub",
+          ImmutableList.of(
+              VIEW_ENTITY_PAGE_PRIVILEGE,
+              EDIT_ENTITY_OWNERS_PRIVILEGE,
+              EDIT_ENTITY_DOCS_PRIVILEGE,
+              EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
+              EDIT_ENTITY_PRIVILEGE,
+              DELETE_ENTITY_PRIVILEGE,
+              EDIT_ENTITY_TAGS_PRIVILEGE,
+              EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE,
+              EDIT_ENTITY_DOMAINS_PRIVILEGE));
 
   // Glossary Term Privileges
-  public static final ResourcePrivileges GLOSSARY_TERM_PRIVILEGES = ResourcePrivileges.of(
-      "glossaryTerm",
-      "Glossary Terms",
-      "Glossary Terms created on DataHub",
-      ImmutableList.of(
-          VIEW_ENTITY_PAGE_PRIVILEGE,
-          EDIT_ENTITY_OWNERS_PRIVILEGE,
-          EDIT_ENTITY_DOCS_PRIVILEGE,
-          EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
-          EDIT_ENTITY_DEPRECATION_PRIVILEGE,
-          EDIT_ENTITY_PRIVILEGE)
-  );
+  public static final ResourcePrivileges GLOSSARY_TERM_PRIVILEGES =
+      ResourcePrivileges.of(
+          "glossaryTerm",
+          "Glossary Terms",
+          "Glossary Terms created on DataHub",
+          ImmutableList.of(
+              VIEW_ENTITY_PAGE_PRIVILEGE,
+              EDIT_ENTITY_OWNERS_PRIVILEGE,
+              EDIT_ENTITY_DOCS_PRIVILEGE,
+              EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
+              EDIT_ENTITY_DEPRECATION_PRIVILEGE,
+              EDIT_ENTITY_PRIVILEGE));
 
   // Glossary Node Privileges
-  public static final ResourcePrivileges GLOSSARY_NODE_PRIVILEGES = ResourcePrivileges.of(
-      "glossaryNode",
-      "Glossary Term Groups",
-      "Glossary Term Groups created on DataHub",
-      ImmutableList.of(
-          VIEW_ENTITY_PAGE_PRIVILEGE,
-          EDIT_ENTITY_OWNERS_PRIVILEGE,
-          EDIT_ENTITY_DOCS_PRIVILEGE,
-          EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
-          EDIT_ENTITY_DEPRECATION_PRIVILEGE,
-          EDIT_ENTITY_PRIVILEGE,
-          MANAGE_GLOSSARY_CHILDREN_PRIVILEGE,
-          MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)
-  );
+  public static final ResourcePrivileges GLOSSARY_NODE_PRIVILEGES =
+      ResourcePrivileges.of(
+          "glossaryNode",
+          "Glossary Term Groups",
+          "Glossary Term Groups created on DataHub",
+          ImmutableList.of(
+              VIEW_ENTITY_PAGE_PRIVILEGE,
+              EDIT_ENTITY_OWNERS_PRIVILEGE,
+              EDIT_ENTITY_DOCS_PRIVILEGE,
+              EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
+              EDIT_ENTITY_DEPRECATION_PRIVILEGE,
+              EDIT_ENTITY_PRIVILEGE,
+              MANAGE_GLOSSARY_CHILDREN_PRIVILEGE,
+              MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE));
 
   // Group Privileges
-  public static final ResourcePrivileges CORP_GROUP_PRIVILEGES = ResourcePrivileges.of(
-      "corpGroup",
-      "Groups",
-      "Groups on DataHub",
-      ImmutableList.of(
-          VIEW_ENTITY_PAGE_PRIVILEGE,
-          EDIT_ENTITY_OWNERS_PRIVILEGE,
-          EDIT_GROUP_MEMBERS_PRIVILEGE,
-          EDIT_CONTACT_INFO_PRIVILEGE,
-          EDIT_ENTITY_DOCS_PRIVILEGE,
-          EDIT_ENTITY_PRIVILEGE)
-  );
+  public static final ResourcePrivileges CORP_GROUP_PRIVILEGES =
+      ResourcePrivileges.of(
+          "corpGroup",
+          "Groups",
+          "Groups on DataHub",
+          ImmutableList.of(
+              VIEW_ENTITY_PAGE_PRIVILEGE,
+              EDIT_ENTITY_OWNERS_PRIVILEGE,
+              EDIT_GROUP_MEMBERS_PRIVILEGE,
+              EDIT_CONTACT_INFO_PRIVILEGE,
+              EDIT_ENTITY_DOCS_PRIVILEGE,
+              EDIT_ENTITY_PRIVILEGE));
 
   // User Privileges
-  public static final ResourcePrivileges CORP_USER_PRIVILEGES = ResourcePrivileges.of(
-      "corpuser",
-      "Users",
-      "Users on DataHub",
+  public static final ResourcePrivileges CORP_USER_PRIVILEGES =
+      ResourcePrivileges.of(
+          "corpuser",
+          "Users",
+          "Users on DataHub",
+          ImmutableList.of(
+              VIEW_ENTITY_PAGE_PRIVILEGE,
+              EDIT_CONTACT_INFO_PRIVILEGE,
+              EDIT_USER_PROFILE_PRIVILEGE,
+              EDIT_ENTITY_PRIVILEGE));
+
+  public static final List<ResourcePrivileges> ENTITY_RESOURCE_PRIVILEGES =
       ImmutableList.of(
-          VIEW_ENTITY_PAGE_PRIVILEGE,
-          EDIT_CONTACT_INFO_PRIVILEGE,
-          EDIT_USER_PROFILE_PRIVILEGE,
-          EDIT_ENTITY_PRIVILEGE)
-  );
-
-  public static final List<ResourcePrivileges> ENTITY_RESOURCE_PRIVILEGES = ImmutableList.of(
-      DATASET_PRIVILEGES,
-      DASHBOARD_PRIVILEGES,
-      CHART_PRIVILEGES,
-      DATA_FLOW_PRIVILEGES,
-      DATA_JOB_PRIVILEGES,
-      TAG_PRIVILEGES,
-      CONTAINER_PRIVILEGES,
-      DOMAIN_PRIVILEGES,
-      GLOSSARY_TERM_PRIVILEGES,
-      GLOSSARY_NODE_PRIVILEGES,
-      CORP_GROUP_PRIVILEGES,
-      CORP_USER_PRIVILEGES,
-      NOTEBOOK_PRIVILEGES,
-      DATA_PRODUCT_PRIVILEGES
-  );
+          DATASET_PRIVILEGES,
+          DASHBOARD_PRIVILEGES,
+          CHART_PRIVILEGES,
+          DATA_FLOW_PRIVILEGES,
+          DATA_JOB_PRIVILEGES,
+          TAG_PRIVILEGES,
+          CONTAINER_PRIVILEGES,
+          DOMAIN_PRIVILEGES,
+          GLOSSARY_TERM_PRIVILEGES,
+          GLOSSARY_NODE_PRIVILEGES,
+          CORP_GROUP_PRIVILEGES,
+          CORP_USER_PRIVILEGES,
+          NOTEBOOK_PRIVILEGES,
+          DATA_PRODUCT_PRIVILEGES);
 
   // Merge all entity specific resource privileges to create a superset of all resource privileges
-  public static final ResourcePrivileges ALL_RESOURCE_PRIVILEGES = ResourcePrivileges.of(
-      "all",
-      "All Types",
-      "All Types",
-      ENTITY_RESOURCE_PRIVILEGES.stream().flatMap(resourcePrivileges -> resourcePrivileges.getPrivileges().stream()).distinct().collect(
-          Collectors.toList())
-  );
+  public static final ResourcePrivileges ALL_RESOURCE_PRIVILEGES =
+      ResourcePrivileges.of(
+          "all",
+          "All Types",
+          "All Types",
+          ENTITY_RESOURCE_PRIVILEGES.stream()
+              .flatMap(resourcePrivileges -> resourcePrivileges.getPrivileges().stream())
+              .distinct()
+              .collect(Collectors.toList()));
 
   public static final List<ResourcePrivileges> RESOURCE_PRIVILEGES =
-      ImmutableList.<ResourcePrivileges>builder().addAll(ENTITY_RESOURCE_PRIVILEGES)
+      ImmutableList.<ResourcePrivileges>builder()
+          .addAll(ENTITY_RESOURCE_PRIVILEGES)
           .add(ALL_RESOURCE_PRIVILEGES)
           .build();
@@ -593,9 +623,10 @@ static ResourcePrivileges of(
         String resourceTypeDisplayName,
         String resourceTypeDescription,
         List<Privilege> privileges) {
-      return new ResourcePrivileges(resourceType, resourceTypeDisplayName, resourceTypeDescription, privileges);
+      return new ResourcePrivileges(
+          resourceType, resourceTypeDisplayName, resourceTypeDescription, privileges);
     }
   }
 
-  private PoliciesConfig() { }
+  private PoliciesConfig() {}
 }
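
Taken together, the factory methods above make new privilege groupings compact one-liners. A hypothetical sketch using only the Privilege.of and ResourcePrivileges.of signatures visible in this diff (the EXAMPLE_* names are invented for illustration and are not part of PoliciesConfig):

  Privilege examplePrivilege =
      Privilege.of("EXAMPLE_PRIVILEGE", "Example Privilege", "An illustrative privilege.");
  ResourcePrivileges exampleResourcePrivileges =
      ResourcePrivileges.of(
          "exampleEntity",
          "Examples",
          "An illustrative resource type",
          ImmutableList.of(examplePrivilege));
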
"https" : "http", restLiServerHost, restLiServerPort)), - sslProtocol, - params); + URI.create( + String.format( + "%s://%s:%s", useSSL ? "https" : "http", restLiServerHost, restLiServerPort)), + sslProtocol, + params); } @Nonnull @@ -62,8 +69,10 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s } @Nonnull - public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String sslProtocol, - @Nullable Map<String, String> inputParams) { + public static RestClient getRestLiClient( + @Nonnull URI gmsUri, + @Nullable String sslProtocol, + @Nullable Map<String, String> inputParams) { if (StringUtils.isBlank(gmsUri.getHost()) || gmsUri.getPort() <= 0) { throw new InvalidParameterException("Invalid restli server host name or port!"); } @@ -82,7 +91,7 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s SSLParameters sslParameters = new SSLParameters(); if (sslProtocol != null) { - sslParameters.setProtocols(new String[]{sslProtocol}); + sslParameters.setProtocols(new String[] {sslProtocol}); } params.put(HttpClientFactory.HTTP_SSL_PARAMS, sslParameters); } @@ -90,7 +99,8 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s return getHttpRestClient(gmsUri, params); } - private static RestClient getHttpRestClient(@Nonnull URI gmsUri, @Nonnull Map<String, Object> params) { + private static RestClient getHttpRestClient( + @Nonnull URI gmsUri, @Nonnull Map<String, Object> params) { Map<String, Object> finalParams = new HashMap<>(); finalParams.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, DEFAULT_REQUEST_TIMEOUT_IN_MS); finalParams.putAll(params); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java index 09220bb481a03..737f79dc1c441 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java @@ -11,7 +11,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class RestliUtil { private RestliUtil() { @@ -19,8 +18,9 @@ private RestliUtil() { } /** - * Executes the provided supplier and convert the results to a {@link Task}. - * Exceptions thrown during the execution will be properly wrapped in {@link RestLiServiceException}. + * Executes the provided supplier and convert the results to a {@link Task}. Exceptions thrown + * during the execution will be properly wrapped in {@link RestLiServiceException}. 
+ * * @param supplier The supplier to execute * @return A parseq {@link Task} */ @@ -31,7 +31,8 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) { } catch (Throwable throwable) { // Convert IllegalArgumentException to BAD REQUEST - if (throwable instanceof IllegalArgumentException || throwable.getCause() instanceof IllegalArgumentException) { + if (throwable instanceof IllegalArgumentException + || throwable.getCause() instanceof IllegalArgumentException) { throwable = badRequestException(throwable.getMessage()); } @@ -47,20 +48,24 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) { public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier, String metricName) { Timer.Context context = MetricUtils.timer(metricName).time(); // Stop timer on success and failure - return toTask(supplier).transform(orig -> { - context.stop(); - if (orig.isFailed()) { - MetricUtils.counter(MetricRegistry.name(metricName, "failed")).inc(); - } else { - MetricUtils.counter(MetricRegistry.name(metricName, "success")).inc(); - } - return orig; - }); + return toTask(supplier) + .transform( + orig -> { + context.stop(); + if (orig.isFailed()) { + MetricUtils.counter(MetricRegistry.name(metricName, "failed")).inc(); + } else { + MetricUtils.counter(MetricRegistry.name(metricName, "success")).inc(); + } + return orig; + }); } /** - * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} instead. - * A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the optional is emtpy. + * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} + * instead. A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the + * optional is empty.
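+ *
+ * <p>A minimal usage sketch of the {@link #toTask(Supplier)} family (the resource method and
+ * {@code entityService} below are hypothetical, not part of this class):
+ *
+ * <pre>{@code
+ * public Task<Entity> get(@Nonnull String urnStr) {
+ *   // Supplier exceptions are wrapped in RestLiServiceException;
+ *   // IllegalArgumentException surfaces as HTTP 400 (bad request).
+ *   return RestliUtil.toTask(() -> entityService.getEntity(urnStr), "getEntity");
+ * }
+ * }</pre>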
+ * * @param supplier The supplier to execute * @return A parseq {@link Task} */ diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java index 913def2a040f4..5f3975b066fde 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java @@ -1,21 +1,19 @@ package com.linkedin.metadata.utils; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; -import lombok.extern.slf4j.Slf4j; - import java.time.Clock; - -import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; +import lombok.extern.slf4j.Slf4j; @Slf4j public class AuditStampUtils { - private AuditStampUtils() { - } + private AuditStampUtils() {} - public static AuditStamp createDefaultAuditStamp() { - return new AuditStamp() - .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) - .setTime(Clock.systemUTC().millis()); - } + public static AuditStamp createDefaultAuditStamp() { + return new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(Clock.systemUTC().millis()); + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java index 3b2116fa65127..f03d4c76c70d8 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java @@ -8,27 +8,29 @@ import com.linkedin.metadata.query.BrowseResultMetadata; import java.util.stream.Collectors; - public class BrowseUtil { - private BrowseUtil() { - } + private BrowseUtil() {} - public static com.linkedin.metadata.query.BrowseResult convertToLegacyResult(BrowseResult browseResult) { - com.linkedin.metadata.query.BrowseResult legacyResult = new com.linkedin.metadata.query.BrowseResult(); + public static com.linkedin.metadata.query.BrowseResult convertToLegacyResult( + BrowseResult browseResult) { + com.linkedin.metadata.query.BrowseResult legacyResult = + new com.linkedin.metadata.query.BrowseResult(); legacyResult.setFrom(browseResult.getFrom()); legacyResult.setPageSize(browseResult.getPageSize()); legacyResult.setNumEntities(browseResult.getNumEntities()); - legacyResult.setEntities(new BrowseResultEntityArray(browseResult.getEntities() - .stream() - .map(entity -> new BrowseResultEntity(entity.data())) - .collect(Collectors.toList()))); + legacyResult.setEntities( + new BrowseResultEntityArray( + browseResult.getEntities().stream() + .map(entity -> new BrowseResultEntity(entity.data())) + .collect(Collectors.toList()))); BrowseResultMetadata legacyMetadata = new BrowseResultMetadata(); - legacyMetadata.setGroups(new BrowseResultGroupArray(browseResult.getGroups() - .stream() - .map(group -> new BrowseResultGroup(group.data())) - .collect(Collectors.toList()))); + legacyMetadata.setGroups( + new BrowseResultGroupArray( + browseResult.getGroups().stream() + .map(group -> new BrowseResultGroup(group.data())) + .collect(Collectors.toList()))); legacyMetadata.setPath(browseResult.getMetadata().getPath()); legacyMetadata.setTotalNumEntities(browseResult.getMetadata().getTotalNumEntities()); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java index 
551683153aadd..32422b2a2d4a8 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java @@ -11,62 +11,79 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class ConcurrencyUtils { - private ConcurrencyUtils() { - } + private ConcurrencyUtils() {} - public static <O, T> List<T> transformAndCollectAsync(List<O> originalList, Function<O, T> transformer) { + public static <O, T> List<T> transformAndCollectAsync( + List<O> originalList, Function<O, T> transformer) { return transformAndCollectAsync(originalList, transformer, Collectors.toList()); } /** - * Transforms original list into the final list using the function transformer in an asynchronous fashion - * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end + * Transforms original list into the final list using the function transformer in an asynchronous + * fashion i.e. each element transform is run as a separate CompletableFuture and then joined at + * the end */ - public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync(Collection<O> originalCollection, - Function<O, T> transformer, Collector<T, ?, OUTPUT> collector) { + public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync( + Collection<O> originalCollection, + Function<O, T> transformer, + Collector<T, ?, OUTPUT> collector) { return originalCollection.stream() .map(element -> CompletableFuture.supplyAsync(() -> transformer.apply(element))) - .collect(Collectors.collectingAndThen(Collectors.toList(), - completableFutureList -> completableFutureList.stream().map(CompletableFuture::join))) + .collect( + Collectors.collectingAndThen( + Collectors.toList(), + completableFutureList -> + completableFutureList.stream().map(CompletableFuture::join))) .collect(collector); } - /** - * Transforms original list into the final list using the function transformer in an asynchronous fashion - * with exceptions handled by the input exceptionHandler - * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end + * Transforms original list into the final list using the function transformer in an asynchronous + * fashion with exceptions handled by the input exceptionHandler i.e. each element transform is + * run as a separate CompletableFuture and then joined at the end */ - public static <O, T> List<T> transformAndCollectAsync(List<O> originalList, Function<O, T> transformer, + public static <O, T> List<T> transformAndCollectAsync( + List<O> originalList, + Function<O, T> transformer, BiFunction<O, Throwable, ? extends T> exceptionHandler) { - return transformAndCollectAsync(originalList, transformer, exceptionHandler, Collectors.toList()); + return transformAndCollectAsync( + originalList, transformer, exceptionHandler, Collectors.toList()); } /** - * Transforms original list into the final list using the function transformer in an asynchronous fashion - * with exceptions handled by the input exceptionHandler - * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end + * Transforms original list into the final list using the function transformer in an asynchronous + * fashion with exceptions handled by the input exceptionHandler i.e. 
each element transform is + * run as a separate CompletableFuture and then joined at the end */ - public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync(Collection<O> originalCollection, - Function<O, T> transformer, BiFunction<O, Throwable, ? extends T> exceptionHandler, Collector<T, ?, OUTPUT> collector) { + public static <O, T, OUTPUT> OUTPUT transformAndCollectAsync( + Collection<O> originalCollection, + Function<O, T> transformer, + BiFunction<O, Throwable, ? extends T> exceptionHandler, + Collector<T, ?, OUTPUT> collector) { return originalCollection.stream() - .map(element -> CompletableFuture.supplyAsync(() -> transformer.apply(element)) - .exceptionally(e -> exceptionHandler.apply(element, e))) + .map( + element -> + CompletableFuture.supplyAsync(() -> transformer.apply(element)) + .exceptionally(e -> exceptionHandler.apply(element, e))) .filter(Objects::nonNull) - .collect(Collectors.collectingAndThen(Collectors.toList(), - completableFutureList -> completableFutureList.stream().map(CompletableFuture::join))) + .collect( + Collectors.collectingAndThen( + Collectors.toList(), + completableFutureList -> + completableFutureList.stream().map(CompletableFuture::join))) .collect(collector); } /** - * Wait for a list of futures to end with a timeout and only return results that were returned before the timeout - * expired + * Wait for a list of futures to end with a timeout and only return results that were returned + * before the timeout expired */ - public static <T> List<T> getAllCompleted(List<CompletableFuture<T>> futuresList, long timeout, TimeUnit unit) { - CompletableFuture<Void> allFuturesResult = CompletableFuture.allOf(futuresList.toArray(new CompletableFuture[0])); + public static <T> List<T> getAllCompleted( + List<CompletableFuture<T>> futuresList, long timeout, TimeUnit unit) { + CompletableFuture<Void> allFuturesResult = + CompletableFuture.allOf(futuresList.toArray(new CompletableFuture[0])); try { allFuturesResult.get(timeout, unit); } catch (Exception e) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java index 441661497cadc..a40cf4da7abbc 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java @@ -19,17 +19,16 @@ import java.util.Optional; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DataPlatformInstanceUtils { - private DataPlatformInstanceUtils() { - } + private DataPlatformInstanceUtils() {} private static DataPlatformUrn getPlatformUrn(String name) { return new DataPlatformUrn(name.toLowerCase()); } - private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyAspect) throws URISyntaxException { + private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyAspect) + throws URISyntaxException { switch (entityType) { case "dataset": return ((DatasetKey) keyAspect).getPlatform(); @@ -40,7 +39,8 @@ private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyA case "dataFlow": return getPlatformUrn(((DataFlowKey) keyAspect).getOrchestrator()); case "dataJob": - return getPlatformUrn(DataFlowUrn.createFromUrn(((DataJobKey) keyAspect).getFlow()).getOrchestratorEntity()); + return getPlatformUrn( + DataFlowUrn.createFromUrn(((DataJobKey) keyAspect).getFlow()).getOrchestratorEntity()); case "dataProcess": return 
getPlatformUrn(((DataProcessKey) keyAspect).getOrchestrator()); case "mlModel": @@ -52,17 +52,23 @@ private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyA case "mlModelGroup": return ((MLModelGroupKey) keyAspect).getPlatform(); default: - log.debug(String.format("Failed to generate default platform for unknown entity type %s", entityType)); + log.debug( + String.format( + "Failed to generate default platform for unknown entity type %s", entityType)); return null; } } - public static Optional<DataPlatformInstance> buildDataPlatformInstance(String entityType, RecordTemplate keyAspect) { + public static Optional<DataPlatformInstance> buildDataPlatformInstance( + String entityType, RecordTemplate keyAspect) { try { return Optional.ofNullable(getDefaultDataPlatform(entityType, keyAspect)) .map(platform -> new DataPlatformInstance().setPlatform(platform)); } catch (URISyntaxException e) { - log.error("Failed to generate data platform instance for entity {}, keyAspect {}", entityType, keyAspect); + log.error( + "Failed to generate data platform instance for entity {}, keyAspect {}", + entityType, + keyAspect); return Optional.empty(); } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java index 2ad2d5028ba7d..161b0f845f7e2 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java @@ -16,17 +16,16 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - @Slf4j public class EntityKeyUtils { private static final Logger logger = LoggerFactory.getLogger(EntityKeyUtils.class); - private EntityKeyUtils() { - } + private EntityKeyUtils() {} @Nonnull - public static Urn getUrnFromProposal(MetadataChangeProposal metadataChangeProposal, AspectSpec keyAspectSpec) { + public static Urn getUrnFromProposal( + MetadataChangeProposal metadataChangeProposal, AspectSpec keyAspectSpec) { if (metadataChangeProposal.hasEntityUrn()) { Urn urn = metadataChangeProposal.getEntityUrn(); @@ -39,11 +38,13 @@ public static Urn getUrnFromProposal(MetadataChangeProposal metadataChangePropos return urn; } if (metadataChangeProposal.hasEntityKeyAspect()) { - RecordTemplate keyAspectRecord = GenericRecordUtils.deserializeAspect( + RecordTemplate keyAspectRecord = + GenericRecordUtils.deserializeAspect( metadataChangeProposal.getEntityKeyAspect().getValue(), metadataChangeProposal.getEntityKeyAspect().getContentType(), keyAspectSpec); - return EntityKeyUtils.convertEntityKeyToUrn(keyAspectRecord, metadataChangeProposal.getEntityType()); + return EntityKeyUtils.convertEntityKeyToUrn( + keyAspectRecord, metadataChangeProposal.getEntityType()); } throw new IllegalArgumentException("One of urn and keyAspect must be set"); } @@ -61,39 +62,46 @@ public static Urn getUrnFromLog(MetadataChangeLog metadataChangeLog, AspectSpec return urn; } if (metadataChangeLog.hasEntityKeyAspect()) { - RecordTemplate keyAspectRecord = GenericRecordUtils.deserializeAspect( - metadataChangeLog.getEntityKeyAspect().getValue(), - metadataChangeLog.getEntityKeyAspect().getContentType(), - keyAspectSpec); - return EntityKeyUtils.convertEntityKeyToUrn(keyAspectRecord, metadataChangeLog.getEntityType()); + RecordTemplate keyAspectRecord = + GenericRecordUtils.deserializeAspect( + metadataChangeLog.getEntityKeyAspect().getValue(), + metadataChangeLog.getEntityKeyAspect().getContentType(), + 
keyAspectSpec); + return EntityKeyUtils.convertEntityKeyToUrn( + keyAspectRecord, metadataChangeLog.getEntityType()); } throw new IllegalArgumentException("One of urn and keyAspect must be set"); } /** - * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given - * the urn & the {@link AspectSpec} of the key. + * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given the urn + * & the {@link AspectSpec} of the key. * - * Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the - * number of urn key parts does not match the number of fields in the key schema, an {@link IllegalArgumentException} will be thrown. + * <p>Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the + * number of urn key parts does not match the number of fields in the key schema, an {@link + * IllegalArgumentException} will be thrown. * * @param urn raw entity urn * @param keyAspectSpec key aspect spec - * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of - * the provided key schema in order. - * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema (field number or type mismatch) + * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of the + * provided key schema in order. + * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema + * (field number or type mismatch) */ @Nonnull - public static RecordTemplate convertUrnToEntityKey(@Nonnull final Urn urn, @Nonnull final AspectSpec keyAspectSpec) { + public static RecordTemplate convertUrnToEntityKey( + @Nonnull final Urn urn, @Nonnull final AspectSpec keyAspectSpec) { RecordDataSchema keySchema = keyAspectSpec.getPegasusSchema(); // #1. Ensure we have a class to bind into. - Class<? extends RecordTemplate> clazz = keyAspectSpec.getDataTemplateClass().asSubclass(RecordTemplate.class); + Class<? extends RecordTemplate> clazz = + keyAspectSpec.getDataTemplateClass().asSubclass(RecordTemplate.class); // #2. Bind fields into a DataMap if (urn.getEntityKey().getParts().size() != keySchema.getFields().size()) { throw new IllegalArgumentException( - "Failed to convert urn to entity key: urns parts and key fields do not have same length for " + urn); + "Failed to convert urn to entity key: urns parts and key fields do not have same length for " + + urn); } final DataMap dataMap = new DataMap(); for (int i = 0; i < urn.getEntityKey().getParts().size(); i++) { @@ -107,28 +115,35 @@ public static RecordTemplate convertUrnToEntityKey(@Nonnull final Urn urn, @Nonn try { constructor = clazz.getConstructor(DataMap.class); return constructor.newInstance(dataMap); - } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) { + } catch (NoSuchMethodException + | InstantiationException + | IllegalAccessException + | InvocationTargetException e) { throw new IllegalArgumentException( - String.format("Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", + String.format( + "Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", clazz.getName())); } } /** - * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given - * the urn & the {@link RecordDataSchema} of the key. 
+ * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given the urn + * & the {@link RecordDataSchema} of the key. * - * Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the - * number of urn key parts does not match the number of fields in the key schema, an {@link IllegalArgumentException} will be thrown. + * <p>Parts of the urn are bound into fields in the keySchema based on field <b>index</b>. If the + * number of urn key parts does not match the number of fields in the key schema, an {@link + * IllegalArgumentException} will be thrown. * * @param urn raw entity urn * @param keySchema schema of the entity key - * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of - * the provided key schema in order. - * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema (field number or type mismatch) + * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of the + * provided key schema in order. + * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema + * (field number or type mismatch) */ @Nonnull - public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn urn, @Nonnull final RecordDataSchema keySchema) { + public static RecordTemplate convertUrnToEntityKeyInternal( + @Nonnull final Urn urn, @Nonnull final RecordDataSchema keySchema) { // #1. Ensure we have a class to bind into. Class<? extends RecordTemplate> clazz; @@ -136,8 +151,10 @@ public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn ur clazz = Class.forName(keySchema.getFullName()).asSubclass(RecordTemplate.class); } catch (ClassNotFoundException e) { throw new IllegalArgumentException( - String.format("Failed to find RecordTemplate class associated with provided RecordDataSchema named %s", - keySchema.getFullName()), e); + String.format( + "Failed to find RecordTemplate class associated with provided RecordDataSchema named %s", + keySchema.getFullName()), + e); } // #2. Bind fields into a DataMap @@ -157,29 +174,37 @@ public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn ur try { constructor = clazz.getConstructor(DataMap.class); return constructor.newInstance(dataMap); - } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) { + } catch (NoSuchMethodException + | InstantiationException + | IllegalAccessException + | InvocationTargetException e) { throw new IllegalArgumentException( - String.format("Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", + String.format( + "Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", clazz.getName())); } } /** - * Implicitly converts an Entity Key {@link RecordTemplate} into the corresponding {@link Urn} string. + * Implicitly converts an Entity Key {@link RecordTemplate} into the corresponding {@link Urn} + * string. * - * Parts of the key record are bound into fields in the urn based on field <b>index</b>. + * <p>Parts of the key record are bound into fields in the urn based on field <b>index</b>. * * @param keyAspect a {@link RecordTemplate} representing the key. * @param entityName name of the entity to use during Urn construction * @return an {@link Urn} created by binding the fields of the key aspect to an Urn. 
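* <p>For example (mirroring the EntityKeyUtilsTest cases later in this patch): a key aspect with
* parts ("part1", "urn:li:testEntity2:part2", "VALUE_1") becomes
* {@code urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)}, and
* {@link #convertUrnToEntityKey(Urn, AspectSpec)} inverts the mapping.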
*/ @Nonnull - public static Urn convertEntityKeyToUrn(@Nonnull final RecordTemplate keyAspect, @Nonnull final String entityName) { + public static Urn convertEntityKeyToUrn( + @Nonnull final RecordTemplate keyAspect, @Nonnull final String entityName) { final List<String> urnParts = new ArrayList<>(); for (RecordDataSchema.Field field : keyAspect.schema().getFields()) { Object value = keyAspect.data().get(field.getName()); String valueString = value == null ? "" : value.toString(); - urnParts.add(valueString); // TODO: Determine whether all fields, including urns, should be URL encoded. + urnParts.add( + valueString); // TODO: Determine whether all fields, including urns, should be URL + // encoded. } return Urn.createFromTuple(entityName, urnParts); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java index 3ef415b4d31be..fc28367e6c7ee 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java @@ -1,54 +1,51 @@ package com.linkedin.metadata.utils; +import com.datahub.util.RecordUtils; import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.GenericPayload; import java.nio.charset.StandardCharsets; import javax.annotation.Nonnull; - public class GenericRecordUtils { public static final String JSON = "application/json"; - private GenericRecordUtils() { - } + private GenericRecordUtils() {} - /** - * Deserialize the given value into the aspect based on the input aspectSpec - */ + /** Deserialize the given value into the aspect based on the input aspectSpec */ @Nonnull - public static RecordTemplate deserializeAspect(@Nonnull ByteString aspectValue, @Nonnull String contentType, + public static RecordTemplate deserializeAspect( + @Nonnull ByteString aspectValue, + @Nonnull String contentType, @Nonnull AspectSpec aspectSpec) { return deserializeAspect(aspectValue, contentType, aspectSpec.getDataTemplateClass()); } @Nonnull - public static <T extends RecordTemplate> T deserializeAspect(@Nonnull ByteString aspectValue, - @Nonnull String contentType, @Nonnull Class<T> clazz) { + public static <T extends RecordTemplate> T deserializeAspect( + @Nonnull ByteString aspectValue, @Nonnull String contentType, @Nonnull Class<T> clazz) { if (!contentType.equals(JSON)) { - throw new IllegalArgumentException(String.format("%s content type is not supported", contentType)); + throw new IllegalArgumentException( + String.format("%s content type is not supported", contentType)); } return RecordUtils.toRecordTemplate(clazz, aspectValue.asString(StandardCharsets.UTF_8)); } @Nonnull public static <T extends RecordTemplate> T deserializePayload( - @Nonnull ByteString payloadValue, - @Nonnull String contentType, - @Nonnull Class<T> clazz) { + @Nonnull ByteString payloadValue, @Nonnull String contentType, @Nonnull Class<T> clazz) { if (!contentType.equals(JSON)) { - throw new IllegalArgumentException(String.format("%s content type is not supported", contentType)); + throw new IllegalArgumentException( + String.format("%s content type is not supported", contentType)); } return RecordUtils.toRecordTemplate(clazz, payloadValue.asString(StandardCharsets.UTF_8)); } @Nonnull public static <T extends 
RecordTemplate> T deserializePayload( - @Nonnull ByteString payloadValue, - @Nonnull Class<T> clazz) { + @Nonnull ByteString payloadValue, @Nonnull Class<T> clazz) { return deserializePayload(payloadValue, JSON, clazz); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java index d923005c8c023..d9a4768ada05f 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java @@ -1,36 +1,38 @@ package com.linkedin.metadata.utils; +import javax.annotation.Nonnull; import org.json.JSONException; import org.json.JSONObject; -import javax.annotation.Nonnull; - - public class IngestionUtils { private static final String PIPELINE_NAME = "pipeline_name"; - private IngestionUtils() { - } + private IngestionUtils() {} /** - * Injects a pipeline_name into a recipe if there isn't a pipeline_name already there. - * The pipeline_name will be the urn of the ingestion source. + * Injects a pipeline_name into a recipe if there isn't a pipeline_name already there. The + * pipeline_name will be the urn of the ingestion source. * * @param pipelineName the new pipeline name in the recipe. * @return a modified recipe JSON string */ - public static String injectPipelineName(@Nonnull String originalJson, @Nonnull final String pipelineName) { + public static String injectPipelineName( + @Nonnull String originalJson, @Nonnull final String pipelineName) { try { final JSONObject jsonRecipe = new JSONObject(originalJson); - boolean hasPipelineName = jsonRecipe.has(PIPELINE_NAME) && jsonRecipe.get(PIPELINE_NAME) != null && !jsonRecipe.get(PIPELINE_NAME).equals(""); + boolean hasPipelineName = + jsonRecipe.has(PIPELINE_NAME) + && jsonRecipe.get(PIPELINE_NAME) != null + && !jsonRecipe.get(PIPELINE_NAME).equals(""); if (!hasPipelineName) { jsonRecipe.put(PIPELINE_NAME, pipelineName); return jsonRecipe.toString(); } } catch (JSONException e) { - throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided.", e); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided.", e); } return originalJson; } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java index 9794d101ecda9..cde83c1382283 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java @@ -17,49 +17,66 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - -/** - * Static utility class providing methods for extracting entity metadata from Pegasus models. - */ +/** Static utility class providing methods for extracting entity metadata from Pegasus models. 
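*
* <p>For instance, {@code urnToEntityName(urn)} below simply returns {@code urn.getEntityType()},
* and {@code getAspectNameFromSchema(schema)} reads the aspect name recorded in the schema's
* aspect annotation properties (see the method bodies below).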
*/ @Slf4j public class PegasusUtils { - private PegasusUtils() { - } + private PegasusUtils() {} public static String getEntityNameFromSchema(final RecordDataSchema entitySnapshotSchema) { - final Object entityAnnotationObj = entitySnapshotSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { - return EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotSchema.getFullName()).getName(); + return EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotSchema.getFullName()) + .getName(); } - log.error(String.format("Failed to extract entity name from provided schema %s", entitySnapshotSchema.getName())); + log.error( + String.format( + "Failed to extract entity name from provided schema %s", + entitySnapshotSchema.getName())); throw new IllegalArgumentException( - String.format("Failed to extract entity name from provided schema %s", entitySnapshotSchema.getName())); + String.format( + "Failed to extract entity name from provided schema %s", + entitySnapshotSchema.getName())); } // TODO: Figure out a better iteration strategy. - public static String getAspectNameFromFullyQualifiedName(final String fullyQualifiedRecordTemplateName) { - final RecordTemplate template = RecordUtils.toRecordTemplate(fullyQualifiedRecordTemplateName, new DataMap()); + public static String getAspectNameFromFullyQualifiedName( + final String fullyQualifiedRecordTemplateName) { + final RecordTemplate template = + RecordUtils.toRecordTemplate(fullyQualifiedRecordTemplateName, new DataMap()); final RecordDataSchema aspectSchema = template.schema(); return getAspectNameFromSchema(aspectSchema); } public static String getAspectNameFromSchema(final RecordDataSchema aspectSchema) { - final Object aspectAnnotationObj = aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); + final Object aspectAnnotationObj = + aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); if (aspectAnnotationObj != null) { - return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()).getName(); + return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()) + .getName(); } - log.error(String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + log.error( + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); throw new IllegalArgumentException( - String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); } - public static <T> Class<? extends T> getDataTemplateClassFromSchema(final NamedDataSchema schema, final Class<T> clazz) { + public static <T> Class<? 
extends T> getDataTemplateClassFromSchema( + final NamedDataSchema schema, final Class<T> clazz) { try { - return Class.forName(schema.getFullName()).asSubclass(clazz); + return Class.forName(schema.getFullName()).asSubclass(clazz); } catch (ClassNotFoundException e) { - log.error("Unable to find class for RecordDataSchema named " + schema.getFullName() + " " + e.getMessage()); - throw new ModelConversionException("Unable to find class for RecordDataSchema named " + schema.getFullName(), e); + log.error( + "Unable to find class for RecordDataSchema named " + + schema.getFullName() + + " " + + e.getMessage()); + throw new ModelConversionException( + "Unable to find class for RecordDataSchema named " + schema.getFullName(), e); } } @@ -67,9 +84,17 @@ public static String urnToEntityName(final Urn urn) { return urn.getEntityType(); } - public static MetadataChangeLog constructMCL(@Nullable MetadataChangeProposal base, String entityName, Urn urn, ChangeType changeType, - String aspectName, AuditStamp auditStamp, RecordTemplate newAspectValue, SystemMetadata newSystemMetadata, - RecordTemplate oldAspectValue, SystemMetadata oldSystemMetadata) { + public static MetadataChangeLog constructMCL( + @Nullable MetadataChangeProposal base, + String entityName, + Urn urn, + ChangeType changeType, + String aspectName, + AuditStamp auditStamp, + RecordTemplate newAspectValue, + SystemMetadata newSystemMetadata, + RecordTemplate oldAspectValue, + SystemMetadata oldSystemMetadata) { final MetadataChangeLog metadataChangeLog; if (base != null) { metadataChangeLog = new MetadataChangeLog(new DataMap(base.data())); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java index 69bd3b461eb12..35e15c1e5b693 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java @@ -8,23 +8,19 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.FilterValue; - +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.net.URISyntaxException; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - -import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilders; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - - @Slf4j public class SearchUtil { public static final String AGGREGATION_SEPARATOR_CHAR = "␞"; @@ -33,23 +29,29 @@ public class SearchUtil { private static final String URN_PREFIX = "urn:"; private static final String REMOVED = "removed"; - private SearchUtil() { - } + private SearchUtil() {} /* * @param aggregations the aggregations coming back from elasticsearch combined with the filters from the search request * @param filteredValues the set of values provided by the search request */ - public static List<FilterValue> convertToFilters(Map<String, Long> aggregations, Set<String> filteredValues) { - return aggregations.entrySet().stream().map(entry -> { - return createFilterValue(entry.getKey(), entry.getValue(), filteredValues.contains(entry.getKey())); - }).sorted(Comparator.comparingLong(value -> 
-value.getFacetCount())).collect(Collectors.toList()); + public static List<FilterValue> convertToFilters( + Map<String, Long> aggregations, Set<String> filteredValues) { + return aggregations.entrySet().stream() + .map( + entry -> { + return createFilterValue( + entry.getKey(), entry.getValue(), filteredValues.contains(entry.getKey())); + }) + .sorted(Comparator.comparingLong(value -> -value.getFacetCount())) + .collect(Collectors.toList()); } public static FilterValue createFilterValue(String value, Long facetCount, Boolean isFilteredOn) { // TODO(indy): test this String[] aggregationTokens = value.split(AGGREGATION_SEPARATOR_CHAR); - FilterValue result = new FilterValue().setValue(value).setFacetCount(facetCount).setFiltered(isFilteredOn); + FilterValue result = + new FilterValue().setValue(value).setFacetCount(facetCount).setFiltered(isFilteredOn); String lastValue = aggregationTokens[aggregationTokens.length - 1]; if (lastValue.startsWith(URN_PREFIX)) { try { @@ -61,56 +63,77 @@ public static FilterValue createFilterValue(String value, Long facetCount, Boole return result; } - private static Criterion transformEntityTypeCriterion(Criterion criterion, IndexConvention indexConvention) { - return criterion.setField("_index").setValues( - new StringArray(criterion.getValues().stream().map(value -> String.join("", value.split("_"))) - .map(indexConvention::getEntityIndexName) - .collect(Collectors.toList()))) - .setValue(indexConvention.getEntityIndexName(String.join("", criterion.getValue().split("_")))); + private static Criterion transformEntityTypeCriterion( + Criterion criterion, IndexConvention indexConvention) { + return criterion + .setField("_index") + .setValues( + new StringArray( + criterion.getValues().stream() + .map(value -> String.join("", value.split("_"))) + .map(indexConvention::getEntityIndexName) + .collect(Collectors.toList()))) + .setValue( + indexConvention.getEntityIndexName(String.join("", criterion.getValue().split("_")))); } - private static ConjunctiveCriterion transformConjunctiveCriterion(ConjunctiveCriterion conjunctiveCriterion, - IndexConvention indexConvention) { - return new ConjunctiveCriterion().setAnd( - conjunctiveCriterion.getAnd().stream().map( - criterion -> criterion.getField().equalsIgnoreCase(INDEX_VIRTUAL_FIELD) - ? transformEntityTypeCriterion(criterion, indexConvention) - : criterion) - .collect(Collectors.toCollection(CriterionArray::new))); + private static ConjunctiveCriterion transformConjunctiveCriterion( + ConjunctiveCriterion conjunctiveCriterion, IndexConvention indexConvention) { + return new ConjunctiveCriterion() + .setAnd( + conjunctiveCriterion.getAnd().stream() + .map( + criterion -> + criterion.getField().equalsIgnoreCase(INDEX_VIRTUAL_FIELD) + ? 
transformEntityTypeCriterion(criterion, indexConvention) + : criterion) + .collect(Collectors.toCollection(CriterionArray::new))); } - private static ConjunctiveCriterionArray transformConjunctiveCriterionArray(ConjunctiveCriterionArray criterionArray, - IndexConvention indexConvention) { + private static ConjunctiveCriterionArray transformConjunctiveCriterionArray( + ConjunctiveCriterionArray criterionArray, IndexConvention indexConvention) { return new ConjunctiveCriterionArray( - criterionArray.stream().map( - conjunctiveCriterion -> transformConjunctiveCriterion(conjunctiveCriterion, indexConvention)) + criterionArray.stream() + .map( + conjunctiveCriterion -> + transformConjunctiveCriterion(conjunctiveCriterion, indexConvention)) .collect(Collectors.toList())); } /** - * Allows filtering on entities which are stored as different indices under the hood by transforming the tag - * _entityType to _index and updating the type to the index name. + * Allows filtering on entities which are stored as different indices under the hood by + * transforming the tag _entityType to _index and updating the type to the index name. * - * @param filter The filter to parse and transform if needed + * @param filter The filter to parse and transform if needed * @param indexConvention The index convention used to generate the index name for an entity * @return A filter, with the changes if necessary */ - public static Filter transformFilterForEntities(Filter filter, @Nonnull IndexConvention indexConvention) { + public static Filter transformFilterForEntities( + Filter filter, @Nonnull IndexConvention indexConvention) { if (filter != null && filter.getOr() != null) { - return new Filter().setOr(transformConjunctiveCriterionArray(filter.getOr(), indexConvention)); + return new Filter() + .setOr(transformConjunctiveCriterionArray(filter.getOr(), indexConvention)); } return filter; } /** - * Applies a default filter to remove entities that are soft deleted only if there isn't a filter for the REMOVED field already + * Applies a default filter to remove entities that are soft deleted only if there isn't a filter + * for the REMOVED field already */ - public static BoolQueryBuilder filterSoftDeletedByDefault(@Nullable Filter filter, @Nullable BoolQueryBuilder filterQuery) { + public static BoolQueryBuilder filterSoftDeletedByDefault( + @Nullable Filter filter, @Nullable BoolQueryBuilder filterQuery) { boolean removedInOrFilter = false; if (filter != null) { - removedInOrFilter = filter.getOr().stream().anyMatch( - or -> or.getAnd().stream().anyMatch(criterion -> criterion.getField().equals(REMOVED) || criterion.getField().equals(REMOVED + KEYWORD_SUFFIX)) - ); + removedInOrFilter = + filter.getOr().stream() + .anyMatch( + or -> + or.getAnd().stream() + .anyMatch( + criterion -> + criterion.getField().equals(REMOVED) + || criterion.getField().equals(REMOVED + KEYWORD_SUFFIX))); } if (!removedInOrFilter) { filterQuery.mustNot(QueryBuilders.matchQuery(REMOVED, true)); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java index f4be950575624..b0f42231b27f3 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java @@ -7,12 +7,11 @@ @Slf4j public class SystemMetadataUtils { - private SystemMetadataUtils() { - } + private SystemMetadataUtils() {} - public static 
SystemMetadata createDefaultSystemMetadata() { - return new SystemMetadata() - .setRunId(Constants.DEFAULT_RUN_ID) - .setLastObserved(System.currentTimeMillis()); - } + public static SystemMetadata createDefaultSystemMetadata() { + return new SystemMetadata() + .setRunId(Constants.DEFAULT_RUN_ID) + .setLastObserved(System.currentTimeMillis()); + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java index 4179345370007..4a3f78fcef7bd 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java @@ -6,10 +6,7 @@ import java.util.Optional; import javax.annotation.Nonnull; - -/** - * The convention for naming search indices - */ +/** The convention for naming search indices */ public interface IndexConvention { Optional<String> getPrefix(); @@ -36,6 +33,7 @@ public interface IndexConvention { /** * Inverse of getEntityIndexName + * * @param indexName The index name to parse * @return a string, the entity name that that index is for, or empty if one cannot be extracted */ @@ -43,9 +41,10 @@ public interface IndexConvention { /** * Inverse of getEntityIndexName + * * @param timeseriesAspectIndexName The index name to parse - * @return a pair of strings, the entity name and the aspect name that that index is for, - * or empty if one cannot be extracted + * @return a pair of strings, the entity name and the aspect name that that index is for, or empty + * if one cannot be extracted */ Optional<Pair<String, String>> getEntityAndAspectName(String timeseriesAspectIndexName); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java index e607139203b57..764630eb73973 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java @@ -10,7 +10,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.StringUtils; - // Default implementation of search index naming convention public class IndexConventionImpl implements IndexConvention { // Map from Entity name -> Index name @@ -19,17 +18,25 @@ public class IndexConventionImpl implements IndexConvention { private final String _getAllEntityIndicesPattern; private final String _getAllTimeseriesIndicesPattern; - private final static String ENTITY_INDEX_VERSION = "v2"; - private final static String ENTITY_INDEX_SUFFIX = "index"; - private final static String TIMESERIES_INDEX_VERSION = "v1"; - private final static String TIMESERIES_ENTITY_INDEX_SUFFIX = "aspect"; + private static final String ENTITY_INDEX_VERSION = "v2"; + private static final String ENTITY_INDEX_SUFFIX = "index"; + private static final String TIMESERIES_INDEX_VERSION = "v1"; + private static final String TIMESERIES_ENTITY_INDEX_SUFFIX = "aspect"; public IndexConventionImpl(@Nullable String prefix) { _prefix = StringUtils.isEmpty(prefix) ? 
Optional.empty() : Optional.of(prefix); _getAllEntityIndicesPattern = - _prefix.map(p -> p + "_").orElse("") + "*" + ENTITY_INDEX_SUFFIX + "_" + ENTITY_INDEX_VERSION; + _prefix.map(p -> p + "_").orElse("") + + "*" + + ENTITY_INDEX_SUFFIX + + "_" + + ENTITY_INDEX_VERSION; _getAllTimeseriesIndicesPattern = - _prefix.map(p -> p + "_").orElse("") + "*" + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + TIMESERIES_INDEX_VERSION; + _prefix.map(p -> p + "_").orElse("") + + "*" + + TIMESERIES_ENTITY_INDEX_SUFFIX + + "_" + + TIMESERIES_INDEX_VERSION; } private String createIndexName(String baseName) { @@ -85,7 +92,9 @@ public String getEntityIndexName(String entityName) { @Nonnull @Override public String getTimeseriesAspectIndexName(String entityName, String aspectName) { - return this.getIndexName(entityName + "_" + aspectName) + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + return this.getIndexName(entityName + "_" + aspectName) + + TIMESERIES_ENTITY_INDEX_SUFFIX + + "_" + TIMESERIES_INDEX_VERSION; } @@ -108,8 +117,10 @@ public Optional<String> getEntityName(String indexName) { @Override public Optional<Pair<String, String>> getEntityAndAspectName(String timeseriesAspectIndexName) { - Optional<String> entityAndAspect = extractIndexBase(timeseriesAspectIndexName, TIMESERIES_ENTITY_INDEX_SUFFIX + "_" - + TIMESERIES_INDEX_VERSION); + Optional<String> entityAndAspect = + extractIndexBase( + timeseriesAspectIndexName, + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + TIMESERIES_INDEX_VERSION); if (entityAndAspect.isPresent()) { String[] entityAndAspectTokens = entityAndAspect.get().split("_"); if (entityAndAspectTokens.length == 2) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java index 982557f2b5358..885ed74d11471 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.utils.exception; -/** - * An exception to be thrown when certain graph entities are not supported. - */ +/** An exception to be thrown when certain graph entities are not supported. */ public class UnsupportedGraphEntities extends RuntimeException { public UnsupportedGraphEntities(String message) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java index d053272c19e7d..67f0ae4c77eaf 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java @@ -6,16 +6,13 @@ import java.util.ArrayList; import java.util.List; - /** - * A Log Filter that can be configured to omit logs containing a specific message string. - * Configured inside logback.xml. + * A Log Filter that can be configured to omit logs containing a specific message string. Configured + * inside logback.xml. */ public class LogMessageFilter extends AbstractMatcherFilter<ILoggingEvent> { - /** - * A set of messages to exclude. - */ + /** A set of messages to exclude. 
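*
* <p>A sketch of the expected logback.xml wiring, assuming logback's standard convention of
* mapping repeated {@code <excluded>} elements onto {@link #addExcluded(String)} (the appender
* placement and message text are illustrative only):
*
* <pre>{@code
* <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
*   <filter class="com.linkedin.metadata.utils.log.LogMessageFilter">
*     <excluded>some noisy message</excluded>
*   </filter>
* </appender>
* }</pre>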
*/ private final List<String> excluded = new ArrayList<>(); @Override @@ -33,4 +30,4 @@ public FilterReply decide(ILoggingEvent event) { public void addExcluded(String message) { this.excluded.add(message); } -} \ No newline at end of file +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java index 9a8848e090fb8..3a47c11f8d748 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java @@ -7,10 +7,8 @@ import com.codahale.metrics.Timer; import com.codahale.metrics.jmx.JmxReporter; - public class MetricUtils { - private MetricUtils() { - } + private MetricUtils() {} public static final String DELIMITER = "_"; @@ -32,7 +30,8 @@ public static Counter counter(Class<?> klass, String metricName) { public static void exceptionCounter(Class<?> klass, String metricName, Throwable t) { String[] splitClassName = t.getClass().getName().split("[.]"); - String snakeCase = splitClassName[splitClassName.length - 1].replaceAll("([A-Z][a-z])", DELIMITER + "$1"); + String snakeCase = + splitClassName[splitClassName.length - 1].replaceAll("([A-Z][a-z])", DELIMITER + "$1"); counter(klass, metricName).inc(); counter(klass, metricName + DELIMITER + snakeCase).inc(); @@ -50,7 +49,8 @@ public static Timer timer(String metricName) { return REGISTRY.timer(MetricRegistry.name(metricName)); } - public static <T extends Gauge<?>> T gauge(Class<?> clazz, String metricName, MetricRegistry.MetricSupplier<T> supplier) { + public static <T extends Gauge<?>> T gauge( + Class<?> clazz, String metricName, MetricRegistry.MetricSupplier<T> supplier) { return REGISTRY.gauge(MetricRegistry.name(clazz, metricName), supplier); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java index e120fdb3b342f..7a6479a313244 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.utils; +import static org.testng.Assert.*; + import com.datahub.test.KeyPartEnum; import com.datahub.test.TestEntityKey; import com.linkedin.common.urn.Urn; @@ -8,11 +10,8 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import org.testng.Assert; import org.testng.annotations.Test; -import static org.testng.Assert.*; -/** - * Tests the capabilities of {@link EntityKeyUtils} - */ +/** Tests the capabilities of {@link EntityKeyUtils} */ public class EntityKeyUtilsTest { @Test @@ -22,36 +21,42 @@ public void testConvertEntityKeyToUrn() throws Exception { key.setKeyPart2(Urn.createFromString("urn:li:testEntity2:part2")); key.setKeyPart3(KeyPartEnum.VALUE_1); - final Urn expectedUrn = Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); + final Urn expectedUrn = + Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); final Urn actualUrn = EntityKeyUtils.convertEntityKeyToUrn(key, "testEntity1"); assertEquals(actualUrn.toString(), expectedUrn.toString()); } @Test public void testConvertEntityKeyToUrnInternal() throws Exception { - final Urn urn = Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); + final Urn urn = + 
Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); final TestEntityKey expectedKey = new TestEntityKey(); expectedKey.setKeyPart1("part1"); expectedKey.setKeyPart2(Urn.createFromString("urn:li:testEntity2:part2")); expectedKey.setKeyPart3(KeyPartEnum.VALUE_1); - final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKeyInternal(urn, expectedKey.schema()); + final RecordTemplate actualKey = + EntityKeyUtils.convertUrnToEntityKeyInternal(urn, expectedKey.schema()); Assert.assertEquals(actualKey.data(), expectedKey.data()); } @Test public void testConvertEntityUrnToKey() throws Exception { - final Urn urn = Urn.createFromString("urn:li:testEntity:(part1,urn:li:testEntity:part2,VALUE_1)"); + final Urn urn = + Urn.createFromString("urn:li:testEntity:(part1,urn:li:testEntity:part2,VALUE_1)"); final TestEntityKey expectedKey = new TestEntityKey(); expectedKey.setKeyPart1("part1"); expectedKey.setKeyPart2(Urn.createFromString("urn:li:testEntity:part2")); expectedKey.setKeyPart3(KeyPartEnum.VALUE_1); - ConfigEntityRegistry entityRegistry = new ConfigEntityRegistry( - TestEntityKey.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + ConfigEntityRegistry entityRegistry = + new ConfigEntityRegistry( + TestEntityKey.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); final EntitySpec entitySpec = entityRegistry.getEntitySpec(PegasusUtils.urnToEntityName(urn)); - final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); + final RecordTemplate actualKey = + EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); Assert.assertEquals(actualKey.data(), expectedKey.data()); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java index 8b2078c7b9533..6288ed80e6881 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java @@ -1,29 +1,29 @@ package com.linkedin.metadata.utils; -import org.testng.annotations.Test; - - import static org.testng.Assert.assertEquals; +import org.testng.annotations.Test; + public class IngestionUtilsTest { private final String ingestionSourceUrn = "urn:li:ingestionSource:12345"; @Test public void injectPipelineNameWhenThere() { - String recipe = "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"test\"}"; + String recipe = + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"test\"}"; assertEquals(recipe, IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn)); } @Test public void injectPipelineNameWhenNotThere() { - String recipe = "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}}}"; + String recipe = + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}}}"; recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn); assertEquals( recipe, - "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"urn:li:ingestionSource:12345\"}" - ); + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"urn:li:ingestionSource:12345\"}"); } } diff --git 
a/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java index b60b7fb64f3f9..fd606f57477a0 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.utils; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.search.FilterValue; @@ -9,12 +11,7 @@ import java.util.Set; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - -/** - * Tests the capabilities of {@link EntityKeyUtils} - */ +/** Tests the capabilities of {@link EntityKeyUtils} */ public class SearchUtilTest { @Test @@ -25,21 +22,22 @@ public void testConvertToFilters() throws Exception { Set<String> filteredValues = ImmutableSet.of("urn:li:tag:def"); - List<FilterValue> filters = - SearchUtil.convertToFilters(aggregations, filteredValues); - - assertEquals(filters.get(0), new FilterValue() - .setFiltered(false) - .setValue("urn:li:tag:abc") - .setEntity(Urn.createFromString("urn:li:tag:abc")) - .setFacetCount(3L) - ); - - assertEquals(filters.get(1), new FilterValue() - .setFiltered(true) - .setValue("urn:li:tag:def") - .setEntity(Urn.createFromString("urn:li:tag:def")) - .setFacetCount(0L) - ); + List<FilterValue> filters = SearchUtil.convertToFilters(aggregations, filteredValues); + + assertEquals( + filters.get(0), + new FilterValue() + .setFiltered(false) + .setValue("urn:li:tag:abc") + .setEntity(Urn.createFromString("urn:li:tag:abc")) + .setFacetCount(3L)); + + assertEquals( + filters.get(1), + new FilterValue() + .setFiltered(true) + .setValue("urn:li:tag:def") + .setEntity(Urn.createFromString("urn:li:tag:def")) + .setFacetCount(0L)); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java index 5310871140fc9..f3e52c9989775 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.utils.elasticsearch; +import static org.testng.Assert.*; + import com.linkedin.util.Pair; import java.util.Optional; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class IndexConventionImplTest { @Test @@ -19,7 +18,9 @@ public void testIndexConventionNoPrefix() { assertEquals(indexConventionNoPrefix.getEntityName(expectedIndexName), Optional.of(entityName)); assertEquals(indexConventionNoPrefix.getEntityName("totally not an index"), Optional.empty()); assertEquals(indexConventionNoPrefix.getEntityName("dataset_v2"), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.of("dashboard")); + assertEquals( + indexConventionNoPrefix.getEntityName("dashboardindex_v2_1683649932260"), + Optional.of("dashboard")); } @Test @@ -32,22 +33,32 @@ public void testIndexConventionPrefix() { assertEquals(indexConventionPrefix.getEntityName(expectedIndexName), Optional.of(entityName)); assertEquals(indexConventionPrefix.getEntityName("totally not an index"), Optional.empty()); 
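// Prefixed entity indices follow <prefix>_<entityName>index_v2 (per IndexConventionImpl), so
// names missing the prefix or the "index" suffix cannot be parsed back to an entity name: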
assertEquals(indexConventionPrefix.getEntityName("prefix_dataset_v2"), Optional.empty()); - assertEquals(indexConventionPrefix.getEntityName("prefix_dashboardindex_v2_1683649932260"), Optional.of("dashboard")); - assertEquals(indexConventionPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityName("prefix_dashboardindex_v2_1683649932260"), + Optional.of("dashboard")); + assertEquals( + indexConventionPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.empty()); } + @Test public void testTimeseriesIndexConventionNoPrefix() { IndexConvention indexConventionNoPrefix = new IndexConventionImpl(null); String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "dataset_datasetusagestatisticsaspect_v1"; - assertEquals(indexConventionNoPrefix.getTimeseriesAspectIndexName(entityName, aspectName), expectedIndexName); + assertEquals( + indexConventionNoPrefix.getTimeseriesAspectIndexName(entityName, aspectName), + expectedIndexName); assertEquals(indexConventionNoPrefix.getPrefix(), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName(expectedIndexName), Optional.of( - Pair.of(entityName, aspectName))); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName(expectedIndexName), + Optional.of(Pair.of(entityName, aspectName))); + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); assertEquals(indexConventionNoPrefix.getEntityAndAspectName("dataset_v2"), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName("dashboard_dashboardusagestatisticsaspect_v1"), + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName( + "dashboard_dashboardusagestatisticsaspect_v1"), Optional.of(Pair.of("dashboard", "dashboardusagestatistics"))); } @@ -57,10 +68,17 @@ public void testTimeseriesIndexConventionPrefix() { String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "prefix_dataset_datasetusagestatisticsaspect_v1"; - assertEquals(indexConventionPrefix.getTimeseriesAspectIndexName(entityName, aspectName), expectedIndexName); + assertEquals( + indexConventionPrefix.getTimeseriesAspectIndexName(entityName, aspectName), + expectedIndexName); assertEquals(indexConventionPrefix.getPrefix(), Optional.of("prefix")); - assertEquals(indexConventionPrefix.getEntityAndAspectName(expectedIndexName), Optional.of(Pair.of(entityName, aspectName))); - assertEquals(indexConventionPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); - assertEquals(indexConventionPrefix.getEntityAndAspectName("prefix_datasetusagestatisticsaspect_v1"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityAndAspectName(expectedIndexName), + Optional.of(Pair.of(entityName, aspectName))); + assertEquals( + indexConventionPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityAndAspectName("prefix_datasetusagestatisticsaspect_v1"), + Optional.empty()); } } diff --git a/mock-entity-registry/src/main/java/mock/MockAspectSpec.java b/mock-entity-registry/src/main/java/mock/MockAspectSpec.java index 594bc583eeef0..92321cce3d905 100644 --- a/mock-entity-registry/src/main/java/mock/MockAspectSpec.java +++ 
b/mock-entity-registry/src/main/java/mock/MockAspectSpec.java @@ -12,16 +12,24 @@ import java.util.List; import javax.annotation.Nonnull; - public class MockAspectSpec extends AspectSpec { - public MockAspectSpec(@Nonnull AspectAnnotation aspectAnnotation, + public MockAspectSpec( + @Nonnull AspectAnnotation aspectAnnotation, @Nonnull List<SearchableFieldSpec> searchableFieldSpecs, @Nonnull List<SearchScoreFieldSpec> searchScoreFieldSpecs, @Nonnull List<RelationshipFieldSpec> relationshipFieldSpecs, @Nonnull List<TimeseriesFieldSpec> timeseriesFieldSpecs, - @Nonnull List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs, RecordDataSchema schema, + @Nonnull List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs, + RecordDataSchema schema, Class<RecordTemplate> aspectClass) { - super(aspectAnnotation, searchableFieldSpecs, searchScoreFieldSpecs, relationshipFieldSpecs, timeseriesFieldSpecs, - timeseriesFieldCollectionSpecs, schema, aspectClass); + super( + aspectAnnotation, + searchableFieldSpecs, + searchScoreFieldSpecs, + relationshipFieldSpecs, + timeseriesFieldSpecs, + timeseriesFieldCollectionSpecs, + schema, + aspectClass); } } diff --git a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java index 54dd25613ed4c..a324f9ce0195b 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java +++ b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java @@ -11,7 +11,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class MockEntityRegistry implements EntityRegistry { @Nonnull @Override @@ -48,5 +47,4 @@ public AspectTemplateEngine getAspectTemplateEngine() { public Map<String, AspectSpec> getAspectSpecs() { return new HashMap<>(); } - } diff --git a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java index d740fff29e258..0013d6615a71d 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java +++ b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java @@ -1,5 +1,7 @@ package mock; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePaths; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -27,9 +29,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.*; - - public class MockEntitySpec implements EntitySpec { private String _name; @@ -41,7 +40,8 @@ public MockEntitySpec(String name) { if (DATASET_ENTITY_NAME.equals(name)) { _aspectTypeMap.put(BROWSE_PATHS_ASPECT_NAME, getAspectSpec(BROWSE_PATHS_ASPECT_NAME)); _aspectTypeMap.put(BROWSE_PATHS_V2_ASPECT_NAME, getAspectSpec(BROWSE_PATHS_V2_ASPECT_NAME)); - _aspectTypeMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, getAspectSpec(DATA_PLATFORM_INSTANCE_ASPECT_NAME)); + _aspectTypeMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, getAspectSpec(DATA_PLATFORM_INSTANCE_ASPECT_NAME)); } } @@ -81,16 +81,23 @@ public AspectSpec getKeyAspectSpec() { return null; } - public <T extends RecordTemplate> AspectSpec createAspectSpec(T type, String name) { - return new MockAspectSpec(new AspectAnnotation(name, false, false, null), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), type.schema(), (Class<RecordTemplate>) type.getClass().asSubclass(RecordTemplate.class)); + public <T extends RecordTemplate> AspectSpec 
createAspectSpec(T type, String name) { + return new MockAspectSpec( + new AspectAnnotation(name, false, false, null), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + type.schema(), + (Class<RecordTemplate>) type.getClass().asSubclass(RecordTemplate.class)); } @Override public List<AspectSpec> getAspectSpecs() { - return ASPECT_TYPE_MAP.keySet().stream().map(name -> createAspectSpec(ASPECT_TYPE_MAP.get(name), name)).collect( - Collectors.toList()); + return ASPECT_TYPE_MAP.keySet().stream() + .map(name -> createAspectSpec(ASPECT_TYPE_MAP.get(name), name)) + .collect(Collectors.toList()); } @Override @@ -118,6 +125,7 @@ public Boolean hasAspect(String name) { ASPECT_TYPE_MAP.put(BROWSE_PATHS_V2_ASPECT_NAME, new BrowsePathsV2()); ASPECT_TYPE_MAP.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, new DataPlatformInstance()); } + @Override public AspectSpec getAspectSpec(String name) { return createAspectSpec(ASPECT_TYPE_MAP.get(name), name); diff --git a/test-models/src/main/java/com/datahub/utils/TestUtils.java b/test-models/src/main/java/com/datahub/utils/TestUtils.java index 1aca3a890caa6..6a2d219fa9b4d 100644 --- a/test-models/src/main/java/com/datahub/utils/TestUtils.java +++ b/test-models/src/main/java/com/datahub/utils/TestUtils.java @@ -9,7 +9,6 @@ import javax.annotation.Nonnull; import org.apache.commons.io.IOUtils; - public final class TestUtils { private TestUtils() { // Util class @@ -18,7 +17,8 @@ private TestUtils() { @Nonnull public static String loadJsonFromResource(@Nonnull String resourceName) throws IOException { final String jsonStr = - IOUtils.toString(ClassLoader.getSystemResourceAsStream(resourceName), Charset.defaultCharset()); + IOUtils.toString( + ClassLoader.getSystemResourceAsStream(resourceName), Charset.defaultCharset()); return jsonStr.replaceAll("\\s+", ""); } @@ -45,4 +45,3 @@ public static BarUrn makeBarUrn(int id) { return new BarUrn(id); } } - diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java index 6b1cd545ba00d..c9d308522f6b9 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java @@ -4,11 +4,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class BarUrn extends Urn { public static final String ENTITY_TYPE = "bar"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. private final int _id; public BarUrn(int id) { @@ -22,7 +22,8 @@ public int getBarIdEntity() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. 
return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java index 8970a011eca14..774da2687893b 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class BarUrnCoercer extends BaseUrnCoercer<BarUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new BarUrnCoercer(), BarUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new BarUrnCoercer(), BarUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java index ab0c28f9fbb9b..4fffa8b4f2558 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java @@ -5,10 +5,8 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public abstract class BaseUrnCoercer<T extends Urn> implements DirectCoercer<T> { - public BaseUrnCoercer() { - } + public BaseUrnCoercer() {} public Object coerceInput(T object) throws ClassCastException { return object.toString(); diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java index dddf7721c64a8..81e0adab84472 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java @@ -3,11 +3,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class BazUrn extends Urn { public static final String ENTITY_TYPE = "baz"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. private final int _id; public BazUrn(int id) throws URISyntaxException { @@ -21,7 +21,8 @@ public int getBazIdEntity() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. 
return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java index 87b8929d236db..33ca9d0b060c6 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class BazUrnCoercer extends BaseUrnCoercer<BazUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new BazUrnCoercer(), BazUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new BazUrnCoercer(), BazUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java index a8f2bab3c21dd..1047e39f9905f 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java @@ -3,11 +3,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class FooUrn extends Urn { public static final String ENTITY_TYPE = "foo"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. private final int _id; public FooUrn(int id) throws URISyntaxException { @@ -21,7 +21,8 @@ public int getFooIdEntity() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java index a2d65dc5f8bd3..3e7bd95fdf3bc 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class FooUrnCoercer extends BaseUrnCoercer<FooUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new FooUrnCoercer(), FooUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new FooUrnCoercer(), FooUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java index 6cec6042401a1..bfa22bdeb7f90 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java @@ -4,11 +4,11 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class PizzaUrn extends Urn { public static final String ENTITY_TYPE = "pizza"; - // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions. + // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / + // external URN definitions. 
private final int _id; public PizzaUrn(int id) { @@ -22,7 +22,8 @@ public int getPizzaId() { @Override public boolean equals(Object obj) { - // Override for find bugs, bug delegate to super implementation, both in open source and internally. + // Override for find bugs, bug delegate to super implementation, both in open source and + // internally. return super.equals(obj); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java index 64bfffe03f77d..30af8171e0eef 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java @@ -2,7 +2,7 @@ import com.linkedin.data.template.Custom; - public class PizzaUrnCoercer extends BaseUrnCoercer<PizzaUrn> { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new PizzaUrnCoercer(), PizzaUrn.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new PizzaUrnCoercer(), PizzaUrn.class); } diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java index 8467f15f85a49..7af0eb39c70d9 100644 --- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java +++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java @@ -3,7 +3,6 @@ import com.linkedin.common.urn.Urn; import java.net.URISyntaxException; - public final class SingleAspectEntityUrn extends Urn { private static final String ENTITY_TYPE = "entitySingleAspectEntity"; From 3c0727e9b7195b05e55557bfa0a56f390808eb3a Mon Sep 17 00:00:00 2001 From: Aseem Bansal <asmbansal2@gmail.com> Date: Wed, 6 Dec 2023 15:07:50 +0530 Subject: [PATCH 051/263] feat(ci): split no cypress test suite (#9387) --- .github/workflows/docker-unified.yml | 12 ++++++------ smoke-test/smoke.sh | 8 +++++--- smoke-test/test_e2e.py | 2 ++ smoke-test/tests/privileges/test_privileges.py | 2 ++ smoke-test/tests/timeline/timeline_test.py | 2 ++ 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 8bb82a0a0608c..fef23f9efa85f 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -696,7 +696,12 @@ jobs: strategy: fail-fast: false matrix: - test_strategy: ["no_cypress", "cypress_suite1", "cypress_rest"] + test_strategy: [ + "no_cypress_suite0", + "no_cypress_suite1", + "cypress_suite1", + "cypress_rest" + ] needs: [ setup, @@ -792,11 +797,6 @@ jobs: ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml" run: | ./smoke-test/run-quickstart.sh - - name: sleep 60s - run: | - # we are doing this because gms takes time to get ready - # and we don't have a better readiness check when bootstrap is done - sleep 60s - name: Disk Check run: df -h . 
&& docker images - name: Disable ES Disk Threshold diff --git a/smoke-test/smoke.sh b/smoke-test/smoke.sh index 3236a0e5c3f0c..db0389be1f489 100755 --- a/smoke-test/smoke.sh +++ b/smoke-test/smoke.sh @@ -24,12 +24,14 @@ source venv/bin/activate source ./set-cypress-creds.sh -# no_cypress, cypress_suite1, cypress_rest +# no_cypress_suite0, no_cypress_suite1, cypress_suite1, cypress_rest if [[ -z "${TEST_STRATEGY}" ]]; then pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke.xml else - if [ "$TEST_STRATEGY" == "no_cypress" ]; then - pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_non_cypress.xml -k 'not test_run_cypress' + if [ "$TEST_STRATEGY" == "no_cypress_suite0" ]; then + pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_non_cypress.xml -k 'not test_run_cypress' -m 'not no_cypress_suite1' + elif [ "$TEST_STRATEGY" == "no_cypress_suite1" ]; then + pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_non_cypress.xml -m 'no_cypress_suite1' else pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke_cypress_${TEST_STRATEGY}.xml tests/cypress/integration_test.py fi diff --git a/smoke-test/test_e2e.py b/smoke-test/test_e2e.py index 4a0a122b79670..abb4841314c4a 100644 --- a/smoke-test/test_e2e.py +++ b/smoke-test/test_e2e.py @@ -8,6 +8,8 @@ import tenacity from datahub.ingestion.run.pipeline import Pipeline +pytestmark = pytest.mark.no_cypress_suite1 + from tests.utils import ( get_frontend_url, get_gms_url, diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index d0f00734ae9f3..aa54a50b04e7f 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -5,6 +5,8 @@ get_frontend_url, get_admin_credentials,get_sleep_info) from tests.privileges.utils import * +pytestmark = pytest.mark.no_cypress_suite1 + sleep_sec, sleep_times = get_sleep_info() @pytest.fixture(scope="session") diff --git a/smoke-test/tests/timeline/timeline_test.py b/smoke-test/tests/timeline/timeline_test.py index 4705343c1a2ba..c075d981487db 100644 --- a/smoke-test/tests/timeline/timeline_test.py +++ b/smoke-test/tests/timeline/timeline_test.py @@ -1,4 +1,5 @@ import json +import pytest from time import sleep from datahub.cli import timeline_cli @@ -7,6 +8,7 @@ from tests.utils import (get_datahub_graph, ingest_file_via_rest, wait_for_writes_to_sync) +pytestmark = pytest.mark.no_cypress_suite1 def test_all(): platform = "urn:li:dataPlatform:kafka" From a9c5c3903c3af88bd1aaf45b1a131f04d3ef57c1 Mon Sep 17 00:00:00 2001 From: Aseem Bansal <asmbansal2@gmail.com> Date: Wed, 6 Dec 2023 15:58:20 +0530 Subject: [PATCH 052/263] fix(ingest/redshift): too many values unpack (#9394) --- .../src/datahub/ingestion/source/redshift/lineage.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index 05011b2d7a769..abed8505f168b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -381,7 +381,8 @@ def _get_upstream_lineages( qualified_table_name = dataset_urn.DatasetUrn.create_from_string( source.urn ).get_entity_id()[1] - db, schema, table = qualified_table_name.split(".") + # -3 because platform instance is 
optional and that can cause the split to have more than 3 elements + db, schema, table = qualified_table_name.split(".")[-3:] if db == raw_db_name: db = alias_db_name path = f"{db}.{schema}.{table}" From 2eee3332ead690178edf6a5e8f8f551fa0065163 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth <treff7es@gmail.com> Date: Wed, 6 Dec 2023 12:18:24 +0100 Subject: [PATCH 053/263] fix(ingest/redshift): Fix psycopg2 removal from Redshift Source (#9395) --- .../src/datahub/ingestion/source/redshift/config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py index 51ad8a050adc2..540adbf4bfd15 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py @@ -10,7 +10,7 @@ from datahub.configuration.source_common import DatasetLineageProviderConfigBase from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.ingestion.source.data_lake_common.path_spec import PathSpec -from datahub.ingestion.source.sql.postgres import BasePostgresConfig +from datahub.ingestion.source.sql.sql_config import BasicSQLAlchemyConfig from datahub.ingestion.source.state.stateful_ingestion_base import ( StatefulLineageConfigMixin, StatefulProfilingConfigMixin, @@ -64,7 +64,7 @@ class RedshiftUsageConfig(BaseUsageConfig, StatefulUsageConfigMixin): class RedshiftConfig( - BasePostgresConfig, + BasicSQLAlchemyConfig, DatasetLineageProviderConfigBase, S3DatasetLineageProviderConfigBase, RedshiftUsageConfig, From 7a2b8bf5f9190441f667a733ce6328c50e62030c Mon Sep 17 00:00:00 2001 From: Jonas <150245047+accso-jo@users.noreply.github.com> Date: Wed, 6 Dec 2023 18:31:48 +0100 Subject: [PATCH 054/263] fix(ui): fixed font src spelling mistake (#9204) --- datahub-web-react/src/App.less | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/App.less b/datahub-web-react/src/App.less index a001aa103b33f..003e86981b2b2 100644 --- a/datahub-web-react/src/App.less +++ b/datahub-web-react/src/App.less @@ -4,5 +4,5 @@ @font-face { font-family: 'Manrope'; font-style: normal; - src: local('Mnarope'), url('./fonts/manrope.woff2') format('woff2'), + src: local('Manrope'), url('./fonts/manrope.woff2') format('woff2'), } From 27f23ecdd5d3635ac32ed51a10a339ee3e4870b3 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz <andrew.sikowitz@acryl.io> Date: Wed, 6 Dec 2023 13:59:23 -0500 Subject: [PATCH 055/263] feat(ingest/unity): GE Profiling (#8951) --- docs/how/updating-datahub.md | 4 + .../sources/databricks/unity-catalog_pre.md | 3 +- .../databricks/unity-catalog_recipe.yml | 52 ++++-- metadata-ingestion/setup.py | 5 +- .../ingestion/source/bigquery_v2/profiler.py | 2 +- .../ingestion/source/ge_data_profiler.py | 24 ++- .../ingestion/source/redshift/profile.py | 3 +- .../source/snowflake/snowflake_profiler.py | 3 +- .../source/sql/sql_generic_profiler.py | 25 +-- .../{profiler.py => analyze_profiler.py} | 6 +- .../datahub/ingestion/source/unity/config.py | 78 ++++++-- .../ingestion/source/unity/ge_profiler.py | 170 ++++++++++++++++++ .../datahub/ingestion/source/unity/report.py | 7 +- .../datahub/ingestion/source/unity/source.py | 49 +++-- .../mysql/mysql_mces_no_db_golden.json | 27 +-- .../mysql/mysql_mces_with_db_golden.json | 73 ++++---- .../mysql_table_row_count_estimate_only.json | 121 ++++--------- .../tests/unit/test_unity_catalog_config.py | 8 +- 
18 files changed, 449 insertions(+), 211 deletions(-) rename metadata-ingestion/src/datahub/ingestion/source/unity/{profiler.py => analyze_profiler.py} (96%) create mode 100644 metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index df179b0d0d2f7..94ab1b0611c33 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -12,6 +12,10 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the director constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. - #9286: The `DataHubRestEmitter.emit` method no longer returns anything. It previously returned a tuple of timestamps. +- #8951: A great expectations based profiler has been added for the Unity Catalog source. +To use the old profiler, set `method: analyze` under the `profiling` section in your recipe. +To use the new profiler, set `method: ge`. Profiling is disabled by default, so to enable it, +one of these methods must be specified. ### Potential Downtime diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md index ae2883343d7e8..12540e1977f64 100644 --- a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md +++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md @@ -15,7 +15,8 @@ * [Privileges documentation](https://docs.databricks.com/data-governance/unity-catalog/manage-privileges/privileges.html) + To ingest your workspace's notebooks and respective lineage, your service principal must have `CAN_READ` privileges on the folders containing the notebooks you want to ingest: [guide](https://docs.databricks.com/en/security/auth-authz/access-control/workspace-acl.html#folder-permissions). + To `include_usage_statistics` (enabled by default), your service principal must have `CAN_MANAGE` permissions on any SQL Warehouses you want to ingest: [guide](https://docs.databricks.com/security/auth-authz/access-control/sql-endpoint-acl.html). - + To ingest `profiling` information with `call_analyze` (enabled by default), your service principal must have ownership or `MODIFY` privilege on any tables you want to profile. + + To ingest `profiling` information with `method: ge`, you need `SELECT` privileges on all profiled tables. + + To ingest `profiling` information with `method: analyze` and `call_analyze: true` (enabled by default), your service principal must have ownership or `MODIFY` privilege on any tables you want to profile. * Alternatively, you can run [ANALYZE TABLE](https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-analyze-table.html) yourself on any tables you want to profile, then set `call_analyze` to `false`. You will still need `SELECT` privilege on those tables to fetch the results. 
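   As an illustrative aside (not part of the patched doc), a minimal sketch of running ANALYZE TABLE yourself through the `databricks-sql-connector` package this patch depends on, so the source only has to fetch the results with `call_analyze: false`; the hostname, warehouse id, token, and table name below are placeholders:

       # Hypothetical pre-profiling step: compute statistics ahead of ingestion.
       # Requires ownership or MODIFY privilege on each analyzed table.
       from databricks import sql

       with sql.connect(
           server_hostname="my-workspace.cloud.databricks.com",  # placeholder
           http_path="/sql/1.0/warehouses/<warehouse_id>",       # placeholder
           access_token="<token>",                               # placeholder
       ) as connection:
           with connection.cursor() as cursor:
               cursor.execute(
                   "ANALYZE TABLE main.my_schema.my_table "
                   "COMPUTE STATISTICS FOR ALL COLUMNS"
               )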
- Check the starter recipe below and replace `workspace_url` and `token` with your information from the previous steps. diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml b/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml index 7bc336d5f25fc..931552e7343d0 100644 --- a/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml +++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_recipe.yml @@ -2,24 +2,38 @@ source: type: unity-catalog config: workspace_url: https://my-workspace.cloud.databricks.com - token: "mygenerated_databricks_token" - #metastore_id_pattern: - # deny: - # - 11111-2222-33333-44-555555 - #catalog_pattern: - # allow: - # - my-catalog - #schema_pattern: - # deny: - # - information_schema - #table_pattern: - # allow: - # - test.lineagedemo.dinner - # First you have to create domains on Datahub by following this guide -> https://datahubproject.io/docs/domains/#domains-setup-prerequisites-and-permissions - #domain: - # urn:li:domain:1111-222-333-444-555: - # allow: - # - main.* + token: "<token>" + include_metastore: false + include_ownership: true + profiling: + method: "ge" + enabled: true + warehouse_id: "<warehouse_id>" + profile_table_level_only: false + max_wait_secs: 60 + pattern: + deny: + - ".*\\.unwanted_schema" + +# profiling: +# method: "analyze" +# enabled: true +# warehouse_id: "<warehouse_id>" +# profile_table_level_only: true +# call_analyze: true + +# catalogs: ["my_catalog"] +# schema_pattern: +# deny: +# - information_schema +# table_pattern: +# allow: +# - my_catalog.my_schema.my_table +# First you have to create domains on Datahub by following this guide -> https://datahubproject.io/docs/domains/#domains-setup-prerequisites-and-permissions +# domain: +# urn:li:domain:1111-222-333-444-555: +# allow: +# - main.* stateful_ingestion: enabled: true @@ -27,4 +41,4 @@ source: pipeline_name: acme-corp-unity -# sink configs if needed \ No newline at end of file +# sink configs if needed diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 69cbe8d823450..dac865d2dac37 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -262,7 +262,8 @@ "databricks-sdk>=0.9.0", "pyspark~=3.3.0", "requests", - "databricks-sql-connector", + # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes + "databricks-sql-connector>=2.8.0", } mysql = sql_common | {"pymysql>=1.0.2"} @@ -393,7 +394,7 @@ "powerbi": microsoft_common | {"lark[regex]==1.1.4", "sqlparse"} | sqlglot_lib, "powerbi-report-server": powerbi_report_server, "vertica": sql_common | {"vertica-sqlalchemy-dialect[vertica-python]==0.0.8.1"}, - "unity-catalog": databricks | sqllineage_lib, + "unity-catalog": databricks | sql_common | sqllineage_lib, "fivetran": snowflake_common, } diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py index 8ae17600e0eea..4083eb6db77c1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/profiler.py @@ -183,7 +183,7 @@ def get_workunits( return yield from self.generate_profile_workunits( profile_requests, - self.config.profiling.max_workers, + max_workers=self.config.profiling.max_workers, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py 
b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py index c334a97680e3e..abb415c90cc8b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ge_data_profiler.py @@ -27,6 +27,7 @@ import sqlalchemy as sa import sqlalchemy.sql.compiler +from great_expectations.core.profiler_types_mapping import ProfilerTypeMapping from great_expectations.core.util import convert_to_json_serializable from great_expectations.data_context import AbstractDataContext, BaseDataContext from great_expectations.data_context.types.base import ( @@ -77,8 +78,26 @@ SNOWFLAKE = "snowflake" BIGQUERY = "bigquery" REDSHIFT = "redshift" +DATABRICKS = "databricks" TRINO = "trino" +# Type names for Databricks, to match Title Case types in sqlalchemy +ProfilerTypeMapping.INT_TYPE_NAMES.append("Integer") +ProfilerTypeMapping.INT_TYPE_NAMES.append("SmallInteger") +ProfilerTypeMapping.INT_TYPE_NAMES.append("BigInteger") +ProfilerTypeMapping.FLOAT_TYPE_NAMES.append("Float") +ProfilerTypeMapping.FLOAT_TYPE_NAMES.append("Numeric") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("String") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("Text") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("Unicode") +ProfilerTypeMapping.STRING_TYPE_NAMES.append("UnicodeText") +ProfilerTypeMapping.BOOLEAN_TYPE_NAMES.append("Boolean") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("Date") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("DateTime") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("Time") +ProfilerTypeMapping.DATETIME_TYPE_NAMES.append("Interval") +ProfilerTypeMapping.BINARY_TYPE_NAMES.append("LargeBinary") + # The reason for this wacky structure is quite fun. GE basically assumes that # the config structures were generated directly from YML and further assumes that # they can be `deepcopy`'d without issue. 
The SQLAlchemy engine and connection @@ -697,6 +716,9 @@ def generate_dataset_profile( # noqa: C901 (complexity) 1, unique_count / non_null_count ) + if not profile.rowCount: + continue + self._get_dataset_column_sample_values(column_profile, column) if ( @@ -1172,7 +1194,7 @@ def _get_ge_dataset( }, ) - if platform == BIGQUERY: + if platform == BIGQUERY or platform == DATABRICKS: # This is done as GE makes the name as DATASET.TABLE # but we want it to be PROJECT.DATASET.TABLE instead for multi-project setups name_parts = pretty_name.split(".") diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py index 771636e8498a3..6fa3504ced139 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py @@ -59,8 +59,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, - self.config.profiling.max_workers, - db, + max_workers=self.config.profiling.max_workers, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 8e18d85d6f3ca..67953de47e5a3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -62,8 +62,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, - self.config.profiling.max_workers, - database.name, + max_workers=self.config.profiling.max_workers, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index aaeee5717a867..e309ff0d15311 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -69,8 +69,8 @@ def __init__( def generate_profile_workunits( self, requests: List[TableProfilerRequest], + *, max_workers: int, - db_name: Optional[str] = None, platform: Optional[str] = None, profiler_args: Optional[Dict] = None, ) -> Iterable[MetadataWorkUnit]: @@ -98,7 +98,7 @@ def generate_profile_workunits( return # Otherwise, if column level profiling is enabled, use GE profiler. 
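         # Illustrative aside, not part of this patch: the bare `*` added to the
         # signature above makes max_workers, platform, and profiler_args
         # keyword-only, which is why the Redshift, Snowflake, and BigQuery call
         # sites in this patch now pass max_workers=... explicitly. A minimal
         # sketch of the idiom:
         #
         #     def generate(requests, *, max_workers, platform=None):
         #         ...
         #
         #     generate(reqs, max_workers=10)  # OK
         #     generate(reqs, 10)              # TypeError: too many positional args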
- ge_profiler = self.get_profiler_instance(db_name) + ge_profiler = self.get_profiler_instance() for ge_profiler_request, profile in ge_profiler.generate_profiles( ge_profile_requests, max_workers, platform, profiler_args @@ -149,12 +149,18 @@ def get_profile_request( profile_table_level_only = self.config.profiling.profile_table_level_only dataset_name = self.get_dataset_name(table.name, schema_name, db_name) if not self.is_dataset_eligible_for_profiling( - dataset_name, table.last_altered, table.size_in_bytes, table.rows_count + dataset_name, + last_altered=table.last_altered, + size_in_bytes=table.size_in_bytes, + rows_count=table.rows_count, ): # Profile only table level if dataset is filtered from profiling # due to size limits alone if self.is_dataset_eligible_for_profiling( - dataset_name, table.last_altered, 0, 0 + dataset_name, + last_altered=table.last_altered, + size_in_bytes=None, + rows_count=None, ): profile_table_level_only = True else: @@ -199,9 +205,7 @@ def get_inspectors(self) -> Iterable[Inspector]: inspector = inspect(conn) yield inspector - def get_profiler_instance( - self, db_name: Optional[str] = None - ) -> "DatahubGEProfiler": + def get_profiler_instance(self) -> "DatahubGEProfiler": logger.debug(f"Getting profiler instance from {self.platform}") url = self.config.get_sql_alchemy_url() @@ -221,9 +225,10 @@ def get_profiler_instance( def is_dataset_eligible_for_profiling( self, dataset_name: str, - last_altered: Optional[datetime], - size_in_bytes: Optional[int], - rows_count: Optional[int], + *, + last_altered: Optional[datetime] = None, + size_in_bytes: Optional[int] = None, + rows_count: Optional[int] = None, ) -> bool: dataset_urn = make_dataset_urn_with_platform_instance( self.platform, diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/profiler.py b/metadata-ingestion/src/datahub/ingestion/source/unity/analyze_profiler.py similarity index 96% rename from metadata-ingestion/src/datahub/ingestion/source/unity/profiler.py rename to metadata-ingestion/src/datahub/ingestion/source/unity/analyze_profiler.py index 8066932e3afe9..4c8b22f2399b2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/analyze_profiler.py @@ -6,7 +6,7 @@ from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.workunit import MetadataWorkUnit -from datahub.ingestion.source.unity.config import UnityCatalogProfilerConfig +from datahub.ingestion.source.unity.config import UnityCatalogAnalyzeProfilerConfig from datahub.ingestion.source.unity.proxy import UnityCatalogApiProxy from datahub.ingestion.source.unity.proxy_types import ( ColumnProfile, @@ -23,8 +23,8 @@ @dataclass -class UnityCatalogProfiler: - config: UnityCatalogProfilerConfig +class UnityCatalogAnalyzeProfiler: + config: UnityCatalogAnalyzeProfilerConfig report: UnityCatalogReport proxy: UnityCatalogApiProxy dataset_urn_builder: Callable[[TableReference], str] diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 4e3deedddbc43..2c567120b4850 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -1,10 +1,12 @@ import logging import os from datetime import datetime, timedelta, timezone -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union +from urllib.parse import 
urlparse import pydantic from pydantic import Field +from typing_extensions import Literal from datahub.configuration.common import AllowDenyPattern, ConfigModel from datahub.configuration.source_common import ( @@ -13,6 +15,9 @@ ) from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.configuration.validate_field_rename import pydantic_renamed_field +from datahub.ingestion.source.ge_data_profiler import DATABRICKS +from datahub.ingestion.source.ge_profiling_config import GEProfilingConfig +from datahub.ingestion.source.sql.sql_config import SQLCommonConfig, make_sqlalchemy_uri from datahub.ingestion.source.state.stale_entity_removal_handler import ( StatefulStaleMetadataRemovalConfig, ) @@ -31,24 +36,20 @@ class UnityCatalogProfilerConfig(ConfigModel): - # TODO: Reduce duplicate code with DataLakeProfilerConfig, GEProfilingConfig, SQLAlchemyConfig - enabled: bool = Field( - default=False, description="Whether profiling should be done." - ) - operation_config: OperationConfig = Field( - default_factory=OperationConfig, - description="Experimental feature. To specify operation configs.", + method: str = Field( + description=( + "Profiling method to use." + " Options supported are `ge` and `analyze`." + " `ge` uses Great Expectations and runs SELECT SQL queries on profiled tables." + " `analyze` calls ANALYZE TABLE on profiled tables. Only works for delta tables." + ), ) + # TODO: Support cluster compute as well, for ge profiling warehouse_id: Optional[str] = Field( default=None, description="SQL Warehouse id, for running profiling queries." ) - profile_table_level_only: bool = Field( - default=False, - description="Whether to perform profiling at table-level only or include column-level profiling as well.", - ) - pattern: AllowDenyPattern = Field( default=AllowDenyPattern.allow_all(), description=( @@ -58,6 +59,24 @@ class UnityCatalogProfilerConfig(ConfigModel): ), ) + +class UnityCatalogAnalyzeProfilerConfig(UnityCatalogProfilerConfig): + method: Literal["analyze"] = "analyze" + + # TODO: Reduce duplicate code with DataLakeProfilerConfig, GEProfilingConfig, SQLAlchemyConfig + enabled: bool = Field( + default=False, description="Whether profiling should be done." + ) + operation_config: OperationConfig = Field( + default_factory=OperationConfig, + description="Experimental feature. 
To specify operation configs.", + ) + + profile_table_level_only: bool = Field( + default=False, + description="Whether to perform profiling at table-level only or include column-level profiling as well.", + ) + call_analyze: bool = Field( default=True, description=( @@ -89,7 +108,17 @@ def include_columns(self): return not self.profile_table_level_only +class UnityCatalogGEProfilerConfig(UnityCatalogProfilerConfig, GEProfilingConfig): + method: Literal["ge"] = "ge" + + max_wait_secs: Optional[int] = Field( + default=None, + description="Maximum time to wait for a table to be profiled.", + ) + + class UnityCatalogSourceConfig( + SQLCommonConfig, StatefulIngestionConfigBase, BaseUsageConfig, DatasetSourceConfigMixin, @@ -217,15 +246,34 @@ class UnityCatalogSourceConfig( description="Generate usage statistics.", ) - profiling: UnityCatalogProfilerConfig = Field( - default=UnityCatalogProfilerConfig(), description="Data profiling configuration" + profiling: Union[UnityCatalogGEProfilerConfig, UnityCatalogAnalyzeProfilerConfig] = Field( # type: ignore + default=UnityCatalogGEProfilerConfig(), + description="Data profiling configuration", + discriminator="method", ) + scheme: str = DATABRICKS + + def get_sql_alchemy_url(self): + return make_sqlalchemy_uri( + scheme=self.scheme, + username="token", + password=self.token, + at=urlparse(self.workspace_url).netloc, + db=None, + uri_opts={ + "http_path": f"/sql/1.0/warehouses/{self.profiling.warehouse_id}" + }, + ) + def is_profiling_enabled(self) -> bool: return self.profiling.enabled and is_profiling_enabled( self.profiling.operation_config ) + def is_ge_profiling(self) -> bool: + return self.profiling.method == "ge" + stateful_ingestion: Optional[StatefulStaleMetadataRemovalConfig] = pydantic.Field( default=None, description="Unity Catalog Stateful Ingestion Config." 
) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py new file mode 100644 index 0000000000000..e24ca8330777e --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/ge_profiler.py @@ -0,0 +1,170 @@ +import logging +from concurrent.futures import ThreadPoolExecutor, as_completed +from dataclasses import dataclass, field +from typing import Iterable, List, Optional + +from sqlalchemy import create_engine +from sqlalchemy.engine import Connection + +from datahub.ingestion.api.workunit import MetadataWorkUnit +from datahub.ingestion.source.sql.sql_config import SQLCommonConfig +from datahub.ingestion.source.sql.sql_generic import BaseTable +from datahub.ingestion.source.sql.sql_generic_profiler import ( + GenericProfiler, + TableProfilerRequest, +) +from datahub.ingestion.source.unity.config import UnityCatalogGEProfilerConfig +from datahub.ingestion.source.unity.proxy_types import Table, TableReference +from datahub.ingestion.source.unity.report import UnityCatalogReport + +logger = logging.getLogger(__name__) + + +@dataclass(init=False) +class UnityCatalogSQLGenericTable(BaseTable): + ref: TableReference = field(init=False) + + def __init__(self, table: Table): + self.name = table.name + self.comment = table.comment + self.created = table.created_at + self.last_altered = table.updated_at + self.column_count = len(table.columns) + self.ref = table.ref + self.size_in_bytes = None + self.rows_count = None + self.ddl = None + + +class UnityCatalogGEProfiler(GenericProfiler): + sql_common_config: SQLCommonConfig + profiling_config: UnityCatalogGEProfilerConfig + report: UnityCatalogReport + + def __init__( + self, + sql_common_config: SQLCommonConfig, + profiling_config: UnityCatalogGEProfilerConfig, + report: UnityCatalogReport, + ) -> None: + super().__init__(sql_common_config, report, "databricks") + self.profiling_config = profiling_config + # TODO: Consider passing dataset urn builder directly + # So there is no repeated logic between this class and source.py + + def get_workunits(self, tables: List[Table]) -> Iterable[MetadataWorkUnit]: + # Extra default SQLAlchemy option for better connection pooling and threading. 
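+        # Illustrative aside, not part of this patch: SQLAlchemy's default
+        # QueuePool holds pool_size=5 persistent connections plus up to
+        # max_overflow=10 extras, so seeding max_overflow with the profiler's
+        # worker count keeps the thread pool from exhausting connections.
+        # A rough hand-built equivalent, using the databricks:// URL shape that
+        # UnityCatalogSourceConfig.get_sql_alchemy_url builds, with placeholders:
+        #
+        #     from sqlalchemy import create_engine
+        #     engine = create_engine(
+        #         "databricks://token:<token>@<host>?http_path=/sql/1.0/warehouses/<id>",
+        #         max_overflow=8,  # match the number of profiling worker threads
+        #     )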
+ # https://docs.sqlalchemy.org/en/14/core/pooling.html#sqlalchemy.pool.QueuePool.params.max_overflow + self.config.options.setdefault( + "max_overflow", self.profiling_config.max_workers + ) + + url = self.config.get_sql_alchemy_url() + engine = create_engine(url, **self.config.options) + conn = engine.connect() + + profile_requests = [] + with ThreadPoolExecutor( + max_workers=self.profiling_config.max_workers + ) as executor: + futures = [ + executor.submit( + self.get_unity_profile_request, + UnityCatalogSQLGenericTable(table), + conn, + ) + for table in tables + ] + + try: + for i, completed in enumerate( + as_completed(futures, timeout=self.profiling_config.max_wait_secs) + ): + profile_request = completed.result() + if profile_request is not None: + profile_requests.append(profile_request) + if i > 0 and i % 100 == 0: + logger.info(f"Finished table-level profiling for {i} tables") + except TimeoutError: + logger.warning("Timed out waiting to complete table-level profiling.") + + if len(profile_requests) == 0: + return + + yield from self.generate_profile_workunits( + profile_requests, + max_workers=self.config.profiling.max_workers, + platform=self.platform, + profiler_args=self.get_profile_args(), + ) + + def get_dataset_name(self, table_name: str, schema_name: str, db_name: str) -> str: + # Note: unused... ideally should share logic with TableReference + return f"{db_name}.{schema_name}.{table_name}" + + def get_unity_profile_request( + self, table: UnityCatalogSQLGenericTable, conn: Connection + ) -> Optional[TableProfilerRequest]: + # TODO: Reduce code duplication with get_profile_request + skip_profiling = False + profile_table_level_only = self.profiling_config.profile_table_level_only + + dataset_name = table.ref.qualified_table_name + try: + table.size_in_bytes = _get_dataset_size_in_bytes(table, conn) + except Exception as e: + logger.warning(f"Failed to get table size for {dataset_name}: {e}") + + if table.size_in_bytes is None: + self.report.num_profile_missing_size_in_bytes += 1 + if not self.is_dataset_eligible_for_profiling( + dataset_name, + size_in_bytes=table.size_in_bytes, + last_altered=table.last_altered, + rows_count=0, # Can't get row count ahead of time + ): + # Profile only table level if dataset is filtered from profiling + # due to size limits alone + if self.is_dataset_eligible_for_profiling( + dataset_name, + last_altered=table.last_altered, + size_in_bytes=None, + rows_count=None, + ): + profile_table_level_only = True + else: + skip_profiling = True + + if table.column_count == 0: + skip_profiling = True + + if skip_profiling: + if self.profiling_config.report_dropped_profiles: + self.report.report_dropped(dataset_name) + return None + + self.report.report_entity_profiled(dataset_name) + logger.debug(f"Preparing profiling request for {dataset_name}") + return TableProfilerRequest( + table=table, + pretty_name=dataset_name, + batch_kwargs=dict(schema=table.ref.schema, table=table.name), + profile_table_level_only=profile_table_level_only, + ) + + +def _get_dataset_size_in_bytes( + table: UnityCatalogSQLGenericTable, conn: Connection +) -> Optional[int]: + name = ".".join( + conn.dialect.identifier_preparer.quote(c) + for c in [table.ref.catalog, table.ref.schema, table.ref.table] + ) + row = conn.execute(f"DESCRIBE DETAIL {name}").fetchone() + if row is None: + return None + else: + try: + return int(row._asdict()["sizeInBytes"]) + except Exception: + return None diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py 
b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py index 4153d9dd88eb8..7f19b6e2103ea 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py @@ -2,15 +2,13 @@ from typing import Tuple from datahub.ingestion.api.report import EntityFilterReport -from datahub.ingestion.source.state.stale_entity_removal_handler import ( - StaleEntityRemovalSourceReport, -) +from datahub.ingestion.source.sql.sql_generic_profiler import ProfilingSqlReport from datahub.ingestion.source_report.ingestion_stage import IngestionStageReport from datahub.utilities.lossy_collections import LossyDict, LossyList @dataclass -class UnityCatalogReport(IngestionStageReport, StaleEntityRemovalSourceReport): +class UnityCatalogReport(IngestionStageReport, ProfilingSqlReport): metastores: EntityFilterReport = EntityFilterReport.field(type="metastore") catalogs: EntityFilterReport = EntityFilterReport.field(type="catalog") schemas: EntityFilterReport = EntityFilterReport.field(type="schema") @@ -36,5 +34,6 @@ class UnityCatalogReport(IngestionStageReport, StaleEntityRemovalSourceReport): profile_table_errors: LossyDict[str, LossyList[Tuple[str, str]]] = field( default_factory=LossyDict ) + num_profile_missing_size_in_bytes: int = 0 num_profile_failed_unsupported_column_type: int = 0 num_profile_failed_int_casts: int = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 44b5bbbcb0ceb..03b4f61a512d0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -2,7 +2,6 @@ import re import time from concurrent.futures import ThreadPoolExecutor -from datetime import timedelta from typing import Dict, Iterable, List, Optional, Set, Union from urllib.parse import urljoin @@ -52,9 +51,14 @@ from datahub.ingestion.source.state.stateful_ingestion_base import ( StatefulIngestionSourceBase, ) -from datahub.ingestion.source.unity.config import UnityCatalogSourceConfig +from datahub.ingestion.source.unity.analyze_profiler import UnityCatalogAnalyzeProfiler +from datahub.ingestion.source.unity.config import ( + UnityCatalogAnalyzeProfilerConfig, + UnityCatalogGEProfilerConfig, + UnityCatalogSourceConfig, +) from datahub.ingestion.source.unity.connection_test import UnityCatalogConnectionTest -from datahub.ingestion.source.unity.profiler import UnityCatalogProfiler +from datahub.ingestion.source.unity.ge_profiler import UnityCatalogGEProfiler from datahub.ingestion.source.unity.proxy import UnityCatalogApiProxy from datahub.ingestion.source.unity.proxy_types import ( DATA_TYPE_REGISTRY, @@ -170,6 +174,9 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig): self.view_refs: Set[TableReference] = set() self.notebooks: FileBackedDict[Notebook] = FileBackedDict() + # Global map of tables, for profiling + self.tables: FileBackedDict[Table] = FileBackedDict() + @staticmethod def test_connection(config_dict: dict) -> TestConnectionReport: return UnityCatalogConnectionTest(config_dict).get_connection_test() @@ -233,16 +240,24 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: if self.config.is_profiling_enabled(): self.report.report_ingestion_stage_start("Wait on warehouse") assert wait_on_warehouse - timeout = timedelta(seconds=self.config.profiling.max_wait_secs) - wait_on_warehouse.result(timeout) - profiling_extractor 
= UnityCatalogProfiler( - self.config.profiling, - self.report, - self.unity_catalog_api_proxy, - self.gen_dataset_urn, - ) + wait_on_warehouse.result() + self.report.report_ingestion_stage_start("Profiling") - yield from profiling_extractor.get_workunits(self.table_refs) + if isinstance(self.config.profiling, UnityCatalogAnalyzeProfilerConfig): + yield from UnityCatalogAnalyzeProfiler( + self.config.profiling, + self.report, + self.unity_catalog_api_proxy, + self.gen_dataset_urn, + ).get_workunits(self.table_refs) + elif isinstance(self.config.profiling, UnityCatalogGEProfilerConfig): + yield from UnityCatalogGEProfiler( + sql_common_config=self.config, + profiling_config=self.config.profiling, + report=self.report, + ).get_workunits(list(self.tables.values())) + else: + raise ValueError("Unknown profiling config method") def build_service_principal_map(self) -> None: try: @@ -358,6 +373,16 @@ def process_tables(self, schema: Schema) -> Iterable[MetadataWorkUnit]: self.report.tables.dropped(table.id, f"table ({table.table_type})") continue + if ( + self.config.is_profiling_enabled() + and self.config.is_ge_profiling() + and self.config.profiling.pattern.allowed( + table.ref.qualified_table_name + ) + and not table.is_view + ): + self.tables[table.ref.qualified_table_name] = table + if table.is_view: self.view_refs.add(table.ref) else: diff --git a/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json b/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json index 38b03ce238d1c..a86ed53406e40 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json +++ b/metadata-ingestion/tests/integration/mysql/mysql_mces_no_db_golden.json @@ -2254,30 +2254,17 @@ { "fieldPath": "id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "description", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "customer_id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 } ] } @@ -2625,8 +2612,7 @@ { "fieldPath": "col", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 } ] } @@ -2655,8 +2641,7 @@ { "fieldPath": "dummy", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 } ] } @@ -2738,4 +2723,4 @@ "lastRunId": "no-run-id-provided" } } -] \ No newline at end of file +] diff --git a/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json b/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json index 5cfba57247bd3..b5ebca424d9a2 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json +++ b/metadata-ingestion/tests/integration/mysql/mysql_mces_with_db_golden.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": 
"mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -80,7 +84,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -95,7 +100,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -110,7 +116,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -230,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -247,7 +255,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -264,7 +273,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -284,7 +294,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -299,7 +310,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -395,7 +407,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -412,7 +425,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -429,7 +443,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -449,7 +464,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -572,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } }, { @@ -593,37 +610,25 @@ { "fieldPath": "id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "description", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "customer_id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 } ] } }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-test" + "runId": "mysql-test", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json b/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json index 7597013bd873a..634e04984986d 100644 --- a/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json +++ b/metadata-ingestion/tests/integration/mysql/mysql_table_row_count_estimate_only.json @@ -16,7 +16,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -31,7 +32,8 @@ }, "systemMetadata": { 
"lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -46,7 +48,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -63,7 +66,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -78,7 +82,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -93,7 +98,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -213,7 +219,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -230,7 +237,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -250,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -265,7 +274,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -361,7 +371,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -378,7 +389,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -398,7 +410,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -420,88 +433,44 @@ "fieldPath": "id", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "min": "1", - "max": "5", - "mean": "3.0", - "median": "3", - "stdev": "1.5811388300841898", - "sampleValues": [ - "1", - "2", - "3", - "4", - "5" - ] + "nullCount": 0 }, { "fieldPath": "company", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "sampleValues": [ - "Company A", - "Company B", - "Company C", - "Company D", - "Company E" - ] + "nullCount": 0 }, { "fieldPath": "last_name", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "sampleValues": [ - "Axen", - "Bedecs", - "Donnell", - "Gratacos Solsona", - "Lee" - ] + "nullCount": 0 }, { "fieldPath": "first_name", "uniqueCount": 5, "uniqueProportion": 1, - "nullCount": 0, - "sampleValues": [ - "Anna", - "Antonio", - "Christina", - "Martin", - "Thomas" - ] + "nullCount": 0 }, { "fieldPath": "email_address", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "priority", "uniqueCount": 3, "uniqueProportion": 0.75, - "nullCount": 0, - "min": "3.8", - "max": "4.9", - "mean": "4.175000011920929", - "median": "4.0", - "stdev": "0.49244294899530355", - "sampleValues": [ - "4.0", - "4.9", - "4.0", - "3.8" - ] + "nullCount": 
0 } ] } }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -522,37 +491,25 @@ { "fieldPath": "id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "description", "uniqueCount": 0, - "nullCount": 0, - "sampleValues": [] + "nullCount": 0 }, { "fieldPath": "customer_id", "uniqueCount": 0, - "nullCount": 0, - "min": "None", - "max": "None", - "mean": "None", - "median": "None", - "stdev": "0.0", - "sampleValues": [] + "nullCount": 0 } ] } }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "mysql-2020_04_14-07_00_00" + "runId": "mysql-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/test_unity_catalog_config.py b/metadata-ingestion/tests/unit/test_unity_catalog_config.py index 4be6f60171844..4098ed4074de2 100644 --- a/metadata-ingestion/tests/unit/test_unity_catalog_config.py +++ b/metadata-ingestion/tests/unit/test_unity_catalog_config.py @@ -38,7 +38,11 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", - "profiling": {"enabled": True, "warehouse_id": "my_warehouse_id"}, + "profiling": { + "enabled": True, + "method": "ge", + "warehouse_id": "my_warehouse_id", + }, } ) assert config.profiling.enabled is True @@ -47,7 +51,7 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", - "profiling": {"enabled": False}, + "profiling": {"enabled": False, "method": "ge"}, } ) assert config.profiling.enabled is False From 16fe22aafa13f9cbff33e4016658cf06df5b9adf Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz <andrew.sikowitz@acryl.io> Date: Wed, 6 Dec 2023 15:21:56 -0500 Subject: [PATCH 056/263] feat(ui/last-updated): Calculate last updated time as max(properties time, operation time) (#9242) --- .../types/dataset/mappers/DatasetMapper.java | 11 +- .../src/main/resources/entity.graphql | 9 +- .../dataset/mappers/DatasetMapperTest.java | 26 ++- datahub-web-react/package.json | 1 + datahub-web-react/src/Mocks.tsx | 7 + .../src/app/entity/dataset/DatasetEntity.tsx | 5 +- .../stats/DatasetStatsSummarySubHeader.tsx | 5 +- .../src/app/entity/dataset/shared/utils.ts | 13 ++ .../AutoCompleteTooltipContent.tsx | 5 +- .../src/graphql/fragments.graphql | 4 + datahub-web-react/src/graphql/search.graphql | 12 + .../datahub/ingestion/source/unity/source.py | 38 +--- .../unity/unity_catalog_mces_golden.json | 207 ------------------ .../tests/unit/serde/test_serde.py | 2 +- .../openapi-entity-servlet/build.gradle | 2 +- 15 files changed, 79 insertions(+), 268 deletions(-) create mode 100644 datahub-web-react/src/app/entity/dataset/shared/utils.ts diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 8296bc8244995..7fa1decdf7f55 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -17,6 +17,7 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; 
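The mapper change below threads a structured lastModified audit stamp through DatasetProperties so the UI can show the newer of the properties timestamp and the last operation timestamp. The selection rule itself is just a guarded max over epoch millis; a sketch, where last_updated_ms is an illustrative name mirroring the getLastUpdatedMs helper added to the React app later in this patch:

    from typing import Optional

    def last_updated_ms(
        properties_time: Optional[int], operation_time: Optional[int]
    ) -> Optional[int]:
        # Treat missing timestamps as 0, take the max, and report None
        # when neither side has a real value.
        newest = max(properties_time or 0, operation_time or 0)
        return newest or None

On the GraphQL side the same patch makes DatasetProperties.lastModified a non-null AuditStamp and deprecates the flat lastModifiedActor field, so callers read lastModified.actor instead.
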
+import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; @@ -200,10 +201,12 @@ private void mapDatasetProperties( } TimeStamp lastModified = gmsProperties.getLastModified(); if (lastModified != null) { - properties.setLastModified(lastModified.getTime()); - if (lastModified.hasActor()) { - properties.setLastModifiedActor(lastModified.getActor().toString()); - } + Urn actor = lastModified.getActor(); + properties.setLastModified( + new AuditStamp(lastModified.getTime(), actor == null ? null : actor.toString())); + properties.setLastModifiedActor(actor == null ? null : actor.toString()); + } else { + properties.setLastModified(new AuditStamp(0L, null)); } } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 4f3769d908815..feb344154d11e 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -1789,12 +1789,13 @@ type DatasetProperties { """ Last Modified timestamp millis associated with the Dataset """ - lastModified: Long + lastModified: AuditStamp! """ - Actor associated with the Dataset's lastModified timestamp + Actor associated with the Dataset's lastModified timestamp. + Deprecated - Use lastModified.actor instead. """ - lastModifiedActor: String + lastModifiedActor: String @deprecated } @@ -11234,4 +11235,4 @@ input UpdateOwnershipTypeInput { The description of the Custom Ownership Type """ description: String -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index 1959ae6d43208..b28dd287e3fe4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -2,6 +2,7 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetProperties; import com.linkedin.entity.Aspect; @@ -58,7 +59,8 @@ public void testDatasetPropertiesMapperWithCreatedAndLastModified() { expectedDatasetProperties.setQualifiedName("Test QualifiedName"); expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); - expectedDatasetProperties.setLastModified(20L); + expectedDatasetProperties.setLastModified( + new AuditStamp(20L, TEST_LAST_MODIFIED_ACTOR_URN.toString())); expectedDatasetProperties.setCreated(10L); expected.setProperties(expectedDatasetProperties); @@ -68,7 +70,11 @@ public void testDatasetPropertiesMapperWithCreatedAndLastModified() { actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); Assert.assertEquals( - actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + 
actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); Assert.assertEquals( @@ -102,7 +108,7 @@ public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { expectedDatasetProperties.setName("Test"); expectedDatasetProperties.setLastModifiedActor(null); expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(null); + expectedDatasetProperties.setLastModified(new AuditStamp(0L, null)); expectedDatasetProperties.setCreated(null); expected.setProperties(expectedDatasetProperties); @@ -110,7 +116,11 @@ public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); Assert.assertEquals( - actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); Assert.assertEquals( @@ -152,7 +162,7 @@ public void testDatasetPropertiesMapperWithoutTimestampActors() { expectedDatasetProperties.setName("Test"); expectedDatasetProperties.setLastModifiedActor(null); expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(20L); + expectedDatasetProperties.setLastModified(new AuditStamp(20L, null)); expectedDatasetProperties.setCreated(10L); expected.setProperties(expectedDatasetProperties); @@ -160,7 +170,11 @@ public void testDatasetPropertiesMapperWithoutTimestampActors() { Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); Assert.assertEquals( - actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); Assert.assertEquals( diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index c26338ea285fb..b949c9ab9d11f 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -92,6 +92,7 @@ "scripts": { "analyze": "source-map-explorer 'dist/static/js/*.js'", "start": "yarn run generate && BROWSER=none REACT_APP_MOCK=false craco start", + "start:dev": "yarn run generate && DISABLE_ESLINT_PLUGIN=true BROWSER=none REACT_APP_MOCK=false craco start", "start:mock": "yarn run generate && BROWSER=none REACT_APP_MOCK=true craco start", "start:e2e": "REACT_APP_MOCK=cy BROWSER=none PORT=3010 craco start", "ec2-dev": "yarn run generate && CI=true;export CI;BROWSER=none craco start", diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index ada9a06ab5b95..17173fd28e07f 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -437,6 +437,11 @@ export const dataset3 = { }, ], externalUrl: 'https://data.hub', + lastModified: { + __typename: 'AuditStamp', + time: 0, + actor: null, + }, }, parentContainers: { __typename: 
'ParentContainersResult', @@ -702,6 +707,7 @@ export const dataset5 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:5' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; @@ -716,6 +722,7 @@ export const dataset6 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:6' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index 7d40b97a66b3b..f60eb95937452 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -33,6 +33,7 @@ import DataProductSection from '../shared/containers/profile/sidebar/DataProduct import { getDataProduct } from '../shared/utils'; import AccessManagement from '../shared/tabs/Dataset/AccessManagement/AccessManagement'; import { matchedFieldPathsRenderer } from '../../search/matches/matchedFieldPathsRenderer'; +import { getLastUpdatedMs } from './shared/utils'; const SUBTYPES = { VIEW: 'view', @@ -310,9 +311,7 @@ export class DatasetEntity implements Entity<Dataset> { rowCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].rowCount} columnCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].columnCount} sizeInBytes={(data as any).lastProfile?.length && (data as any).lastProfile[0].sizeInBytes} - lastUpdatedMs={ - (data as any).lastOperation?.length && (data as any).lastOperation[0].lastUpdatedTimestamp - } + lastUpdatedMs={getLastUpdatedMs(data.properties, (data as any)?.lastOperation)} health={data.health} degree={(result as any).degree} paths={(result as any).paths} diff --git a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx index 36b7d251950b4..c1e2c1aa298b6 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx @@ -3,6 +3,7 @@ import { DatasetStatsSummary as DatasetStatsSummaryObj } from '../../../../../.. import { useBaseEntity } from '../../../../shared/EntityContext'; import { GetDatasetQuery } from '../../../../../../graphql/dataset.generated'; import { DatasetStatsSummary } from '../../../shared/DatasetStatsSummary'; +import { getLastUpdatedMs } from '../../../shared/utils'; export const DatasetStatsSummarySubHeader = () => { const result = useBaseEntity<GetDatasetQuery>(); @@ -13,15 +14,13 @@ export const DatasetStatsSummarySubHeader = () => { const maybeLastProfile = dataset?.datasetProfiles && dataset.datasetProfiles.length ? dataset.datasetProfiles[0] : undefined; - const maybeLastOperation = dataset?.operations && dataset.operations.length ? 
dataset.operations[0] : undefined; - const rowCount = maybeLastProfile?.rowCount; const columnCount = maybeLastProfile?.columnCount; const sizeInBytes = maybeLastProfile?.sizeInBytes; const totalSqlQueries = dataset?.usageStats?.aggregations?.totalSqlQueries; const queryCountLast30Days = maybeStatsSummary?.queryCountLast30Days; const uniqueUserCountLast30Days = maybeStatsSummary?.uniqueUserCountLast30Days; - const lastUpdatedMs = maybeLastOperation?.lastUpdatedTimestamp; + const lastUpdatedMs = getLastUpdatedMs(dataset?.properties, dataset?.operations); return ( <DatasetStatsSummary diff --git a/datahub-web-react/src/app/entity/dataset/shared/utils.ts b/datahub-web-react/src/app/entity/dataset/shared/utils.ts new file mode 100644 index 0000000000000..fedd54385e7ab --- /dev/null +++ b/datahub-web-react/src/app/entity/dataset/shared/utils.ts @@ -0,0 +1,13 @@ +import { DatasetProperties, Operation } from '../../../../types.generated'; + +export function getLastUpdatedMs( + properties: Pick<DatasetProperties, 'lastModified'> | null | undefined, + operations: Pick<Operation, 'lastUpdatedTimestamp'>[] | null | undefined, +): number | undefined { + return ( + Math.max( + properties?.lastModified?.time || 0, + (operations?.length && operations[0].lastUpdatedTimestamp) || 0, + ) || undefined + ); +} diff --git a/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx b/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx index dfe32c7805a9b..4e40c29722c4d 100644 --- a/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx +++ b/datahub-web-react/src/app/search/autoComplete/AutoCompleteTooltipContent.tsx @@ -3,6 +3,7 @@ import React from 'react'; import styled from 'styled-components'; import { Dataset, Entity, EntityType } from '../../../types.generated'; import { DatasetStatsSummary } from '../../entity/dataset/shared/DatasetStatsSummary'; +import { getLastUpdatedMs } from '../../entity/dataset/shared/utils'; import { useEntityRegistry } from '../../useEntityRegistry'; import { ArrowWrapper } from './ParentContainers'; @@ -48,9 +49,7 @@ export default function AutoCompleteTooltipContent({ entity }: Props) { rowCount={(entity as any).lastProfile?.length && (entity as any).lastProfile[0].rowCount} columnCount={(entity as any).lastProfile?.length && (entity as any).lastProfile[0].columnCount} sizeInBytes={(entity as any).lastProfile?.length && (entity as any).lastProfile[0].sizeInBytes} - lastUpdatedMs={ - (entity as any).lastOperation?.length && (entity as any).lastOperation[0].lastUpdatedTimestamp - } + lastUpdatedMs={getLastUpdatedMs((entity as any)?.properties, (entity as any)?.lastOperation)} queryCountLast30Days={(entity as Dataset).statsSummary?.queryCountLast30Days} uniqueUserCountLast30Days={(entity as Dataset).statsSummary?.uniqueUserCountLast30Days} mode="tooltip-content" diff --git a/datahub-web-react/src/graphql/fragments.graphql b/datahub-web-react/src/graphql/fragments.graphql index d693779d1169b..b77ef9d1ad29c 100644 --- a/datahub-web-react/src/graphql/fragments.graphql +++ b/datahub-web-react/src/graphql/fragments.graphql @@ -240,6 +240,10 @@ fragment nonRecursiveDatasetFields on Dataset { value } externalUrl + lastModified { + time + actor + } } editableProperties { description diff --git a/datahub-web-react/src/graphql/search.graphql b/datahub-web-react/src/graphql/search.graphql index 6ca2a78f93d25..7034116f76129 100644 --- a/datahub-web-react/src/graphql/search.graphql +++ 
b/datahub-web-react/src/graphql/search.graphql @@ -13,6 +13,10 @@ fragment autoCompleteFields on Entity { properties { name qualifiedName + lastModified { + time + actor + } } parentContainers { ...parentContainersFields @@ -39,6 +43,10 @@ fragment autoCompleteFields on Entity { description qualifiedName externalUrl + lastModified { + time + actor + } } } } @@ -336,6 +344,10 @@ fragment nonSiblingsDatasetSearchFields on Dataset { value } externalUrl + lastModified { + time + actor + } } ownership { ...ownershipFields diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 03b4f61a512d0..d1940c1d57607 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -1,6 +1,5 @@ import logging import re -import time from concurrent.futures import ThreadPoolExecutor from typing import Dict, Iterable, List, Optional, Set, Union from urllib.parse import urljoin @@ -87,8 +86,6 @@ DomainsClass, MySqlDDLClass, NullTypeClass, - OperationClass, - OperationTypeClass, OwnerClass, OwnershipClass, OwnershipTypeClass, @@ -402,7 +399,6 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn sub_type = self._create_table_sub_type_aspect(table) schema_metadata = self._create_schema_metadata_aspect(table) - operation = self._create_table_operation_aspect(table) domain = self._get_domain_aspect(dataset_name=table.ref.qualified_table_name) ownership = self._create_table_ownership_aspect(table) data_platform_instance = self._create_data_platform_instance_aspect() @@ -424,7 +420,6 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn view_props, sub_type, schema_metadata, - operation, domain, ownership, data_platform_instance, @@ -696,10 +691,10 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: int(table.created_at.timestamp() * 1000), make_user_urn(table.created_by) ) last_modified = created - if table.updated_at and table.updated_by is not None: + if table.updated_at: last_modified = TimeStampClass( int(table.updated_at.timestamp() * 1000), - make_user_urn(table.updated_by), + table.updated_by and make_user_urn(table.updated_by), ) return DatasetPropertiesClass( @@ -712,35 +707,6 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: externalUrl=f"{self.external_url_base}/{table.ref.external_path}", ) - def _create_table_operation_aspect(self, table: Table) -> OperationClass: - """Produce an operation aspect for a table. - - If a last updated time is present, we produce an update operation. - Otherwise, we produce a create operation. We do this in addition to - setting the last updated time in the dataset properties aspect, as - the UI is currently missing the ability to display the last updated - from the properties aspect. 
- """ - - reported_time = int(time.time() * 1000) - - operation = OperationClass( - timestampMillis=reported_time, - lastUpdatedTimestamp=int(table.created_at.timestamp() * 1000), - actor=make_user_urn(table.created_by), - operationType=OperationTypeClass.CREATE, - ) - - if table.updated_at and table.updated_by is not None: - operation = OperationClass( - timestampMillis=reported_time, - lastUpdatedTimestamp=int(table.updated_at.timestamp() * 1000), - actor=make_user_urn(table.updated_by), - operationType=OperationTypeClass.UPDATE, - ) - - return operation - def _create_table_ownership_aspect(self, table: Table) -> Optional[OwnershipClass]: owner_urn = self.get_owner_urn(table.owner) if owner_urn is not None: diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 2e92215d70b99..d25c86a3a1f9a 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -524,29 +524,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", @@ -877,29 +854,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", @@ -1230,29 +1184,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", @@ -1719,29 +1650,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", @@ -2072,29 +1980,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", @@ -2425,29 +2310,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", @@ -2914,29 +2776,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", @@ -3267,29 +3106,6 @@ 
"lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", @@ -3620,29 +3436,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "operation", - "aspect": { - "json": { - "timestampMillis": 1638860400000, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "actor": "urn:li:corpuser:abc@acryl.io", - "operationType": "UPDATE", - "lastUpdatedTimestamp": 1666186049633 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", diff --git a/metadata-ingestion/tests/unit/serde/test_serde.py b/metadata-ingestion/tests/unit/serde/test_serde.py index d2d6a0bdda5b9..53ffdf46a6d1e 100644 --- a/metadata-ingestion/tests/unit/serde/test_serde.py +++ b/metadata-ingestion/tests/unit/serde/test_serde.py @@ -100,7 +100,7 @@ def test_serde_to_avro( fo.seek(0) in_records = list(fastavro.reader(fo, return_record_name=True)) in_mces = [ - MetadataChangeEventClass.from_obj(record, tuples=True) + MetadataChangeEventClass.from_obj(record, tuples=True) # type: ignore for record in in_records ] diff --git a/metadata-service/openapi-entity-servlet/build.gradle b/metadata-service/openapi-entity-servlet/build.gradle index dbec469085b07..00353392dedef 100644 --- a/metadata-service/openapi-entity-servlet/build.gradle +++ b/metadata-service/openapi-entity-servlet/build.gradle @@ -77,4 +77,4 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: [mergeApiComponents, 'delegatePattern' : "false" ] } -tasks.getByName("compileJava").dependsOn(openApiGenerate) \ No newline at end of file +tasks.getByName("compileJava").dependsOn(openApiGenerate) From dc6f16984673948f45d466db576eb74b2f45e6f8 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Thu, 7 Dec 2023 08:59:25 +0900 Subject: [PATCH 057/263] docs: add youtube link to townhall button on docs (#9381) --- .../_components/TownhallButton/index.jsx | 24 +++++++----- .../TownhallButton/townhallbutton.module.scss | 37 +++++++++++++------ 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/docs-website/src/pages/_components/TownhallButton/index.jsx b/docs-website/src/pages/_components/TownhallButton/index.jsx index 11dc2dc5c8476..22643846f2cf2 100644 --- a/docs-website/src/pages/_components/TownhallButton/index.jsx +++ b/docs-website/src/pages/_components/TownhallButton/index.jsx @@ -11,20 +11,26 @@ const TownhallButton = () => { 
const daysUntilLastThursday = lastThursday - currentDay; - let showButton = false; - let currentMonth = ''; + let buttonText = ''; + let buttonLink = ''; + let townhallSeasonClass = ''; if (daysUntilLastThursday > 0 && daysUntilLastThursday <= 14) { - showButton = true; - currentMonth = new Intl.DateTimeFormat('en-US', { month: 'long' }).format(today); + const currentMonth = new Intl.DateTimeFormat('en-US', { month: 'long' }).format(today); + buttonText = `Join ${currentMonth} Townhall! ✨`; + buttonLink = 'http://rsvp.datahubproject.io'; + townhallSeasonClass = 'townhall-season' + } else { + buttonText = 'Watch Our Latest Townhall! 👀'; + buttonLink = 'https://www.youtube.com/playlist?list=PLdCtLs64vZvHTXGqybmOfyxXbGDn2Reb9'; + townhallSeasonClass = 'non-townhall-season' } + return ( - showButton && ( - <Link to="http://rsvp.datahubproject.io" className={clsx('button button--primary button--md', styles.feature)}> - Join {currentMonth} Townhall! ✨ - </Link> - ) + <Link to={buttonLink} className={clsx('button button--primary button--md', styles[townhallSeasonClass])}> + {buttonText} + </Link> ); }; diff --git a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss index 951bc99015302..3d30c65f89539 100644 --- a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss +++ b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss @@ -1,14 +1,29 @@ -.feature { - color: white; - border: 1px solid transparent; - background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); - background-origin: border-box; - opacity: 90%; - - &:hover { - opacity: 100%; - background: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + .townhall-season { + color: white; + border: 1px solid transparent; background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); background-origin: border-box; + opacity: 90%; + + &:hover { + opacity: 100%; + background: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + background-image: linear-gradient(to right, #1890ff 0%, #9c27b0 100%); + background-origin: border-box; + } } -} + + .non-townhall-season { + color: white; + border: 1px solid transparent; + background-image: linear-gradient(to right, #1890ff 0%, #48DBFB 100%); + background-origin: border-box; + opacity: 90%; + + &:hover { + opacity: 100%; + background: linear-gradient(to right, #1890ff 0%, #48DBFB 100%); + background-image: linear-gradient(to right, #1890ff 0%, #48DBFB 100%); + background-origin: border-box; + } + } \ No newline at end of file From 1ce752ed6e825c12ad373cd4063083b95c252a56 Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Thu, 7 Dec 2023 16:14:09 +0900 Subject: [PATCH 058/263] fix: set new sidebar section (#9393) --- docs-website/sidebars.js | 773 +++++++++++++++------------- docs-website/src/styles/global.scss | 25 +- 2 files changed, 424 insertions(+), 374 deletions(-) diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index c70a609a4cc4b..67943ba8d7016 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -8,6 +8,11 @@ module.exports = { // operators overviewSidebar: [ + { + type: "html", + value: "<div>Getting Started</div>", + defaultStyle: true, + }, { label: "What Is DataHub?", type: "category", @@ -31,82 +36,187 @@ module.exports = { }, { type: "category", - label: "Integrations", - link: { type: "doc", id: "metadata-ingestion/README" }, + label: "Features", + link: { + type: 
"generated-index", + title: "Feature Guides", + description: "Learn about the features of DataHub.", + }, items: [ - // The purpose of this section is to provide a deeper understanding of how ingestion works. - // Readers should be able to find details for ingesting from all systems, apply transformers, understand sinks, - // and understand key concepts of the Ingestion Framework (Sources, Sinks, Transformers, and Recipes) + "docs/ui-ingestion", + "docs/how/search", + "docs/schema-history", + // "docs/how/ui-tabs-guide", + "docs/domains", + "docs/dataproducts", + "docs/glossary/business-glossary", + "docs/tags", + "docs/ownership/ownership-types", + "docs/authorization/access-policies-guide", + "docs/features/dataset-usage-and-query-history", + "docs/posts", + "docs/sync-status", + "docs/generated/lineage/lineage-feature-guide", + { + type: "doc", + id: "docs/tests/metadata-tests", + className: "saasOnly", + }, + "docs/act-on-metadata/impact-analysis", { - "Quickstart Guides": [ - "metadata-ingestion/cli-ingestion", + label: "Observability", + type: "category", + items: [ { - BigQuery: [ - "docs/quick-ingestion-guides/bigquery/overview", - "docs/quick-ingestion-guides/bigquery/setup", - "docs/quick-ingestion-guides/bigquery/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/freshness-assertions", + className: "saasOnly", }, { - Redshift: [ - "docs/quick-ingestion-guides/redshift/overview", - "docs/quick-ingestion-guides/redshift/setup", - "docs/quick-ingestion-guides/redshift/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/volume-assertions", + className: "saasOnly", }, { - Snowflake: [ - "docs/quick-ingestion-guides/snowflake/overview", - "docs/quick-ingestion-guides/snowflake/setup", - "docs/quick-ingestion-guides/snowflake/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/custom-sql-assertions", + className: "saasOnly", }, { - Tableau: [ - "docs/quick-ingestion-guides/tableau/overview", - "docs/quick-ingestion-guides/tableau/setup", - "docs/quick-ingestion-guides/tableau/configuration", - ], + type: "doc", + id: "docs/managed-datahub/observe/column-assertions", + className: "saasOnly", }, + ], + }, + { + Guides: ["docs/features/feature-guides/ui-lineage"], + }, + ], + }, + { + label: "Managed DataHub", + type: "category", + collapsed: true, + link: { + type: "doc", + id: "docs/managed-datahub/managed-datahub-overview", + }, + items: [ + "docs/managed-datahub/welcome-acryl", + { + type: "doc", + id: "docs/managed-datahub/saas-slack-setup", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/approval-workflows", + className: "saasOnly", + }, + { + "Metadata Ingestion With Acryl": [ + "docs/managed-datahub/metadata-ingestion-with-acryl/ingestion", + ], + }, + { + "DataHub API": [ { - PowerBI: [ - "docs/quick-ingestion-guides/powerbi/overview", - "docs/quick-ingestion-guides/powerbi/setup", - "docs/quick-ingestion-guides/powerbi/configuration", - ], + type: "doc", + id: "docs/managed-datahub/datahub-api/entity-events-api", + className: "saasOnly", }, { - Looker: [ - "docs/quick-ingestion-guides/looker/overview", - "docs/quick-ingestion-guides/looker/setup", - "docs/quick-ingestion-guides/looker/configuration", + "GraphQL API": [ + "docs/managed-datahub/datahub-api/graphql-api/getting-started", + { + type: "doc", + id: "docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta", + className: "saasOnly", + }, ], }, ], }, - "metadata-ingestion/recipe_overview", { - type: "category", - label: 
"Sources", - link: { type: "doc", id: "metadata-ingestion/source_overview" }, - items: [ - // collapse these; add push-based at top + Integrations: [ { type: "doc", - id: "docs/lineage/airflow", - label: "Airflow", + id: "docs/managed-datahub/integrations/aws-privatelink", + className: "saasOnly", }, - //"docker/airflow/local_airflow", - "metadata-integration/java/spark-lineage/README", - "metadata-ingestion/integration_docs/great-expectations", - "metadata-integration/java/datahub-protobuf/README", - //"metadata-ingestion/source-docs-template", { - type: "autogenerated", - dirName: "docs/generated/ingestion/sources", // '.' means the current docs folder + type: "doc", + id: "docs/managed-datahub/integrations/oidc-sso-integration", + className: "saasOnly", + }, + ], + }, + { + "Operator Guide": [ + { + type: "doc", + id: "docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge", + className: "saasOnly", }, ], }, + { + type: "doc", + id: "docs/managed-datahub/chrome-extension", + }, + { + type: "doc", + id: "docs/managed-datahub/subscription-and-notification", + className: "saasOnly", + }, + { + "Managed DataHub Release History": [ + "docs/managed-datahub/release-notes/v_0_2_13", + "docs/managed-datahub/release-notes/v_0_2_12", + "docs/managed-datahub/release-notes/v_0_2_11", + "docs/managed-datahub/release-notes/v_0_2_10", + "docs/managed-datahub/release-notes/v_0_2_9", + "docs/managed-datahub/release-notes/v_0_2_8", + "docs/managed-datahub/release-notes/v_0_2_7", + "docs/managed-datahub/release-notes/v_0_2_6", + "docs/managed-datahub/release-notes/v_0_2_5", + "docs/managed-datahub/release-notes/v_0_2_4", + "docs/managed-datahub/release-notes/v_0_2_3", + "docs/managed-datahub/release-notes/v_0_2_2", + "docs/managed-datahub/release-notes/v_0_2_1", + "docs/managed-datahub/release-notes/v_0_2_0", + "docs/managed-datahub/release-notes/v_0_1_73", + "docs/managed-datahub/release-notes/v_0_1_72", + "docs/managed-datahub/release-notes/v_0_1_70", + "docs/managed-datahub/release-notes/v_0_1_69", + ], + }, + ], + }, + { + type: "html", + value: "<div>Integrations</div>", + defaultStyle: true, + }, + { + type: "category", + link: { + type: "doc", + id: "metadata-ingestion/README", + }, + label: "Overview", + items: [ + { + type: "doc", + label: "Recipe", + id: "metadata-ingestion/recipe_overview", + }, { type: "category", label: "Sinks", @@ -127,30 +237,104 @@ module.exports = { }, items: ["metadata-ingestion/docs/transformer/dataset_transformer"], }, + ], + }, + { + "Quickstart Guides": [ + "metadata-ingestion/cli-ingestion", { - "Advanced Guides": [ - { - "Scheduling Ingestion": [ - "metadata-ingestion/schedule_docs/intro", - "metadata-ingestion/schedule_docs/cron", - "metadata-ingestion/schedule_docs/airflow", - "metadata-ingestion/schedule_docs/kubernetes", - ], - }, - - "docs/platform-instances", - "metadata-ingestion/docs/dev_guides/stateful", - "metadata-ingestion/docs/dev_guides/classification", - "metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source", - "metadata-ingestion/docs/dev_guides/sql_profiles", - "metadata-ingestion/docs/dev_guides/profiling_ingestions", + BigQuery: [ + "docs/quick-ingestion-guides/bigquery/overview", + "docs/quick-ingestion-guides/bigquery/setup", + "docs/quick-ingestion-guides/bigquery/configuration", + ], + }, + { + Redshift: [ + "docs/quick-ingestion-guides/redshift/overview", + 
"docs/quick-ingestion-guides/redshift/setup", + "docs/quick-ingestion-guides/redshift/configuration", + ], + }, + { + Snowflake: [ + "docs/quick-ingestion-guides/snowflake/overview", + "docs/quick-ingestion-guides/snowflake/setup", + "docs/quick-ingestion-guides/snowflake/configuration", + ], + }, + { + Tableau: [ + "docs/quick-ingestion-guides/tableau/overview", + "docs/quick-ingestion-guides/tableau/setup", + "docs/quick-ingestion-guides/tableau/configuration", + ], + }, + { + PowerBI: [ + "docs/quick-ingestion-guides/powerbi/overview", + "docs/quick-ingestion-guides/powerbi/setup", + "docs/quick-ingestion-guides/powerbi/configuration", + ], + }, + { + Looker: [ + "docs/quick-ingestion-guides/looker/overview", + "docs/quick-ingestion-guides/looker/setup", + "docs/quick-ingestion-guides/looker/configuration", ], }, ], }, { type: "category", - label: "Deployment", + label: "Sources", + link: { type: "doc", id: "metadata-ingestion/source_overview" }, + items: [ + // collapse these; add push-based at top + { + type: "doc", + id: "docs/lineage/airflow", + label: "Airflow", + }, + //"docker/airflow/local_airflow", + "metadata-integration/java/spark-lineage/README", + "metadata-ingestion/integration_docs/great-expectations", + "metadata-integration/java/datahub-protobuf/README", + //"metadata-ingestion/source-docs-template", + { + type: "autogenerated", + dirName: "docs/generated/ingestion/sources", // '.' means the current docs folder + }, + ], + }, + { + "Advanced Guides": [ + { + "Scheduling Ingestion": [ + "metadata-ingestion/schedule_docs/intro", + "metadata-ingestion/schedule_docs/cron", + "metadata-ingestion/schedule_docs/airflow", + "metadata-ingestion/schedule_docs/kubernetes", + ], + }, + + "docs/platform-instances", + "metadata-ingestion/docs/dev_guides/stateful", + "metadata-ingestion/docs/dev_guides/classification", + "metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source", + "metadata-ingestion/docs/dev_guides/sql_profiles", + "metadata-ingestion/docs/dev_guides/profiling_ingestions", + ], + }, + { + type: "html", + value: "<div>Deployment</div>", + defaultStyle: true, + }, + { + type: "category", + label: "Deployment Guides", link: { type: "generated-index", title: "Deployment Guides", @@ -158,109 +342,111 @@ module.exports = { "Learn how to deploy DataHub to your environment, set up authentication, manage upgrades, and more.", }, items: [ - // The purpose of this section is to provide the minimum steps required to deploy DataHub to the vendor of your choosing "docs/deploy/aws", "docs/deploy/gcp", "docs/deploy/azure", "docker/README", "docs/deploy/kubernetes", + ], + }, + { + type: "category", + label: "Advanced Guides", + items: [ "docs/deploy/confluent-cloud", "docs/deploy/environment-vars", "docs/how/extract-container-logs", ], }, { - type: "category", - label: "Admin", - items: [ - { - Authentication: [ - "docs/authentication/README", - "docs/authentication/concepts", - "docs/authentication/changing-default-credentials", - "docs/authentication/guides/add-users", - { - "Frontend Authentication": [ - "docs/authentication/guides/jaas", - "docs/authentication/guides/sso/configure-oidc-react", - "docs/authentication/guides/sso/configure-oidc-behind-proxy", - ], - }, - "docs/authentication/introducing-metadata-service-authentication", - "docs/authentication/personal-access-tokens", - ], - }, - { - Authorization: [ - "docs/authorization/README", - "docs/authorization/roles", - "docs/authorization/policies", - "docs/authorization/groups", - ], - }, - { - "Advanced 
Guides": [ - "docs/how/delete-metadata", - "docs/how/configuring-authorization-with-apache-ranger", - "docs/how/backup-datahub", - "docs/how/restore-indices", - "docs/advanced/db-retention", - "docs/advanced/monitoring", - "docs/deploy/telemetry", - "docs/how/kafka-config", - "docs/advanced/no-code-upgrade", - "docs/how/jattach-guide", + type: "html", + value: "<div>Admin</div>", + defaultStyle: true, + }, + { + Authentication: [ + "docs/authentication/README", + "docs/authentication/concepts", + "docs/authentication/changing-default-credentials", + "docs/authentication/guides/add-users", + { + "Frontend Authentication": [ + "docs/authentication/guides/jaas", + "docs/authentication/guides/sso/configure-oidc-react", + "docs/authentication/guides/sso/configure-oidc-behind-proxy", ], }, + "docs/authentication/introducing-metadata-service-authentication", + "docs/authentication/personal-access-tokens", ], }, { - Developers: [ - // The purpose of this section is to provide developers & technical users with - // concrete tutorials for how to work with the DataHub CLI & APIs - { - Architecture: [ - "docs/architecture/architecture", - "docs/components", - "docs/architecture/metadata-ingestion", - "docs/architecture/metadata-serving", - "docs/architecture/docker-containers", - ], - }, + Authorization: [ + "docs/authorization/README", + "docs/authorization/roles", + "docs/authorization/policies", + "docs/authorization/groups", + ], + }, + { + "Advanced Guides": [ + "docs/how/delete-metadata", + "docs/how/configuring-authorization-with-apache-ranger", + "docs/how/backup-datahub", + "docs/how/restore-indices", + "docs/advanced/db-retention", + "docs/advanced/monitoring", + "docs/deploy/telemetry", + "docs/how/kafka-config", + "docs/advanced/no-code-upgrade", + "docs/how/jattach-guide", + ], + }, + { + type: "html", + value: "<div>Developers</div>", + defaultStyle: true, + }, + { + Architecture: [ + "docs/architecture/architecture", + "docs/components", + "docs/architecture/metadata-ingestion", + "docs/architecture/metadata-serving", + "docs/architecture/docker-containers", + ], + }, + { + "Metadata Model": [ + "docs/modeling/metadata-model", + "docs/modeling/extending-the-metadata-model", + "docs/what/mxe", { - "Metadata Model": [ - "docs/modeling/metadata-model", - "docs/modeling/extending-the-metadata-model", - "docs/what/mxe", + Entities: [ { - Entities: [ - { - type: "autogenerated", - dirName: "docs/generated/metamodel/entities", // '.' means the current docs folder - }, - ], + type: "autogenerated", + dirName: "docs/generated/metamodel/entities", // '.' 
means the current docs folder }, ], }, - { - "Developing on DataHub": [ - "docs/developers", - "docs/docker/development", - "metadata-ingestion/developing", - "docs/api/graphql/graphql-endpoint-development", - { - Modules: [ - "datahub-web-react/README", - "datahub-frontend/README", - "datahub-graphql-core/README", - "metadata-service/README", - "metadata-jobs/mae-consumer-job/README", - "metadata-jobs/mce-consumer-job/README", - ], - }, + ], + }, + { + "Developing on DataHub": [ + "docs/developers", + "docs/docker/development", + "metadata-ingestion/developing", + "docs/api/graphql/graphql-endpoint-development", + { + Modules: [ + "datahub-web-react/README", + "datahub-frontend/README", + "datahub-graphql-core/README", + "metadata-service/README", + "metadata-jobs/mae-consumer-job/README", + "metadata-jobs/mce-consumer-job/README", ], }, - "docs/plugins", { Troubleshooting: [ "docs/troubleshooting/quickstart", @@ -268,24 +454,30 @@ module.exports = { "docs/troubleshooting/general", ], }, - { - Advanced: [ - "metadata-ingestion/docs/dev_guides/reporting_telemetry", - "docs/advanced/mcp-mcl", - "docker/datahub-upgrade/README", - "docs/advanced/no-code-modeling", - "datahub-web-react/src/app/analytics/README", - "docs/how/migrating-graph-service-implementation", - "docs/advanced/field-path-spec-v2", - "metadata-ingestion/adding-source", - "docs/how/add-custom-ingestion-source", - "docs/how/add-custom-data-platform", - "docs/advanced/browse-paths-upgrade", - "docs/browseV2/browse-paths-v2", - ], - }, ], }, + { + "Advanced Guides": [ + "metadata-ingestion/docs/dev_guides/reporting_telemetry", + "docs/advanced/mcp-mcl", + "docker/datahub-upgrade/README", + "docs/advanced/no-code-modeling", + "datahub-web-react/src/app/analytics/README", + "docs/how/migrating-graph-service-implementation", + "docs/advanced/field-path-spec-v2", + "metadata-ingestion/adding-source", + "docs/how/add-custom-ingestion-source", + "docs/how/add-custom-data-platform", + "docs/advanced/browse-paths-upgrade", + "docs/browseV2/browse-paths-v2", + "docs/plugins", + ], + }, + { + type: "html", + value: "<div>API & SDKs</div>", + defaultStyle: true, + }, { type: "category", label: "API", @@ -408,6 +600,13 @@ module.exports = { }, ], }, + ], + }, + { + type: "category", + label: "SDK", + link: { type: "doc", id: "docs/api/datahub-apis" }, + items: [ { "Python SDK": [ "metadata-ingestion/as-a-library", @@ -421,237 +620,81 @@ module.exports = { }, ], }, - "metadata-integration/java/as-a-library", - { - "API and SDK Guides": [ - "docs/advanced/patch", - "docs/api/tutorials/datasets", - "docs/api/tutorials/lineage", - "docs/api/tutorials/tags", - "docs/api/tutorials/terms", - "docs/api/tutorials/owners", - "docs/api/tutorials/domains", - "docs/api/tutorials/deprecation", - "docs/api/tutorials/descriptions", - "docs/api/tutorials/custom-properties", - "docs/api/tutorials/ml", - ], - }, - { - type: "category", - label: "DataHub CLI", - link: { type: "doc", id: "docs/cli" }, - items: ["docs/datahub_lite"], - }, { - type: "category", - label: "Datahub Actions", - link: { type: "doc", id: "docs/act-on-metadata" }, - items: [ - "docs/actions/README", - "docs/actions/quickstart", - "docs/actions/concepts", - { - Sources: [ - { - type: "autogenerated", - dirName: "docs/actions/sources", - }, - ], - }, - { - Events: [ - { - type: "autogenerated", - dirName: "docs/actions/events", - }, - ], - }, - { - Actions: [ - { - type: "autogenerated", - dirName: "docs/actions/actions", - }, - ], - }, - { - Guides: [ - { - type: "autogenerated", - 
dirName: "docs/actions/guides", - }, - ], - }, - ], + type: "doc", + label: "Java SDK", + id: "metadata-integration/java/as-a-library", }, ], }, { type: "category", - label: "Features", - link: { - type: "generated-index", - title: "Feature Guides", - description: "Learn about the features of DataHub.", - }, - items: [ - "docs/ui-ingestion", - "docs/how/search", - "docs/schema-history", - // "docs/how/ui-tabs-guide", - "docs/domains", - "docs/dataproducts", - "docs/glossary/business-glossary", - "docs/tags", - "docs/ownership/ownership-types", - "docs/authorization/access-policies-guide", - "docs/features/dataset-usage-and-query-history", - "docs/posts", - "docs/sync-status", - "docs/generated/lineage/lineage-feature-guide", - { - type: "doc", - id: "docs/tests/metadata-tests", - className: "saasOnly", - }, - "docs/act-on-metadata/impact-analysis", - { - label: "Observability", - type: "category", - items: [ - { - type: "doc", - id: "docs/managed-datahub/observe/freshness-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/volume-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/custom-sql-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/column-assertions", - className: "saasOnly", - }, - ], - }, - { - Guides: ["docs/features/feature-guides/ui-lineage"], - }, - ], + label: "DataHub CLI", + link: { type: "doc", id: "docs/cli" }, + items: ["docs/datahub_lite"], }, { - label: "Managed DataHub", type: "category", - collapsed: true, - link: { - type: "doc", - id: "docs/managed-datahub/managed-datahub-overview", - }, + label: "Datahub Actions", + link: { type: "doc", id: "docs/act-on-metadata" }, items: [ - "docs/managed-datahub/welcome-acryl", - { - type: "doc", - id: "docs/managed-datahub/saas-slack-setup", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/approval-workflows", - className: "saasOnly", - }, + "docs/actions/README", + "docs/actions/quickstart", + "docs/actions/concepts", { - "Metadata Ingestion With Acryl": [ - "docs/managed-datahub/metadata-ingestion-with-acryl/ingestion", - ], - }, - { - "DataHub API": [ - { - type: "doc", - id: "docs/managed-datahub/datahub-api/entity-events-api", - className: "saasOnly", - }, + Sources: [ { - "GraphQL API": [ - "docs/managed-datahub/datahub-api/graphql-api/getting-started", - { - type: "doc", - id: "docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta", - className: "saasOnly", - }, - ], + type: "autogenerated", + dirName: "docs/actions/sources", }, ], }, { - Integrations: [ + Events: [ { - type: "doc", - id: "docs/managed-datahub/integrations/aws-privatelink", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/integrations/oidc-sso-integration", - className: "saasOnly", + type: "autogenerated", + dirName: "docs/actions/events", }, ], }, { - "Operator Guide": [ - { - type: "doc", - id: "docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws", - className: "saasOnly", - }, + Actions: [ { - type: "doc", - id: "docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge", - className: "saasOnly", + type: "autogenerated", + dirName: "docs/actions/actions", }, ], }, { - type: "doc", - id: "docs/managed-datahub/chrome-extension", - }, - { - type: "doc", - id: "docs/managed-datahub/subscription-and-notification", - className: "saasOnly", - }, - { - "Managed DataHub Release History": [ - 
"docs/managed-datahub/release-notes/v_0_2_13", - "docs/managed-datahub/release-notes/v_0_2_12", - "docs/managed-datahub/release-notes/v_0_2_11", - "docs/managed-datahub/release-notes/v_0_2_10", - "docs/managed-datahub/release-notes/v_0_2_9", - "docs/managed-datahub/release-notes/v_0_2_8", - "docs/managed-datahub/release-notes/v_0_2_7", - "docs/managed-datahub/release-notes/v_0_2_6", - "docs/managed-datahub/release-notes/v_0_2_5", - "docs/managed-datahub/release-notes/v_0_2_4", - "docs/managed-datahub/release-notes/v_0_2_3", - "docs/managed-datahub/release-notes/v_0_2_2", - "docs/managed-datahub/release-notes/v_0_2_1", - "docs/managed-datahub/release-notes/v_0_2_0", - "docs/managed-datahub/release-notes/v_0_1_73", - "docs/managed-datahub/release-notes/v_0_1_72", - "docs/managed-datahub/release-notes/v_0_1_70", - "docs/managed-datahub/release-notes/v_0_1_69", + Guides: [ + { + type: "autogenerated", + dirName: "docs/actions/guides", + }, ], }, ], }, + { + "API & SDK Guides": [ + "docs/advanced/patch", + "docs/api/tutorials/datasets", + "docs/api/tutorials/lineage", + "docs/api/tutorials/tags", + "docs/api/tutorials/terms", + "docs/api/tutorials/owners", + "docs/api/tutorials/domains", + "docs/api/tutorials/deprecation", + "docs/api/tutorials/descriptions", + "docs/api/tutorials/custom-properties", + "docs/api/tutorials/ml", + ], + }, + { + type: "html", + value: "<div>Community</div>", + defaultStyle: true, + }, { label: "Community", type: "category", diff --git a/docs-website/src/styles/global.scss b/docs-website/src/styles/global.scss index 16e3893ed08b7..1682b322d7cd5 100644 --- a/docs-website/src/styles/global.scss +++ b/docs-website/src/styles/global.scss @@ -47,7 +47,7 @@ --ifm-card-border-radius: calc(var(--ifm-global-radius) * 1.5); /* Menu */ - --ifm-menu-link-padding-vertical: 0.6rem; + --ifm-menu-link-padding-vertical: 0.3rem; --ifm-menu-link-padding-horizontal: 1rem; --ifm-menu-link-sublist-icon: url('data:image/svg+xml;utf8,<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M6.47 9.47L8 7.94333L9.53 9.47L10 9L8 7L6 9L6.47 9.47Z" fill="black" fill-opacity="0.5"/></svg>'); --ifm-menu-color-background-hover: var(--ifm-color-primary-opaque); @@ -286,22 +286,29 @@ div[class^="announcementBar"] { } } - .theme-doc-sidebar-item-category-level-1 .menu__link { + .menuHtmlItem_node_modules-\@docusaurus-theme-classic-lib-theme-DocSidebarItem-Html-styles-module { + font-weight: 600; + } + + .menu__link { font-weight: 400; + padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 0.2rem) calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 1rem); } - .theme-doc-sidebar-item-category-level-1 .menu__link--active { - font-weight: 600; + .menu__link--active { + font-weight: 400; + padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 0.2rem) calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 1rem); } .theme-doc-sidebar-item-category-level-1 > div > a:first-child { - color: var(--ifm-navbar-link-color); - font-weight: 600; - padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) var(--ifm-menu-link-padding-horizontal); + font-weight: 400; + color: var(--ifm-menu-color); + padding: calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) calc(var(--ifm-menu-link-padding-horizontal) + 0.2rem) calc(var(--ifm-menu-link-padding-vertical) + 0.2rem) 
calc(var(--ifm-menu-link-padding-horizontal) + 1rem); } + .theme-doc-sidebar-item-category-level-1 > div > a.menu__link--active { - color: var(--ifm-navbar-link-color); - font-weight: 600; + color: var(--ifm-menu-color); + font-weight: 400; } } From 1b48877abe2a368659be7005a17529e9b7a3ed9f Mon Sep 17 00:00:00 2001 From: Matthias De Geyter <matthias.degeyter@gmail.com> Date: Thu, 7 Dec 2023 14:54:02 +0100 Subject: [PATCH 059/263] fix(ingest/json-schema): take into account environment (#9385) Co-authored-by: Tamas Nemeth <treff7es@gmail.com> --- .../src/datahub/ingestion/source/schema/json_schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py index 2ac946b23deb0..f6e944f4fc3cb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py @@ -271,6 +271,7 @@ def _load_one_file( platform=self.config.platform, name=dataset_name, platform_instance=self.config.platform_instance, + env=self.config.env, ) yield MetadataChangeProposalWrapper( entityUrn=dataset_urn, aspect=meta From 4c348a8eea53194eb37c7dfb8d10820a83791030 Mon Sep 17 00:00:00 2001 From: haeniya <yanik.haeni@gmail.com> Date: Thu, 7 Dec 2023 15:39:43 +0100 Subject: [PATCH 060/263] feat(datahub-frontend): make Java memory options configurable via ENV variable (#9215) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Yanik Häni <Yanik.Haeni1@swisscom.com> --- docker/datahub-frontend/start.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docker/datahub-frontend/start.sh b/docker/datahub-frontend/start.sh index 9dc1514144bb1..12e6b8915096d 100755 --- a/docker/datahub-frontend/start.sh +++ b/docker/datahub-frontend/start.sh @@ -43,8 +43,7 @@ fi # make sure there is no whitespace at the beginning and the end of # this string -export JAVA_OPTS="-Xms512m \ - -Xmx1024m \ +export JAVA_OPTS="${JAVA_MEMORY_OPTS:-"-Xms512m -Xmx1024m"} \ -Dhttp.port=$SERVER_PORT \ -Dconfig.file=datahub-frontend/conf/application.conf \ -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf \ From a6726c12ddc749345df81d5783927eeb7b043b9c Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz <andrew.sikowitz@acryl.io> Date: Thu, 7 Dec 2023 11:03:37 -0500 Subject: [PATCH 061/263] docs(ingest/sql-queries): Add documentation (#9406) --- .../docs/sources/sql-queries/sql-queries.md | 8 ++++++++ .../sources/sql-queries/sql-queries_recipe.yml | 9 +++++++++ .../datahub/ingestion/source/sql_queries.py | 18 ++++++++++++++++-- 3 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 metadata-ingestion/docs/sources/sql-queries/sql-queries.md create mode 100644 metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml diff --git a/metadata-ingestion/docs/sources/sql-queries/sql-queries.md b/metadata-ingestion/docs/sources/sql-queries/sql-queries.md new file mode 100644 index 0000000000000..e829b4366bb84 --- /dev/null +++ b/metadata-ingestion/docs/sources/sql-queries/sql-queries.md @@ -0,0 +1,8 @@ +### Example Queries File + +```json +{"query": "SELECT x FROM my_table", "timestamp": 1689232738.051, "user": "user_a", "downstream_tables": [], "upstream_tables": ["my_database.my_schema.my_table"]} +{"query": "INSERT INTO my_table VALUES (1, 'a')", "timestamp": 1689232737.669, "user": "user_b", "downstream_tables": 
["my_database.my_schema.my_table"], "upstream_tables": []} +``` + +Note that this is not a valid standard JSON file, but rather a file containing one JSON object per line. diff --git a/metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml b/metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml new file mode 100644 index 0000000000000..58af21e8a5ba4 --- /dev/null +++ b/metadata-ingestion/docs/sources/sql-queries/sql-queries_recipe.yml @@ -0,0 +1,9 @@ +datahub_api: # Only necessary if using a non-DataHub sink, e.g. the file sink + server: http://localhost:8080 + timeout_sec: 60 +source: + type: sql-queries + config: + platform: "snowflake" + default_db: "SNOWFLAKE" + query_file: "./queries.json" diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py index fcf97e461967c..58e9682df935e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py @@ -88,11 +88,25 @@ def compute_stats(self) -> None: @platform_name("SQL Queries") @config_class(SqlQueriesSourceConfig) -@support_status(SupportStatus.TESTING) +@support_status(SupportStatus.INCUBATING) @capability(SourceCapability.LINEAGE_COARSE, "Parsed from SQL queries") @capability(SourceCapability.LINEAGE_FINE, "Parsed from SQL queries") class SqlQueriesSource(Source): - # TODO: Documentation + """ + This source reads a specifically-formatted JSON file containing SQL queries and parses them to generate lineage. + + This file should contain one JSON object per line, with the following fields: + - query: string - The SQL query to parse. + - timestamp (optional): number - The timestamp of the query, in seconds since the epoch. + - user (optional): string - The user who ran the query. + This user value will be directly converted into a DataHub user urn. + - operation_type (optional): string - Platform-specific operation type, used if the operation type can't be parsed. + - downstream_tables (optional): string[] - Fallback list of tables that the query writes to, + used if the query can't be parsed. + - upstream_tables (optional): string[] - Fallback list of tables the query reads from, + used if the query can't be parsed. 
+ """ + urns: Optional[Set[str]] schema_resolver: SchemaResolver builder: SqlParsingBuilder From 923e76d20b1ecd52e8f813ec645e41e0be692e3d Mon Sep 17 00:00:00 2001 From: Hyejin Yoon <0327jane@gmail.com> Date: Fri, 8 Dec 2023 02:18:35 +0900 Subject: [PATCH 062/263] docs: fix duplicated overview link for api section (#9402) --- docs-website/sidebars.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 67943ba8d7016..5d7c6b06adad4 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -478,10 +478,14 @@ module.exports = { value: "<div>API & SDKs</div>", defaultStyle: true, }, + { + type: "doc", + id: "docs/api/datahub-apis", + label: "Overview", + }, { type: "category", label: "API", - link: { type: "doc", id: "docs/api/datahub-apis" }, items: [ { "GraphQL API": [ @@ -605,7 +609,6 @@ module.exports = { { type: "category", label: "SDK", - link: { type: "doc", id: "docs/api/datahub-apis" }, items: [ { "Python SDK": [ From 3096aa6ffa8148b9fdc4047f5916e75e22a83ee5 Mon Sep 17 00:00:00 2001 From: Olga Dimova <38855943+olgadimova@users.noreply.github.com> Date: Thu, 7 Dec 2023 20:44:24 +0300 Subject: [PATCH 063/263] =?UTF-8?q?feat(glossary):=20add=20toggle=20sideba?= =?UTF-8?q?r=20button=20and=20functionality=20to=20Busine=E2=80=A6=20(#922?= =?UTF-8?q?2)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Chris Collins <chriscollins3456@gmail.com> --- .../entity/shared/GlossaryEntityContext.tsx | 25 ++++++++++++++++--- .../src/app/glossary/BusinessGlossaryPage.tsx | 16 +++++++++++- .../src/app/glossary/GlossaryRoutes.tsx | 11 +++++++- .../src/app/glossary/GlossarySidebar.tsx | 23 +++++++++++++---- .../src/app/glossary/useToggleSidebar.tsx | 17 +++++++++++++ .../src/app/shared/sidebar/components.tsx | 1 + 6 files changed, 83 insertions(+), 10 deletions(-) create mode 100644 datahub-web-react/src/app/glossary/useToggleSidebar.tsx diff --git a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx index f00f16647c94b..79ec142fd801d 100644 --- a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx +++ b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx @@ -10,6 +10,8 @@ export interface GlossaryEntityContextType { // This will happen when you edit a name, move a term/group, create a new term/group, and delete a term/group urnsToUpdate: string[]; setUrnsToUpdate: (updatdUrns: string[]) => void; + isSidebarOpen: boolean; + setIsSidebarOpen: (isOpen: boolean) => void; } export const GlossaryEntityContext = React.createContext<GlossaryEntityContextType>({ @@ -18,10 +20,27 @@ export const GlossaryEntityContext = React.createContext<GlossaryEntityContextTy setEntityData: () => {}, urnsToUpdate: [], setUrnsToUpdate: () => {}, + isSidebarOpen: true, + setIsSidebarOpen: () => {}, }); export const useGlossaryEntityData = () => { - const { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate } = - useContext(GlossaryEntityContext); - return { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate }; + const { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + } = useContext(GlossaryEntityContext); + return { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + 
}; }; diff --git a/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx b/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx index a5262265fd23d..4e424b776a8ce 100644 --- a/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx +++ b/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx @@ -20,6 +20,8 @@ import { import { OnboardingTour } from '../onboarding/OnboardingTour'; import { useGlossaryEntityData } from '../entity/shared/GlossaryEntityContext'; import { useUserContext } from '../context/useUserContext'; +import useToggleSidebar from './useToggleSidebar'; +import ToggleSidebarButton from '../search/ToggleSidebarButton'; export const HeaderWrapper = styled(TabToolbar)` padding: 15px 45px 10px 24px; @@ -38,6 +40,12 @@ const MainContentWrapper = styled.div` flex-direction: column; `; +const TitleContainer = styled.div` + display: flex; + align-items: center; + gap: 12px; +`; + export const MAX_BROWSER_WIDTH = 500; export const MIN_BROWSWER_WIDTH = 200; @@ -56,6 +64,7 @@ function BusinessGlossaryPage() { } = useGetRootGlossaryNodesQuery(); const entityRegistry = useEntityRegistry(); const { setEntityData } = useGlossaryEntityData(); + const { isOpen: isSidebarOpen, toggleSidebar } = useToggleSidebar(); useEffect(() => { setEntityData(null); @@ -94,7 +103,12 @@ function BusinessGlossaryPage() { )} <MainContentWrapper data-testid="glossary-entities-list"> <HeaderWrapper> - <Typography.Title level={3}>Business Glossary</Typography.Title> + <TitleContainer> + <ToggleSidebarButton isOpen={isSidebarOpen} onClick={toggleSidebar} /> + <Typography.Title style={{ margin: '0' }} level={3}> + Business Glossary + </Typography.Title> + </TitleContainer> <div> <Button data-testid="add-term-button" diff --git a/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx b/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx index abba77d1a302d..0062cefee067f 100644 --- a/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx +++ b/datahub-web-react/src/app/glossary/GlossaryRoutes.tsx @@ -20,12 +20,21 @@ export default function GlossaryRoutes() { const entityRegistry = useEntityRegistry(); const [entityData, setEntityData] = useState<GenericEntityProperties | null>(null); const [urnsToUpdate, setUrnsToUpdate] = useState<string[]>([]); + const [isSidebarOpen, setIsSidebarOpen] = useState<boolean>(true); const isAtRootGlossary = window.location.pathname === PageRoutes.GLOSSARY; return ( <GlossaryEntityContext.Provider - value={{ isInGlossaryContext: true, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate }} + value={{ + isInGlossaryContext: true, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + }} > {!isAtRootGlossary && <GlossaryEntitiesPath />} <ContentWrapper> diff --git a/datahub-web-react/src/app/glossary/GlossarySidebar.tsx b/datahub-web-react/src/app/glossary/GlossarySidebar.tsx index 4126c8f2bb53f..4fa99da70eaa6 100644 --- a/datahub-web-react/src/app/glossary/GlossarySidebar.tsx +++ b/datahub-web-react/src/app/glossary/GlossarySidebar.tsx @@ -1,14 +1,25 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import GlossarySearch from './GlossarySearch'; import GlossaryBrowser from './GlossaryBrowser/GlossaryBrowser'; import { ProfileSidebarResizer } from '../entity/shared/containers/profile/sidebar/ProfileSidebarResizer'; import { SidebarWrapper } from '../shared/sidebar/components'; +import { useGlossaryEntityData } from '../entity/shared/GlossaryEntityContext'; export 
const MAX_BROWSER_WIDTH = 500; export const MIN_BROWSWER_WIDTH = 200; export default function GlossarySidebar() { - const [browserWidth, setBrowserWith] = useState(window.innerWidth * 0.2); + const [browserWidth, setBrowserWidth] = useState(window.innerWidth * 0.2); + const [previousBrowserWidth, setPreviousBrowserWidth] = useState(window.innerWidth * 0.2); + const { isSidebarOpen } = useGlossaryEntityData(); + + useEffect(() => { + if (isSidebarOpen) { + setBrowserWidth(previousBrowserWidth); + } else { + setBrowserWidth(0); + } + }, [isSidebarOpen, previousBrowserWidth]); return ( <> @@ -17,9 +28,11 @@ export default function GlossarySidebar() { <GlossaryBrowser openToEntity /> </SidebarWrapper> <ProfileSidebarResizer - setSidePanelWidth={(width) => - setBrowserWith(Math.min(Math.max(width, MIN_BROWSWER_WIDTH), MAX_BROWSER_WIDTH)) - } + setSidePanelWidth={(width) => { + const newWidth = Math.min(Math.max(width, MIN_BROWSWER_WIDTH), MAX_BROWSER_WIDTH); + setBrowserWidth(newWidth); + setPreviousBrowserWidth(newWidth); + }} initialSize={browserWidth} isSidebarOnLeft /> diff --git a/datahub-web-react/src/app/glossary/useToggleSidebar.tsx b/datahub-web-react/src/app/glossary/useToggleSidebar.tsx new file mode 100644 index 0000000000000..3f2e02385d84e --- /dev/null +++ b/datahub-web-react/src/app/glossary/useToggleSidebar.tsx @@ -0,0 +1,17 @@ +import { useGlossaryEntityData } from '../entity/shared/GlossaryEntityContext'; +import useToggle from '../shared/useToggle'; + +const useToggleSidebar = () => { + const { isSidebarOpen, setIsSidebarOpen } = useGlossaryEntityData(); + + const { isOpen, toggle: toggleSidebar } = useToggle({ + initialValue: isSidebarOpen ?? true, + onToggle: (isNowOpen: boolean) => { + setIsSidebarOpen(isNowOpen); + }, + }); + + return { isOpen, toggleSidebar } as const; +}; + +export default useToggleSidebar; diff --git a/datahub-web-react/src/app/shared/sidebar/components.tsx b/datahub-web-react/src/app/shared/sidebar/components.tsx index 5d123d6022790..c5e529bd3a91c 100644 --- a/datahub-web-react/src/app/shared/sidebar/components.tsx +++ b/datahub-web-react/src/app/shared/sidebar/components.tsx @@ -7,6 +7,7 @@ export const SidebarWrapper = styled.div<{ width: number }>` max-height: 100%; width: ${(props) => props.width}px; min-width: ${(props) => props.width}px; + display: ${(props) => (props.width ? 
'block' : 'none')}; `; export function RotatingTriangle({ isOpen, onClick }: { isOpen: boolean; onClick?: () => void }) { From d182667eebd0e3d95057431cf7ce6f013ce713d0 Mon Sep 17 00:00:00 2001 From: John Joyce <john@acryl.io> Date: Thu, 7 Dec 2023 10:13:09 -0800 Subject: [PATCH 064/263] refactor(ui): Refactor entity registry to be inside App Providers (#9399) Merging due to unrelated table failing --- datahub-web-react/src/App.tsx | 59 ++----------------- datahub-web-react/src/app/AppProviders.tsx | 13 ++-- .../src/app/EntityRegistryProvider.tsx | 10 ++++ datahub-web-react/src/app/ProtectedRoutes.tsx | 20 +++---- .../src/app/buildEntityRegistry.ts | 48 +++++++++++++++ .../src/app/useBuildEntityRegistry.tsx | 8 +++ 6 files changed, 87 insertions(+), 71 deletions(-) create mode 100644 datahub-web-react/src/app/EntityRegistryProvider.tsx create mode 100644 datahub-web-react/src/app/buildEntityRegistry.ts create mode 100644 datahub-web-react/src/app/useBuildEntityRegistry.tsx diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index 342a89f350429..1d9f5d2b43993 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useMemo, useState } from 'react'; +import React, { useEffect, useState } from 'react'; import Cookies from 'js-cookie'; import { message } from 'antd'; import { BrowserRouter as Router } from 'react-router-dom'; @@ -8,34 +8,11 @@ import { ThemeProvider } from 'styled-components'; import { Helmet, HelmetProvider } from 'react-helmet-async'; import './App.less'; import { Routes } from './app/Routes'; -import EntityRegistry from './app/entity/EntityRegistry'; -import { DashboardEntity } from './app/entity/dashboard/DashboardEntity'; -import { ChartEntity } from './app/entity/chart/ChartEntity'; -import { UserEntity } from './app/entity/user/User'; -import { GroupEntity } from './app/entity/group/Group'; -import { DatasetEntity } from './app/entity/dataset/DatasetEntity'; -import { DataFlowEntity } from './app/entity/dataFlow/DataFlowEntity'; -import { DataJobEntity } from './app/entity/dataJob/DataJobEntity'; -import { TagEntity } from './app/entity/tag/Tag'; -import { EntityRegistryContext } from './entityRegistryContext'; import { Theme } from './conf/theme/types'; import defaultThemeConfig from './conf/theme/theme_light.config.json'; import { PageRoutes } from './conf/Global'; import { isLoggedInVar } from './app/auth/checkAuthStatus'; import { GlobalCfg } from './conf'; -import { GlossaryTermEntity } from './app/entity/glossaryTerm/GlossaryTermEntity'; -import { MLFeatureEntity } from './app/entity/mlFeature/MLFeatureEntity'; -import { MLPrimaryKeyEntity } from './app/entity/mlPrimaryKey/MLPrimaryKeyEntity'; -import { MLFeatureTableEntity } from './app/entity/mlFeatureTable/MLFeatureTableEntity'; -import { MLModelEntity } from './app/entity/mlModel/MLModelEntity'; -import { MLModelGroupEntity } from './app/entity/mlModelGroup/MLModelGroupEntity'; -import { DomainEntity } from './app/entity/domain/DomainEntity'; -import { ContainerEntity } from './app/entity/container/ContainerEntity'; -import GlossaryNodeEntity from './app/entity/glossaryNode/GlossaryNodeEntity'; -import { DataPlatformEntity } from './app/entity/dataPlatform/DataPlatformEntity'; -import { DataProductEntity } from './app/entity/dataProduct/DataProductEntity'; -import { DataPlatformInstanceEntity } from './app/entity/dataPlatformInstance/DataPlatformInstanceEntity'; -import { RoleEntity } from './app/entity/Access/RoleEntity'; 
import possibleTypesResult from './possibleTypes.generated'; /* @@ -101,32 +78,6 @@ const App: React.VFC = () => { }); }, []); - const entityRegistry = useMemo(() => { - const register = new EntityRegistry(); - register.register(new DatasetEntity()); - register.register(new DashboardEntity()); - register.register(new ChartEntity()); - register.register(new UserEntity()); - register.register(new GroupEntity()); - register.register(new TagEntity()); - register.register(new DataFlowEntity()); - register.register(new DataJobEntity()); - register.register(new GlossaryTermEntity()); - register.register(new MLFeatureEntity()); - register.register(new MLPrimaryKeyEntity()); - register.register(new MLFeatureTableEntity()); - register.register(new MLModelEntity()); - register.register(new MLModelGroupEntity()); - register.register(new DomainEntity()); - register.register(new ContainerEntity()); - register.register(new GlossaryNodeEntity()); - register.register(new RoleEntity()); - register.register(new DataPlatformEntity()); - register.register(new DataProductEntity()); - register.register(new DataPlatformInstanceEntity()); - return register; - }, []); - return ( <HelmetProvider> <ThemeProvider theme={dynamicThemeConfig}> @@ -134,11 +85,9 @@ const App: React.VFC = () => { <Helmet> <title>{dynamicThemeConfig.content.title} - - - - - + + + diff --git a/datahub-web-react/src/app/AppProviders.tsx b/datahub-web-react/src/app/AppProviders.tsx index 1ced44048b502..81a8ddbfc9bac 100644 --- a/datahub-web-react/src/app/AppProviders.tsx +++ b/datahub-web-react/src/app/AppProviders.tsx @@ -4,6 +4,7 @@ import { EducationStepsProvider } from '../providers/EducationStepsProvider'; import UserContextProvider from './context/UserContextProvider'; import QuickFiltersProvider from '../providers/QuickFiltersProvider'; import SearchContextProvider from './search/context/SearchContextProvider'; +import EntityRegistryProvider from './EntityRegistryProvider'; interface Props { children: React.ReactNode; @@ -13,11 +14,13 @@ export default function AppProviders({ children }: Props) { return ( - - - {children} - - + + + + {children} + + + ); diff --git a/datahub-web-react/src/app/EntityRegistryProvider.tsx b/datahub-web-react/src/app/EntityRegistryProvider.tsx new file mode 100644 index 0000000000000..9e283c0d07fc8 --- /dev/null +++ b/datahub-web-react/src/app/EntityRegistryProvider.tsx @@ -0,0 +1,10 @@ +import React from 'react'; +import { EntityRegistryContext } from '../entityRegistryContext'; +import useBuildEntityRegistry from './useBuildEntityRegistry'; + +const EntityRegistryProvider = ({ children }: { children: React.ReactNode }) => { + const entityRegistry = useBuildEntityRegistry(); + return {children}; +}; + +export default EntityRegistryProvider; diff --git a/datahub-web-react/src/app/ProtectedRoutes.tsx b/datahub-web-react/src/app/ProtectedRoutes.tsx index 469e0d6030b35..a3f072e764bea 100644 --- a/datahub-web-react/src/app/ProtectedRoutes.tsx +++ b/datahub-web-react/src/app/ProtectedRoutes.tsx @@ -13,25 +13,23 @@ import EmbedLookup from './embed/lookup'; * Container for all views behind an authentication wall. 
*/ export const ProtectedRoutes = (): JSX.Element => { - const entityRegistry = useEntityRegistry(); - return ( - - - - } /> - } /> - {entityRegistry.getEntities().map((entity) => ( + + + } /> + } /> + {useEntityRegistry() + .getEntities() + .map((entity) => ( } /> ))} - } /> - - + } /> + ); diff --git a/datahub-web-react/src/app/buildEntityRegistry.ts b/datahub-web-react/src/app/buildEntityRegistry.ts new file mode 100644 index 0000000000000..4f74681570802 --- /dev/null +++ b/datahub-web-react/src/app/buildEntityRegistry.ts @@ -0,0 +1,48 @@ +import EntityRegistry from './entity/EntityRegistry'; +import { DashboardEntity } from './entity/dashboard/DashboardEntity'; +import { ChartEntity } from './entity/chart/ChartEntity'; +import { UserEntity } from './entity/user/User'; +import { GroupEntity } from './entity/group/Group'; +import { DatasetEntity } from './entity/dataset/DatasetEntity'; +import { DataFlowEntity } from './entity/dataFlow/DataFlowEntity'; +import { DataJobEntity } from './entity/dataJob/DataJobEntity'; +import { TagEntity } from './entity/tag/Tag'; +import { GlossaryTermEntity } from './entity/glossaryTerm/GlossaryTermEntity'; +import { MLFeatureEntity } from './entity/mlFeature/MLFeatureEntity'; +import { MLPrimaryKeyEntity } from './entity/mlPrimaryKey/MLPrimaryKeyEntity'; +import { MLFeatureTableEntity } from './entity/mlFeatureTable/MLFeatureTableEntity'; +import { MLModelEntity } from './entity/mlModel/MLModelEntity'; +import { MLModelGroupEntity } from './entity/mlModelGroup/MLModelGroupEntity'; +import { DomainEntity } from './entity/domain/DomainEntity'; +import { ContainerEntity } from './entity/container/ContainerEntity'; +import GlossaryNodeEntity from './entity/glossaryNode/GlossaryNodeEntity'; +import { DataPlatformEntity } from './entity/dataPlatform/DataPlatformEntity'; +import { DataProductEntity } from './entity/dataProduct/DataProductEntity'; +import { DataPlatformInstanceEntity } from './entity/dataPlatformInstance/DataPlatformInstanceEntity'; +import { RoleEntity } from './entity/Access/RoleEntity'; + +export default function buildEntityRegistry() { + const registry = new EntityRegistry(); + registry.register(new DatasetEntity()); + registry.register(new DashboardEntity()); + registry.register(new ChartEntity()); + registry.register(new UserEntity()); + registry.register(new GroupEntity()); + registry.register(new TagEntity()); + registry.register(new DataFlowEntity()); + registry.register(new DataJobEntity()); + registry.register(new GlossaryTermEntity()); + registry.register(new MLFeatureEntity()); + registry.register(new MLPrimaryKeyEntity()); + registry.register(new MLFeatureTableEntity()); + registry.register(new MLModelEntity()); + registry.register(new MLModelGroupEntity()); + registry.register(new DomainEntity()); + registry.register(new ContainerEntity()); + registry.register(new GlossaryNodeEntity()); + registry.register(new RoleEntity()); + registry.register(new DataPlatformEntity()); + registry.register(new DataProductEntity()); + registry.register(new DataPlatformInstanceEntity()); + return registry; +} \ No newline at end of file diff --git a/datahub-web-react/src/app/useBuildEntityRegistry.tsx b/datahub-web-react/src/app/useBuildEntityRegistry.tsx new file mode 100644 index 0000000000000..2beb5edae8b02 --- /dev/null +++ b/datahub-web-react/src/app/useBuildEntityRegistry.tsx @@ -0,0 +1,8 @@ +import { useMemo } from 'react'; +import buildEntityRegistry from './buildEntityRegistry'; + +export default function useBuildEntityRegistry() { + 
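+    // The empty dependency list below means the registry is built once per mount.
+    // Consumers are expected to read it back through context rather than rebuilding
+    // it, e.g. (usage mirrored from ProtectedRoutes above):
+    //   const entityRegistry = useEntityRegistry();
+    //   entityRegistry.getPathName(entity.type);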
return useMemo(() => { + return buildEntityRegistry(); + }, []); +} From 81a93dc95151a59ec6b0d8ee8e9eefd3de8b6ca3 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 7 Dec 2023 15:59:10 -0500 Subject: [PATCH 065/263] feat(ui): handle content prop changes in Editor component (#9400) --- datahub-web-react/codegen.yml | 12 +--------- .../components/editor/Editor.tsx | 24 ++++++++++++++++--- 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/datahub-web-react/codegen.yml b/datahub-web-react/codegen.yml index 35728e8aeb7d4..417d6a8f1c2a6 100644 --- a/datahub-web-react/codegen.yml +++ b/datahub-web-react/codegen.yml @@ -1,16 +1,6 @@ overwrite: true schema: - - '../datahub-graphql-core/src/main/resources/app.graphql' - - '../datahub-graphql-core/src/main/resources/entity.graphql' - - '../datahub-graphql-core/src/main/resources/search.graphql' - - '../datahub-graphql-core/src/main/resources/analytics.graphql' - - '../datahub-graphql-core/src/main/resources/recommendation.graphql' - - '../datahub-graphql-core/src/main/resources/auth.graphql' - - '../datahub-graphql-core/src/main/resources/ingestion.graphql' - - '../datahub-graphql-core/src/main/resources/timeline.graphql' - - '../datahub-graphql-core/src/main/resources/tests.graphql' - - '../datahub-graphql-core/src/main/resources/step.graphql' - - '../datahub-graphql-core/src/main/resources/lineage.graphql' + - '../datahub-graphql-core/src/main/resources/*.graphql' config: scalars: Long: number diff --git a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx index 038507c620706..bd2e410fb30d9 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/editor/Editor.tsx @@ -1,4 +1,4 @@ -import React, { forwardRef, useEffect, useImperativeHandle } from 'react'; +import React, { forwardRef, useEffect, useImperativeHandle, useState } from 'react'; import DOMPurify from 'dompurify'; import { BlockquoteExtension, @@ -79,9 +79,20 @@ export const Editor = forwardRef((props: EditorProps, ref) => { manager.view.focus(); } }); + + // We need to track the modified content that we expect to be in the editor. + // This way, if the content prop changes, we can update the editor content to match + // if needed. However, we don't want to update the editor content on normal typing + // changes because that would cause the cursor to jump around unexpectedly. + const [modifiedContent, setModifiedContent] = useState(content); useEffect(() => { - if (readOnly && content) { + if (readOnly && content !== undefined) { + manager.store.commands.setContent(content); + } else if (!readOnly && content !== undefined && modifiedContent !== content) { + // If we get a content change that doesn't match what we're tracking to be in the editor, + // then we need to update the editor content to match the new props content. 
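+            // (One assumed trigger for this branch: the parent discards unsaved edits
+            // and passes the original description back down as `content`.)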
manager.store.commands.setContent(content); + setModifiedContent(content); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [readOnly, content]); @@ -97,7 +108,14 @@ export const Editor = forwardRef((props: EditorProps, ref) => { - {onChange && } + {onChange && ( + { + setModifiedContent(md); + onChange(md); + }} + /> + )} )} From e3e9904d214c0ae206b6fe9e51cec3703018f226 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 7 Dec 2023 16:01:23 -0500 Subject: [PATCH 066/263] fix(ingest/profiling): Add back db_name to sql_generic_profiler methods (#9407) --- .../src/datahub/ingestion/source/redshift/profile.py | 1 + .../ingestion/source/snowflake/snowflake_profiler.py | 1 + .../datahub/ingestion/source/sql/sql_generic_profiler.py | 7 +++++-- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py index 6fa3504ced139..b05850cef6e94 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py @@ -60,6 +60,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, max_workers=self.config.profiling.max_workers, + db_name=db, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 67953de47e5a3..89857c4564267 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -63,6 +63,7 @@ def get_workunits( yield from self.generate_profile_workunits( profile_requests, max_workers=self.config.profiling.max_workers, + db_name=database.name, platform=self.platform, profiler_args=self.get_profile_args(), ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index e309ff0d15311..844a458d9f1ab 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -71,6 +71,7 @@ def generate_profile_workunits( requests: List[TableProfilerRequest], *, max_workers: int, + db_name: Optional[str] = None, platform: Optional[str] = None, profiler_args: Optional[Dict] = None, ) -> Iterable[MetadataWorkUnit]: @@ -98,7 +99,7 @@ def generate_profile_workunits( return # Otherwise, if column level profiling is enabled, use GE profiler. 
- ge_profiler = self.get_profiler_instance() + ge_profiler = self.get_profiler_instance(db_name) for ge_profiler_request, profile in ge_profiler.generate_profiles( ge_profile_requests, max_workers, platform, profiler_args @@ -205,7 +206,9 @@ def get_inspectors(self) -> Iterable[Inspector]: inspector = inspect(conn) yield inspector - def get_profiler_instance(self) -> "DatahubGEProfiler": + def get_profiler_instance( + self, db_name: Optional[str] = None + ) -> "DatahubGEProfiler": logger.debug(f"Getting profiler instance from {self.platform}") url = self.config.get_sql_alchemy_url() From 724736939aa33b28c561ec3814c1e1ba3ceffe3b Mon Sep 17 00:00:00 2001 From: Amanda Ng <10681923+ngamanda@users.noreply.github.com> Date: Fri, 8 Dec 2023 05:48:50 +0800 Subject: [PATCH 067/263] feat(observability): add actor urn to GraphQL spans (#9382) Co-authored-by: RyanHolstien --- metadata-service/graphql-servlet-impl/build.gradle | 2 +- .../src/main/java/com/datahub/graphql/GraphQLController.java | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/metadata-service/graphql-servlet-impl/build.gradle b/metadata-service/graphql-servlet-impl/build.gradle index 52fd20ef32389..51f67631159d3 100644 --- a/metadata-service/graphql-servlet-impl/build.gradle +++ b/metadata-service/graphql-servlet-impl/build.gradle @@ -20,7 +20,7 @@ dependencies { implementation externalDependency.charle implementation externalDependency.jetbrains - + implementation externalDependency.opentelemetryApi } configurations.all{ diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index 692208c42f90c..0cae64c507ad7 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -15,6 +15,7 @@ import com.linkedin.datahub.graphql.exception.DataHubGraphQLError; import com.linkedin.metadata.utils.metrics.MetricUtils; import graphql.ExecutionResult; +import io.opentelemetry.api.trace.Span; import java.util.Collections; import java.util.List; import java.util.Map; @@ -95,6 +96,7 @@ CompletableFuture> postGraphQL(HttpEntity httpEnt */ Authentication authentication = AuthenticationContext.getAuthentication(); SpringQueryContext context = new SpringQueryContext(true, authentication, _authorizerChain); + Span.current().setAttribute("actor.urn", context.getActorUrn()); return CompletableFuture.supplyAsync( () -> { From f03c66ca1f1d2ade0bd5d65da9c74d0f66ea1201 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 7 Dec 2023 17:18:16 -0500 Subject: [PATCH 068/263] fix(ingest/lookml): make deploy key optional (#9378) --- .../src/datahub/configuration/git.py | 23 ++- .../ingestion/source/git/git_import.py | 29 +++- .../ingestion/source/looker/lookml_source.py | 164 ++++++++---------- .../tests/integration/git/test_git_clone.py | 10 ++ .../tests/integration/lookml/test_lookml.py | 2 +- 5 files changed, 138 insertions(+), 90 deletions(-) diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index 80eb41c100b10..9ea9007553839 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -1,10 +1,12 @@ import os -from typing import Any, Dict, Optional +import pathlib +from typing import Any, Dict, Optional, Union from 
pydantic import Field, FilePath, SecretStr, validator from datahub.configuration.common import ConfigModel from datahub.configuration.validate_field_rename import pydantic_renamed_field +from datahub.ingestion.source.git.git_import import GitClone _GITHUB_PREFIX = "https://github.com/" _GITLAB_PREFIX = "https://gitlab.com/" @@ -141,3 +143,22 @@ def branch_for_clone(self) -> Optional[str]: if "branch" in self.__fields_set__: return self.branch return None + + def clone( + self, + tmp_path: Union[pathlib.Path, str], + fallback_deploy_key: Optional[SecretStr] = None, + ) -> pathlib.Path: + """Clones the repo into a temporary directory and returns the path to the checkout.""" + + assert self.repo_ssh_locator + + git_clone = GitClone(str(tmp_path)) + + checkout_dir = git_clone.clone( + ssh_key=self.deploy_key or fallback_deploy_key, + repo_url=self.repo_ssh_locator, + branch=self.branch_for_clone, + ) + + return checkout_dir diff --git a/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py b/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py index 55eeb2bc6dcab..2122374c1e404 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py +++ b/metadata-ingestion/src/datahub/ingestion/source/git/git_import.py @@ -6,6 +6,7 @@ from uuid import uuid4 import git +from git.util import remove_password_if_present from pydantic import SecretStr logger = logging.getLogger(__name__) @@ -53,7 +54,10 @@ def clone( " -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" ) logger.debug(f"ssh_command={git_ssh_cmd}") - logger.info(f"⏳ Cloning repo '{repo_url}', this can take some time...") + + logger.info( + f"⏳ Cloning repo '{self.sanitize_repo_url(repo_url)}', this can take some time..." + ) self.last_repo_cloned = git.Repo.clone_from( repo_url, checkout_dir, @@ -69,3 +73,26 @@ def clone( def get_last_repo_cloned(self) -> Optional[git.Repo]: return self.last_repo_cloned + + @staticmethod + def sanitize_repo_url(repo_url: str) -> str: + """Sanitizes the repo URL for logging purposes. + + Args: + repo_url (str): The repository URL. + + Returns: + str: The sanitized repository URL. + + Examples: + >>> GitClone.sanitize_repo_url("https://username:password@github.com/org/repo.git") + 'https://*****:*****@github.com/org/repo.git' + + >>> GitClone.sanitize_repo_url("https://github.com/org/repo.git") + 'https://github.com/org/repo.git' + + >>> GitClone.sanitize_repo_url("git@github.com:org/repo.git") + 'git@github.com:org/repo.git' + """ + + return remove_password_if_present([repo_url])[0] diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index 93c405f0a39f2..b76bef49a7e6f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -301,13 +301,13 @@ def check_base_folder_if_not_provided( ) -> Optional[pydantic.DirectoryPath]: if v is None: git_info: Optional[GitInfo] = values.get("git_info") - if git_info and git_info.deploy_key: - # We have git_info populated correctly, base folder is not needed - pass + if git_info: + if not git_info.deploy_key: + logger.warning( + "git_info is provided, but no SSH key is present. If the repo is not public, we'll fail to clone it." + ) else: - raise ValueError( - "base_folder is not provided. 
Neither has a github deploy_key or deploy_key_file been provided" - ) + raise ValueError("Neither base_folder nor git_info has been provided.") return v @@ -1831,14 +1831,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: assert self.source_config.git_info # we don't have a base_folder, so we need to clone the repo and process it locally start_time = datetime.now() - git_clone = GitClone(tmp_dir) - # Github info deploy key is always populated - assert self.source_config.git_info.deploy_key - assert self.source_config.git_info.repo_ssh_locator - checkout_dir = git_clone.clone( - ssh_key=self.source_config.git_info.deploy_key, - repo_url=self.source_config.git_info.repo_ssh_locator, - branch=self.source_config.git_info.branch_for_clone, + checkout_dir = self.source_config.git_info.clone( + tmp_path=tmp_dir, ) self.reporter.git_clone_latency = datetime.now() - start_time self.source_config.base_folder = checkout_dir.resolve() @@ -1853,29 +1847,20 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: for project, p_ref in self.source_config.project_dependencies.items(): # If we were given GitHub info, we need to clone the project. if isinstance(p_ref, GitInfo): - assert p_ref.repo_ssh_locator - - p_cloner = GitClone(f"{tmp_dir}/_included_/{project}") try: - p_checkout_dir = p_cloner.clone( - ssh_key=( - # If a deploy key was provided, use it. Otherwise, fall back - # to the main project deploy key. - p_ref.deploy_key - or ( - self.source_config.git_info.deploy_key - if self.source_config.git_info - else None - ) - ), - repo_url=p_ref.repo_ssh_locator, - branch=p_ref.branch_for_clone, + p_checkout_dir = p_ref.clone( + tmp_path=f"{tmp_dir}/_included_/{project}", + # If a deploy key was provided, use it. Otherwise, fall back + # to the main project deploy key, if present. + fallback_deploy_key=self.source_config.git_info.deploy_key + if self.source_config.git_info + else None, ) p_ref = p_checkout_dir.resolve() except Exception as e: logger.warning( - f"Failed to clone remote project {project}. This can lead to failures in parsing lookml files later on: {e}", + f"Failed to clone project dependency {project}. This can lead to failures in parsing lookml files later on: {e}", ) visited_projects.add(project) continue @@ -1910,68 +1895,73 @@ def _recursively_check_manifests( return manifest = self.get_manifest_if_present(project_path) - if manifest: - # Special case handling if the root project has a name in the manifest file. - if project_name == _BASE_PROJECT_NAME and manifest.project_name: - if ( - self.source_config.project_name is not None - and manifest.project_name != self.source_config.project_name - ): - logger.warning( - f"The project name in the manifest file '{manifest.project_name}'" - f"does not match the configured project name '{self.source_config.project_name}'. " - "This can lead to failures in LookML include resolution and lineage generation." - ) - elif self.source_config.project_name is None: - self.source_config.project_name = manifest.project_name + if not manifest: + return - # Clone the remote project dependencies. - for remote_project in manifest.remote_dependencies: - if remote_project.name in project_visited: - continue + # Special case handling if the root project has a name in the manifest file. 
+ if project_name == _BASE_PROJECT_NAME and manifest.project_name: + if ( + self.source_config.project_name is not None + and manifest.project_name != self.source_config.project_name + ): + logger.warning( + f"The project name in the manifest file '{manifest.project_name}'" + f"does not match the configured project name '{self.source_config.project_name}'. " + "This can lead to failures in LookML include resolution and lineage generation." + ) + elif self.source_config.project_name is None: + self.source_config.project_name = manifest.project_name - p_cloner = GitClone(f"{tmp_dir}/_remote_/{project_name}") - try: - # TODO: For 100% correctness, we should be consulting - # the manifest lock file for the exact ref to use. + # Clone the remote project dependencies. + for remote_project in manifest.remote_dependencies: + if remote_project.name in project_visited: + continue + if remote_project.name in self.base_projects_folder: + # In case a remote_dependency is specified in the project_dependencies config, + # we don't need to clone it again. + continue - p_checkout_dir = p_cloner.clone( - ssh_key=( - self.source_config.git_info.deploy_key - if self.source_config.git_info - else None - ), - repo_url=remote_project.url, - ) + p_cloner = GitClone(f"{tmp_dir}/_remote_/{remote_project.name}") + try: + # TODO: For 100% correctness, we should be consulting + # the manifest lock file for the exact ref to use. + + p_checkout_dir = p_cloner.clone( + ssh_key=( + self.source_config.git_info.deploy_key + if self.source_config.git_info + else None + ), + repo_url=remote_project.url, + ) - self.base_projects_folder[ - remote_project.name - ] = p_checkout_dir.resolve() - repo = p_cloner.get_last_repo_cloned() - assert repo - remote_git_info = GitInfo( - url_template=remote_project.url, - repo="dummy/dummy", # set to dummy values to bypass validation - branch=repo.active_branch.name, - ) - remote_git_info.repo = ( - "" # set to empty because url already contains the full path - ) - self.remote_projects_git_info[remote_project.name] = remote_git_info + self.base_projects_folder[ + remote_project.name + ] = p_checkout_dir.resolve() + repo = p_cloner.get_last_repo_cloned() + assert repo + remote_git_info = GitInfo( + url_template=remote_project.url, + repo="dummy/dummy", # set to dummy values to bypass validation + branch=repo.active_branch.name, + ) + remote_git_info.repo = ( + "" # set to empty because url already contains the full path + ) + self.remote_projects_git_info[remote_project.name] = remote_git_info - except Exception as e: - logger.warning( - f"Failed to clone remote project {project_name}. This can lead to failures in parsing lookml files later on", - e, - ) - project_visited.add(project_name) - else: - self._recursively_check_manifests( - tmp_dir, remote_project.name, project_visited - ) + except Exception as e: + logger.warning( + f"Failed to clone remote project {project_name}. 
This can lead to failures in parsing lookml files later on: {e}", + ) + project_visited.add(project_name) + else: + self._recursively_check_manifests( + tmp_dir, remote_project.name, project_visited + ) - for project in manifest.local_dependencies: - self._recursively_check_manifests(tmp_dir, project, project_visited) + for project in manifest.local_dependencies: + self._recursively_check_manifests(tmp_dir, project, project_visited) def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 assert self.source_config.base_folder diff --git a/metadata-ingestion/tests/integration/git/test_git_clone.py b/metadata-ingestion/tests/integration/git/test_git_clone.py index 3436c692f5d95..2428a6dfb1c9e 100644 --- a/metadata-ingestion/tests/integration/git/test_git_clone.py +++ b/metadata-ingestion/tests/integration/git/test_git_clone.py @@ -1,3 +1,4 @@ +import doctest import os import pytest @@ -81,6 +82,15 @@ def test_github_branch(): assert config.branch_for_clone == "main" +def test_sanitize_repo_url(): + import datahub.ingestion.source.git.git_import + + assert doctest.testmod(datahub.ingestion.source.git.git_import) == ( + 0, + 3, + ) # 0 failures, 3 tests + + def test_git_clone_public(tmp_path): git_clone = GitClone(str(tmp_path)) checkout_dir = git_clone.clone( diff --git a/metadata-ingestion/tests/integration/lookml/test_lookml.py b/metadata-ingestion/tests/integration/lookml/test_lookml.py index a71b597863148..1ed0d05c84263 100644 --- a/metadata-ingestion/tests/integration/lookml/test_lookml.py +++ b/metadata-ingestion/tests/integration/lookml/test_lookml.py @@ -799,7 +799,7 @@ def test_lookml_base_folder(): ) with pytest.raises( - pydantic.ValidationError, match=r"base_folder.+not provided.+deploy_key" + pydantic.ValidationError, match=r"base_folder.+nor.+git_info.+provided" ): LookMLSourceConfig.parse_obj({"api": fake_api}) From 0e40d38f4c24aac34b5a54127077f5021a347b91 Mon Sep 17 00:00:00 2001 From: Teppo Naakka Date: Fri, 8 Dec 2023 14:02:03 +0200 Subject: [PATCH 069/263] fix(ingest/powerbi): fix powerbi chart input handling (#9415) --- .../src/datahub/ingestion/source/powerbi/powerbi.py | 8 ++++++-- .../integration/powerbi/golden_test_container.json | 12 ------------ 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py index dc4394efcf245..4b1d0403ac776 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py @@ -504,7 +504,9 @@ def to_datahub_chart_mcp( logger.info(f"{Constant.CHART_URN}={chart_urn}") - ds_input: List[str] = self.to_urn_set(ds_mcps) + ds_input: List[str] = self.to_urn_set( + [x for x in ds_mcps if x.entityType == Constant.DATASET] + ) def tile_custom_properties(tile: powerbi_data_classes.Tile) -> dict: custom_properties: dict = { @@ -927,7 +929,9 @@ def to_chart_mcps( logger.debug(f"{Constant.CHART_URN}={chart_urn}") - ds_input: List[str] = self.to_urn_set(ds_mcps) + ds_input: List[str] = self.to_urn_set( + [x for x in ds_mcps if x.entityType == Constant.DATASET] + ) # Create chartInfo mcp # Set chartUrl only if tile is created from Report diff --git a/metadata-ingestion/tests/integration/powerbi/golden_test_container.json b/metadata-ingestion/tests/integration/powerbi/golden_test_container.json index 91b5499eaadcb..7a9ce135b4e24 100644 --- a/metadata-ingestion/tests/integration/powerbi/golden_test_container.json 
+++ b/metadata-ingestion/tests/integration/powerbi/golden_test_container.json @@ -1400,9 +1400,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, @@ -1546,9 +1543,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)" }, @@ -2387,9 +2381,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, @@ -2514,9 +2505,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:6ac0662f0f2fc3a9196ac505da2182b2" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,library-dataset.public_issue_history,DEV)" }, From d52f0305eb86bc2902c59e5bea19aa1d7cf883d1 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 8 Dec 2023 13:13:49 -0500 Subject: [PATCH 070/263] fix(ingest): fix metadata for custom python packages (#9391) --- docs/modeling/extending-the-metadata-model.md | 17 ++++++--- metadata-ingestion/scripts/avro_codegen.py | 29 +++------------ .../scripts/custom_package_codegen.py | 7 ++++ .../src/datahub/_codegen/__init__.py | 0 .../src/datahub/_codegen/aspect.py | 36 +++++++++++++++++++ 5 files changed, 60 insertions(+), 29 deletions(-) create mode 100644 metadata-ingestion/src/datahub/_codegen/__init__.py create mode 100644 metadata-ingestion/src/datahub/_codegen/aspect.py diff --git a/docs/modeling/extending-the-metadata-model.md b/docs/modeling/extending-the-metadata-model.md index ba101be16b98e..293688a8b89e5 100644 --- a/docs/modeling/extending-the-metadata-model.md +++ b/docs/modeling/extending-the-metadata-model.md @@ -256,7 +256,7 @@ to deploy during development. This will allow Datahub to read and write your new import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - + If you're purely using the custom models locally, you can use a local development-mode install of the DataHub CLI. @@ -273,12 +273,21 @@ If you want to use your custom models beyond your local machine without forking This package should be installed alongside the base `acryl-datahub` package, and its metadata models will take precedence over the default ones. ```bash -cd metadata-ingestion -../gradlew customPackageGenerate -Ppackage_name=my-company-datahub-models -Ppackage_version="0.0.1" +$ cd metadata-ingestion +$ ../gradlew customPackageGenerate -Ppackage_name=my-company-datahub-models -Ppackage_version="0.0.1" + +Successfully built my-company-datahub-models-0.0.1.tar.gz and acryl_datahub_cloud-0.0.1-py3-none-any.whl + +Generated package at custom-package/my-company-datahub-models +This package should be installed alongside the main acryl-datahub package. + +Install the custom package locally with `pip install custom-package/my-company-datahub-models` +To enable others to use it, share the file at custom-package/my-company-datahub-models/dist/*.whl and have them install it with `pip install .whl` +Alternatively, publish it to PyPI with `twine upload custom-package/my-company-datahub-models/dist/*` ``` This will generate some Python build artifacts, which you can distribute within your team or publish to PyPI. -The command output will contain additional details and exact CLI commands you can use. +The command output contains additional details and exact CLI commands you can use. 
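+
+As a quick sanity check after installing, the generated classes should be importable
+from the new package. A sketch (module and class names here follow the
+`my-company-datahub-models` example above; yours will differ):
+
+```python
+# Hypothetical smoke test for a freshly installed custom model package.
+from my_company_datahub_models.models.schema_classes import DatasetPropertiesClass
+
+print(DatasetPropertiesClass.get_aspect_name())  # expect: datasetProperties
+```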
diff --git a/metadata-ingestion/scripts/avro_codegen.py b/metadata-ingestion/scripts/avro_codegen.py index c6f6bac128b79..bd4988f990534 100644 --- a/metadata-ingestion/scripts/avro_codegen.py +++ b/metadata-ingestion/scripts/avro_codegen.py @@ -252,34 +252,12 @@ def annotate_aspects(aspects: List[dict], schema_class_file: Path) -> None: schema_classes_lines = schema_class_file.read_text().splitlines() line_lookup_table = {line: i for i, line in enumerate(schema_classes_lines)} - # Create the Aspect class. - # We ensure that it cannot be instantiated directly, as - # per https://stackoverflow.com/a/7989101/5004662. + # Import the _Aspect class. schema_classes_lines[ line_lookup_table["__SCHEMAS: Dict[str, RecordSchema] = {}"] ] += """ -class _Aspect(DictWrapper): - ASPECT_NAME: ClassVar[str] = None # type: ignore - ASPECT_TYPE: ClassVar[str] = "default" - ASPECT_INFO: ClassVar[dict] = None # type: ignore - - def __init__(self): - if type(self) is _Aspect: - raise TypeError("_Aspect is an abstract class, and cannot be instantiated directly.") - super().__init__() - - @classmethod - def get_aspect_name(cls) -> str: - return cls.ASPECT_NAME # type: ignore - - @classmethod - def get_aspect_type(cls) -> str: - return cls.ASPECT_TYPE - - @classmethod - def get_aspect_info(cls) -> dict: - return cls.ASPECT_INFO +from datahub._codegen.aspect import _Aspect """ for aspect in aspects: @@ -776,6 +754,7 @@ def generate( import importlib from typing import TYPE_CHECKING +from datahub._codegen.aspect import _Aspect from datahub.utilities.docs_build import IS_SPHINX_BUILD from datahub.utilities._custom_package_loader import get_custom_models_package @@ -785,7 +764,7 @@ def generate( from ._schema_classes import * # Required explicitly because __all__ doesn't include _ prefixed names. - from ._schema_classes import _Aspect, __SCHEMA_TYPES + from ._schema_classes import __SCHEMA_TYPES if IS_SPHINX_BUILD: # Set __module__ to the current module so that Sphinx will document the diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index a5883c9ae9020..8582e165987ec 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -73,6 +73,8 @@ def generate( """ ) + (src_path / "py.typed").write_text("") + (package_path / "setup.py").write_text( f"""{autogen_header} from setuptools import setup @@ -87,6 +89,11 @@ def generate( "avro-gen3=={_avrogen_version}", "acryl-datahub", ], + package_data={{ + "{python_package_name}": ["py.typed"], + "{python_package_name}.models": ["schema.avsc"], + "{python_package_name}.models.schemas": ["*.avsc"], + }}, entry_points={{ "datahub.custom_packages": [ "models={python_package_name}.models.schema_classes", diff --git a/metadata-ingestion/src/datahub/_codegen/__init__.py b/metadata-ingestion/src/datahub/_codegen/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/_codegen/aspect.py b/metadata-ingestion/src/datahub/_codegen/aspect.py new file mode 100644 index 0000000000000..28fa3f1536a86 --- /dev/null +++ b/metadata-ingestion/src/datahub/_codegen/aspect.py @@ -0,0 +1,36 @@ +from typing import ClassVar + +from avrogen.dict_wrapper import DictWrapper + + +class _Aspect(DictWrapper): + """Base class for all aspects types. + + All codegened types inherit from DictWrapper, either directly or indirectly. + Types that are aspects inherit directly from _Aspect. 
+ """ + + ASPECT_NAME: ClassVar[str] = None # type: ignore + ASPECT_TYPE: ClassVar[str] = "default" + ASPECT_INFO: ClassVar[dict] = None # type: ignore + + def __init__(self): + if type(self) is _Aspect: + # Ensure that it cannot be instantiated directly, as + # per https://stackoverflow.com/a/7989101/5004662. + raise TypeError( + "_Aspect is an abstract class, and cannot be instantiated directly." + ) + super().__init__() + + @classmethod + def get_aspect_name(cls) -> str: + return cls.ASPECT_NAME # type: ignore + + @classmethod + def get_aspect_type(cls) -> str: + return cls.ASPECT_TYPE + + @classmethod + def get_aspect_info(cls) -> dict: + return cls.ASPECT_INFO From 08a9b9b6de8abba52a887d6c8b3df19855cde377 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 8 Dec 2023 16:18:34 -0500 Subject: [PATCH 071/263] fix(ingest): bug fixes and docs updates (#9422) --- docs/modeling/extending-the-metadata-model.md | 2 +- metadata-ingestion/build.gradle | 5 ++++- .../scripts/custom_package_codegen.py | 4 +++- .../src/datahub/ingestion/source/dbt/dbt_common.py | 2 -- .../src/datahub/ingestion/source/dbt/dbt_core.py | 13 +++++++++++++ .../src/datahub/ingestion/source/sql/oracle.py | 2 +- ...t_most_config_and_modified_since_admin_only.json | 3 --- node_modules/.yarn-integrity | 12 ------------ yarn.lock | 4 ---- 9 files changed, 22 insertions(+), 25 deletions(-) delete mode 100644 node_modules/.yarn-integrity delete mode 100644 yarn.lock diff --git a/docs/modeling/extending-the-metadata-model.md b/docs/modeling/extending-the-metadata-model.md index 293688a8b89e5..dc4edd3306f95 100644 --- a/docs/modeling/extending-the-metadata-model.md +++ b/docs/modeling/extending-the-metadata-model.md @@ -282,7 +282,7 @@ Generated package at custom-package/my-company-datahub-models This package should be installed alongside the main acryl-datahub package. 
Install the custom package locally with `pip install custom-package/my-company-datahub-models` -To enable others to use it, share the file at custom-package/my-company-datahub-models/dist/*.whl and have them install it with `pip install <wheel file>.whl` +To enable others to use it, share the file at custom-package/my-company-datahub-models/dist/<wheel file>.whl and have them install it with `pip install <wheel file>.whl` Alternatively, publish it to PyPI with `twine upload custom-package/my-company-datahub-models/dist/*` ``` diff --git a/metadata-ingestion/build.gradle b/metadata-ingestion/build.gradle index 0d8de625ec709..047699f084c61 100644 --- a/metadata-ingestion/build.gradle +++ b/metadata-ingestion/build.gradle @@ -57,7 +57,10 @@ task installPackage(type: Exec, dependsOn: installPackageOnly) { } task codegen(type: Exec, dependsOn: [environmentSetup, installPackage, ':metadata-events:mxe-schemas:build']) { - inputs.files(project.fileTree(dir: "../metadata-events/mxe-schemas/src/", include: "**/*.avsc")) + inputs.files( + project.fileTree(dir: "../metadata-events/mxe-schemas/src/", include: "**/*.avsc"), + project.fileTree(dir: "scripts"), + ) outputs.dir('src/datahub/metadata') commandLine 'bash', '-c', "source ${venv_name}/bin/activate && ./scripts/codegen.sh" } diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index 8582e165987ec..3f59fdf2cc548 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -116,7 +116,9 @@ def generate( click.echo() click.echo(f"Install the custom package locally with `pip install {package_path}`") click.echo( - f"To enable others to use it, share the file at {package_path}/dist/*.whl and have them install it with `pip install <wheel file>.whl`" + "To enable others to use it, share the file at " + f"{package_path}/dist/{package_name}-{package_version}-py3-none-any.whl " + "and have them install it with `pip install <wheel file>.whl`" ) click.echo( f"Alternatively, publish it to PyPI with `twine upload {package_path}/dist/*`" ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index 919ba5a4b285a..af28be310587a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -1315,8 +1315,6 @@ def get_schema_metadata( self.config.strip_user_ids_from_email, ) - # TODO if infer_dbt_schemas, load from saved schemas too - canonical_schema: List[SchemaField] = [] for column in node.columns: description = None diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py index a7703b203bcee..ac2b2815f3caa 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py @@ -466,6 +466,19 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: catalog_version, ) = self.loadManifestAndCatalog() + # If catalog_version is between 1.7.0 and 1.7.2, report a warning. + if ( + catalog_version + and catalog_version.startswith("1.7.") + and catalog_version < "1.7.3" + ): + self.report.report_warning( + "dbt_catalog_version", + f"Due to a bug in dbt, dbt version {catalog_version} will have incomplete metadata on sources. " + "Please upgrade to dbt version 1.7.3 or later. 
" + "See https://github.com/dbt-labs/dbt-core/issues/9119 for details on the bug.", + ) + additional_custom_props = { "manifest_schema": manifest_schema, "manifest_version": manifest_version, diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py index 7ee54200c6493..122520a730801 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/oracle.py @@ -157,7 +157,7 @@ def __getattr__(self, item: str) -> Any: @platform_name("Oracle") @config_class(OracleConfig) -@support_status(SupportStatus.CERTIFIED) +@support_status(SupportStatus.INCUBATING) @capability(SourceCapability.DOMAINS, "Enabled by default") class OracleSource(SQLAlchemySource): """ diff --git a/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json b/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json index b301ca1c1b988..52add6b002197 100644 --- a/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json +++ b/metadata-ingestion/tests/integration/powerbi/golden_test_most_config_and_modified_since_admin_only.json @@ -865,9 +865,6 @@ } }, "inputs": [ - { - "string": "urn:li:container:977b804137a1d2bf897ff1bbf440a1cc" - }, { "string": "urn:li:dataset:(urn:li:dataPlatform:powerbi,hr_pbi_test.dbo_book_issue,DEV)" }, diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity deleted file mode 100644 index 42a6cb985ab1b..0000000000000 --- a/node_modules/.yarn-integrity +++ /dev/null @@ -1,12 +0,0 @@ -{ - "systemParams": "darwin-arm64-93", - "modulesFolders": [ - "node_modules" - ], - "flags": [], - "linkedModules": [], - "topLevelPatterns": [], - "lockfileEntries": {}, - "files": [], - "artifacts": {} -} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock deleted file mode 100644 index fb57ccd13afbd..0000000000000 --- a/yarn.lock +++ /dev/null @@ -1,4 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - From 3e79a1325cf8eca29a8bb818a50762366bfd5d22 Mon Sep 17 00:00:00 2001 From: noggi Date: Fri, 8 Dec 2023 13:37:12 -0800 Subject: [PATCH 072/263] Pin alpine base image version to 3.18 (#9421) --- docker/datahub-frontend/Dockerfile | 2 +- docker/datahub-gms/Dockerfile | 2 +- docker/datahub-mae-consumer/Dockerfile | 2 +- docker/datahub-mce-consumer/Dockerfile | 2 +- docker/datahub-upgrade/Dockerfile | 2 +- docker/elasticsearch-setup/Dockerfile | 2 +- docker/mysql-setup/Dockerfile | 2 +- docker/postgres-setup/Dockerfile | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 9c26d73f4f40b..0c4c229af34f0 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -1,7 +1,7 @@ # Defining environment ARG APP_ENV=prod -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Configurable repositories ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index 1e13fa492c7f0..9c79e1da542f0 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -24,7 +24,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Upgrade Alpine and base packages ENV JMX_VERSION=0.18.0 diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 3bacd3b2dc81a..5bfa5f35ace17 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Re-declaring args from above to make them available in this stage (will inherit default values) ARG ALPINE_REPO_URL diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index bb22ab82f4402..cc79a3072c193 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Re-declaring args from above to make them available in this stage (will inherit default values) ARG ALPINE_REPO_URL diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index 551d61f41b979..2beb5b54dac38 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base # Re-declaring args from above to make them available in this stage (will inherit default values) ARG ALPINE_REPO_URL diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index f4dd1cb9b018e..ea64f94f88727 100644 --- a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -23,7 +23,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 AS base +FROM alpine:3.18 AS base ARG ALPINE_REPO_URL diff --git a/docker/mysql-setup/Dockerfile b/docker/mysql-setup/Dockerfile index 8b7ca704c32cd..409f96a325830 100644 --- a/docker/mysql-setup/Dockerfile +++ 
b/docker/mysql-setup/Dockerfile @@ -17,7 +17,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 +FROM alpine:3.18 COPY --from=binary /go/bin/dockerize /usr/local/bin ARG ALPINE_REPO_URL diff --git a/docker/postgres-setup/Dockerfile b/docker/postgres-setup/Dockerfile index e10f70571501e..673ce979477be 100644 --- a/docker/postgres-setup/Dockerfile +++ b/docker/postgres-setup/Dockerfile @@ -17,7 +17,7 @@ WORKDIR /go/src/github.com/jwilder/dockerize RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION -FROM alpine:3 +FROM alpine:3.18 COPY --from=binary /go/bin/dockerize /usr/local/bin ARG ALPINE_REPO_URL From 159a013b0515f8a94b88d62e4ad20aad228fac9d Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Fri, 8 Dec 2023 17:52:13 -0500 Subject: [PATCH 073/263] fix(cypress) Fix flakiness of cypress test for glossary navigation (#9410) Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- .../shared/EntityDropdown/useDeleteEntity.tsx | 6 ++++ .../src/app/glossary/cacheUtils.ts | 36 +++++++++++++++++++ .../e2e/glossary/glossary_navigation.js | 3 +- 3 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 datahub-web-react/src/app/glossary/cacheUtils.ts diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx index 1e4737135ed74..171a36b1cfbcc 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx @@ -7,6 +7,7 @@ import analytics, { EventType } from '../../../analytics'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getParentNodeToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; import { useHandleDeleteDomain } from './useHandleDeleteDomain'; +import { removeTermFromGlossaryNode } from '../../../glossary/cacheUtils'; /** * Performs the flow for deleting an entity of a given type. @@ -30,6 +31,7 @@ function useDeleteEntity( const maybeDeleteEntity = getDeleteEntityMutation(type)(); const deleteEntity = (maybeDeleteEntity && maybeDeleteEntity[0]) || undefined; + const client = maybeDeleteEntity?.[1].client; function handleDeleteEntity() { deleteEntity?.({ @@ -54,6 +56,10 @@ function useDeleteEntity( handleDeleteDomain(); } + if (client && entityData.type === EntityType.GlossaryTerm && entityData?.parentNodes?.nodes) { + removeTermFromGlossaryNode(client, entityData.parentNodes.nodes[0].urn, urn); + } + setTimeout( () => { setHasBeenDeleted(true); diff --git a/datahub-web-react/src/app/glossary/cacheUtils.ts b/datahub-web-react/src/app/glossary/cacheUtils.ts new file mode 100644 index 0000000000000..f70901bf71f2f --- /dev/null +++ b/datahub-web-react/src/app/glossary/cacheUtils.ts @@ -0,0 +1,36 @@ +import { ApolloClient } from '@apollo/client'; +import { GetGlossaryNodeDocument, GetGlossaryNodeQuery } from '../../graphql/glossaryNode.generated'; + +export function removeTermFromGlossaryNode( + client: ApolloClient<object>, + glossaryNodeUrn: string, + glossaryTermUrn: string, +) { + // Read the data from our cache for this query. + const currData: GetGlossaryNodeQuery | null = client.readQuery({ + query: GetGlossaryNodeDocument, + variables: { urn: glossaryNodeUrn }, + }); + + // Remove the term from the existing children set.
+ const newTermChildren = { + relationships: [ + ...(currData?.glossaryNode?.children?.relationships || []).filter( + (relationship) => relationship.entity?.urn !== glossaryTermUrn, + ), + ], + total: (currData?.glossaryNode?.children?.total || 1) - 1, + }; + + // Write our data back to the cache. + client.writeQuery({ + query: GetGlossaryNodeDocument, + variables: { urn: glossaryNodeUrn }, + data: { + glossaryNode: { + ...currData?.glossaryNode, + children: newTermChildren, + }, + }, + }); +} diff --git a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js index c6e9d93f71b8c..7ddf36aa87c2d 100644 --- a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js +++ b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js @@ -1,6 +1,6 @@ const glossaryTerm = "CypressGlosssaryNavigationTerm"; const glossaryTermGroup = "CypressGlosssaryNavigationGroup"; -const glossaryParentGroup = "Cypress"; +const glossaryParentGroup = "CypressNode"; describe("glossary sidebar navigation test", () => { it("create term and term parent group, move and delete term group", () => { @@ -33,6 +33,7 @@ describe("glossary sidebar navigation test", () => { // Move a term group from the root level to be under a parent term group cy.goToGlossaryList(); cy.clickOptionWithText(glossaryTermGroup); + cy.wait(3000) cy.openThreeDotDropdown(); cy.clickOptionWithText("Move"); cy.get('[data-testid="move-glossary-entity-modal"]').contains(glossaryParentGroup).click({force: true}); From cff32e9c742f9bff2db686445e3f9cddaa6caf38 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Sat, 9 Dec 2023 05:37:00 +0530 Subject: [PATCH 074/263] fix(ingest/transformer): correct registration (#9418) --- metadata-ingestion/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index dac865d2dac37..e894cbf043338 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -650,7 +650,7 @@ "simple_add_dataset_properties = datahub.ingestion.transformer.add_dataset_properties:SimpleAddDatasetProperties", "pattern_add_dataset_schema_terms = datahub.ingestion.transformer.add_dataset_schema_terms:PatternAddDatasetSchemaTerms", "pattern_add_dataset_schema_tags = datahub.ingestion.transformer.add_dataset_schema_tags:PatternAddDatasetSchemaTags", - "extract_owners_from_tags = datahub.ingestion.transformer.extract_ownership_from_tags:ExtractOwnersFromTagsTransformer", + "extract_ownership_from_tags = datahub.ingestion.transformer.extract_ownership_from_tags:ExtractOwnersFromTagsTransformer", ], "datahub.ingestion.sink.plugins": [ "file = datahub.ingestion.sink.file:FileSink", From e4d8dcbc02d2dae73b7054813b900af239795485 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Mon, 11 Dec 2023 09:43:23 -0500 Subject: [PATCH 075/263] docs(ingest/sql-queries): Rearrange sections (#9426) --- .../sql-queries/{sql-queries.md => sql-queries_pre.md} | 5 +++-- .../src/datahub/ingestion/source/sql_queries.py | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) rename metadata-ingestion/docs/sources/sql-queries/{sql-queries.md => sql-queries_pre.md} (67%) diff --git a/metadata-ingestion/docs/sources/sql-queries/sql-queries.md b/metadata-ingestion/docs/sources/sql-queries/sql-queries_pre.md similarity index 67% rename from metadata-ingestion/docs/sources/sql-queries/sql-queries.md rename to metadata-ingestion/docs/sources/sql-queries/sql-queries_pre.md index 
e829b4366bb84..2d915f0bcf84d 100644 --- a/metadata-ingestion/docs/sources/sql-queries/sql-queries.md +++ b/metadata-ingestion/docs/sources/sql-queries/sql-queries_pre.md @@ -1,8 +1,9 @@ -### Example Queries File +#### Example Queries File ```json {"query": "SELECT x FROM my_table", "timestamp": 1689232738.051, "user": "user_a", "downstream_tables": [], "upstream_tables": ["my_database.my_schema.my_table"]} {"query": "INSERT INTO my_table VALUES (1, 'a')", "timestamp": 1689232737.669, "user": "user_b", "downstream_tables": ["my_database.my_schema.my_table"], "upstream_tables": []} ``` -Note that this is not a valid standard JSON file, but rather a file containing one JSON object per line. +Note that this file does not represent a single JSON object, but instead newline-delimited JSON, in which +each line is a separate JSON object. diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py index 58e9682df935e..c3d6657c81fa7 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql_queries.py @@ -93,8 +93,9 @@ def compute_stats(self) -> None: @capability(SourceCapability.LINEAGE_FINE, "Parsed from SQL queries") class SqlQueriesSource(Source): """ - This source reads a specifically-formatted JSON file containing SQL queries and parses them to generate lineage. + This source reads a newline-delimited JSON file containing SQL queries and parses them to generate lineage. + ### Query File Format This file should contain one JSON object per line, with the following fields: - query: string - The SQL query to parse. - timestamp (optional): number - The timestamp of the query, in seconds since the epoch. From 5ac854dcb1f1516a5325ef5bbac466d08c016fcb Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Mon, 11 Dec 2023 22:43:58 +0530 Subject: [PATCH 076/263] fix(ui): Adjusting the view of the Column Stats (#9430) --- .../shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx index 080fba6619977..0cbb79dde49cd 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Stats/snapshot/ColumnStats.tsx @@ -14,6 +14,8 @@ type Props = { const StatSection = styled.div` padding: 20px 20px; overflow: auto; + display: flex; + flex-direction: column; `; const NameText = styled(Typography.Text)` @@ -162,7 +164,12 @@ export default function ColumnStats({ columnStats }: Props) { return ( Column Stats - + ); } From 8a1122049c02c4929d8029c25dac517e5fdafc48 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Mon, 11 Dec 2023 14:25:43 -0800 Subject: [PATCH 077/263] feat(patch): support fine grained lineage patches (#9408) Co-authored-by: Harshal Sheth --- .../dataset/UpstreamLineageTemplate.java | 271 ++++++++++++- .../registry/UpstreamLineageTemplateTest.java | 359 ++++++++++++++++++ .../java/com/linkedin/metadata/Constants.java | 5 + .../src/datahub/specific/dataset.py | 107 +++++- .../unit/patch/complex_dataset_patch.json | 45 ++- .../tests/unit/patch/test_patch_builder.py | 16 + .../dataset/UpstreamLineagePatchBuilder.java | 231 ++++++++++- 
.../java/datahub/client/patch/PatchTest.java | 24 +- 8 files changed, 1023 insertions(+), 35 deletions(-) create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java index 35816895669be..81a4065dedb1a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java @@ -1,20 +1,41 @@ package com.linkedin.metadata.models.registry.template.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.Streams; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; import com.linkedin.dataset.FineGrainedLineageArray; import com.linkedin.dataset.UpstreamArray; import com.linkedin.dataset.UpstreamLineage; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; import java.util.Collections; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class UpstreamLineageTemplate implements ArrayMergingTemplate { +public class UpstreamLineageTemplate extends CompoundKeyTemplate { + // Fields private static final String UPSTREAMS_FIELD_NAME = "upstreams"; private static final String DATASET_FIELD_NAME = "dataset"; + private static final String FINE_GRAINED_LINEAGES_FIELD_NAME = "fineGrainedLineages"; + private static final String FINE_GRAINED_UPSTREAM_TYPE = "upstreamType"; + private static final String FINE_GRAINED_UPSTREAMS = "upstreams"; + private static final String FINE_GRAINED_DOWNSTREAM_TYPE = "downstreamType"; + private static final String FINE_GRAINED_DOWNSTREAMS = "downstreams"; + private static final String FINE_GRAINED_TRANSFORMATION_OPERATION = "transformOperation"; + private static final String FINE_GRAINED_CONFIDENCE_SCORE = "confidenceScore"; - // TODO: Fine Grained Lineages not patchable at this time, they don't have a well established key + // Template support + private static final String NONE_TRANSFORMATION_TYPE = "NONE"; + private static final Float DEFAULT_CONFIDENCE_SCORE = 1.0f; @Override public UpstreamLineage getSubtype(RecordTemplate recordTemplate) throws ClassCastException { @@ -42,14 +63,250 @@ public UpstreamLineage getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - return arrayFieldToMap( - baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + JsonNode transformedNode = + arrayFieldToMap( + baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + ((ObjectNode) transformedNode) + .set( + FINE_GRAINED_LINEAGES_FIELD_NAME, + combineAndTransformFineGrainedLineages( + transformedNode.get(FINE_GRAINED_LINEAGES_FIELD_NAME))); + + return transformedNode; } @Nonnull @Override public 
JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray( - patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + JsonNode rebasedNode = + transformedMapToArray( + patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + ((ObjectNode) rebasedNode) + .set( + FINE_GRAINED_LINEAGES_FIELD_NAME, + reconstructFineGrainedLineages(rebasedNode.get(FINE_GRAINED_LINEAGES_FIELD_NAME))); + return rebasedNode; + } + + /** + * Combines fine grained lineage array into a map using upstream and downstream types as keys, + * defaulting when not present. Due to this construction, patches will look like: path: + * /fineGrainedLineages/TRANSFORMATION_OPERATION/(upstreamType || downstreamType)/TYPE/FIELD_URN, + * op: ADD/REMOVE, value: float (confidenceScore) Due to the way FineGrainedLineage was designed + * it doesn't necessarily have a consistent key we can reference, so this specialized method + * mimics the arrayFieldToMap of the super class with the specialization that it does not put the + * full value of the aspect at the end of the key, just the particular array. This prevents + * unintended overwrites through improper MCP construction that is technically allowed by the + * schema when combining under fields that form the natural key. + * + * @param fineGrainedLineages the fine grained lineage array node + * @return the modified {@link JsonNode} with array fields transformed to maps + */ + private JsonNode combineAndTransformFineGrainedLineages(@Nullable JsonNode fineGrainedLineages) { + ObjectNode mapNode = instance.objectNode(); + if (!(fineGrainedLineages instanceof ArrayNode) || fineGrainedLineages.isEmpty()) { + return mapNode; + } + JsonNode lineageCopy = fineGrainedLineages.deepCopy(); + + lineageCopy + .elements() + .forEachRemaining( + node -> { + JsonNode nodeClone = node.deepCopy(); + String transformationOperation = + nodeClone.has(FINE_GRAINED_TRANSFORMATION_OPERATION) + ? nodeClone.get(FINE_GRAINED_TRANSFORMATION_OPERATION).asText() + : NONE_TRANSFORMATION_TYPE; + + if (!mapNode.has(transformationOperation)) { + mapNode.set(transformationOperation, instance.objectNode()); + } + ObjectNode transformationOperationNode = + (ObjectNode) mapNode.get(transformationOperation); + + Float confidenceScore = + nodeClone.has(FINE_GRAINED_CONFIDENCE_SCORE) + ? nodeClone.get(FINE_GRAINED_CONFIDENCE_SCORE).floatValue() + : DEFAULT_CONFIDENCE_SCORE; + + String upstreamType = + nodeClone.has(FINE_GRAINED_UPSTREAM_TYPE) + ? nodeClone.get(FINE_GRAINED_UPSTREAM_TYPE).asText() + : null; + String downstreamType = + nodeClone.has(FINE_GRAINED_DOWNSTREAM_TYPE) + ? nodeClone.get(FINE_GRAINED_DOWNSTREAM_TYPE).asText() + : null; + ArrayNode upstreams = + nodeClone.has(FINE_GRAINED_UPSTREAMS) + ? (ArrayNode) nodeClone.get(FINE_GRAINED_UPSTREAMS) + : null; + ArrayNode downstreams = + nodeClone.has(FINE_GRAINED_DOWNSTREAMS) + ? (ArrayNode) nodeClone.get(FINE_GRAINED_DOWNSTREAMS) + : null; + + // Handle upstreams + if (upstreamType == null) { + // Determine default type + Urn upstreamUrn = + upstreams != null ? 
UrnUtils.getUrn(upstreams.get(0).asText()) : null; + if (upstreamUrn != null + && SCHEMA_FIELD_ENTITY_NAME.equals(upstreamUrn.getEntityType())) { + upstreamType = FINE_GRAINED_LINEAGE_FIELD_SET_TYPE; + } else { + upstreamType = FINE_GRAINED_LINEAGE_DATASET_TYPE; + } + } + if (!transformationOperationNode.has(FINE_GRAINED_UPSTREAM_TYPE)) { + transformationOperationNode.set(FINE_GRAINED_UPSTREAM_TYPE, instance.objectNode()); + } + ObjectNode upstreamTypeNode = + (ObjectNode) transformationOperationNode.get(FINE_GRAINED_UPSTREAM_TYPE); + if (!upstreamTypeNode.has(upstreamType)) { + upstreamTypeNode.set(upstreamType, instance.objectNode()); + } + if (upstreams != null) { + addUrnsToSubType(upstreamTypeNode, upstreams, upstreamType, confidenceScore); + } + + // Handle downstreams + if (downstreamType == null) { + // Determine default type + if (downstreams != null && downstreams.size() > 1) { + downstreamType = FINE_GRAINED_LINEAGE_FIELD_SET_TYPE; + } else { + downstreamType = FINE_GRAINED_LINEAGE_FIELD_TYPE; + } + } + if (!transformationOperationNode.has(FINE_GRAINED_DOWNSTREAM_TYPE)) { + transformationOperationNode.set( + FINE_GRAINED_DOWNSTREAM_TYPE, instance.objectNode()); + } + ObjectNode downstreamTypeNode = + (ObjectNode) transformationOperationNode.get(FINE_GRAINED_DOWNSTREAM_TYPE); + if (!downstreamTypeNode.has(downstreamType)) { + downstreamTypeNode.set(downstreamType, instance.objectNode()); + } + if (downstreams != null) { + addUrnsToSubType(downstreamTypeNode, downstreams, downstreamType, confidenceScore); + } + }); + return mapNode; + } + + private void addUrnsToSubType( + JsonNode superType, ArrayNode urnsList, String subType, Float confidenceScore) { + ObjectNode upstreamSubTypeNode = (ObjectNode) superType.get(subType); + // Will overwrite repeat urns with different confidence scores with the most recently seen + upstreamSubTypeNode.setAll( + Streams.stream(urnsList.elements()) + .map(JsonNode::asText) + .distinct() + .collect(Collectors.toMap(urn -> urn, urn -> instance.numberNode(confidenceScore)))); + } + + /** + * Takes the transformed fine grained lineages map from pre-processing and reconstructs an array + * of FineGrainedLineages Avoids producing side effects by copying nodes, use resulting node and + * not the original + * + * @param transformedFineGrainedLineages the transformed fine grained lineage map + * @return the modified {@link JsonNode} formatted consistent with the original schema + */ + private ArrayNode reconstructFineGrainedLineages(JsonNode transformedFineGrainedLineages) { + if (transformedFineGrainedLineages instanceof ArrayNode) { + // We already have an ArrayNode, no need to transform. This happens during `replace` + // operations + return (ArrayNode) transformedFineGrainedLineages; + } + ObjectNode mapNode = (ObjectNode) transformedFineGrainedLineages; + ArrayNode arrayNode = instance.arrayNode(); + + mapNode + .fieldNames() + .forEachRemaining( + transformationOperation -> { + final ObjectNode transformationOperationNode = + (ObjectNode) mapNode.get(transformationOperation); + final ObjectNode upstreamType = + transformationOperationNode.has(FINE_GRAINED_UPSTREAM_TYPE) + ? (ObjectNode) transformationOperationNode.get(FINE_GRAINED_UPSTREAM_TYPE) + : instance.objectNode(); + final ObjectNode downstreamType = + transformationOperationNode.has(FINE_GRAINED_DOWNSTREAM_TYPE) + ? 
(ObjectNode) transformationOperationNode.get(FINE_GRAINED_DOWNSTREAM_TYPE) + : instance.objectNode(); + + // Handle upstreams + if (!upstreamType.isEmpty()) { + populateTypeNode( + upstreamType, + transformationOperation, + FINE_GRAINED_UPSTREAM_TYPE, + FINE_GRAINED_UPSTREAMS, + FINE_GRAINED_DOWNSTREAM_TYPE, + arrayNode); + } + + // Handle downstreams + if (!downstreamType.isEmpty()) { + populateTypeNode( + downstreamType, + transformationOperation, + FINE_GRAINED_DOWNSTREAM_TYPE, + FINE_GRAINED_DOWNSTREAMS, + FINE_GRAINED_UPSTREAM_TYPE, + arrayNode); + } + }); + + return arrayNode; + } + + private void populateTypeNode( + JsonNode typeNode, + String transformationOperation, + String typeName, + String arrayTypeName, + String defaultTypeName, + ArrayNode arrayNode) { + typeNode + .fieldNames() + .forEachRemaining( + subTypeName -> { + ObjectNode subType = (ObjectNode) typeNode.get(subTypeName); + if (!subType.isEmpty()) { + ObjectNode fineGrainedLineage = instance.objectNode(); + AtomicReference<Float> minimumConfidenceScore = new AtomicReference<>(1.0f); + + fineGrainedLineage.put(typeName, subTypeName); + fineGrainedLineage.put( + FINE_GRAINED_TRANSFORMATION_OPERATION, transformationOperation); + // Array to actually be filled out + fineGrainedLineage.set(arrayTypeName, instance.arrayNode()); + // Added to pass model validation, because we have no way of appropriately pairing + // upstreams and downstreams + // within fine grained lineages consistently due to being able to have multiple + // downstream types paired with a single + // transform operation, we just set a default type because it's a required property + fineGrainedLineage.put(defaultTypeName, FINE_GRAINED_LINEAGE_FIELD_SET_TYPE); + subType + .fieldNames() + .forEachRemaining( + subTypeKey -> { + ((ArrayNode) fineGrainedLineage.get(arrayTypeName)).add(subTypeKey); + Float scoreValue = subType.get(subTypeKey).floatValue(); + if (scoreValue <= minimumConfidenceScore.get()) { + minimumConfidenceScore.set(scoreValue); + fineGrainedLineage.set( + FINE_GRAINED_CONFIDENCE_SCORE, + instance.numberNode(minimumConfidenceScore.get())); + } + }); + arrayNode.add(fineGrainedLineage); + } + }); } }
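The pointer grammar described in the `combineAndTransformFineGrainedLineages` javadoc above (`/fineGrainedLineages/<transformOperation>/upstreamType/<TYPE>/<urn>`, with the confidence score as the patch value) is what the Python `DatasetPatchBuilder` changes later in this patch emit. A rough sketch of that producer side, mirroring test_patch_builder.py further down; the hive dataset names are illustrative only:

```python
# Sketch of the producing side of these fine-grained lineage patches;
# the dataset names are illustrative, not real.
from datahub.emitter.mce_builder import make_dataset_urn
from datahub.metadata.schema_classes import (
    FineGrainedLineageClass,
    FineGrainedLineageDownstreamTypeClass,
    FineGrainedLineageUpstreamTypeClass,
)
from datahub.specific.dataset import DatasetPatchBuilder

patcher = DatasetPatchBuilder(
    make_dataset_urn(platform="hive", name="fct_users_created", env="PROD")
).add_fine_grained_upstream_lineage(
    FineGrainedLineageClass(
        upstreamType=FineGrainedLineageUpstreamTypeClass.DATASET,
        upstreams=[
            make_dataset_urn(
                platform="hive", name="fct_users_created_upstream", env="PROD"
            )
        ],
        downstreamType=FineGrainedLineageDownstreamTypeClass.FIELD_SET,
        transformOperation="TRANSFORM",
        confidenceScore=1.0,
    )
)

# Each emitted MCP carries a JSON Patch op of the shape handled above:
#   op: "add"
#   path: /fineGrainedLineages/TRANSFORM/upstreamType/DATASET/<url-encoded urn>
#   value: 1.0
for mcp in patcher.build():
    print(mcp.aspectName, mcp.changeType)
```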
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java new file mode 100644 index 0000000000000..07982a87be56c --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java @@ -0,0 +1,359 @@ +package com.linkedin.metadata.models.registry; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + +import com.fasterxml.jackson.databind.node.NumericNode; +import com.github.fge.jackson.jsonpointer.JsonPointer; +import com.github.fge.jsonpatch.AddOperation; +import com.github.fge.jsonpatch.JsonPatch; +import com.github.fge.jsonpatch.JsonPatchOperation; +import com.github.fge.jsonpatch.RemoveOperation; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.DataMap; +import com.linkedin.dataset.FineGrainedLineage; +import com.linkedin.dataset.FineGrainedLineageDownstreamType; +import com.linkedin.dataset.FineGrainedLineageUpstreamType; +import com.linkedin.dataset.UpstreamLineage; +import com.linkedin.metadata.models.registry.template.dataset.UpstreamLineageTemplate; +import java.util.ArrayList; +import java.util.List; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class UpstreamLineageTemplateTest { + @Test + public void testPatchUpstream() throws Exception { + UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate(); + UpstreamLineage upstreamLineage = upstreamLineageTemplate.getDefault(); + List<JsonPatchOperation> patchOperations = new ArrayList<>(); + NumericNode upstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + upstreamConfidenceScore); + patchOperations.add(operation); + JsonPatch jsonPatch = new JsonPatch(patchOperations); + + // Initial population test + UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap = new DataMap(); + dataMap.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage = new FineGrainedLineage(dataMap); + UrnArray urns = new UrnArray(); + Urn urn1 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"); + urns.add(urn1); + fineGrainedLineage.setUpstreams(urns); + fineGrainedLineage.setTransformOperation("CREATE"); + fineGrainedLineage.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + Assert.assertEquals(result.getFineGrainedLineages().get(0), fineGrainedLineage); + + // Test non-overwrite upstreams and correct confidence score + JsonPatchOperation operation2 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + upstreamConfidenceScore); + NumericNode upstreamConfidenceScore2 = instance.numberNode(0.1f); + JsonPatchOperation operation3 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + upstreamConfidenceScore2); + List<JsonPatchOperation> patchOperations2 = new ArrayList<>(); + patchOperations2.add(operation2); + patchOperations2.add(operation3); + JsonPatch jsonPatch2 = new JsonPatch(patchOperations2); + UpstreamLineage result2 = upstreamLineageTemplate.applyPatch(result, jsonPatch2); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap2 = new DataMap(); + dataMap2.put("confidenceScore", 0.1); + FineGrainedLineage fineGrainedLineage2 = new FineGrainedLineage(dataMap2); + UrnArray urns2 = new UrnArray(); + Urn urn2 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"); + urns2.add(urn1); + urns2.add(urn2); + fineGrainedLineage2.setUpstreams(urns2); + fineGrainedLineage2.setTransformOperation("CREATE"); + fineGrainedLineage2.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage2.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + Assert.assertEquals(result2.getFineGrainedLineages().get(0), fineGrainedLineage2); + + // Check different upstream types + JsonPatchOperation operation4 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"),
upstreamConfidenceScore); + List<JsonPatchOperation> patchOperations3 = new ArrayList<>(); + patchOperations3.add(operation4); + JsonPatch jsonPatch3 = new JsonPatch(patchOperations3); + UpstreamLineage result3 = upstreamLineageTemplate.applyPatch(result2, jsonPatch3); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap3 = new DataMap(); + dataMap3.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage3 = new FineGrainedLineage(dataMap3); + UrnArray urns3 = new UrnArray(); + Urn urn3 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"); + urns3.add(urn3); + fineGrainedLineage3.setUpstreams(urns3); + fineGrainedLineage3.setTransformOperation("CREATE"); + fineGrainedLineage3.setUpstreamType(FineGrainedLineageUpstreamType.DATASET); + fineGrainedLineage3.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + // Splits into two for different types + Assert.assertEquals(result3.getFineGrainedLineages().get(1), fineGrainedLineage3); + + // Check different transform types + JsonPatchOperation operation5 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"), + upstreamConfidenceScore); + List<JsonPatchOperation> patchOperations4 = new ArrayList<>(); + patchOperations4.add(operation5); + JsonPatch jsonPatch4 = new JsonPatch(patchOperations4); + UpstreamLineage result4 = upstreamLineageTemplate.applyPatch(result3, jsonPatch4); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap4 = new DataMap(); + dataMap4.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage4 = new FineGrainedLineage(dataMap4); + UrnArray urns4 = new UrnArray(); + Urn urn4 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"); + urns4.add(urn4); + fineGrainedLineage4.setUpstreams(urns4); + fineGrainedLineage4.setTransformOperation("TRANSFORM"); + fineGrainedLineage4.setUpstreamType(FineGrainedLineageUpstreamType.DATASET); + fineGrainedLineage4.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + // New entry in array because of new transformation type + Assert.assertEquals(result4.getFineGrainedLineages().get(2), fineGrainedLineage4); + + // Remove + JsonPatchOperation removeOperation = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)")); + JsonPatchOperation removeOperation2 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)")); + JsonPatchOperation removeOperation3 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)")); + JsonPatchOperation removeOperation4 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/upstreamType/DATASET/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)")); + + List<JsonPatchOperation> removeOperations = new ArrayList<>(); + removeOperations.add(removeOperation); + removeOperations.add(removeOperation2); + removeOperations.add(removeOperation3); + removeOperations.add(removeOperation4); + JsonPatch removePatch = new JsonPatch(removeOperations); + UpstreamLineage finalResult = upstreamLineageTemplate.applyPatch(result4, removePatch);
Assert.assertEquals(upstreamLineageTemplate.getDefault(), finalResult); + } + + @Test + public void testPatchDownstream() throws Exception { + UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate(); + UpstreamLineage upstreamLineage = upstreamLineageTemplate.getDefault(); + List<JsonPatchOperation> patchOperations = new ArrayList<>(); + NumericNode downstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + downstreamConfidenceScore); + patchOperations.add(operation); + JsonPatch jsonPatch = new JsonPatch(patchOperations); + + // Initial population test + UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap = new DataMap(); + dataMap.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage = new FineGrainedLineage(dataMap); + UrnArray urns = new UrnArray(); + Urn urn1 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"); + urns.add(urn1); + fineGrainedLineage.setDownstreams(urns); + fineGrainedLineage.setTransformOperation("CREATE"); + fineGrainedLineage.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + Assert.assertEquals(result.getFineGrainedLineages().get(0), fineGrainedLineage); + + // Test non-overwrite downstreams and correct confidence score + JsonPatchOperation operation2 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + downstreamConfidenceScore); + NumericNode downstreamConfidenceScore2 = instance.numberNode(0.1f); + JsonPatchOperation operation3 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"), + downstreamConfidenceScore2); + List<JsonPatchOperation> patchOperations2 = new ArrayList<>(); + patchOperations2.add(operation2); + patchOperations2.add(operation3); + JsonPatch jsonPatch2 = new JsonPatch(patchOperations2); + UpstreamLineage result2 = upstreamLineageTemplate.applyPatch(result, jsonPatch2); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap2 = new DataMap(); + dataMap2.put("confidenceScore", 0.1); + FineGrainedLineage fineGrainedLineage2 = new FineGrainedLineage(dataMap2); + UrnArray urns2 = new UrnArray(); + Urn urn2 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"); + urns2.add(urn1); + urns2.add(urn2); + fineGrainedLineage2.setDownstreams(urns2); + fineGrainedLineage2.setTransformOperation("CREATE"); + fineGrainedLineage2.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage2.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + Assert.assertEquals(result2.getFineGrainedLineages().get(0), fineGrainedLineage2); + + // Check different downstream types + JsonPatchOperation operation4 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"),
downstreamConfidenceScore); + List<JsonPatchOperation> patchOperations3 = new ArrayList<>(); + patchOperations3.add(operation4); + JsonPatch jsonPatch3 = new JsonPatch(patchOperations3); + UpstreamLineage result3 = upstreamLineageTemplate.applyPatch(result2, jsonPatch3); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap3 = new DataMap(); + dataMap3.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage3 = new FineGrainedLineage(dataMap3); + UrnArray urns3 = new UrnArray(); + Urn urn3 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)"); + urns3.add(urn3); + fineGrainedLineage3.setDownstreams(urns3); + fineGrainedLineage3.setTransformOperation("CREATE"); + fineGrainedLineage3.setDownstreamType(FineGrainedLineageDownstreamType.FIELD); + fineGrainedLineage3.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + // Splits into two for different types + Assert.assertEquals(result3.getFineGrainedLineages().get(1), fineGrainedLineage3); + + // Check different transform types + JsonPatchOperation operation5 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"), + downstreamConfidenceScore); + List<JsonPatchOperation> patchOperations4 = new ArrayList<>(); + patchOperations4.add(operation5); + JsonPatch jsonPatch4 = new JsonPatch(patchOperations4); + UpstreamLineage result4 = upstreamLineageTemplate.applyPatch(result3, jsonPatch4); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap4 = new DataMap(); + dataMap4.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage4 = new FineGrainedLineage(dataMap4); + UrnArray urns4 = new UrnArray(); + Urn urn4 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)"); + urns4.add(urn4); + fineGrainedLineage4.setDownstreams(urns4); + fineGrainedLineage4.setTransformOperation("TRANSFORM"); + fineGrainedLineage4.setDownstreamType(FineGrainedLineageDownstreamType.FIELD); + fineGrainedLineage4.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + // New entry in array because of new transformation type + Assert.assertEquals(result4.getFineGrainedLineages().get(2), fineGrainedLineage4); + + // Remove + JsonPatchOperation removeOperation = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)")); + JsonPatchOperation removeOperation2 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)")); + JsonPatchOperation removeOperation3 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD)")); + JsonPatchOperation removeOperation4 = + new RemoveOperation( + new JsonPointer( + "/fineGrainedLineages/TRANSFORM/downstreamType/FIELD/urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD)")); + + List<JsonPatchOperation> removeOperations = new ArrayList<>(); + removeOperations.add(removeOperation); + removeOperations.add(removeOperation2); + removeOperations.add(removeOperation3); + removeOperations.add(removeOperation4); + JsonPatch removePatch = new JsonPatch(removeOperations); + UpstreamLineage finalResult = upstreamLineageTemplate.applyPatch(result4,
removePatch); + Assert.assertEquals(upstreamLineageTemplate.getDefault(), finalResult); + } + + @Test + public void testUpAndDown() throws Exception { + UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate(); + UpstreamLineage upstreamLineage = upstreamLineageTemplate.getDefault(); + List<JsonPatchOperation> patchOperations = new ArrayList<>(); + NumericNode downstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/downstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + downstreamConfidenceScore); + patchOperations.add(operation); + NumericNode upstreamConfidenceScore = instance.numberNode(1.0f); + JsonPatchOperation operation2 = + new AddOperation( + new JsonPointer( + "/fineGrainedLineages/CREATE/upstreamType/FIELD_SET/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"), + upstreamConfidenceScore); + patchOperations.add(operation2); + JsonPatch jsonPatch = new JsonPatch(patchOperations); + + // Initial population test + UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch); + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap = new DataMap(); + dataMap.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage = new FineGrainedLineage(dataMap); + UrnArray urns = new UrnArray(); + Urn urn1 = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)"); + urns.add(urn1); + fineGrainedLineage.setTransformOperation("CREATE"); + fineGrainedLineage.setUpstreams(urns); + fineGrainedLineage.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage.setDownstreams(urns); + + // Hack because Jackson parses values to doubles instead of floats + DataMap dataMap2 = new DataMap(); + dataMap2.put("confidenceScore", 1.0); + FineGrainedLineage fineGrainedLineage2 = new FineGrainedLineage(dataMap2); + fineGrainedLineage2.setTransformOperation("CREATE"); + fineGrainedLineage2.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET); + fineGrainedLineage2.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET); + fineGrainedLineage2.setDownstreams(urns); + + Assert.assertEquals(result.getFineGrainedLineages().get(1), fineGrainedLineage2); + } +} diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index f5a3c9c12ff70..3d9b533dc8f72 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -125,6 +125,11 @@ public class Constants { public static final String VIEW_PROPERTIES_ASPECT_NAME = "viewProperties"; public static final String DATASET_PROFILE_ASPECT_NAME = "datasetProfile"; + // Aspect support + public static final String FINE_GRAINED_LINEAGE_DATASET_TYPE = "DATASET"; + public static final String FINE_GRAINED_LINEAGE_FIELD_SET_TYPE = "FIELD_SET"; + public static final String FINE_GRAINED_LINEAGE_FIELD_TYPE = "FIELD"; + // Chart public static final String CHART_KEY_ASPECT_NAME = "chartKey"; public static final String CHART_INFO_ASPECT_NAME = "chartInfo"; diff --git a/metadata-ingestion/src/datahub/specific/dataset.py b/metadata-ingestion/src/datahub/specific/dataset.py index
fcfe049fb15cf..294a80572669b 100644 --- a/metadata-ingestion/src/datahub/specific/dataset.py +++ b/metadata-ingestion/src/datahub/specific/dataset.py @@ -1,4 +1,4 @@ -from typing import Dict, Generic, List, Optional, TypeVar, Union +from typing import Dict, Generic, List, Optional, Tuple, TypeVar, Union from urllib.parse import quote from datahub.emitter.mcp_patch_builder import MetadataPatchProposal @@ -6,6 +6,9 @@ DatasetPropertiesClass as DatasetProperties, EditableDatasetPropertiesClass as EditableDatasetProperties, EditableSchemaMetadataClass as EditableSchemaMetadata, + FineGrainedLineageClass as FineGrainedLineage, + FineGrainedLineageDownstreamTypeClass as FineGrainedLineageDownstreamType, + FineGrainedLineageUpstreamTypeClass as FineGrainedLineageUpstreamType, GlobalTagsClass as GlobalTags, GlossaryTermAssociationClass as Term, GlossaryTermsClass as GlossaryTerms, @@ -144,6 +147,108 @@ def set_upstream_lineages(self, upstreams: List[Upstream]) -> "DatasetPatchBuild ) return self + def add_fine_grained_upstream_lineage( + self, fine_grained_lineage: FineGrainedLineage + ) -> "DatasetPatchBuilder": + ( + transform_op, + upstream_type, + downstream_type, + ) = DatasetPatchBuilder.get_fine_grained_key(fine_grained_lineage) + for upstream_urn in fine_grained_lineage.upstreams or []: + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "add", + path=DatasetPatchBuilder.quote_fine_grained_upstream_path( + transform_op, upstream_type, upstream_urn + ), + value=fine_grained_lineage.confidenceScore, + ) + for downstream_urn in fine_grained_lineage.downstreams or []: + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "add", + path=DatasetPatchBuilder.quote_fine_grained_downstream_path( + transform_op, downstream_type, downstream_urn + ), + value=fine_grained_lineage.confidenceScore, + ) + return self + + @staticmethod + def get_fine_grained_key( + fine_grained_lineage: FineGrainedLineage, + ) -> Tuple[str, str, str]: + transform_op = fine_grained_lineage.transformOperation or "NONE" + upstream_type = ( + fine_grained_lineage.upstreamType + if isinstance(fine_grained_lineage.upstreamType, str) + else FineGrainedLineageUpstreamType.FIELD_SET + ) + downstream_type = ( + fine_grained_lineage.downstreamType + if isinstance(fine_grained_lineage.downstreamType, str) + else FineGrainedLineageDownstreamType.FIELD_SET + ) + return transform_op, upstream_type, downstream_type + + @staticmethod + def quote_fine_grained_downstream_path( + transform_op: str, downstream_type: str, downstream_urn: str + ) -> str: + return ( + f"/fineGrainedLineages/{quote(transform_op, safe='')}/downstreamType/" + f"{quote(downstream_type, safe='')}/{quote(downstream_urn, safe='')}" + ) + + @staticmethod + def quote_fine_grained_upstream_path( + transform_op: str, upstream_type: str, upstream_urn: str + ) -> str: + return ( + f"/fineGrainedLineages/{quote(transform_op, safe='')}/upstreamType/" + f"{quote(upstream_type, safe='')}/{quote(upstream_urn, safe='')}" + ) + + def remove_fine_grained_upstream_lineage( + self, fine_grained_lineage: FineGrainedLineage + ) -> "DatasetPatchBuilder": + ( + transform_op, + upstream_type, + downstream_type, + ) = DatasetPatchBuilder.get_fine_grained_key(fine_grained_lineage) + for upstream_urn in fine_grained_lineage.upstreams or []: + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "remove", + path=DatasetPatchBuilder.quote_fine_grained_upstream_path( + transform_op, upstream_type, upstream_urn + ), + value={}, + ) + for downstream_urn in fine_grained_lineage.downstreams or []: + 
self._add_patch( + UpstreamLineage.ASPECT_NAME, + "remove", + path=DatasetPatchBuilder.quote_fine_grained_downstream_path( + transform_op, downstream_type, downstream_urn + ), + value={}, + ) + return self + + def set_fine_grained_upstream_lineages( + self, fine_grained_lineages: List[FineGrainedLineage] + ) -> "DatasetPatchBuilder": + self._add_patch( + UpstreamLineage.ASPECT_NAME, + "add", + path="/fineGrainedLineages", + value=fine_grained_lineages, + ) + return self + def add_tag(self, tag: Tag) -> "DatasetPatchBuilder": self._add_patch( GlobalTags.ASPECT_NAME, "add", path=f"/tags/{tag.tag}", value=tag diff --git a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json index d5dfe125942fb..ed5a7723ac2bf 100644 --- a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json +++ b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json @@ -42,26 +42,31 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)", - "changeType": "PATCH", - "aspectName": "upstreamLineage", - "aspect": { - "json": [ - { - "op": "add", - "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2Cfct_users_created_upstream%2CPROD%29", - "value": { - "auditStamp": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "dataset": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created_upstream,PROD)", - "type": "TRANSFORMED" - } - } - ] - } + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)", + "changeType": "PATCH", + "aspectName": "upstreamLineage", + "aspect": { + "json": [ + { + "op": "add", + "path": "/upstreams/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2Cfct_users_created_upstream%2CPROD%29", + "value": { + "auditStamp": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, + "dataset": "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created_upstream,PROD)", + "type": "TRANSFORMED" + } + }, + { + "op": "add", + "path": "/fineGrainedLineages/TRANSFORM/upstreamType/DATASET/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2Cfct_users_created_upstream%2CPROD%29", + "value": 1.0 + } + ] + } }, { "entityType": "dataset", diff --git a/metadata-ingestion/tests/unit/patch/test_patch_builder.py b/metadata-ingestion/tests/unit/patch/test_patch_builder.py index 0701b3d696895..f05c4978f8644 100644 --- a/metadata-ingestion/tests/unit/patch/test_patch_builder.py +++ b/metadata-ingestion/tests/unit/patch/test_patch_builder.py @@ -7,6 +7,9 @@ from datahub.ingestion.sink.file import write_metadata_file from datahub.metadata.schema_classes import ( DatasetLineageTypeClass, + FineGrainedLineageClass, + FineGrainedLineageDownstreamTypeClass, + FineGrainedLineageUpstreamTypeClass, GenericAspectClass, MetadataChangeProposalClass, TagAssociationClass, @@ -53,6 +56,19 @@ def test_complex_dataset_patch( type=DatasetLineageTypeClass.TRANSFORMED, ) ) + .add_fine_grained_upstream_lineage( + fine_grained_lineage=FineGrainedLineageClass( + upstreamType=FineGrainedLineageUpstreamTypeClass.DATASET, + upstreams=[ + make_dataset_urn( + platform="hive", name="fct_users_created_upstream", env="PROD" + ) + ], + downstreamType=FineGrainedLineageDownstreamTypeClass.FIELD_SET, + transformOperation="TRANSFORM", + confidenceScore=1.0, + ) + ) ) patcher.for_field("field1").add_tag(TagAssociationClass(tag=make_tag_urn("tag1"))) diff --git 
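The Python additions above give `DatasetPatchBuilder` first-class fine-grained (column-level) lineage support. A minimal sketch of how the new method might be driven end to end; the GMS address, dataset names, and column name are illustrative assumptions, not part of the patch:

```python
# Sketch: emit a column-level lineage patch using the new builder method above.
# Assumes a DataHub GMS at localhost:8080; all urns below are made up.
from datahub.emitter.mce_builder import make_dataset_urn, make_schema_field_urn
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.metadata.schema_classes import (
    FineGrainedLineageClass,
    FineGrainedLineageDownstreamTypeClass,
    FineGrainedLineageUpstreamTypeClass,
)
from datahub.specific.dataset import DatasetPatchBuilder

upstream = make_dataset_urn(platform="hive", name="fct_users_created_upstream", env="PROD")
downstream = make_dataset_urn(platform="hive", name="fct_users_created", env="PROD")

patch_builder = DatasetPatchBuilder(downstream).add_fine_grained_upstream_lineage(
    FineGrainedLineageClass(
        upstreamType=FineGrainedLineageUpstreamTypeClass.FIELD_SET,
        upstreams=[make_schema_field_urn(upstream, "c1")],
        downstreamType=FineGrainedLineageDownstreamTypeClass.FIELD_SET,
        downstreams=[make_schema_field_urn(downstream, "c1")],
        transformOperation="TRANSFORM",
        confidenceScore=1.0,
    )
)

emitter = DatahubRestEmitter(gms_server="http://localhost:8080")
for mcp in patch_builder.build():
    emitter.emit(mcp)
```

Each upstream and downstream urn becomes its own `add` operation, keyed by transform operation and lineage type exactly as in the quoted-path helpers above.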
a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java index 6ded8a25b4e22..9db2ebc522e09 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java @@ -5,10 +5,14 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.common.urn.Urn; import com.linkedin.dataset.DatasetLineageType; +import com.linkedin.dataset.FineGrainedLineageDownstreamType; +import com.linkedin.dataset.FineGrainedLineageUpstreamType; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.ToString; import org.apache.commons.lang3.tuple.ImmutableTriple; @@ -16,7 +20,8 @@ public class UpstreamLineagePatchBuilder extends AbstractMultiFieldPatchBuilder { - private static final String PATH_START = "/upstreams/"; + private static final String UPSTREAMS_PATH_START = "/upstreams/"; + private static final String FINE_GRAINED_PATH_START = "/fineGrainedLineages/"; private static final String DATASET_KEY = "dataset"; private static final String AUDIT_STAMP_KEY = "auditStamp"; private static final String TIME_KEY = "time"; @@ -34,13 +39,233 @@ public UpstreamLineagePatchBuilder addUpstream( .set(AUDIT_STAMP_KEY, auditStamp); pathValues.add( - ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), UPSTREAMS_PATH_START + datasetUrn, value)); return this; } public UpstreamLineagePatchBuilder removeUpstream(@Nonnull DatasetUrn datasetUrn) { pathValues.add( - ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), UPSTREAMS_PATH_START + datasetUrn, null)); + return this; + } + + /** + * Method for adding an upstream FineGrained Dataset + * + * @param datasetUrn dataset to be set as upstream + * @param confidenceScore optional, confidence score for the lineage edge. Defaults to 1.0 for + * full confidence + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @return this builder + */ + public UpstreamLineagePatchBuilder addFineGrainedUpstreamDataset( + @Nonnull DatasetUrn datasetUrn, + @Nullable Float confidenceScore, + @Nonnull String transformationOperation) { + Float finalConfidenceScore = getConfidenceScoreOrDefault(confidenceScore); + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + "DATASET" + + "/" + + datasetUrn, + instance.numberNode(finalConfidenceScore))); + return this; + } + + /** + * Adds a field as a fine grained upstream + * + * @param schemaFieldUrn a schema field to be marked as upstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param confidenceScore optional, confidence score for the lineage edge. 
Defaults to 1.0 for + * full confidence + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the upstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder addFineGrainedUpstreamField( + @Nonnull Urn schemaFieldUrn, + @Nullable Float confidenceScore, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageUpstreamType type) { + Float finalConfidenceScore = getConfidenceScoreOrDefault(confidenceScore); + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageUpstreamType.FIELD_SET.toString(); + } else { + finalType = type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + instance.numberNode(finalConfidenceScore))); + + return this; + } + + /** + * Adds a field as a fine grained downstream + * + * @param schemaFieldUrn a schema field to be marked as downstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param confidenceScore optional, confidence score for the lineage edge. Defaults to 1.0 for + * full confidence + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the downstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder addFineGrainedDownstreamField( + @Nonnull Urn schemaFieldUrn, + @Nullable Float confidenceScore, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageDownstreamType type) { + Float finalConfidenceScore = getConfidenceScoreOrDefault(confidenceScore); + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageDownstreamType.FIELD_SET.toString(); + } else { + finalType = type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "downstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + instance.numberNode(finalConfidenceScore))); + return this; + } + + private Float getConfidenceScoreOrDefault(@Nullable Float confidenceScore) { + float finalConfidenceScore; + if (confidenceScore != null && confidenceScore > 0 && confidenceScore <= 1.0f) { + finalConfidenceScore = confidenceScore; + } else { + finalConfidenceScore = 1.0f; + } + + return finalConfidenceScore; + } + + /** + * Removes a field as a fine grained upstream + * + * @param schemaFieldUrn a schema field to be marked as upstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the upstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder removeFineGrainedUpstreamField( + @Nonnull Urn schemaFieldUrn, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageUpstreamType type) { + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageUpstreamType.FIELD_SET.toString(); + } else { + finalType = 
type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + null)); + + return this; + } + + public UpstreamLineagePatchBuilder removeFineGrainedUpstreamDataset( + @Nonnull DatasetUrn datasetUrn, @Nonnull String transformationOperation) { + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "upstreamType" + + "/" + + "DATASET" + + "/" + + datasetUrn, + null)); + return this; + } + + /** + * Removes a field as a fine grained downstream + * + * @param schemaFieldUrn a schema field to be marked as downstream, format: + * urn:li:schemaField(DATASET_URN, COLUMN NAME) + * @param transformationOperation string operation type that describes the transformation + * operation happening in the lineage edge + * @param type the downstream lineage type, either Field or Field Set + * @return this builder + */ + public UpstreamLineagePatchBuilder removeFineGrainedDownstreamField( + @Nonnull Urn schemaFieldUrn, + @Nonnull String transformationOperation, + @Nullable FineGrainedLineageDownstreamType type) { + String finalType; + if (type == null) { + // Default to set of fields if not explicitly a single field + finalType = FineGrainedLineageDownstreamType.FIELD_SET.toString(); + } else { + finalType = type.toString(); + } + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + FINE_GRAINED_PATH_START + + transformationOperation + + "/" + + "downstreamType" + + "/" + + finalType + + "/" + + schemaFieldUrn, + null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index 1d387acb0ce12..563742990f546 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -14,6 +14,7 @@ import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dataset.DatasetLineageType; import com.linkedin.metadata.graph.LineageDirection; @@ -49,15 +50,21 @@ public class PatchTest { public void testLocalUpstream() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { + DatasetUrn upstreamUrn = + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"); + Urn schemaFieldUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD), foo)"); MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() .urn( UrnUtils.getUrn( "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addUpstream( - DatasetUrn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), - DatasetLineageType.TRANSFORMED) + .addUpstream(upstreamUrn, DatasetLineageType.TRANSFORMED) + .addFineGrainedUpstreamDataset(upstreamUrn, null, "TRANSFORM") + .addFineGrainedUpstreamField(schemaFieldUrn, null, "TRANSFORM", null) + .addFineGrainedDownstreamField(schemaFieldUrn, null, "TRANSFORM", null) .build(); Future response =
restEmitter.emit(upstreamPatch); @@ -73,6 +80,12 @@ public void testLocalUpstream() { public void testLocalUpstreamRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { + DatasetUrn upstreamUrn = + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"); + Urn schemaFieldUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD), foo)"); MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() .urn( @@ -81,6 +94,9 @@ public void testLocalUpstreamRemove() { .removeUpstream( DatasetUrn.createFromString( "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .removeFineGrainedUpstreamDataset(upstreamUrn, "TRANSFORM") + .removeFineGrainedUpstreamField(schemaFieldUrn, "TRANSFORM", null) + .removeFineGrainedDownstreamField(schemaFieldUrn, "TRANSFORM", null) .build(); Future response = restEmitter.emit(upstreamPatch); From 79ccbc57d1c3266025c8e52ce18fbfcff550c387 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Mon, 11 Dec 2023 14:41:23 -0800 Subject: [PATCH 078/263] fix(CVE-2023-6378): update logback classic (#9438) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index f5e5403e822e7..b16e3ca169c71 100644 --- a/build.gradle +++ b/build.gradle @@ -16,7 +16,7 @@ buildscript { ext.playVersion = '2.8.18' ext.log4jVersion = '2.19.0' ext.slf4jVersion = '1.7.36' - ext.logbackClassic = '1.2.12' + ext.logbackClassic = '1.2.13' ext.hadoop3Version = '3.3.5' ext.kafkaVersion = '2.3.0' ext.hazelcastVersion = '5.3.6' From ee4e8dd74c569d0dfc98e8eb13034c91b0ad61a8 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:03:30 +0530 Subject: [PATCH 079/263] feat: allow the sidebar size to be draggable (#9401) --- .../src/app/search/SearchResults.tsx | 2 +- .../src/app/search/sidebar/BrowseSidebar.tsx | 51 ++++++++++++------- .../src/app/search/sidebar/EntityNode.tsx | 3 +- .../cypress/cypress/e2e/browse/browseV2.js | 10 ++-- 4 files changed, 41 insertions(+), 25 deletions(-) diff --git a/datahub-web-react/src/app/search/SearchResults.tsx b/datahub-web-react/src/app/search/SearchResults.tsx index 56e83e4235027..d7ad6d517d8fe 100644 --- a/datahub-web-react/src/app/search/SearchResults.tsx +++ b/datahub-web-react/src/app/search/SearchResults.tsx @@ -197,7 +197,7 @@ export const SearchResults = ({ {showBrowseV2 && ( - + )} diff --git a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx index 822e75b65febc..c16bcdcaf6c72 100644 --- a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx +++ b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useState } from 'react'; import styled from 'styled-components'; import { Typography } from 'antd'; import EntityNode from './EntityNode'; @@ -7,10 +7,16 @@ import SidebarLoadingError from './SidebarLoadingError'; import { SEARCH_RESULTS_BROWSE_SIDEBAR_ID } from '../../onboarding/config/SearchOnboardingConfig'; import useSidebarEntities from './useSidebarEntities'; import { ANTD_GRAY_V2 } from '../../entity/shared/constants'; +import { ProfileSidebarResizer } from '../../entity/shared/containers/profile/sidebar/ProfileSidebarResizer'; -const Sidebar = styled.div<{ visible: boolean; width: number }>` + +export const MAX_BROWSER_WIDTH = 
500; +export const MIN_BROWSWER_WIDTH = 200; + +export const SidebarWrapper = styled.div<{ visible: boolean; width: number }>` height: 100%; width: ${(props) => (props.visible ? `${props.width}px` : '0')}; + min-width: ${(props) => (props.visible ? `${props.width}px` : '0')}; transition: width 250ms ease-in-out; border-right: 1px solid ${(props) => props.theme.styles['border-color-base']}; background-color: ${ANTD_GRAY_V2[1]}; @@ -37,29 +43,38 @@ const SidebarBody = styled.div<{ visible: boolean }>` type Props = { visible: boolean; - width: number; }; -const BrowseSidebar = ({ visible, width }: Props) => { +const BrowseSidebar = ({ visible }: Props) => { const { error, entityAggregations, retry } = useSidebarEntities({ skip: !visible, }); + const [browserWidth, setBrowserWith] = useState(window.innerWidth * 0.2); return ( - - - Navigate - - - {entityAggregations && !entityAggregations.length &&
No results found
} - {entityAggregations?.map((entityAggregation) => ( - - - - ))} - {error && } -
-
+ <> + + + Navigate + + + {entityAggregations && !entityAggregations.length &&
No results found
} + {entityAggregations?.map((entityAggregation) => ( + + + + ))} + {error && } +
+
+ + setBrowserWith(Math.min(Math.max(widthProp, MIN_BROWSWER_WIDTH), MAX_BROWSER_WIDTH)) + } + initialSize={browserWidth} + isSidebarOnLeft + /> + ); }; diff --git a/datahub-web-react/src/app/search/sidebar/EntityNode.tsx b/datahub-web-react/src/app/search/sidebar/EntityNode.tsx index e04e4253dca13..627d19c4fb10c 100644 --- a/datahub-web-react/src/app/search/sidebar/EntityNode.tsx +++ b/datahub-web-react/src/app/search/sidebar/EntityNode.tsx @@ -38,7 +38,8 @@ const EntityNode = () => { onToggle: (isNowOpen: boolean) => trackToggleNodeEvent(isNowOpen, 'entity'), }); - const onClickHeader = () => { + const onClickHeader = (e) => { + e.preventDefault(); if (count) toggle(); }; diff --git a/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js b/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js index a61b9030b13c6..f45edc5fa0481 100644 --- a/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js +++ b/smoke-test/tests/cypress/cypress/e2e/browse/browseV2.js @@ -46,31 +46,31 @@ describe("search", () => { cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\d\d\dpx$/); + .should("match", /\d\d\dpx$/); cy.get("[data-testid=browse-v2-toggle").click(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\dpx$/); + .should("match", /\dpx$/); cy.reload(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\dpx$/); + .should("match", /\dpx$/); cy.get("[data-testid=browse-v2-toggle").click(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\d\d\dpx$/); + .should("match", /\d\d\dpx$/); cy.reload(); cy.get("[data-testid=browse-v2") .invoke("css", "width") - .should("match", /^\d\d\dpx$/); + .should("match", /\d\d\dpx$/); }); it("should take you to the old browse experience when clicking entity type on home page with the browse flag off", () => { From abbc4cdc577647d7b97a03117c4317805a3a8ce3 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Tue, 12 Dec 2023 17:26:29 +0530 Subject: [PATCH 080/263] fix(json-schema): do not send invalid URLs (#9417) --- .../ingestion/source/schema/json_schema.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py index f6e944f4fc3cb..c7e8a15d8dfa4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/schema/json_schema.py @@ -9,6 +9,7 @@ from os.path import basename, dirname from pathlib import Path from typing import Any, Iterable, List, Optional, Union +from urllib.parse import urlparse import jsonref from pydantic import AnyHttpUrl, DirectoryPath, FilePath, validator @@ -53,6 +54,16 @@ logger = logging.getLogger(__name__) +def is_url_valid(url: Optional[str]) -> bool: + if url is None: + return False + try: + result = urlparse(url) + return all([result.scheme, result.netloc]) + except Exception: + return False + + class URIReplacePattern(ConfigModel): match: str = Field( description="Pattern to match on uri-s as part of reference resolution. 
See replace field", @@ -281,12 +292,14 @@ def _load_one_file( entityUrn=dataset_urn, aspect=models.StatusClass(removed=False) ).as_workunit() + external_url = JsonSchemaTranslator._get_id_from_any_schema(schema_dict) + if not is_url_valid(external_url): + external_url = None + yield MetadataChangeProposalWrapper( entityUrn=dataset_urn, aspect=models.DatasetPropertiesClass( - externalUrl=JsonSchemaTranslator._get_id_from_any_schema( - schema_dict - ), + externalUrl=external_url, name=dataset_simple_name, description=JsonSchemaTranslator._get_description_from_any_schema( schema_dict From ffccc6556110ea197402ad1de72117ffd5509a8d Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Tue, 12 Dec 2023 18:31:58 +0100 Subject: [PATCH 081/263] fix(ingest/profiling) Fixing profile eligibility check (#9446) --- .../datahub/ingestion/source/sql/sql_generic_profiler.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index 844a458d9f1ab..a2f91e5fae1a9 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -274,16 +274,16 @@ def is_dataset_eligible_for_profiling( return False if self.config.profiling.profile_table_size_limit is not None and ( - size_in_bytes is None - or size_in_bytes / (2**30) + size_in_bytes is not None + and size_in_bytes / (2**30) > self.config.profiling.profile_table_size_limit ): self.report.profiling_skipped_size_limit[schema_name] += 1 return False if self.config.profiling.profile_table_row_limit is not None and ( - rows_count is None - or rows_count > self.config.profiling.profile_table_row_limit + rows_count is not None + and rows_count > self.config.profiling.profile_table_row_limit ): self.report.profiling_skipped_row_limit[schema_name] += 1 return False From 66f90c7ffd483f397c99dbf494280d3cd9ef10dd Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 12 Dec 2023 12:32:59 -0500 Subject: [PATCH 082/263] fix(ingest): avoid git dependency in dbt (#9447) --- metadata-ingestion/src/datahub/configuration/git.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index 9ea9007553839..a5f88744661a4 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -6,7 +6,6 @@ from datahub.configuration.common import ConfigModel from datahub.configuration.validate_field_rename import pydantic_renamed_field -from datahub.ingestion.source.git.git_import import GitClone _GITHUB_PREFIX = "https://github.com/" _GITLAB_PREFIX = "https://gitlab.com/" @@ -151,6 +150,9 @@ def clone( ) -> pathlib.Path: """Clones the repo into a temporary directory and returns the path to the checkout.""" + # We import this here to avoid a hard dependency on gitpython. 
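The small-looking change in the profiling fix above actually inverts a condition: a table whose size or row count is unknown used to be skipped, and is now profiled. A condensed, runnable sketch of the before/after predicate (names are illustrative):

```python
# Condensed model of the eligibility predicate changed above.
# True means the table is skipped because of the size limit.
from typing import Optional

GB = 2**30


def skipped_before(limit: Optional[int], size_in_bytes: Optional[int]) -> bool:
    return limit is not None and (size_in_bytes is None or size_in_bytes / GB > limit)


def skipped_after(limit: Optional[int], size_in_bytes: Optional[int]) -> bool:
    return limit is not None and (size_in_bytes is not None and size_in_bytes / GB > limit)


assert skipped_before(5, None) is True    # an unknown size used to be skipped
assert skipped_after(5, None) is False    # now such a table is profiled
assert skipped_after(5, 10 * GB) is True  # a known over-limit size is still skipped
```

The same inversion is applied to the row-count limit in the second hunk.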
+ from datahub.ingestion.source.git.git_import import GitClone + assert self.repo_ssh_locator git_clone = GitClone(str(tmp_path)) From 02982ed88600f9b11c2387e540299c437ca21ed6 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Tue, 12 Dec 2023 12:38:21 -0500 Subject: [PATCH 083/263] feat(ingest): add retries for tableau (#9437) --- .../src/datahub/ingestion/source/tableau.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index da44d09121c6c..f870e99df27c5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -21,7 +21,7 @@ import tableauserverclient as TSC from pydantic import root_validator, validator from pydantic.fields import Field -from requests.adapters import ConnectionError +from requests.adapters import ConnectionError, HTTPAdapter from tableauserverclient import ( PersonalAccessTokenAuth, Server, @@ -29,6 +29,7 @@ TableauAuth, ) from tableauserverclient.server.endpoint.exceptions import NonXMLResponseError +from urllib3 import Retry import datahub.emitter.mce_builder as builder import datahub.utilities.sqlglot_lineage as sqlglot_l @@ -174,6 +175,7 @@ class TableauConnectionConfig(ConfigModel): description="Unique relationship between the Tableau Server and site", ) + max_retries: int = Field(3, description="Number of retries for failed requests.") ssl_verify: Union[bool, str] = Field( default=True, description="Whether to verify SSL certificates. If using self-signed certificates, set to false or provide the path to the .pem certificate bundle.", @@ -224,6 +226,17 @@ def make_tableau_client(self) -> Server: # From https://stackoverflow.com/a/50159273/5004662. server._session.trust_env = False + # Setup request retries. + adapter = HTTPAdapter( + max_retries=Retry( + total=self.max_retries, + backoff_factor=1, + status_forcelist=[429, 500, 502, 503, 504], + ) + ) + server._session.mount("http://", adapter) + server._session.mount("https://", adapter) + server.auth.sign_in(authentication) return server except ServerResponseError as e: From 9899aca4995ec0bd5a7e3ccc6c7e1495b4ee78df Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 12 Dec 2023 12:16:27 -0600 Subject: [PATCH 084/263] docs(updating-datahub): update docs for v0.12.1 (#9441) --- docs/how/updating-datahub.md | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 94ab1b0611c33..36be572f2886e 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -7,15 +7,26 @@ This file documents any backwards-incompatible changes in DataHub and assists pe ### Breaking Changes - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. + +### Potential Downtime + +### Deprecations + +### Other Notable Changes + +## 0.12.1 + +### Breaking Changes + - #9244: The `redshift-legacy` and `redshift-legacy-usage` sources, which have been deprecated for >6 months, have been removed. The new `redshift` source is a superset of the functionality provided by those legacy sources. - `database_alias` config is no longer supported in SQL sources namely - Redshift, MySQL, Oracle, Postgres, Trino, Presto-on-hive. The config will automatically be ignored if it's present in your recipe. 
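The retry support added to the Tableau source above uses stock requests/urllib3 machinery rather than anything Tableau-specific. A self-contained sketch of the same wiring, where the retry count mirrors the new `max_retries` default and the target URL is a placeholder:

```python
# Sketch of the session-level retry wiring from the Tableau patch above.
import requests
from requests.adapters import HTTPAdapter
from urllib3 import Retry

session = requests.Session()
adapter = HTTPAdapter(
    max_retries=Retry(
        total=3,  # mirrors the new max_retries default
        backoff_factor=1,  # exponential backoff between attempts
        status_forcelist=[429, 500, 502, 503, 504],  # retry only on these statuses
    )
)
session.mount("http://", adapter)
session.mount("https://", adapter)
# Any request made through this session now retries transparently, e.g.
# session.get("https://tableau.example.com/api/...")
```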
It has been deprecated since v0.9.6. - #9257: The Python SDK urn types are now autogenerated. The new classes are largely backwards compatible with the previous, manually written classes, but many older methods are now deprecated in favor of a more uniform interface. The only breaking change is that the signature for the director constructor e.g. `TagUrn("tag", ["tag_name"])` is no longer supported, and the simpler `TagUrn("tag_name")` should be used instead. The canonical place to import the urn classes from is `datahub.metadata.urns.*`. Other import paths, like `datahub.utilities.urns.corpuser_urn.CorpuserUrn` are retained for backwards compatibility, but are considered deprecated. - #9286: The `DataHubRestEmitter.emit` method no longer returns anything. It previously returned a tuple of timestamps. - #8951: A great expectations based profiler has been added for the Unity Catalog source. -To use the old profiler, set `method: analyze` under the `profiling` section in your recipe. -To use the new profiler, set `method: ge`. Profiling is disabled by default, so to enable it, -one of these methods must be specified. + To use the old profiler, set `method: analyze` under the `profiling` section in your recipe. + To use the new profiler, set `method: ge`. Profiling is disabled by default, so to enable it, + one of these methods must be specified. ### Potential Downtime From eb8cbd8b4150b31429cf09158cb1113f275ac544 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Wed, 13 Dec 2023 12:19:49 +0530 Subject: [PATCH 085/263] feat: Allow specifying Data Product URN via UI (#9386) Co-authored-by: Aseem Bansal --- .../DataHubDataFetcherExceptionHandler.java | 40 +++++++---- .../CreateDataProductResolver.java | 1 + .../src/main/resources/entity.graphql | 4 ++ .../CreateDataProductModal.tsx | 5 +- .../DataProductAdvancedOption.tsx | 68 +++++++++++++++++++ .../DataProductBuilderForm.tsx | 11 ++- .../entity/domain/DataProductsTab/types.ts | 6 ++ .../metadata/service/DataProductService.java | 22 +++++- .../tests/privileges/test_privileges.py | 7 +- 9 files changed, 137 insertions(+), 27 deletions(-) create mode 100644 datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java index 7c3ea1d581b6e..746ce0cdc10fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java @@ -12,6 +12,8 @@ @Slf4j public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler { + private static final String DEFAULT_ERROR_MESSAGE = "An unknown error occurred."; + @Override public DataFetcherExceptionHandlerResult onException( DataFetcherExceptionHandlerParameters handlerParameters) { @@ -19,28 +21,40 @@ public DataFetcherExceptionHandlerResult onException( SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); - log.error("Failed to execute DataFetcher", exception); - DataHubGraphQLErrorCode errorCode = DataHubGraphQLErrorCode.SERVER_ERROR; - String message = "An unknown error occurred."; + String message = DEFAULT_ERROR_MESSAGE; - // 
note: make sure to access the true error message via `getCause()` - if (exception.getCause() instanceof IllegalArgumentException) { + IllegalArgumentException illException = + findFirstThrowableCauseOfClass(exception, IllegalArgumentException.class); + if (illException != null) { + log.error("Failed to execute", illException); errorCode = DataHubGraphQLErrorCode.BAD_REQUEST; - message = exception.getCause().getMessage(); + message = illException.getMessage(); } - if (exception instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception).errorCode(); - message = exception.getMessage(); + DataHubGraphQLException graphQLException = + findFirstThrowableCauseOfClass(exception, DataHubGraphQLException.class); + if (graphQLException != null) { + log.error("Failed to execute", graphQLException); + errorCode = graphQLException.errorCode(); + message = graphQLException.getMessage(); } - if (exception.getCause() instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception.getCause()).errorCode(); - message = exception.getCause().getMessage(); + if (illException == null && graphQLException == null) { + log.error("Failed to execute", exception); } - DataHubGraphQLError error = new DataHubGraphQLError(message, path, sourceLocation, errorCode); return DataFetcherExceptionHandlerResult.newResult().error(error).build(); } + + T findFirstThrowableCauseOfClass(Throwable throwable, Class clazz) { + while (throwable != null) { + if (clazz.isInstance(throwable)) { + return (T) throwable; + } else { + throwable = throwable.getCause(); + } + } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index 10c487a839f35..8ac7b2c3ce375 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -47,6 +47,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm try { final Urn dataProductUrn = _dataProductService.createDataProduct( + input.getId(), input.getProperties().getName(), input.getProperties().getDescription(), authentication); diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index feb344154d11e..307c7f7b383e3 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -11055,6 +11055,10 @@ input CreateDataProductInput { The primary key of the Domain """ domainUrn: String! 
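For the optional `id` being added to `CreateDataProductInput` just below, a hedged sketch of calling the mutation directly; the endpoint path, token handling, and all urn/name values are assumptions for illustration:

```python
# Sketch: create a data product with a caller-chosen id via the GraphQL API.
import requests

MUTATION = """
mutation createDataProduct($input: CreateDataProductInput!) {
  createDataProduct(input: $input) { urn }
}
"""

variables = {
    "input": {
        "domainUrn": "urn:li:domain:marketing",  # illustrative domain
        "id": "pricing-data-product",  # optional; a UUID is generated if omitted
        "properties": {"name": "Pricing", "description": "Example description"},
    }
}

resp = requests.post(
    "http://localhost:9002/api/graphql",  # assumed local DataHub endpoint
    headers={"Authorization": "Bearer <access-token>"},
    json={"query": MUTATION, "variables": variables},
)
resp.raise_for_status()
print(resp.json())
```

Reusing an existing id now fails fast with "This Data product already exists!" rather than silently overwriting, per the service change further down, and the reworked exception handler surfaces it as a 400-style bad request instead of a generic server error.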
+ """ + An optional id for the new data product + """ + id: String } """ diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx index 2d82521a90df5..0610fbfa7a770 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx @@ -32,6 +32,7 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on variables: { input: { domainUrn: domain.urn, + id: builderState.id, properties: { name: builderState.name, description: builderState.description || undefined, @@ -49,10 +50,10 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on onClose(); } }) - .catch(() => { + .catch(( error ) => { onClose(); message.destroy(); - message.error({ content: 'Failed to create Data Product. An unexpected error occurred' }); + message.error({ content: `Failed to create Data Product: ${error.message}.` }); }); } diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx new file mode 100644 index 0000000000000..a077a0308af1f --- /dev/null +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx @@ -0,0 +1,68 @@ +import React from "react"; +import { Collapse, Form, Input, Typography } from "antd"; +import styled from "styled-components"; +import { validateCustomUrnId } from '../../../shared/textUtil'; +import { DataProductBuilderFormProps } from "./types"; + + +const FormItem = styled(Form.Item)` + .ant-form-item-label { + padding-bottom: 2px; + } +`; + +const FormItemWithMargin = styled(FormItem)` + margin-bottom: 16px; +`; + +const FormItemNoMargin = styled(FormItem)` + margin-bottom: 0; +`; + +const AdvancedLabel = styled(Typography.Text)` + color: #373d44; +`; + +export function DataProductAdvancedOption({builderState, updateBuilderState }: DataProductBuilderFormProps){ + + function updateDataProductId(id: string) { + updateBuilderState({ + ...builderState, + id, + }); + } + + return ( + + Advanced Options} key="1"> + Data Product Id} + help="By default, a random UUID will be generated to uniquely identify this data product. If + you'd like to provide a custom id instead to more easily keep track of this data product, + you may provide it here. Be careful, you cannot easily change the data product id after + creation." 
+ > + ({ + validator(_, value) { + if (value && validateCustomUrnId(value)) { + return Promise.resolve(); + } + return Promise.reject(new Error('Please enter a valid Data product id')); + }, + }), + ]} + > + updateDataProductId(e.target.value)} + /> + + + + + ) +} \ No newline at end of file diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx index b5a27a6e1b876..98bb09098a36e 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx @@ -3,18 +3,14 @@ import React from 'react'; import styled from 'styled-components'; import { Editor as MarkdownEditor } from '../../shared/tabs/Documentation/components/editor/Editor'; import { ANTD_GRAY } from '../../shared/constants'; -import { DataProductBuilderState } from './types'; +import { DataProductBuilderFormProps } from './types'; +import { DataProductAdvancedOption } from './DataProductAdvancedOption'; const StyledEditor = styled(MarkdownEditor)` border: 1px solid ${ANTD_GRAY[4]}; `; -type Props = { - builderState: DataProductBuilderState; - updateBuilderState: (newState: DataProductBuilderState) => void; -}; - -export default function DataProductBuilderForm({ builderState, updateBuilderState }: Props) { +export default function DataProductBuilderForm({ builderState, updateBuilderState }: DataProductBuilderFormProps) { function updateName(name: string) { updateBuilderState({ ...builderState, @@ -47,6 +43,7 @@ export default function DataProductBuilderForm({ builderState, updateBuilderStat Description}> + ); } diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts index 1ed3ede39cfbe..fe22e3ed9a2a4 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts @@ -1,4 +1,10 @@ export type DataProductBuilderState = { name: string; + id?: string; description?: string; }; + +export type DataProductBuilderFormProps = { + builderState: DataProductBuilderState; + updateBuilderState: (newState: DataProductBuilderState) => void; +}; \ No newline at end of file diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java index 10016ee89605b..d60427a27a5c5 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,6 +24,7 @@ import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.r2.RemoteInvocationException; import java.util.List; import java.util.Objects; import java.util.UUID; @@ -58,11 +61,26 @@ public DataProductService(@Nonnull EntityClient entityClient, @Nonnull GraphClie * @return the urn of the newly created DataProduct */ public Urn 
createDataProduct( - @Nullable String name, @Nullable String description, @Nonnull Authentication authentication) { + @Nullable String id, + @Nullable String name, + @Nullable String description, + @Nonnull Authentication authentication) { // 1. Generate a unique id for the new DataProduct. final DataProductKey key = new DataProductKey(); - key.setId(UUID.randomUUID().toString()); + if (id != null && !id.isBlank()) { + key.setId(id); + } else { + key.setId(UUID.randomUUID().toString()); + } + try { + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, DATA_PRODUCT_ENTITY_NAME), authentication)) { + throw new IllegalArgumentException("This Data product already exists!"); + } + } catch (RemoteInvocationException e) { + throw new RuntimeException("Unable to check for existence of Data Product!"); + } // 2. Create a new instance of DataProductProperties final DataProductProperties properties = new DataProductProperties(); diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index aa54a50b04e7f..75e2265f1f555 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -63,7 +63,7 @@ def _ensure_cant_perform_action(session, json,assertion_key): action_response.raise_for_status() action_data = action_response.json() - assert action_data["errors"][0]["extensions"]["code"] == 403 + assert action_data["errors"][0]["extensions"]["code"] == 403, action_data["errors"][0] assert action_data["errors"][0]["extensions"]["type"] == "UNAUTHORIZED" assert action_data["data"][assertion_key] == None @@ -367,8 +367,9 @@ def test_privilege_to_create_and_manage_policies(): # Verify new user can't create a policy create_policy = { - "query": """mutation createPolicy($input: PolicyUpdateInput!) {\n - createPolicy(input: $input) }""", + "query": """mutation createPolicy($input: PolicyUpdateInput!) 
{ + createPolicy(input: $input) + }""", "variables": { "input": { "type": "PLATFORM", From 5af799ee892a0a1f9655ff569c4da63ffa976e52 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Wed, 13 Dec 2023 14:31:24 +0530 Subject: [PATCH 086/263] feat(ownership): add button to copy urn of an Ownership Type (#9452) --- .../entity/ownership/table/ActionsColumn.tsx | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx index 41e07520a0ece..e08853ad150bf 100644 --- a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx +++ b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { Dropdown, MenuProps, Popconfirm, Typography, message, notification } from 'antd'; -import { DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; +import { CopyOutlined, DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; import styled from 'styled-components/macro'; import { OwnershipTypeEntity } from '../../../../types.generated'; import { useDeleteOwnershipTypeMutation } from '../../../../graphql/ownership.generated'; @@ -48,6 +48,10 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe setOwnershipType(ownershipType); }; + const onCopy=() => { + navigator.clipboard.writeText(ownershipType.urn); + } + const [deleteOwnershipTypeMutation] = useDeleteOwnershipTypeMutation(); const onDelete = () => { @@ -106,6 +110,15 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe ), }, + { + key: 'copy', + icon: ( + + + Copy Urn + + ), + }, ]; const onClick: MenuProps['onClick'] = (e) => { @@ -113,6 +126,9 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe if (key === 'edit') { editOnClick(); } + else if( key === 'copy') { + onCopy(); + } }; const menuProps: MenuProps = { From a92230b32162dc26776210a3278eadaafaa6e08e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EA=B9=80=EA=B0=80=EC=9C=A4?= <60080153+KaYunKIM@users.noreply.github.com> Date: Thu, 14 Dec 2023 02:30:18 +0900 Subject: [PATCH 087/263] docs(ingest/tableau): add token to sink config in sample recipe (#9411) Co-authored-by: KaYunKIM Co-authored-by: Harshal Sheth --- metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml b/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml index ed6567b5889df..a9db27bb52a23 100644 --- a/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml +++ b/metadata-ingestion/examples/recipes/tableau_to_datahub.dhub.yaml @@ -18,3 +18,4 @@ sink: type: "datahub-rest" config: server: "http://localhost:8080" + token: token_value # optional From 3cde9549a290d2560d9eebaa4fc5a3521266a841 Mon Sep 17 00:00:00 2001 From: allizex <150264485+allizex@users.noreply.github.com> Date: Wed, 13 Dec 2023 20:26:45 +0100 Subject: [PATCH 088/263] feat(glossary): add ability to clone glossary term(name and documentation) from term profile menu (#9445) Co-authored-by: Olga Dimova <38855943+olgadimova@users.noreply.github.com> --- .../glossaryTerm/GlossaryTermEntity.tsx | 7 +++- .../CreateGlossaryEntityModal.tsx | 34 ++++++++++++++++--- .../shared/EntityDropdown/EntityDropdown.tsx | 22 ++++++++++++ 
.../src/app/entity/shared/types.ts | 1 + 4 files changed, 59 insertions(+), 5 deletions(-) diff --git a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx index 080ee5889aec9..a6f6d9b0e2867 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx @@ -65,7 +65,12 @@ export class GlossaryTermEntity implements Entity { useEntityQuery={useGetGlossaryTermQuery as any} headerActionItems={new Set([EntityActionItem.BATCH_ADD_GLOSSARY_TERM])} headerDropdownItems={ - new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.MOVE, EntityMenuItems.DELETE]) + new Set([ + EntityMenuItems.UPDATE_DEPRECATION, + EntityMenuItems.CLONE, + EntityMenuItems.MOVE, + EntityMenuItems.DELETE, + ]) } isNameEditable hideBrowseBar diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx index 9788d36af2c65..d60e86b0af8ca 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx @@ -1,8 +1,9 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components/macro'; import { EditOutlined } from '@ant-design/icons'; import { message, Button, Input, Modal, Typography, Form, Collapse } from 'antd'; import DOMPurify from 'dompurify'; +import { useHistory } from 'react-router'; import { useCreateGlossaryTermMutation, useCreateGlossaryNodeMutation, @@ -16,6 +17,7 @@ import DescriptionModal from '../components/legacy/DescriptionModal'; import { validateCustomUrnId } from '../../../shared/textUtil'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getGlossaryRootToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; +import { getEntityPath } from '../containers/profile/utils'; const StyledItem = styled(Form.Item)` margin-bottom: 0; @@ -33,6 +35,7 @@ interface Props { entityType: EntityType; onClose: () => void; refetchData?: () => void; + isCloning?: boolean; } function CreateGlossaryEntityModal(props: Props) { @@ -43,15 +46,31 @@ function CreateGlossaryEntityModal(props: Props) { const entityRegistry = useEntityRegistry(); const [stagedId, setStagedId] = useState(undefined); const [stagedName, setStagedName] = useState(''); - const [selectedParentUrn, setSelectedParentUrn] = useState(entityData.urn); + const [selectedParentUrn, setSelectedParentUrn] = useState(props.isCloning ? 
'' : entityData.urn); const [documentation, setDocumentation] = useState(''); const [isDocumentationModalVisible, setIsDocumentationModalVisible] = useState(false); const [createButtonDisabled, setCreateButtonDisabled] = useState(true); const refetch = useRefetch(); + const history = useHistory(); const [createGlossaryTermMutation] = useCreateGlossaryTermMutation(); const [createGlossaryNodeMutation] = useCreateGlossaryNodeMutation(); + useEffect(() => { + if (props.isCloning && entityData.entityData) { + const { properties } = entityData.entityData; + + if (properties?.name) { + setStagedName(properties.name); + form.setFieldValue('name', properties.name); + } + + if (properties?.description) { + setDocumentation(properties.description); + } + } + }, [props.isCloning, entityData.entityData, form]); + function createGlossaryEntity() { const mutation = entityType === EntityType.GlossaryTerm ? createGlossaryTermMutation : createGlossaryNodeMutation; @@ -67,7 +86,7 @@ function CreateGlossaryEntityModal(props: Props) { }, }, }) - .then(() => { + .then((res) => { message.loading({ content: 'Updating...', duration: 2 }); setTimeout(() => { analytics.event({ @@ -82,12 +101,19 @@ function CreateGlossaryEntityModal(props: Props) { refetch(); if (isInGlossaryContext) { // either refresh this current glossary node or the root nodes or root terms - const nodeToUpdate = entityData?.urn || getGlossaryRootToUpdate(entityType); + const nodeToUpdate = selectedParentUrn || getGlossaryRootToUpdate(entityType); updateGlossarySidebar([nodeToUpdate], urnsToUpdate, setUrnsToUpdate); } if (refetchData) { refetchData(); } + if (props.isCloning) { + const redirectUrn = + entityType === EntityType.GlossaryTerm + ? res.data?.createGlossaryTerm + : res.data?.createGlossaryNode; + history.push(getEntityPath(entityType, redirectUrn, entityRegistry, false, false)); + } }, 2000); }) .catch((e) => { diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx index 5d4f9d9f875cf..8d7f1cca9c1cb 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx @@ -9,6 +9,7 @@ import { LinkOutlined, MoreOutlined, PlusOutlined, + CopyOutlined, } from '@ant-design/icons'; import { Redirect } from 'react-router'; import { EntityType } from '../../../../types.generated'; @@ -32,6 +33,7 @@ export enum EntityMenuItems { ADD_TERM_GROUP, DELETE, MOVE, + CLONE, } export const MenuIcon = styled(MoreOutlined)<{ fontSize?: number }>` @@ -107,6 +109,7 @@ function EntityDropdown(props: Props) { const [isCreateTermModalVisible, setIsCreateTermModalVisible] = useState(false); const [isCreateNodeModalVisible, setIsCreateNodeModalVisible] = useState(false); + const [isCloneEntityModalVisible, setIsCloneEntityModalVisible] = useState(false); const [isDeprecationModalVisible, setIsDeprecationModalVisible] = useState(false); const [isMoveModalVisible, setIsMoveModalVisible] = useState(false); @@ -230,6 +233,17 @@ function EntityDropdown(props: Props) { )} + {menuItems.has(EntityMenuItems.CLONE) && ( + setIsCloneEntityModalVisible(true)} + > + +  Clone + + + )} } trigger={['click']} @@ -250,6 +264,14 @@ function EntityDropdown(props: Props) { refetchData={refetchForNodes} /> )} + {isCloneEntityModalVisible && ( + setIsCloneEntityModalVisible(false)} + refetchData={entityType === EntityType.GlossaryTerm ? 
refetchForTerms : refetchForNodes} + isCloning + /> + )} {isDeprecationModalVisible && ( ; properties?: Maybe<{ + name?: Maybe; description?: Maybe; qualifiedName?: Maybe; sourceUrl?: Maybe; From a495d652e0e08885ce35eb3110a27853c2c05071 Mon Sep 17 00:00:00 2001 From: skrydal Date: Wed, 13 Dec 2023 20:34:20 +0100 Subject: [PATCH 089/263] feat(ingestion): Add typeUrn handling to ownership transformers (#9370) --- .../docs/transformer/dataset_transformer.md | 32 +++++++------- .../src/datahub/emitter/mce_builder.py | 31 ++++++------- .../transformer/add_dataset_ownership.py | 34 +++++--------- .../tests/unit/test_pipeline.py | 5 ++- .../tests/unit/test_transform_dataset.py | 44 ++++++++++++++++++- 5 files changed, 86 insertions(+), 60 deletions(-) diff --git a/metadata-ingestion/docs/transformer/dataset_transformer.md b/metadata-ingestion/docs/transformer/dataset_transformer.md index d1a1555a3ca02..1c84a2759d23e 100644 --- a/metadata-ingestion/docs/transformer/dataset_transformer.md +++ b/metadata-ingestion/docs/transformer/dataset_transformer.md @@ -55,12 +55,12 @@ transformers: ``` ## Simple Add Dataset ownership ### Config Details -| Field | Required | Type | Default | Description | -|-----------------------------|----------|--------------|---------------|------------------------------------------------------------------| -| `owner_urns` | ✅ | list[string] | | List of owner urns. | -| `ownership_type` | | string | `DATAOWNER` | ownership type of the owners. | -| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | -| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | +| Field | Required | Type | Default | Description | +|--------------------|----------|--------------|-------------|---------------------------------------------------------------------| +| `owner_urns` | ✅ | list[string] | | List of owner urns. | +| `ownership_type` | | string | "DATAOWNER" | ownership type of the owners (either as enum or ownership type urn) | +| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | +| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | For transformer behaviour on `replace_existing` and `semantics`, please refer section [Relationship Between replace_existing And semantics](#relationship-between-replace_existing-and-semantics). @@ -95,7 +95,7 @@ transformers: - "urn:li:corpuser:username1" - "urn:li:corpuser:username2" - "urn:li:corpGroup:groupname" - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owners, however overwrite the owners available for the dataset on DataHub GMS ```yaml @@ -107,7 +107,7 @@ transformers: - "urn:li:corpuser:username1" - "urn:li:corpuser:username2" - "urn:li:corpGroup:groupname" - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owners, however keep the owners available for the dataset on DataHub GMS ```yaml @@ -124,12 +124,12 @@ transformers: ## Pattern Add Dataset ownership ### Config Details -| Field | Required | Type | Default | Description | -|-----------------------------|--------- |-----------------------|------------------|-----------------------------------------------------------------------------------------| -| `owner_pattern` | ✅ | map[regx, list[urn]] | | entity urn with regular expression and list of owners urn apply to matching entity urn. 
| -| `ownership_type` | | string | `DATAOWNER` | ownership type of the owners. | -| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | -| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | +| Field | Required | Type | Default | Description | +|--------------------|----------|----------------------|-------------|-----------------------------------------------------------------------------------------| +| `owner_pattern` | ✅ | map[regx, list[urn]] | | entity urn with regular expression and list of owners urn apply to matching entity urn. | +| `ownership_type` | | string | "DATAOWNER" | ownership type of the owners (either as enum or ownership type urn) | +| `replace_existing` | | boolean | `false` | Whether to remove owners from entity sent by ingestion source. | +| `semantics` | | enum | `OVERWRITE` | Whether to OVERWRITE or PATCH the entity present on DataHub GMS. | let’s suppose we’d like to append a series of users who we know to own a different dataset from a data source but aren't detected during normal ingestion. To do so, we can use the `pattern_add_dataset_ownership` module that’s included in the ingestion framework. This will match the pattern to `urn` of the dataset and assign the respective owners. @@ -158,7 +158,7 @@ The config, which we’d append to our ingestion recipe YAML, would look like th rules: ".*example1.*": ["urn:li:corpuser:username1"] ".*example2.*": ["urn:li:corpuser:username2"] - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owner, however overwrite the owners available for the dataset on DataHub GMS ```yaml @@ -170,7 +170,7 @@ The config, which we’d append to our ingestion recipe YAML, would look like th rules: ".*example1.*": ["urn:li:corpuser:username1"] ".*example2.*": ["urn:li:corpuser:username2"] - ownership_type: "PRODUCER" + ownership_type: "urn:li:ownershipType:__system__producer" ``` - Add owner, however keep the owners available for the dataset on DataHub GMS ```yaml diff --git a/metadata-ingestion/src/datahub/emitter/mce_builder.py b/metadata-ingestion/src/datahub/emitter/mce_builder.py index 64c9ec1bb5704..3b2c87ea25a31 100644 --- a/metadata-ingestion/src/datahub/emitter/mce_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mce_builder.py @@ -9,12 +9,13 @@ from typing import ( TYPE_CHECKING, Any, + Iterable, List, Optional, + Tuple, Type, TypeVar, Union, - cast, get_type_hints, ) @@ -342,26 +343,20 @@ def make_ml_model_group_urn(platform: str, group_name: str, env: str) -> str: ) -def is_valid_ownership_type(ownership_type: Optional[str]) -> bool: - return ownership_type is not None and ownership_type in [ - OwnershipTypeClass.TECHNICAL_OWNER, - OwnershipTypeClass.BUSINESS_OWNER, - OwnershipTypeClass.DATA_STEWARD, - OwnershipTypeClass.NONE, - OwnershipTypeClass.DEVELOPER, - OwnershipTypeClass.DATAOWNER, - OwnershipTypeClass.DELEGATE, - OwnershipTypeClass.PRODUCER, - OwnershipTypeClass.CONSUMER, - OwnershipTypeClass.STAKEHOLDER, +def get_class_fields(_class: Type[object]) -> Iterable[str]: + return [ + f + for f in dir(_class) + if not callable(getattr(_class, f)) and not f.startswith("_") ] -def validate_ownership_type(ownership_type: Optional[str]) -> str: - if is_valid_ownership_type(ownership_type): - return cast(str, ownership_type) - else: - raise ValueError(f"Unexpected ownership type: {ownership_type}") +def validate_ownership_type(ownership_type: str) -> Tuple[str, Optional[str]]: 
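The body of `validate_ownership_type` follows; its new contract, where plain enum names pass through while `urn:li:ownershipType:…` values map to `CUSTOM` plus a `typeUrn`, can be pinned down with a couple of assertions (a sketch against the changed function):

```python
# Sketch of the new validate_ownership_type contract introduced in this hunk.
from datahub.emitter.mce_builder import validate_ownership_type

# A plain OwnershipTypeClass constant is returned as-is, with no typeUrn.
assert validate_ownership_type("DATAOWNER") == ("DATAOWNER", None)

# An ownership-type urn becomes (CUSTOM, <urn>), which feeds OwnerClass.typeUrn
# in the ownership transformers below.
assert validate_ownership_type("urn:li:ownershipType:__system__producer") == (
    "CUSTOM",
    "urn:li:ownershipType:__system__producer",
)
```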
+ if ownership_type.startswith("urn:li:"): + return OwnershipTypeClass.CUSTOM, ownership_type + if ownership_type in get_class_fields(OwnershipTypeClass): + return ownership_type, None + raise ValueError(f"Unexpected ownership type: {ownership_type}") def make_lineage_mce( diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py index 71cf6cfa7e92b..73cb8e4d6739b 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_ownership.py @@ -14,11 +14,8 @@ from datahub.ingestion.transformer.dataset_transformer import ( DatasetOwnershipTransformer, ) -from datahub.metadata.schema_classes import ( - OwnerClass, - OwnershipClass, - OwnershipTypeClass, -) +from datahub.metadata._schema_classes import OwnershipTypeClass +from datahub.metadata.schema_classes import OwnerClass, OwnershipClass class AddDatasetOwnershipConfig(TransformerSemanticsConfigModel): @@ -102,7 +99,7 @@ def transform_aspect( class DatasetOwnershipBaseConfig(TransformerSemanticsConfigModel): - ownership_type: Optional[str] = OwnershipTypeClass.DATAOWNER + ownership_type: str = OwnershipTypeClass.DATAOWNER class SimpleDatasetOwnershipConfig(DatasetOwnershipBaseConfig): @@ -114,11 +111,14 @@ class SimpleAddDatasetOwnership(AddDatasetOwnership): """Transformer that adds a specified set of owners to each dataset.""" def __init__(self, config: SimpleDatasetOwnershipConfig, ctx: PipelineContext): - ownership_type = builder.validate_ownership_type(config.ownership_type) + ownership_type, ownership_type_urn = builder.validate_ownership_type( + config.ownership_type + ) owners = [ OwnerClass( owner=owner, type=ownership_type, + typeUrn=ownership_type_urn, ) for owner in config.owner_urns ] @@ -147,29 +147,17 @@ class PatternDatasetOwnershipConfig(DatasetOwnershipBaseConfig): class PatternAddDatasetOwnership(AddDatasetOwnership): """Transformer that adds a specified set of owners to each dataset.""" - def getOwners( - self, - key: str, - owner_pattern: KeyValuePattern, - ownership_type: Optional[str] = None, - ) -> List[OwnerClass]: - owners = [ - OwnerClass( - owner=owner, - type=builder.validate_ownership_type(ownership_type), - ) - for owner in owner_pattern.value(key) - ] - return owners - def __init__(self, config: PatternDatasetOwnershipConfig, ctx: PipelineContext): - ownership_type = builder.validate_ownership_type(config.ownership_type) owner_pattern = config.owner_pattern + ownership_type, ownership_type_urn = builder.validate_ownership_type( + config.ownership_type + ) generic_config = AddDatasetOwnershipConfig( get_owners_to_add=lambda urn: [ OwnerClass( owner=owner, type=ownership_type, + typeUrn=ownership_type_urn, ) for owner in owner_pattern.value(urn) ], diff --git a/metadata-ingestion/tests/unit/test_pipeline.py b/metadata-ingestion/tests/unit/test_pipeline.py index 7ce78f0ab3e13..0f3c984196a78 100644 --- a/metadata-ingestion/tests/unit/test_pipeline.py +++ b/metadata-ingestion/tests/unit/test_pipeline.py @@ -214,7 +214,10 @@ def test_run_including_registered_transformation(self): "transformers": [ { "type": "simple_add_dataset_ownership", - "config": {"owner_urns": ["urn:li:corpuser:foo"]}, + "config": { + "owner_urns": ["urn:li:corpuser:foo"], + "ownership_type": "urn:li:ownershipType:__system__technical_owner", + }, } ], "sink": {"type": "tests.test_helpers.sink_helpers.RecordingSink"}, diff --git 
a/metadata-ingestion/tests/unit/test_transform_dataset.py b/metadata-ingestion/tests/unit/test_transform_dataset.py index bc95451620d22..8014df2f5c519 100644 --- a/metadata-ingestion/tests/unit/test_transform_dataset.py +++ b/metadata-ingestion/tests/unit/test_transform_dataset.py @@ -234,7 +234,7 @@ def test_simple_dataset_ownership_transformation(mock_time): assert last_event.entityUrn == outputs[0].record.proposedSnapshot.urn assert all( [ - owner.type == models.OwnershipTypeClass.DATAOWNER + owner.type == models.OwnershipTypeClass.DATAOWNER and owner.typeUrn is None for owner in last_event.aspect.owners ] ) @@ -247,7 +247,7 @@ def test_simple_dataset_ownership_transformation(mock_time): assert len(second_ownership_aspect.owners) == 3 assert all( [ - owner.type == models.OwnershipTypeClass.DATAOWNER + owner.type == models.OwnershipTypeClass.DATAOWNER and owner.typeUrn is None for owner in second_ownership_aspect.owners ] ) @@ -293,6 +293,44 @@ def test_simple_dataset_ownership_with_type_transformation(mock_time): assert ownership_aspect.owners[0].type == models.OwnershipTypeClass.PRODUCER +def test_simple_dataset_ownership_with_type_urn_transformation(mock_time): + input = make_generic_dataset() + + transformer = SimpleAddDatasetOwnership.create( + { + "owner_urns": [ + builder.make_user_urn("person1"), + ], + "ownership_type": "urn:li:ownershipType:__system__technical_owner", + }, + PipelineContext(run_id="test"), + ) + + output = list( + transformer.transform( + [ + RecordEnvelope(input, metadata={}), + RecordEnvelope(EndOfStream(), metadata={}), + ] + ) + ) + + assert len(output) == 3 + + # original MCE is unchanged + assert input == output[0].record + + ownership_aspect = output[1].record.aspect + + assert isinstance(ownership_aspect, OwnershipClass) + assert len(ownership_aspect.owners) == 1 + assert ownership_aspect.owners[0].type == OwnershipTypeClass.CUSTOM + assert ( + ownership_aspect.owners[0].typeUrn + == "urn:li:ownershipType:__system__technical_owner" + ) + + def _test_extract_tags(in_urn: str, regex_str: str, out_tag: str) -> None: input = make_generic_dataset(entity_urn=in_urn) transformer = ExtractDatasetTags.create( @@ -883,6 +921,7 @@ def test_pattern_dataset_ownership_transformation(mock_time): ".*example2.*": [builder.make_user_urn("person2")], } }, + "ownership_type": "DATAOWNER", }, PipelineContext(run_id="test"), ) @@ -2233,6 +2272,7 @@ def fake_ownership_class(entity_urn: str) -> models.OwnershipClass: "replace_existing": False, "semantics": TransformerSemantics.PATCH, "owner_urns": [owner2], + "ownership_type": "DATAOWNER", }, pipeline_context=pipeline_context, ) From 32d237b56f54c83bd7b8d343b04d36f53ae72d0a Mon Sep 17 00:00:00 2001 From: Arun Vasudevan <12974850+arunvasudevan@users.noreply.github.com> Date: Wed, 13 Dec 2023 16:02:21 -0600 Subject: [PATCH 090/263] fix(ingest): reduce GraphQL Logs to warning for circuit breaker (#9436) --- .../src/datahub/api/circuit_breaker/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py b/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py index 4dcf40454736b..27317826264b8 100644 --- a/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py +++ b/metadata-ingestion/src/datahub/api/circuit_breaker/__init__.py @@ -1,3 +1,7 @@ +import logging + +from gql.transport.requests import log as requests_logger + from datahub.api.circuit_breaker.assertion_circuit_breaker import ( AssertionCircuitBreaker, AssertionCircuitBreakerConfig, @@ 
-6,3 +10,5 @@ OperationCircuitBreaker, OperationCircuitBreakerConfig, ) + +requests_logger.setLevel(logging.WARNING) From 288e458739ec15e0d294ed5c0eb54963fee01071 Mon Sep 17 00:00:00 2001 From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com> Date: Thu, 14 Dec 2023 06:19:05 +0530 Subject: [PATCH 091/263] refactor(ui): support Apollo caching for settings / Policies (#9442) --- .../app/permissions/policy/ManagePolicies.tsx | 194 ++------------- .../policy/_tests_/policyUtils.test.tsx | 110 +++++++++ .../src/app/permissions/policy/policyUtils.ts | 98 ++++++++ .../src/app/permissions/policy/usePolicy.ts | 227 ++++++++++++++++++ 4 files changed, 460 insertions(+), 169 deletions(-) create mode 100644 datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx create mode 100644 datahub-web-react/src/app/permissions/policy/usePolicy.ts diff --git a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx index 2f0c284fc4e8f..72c22f3bddc2c 100644 --- a/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx +++ b/datahub-web-react/src/app/permissions/policy/ManagePolicies.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useMemo, useState } from 'react'; -import { Button, Empty, message, Modal, Pagination, Tag } from 'antd'; +import { Button, Empty, message, Pagination, Tag } from 'antd'; import styled from 'styled-components/macro'; import * as QueryString from 'query-string'; import { DeleteOutlined, PlusOutlined } from '@ant-design/icons'; @@ -7,26 +7,15 @@ import { useLocation } from 'react-router'; import PolicyBuilderModal from './PolicyBuilderModal'; import { Policy, - PolicyUpdateInput, PolicyState, - PolicyType, - Maybe, - ResourceFilterInput, - PolicyMatchFilter, - PolicyMatchFilterInput, - PolicyMatchCriterionInput, - EntityType, } from '../../../types.generated'; import { useAppConfig } from '../../useAppConfig'; import PolicyDetailsModal from './PolicyDetailsModal'; import { - useCreatePolicyMutation, - useDeletePolicyMutation, useListPoliciesQuery, - useUpdatePolicyMutation, } from '../../../graphql/policy.generated'; import { Message } from '../../shared/Message'; -import { EMPTY_POLICY } from './policyUtils'; +import { DEFAULT_PAGE_SIZE, EMPTY_POLICY } from './policyUtils'; import TabToolbar from '../../entity/shared/components/styled/TabToolbar'; import { StyledTable } from '../../entity/shared/components/styled/StyledTable'; import AvatarsGroup from '../AvatarsGroup'; @@ -37,6 +26,7 @@ import { scrollToTop } from '../../shared/searchUtils'; import analytics, { EventType } from '../../analytics'; import { POLICIES_CREATE_POLICY_ID, POLICIES_INTRO_ID } from '../../onboarding/config/PoliciesOnboardingConfig'; import { OnboardingTour } from '../../onboarding/OnboardingTour'; +import { usePolicy } from './usePolicy'; const SourceContainer = styled.div` overflow: auto; @@ -84,58 +74,6 @@ const PageContainer = styled.span` overflow: auto; `; -const DEFAULT_PAGE_SIZE = 10; - -type PrivilegeOptionType = { - type?: string; - name?: Maybe; -}; - -const toFilterInput = (filter: PolicyMatchFilter): PolicyMatchFilterInput => { - return { - criteria: filter.criteria?.map((criterion): PolicyMatchCriterionInput => { - return { - field: criterion.field, - values: criterion.values.map((criterionValue) => criterionValue.value), - condition: criterion.condition, - }; - }), - }; -}; - -const toPolicyInput = (policy: Omit): PolicyUpdateInput => { - let policyInput: PolicyUpdateInput = { - 
type: policy.type, - name: policy.name, - state: policy.state, - description: policy.description, - privileges: policy.privileges, - actors: { - users: policy.actors.users, - groups: policy.actors.groups, - allUsers: policy.actors.allUsers, - allGroups: policy.actors.allGroups, - resourceOwners: policy.actors.resourceOwners, - resourceOwnersTypes: policy.actors.resourceOwnersTypes, - }, - }; - if (policy.resources !== null && policy.resources !== undefined) { - let resourceFilter: ResourceFilterInput = { - type: policy.resources.type, - resources: policy.resources.resources, - allResources: policy.resources.allResources, - }; - if (policy.resources.filter) { - resourceFilter = { ...resourceFilter, filter: toFilterInput(policy.resources.filter) }; - } - // Add the resource filters. - policyInput = { - ...policyInput, - resources: resourceFilter, - }; - } - return policyInput; -}; // TODO: Cleanup the styling. export const ManagePolicies = () => { @@ -163,9 +101,7 @@ export const ManagePolicies = () => { const [focusPolicyUrn, setFocusPolicyUrn] = useState(undefined); const [focusPolicy, setFocusPolicy] = useState>(EMPTY_POLICY); - // Construct privileges - const platformPrivileges = policiesConfig?.platformPrivileges || []; - const resourcePrivileges = policiesConfig?.resourcePrivileges || []; + const { loading: policiesLoading, @@ -183,15 +119,6 @@ export const ManagePolicies = () => { fetchPolicy: (query?.length || 0) > 0 ? 'no-cache' : 'cache-first', }); - // Any time a policy is removed, edited, or created, refetch the list. - const [createPolicy, { error: createPolicyError }] = useCreatePolicyMutation(); - - const [updatePolicy, { error: updatePolicyError }] = useUpdatePolicyMutation(); - - const [deletePolicy, { error: deletePolicyError }] = useDeletePolicyMutation(); - - const updateError = createPolicyError || updatePolicyError || deletePolicyError; - const totalPolicies = policiesData?.listPolicies?.total || 0; const policies = useMemo(() => policiesData?.listPolicies?.policies || [], [policiesData]); @@ -212,28 +139,6 @@ export const ManagePolicies = () => { setShowPolicyBuilderModal(false); }; - const getPrivilegeNames = (policy: Omit) => { - let privileges: PrivilegeOptionType[] = []; - if (policy?.type === PolicyType.Platform) { - privileges = platformPrivileges - .filter((platformPrivilege) => policy.privileges.includes(platformPrivilege.type)) - .map((platformPrivilege) => { - return { type: platformPrivilege.type, name: platformPrivilege.displayName }; - }); - } else { - const allResourcePriviliges = resourcePrivileges.find( - (resourcePrivilege) => resourcePrivilege.resourceType === 'all', - ); - privileges = - allResourcePriviliges?.privileges - .filter((resourcePrivilege) => policy.privileges.includes(resourcePrivilege.type)) - .map((b) => { - return { type: b.type, name: b.displayName }; - }) || []; - } - return privileges; - }; - const onViewPolicy = (policy: Policy) => { setShowViewPolicyModal(true); setFocusPolicyUrn(policy?.urn); @@ -247,79 +152,30 @@ export const ManagePolicies = () => { }; const onEditPolicy = (policy: Policy) => { - setShowPolicyBuilderModal(true); - setFocusPolicyUrn(policy?.urn); - setFocusPolicy({ ...policy }); - }; - - // On Delete Policy handler - const onRemovePolicy = (policy: Policy) => { - Modal.confirm({ - title: `Delete ${policy?.name}`, - content: `Are you sure you want to remove policy?`, - onOk() { - deletePolicy({ variables: { urn: policy?.urn as string } }); // There must be a focus policy urn. 
- analytics.event({ - type: EventType.DeleteEntityEvent, - entityUrn: policy?.urn, - entityType: EntityType.DatahubPolicy, - }); - message.success('Successfully removed policy.'); - setTimeout(() => { - policiesRefetch(); - }, 3000); - onCancelViewPolicy(); - }, - onCancel() {}, - okText: 'Yes', - maskClosable: true, - closable: true, - }); + setShowPolicyBuilderModal(true); + setFocusPolicyUrn(policy?.urn); + setFocusPolicy({ ...policy }); }; - // On Activate and deactivate Policy handler - const onToggleActiveDuplicate = (policy: Policy) => { - const newState = policy?.state === PolicyState.Active ? PolicyState.Inactive : PolicyState.Active; - const newPolicy = { - ...policy, - state: newState, - }; - updatePolicy({ - variables: { - urn: policy?.urn as string, // There must be a focus policy urn. - input: toPolicyInput(newPolicy), - }, - }); - message.success(`Successfully ${newState === PolicyState.Active ? 'activated' : 'deactivated'} policy.`); - setTimeout(() => { - policiesRefetch(); - }, 3000); - setShowViewPolicyModal(false); - }; - - // On Add/Update Policy handler - const onSavePolicy = (savePolicy: Omit) => { - if (focusPolicyUrn) { - // If there's an URN associated with the focused policy, then we are editing an existing policy. - updatePolicy({ variables: { urn: focusPolicyUrn, input: toPolicyInput(savePolicy) } }); - analytics.event({ - type: EventType.UpdatePolicyEvent, - policyUrn: focusPolicyUrn, - }); - } else { - // If there's no URN associated with the focused policy, then we are creating. - createPolicy({ variables: { input: toPolicyInput(savePolicy) } }); - analytics.event({ - type: EventType.CreatePolicyEvent, - }); - } - message.success('Successfully saved policy.'); - setTimeout(() => { - policiesRefetch(); - }, 3000); - onClosePolicyBuilder(); - }; + const { + createPolicyError, + updatePolicyError, + deletePolicyError, + onSavePolicy, + onToggleActiveDuplicate, + onRemovePolicy, + getPrivilegeNames + } = usePolicy( + policiesConfig, + focusPolicyUrn, + policiesRefetch, + setShowViewPolicyModal, + onCancelViewPolicy, + onClosePolicyBuilder + ); + const updateError = createPolicyError || updatePolicyError || deletePolicyError; + const tableColumns = [ { title: 'Name', diff --git a/datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx b/datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx new file mode 100644 index 0000000000000..06d2e97255139 --- /dev/null +++ b/datahub-web-react/src/app/permissions/policy/_tests_/policyUtils.test.tsx @@ -0,0 +1,110 @@ +import { + addOrUpdatePoliciesInList, + updateListPoliciesCache, + removeFromListPoliciesCache, + } from '../policyUtils'; + + // Mock the Apollo Client readQuery and writeQuery methods + const mockReadQuery = jest.fn(); + const mockWriteQuery = jest.fn(); + + jest.mock('@apollo/client', () => ({ + ...jest.requireActual('@apollo/client'), + useApolloClient: () => ({ + readQuery: mockReadQuery, + writeQuery: mockWriteQuery, + }), + })); + + describe('addOrUpdatePoliciesInList', () => { + it('should add a new policy to the list', () => { + const existingPolicies = [{ urn: 'existing-urn' }]; + const newPolicies = { urn: 'new-urn' }; + + const result = addOrUpdatePoliciesInList(existingPolicies, newPolicies); + + expect(result.length).toBe(existingPolicies.length + 1); + expect(result).toContain(newPolicies); + }); + + it('should update an existing policy in the list', () => { + const existingPolicies = [{ urn: 'existing-urn' }]; + const newPolicies = { urn: 'existing-urn', 
updatedField: 'new-value' }; + + const result = addOrUpdatePoliciesInList(existingPolicies, newPolicies); + + expect(result.length).toBe(existingPolicies.length); + expect(result).toContainEqual(newPolicies); + }); + }); + + describe('updateListPoliciesCache', () => { + // Mock client.readQuery response + const mockReadQueryResponse = { + listPolicies: { + start: 0, + count: 1, + total: 1, + policies: [{ urn: 'existing-urn' }], + }, + }; + + beforeEach(() => { + mockReadQuery.mockReturnValueOnce(mockReadQueryResponse); + }); + + it('should update the list policies cache with a new policy', () => { + const mockClient = { + readQuery: mockReadQuery, + writeQuery: mockWriteQuery, + }; + + const policiesToAdd = [{ urn: 'new-urn' }]; + const pageSize = 10; + + updateListPoliciesCache(mockClient, policiesToAdd, pageSize); + + // Ensure writeQuery is called with the expected data + expect(mockWriteQuery).toHaveBeenCalledWith({ + query: expect.any(Object), + variables: { input: { start: 0, count: pageSize, query: undefined } }, + data: expect.any(Object), + }); + }); + }); + + describe('removeFromListPoliciesCache', () => { + // Mock client.readQuery response + const mockReadQueryResponse = { + listPolicies: { + start: 0, + count: 1, + total: 1, + policies: [{ urn: 'existing-urn' }], + }, + }; + + beforeEach(() => { + mockReadQuery.mockReturnValueOnce(mockReadQueryResponse); + }); + + it('should remove a policy from the list policies cache', () => { + const mockClient = { + readQuery: mockReadQuery, + writeQuery: mockWriteQuery, + }; + + const urnToRemove = 'existing-urn'; + const pageSize = 10; + + removeFromListPoliciesCache(mockClient, urnToRemove, pageSize); + + // Ensure writeQuery is called with the expected data + expect(mockWriteQuery).toHaveBeenCalledWith({ + query: expect.any(Object), + variables: { input: { start: 0, count: pageSize } }, + data: expect.any(Object), + }); + }); + }); + \ No newline at end of file diff --git a/datahub-web-react/src/app/permissions/policy/policyUtils.ts b/datahub-web-react/src/app/permissions/policy/policyUtils.ts index 2f178fcdeb5c3..27aa8fcd351e9 100644 --- a/datahub-web-react/src/app/permissions/policy/policyUtils.ts +++ b/datahub-web-react/src/app/permissions/policy/policyUtils.ts @@ -10,6 +10,9 @@ import { ResourceFilter, ResourcePrivileges, } from '../../../types.generated'; +import { ListPoliciesDocument, ListPoliciesQuery } from '../../../graphql/policy.generated'; + +export const DEFAULT_PAGE_SIZE = 10; export const EMPTY_POLICY = { type: PolicyType.Metadata, @@ -126,3 +129,98 @@ export const setFieldValues = ( } return { ...filter, criteria: [...restCriteria, createCriterion(resourceFieldType, fieldValues)] }; }; + +export const addOrUpdatePoliciesInList = (existingPolicies, newPolicies) => { + const policies = [...existingPolicies]; + let didUpdate = false; + const updatedPolicies = policies.map((policy) => { + if (policy.urn === newPolicies.urn) { + didUpdate = true; + return newPolicies; + } + return policy; + }); + return didUpdate ? updatedPolicies : [newPolicies, ...existingPolicies]; +}; + +/** + * Add an entry to the ListPolicies cache. + */ +export const updateListPoliciesCache = (client, policies, pageSize) => { + // Read the data from our cache for this query. + const currData: ListPoliciesQuery | null = client.readQuery({ + query: ListPoliciesDocument, + variables: { + input: { + start: 0, + count: pageSize, + query: undefined, + }, + }, + }); + + // Add our new policy into the existing list. 
+    const existingPolicies = [...(currData?.listPolicies?.policies || [])];
+    const newPolicies = addOrUpdatePoliciesInList(existingPolicies, policies);
+    const didAddTest = newPolicies.length > existingPolicies.length;
+
+    // Write our data back to the cache.
+    client.writeQuery({
+        query: ListPoliciesDocument,
+        variables: {
+            input: {
+                start: 0,
+                count: pageSize,
+                query: undefined,
+            },
+        },
+        data: {
+            listPolicies: {
+                __typename: 'ListPoliciesResult',
+                start: 0,
+                count: didAddTest ? (currData?.listPolicies?.count || 0) + 1 : currData?.listPolicies?.count,
+                total: didAddTest ? (currData?.listPolicies?.total || 0) + 1 : currData?.listPolicies?.total,
+                policies: newPolicies,
+            },
+        },
+    });
+};
+
+/**
+ * Remove an entry from the ListPolicies cache.
+ */
+export const removeFromListPoliciesCache = (client, urn, pageSize) => {
+    // Read the data from our cache for this query.
+    const currData: ListPoliciesQuery | null = client.readQuery({
+        query: ListPoliciesDocument,
+        variables: {
+            input: {
+                start: 0,
+                count: pageSize,
+            },
+        },
+    });
+
+    // Remove the policy from the existing policies set.
+    const newPolicies = [...(currData?.listPolicies?.policies || []).filter((policy) => policy.urn !== urn)];
+
+    // Write our data back to the cache.
+    client.writeQuery({
+        query: ListPoliciesDocument,
+        variables: {
+            input: {
+                start: 0,
+                count: pageSize,
+            },
+        },
+        data: {
+            listPolicies: {
+                start: currData?.listPolicies?.start || 0,
+                count: (currData?.listPolicies?.count || 1) - 1,
+                total: (currData?.listPolicies?.total || 1) - 1,
+                policies: newPolicies,
+            },
+        },
+    });
+};
diff --git a/datahub-web-react/src/app/permissions/policy/usePolicy.ts b/datahub-web-react/src/app/permissions/policy/usePolicy.ts
new file mode 100644
index 0000000000000..6f359805e42db
--- /dev/null
+++ b/datahub-web-react/src/app/permissions/policy/usePolicy.ts
@@ -0,0 +1,227 @@
+import { Modal, message } from 'antd';
+import { useApolloClient } from '@apollo/client';
+import {
+    EntityType,
+    Policy,
+    PolicyMatchCriterionInput,
+    PolicyMatchFilter,
+    PolicyMatchFilterInput,
+    PolicyState,
+    PolicyType,
+    Maybe,
+    PolicyUpdateInput,
+    ResourceFilterInput,
+} from '../../../types.generated';
+import { useCreatePolicyMutation, useDeletePolicyMutation, useUpdatePolicyMutation } from '../../../graphql/policy.generated';
+import analytics, { EventType } from '../../analytics';
+import { DEFAULT_PAGE_SIZE, removeFromListPoliciesCache, updateListPoliciesCache } from './policyUtils';
+
+
+type PrivilegeOptionType = {
+    type?: string;
+    name?: Maybe<string>;
+};
+
+export function usePolicy(
+    policiesConfig,
+    focusPolicyUrn,
+    policiesRefetch,
+    setShowViewPolicyModal,
+    onCancelViewPolicy,
+    onClosePolicyBuilder
+) {
+
+    const client = useApolloClient();
+
+    // Construct privileges
+    const platformPrivileges = policiesConfig?.platformPrivileges || [];
+    const resourcePrivileges = policiesConfig?.resourcePrivileges || [];
+
+    // Any time a policy is removed, edited, or created, refetch the list.
+ const [createPolicy, { error: createPolicyError }] = useCreatePolicyMutation(); + + const [updatePolicy, { error: updatePolicyError }] = useUpdatePolicyMutation(); + + const [deletePolicy, { error: deletePolicyError }] = useDeletePolicyMutation(); + + const toFilterInput = (filter: PolicyMatchFilter): PolicyMatchFilterInput => { + return { + criteria: filter.criteria?.map((criterion): PolicyMatchCriterionInput => { + return { + field: criterion.field, + values: criterion.values.map((criterionValue) => criterionValue.value), + condition: criterion.condition, + }; + }), + }; + }; + + const toPolicyInput = (policy: Omit): PolicyUpdateInput => { + let policyInput: PolicyUpdateInput = { + type: policy.type, + name: policy.name, + state: policy.state, + description: policy.description, + privileges: policy.privileges, + actors: { + users: policy.actors.users, + groups: policy.actors.groups, + allUsers: policy.actors.allUsers, + allGroups: policy.actors.allGroups, + resourceOwners: policy.actors.resourceOwners, + resourceOwnersTypes: policy.actors.resourceOwnersTypes, + }, + }; + if (policy.resources !== null && policy.resources !== undefined) { + let resourceFilter: ResourceFilterInput = { + type: policy.resources.type, + resources: policy.resources.resources, + allResources: policy.resources.allResources, + }; + if (policy.resources.filter) { + resourceFilter = { ...resourceFilter, filter: toFilterInput(policy.resources.filter) }; + } + // Add the resource filters. + policyInput = { + ...policyInput, + resources: resourceFilter, + }; + } + return policyInput; + }; + + const getPrivilegeNames = (policy: Omit) => { + let privileges: PrivilegeOptionType[] = []; + if (policy?.type === PolicyType.Platform) { + privileges = platformPrivileges + .filter((platformPrivilege) => policy.privileges.includes(platformPrivilege.type)) + .map((platformPrivilege) => { + return { type: platformPrivilege.type, name: platformPrivilege.displayName }; + }); + } else { + const allResourcePriviliges = resourcePrivileges.find( + (resourcePrivilege) => resourcePrivilege.resourceType === 'all', + ); + privileges = + allResourcePriviliges?.privileges + .filter((resourcePrivilege) => policy.privileges.includes(resourcePrivilege.type)) + .map((b) => { + return { type: b.type, name: b.displayName }; + }) || []; + } + return privileges; + }; + + // On Delete Policy handler + const onRemovePolicy = (policy: Policy) => { + Modal.confirm({ + title: `Delete ${policy?.name}`, + content: `Are you sure you want to remove policy?`, + onOk() { + deletePolicy({ variables: { urn: policy?.urn as string } }) + .then(()=>{ + // There must be a focus policy urn. + analytics.event({ + type: EventType.DeleteEntityEvent, + entityUrn: policy?.urn, + entityType: EntityType.DatahubPolicy, + }); + message.success('Successfully removed policy.'); + removeFromListPoliciesCache(client,policy?.urn, DEFAULT_PAGE_SIZE); + setTimeout(() => { + policiesRefetch(); + }, 3000); + onCancelViewPolicy(); + }) + }, + onCancel() {}, + okText: 'Yes', + maskClosable: true, + closable: true, + }); + }; + + // On Activate and deactivate Policy handler + const onToggleActiveDuplicate = (policy: Policy) => { + const newState = policy?.state === PolicyState.Active ? PolicyState.Inactive : PolicyState.Active; + const newPolicy = { + ...policy, + state: newState, + }; + updatePolicy({ + variables: { + urn: policy?.urn as string, // There must be a focus policy urn. 
+ input: toPolicyInput(newPolicy), + }, + }).then(()=>{ + const updatePolicies= { + ...newPolicy, + __typename: 'ListPoliciesResult', + } + updateListPoliciesCache(client,updatePolicies,DEFAULT_PAGE_SIZE); + message.success(`Successfully ${newState === PolicyState.Active ? 'activated' : 'deactivated'} policy.`); + setTimeout(() => { + policiesRefetch(); + }, 3000); + }) + + setShowViewPolicyModal(false); + }; + + // On Add/Update Policy handler + const onSavePolicy = (savePolicy: Omit) => { + if (focusPolicyUrn) { + // If there's an URN associated with the focused policy, then we are editing an existing policy. + updatePolicy({ variables: { urn: focusPolicyUrn, input: toPolicyInput(savePolicy) } }) + .then(()=>{ + const newPolicy = { + __typename: 'ListPoliciesResult', + urn: focusPolicyUrn, + ...savePolicy, + }; + analytics.event({ + type: EventType.UpdatePolicyEvent, + policyUrn: focusPolicyUrn, + }); + message.success('Successfully saved policy.'); + updateListPoliciesCache(client,newPolicy,DEFAULT_PAGE_SIZE); + setTimeout(() => { + policiesRefetch(); + }, 1000); + onClosePolicyBuilder(); + }) + } else { + // If there's no URN associated with the focused policy, then we are creating. + createPolicy({ variables: { input: toPolicyInput(savePolicy) } }) + .then((result)=>{ + const newPolicy = { + __typename: 'ListPoliciesResult', + urn: result?.data?.createPolicy, + ...savePolicy, + type: null, + actors: null, + resources: null, + }; + analytics.event({ + type: EventType.CreatePolicyEvent, + }); + message.success('Successfully saved policy.'); + setTimeout(() => { + policiesRefetch(); + }, 1000); + updateListPoliciesCache(client,newPolicy,DEFAULT_PAGE_SIZE); + onClosePolicyBuilder(); + }) + } + }; + + return{ + createPolicyError, + updatePolicyError, + deletePolicyError, + onSavePolicy, + onToggleActiveDuplicate, + onRemovePolicy, + getPrivilegeNames, + } +} \ No newline at end of file From b87f9774ae646180675023196871f5965a5d97c3 Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 14 Dec 2023 06:41:30 +0530 Subject: [PATCH 092/263] =?UTF-8?q?refactor=20|=20PRD-785=20|=20datahub=20?= =?UTF-8?q?oss:=20migrate=20use=20of=20useGetAuthenticatedU=E2=80=A6=20(#9?= =?UTF-8?q?456)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: John Joyce --- datahub-web-react/src/app/AdminConsole.tsx | 8 ++++---- datahub-web-react/src/app/embed/EmbeddedPage.tsx | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/datahub-web-react/src/app/AdminConsole.tsx b/datahub-web-react/src/app/AdminConsole.tsx index 8b14ca35763d1..f6395a3bd3cb8 100644 --- a/datahub-web-react/src/app/AdminConsole.tsx +++ b/datahub-web-react/src/app/AdminConsole.tsx @@ -4,9 +4,9 @@ import { Menu } from 'antd'; import styled from 'styled-components'; import { BankOutlined, BarChartOutlined, MenuOutlined } from '@ant-design/icons'; import Sider from 'antd/lib/layout/Sider'; -import { useGetAuthenticatedUser } from './useGetAuthenticatedUser'; import { useAppConfig } from './useAppConfig'; import { ANTD_GRAY } from './entity/shared/constants'; +import { useUserContext } from './context/useUserContext'; const ToggleContainer = styled.div` background-color: ${ANTD_GRAY[4]}; @@ -32,7 +32,7 @@ const ControlSlideOut = styled(Sider)` * Container for all views behind an authentication wall. 
*/ export const AdminConsole = (): JSX.Element => { - const me = useGetAuthenticatedUser(); + const me = useUserContext(); const [adminConsoleOpen, setAdminConsoleOpen] = useState(false); const { config } = useAppConfig(); @@ -40,8 +40,8 @@ export const AdminConsole = (): JSX.Element => { const isAnalyticsEnabled = config?.analyticsConfig.enabled; const isPoliciesEnabled = config?.policiesConfig.enabled; - const showAnalytics = (isAnalyticsEnabled && me && me.platformPrivileges.viewAnalytics) || false; - const showPolicyBuilder = (isPoliciesEnabled && me && me.platformPrivileges.managePolicies) || false; + const showAnalytics = (isAnalyticsEnabled && me && me?.platformPrivileges?.viewAnalytics) || false; + const showPolicyBuilder = (isPoliciesEnabled && me && me?.platformPrivileges?.managePolicies) || false; const showAdminConsole = showAnalytics || showPolicyBuilder; const onMenuItemClick = () => { diff --git a/datahub-web-react/src/app/embed/EmbeddedPage.tsx b/datahub-web-react/src/app/embed/EmbeddedPage.tsx index 429f83f34af6e..603a72675c433 100644 --- a/datahub-web-react/src/app/embed/EmbeddedPage.tsx +++ b/datahub-web-react/src/app/embed/EmbeddedPage.tsx @@ -8,9 +8,9 @@ import { VIEW_ENTITY_PAGE } from '../entity/shared/constants'; import { decodeUrn } from '../entity/shared/utils'; import CompactContext from '../shared/CompactContext'; import { useEntityRegistry } from '../useEntityRegistry'; -import { useGetAuthenticatedUserUrn } from '../useGetAuthenticatedUser'; import analytics from '../analytics/analytics'; import { EventType } from '../analytics'; +import { useUserContext } from '../context/useUserContext'; const EmbeddedPageWrapper = styled.div` max-height: 100%; @@ -39,11 +39,11 @@ export default function EmbeddedPage({ entityType }: Props) { }); }, [entityType, urn]); - const authenticatedUserUrn = useGetAuthenticatedUserUrn(); + const { urn : authenticatedUserUrn } = useUserContext(); const { data } = useGetGrantedPrivilegesQuery({ variables: { input: { - actorUrn: authenticatedUserUrn, + actorUrn: authenticatedUserUrn as string, resourceSpec: { resourceType: entityType, resourceUrn: urn }, }, }, From ff0570edacdd967d8fef23ac3333ccc93e50e406 Mon Sep 17 00:00:00 2001 From: John Joyce Date: Wed, 13 Dec 2023 17:12:48 -0800 Subject: [PATCH 093/263] refactor(ui): Minor improvements & refactoring (#9420) --- .../search/EmbeddedListSearchResults.tsx | 6 +- .../src/app/lineage/LineageLoadingSection.tsx | 5 +- datahub-web-react/src/graphql/domain.graphql | 4 +- datahub-web-react/src/graphql/lineage.graphql | 167 ++++++++++++------ datahub-web-react/src/graphql/query.graphql | 10 ++ .../com/linkedin/query/QueryProperties.pdl | 7 +- 6 files changed, 139 insertions(+), 60 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx index 1daf2a4c59b70..80fc2aa223fdf 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import { Pagination, Typography } from 'antd'; +import { Pagination, Spin, Typography } from 'antd'; import { LoadingOutlined } from '@ant-design/icons'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata, SearchResults as SearchResultType } from '../../../../../../types.generated'; @@ -61,7 
+61,7 @@ const LoadingContainer = styled.div` `; const StyledLoading = styled(LoadingOutlined)` - font-size: 36px; + font-size: 32px; color: ${ANTD_GRAY[7]}; padding-bottom: 18px; ]`; @@ -128,7 +128,7 @@ export const EmbeddedListSearchResults = ({ {loading && ( - + } /> )} {!loading && ( diff --git a/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx b/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx index 9d84de0c21172..3b7f0e48ecdf4 100644 --- a/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx +++ b/datahub-web-react/src/app/lineage/LineageLoadingSection.tsx @@ -1,5 +1,6 @@ import * as React from 'react'; import styled from 'styled-components'; +import { Spin } from 'antd'; import { LoadingOutlined } from '@ant-design/icons'; import { ANTD_GRAY } from '../entity/shared/constants'; @@ -13,7 +14,7 @@ const Container = styled.div` `; const StyledLoading = styled(LoadingOutlined)` - font-size: 36px; + font-size: 32px; color: ${ANTD_GRAY[7]}; padding-bottom: 18px; ]`; @@ -21,7 +22,7 @@ const StyledLoading = styled(LoadingOutlined)` export default function LineageLoadingSection() { return ( - + } /> ); } diff --git a/datahub-web-react/src/graphql/domain.graphql b/datahub-web-react/src/graphql/domain.graphql index 951b93fcba9af..170a5b5df476b 100644 --- a/datahub-web-react/src/graphql/domain.graphql +++ b/datahub-web-react/src/graphql/domain.graphql @@ -27,9 +27,7 @@ query getDomain($urn: String!) { } } } - children: relationships(input: { types: ["IsPartOf"], direction: INCOMING, start: 0, count: 0 }) { - total - } + ...domainEntitiesFields } } diff --git a/datahub-web-react/src/graphql/lineage.graphql b/datahub-web-react/src/graphql/lineage.graphql index dc511ca411e8d..4e9b8aacfcfa1 100644 --- a/datahub-web-react/src/graphql/lineage.graphql +++ b/datahub-web-react/src/graphql/lineage.graphql @@ -164,6 +164,9 @@ fragment lineageNodeProperties on EntityWithRelationships { domain { ...entityDomain } + parentContainers { + ...parentContainersFields + } ...entityDataProduct status { removed @@ -188,6 +191,9 @@ fragment lineageNodeProperties on EntityWithRelationships { ownership { ...ownershipFields } + parentContainers { + ...parentContainersFields + } subTypes { typeNames } @@ -361,6 +367,60 @@ fragment partialLineageResults on EntityLineageResult { filtered } +fragment entityLineage on Entity { + urn + type + ...lineageNodeProperties + ...canEditLineageFragment + ... on Dataset { + schemaMetadata(version: 0) @include(if: $showColumns) { + ...schemaMetadataFields + } + siblings { + isPrimary + siblings { + urn + type + ... on Dataset { + exists + } + ...lineageNodeProperties + } + } + } + ... on Chart { + inputFields @include(if: $showColumns) { + ...inputFieldsFields + } + } + ... on EntityWithRelationships { + upstream: lineage( + input: { + direction: UPSTREAM + start: 0 + count: 100 + separateSiblings: $separateSiblings + startTimeMillis: $startTimeMillis + endTimeMillis: $endTimeMillis + } + ) @skip(if: $excludeUpstream) { + ...fullLineageResults + } + downstream: lineage( + input: { + direction: DOWNSTREAM + start: 0 + count: 100 + separateSiblings: $separateSiblings + startTimeMillis: $startTimeMillis + endTimeMillis: $endTimeMillis + } + ) @skip(if: $excludeDownstream) { + ...fullLineageResults + } + } +} + query getEntityLineage( $urn: String! $separateSiblings: Boolean @@ -371,57 +431,21 @@ query getEntityLineage( $excludeDownstream: Boolean = false ) { entity(urn: $urn) { - urn - type - ...lineageNodeProperties - ...canEditLineageFragment - ... 
on Dataset { - schemaMetadata(version: 0) @include(if: $showColumns) { - ...schemaMetadataFields - } - siblings { - isPrimary - siblings { - urn - type - ... on Dataset { - exists - } - ...lineageNodeProperties - } - } - } - ... on Chart { - inputFields @include(if: $showColumns) { - ...inputFieldsFields - } - } - ... on EntityWithRelationships { - upstream: lineage( - input: { - direction: UPSTREAM - start: 0 - count: 100 - separateSiblings: $separateSiblings - startTimeMillis: $startTimeMillis - endTimeMillis: $endTimeMillis - } - ) @skip(if: $excludeUpstream) { - ...fullLineageResults - } - downstream: lineage( - input: { - direction: DOWNSTREAM - start: 0 - count: 100 - separateSiblings: $separateSiblings - startTimeMillis: $startTimeMillis - endTimeMillis: $endTimeMillis - } - ) @skip(if: $excludeDownstream) { - ...fullLineageResults - } - } + ...entityLineage + } +} + +query getBulkEntityLineage( + $urns: [String!]!, + $separateSiblings: Boolean + $showColumns: Boolean! + $startTimeMillis: Long + $endTimeMillis: Long + $excludeUpstream: Boolean = false + $excludeDownstream: Boolean = false +) { + entities(urns: $urns) { + ...entityLineage } } @@ -489,3 +513,44 @@ query getLineageCounts( } } } + +query getSearchAcrossLineageCounts( + $urn: String! + $excludeUpstream: Boolean = false + $excludeDownstream: Boolean = false +) { + upstreams: searchAcrossLineage( + input: { + urn: $urn + query: "*" + start: 0 + count: 10000 + filters: [{ field: "degree", value: "1", values: ["1"] }] + direction: UPSTREAM + } + ) @skip(if: $excludeUpstream) { + start + count + total + facets { + ...facetFields + } + } + downstreams: searchAcrossLineage( + input: { + urn: $urn + query: "*" + start: 0 + count: 10000 + filters: [{ field: "degree", value: "1", values: ["1"] }] + direction: DOWNSTREAM + } + ) @skip(if: $excludeDownstream) { + start + count + total + facets { + ...facetFields + } + } +} \ No newline at end of file diff --git a/datahub-web-react/src/graphql/query.graphql b/datahub-web-react/src/graphql/query.graphql index 84908b24f9ae7..e24c12a4448b1 100644 --- a/datahub-web-react/src/graphql/query.graphql +++ b/datahub-web-react/src/graphql/query.graphql @@ -1,3 +1,13 @@ +query getQuery($urn: String!) { + entity(urn: $urn) { + urn + type + ... on QueryEntity { + ...query + } + } +} + fragment query on QueryEntity { urn properties { diff --git a/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl b/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl index 3ba19d348913b..9587775dbed3a 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/query/QueryProperties.pdl @@ -1,6 +1,7 @@ namespace com.linkedin.query import com.linkedin.common.AuditStamp +import com.linkedin.common.Urn /** * Information about a Query against one or more data assets (e.g. Tables or Views). @@ -22,7 +23,11 @@ record QueryProperties { /** * The query was entered manually by a user (via the UI). */ - MANUAL + MANUAL, + /** + * The query was discovered by a crawler. 
+ */ + SYSTEM } /** From 70e64e80786a2112b3c77d790d9634ee17dd1d34 Mon Sep 17 00:00:00 2001 From: Seokyun Ha Date: Thu, 14 Dec 2023 18:02:37 +0900 Subject: [PATCH 094/263] feat(ingest): add ingest `--no-progress` option (#9300) --- docs/cli.md | 1 + metadata-ingestion/src/datahub/cli/ingest_cli.py | 10 ++++++++++ .../src/datahub/ingestion/run/pipeline.py | 6 +++++- 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/docs/cli.md b/docs/cli.md index 8845ed5a6dac7..cb5077db42906 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -98,6 +98,7 @@ Command Options: --preview-workunits The number of workunits to produce for preview --strict-warnings If enabled, ingestion runs with warnings will yield a non-zero error code --test-source-connection When set, ingestion will only test the source connection details from the recipe + --no-progress If enabled, mute intermediate progress ingestion reports ``` #### ingest --dry-run diff --git a/metadata-ingestion/src/datahub/cli/ingest_cli.py b/metadata-ingestion/src/datahub/cli/ingest_cli.py index b7827ec9f050b..569a836f3ef5c 100644 --- a/metadata-ingestion/src/datahub/cli/ingest_cli.py +++ b/metadata-ingestion/src/datahub/cli/ingest_cli.py @@ -97,6 +97,13 @@ def ingest() -> None: @click.option( "--no-spinner", type=bool, is_flag=True, default=False, help="Turn off spinner" ) +@click.option( + "--no-progress", + type=bool, + is_flag=True, + default=False, + help="If enabled, mute intermediate progress ingestion reports", +) @telemetry.with_telemetry( capture_kwargs=[ "dry_run", @@ -105,6 +112,7 @@ def ingest() -> None: "test_source_connection", "no_default_report", "no_spinner", + "no_progress", ] ) def run( @@ -117,6 +125,7 @@ def run( report_to: str, no_default_report: bool, no_spinner: bool, + no_progress: bool, ) -> None: """Ingest metadata into DataHub.""" @@ -170,6 +179,7 @@ async def run_ingestion_and_check_upgrade() -> int: preview_workunits, report_to, no_default_report, + no_progress, raw_pipeline_config, ) diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py index f2735c24ca19d..25e17d692109a 100644 --- a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py +++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py @@ -173,6 +173,7 @@ def __init__( preview_workunits: int = 10, report_to: Optional[str] = None, no_default_report: bool = False, + no_progress: bool = False, ): self.config = config self.dry_run = dry_run @@ -180,6 +181,7 @@ def __init__( self.preview_workunits = preview_workunits self.report_to = report_to self.reporters: List[PipelineRunListener] = [] + self.no_progress = no_progress self.num_intermediate_workunits = 0 self.last_time_printed = int(time.time()) self.cli_report = CliReport() @@ -330,6 +332,7 @@ def create( preview_workunits: int = 10, report_to: Optional[str] = "datahub", no_default_report: bool = False, + no_progress: bool = False, raw_config: Optional[dict] = None, ) -> "Pipeline": config = PipelineConfig.from_dict(config_dict, raw_config) @@ -340,6 +343,7 @@ def create( preview_workunits=preview_workunits, report_to=report_to, no_default_report=no_default_report, + no_progress=no_progress, ) def _time_to_print(self) -> bool: @@ -379,7 +383,7 @@ def run(self) -> None: self.preview_workunits if self.preview_mode else None, ): try: - if self._time_to_print(): + if self._time_to_print() and not self.no_progress: self.pretty_print_summary(currently_running=True) except Exception as e: logger.warning(f"Failed to print summary {e}") 
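The patch above threads the new flag from the `--no-progress` CLI option through `Pipeline.create` into the run loop. As a quick illustration, here is a minimal sketch, not part of the patch, of exercising the same code path programmatically; the recipe contents are placeholders, and only the `no_progress` parameter comes from this change:

```python
# Minimal sketch, assuming an illustrative recipe. Roughly equivalent to:
#   datahub ingest -c recipe.yml --no-progress
from datahub.ingestion.run.pipeline import Pipeline

recipe = {
    # Source/sink configs here are placeholders, not a tested recipe.
    "source": {"type": "file", "config": {"path": "./mces.json"}},
    "sink": {"type": "file", "config": {"filename": "./out_mces.json"}},
}

pipeline = Pipeline.create(recipe, no_progress=True)
pipeline.run()
# Only the intermediate progress reports are muted; the final summary
# (pretty_print_summary is what the run loop calls internally) still prints.
pipeline.pretty_print_summary()
```
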
From b0de1dc0ce7a2de221a27f12dfecea9924380ab2 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Thu, 14 Dec 2023 18:41:50 +0530 Subject: [PATCH 095/263] fix(powerbi): add access token refresh (#9405) Co-authored-by: elish7lapid Co-authored-by: treff7es --- .../ingestion/source/powerbi/config.py | 1 + .../powerbi/rest_api_wrapper/data_resolver.py | 15 +- .../tests/integration/powerbi/test_powerbi.py | 235 +++++++++++++++--- 3 files changed, 212 insertions(+), 39 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py index f71afac737ca6..70786efff79a4 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/config.py @@ -95,6 +95,7 @@ class Constant: TITLE = "title" EMBED_URL = "embedUrl" ACCESS_TOKEN = "access_token" + ACCESS_TOKEN_EXPIRY = "expires_in" IS_READ_ONLY = "isReadOnly" WEB_URL = "webUrl" ODATA_COUNT = "@odata.count" diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py index c6314c212d104..3aeffa60bc28e 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/rest_api_wrapper/data_resolver.py @@ -1,6 +1,7 @@ import logging import math from abc import ABC, abstractmethod +from datetime import datetime, timedelta from time import sleep from typing import Any, Dict, List, Optional @@ -59,6 +60,7 @@ def __init__( tenant_id: str, ): self.__access_token: Optional[str] = None + self.__access_token_expiry_time: Optional[datetime] = None self.__tenant_id = tenant_id # Test connection by generating access token logger.info("Trying to connect to {}".format(self._get_authority_url())) @@ -128,7 +130,7 @@ def get_authorization_header(self): return {Constant.Authorization: self.get_access_token()} def get_access_token(self): - if self.__access_token is not None: + if self.__access_token is not None and not self._is_access_token_expired(): return self.__access_token logger.info("Generating PowerBi access token") @@ -150,11 +152,22 @@ def get_access_token(self): self.__access_token = "Bearer {}".format( auth_response.get(Constant.ACCESS_TOKEN) ) + safety_gap = 300 + self.__access_token_expiry_time = datetime.now() + timedelta( + seconds=( + max(auth_response.get(Constant.ACCESS_TOKEN_EXPIRY, 0) - safety_gap, 0) + ) + ) logger.debug(f"{Constant.PBIAccessToken}={self.__access_token}") return self.__access_token + def _is_access_token_expired(self) -> bool: + if not self.__access_token_expiry_time: + return True + return self.__access_token_expiry_time < datetime.now() + def get_dashboards(self, workspace: Workspace) -> List[Dashboard]: """ Get the list of dashboard from PowerBi for the given workspace identifier diff --git a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py index c9b0ded433749..b2cbccf983eb0 100644 --- a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py +++ b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py @@ -1,8 +1,10 @@ +import datetime import logging import re import sys from typing import Any, Dict, List, cast from unittest import mock +from unittest.mock import MagicMock import pytest from freezegun import freeze_time @@ -31,13 +33,23 @@ def 
enable_logging(): logging.getLogger().setLevel(logging.DEBUG) -def mock_msal_cca(*args, **kwargs): - class MsalClient: - def acquire_token_for_client(self, *args, **kwargs): - return { - "access_token": "dummy", - } +class MsalClient: + call_num = 0 + token: Dict[str, Any] = { + "access_token": "dummy", + } + + @staticmethod + def acquire_token_for_client(*args, **kwargs): + MsalClient.call_num += 1 + return MsalClient.token + + @staticmethod + def reset(): + MsalClient.call_num = 0 + +def mock_msal_cca(*args, **kwargs): return MsalClient() @@ -627,7 +639,13 @@ def default_source_config(): @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration -def test_powerbi_ingest(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_powerbi_ingest( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -658,7 +676,7 @@ def test_powerbi_ingest(mock_msal, pytestconfig, tmp_path, mock_time, requests_m mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_mces.json", + output_path=f"{tmp_path}/powerbi_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -667,8 +685,12 @@ def test_powerbi_ingest(mock_msal, pytestconfig, tmp_path, mock_time, requests_m @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_powerbi_platform_instance_ingest( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -711,8 +733,12 @@ def test_powerbi_platform_instance_ingest( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_powerbi_ingest_urn_lower_case( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -752,8 +778,12 @@ def test_powerbi_ingest_urn_lower_case( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_override_ownership( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -783,7 +813,7 @@ def test_override_ownership( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_mces_disabled_ownership.json", + output_path=f"{tmp_path}/powerbi_mces_disabled_ownership.json", golden_path=f"{test_resources_dir}/{mce_out_file}", ) @@ -792,8 +822,13 @@ def test_override_ownership( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_scan_all_workspaces( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: 
datetime.datetime, + requests_mock: Any, +) -> None: + test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -828,7 +863,7 @@ def test_scan_all_workspaces( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_mces_scan_all_workspaces.json", + output_path=f"{tmp_path}/powerbi_mces_scan_all_workspaces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -836,7 +871,14 @@ def test_scan_all_workspaces( @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration -def test_extract_reports(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_extract_reports( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: + enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -868,7 +910,7 @@ def test_extract_reports(mock_msal, pytestconfig, tmp_path, mock_time, requests_ mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_report_mces.json", + output_path=f"{tmp_path}/powerbi_report_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -876,7 +918,13 @@ def test_extract_reports(mock_msal, pytestconfig, tmp_path, mock_time, requests_ @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration -def test_extract_lineage(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_extract_lineage( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -925,8 +973,12 @@ def test_extract_lineage(mock_msal, pytestconfig, tmp_path, mock_time, requests_ @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_extract_endorsements( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" register_mock_api(request_mock=requests_mock) @@ -957,7 +1009,7 @@ def test_extract_endorsements( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_endorsement_mces.json", + output_path=f"{tmp_path}/powerbi_endorsement_mces.json", golden_path=f"{test_resources_dir}/{mce_out_file}", ) @@ -966,8 +1018,12 @@ def test_extract_endorsements( @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_admin_access_is_not_allowed( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1024,8 +1080,12 @@ def test_admin_access_is_not_allowed( @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) def test_workspace_container( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: 
Any, +) -> None: enable_logging() test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1062,11 +1122,92 @@ def test_workspace_container( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_container_mces.json", + output_path=f"{tmp_path}/powerbi_container_mces.json", golden_path=f"{test_resources_dir}/{mce_out_file}", ) +@mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) +def test_access_token_expiry_with_long_expiry( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: + enable_logging() + + register_mock_api(request_mock=requests_mock) + + pipeline = Pipeline.create( + { + "run_id": "powerbi-test", + "source": { + "type": "powerbi", + "config": { + **default_source_config(), + }, + }, + "sink": { + "type": "file", + "config": { + "filename": f"{tmp_path}/powerbi_access_token_mces.json", + }, + }, + } + ) + + # for long expiry, the token should only be requested once. + MsalClient.token = { + "access_token": "dummy2", + "expires_in": 3600, + } + + MsalClient.reset() + pipeline.run() + # We expect the token to be requested twice (once for AdminApiResolver and one for RegularApiResolver) + assert MsalClient.call_num == 2 + + +@mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) +def test_access_token_expiry_with_short_expiry( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: + enable_logging() + + register_mock_api(request_mock=requests_mock) + + pipeline = Pipeline.create( + { + "run_id": "powerbi-test", + "source": { + "type": "powerbi", + "config": { + **default_source_config(), + }, + }, + "sink": { + "type": "file", + "config": { + "filename": f"{tmp_path}/powerbi_access_token_mces.json", + }, + }, + } + ) + + # for short expiry, the token should be requested when expires. 
+ MsalClient.token = { + "access_token": "dummy", + "expires_in": 0, + } + pipeline.run() + assert MsalClient.call_num > 2 + + def dataset_type_mapping_set_to_all_platform(pipeline: Pipeline) -> None: source_config: PowerBiDashboardSourceConfig = cast( PowerBiDashboardSource, pipeline.source @@ -1306,8 +1447,12 @@ def validate_pipeline(pipeline: Pipeline) -> None: @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration def test_reports_with_failed_page_request( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: """ Test that all reports are fetched even if a single page request fails """ @@ -1419,8 +1564,12 @@ def test_reports_with_failed_page_request( @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) def test_independent_datasets_extraction( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1503,14 +1652,20 @@ def test_independent_datasets_extraction( mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_independent_mces.json", + output_path=f"{tmp_path}/powerbi_independent_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) -def test_cll_extraction(mock_msal, pytestconfig, tmp_path, mock_time, requests_mock): +def test_cll_extraction( + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: test_resources_dir = pytestconfig.rootpath / "tests/integration/powerbi" @@ -1553,7 +1708,7 @@ def test_cll_extraction(mock_msal, pytestconfig, tmp_path, mock_time, requests_m mce_helpers.check_golden_file( pytestconfig, - output_path=tmp_path / "powerbi_cll_mces.json", + output_path=f"{tmp_path}/powerbi_cll_mces.json", golden_path=f"{test_resources_dir}/{golden_file}", ) @@ -1561,8 +1716,12 @@ def test_cll_extraction(mock_msal, pytestconfig, tmp_path, mock_time, requests_m @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) def test_cll_extraction_flags( - mock_msal, pytestconfig, tmp_path, mock_time, requests_mock -): + mock_msal: MagicMock, + pytestconfig: pytest.Config, + tmp_path: str, + mock_time: datetime.datetime, + requests_mock: Any, +) -> None: register_mock_api( request_mock=requests_mock, From 9ecda6485202ce89291bd1485c861cf7be1b8741 Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:07:48 +0530 Subject: [PATCH 096/263] fix(analytics): do not ping the track endpoint before login (#9462) --- datahub-web-react/src/app/analytics/analytics.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/analytics/analytics.ts b/datahub-web-react/src/app/analytics/analytics.ts index a66d76a09cf4d..468164069cfd0 100644 --- a/datahub-web-react/src/app/analytics/analytics.ts +++ b/datahub-web-react/src/app/analytics/analytics.ts @@ -30,16 +30,17 @@ export function getMergedTrackingOptions(options?: any) { export default { page: (data?: PageData, options?: 
any, callback?: (...params: any[]) => any) => { + const actorUrn = Cookies.get(CLIENT_AUTH_COOKIE) || undefined; const modifiedData = { ...data, type: EventType[EventType.PageViewEvent], - actorUrn: Cookies.get(CLIENT_AUTH_COOKIE) || undefined, + actorUrn, timestamp: Date.now(), date: new Date().toString(), userAgent: navigator.userAgent, browserId: getBrowserId(), }; - if (NODE_ENV === 'test') { + if (NODE_ENV === 'test' || !actorUrn) { return null; } const trackingOptions = getMergedTrackingOptions(options); From aac1c55a14fdf65cb51f1fd0f441d93eb7757098 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 14 Dec 2023 21:05:06 +0530 Subject: [PATCH 097/263] feat(ingest/unity): enable hive metastore ingestion (#9416) --- metadata-ingestion/setup.py | 5 +- .../ingestion/source/bigquery_v2/bigquery.py | 4 + .../ingestion/source/source_registry.py | 9 + .../datahub/ingestion/source/unity/config.py | 51 +- .../source/unity/hive_metastore_proxy.py | 242 ++ .../datahub/ingestion/source/unity/proxy.py | 22 + .../ingestion/source/unity/proxy_types.py | 38 +- .../datahub/ingestion/source/unity/report.py | 4 +- .../datahub/ingestion/source/unity/source.py | 64 +- .../unity/test_unity_catalog_ingest.py | 77 +- .../unity/unity_catalog_mces_golden.json | 2509 +++++++++-------- .../tests/unit/test_unity_catalog_config.py | 65 +- 12 files changed, 1958 insertions(+), 1132 deletions(-) create mode 100644 metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index e894cbf043338..5d15d7167b63e 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -263,7 +263,8 @@ "pyspark~=3.3.0", "requests", # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes - "databricks-sql-connector>=2.8.0", + # Version 3.0.0 requires SQLAlchemy > 2.0.21 + "databricks-sql-connector>=2.8.0,<3.0.0", } mysql = sql_common | {"pymysql>=1.0.2"} @@ -395,6 +396,8 @@ "powerbi-report-server": powerbi_report_server, "vertica": sql_common | {"vertica-sqlalchemy-dialect[vertica-python]==0.0.8.1"}, "unity-catalog": databricks | sql_common | sqllineage_lib, + # databricks is an alias for unity-catalog and needs to be kept in sync + "databricks": databricks | sql_common | sqllineage_lib, "fivetran": snowflake_common, } diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py index 6959a48313010..9813945683289 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py @@ -1031,6 +1031,10 @@ def gen_dataset_urn_from_ref(self, ref: BigQueryTableRef) -> str: def gen_schema_fields(self, columns: List[BigqueryColumn]) -> List[SchemaField]: schema_fields: List[SchemaField] = [] + # The line below affects HiveColumnToAvroConverter._STRUCT_TYPE_SEPARATOR in the global scope + # TODO: Refactor this such that + # converter = HiveColumnToAvroConverter(struct_type_separator=" "); + # converter.get_schema_fields_for_hive_column(...)
HiveColumnToAvroConverter._STRUCT_TYPE_SEPARATOR = " " _COMPLEX_TYPE = re.compile("^(struct|array)") last_id = -1 diff --git a/metadata-ingestion/src/datahub/ingestion/source/source_registry.py b/metadata-ingestion/src/datahub/ingestion/source/source_registry.py index c3fbab3f9a012..e003c658f45e8 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/source_registry.py +++ b/metadata-ingestion/src/datahub/ingestion/source/source_registry.py @@ -14,3 +14,12 @@ "mssql-odbc", "mssql", ) + +# Use databricks as an alias for the unity-catalog ingestion source. +# As mentioned here - https://docs.databricks.com/en/data-governance/unity-catalog/enable-workspaces.html, +# Databricks is rolling out Unity Catalog gradually across accounts. +# TODO: Rename unity-catalog source to databricks source, once it is rolled out for all accounts +source_registry.register_alias( + "databricks", + "unity-catalog", +) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 2c567120b4850..96971faeea69f 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -129,6 +129,14 @@ class UnityCatalogSourceConfig( workspace_url: str = pydantic.Field( description="Databricks workspace url. e.g. https://my-workspace.cloud.databricks.com" ) + warehouse_id: Optional[str] = pydantic.Field( + default=None, + description="SQL Warehouse id, for running queries. If not set, will use the default warehouse.", + ) + include_hive_metastore: bool = pydantic.Field( + default=False, + description="Whether to ingest legacy `hive_metastore` catalog. This requires executing queries on SQL warehouse.", + ) workspace_name: Optional[str] = pydantic.Field( default=None, description="Name of the workspace. Defaults to the deployment name present in workspace_url", ) @@ -254,16 +262,17 @@ class UnityCatalogSourceConfig( scheme: str = DATABRICKS - def get_sql_alchemy_url(self): + def get_sql_alchemy_url(self, database: Optional[str] = None) -> str: + uri_opts = {"http_path": f"/sql/1.0/warehouses/{self.warehouse_id}"} + if database: + uri_opts["catalog"] = database return make_sqlalchemy_uri( scheme=self.scheme, username="token", password=self.token, at=urlparse(self.workspace_url).netloc, - db=None, - uri_opts={ - "http_path": f"/sql/1.0/warehouses/{self.profiling.warehouse_id}" - }, + db=database, + uri_opts=uri_opts, ) def is_profiling_enabled(self) -> bool: @@ -304,3 +313,35 @@ def include_metastore_warning(cls, v: bool) -> bool: logger.warning(msg) add_global_warning(msg) return v + + @pydantic.root_validator(skip_on_failure=True) + def set_warehouse_id_from_profiling(cls, values: Dict[str, Any]) -> Dict[str, Any]: + profiling: Optional[UnityCatalogProfilerConfig] = values.get("profiling") + if not values.get("warehouse_id") and profiling and profiling.warehouse_id: + values["warehouse_id"] = profiling.warehouse_id + if ( + values.get("warehouse_id") + and profiling + and profiling.warehouse_id + and values["warehouse_id"] != profiling.warehouse_id + ): + raise ValueError( + "When `warehouse_id` is set, it must match the `warehouse_id` in `profiling`." + ) + + if values.get("include_hive_metastore") and not values.get("warehouse_id"): + raise ValueError( + "When `include_hive_metastore` is set, `warehouse_id` must be set."
+ ) + + if values.get("warehouse_id") and profiling and not profiling.warehouse_id: + profiling.warehouse_id = values["warehouse_id"] + + return values + + @pydantic.validator("schema_pattern", always=True) + def schema_pattern_should__always_deny_information_schema( + cls, v: AllowDenyPattern + ) -> AllowDenyPattern: + v.deny.append(".*\\.information_schema") + return v diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py new file mode 100644 index 0000000000000..99b2ff998662c --- /dev/null +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py @@ -0,0 +1,242 @@ +import logging +from datetime import datetime +from functools import lru_cache +from typing import Iterable, List, Optional + +from databricks.sdk.service.catalog import ColumnTypeName, DataSourceFormat +from databricks.sql.types import Row +from sqlalchemy import create_engine, inspect +from sqlalchemy.engine.reflection import Inspector + +from datahub.ingestion.api.closeable import Closeable +from datahub.ingestion.source.unity.proxy_types import ( + Catalog, + Column, + CustomCatalogType, + HiveTableType, + Metastore, + Schema, + Table, +) + +logger = logging.getLogger(__name__) +HIVE_METASTORE = "hive_metastore" + +type_map = { + "boolean": ColumnTypeName.BOOLEAN, + "tinyint": ColumnTypeName.INT, + "smallint": ColumnTypeName.INT, + "int": ColumnTypeName.INT, + "bigint": ColumnTypeName.LONG, + "float": ColumnTypeName.FLOAT, + "double": ColumnTypeName.DOUBLE, + "decimal": ColumnTypeName.DECIMAL, + "string": ColumnTypeName.STRING, + "varchar": ColumnTypeName.STRING, + "timestamp": ColumnTypeName.TIMESTAMP, + "date": ColumnTypeName.DATE, + "binary": ColumnTypeName.BINARY, +} + + +class HiveMetastoreProxy(Closeable): + # TODO: Support for view lineage using SQL parsing + # Why not use the hive ingestion source directly here? + # 1. The hive ingestion source assumes a 2-level namespace hierarchy, and currently + # there is no other intermediate interface except the sqlalchemy inspector + # that can be used to fetch hive metadata. + # 2. The hive recipe for databricks (databricks+pyhive dialect) does not + # readily support SQL warehouse. Also, this dialect is not actively maintained. + """ + Proxy to read metadata from the hive_metastore databricks catalog. This is required + as the unity catalog APIs do not return details about this legacy metastore. + """ + + def __init__(self, sqlalchemy_url: str, options: dict) -> None: + try: + self.inspector = HiveMetastoreProxy.get_inspector(sqlalchemy_url, options) + except Exception: + # This means that there is no `hive_metastore` catalog in the databricks workspace. + # Not tested, but this seems like the logical conclusion.
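+ # Re-raise so the caller can handle the failure; UnityCatalogSource.init_hive_metastore_proxy (in source.py below) catches this, emits a warning, and sets report.hive_metastore_catalog_found = False.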
+ raise + + @staticmethod + def get_inspector(sqlalchemy_url: str, options: dict) -> Inspector: + engine = create_engine(sqlalchemy_url, **options) + return inspect(engine.connect()) + + def hive_metastore_catalog(self, metastore: Optional[Metastore]) -> Catalog: + return Catalog( + id=HIVE_METASTORE, + name=HIVE_METASTORE, + comment=None, + metastore=metastore, + owner=None, + type=CustomCatalogType.HIVE_METASTORE_CATALOG, + ) + + def hive_metastore_schemas(self, catalog: Catalog) -> Iterable[Schema]: + for schema_name in self.inspector.get_schema_names(): + yield Schema( + name=schema_name, + id=f"{catalog.id}.{schema_name}", + catalog=catalog, + comment=None, + owner=None, + ) + + def hive_metastore_tables(self, schema: Schema) -> Iterable[Table]: + views = self.inspector.get_view_names(schema.name) + for table_name in views: + yield self._get_table(schema, table_name, True) + + for table_name in self.inspector.get_table_names(schema.name): + if table_name in views: + continue + yield self._get_table(schema, table_name, False) + + def _get_table(self, schema: Schema, table_name: str, is_view: bool) -> Table: + columns = self._get_columns(schema, table_name) + detailed_info = self._get_table_info(schema, table_name) + + comment = detailed_info.pop("Comment", None) + storage_location = detailed_info.pop("Location", None) + datasource_format = self._get_datasource_format( + detailed_info.pop("Provider", None) + ) + + created_at = self._get_created_at(detailed_info.pop("Created Time", None)) + + return Table( + name=table_name, + id=f"{schema.id}.{table_name}", + table_type=self._get_table_type(detailed_info.pop("Type", None)), + schema=schema, + columns=columns, + storage_location=storage_location, + data_source_format=datasource_format, + view_definition=self._get_view_definition(schema.name, table_name) + if is_view + else None, + properties=detailed_info, + owner=None, + generation=None, + created_at=created_at, + created_by=None, + updated_at=None, + updated_by=None, + table_id=f"{schema.id}.{table_name}", + comment=comment, + ) + + def _get_created_at(self, created_at: Optional[str]) -> Optional[datetime]: + return ( + datetime.strptime(created_at, "%a %b %d %H:%M:%S %Z %Y") + if created_at + else None + ) + + def _get_datasource_format( + self, provider: Optional[str] + ) -> Optional[DataSourceFormat]: + raw_format = provider + if raw_format: + try: + return DataSourceFormat(raw_format.upper()) + except Exception: + logger.debug(f"Unknown datasource format : {raw_format}") + pass + return None + + def _get_view_definition(self, schema_name: str, table_name: str) -> Optional[str]: + try: + rows = self._execute_sql( + f"SHOW CREATE TABLE `{schema_name}`.`{table_name}`" + ) + for row in rows: + return row[0] + except Exception: + logger.debug( + f"Failed to get view definition for {schema_name}.{table_name}" + ) + return None + + def _get_table_type(self, type: Optional[str]) -> HiveTableType: + if type == "EXTERNAL": + return HiveTableType.HIVE_EXTERNAL_TABLE + elif type == "MANAGED": + return HiveTableType.HIVE_MANAGED_TABLE + elif type == "VIEW": + return HiveTableType.HIVE_VIEW + else: + return HiveTableType.UNKNOWN + + def _get_table_info(self, schema: Schema, table_name: str) -> dict: + rows = self._describe_extended(schema.name, table_name) + + index = rows.index(("# Detailed Table Information", "", "")) + rows = rows[index + 1 :] + # Copied from https://github.com/acryldata/PyHive/blob/master/pyhive/sqlalchemy_hive.py#L375 + # Generate properties dictionary. 
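+ # For example, a DESCRIBE EXTENDED row like ("Type", "MANAGED", "") lands in properties["Type"] = "MANAGED"; a row whose second element is None opens a heading, and subsequent rows with an empty first element are stored under a "<heading> <col2>" key.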
+ properties = {} + active_heading = None + for col_name, data_type, value in rows: + col_name = col_name.rstrip() + if col_name.startswith("# "): + continue + elif col_name == "" and data_type is None: + active_heading = None + continue + elif col_name != "" and data_type is None: + active_heading = col_name + elif col_name != "" and data_type is not None: + properties[col_name] = data_type.strip() + else: + # col_name == "", data_type is not None + prop_name = "{} {}".format(active_heading, data_type.rstrip()) + properties[prop_name] = value.rstrip() + + return properties + + def _get_columns(self, schema: Schema, table_name: str) -> List[Column]: + rows = self._describe_extended(schema.name, table_name) + + columns: List[Column] = [] + for i, row in enumerate(rows): + if i == 0 and row[0].strip() == "col_name": + continue # first row + if row[0].strip() in ( + "", + "# Partition Information", + "# Detailed Table Information", + ): + break + columns.append( + Column( + name=row[0].strip(), + id=f"{schema.id}.{table_name}.{row[0].strip()}", + type_text=row[1].strip(), + type_name=type_map.get(row[1].strip().lower()), + type_scale=None, + type_precision=None, + position=None, + nullable=None, + comment=row[2], + ) + ) + + return columns + + @lru_cache(maxsize=1) + def _describe_extended(self, schema_name: str, table_name: str) -> List[Row]: + """ + Rows are structured as shown in examples here + https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-aux-describe-table.html#examples + """ + return self._execute_sql(f"DESCRIBE EXTENDED `{schema_name}`.`{table_name}`") + + def _execute_sql(self, sql: str) -> List[Row]: + return self.inspector.bind.execute(sql).fetchall() + + def close(self): + self.inspector.bind.close() # type:ignore diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py index 375c76db8e971..13baa8b57a639 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py @@ -26,6 +26,7 @@ from databricks.sdk.service.workspace import ObjectType import datahub +from datahub.ingestion.source.unity.hive_metastore_proxy import HiveMetastoreProxy from datahub.ingestion.source.unity.proxy_profiling import ( UnityCatalogProxyProfilingMixin, ) @@ -33,6 +34,7 @@ ALLOWED_STATEMENT_TYPES, Catalog, Column, + CustomCatalogType, ExternalTableReference, Metastore, Notebook, @@ -87,6 +89,7 @@ def __init__( personal_access_token: str, warehouse_id: Optional[str], report: UnityCatalogReport, + hive_metastore_proxy: Optional[HiveMetastoreProxy] = None, ): self._workspace_client = WorkspaceClient( host=workspace_url, @@ -96,6 +99,7 @@ def __init__( ) self.warehouse_id = warehouse_id or "" self.report = report + self.hive_metastore_proxy = hive_metastore_proxy def check_basic_connectivity(self) -> bool: return bool(self._workspace_client.catalogs.list()) @@ -105,6 +109,9 @@ def assigned_metastore(self) -> Metastore: return self._create_metastore(response) def catalogs(self, metastore: Optional[Metastore]) -> Iterable[Catalog]: + if self.hive_metastore_proxy: + yield self.hive_metastore_proxy.hive_metastore_catalog(metastore) + response = self._workspace_client.catalogs.list() if not response: logger.info("Catalogs not found") @@ -122,6 +129,12 @@ def catalog( return self._create_catalog(metastore, response) def schemas(self, catalog: Catalog) -> Iterable[Schema]: + if ( + self.hive_metastore_proxy + and catalog.type == 
CustomCatalogType.HIVE_METASTORE_CATALOG + ): + yield from self.hive_metastore_proxy.hive_metastore_schemas(catalog) + return response = self._workspace_client.schemas.list(catalog_name=catalog.name) if not response: logger.info(f"Schemas not found for catalog {catalog.id}") @@ -130,6 +143,12 @@ def schemas(self, catalog: Catalog) -> Iterable[Schema]: yield self._create_schema(catalog, schema) def tables(self, schema: Schema) -> Iterable[Table]: + if ( + self.hive_metastore_proxy + and schema.catalog.type == CustomCatalogType.HIVE_METASTORE_CATALOG + ): + yield from self.hive_metastore_proxy.hive_metastore_tables(schema) + return with patch("databricks.sdk.service.catalog.TableInfo", TableInfoWithGeneration): response = self._workspace_client.tables.list( catalog_name=schema.catalog.name, schema_name=schema.name ) @@ -244,6 +263,9 @@ def list_lineages_by_column(self, table_name: str, column_name: str) -> dict: ) def table_lineage(self, table: Table, include_entity_lineage: bool) -> None: + if table.schema.catalog.type == CustomCatalogType.HIVE_METASTORE_CATALOG: + # Lineage is not available for Hive Metastore Tables. + return None # Lineage endpoint doesn't exist on 2.1 version try: response: dict = self.list_lineages_by_table( diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py index 315c1c0d20186..e5951cb0fa4ff 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py @@ -4,7 +4,8 @@ import logging from dataclasses import dataclass, field from datetime import datetime -from typing import Dict, FrozenSet, List, Optional, Set +from enum import Enum +from typing import Dict, FrozenSet, List, Optional, Set, Union from databricks.sdk.service.catalog import ( CatalogType, @@ -75,6 +76,17 @@ NotebookId = int +class CustomCatalogType(Enum): + HIVE_METASTORE_CATALOG = "HIVE_METASTORE_CATALOG" + + +class HiveTableType(Enum): + HIVE_MANAGED_TABLE = "HIVE_MANAGED_TABLE" + HIVE_EXTERNAL_TABLE = "HIVE_EXTERNAL_TABLE" + HIVE_VIEW = "HIVE_VIEW" + UNKNOWN = "UNKNOWN" + + @dataclass class CommonProperty: id: str @@ -95,7 +107,7 @@ class Metastore(CommonProperty): class Catalog(CommonProperty): metastore: Optional[Metastore] owner: Optional[str] - type: CatalogType + type: Union[CatalogType, CustomCatalogType] @dataclass @@ -107,11 +119,11 @@ class Schema(CommonProperty): class Column(CommonProperty): type_text: str - type_name: ColumnTypeName - type_precision: int - type_scale: int - position: int - nullable: bool + type_name: Optional[ColumnTypeName] + type_precision: Optional[int] + type_scale: Optional[int] + position: Optional[int] + nullable: Optional[bool] comment: Optional[str] @@ -212,11 +224,11 @@ class Table(CommonProperty): columns: List[Column] storage_location: Optional[str] data_source_format: Optional[DataSourceFormat] - table_type: TableType + table_type: Union[TableType, HiveTableType] owner: Optional[str] generation: Optional[int] - created_at: datetime - created_by: str + created_at: Optional[datetime] + created_by: Optional[str] updated_at: Optional[datetime] updated_by: Optional[str] table_id: str @@ -231,7 +243,11 @@ class Table(CommonProperty): def __post_init__(self): self.ref = TableReference.create(self) - self.is_view = self.table_type in [TableType.VIEW, TableType.MATERIALIZED_VIEW] + self.is_view = self.table_type in [ + TableType.VIEW, + TableType.MATERIALIZED_VIEW,
HiveTableType.HIVE_VIEW, + ] @dataclass diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py index 7f19b6e2103ea..0770d9d27055c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import Tuple +from typing import Optional, Tuple from datahub.ingestion.api.report import EntityFilterReport from datahub.ingestion.source.sql.sql_generic_profiler import ProfilingSqlReport @@ -16,6 +16,8 @@ class UnityCatalogReport(IngestionStageReport, ProfilingSqlReport): table_profiles: EntityFilterReport = EntityFilterReport.field(type="table profile") notebooks: EntityFilterReport = EntityFilterReport.field(type="notebook") + hive_metastore_catalog_found: Optional[bool] = None + num_column_lineage_skipped_column_count: int = 0 num_external_upstreams_lacking_permissions: int = 0 num_external_upstreams_unsupported: int = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index d1940c1d57607..43c5e24439377 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -58,6 +58,10 @@ ) from datahub.ingestion.source.unity.connection_test import UnityCatalogConnectionTest from datahub.ingestion.source.unity.ge_profiler import UnityCatalogGEProfiler +from datahub.ingestion.source.unity.hive_metastore_proxy import ( + HIVE_METASTORE, + HiveMetastoreProxy, +) from datahub.ingestion.source.unity.proxy import UnityCatalogApiProxy from datahub.ingestion.source.unity.proxy_types import ( DATA_TYPE_REGISTRY, @@ -142,12 +146,17 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig): self.config = config self.report: UnityCatalogReport = UnityCatalogReport() + + self.init_hive_metastore_proxy() + self.unity_catalog_api_proxy = UnityCatalogApiProxy( config.workspace_url, config.token, - config.profiling.warehouse_id, + config.warehouse_id, report=self.report, + hive_metastore_proxy=self.hive_metastore_proxy, ) + self.external_url_base = urljoin(self.config.workspace_url, "/explore/data") # Determine the platform_instance_name @@ -174,6 +183,23 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig): # Global map of tables, for profiling self.tables: FileBackedDict[Table] = FileBackedDict() + def init_hive_metastore_proxy(self): + self.hive_metastore_proxy: Optional[HiveMetastoreProxy] = None + if self.config.include_hive_metastore: + try: + self.hive_metastore_proxy = HiveMetastoreProxy( + self.config.get_sql_alchemy_url(HIVE_METASTORE), self.config.options + ) + self.report.hive_metastore_catalog_found = True + except Exception as e: + logger.debug("Exception", exc_info=True) + self.warn( + logger, + HIVE_METASTORE, + f"Failed to connect to hive_metastore due to {e}", + ) + self.report.hive_metastore_catalog_found = False + @staticmethod def test_connection(config_dict: dict) -> TestConnectionReport: return UnityCatalogConnectionTest(config_dict).get_connection_test() @@ -194,7 +220,7 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]: def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: self.report.report_ingestion_stage_start("Ingestion Setup") wait_on_warehouse = None - if 
self.config.is_profiling_enabled(): + if self.config.is_profiling_enabled() or self.config.include_hive_metastore: self.report.report_ingestion_stage_start("Start warehouse") # Can take several minutes, so start now and wait later wait_on_warehouse = self.unity_catalog_api_proxy.start_warehouse() @@ -204,6 +230,9 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: f"SQL warehouse {self.config.profiling.warehouse_id} not found", ) return + else: + # wait until warehouse is started + wait_on_warehouse.result() if self.config.include_ownership: self.report.report_ingestion_stage_start("Ingest service principals") @@ -678,18 +707,25 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: custom_properties["table_type"] = table.table_type.value - custom_properties["created_by"] = table.created_by - custom_properties["created_at"] = str(table.created_at) + if table.created_by: + custom_properties["created_by"] = table.created_by if table.properties: custom_properties.update({k: str(v) for k, v in table.properties.items()}) custom_properties["table_id"] = table.table_id - custom_properties["owner"] = table.owner - custom_properties["updated_by"] = table.updated_by - custom_properties["updated_at"] = str(table.updated_at) - - created = TimeStampClass( - int(table.created_at.timestamp() * 1000), make_user_urn(table.created_by) - ) + if table.owner: + custom_properties["owner"] = table.owner + if table.updated_by: + custom_properties["updated_by"] = table.updated_by + if table.updated_at: + custom_properties["updated_at"] = str(table.updated_at) + + created: Optional[TimeStampClass] = None + if table.created_at: + custom_properties["created_at"] = str(table.created_at) + created = TimeStampClass( + int(table.created_at.timestamp() * 1000), + make_user_urn(table.created_by) if table.created_by else None, + ) last_modified = created if table.updated_at: last_modified = TimeStampClass( @@ -780,3 +816,9 @@ def _create_schema_field(column: Column) -> List[SchemaFieldClass]: description=column.comment, ) ] + + def close(self): + if self.hive_metastore_proxy: + self.hive_metastore_proxy.close() + + super().close() diff --git a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py index c43ba7eee5847..aab7630d57f46 100644 --- a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py +++ b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py @@ -3,6 +3,7 @@ from unittest.mock import patch import databricks +import pytest from databricks.sdk.service.catalog import ( CatalogInfo, GetMetastoreSummaryResponse, @@ -12,12 +13,15 @@ from freezegun import freeze_time from datahub.ingestion.run.pipeline import Pipeline +from datahub.ingestion.source.unity.hive_metastore_proxy import HiveMetastoreProxy from tests.test_helpers import mce_helpers FROZEN_TIME = "2021-12-07 07:00:00" SERVICE_PRINCIPAL_ID_1 = str(uuid.uuid4()) SERVICE_PRINCIPAL_ID_2 = str(uuid.uuid4()) +pytestmark = pytest.mark.integration_batch_1 + def register_mock_api(request_mock): api_vs_response = { @@ -215,6 +219,65 @@ def register_mock_data(workspace_client): ] +def mock_hive_sql(query): + if query == "DESCRIBE EXTENDED `bronze_kambi`.`bet`": + return [ + ("betStatusId", "bigint", None), + ("channelId", "bigint", None), + ( + "combination", + 
"struct>,eventId:bigint,eventName:string,eventStartDate:string,live:boolean,odds:double,outcomeIds:array,outcomeLabel:string,sportId:string,status:string,voidReason:string>>,payout:double,rewardExtraPayout:double,stake:double>", + None, + ), + ("", "", ""), + ("# Detailed Table Information", "", ""), + ("Catalog", "hive_metastore", ""), + ("Database", "bronze_kambi", ""), + ("Table", "bet", ""), + ("Created Time", "Wed Jun 22 05:14:56 UTC 2022", ""), + ("Last Access", "UNKNOWN", ""), + ("Created By", "Spark 3.2.1", ""), + ("Type", "MANAGED", ""), + ("Location", "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", ""), + ("Provider", "delta", ""), + ("Owner", "root", ""), + ("Is_managed_location", "true", ""), + ( + "Table Properties", + "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", + "", + ), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`view1`": + return [ + ("betStatusId", "bigint", None), + ("channelId", "bigint", None), + ( + "combination", + "struct>,eventId:bigint,eventName:string,eventStartDate:string,live:boolean,odds:double,outcomeIds:array,outcomeLabel:string,sportId:string,status:string,voidReason:string>>,payout:double,rewardExtraPayout:double,stake:double>", + None, + ), + ("", "", ""), + ("# Detailed Table Information", "", ""), + ("Catalog", "hive_metastore", ""), + ("Database", "bronze_kambi", ""), + ("Table", "view1", ""), + ("Created Time", "Wed Jun 22 05:14:56 UTC 2022", ""), + ("Last Access", "UNKNOWN", ""), + ("Created By", "Spark 3.2.1", ""), + ("Type", "VIEW", ""), + ("Owner", "root", ""), + ] + elif query == "SHOW CREATE TABLE `bronze_kambi`.`view1`": + return [ + ( + "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", + ) + ] + + return [] + + @freeze_time(FROZEN_TIME) def test_ingestion(pytestconfig, tmp_path, requests_mock): test_resources_dir = pytestconfig.rootpath / "tests/integration/unity" @@ -223,11 +286,21 @@ def test_ingestion(pytestconfig, tmp_path, requests_mock): output_file_name = "unity_catalog_mcps.json" - with patch("databricks.sdk.WorkspaceClient") as WorkspaceClient: + with patch("databricks.sdk.WorkspaceClient") as WorkspaceClient, patch.object( + HiveMetastoreProxy, "get_inspector" + ) as get_inspector, patch.object(HiveMetastoreProxy, "_execute_sql") as execute_sql: workspace_client: mock.MagicMock = mock.MagicMock() WorkspaceClient.return_value = workspace_client register_mock_data(workspace_client) + inspector = mock.MagicMock() + inspector.get_schema_names.return_value = ["bronze_kambi"] + inspector.get_view_names.return_value = ["view1"] + inspector.get_table_names.return_value = ["bet", "view1"] + get_inspector.return_value = inspector + + execute_sql.side_effect = mock_hive_sql + config_dict: dict = { "run_id": "unity-catalog-test", "pipeline_name": "unity-catalog-test-pipeline", @@ -237,6 +310,8 @@ def test_ingestion(pytestconfig, tmp_path, requests_mock): "workspace_url": "https://dummy.cloud.databricks.com", "token": "fake", "include_ownership": True, + "include_hive_metastore": True, + "warehouse_id": "test", }, }, "sink": { diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index d25c86a3a1f9a..98a6615dd2b52 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ 
-114,7 +114,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -123,11 +123,10 @@ "platform": "databricks", "env": "PROD", "metastore": "acryl metastore", - "catalog": "main" + "catalog": "hive_metastore" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", - "name": "main", - "description": "Main catalog (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore", + "name": "hive_metastore" } }, "systemMetadata": { @@ -138,7 +137,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -156,10 +155,18 @@ "entityType": "container", "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "containerProperties", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "main" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", + "name": "main", + "description": "Main catalog (auto-created)" } }, "systemMetadata": { @@ -170,7 +177,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -188,21 +195,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "container", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" } }, "systemMetadata": { @@ -213,12 +211,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -229,7 +227,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -250,32 +248,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "default" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default", - "name": "default", - "description": "Default schema (auto-created)" - } - }, - 
"systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -291,7 +264,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -307,13 +280,13 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Schema" + "Catalog" ] } }, @@ -325,14 +298,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:abc@acryl.io", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -350,12 +323,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" } }, "systemMetadata": { @@ -366,21 +339,20 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "containerProperties", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } - ] + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "hive_metastore", + "unity_schema": "bronze_kambi" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi", + "name": "bronze_kambi" } }, "systemMetadata": { @@ -390,13 +362,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "status", "aspect": { "json": { - "container": "urn:li:container:5ada0a9773235325e506410c512feabb" + "removed": false } }, "systemMetadata": { @@ -406,40 +378,18 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "browsePathsV2", "aspect": { "json": { - "customProperties": { - 
"storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.default.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "path": [ + { + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + } + ] } }, "systemMetadata": { @@ -449,14 +399,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Table" + "Schema" ] } }, @@ -467,55 +417,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "container", "aspect": { "json": { - "schemaName": "acryl_metastore.main.default.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - } - ] + "container": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" } }, "systemMetadata": { @@ -525,22 +433,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -551,7 +450,23 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -562,12 +477,8 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - }, - { - "id": "urn:li:container:5ada0a9773235325e506410c512feabb", - "urn": "urn:li:container:5ada0a9773235325e506410c512feabb" + "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" } ] } @@ -579,22 +490,33 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "information_schema" + "table_type": "HIVE_VIEW", + "Catalog": "hive_metastore", + "Database": "bronze_kambi", + "Table": "view1", + "Last Access": "UNKNOWN", + "Created By": "Spark 3.2.1", + "Owner": "root", + "table_id": "hive_metastore.bronze_kambi.view1", + "created_at": "2022-06-22 05:14:56" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/view1", + "name": "view1", + "qualifiedName": "hive_metastore.bronze_kambi.view1", + "created": { + "time": 1655874896000 }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/information_schema", - "name": "information_schema", - "description": "Information schema (auto-created)" + "lastModified": { + "time": 1655874896000 + }, + "tags": [] } }, "systemMetadata": { @@ -604,13 +526,15 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "viewProperties", "aspect": { "json": { - "removed": false + "materialized": false, + "viewLogic": "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", + "viewLanguage": "SQL" } }, "systemMetadata": { @@ -621,13 +545,22 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "containerProperties", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" - } + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl 
metastore", + "catalog": "main", + "unity_schema": "default" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default", + "name": "default", + "description": "Default schema (auto-created)" + } }, "systemMetadata": { "lastObserved": 1638860400000, @@ -636,14 +569,14 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Schema" + "View" ] } }, @@ -654,49 +587,8 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:Service Principal 1", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -707,8 +599,12 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" + }, + { + "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", + "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" } ] } @@ -720,74 +616,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "datasetProperties", - "aspect": { - "json": { - "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - 
"delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/information_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.information_schema.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "removed": false } }, "systemMetadata": { @@ -798,12 +633,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.main.information_schema.quickstart_table", + "schemaName": "hive_metastore.bronze_kambi.view1", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -822,144 +657,409 @@ }, "fields": [ { - "fieldPath": "columnA", - "nullable": true, + "fieldPath": "betStatusId", + "nullable": false, "type": { "type": { "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "int", + "nativeDataType": "bigint", "recursive": false, "isPartOfKey": false }, { - "fieldPath": "columnB", - "nullable": true, + "fieldPath": "channelId", + "nullable": false, "type": { "type": { - "com.linkedin.schema.StringType": {} + "com.linkedin.schema.NumberType": {} } }, - "nativeDataType": "string", + "nativeDataType": "bigint", "recursive": false, "isPartOfKey": false - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ + }, { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": 
"struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>\"}" + }, { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=long].combinationref", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" }, { - "id": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6", - "urn": "urn:li:container:0e09e6ec299ef004941e25221d3ef6b6" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "quickstart_schema" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].eachway", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].livebetting", + "nullable": true, + "type": { + "type": { + 
"com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].betoffertypeid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].criterionid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].criterionname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventgroupid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": 
"{\"native_data_type\": \"array>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=long].id", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=string].name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventstartdate", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=boolean].live", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=long].outcomeids", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "long" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].outcomelabel", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", 
\"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].sportid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].status", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].voidreason", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].payout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].rewardextrapayout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + } + ] + } + }, + "systemMetadata": { "lastObserved": 1638860400000, "runId": "unity-catalog-test", "lastRunId": "no-run-id-provided" @@ -967,7 +1067,23 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:databricks" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -985,14 +1101,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -1009,13 +1125,13 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityType": "dataset", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" } }, "systemMetadata": { @@ -1026,37 +1142,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } }, "systemMetadata": { @@ -1067,37 +1158,34 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { "json": { "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "storage_location": "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.quickstart_schema.quickstart_table", + "table_type": "HIVE_MANAGED_TABLE", + "Catalog": "hive_metastore", + "Database": "bronze_kambi", + "Table": "bet", + "Last Access": "UNKNOWN", + "Created By": "Spark 3.2.1", + "Owner": "root", + "Is_managed_location": "true", + "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", + "table_id": "hive_metastore.bronze_kambi.bet", + "created_at": "2022-06-22 05:14:56" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", + "name": "bet", + "qualifiedName": "hive_metastore.bronze_kambi.bet", "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" + "time": 1655874896000 }, "lastModified": { - "time": 
1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" + "time": 1655874896000 }, "tags": [] } @@ -1110,7 +1198,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1127,53 +1215,20 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "browsePathsV2", "aspect": { "json": { - "schemaName": "acryl_metastore.main.quickstart_schema.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ + "path": [ { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } ] } @@ -1186,32 +1241,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1222,12 +1252,12 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" }, { - "id": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "urn": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", + "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" } ] } @@ -1239,272 +1269,429 @@ } }, { - "entityType": "container", - "entityUrn": 
"urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "schemaMetadata", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog" + "schemaName": "hive_metastore.bronze_kambi.bet", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog", - "name": "quickstart_catalog", - "description": "" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Catalog" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { 
- "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog", - "unity_schema": "default" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", - "name": "default", - "description": "Default schema (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Schema" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "betStatusId", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "channelId", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.RecordType": {} + } + }, + "nativeDataType": "struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"struct>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>,payout:double,rewardextrapayout:double,stake:double>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=long].combinationref", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": 
"double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].eachway", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=boolean].livebetting", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>,eventid:bigint,eventname:string,eventstartdate:string,live:boolean,odds:double,outcomeids:array,outcomelabel:string,sportid:string,status:string,voidreason:string>>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].betoffertypeid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].criterionid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].criterionname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].currentodds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventgroupid", + 
"nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath", + "nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "record" + ] + } + } + }, + "nativeDataType": "array>", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array>\"}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=long].id", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=struct].eventgrouppath.[type=string].name", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=long].eventid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "bigint", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"bigint\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventname", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].eventstartdate", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=boolean].live", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.BooleanType": {} + } + }, + "nativeDataType": "boolean", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"boolean\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=double].odds", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=array].[type=long].outcomeids", + 
"nullable": false, + "type": { + "type": { + "com.linkedin.schema.ArrayType": { + "nestedType": [ + "long" + ] + } + } + }, + "nativeDataType": "array", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"array\"}" + }, { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].outcomelabel", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].sportid", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" }, { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].status", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=array].[type=struct].outcomes.[type=string].voidreason", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"string\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].payout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].rewardextrapayout", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": 
\"double\", \"_nullable\": true}" + }, + { + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" } ] } @@ -1517,12 +1704,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" + "container": "urn:li:container:5ada0a9773235325e506410c512feabb" } }, "systemMetadata": { @@ -1533,7 +1720,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -1554,9 +1741,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.default.quickstart_table", + "qualifiedName": "main.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -1576,7 +1763,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1594,12 +1781,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.default.quickstart_table", + "schemaName": "acryl_metastore.main.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1652,7 +1839,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -1677,7 +1864,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": 
"browsePathsV2", "aspect": { @@ -1688,12 +1875,12 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" }, { - "id": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "urn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" + "id": "urn:li:container:5ada0a9773235325e506410c512feabb", + "urn": "urn:li:container:5ada0a9773235325e506410c512feabb" } ] } @@ -1706,7 +1893,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -1715,12 +1902,12 @@ "platform": "databricks", "env": "PROD", "metastore": "acryl metastore", - "catalog": "quickstart_catalog", - "unity_schema": "information_schema" + "catalog": "main", + "unity_schema": "quickstart_schema" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/information_schema", - "name": "information_schema", - "description": "Information schema (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -1731,7 +1918,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -1747,7 +1934,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -1763,7 +1950,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1781,14 +1968,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:Service Principal 1", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -1806,12 +1993,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } }, "systemMetadata": { @@ -1822,7 +2009,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:29f99476d533719be0cebc374d5265dc", + "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -1833,8 +2020,8 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": 
"urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" } ] } @@ -1847,12 +2034,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:29f99476d533719be0cebc374d5265dc" + "container": "urn:li:container:481380c5a355638fc626eca8380cdda9" } }, "systemMetadata": { @@ -1863,7 +2050,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -1884,9 +2071,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/information_schema/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.information_schema.quickstart_table", + "qualifiedName": "main.quickstart_schema.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -1906,7 +2093,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -1924,12 +2111,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.information_schema.quickstart_table", + "schemaName": "acryl_metastore.main.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1981,8 +2168,136 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": 
"dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + }, + { + "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + }, + { + "id": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "urn": "urn:li:container:481380c5a355638fc626eca8380cdda9" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "quickstart_catalog" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog", + "name": "quickstart_catalog", + "description": "" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:databricks" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Catalog" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -2006,8 +2321,24 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2016,14 +2347,6 @@ { "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": 
"urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:29f99476d533719be0cebc374d5265dc", - "urn": "urn:li:container:29f99476d533719be0cebc374d5265dc" } ] } @@ -2036,7 +2359,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2046,11 +2369,11 @@ "env": "PROD", "metastore": "acryl metastore", "catalog": "quickstart_catalog", - "unity_schema": "quickstart_schema" + "unity_schema": "default" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -2061,7 +2384,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2077,7 +2400,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -2093,7 +2416,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2111,14 +2434,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -2136,7 +2459,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -2152,7 +2475,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2177,12 +2500,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:47a033e31b92a120f08f297c05d286f1" + "container": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" } }, "systemMetadata": { @@ -2193,7 +2516,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -2214,9 +2537,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", + "qualifiedName": "quickstart_catalog.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -2236,7 +2559,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2254,12 +2577,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table", + "schemaName": "acryl_metastore.quickstart_catalog.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -2312,7 +2635,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -2324,153 +2647,9 @@ } ], "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:47a033e31b92a120f08f297c05d286f1", - "urn": "urn:li:container:47a033e31b92a120f08f297c05d286f1" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - 
"metastore": "acryl metastore", - "catalog": "system" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", - "name": "system", - "description": "System catalog (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Catalog" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:Service Principal 2", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -2480,8 +2659,8 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2490,6 +2669,14 @@ { "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + }, + { + "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + }, + { + "id": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "urn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" } ] } @@ -2502,7 +2689,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2511,12 +2698,12 @@ "platform": "databricks", "env": "PROD", "metastore": "acryl metastore", - "catalog": "system", - "unity_schema": "default" + "catalog": "quickstart_catalog", + "unity_schema": "quickstart_schema" }, - "externalUrl": 
"https://dummy.cloud.databricks.com/explore/data/system/default", - "name": "default", - "description": "Default schema (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -2527,7 +2714,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2543,7 +2730,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -2559,7 +2746,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2577,14 +2764,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:abc@acryl.io", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -2602,12 +2789,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" } }, "systemMetadata": { @@ -2618,7 +2805,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2629,8 +2816,8 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" } ] } @@ -2643,12 +2830,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:b330768923270ff5450695bee1c94247" + "container": "urn:li:container:47a033e31b92a120f08f297c05d286f1" } }, "systemMetadata": { @@ -2659,7 +2846,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -2680,9 +2867,9 @@ "updated_by": "abc@acryl.io", 
"updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", "name": "quickstart_table", - "qualifiedName": "system.default.quickstart_table", + "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -2702,7 +2889,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2720,12 +2907,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.system.default.quickstart_table", + "schemaName": "acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -2778,7 +2965,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -2803,7 +2990,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2814,12 +3001,148 @@ "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" }, { - "id": "urn:li:container:b330768923270ff5450695bee1c94247", - "urn": "urn:li:container:b330768923270ff5450695bee1c94247" + "id": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "urn": "urn:li:container:47a033e31b92a120f08f297c05d286f1" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "containerProperties", + "aspect": { + "json": { + "customProperties": { + "platform": "databricks", + "env": "PROD", + "metastore": "acryl metastore", + "catalog": "system" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", + "name": "system", + "description": "System catalog (auto-created)" + } + }, + "systemMetadata": { + "lastObserved": 
1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "dataPlatformInstance", + "aspect": { + "json": { + "platform": "urn:li:dataPlatform:databricks" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Catalog" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:Service Principal 2", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" } ] } @@ -2832,7 +3155,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2842,11 +3165,11 @@ "env": "PROD", "metastore": "acryl metastore", "catalog": "system", - "unity_schema": "information_schema" + "unity_schema": "default" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/information_schema", - "name": "information_schema", - "description": "Information schema (auto-created)" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -2857,7 +3180,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2873,7 +3196,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": 
"urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -2889,7 +3212,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2907,14 +3230,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:Service Principal 1", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -2932,7 +3255,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "container", "aspect": { @@ -2948,7 +3271,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", + "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -2973,12 +3296,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59" + "container": "urn:li:container:b330768923270ff5450695bee1c94247" } }, "systemMetadata": { @@ -2989,7 +3312,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -3010,9 +3333,9 @@ "updated_by": "abc@acryl.io", "updated_at": "2022-10-19 13:27:29.633000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/information_schema/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "system.information_schema.quickstart_table", + "qualifiedName": "system.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -3032,7 +3355,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -3050,12 +3373,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": 
"acryl_metastore.system.information_schema.quickstart_table", + "schemaName": "acryl_metastore.system.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -3108,7 +3431,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { @@ -3133,7 +3456,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { @@ -3148,8 +3471,8 @@ "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" }, { - "id": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59", - "urn": "urn:li:container:cb26af5fb7ba2e1c6f2cd804101a5a59" + "id": "urn:li:container:b330768923270ff5450695bee1c94247", + "urn": "urn:li:container:b330768923270ff5450695bee1c94247" } ] } @@ -3506,22 +3829,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.information_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", @@ -3556,7 +3863,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3572,7 +3879,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3588,7 +3895,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3604,7 +3911,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.information_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3620,7 +3927,7 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { diff --git a/metadata-ingestion/tests/unit/test_unity_catalog_config.py b/metadata-ingestion/tests/unit/test_unity_catalog_config.py index 4098ed4074de2..3c0994cde7889 100644 --- a/metadata-ingestion/tests/unit/test_unity_catalog_config.py +++ b/metadata-ingestion/tests/unit/test_unity_catalog_config.py @@ -67,7 +67,6 @@ def test_profiling_requires_warehouses_id(): @freeze_time(FROZEN_TIME) def test_workspace_url_should_start_with_https(): - with pytest.raises(ValueError, match="Workspace URL must start with http scheme"): UnityCatalogSourceConfig.parse_obj( { @@ -76,3 +75,67 @@ def test_workspace_url_should_start_with_https(): "profiling": {"enabled": True}, } ) + + +def test_global_warehouse_id_is_set_from_profiling(): + config = UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "profiling": { + "method": "ge", + "enabled": True, + "warehouse_id": "my_warehouse_id", + }, + } + ) + assert config.profiling.warehouse_id == "my_warehouse_id" + assert config.warehouse_id == "my_warehouse_id" + + +def test_set_different_warehouse_id_from_profiling(): + with pytest.raises( + ValueError, + match="When `warehouse_id` is set, it must match the `warehouse_id` in `profiling`.", + ): + UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "warehouse_id": "my_global_warehouse_id", + "profiling": { + "method": "ge", + "enabled": True, + "warehouse_id": "my_warehouse_id", + }, + } + ) + + +def test_warehouse_id_must_be_set_if_include_hive_metastore_is_true(): + with pytest.raises( + ValueError, + match="When `include_hive_metastore` is set, `warehouse_id` must be set.", + ): + UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "include_hive_metastore": True, + } + ) + + +def test_set_profiling_warehouse_id_from_global(): + config = UnityCatalogSourceConfig.parse_obj( + { + "token": "token", + "workspace_url": "https://XXXXXXXXXXXXXXXXXXXXX", + "warehouse_id": "my_global_warehouse_id", + "profiling": { + "method": "ge", + "enabled": True, + }, + } + ) + assert config.profiling.warehouse_id == "my_global_warehouse_id" From 0d6a5e5df25b58af0a434d5d2f83f6ef463ba99b Mon Sep 17 00:00:00 2001 From: siddiquebagwan-gslab Date: Thu, 14 Dec 2023 21:06:28 +0530 Subject: [PATCH 098/263] feat(ingestion/transformer): create tag if not exist (#9076) --- .../src/datahub/ingestion/graph/client.py | 24 ++++++ .../ingestion/transformer/add_dataset_tags.py | 42 ++++++++++- .../ingestion/transformer/base_transformer.py | 75 +++++++++++++++---- .../tests/unit/test_transform_dataset.py | 32 ++++++-- 4 files changed, 154 insertions(+), 19 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/graph/client.py b/metadata-ingestion/src/datahub/ingestion/graph/client.py index d91165ac9777c..5c24b06dde999 100644 --- a/metadata-ingestion/src/datahub/ingestion/graph/client.py +++ b/metadata-ingestion/src/datahub/ingestion/graph/client.py @@ -787,9 +787,11 @@ def get_aspect_counts(self, aspect: str, urn_like: Optional[str] = None) -> int: def execute_graphql(self, query: str, variables: Optional[Dict] = None) -> Dict: url = 
f"{self.config.server}/api/graphql" + body: Dict = { "query": query, } + if variables: body["variables"] = variables @@ -1065,6 +1067,28 @@ def parse_sql_lineage( default_schema=default_schema, ) + def create_tag(self, tag_name: str) -> str: + graph_query: str = """ + mutation($tag_detail: CreateTagInput!) { + createTag(input: $tag_detail) + } + """ + + variables = { + "tag_detail": { + "name": tag_name, + "id": tag_name, + }, + } + + res = self.execute_graphql( + query=graph_query, + variables=variables, + ) + + # return urn + return res["createTag"] + def close(self) -> None: self._make_schema_resolver.cache_clear() super().close() diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py index 5a276ad899c48..72a8c226e491e 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/add_dataset_tags.py @@ -1,14 +1,24 @@ +import logging from typing import Callable, List, Optional, cast +import datahub.emitter.mce_builder as builder from datahub.configuration.common import ( KeyValuePattern, TransformerSemanticsConfigModel, ) from datahub.configuration.import_resolver import pydantic_resolve_key from datahub.emitter.mce_builder import Aspect +from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.transformer.dataset_transformer import DatasetTagsTransformer -from datahub.metadata.schema_classes import GlobalTagsClass, TagAssociationClass +from datahub.metadata.schema_classes import ( + GlobalTagsClass, + TagAssociationClass, + TagKeyClass, +) +from datahub.utilities.urns.tag_urn import TagUrn + +logger = logging.getLogger(__name__) class AddDatasetTagsConfig(TransformerSemanticsConfigModel): @@ -22,11 +32,13 @@ class AddDatasetTags(DatasetTagsTransformer): ctx: PipelineContext config: AddDatasetTagsConfig + processed_tags: List[TagAssociationClass] def __init__(self, config: AddDatasetTagsConfig, ctx: PipelineContext): super().__init__() self.ctx = ctx self.config = config + self.processed_tags = [] @classmethod def create(cls, config_dict: dict, ctx: PipelineContext) -> "AddDatasetTags": @@ -45,11 +57,38 @@ def transform_aspect( tags_to_add = self.config.get_tags_to_add(entity_urn) if tags_to_add is not None: out_global_tags_aspect.tags.extend(tags_to_add) + self.processed_tags.extend( + tags_to_add + ) # Keep track of tags added so that we can create them in handle_end_of_stream return self.get_result_semantics( self.config, self.ctx.graph, entity_urn, out_global_tags_aspect ) + def handle_end_of_stream(self) -> List[MetadataChangeProposalWrapper]: + + mcps: List[MetadataChangeProposalWrapper] = [] + + logger.debug("Generating tags") + + for tag_association in self.processed_tags: + ids: List[str] = TagUrn.create_from_string( + tag_association.tag + ).get_entity_id() + + assert len(ids) == 1, "Invalid Tag Urn" + + tag_name: str = ids[0] + + mcps.append( + MetadataChangeProposalWrapper( + entityUrn=builder.make_tag_urn(tag=tag_name), + aspect=TagKeyClass(name=tag_name), + ) + ) + + return mcps + class SimpleDatasetTagConfig(TransformerSemanticsConfigModel): tag_urns: List[str] @@ -82,6 +121,7 @@ class PatternAddDatasetTags(AddDatasetTags): """Transformer that adds a specified set of tags to each dataset.""" def __init__(self, config: PatternDatasetTagsConfig, ctx: PipelineContext): + config.tag_pattern.all tag_pattern = 
config.tag_pattern generic_config = AddDatasetTagsConfig( get_tags_to_add=lambda _: [ diff --git a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py index e0d6ae720c9a1..8b6f42dcfba4b 100644 --- a/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py +++ b/metadata-ingestion/src/datahub/ingestion/transformer/base_transformer.py @@ -17,13 +17,30 @@ log = logging.getLogger(__name__) -class LegacyMCETransformer(Transformer, metaclass=ABCMeta): +def _update_work_unit_id( + envelope: RecordEnvelope, urn: str, aspect_name: str +) -> Dict[Any, Any]: + structured_urn = Urn.create_from_string(urn) + simple_name = "-".join(structured_urn.get_entity_id()) + record_metadata = envelope.metadata.copy() + record_metadata.update({"workunit_id": f"txform-{simple_name}-{aspect_name}"}) + return record_metadata + + +class HandleEndOfStreamTransformer: + def handle_end_of_stream(self) -> List[MetadataChangeProposalWrapper]: + return [] + + +class LegacyMCETransformer( + Transformer, HandleEndOfStreamTransformer, metaclass=ABCMeta +): @abstractmethod def transform_one(self, mce: MetadataChangeEventClass) -> MetadataChangeEventClass: pass -class SingleAspectTransformer(metaclass=ABCMeta): +class SingleAspectTransformer(HandleEndOfStreamTransformer, metaclass=ABCMeta): @abstractmethod def aspect_name(self) -> str: """Implement this method to specify a single aspect that the transformer is interested in subscribing to. No default provided.""" @@ -180,6 +197,32 @@ def _transform_or_record_mcpw( self._record_mcp(envelope.record) return envelope if envelope.record.aspect is not None else None + def _handle_end_of_stream( + self, envelope: RecordEnvelope + ) -> Iterable[RecordEnvelope]: + + if not isinstance(self, SingleAspectTransformer) and not isinstance( + self, LegacyMCETransformer + ): + return + + mcps: List[MetadataChangeProposalWrapper] = self.handle_end_of_stream() + + for mcp in mcps: + if mcp.aspect is None or mcp.entityUrn is None: # to silent the lint error + continue + + record_metadata = _update_work_unit_id( + envelope=envelope, + aspect_name=mcp.aspect.get_aspect_name(), # type: ignore + urn=mcp.entityUrn, + ) + + yield RecordEnvelope( + record=mcp, + metadata=record_metadata, + ) + def transform( self, record_envelopes: Iterable[RecordEnvelope] ) -> Iterable[RecordEnvelope]: @@ -216,17 +259,10 @@ def transform( else None, ) if transformed_aspect: - # for end of stream records, we modify the workunit-id structured_urn = Urn.create_from_string(urn) - simple_name = "-".join(structured_urn.get_entity_id()) - record_metadata = envelope.metadata.copy() - record_metadata.update( - { - "workunit_id": f"txform-{simple_name}-{self.aspect_name()}" - } - ) - yield RecordEnvelope( - record=MetadataChangeProposalWrapper( + + mcp: MetadataChangeProposalWrapper = ( + MetadataChangeProposalWrapper( entityUrn=urn, entityType=structured_urn.get_type(), systemMetadata=last_seen_mcp.systemMetadata @@ -234,8 +270,21 @@ def transform( else last_seen_mce_system_metadata, aspectName=self.aspect_name(), aspect=transformed_aspect, - ), + ) + ) + + record_metadata = _update_work_unit_id( + envelope=envelope, + aspect_name=mcp.aspect.get_aspect_name(), # type: ignore + urn=mcp.entityUrn, + ) + + yield RecordEnvelope( + record=mcp, metadata=record_metadata, ) + self._mark_processed(urn) + yield from self._handle_end_of_stream(envelope=envelope) + yield envelope diff --git 
a/metadata-ingestion/tests/unit/test_transform_dataset.py b/metadata-ingestion/tests/unit/test_transform_dataset.py index 8014df2f5c519..546549dcf37a4 100644 --- a/metadata-ingestion/tests/unit/test_transform_dataset.py +++ b/metadata-ingestion/tests/unit/test_transform_dataset.py @@ -813,13 +813,25 @@ def test_simple_dataset_tags_transformation(mock_time): ] ) ) - assert len(outputs) == 3 + + assert len(outputs) == 5 # Check that tags were added. tags_aspect = outputs[1].record.aspect + assert tags_aspect.tags[0].tag == builder.make_tag_urn("NeedsDocumentation") assert tags_aspect assert len(tags_aspect.tags) == 2 - assert tags_aspect.tags[0].tag == builder.make_tag_urn("NeedsDocumentation") + + # Check new tag entity should be there + assert outputs[2].record.aspectName == "tagKey" + assert outputs[2].record.aspect.name == "NeedsDocumentation" + assert outputs[2].record.entityUrn == builder.make_tag_urn("NeedsDocumentation") + + assert outputs[3].record.aspectName == "tagKey" + assert outputs[3].record.aspect.name == "Legacy" + assert outputs[3].record.entityUrn == builder.make_tag_urn("Legacy") + + assert isinstance(outputs[4].record, EndOfStream) def dummy_tag_resolver_method(dataset_snapshot): @@ -853,7 +865,7 @@ def test_pattern_dataset_tags_transformation(mock_time): ) ) - assert len(outputs) == 3 + assert len(outputs) == 5 tags_aspect = outputs[1].record.aspect assert tags_aspect assert len(tags_aspect.tags) == 2 @@ -1363,7 +1375,7 @@ def test_mcp_add_tags_missing(mock_time): ] input_stream.append(RecordEnvelope(record=EndOfStream(), metadata={})) outputs = list(transformer.transform(input_stream)) - assert len(outputs) == 3 + assert len(outputs) == 5 assert outputs[0].record == dataset_mcp # Check that tags were added, this will be the second result tags_aspect = outputs[1].record.aspect @@ -1395,13 +1407,23 @@ def test_mcp_add_tags_existing(mock_time): ] input_stream.append(RecordEnvelope(record=EndOfStream(), metadata={})) outputs = list(transformer.transform(input_stream)) - assert len(outputs) == 2 + + assert len(outputs) == 4 + # Check that tags were added, this will be the second result tags_aspect = outputs[0].record.aspect assert tags_aspect assert len(tags_aspect.tags) == 3 assert tags_aspect.tags[0].tag == builder.make_tag_urn("Test") assert tags_aspect.tags[1].tag == builder.make_tag_urn("NeedsDocumentation") + assert tags_aspect.tags[2].tag == builder.make_tag_urn("Legacy") + + # Check tag entities got added + assert outputs[1].record.entityType == "tag" + assert outputs[1].record.entityUrn == builder.make_tag_urn("NeedsDocumentation") + assert outputs[2].record.entityType == "tag" + assert outputs[2].record.entityUrn == builder.make_tag_urn("Legacy") + assert isinstance(outputs[-1].record, EndOfStream) From ecef50f8fc75309562cf2729380ed18d5020ae8b Mon Sep 17 00:00:00 2001 From: Shirshanka Das Date: Thu, 14 Dec 2023 08:03:36 -0800 Subject: [PATCH 099/263] =?UTF-8?q?fix(ingest):=20make=20user=5Furn=20and?= =?UTF-8?q?=20group=5Furn=20generation=20consider=20user=20and=E2=80=A6=20?= =?UTF-8?q?(#9026)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Aseem Bansal --- .../src/datahub/emitter/mce_builder.py | 8 +++---- .../tests/unit/test_mce_builder.py | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/emitter/mce_builder.py b/metadata-ingestion/src/datahub/emitter/mce_builder.py index 3b2c87ea25a31..9da1b0ab56f89 100644 --- 
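
The handle_end_of_stream hook introduced in the transformer patch above lets AddDatasetTags queue every tag it attached and, once the record stream ends, emit one tagKey MCP per tag so that the tag entities actually exist in DataHub. A minimal sketch of one such emission, using only names that appear in the patch; the tag name is an example value:

import datahub.emitter.mce_builder as builder
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.metadata.schema_classes import TagKeyClass

# One MCP per processed tag; emitting the tagKey aspect is what "creates" the tag entity.
mcp = MetadataChangeProposalWrapper(
    entityUrn=builder.make_tag_urn(tag="NeedsDocumentation"),
    aspect=TagKeyClass(name="NeedsDocumentation"),
)
print(mcp.entityUrn)  # urn:li:tag:NeedsDocumentation
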
a/metadata-ingestion/src/datahub/emitter/mce_builder.py +++ b/metadata-ingestion/src/datahub/emitter/mce_builder.py @@ -193,20 +193,20 @@ def assertion_urn_to_key(assertion_urn: str) -> Optional[AssertionKeyClass]: def make_user_urn(username: str) -> str: """ - Makes a user urn if the input is not a user urn already + Makes a user urn if the input is not a user or group urn already """ return ( f"urn:li:corpuser:{username}" - if not username.startswith("urn:li:corpuser:") + if not username.startswith(("urn:li:corpuser:", "urn:li:corpGroup:")) else username ) def make_group_urn(groupname: str) -> str: """ - Makes a group urn if the input is not a group urn already + Makes a group urn if the input is not a user or group urn already """ - if groupname and groupname.startswith("urn:li:corpGroup:"): + if groupname and groupname.startswith(("urn:li:corpGroup:", "urn:li:corpuser:")): return groupname else: return f"urn:li:corpGroup:{groupname}" diff --git a/metadata-ingestion/tests/unit/test_mce_builder.py b/metadata-ingestion/tests/unit/test_mce_builder.py index b9025d76a3a1d..d7c84f7863b40 100644 --- a/metadata-ingestion/tests/unit/test_mce_builder.py +++ b/metadata-ingestion/tests/unit/test_mce_builder.py @@ -33,3 +33,25 @@ def test_create_dataset_urn_with_reserved_chars() -> None: ) == "urn:li:dataset:(urn:li:dataPlatform:platform%29,platform%2Cinstance.table_%28name%29,PROD)" ) + + +def test_make_user_urn() -> None: + assert builder.make_user_urn("someUser") == "urn:li:corpuser:someUser" + assert ( + builder.make_user_urn("urn:li:corpuser:someUser") == "urn:li:corpuser:someUser" + ) + assert ( + builder.make_user_urn("urn:li:corpGroup:someGroup") + == "urn:li:corpGroup:someGroup" + ) + + +def test_make_group_urn() -> None: + assert builder.make_group_urn("someGroup") == "urn:li:corpGroup:someGroup" + assert ( + builder.make_group_urn("urn:li:corpGroup:someGroup") + == "urn:li:corpGroup:someGroup" + ) + assert ( + builder.make_group_urn("urn:li:corpuser:someUser") == "urn:li:corpuser:someUser" + ) From 1741c07d769f56a9cf066172725384b4e8780839 Mon Sep 17 00:00:00 2001 From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:01:51 +0530 Subject: [PATCH 100/263] feat(ingestion): Add test_connection methods for important sources (#9334) --- .../datahub/ingestion/source/dbt/dbt_cloud.py | 89 ++-- .../datahub/ingestion/source/dbt/dbt_core.py | 56 ++- .../src/datahub/ingestion/source/kafka.py | 74 ++- .../ingestion/source/powerbi/powerbi.py | 22 +- .../ingestion/source/sql/sql_common.py | 26 +- .../src/datahub/ingestion/source/tableau.py | 23 +- .../ingestion/source_config/sql/snowflake.py | 2 +- .../tests/integration/dbt/test_dbt.py | 69 ++- .../tests/integration/kafka/test_kafka.py | 85 +++- .../tests/integration/mysql/test_mysql.py | 38 +- .../tests/integration/powerbi/test_powerbi.py | 23 +- .../tableau/test_tableau_ingest.py | 21 +- .../test_helpers/test_connection_helpers.py | 47 ++ .../tests/unit/test_snowflake_source.py | 428 +++++++----------- .../tests/unit/test_sql_common.py | 62 ++- 15 files changed, 684 insertions(+), 381 deletions(-) create mode 100644 metadata-ingestion/tests/test_helpers/test_connection_helpers.py diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py index a9685b2554553..069c1f2781460 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_cloud.py @@ 
-14,7 +14,12 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import SourceCapability +from datahub.ingestion.api.source import ( + CapabilityReport, + SourceCapability, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.source.dbt.dbt_common import ( DBTColumn, DBTCommonConfig, @@ -177,7 +182,7 @@ class DBTCloudConfig(DBTCommonConfig): @support_status(SupportStatus.INCUBATING) @capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion") @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") -class DBTCloudSource(DBTSourceBase): +class DBTCloudSource(DBTSourceBase, TestableSource): """ This source pulls dbt metadata directly from the dbt Cloud APIs. @@ -199,6 +204,57 @@ def create(cls, config_dict, ctx): config = DBTCloudConfig.parse_obj(config_dict) return cls(config, ctx, "dbt") + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source_config = DBTCloudConfig.parse_obj_allow_extras(config_dict) + DBTCloudSource._send_graphql_query( + metadata_endpoint=source_config.metadata_endpoint, + token=source_config.token, + query=_DBT_GRAPHQL_QUERY.format(type="tests", fields="jobId"), + variables={ + "jobId": source_config.job_id, + "runId": source_config.run_id, + }, + ) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + + @staticmethod + def _send_graphql_query( + metadata_endpoint: str, token: str, query: str, variables: Dict + ) -> Dict: + logger.debug(f"Sending GraphQL query to dbt Cloud: {query}") + response = requests.post( + metadata_endpoint, + json={ + "query": query, + "variables": variables, + }, + headers={ + "Authorization": f"Bearer {token}", + "X-dbt-partner-source": "acryldatahub", + }, + ) + + try: + res = response.json() + if "errors" in res: + raise ValueError( + f'Unable to fetch metadata from dbt Cloud: {res["errors"]}' + ) + data = res["data"] + except JSONDecodeError as e: + response.raise_for_status() + raise e + + return data + def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: # TODO: In dbt Cloud, commands are scheduled as part of jobs, where # each job can have multiple runs. 
We currently only fully support @@ -213,6 +269,8 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: for node_type, fields in _DBT_FIELDS_BY_TYPE.items(): logger.info(f"Fetching {node_type} from dbt Cloud") data = self._send_graphql_query( + metadata_endpoint=self.config.metadata_endpoint, + token=self.config.token, query=_DBT_GRAPHQL_QUERY.format(type=node_type, fields=fields), variables={ "jobId": self.config.job_id, @@ -232,33 +290,6 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: return nodes, additional_metadata - def _send_graphql_query(self, query: str, variables: Dict) -> Dict: - logger.debug(f"Sending GraphQL query to dbt Cloud: {query}") - response = requests.post( - self.config.metadata_endpoint, - json={ - "query": query, - "variables": variables, - }, - headers={ - "Authorization": f"Bearer {self.config.token}", - "X-dbt-partner-source": "acryldatahub", - }, - ) - - try: - res = response.json() - if "errors" in res: - raise ValueError( - f'Unable to fetch metadata from dbt Cloud: {res["errors"]}' - ) - data = res["data"] - except JSONDecodeError as e: - response.raise_for_status() - raise e - - return data - def _parse_into_dbt_node(self, node: Dict) -> DBTNode: key = node["uniqueId"] diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py index ac2b2815f3caa..563b005d7a88d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_core.py @@ -18,7 +18,12 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import SourceCapability +from datahub.ingestion.api.source import ( + CapabilityReport, + SourceCapability, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.source.aws.aws_common import AwsConnectionConfig from datahub.ingestion.source.dbt.dbt_common import ( DBTColumn, @@ -60,11 +65,6 @@ class DBTCoreConfig(DBTCommonConfig): _github_info_deprecated = pydantic_renamed_field("github_info", "git_info") - @property - def s3_client(self): - assert self.aws_connection - return self.aws_connection.get_s3_client() - @validator("aws_connection") def aws_connection_needed_if_s3_uris_present( cls, aws_connection: Optional[AwsConnectionConfig], values: Dict, **kwargs: Any @@ -363,7 +363,7 @@ def load_test_results( @support_status(SupportStatus.CERTIFIED) @capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion") @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") -class DBTCoreSource(DBTSourceBase): +class DBTCoreSource(DBTSourceBase, TestableSource): """ The artifacts used by this source are: - [dbt manifest file](https://docs.getdbt.com/reference/artifacts/manifest-json) @@ -387,12 +387,34 @@ def create(cls, config_dict, ctx): config = DBTCoreConfig.parse_obj(config_dict) return cls(config, ctx, "dbt") - def load_file_as_json(self, uri: str) -> Any: + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source_config = DBTCoreConfig.parse_obj_allow_extras(config_dict) + DBTCoreSource.load_file_as_json( + source_config.manifest_path, source_config.aws_connection + ) + DBTCoreSource.load_file_as_json( + source_config.catalog_path, source_config.aws_connection + ) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, 
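
The test_connection implementations added across sources in this patch all share one shape: build a report, run the cheapest probe that proves the config can reach the backend, and record either capable=True or the failure reason. A minimal sketch of that contract, assuming only the API names imported above; the file-backed source and its config key are hypothetical:

from datahub.ingestion.api.source import (
    CapabilityReport,
    TestableSource,
    TestConnectionReport,
)

class FileBackedSource(TestableSource):
    # Real sources also implement the rest of the Source interface; omitted here.
    @staticmethod
    def test_connection(config_dict: dict) -> TestConnectionReport:
        test_report = TestConnectionReport()
        try:
            # Cheapest probe for this toy backend: confirm the configured file opens.
            open(config_dict["path"]).close()
            test_report.basic_connectivity = CapabilityReport(capable=True)
        except Exception as e:
            test_report.basic_connectivity = CapabilityReport(
                capable=False, failure_reason=str(e)
            )
        return test_report
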
failure_reason=str(e) + ) + return test_report + + @staticmethod + def load_file_as_json( + uri: str, aws_connection: Optional[AwsConnectionConfig] + ) -> Dict: if re.match("^https?://", uri): return json.loads(requests.get(uri).text) elif re.match("^s3://", uri): u = urlparse(uri) - response = self.config.s3_client.get_object( + assert aws_connection + response = aws_connection.get_s3_client().get_object( Bucket=u.netloc, Key=u.path.lstrip("/") ) return json.loads(response["Body"].read().decode("utf-8")) @@ -410,12 +432,18 @@ def loadManifestAndCatalog( Optional[str], Optional[str], ]: - dbt_manifest_json = self.load_file_as_json(self.config.manifest_path) + dbt_manifest_json = self.load_file_as_json( + self.config.manifest_path, self.config.aws_connection + ) - dbt_catalog_json = self.load_file_as_json(self.config.catalog_path) + dbt_catalog_json = self.load_file_as_json( + self.config.catalog_path, self.config.aws_connection + ) if self.config.sources_path is not None: - dbt_sources_json = self.load_file_as_json(self.config.sources_path) + dbt_sources_json = self.load_file_as_json( + self.config.sources_path, self.config.aws_connection + ) sources_results = dbt_sources_json["results"] else: sources_results = {} @@ -491,7 +519,9 @@ def load_nodes(self) -> Tuple[List[DBTNode], Dict[str, Optional[str]]]: # This will populate the test_results field on each test node. all_nodes = load_test_results( self.config, - self.load_file_as_json(self.config.test_results_path), + self.load_file_as_json( + self.config.test_results_path, self.config.aws_connection + ), all_nodes, ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/kafka.py b/metadata-ingestion/src/datahub/ingestion/source/kafka.py index 25520e7aa66ff..99ef737206ab0 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/kafka.py +++ b/metadata-ingestion/src/datahub/ingestion/source/kafka.py @@ -15,6 +15,7 @@ ConfigResource, TopicMetadata, ) +from confluent_kafka.schema_registry.schema_registry_client import SchemaRegistryClient from datahub.configuration.common import AllowDenyPattern from datahub.configuration.kafka import KafkaConsumerConnectionConfig @@ -40,7 +41,13 @@ support_status, ) from datahub.ingestion.api.registry import import_path -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceCapability +from datahub.ingestion.api.source import ( + CapabilityReport, + MetadataWorkUnitProcessor, + SourceCapability, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import DatasetSubTypes from datahub.ingestion.source.kafka_schema_registry_base import KafkaSchemaRegistryBase @@ -133,6 +140,18 @@ class KafkaSourceConfig( ) +def get_kafka_consumer( + connection: KafkaConsumerConnectionConfig, +) -> confluent_kafka.Consumer: + return confluent_kafka.Consumer( + { + "group.id": "test", + "bootstrap.servers": connection.bootstrap, + **connection.consumer_config, + } + ) + + @dataclass class KafkaSourceReport(StaleEntityRemovalSourceReport): topics_scanned: int = 0 @@ -145,6 +164,45 @@ def report_dropped(self, topic: str) -> None: self.filtered.append(topic) +class KafkaConnectionTest: + def __init__(self, config_dict: dict): + self.config = KafkaSourceConfig.parse_obj_allow_extras(config_dict) + self.report = KafkaSourceReport() + self.consumer: confluent_kafka.Consumer = get_kafka_consumer( + self.config.connection + ) + + def get_connection_test(self) -> TestConnectionReport: + capability_report = { + 
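
KafkaConnectionTest splits its checks in two: broker reachability feeds basic_connectivity, while the schema registry probe feeds the SCHEMA_METADATA capability. Outside the source, the same two probes look roughly like this; the endpoints are placeholders, not values from this repo:

from confluent_kafka import Consumer
from confluent_kafka.schema_registry.schema_registry_client import SchemaRegistryClient

consumer = Consumer({"group.id": "test", "bootstrap.servers": "localhost:9092"})
consumer.list_topics(timeout=10)  # raises if the broker cannot be reached in time
SchemaRegistryClient({"url": "http://localhost:8081"}).get_subjects()  # raises on a bad registry
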
SourceCapability.SCHEMA_METADATA: self.schema_registry_connectivity(), + } + return TestConnectionReport( + basic_connectivity=self.basic_connectivity(), + capability_report={ + k: v for k, v in capability_report.items() if v is not None + }, + ) + + def basic_connectivity(self) -> CapabilityReport: + try: + self.consumer.list_topics(timeout=10) + return CapabilityReport(capable=True) + except Exception as e: + return CapabilityReport(capable=False, failure_reason=str(e)) + + def schema_registry_connectivity(self) -> CapabilityReport: + try: + SchemaRegistryClient( + { + "url": self.config.connection.schema_registry_url, + **self.config.connection.schema_registry_config, + } + ).get_subjects() + return CapabilityReport(capable=True) + except Exception as e: + return CapabilityReport(capable=False, failure_reason=str(e)) + + @platform_name("Kafka") @config_class(KafkaSourceConfig) @support_status(SupportStatus.CERTIFIED) @@ -160,7 +218,7 @@ def report_dropped(self, topic: str) -> None: SourceCapability.SCHEMA_METADATA, "Schemas associated with each topic are extracted from the schema registry. Avro and Protobuf (certified), JSON (incubating). Schema references are supported.", ) -class KafkaSource(StatefulIngestionSourceBase): +class KafkaSource(StatefulIngestionSourceBase, TestableSource): """ This plugin extracts the following: - Topics from the Kafka broker @@ -183,12 +241,8 @@ def create_schema_registry( def __init__(self, config: KafkaSourceConfig, ctx: PipelineContext): super().__init__(config, ctx) self.source_config: KafkaSourceConfig = config - self.consumer: confluent_kafka.Consumer = confluent_kafka.Consumer( - { - "group.id": "test", - "bootstrap.servers": self.source_config.connection.bootstrap, - **self.source_config.connection.consumer_config, - } + self.consumer: confluent_kafka.Consumer = get_kafka_consumer( + self.source_config.connection ) self.init_kafka_admin_client() self.report: KafkaSourceReport = KafkaSourceReport() @@ -226,6 +280,10 @@ def init_kafka_admin_client(self) -> None: f"Failed to create Kafka Admin Client due to error {e}.", ) + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + return KafkaConnectionTest(config_dict).get_connection_test() + @classmethod def create(cls, config_dict: Dict, ctx: PipelineContext) -> "KafkaSource": config: KafkaSourceConfig = KafkaSourceConfig.parse_obj(config_dict) diff --git a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py index 4b1d0403ac776..cdf7c975c0614 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/powerbi/powerbi.py @@ -19,7 +19,13 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, SourceReport +from datahub.ingestion.api.source import ( + CapabilityReport, + MetadataWorkUnitProcessor, + SourceReport, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.source_helpers import auto_workunit from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import ( @@ -1147,7 +1153,7 @@ def report_to_datahub_work_units( SourceCapability.LINEAGE_FINE, "Disabled by default, configured using `extract_column_level_lineage`. 
", ) -class PowerBiDashboardSource(StatefulIngestionSourceBase): +class PowerBiDashboardSource(StatefulIngestionSourceBase, TestableSource): """ This plugin extracts the following: - Power BI dashboards, tiles and datasets @@ -1186,6 +1192,18 @@ def __init__(self, config: PowerBiDashboardSourceConfig, ctx: PipelineContext): self, self.source_config, self.ctx ) + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + PowerBiAPI(PowerBiDashboardSourceConfig.parse_obj_allow_extras(config_dict)) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + @classmethod def create(cls, config_dict, ctx): config = PowerBiDashboardSourceConfig.parse_obj(config_dict) diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py index 590bc7f696784..a831dfa50342d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py @@ -15,6 +15,7 @@ Tuple, Type, Union, + cast, ) import sqlalchemy.dialects.postgresql.base @@ -35,7 +36,12 @@ from datahub.emitter.sql_parsing_builder import SqlParsingBuilder from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.incremental_lineage_helper import auto_incremental_lineage -from datahub.ingestion.api.source import MetadataWorkUnitProcessor +from datahub.ingestion.api.source import ( + CapabilityReport, + MetadataWorkUnitProcessor, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source.common.subtypes import ( DatasetContainerSubTypes, @@ -298,7 +304,7 @@ class ProfileMetadata: dataset_name_to_storage_bytes: Dict[str, int] = field(default_factory=dict) -class SQLAlchemySource(StatefulIngestionSourceBase): +class SQLAlchemySource(StatefulIngestionSourceBase, TestableSource): """A Base class for all SQL Sources that use SQLAlchemy to extend""" def __init__(self, config: SQLCommonConfig, ctx: PipelineContext, platform: str): @@ -348,6 +354,22 @@ def __init__(self, config: SQLCommonConfig, ctx: PipelineContext, platform: str) else: self._view_definition_cache = {} + @classmethod + def test_connection(cls, config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source = cast( + SQLAlchemySource, + cls.create(config_dict, PipelineContext(run_id="test_connection")), + ) + list(source.get_inspectors()) + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + def warn(self, log: logging.Logger, key: str, reason: str) -> None: self.report.report_warning(key, reason[:100]) log.warning(f"{key} => {reason}") diff --git a/metadata-ingestion/src/datahub/ingestion/source/tableau.py b/metadata-ingestion/src/datahub/ingestion/source/tableau.py index f870e99df27c5..ed5fe543310b8 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/tableau.py +++ b/metadata-ingestion/src/datahub/ingestion/source/tableau.py @@ -58,7 +58,13 @@ platform_name, support_status, ) -from datahub.ingestion.api.source import MetadataWorkUnitProcessor, Source +from datahub.ingestion.api.source import ( + CapabilityReport, + 
MetadataWorkUnitProcessor, + Source, + TestableSource, + TestConnectionReport, +) from datahub.ingestion.api.workunit import MetadataWorkUnit from datahub.ingestion.source import tableau_constant as c from datahub.ingestion.source.common.subtypes import ( @@ -469,7 +475,7 @@ class TableauSourceReport(StaleEntityRemovalSourceReport): SourceCapability.LINEAGE_FINE, "Enabled by default, configure using `extract_column_level_lineage`", ) -class TableauSource(StatefulIngestionSourceBase): +class TableauSource(StatefulIngestionSourceBase, TestableSource): platform = "tableau" def __hash__(self): @@ -509,6 +515,19 @@ def __init__( self._authenticate() + @staticmethod + def test_connection(config_dict: dict) -> TestConnectionReport: + test_report = TestConnectionReport() + try: + source_config = TableauConfig.parse_obj_allow_extras(config_dict) + source_config.make_tableau_client() + test_report.basic_connectivity = CapabilityReport(capable=True) + except Exception as e: + test_report.basic_connectivity = CapabilityReport( + capable=False, failure_reason=str(e) + ) + return test_report + def close(self) -> None: try: if self.server is not None: diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py index ccc4e115729a2..46bd24c7e1f4c 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py @@ -143,7 +143,7 @@ def _check_oauth_config(oauth_config: Optional[OAuthConfiguration]) -> None: "'oauth_config' is none but should be set when using OAUTH_AUTHENTICATOR authentication" ) if oauth_config.use_certificate is True: - if oauth_config.provider == OAuthIdentityProvider.OKTA.value: + if oauth_config.provider == OAuthIdentityProvider.OKTA: raise ValueError( "Certificate authentication is not supported for Okta." 
) diff --git a/metadata-ingestion/tests/integration/dbt/test_dbt.py b/metadata-ingestion/tests/integration/dbt/test_dbt.py index 95b5374bbb41d..587831495c1ea 100644 --- a/metadata-ingestion/tests/integration/dbt/test_dbt.py +++ b/metadata-ingestion/tests/integration/dbt/test_dbt.py @@ -10,20 +10,25 @@ from datahub.ingestion.run.pipeline import Pipeline from datahub.ingestion.run.pipeline_config import PipelineConfig, SourceConfig from datahub.ingestion.source.dbt.dbt_common import DBTEntitiesEnabled, EmitDirective -from datahub.ingestion.source.dbt.dbt_core import DBTCoreConfig +from datahub.ingestion.source.dbt.dbt_core import DBTCoreConfig, DBTCoreSource from datahub.ingestion.source.sql.sql_types import ( ATHENA_SQL_TYPES_MAP, TRINO_SQL_TYPES_MAP, resolve_athena_modified_type, resolve_trino_modified_type, ) -from tests.test_helpers import mce_helpers +from tests.test_helpers import mce_helpers, test_connection_helpers FROZEN_TIME = "2022-02-03 07:00:00" GMS_PORT = 8080 GMS_SERVER = f"http://localhost:{GMS_PORT}" +@pytest.fixture(scope="module") +def test_resources_dir(pytestconfig): + return pytestconfig.rootpath / "tests/integration/dbt" + + @dataclass class DbtTestConfig: run_id: str @@ -195,7 +200,14 @@ def set_paths( ) @pytest.mark.integration @freeze_time(FROZEN_TIME) -def test_dbt_ingest(dbt_test_config, pytestconfig, tmp_path, mock_time, requests_mock): +def test_dbt_ingest( + dbt_test_config, + test_resources_dir, + pytestconfig, + tmp_path, + mock_time, + requests_mock, +): config: DbtTestConfig = dbt_test_config test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" @@ -233,11 +245,48 @@ def test_dbt_ingest(dbt_test_config, pytestconfig, tmp_path, mock_time, requests ) +@pytest.mark.parametrize( + "config_dict, is_success", + [ + ( + { + "manifest_path": "dbt_manifest.json", + "catalog_path": "dbt_catalog.json", + "target_platform": "postgres", + }, + True, + ), + ( + { + "manifest_path": "dbt_manifest.json", + "catalog_path": "dbt_catalog-this-file-does-not-exist.json", + "target_platform": "postgres", + }, + False, + ), + ], +) @pytest.mark.integration @freeze_time(FROZEN_TIME) -def test_dbt_tests(pytestconfig, tmp_path, mock_time, **kwargs): - test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" +def test_dbt_test_connection(test_resources_dir, config_dict, is_success): + config_dict["manifest_path"] = str( + (test_resources_dir / config_dict["manifest_path"]).resolve() + ) + config_dict["catalog_path"] = str( + (test_resources_dir / config_dict["catalog_path"]).resolve() + ) + report = test_connection_helpers.run_test_connection(DBTCoreSource, config_dict) + if is_success: + test_connection_helpers.assert_basic_connectivity_success(report) + else: + test_connection_helpers.assert_basic_connectivity_failure( + report, "No such file or directory" + ) + +@pytest.mark.integration +@freeze_time(FROZEN_TIME) +def test_dbt_tests(test_resources_dir, pytestconfig, tmp_path, mock_time, **kwargs): # Run the metadata ingestion pipeline. 
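
The new test_dbt_test_connection above exercises both directions through the shared helper. Run standalone, the failing direction looks like this; the paths are deliberately nonexistent placeholders, and the tests.test_helpers import assumes the repo's own test environment:

from datahub.ingestion.source.dbt.dbt_core import DBTCoreSource
from tests.test_helpers import test_connection_helpers

report = test_connection_helpers.run_test_connection(
    DBTCoreSource,
    {
        "manifest_path": "/tmp/missing/dbt_manifest.json",
        "catalog_path": "/tmp/missing/dbt_catalog.json",
        "target_platform": "postgres",
    },
)
test_connection_helpers.assert_basic_connectivity_failure(
    report, "No such file or directory"
)
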
output_file = tmp_path / "dbt_test_events.json" golden_path = test_resources_dir / "dbt_test_events_golden.json" @@ -340,9 +389,9 @@ def test_resolve_athena_modified_type(data_type, expected_data_type): @pytest.mark.integration @freeze_time(FROZEN_TIME) -def test_dbt_tests_only_assertions(pytestconfig, tmp_path, mock_time, **kwargs): - test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" - +def test_dbt_tests_only_assertions( + test_resources_dir, pytestconfig, tmp_path, mock_time, **kwargs +): # Run the metadata ingestion pipeline. output_file = tmp_path / "test_only_assertions.json" @@ -418,10 +467,8 @@ def test_dbt_tests_only_assertions(pytestconfig, tmp_path, mock_time, **kwargs): @pytest.mark.integration @freeze_time(FROZEN_TIME) def test_dbt_only_test_definitions_and_results( - pytestconfig, tmp_path, mock_time, **kwargs + test_resources_dir, pytestconfig, tmp_path, mock_time, **kwargs ): - test_resources_dir = pytestconfig.rootpath / "tests/integration/dbt" - # Run the metadata ingestion pipeline. output_file = tmp_path / "test_only_definitions_and_assertions.json" diff --git a/metadata-ingestion/tests/integration/kafka/test_kafka.py b/metadata-ingestion/tests/integration/kafka/test_kafka.py index 63d284801c94c..dfdbea5de5cbf 100644 --- a/metadata-ingestion/tests/integration/kafka/test_kafka.py +++ b/metadata-ingestion/tests/integration/kafka/test_kafka.py @@ -3,18 +3,22 @@ import pytest from freezegun import freeze_time -from tests.test_helpers import mce_helpers +from datahub.ingestion.api.source import SourceCapability +from datahub.ingestion.source.kafka import KafkaSource +from tests.test_helpers import mce_helpers, test_connection_helpers from tests.test_helpers.click_helpers import run_datahub_cmd from tests.test_helpers.docker_helpers import wait_for_port FROZEN_TIME = "2020-04-14 07:00:00" -@freeze_time(FROZEN_TIME) -@pytest.mark.integration -def test_kafka_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time): - test_resources_dir = pytestconfig.rootpath / "tests/integration/kafka" +@pytest.fixture(scope="module") +def test_resources_dir(pytestconfig): + return pytestconfig.rootpath / "tests/integration/kafka" + +@pytest.fixture(scope="module") +def mock_kafka_service(docker_compose_runner, test_resources_dir): with docker_compose_runner( test_resources_dir / "docker-compose.yml", "kafka", cleanup=False ) as docker_services: @@ -31,14 +35,67 @@ def test_kafka_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time): command = f"{test_resources_dir}/send_records.sh {test_resources_dir}" subprocess.run(command, shell=True, check=True) - # Run the metadata ingestion pipeline. - config_file = (test_resources_dir / "kafka_to_file.yml").resolve() - run_datahub_cmd(["ingest", "-c", f"{config_file}"], tmp_path=tmp_path) + yield docker_compose_runner + + +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_kafka_ingest( + mock_kafka_service, test_resources_dir, pytestconfig, tmp_path, mock_time +): + # Run the metadata ingestion pipeline. + config_file = (test_resources_dir / "kafka_to_file.yml").resolve() + run_datahub_cmd(["ingest", "-c", f"{config_file}"], tmp_path=tmp_path) - # Verify the output. - mce_helpers.check_golden_file( - pytestconfig, - output_path=tmp_path / "kafka_mces.json", - golden_path=test_resources_dir / "kafka_mces_golden.json", - ignore_paths=[], + # Verify the output. 
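
The kafka connection tests that follow use the same parametrized success/failure pattern as the mysql tests in this patch. Condensed to its core, with the docker-compose ports these integration tests already rely on (a running broker is assumed):

import pytest
from datahub.ingestion.source.kafka import KafkaSource

@pytest.mark.parametrize(
    "config_dict, is_success",
    [
        ({"connection": {"bootstrap": "localhost:29092", "schema_registry_url": "http://localhost:28081"}}, True),
        ({"connection": {"bootstrap": "localhost:2909", "schema_registry_url": "http://localhost:2808"}}, False),
    ],
)
def test_kafka_connection_both_ways(config_dict, is_success):
    report = KafkaSource.test_connection(config_dict)
    # basic_connectivity.capable should track whether the configured broker is reachable.
    assert bool(report.basic_connectivity and report.basic_connectivity.capable) == is_success
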
+ mce_helpers.check_golden_file( + pytestconfig, + output_path=tmp_path / "kafka_mces.json", + golden_path=test_resources_dir / "kafka_mces_golden.json", + ignore_paths=[], + ) + + +@pytest.mark.parametrize( + "config_dict, is_success", + [ + ( + { + "connection": { + "bootstrap": "localhost:29092", + "schema_registry_url": "http://localhost:28081", + }, + }, + True, + ), + ( + { + "connection": { + "bootstrap": "localhost:2909", + "schema_registry_url": "http://localhost:2808", + }, + }, + False, + ), + ], +) +@pytest.mark.integration +@freeze_time(FROZEN_TIME) +def test_kafka_test_connection(mock_kafka_service, config_dict, is_success): + report = test_connection_helpers.run_test_connection(KafkaSource, config_dict) + if is_success: + test_connection_helpers.assert_basic_connectivity_success(report) + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[SourceCapability.SCHEMA_METADATA], + ) + else: + test_connection_helpers.assert_basic_connectivity_failure( + report, "Failed to get metadata" + ) + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + failure_capabilities={ + SourceCapability.SCHEMA_METADATA: "Failed to establish a new connection" + }, ) diff --git a/metadata-ingestion/tests/integration/mysql/test_mysql.py b/metadata-ingestion/tests/integration/mysql/test_mysql.py index 23fd97ff2671e..c19198c7d2bbd 100644 --- a/metadata-ingestion/tests/integration/mysql/test_mysql.py +++ b/metadata-ingestion/tests/integration/mysql/test_mysql.py @@ -3,7 +3,8 @@ import pytest from freezegun import freeze_time -from tests.test_helpers import mce_helpers +from datahub.ingestion.source.sql.mysql import MySQLSource +from tests.test_helpers import mce_helpers, test_connection_helpers from tests.test_helpers.click_helpers import run_datahub_cmd from tests.test_helpers.docker_helpers import wait_for_port @@ -75,3 +76,38 @@ def test_mysql_ingest_no_db( output_path=tmp_path / "mysql_mces.json", golden_path=test_resources_dir / golden_file, ) + + +@pytest.mark.parametrize( + "config_dict, is_success", + [ + ( + { + "host_port": "localhost:53307", + "database": "northwind", + "username": "root", + "password": "example", + }, + True, + ), + ( + { + "host_port": "localhost:5330", + "database": "wrong_db", + "username": "wrong_user", + "password": "wrong_pass", + }, + False, + ), + ], +) +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_mysql_test_connection(mysql_runner, config_dict, is_success): + report = test_connection_helpers.run_test_connection(MySQLSource, config_dict) + if is_success: + test_connection_helpers.assert_basic_connectivity_success(report) + else: + test_connection_helpers.assert_basic_connectivity_failure( + report, "Connection refused" + ) diff --git a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py index b2cbccf983eb0..4e8469f919db9 100644 --- a/metadata-ingestion/tests/integration/powerbi/test_powerbi.py +++ b/metadata-ingestion/tests/integration/powerbi/test_powerbi.py @@ -21,7 +21,7 @@ Report, Workspace, ) -from tests.test_helpers import mce_helpers +from tests.test_helpers import mce_helpers, test_connection_helpers pytestmark = pytest.mark.integration_batch_2 FROZEN_TIME = "2022-02-03 07:00:00" @@ -681,6 +681,27 @@ def test_powerbi_ingest( ) +@freeze_time(FROZEN_TIME) +@mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) +@pytest.mark.integration +def 
test_powerbi_test_connection_success(mock_msal): + report = test_connection_helpers.run_test_connection( + PowerBiDashboardSource, default_source_config() + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_powerbi_test_connection_failure(): + report = test_connection_helpers.run_test_connection( + PowerBiDashboardSource, default_source_config() + ) + test_connection_helpers.assert_basic_connectivity_failure( + report, "Unable to get authority configuration" + ) + + @freeze_time(FROZEN_TIME) @mock.patch("msal.ConfidentialClientApplication", side_effect=mock_msal_cca) @pytest.mark.integration diff --git a/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py b/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py index 0510f4a40f659..90fa71013338d 100644 --- a/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py +++ b/metadata-ingestion/tests/integration/tableau/test_tableau_ingest.py @@ -28,7 +28,7 @@ ) from datahub.metadata.schema_classes import MetadataChangeProposalClass, UpstreamClass from datahub.utilities.sqlglot_lineage import SqlParsingResult -from tests.test_helpers import mce_helpers +from tests.test_helpers import mce_helpers, test_connection_helpers from tests.test_helpers.state_helpers import ( get_current_checkpoint_from_pipeline, validate_all_providers_have_committed_successfully, @@ -290,6 +290,25 @@ def test_tableau_ingest(pytestconfig, tmp_path, mock_datahub_graph): ) +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_tableau_test_connection_success(): + with mock.patch("datahub.ingestion.source.tableau.Server"): + report = test_connection_helpers.run_test_connection( + TableauSource, config_source_default + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + +@freeze_time(FROZEN_TIME) +@pytest.mark.integration +def test_tableau_test_connection_failure(): + report = test_connection_helpers.run_test_connection( + TableauSource, config_source_default + ) + test_connection_helpers.assert_basic_connectivity_failure(report, "Unable to login") + + @freeze_time(FROZEN_TIME) @pytest.mark.integration def test_tableau_cll_ingest(pytestconfig, tmp_path, mock_datahub_graph): diff --git a/metadata-ingestion/tests/test_helpers/test_connection_helpers.py b/metadata-ingestion/tests/test_helpers/test_connection_helpers.py new file mode 100644 index 0000000000000..45543033ae010 --- /dev/null +++ b/metadata-ingestion/tests/test_helpers/test_connection_helpers.py @@ -0,0 +1,47 @@ +from typing import Dict, List, Optional, Type, Union + +from datahub.ingestion.api.source import ( + CapabilityReport, + SourceCapability, + TestableSource, + TestConnectionReport, +) + + +def run_test_connection( + source_cls: Type[TestableSource], config_dict: Dict +) -> TestConnectionReport: + return source_cls.test_connection(config_dict) + + +def assert_basic_connectivity_success(report: TestConnectionReport) -> None: + assert report is not None + assert report.basic_connectivity + assert report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason is None + + +def assert_basic_connectivity_failure( + report: TestConnectionReport, expected_reason: str +) -> None: + assert report is not None + assert report.basic_connectivity + assert not report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason + assert expected_reason in report.basic_connectivity.failure_reason + + +def assert_capability_report( + 
capability_report: Optional[Dict[Union[SourceCapability, str], CapabilityReport]], + success_capabilities: List[SourceCapability] = [], + failure_capabilities: Dict[SourceCapability, str] = {}, +) -> None: + assert capability_report + for capability in success_capabilities: + assert capability_report[capability] + assert capability_report[capability].failure_reason is None + for capability, expected_reason in failure_capabilities.items(): + assert not capability_report[capability].capable + failure_reason = capability_report[capability].failure_reason + assert failure_reason + assert expected_reason in failure_reason diff --git a/metadata-ingestion/tests/unit/test_snowflake_source.py b/metadata-ingestion/tests/unit/test_snowflake_source.py index 343f4466fd6fd..536c91ace4f5e 100644 --- a/metadata-ingestion/tests/unit/test_snowflake_source.py +++ b/metadata-ingestion/tests/unit/test_snowflake_source.py @@ -1,3 +1,4 @@ +from typing import Any, Dict from unittest.mock import MagicMock, patch import pytest @@ -24,10 +25,20 @@ SnowflakeObjectAccessEntry, ) from datahub.ingestion.source.snowflake.snowflake_v2 import SnowflakeV2Source +from tests.test_helpers import test_connection_helpers + +default_oauth_dict: Dict[str, Any] = { + "client_id": "client_id", + "client_secret": "secret", + "use_certificate": False, + "provider": "microsoft", + "scopes": ["datahub_role"], + "authority_url": "https://dev-abc.okta.com/oauth2/def/v1/token", +} def test_snowflake_source_throws_error_on_account_id_missing(): - with pytest.raises(ValidationError): + with pytest.raises(ValidationError, match="account_id\n field required"): SnowflakeV2Config.parse_obj( { "username": "user", @@ -37,27 +48,21 @@ def test_snowflake_source_throws_error_on_account_id_missing(): def test_no_client_id_invalid_oauth_config(): - oauth_dict = { - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "client_secret": "6Hb9apkbc6HD7", - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - } - with pytest.raises(ValueError): + oauth_dict = default_oauth_dict.copy() + del oauth_dict["client_id"] + with pytest.raises(ValueError, match="client_id\n field required"): OAuthConfiguration.parse_obj(oauth_dict) def test_snowflake_throws_error_on_client_secret_missing_if_use_certificate_is_false(): - oauth_dict = { - "client_id": "882e9831-7ea51cb2b954", - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": False, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - } + oauth_dict = default_oauth_dict.copy() + del oauth_dict["client_secret"] OAuthConfiguration.parse_obj(oauth_dict) - with pytest.raises(ValueError): + with pytest.raises( + ValueError, + match="'oauth_config.client_secret' was none but should be set when using use_certificate false for oauth_config", + ): SnowflakeV2Config.parse_obj( { "account_id": "test", @@ -68,16 +73,13 @@ def test_snowflake_throws_error_on_client_secret_missing_if_use_certificate_is_f def test_snowflake_throws_error_on_encoded_oauth_private_key_missing_if_use_certificate_is_true(): - oauth_dict = { - "client_id": "882e9831-7ea51cb2b954", - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": True, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - "encoded_oauth_public_key": "fkdsfhkshfkjsdfiuwrwfkjhsfskfhksjf==", - } + oauth_dict = default_oauth_dict.copy() + oauth_dict["use_certificate"] 
= True OAuthConfiguration.parse_obj(oauth_dict) - with pytest.raises(ValueError): + with pytest.raises( + ValueError, + match="'base64_encoded_oauth_private_key' was none but should be set when using certificate for oauth_config", + ): SnowflakeV2Config.parse_obj( { "account_id": "test", @@ -88,16 +90,13 @@ def test_snowflake_throws_error_on_encoded_oauth_private_key_missing_if_use_cert def test_snowflake_oauth_okta_does_not_support_certificate(): - oauth_dict = { - "client_id": "882e9831-7ea51cb2b954", - "provider": "okta", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": True, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - "encoded_oauth_public_key": "fkdsfhkshfkjsdfiuwrwfkjhsfskfhksjf==", - } + oauth_dict = default_oauth_dict.copy() + oauth_dict["use_certificate"] = True + oauth_dict["provider"] = "okta" OAuthConfiguration.parse_obj(oauth_dict) - with pytest.raises(ValueError): + with pytest.raises( + ValueError, match="Certificate authentication is not supported for Okta." + ): SnowflakeV2Config.parse_obj( { "account_id": "test", @@ -108,79 +107,52 @@ def test_snowflake_oauth_okta_does_not_support_certificate(): def test_snowflake_oauth_happy_paths(): - okta_dict = { - "client_id": "client_id", - "client_secret": "secret", - "provider": "okta", - "scopes": ["datahub_role"], - "authority_url": "https://dev-abc.okta.com/oauth2/def/v1/token", - } + oauth_dict = default_oauth_dict.copy() + oauth_dict["provider"] = "okta" assert SnowflakeV2Config.parse_obj( { "account_id": "test", "authentication_type": "OAUTH_AUTHENTICATOR", - "oauth_config": okta_dict, + "oauth_config": oauth_dict, } ) - - microsoft_dict = { - "client_id": "client_id", - "provider": "microsoft", - "scopes": ["https://microsoft.com/f4b353d5-ef8d/.default"], - "use_certificate": True, - "authority_url": "https://login.microsoftonline.com/yourorganisation.com", - "encoded_oauth_public_key": "publickey", - "encoded_oauth_private_key": "privatekey", - } + oauth_dict["use_certificate"] = True + oauth_dict["provider"] = "microsoft" + oauth_dict["encoded_oauth_public_key"] = "publickey" + oauth_dict["encoded_oauth_private_key"] = "privatekey" assert SnowflakeV2Config.parse_obj( { "account_id": "test", "authentication_type": "OAUTH_AUTHENTICATOR", - "oauth_config": microsoft_dict, + "oauth_config": oauth_dict, } ) +default_config_dict: Dict[str, Any] = { + "username": "user", + "password": "password", + "account_id": "https://acctname.snowflakecomputing.com", + "warehouse": "COMPUTE_WH", + "role": "sysadmin", +} + + def test_account_id_is_added_when_host_port_is_present(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "host_port": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config_dict = default_config_dict.copy() + del config_dict["account_id"] + config_dict["host_port"] = "acctname" + config = SnowflakeV2Config.parse_obj(config_dict) assert config.account_id == "acctname" def test_account_id_with_snowflake_host_suffix(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "https://acctname.snowflakecomputing.com", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config = SnowflakeV2Config.parse_obj(default_config_dict) assert config.account_id == "acctname" def test_snowflake_uri_default_authentication(): - config = 
SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) - + config = SnowflakeV2Config.parse_obj(default_config_dict) assert config.get_sql_alchemy_url() == ( "snowflake://user:password@acctname" "?application=acryl_datahub" @@ -191,17 +163,10 @@ def test_snowflake_uri_default_authentication(): def test_snowflake_uri_external_browser_authentication(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "authentication_type": "EXTERNAL_BROWSER_AUTHENTICATOR", - } - ) - + config_dict = default_config_dict.copy() + del config_dict["password"] + config_dict["authentication_type"] = "EXTERNAL_BROWSER_AUTHENTICATOR" + config = SnowflakeV2Config.parse_obj(config_dict) assert config.get_sql_alchemy_url() == ( "snowflake://user@acctname" "?application=acryl_datahub" @@ -212,18 +177,12 @@ def test_snowflake_uri_external_browser_authentication(): def test_snowflake_uri_key_pair_authentication(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "authentication_type": "KEY_PAIR_AUTHENTICATOR", - "private_key_path": "/a/random/path", - "private_key_password": "a_random_password", - } - ) + config_dict = default_config_dict.copy() + del config_dict["password"] + config_dict["authentication_type"] = "KEY_PAIR_AUTHENTICATOR" + config_dict["private_key_path"] = "/a/random/path" + config_dict["private_key_password"] = "a_random_password" + config = SnowflakeV2Config.parse_obj(config_dict) assert config.get_sql_alchemy_url() == ( "snowflake://user@acctname" @@ -235,63 +194,35 @@ def test_snowflake_uri_key_pair_authentication(): def test_options_contain_connect_args(): - config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config = SnowflakeV2Config.parse_obj(default_config_dict) connect_args = config.get_options().get("connect_args") assert connect_args is not None def test_snowflake_config_with_view_lineage_no_table_lineage_throws_error(): - with pytest.raises(ValidationError): - SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "include_view_lineage": True, - "include_table_lineage": False, - } - ) + config_dict = default_config_dict.copy() + config_dict["include_view_lineage"] = True + config_dict["include_table_lineage"] = False + with pytest.raises( + ValidationError, + match="include_table_lineage must be True for include_view_lineage to be set", + ): + SnowflakeV2Config.parse_obj(config_dict) def test_snowflake_config_with_column_lineage_no_table_lineage_throws_error(): - with pytest.raises(ValidationError): - SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "include_column_lineage": True, - "include_table_lineage": False, - } - ) + config_dict = default_config_dict.copy() + 
config_dict["include_column_lineage"] = True + config_dict["include_table_lineage"] = False + with pytest.raises( + ValidationError, + match="include_table_lineage must be True for include_column_lineage to be set", + ): + SnowflakeV2Config.parse_obj(config_dict) def test_snowflake_config_with_no_connect_args_returns_base_connect_args(): - config: SnowflakeV2Config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - ) + config: SnowflakeV2Config = SnowflakeV2Config.parse_obj(default_config_dict) assert config.get_options()["connect_args"] is not None assert config.get_options()["connect_args"] == { CLIENT_PREFETCH_THREADS: 10, @@ -300,7 +231,10 @@ def test_snowflake_config_with_no_connect_args_returns_base_connect_args(): def test_private_key_set_but_auth_not_changed(): - with pytest.raises(ValidationError): + with pytest.raises( + ValidationError, + match="Either `private_key` and `private_key_path` is set but `authentication_type` is DEFAULT_AUTHENTICATOR. Should be set to 'KEY_PAIR_AUTHENTICATOR' when using key pair authentication", + ): SnowflakeV2Config.parse_obj( { "account_id": "acctname", @@ -310,19 +244,11 @@ def test_private_key_set_but_auth_not_changed(): def test_snowflake_config_with_connect_args_overrides_base_connect_args(): - config: SnowflakeV2Config = SnowflakeV2Config.parse_obj( - { - "username": "user", - "password": "password", - "account_id": "acctname", - "database_pattern": {"allow": {"^demo$"}}, - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - "connect_args": { - CLIENT_PREFETCH_THREADS: 5, - }, - } - ) + config_dict = default_config_dict.copy() + config_dict["connect_args"] = { + CLIENT_PREFETCH_THREADS: 5, + } + config: SnowflakeV2Config = SnowflakeV2Config.parse_obj(config_dict) assert config.get_options()["connect_args"] is not None assert config.get_options()["connect_args"][CLIENT_PREFETCH_THREADS] == 5 assert config.get_options()["connect_args"][CLIENT_SESSION_KEEP_ALIVE] is True @@ -331,35 +257,20 @@ def test_snowflake_config_with_connect_args_overrides_base_connect_args(): @patch("snowflake.connector.connect") def test_test_connection_failure(mock_connect): mock_connect.side_effect = Exception("Failed to connect to snowflake") - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert not report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason - assert "Failed to connect to snowflake" in report.basic_connectivity.failure_reason + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_failure( + report, "Failed to connect to snowflake" + ) @patch("snowflake.connector.connect") def test_test_connection_basic_success(mock_connect): - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, 
default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) def setup_mock_connect(mock_connect, query_results=None): @@ -400,31 +311,18 @@ def query_results(query): return [] raise ValueError(f"Unexpected query: {query}") - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } setup_mock_connect(mock_connect, query_results) - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - - assert report.capability_report - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert not report.capability_report[SourceCapability.SCHEMA_METADATA].capable - failure_reason = report.capability_report[ - SourceCapability.SCHEMA_METADATA - ].failure_reason - assert failure_reason - - assert ( - "Current role TEST_ROLE does not have permissions to use warehouse" - in failure_reason + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[SourceCapability.CONTAINERS], + failure_capabilities={ + SourceCapability.SCHEMA_METADATA: "Current role TEST_ROLE does not have permissions to use warehouse" + }, ) @@ -445,25 +343,17 @@ def query_results(query): setup_mock_connect(mock_connect, query_results) - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - assert report.capability_report - - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert not report.capability_report[SourceCapability.SCHEMA_METADATA].capable - assert ( - report.capability_report[SourceCapability.SCHEMA_METADATA].failure_reason - is not None + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[SourceCapability.CONTAINERS], + failure_capabilities={ + SourceCapability.SCHEMA_METADATA: "Either no tables exist or current role does not have permissions to access them" + }, ) @@ -488,24 +378,19 @@ def query_results(query): setup_mock_connect(mock_connect, query_results) - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - assert report.capability_report - - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert report.capability_report[SourceCapability.SCHEMA_METADATA].capable - assert report.capability_report[SourceCapability.DESCRIPTIONS].capable + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, 
default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[ + SourceCapability.CONTAINERS, + SourceCapability.SCHEMA_METADATA, + SourceCapability.DESCRIPTIONS, + ], + ) @patch("snowflake.connector.connect") @@ -538,25 +423,21 @@ def query_results(query): setup_mock_connect(mock_connect, query_results) - config = { - "username": "user", - "password": "password", - "account_id": "missing", - "warehouse": "COMPUTE_WH", - "role": "sysadmin", - } - report = SnowflakeV2Source.test_connection(config) - assert report is not None - assert report.basic_connectivity - assert report.basic_connectivity.capable - assert report.basic_connectivity.failure_reason is None - assert report.capability_report - - assert report.capability_report[SourceCapability.CONTAINERS].capable - assert report.capability_report[SourceCapability.SCHEMA_METADATA].capable - assert report.capability_report[SourceCapability.DATA_PROFILING].capable - assert report.capability_report[SourceCapability.DESCRIPTIONS].capable - assert report.capability_report[SourceCapability.LINEAGE_COARSE].capable + report = test_connection_helpers.run_test_connection( + SnowflakeV2Source, default_config_dict + ) + test_connection_helpers.assert_basic_connectivity_success(report) + + test_connection_helpers.assert_capability_report( + capability_report=report.capability_report, + success_capabilities=[ + SourceCapability.CONTAINERS, + SourceCapability.SCHEMA_METADATA, + SourceCapability.DATA_PROFILING, + SourceCapability.DESCRIPTIONS, + SourceCapability.LINEAGE_COARSE, + ], + ) def test_aws_cloud_region_from_snowflake_region_id(): @@ -610,11 +491,10 @@ def test_azure_cloud_region_from_snowflake_region_id(): def test_unknown_cloud_region_from_snowflake_region_id(): - with pytest.raises(Exception) as e: + with pytest.raises(Exception, match="Unknown snowflake region"): SnowflakeV2Source.get_cloud_region_from_snowflake_region_id( "somecloud_someregion" ) - assert "Unknown snowflake region" in str(e) def test_snowflake_object_access_entry_missing_object_id(): diff --git a/metadata-ingestion/tests/unit/test_sql_common.py b/metadata-ingestion/tests/unit/test_sql_common.py index e23d290b611f4..a98bf64171122 100644 --- a/metadata-ingestion/tests/unit/test_sql_common.py +++ b/metadata-ingestion/tests/unit/test_sql_common.py @@ -1,8 +1,7 @@ from typing import Dict -from unittest.mock import Mock +from unittest import mock import pytest -from sqlalchemy.engine.reflection import Inspector from datahub.ingestion.source.sql.sql_common import PipelineContext, SQLAlchemySource from datahub.ingestion.source.sql.sql_config import SQLCommonConfig @@ -13,19 +12,24 @@ class _TestSQLAlchemyConfig(SQLCommonConfig): def get_sql_alchemy_url(self): - pass + return "mysql+pymysql://user:pass@localhost:5330" class _TestSQLAlchemySource(SQLAlchemySource): - pass + @classmethod + def create(cls, config_dict, ctx): + config = _TestSQLAlchemyConfig.parse_obj(config_dict) + return cls(config, ctx, "TEST") + + +def get_test_sql_alchemy_source(): + return _TestSQLAlchemySource.create( + config_dict={}, ctx=PipelineContext(run_id="test_ctx") + ) def test_generate_foreign_key(): - config: SQLCommonConfig = _TestSQLAlchemyConfig() - ctx: PipelineContext = PipelineContext(run_id="test_ctx") - platform: str = "TEST" - inspector: Inspector = Mock() - source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) + source = 
get_test_sql_alchemy_source() fk_dict: Dict[str, str] = { "name": "test_constraint", "referred_table": "test_table", @@ -37,7 +41,7 @@ def test_generate_foreign_key(): dataset_urn="test_urn", schema="test_schema", fk_dict=fk_dict, - inspector=inspector, + inspector=mock.Mock(), ) assert fk_dict.get("name") == foreign_key.name @@ -48,11 +52,7 @@ def test_generate_foreign_key(): def test_use_source_schema_for_foreign_key_if_not_specified(): - config: SQLCommonConfig = _TestSQLAlchemyConfig() - ctx: PipelineContext = PipelineContext(run_id="test_ctx") - platform: str = "TEST" - inspector: Inspector = Mock() - source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) + source = get_test_sql_alchemy_source() fk_dict: Dict[str, str] = { "name": "test_constraint", "referred_table": "test_table", @@ -63,7 +63,7 @@ def test_use_source_schema_for_foreign_key_if_not_specified(): dataset_urn="test_urn", schema="test_schema", fk_dict=fk_dict, - inspector=inspector, + inspector=mock.Mock(), ) assert fk_dict.get("name") == foreign_key.name @@ -105,14 +105,32 @@ def test_get_platform_from_sqlalchemy_uri(uri: str, expected_platform: str) -> N def test_get_db_schema_with_dots_in_view_name(): - config: SQLCommonConfig = _TestSQLAlchemyConfig() - ctx: PipelineContext = PipelineContext(run_id="test_ctx") - platform: str = "TEST" - source = _TestSQLAlchemySource(config=config, ctx=ctx, platform=platform) - + source = get_test_sql_alchemy_source() database, schema = source.get_db_schema( dataset_identifier="database.schema.long.view.name1" ) - assert database == "database" assert schema == "schema" + + +def test_test_connection_success(): + source = get_test_sql_alchemy_source() + with mock.patch( + "datahub.ingestion.source.sql.sql_common.SQLAlchemySource.get_inspectors", + side_effect=lambda: [], + ): + report = source.test_connection({}) + assert report is not None + assert report.basic_connectivity + assert report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason is None + + +def test_test_connection_failure(): + source = get_test_sql_alchemy_source() + report = source.test_connection({}) + assert report is not None + assert report.basic_connectivity + assert not report.basic_connectivity.capable + assert report.basic_connectivity.failure_reason + assert "Connection refused" in report.basic_connectivity.failure_reason From 26114dfeb2d255f1b2a562396908f48c8dd0ad64 Mon Sep 17 00:00:00 2001 From: naoki kuroda <68233204+nnnkkk7@users.noreply.github.com> Date: Fri, 15 Dec 2023 05:42:45 +0900 Subject: [PATCH 101/263] docs: fix sample command for container logs (#9427) --- docs/how/extract-container-logs.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/how/extract-container-logs.md b/docs/how/extract-container-logs.md index 9251d0665c02c..b5fbb4c83cc64 100644 --- a/docs/how/extract-container-logs.md +++ b/docs/how/extract-container-logs.md @@ -86,7 +86,7 @@ Depending on your issue, you may be interested to view both debug and normal inf Since log files are named based on the current date, you'll need to use "ls" to see which files currently exist. To do so, you can use the `kubectl exec` command, using the pod name recorded in step one: ``` -kubectl exec datahub-frontend-1231ead-6767 -n default -- ls -la /tmp/datahub/logs/gms +kubectl exec datahub-gms-c578b47cd-7676 -n default -- ls -la /tmp/datahub/logs/gms total 36388 drwxr-xr-x 2 datahub datahub 4096 Jul 29 07:45 . @@ -131,5 +131,5 @@ Now you should be able to view the logs locally. 
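For readers who prefer to script the two `kubectl exec` steps this doc walks through (listing the log directory, then pulling a file out), here is a minimal sketch using only the Python standard library. It is illustrative only and not part of the patch: the pod name, namespace, and log directory are environment-specific placeholders mirroring the doc's own examples, so substitute the values reported by `kubectl get pods`.

```python
import subprocess

POD = "datahub-gms-c578b47cd-7676"  # placeholder: your pod name from `kubectl get pods`
NAMESPACE = "default"               # placeholder: your namespace
LOG_DIR = "/tmp/datahub/logs/gms"   # GMS log location shown in the doc above


def list_gms_logs() -> list:
    """List the log file names currently present in the pod's GMS log directory."""
    result = subprocess.run(
        ["kubectl", "exec", POD, "-n", NAMESPACE, "--", "ls", LOG_DIR],
        check=True,
        capture_output=True,
        text=True,
    )
    return result.stdout.split()


def fetch_log(filename: str) -> None:
    """Stream one log file out of the pod into a local file of the same name."""
    with open(filename, "wb") as local_file:
        subprocess.run(
            ["kubectl", "exec", POD, "-n", NAMESPACE, "--", "cat", f"{LOG_DIR}/{filename}"],
            check=True,
            stdout=local_file,
        )


if __name__ == "__main__":
    for name in list_gms_logs():
        print(name)  # e.g. gms.log, gms.debug.log, dated rollover files
```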
There are a few ways to get files out of the pod and into a local file. You can either use `kubectl cp` or simply `cat` and pipe the file of interest. We'll show an example using the latter approach: ``` -kubectl exec datahub-frontend-1231ead-6767 -n default -- cat /tmp/datahub/logs/gms/gms.log > my-local-gms.log +kubectl exec datahub-gms-c578b47cd-7676 -n default -- cat /tmp/datahub/logs/gms/gms.log > my-local-gms.log ``` \ No newline at end of file From 4354af20126d1befb2c7391c23310a4eca5bb688 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 14 Dec 2023 16:54:40 -0500 Subject: [PATCH 102/263] fix(ingest): bump source configs json schema version (#9424) --- docs-website/genJsonSchema/gen_json_schema.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs-website/genJsonSchema/gen_json_schema.py b/docs-website/genJsonSchema/gen_json_schema.py index 81c1d5a2c1a30..4af72487644bd 100644 --- a/docs-website/genJsonSchema/gen_json_schema.py +++ b/docs-website/genJsonSchema/gen_json_schema.py @@ -7,7 +7,7 @@ def get_base() -> Any: return { - "$schema": "http://json-schema.org/draft-04/schema#", + "$schema": "https://json-schema.org/draft/2020-12/schema", "id": "https://json.schemastore.org/datahub-ingestion", "title": "Datahub Ingestion", "description": "Root schema of Datahub Ingestion", @@ -116,7 +116,7 @@ def get_base() -> Any: "bootstrap": { "type": "string", "description": "Kafka bootstrap URL.", - "default": "localhost:9092" + "default": "localhost:9092", }, "producer_config": { "type": "object", @@ -125,7 +125,7 @@ def get_base() -> Any: "schema_registry_url": { "type": "string", "description": "URL of schema registry being used.", - "default": "http://localhost:8081" + "default": "http://localhost:8081", }, "schema_registry_config": { "type": "object", From 0ea6145a9d491a1b882ba5a7a4667fb323d31dc4 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Fri, 15 Dec 2023 00:12:45 +0100 Subject: [PATCH 103/263] fix(ingest/profiling): Add option to enable external table profiling (#9463) --- .../datahub/ingestion/source/ge_profiling_config.py | 5 +++++ .../src/datahub/ingestion/source/redshift/profile.py | 9 +++++++++ .../ingestion/source/snowflake/snowflake_profiler.py | 10 ++++++++++ .../ingestion/source/snowflake/snowflake_schema.py | 3 +++ .../ingestion/source/sql/sql_generic_profiler.py | 3 +++ .../tests/integration/snowflake/common.py | 1 + 6 files changed, 31 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py b/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py index 24a3e520d8caf..f340a7b41b7af 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/ge_profiling_config.py @@ -167,6 +167,11 @@ class GEProfilingConfig(ConfigModel): "Applicable only if `use_sampling` is set to True.", ) + profile_external_tables: bool = Field( + default=False, + description="Whether to profile external tables. Only Snowflake and Redshift supports this.", + ) + @pydantic.root_validator(pre=True) def deprecate_bigquery_temp_table_schema(cls, values): # TODO: Update docs to remove mention of this field. 
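The hunks that follow wire this new flag into the Redshift and Snowflake profilers, which now skip `EXTERNAL_TABLE` / `EXTERNAL TABLE` entries unless it is set. For reference, a minimal sketch of a recipe that opts in — illustrative only, not part of the patch; the connection and file-path values are placeholders, and `Pipeline` is the same programmatic entry point imported in the dbt tests earlier in this series:

```python
from datahub.ingestion.run.pipeline import Pipeline

pipeline = Pipeline.create(
    {
        "source": {
            "type": "snowflake",
            "config": {
                "account_id": "my_account",  # placeholder
                "username": "user",          # placeholder
                "password": "password",      # placeholder
                "profiling": {
                    "enabled": True,
                    # New in this patch; defaults to False, so external
                    # tables are skipped unless explicitly enabled.
                    "profile_external_tables": True,
                },
            },
        },
        "sink": {"type": "file", "config": {"filename": "/tmp/snowflake_mces.json"}},
    }
)
pipeline.run()
pipeline.raise_from_status()
```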
diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py index b05850cef6e94..eed82ec4d83e7 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/profile.py @@ -48,6 +48,15 @@ def get_workunits( if not self.config.schema_pattern.allowed(schema): continue for table in tables[db].get(schema, {}): + if ( + not self.config.profiling.profile_external_tables + and table.type == "EXTERNAL_TABLE" + ): + self.report.profiling_skipped_other[schema] += 1 + logger.info( + f"Skipping profiling of external table {db}.{schema}.{table.name}" + ) + continue # Emit the profile work unit profile_request = self.get_profile_request(table, schema, db) if profile_request is not None: diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 89857c4564267..4bda7da422e9d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -50,6 +50,16 @@ def get_workunits( profile_requests = [] for schema in database.schemas: for table in db_tables[schema.name]: + if ( + not self.config.profiling.profile_external_tables + and table.type == "EXTERNAL TABLE" + ): + logger.info( + f"Skipping profiling of external table {database.name}.{schema.name}.{table.name}" + ) + self.report.profiling_skipped_other[schema.name] += 1 + continue + profile_request = self.get_profile_request( table, schema.name, database.name ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py index e5b214ba35e4b..9526bdec4b05d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_schema.py @@ -77,6 +77,7 @@ def get_precise_native_type(self): @dataclass class SnowflakeTable(BaseTable): + type: Optional[str] = None clustering_key: Optional[str] = None pk: Optional[SnowflakePK] = None columns: List[SnowflakeColumn] = field(default_factory=list) @@ -265,6 +266,7 @@ def get_tables_for_database( tables[table["TABLE_SCHEMA"]].append( SnowflakeTable( name=table["TABLE_NAME"], + type=table["TABLE_TYPE"], created=table["CREATED"], last_altered=table["LAST_ALTERED"], size_in_bytes=table["BYTES"], @@ -288,6 +290,7 @@ def get_tables_for_schema( tables.append( SnowflakeTable( name=table["TABLE_NAME"], + type=table["TABLE_TYPE"], created=table["CREATED"], last_altered=table["LAST_ALTERED"], size_in_bytes=table["BYTES"], diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py index a2f91e5fae1a9..30fad9ad584c1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_generic_profiler.py @@ -35,6 +35,9 @@ class DetailedProfilerReportMixin: profiling_skipped_row_limit: TopKDict[str, int] = field( default_factory=int_top_k_dict ) + + profiling_skipped_other: TopKDict[str, int] = field(default_factory=int_top_k_dict) + num_tables_not_eligible_profiling: Dict[str, int] = field( default_factory=int_top_k_dict ) diff --git 
a/metadata-ingestion/tests/integration/snowflake/common.py b/metadata-ingestion/tests/integration/snowflake/common.py index b21cea5f0988d..53b87636068bf 100644 --- a/metadata-ingestion/tests/integration/snowflake/common.py +++ b/metadata-ingestion/tests/integration/snowflake/common.py @@ -79,6 +79,7 @@ def default_query_results( # noqa: C901 { "TABLE_SCHEMA": "TEST_SCHEMA", "TABLE_NAME": "TABLE_{}".format(tbl_idx), + "TABLE_TYPE": "BASE TABLE", "CREATED": datetime(2021, 6, 8, 0, 0, 0, 0), "LAST_ALTERED": datetime(2021, 6, 8, 0, 0, 0, 0), "BYTES": 1024, From 6a169357283790e158472957f87f8c6cfbe67136 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 15 Dec 2023 11:23:04 -0600 Subject: [PATCH 104/263] fix(operations): fix get index sizes integer wrap (#9450) --- .../ElasticSearchTimeseriesAspectService.java | 8 +- .../TimeseriesAspectServiceUnitTest.java | 78 +++++++++++++++++++ .../timeseries/TimeseriesIndexSizeResult.pdl | 3 + ...nkedin.operations.operations.snapshot.json | 5 ++ 4 files changed, 90 insertions(+), 4 deletions(-) create mode 100644 metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java index eec7680a56ecb..f9ab86d41335d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java @@ -206,10 +206,10 @@ public List getIndexSizes() { elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); } - int sizeBytes = - entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); - float sizeMb = (float) sizeBytes / 1000; - elemResult.setSizeMb(sizeMb); + long sizeBytes = + entry.getValue().get("primaries").get("store").get("size_in_bytes").asLong(); + double sizeMb = (double) sizeBytes / 1000000; + elemResult.setSizeInMb(sizeMb); res.add(elemResult); }); return res; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java new file mode 100644 index 0000000000000..a23267dcf6f55 --- /dev/null +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceUnitTest.java @@ -0,0 +1,78 @@ +package com.linkedin.metadata.timeseries.search; + +import static org.mockito.Mockito.*; + +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.fasterxml.jackson.databind.node.NumericNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.timeseries.elastic.ElasticSearchTimeseriesAspectService; +import com.linkedin.metadata.timeseries.elastic.indexbuilder.TimeseriesAspectIndexBuilders; +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.timeseries.TimeseriesIndexSizeResult; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import 
org.apache.commons.io.IOUtils; +import org.apache.http.HttpEntity; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.RestClient; +import org.opensearch.client.RestHighLevelClient; +import org.testng.Assert; +import org.testng.annotations.Test; + +/** + * Test using mocks instead of integration for testing functionality not dependent on a real server + * response + */ +public class TimeseriesAspectServiceUnitTest { + + private final RestHighLevelClient _searchClient = mock(RestHighLevelClient.class); + private final IndexConvention _indexConvention = mock(IndexConvention.class); + private final TimeseriesAspectIndexBuilders _timeseriesAspectIndexBuilders = + mock(TimeseriesAspectIndexBuilders.class); + private final EntityRegistry _entityRegistry = mock(EntityRegistry.class); + private final ESBulkProcessor _bulkProcessor = mock(ESBulkProcessor.class); + private final RestClient _restClient = mock(RestClient.class); + private final TimeseriesAspectService _timeseriesAspectService = + new ElasticSearchTimeseriesAspectService( + _searchClient, + _indexConvention, + _timeseriesAspectIndexBuilders, + _entityRegistry, + _bulkProcessor, + 0); + + private static final String INDEX_PATTERN = "indexPattern"; + + @Test + public void testGetIndicesIntegerWrap() throws IOException { + when(_indexConvention.getAllTimeseriesAspectIndicesPattern()).thenReturn(INDEX_PATTERN); + when(_searchClient.getLowLevelClient()).thenReturn(_restClient); + ObjectNode jsonNode = JsonNodeFactory.instance.objectNode(); + ObjectNode indicesNode = JsonNodeFactory.instance.objectNode(); + ObjectNode indexNode = JsonNodeFactory.instance.objectNode(); + ObjectNode primariesNode = JsonNodeFactory.instance.objectNode(); + ObjectNode storeNode = JsonNodeFactory.instance.objectNode(); + NumericNode bytesNode = JsonNodeFactory.instance.numberNode(8078398031L); + storeNode.set("size_in_bytes", bytesNode); + primariesNode.set("store", storeNode); + indexNode.set("primaries", primariesNode); + indicesNode.set("someIndexName", indexNode); + jsonNode.set("indices", indicesNode); + + Response response = mock(Response.class); + HttpEntity responseEntity = mock(HttpEntity.class); + when(response.getEntity()).thenReturn(responseEntity); + when(responseEntity.getContent()) + .thenReturn(IOUtils.toInputStream(jsonNode.toString(), StandardCharsets.UTF_8)); + when(_restClient.performRequest(any(Request.class))).thenReturn(response); + + List results = _timeseriesAspectService.getIndexSizes(); + + Assert.assertEquals(results.get(0).getSizeInMb(), 8078.398031); + } +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl b/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl index b888ef7c0716b..35297314187bf 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/timeseries/TimeseriesIndexSizeResult.pdl @@ -22,5 +22,8 @@ record TimeseriesIndexSizeResult{ /** * Size */ + @deprecated = "use sizeInMb instead" sizeMb: float = 0 + + sizeInMb: double = 0 } diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index 339ce62de6298..eae0eed2dd50b 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ 
b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -3668,6 +3668,11 @@ "name" : "sizeMb", "type" : "float", "doc" : "Size", + "default" : 0.0, + "deprecated" : "use sizeInMb instead" + }, { + "name" : "sizeInMb", + "type" : "double", "default" : 0.0 } ] }, { From 824df5a6a3e9fed2f18f3e454c40b8d822011b5c Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 15 Dec 2023 13:28:33 -0600 Subject: [PATCH 105/263] feat(build): gradle 8, jdk17, neo4j 5 (#9458) --- .github/workflows/airflow-plugin.yml | 5 + .github/workflows/build-and-test.yml | 4 +- .github/workflows/check-datahub-jars.yml | 4 +- .github/workflows/docker-unified.yml | 39 +- .github/workflows/documentation.yml | 4 +- .github/workflows/metadata-ingestion.yml | 5 + .github/workflows/metadata-io.yml | 4 +- .github/workflows/metadata-model.yml | 5 + .github/workflows/publish-datahub-jars.yml | 4 +- .github/workflows/spark-smoke-test.yml | 4 +- build.gradle | 137 +- buildSrc/build.gradle | 13 +- .../pegasus/gradle/PegasusPlugin.java | 2444 +++++++++++++++++ .../gradle/tasks/ChangedFileReportTask.java | 124 + datahub-frontend/build.gradle | 22 +- datahub-frontend/play.gradle | 19 +- datahub-graphql-core/build.gradle | 3 +- datahub-web-react/build.gradle | 10 +- docker/datahub-frontend/Dockerfile | 7 +- docker/datahub-frontend/start.sh | 2 + docker/datahub-gms/Dockerfile | 4 +- docker/datahub-ingestion/build.gradle | 6 +- docker/datahub-mae-consumer/Dockerfile | 4 +- docker/datahub-mce-consumer/Dockerfile | 4 +- docker/datahub-upgrade/Dockerfile | 4 +- docker/kafka-setup/Dockerfile | 2 +- docs-website/build.gradle | 18 +- docs-website/vercel-setup.sh | 2 +- docs/developers.md | 10 +- docs/how/updating-datahub.md | 4 + docs/troubleshooting/build.md | 4 +- entity-registry/build.gradle | 7 +- gradle/wrapper/gradle-wrapper.properties | 2 +- li-utils/build.gradle | 20 +- metadata-auth/auth-api/build.gradle | 9 +- metadata-events/mxe-utils-avro/build.gradle | 5 +- .../java/datahub-client/build.gradle | 16 +- .../datahub-protobuf-example/build.gradle | 4 - .../java/datahub-protobuf/build.gradle | 8 +- .../java/examples/build.gradle | 16 +- .../java/spark-lineage/build.gradle | 68 +- .../java/spark-lineage/scripts/check_jar.sh | 4 +- .../docker/SparkBase.Dockerfile | 2 +- .../python_test_run.sh | 13 +- .../spark-smoke-test/spark-docker.conf | 4 + .../test-spark-lineage/build.gradle | 11 - .../datahub/spark/TestCoalesceJobLineage.java | 5 +- .../datahub/spark/TestSparkJobsLineage.java | 3 + metadata-io/build.gradle | 5 +- .../graph/neo4j/Neo4jGraphService.java | 4 +- metadata-jobs/mae-consumer/build.gradle | 1 + metadata-jobs/mce-consumer/build.gradle | 3 +- metadata-jobs/pe-consumer/build.gradle | 3 +- metadata-models-custom/build.gradle | 2 +- metadata-models-validator/build.gradle | 4 +- metadata-models/build.gradle | 20 +- metadata-service/auth-config/build.gradle | 4 +- metadata-service/auth-filter/build.gradle | 4 +- metadata-service/auth-impl/build.gradle | 4 +- ...formInstanceFieldResolverProviderTest.java | 4 +- .../auth-servlet-impl/build.gradle | 4 +- metadata-service/factories/build.gradle | 4 +- .../graphql-servlet-impl/build.gradle | 4 +- metadata-service/openapi-servlet/build.gradle | 4 +- metadata-service/plugin/build.gradle | 6 +- .../src/test/sample-test-plugins/build.gradle | 4 +- metadata-service/restli-api/build.gradle | 6 +- metadata-service/restli-client/build.gradle | 6 +- .../restli-servlet-impl/build.gradle | 6 +- 
.../schema-registry-api/build.gradle | 7 +- .../schema-registry-servlet/build.gradle | 4 +- metadata-service/services/build.gradle | 6 +- metadata-service/servlet/build.gradle | 4 +- metadata-utils/build.gradle | 4 +- mock-entity-registry/build.gradle | 4 +- smoke-test/build.gradle | 7 +- test-models/build.gradle | 16 +- vercel.json | 2 +- 78 files changed, 3008 insertions(+), 266 deletions(-) create mode 100644 buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java create mode 100644 buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index d0c0f52781b9a..cd1e159b7d53c 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -49,6 +49,11 @@ jobs: extra_pip_extras: plugin-v2 fail-fast: false steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 10c137a206531..dab64cf2dca5e 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -37,11 +37,11 @@ jobs: with: timezoneLinux: ${{ matrix.timezone }} - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 8e507ea40fd96..46d97ffec8861 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -28,11 +28,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index fef23f9efa85f..169a86000adcc 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -79,6 +79,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -135,6 +140,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -191,6 +201,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -247,6 +262,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -303,6 +323,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 
17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -537,6 +562,11 @@ jobs: needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_slim_build] steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -618,6 +648,11 @@ jobs: needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_full_build] steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -720,11 +755,11 @@ jobs: run: df -h . && docker images - name: Check out the repo uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index c94282938120e..29953b8b70d91 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -27,11 +27,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index ec6bd4141cc6f..4e04fef3b3980 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -44,6 +44,11 @@ jobs: - python-version: "3.10" fail-fast: false steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 48f230ce14c8d..2188fcb07c77a 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -29,11 +29,11 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index eb098a327e4cb..d0112f1b14e7a 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -29,6 +29,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index ec7985ef3b3d0..24d1c5436b315 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ 
b/.github/workflows/publish-datahub-jars.yml @@ -49,11 +49,11 @@ jobs: if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }} steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 70b66d6452b26..60e183cce5179 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -30,11 +30,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/build.gradle b/build.gradle index b16e3ca169c71..a7a85db0398e2 100644 --- a/build.gradle +++ b/build.gradle @@ -1,17 +1,20 @@ buildscript { + ext.jdkVersion = 17 + ext.javaClassVersion = 11 + ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.46.8' + ext.pegasusVersion = '29.48.4' ext.mavenVersion = '3.6.3' ext.springVersion = '5.3.29' ext.springBootVersion = '2.7.14' ext.openTelemetryVersion = '1.18.0' - ext.neo4jVersion = '4.4.9' - ext.neo4jTestVersion = '4.4.25' - ext.neo4jApocVersion = '4.4.0.20:all' + ext.neo4jVersion = '5.14.0' + ext.neo4jTestVersion = '5.14.0' + ext.neo4jApocVersion = '5.14.0' ext.testContainersVersion = '1.17.4' ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x - ext.jacksonVersion = '2.15.2' + ext.jacksonVersion = '2.15.3' ext.jettyVersion = '9.4.46.v20220331' ext.playVersion = '2.8.18' ext.log4jVersion = '2.19.0' @@ -29,19 +32,19 @@ buildscript { buildscript.repositories.addAll(project.repositories) dependencies { classpath 'com.linkedin.pegasus:gradle-plugins:' + pegasusVersion - classpath 'com.github.node-gradle:gradle-node-plugin:2.2.4' + classpath 'com.github.node-gradle:gradle-node-plugin:7.0.1' classpath 'io.acryl.gradle.plugin:gradle-avro-plugin:0.2.0' classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0" classpath "com.palantir.gradle.gitversion:gradle-git-version:3.0.0" classpath "org.gradle.playframework:gradle-playframework:0.14" - classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.1" + classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.2" } } plugins { - id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' - id 'com.github.johnrengelman.shadow' version '6.1.0' + id 'com.gorylenko.gradle-git-properties' version '2.4.1' + id 'com.github.johnrengelman.shadow' version '8.1.1' apply false id 'com.palantir.docker' version '0.35.0' apply false id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ @@ -149,19 +152,20 @@ project.ext.externalDependency = [ 'log4jApi': "org.apache.logging.log4j:log4j-api:$log4jVersion", 'log4j12Api': "org.slf4j:log4j-over-slf4j:$slf4jVersion", 'log4j2Api': "org.apache.logging.log4j:log4j-to-slf4j:$log4jVersion", - 'lombok': 'org.projectlombok:lombok:1.18.16', + 'lombok': 'org.projectlombok:lombok:1.18.30', 'mariadbConnector': 'org.mariadb.jdbc:mariadb-java-client:2.6.0', 'mavenArtifact': 
"org.apache.maven:maven-artifact:$mavenVersion", 'mixpanel': 'com.mixpanel:mixpanel-java:1.4.4', - 'mockito': 'org.mockito:mockito-core:3.0.0', - 'mockitoInline': 'org.mockito:mockito-inline:3.0.0', + 'mockito': 'org.mockito:mockito-core:4.11.0', + 'mockitoInline': 'org.mockito:mockito-inline:4.11.0', 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', 'mysqlConnector': 'mysql:mysql-connector-java:8.0.20', 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jTestVersion, 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, 'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion, - 'neo4jApoc': 'org.neo4j.procedure:apoc:' + neo4jApocVersion, + 'neo4jApocCore': 'org.neo4j.procedure:apoc-core:' + neo4jApocVersion, + 'neo4jApocCommon': 'org.neo4j.procedure:apoc-common:' + neo4jApocVersion, 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion, 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', @@ -190,8 +194,8 @@ project.ext.externalDependency = [ 'servletApi': 'javax.servlet:javax.servlet-api:3.1.0', 'shiroCore': 'org.apache.shiro:shiro-core:1.11.0', 'snakeYaml': 'org.yaml:snakeyaml:2.0', - 'sparkSql' : 'org.apache.spark:spark-sql_2.11:2.4.8', - 'sparkHive' : 'org.apache.spark:spark-hive_2.11:2.4.8', + 'sparkSql' : 'org.apache.spark:spark-sql_2.12:3.0.3', + 'sparkHive' : 'org.apache.spark:spark-hive_2.12:3.0.3', 'springBeans': "org.springframework:spring-beans:$springVersion", 'springContext': "org.springframework:spring-context:$springVersion", 'springCore': "org.springframework:spring-core:$springVersion", @@ -210,7 +214,6 @@ project.ext.externalDependency = [ 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15', 'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46', - 'testngJava8': 'org.testng:testng:7.5.1', 'testng': 'org.testng:testng:7.8.0', 'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion, 'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion, @@ -226,13 +229,69 @@ project.ext.externalDependency = [ 'charle': 'com.charleskorn.kaml:kaml:0.53.0', 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', - 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' + 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0', + 'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2' ] allprojects { apply plugin: 'idea' apply plugin: 'eclipse' // apply plugin: 'org.gradlex.java-ecosystem-capabilities' + + tasks.withType(Test).configureEach { + // https://docs.gradle.org/current/userguide/performance.html + maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 + + if (project.configurations.getByName("testImplementation").getDependencies() + .any{ it.getName().contains("testng") }) { + useTestNG() + } + } + + if (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application') + || project.plugins.hasPlugin('pegasus')) { + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + compileJava { + options.release = javaClassVersion + } + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + 
languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + // not duplicated, need to set this outside and inside afterEvaluate + afterEvaluate { + compileJava { + options.release = javaClassVersion + } + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + } + } } configure(subprojects.findAll {! it.name.startsWith('spark-lineage')}) { @@ -264,8 +323,9 @@ subprojects { failOnNoGitDirectory = false } - plugins.withType(JavaPlugin) { + plugins.withType(JavaPlugin).configureEach { dependencies { + implementation externalDependency.annotationApi constraints { implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") implementation('io.netty:netty-all:4.1.100.Final') @@ -276,18 +336,30 @@ subprojects { implementation("com.fasterxml.jackson.core:jackson-dataformat-cbor:$jacksonVersion") } } + spotless { java { googleJavaFormat() target project.fileTree(project.projectDir) { - include '**/*.java' - exclude 'build/**/*.java' - exclude '**/generated/**/*.*' - exclude '**/mainGeneratedDataTemplate/**/*.*' - exclude '**/mainGeneratedRest/**/*.*' + include 'src/**/*.java' + exclude 'src/**/resources/' + exclude 'src/**/generated/' + exclude 'src/**/mainGeneratedDataTemplate/' + exclude 'src/**/mainGeneratedRest/' + exclude 'src/renamed/avro/' + exclude 'src/test/sample-test-plugins/' } } } + + if (project.plugins.hasPlugin('pegasus')) { + dependencies { + dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 + restClientCompile spec.product.pegasus.restliClient + } + } + afterEvaluate { def spotlessJavaTask = tasks.findByName('spotlessJava') def processTask = tasks.findByName('processResources') @@ -305,28 +377,11 @@ subprojects { } } - tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(11) - } - } - tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(11) - } - // https://docs.gradle.org/current/userguide/performance.html - maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 - - if (project.configurations.getByName("testImplementation").getDependencies() - .any{ it.getName().contains("testng") }) { - useTestNG() - } - } - afterEvaluate { if (project.plugins.hasPlugin('pegasus')) { dependencies { dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 restClientCompile spec.product.pegasus.restliClient } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 1f9d30d520171..0c2d91e1f7ac1 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -1,9 +1,11 @@ -apply plugin: 'java' - buildscript { apply from: '../repositories.gradle' } +plugins { + id 'java' +} + dependencies { /** * Forked version of abandoned repository: https://github.com/fge/json-schema-avro @@ -21,6 +23,9 @@ dependencies { implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5' implementation 'commons-io:commons-io:2.11.0' - compileOnly 
'org.projectlombok:lombok:1.18.14' - annotationProcessor 'org.projectlombok:lombok:1.18.14' + compileOnly 'org.projectlombok:lombok:1.18.30' + annotationProcessor 'org.projectlombok:lombok:1.18.30' + + // pegasus dependency, overrides for tasks + implementation 'com.linkedin.pegasus:gradle-plugins:29.48.4' } \ No newline at end of file diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java new file mode 100644 index 0000000000000..2460abcad6f9e --- /dev/null +++ b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java @@ -0,0 +1,2444 @@ +/* + * Copyright (c) 2019 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.linkedin.pegasus.gradle; + +import com.linkedin.pegasus.gradle.PegasusOptions.IdlOptions; +import com.linkedin.pegasus.gradle.internal.CompatibilityLogChecker; +import com.linkedin.pegasus.gradle.tasks.ChangedFileReportTask; +import com.linkedin.pegasus.gradle.tasks.CheckIdlTask; +import com.linkedin.pegasus.gradle.tasks.CheckPegasusSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.CheckRestModelTask; +import com.linkedin.pegasus.gradle.tasks.CheckSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.GenerateAvroSchemaTask; +import com.linkedin.pegasus.gradle.tasks.GenerateDataTemplateTask; +import com.linkedin.pegasus.gradle.tasks.GeneratePegasusSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.GenerateRestClientTask; +import com.linkedin.pegasus.gradle.tasks.GenerateRestModelTask; +import com.linkedin.pegasus.gradle.tasks.PublishRestModelTask; +import com.linkedin.pegasus.gradle.tasks.TranslateSchemasTask; +import com.linkedin.pegasus.gradle.tasks.ValidateExtensionSchemaTask; +import com.linkedin.pegasus.gradle.tasks.ValidateSchemaAnnotationTask; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Method; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.TreeSet; +import java.util.function.Function; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.gradle.api.Action; +import org.gradle.api.GradleException; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.ConfigurationContainer; +import org.gradle.api.file.FileCollection; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginConvention; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.publish.PublishingExtension; +import 
org.gradle.api.publish.ivy.IvyPublication; +import org.gradle.api.publish.ivy.plugins.IvyPublishPlugin; +import org.gradle.api.tasks.Copy; +import org.gradle.api.tasks.Delete; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.Sync; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.bundling.Jar; +import org.gradle.api.tasks.compile.JavaCompile; +import org.gradle.api.tasks.javadoc.Javadoc; +import org.gradle.language.base.plugins.LifecycleBasePlugin; +import org.gradle.language.jvm.tasks.ProcessResources; +import org.gradle.plugins.ide.eclipse.EclipsePlugin; +import org.gradle.plugins.ide.eclipse.model.EclipseModel; +import org.gradle.plugins.ide.idea.IdeaPlugin; +import org.gradle.plugins.ide.idea.model.IdeaModule; +import org.gradle.util.GradleVersion; + + +/** + * Pegasus code generation plugin. + * The supported project layout for this plugin is as follows: + * + *
+ *   --- api/
+ *   |   --- build.gradle
+ *   |   --- src/
+ *   |       --- <sourceSet>/
+ *   |       |   --- idl/
+ *   |       |   |   --- <published idl (.restspec.json) files>
+ *   |       |   --- java/
+ *   |       |   |   --- <packageName>/
+ *   |       |   |       --- <common java files>
+ *   |       |   --- pegasus/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <data schema (.pdsc) files>
+ *   |       --- <sourceSet>GeneratedDataTemplate/
+ *   |       |   --- java/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <data template source files generated from data schema (.pdsc) files>
+ *   |       --- <sourceSet>GeneratedAvroSchema/
+ *   |       |   --- avro/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <avsc avro schema files (.avsc) generated from pegasus schema files>
+ *   |       --- <sourceSet>GeneratedRest/
+ *   |           --- java/
+ *   |               --- <packageName>/
+ *   |                   --- <rest client source (.java) files generated from published idl>
+ *   --- impl/
+ *   |   --- build.gradle
+ *   |   --- src/
+ *   |       --- <sourceSet>/
+ *   |       |   --- java/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <resource class source (.java) files>
+ *   |       --- <sourceSet>GeneratedRest/
+ *   |           --- idl/
+ *   |               --- <generated idl (.restspec.json) files>
+ *   --- <other projects>/
+ *
+ *   - api: contains all the files that are commonly depended on by the server and
+ *     client implementation. The common files include the data schema (.pdsc) files,
+ *     the idl (.restspec.json) files and potentially Java interface files used by both sides.
+ *
+ *   - impl: contains the resource classes for the server implementation
+ *     (a minimal application sketch follows).
+ *
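+ * As a minimal, illustrative sketch of the setup implied by this layout (the plugin id
+ * matches the 'pegasus' plugin referenced elsewhere in this build; nothing else here is
+ * prescriptive), each api and impl project applies the plugin in its build.gradle:
+ *
+ *   apply plugin: 'pegasus'
+ *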

+ * Performs the following functions:
+ *
+ * Generate data model and data template jars for each source set.
+ *
+ * Overview:
+ *
+ * In the api project, the plugin generates the data template source (.java) files from the
+ * data schema (.pdsc) files, then compiles the source files and packages them
+ * into jar files. Details of the jar contents are explained in the paragraphs below.
+ * In general, data schema files should exist only in api projects.
+ *
+ * Configure the server and client implementation projects to depend on the
+ * api project's dataTemplate configuration to get access to the generated data templates
+ * from within these projects. This allows api classes to be built first so that implementation
+ * projects can consume them. We recommend this structure to avoid circular dependencies
+ * (direct or indirect) among implementation projects.
+ *
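+ * A minimal sketch of that dependency wiring (the project path ':example-api' is a
+ * hypothetical placeholder, not a project in this repository):
+ *
+ *   dependencies {
+ *     dataModel project(path: ':example-api', configuration: 'dataTemplate')
+ *   }
+ *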
+ * Detail:
+ *
+ * Generates data template source (.java) files from data schema (.pdsc) files,
+ * compiles the data template source (.java) files into class (.class) files,
+ * creates a data model jar file and a data template jar file.
+ * The data model jar file contains the source data schema (.pdsc) files.
+ * The data template jar file contains both the source data schema (.pdsc) files
+ * and the generated data template class (.class) files.
+ *
+ * In the data template generation phase, the plugin creates a new target source set
+ * for the generated files. The new target source set's name is the input source set name
+ * suffixed with "GeneratedDataTemplate", e.g. "mainGeneratedDataTemplate".
+ * The plugin invokes PegasusDataTemplateGenerator to generate data template source (.java) files
+ * for all data schema (.pdsc) files present in the input source set's pegasus
+ * directory, e.g. "src/main/pegasus". The generated data template source (.java) files
+ * will be in the new target source set's java source directory, e.g.
+ * "src/mainGeneratedDataTemplate/java". In addition to
+ * the data schema (.pdsc) files in the pegasus directory, the dataModel configuration
+ * specifies the resolver path for the PegasusDataTemplateGenerator. The resolver path
+ * provides the data schemas and previously generated data template classes that
+ * may be referenced by the input source set's data schemas. In most cases, the dataModel
+ * configuration should contain data template jars.
+ *
+ * The next phase is the data template compilation phase, in which the plugin compiles the
+ * generated data template source (.java) files into class files. The dataTemplateCompile
+ * configuration specifies the pegasus jars needed to compile these classes. The
+ * compileClasspath of the target source set is a composite of the dataModel configuration,
+ * which includes the data template classes that were previously generated and included in
+ * the dependent data template jars, and the dataTemplateCompile configuration.
+ * This configuration should specify a dependency on the Pegasus data jar.
+ *
+ * The following phase is creating the data model jar and the data template jar.
+ * This plugin creates the data model jar that includes the contents of the
+ * input source set's pegasus directory, and sets the jar file's classification to
+ * "data-model". Hence, the resulting jar file's name should end with "-data-model.jar".
+ * It adds the data model jar as an artifact to the dataModel configuration.
+ * This jar file should only contain data schema (.pdsc) files.
+ *
+ * This plugin also creates the data template jar that includes the contents of the input
+ * source set's pegasus directory and the java class output directory of the
+ * target source set. It sets the jar file's classification to "data-template".
+ * Hence, the resulting jar file's name should end with "-data-template.jar".
+ * It adds the data template jar file as an artifact to the dataTemplate configuration.
+ * This jar file contains both data schema (.pdsc) files and generated data template
+ * class (.class) files.
+ *
+ * This plugin will ensure that data template source files are generated before
+ * compiling the input source set and before the idea and eclipse tasks. It
+ * also adds the generated classes to the compileClasspath of the input source set.
+ *
+ * The configurations that apply to generating the data model and data template jars
+ * are as follows:
+ *
+ *   - The dataTemplateCompile configuration specifies the classpath for compiling
+ *     the generated data template source (.java) files. In most cases,
+ *     it should be the Pegasus data jar.
+ *     (The default compile configuration is not used for compiling data templates because
+ *     it is not desirable to include non data template dependencies in the data template jar.)
+ *     The configuration should not directly include data template jars. Data template jars
+ *     should be included in the dataModel configuration.
+ *   - The dataModel configuration provides the value of the "generator.resolver.path"
+ *     system property that is passed to PegasusDataTemplateGenerator. In most cases,
+ *     this configuration should contain only data template jars. The data template jars
+ *     contain both data schema (.pdsc) files and generated data template (.class) files.
+ *     PegasusDataTemplateGenerator will not generate data template (.java) files for
+ *     classes that can be found in the resolver path. This avoids redundant generation
+ *     of the same classes, and inclusion of these classes in multiple jars.
+ *     The dataModel configuration is also used to publish the data model jar which
+ *     contains only data schema (.pdsc) files (see the wiring sketch below).
+ *   - The testDataModel configuration is similar to the dataModel configuration
+ *     except it is used when generating data templates from test source sets.
+ *     It extends from the dataModel configuration. It is also used to publish
+ *     the data model jar from test source sets.
+ *   - The dataTemplate configuration is used to publish the data template
+ *     jar which contains both data schema (.pdsc) files and the data template class
+ *     (.class) files generated from these data schema (.pdsc) files.
+ *   - The testDataTemplate configuration is similar to the dataTemplate configuration
+ *     except it is used when publishing the data template jar files generated from
+ *     test source sets.
+ *
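+ * A sketch of how an api project commonly wires these configurations (the
+ * dataTemplateCompile line follows the convention used elsewhere in this build;
+ * the dataModel project path is a hypothetical placeholder):
+ *
+ *   dependencies {
+ *     dataTemplateCompile spec.product.pegasus.data
+ *     dataModel project(path: ':upstream-api', configuration: 'dataTemplate')
+ *   }
+ *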
+ * Performs the following functions:
+ *
+ * Generate avro schema jars for each source set.
+ *
+ * Overview:
+ *
+ * In the api project, the task 'generateAvroSchema' generates the avro schema (.avsc)
+ * files from pegasus schema (.pdsc) files. In general, data schema files should exist
+ * only in api projects.
+ *
+ * Configure the server and client implementation projects to depend on the
+ * api project's avroSchema configuration to get access to the generated avro schemas
+ * from within these projects.
+ *
+ * This plugin also creates the avro schema jar that includes the contents of the input
+ * source set's avro directory and the avsc schema files.
+ * The resulting jar file's name should end with "-avro-schema.jar". A consumption
+ * sketch follows.
+ *
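+ * An illustrative consumer of the generated avro schemas (the project path is a
+ * hypothetical placeholder):
+ *
+ *   dependencies {
+ *     implementation project(path: ':example-api', configuration: 'avroSchema')
+ *   }
+ *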

+ * Generate rest model and rest client jars for each source set.
+ *
+ * Overview:
+ *
+ * In the api project, the plugin generates rest client source (.java) files from the idl,
+ * compiles the rest client source (.java) files to rest client class (.class) files,
+ * and puts them in jar files. In general, the api project should be the only place that
+ * contains the publishable idl files. If the published idl changes an existing idl
+ * in the api project, the plugin will emit a message indicating this has occurred and
+ * suggest that the entire project be rebuilt if it is desirable for clients of the
+ * idl to pick up the newly published changes.
+ *
+ * In the impl project, the plugin generates the idl (.restspec.json) files from the input
+ * source set's resource class files, then compares them against the existing idl
+ * files in the api project for compatibility checking. If incompatible changes are
+ * found, the build fails (unless a certain flag is specified; see below). If the
+ * generated idl passes the compatibility checks (see compatibility check levels below),
+ * the plugin publishes the generated idl (.restspec.json) to the api project.
+ *
+ * Detail:
+ *
+ * rest client generation phase: in api project
+ *
+ * In this phase, the rest client source (.java) files are generated from the
+ * api project idl (.restspec.json) files using RestRequestBuilderGenerator.
+ * The generated rest client source files will be in the new target source set's
+ * java source directory, e.g. "src/mainGeneratedRest/java".
+ *
+ * RestRequestBuilderGenerator requires access to the data schemas referenced
+ * by the idl. The dataModel configuration specifies the resolver path needed
+ * by RestRequestBuilderGenerator to access the data schemas referenced by
+ * the idl that are not in the source set's pegasus directory.
+ * This plugin automatically includes the data schema (.pdsc) files in the
+ * source set's pegasus directory in the resolver path.
+ * In most cases, the dataModel configuration should contain data template jars.
+ * The data template jars contain both data schema (.pdsc) files and generated
+ * data template class (.class) files. By specifying data template jars instead
+ * of data model jars, redundant generation of data template classes is avoided,
+ * as classes that can be found in the resolver path are not generated.
+ *
+ * rest client compilation phase: in api project
+ *
+ * In this phase, the plugin compiles the generated rest client source (.java)
+ * files into class files. The restClientCompile configuration specifies the
+ * pegasus jars needed to compile these classes. The compile classpath is a
+ * composite of the dataModel configuration, which includes the data template
+ * classes that were previously generated and included in the dependent data template
+ * jars, and the restClientCompile configuration.
+ * This configuration should specify a dependency on the Pegasus restli-client jar.
+ *
+ * The following stage is creating the rest model jar and the rest client jar.
+ * This plugin creates the rest model jar that includes the
+ * generated idl (.restspec.json) files, and sets the jar file's classification to
+ * "rest-model". Hence, the resulting jar file's name should end with "-rest-model.jar".
+ * It adds the rest model jar as an artifact to the restModel configuration.
+ * This jar file should only contain idl (.restspec.json) files.
+ *
+ * This plugin also creates the rest client jar that includes the generated
+ * idl (.restspec.json) files and the java class output directory of the
+ * target source set. It sets the jar file's classification to "rest-client".
+ * Hence, the resulting jar file's name should end with "-rest-client.jar".
+ * It adds the rest client jar file as an artifact to the restClient configuration.
+ * This jar file contains both idl (.restspec.json) files and generated rest client
+ * class (.class) files.
+ *
+ * idl generation phase: in server implementation project
+ *
+ * Before entering this phase, the plugin will ensure that generating idl will
+ * occur after compiling the input source set. It will also ensure that the IDEA
+ * and Eclipse tasks run after the rest client source (.java) files are generated.
+ *
+ * In this phase, the plugin creates a new target source set for the generated files.
+ * The new target source set's name is the input source set name suffixed with
+ * "GeneratedRest", e.g. "mainGeneratedRest". The plugin invokes
+ * RestLiResourceModelExporter to generate idl (.restspec.json) files for each
+ * IdlItem in the input source set's pegasus IdlOptions. The generated idl files
+ * will be in the target source set's idl directory, e.g. "src/mainGeneratedRest/idl".
+ * For example, the following adds an IdlItem to the source set's pegasus IdlOptions.
+ * This line should appear in the impl project's build.gradle. If no IdlItem is added,
+ * this source set will be excluded from generating idl and checking idl compatibility,
+ * even if there are existing idl files.
+ *
+ *   pegasus.main.idlOptions.addIdlItem(["com.linkedin.restli.examples.groups.server"])
+ *
+ * After the idl generation phase, each included idl file is checked for compatibility against
+ * those in the api project. In case the current interface breaks compatibility,
+ * by default the build fails and reports all compatibility errors and warnings. Otherwise,
+ * the build tasks in the api project will later package the resource classes into jar files.
+ * Users can change the compatibility requirement between the current and published idl by
+ * setting the "rest.model.compatibility" project property, i.e.
+ * "gradle -Prest.model.compatibility=<level> ..." The following levels are supported
+ * (an example invocation follows the list):
+ *
+ *   - ignore: idl compatibility check will occur but its result will be ignored.
+ *     The result will be aggregated and printed at the end of the build.
+ *   - backwards: build fails if there are backwards incompatible changes in idl.
+ *     Build continues if there are only compatible changes.
+ *   - equivalent (default): build fails if there are any functional changes (compatible or
+ *     incompatible) in the current idl. Only docs and comments are allowed to be different.
+ *
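+ * An illustrative invocation that relaxes the default level (property and level names
+ * are those documented above):
+ *
+ *   gradle build -Prest.model.compatibility=backwards
+ *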
+ * The plugin needs to know where the api project is. It searches for the api project in the
+ * following steps. If all searches fail, the build fails.
+ *
+ *   1. Use the specified project from the impl project build.gradle file. The ext.apiProject
+ *      property explicitly assigns the api project. E.g.
+ *
+ *        ext.apiProject = project(':groups:groups-server-api')
+ *
+ *      If multiple such statements exist, the last one will be used. A wrong project path
+ *      causes a Gradle evaluation error.
+ *   2. If no ext.apiProject property is defined, the plugin will try to guess the
+ *      api project name with the following conventions. The search stops at the first
+ *      successful match.
+ *
+ *        1. If the impl project name ends with one of the following suffixes, substitute
+ *           the suffix with "-api":
+ *             -impl
+ *             -service
+ *             -server
+ *             -server-impl
+ *           This list can be overridden by inserting the following line into the project
+ *           build.gradle:
+ *
+ *             ext.apiProjectSubstitutionSuffixes = ['-new-suffix-1', '-new-suffix-2']
+ *
+ *           Alternatively, this setting can be applied globally to all projects by putting
+ *           it in the subprojects section of the root build.gradle.
+ *        2. Append "-api" to the impl project name.
+ *
+ * The plugin invokes RestLiResourceModelCompatibilityChecker to check compatibility.
+ *
+ * The idl files in the api project are not generated by the plugin, but rather
+ * "published" from the impl project. The publishRestModel task is used to copy the
+ * idl files to the api project. This task is invoked automatically if the idls are
+ * verified to be "safe". "Safe" is determined by the "rest.model.compatibility"
+ * property. Because this task is skipped if the idls are functionally equivalent
+ * (not necessarily identical, e.g. they may differ in doc fields), no file will be
+ * copied if the default "equivalent" compatibility level is used. If such automatic
+ * publishing should be skipped, set the "rest.model.skipPublish" property to true.
+ * Note that all the properties are per-project and can be overridden in each project's
+ * build.gradle file.
+ *
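+ * An illustrative invocation combining the two properties documented above:
+ *
+ *   gradle build -Prest.model.compatibility=backwards -Prest.model.skipPublish=true
+ *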
+ * Please always keep in mind that if idl publishing has happened, a subsequent
+ * whole-project rebuild is necessary to pick up the changes. Otherwise, the Hudson job
+ * will fail and the source code commit will fail.
+ *
+ * The configurations that apply to generating the rest model and rest client jars
+ * are as follows:
+ *
+ *   - The restClientCompile configuration specifies the classpath for compiling
+ *     the generated rest client source (.java) files. In most cases,
+ *     it should be the Pegasus restli-client jar.
+ *     (The default compile configuration is not used for compiling rest clients because
+ *     it is not desirable to include non rest client dependencies, such as
+ *     the rest server implementation classes, in the data template jar.)
+ *     The configuration should not directly include data template jars. Data template jars
+ *     should be included in the dataModel configuration.
+ *   - The dataModel configuration provides the value of the "generator.resolver.path"
+ *     system property that is passed to RestRequestBuilderGenerator.
+ *     This configuration should contain only data template jars. The data template jars
+ *     contain both data schema (.pdsc) files and generated data template (.class) files.
+ *     The RestRequestBuilderGenerator will only generate rest client classes.
+ *     The dataModel configuration is also included in the compile classpath for the
+ *     generated rest client source files. If the dataModel configuration does not
+ *     include the generated data template classes, the Java compiler may not be able to
+ *     find the data template classes referenced by the generated rest client.
+ *   - The testDataModel configuration is similar to the dataModel configuration
+ *     except it is used when generating rest client source files from
+ *     test source sets.
+ *   - The restModel configuration is used to publish the rest model jar
+ *     which contains generated idl (.restspec.json) files.
+ *   - The testRestModel configuration is similar to the restModel configuration
+ *     except it is used to publish rest model jar files generated from
+ *     test source sets.
+ *   - The restClient configuration is used to publish the rest client jar
+ *     which contains both generated idl (.restspec.json) files and
+ *     the rest client class (.class) files generated from these
+ *     idl (.restspec.json) files (see the consumption sketch below).
+ *   - The testRestClient configuration is similar to the restClient configuration
+ *     except it is used to publish rest client jar files generated from
+ *     test source sets.
+ *
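+ * An illustrative consumer of the published rest client builders (the project path is
+ * a hypothetical placeholder):
+ *
+ *   dependencies {
+ *     implementation project(path: ':example-api', configuration: 'restClient')
+ *   }
+ *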

+ *
+ * This plugin considers test source sets whose names begin with 'test' or 'integTest' to be
+ * test source sets.
+ *
+ */ +public class PegasusPlugin implements Plugin +{ + public static boolean debug = false; + + private static final GradleVersion MIN_REQUIRED_VERSION = GradleVersion.version("1.0"); // Next: 5.2.1 + private static final GradleVersion MIN_SUGGESTED_VERSION = GradleVersion.version("5.2.1"); // Next: 5.3 + + // + // Constants for generating sourceSet names and corresponding directory names + // for generated code + // + private static final String DATA_TEMPLATE_GEN_TYPE = "DataTemplate"; + private static final String REST_GEN_TYPE = "Rest"; + private static final String AVRO_SCHEMA_GEN_TYPE = "AvroSchema"; + + public static final String DATA_TEMPLATE_FILE_SUFFIX = ".pdsc"; + public static final String PDL_FILE_SUFFIX = ".pdl"; + // gradle property to opt OUT schema annotation validation, by default this feature is enabled. + private static final String DISABLE_SCHEMA_ANNOTATION_VALIDATION = "schema.annotation.validation.disable"; + // gradle property to opt in for destroying stale files from the build directory, + // by default it is disabled, because it triggers hot-reload (even if it results in a no-op) + private static final String DESTROY_STALE_FILES_ENABLE = "enableDestroyStaleFiles"; + public static final Collection DATA_TEMPLATE_FILE_SUFFIXES = new ArrayList<>(); + + public static final String IDL_FILE_SUFFIX = ".restspec.json"; + public static final String SNAPSHOT_FILE_SUFFIX = ".snapshot.json"; + public static final String SNAPSHOT_COMPAT_REQUIREMENT = "rest.model.compatibility"; + public static final String IDL_COMPAT_REQUIREMENT = "rest.idl.compatibility"; + // Pegasus schema compatibility level configuration, which is used to define the {@link CompatibilityLevel}. + public static final String PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.pegasusSchema.compatibility"; + // Pegasus extension schema compatibility level configuration, which is used to define the {@link CompatibilityLevel} + public static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.extensionSchema.compatibility"; + // CompatibilityOptions Mode configuration, which is used to define the {@link CompatibilityOptions#Mode} in the compatibility checker. + private static final String PEGASUS_COMPATIBILITY_MODE = "pegasusPlugin.pegasusSchemaCompatibilityCheckMode"; + + private static final Pattern TEST_DIR_REGEX = Pattern.compile("^(integ)?[Tt]est"); + private static final String SNAPSHOT_NO_PUBLISH = "rest.model.noPublish"; + private static final String SNAPSHOT_FORCE_PUBLISH = "rest.model.forcePublish"; + private static final String PROCESS_EMPTY_IDL_DIR = "rest.idl.processEmptyIdlDir"; + private static final String IDL_NO_PUBLISH = "rest.idl.noPublish"; + private static final String IDL_FORCE_PUBLISH = "rest.idl.forcePublish"; + private static final String SKIP_IDL_CHECK = "rest.idl.skipCheck"; + // gradle property to skip running GenerateRestModel task. + // Note it affects GenerateRestModel task only, and does not skip tasks depends on GenerateRestModel. 
+ private static final String SKIP_GENERATE_REST_MODEL= "rest.model.skipGenerateRestModel"; + private static final String SUPPRESS_REST_CLIENT_RESTLI_2 = "rest.client.restli2.suppress"; + private static final String SUPPRESS_REST_CLIENT_RESTLI_1 = "rest.client.restli1.suppress"; + + private static final String GENERATOR_CLASSLOADER_NAME = "pegasusGeneratorClassLoader"; + + private static final String CONVERT_TO_PDL_REVERSE = "convertToPdl.reverse"; + private static final String CONVERT_TO_PDL_KEEP_ORIGINAL = "convertToPdl.keepOriginal"; + private static final String CONVERT_TO_PDL_SKIP_VERIFICATION = "convertToPdl.skipVerification"; + private static final String CONVERT_TO_PDL_PRESERVE_SOURCE_CMD = "convertToPdl.preserveSourceCmd"; + + // Below variables are used to collect data across all pegasus projects (sub-projects) and then print information + // to the user at the end after build is finished. + private static StringBuffer _restModelCompatMessage = new StringBuffer(); + private static final Collection _needCheckinFiles = new ArrayList<>(); + private static final Collection _needBuildFolders = new ArrayList<>(); + private static final Collection _possibleMissingFilesInEarlierCommit = new ArrayList<>(); + + private static final String RUN_ONCE = "runOnce"; + private static final Object STATIC_PROJECT_EVALUATED_LOCK = new Object(); + + private static final List UNUSED_CONFIGURATIONS = Arrays.asList( + "dataTemplateGenerator", "restTools", "avroSchemaGenerator"); + // Directory in the dataTemplate jar that holds schemas translated from PDL to PDSC. + private static final String TRANSLATED_SCHEMAS_DIR = "legacyPegasusSchemas"; + // Enable the use of argFiles for the tasks that support them + private static final String ENABLE_ARG_FILE = "pegasusPlugin.enableArgFile"; + // Enable the generation of fluent APIs + private static final String ENABLE_FLUENT_API = "pegasusPlugin.enableFluentApi"; + + // This config impacts GenerateDataTemplateTask and GenerateRestClientTask; + // If not set, by default all paths generated in these two tasks will be lower-case. 
+ // This default behavior is needed because Linux, MacOS, Windows treat case sensitive paths differently, + // and we want to be consistent, so we choose lower-case as default case for path generated + private static final String CODE_GEN_PATH_CASE_SENSITIVE = "pegasusPlugin.generateCaseSensitivePath"; + + private static final String PEGASUS_PLUGIN_CONFIGURATION = "pegasusPlugin"; + + // Enable the use of generic pegasus schema compatibility checker + private static final String ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK = "pegasusPlugin.enablePegasusSchemaCompatibilityCheck"; + + private static final String PEGASUS_SCHEMA_SNAPSHOT = "PegasusSchemaSnapshot"; + + private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT = "PegasusExtensionSchemaSnapshot"; + + private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR = "pegasusSchemaSnapshot"; + + private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR = "pegasusExtensionSchemaSnapshot"; + + private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusSchemaSnapshotDir"; + + private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusExtensionSchemaSnapshotDir"; + + private static final String SRC = "src"; + + private static final String SCHEMA_ANNOTATION_HANDLER_CONFIGURATION = "schemaAnnotationHandler"; + + private static final String COMPATIBILITY_OPTIONS_MODE_EXTENSION = "EXTENSION"; + + + @SuppressWarnings("unchecked") + private Class> _thisPluginType = (Class>) + getClass().asSubclass(Plugin.class); + + private Task _generateSourcesJarTask; + private Javadoc _generateJavadocTask; + private Task _generateJavadocJarTask; + private boolean _configureIvyPublications = true; + + public void setPluginType(Class> pluginType) + { + _thisPluginType = pluginType; + } + + public void setSourcesJarTask(Task sourcesJarTask) + { + _generateSourcesJarTask = sourcesJarTask; + } + + public void setJavadocJarTask(Task javadocJarTask) + { + _generateJavadocJarTask = javadocJarTask; + } + + public void setConfigureIvyPublications(boolean configureIvyPublications) { + _configureIvyPublications = configureIvyPublications; + } + + @Override + public void apply(Project project) + { + checkGradleVersion(project); + + project.getPlugins().apply(JavaPlugin.class); + + // this HashMap will have a PegasusOptions per sourceSet + project.getExtensions().getExtraProperties().set("pegasus", new HashMap<>()); + // this map will extract PegasusOptions.GenerationMode to project property + project.getExtensions().getExtraProperties().set("PegasusGenerationMode", + Arrays.stream(PegasusOptions.GenerationMode.values()) + .collect(Collectors.toMap(PegasusOptions.GenerationMode::name, Function.identity()))); + + synchronized (STATIC_PROJECT_EVALUATED_LOCK) + { + // Check if this is the first time the block will run. Pegasus plugin can run multiple times in a build if + // multiple sub-projects applied the plugin. + if (!project.getRootProject().hasProperty(RUN_ONCE) + || !Boolean.parseBoolean(String.valueOf(project.getRootProject().property(RUN_ONCE)))) + { + project.getGradle().projectsEvaluated(gradle -> + gradle.getRootProject().subprojects(subproject -> + UNUSED_CONFIGURATIONS.forEach(configurationName -> { + Configuration conf = subproject.getConfigurations().findByName(configurationName); + if (conf != null && !conf.getDependencies().isEmpty()) { + subproject.getLogger().warn("*** Project {} declares dependency to unused configuration \"{}\". 
" + + "This configuration is deprecated and you can safely remove the dependency. ***", + subproject.getPath(), configurationName); + } + }) + ) + ); + + // Re-initialize the static variables as they might have stale values from previous run. With Gradle 3.0 and + // gradle daemon enabled, the plugin class might not be loaded for every run. + DATA_TEMPLATE_FILE_SUFFIXES.clear(); + DATA_TEMPLATE_FILE_SUFFIXES.add(DATA_TEMPLATE_FILE_SUFFIX); + DATA_TEMPLATE_FILE_SUFFIXES.add(PDL_FILE_SUFFIX); + + _restModelCompatMessage = new StringBuffer(); + _needCheckinFiles.clear(); + _needBuildFolders.clear(); + _possibleMissingFilesInEarlierCommit.clear(); + + project.getGradle().buildFinished(result -> + { + StringBuilder endOfBuildMessage = new StringBuilder(); + if (_restModelCompatMessage.length() > 0) + { + endOfBuildMessage.append(_restModelCompatMessage); + } + + if (!_needCheckinFiles.isEmpty()) + { + endOfBuildMessage.append(createModifiedFilesMessage(_needCheckinFiles, _needBuildFolders)); + } + + if (!_possibleMissingFilesInEarlierCommit.isEmpty()) + { + endOfBuildMessage.append(createPossibleMissingFilesMessage(_possibleMissingFilesInEarlierCommit)); + } + + if (endOfBuildMessage.length() > 0) + { + result.getGradle().getRootProject().getLogger().quiet(endOfBuildMessage.toString()); + } + }); + + // Set an extra property on the root project to indicate the initialization is complete for the current build. + project.getRootProject().getExtensions().getExtraProperties().set(RUN_ONCE, true); + } + } + + ConfigurationContainer configurations = project.getConfigurations(); + + // configuration for getting the required classes to make pegasus call main methods + configurations.maybeCreate(PEGASUS_PLUGIN_CONFIGURATION); + + // configuration for compiling generated data templates + Configuration dataTemplateCompile = configurations.maybeCreate("dataTemplateCompile"); + dataTemplateCompile.setVisible(false); + + // configuration for running rest client generator + Configuration restClientCompile = configurations.maybeCreate("restClientCompile"); + restClientCompile.setVisible(false); + + // configuration for running data template generator + // DEPRECATED! This configuration is no longer used. Please stop using it. + Configuration dataTemplateGenerator = configurations.maybeCreate("dataTemplateGenerator"); + dataTemplateGenerator.setVisible(false); + + // configuration for running rest client generator + // DEPRECATED! This configuration is no longer used. Please stop using it. + Configuration restTools = configurations.maybeCreate("restTools"); + restTools.setVisible(false); + + // configuration for running Avro schema generator + // DEPRECATED! 
To skip avro schema generation, use PegasusOptions.generationModes + Configuration avroSchemaGenerator = configurations.maybeCreate("avroSchemaGenerator"); + avroSchemaGenerator.setVisible(false); + + // configuration for depending on data schemas and potentially generated data templates + // and for publishing jars containing data schemas to the project artifacts for including in the ivy.xml + Configuration dataModel = configurations.maybeCreate("dataModel"); + Configuration testDataModel = configurations.maybeCreate("testDataModel"); + testDataModel.extendsFrom(dataModel); + + // configuration for depending on data schemas and potentially generated data templates + // and for publishing jars containing data schemas to the project artifacts for including in the ivy.xml + Configuration avroSchema = configurations.maybeCreate("avroSchema"); + Configuration testAvroSchema = configurations.maybeCreate("testAvroSchema"); + testAvroSchema.extendsFrom(avroSchema); + + // configuration for depending on rest idl and potentially generated client builders + // and for publishing jars containing rest idl to the project artifacts for including in the ivy.xml + Configuration restModel = configurations.maybeCreate("restModel"); + Configuration testRestModel = configurations.maybeCreate("testRestModel"); + testRestModel.extendsFrom(restModel); + + // configuration for publishing jars containing data schemas and generated data templates + // to the project artifacts for including in the ivy.xml + // + // published data template jars depends on the configurations used to compile the classes + // in the jar, this includes the data models/templates used by the data template generator + // and the classes used to compile the generated classes. + Configuration dataTemplate = configurations.maybeCreate("dataTemplate"); + dataTemplate.extendsFrom(dataTemplateCompile, dataModel); + Configuration testDataTemplate = configurations.maybeCreate("testDataTemplate"); + testDataTemplate.extendsFrom(dataTemplate, testDataModel); + + // configuration for processing and validating schema annotation during build time. + // + // The configuration contains dependencies to schema annotation handlers which would process schema annotations + // and validate. + Configuration schemaAnnotationHandler = configurations.maybeCreate(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION); + + // configuration for publishing jars containing rest idl and generated client builders + // to the project artifacts for including in the ivy.xml + // + // published client builder jars depends on the configurations used to compile the classes + // in the jar, this includes the data models/templates (potentially generated by this + // project and) used by the data template generator and the classes used to compile + // the generated classes. 
+ Configuration restClient = configurations.maybeCreate("restClient"); + restClient.extendsFrom(restClientCompile, dataTemplate); + Configuration testRestClient = configurations.maybeCreate("testRestClient"); + testRestClient.extendsFrom(restClient, testDataTemplate); + + Properties properties = new Properties(); + InputStream inputStream = getClass().getResourceAsStream("/pegasus-version.properties"); + if (inputStream != null) + { + try + { + properties.load(inputStream); + } + catch (IOException e) + { + throw new GradleException("Unable to read pegasus-version.properties file.", e); + } + + String version = properties.getProperty("pegasus.version"); + + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data:" + version); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data-avro-generator:" + version); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:generator:" + version); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:restli-tools:" + version); + } + else + { + project.getLogger().lifecycle("Unable to add pegasus dependencies to {}. Please be sure that " + + "'com.linkedin.pegasus:data', 'com.linkedin.pegasus:data-avro-generator', 'com.linkedin.pegasus:generator', 'com.linkedin.pegasus:restli-tools'" + + " are available on the configuration pegasusPlugin", + project.getPath()); + } + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "org.slf4j:slf4j-simple:1.7.2"); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, project.files(System.getProperty("java.home") + "/../lib/tools.jar")); + + // this call has to be here because: + // 1) artifact cannot be published once projects has been evaluated, so we need to first + // create the tasks and artifact handler, then progressively append sources + // 2) in order to append sources progressively, the source and documentation tasks and artifacts must be + // configured/created before configuring and creating the code generation tasks. + + configureGeneratedSourcesAndJavadoc(project); + + ChangedFileReportTask changedFileReportTask = project.getTasks() + .create("changedFilesReport", ChangedFileReportTask.class); + + project.getTasks().getByName("check").dependsOn(changedFileReportTask); + + SourceSetContainer sourceSets = project.getConvention() + .getPlugin(JavaPluginConvention.class).getSourceSets(); + + sourceSets.all(sourceSet -> + { + if (sourceSet.getName().toLowerCase(Locale.US).contains("generated")) + { + return; + } + + checkAvroSchemaExist(project, sourceSet); + + // the idl Generator input options will be inside the PegasusOptions class. Users of the + // plugin can set the inputOptions in their build.gradle + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + + pegasusOptions.put(sourceSet.getName(), new PegasusOptions()); + + // rest model generation could fail on incompatibility + // if it can fail, fail it early + configureRestModelGeneration(project, sourceSet); + + // Do compatibility check for schemas under "pegasus" directory if the configuration property is provided. 
+ if (isPropertyTrue(project, ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK)) + { + configurePegasusSchemaSnapshotGeneration(project, sourceSet, false); + } + + configurePegasusSchemaSnapshotGeneration(project, sourceSet, true); + + configureConversionUtilities(project, sourceSet); + + GenerateDataTemplateTask generateDataTemplateTask = configureDataTemplateGeneration(project, sourceSet); + + configureAvroSchemaGeneration(project, sourceSet); + + configureRestClientGeneration(project, sourceSet); + + if (!isPropertyTrue(project, DISABLE_SCHEMA_ANNOTATION_VALIDATION)) + { + configureSchemaAnnotationValidation(project, sourceSet, generateDataTemplateTask); + } + + Task cleanGeneratedDirTask = project.task(sourceSet.getTaskName("clean", "GeneratedDir")); + cleanGeneratedDirTask.doLast(new CacheableAction<>(task -> + { + deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE); + deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE); + deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE); + })); + + // make clean depends on deleting the generated directories + project.getTasks().getByName("clean").dependsOn(cleanGeneratedDirTask); + + // Set data schema directories as resource roots + configureDataSchemaResourcesRoot(project, sourceSet); + }); + + project.getExtensions().getExtraProperties().set(GENERATOR_CLASSLOADER_NAME, getClass().getClassLoader()); + } + + protected void configureSchemaAnnotationValidation(Project project, + SourceSet sourceSet, + GenerateDataTemplateTask generateDataTemplatesTask) + { + // Task would execute based on the following order. + // generateDataTemplatesTask -> validateSchemaAnnotationTask + + // Create ValidateSchemaAnnotation task + ValidateSchemaAnnotationTask validateSchemaAnnotationTask = project.getTasks() + .create(sourceSet.getTaskName("validate", "schemaAnnotation"), ValidateSchemaAnnotationTask.class, task -> + { + task.setInputDir(generateDataTemplatesTask.getInputDir()); + task.setResolverPath(getDataModelConfig(project, sourceSet)); // same resolver path as generateDataTemplatesTask + task.setClassPath(project.getConfigurations() .getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION) + .plus(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)) + .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME))); + task.setHandlerJarPath(project.getConfigurations() .getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + } + ); + + // validateSchemaAnnotationTask depend on generateDataTemplatesTask + validateSchemaAnnotationTask.dependsOn(generateDataTemplatesTask); + + // Check depends on validateSchemaAnnotationTask. 
+ project.getTasks().getByName("check").dependsOn(validateSchemaAnnotationTask); + } + + + + @SuppressWarnings("deprecation") + protected void configureGeneratedSourcesAndJavadoc(Project project) + { + _generateJavadocTask = project.getTasks().create("generateJavadoc", Javadoc.class); + + if (_generateSourcesJarTask == null) + { + // + // configuration for publishing jars containing sources for generated classes + // to the project artifacts for including in the ivy.xml + // + ConfigurationContainer configurations = project.getConfigurations(); + Configuration generatedSources = configurations.maybeCreate("generatedSources"); + Configuration testGeneratedSources = configurations.maybeCreate("testGeneratedSources"); + testGeneratedSources.extendsFrom(generatedSources); + + _generateSourcesJarTask = project.getTasks().create("generateSourcesJar", Jar.class, jarTask -> { + jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); + jarTask.setDescription("Generates a jar file containing the sources for the generated Java classes."); + // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + jarTask.getArchiveClassifier().set("sources"); + }); + + project.getArtifacts().add("generatedSources", _generateSourcesJarTask); + } + + if (_generateJavadocJarTask == null) + { + // + // configuration for publishing jars containing Javadoc for generated classes + // to the project artifacts for including in the ivy.xml + // + ConfigurationContainer configurations = project.getConfigurations(); + Configuration generatedJavadoc = configurations.maybeCreate("generatedJavadoc"); + Configuration testGeneratedJavadoc = configurations.maybeCreate("testGeneratedJavadoc"); + testGeneratedJavadoc.extendsFrom(generatedJavadoc); + + _generateJavadocJarTask = project.getTasks().create("generateJavadocJar", Jar.class, jarTask -> { + jarTask.dependsOn(_generateJavadocTask); + jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); + jarTask.setDescription("Generates a jar file containing the Javadoc for the generated Java classes."); + // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + jarTask.getArchiveClassifier().set("javadoc"); + jarTask.from(_generateJavadocTask.getDestinationDir()); + }); + + project.getArtifacts().add("generatedJavadoc", _generateJavadocJarTask); + } + else + { + // TODO: Tighten the types so that _generateJavadocJarTask must be of type Jar. 
+      ((Jar) _generateJavadocJarTask).from(_generateJavadocTask.getDestinationDir());
+      _generateJavadocJarTask.dependsOn(_generateJavadocTask);
+    }
+  }
+
+  private static void deleteGeneratedDir(Project project, SourceSet sourceSet, String dirType)
+  {
+    String generatedDirPath = getGeneratedDirPath(project, sourceSet, dirType);
+    project.getLogger().info("Delete generated directory {}", generatedDirPath);
+    project.delete(generatedDirPath);
+  }
+
+  private static <E extends Enum<E>> Class<E> getCompatibilityLevelClass(Project project)
+  {
+    ClassLoader generatorClassLoader = (ClassLoader) project.property(GENERATOR_CLASSLOADER_NAME);
+
+    String className = "com.linkedin.restli.tools.idlcheck.CompatibilityLevel";
+    try
+    {
+      @SuppressWarnings("unchecked")
+      Class<E> enumClass = (Class<E>) generatorClassLoader.loadClass(className).asSubclass(Enum.class);
+      return enumClass;
+    }
+    catch (ClassNotFoundException e)
+    {
+      throw new RuntimeException("Could not load class " + className);
+    }
+  }
+
+  private static void addGeneratedDir(Project project, SourceSet sourceSet, Collection<Configuration> configurations)
+  {
+    project.getPlugins().withType(IdeaPlugin.class, ideaPlugin -> {
+      IdeaModule ideaModule = ideaPlugin.getModel().getModule();
+      // stupid if block needed because of stupid assignment required to update source dirs
+      if (isTestSourceSet(sourceSet))
+      {
+        Set<File> sourceDirs = ideaModule.getTestSourceDirs();
+        sourceDirs.addAll(sourceSet.getJava().getSrcDirs());
+        // this is stupid but assignment is required
+        ideaModule.setTestSourceDirs(sourceDirs);
+        if (debug)
+        {
+          System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule testSourceDirs "
+              + ideaModule.getTestSourceDirs());
+        }
+      }
+      else
+      {
+        Set<File> sourceDirs = ideaModule.getSourceDirs();
+        sourceDirs.addAll(sourceSet.getJava().getSrcDirs());
+        // this is stupid but assignment is required
+        ideaModule.setSourceDirs(sourceDirs);
+        if (debug)
+        {
+          System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule sourceDirs "
+              + ideaModule.getSourceDirs());
+        }
+      }
+      Collection<Configuration> compilePlus = ideaModule.getScopes().get("COMPILE").get("plus");
+      compilePlus.addAll(configurations);
+      ideaModule.getScopes().get("COMPILE").put("plus", compilePlus);
+    });
+  }
+
+  private static void checkAvroSchemaExist(Project project, SourceSet sourceSet)
+  {
+    String sourceDir = "src" + File.separatorChar + sourceSet.getName();
+    File avroSourceDir = project.file(sourceDir + File.separatorChar + "avro");
+    if (avroSourceDir.exists())
+    {
+      project.getLogger().lifecycle("{}'s {} has non-empty avro directory. pegasus plugin does not process avro directory",
+          project.getName(), sourceDir);
+    }
+  }
+
+  // Compute the name of the source set that will contain a given type of generated code.
+  // e.g. genType may be 'DataTemplate' or 'Rest'
+  private static String getGeneratedSourceSetName(SourceSet sourceSet, String genType)
+  {
+    return sourceSet.getName() + "Generated" + genType;
+  }
+
+  // Compute the directory that will contain a given type of generated code for an input source set.
+  // e.g. genType may be 'DataTemplate' or 'Rest'
+  public static String getGeneratedDirPath(Project project, SourceSet sourceSet, String genType)
+  {
+    String override = getOverridePath(project, sourceSet, "overrideGeneratedDir");
+    String sourceSetName = getGeneratedSourceSetName(sourceSet, genType);
+    String base = override == null ? "src" : override;
+
+    return base + File.separatorChar + sourceSetName;
+  }
+
+  public static String getDataSchemaPath(Project project, SourceSet sourceSet)
+  {
+    String override = getOverridePath(project, sourceSet, "overridePegasusDir");
+    if (override == null)
+    {
+      return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "pegasus";
+    }
+    else
+    {
+      return override;
+    }
+  }
+
+  private static String getExtensionSchemaPath(Project project, SourceSet sourceSet)
+  {
+    String override = getOverridePath(project, sourceSet, "overrideExtensionSchemaDir");
+    if (override == null)
+    {
+      return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "extensions";
+    }
+    else
+    {
+      return override;
+    }
+  }
+
+  private static String getSnapshotPath(Project project, SourceSet sourceSet)
+  {
+    String override = getOverridePath(project, sourceSet, "overrideSnapshotDir");
+    if (override == null)
+    {
+      return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "snapshot";
+    }
+    else
+    {
+      return override;
+    }
+  }
+
+  private static String getIdlPath(Project project, SourceSet sourceSet)
+  {
+    String override = getOverridePath(project, sourceSet, "overrideIdlDir");
+    if (override == null)
+    {
+      return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "idl";
+    }
+    else
+    {
+      return override;
+    }
+  }
+
+  private static String getPegasusSchemaSnapshotPath(Project project, SourceSet sourceSet)
+  {
+    String override = getOverridePath(project, sourceSet, PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE);
+    if (override == null)
+    {
+      return SRC + File.separatorChar + sourceSet.getName() + File.separatorChar + PEGASUS_SCHEMA_SNAPSHOT_DIR;
+    }
+    else
+    {
+      return override;
+    }
+  }
+
+  private static String getPegasusExtensionSchemaSnapshotPath(Project project, SourceSet sourceSet)
+  {
+    String override = getOverridePath(project, sourceSet, PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE);
+    if (override == null)
+    {
+      return SRC + File.separatorChar + sourceSet.getName() + File.separatorChar + PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR;
+    }
+    else
+    {
+      return override;
+    }
+  }
+
+  private static String getOverridePath(Project project, SourceSet sourceSet, String overridePropertyName)
+  {
+    String sourceSetPropertyName = sourceSet.getName() + '.' + overridePropertyName;
+    String override = getNonEmptyProperty(project, sourceSetPropertyName);
+
+    if (override == null && sourceSet.getName().equals("main"))
+    {
+      override = getNonEmptyProperty(project, overridePropertyName);
+    }
+
+    return override;
+  }
+
+  private static boolean isTestSourceSet(SourceSet sourceSet)
+  {
+    return TEST_DIR_REGEX.matcher(sourceSet.getName()).find();
+  }
+
+  private static Configuration getDataModelConfig(Project project, SourceSet sourceSet)
+  {
+    return isTestSourceSet(sourceSet)
+        ? project.getConfigurations().getByName("testDataModel")
+        : project.getConfigurations().getByName("dataModel");
+  }
+
+  private static boolean isTaskSuccessful(Task task)
+  {
+    return task.getState().getExecuted()
+        // A task is not successful if it was skipped, unless it was skipped because it was already up to date.
+        && !(task.getState().getSkipped() && !task.getState().getUpToDate())
+        && task.getState().getFailure() == null;
+  }
+
+  private static boolean isResultEquivalent(File compatibilityLogFile)
+  {
+    return isResultEquivalent(compatibilityLogFile, false);
+  }
+
+  private static boolean isResultEquivalent(File compatibilityLogFile, boolean restSpecOnly)
+  {
+    CompatibilityLogChecker logChecker = new CompatibilityLogChecker();
+    try
+    {
+      logChecker.write(Files.readAllBytes(compatibilityLogFile.toPath()));
+    }
+    catch (IOException e)
+    {
+      throw new GradleException("Error while processing compatibility report: " + e.getMessage());
+    }
+    return logChecker.getRestSpecCompatibility().isEmpty() &&
+        (restSpecOnly || logChecker.getModelCompatibility().isEmpty());
+  }
+
+  protected void configureRestModelGeneration(Project project, SourceSet sourceSet)
+  {
+    if (sourceSet.getAllSource().isEmpty())
+    {
+      project.getLogger().info("No source files found for sourceSet {}. Skipping idl generation.", sourceSet.getName());
+      return;
+    }
+
+    // afterEvaluate needed so that api project can be overridden via ext.apiProject
+    project.afterEvaluate(p ->
+    {
+      // find api project here instead of in each project's plugin configuration
+      // this allows api project relation options (ext.api*) to be specified anywhere in the build.gradle file
+      // alternatively, pass closures to task configuration, and evaluate the closures when task is executed
+      Project apiProject = getCheckedApiProject(project);
+
+      // make sure the api project is evaluated. Important for configure-on-demand mode.
+      if (apiProject != null)
+      {
+        project.evaluationDependsOn(apiProject.getPath());
+
+        if (!apiProject.getPlugins().hasPlugin(_thisPluginType))
+        {
+          apiProject = null;
+        }
+      }
+
+      if (apiProject == null)
+      {
+        return;
+      }
+
+      Task untypedJarTask = project.getTasks().findByName(sourceSet.getJarTaskName());
+      if (!(untypedJarTask instanceof Jar))
+      {
+        return;
+      }
+      Jar jarTask = (Jar) untypedJarTask;
+
+      String snapshotCompatPropertyName = findProperty(FileCompatibilityType.SNAPSHOT);
+      if (project.hasProperty(snapshotCompatPropertyName) && "off".equalsIgnoreCase((String) project.property(snapshotCompatPropertyName)))
+      {
+        project.getLogger().lifecycle("Project {} snapshot compatibility level \"OFF\" is deprecated. Default to \"IGNORE\".",
+            project.getPath());
+      }
+
+      // generate the rest model
+      FileCollection restModelCodegenClasspath = project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)
+          .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME))
+          .plus(sourceSet.getRuntimeClasspath());
+      String destinationDirPrefix = getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) + File.separatorChar;
+      FileCollection restModelResolverPath = apiProject.files(getDataSchemaPath(project, sourceSet))
+          .plus(getDataModelConfig(apiProject, sourceSet));
+      Set<File> watchedRestModelInputDirs = buildWatchedRestModelInputDirs(project, sourceSet);
+      Set<File> restModelInputDirs = difference(sourceSet.getAllSource().getSrcDirs(),
+          sourceSet.getResources().getSrcDirs());
+
+      Task generateRestModelTask = project.getTasks()
+          .create(sourceSet.getTaskName("generate", "restModel"), GenerateRestModelTask.class, task ->
+          {
+            task.dependsOn(project.getTasks().getByName(sourceSet.getClassesTaskName()));
+            task.setCodegenClasspath(restModelCodegenClasspath);
+            task.setWatchedCodegenClasspath(restModelCodegenClasspath
+                .filter(file -> !"main".equals(file.getName()) && !"classes".equals(file.getName())));
+            task.setInputDirs(restModelInputDirs);
+            task.setWatchedInputDirs(watchedRestModelInputDirs.isEmpty()
+                ? restModelInputDirs : watchedRestModelInputDirs);
+            // we need all the artifacts from runtime for any private implementation classes the server code might need.
+            task.setSnapshotDestinationDir(project.file(destinationDirPrefix + "snapshot"));
+            task.setIdlDestinationDir(project.file(destinationDirPrefix + "idl"));
+
+            @SuppressWarnings("unchecked")
+            Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
+                .getExtensions().getExtraProperties().get("pegasus");
+            task.setIdlOptions(pegasusOptions.get(sourceSet.getName()).idlOptions);
+
+            task.setResolverPath(restModelResolverPath);
+            if (isPropertyTrue(project, ENABLE_ARG_FILE))
+            {
+              task.setEnableArgFile(true);
+            }
+
+            task.onlyIf(t -> !isPropertyTrue(project, SKIP_GENERATE_REST_MODEL));
+
+            task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE)));
+          });
+
+      File apiSnapshotDir = apiProject.file(getSnapshotPath(apiProject, sourceSet));
+      File apiIdlDir = apiProject.file(getIdlPath(apiProject, sourceSet));
+      apiSnapshotDir.mkdirs();
+
+      if (!isPropertyTrue(project, SKIP_IDL_CHECK))
+      {
+        apiIdlDir.mkdirs();
+      }
+
+      CheckRestModelTask checkRestModelTask = project.getTasks()
+          .create(sourceSet.getTaskName("check", "RestModel"), CheckRestModelTask.class, task ->
+          {
+            task.dependsOn(generateRestModelTask);
+            task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix));
+            task.setPreviousSnapshotDirectory(apiSnapshotDir);
+            task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix));
+            task.setPreviousIdlDirectory(apiIdlDir);
+            task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
+            task.setModelCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT));
+            task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK));
+
+            task.doLast(new CacheableAction<>(t ->
+            {
+              if (!task.isEquivalent())
+              {
+                _restModelCompatMessage.append(task.getWholeMessage());
+              }
+            }));
+          });
+
+      CheckSnapshotTask checkSnapshotTask = project.getTasks()
+          .create(sourceSet.getTaskName("check", "Snapshot"), CheckSnapshotTask.class, task -> {
+            task.dependsOn(generateRestModelTask);
+
task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.setPreviousSnapshotDirectory(apiSnapshotDir); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSnapshotCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT)); + + task.onlyIf(t -> isPropertyTrue(project, SKIP_IDL_CHECK)); + }); + + CheckIdlTask checkIdlTask = project.getTasks() + .create(sourceSet.getTaskName("check", "Idl"), CheckIdlTask.class, task -> + { + task.dependsOn(generateRestModelTask); + task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.setPreviousIdlDirectory(apiIdlDir); + task.setResolverPath(restModelResolverPath); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setIdlCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + + task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK) + && !"OFF".equals(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL))); + }); + + // rest model publishing involves cross-project reference + // configure after all projects have been evaluated + // the file copy can be turned off by "rest.model.noPublish" flag + Task publishRestliSnapshotTask = project.getTasks() + .create(sourceSet.getTaskName("publish", "RestliSnapshot"), PublishRestModelTask.class, task -> + { + task.dependsOn(checkRestModelTask, checkSnapshotTask, checkIdlTask); + task.from(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.into(apiSnapshotDir); + task.setSuffix(SNAPSHOT_FILE_SUFFIX); + + task.onlyIf(t -> + isPropertyTrue(project, SNAPSHOT_FORCE_PUBLISH) || + ( + !isPropertyTrue(project, SNAPSHOT_NO_PUBLISH) && + ( + ( + isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkSnapshotTask) && + checkSnapshotTask.getSummaryTarget().exists() && + !isResultEquivalent(checkSnapshotTask.getSummaryTarget()) + ) || + ( + !isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkRestModelTask) && + checkRestModelTask.getSummaryTarget().exists() && + !isResultEquivalent(checkRestModelTask.getSummaryTarget()) + ) + )) + ); + }); + + Task publishRestliIdlTask = project.getTasks() + .create(sourceSet.getTaskName("publish", "RestliIdl"), PublishRestModelTask.class, task -> { + task.dependsOn(checkRestModelTask, checkIdlTask, checkSnapshotTask); + task.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.into(apiIdlDir); + task.setSuffix(IDL_FILE_SUFFIX); + + task.onlyIf(t -> + isPropertyTrue(project, IDL_FORCE_PUBLISH) || + ( + !isPropertyTrue(project, IDL_NO_PUBLISH) && + ( + ( + isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkSnapshotTask) && + checkSnapshotTask.getSummaryTarget().exists() && + !isResultEquivalent(checkSnapshotTask.getSummaryTarget(), true) + ) || + ( + !isPropertyTrue(project, SKIP_IDL_CHECK) && + ( + (isTaskSuccessful(checkRestModelTask) && + checkRestModelTask.getSummaryTarget().exists() && + !isResultEquivalent(checkRestModelTask.getSummaryTarget(), true)) || + (isTaskSuccessful(checkIdlTask) && + checkIdlTask.getSummaryTarget().exists() && + !isResultEquivalent(checkIdlTask.getSummaryTarget())) + ) + ) + )) + ); + }); + + project.getLogger().info("API project selected for {} is {}", + publishRestliIdlTask.getPath(), apiProject.getPath()); + + 
jarTask.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + // add generated .restspec.json files as resources to the jar + jarTask.dependsOn(publishRestliSnapshotTask, publishRestliIdlTask); + + ChangedFileReportTask changedFileReportTask = (ChangedFileReportTask) project.getTasks() + .getByName("changedFilesReport"); + + // Use the files from apiDir for generating the changed files report as we need to notify user only when + // source system files are modified. + changedFileReportTask.setIdlFiles(SharedFileUtils.getSuffixedFiles(project, apiIdlDir, IDL_FILE_SUFFIX)); + changedFileReportTask.setSnapshotFiles(SharedFileUtils.getSuffixedFiles(project, apiSnapshotDir, + SNAPSHOT_FILE_SUFFIX)); + changedFileReportTask.mustRunAfter(publishRestliSnapshotTask, publishRestliIdlTask); + changedFileReportTask.doLast(new CacheableAction<>(t -> + { + if (!changedFileReportTask.getNeedCheckinFiles().isEmpty()) + { + project.getLogger().info("Adding modified files to need checkin list..."); + _needCheckinFiles.addAll(changedFileReportTask.getNeedCheckinFiles()); + _needBuildFolders.add(getCheckedApiProject(project).getPath()); + } + })); + }); + } + + protected void configurePegasusSchemaSnapshotGeneration(Project project, SourceSet sourceSet, boolean isExtensionSchema) + { + File schemaDir = isExtensionSchema? project.file(getExtensionSchemaPath(project, sourceSet)) + : project.file(getDataSchemaPath(project, sourceSet)); + + if ((isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, PDL_FILE_SUFFIX).isEmpty()) || + (!isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, DATA_TEMPLATE_FILE_SUFFIXES).isEmpty())) + { + return; + } + + Path publishablePegasusSchemaSnapshotDir = project.getBuildDir().toPath().resolve(sourceSet.getName() + + (isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT)); + + Task generatePegasusSchemaSnapshot = generatePegasusSchemaSnapshot(project, sourceSet, + isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT, schemaDir, + publishablePegasusSchemaSnapshotDir.toFile(), isExtensionSchema); + + File pegasusSchemaSnapshotDir = project.file(isExtensionSchema ? getPegasusExtensionSchemaSnapshotPath(project, sourceSet) + : getPegasusSchemaSnapshotPath(project, sourceSet)); + pegasusSchemaSnapshotDir.mkdirs(); + + Task checkSchemaSnapshot = project.getTasks().create(sourceSet.getTaskName("check", + isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT), + CheckPegasusSnapshotTask.class, task -> + { + task.dependsOn(generatePegasusSchemaSnapshot); + task.setCurrentSnapshotDirectory(publishablePegasusSchemaSnapshotDir.toFile()); + task.setPreviousSnapshotDirectory(pegasusSchemaSnapshotDir); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION) + .plus(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)) + .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME))); + task.setCompatibilityLevel(isExtensionSchema ? + PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT) + :PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT)); + task.setCompatibilityMode(isExtensionSchema ? 
COMPATIBILITY_OPTIONS_MODE_EXTENSION : + PropertyUtil.findCompatMode(project, PEGASUS_COMPATIBILITY_MODE)); + task.setExtensionSchema(isExtensionSchema); + task.setHandlerJarPath(project.getConfigurations() .getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)); + + task.onlyIf(t -> + { + String pegasusSnapshotCompatPropertyName = isExtensionSchema ? + findProperty(FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT) + : findProperty(FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT); + return !project.hasProperty(pegasusSnapshotCompatPropertyName) || + !"off".equalsIgnoreCase((String) project.property(pegasusSnapshotCompatPropertyName)); + }); + }); + + Task publishPegasusSchemaSnapshot = publishPegasusSchemaSnapshot(project, sourceSet, + isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT, checkSchemaSnapshot, + publishablePegasusSchemaSnapshotDir.toFile(), pegasusSchemaSnapshotDir); + + project.getTasks().getByName(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).dependsOn(publishPegasusSchemaSnapshot); + } + + @SuppressWarnings("deprecation") + protected void configureAvroSchemaGeneration(Project project, SourceSet sourceSet) + { + File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); + File avroDir = project.file(getGeneratedDirPath(project, sourceSet, AVRO_SCHEMA_GEN_TYPE) + + File.separatorChar + "avro"); + + // generate avro schema files from data schema + Task generateAvroSchemaTask = project.getTasks() + .create(sourceSet.getTaskName("generate", "avroSchema"), GenerateAvroSchemaTask.class, task -> { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(avroDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> + { + if (task.getInputDir().exists()) + { + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + + if (pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.AVRO)) + { + return true; + } + } + + return !project.getConfigurations().getByName("avroSchemaGenerator").isEmpty(); + }); + + task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE))); + }); + + project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(generateAvroSchemaTask); + + // create avro schema jar file + + Task avroSchemaJarTask = project.getTasks().create(sourceSet.getName() + "AvroSchemaJar", Jar.class, task -> + { + // add path prefix to each file in the data schema directory + task.from(avroDir, copySpec -> + copySpec.eachFile(fileCopyDetails -> + fileCopyDetails.setPath("avro" + File.separatorChar + fileCopyDetails.getPath()))); + + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "avro-schema")); + task.setDescription("Generate an avro schema jar"); + }); + + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("avroSchema", avroSchemaJarTask); + } + else + { + project.getArtifacts().add("testAvroSchema", avroSchemaJarTask); + } + } + + protected void configureConversionUtilities(Project project, SourceSet sourceSet) + { + File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); + boolean reverse = 
isPropertyTrue(project, CONVERT_TO_PDL_REVERSE); + boolean keepOriginal = isPropertyTrue(project, CONVERT_TO_PDL_KEEP_ORIGINAL); + boolean skipVerification = isPropertyTrue(project, CONVERT_TO_PDL_SKIP_VERIFICATION); + String preserveSourceCmd = getNonEmptyProperty(project, CONVERT_TO_PDL_PRESERVE_SOURCE_CMD); + + // Utility task for migrating between PDSC and PDL. + project.getTasks().create(sourceSet.getTaskName("convert", "ToPdl"), TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(dataSchemaDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setPreserveSourceCmd(preserveSourceCmd); + if (reverse) + { + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDSC); + } + else + { + task.setSourceFormat(SchemaFileType.PDSC); + task.setDestinationFormat(SchemaFileType.PDL); + } + task.setKeepOriginal(keepOriginal); + task.setSkipVerification(skipVerification); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> task.getInputDir().exists()); + task.doLast(new CacheableAction<>(t -> + { + project.getLogger().lifecycle("Pegasus schema conversion complete."); + project.getLogger().lifecycle("All pegasus schema files in " + dataSchemaDir + " have been converted"); + project.getLogger().lifecycle("You can use '-PconvertToPdl.reverse=true|false' to change the direction of conversion."); + })); + }); + + // Helper task for reformatting existing PDL schemas by generating them again. + project.getTasks().create(sourceSet.getTaskName("reformat", "Pdl"), TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(dataSchemaDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDL); + task.setKeepOriginal(true); + task.setSkipVerification(true); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> task.getInputDir().exists()); + task.doLast(new CacheableAction<>(t -> project.getLogger().lifecycle("PDL reformat complete."))); + }); + } + + @SuppressWarnings("deprecation") + protected GenerateDataTemplateTask configureDataTemplateGeneration(Project project, SourceSet sourceSet) + { + File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); + File generatedDataTemplateDir = project.file(getGeneratedDirPath(project, sourceSet, DATA_TEMPLATE_GEN_TYPE) + + File.separatorChar + "java"); + File publishableSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "Schemas"); + File publishableLegacySchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "LegacySchemas"); + File publishableExtensionSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "ExtensionSchemas"); + + // generate data template source files from data schema + GenerateDataTemplateTask generateDataTemplatesTask = project.getTasks() + .create(sourceSet.getTaskName("generate", "dataTemplate"), GenerateDataTemplateTask.class, task -> + { + task.setInputDir(dataSchemaDir); + 
task.setDestinationDir(generatedDataTemplateDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE)) + { + task.setGenerateLowercasePath(false); + } + + task.onlyIf(t -> + { + if (task.getInputDir().exists()) + { + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + + return pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.PEGASUS); + } + + return false; + }); + + task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE))); + }); + + // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar. + ((Jar) _generateSourcesJarTask).from(generateDataTemplatesTask.getDestinationDir()); + _generateSourcesJarTask.dependsOn(generateDataTemplatesTask); + + _generateJavadocTask.source(generateDataTemplatesTask.getDestinationDir()); + _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath() + .plus(project.getConfigurations().getByName("dataTemplateCompile")) + .plus(generateDataTemplatesTask.getResolverPath())); + _generateJavadocTask.dependsOn(generateDataTemplatesTask); + + // Add extra dependencies for data model compilation + project.getDependencies().add("dataTemplateCompile", "com.google.code.findbugs:jsr305:3.0.2"); + + // create new source set for generated java source and class files + String targetSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE); + + SourceSetContainer sourceSets = project.getConvention() + .getPlugin(JavaPluginConvention.class).getSourceSets(); + + SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss -> + { + ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedDataTemplateDir)); + ss.setCompileClasspath(getDataModelConfig(project, sourceSet) + .plus(project.getConfigurations().getByName("dataTemplateCompile"))); + }); + + // idea plugin needs to know about new generated java source directory and its dependencies + addGeneratedDir(project, targetSourceSet, Arrays.asList( + getDataModelConfig(project, sourceSet), + project.getConfigurations().getByName("dataTemplateCompile"))); + + // Set source compatibility to 1.8 as the data-templates now generate code with Java 8 features. 
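+    // The doFirst below defers the compatibility override to execution time, so it takes
+    // precedence over any sourceCompatibility the project configured at configuration time.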
+ JavaCompile compileTask = project.getTasks() + .withType(JavaCompile.class).getByName(targetSourceSet.getCompileJavaTaskName()); + compileTask.doFirst(new CacheableAction<>(task -> { + ((JavaCompile) task).setSourceCompatibility("1.8"); + ((JavaCompile) task).setTargetCompatibility("1.8"); + })); + // make sure that java source files have been generated before compiling them + compileTask.dependsOn(generateDataTemplatesTask); + + // Dummy task to maintain backward compatibility + // TODO: Delete this task once use cases have had time to reference the new task + Task destroyStaleFiles = project.getTasks().create(sourceSet.getName() + "DestroyStaleFiles", Delete.class); + destroyStaleFiles.onlyIf(task -> { + project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath()); + return false; + }); + + // Dummy task to maintain backward compatibility, as this task was replaced by CopySchemas + // TODO: Delete this task once use cases have had time to reference the new task + Task copyPdscSchemasTask = project.getTasks().create(sourceSet.getName() + "CopyPdscSchemas", Copy.class); + copyPdscSchemasTask.dependsOn(destroyStaleFiles); + copyPdscSchemasTask.onlyIf(task -> { + project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath()); + return false; + }); + + // Prepare schema files for publication by syncing schema folders. + Task prepareSchemasForPublishTask = project.getTasks() + .create(sourceSet.getName() + "CopySchemas", Sync.class, task -> + { + task.from(dataSchemaDir, syncSpec -> DATA_TEMPLATE_FILE_SUFFIXES.forEach(suffix -> syncSpec.include("**/*" + suffix))); + task.into(publishableSchemasBuildDir); + }); + prepareSchemasForPublishTask.dependsOn(copyPdscSchemasTask); + + Collection dataTemplateJarDepends = new ArrayList<>(); + dataTemplateJarDepends.add(compileTask); + dataTemplateJarDepends.add(prepareSchemasForPublishTask); + + // Convert all PDL files back to PDSC for publication + // TODO: Remove this conversion permanently once translated PDSCs are no longer needed. + Task prepareLegacySchemasForPublishTask = project.getTasks() + .create(sourceSet.getName() + "TranslateSchemas", TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(publishableLegacySchemasBuildDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDSC); + task.setKeepOriginal(true); + task.setSkipVerification(true); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + }); + + prepareLegacySchemasForPublishTask.dependsOn(destroyStaleFiles); + dataTemplateJarDepends.add(prepareLegacySchemasForPublishTask); + + // extension schema directory + File extensionSchemaDir = project.file(getExtensionSchemaPath(project, sourceSet)); + + if (!SharedFileUtils.getSuffixedFiles(project, extensionSchemaDir, PDL_FILE_SUFFIX).isEmpty()) + { + // Validate extension schemas if extension schemas are provided. 
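+      // Validation runs before the extension schemas are synced into the publishable build
+      // directory below, so a broken *.pdl file fails the build early.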
+ ValidateExtensionSchemaTask validateExtensionSchemaTask = project.getTasks() + .create(sourceSet.getTaskName("validate", "ExtensionSchemas"), ValidateExtensionSchemaTask.class, task -> + { + task.setInputDir(extensionSchemaDir); + task.setResolverPath( + getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet)))); + task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + }); + + Task prepareExtensionSchemasForPublishTask = project.getTasks() + .create(sourceSet.getName() + "CopyExtensionSchemas", Sync.class, task -> + { + task.from(extensionSchemaDir, syncSpec -> syncSpec.include("**/*" + PDL_FILE_SUFFIX)); + task.into(publishableExtensionSchemasBuildDir); + }); + + prepareExtensionSchemasForPublishTask.dependsOn(validateExtensionSchemaTask); + prepareExtensionSchemasForPublishTask.dependsOn(copyPdscSchemasTask); + dataTemplateJarDepends.add(prepareExtensionSchemasForPublishTask); + } + + // include pegasus files in the output of this SourceSet + project.getTasks().withType(ProcessResources.class).getByName(targetSourceSet.getProcessResourcesTaskName(), it -> + { + it.from(prepareSchemasForPublishTask, copy -> copy.into("pegasus")); + // TODO: Remove this permanently once translated PDSCs are no longer needed. + it.from(prepareLegacySchemasForPublishTask, copy -> copy.into(TRANSLATED_SCHEMAS_DIR)); + Sync copyExtensionSchemasTask = project.getTasks().withType(Sync.class).findByName(sourceSet.getName() + "CopyExtensionSchemas"); + if (copyExtensionSchemasTask != null) + { + it.from(copyExtensionSchemasTask, copy -> copy.into("extensions")); + } + }); + + // create data template jar file + Jar dataTemplateJarTask = project.getTasks() + .create(sourceSet.getName() + "DataTemplateJar", Jar.class, task -> + { + task.dependsOn(dataTemplateJarDepends); + task.from(targetSourceSet.getOutput()); + + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "data-template")); + task.setDescription("Generate a data template jar"); + }); + + // add the data model and date template jars to the list of project artifacts. + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("dataTemplate", dataTemplateJarTask); + } + else + { + project.getArtifacts().add("testDataTemplate", dataTemplateJarTask); + } + + // include additional dependencies into the appropriate configuration used to compile the input source set + // must include the generated data template classes and their dependencies the configuration. + // "compile" and "testCompile" configurations have been removed in Gradle 7, + // but to keep the maximum backward compatibility, here we handle Gradle 7 and earlier version differently + // Once MIN_REQUIRED_VERSION reaches 7.0, we can remove the check of isAtLeastGradle7() + String compileConfigName; + if (isAtLeastGradle7()) { + compileConfigName = isTestSourceSet(sourceSet) ? "testImplementation" : project.getConfigurations().findByName("api") != null ? "api" : "implementation"; + } + else + { + compileConfigName = isTestSourceSet(sourceSet) ? 
"testCompile" : "compile"; + } + + Configuration compileConfig = project.getConfigurations().maybeCreate(compileConfigName); + compileConfig.extendsFrom( + getDataModelConfig(project, sourceSet), + project.getConfigurations().getByName("dataTemplateCompile")); + + // The getArchivePath() API doesn’t carry any task dependency and has been deprecated. + // Replace it with getArchiveFile() on Gradle 7, + // but keep getArchivePath() to be backwards-compatibility with Gradle version older than 5.1 + // DataHub Note - applied FIXME + project.getDependencies().add(compileConfigName, project.files( + isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath())); + + if (_configureIvyPublications) { + // The below Action is only applied when the 'ivy-publish' is applied by the consumer. + // If the consumer does not use ivy-publish, this is a noop. + // this Action prepares the project applying the pegasus plugin to publish artifacts using these steps: + // 1. Registers "feature variants" for pegasus-specific artifacts; + // see https://docs.gradle.org/6.1/userguide/feature_variants.html + // 2. Wires legacy configurations like `dataTemplateCompile` to auto-generated feature variant *Api and + // *Implementation configurations for backwards compatibility. + // 3. Configures the Ivy Publication to include auto-generated feature variant *Api and *Implementation + // configurations and their dependencies. + project.getPlugins().withType(IvyPublishPlugin.class, ivyPublish -> { + if (!isAtLeastGradle61()) + { + throw new GradleException("Using the ivy-publish plugin with the pegasus plugin requires Gradle 6.1 or higher " + + "at build time. Please upgrade."); + } + + JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); + // create new capabilities per source set; automatically creates api and implementation configurations + String featureName = mapSourceSetToFeatureName(targetSourceSet); + try + { + /* + reflection is required to preserve compatibility with Gradle 5.2.1 and below + TODO once Gradle 5.3+ is required, remove reflection and replace with: + java.registerFeature(featureName, featureSpec -> { + featureSpec.usingSourceSet(targetSourceSet); + }); + */ + Method registerFeature = JavaPluginExtension.class.getDeclaredMethod("registerFeature", String.class, Action.class); + Action/**/ featureSpecAction = createFeatureVariantFromSourceSet(targetSourceSet); + registerFeature.invoke(java, featureName, featureSpecAction); + } + catch (ReflectiveOperationException e) + { + throw new GradleException("Unable to register new feature variant", e); + } + + // expose transitive dependencies to consumers via variant configurations + Configuration featureConfiguration = project.getConfigurations().getByName(featureName); + Configuration mainGeneratedDataTemplateApi = project.getConfigurations().getByName(targetSourceSet.getApiConfigurationName()); + featureConfiguration.extendsFrom(mainGeneratedDataTemplateApi); + mainGeneratedDataTemplateApi.extendsFrom( + getDataModelConfig(project, targetSourceSet), + project.getConfigurations().getByName("dataTemplateCompile")); + + // Configure the existing IvyPublication + // For backwards-compatibility, make the legacy dataTemplate/testDataTemplate configurations extend + // their replacements, auto-created when we registered the new feature variant + project.afterEvaluate(p -> { + PublishingExtension publishing = p.getExtensions().getByType(PublishingExtension.class); + // When configuring a 
Gradle Publication, use this value to find the name of the publication to configure. Defaults to "ivy". + String publicationName = p.getExtensions().getExtraProperties().getProperties().getOrDefault("PegasusPublicationName", "ivy").toString(); + IvyPublication ivyPublication = publishing.getPublications().withType(IvyPublication.class).getByName(publicationName); + ivyPublication.configurations(configurations -> configurations.create(featureName, legacyConfiguration -> { + legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getApiElementsConfigurationName()).getName()); + legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getRuntimeElementsConfigurationName()).getName()); + })); + }); + }); + } + + if (debug) + { + System.out.println("configureDataTemplateGeneration sourceSet " + sourceSet.getName()); + System.out.println(compileConfigName + ".allDependencies : " + + project.getConfigurations().getByName(compileConfigName).getAllDependencies()); + System.out.println(compileConfigName + ".extendsFrom: " + + project.getConfigurations().getByName(compileConfigName).getExtendsFrom()); + System.out.println(compileConfigName + ".transitive: " + + project.getConfigurations().getByName(compileConfigName).isTransitive()); + } + + project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(dataTemplateJarTask); + return generateDataTemplatesTask; + } + + private String mapSourceSetToFeatureName(SourceSet sourceSet) { + String featureName = ""; + switch (sourceSet.getName()) { + case "mainGeneratedDataTemplate": + featureName = "dataTemplate"; + break; + case "testGeneratedDataTemplate": + featureName = "testDataTemplate"; + break; + case "mainGeneratedRest": + featureName = "restClient"; + break; + case "testGeneratedRest": + featureName = "testRestClient"; + break; + case "mainGeneratedAvroSchema": + featureName = "avroSchema"; + break; + case "testGeneratedAvroSchema": + featureName = "testAvroSchema"; + break; + default: + String msg = String.format("Unable to map %s to an appropriate feature name", sourceSet); + throw new GradleException(msg); + } + return featureName; + } + + // Generate rest client from idl files generated from java source files in the specified source set. + // + // This generates rest client source files from idl file generated from java source files + // in the source set. The generated rest client source files will be in a new source set. + // It also compiles the rest client source files into classes, and creates both the + // rest model and rest client jar files. + // + @SuppressWarnings("deprecation") + protected void configureRestClientGeneration(Project project, SourceSet sourceSet) + { + // idl directory for api project + File idlDir = project.file(getIdlPath(project, sourceSet)); + if (SharedFileUtils.getSuffixedFiles(project, idlDir, IDL_FILE_SUFFIX).isEmpty() && !isPropertyTrue(project, + PROCESS_EMPTY_IDL_DIR)) + { + return; + } + File generatedRestClientDir = project.file(getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) + + File.separatorChar + "java"); + + // always include imported data template jars in compileClasspath of rest client + FileCollection dataModelConfig = getDataModelConfig(project, sourceSet); + + // if data templates generated from this source set, add the generated data template jar to compileClasspath + // of rest client. 
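+    // For the "main" source set this resolves to "mainGeneratedDataTemplate", the source set
+    // created in configureDataTemplateGeneration above.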
+ String dataTemplateSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE); + + Jar dataTemplateJarTask = null; + + SourceSetContainer sourceSets = project.getConvention() + .getPlugin(JavaPluginConvention.class).getSourceSets(); + + FileCollection dataModels; + if (sourceSets.findByName(dataTemplateSourceSetName) != null) + { + if (debug) + { + System.out.println("sourceSet " + sourceSet.getName() + " has generated sourceSet " + dataTemplateSourceSetName); + } + dataTemplateJarTask = (Jar) project.getTasks().getByName(sourceSet.getName() + "DataTemplateJar"); + // The getArchivePath() API doesn’t carry any task dependency and has been deprecated. + // Replace it with getArchiveFile() on Gradle 7, + // but keep getArchivePath() to be backwards-compatibility with Gradle version older than 5.1 + // DataHub Note - applied FIXME + dataModels = dataModelConfig.plus(project.files( + isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath())); + } + else + { + dataModels = dataModelConfig; + } + + // create source set for generated rest model, rest client source and class files. + String targetSourceSetName = getGeneratedSourceSetName(sourceSet, REST_GEN_TYPE); + SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss -> + { + ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedRestClientDir)); + ss.setCompileClasspath(dataModels.plus(project.getConfigurations().getByName("restClientCompile"))); + }); + + project.getPlugins().withType(EclipsePlugin.class, eclipsePlugin -> { + EclipseModel eclipseModel = (EclipseModel) project.getExtensions().findByName("eclipse"); + eclipseModel.getClasspath().getPlusConfigurations() + .add(project.getConfigurations().getByName("restClientCompile")); + }); + + // idea plugin needs to know about new rest client source directory and its dependencies + addGeneratedDir(project, targetSourceSet, Arrays.asList( + getDataModelConfig(project, sourceSet), + project.getConfigurations().getByName("restClientCompile"))); + + // generate the rest client source files + GenerateRestClientTask generateRestClientTask = project.getTasks() + .create(targetSourceSet.getTaskName("generate", "restClient"), GenerateRestClientTask.class, task -> + { + task.dependsOn(project.getConfigurations().getByName("dataTemplate")); + task.setInputDir(idlDir); + task.setResolverPath(dataModels.plus(project.getConfigurations().getByName("restClientCompile"))); + task.setRuntimeClasspath(project.getConfigurations().getByName("dataModel") + .plus(project.getConfigurations().getByName("dataTemplate").getArtifacts().getFiles())); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setDestinationDir(generatedRestClientDir); + task.setRestli2FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_2)); + task.setRestli1FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_1)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE)) + { + task.setGenerateLowercasePath(false); + } + if (isPropertyTrue(project, ENABLE_FLUENT_API)) + { + task.setGenerateFluentApi(true); + } + task.doFirst(new CacheableAction<>(t -> project.delete(generatedRestClientDir))); + }); + + if (dataTemplateJarTask != null) + { + generateRestClientTask.dependsOn(dataTemplateJarTask); + } + + // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar. 
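+    // Mirrors the data template handling above: the generated rest client sources are also
+    // folded into the shared sources jar and Javadoc tasks.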
+ ((Jar) _generateSourcesJarTask).from(generateRestClientTask.getDestinationDir()); + _generateSourcesJarTask.dependsOn(generateRestClientTask); + + _generateJavadocTask.source(generateRestClientTask.getDestinationDir()); + _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath() + .plus(project.getConfigurations().getByName("restClientCompile")) + .plus(generateRestClientTask.getResolverPath())); + _generateJavadocTask.dependsOn(generateRestClientTask); + + // make sure rest client source files have been generated before compiling them + JavaCompile compileGeneratedRestClientTask = (JavaCompile) project.getTasks() + .getByName(targetSourceSet.getCompileJavaTaskName()); + compileGeneratedRestClientTask.dependsOn(generateRestClientTask); + compileGeneratedRestClientTask.getOptions().getCompilerArgs().add("-Xlint:-deprecation"); + + // create the rest model jar file + Task restModelJarTask = project.getTasks().create(sourceSet.getName() + "RestModelJar", Jar.class, task -> + { + task.from(idlDir, copySpec -> + { + copySpec.eachFile(fileCopyDetails -> project.getLogger() + .info("Add idl file: {}", fileCopyDetails)); + copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); + }); + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-model")); + task.setDescription("Generate rest model jar"); + }); + + // create the rest client jar file + Task restClientJarTask = project.getTasks() + .create(sourceSet.getName() + "RestClientJar", Jar.class, task -> + { + task.dependsOn(compileGeneratedRestClientTask); + task.from(idlDir, copySpec -> { + copySpec.eachFile(fileCopyDetails -> { + project.getLogger().info("Add interface file: {}", fileCopyDetails); + fileCopyDetails.setPath("idl" + File.separatorChar + fileCopyDetails.getPath()); + }); + copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); + }); + task.from(targetSourceSet.getOutput()); + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-client")); + task.setDescription("Generate rest client jar"); + }); + + // add the rest model jar and the rest client jar to the list of project artifacts. + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("restModel", restModelJarTask); + project.getArtifacts().add("restClient", restClientJarTask); + } + else + { + project.getArtifacts().add("testRestModel", restModelJarTask); + project.getArtifacts().add("testRestClient", restClientJarTask); + } + } + + // Return the appendix for generated jar files. + // The source set name is not included for the main source set. + private static String getAppendix(SourceSet sourceSet, String suffix) + { + return sourceSet.getName().equals("main") ? 
        suffix : sourceSet.getName() + '-' + suffix;
+  }
+
+  private static Project getApiProject(Project project)
+  {
+    if (project.getExtensions().getExtraProperties().has("apiProject"))
+    {
+      return (Project) project.getExtensions().getExtraProperties().get("apiProject");
+    }
+
+    List<String> subsSuffixes;
+    if (project.getExtensions().getExtraProperties().has("apiProjectSubstitutionSuffixes"))
+    {
+      @SuppressWarnings("unchecked")
+      List<String> suffixValue = (List<String>) project.getExtensions()
+          .getExtraProperties().get("apiProjectSubstitutionSuffixes");
+
+      subsSuffixes = suffixValue;
+    }
+    else
+    {
+      subsSuffixes = Arrays.asList("-impl", "-service", "-server", "-server-impl");
+    }
+
+    for (String suffix : subsSuffixes)
+    {
+      if (project.getPath().endsWith(suffix))
+      {
+        String searchPath = project.getPath().substring(0, project.getPath().length() - suffix.length()) + "-api";
+        Project apiProject = project.findProject(searchPath);
+        if (apiProject != null)
+        {
+          return apiProject;
+        }
+      }
+    }
+
+    return project.findProject(project.getPath() + "-api");
+  }
+
+  private static Project getCheckedApiProject(Project project)
+  {
+    Project apiProject = getApiProject(project);
+
+    if (apiProject == project)
+    {
+      throw new GradleException("The API project of " + project.getPath() + " must not be itself.");
+    }
+
+    return apiProject;
+  }
+
+  /**
+   * Return the property value if the property exists and is not empty (-Pname=value);
+   * return null if the property does not exist or is empty (-Pname).
+   *
+   * @param project the project where to look for the property
+   * @param propertyName the name of the property
+   */
+  public static String getNonEmptyProperty(Project project, String propertyName)
+  {
+    if (!project.hasProperty(propertyName))
+    {
+      return null;
+    }
+
+    String propertyValue = project.property(propertyName).toString();
+    if (propertyValue.isEmpty())
+    {
+      return null;
+    }
+
+    return propertyValue;
+  }
+
+  /**
+   * Return true if the given property exists and its value is true
+   *
+   * @param project the project where to look for the property
+   * @param propertyName the name of the property
+   */
+  public static boolean isPropertyTrue(Project project, String propertyName)
+  {
+    return project.hasProperty(propertyName) && Boolean.valueOf(project.property(propertyName).toString());
+  }
+
+  private static String createModifiedFilesMessage(Collection<String> nonEquivExpectedFiles,
+      Collection<String> foldersToBeBuilt)
+  {
+    StringBuilder builder = new StringBuilder();
+    builder.append("\nRemember to checkin the changes to the following new or modified files:\n");
+    for (String file : nonEquivExpectedFiles)
+    {
+      builder.append("  ");
+      builder.append(file);
+      builder.append("\n");
+    }
+
+    if (!foldersToBeBuilt.isEmpty())
+    {
+      builder.append("\nThe file modifications include service interface changes, you can build the following projects "
+          + "to re-generate the client APIs accordingly:\n");
+      for (String folder : foldersToBeBuilt)
+      {
+        builder.append("  ");
+        builder.append(folder);
+        builder.append("\n");
+      }
+    }
+
+    return builder.toString();
+  }
+
+  private static String createPossibleMissingFilesMessage(Collection<String> missingFiles)
+  {
+    StringBuilder builder = new StringBuilder();
+    builder.append("If this is the result of an automated build, then you may have forgotten to check in some snapshot or idl files:\n");
+    for (String file : missingFiles)
+    {
+      builder.append("  ");
+      builder.append(file);
+      builder.append("\n");
+    }
+
+    return builder.toString();
+  }
+
+  private static String findProperty(FileCompatibilityType type)
+  {
+    String property;
+    switch (type)
+    {
+      case SNAPSHOT:
+        property = SNAPSHOT_COMPAT_REQUIREMENT;
+        break;
+      case IDL:
+        property = IDL_COMPAT_REQUIREMENT;
+        break;
+      case PEGASUS_SCHEMA_SNAPSHOT:
+        property = PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT;
+        break;
+      case PEGASUS_EXTENSION_SCHEMA_SNAPSHOT:
+        property = PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT;
+        break;
+      default:
+        throw new GradleException("No property defined for compatibility type " + type);
+    }
+    return property;
+  }
+
+  private static Set<File> buildWatchedRestModelInputDirs(Project project, SourceSet sourceSet) {
+    @SuppressWarnings("unchecked")
+    Map<String, PegasusOptions> pegasusOptions = (Map<String, PegasusOptions>) project
+        .getExtensions().getExtraProperties().get("pegasus");
+
+    File rootPath = new File(project.getProjectDir(),
+        pegasusOptions.get(sourceSet.getName()).restModelOptions.getRestResourcesRootPath());
+
+    IdlOptions idlOptions = pegasusOptions.get(sourceSet.getName()).idlOptions;
+
+    // if idlItems exist, only watch the smaller subset
+    return idlOptions.getIdlItems().stream()
+        .flatMap(idlItem -> Arrays.stream(idlItem.packageNames))
+        .map(packageName -> new File(rootPath, packageName.replace('.', '/')))
+        .collect(Collectors.toCollection(TreeSet::new));
+  }
+
+  private static Set<File> difference(Set<File> left, Set<File> right)
+  {
+    Set<File> result = new HashSet<>(left);
+    result.removeAll(right);
+    return result;
+  }
+
+  /**
+   * Configures the given source set so that its data schema directory (usually 'pegasus') is marked as a resource root.
+   * The purpose of this is to improve the IDE experience. Makes sure to exclude this directory from being packaged in
+   * with the default Jar task.
+   */
+  private static void configureDataSchemaResourcesRoot(Project project, SourceSet sourceSet)
+  {
+    sourceSet.resources(sourceDirectorySet -> {
+      final String dataSchemaPath = getDataSchemaPath(project, sourceSet);
+      final File dataSchemaRoot = project.file(dataSchemaPath);
+      sourceDirectorySet.srcDir(dataSchemaPath);
+      project.getLogger().info("Adding resource root '{}'", dataSchemaPath);
+
+      final String extensionsSchemaPath = getExtensionSchemaPath(project, sourceSet);
+      final File extensionsSchemaRoot = project.file(extensionsSchemaPath);
+      sourceDirectorySet.srcDir(extensionsSchemaPath);
+      project.getLogger().info("Adding resource root '{}'", extensionsSchemaPath);
+
+      // Exclude the data schema and extensions schema directory from being copied into the default Jar task
+      sourceDirectorySet.getFilter().exclude(fileTreeElement -> {
+        final File file = fileTreeElement.getFile();
+        // Traversal starts with the children of a resource root, so checking the direct parent is sufficient
+        final boolean underDataSchemaRoot = dataSchemaRoot.equals(file.getParentFile());
+        final boolean underExtensionsSchemaRoot = extensionsSchemaRoot.equals(file.getParentFile());
+        final boolean exclude = (underDataSchemaRoot || underExtensionsSchemaRoot);
+        if (exclude)
+        {
+          project.getLogger().info("Excluding resource directory '{}'", file);
+        }
+        return exclude;
+      });
+    });
+  }
+
+  private Task generatePegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, File inputDir, File outputDir,
+      boolean isExtensionSchema)
+  {
+    return project.getTasks().create(sourceSet.getTaskName("generate", taskName),
+        GeneratePegasusSnapshotTask.class, task ->
+        {
+          task.setInputDir(inputDir);
+          task.setResolverPath(getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet))));
+          task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION));
+          task.setPegasusSchemaSnapshotDestinationDir(outputDir);
+          task.setExtensionSchema(isExtensionSchema);
+          if (isPropertyTrue(project, ENABLE_ARG_FILE))
+          {
+            task.setEnableArgFile(true);
+          }
+        });
+  }
+
+  private Task publishPegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, Task checkPegasusSnapshotTask,
+      File inputDir, File outputDir)
+  {
+    return project.getTasks().create(sourceSet.getTaskName("publish", taskName),
+        Sync.class, task ->
+        {
+          task.dependsOn(checkPegasusSnapshotTask);
+          task.from(inputDir);
+          task.into(outputDir);
+          task.onlyIf(t -> !SharedFileUtils.getSuffixedFiles(project, inputDir, PDL_FILE_SUFFIX).isEmpty());
+        });
+  }
+
+  private void checkGradleVersion(Project project)
+  {
+    if (MIN_REQUIRED_VERSION.compareTo(GradleVersion.current()) > 0)
+    {
+      throw new GradleException(String.format("This plugin does not support %s. Please use %s or later.",
+          GradleVersion.current(),
+          MIN_REQUIRED_VERSION));
+    }
+    if (MIN_SUGGESTED_VERSION.compareTo(GradleVersion.current()) > 0)
+    {
+      project.getLogger().warn(String.format("Pegasus supports %s, but it may not be supported in the next major release. Please use %s or later.",
+          GradleVersion.current(),
+          MIN_SUGGESTED_VERSION));
+    }
+  }
+
+  /**
+   * Reflection is necessary to obscure types introduced in Gradle 5.3
+   *
+   * @param sourceSet the target source set upon which to create a new feature variant
+   * @return an Action which modifies a org.gradle.api.plugins.FeatureSpec instance
+   */
+  private Action/*<FeatureSpec>*/ createFeatureVariantFromSourceSet(SourceSet sourceSet)
+  {
+    return featureSpec -> {
+      try
+      {
+        Class<?> clazz = Class.forName("org.gradle.api.plugins.FeatureSpec");
+        Method usingSourceSet = clazz.getDeclaredMethod("usingSourceSet", SourceSet.class);
+        usingSourceSet.invoke(featureSpec, sourceSet);
+      }
+      catch (ReflectiveOperationException e)
+      {
+        throw new GradleException("Unable to invoke FeatureSpec#usingSourceSet(SourceSet)", e);
+      }
+    };
+  }
+
+  protected static boolean isAtLeastGradle61()
+  {
+    return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("6.1")) >= 0;
+  }
+
+  public static boolean isAtLeastGradle7() {
+    return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("7.0")) >= 0;
+  }
+}
\ No newline at end of file
diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java
new file mode 100644
index 0000000000000..a2aafaf1be017
--- /dev/null
+++ b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java
@@ -0,0 +1,124 @@
+package com.linkedin.pegasus.gradle.tasks;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.specs.Specs;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.Internal;
+import org.gradle.api.tasks.SkipWhenEmpty;
+import org.gradle.api.tasks.TaskAction;
+import org.gradle.work.FileChange;
+import org.gradle.work.InputChanges;
+
+
+public class ChangedFileReportTask extends DefaultTask
+{
+  private final Collection<String> _needCheckinFiles = new ArrayList<>();
+
+  private FileCollection _idlFiles = getProject().files();
+  private FileCollection _snapshotFiles = getProject().files();
+
+  public ChangedFileReportTask()
+  {
+    // With Gradle 6.0, declaring an incremental task without outputs is not allowed.
+    getOutputs().upToDateWhen(Specs.satisfyNone());
+  }
+
+  // DataHub Note - updated for InputChanges
+  @TaskAction
+  public void checkFilesForChanges(InputChanges inputChanges)
+  {
+    getLogger().lifecycle("Checking idl and snapshot files for changes...");
+    getLogger().info("idlFiles: " + _idlFiles.getAsPath());
+    getLogger().info("snapshotFiles: " + _snapshotFiles.getAsPath());
+
+    Set<String> filesRemoved = new HashSet<>();
+    Set<String> filesAdded = new HashSet<>();
+    Set<String> filesChanged = new HashSet<>();
+
+    if (inputChanges.isIncremental())
+    {
+      Consumer<FileChange> handleChange = change ->
+      {
+        switch (change.getChangeType()) {
+          case ADDED:
+            filesAdded.add(change.getFile().getAbsolutePath());
+            break;
+          case REMOVED:
+            filesRemoved.add(change.getFile().getAbsolutePath());
+            break;
+          case MODIFIED:
+            filesChanged.add(change.getFile().getAbsolutePath());
+            break;
+        }
+      };
+
+      inputChanges.getFileChanges(_idlFiles).forEach(handleChange);
+      inputChanges.getFileChanges(_snapshotFiles).forEach(handleChange);
+
+      if (!filesRemoved.isEmpty())
+      {
+        String files = joinByComma(filesRemoved);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle(
+            "The following files have been removed, be sure to remove them from source control: {}", files);
+      }
+
+      if (!filesAdded.isEmpty())
+      {
+        String files = joinByComma(filesAdded);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle("The following files have been added, be sure to add them to source control: {}", files);
+      }
+
+      if (!filesChanged.isEmpty())
+      {
+        String files = joinByComma(filesChanged);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle(
+            "The following files have been changed, be sure to commit the changes to source control: {}", files);
+      }
+    }
+  }
+
+  private String joinByComma(Set<String> files)
+  {
+    return files.stream().collect(Collectors.joining(", "));
+  }
+
+  @InputFiles
+  @SkipWhenEmpty
+  public FileCollection getSnapshotFiles()
+  {
+    return _snapshotFiles;
+  }
+
+  public void setSnapshotFiles(FileCollection snapshotFiles)
+  {
+    _snapshotFiles = snapshotFiles;
+  }
+
+  @InputFiles
+  @SkipWhenEmpty
+  public FileCollection getIdlFiles()
+  {
+    return _idlFiles;
+  }
+
+  public void setIdlFiles(FileCollection idlFiles)
+  {
+    _idlFiles = idlFiles;
+  }
+
+  @Internal
+  public Collection<String> getNeedCheckinFiles()
+  {
+    return _needCheckinFiles;
+  }
+}
\ No newline at end of file
diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle
index a1b97701dbf88..437c72e6394ea 100644
--- a/datahub-frontend/build.gradle
+++ b/datahub-frontend/build.gradle
@@ -2,6 +2,7 @@ plugins {
   id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
   id 'scala'
   id 'com.palantir.docker'
+  id 'org.gradle.playframework'
 }
 
 apply from: "../gradle/versioning/versioning.gradle"
@@ -20,7 +21,6 @@ model {
 }
 
 task myTar(type: Tar) {
-  extension = "tgz"
   compression = Compression.GZIP
 
   from("${buildDir}/stage")
@@ -119,3 +119,23 @@ task cleanLocalDockerImages {
   }
 }
 dockerClean.finalizedBy(cleanLocalDockerImages)
+
+// gradle 8 fixes
+tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist'
+tasks.getByName('createDatahub-frontendZipDist').dependsOn 'stageMainDist'
+stagePlayBinaryDist.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistTar.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistZip.dependsOn
tasks.getByName('createDatahub-frontendStartScripts') +tasks.getByName('stageDatahub-frontendDist').dependsOn stagePlayBinaryDist +tasks.getByName('stageDatahub-frontendDist').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createMainStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createMainStartScripts +playBinaryDistTar.dependsOn createMainStartScripts +playBinaryDistZip.dependsOn createMainStartScripts +createMainStartScripts.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryZipDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageMainDist' +createPlayBinaryZipDist.dependsOn 'stageMainDist' diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index dd1ceee411f74..84fb4c02620b8 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -1,4 +1,3 @@ -apply plugin: "org.gradle.playframework" // Change this to listen on a different port project.ext.httpPort = 9001 @@ -101,4 +100,22 @@ play { test { useJUnitPlatform() + + def playJava17CompatibleJvmArgs = [ + "--add-opens=java.base/java.lang=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + //"--add-opens=java.base/java.io=ALL-UNNAMED", + //"--add-opens=java.base/java.net=ALL-UNNAMED", + //"--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + //"--add-opens=java.base/sun.security.action=ALL-UNNAMED", + //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED", + //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED", + ] + jvmArgs = playJava17CompatibleJvmArgs } diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index fba0031351b58..6e8cb93966922 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -1,7 +1,8 @@ plugins { + id 'java' id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" } -apply plugin: 'java' + dependencies { implementation project(':metadata-service:restli-client') diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index fd36e5ac4bc2c..72821d8b97dc0 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -1,8 +1,8 @@ plugins { id 'java' + id 'distribution' + id 'com.github.node-gradle.node' } -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' node { @@ -35,7 +35,7 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } @@ -94,7 +94,7 @@ configurations { distZip { dependsOn yarnQuickBuild - baseName 'datahub-web-react' + archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}" from 'dist' } @@ -112,5 +112,5 @@ jar { into('public') { from zipTree(distZip.outputs.files.first()) } - classifier = 'assets' + archiveClassifier = 'assets' } diff --git 
a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 0c4c229af34f0..17d691177aa34 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -17,7 +17,7 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ ENV LD_LIBRARY_PATH="/lib:/lib64" @@ -25,7 +25,10 @@ ENV LD_LIBRARY_PATH="/lib:/lib64" FROM base as prod-install COPY ./datahub-frontend.zip / -RUN unzip datahub-frontend.zip && rm datahub-frontend.zip +RUN unzip datahub-frontend.zip -d /datahub-frontend \ + && mv /datahub-frontend/main/* /datahub-frontend \ + && rmdir /datahub-frontend/main \ + && rm datahub-frontend.zip COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend diff --git a/docker/datahub-frontend/start.sh b/docker/datahub-frontend/start.sh index 12e6b8915096d..f5de9c87968b0 100755 --- a/docker/datahub-frontend/start.sh +++ b/docker/datahub-frontend/start.sh @@ -49,6 +49,8 @@ export JAVA_OPTS="${JAVA_MEMORY_OPTS:-"-Xms512m -Xmx1024m"} \ -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf \ -Dlogback.configurationFile=datahub-frontend/conf/logback.xml \ -Dlogback.debug=false \ + --add-opens java.base/java.lang=ALL-UNNAMED \ + --add-opens=java.base/java.util=ALL-UNNAMED \ ${PROMETHEUS_AGENT:-} ${OTEL_AGENT:-} \ ${TRUSTSTORE_FILE:-} ${TRUSTSTORE_TYPE:-} ${TRUSTSTORE_PASSWORD:-} \ ${HTTP_PROXY:-} ${HTTPS_PROXY:-} ${NO_PROXY:-} \ diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index 9c79e1da542f0..b26a02c1d3b15 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -40,14 +40,14 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O 
jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index 52db594e2ef85..36444210f1938 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -45,9 +45,9 @@ docker { buildArgs(dockerBuildArgs) } -tasks.getByName('docker').dependsOn(['build', - ':docker:datahub-ingestion-base:docker', - ':metadata-ingestion:codegen']) +tasks.getByName('dockerPrepare').dependsOn(['build', + ':docker:datahub-ingestion-base:docker', + ':metadata-ingestion:codegen']) task mkdirBuildDocker { doFirst { diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 5bfa5f35ace17..9b7c6e762462e 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index cc79a3072c193..4da94794e0ead 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY 
--from=binary /go/bin/dockerize /usr/local/bin FROM base as prod-install diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index 2beb5b54dac38..00dae87dfc3de 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -38,13 +38,13 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index f6a4b62a79356..53353863b6e5f 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -31,7 +31,7 @@ LABEL name="kafka" version=${KAFKA_VERSION} RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi RUN apk add --no-cache bash coreutils -RUN apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community +RUN apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community RUN apk add --no-cache -t .build-deps git curl ca-certificates jq gcc musl-dev libffi-dev zip RUN mkdir -p /opt \ diff --git a/docs-website/build.gradle b/docs-website/build.gradle index a213ec1ae8194..2644491a2a5f8 100644 --- a/docs-website/build.gradle +++ b/docs-website/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' +plugins { + id 'distribution' + id 'com.github.node-gradle.node' +} node { @@ -12,10 +14,10 @@ node { } // Version of node to use. - version = '16.16.0' + version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.0' + yarnVersion = '1.22.1' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). 
if (project.hasProperty('nodeDistBaseUrl')) {
@@ -31,7 +33,7 @@ node {
  yarnWorkDir = file("${project.projectDir}/.gradle/yarn")

  // Set the work directory where node_modules should be located
- nodeModulesDir = file("${project.projectDir}")
+ nodeProjectDir = file("${project.projectDir}")
}

/*
@@ -122,7 +124,11 @@ task yarnBuild(type: YarnTask, dependsOn: [yarnLint, yarnGenerate, downloadHisto
  // See https://stackoverflow.com/questions/53230823/fatal-error-ineffective-mark-compacts-near-heap-limit-allocation-failed-java
  // and https://github.com/facebook/docusaurus/issues/8329.
  // TODO: As suggested in https://github.com/facebook/docusaurus/issues/4765, try switching to swc-loader.
- environment = ['NODE_OPTIONS': '--max-old-space-size=10248']
+ if (project.hasProperty('useSystemNode') && project.getProperty('useSystemNode').toBoolean()) {
+   environment = ['NODE_OPTIONS': '--max-old-space-size=10248']
+ } else {
+   environment = ['NODE_OPTIONS': '--max-old-space-size=10248 --openssl-legacy-provider']
+ }
  args = ['run', 'build']
}
diff --git a/docs-website/vercel-setup.sh b/docs-website/vercel-setup.sh
index db532e167b59f..915635b24ee88 100755
--- a/docs-website/vercel-setup.sh
+++ b/docs-website/vercel-setup.sh
@@ -12,7 +12,7 @@ set -euxo pipefail

yum groupinstall "Development Tools" -y
yum erase openssl-devel -y
-yum install openssl11 openssl11-devel libffi-devel bzip2-devel wget -y
+yum install openssl11 openssl11-devel libffi-devel bzip2-devel wget nodejs -y

wget https://www.python.org/ftp/python/3.10.11/Python-3.10.11.tgz
tar -xf Python-3.10.11.tgz
diff --git a/docs/developers.md b/docs/developers.md
index c3c3a59283e66..60d31f5e4523f 100644
--- a/docs/developers.md
+++ b/docs/developers.md
@@ -6,16 +6,12 @@ title: "Local Development"

## Requirements

-- Both [Java 11 JDK](https://openjdk.org/projects/jdk/11/) and [Java 8 JDK](https://openjdk.java.net/projects/jdk8/)
+- [Java 17 JDK](https://openjdk.org/projects/jdk/17/)
- [Python 3.10](https://www.python.org/downloads/release/python-3100/)
- [Docker](https://www.docker.com/)
- [Docker Compose](https://docs.docker.com/compose/)
- Docker engine with at least 8GB of memory to run tests.

-:::caution
-
-Do not try to use a JDK newer than JDK 11. The build process does not currently work with newer JDKs versions.
-
:::

On macOS, these can be installed using [Homebrew](https://brew.sh/).
@@ -147,11 +143,11 @@ You're probably using a Java version that's too new for gradle. Run the followin

java --version
```

-While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11).
+While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 17](https://openjdk.org/projects/jdk/17/).

#### Getting `cannot find symbol` error for `javax.annotation.Generated`

-Similar to the previous issue, please use Java 1.8 to build the project.
+Similar to the previous issue, please use Java 17 to build the project.

You can install multiple versions of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details.
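For example, switching between installed JDKs might look like this (a sketch; the exact paths depend on how Java was installed on your machine):

```shell
# macOS: ask java_home for an installed JDK 17
export JAVA_HOME=$(/usr/libexec/java_home -v 17)

# Linux: point at your distribution's JDK 17 directory (illustrative path)
# export JAVA_HOME=/usr/lib/jvm/java-17-openjdk

./gradlew build
```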
#### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error

diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md
index 36be572f2886e..61ad2d623d72a 100644
--- a/docs/how/updating-datahub.md
+++ b/docs/how/updating-datahub.md
@@ -7,11 +7,15 @@ This file documents any backwards-incompatible changes in DataHub and assists pe
### Breaking Changes

- Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances.
+- Neo4j 5.x may require migration from 4.x
+- Build now requires JDK17 (Runtime Java 11)

### Potential Downtime

### Deprecations

+- Spark 2.x (including previous JDK8 build requirements)
+
### Other Notable Changes

## 0.12.1
diff --git a/docs/troubleshooting/build.md b/docs/troubleshooting/build.md
index 112bcdc47e956..7b4ae98cdb03b 100644
--- a/docs/troubleshooting/build.md
+++ b/docs/troubleshooting/build.md
@@ -10,11 +10,11 @@ You're probably using a Java version that's too new for gradle. Run the followin

java --version
```

-While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11).
+While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 17](https://openjdk.org/projects/jdk/17/).

## Getting `cannot find symbol` error for `javax.annotation.Generated`

-Similar to the previous issue, please use Java 1.8 to build the project.
+Similar to the previous issue, please use Java 17 to build the project.

You can install multiple versions of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details.
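If you prefer not to modify your shell environment, Gradle can also be pointed at a specific JDK directly (the path below is an illustrative example, not a required location):

```shell
# One-off: tell Gradle which JDK to run with
./gradlew build -Dorg.gradle.java.home=/usr/lib/jvm/java-17-openjdk

# Or persist the setting in gradle.properties:
# org.gradle.java.home=/usr/lib/jvm/java-17-openjdk
```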
## `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error diff --git a/entity-registry/build.gradle b/entity-registry/build.gradle index 3da0bf5bb4fb8..77cca24c0e723 100644 --- a/entity-registry/build.gradle +++ b/entity-registry/build.gradle @@ -1,10 +1,13 @@ -apply plugin: 'pegasus' -apply plugin: 'java-library' +plugins { + id 'pegasus' + id 'java-library' +} dependencies { implementation spec.product.pegasus.data implementation spec.product.pegasus.generator api project(path: ':metadata-models') + api project(path: ':metadata-models', configuration: "dataTemplate") implementation externalDependency.slf4jApi compileOnly externalDependency.lombok implementation externalDependency.guava diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 4e86b9270786f..bdc9a83b1e652 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/li-utils/build.gradle b/li-utils/build.gradle index 1d5222e39185a..975cd2bccccf3 100644 --- a/li-utils/build.gradle +++ b/li-utils/build.gradle @@ -1,17 +1,9 @@ -apply plugin: 'java-library' -apply plugin: 'pegasus' - -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'java-library' + id 'pegasus' } + dependencies { api spec.product.pegasus.data implementation externalDependency.commonsLang @@ -28,7 +20,7 @@ dependencies { testImplementation externalDependency.commonsIo testImplementation project(':test-models') testImplementation project(path: ':test-models', configuration: 'testDataTemplate') - testImplementation externalDependency.testngJava8 + testImplementation externalDependency.testng } idea { @@ -38,4 +30,4 @@ idea { } // Need to compile backing java parameterDefinitions with the data template. 
-sourceSets.mainGeneratedDataTemplate.java.srcDirs('src/main/javaPegasus/') \ No newline at end of file +sourceSets.mainGeneratedDataTemplate.java.srcDirs('src/main/javaPegasus/') diff --git a/metadata-auth/auth-api/build.gradle b/metadata-auth/auth-api/build.gradle index 7159aa5f15e61..c68c3019bd2b4 100644 --- a/metadata-auth/auth-api/build.gradle +++ b/metadata-auth/auth-api/build.gradle @@ -15,13 +15,12 @@ test { } jar { - archiveName = "$project.name-lib.jar" + archiveClassifier = "lib" } shadowJar { zip64 true - classifier = null - archiveName = "$project.name-${version}.jar" + archiveClassifier = "" exclude "META-INF/*.RSA", "META-INF/*.SF","META-INF/*.DSA" } @@ -39,12 +38,12 @@ dependencies() { } task sourcesJar(type: Jar) { - classifier 'sources' + archiveClassifier = 'sources' from sourceSets.main.allJava } task javadocJar(type: Jar, dependsOn: javadoc) { - classifier 'javadoc' + archiveClassifier = 'javadoc' from javadoc.destinationDir } diff --git a/metadata-events/mxe-utils-avro/build.gradle b/metadata-events/mxe-utils-avro/build.gradle index 3493797ab4f97..98bfb9127b209 100644 --- a/metadata-events/mxe-utils-avro/build.gradle +++ b/metadata-events/mxe-utils-avro/build.gradle @@ -1,8 +1,11 @@ -apply plugin: 'java-library' +plugins { + id 'java-library' +} dependencies { api project(':metadata-events:mxe-avro') api project(':metadata-models') + api project(path: ':metadata-models', configuration: "dataTemplate") api spec.product.pegasus.dataAvro testImplementation externalDependency.testng diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index 7ae01faaaabdd..b14953d7ce021 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -14,19 +14,9 @@ import org.apache.tools.ant.filters.ReplaceTokens jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } -} - dependencies { implementation project(':metadata-models') + implementation project(path: ':metadata-models', configuration: "dataTemplate") implementation(externalDependency.kafkaAvroSerializer) { exclude group: "org.apache.avro" } @@ -49,7 +39,7 @@ dependencies { annotationProcessor externalDependency.lombok // VisibleForTesting compileOnly externalDependency.guava - testImplementation externalDependency.testngJava8 + testImplementation externalDependency.testng testImplementation externalDependency.mockito testImplementation externalDependency.mockServer testImplementation externalDependency.mockServerClient @@ -241,4 +231,4 @@ sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/res clean { project.delete("$projectDir/generated") -} +} \ No newline at end of file diff --git a/metadata-integration/java/datahub-protobuf-example/build.gradle b/metadata-integration/java/datahub-protobuf-example/build.gradle index 4e53d8ed763ba..1efb43360457a 100644 --- a/metadata-integration/java/datahub-protobuf-example/build.gradle +++ b/metadata-integration/java/datahub-protobuf-example/build.gradle @@ -64,10 +64,6 @@ protobuf { task publishSchema(dependsOn: build) { description "Publishes protobuf schema in the `main` sourceSet to DataHub" - def javaLauncher = 
javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(11) - } - fileTree("schema").matching { exclude "protobuf/meta/**" }.each {f -> diff --git a/metadata-integration/java/datahub-protobuf/build.gradle b/metadata-integration/java/datahub-protobuf/build.gradle index bc919119f8fac..2cb36a14cb9c7 100644 --- a/metadata-integration/java/datahub-protobuf/build.gradle +++ b/metadata-integration/java/datahub-protobuf/build.gradle @@ -12,12 +12,6 @@ apply from: '../versioning.gradle' jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation -afterEvaluate { - if (project.plugins.hasPlugin('java')) { - sourceCompatibility = 11 - targetCompatibility = 11 - } -} ext { javaMainClass = "datahub.protobuf.Proto2DataHub" } @@ -211,4 +205,4 @@ nexusStaging { password = System.getenv("NEXUS_PASSWORD") } - +startScripts.dependsOn shadowJar \ No newline at end of file diff --git a/metadata-integration/java/examples/build.gradle b/metadata-integration/java/examples/build.gradle index 581e9f82da0dc..ddf574e8c8905 100644 --- a/metadata-integration/java/examples/build.gradle +++ b/metadata-integration/java/examples/build.gradle @@ -1,16 +1,6 @@ -apply plugin: 'java' -apply plugin: 'jacoco' - - -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'java' + id 'jacoco' } dependencies { diff --git a/metadata-integration/java/spark-lineage/build.gradle b/metadata-integration/java/spark-lineage/build.gradle index 7143ac4833143..c5dd9b5012c29 100644 --- a/metadata-integration/java/spark-lineage/build.gradle +++ b/metadata-integration/java/spark-lineage/build.gradle @@ -11,17 +11,6 @@ apply from: '../versioning.gradle' jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } -} - //to rename artifacts for publish project.archivesBaseName = 'datahub-'+project.name @@ -34,18 +23,19 @@ configurations { dependencies { - //Needed for tie breaking of guava version need for spark and wiremock - provided(externalDependency.hadoopMapreduceClient) { - force = true + constraints { + provided(externalDependency.hadoopMapreduceClient) { + because 'Needed for tie breaking of guava version need for spark and wiremock' + } + provided(externalDependency.hadoopCommon) { + because 'required for org.apache.hadoop.util.StopWatch' + } + provided(externalDependency.commonsIo) { + because 'required for org.apache.commons.io.Charsets that is used internally' + } } - provided(externalDependency.hadoopCommon) { - force = true - } // required for org.apache.hadoop.util.StopWatch - - provided(externalDependency.commonsIo) { - force = true - } // required for org.apache.commons.io.Charsets that is used internally + provided 'org.scala-lang:scala-library:2.12.18' implementation externalDependency.slf4jApi compileOnly externalDependency.lombok @@ -86,7 +76,7 @@ task checkShadowJar(type: Exec) { shadowJar { zip64=true - classifier='' + archiveClassifier = '' mergeServiceFiles() def exclude_modules = project @@ -107,7 +97,7 @@ shadowJar { // 
preventing java multi-release JAR leakage // https://github.com/johnrengelman/shadow/issues/729 - exclude('module-info.class', 'META-INF/versions/**') + exclude('module-info.class', 'META-INF/versions/**', 'LICENSE', 'NOTICE') // prevent jni conflict with spark exclude '**/libzstd-jni.*' @@ -138,6 +128,25 @@ jacocoTestReport { test { forkEvery = 1 useJUnit() + + def sparkJava17CompatibleJvmArgs = [ + "--add-opens=java.base/java.lang=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + //"--add-opens=java.base/java.io=ALL-UNNAMED", + "--add-opens=java.base/java.net=ALL-UNNAMED", + "--add-opens=java.base/java.nio=ALL-UNNAMED", + //"--add-opens=java.base/java.util=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED", + "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + //"--add-opens=java.base/sun.security.action=ALL-UNNAMED", + //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED", + //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED", + ] + jvmArgs = sparkJava17CompatibleJvmArgs + finalizedBy jacocoTestReport } @@ -151,12 +160,12 @@ task integrationTest(type: Exec, dependsOn: [shadowJar, ':docker:quickstartSlim' } task sourcesJar(type: Jar) { - classifier 'sources' + archiveClassifier = 'sources' from sourceSets.main.allJava } task javadocJar(type: Jar, dependsOn: javadoc) { - classifier 'javadoc' + archiveClassifier = 'javadoc' from javadoc.destinationDir } @@ -224,3 +233,12 @@ nexusStaging { username = System.getenv("NEXUS_USERNAME") password = System.getenv("NEXUS_PASSWORD") } + +task cleanExtraDirs { + delete "$projectDir/derby.log" + delete "$projectDir/src/test/resources/data/hive" + delete "$projectDir/src/test/resources/data/out.csv" + delete "$projectDir/src/test/resources/data/out_persist.csv" + delete "$projectDir/spark-smoke-test/venv" +} +clean.finalizedBy(cleanExtraDirs) diff --git a/metadata-integration/java/spark-lineage/scripts/check_jar.sh b/metadata-integration/java/spark-lineage/scripts/check_jar.sh index dd9cae68f31cb..275b91304e7ee 100755 --- a/metadata-integration/java/spark-lineage/scripts/check_jar.sh +++ b/metadata-integration/java/spark-lineage/scripts/check_jar.sh @@ -34,7 +34,9 @@ jar -tvf $jarFile |\ grep -v "linux/" |\ grep -v "darwin" |\ grep -v "MetadataChangeProposal.avsc" |\ - grep -v "aix" + grep -v "aix" |\ + grep -v "library.properties" |\ + grep -v "rootdoc.txt" if [ $? 
-ne 0 ]; then echo "✅ No unexpected class paths found in ${jarFile}" diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile b/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile index 119338be6c2a9..21d0701fcfcd6 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/docker/SparkBase.Dockerfile @@ -17,7 +17,7 @@ RUN apt-get update -y && \ apt-get install /tmp/zulu-repo_1.0.0-3_all.deb && \ apt-get update && \ # apt-cache search zulu && \ - apt-get install -y --no-install-recommends zulu11-jre && \ + apt-get install -y --no-install-recommends zulu17-jre && \ apt-get clean && \ curl -sS https://archive.apache.org/dist/spark/spark-${spark_version}/spark-${spark_version}-bin-hadoop${hadoop_version}.tgz -o spark.tgz && \ tar -xf spark.tgz && \ diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh b/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh index 429f692500c80..c06e2faec0bcb 100755 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/python-spark-lineage-test/python_test_run.sh @@ -7,25 +7,24 @@ saluation () { echo "--------------------------------------------------------" - echo "Starting execution $1" + echo "Starting execution $1 (properties: $2)" echo "--------------------------------------------------------" } -saluation "HdfsIn2HdfsOut1.py" - +saluation "HdfsIn2HdfsOut1.py" $2 spark-submit --properties-file $2 HdfsIn2HdfsOut1.py -saluation "HdfsIn2HdfsOut2.py" +saluation "HdfsIn2HdfsOut2.py" $2 spark-submit --properties-file $2 HdfsIn2HdfsOut2.py -saluation "HdfsIn2HiveCreateTable.py" +saluation "HdfsIn2HiveCreateTable.py" $2 spark-submit --properties-file $2 HdfsIn2HiveCreateTable.py -saluation "HdfsIn2HiveCreateInsertTable.py" +saluation "HdfsIn2HiveCreateInsertTable.py" $2 spark-submit --properties-file $2 HdfsIn2HiveCreateInsertTable.py -saluation "HiveInHiveOut.py" +saluation "HiveInHiveOut.py" $2 spark-submit --properties-file $2 HiveInHiveOut.py diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf b/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf index 43103c3db65ad..a511d9f114f2b 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/spark-docker.conf @@ -4,3 +4,7 @@ spark.jars file:///opt/workspace/datahub-spark-lineage*.jar spark.extraListeners datahub.spark.DatahubSparkListener spark.datahub.rest.server http://datahub-gms:8080 + +spark.driver.extraJavaOptions --add-opens java.base/java.lang=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED +spark.executor.extraJavaOptions --add-opens java.base/java.lang=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED + diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle index 12aa1775d6104..6337f8c9beec6 100644 --- 
a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/build.gradle @@ -17,17 +17,6 @@ repositories { jcenter() } -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } -} - dependencies { implementation 'org.apache.spark:spark-sql_2.11:2.4.8' } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java index 2df468fc03e74..053055716eaa0 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java @@ -37,7 +37,7 @@ public class TestCoalesceJobLineage { private static final String APP_NAME = "sparkCoalesceTestApp"; - private static final String TEST_RELATIVE_PATH = "../"; + private static final String TEST_RELATIVE_PATH = ""; private static final String RESOURCE_DIR = "src/test/resources"; private static final String DATA_DIR = TEST_RELATIVE_PATH + RESOURCE_DIR + "/data"; private static final String WAREHOUSE_LOC = DATA_DIR + "/hive/warehouse/coalesce"; @@ -142,6 +142,9 @@ public void setup() { "spark.datahub.parent.datajob_urn", "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .config( + "javax.jdo.option.ConnectionURL", + "jdbc:derby:;databaseName=build/tmp/metastore_db_coalesce;create=true") .enableHiveSupport() .getOrCreate(); diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index 3a70c10e0c1f9..fa896814d16f6 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -191,6 +191,9 @@ public static void setup() { .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .config( + "javax.jdo.option.ConnectionURL", + "jdbc:derby:;databaseName=build/tmp/metastore_db_spark;create=true") .enableHiveSupport() .getOrCreate(); diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index 48f80f06d07c2..568b99acdf894 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -62,7 +62,10 @@ dependencies { testImplementation externalDependency.h2 testImplementation externalDependency.mysqlConnector testImplementation externalDependency.neo4jHarness - testImplementation (externalDependency.neo4jApoc) { + testImplementation (externalDependency.neo4jApocCore) { + exclude group: 'org.yaml', module: 'snakeyaml' + } + testImplementation (externalDependency.neo4jApocCommon) { exclude group: 'org.yaml', module: 'snakeyaml' } testImplementation externalDependency.mockito diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java
index 217d54c5c0b0f..c8d3147711eba 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java
@@ -432,8 +432,8 @@ private Pair<String, Map<String, Object>> generateLineageStatementAndParameters(
        + "(b)) "
        + "WHERE a <> b "
        + "  AND ALL(rt IN relationships(path) WHERE "
-       + "  (EXISTS(rt.source) AND rt.source = 'UI') OR "
-       + "  (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR "
+       + "  (rt.source IS NOT NULL AND rt.source = 'UI') OR "
+       + "  (rt.createdOn IS NULL AND rt.updatedOn IS NULL) OR "
        + "  ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR "
        + "  $startTimeMillis <= rt.updatedOn <= $endTimeMillis) "
        + "  ) "
diff --git a/metadata-jobs/mae-consumer/build.gradle b/metadata-jobs/mae-consumer/build.gradle
index fcb8b62e4ac9d..2e068d5a3501e 100644
--- a/metadata-jobs/mae-consumer/build.gradle
+++ b/metadata-jobs/mae-consumer/build.gradle
@@ -60,6 +60,7 @@ task avroSchemaSources(type: Copy) {
}

compileJava.dependsOn avroSchemaSources
+processResources.dependsOn avroSchemaSources

clean {
  project.delete("src/main/resources/avro")
diff --git a/metadata-jobs/mce-consumer/build.gradle b/metadata-jobs/mce-consumer/build.gradle
index 97eec9fcff051..5fa65c06de714 100644
--- a/metadata-jobs/mce-consumer/build.gradle
+++ b/metadata-jobs/mce-consumer/build.gradle
@@ -1,8 +1,8 @@
plugins {
  id 'java'
+ id 'pegasus'
}
-apply plugin: 'pegasus'

configurations {
  avro
@@ -49,6 +49,7 @@ task avroSchemaSources(type: Copy) {
}

compileJava.dependsOn avroSchemaSources
+processResources.dependsOn avroSchemaSources

clean {
  project.delete("src/main/resources/avro")
diff --git a/metadata-jobs/pe-consumer/build.gradle b/metadata-jobs/pe-consumer/build.gradle
index 81e8b8c9971f0..2fd19af92971e 100644
--- a/metadata-jobs/pe-consumer/build.gradle
+++ b/metadata-jobs/pe-consumer/build.gradle
@@ -1,7 +1,7 @@
plugins {
  id 'java'
+ id 'pegasus'
}
-apply plugin: 'pegasus'

configurations {
  avro
@@ -37,6 +37,7 @@ task avroSchemaSources(type: Copy) {
}

compileJava.dependsOn avroSchemaSources
+processResources.dependsOn avroSchemaSources

clean {
  project.delete("src/main/resources/avro")
diff --git a/metadata-models-custom/build.gradle b/metadata-models-custom/build.gradle
index 71d3b0fd1f736..3ac08dca7c0db 100644
@@ -16,8 +16,8 @@ buildscript {
plugins {
  id 'base'
  id 'maven-publish'
+ id 'pegasus'
}
-apply plugin: 'pegasus'

if (project.hasProperty('projVersion')) {
  project.version = project.projVersion
diff --git a/metadata-models-validator/build.gradle b/metadata-models-validator/build.gradle
index c8d1d2e6651d6..1dae53e817ae1 100644
--- a/metadata-models-validator/build.gradle
+++ b/metadata-models-validator/build.gradle
@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+  id 'java'
+}

dependencies {
  implementation project(":entity-registry")
diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle
index e90a4042c1921..04c90fa444f0c 100644
--- a/metadata-models/build.gradle
+++ b/metadata-models/build.gradle
@@ -1,20 +1,12 @@
import io.datahubproject.GenerateJsonSchemaTask

-apply plugin: 'java-library'
-apply plugin: 'pegasus'
-apply plugin: 'org.hidetake.swagger.generator'
-
-tasks.withType(JavaCompile).configureEach {
- javaCompiler =
javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'pegasus' + id 'java-library' + id 'org.hidetake.swagger.generator' } + dependencies { api spec.product.pegasus.data constraints { @@ -35,7 +27,7 @@ dependencies { swaggerCodegen externalDependency.swaggerCli testImplementation externalDependency.guava - testImplementation externalDependency.testngJava8 + testImplementation externalDependency.testng } sourceSets { diff --git a/metadata-service/auth-config/build.gradle b/metadata-service/auth-config/build.gradle index c7a1128897dd5..8302e3b0c2fe6 100644 --- a/metadata-service/auth-config/build.gradle +++ b/metadata-service/auth-config/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(path: ':metadata-models') diff --git a/metadata-service/auth-filter/build.gradle b/metadata-service/auth-filter/build.gradle index 61e9015adc942..9d763ca11421b 100644 --- a/metadata-service/auth-filter/build.gradle +++ b/metadata-service/auth-filter/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-auth:auth-api') diff --git a/metadata-service/auth-impl/build.gradle b/metadata-service/auth-impl/build.gradle index 60d622dea5447..4f4b0658caf24 100644 --- a/metadata-service/auth-impl/build.gradle +++ b/metadata-service/auth-impl/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} compileJava { diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java index d5d5b0c4e6c71..f03113f3eb9bd 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java @@ -8,7 +8,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; @@ -68,7 +68,7 @@ public void shouldReturnFieldValueWithResourceSpecIfTypeIsDataPlatformInstance() assertEquals( Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); - verifyZeroInteractions(entityClientMock); + verifyNoMoreInteractions(entityClientMock); } @Test diff --git a/metadata-service/auth-servlet-impl/build.gradle b/metadata-service/auth-servlet-impl/build.gradle index 7945b3b4e9a06..b8310bbd4ebc0 100644 --- a/metadata-service/auth-servlet-impl/build.gradle +++ b/metadata-service/auth-servlet-impl/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-auth:auth-api') diff --git a/metadata-service/factories/build.gradle b/metadata-service/factories/build.gradle index 86644e3b034da..145ec7e65188c 100644 --- a/metadata-service/factories/build.gradle +++ 
b/metadata-service/factories/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java-library' +plugins { + id 'java-library' +} dependencies { api project(':metadata-io') diff --git a/metadata-service/graphql-servlet-impl/build.gradle b/metadata-service/graphql-servlet-impl/build.gradle index 51f67631159d3..5767698242118 100644 --- a/metadata-service/graphql-servlet-impl/build.gradle +++ b/metadata-service/graphql-servlet-impl/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':datahub-graphql-core') diff --git a/metadata-service/openapi-servlet/build.gradle b/metadata-service/openapi-servlet/build.gradle index 1909b4862d294..0430d4427528d 100644 --- a/metadata-service/openapi-servlet/build.gradle +++ b/metadata-service/openapi-servlet/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { diff --git a/metadata-service/plugin/build.gradle b/metadata-service/plugin/build.gradle index 00a6384b923a0..3f91b8f6ae6ba 100644 --- a/metadata-service/plugin/build.gradle +++ b/metadata-service/plugin/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { @@ -30,4 +32,4 @@ test { clean { dependsOn ':metadata-service:plugin:src:test:sample-test-plugins:clean' -} +} \ No newline at end of file diff --git a/metadata-service/plugin/src/test/sample-test-plugins/build.gradle b/metadata-service/plugin/src/test/sample-test-plugins/build.gradle index f299a35db0f64..d4b2b4c92ad63 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/build.gradle +++ b/metadata-service/plugin/src/test/sample-test-plugins/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} jar { archiveFileName = "sample-plugins.jar" diff --git a/metadata-service/restli-api/build.gradle b/metadata-service/restli-api/build.gradle index 352738d01f8da..505320e8267ee 100644 --- a/metadata-service/restli-api/build.gradle +++ b/metadata-service/restli-api/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'pegasus' +plugins { + id 'pegasus' +} dependencies { dataModel project(':metadata-models') @@ -17,4 +19,4 @@ dependencies { because("CVE-2023-1428, CVE-2023-32731") } } -} \ No newline at end of file +} diff --git a/metadata-service/restli-client/build.gradle b/metadata-service/restli-client/build.gradle index 7cad1981ad911..86336755dc095 100644 --- a/metadata-service/restli-client/build.gradle +++ b/metadata-service/restli-client/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'pegasus' -apply plugin: 'java-library' +plugins { + id 'pegasus' + id 'java-library' +} dependencies { api project(':metadata-service:restli-api') diff --git a/metadata-service/restli-servlet-impl/build.gradle b/metadata-service/restli-servlet-impl/build.gradle index de6fb6690e693..ec5b645ee233c 100644 --- a/metadata-service/restli-servlet-impl/build.gradle +++ b/metadata-service/restli-servlet-impl/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'java' -apply plugin: 'pegasus' +plugins { + id 'java' + id 'pegasus' +} sourceSets { integTest { diff --git a/metadata-service/schema-registry-api/build.gradle b/metadata-service/schema-registry-api/build.gradle index 077d7d4f2d6a4..c146d5202fef9 100644 --- a/metadata-service/schema-registry-api/build.gradle +++ b/metadata-service/schema-registry-api/build.gradle @@ -1,5 +1,8 @@ -apply plugin: 'java' -apply plugin: 'org.hidetake.swagger.generator' +plugins { + id 'org.hidetake.swagger.generator' + id 'java' +} + dependencies { // Dependencies for open api diff --git 
a/metadata-service/schema-registry-servlet/build.gradle b/metadata-service/schema-registry-servlet/build.gradle index 554ac696c94fd..7bab51d51a86c 100644 --- a/metadata-service/schema-registry-servlet/build.gradle +++ b/metadata-service/schema-registry-servlet/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-service:factories') diff --git a/metadata-service/services/build.gradle b/metadata-service/services/build.gradle index b6af3d330d185..c683b0c75f40a 100644 --- a/metadata-service/services/build.gradle +++ b/metadata-service/services/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'java' -apply plugin: 'org.hidetake.swagger.generator' +plugins { + id 'org.hidetake.swagger.generator' + id 'java' +} configurations { enhance diff --git a/metadata-service/servlet/build.gradle b/metadata-service/servlet/build.gradle index eb2cd9c2d3de7..f961bf6a9de7e 100644 --- a/metadata-service/servlet/build.gradle +++ b/metadata-service/servlet/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':metadata-io') diff --git a/metadata-utils/build.gradle b/metadata-utils/build.gradle index 7bc6aa2d43442..3d65675219624 100644 --- a/metadata-utils/build.gradle +++ b/metadata-utils/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java-library' +plugins { + id 'java-library' +} dependencies { api externalDependency.avro diff --git a/mock-entity-registry/build.gradle b/mock-entity-registry/build.gradle index 12d7e58eee0a1..8242d6451dd60 100644 --- a/mock-entity-registry/build.gradle +++ b/mock-entity-registry/build.gradle @@ -1,4 +1,6 @@ -apply plugin: 'java' +plugins { + id 'java' +} dependencies { implementation project(':entity-registry') diff --git a/smoke-test/build.gradle b/smoke-test/build.gradle index ee0ea3c7be384..1614a4b8527dc 100644 --- a/smoke-test/build.gradle +++ b/smoke-test/build.gradle @@ -11,10 +11,10 @@ node { } // Version of node to use. - version = '16.8.0' + version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.0' + yarnVersion = '1.22.1' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). 
if (project.hasProperty('nodeDistBaseUrl')) { @@ -30,11 +30,12 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } task yarnInstall(type: YarnTask) { println "Root directory: ${project.rootDir}"; + environment = ['NODE_OPTIONS': '--openssl-legacy-provider'] args = ['install', '--cwd', "${project.rootDir}/smoke-test/tests/cypress"] } \ No newline at end of file diff --git a/test-models/build.gradle b/test-models/build.gradle index c74f7249fa1d9..e8733f0525870 100644 --- a/test-models/build.gradle +++ b/test-models/build.gradle @@ -1,17 +1,9 @@ -apply plugin: 'pegasus' -apply plugin: 'java-library' - -tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(8) - } -} -tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(8) - } +plugins { + id 'pegasus' + id 'java-library' } + dependencies { implementation spec.product.pegasus.data implementation externalDependency.commonsIo diff --git a/vercel.json b/vercel.json index d5515e68b05bd..a1815cab8ae88 100644 --- a/vercel.json +++ b/vercel.json @@ -1,5 +1,5 @@ { - "buildCommand": "./gradlew :docs-website:build", + "buildCommand": "./gradlew -PuseSystemNode=true :docs-website:build", "github": { "silent": true, "autoJobCancelation": true From caef6771b828d8ee94f76801a9121f4e1a2e7561 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 15 Dec 2023 15:07:56 -0500 Subject: [PATCH 106/263] feat(ingest/redshift): drop repeated operations (#9440) --- metadata-ingestion/setup.py | 6 +- .../ingestion/source/redshift/report.py | 3 +- .../ingestion/source/redshift/usage.py | 68 +++++++++++++++++-- .../redshift-usage/test_redshift_usage.py | 54 ++++++++++++++- 4 files changed, 121 insertions(+), 10 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 5d15d7167b63e..1bc1bc5100b08 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -368,7 +368,11 @@ | {"psycopg2-binary", "pymysql>=1.0.2"}, "pulsar": {"requests"}, "redash": {"redash-toolbelt", "sql-metadata"} | sqllineage_lib, - "redshift": sql_common | redshift_common | usage_common | sqlglot_lib, + "redshift": sql_common + | redshift_common + | usage_common + | sqlglot_lib + | {"cachetools"}, "s3": {*s3_base, *data_lake_profiling}, "gcs": {*s3_base, *data_lake_profiling}, "sagemaker": aws_common, diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py index b845580f35939..333c851650fb3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/report.py @@ -29,7 +29,8 @@ class RedshiftReport(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowRep lineage_mem_size: Dict[str, str] = field(default_factory=TopKDict) tables_in_mem_size: Dict[str, str] = field(default_factory=TopKDict) views_in_mem_size: Dict[str, str] = field(default_factory=TopKDict) - num_operational_stats_skipped: int = 0 + num_operational_stats_filtered: int = 0 + num_repeated_operations_dropped: int = 0 num_usage_stat_skipped: int = 0 num_lineage_tables_dropped: int = 0 num_lineage_dropped_query_parser: int = 0 diff --git 
a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py
index c789e605b9c29..409027a8805a0 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py
@@ -4,6 +4,7 @@
 from datetime import datetime
 from typing import Callable, Dict, Iterable, List, Optional, Tuple, Union
 
+import cachetools
 import pydantic.error_wrappers
 import redshift_connector
 from pydantic.fields import Field
@@ -251,7 +252,7 @@ def _get_workunits_internal(
     ) -> Iterable[MetadataWorkUnit]:
         self.report.num_usage_workunits_emitted = 0
         self.report.num_usage_stat_skipped = 0
-        self.report.num_operational_stats_skipped = 0
+        self.report.num_operational_stats_filtered = 0
 
         if self.config.include_operational_stats:
             self.report.report_ingestion_stage_start(USAGE_EXTRACTION_OPERATIONAL_STATS)
@@ -304,8 +305,13 @@ def _gen_operation_aspect_workunits(
         )
 
         # Generate operation aspect work units from the access events
-        yield from self._gen_operation_aspect_workunits_from_access_events(
-            access_events_iterable, all_tables=all_tables
+        yield from (
+            mcpw.as_workunit()
+            for mcpw in self._drop_repeated_operations(
+                self._gen_operation_aspect_workunits_from_access_events(
+                    access_events_iterable, all_tables=all_tables
+                )
+            )
         )
 
     def _should_process_event(
@@ -366,11 +372,61 @@ def _gen_access_events_from_history_query(
                 yield access_event
             results = cursor.fetchmany()
 
+    def _drop_repeated_operations(
+        self, events: Iterable[MetadataChangeProposalWrapper]
+    ) -> Iterable[MetadataChangeProposalWrapper]:
+        """Drop repeated operations on the same entity.
+
+        ASSUMPTION: Events are ordered by lastUpdatedTimestamp, descending.
+
+        Operations are only dropped if they were within DROP_WINDOW_SEC seconds
+        of each other, and have the same operation type, user, and entity.
+
+        This is particularly useful when we see a string of insert operations
+        that are all really part of the same overall operation.
+        """
+
+        OPERATION_CACHE_MAXSIZE = 1000
+        DROP_WINDOW_SEC = 10
+
+        # All timestamps are in milliseconds.
+        timestamp_low_watermark = float("inf")  # the first (newest) event pulls this down to a real timestamp
+
+        def timer():
+            return -timestamp_low_watermark
+
+        # dict of entity urn -> (last event's actor, operation type)
+        # TODO: Remove the type ignore and use TTLCache[key_type, value_type] directly once that's supported in Python 3.9.
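+        # NOTE: TTLCache expects a clock that moves forward, but events arrive
+        # ordered by lastUpdatedTimestamp *descending*. The timer above returns
+        # the negated low watermark, so the "clock" advances as timestamps
+        # decrease, and a cached entry expires once the stream has moved more
+        # than DROP_WINDOW_SEC past it.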
+        last_events: Dict[str, Tuple[Optional[str], str]] = cachetools.TTLCache(  # type: ignore[assignment]
+            maxsize=OPERATION_CACHE_MAXSIZE, ttl=DROP_WINDOW_SEC * 1000, timer=timer
+        )
+
+        for event in events:
+            assert isinstance(event.aspect, OperationClass)
+
+            if timestamp_low_watermark == 0:
+                # Seed the watermark from the first event; epoch-millisecond
+                # timestamps are always positive, so 0 acts as "unset".
+                timestamp_low_watermark = event.aspect.lastUpdatedTimestamp
+            else:
+                timestamp_low_watermark = min(
+                    timestamp_low_watermark, event.aspect.lastUpdatedTimestamp
+                )
+
+            urn = event.entityUrn
+            assert urn
+            assert isinstance(event.aspect.operationType, str)
+            value: Tuple[Optional[str], str] = (
+                event.aspect.actor,
+                event.aspect.operationType,
+            )
+            if urn in last_events and last_events[urn] == value:
+                self.report.num_repeated_operations_dropped += 1
+                continue
+
+            last_events[urn] = value
+            yield event
+
     def _gen_operation_aspect_workunits_from_access_events(
         self,
         events_iterable: Iterable[RedshiftAccessEvent],
         all_tables: Dict[str, Dict[str, List[Union[RedshiftView, RedshiftTable]]]],
-    ) -> Iterable[MetadataWorkUnit]:
+    ) -> Iterable[MetadataChangeProposalWrapper]:
         self.report.num_operational_stats_workunits_emitted = 0
         for event in events_iterable:
             if not (
@@ -384,7 +450,7 @@ def _gen_operation_aspect_workunits_from_access_events(
                 continue
 
             if not self._should_process_event(event, all_tables=all_tables):
-                self.report.num_operational_stats_skipped += 1
+                self.report.num_operational_stats_filtered += 1
                 continue
 
             assert event.operation_type in ["insert", "delete"]
@@ -406,7 +472,7 @@ def _gen_operation_aspect_workunits_from_access_events(
             resource: str = f"{event.database}.{event.schema_}.{event.table}".lower()
             yield MetadataChangeProposalWrapper(
                 entityUrn=self.dataset_urn_builder(resource), aspect=operation_aspect
-            ).as_workunit()
+            )
             self.report.num_operational_stats_workunits_emitted += 1
 
     def _aggregate_access_events(
diff --git a/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py b/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py
index 74eec82b39ba3..a9eebb8d54154 100644
--- a/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py
+++ b/metadata-ingestion/tests/integration/redshift-usage/test_redshift_usage.py
@@ -2,11 +2,11 @@
 import pathlib
 from pathlib import Path
 from typing import Dict, List, Union
-from unittest.mock import Mock, patch
+from unittest.mock import MagicMock, Mock, patch
 
 from freezegun import freeze_time
 
-from datahub.emitter.mce_builder import make_dataset_urn
+from datahub.emitter.mce_builder import make_dataset_urn, make_user_urn
 from datahub.emitter.mcp import MetadataChangeProposalWrapper
 from datahub.ingestion.sink.file import write_metadata_file
 from datahub.ingestion.source.redshift.config import RedshiftConfig
@@ -20,6 +20,7 @@
     MetadataChangeEvent,
     MetadataChangeProposal,
 )
+from datahub.metadata.schema_classes import OperationClass, OperationTypeClass
 from tests.test_helpers import mce_helpers
 
 FROZEN_TIME = "2021-09-15 09:00:00"
@@ -243,3 +244,56 @@ def load_access_events(test_resources_dir: pathlib.Path) -> List[Dict]:
     with access_events_history_file.open() as access_events_json:
         access_events = json.loads(access_events_json.read())
     return access_events
+
+
+def test_duplicate_operations_dropped():
+    report = RedshiftReport()
+    usage_extractor = RedshiftUsageExtractor(
+        config=MagicMock(),
+        connection=MagicMock(),
+        report=report,
+        dataset_urn_builder=MagicMock(),
+        redundant_run_skip_handler=None,
+    )
+
+    user = make_user_urn("jdoe")
+    urnA = "urn:li:dataset:(urn:li:dataPlatform:redshift,db.schema.tableA,PROD)"
+    urnB = "urn:li:dataset:(urn:li:dataPlatform:redshift,db.schema.tableB,PROD)"
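+    # Three inserts by the same user, ordered by lastUpdatedTimestamp
+    # descending: tableA at 95s, tableB at 94s, then tableA again at 90s.
+    # The second tableA insert repeats the same (actor, operationType) pair
+    # within the 10-second drop window, so it should be dropped.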
+ + opA1 = MetadataChangeProposalWrapper( + entityUrn=urnA, + aspect=OperationClass( + timestampMillis=100 * 1000, + lastUpdatedTimestamp=95 * 1000, + actor=user, + operationType=OperationTypeClass.INSERT, + ), + ) + opB1 = MetadataChangeProposalWrapper( + entityUrn=urnB, + aspect=OperationClass( + timestampMillis=101 * 1000, + lastUpdatedTimestamp=94 * 1000, + actor=user, + operationType=OperationTypeClass.INSERT, + ), + ) + opA2 = MetadataChangeProposalWrapper( + entityUrn=urnA, + aspect=OperationClass( + timestampMillis=102 * 1000, + lastUpdatedTimestamp=90 * 1000, + actor=user, + operationType=OperationTypeClass.INSERT, + ), + ) + + dedups = list(usage_extractor._drop_repeated_operations([opA1, opB1, opA2])) + assert dedups == [ + opA1, + opB1, + ] From e58e2bf3be6cf43923ff400667406ee6dc95cd3a Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Mon, 18 Dec 2023 11:02:33 +0530 Subject: [PATCH 107/263] feat: Deprecation 'Note' changed to Markdown Renderable (#9396) Setting auto merge after test cases are passed --- .../EntityDropdown/UpdateDeprecationModal.tsx | 14 +++- .../components/styled/DeprecationPill.tsx | 82 +++++++++++++++++-- .../tests/cypress/cypress/support/commands.js | 2 +- 3 files changed, 86 insertions(+), 12 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx index 6ae893e12575f..25527497b33a8 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx @@ -1,7 +1,10 @@ import React from 'react'; -import { Button, DatePicker, Form, Input, message, Modal } from 'antd'; +import { Button, DatePicker, Form, message, Modal } from 'antd'; +import styled from 'styled-components'; import { useBatchUpdateDeprecationMutation } from '../../../../graphql/mutations.generated'; import { handleBatchError } from '../utils'; +import { Editor } from '../tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../constants'; type Props = { urns: string[]; @@ -9,6 +12,10 @@ type Props = { refetch?: () => void; }; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4.5]}; +`; + export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { const [batchUpdateDeprecation] = useBatchUpdateDeprecationMutation(); const [form] = Form.useForm(); @@ -64,10 +71,11 @@ export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { } + width='40%' >
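+                {/* The deprecation note below is edited with the markdown
+                    StyledEditor declared above, replacing the former plain-text Input. */}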
- - + + diff --git a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx index f60a74247ebcc..9ec2aab193aa0 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useState } from 'react'; import { InfoCircleOutlined } from '@ant-design/icons'; import { Divider, message, Modal, Popover, Tooltip, Typography } from 'antd'; import { blue } from '@ant-design/colors'; @@ -8,6 +8,8 @@ import { Deprecation } from '../../../../../types.generated'; import { getLocaleTimezone } from '../../../../shared/time/timeUtils'; import { ANTD_GRAY } from '../../constants'; import { useBatchUpdateDeprecationMutation } from '../../../../../graphql/mutations.generated'; +import { Editor } from '../../tabs/Documentation/components/editor/Editor'; +import StripMarkdownText, { removeMarkdown } from './StripMarkdownText'; const DeprecatedContainer = styled.div` height: 18px; @@ -38,11 +40,6 @@ const DeprecatedTitle = styled(Typography.Text)` font-weight: bold; `; -const DeprecatedSubTitle = styled(Typography.Text)` - display: block; - margin-bottom: 5px; -`; - const LastEvaluatedAtLabel = styled.div` padding: 0; margin: 0; @@ -70,15 +67,42 @@ const IconGroup = styled.div` } `; +const DescriptionContainer = styled.div` + position: relative; + display: flex; + flex-direction: column; + width: 100%; + height: 100%; + min-height: 22px; + margin-bottom: 14px; +`; +const StyledViewer = styled(Editor)` + padding-right: 8px; + display: block; + + .remirror-editor.ProseMirror { + padding: 0; + } +`; + +const ExpandedActions = styled.div` + height: 10px; +`; +const ReadLessText = styled(Typography.Link)` + margin-right: 4px; +`; type Props = { urn: string; deprecation: Deprecation; refetch?: () => void; showUndeprecate: boolean | null; }; +const ABBREVIATED_LIMIT = 80; export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: Props) => { const [batchUpdateDeprecationMutation] = useBatchUpdateDeprecationMutation(); + const [expanded, setExpanded] = useState(false); + const overLimit = deprecation?.note && removeMarkdown(deprecation?.note).length > 80; /** * Deprecation Decommission Timestamp */ @@ -131,14 +155,56 @@ export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: return ( {deprecation?.note !== '' && Deprecation note} {isDividerNeeded && } - {deprecation?.note !== '' && {deprecation.note}} + + {expanded || !overLimit ? 
( + <> + { + deprecation?.note && deprecation?.note !== '' && + <> + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + } + + ) : ( + <> + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {deprecation.note} + + + )} + {deprecation?.decommissionTime !== null && ( diff --git a/smoke-test/tests/cypress/cypress/support/commands.js b/smoke-test/tests/cypress/cypress/support/commands.js index 5e3664f944edf..ffbd050488181 100644 --- a/smoke-test/tests/cypress/cypress/support/commands.js +++ b/smoke-test/tests/cypress/cypress/support/commands.js @@ -171,7 +171,7 @@ Cypress.Commands.add("deleteFromDropdown", () => { Cypress.Commands.add("addViaFormModal", (text, modelHeader) => { cy.waitTextVisible(modelHeader); - cy.get(".ant-form-item-control-input-content > input[type='text']").first().type(text); + cy.get('.ProseMirror-focused').type(text); cy.get(".ant-modal-footer > button:nth-child(2)").click(); }); From b4fe451d932315546ebd98623f1572a66c41ad43 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Mon, 18 Dec 2023 12:38:30 +0530 Subject: [PATCH 108/263] feat : markdown support for group description (#9455) --- .../group/EditGroupDescriptionModal.tsx | 64 ++++++++ .../src/app/entity/group/GroupInfoSideBar.tsx | 145 ++++++++++++++++-- .../app/identity/group/CreateGroupModal.tsx | 106 +++++++------ .../cypress/e2e/settings/managing_groups.js | 6 +- 4 files changed, 261 insertions(+), 60 deletions(-) create mode 100644 datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx diff --git a/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx new file mode 100644 index 0000000000000..a898a73c254ef --- /dev/null +++ b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx @@ -0,0 +1,64 @@ +import React, { useState } from 'react'; +import { Button, Modal, Form } from 'antd'; +import styled from 'styled-components'; + +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../shared/constants'; + +type Props = { + onClose: () => void; + onSaveAboutMe: () => void; + setStagedDescription: (des: string) => void; + stagedDescription: string | undefined; +}; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4]}; +`; + +export default function EditGroupDescriptionModal({ + onClose, + onSaveAboutMe, + setStagedDescription, + stagedDescription, +}: Props) { + const [form] = Form.useForm(); + const [aboutText,setAboutText] = useState(stagedDescription) + + function updateDescription(description: string) { + setAboutText(aboutText) + setStagedDescription(description); + + } + + const saveDescription = () => { + onSaveAboutMe(); + onClose(); + }; + + return ( + + + + + } + > + + +
+ +
+
+ +
+ ); +} diff --git a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx index d9eaed2682ea1..07885a4d0f630 100644 --- a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx +++ b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx @@ -16,14 +16,15 @@ import { EmptyValue, SocialDetails, EditButton, - AboutSection, - AboutSectionText, GroupsSection, + AboutSection, } from '../shared/SidebarStyledComponents'; import GroupMembersSideBarSection from './GroupMembersSideBarSection'; import { useUserContext } from '../../context/useUserContext'; - -const { Paragraph } = Typography; +import StripMarkdownText, { removeMarkdown } from '../shared/components/styled/StripMarkdownText'; +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import EditGroupDescriptionModal from './EditGroupDescriptionModal'; +import { REDESIGN_COLORS } from '../shared/constants'; type SideBarData = { photoUrl: string | undefined; @@ -80,6 +81,61 @@ const GroupTitle = styled(Typography.Title)` } `; +const EditIcon = styled(EditOutlined)` + cursor: pointer; + color: ${REDESIGN_COLORS.BLUE}; +`; +const AddNewDescription = styled(Button)` + display: none; + margin: -4px; + width: 140px; +`; + +const StyledViewer = styled(Editor)` + padding-right: 8px; + display: block; + + .remirror-editor.ProseMirror { + padding: 0; + } +`; + +const DescriptionContainer = styled.div` + position: relative; + display: flex; + flex-direction: column; + width: 100%; + text-align:left; + font-weight: normal; + font + min-height: 22px; + + &:hover ${AddNewDescription} { + display: block; + } + & ins.diff { + background-color: #b7eb8f99; + text-decoration: none; + &:hover { + background-color: #b7eb8faa; + } + } + & del.diff { + background-color: #ffa39e99; + text-decoration: line-through; + &: hover { + background-color: #ffa39eaa; + } + } +`; + +const ExpandedActions = styled.div` + height: 10px; +`; +const ReadLessText = styled(Typography.Link)` + margin-right: 4px; +`; + /** * Responsible for reading & writing users. */ @@ -106,7 +162,17 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { const me = useUserContext(); const canEditGroup = me?.platformPrivileges?.manageIdentities; const [groupTitle, setGroupTitle] = useState(name); + const [expanded, setExpanded] = useState(false); + const [isUpdatingDescription, SetIsUpdatingDescription] = useState(false); + const [stagedDescription, setStagedDescription] = useState(aboutText); + const [updateName] = useUpdateNameMutation(); + const overLimit = removeMarkdown(aboutText || '').length > 80; + const ABBREVIATED_LIMIT = 80; + + useEffect(() => { + setStagedDescription(aboutText); + }, [aboutText]); useEffect(() => { setGroupTitle(groupTitle); @@ -136,12 +202,12 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { }; // About Text save - const onSaveAboutMe = (inputString) => { + const onSaveAboutMe = () => { updateCorpGroupPropertiesMutation({ variables: { urn: urn || '', input: { - description: inputString, + description: stagedDescription, }, }, }) @@ -201,16 +267,65 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { - {TITLES.about} - - - {aboutText || } - - + + {TITLES.about} + + SetIsUpdatingDescription(true)} data-testid="edit-icon" /> + + + + {(aboutText && expanded) || !overLimit ? 
( + <> + {/* Read only viewer for displaying group description */} + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + ) : ( + <> + {/* Display abbreviated description with option to read more */} + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {aboutText} + + + )} + + {/* Modal for updating group description */} + {isUpdatingDescription && ( + { + SetIsUpdatingDescription(false); + setStagedDescription(aboutText); + }} + onSaveAboutMe={onSaveAboutMe} + setStagedDescription={setStagedDescription} + stagedDescription={stagedDescription} + /> + )} diff --git a/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx b/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx index 214cb251767c9..4ba714ca23ae0 100644 --- a/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx +++ b/datahub-web-react/src/app/identity/group/CreateGroupModal.tsx @@ -1,16 +1,23 @@ -import React, { useState } from 'react'; +import React, { useRef, useState } from 'react'; import { message, Button, Input, Modal, Typography, Form, Collapse } from 'antd'; +import styled from 'styled-components'; import { useCreateGroupMutation } from '../../../graphql/group.generated'; import { useEnterKeyListener } from '../../shared/useEnterKeyListener'; import { validateCustomUrnId } from '../../shared/textUtil'; import analytics, { EventType } from '../../analytics'; import { CorpGroup, EntityType } from '../../../types.generated'; +import { Editor as MarkdownEditor } from '../../entity/shared/tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../../entity/shared/constants'; type Props = { onClose: () => void; onCreate: (group: CorpGroup) => void; }; +const StyledEditor = styled(MarkdownEditor)` + border: 1px solid ${ANTD_GRAY[4]}; +`; + export default function CreateGroupModal({ onClose, onCreate }: Props) { const [stagedName, setStagedName] = useState(''); const [stagedDescription, setStagedDescription] = useState(''); @@ -19,45 +26,54 @@ export default function CreateGroupModal({ onClose, onCreate }: Props) { const [createButtonEnabled, setCreateButtonEnabled] = useState(true); const [form] = Form.useForm(); + // Reference to the styled editor for handling focus + const styledEditorRef = useRef(null); + const onCreateGroup = () => { - createGroupMutation({ - variables: { - input: { - id: stagedId, - name: stagedName, - description: stagedDescription, - }, - }, - }) - .then(({ data, errors }) => { - if (!errors) { - analytics.event({ - type: EventType.CreateGroupEvent, - }); - message.success({ - content: `Created group!`, - duration: 3, - }); - // TODO: Get a full corp group back from create endpoint. 
- onCreate({ - urn: data?.createGroup || '', - type: EntityType.CorpGroup, + // Check if the Enter key was pressed inside the styled editor to prevent unintended form submission + const isEditorNewlineKeypress = + document.activeElement !== styledEditorRef.current && + !styledEditorRef.current?.contains(document.activeElement); + if (isEditorNewlineKeypress) { + createGroupMutation({ + variables: { + input: { + id: stagedId, name: stagedName, - info: { - description: stagedDescription, - }, - }); - } - }) - .catch((e) => { - message.destroy(); - message.error({ content: `Failed to create group!: \n ${e.message || ''}`, duration: 3 }); + description: stagedDescription, + }, + }, }) - .finally(() => { - setStagedName(''); - setStagedDescription(''); - }); - onClose(); + .then(({ data, errors }) => { + if (!errors) { + analytics.event({ + type: EventType.CreateGroupEvent, + }); + message.success({ + content: `Created group!`, + duration: 3, + }); + // TODO: Get a full corp group back from create endpoint. + onCreate({ + urn: data?.createGroup || '', + type: EntityType.CorpGroup, + name: stagedName, + info: { + description: stagedDescription, + }, + }); + } + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to create group!: \n ${e.message || ''}`, duration: 3 }); + }) + .finally(() => { + setStagedName(''); + setStagedDescription(''); + }); + onClose(); + } }; // Handle the Enter press @@ -65,8 +81,13 @@ export default function CreateGroupModal({ onClose, onCreate }: Props) { querySelectorToExecuteClick: '#createGroupButton', }); + function updateDescription(description: string) { + setStagedDescription(description); + } + return ( Description
}> An optional description for your new group. - - setStagedDescription(event.target.value)} - /> + + {/* Styled editor for the group description */} +
+ +
diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js index 70219a550cd8b..978a245c3d9e3 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/managing_groups.js @@ -72,8 +72,10 @@ describe("create and manage group", () => { cy.focused().clear().type(`Test group EDITED ${test_id}{enter}`); cy.waitTextVisible("Name Updated"); cy.contains(`Test group EDITED ${test_id}`).should("be.visible"); - cy.contains("Test group description").find('[aria-label="edit"]').click(); - cy.focused().type(" EDITED{enter}"); + cy.get('[data-testid="edit-icon"]').click(); + cy.waitTextVisible("Edit Description"); + cy.get("#description").should("be.visible").type(" EDITED"); + cy.get("#updateGroupButton").click(); cy.waitTextVisible("Changes saved."); cy.contains("Test group description EDITED").should("be.visible"); cy.clickOptionWithText("Add Owners"); From 9d386fbd6f9a0436b25daa2b4603d1fa0b8f44ee Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 18 Dec 2023 05:38:16 -0500 Subject: [PATCH 109/263] feat(ingest): enable CLL for dbt by default (#9466) --- .../ingestion/source/dbt/dbt_common.py | 7 +- .../ingestion/source/looker/looker_common.py | 2 +- .../source/looker/looker_lib_wrapper.py | 2 +- .../dbt_enabled_with_schemas_mces_golden.json | 248 ++++++++++++ .../dbt_test_column_meta_mapping_golden.json | 383 ++++++++++++++++++ ...th_complex_owner_patterns_mces_golden.json | 248 ++++++++++++ ...th_data_platform_instance_mces_golden.json | 248 ++++++++++++ ...h_non_incremental_lineage_mces_golden.json | 248 ++++++++++++ ..._target_platform_instance_mces_golden.json | 248 ++++++++++++ 9 files changed, 1630 insertions(+), 4 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py index af28be310587a..7bec07b40c4bd 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/dbt/dbt_common.py @@ -300,7 +300,7 @@ class DBTCommonConfig( description="When enabled, schemas will be inferred from the dbt node definition.", ) include_column_lineage: bool = Field( - default=False, + default=True, description="When enabled, column-level lineage will be extracted from the dbt node definition. Requires `infer_dbt_schemas` to be enabled. 
" "If you run into issues where the column name casing does not match up with properly, providing a datahub_api or using the rest sink will improve accuracy.", ) @@ -696,7 +696,10 @@ def get_column_type( @support_status(SupportStatus.CERTIFIED) @capability(SourceCapability.DELETION_DETECTION, "Enabled via stateful ingestion") @capability(SourceCapability.LINEAGE_COARSE, "Enabled by default") -@capability(SourceCapability.LINEAGE_FINE, "Enabled using `include_column_lineage`") +@capability( + SourceCapability.LINEAGE_FINE, + "Enabled by default, configure using `include_column_lineage`", +) class DBTSourceBase(StatefulIngestionSourceBase): def __init__(self, config: DBTCommonConfig, ctx: PipelineContext, platform: str): super().__init__(config, ctx) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py index e440750cba0d0..53533a8d27c9b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py @@ -1015,7 +1015,7 @@ def __init__( self.report = report self.source_config = source_config - @lru_cache() + @lru_cache(maxsize=200) def get_explore(self, model: str, explore: str) -> Optional[LookerExplore]: looker_explore = LookerExplore.from_api( model, diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py index 988caba1c0d74..8959868c27114 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_lib_wrapper.py @@ -114,7 +114,7 @@ def get_available_permissions(self) -> Set[str]: return permissions - @lru_cache(maxsize=2000) + @lru_cache(maxsize=1000) def get_user(self, id_: str, user_fields: str) -> Optional[User]: self.client_stats.user_calls += 1 try: diff --git a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json index e4f01ef7a6c53..4deb725ed2b44 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json @@ -247,6 +247,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + 
], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -428,6 +508,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -650,6 +765,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { 
+ "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -789,6 +1002,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json index 4d5b008b695f9..588470ef41631 100644 --- 
a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json @@ -201,6 +201,98 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer_snapshot,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.customer_snapshot,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),initial_full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -360,6 +452,52 @@ "dataset": 
"urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_details,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an-aliased-view-for-monthly-billing,PROD),email)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -574,6 +712,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.an_aliased_view_for_payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -741,6 +977,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.public.an_aliased_view_for_payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -1011,6 +1282,118 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),active)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),active)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),activebool)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),activebool)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),address_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),address_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),create_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),create_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),first_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),last_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_update)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),last_update)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),store_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer_snapshot,PROD),store_id)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json index 0bdd5e3c895c2..926e8b8c8ed84 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json @@ -211,6 +211,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -375,6 +455,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 
0.9 + } ] } }, @@ -597,6 +712,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -736,6 +949,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json index 5ab0b11e37771..3727603266f25 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json @@ -212,6 +212,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -376,6 +456,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -598,6 +713,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),customer_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),rental_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -737,6 +950,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,dbt-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json index 3725e590fee9e..ec879e6af766a 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json @@ -212,6 +212,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + 
"upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -376,6 +456,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -598,6 +713,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -737,6 +950,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json index a47abab6b40f7..e25c5e4faf6af 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json @@ -212,6 +212,86 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),first_name)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),last_name)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),full_name)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.customer,PROD),email)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),email)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),address)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),address)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.city,PROD),city)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),city)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": 
"FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),postal_code)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),postal_code)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.address,PROD),phone)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.customer_details,PROD),phone)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -376,6 +456,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-monthly-billing,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -598,6 +713,104 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),amount)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),customer_id)", + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),customer_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_date)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),payment_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),payment_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),rental_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),rental_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + 
"urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),rental_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_01,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_02,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_03,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_04,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_05,PROD),staff_id)", + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.public.payment_p2020_06,PROD),staff_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.an-aliased-view-for-payments,PROD),staff_id)" + ], + "confidenceScore": 0.9 + } ] } }, @@ -737,6 +950,41 @@ "dataset": "urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD)", "type": "TRANSFORMED" } + ], + "fineGrainedLineages": [ + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),payment_date)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),billing_month)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),customer_id)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),customer_id)" + ], + "confidenceScore": 0.9 + }, + { + "upstreamType": "FIELD_SET", + "upstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:postgres,ps-instance-1.pagila.dbt_postgres.an-aliased-view-for-payments,PROD),amount)" + ], + "downstreamType": "FIELD_SET", + "downstreams": [ + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:dbt,pagila.dbt_postgres.payments_by_customer_by_month,PROD),amount)" + ], + "confidenceScore": 0.9 + } ] } }, From 03590a194885b2fbbb5249aef909d761c3ffc12c Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Mon, 18 Dec 2023 19:54:31 +0100 Subject: [PATCH 110/263] fix(ingest/snowflake) - Fixing snowflake url with default region (#9443) --- metadata-ingestion/setup.py | 8 +- .../source/snowflake/snowflake_utils.py | 28 ++++- .../snowflake/snowflake_golden.json | 116 +++++++++--------- .../integration/sql_server/test_sql_server.py | 5 + .../tests/unit/test_snowflake_source.py | 27 ++++ 5 files changed, 120 insertions(+), 64 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 1bc1bc5100b08..cb13a40125c0d 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -354,7 +354,11 @@ "mlflow": {"mlflow-skinny>=2.3.0"}, "mode": {"requests", "tenacity>=8.0.1"} | sqllineage_lib, "mongodb": {"pymongo[srv]>=3.11", "packaging"}, - "mssql": sql_common | {"sqlalchemy-pytds>=0.3", "pyOpenSSL"}, + "mssql": 
sql_common + | { + "sqlalchemy-pytds>=0.3", + "pyOpenSSL", + }, "mssql-odbc": sql_common | {"pyodbc"}, "mysql": mysql, # mariadb should have same dependency as mysql @@ -559,7 +563,7 @@ "kafka-connect", "ldap", "mongodb", - "mssql", + "mssql" if sys.version_info >= (3, 8) else None, "mysql", "mariadb", "redash", diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py index 5a451bf197d34..af8d8824a4b17 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_utils.py @@ -9,8 +9,8 @@ from datahub.configuration.pattern_utils import is_schema_allowed from datahub.ingestion.source.snowflake.constants import ( GENERIC_PERMISSION_ERROR_KEY, - SNOWFLAKE_DEFAULT_CLOUD, SNOWFLAKE_REGION_CLOUD_REGION_MAPPING, + SnowflakeCloudProvider, SnowflakeObjectDomain, ) from datahub.ingestion.source.snowflake.snowflake_config import SnowflakeV2Config @@ -72,6 +72,15 @@ def report_error(self, key: str, reason: str) -> None: class SnowflakeCommonMixin: platform = "snowflake" + CLOUD_REGION_IDS_WITHOUT_CLOUD_SUFFIX = [ + "us-west-2", + "us-east-1", + "eu-west-1", + "eu-central-1", + "ap-southeast-1", + "ap-southeast-2", + ] + @staticmethod def create_snowsight_base_url( account_locator: str, @@ -79,12 +88,23 @@ def create_snowsight_base_url( cloud: str, privatelink: bool = False, ) -> Optional[str]: + if cloud: + url_cloud_provider_suffix = f".{cloud}" + + if cloud == SnowflakeCloudProvider.AWS: + # Some AWS regions do not have cloud suffix. See below the list: + # https://docs.snowflake.com/en/user-guide/admin-account-identifier#non-vps-account-locator-formats-by-cloud-platform-and-region + if ( + cloud_region_id + in SnowflakeCommonMixin.CLOUD_REGION_IDS_WITHOUT_CLOUD_SUFFIX + ): + url_cloud_provider_suffix = "" + else: + url_cloud_provider_suffix = f".{cloud}" if privatelink: url = f"https://app.{account_locator}.{cloud_region_id}.privatelink.snowflakecomputing.com/" - elif cloud == SNOWFLAKE_DEFAULT_CLOUD: - url = f"https://app.snowflake.com/{cloud_region_id}/{account_locator}/" else: - url = f"https://app.snowflake.com/{cloud_region_id}.{cloud}/{account_locator}/" + url = f"https://app.snowflake.com/{cloud_region_id}{url_cloud_provider_suffix}/{account_locator}/" return url @staticmethod diff --git a/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json b/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json index c7273fee5a2e5..ece54f00eeaa0 100644 --- a/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json +++ b/metadata-ingestion/tests/integration/snowflake/snowflake_golden.json @@ -11,20 +11,20 @@ "env": "PROD", "database": "test_db" }, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/", "name": "TEST_DB", "description": "Comment for TEST_DB", "created": { - "time": 1623110400000 + "time": 1623103200000 }, "lastModified": { - "time": 1623110400000 + "time": 1623103200000 } } }, "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", + "lastObserved": 1615443388097, + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -144,20 +144,20 @@ "database": "test_db", "schema": "test_schema" }, - "externalUrl": 
"https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/", "name": "TEST_SCHEMA", "description": "comment for TEST_DB.TEST_SCHEMA", "created": { - "time": 1623110400000 + "time": 1623103200000 }, "lastModified": { - "time": 1623110400000 + "time": 1623103200000 } } }, "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "snowflake-2022_06_07-17_00_00", + "lastObserved": 1615443388097, + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -489,22 +489,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_1/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_1/", "name": "TABLE_1", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_1", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -788,22 +788,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_2/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_2/", "name": "TABLE_2", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_2", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1087,22 +1087,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_3/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_3/", "name": "TABLE_3", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_3", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1386,22 +1386,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_4/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_4/", "name": "TABLE_4", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_4", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": 
"snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1685,22 +1685,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_5/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_5/", "name": "TABLE_5", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_5", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -1984,22 +1984,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_6/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_6/", "name": "TABLE_6", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_6", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -2283,22 +2283,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_7/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_7/", "name": "TABLE_7", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_7", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -2582,22 +2582,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_8/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_8/", "name": "TABLE_8", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_8", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -2881,22 +2881,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_9/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_9/", "name": "TABLE_9", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_9", "description": 
"Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -3180,22 +3180,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_10/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/table/TABLE_10/", "name": "TABLE_10", "qualifiedName": "TEST_DB.TEST_SCHEMA.TABLE_10", "description": "Comment for Table", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -3470,22 +3470,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_1/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_1/", "name": "VIEW_1", "qualifiedName": "TEST_DB.TEST_SCHEMA.VIEW_1", "description": "Comment for View", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, @@ -3805,22 +3805,22 @@ "aspect": { "json": { "customProperties": {}, - "externalUrl": "https://app.snowflake.com/ap-south-1/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_2/", + "externalUrl": "https://app.snowflake.com/ap-south-1.aws/abc12345/#/data/databases/TEST_DB/schemas/TEST_SCHEMA/view/VIEW_2/", "name": "VIEW_2", "qualifiedName": "TEST_DB.TEST_SCHEMA.VIEW_2", "description": "Comment for View", "created": { - "time": 1623090600000 + "time": 1623103200000 }, "lastModified": { - "time": 1623090600000 + "time": 1623103200000 }, "tags": [] } }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "snowflake-2023_08_04-09_52_28", + "runId": "snowflake-2023_12_18-10_16_09", "lastRunId": "no-run-id-provided" } }, diff --git a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py index f439a322c2677..5ed672d527264 100644 --- a/metadata-ingestion/tests/integration/sql_server/test_sql_server.py +++ b/metadata-ingestion/tests/integration/sql_server/test_sql_server.py @@ -1,5 +1,6 @@ import os import subprocess +import sys import time import pytest @@ -8,6 +9,10 @@ from tests.test_helpers.click_helpers import run_datahub_cmd from tests.test_helpers.docker_helpers import cleanup_image, wait_for_port +pytestmark = pytest.mark.skipif( + sys.version_info < (3, 8), reason="requires python 3.8 or higher" +) + @pytest.fixture(scope="module") def mssql_runner(docker_compose_runner, pytestconfig): diff --git a/metadata-ingestion/tests/unit/test_snowflake_source.py b/metadata-ingestion/tests/unit/test_snowflake_source.py index 536c91ace4f5e..69a7510692df1 100644 --- 
a/metadata-ingestion/tests/unit/test_snowflake_source.py +++ b/metadata-ingestion/tests/unit/test_snowflake_source.py @@ -24,6 +24,7 @@ from datahub.ingestion.source.snowflake.snowflake_usage_v2 import ( SnowflakeObjectAccessEntry, ) +from datahub.ingestion.source.snowflake.snowflake_utils import SnowflakeCommonMixin from datahub.ingestion.source.snowflake.snowflake_v2 import SnowflakeV2Source from tests.test_helpers import test_connection_helpers @@ -584,3 +585,29 @@ def test_email_filter_query_generation_with_case_insensitive_filter(): filter_query == "AND (rlike(user_name, '.*@example.com','c')) AND NOT (rlike(user_name, '.*@example2.com','c'))" ) + + +def test_create_snowsight_base_url_us_west(): + ( + cloud, + cloud_region_id, + ) = SnowflakeCommonMixin.get_cloud_region_from_snowflake_region_id("aws_us_west_2") + + result = SnowflakeCommonMixin.create_snowsight_base_url( + "account_locator", cloud_region_id, cloud, False + ) + assert result == "https://app.snowflake.com/us-west-2/account_locator/" + + +def test_create_snowsight_base_url_ap_northeast_1(): + ( + cloud, + cloud_region_id, + ) = SnowflakeCommonMixin.get_cloud_region_from_snowflake_region_id( + "aws_ap_northeast_1" + ) + + result = SnowflakeCommonMixin.create_snowsight_base_url( + "account_locator", cloud_region_id, cloud, False + ) + assert result == "https://app.snowflake.com/ap-northeast-1.aws/account_locator/" From 193d1464a628fc800e926f04fcd4bd1d6774d858 Mon Sep 17 00:00:00 2001 From: noggi Date: Mon, 18 Dec 2023 14:06:17 -0800 Subject: [PATCH 111/263] Fix downstream CI issue (#9479) --- docker/datahub-ingestion-base/Dockerfile | 2 +- docker/datahub-ingestion/Dockerfile | 2 +- docker/datahub-ingestion/build.gradle | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index e0f9fdc997071..81fec61ea5073 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -4,7 +4,7 @@ ARG BASE_IMAGE=base # Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine ARG GITHUB_REPO_URL=https://github.com -ARG DEBIAN_REPO_URL=http://deb.debian.org/debian +ARG DEBIAN_REPO_URL=https://deb.debian.org/debian ARG PIP_MIRROR_URL=null FROM golang:1-alpine3.18 AS dockerize-binary diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 9516c31a19e21..2898a363a0a18 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -3,7 +3,7 @@ ARG APP_ENV=full ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head ARG PIP_MIRROR_URL=null -ARG DEBIAN_REPO_URL=http://deb.debian.org/debian +ARG DEBIAN_REPO_URL=https://deb.debian.org/debian FROM $BASE_IMAGE:$DOCKER_VERSION as base USER 0 diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index 36444210f1938..0b08f189e6b45 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -33,7 +33,7 @@ docker { i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } - def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')] + def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", ''), BASE_IMAGE: "${docker_registry}/datahub-ingestion-base"] // Add build args if they are defined (needed for some CI or enterprise environments) if (project.hasProperty('pipMirrorUrl')) { From ecda3e618704c5eb335ad1a21c30f0c935581f64 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Mon, 18 Dec 2023 18:26:33 -0500 Subject: [PATCH 112/263] feat(ingest): pydantic v2 compatibility (#9434) --- .github/workflows/airflow-plugin.yml | 7 ++-- .../airflow-plugin/tox.ini | 9 +++++ metadata-ingestion/setup.py | 39 ++++++++++++++++--- .../api/entities/datacontract/assertion.py | 4 +- .../datacontract/assertion_operator.py | 16 ++++---- .../datacontract/data_quality_assertion.py | 11 +++--- .../api/entities/datacontract/datacontract.py | 23 +++++------ .../datacontract/freshness_assertion.py | 15 ++++--- .../entities/datacontract/schema_assertion.py | 14 ++++--- .../src/datahub/cli/check_cli.py | 13 ++++++- .../src/datahub/configuration/common.py | 16 +++++++- .../src/datahub/configuration/datetimes.py | 4 +- .../pydantic_migration_helpers.py | 29 ++++++++++++++ .../configuration/time_window_config.py | 16 ++++++-- .../configuration/validate_field_rename.py | 4 +- .../ingestion/glossary/datahub_classifier.py | 11 +++++- .../source/bigquery_v2/bigquery_config.py | 2 +- .../ingestion/source/delta_lake/config.py | 4 +- .../source/snowflake/snowflake_config.py | 2 +- .../ingestion/source_config/sql/snowflake.py | 2 +- .../src/datahub/utilities/urns/urn_iter.py | 2 +- .../integration/snowflake/test_snowflake.py | 16 ++++---- .../unit/{ => config}/test_allow_deny.py | 0 .../unit/{ => config}/test_config_clean.py | 0 .../tests/unit/config/test_config_model.py | 18 +++++++-- .../{ => config}/test_pydantic_validators.py | 13 +++++-- .../{ => config}/test_time_window_config.py | 0 27 files changed, 209 insertions(+), 81 deletions(-) rename metadata-ingestion/tests/unit/{ => config}/test_allow_deny.py (100%) rename metadata-ingestion/tests/unit/{ => config}/test_config_clean.py (100%) rename metadata-ingestion/tests/unit/{ => config}/test_pydantic_validators.py (92%) rename metadata-ingestion/tests/unit/{ => config}/test_time_window_config.py (100%) diff --git 
a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index cd1e159b7d53c..70816e5f093d1 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -32,6 +32,7 @@ jobs: strategy: matrix: include: + # Note: this should be kept in sync with tox.ini. - python-version: "3.8" extra_pip_requirements: "apache-airflow~=2.1.4" extra_pip_extras: plugin-v1 @@ -39,13 +40,13 @@ jobs: extra_pip_requirements: "apache-airflow~=2.2.4" extra_pip_extras: plugin-v1 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.4.0" + extra_pip_requirements: 'apache-airflow~=2.4.0 pluggy==1.0.0 "pendulum<3.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.6.0" + extra_pip_requirements: 'apache-airflow~=2.6.0 "pendulum<3.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow>=2.7.0" + extra_pip_requirements: "apache-airflow>=2.7.0 pydantic==2.4.2" extra_pip_extras: plugin-v2 fail-fast: false steps: diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 1010bd2933e45..27ae2ce65ba65 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -10,6 +10,7 @@ envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py31 use_develop = true extras = dev,integration-tests,plugin-v1 deps = + # This should be kept in sync with the Github Actions matrix. -e ../../metadata-ingestion/ # Airflow version airflow21: apache-airflow~=2.1.0 @@ -20,7 +21,15 @@ deps = # See https://github.com/datahub-project/datahub/pull/9365 airflow24: apache-airflow~=2.4.0,pluggy==1.0.0 airflow26: apache-airflow~=2.6.0 + # Respect the constraints file on pendulum. + # See https://github.com/apache/airflow/issues/36274 + airflow24,airflow26: pendulum>=2.0,<3.0 + # The Airflow 2.7 constraints file points at pydantic v2, so we match that here. + # https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt + # Note that Airflow is actually compatible with both pydantic v1 and v2, and the + # constraints file is overly restrictive. airflow27: apache-airflow~=2.7.0 + airflow27: pydantic==2.4.2 commands = pytest --cov-append {posargs} diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index cb13a40125c0d..13c9d3c99aaca 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -14,9 +14,10 @@ "mypy_extensions>=0.4.3", # Actual dependencies. "typing-inspect", + # pydantic 1.8.2 is incompatible with mypy 0.910. + # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910. # pydantic 1.10.3 is incompatible with typing-extensions 4.1.1 - https://github.com/pydantic/pydantic/issues/4885 - # pydantic 2 makes major, backwards-incompatible changes - https://github.com/pydantic/pydantic/issues/4887 - "pydantic>=1.5.1,!=1.10.3,<2", + "pydantic>=1.10.0,!=1.10.3", "mixpanel>=4.9.0", "sentry-sdk", } @@ -53,6 +54,18 @@ "ruamel.yaml", } +pydantic_no_v2 = { + # pydantic 2 makes major, backwards-incompatible changes - https://github.com/pydantic/pydantic/issues/4887 + # Tags sources that require the pydantic v2 API. + "pydantic<2", +} + +plugin_common = { + # While pydantic v2 support is experimental, require that all plugins + # continue to use v1. This will ensure that no ingestion recipes break. 
+ *pydantic_no_v2, +} + rest_common = {"requests", "requests_file"} kafka_common = { @@ -118,6 +131,7 @@ "sqlalchemy>=1.4.39, <2", # Required for SQL profiling. "great-expectations>=0.15.12, <=0.15.50", + *pydantic_no_v2, # because of great-expectations # scipy version restricted to reduce backtracking, used by great-expectations, "scipy>=1.7.2", # GE added handling for higher version of jinja2 @@ -229,6 +243,7 @@ iceberg_common = { # Iceberg Python SDK "pyiceberg", + *pydantic_no_v2, # because of pyiceberg "pyarrow>=9.0.0, <13.0.0", } @@ -477,9 +492,6 @@ "flake8-bugbear==23.3.12", "isort>=5.7.0", "mypy==1.0.0", - # pydantic 1.8.2 is incompatible with mypy 0.910. - # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910. - "pydantic>=1.10.0", *test_api_requirements, pytest_dep, "pytest-asyncio>=0.16.0", @@ -740,7 +752,22 @@ extras_require={ "base": list(framework_common), **{ - plugin: list(framework_common | dependencies) + plugin: list( + framework_common + | ( + plugin_common + if plugin + not in { + "airflow", + "datahub-rest", + "datahub-kafka", + "sync-file-emitter", + "sql-parser", + } + else set() + ) + | dependencies + ) for (plugin, dependencies) in plugins.items() }, "all": list( diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py index c45d4ddc92458..89ac528efe81a 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion.py @@ -1,7 +1,7 @@ from typing import Optional -from datahub.configuration import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel -class BaseAssertion(ConfigModel): +class BaseAssertion(v1_ConfigModel): description: Optional[str] = None diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py index a41b0f7aafd9f..dc0c97d1c74e5 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/assertion_operator.py @@ -2,7 +2,7 @@ from typing_extensions import Literal, Protocol -from datahub.configuration import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel from datahub.metadata.schema_classes import ( AssertionStdOperatorClass, AssertionStdParameterClass, @@ -58,7 +58,7 @@ def _generate_assertion_std_parameters( ) -class EqualToOperator(ConfigModel): +class EqualToOperator(v1_ConfigModel): type: Literal["equal_to"] value: Union[str, int, float] @@ -71,7 +71,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class BetweenOperator(ConfigModel): +class BetweenOperator(v1_ConfigModel): type: Literal["between"] min: Union[int, float] max: Union[int, float] @@ -87,7 +87,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: ) -class LessThanOperator(ConfigModel): +class LessThanOperator(v1_ConfigModel): type: Literal["less_than"] value: Union[int, float] @@ -100,7 +100,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class GreaterThanOperator(ConfigModel): +class GreaterThanOperator(v1_ConfigModel): type: Literal["greater_than"] value: Union[int, float] @@ -113,7 +113,7 @@ def 
generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class LessThanOrEqualToOperator(ConfigModel): +class LessThanOrEqualToOperator(v1_ConfigModel): type: Literal["less_than_or_equal_to"] value: Union[int, float] @@ -126,7 +126,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class GreaterThanOrEqualToOperator(ConfigModel): +class GreaterThanOrEqualToOperator(v1_ConfigModel): type: Literal["greater_than_or_equal_to"] value: Union[int, float] @@ -139,7 +139,7 @@ def generate_parameters(self) -> AssertionStdParametersClass: return _generate_assertion_std_parameters(value=self.value) -class NotNullOperator(ConfigModel): +class NotNullOperator(v1_ConfigModel): type: Literal["not_null"] operator: str = AssertionStdOperatorClass.NOT_NULL diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py index 6a3944ba36baf..975aa359bd203 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/data_quality_assertion.py @@ -1,12 +1,11 @@ from typing import List, Optional, Union -import pydantic from typing_extensions import Literal import datahub.emitter.mce_builder as builder from datahub.api.entities.datacontract.assertion import BaseAssertion from datahub.api.entities.datacontract.assertion_operator import Operators -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel, v1_Field from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.metadata.schema_classes import ( AssertionInfoClass, @@ -25,7 +24,7 @@ class IdConfigMixin(BaseAssertion): - id_raw: Optional[str] = pydantic.Field( + id_raw: Optional[str] = v1_Field( default=None, alias="id", description="The id of the assertion. 
If not provided, one will be generated using the type.", @@ -38,7 +37,7 @@ def generate_default_id(self) -> str: class CustomSQLAssertion(IdConfigMixin, BaseAssertion): type: Literal["custom_sql"] sql: str - operator: Operators = pydantic.Field(discriminator="type") + operator: Operators = v1_Field(discriminator="type") def generate_default_id(self) -> str: return f"{self.type}-{self.sql}-{self.operator.id()}" @@ -89,11 +88,11 @@ def generate_assertion_info(self, entity_urn: str) -> AssertionInfoClass: ) -class DataQualityAssertion(ConfigModel): +class DataQualityAssertion(v1_ConfigModel): __root__: Union[ CustomSQLAssertion, ColumnUniqueAssertion, - ] = pydantic.Field(discriminator="type") + ] = v1_Field(discriminator="type") @property def id(self) -> str: diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py b/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py index f3c6be55e5fea..e0ef85d5fd66c 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/datacontract.py @@ -1,7 +1,6 @@ import collections from typing import Iterable, List, Optional, Tuple -import pydantic from ruamel.yaml import YAML from typing_extensions import Literal @@ -11,7 +10,11 @@ ) from datahub.api.entities.datacontract.freshness_assertion import FreshnessAssertion from datahub.api.entities.datacontract.schema_assertion import SchemaAssertion -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import ( + v1_ConfigModel, + v1_Field, + v1_validator, +) from datahub.emitter.mce_builder import datahub_guid, make_assertion_urn from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.metadata.schema_classes import ( @@ -26,7 +29,7 @@ from datahub.utilities.urns.urn import guess_entity_type -class DataContract(ConfigModel): +class DataContract(v1_ConfigModel): """A yml representation of a Data Contract. This model is used as a simpler, Python-native representation of a DataHub data contract. @@ -36,29 +39,27 @@ class DataContract(ConfigModel): version: Literal[1] - id: Optional[str] = pydantic.Field( + id: Optional[str] = v1_Field( default=None, alias="urn", description="The data contract urn. 
If not provided, one will be generated.", ) - entity: str = pydantic.Field( + entity: str = v1_Field( description="The entity urn that the Data Contract is associated with" ) # TODO: add support for properties # properties: Optional[Dict[str, str]] = None - schema_field: Optional[SchemaAssertion] = pydantic.Field( - default=None, alias="schema" - ) + schema_field: Optional[SchemaAssertion] = v1_Field(default=None, alias="schema") - freshness: Optional[FreshnessAssertion] = pydantic.Field(default=None) + freshness: Optional[FreshnessAssertion] = v1_Field(default=None) # TODO: Add a validator to ensure that ids are unique - data_quality: Optional[List[DataQualityAssertion]] = pydantic.Field(default=None) + data_quality: Optional[List[DataQualityAssertion]] = v1_Field(default=None) _original_yaml_dict: Optional[dict] = None - @pydantic.validator("data_quality") + @v1_validator("data_quality") # type: ignore def validate_data_quality( cls, data_quality: Optional[List[DataQualityAssertion]] ) -> Optional[List[DataQualityAssertion]]: diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py index 71741d76b22fc..8694276688967 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/freshness_assertion.py @@ -3,11 +3,10 @@ from datetime import timedelta from typing import List, Union -import pydantic from typing_extensions import Literal from datahub.api.entities.datacontract.assertion import BaseAssertion -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel, v1_Field from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.metadata.schema_classes import ( AssertionInfoClass, @@ -25,10 +24,10 @@ class CronFreshnessAssertion(BaseAssertion): type: Literal["cron"] - cron: str = pydantic.Field( + cron: str = v1_Field( description="The cron expression to use. See https://crontab.guru/ for help." ) - timezone: str = pydantic.Field( + timezone: str = v1_Field( "UTC", description="The timezone to use for the cron schedule. 
Defaults to UTC.", ) @@ -58,10 +57,10 @@ def generate_freshness_assertion_schedule(self) -> FreshnessAssertionScheduleCla ) -class FreshnessAssertion(ConfigModel): - __root__: Union[ - CronFreshnessAssertion, FixedIntervalFreshnessAssertion - ] = pydantic.Field(discriminator="type") +class FreshnessAssertion(v1_ConfigModel): + __root__: Union[CronFreshnessAssertion, FixedIntervalFreshnessAssertion] = v1_Field( + discriminator="type" + ) @property def id(self): diff --git a/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py b/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py index b62f94e0592fc..39297d1a98d02 100644 --- a/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py +++ b/metadata-ingestion/src/datahub/api/entities/datacontract/schema_assertion.py @@ -3,11 +3,10 @@ import json from typing import List, Union -import pydantic from typing_extensions import Literal from datahub.api.entities.datacontract.assertion import BaseAssertion -from datahub.configuration.common import ConfigModel +from datahub.configuration.pydantic_migration_helpers import v1_ConfigModel, v1_Field from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.extractor.json_schema_util import get_schema_metadata from datahub.metadata.schema_classes import ( @@ -23,7 +22,7 @@ class JsonSchemaContract(BaseAssertion): type: Literal["json-schema"] - json_schema: dict = pydantic.Field(alias="json-schema") + json_schema: dict = v1_Field(alias="json-schema") _schema_metadata: SchemaMetadataClass @@ -37,7 +36,10 @@ def _init_private_attributes(self) -> None: ) -class FieldListSchemaContract(BaseAssertion, arbitrary_types_allowed=True): +class FieldListSchemaContract(BaseAssertion): + class Config: + arbitrary_types_allowed = True + type: Literal["field-list"] fields: List[SchemaFieldClass] @@ -56,8 +58,8 @@ def _init_private_attributes(self) -> None: ) -class SchemaAssertion(ConfigModel): - __root__: Union[JsonSchemaContract, FieldListSchemaContract] = pydantic.Field( +class SchemaAssertion(v1_ConfigModel): + __root__: Union[JsonSchemaContract, FieldListSchemaContract] = v1_Field( discriminator="type" ) diff --git a/metadata-ingestion/src/datahub/cli/check_cli.py b/metadata-ingestion/src/datahub/cli/check_cli.py index f7996900f7a7a..2732a72aea539 100644 --- a/metadata-ingestion/src/datahub/cli/check_cli.py +++ b/metadata-ingestion/src/datahub/cli/check_cli.py @@ -126,10 +126,21 @@ def metadata_diff( default=False, help="Include extra information for each plugin.", ) +@click.option( + "--source", + type=str, + default=None, +) @telemetry.with_telemetry() -def plugins(verbose: bool) -> None: +def plugins(source: Optional[str], verbose: bool) -> None: """List the enabled ingestion plugins.""" + if source: + # Quick helper for one-off checks with full stack traces. 
+        source_registry.get(source)
+        click.echo(f"Source {source} is enabled.")
+        return
+
     click.secho("Sources:", bold=True)
     click.echo(source_registry.summary(verbose=verbose, col_width=25))
     click.echo()
diff --git a/metadata-ingestion/src/datahub/configuration/common.py b/metadata-ingestion/src/datahub/configuration/common.py
index f225856ca43ce..0030332bcfd54 100644
--- a/metadata-ingestion/src/datahub/configuration/common.py
+++ b/metadata-ingestion/src/datahub/configuration/common.py
@@ -99,8 +99,20 @@ def _schema_extra(schema: Dict[str, Any], model: Type["ConfigModel"]) -> None:
 
     @classmethod
     def parse_obj_allow_extras(cls: Type[_ConfigSelf], obj: Any) -> _ConfigSelf:
-        with unittest.mock.patch.object(cls.Config, "extra", pydantic.Extra.allow):
-            return cls.parse_obj(obj)
+        if PYDANTIC_VERSION_2:
+            try:
+                with unittest.mock.patch.dict(
+                    cls.model_config,  # type: ignore
+                    {"extra": "allow"},
+                    clear=False,
+                ):
+                    cls.model_rebuild(force=True)  # type: ignore
+                    return cls.parse_obj(obj)
+            finally:
+                cls.model_rebuild(force=True)  # type: ignore
+        else:
+            with unittest.mock.patch.object(cls.Config, "extra", pydantic.Extra.allow):
+                return cls.parse_obj(obj)
 
 
 class PermissiveConfigModel(ConfigModel):
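The hunk above dispatches on the pydantic major version at runtime. A minimal sketch of the behavior it preserves on both pydantic 1.x and 2.x — ExampleConfig is a hypothetical model, not part of the patch:

    from datahub.configuration.common import ConfigModel

    class ExampleConfig(ConfigModel):
        name: str

    # ConfigModel forbids unknown keys, so a plain parse_obj would raise here.
    # parse_obj_allow_extras temporarily flips "extra" to "allow" -- via
    # cls.Config on pydantic v1, or via model_config + model_rebuild on v2.
    cfg = ExampleConfig.parse_obj_allow_extras({"name": "x", "unknown_key": 1})
    assert cfg.name == "x"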
+ """ + + class Config: + extra = v1_Extra.forbid + underscore_attrs_are_private = True + + __all__ = [ "PYDANTIC_VERSION_2", "PydanticDeprecatedSince20", "GenericModel", + "v1_ConfigModel", + "v1_Field", + "v1_root_validator", + "v1_validator", ] diff --git a/metadata-ingestion/src/datahub/configuration/time_window_config.py b/metadata-ingestion/src/datahub/configuration/time_window_config.py index 15de7470e4d82..f20ab85be0585 100644 --- a/metadata-ingestion/src/datahub/configuration/time_window_config.py +++ b/metadata-ingestion/src/datahub/configuration/time_window_config.py @@ -68,6 +68,12 @@ def default_start_time( assert abs(delta) >= get_bucket_duration_delta( values["bucket_duration"] ), "Relative start time should be in terms of configured bucket duration. e.g '-2 days' or '-2 hours'." + + # The end_time's default value is not yet populated, in which case + # we can just manually generate it here. + if "end_time" not in values: + values["end_time"] = datetime.now(tz=timezone.utc) + return get_time_bucket( values["end_time"] + delta, values["bucket_duration"] ) @@ -80,9 +86,13 @@ def default_start_time( @pydantic.validator("start_time", "end_time") def ensure_timestamps_in_utc(cls, v: datetime) -> datetime: - assert ( - v.tzinfo == timezone.utc - ), 'timezone is not UTC; try adding a "Z" to the value e.g. "2021-07-20T00:00:00Z"' + if v.tzinfo is None: + raise ValueError( + "Timestamps must be in UTC. Try adding a 'Z' to the value e.g. '2021-07-20T00:00:00Z'" + ) + + # If the timestamp is timezone-aware but not in UTC, convert it to UTC. + v = v.astimezone(timezone.utc) return v diff --git a/metadata-ingestion/src/datahub/configuration/validate_field_rename.py b/metadata-ingestion/src/datahub/configuration/validate_field_rename.py index bb01f2b787123..de2a16e9bf247 100644 --- a/metadata-ingestion/src/datahub/configuration/validate_field_rename.py +++ b/metadata-ingestion/src/datahub/configuration/validate_field_rename.py @@ -49,4 +49,6 @@ def _validate_field_rename(cls: Type, values: dict) -> dict: # validator with pre=True gets all the values that were passed in. # Given that a renamed field doesn't show up in the fields list, we can't use # the field-level validator, even with a different field name. 
diff --git a/metadata-ingestion/src/datahub/configuration/validate_field_rename.py b/metadata-ingestion/src/datahub/configuration/validate_field_rename.py
index bb01f2b787123..de2a16e9bf247 100644
--- a/metadata-ingestion/src/datahub/configuration/validate_field_rename.py
+++ b/metadata-ingestion/src/datahub/configuration/validate_field_rename.py
@@ -49,4 +49,6 @@ def _validate_field_rename(cls: Type, values: dict) -> dict:
     # validator with pre=True gets all the values that were passed in.
     # Given that a renamed field doesn't show up in the fields list, we can't use
     # the field-level validator, even with a different field name.
-    return pydantic.root_validator(pre=True, allow_reuse=True)(_validate_field_rename)
+    return pydantic.root_validator(pre=True, skip_on_failure=True, allow_reuse=True)(
+        _validate_field_rename
+    )
diff --git a/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py b/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py
index 1f2b7f5689ea3..42eb930c80f9d 100644
--- a/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py
+++ b/metadata-ingestion/src/datahub/ingestion/glossary/datahub_classifier.py
@@ -8,6 +8,7 @@
 from pydantic.fields import Field
 
 from datahub.configuration.common import ConfigModel
+from datahub.configuration.pydantic_migration_helpers import PYDANTIC_VERSION_2
 from datahub.ingestion.glossary.classifier import Classifier
 
 
@@ -50,7 +51,10 @@ class ValuesFactorConfig(ConfigModel):
 
 class PredictionFactorsAndWeights(ConfigModel):
     class Config:
-        allow_population_by_field_name = True
+        if PYDANTIC_VERSION_2:
+            populate_by_name = True
+        else:
+            allow_population_by_field_name = True
 
     Name: float = Field(alias="name")
     Description: float = Field(alias="description")
@@ -60,7 +64,10 @@ class Config:
 
 class InfoTypeConfig(ConfigModel):
     class Config:
-        allow_population_by_field_name = True
+        if PYDANTIC_VERSION_2:
+            populate_by_name = True
+        else:
+            allow_population_by_field_name = True
 
     Prediction_Factors_and_Weights: PredictionFactorsAndWeights = Field(
         description="Factors and their weights to consider when predicting info types",
diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py
index cbe68a454ea43..c13b08a6d9656 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py
@@ -284,7 +284,7 @@ def validate_bigquery_audit_metadata_datasets(
 
         return v
 
-    @root_validator(pre=False)
+    @root_validator(pre=False, skip_on_failure=True)
     def backward_compatibility_configs_set(cls, values: Dict) -> Dict:
         project_id = values.get("project_id")
         project_id_pattern = values.get("project_id_pattern")
diff --git a/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py b/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py
index f3616ca648a3e..81a54d1327d05 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/delta_lake/config.py
@@ -4,6 +4,7 @@
 import pydantic
 from cached_property import cached_property
 from pydantic import Field
+from typing_extensions import Literal
 
 from datahub.configuration.common import AllowDenyPattern
 from datahub.configuration.source_common import (
@@ -46,10 +47,9 @@ class DeltaLakeSourceConfig(PlatformInstanceConfigMixin, EnvConfigMixin):
         "'/' and URNs will be created using "
         "relative_path only.",
     )
-    platform: str = Field(
+    platform: Literal["delta-lake"] = Field(
         default="delta-lake",
         description="The platform that this source connects to",
-        const=True,
     )
     platform_instance: Optional[str] = Field(
         default=None,
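For context on the delta_lake hunk above: pydantic v2 dropped Field(const=True), and a Literal-typed field is the portable replacement that pins the value on both majors. A standalone sketch with a hypothetical model name:

    import pydantic
    from typing_extensions import Literal

    class PlatformConfig(pydantic.BaseModel):  # hypothetical stand-in for DeltaLakeSourceConfig
        platform: Literal["delta-lake"] = "delta-lake"

    PlatformConfig()                       # ok, default applies
    PlatformConfig(platform="delta-lake")  # ok, explicit match
    # PlatformConfig(platform="other")     # validation error on v1 and v2 alike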
diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py
index 032bdef178fdf..b896df1fa340e 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_config.py
@@ -176,7 +176,7 @@ def validate_include_column_lineage(cls, v, values):
         )
         return v
 
-    @root_validator(pre=False)
+    @root_validator(pre=False, skip_on_failure=True)
     def validate_unsupported_configs(cls, values: Dict) -> Dict:
         value = values.get("include_read_operational_stats")
         if value is not None and value:
diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py
index 46bd24c7e1f4c..e9db82ce75cd9 100644
--- a/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py
+++ b/metadata-ingestion/src/datahub/ingestion/source_config/sql/snowflake.py
@@ -107,7 +107,7 @@ def validate_account_id(cls, account_id: str) -> str:
         return account_id
 
     @pydantic.validator("authentication_type", always=True)
-    def authenticator_type_is_valid(cls, v, values, field):
+    def authenticator_type_is_valid(cls, v, values):
         if v not in VALID_AUTH_TYPES.keys():
             raise ValueError(
                 f"unsupported authenticator type '{v}' was provided,"
diff --git a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py
index 4f228494f416b..3389a6fb05ee8 100644
--- a/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py
+++ b/metadata-ingestion/src/datahub/utilities/urns/urn_iter.py
@@ -150,7 +150,7 @@ def modify_urn(urn: str) -> str:
     if guess_entity_type(urn) == "dataset":
         return _lowercase_dataset_urn(urn)
     elif guess_entity_type(urn) == "schemaField":
-        cur_urn = Urn.create_from_string(urn)
+        cur_urn = Urn.from_string(urn)
         cur_urn._entity_ids[0] = _lowercase_dataset_urn(cur_urn._entity_ids[0])
         return str(cur_urn)
     return urn
diff --git a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py
index 1b58696e4014c..39a62056a7e4a 100644
--- a/metadata-ingestion/tests/integration/snowflake/test_snowflake.py
+++ b/metadata-ingestion/tests/integration/snowflake/test_snowflake.py
@@ -87,18 +87,18 @@ def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
             confidence_level_threshold=0.58,
             info_types_config={
                 "Age": InfoTypeConfig(
-                    Prediction_Factors_and_Weights=PredictionFactorsAndWeights(
-                        Name=0, Values=1, Description=0, Datatype=0
+                    prediction_factors_and_weights=PredictionFactorsAndWeights(
+                        name=0, values=1, description=0, datatype=0
                     )
                 ),
                 "CloudRegion": InfoTypeConfig(
-                    Prediction_Factors_and_Weights=PredictionFactorsAndWeights(
-                        Name=0,
-                        Description=0,
-                        Datatype=0,
-                        Values=1,
+                    prediction_factors_and_weights=PredictionFactorsAndWeights(
+                        name=0,
+                        description=0,
+                        datatype=0,
+                        values=1,
                     ),
-                    Values=ValuesFactorConfig(
+                    values=ValuesFactorConfig(
                         prediction_type="regex",
                         regex=[
                             r"(af|ap|ca|eu|me|sa|us)-(central|north|(north(?:east|west))|south|south(?:east|west)|east|west)-\d+"
diff --git a/metadata-ingestion/tests/unit/test_allow_deny.py b/metadata-ingestion/tests/unit/config/test_allow_deny.py
similarity index 100%
rename from metadata-ingestion/tests/unit/test_allow_deny.py
rename to metadata-ingestion/tests/unit/config/test_allow_deny.py
diff --git a/metadata-ingestion/tests/unit/test_config_clean.py b/metadata-ingestion/tests/unit/config/test_config_clean.py
similarity index 100%
rename from metadata-ingestion/tests/unit/test_config_clean.py
rename to metadata-ingestion/tests/unit/config/test_config_clean.py
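The test_snowflake.py hunk above relies on the populate_by_name / allow_population_by_field_name switch in datahub_classifier.py: fields declared with aliases accept either spelling. A small sketch, assuming the classifier models behave as configured above:

    from datahub.ingestion.glossary.datahub_classifier import PredictionFactorsAndWeights

    # Fields are declared like `Name: float = Field(alias="name")`, so both
    # the field name and its alias construct the same model once population
    # by field name is enabled.
    w1 = PredictionFactorsAndWeights(Name=0, Description=0, Datatype=0, Values=1)
    w2 = PredictionFactorsAndWeights(name=0, description=0, datatype=0, values=1)
    assert w1 == w2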
diff --git a/metadata-ingestion/tests/unit/config/test_config_model.py b/metadata-ingestion/tests/unit/config/test_config_model.py
index ffac5c465f554..f53390a3deb18 100644
--- a/metadata-ingestion/tests/unit/config/test_config_model.py
+++ b/metadata-ingestion/tests/unit/config/test_config_model.py
@@ -3,8 +3,11 @@
 import pydantic
 import pytest
 
-from datahub.configuration.common import ConfigModel, redact_raw_config
-from datahub.ingestion.source.unity.config import UnityCatalogSourceConfig
+from datahub.configuration.common import (
+    AllowDenyPattern,
+    ConfigModel,
+    redact_raw_config,
+)
 
 
 def test_extras_not_allowed():
@@ -76,8 +79,15 @@ def test_config_redaction():
 
 
 def test_shared_defaults():
-    c1 = UnityCatalogSourceConfig(token="s", workspace_url="https://workspace_url")
-    c2 = UnityCatalogSourceConfig(token="s", workspace_url="https://workspace_url")
+    class SourceConfig(ConfigModel):
+        token: str
+        workspace_url: str
+        catalog_pattern: AllowDenyPattern = pydantic.Field(
+            default=AllowDenyPattern.allow_all(),
+        )
+
+    c1 = SourceConfig(token="s", workspace_url="https://workspace_url")
+    c2 = SourceConfig(token="s", workspace_url="https://workspace_url")
 
     assert c2.catalog_pattern.allow == [".*"]
     c1.catalog_pattern.allow += ["foo"]
diff --git a/metadata-ingestion/tests/unit/test_pydantic_validators.py b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py
similarity index 92%
rename from metadata-ingestion/tests/unit/test_pydantic_validators.py
rename to metadata-ingestion/tests/unit/config/test_pydantic_validators.py
index 3e9ec6cbaf357..399245736805c 100644
--- a/metadata-ingestion/tests/unit/test_pydantic_validators.py
+++ b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py
@@ -7,7 +7,10 @@
 from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated
 from datahub.configuration.validate_field_removal import pydantic_removed_field
 from datahub.configuration.validate_field_rename import pydantic_renamed_field
-from datahub.utilities.global_warning_util import get_global_warnings
+from datahub.utilities.global_warning_util import (
+    clear_global_warnings,
+    get_global_warnings,
+)
 
 
 def test_field_rename():
@@ -76,9 +79,11 @@ class TestModel(ConfigModel):
 
 
 def test_field_deprecated():
+    clear_global_warnings()
+
     class TestModel(ConfigModel):
-        d1: Optional[str]
-        d2: Optional[str]
+        d1: Optional[str] = None
+        d2: Optional[str] = None
         b: str
 
         _validate_deprecated_d1 = pydantic_field_deprecated("d1")
@@ -93,3 +98,5 @@ class TestModel(ConfigModel):
     assert v.d2 == "deprecated"
     assert any(["d1 is deprecated" in warning for warning in get_global_warnings()])
     assert any(["d2 is deprecated" in warning for warning in get_global_warnings()])
+
+    clear_global_warnings()
diff --git a/metadata-ingestion/tests/unit/test_time_window_config.py b/metadata-ingestion/tests/unit/config/test_time_window_config.py
similarity index 100%
rename from metadata-ingestion/tests/unit/test_time_window_config.py
rename to metadata-ingestion/tests/unit/config/test_time_window_config.py

From 7b067822bd8602c00fe5a0efdd15a6bb7a33bad6 Mon Sep 17 00:00:00 2001
From: John Joyce
Date: Mon, 18 Dec 2023 18:35:02 -0800
Subject: [PATCH 113/263] feat(gms): Add support for platform-based browse (#9376)

Co-authored-by: John Joyce
---
 .../graphql/featureflags/FeatureFlags.java    |  1 +
 .../resolvers/chart/BrowseV2Resolver.java     | 20 +++-
 .../resolvers/config/AppConfigResolver.java   |  1 +
 .../graphql/resolvers/search/SearchUtils.java | 14 +++
 .../src/main/resources/app.graphql            |  5 +
 .../src/main/resources/search.graphql         |  9 +-
 .../browse/BrowseV2ResolverTest.java          |  2 +-
 datahub-web-react/src/appConfigContext.tsx    |  1 +
 datahub-web-react/src/graphql/app.graphql     |  1 +
 .../metadata/client/JavaEntityClient.java     | 24 +++++
 .../elasticsearch/ElasticSearchService.java   | 12 +++
 .../elasticsearch/query/ESBrowseDAO.java      | 91 +++++++++++++++++++
 .../src/main/resources/application.yml        |  1 +
 .../linkedin/entity/client/EntityClient.java  | 22 +++++
 .../entity/client/RestliEntityClient.java     | 14 +++
 .../metadata/search/EntitySearchService.java  | 19 ++++
 16 files changed, 231 insertions(+), 6 deletions(-)
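For orientation before the diffs: this commit lets browseV2 fan out across several entity types (or all of them) instead of exactly one. A hedged sketch of how a client might exercise the updated endpoint — the URL and token are placeholders and the selection set is abbreviated, so adjust to the deployed schema:

    import requests

    query = """
    query browse($input: BrowseV2Input!) {
      browseV2(input: $input) {
        groups { name count }
        metadata { totalNumEntities path }
      }
    }
    """
    # Omitting both `type` and `types` now browses the default entity set.
    variables = {"input": {"path": [], "query": "*", "start": 0, "count": 10}}
    resp = requests.post(
        "http://localhost:8080/api/graphql",  # placeholder endpoint
        headers={"Authorization": "Bearer <token>"},  # placeholder token
        json={"query": query, "variables": variables},
    )
    print(resp.json())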
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java
index 07bd1fba5d8a8..e74ed09849763 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java
@@ -12,6 +12,7 @@ public class FeatureFlags {
   private boolean readOnlyModeEnabled = false;
   private boolean showSearchFiltersV2 = false;
   private boolean showBrowseV2 = false;
+  private boolean platformBrowseV2 = false;
   private PreProcessHooks preProcessHooks;
   private boolean showAcrylInfo = false;
   private boolean showAccessManagement = false;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java
index 292d6108b7a04..da4a3a76dd7e0 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java
@@ -2,14 +2,16 @@
 import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER;
 import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
 
+import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2;
 import com.linkedin.datahub.graphql.generated.BrowseResultMetadata;
 import com.linkedin.datahub.graphql.generated.BrowseResultsV2;
 import com.linkedin.datahub.graphql.generated.BrowseV2Input;
+import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper;
 import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
 import com.linkedin.datahub.graphql.resolvers.search.SearchUtils;
@@ -43,8 +45,8 @@ public class BrowseV2Resolver implements DataFetcher<CompletableFuture<BrowseResultsV2>> {
   public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environment) {
     final QueryContext context = environment.getContext();
     final BrowseV2Input input = bindArgument(environment.getArgument("input"), BrowseV2Input.class);
-    final String entityName = EntityTypeMapper.getName(input.getType());
+    final List<String> entityNames = getEntityNames(input);
     final int start = input.getStart() != null ? input.getStart() : DEFAULT_START;
     final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
     final String query = input.getQuery() != null ? input.getQuery() : "*";
@@ -70,7 +72,7 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen
           BrowseResultV2 browseResults =
               _entityClient.browseV2(
-                  entityName,
+                  entityNames,
                   pathStr,
                   maybeResolvedView != null
                       ? SearchUtils.combineFilters(
@@ -87,6 +89,18 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen
         });
   }
 
+  public static List<String> getEntityNames(BrowseV2Input input) {
+    List<EntityType> entityTypes;
+    if (input.getTypes() != null && input.getTypes().size() > 0) {
+      entityTypes = input.getTypes();
+    } else if (input.getType() != null) {
+      entityTypes = ImmutableList.of(input.getType());
+    } else {
+      entityTypes = BROWSE_ENTITY_TYPES;
+    }
+    return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());
+  }
+
   private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) {
     BrowseResultsV2 results = new BrowseResultsV2();
     results.setTotal(browseResults.getNumGroups());
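The precedence getEntityNames applies above, restated as a tiny Python paraphrase (not shipped code; the default list is abbreviated):

    DEFAULT_BROWSE_TYPES = ["DATASET", "DASHBOARD", "CHART"]  # abbreviated

    def get_entity_names(types, legacy_type):
        # Precedence: explicit `types` list > legacy single `type` > default set.
        if types:
            return list(types)
        if legacy_type is not None:
            return [legacy_type]
        return DEFAULT_BROWSE_TYPES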
input.getQuery() : "*"; @@ -70,7 +72,7 @@ public CompletableFuture get(DataFetchingEnvironment environmen BrowseResultV2 browseResults = _entityClient.browseV2( - entityName, + entityNames, pathStr, maybeResolvedView != null ? SearchUtils.combineFilters( @@ -87,6 +89,18 @@ public CompletableFuture get(DataFetchingEnvironment environmen }); } + public static List getEntityNames(BrowseV2Input input) { + List entityTypes; + if (input.getTypes() != null && input.getTypes().size() > 0) { + entityTypes = input.getTypes(); + } else if (input.getType() != null) { + entityTypes = ImmutableList.of(input.getType()); + } else { + entityTypes = BROWSE_ENTITY_TYPES; + } + return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + } + private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { BrowseResultsV2 results = new BrowseResultsV2(); results.setTotal(browseResults.getNumGroups()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index 34f7f133f6fb9..81b52991cde90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -175,6 +175,7 @@ public CompletableFuture get(final DataFetchingEnvironment environmen .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) .setShowAccessManagement(_featureFlags.isShowAccessManagement()) .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) + .setPlatformBrowseV2(_featureFlags.isPlatformBrowseV2()) .build(); appConfig.setFeatureFlags(featureFlagsConfig); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index d04cb57e1a860..444ab4bcc3c3c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -92,6 +92,20 @@ private SearchUtils() {} EntityType.NOTEBOOK, EntityType.DATA_PRODUCT); + /** Entities that are part of browse by default */ + public static final List BROWSE_ENTITY_TYPES = + ImmutableList.of( + EntityType.DATASET, + EntityType.DASHBOARD, + EntityType.CHART, + EntityType.CONTAINER, + EntityType.MLMODEL, + EntityType.MLMODEL_GROUP, + EntityType.MLFEATURE_TABLE, + EntityType.DATA_FLOW, + EntityType.DATA_JOB, + EntityType.NOTEBOOK); + /** A prioritized list of source filter types used to generate quick filters */ public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of( diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 075a3b0fac43b..52451e195ee84 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -437,6 +437,11 @@ type FeatureFlagsConfig { """ showBrowseV2: Boolean! + """ + Whether browse v2 is platform mode, which means that platforms are displayed instead of entity types at the root. + """ + platformBrowseV2: Boolean! + """ Whether we should show CTAs in the UI related to moving to Managed DataHub by Acryl. 
""" diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index e0cde5a2db9f9..8f2377edb546e 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -1176,9 +1176,14 @@ Input required for browse queries """ input BrowseV2Input { """ - The browse entity type + The browse entity type - deprecated use types instead """ - type: EntityType! + type: EntityType + + """ + The browse entity type - deprecated use types instead. If not provided, all types will be used. + """ + types: [EntityType!] """ The browse path V2 - a list with each entry being part of the browse path V2 diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index bffc2b31af2b9..433772d7e2cfe 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -249,7 +249,7 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.browseV2( - Mockito.eq(entityName), + Mockito.eq(ImmutableList.of(entityName)), Mockito.eq(path), Mockito.eq(filter), Mockito.eq(query), diff --git a/datahub-web-react/src/appConfigContext.tsx b/datahub-web-react/src/appConfigContext.tsx index 4087ad453687c..8c1089b868e5a 100644 --- a/datahub-web-react/src/appConfigContext.tsx +++ b/datahub-web-react/src/appConfigContext.tsx @@ -50,6 +50,7 @@ export const DEFAULT_APP_CONFIG = { showAcrylInfo: false, showAccessManagement: false, nestedDomainsEnabled: true, + platformBrowseV2: false, }, }; diff --git a/datahub-web-react/src/graphql/app.graphql b/datahub-web-react/src/graphql/app.graphql index 4e9bbb11d8c5a..fe28340349147 100644 --- a/datahub-web-react/src/graphql/app.graphql +++ b/datahub-web-react/src/graphql/app.graphql @@ -65,6 +65,7 @@ query appConfig { showAcrylInfo showAccessManagement nestedDomainsEnabled + platformBrowseV2 } } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 53b974b560e2a..e7ec4d313b5f5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -235,6 +235,30 @@ public BrowseResultV2 browseV2( return _entitySearchService.browseV2(entityName, path, filter, input, start, count); } + /** + * Gets browse V2 snapshot of a given path + * + * @param entityNames entities being browsed + * @param path path being browsed + * @param filter browse filter + * @param input search query + * @param start start offset of first group + * @param count max number of results requested + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResultV2 browseV2( + @Nonnull List entityNames, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) { + // TODO: cache browseV2 results + return _entitySearchService.browseV2(entityNames, path, filter, input, start, count); + } + @SneakyThrows @Deprecated public void update(@Nonnull final Entity entity, @Nonnull final 
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java
index f40da59a149fa..fd7491fe32ea3 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java
@@ -210,6 +210,18 @@ public BrowseResultV2 browseV2(
     return esBrowseDAO.browseV2(entityName, path, filter, input, start, count);
   }
 
+  @Nonnull
+  @Override
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entityNames,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count) {
+    return esBrowseDAO.browseV2(entityNames, path, filter, input, start, count);
+  }
+
   @Nonnull
   @Override
   public List<String> getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) {
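For readers less familiar with the Elasticsearch DSL, the request assembled by the new ESBrowseDAO methods below boils down to roughly this JSON body — an illustrative approximation, with field names and values standing in for the exact mapping:

    browse_across_entities_query = {
        "size": 0,  # groups come from the aggregation, not from hits
        "query": {
            "bool": {
                "must": [{"query_string": {"query": "*"}}],  # the user's search input
                "filter": [
                    {"match": {"browsePathV2": "␟prod"}},          # only when a path is given
                    {"range": {"browsePathV2.depth": {"gt": 1}}},  # strictly below this level
                    # ...plus any caller-supplied filter, rewritten per entity index
                ],
            }
        },
    }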
"*" : input; + groupsResponse = + client.search( + constructGroupsSearchRequestBrowseAcrossEntities( + entities, path, filter, finalInput), + RequestOptions.DEFAULT); + } + + final BrowseGroupsResultV2 browseGroupsResult = + extractGroupsResponseV2(groupsResponse, path, start, count); + final int numGroups = browseGroupsResult.getTotalGroups(); + + return new BrowseResultV2() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) + .setGroups(new BrowseResultGroupV2Array(browseGroupsResult.getGroups())) + .setNumGroups(numGroups) + .setFrom(start) + .setPageSize(count); + } catch (Exception e) { + log.error("Browse Across Entities query failed: " + e.getMessage()); + throw new ESQueryException("Browse Across Entities query failed: ", e); + } + } + @Nonnull private SearchRequest constructGroupsSearchRequestV2( @Nonnull String entityName, @@ -448,6 +486,33 @@ private SearchRequest constructGroupsSearchRequestV2( return searchRequest; } + @Nonnull + private SearchRequest constructGroupsSearchRequestBrowseAcrossEntities( + @Nonnull List entities, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { + + List entitySpecs = + entities.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); + + String[] indexArray = + entities.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new); + + final SearchRequest searchRequest = new SearchRequest(indexArray); + final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(0); + searchSourceBuilder.query( + buildQueryStringBrowseAcrossEntities( + entitySpecs, + path, + SearchUtil.transformFilterForEntities(filter, indexConvention), + input)); + searchSourceBuilder.aggregation(buildAggregationsV2(path)); + searchRequest.source(searchSourceBuilder); + return searchRequest; + } + /** * Extracts the name of group from path. * @@ -494,6 +559,32 @@ private QueryBuilder buildQueryStringV2( return queryBuilder; } + @Nonnull + private QueryBuilder buildQueryStringBrowseAcrossEntities( + @Nonnull List entitySpecs, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { + final int browseDepthVal = getPathDepthV2(path); + + final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); + + QueryBuilder query = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getQuery(input, false); + queryBuilder.must(query); + + if (!path.isEmpty()) { + queryBuilder.filter(QueryBuilders.matchQuery(BROWSE_PATH_V2, path)); + } + + queryBuilder.filter(QueryBuilders.rangeQuery(BROWSE_PATH_V2_DEPTH).gt(browseDepthVal)); + + queryBuilder.filter(SearchRequestHandler.getFilterQuery(filter)); + + return queryBuilder; + } + @Nonnull private AggregationBuilder buildAggregationsV2(@Nonnull String path) { final String currentLevel = ESUtils.escapeReservedCharacters(path) + "␟.*"; diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index a52b705cb8da6..0ea6b8712953e 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -317,6 +317,7 @@ featureFlags: showAccessManagement: ${SHOW_ACCESS_MANAGEMENT:false} #Whether we should show AccessManagement tab in the datahub UI. 
diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml
index a52b705cb8da6..0ea6b8712953e 100644
--- a/metadata-service/configuration/src/main/resources/application.yml
+++ b/metadata-service/configuration/src/main/resources/application.yml
@@ -317,6 +317,7 @@ featureFlags:
   showAccessManagement: ${SHOW_ACCESS_MANAGEMENT:false} #Whether we should show AccessManagement tab in the datahub UI.
   showSearchFiltersV2: ${SHOW_SEARCH_FILTERS_V2:true} # Enables showing the search filters V2 experience.
   showBrowseV2: ${SHOW_BROWSE_V2:true} # Enables showing the browse v2 sidebar experience.
+  platformBrowseV2: ${PLATFORM_BROWSE_V2:false} # Enables the platform browse experience, instead of the entity-oriented browse default.
   preProcessHooks:
     uiEnabled: ${PRE_PROCESS_HOOKS_UI_ENABLED:true} # Circumvents Kafka for processing index updates for UI changes sourced from GraphQL to avoid processing delays
   showAcrylInfo: ${SHOW_ACRYL_INFO:false} # Show different CTAs within DataHub around moving to Managed DataHub. Set to true for the demo site.
diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java
index 7bc50a8f3dc7e..598c252b4f766 100644
--- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java
+++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java
@@ -153,6 +153,28 @@ public BrowseResultV2 browseV2(
       @Nonnull Authentication authentication)
       throws RemoteInvocationException;
 
+  /**
+   * Gets browse snapshot of a given path
+   *
+   * @param entityNames entities being browsed
+   * @param path path being browsed
+   * @param filter browse filter
+   * @param input search query
+   * @param start start offset of first group
+   * @param count max number of results requested
+   * @throws RemoteInvocationException
+   */
+  @Nonnull
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entityNames,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException;
+
   @Deprecated
   public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication)
       throws RemoteInvocationException;
diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java
index c854cb9dd279e..d68c472ea9170 100644
--- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java
+++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java
@@ -381,6 +381,20 @@ public BrowseResultV2 browseV2(
     throw new NotImplementedException("BrowseV2 is not implemented in Restli yet");
   }
 
+  @Nonnull
+  @Override
+  public BrowseResultV2 browseV2(
+      @Nonnull List<String> entityNames,
+      @Nonnull String path,
+      @Nullable Filter filter,
+      @Nonnull String input,
+      int start,
+      int count,
+      @Nonnull Authentication authentication)
+      throws RemoteInvocationException {
+    throw new NotImplementedException("BrowseV2 is not implemented in Restli yet");
+  }
+
   public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
     EntitiesDoIngestRequestBuilder requestBuilder =
int start, int count); + /** + * Gets browse snapshot of a given path + * + * @param entityNames set of entities being browsed + * @param path path being browsed + * @param filter browse filter + * @param input search query + * @param start start offset of first group + * @param count max number of results requested + */ + @Nonnull + public BrowseResultV2 browseV2( + @Nonnull List entityNames, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count); + /** * Gets a list of paths for a given urn. * From 1124ccc4ee02e60980af19d525d5203dd6719a1d Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Tue, 19 Dec 2023 17:29:37 +0530 Subject: [PATCH 114/263] fix(ui/users): searching for users on Users page shows incorrect roles (#9474) --- datahub-web-react/src/app/identity/user/UserList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index dce3aa2c68a8d..8e2bc21f0693f 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -77,7 +77,7 @@ export const UserList = () => { query: (query?.length && query) || undefined, }, }, - fetchPolicy: (query?.length || 0) > 0 ? 'no-cache' : 'cache-first', + fetchPolicy: 'no-cache', }); const totalUsers = usersData?.listUsers?.total || 0; From 94a1603676b6a0fb9e2129b416caf39b100f6d0f Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Tue, 19 Dec 2023 16:30:21 +0100 Subject: [PATCH 115/263] fix(ingest/redshift: Fixing operation query to not return duplicate operations (#9481) --- .../ingestion/source/redshift/usage.py | 26 ++++++++++++------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py index 409027a8805a0..e40406b994c9b 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/usage.py @@ -85,15 +85,18 @@ sq.endtime AS endtime, 'insert' AS operation_type FROM - stl_insert si + (select userid, query, sum(rows) as rows, tbl + from stl_insert si + where si.rows > 0 + AND si.starttime >= '{start_time}' + AND si.starttime < '{end_time}' + group by userid, query, tbl + ) as si JOIN svv_table_info sti ON si.tbl = sti.table_id JOIN stl_query sq ON si.query = sq.query JOIN svl_user_info sui ON sq.userid = sui.usesysid WHERE - si.starttime >= '{start_time}' - AND si.starttime < '{end_time}' - AND si.rows > 0 - AND sq.aborted = 0) + sq.aborted = 0) UNION (SELECT DISTINCT sd.userid AS userid, @@ -109,15 +112,18 @@ sq.endtime AS endtime, 'delete' AS operation_type FROM - stl_delete sd + (select userid, query, sum(rows) as rows, tbl + from stl_delete sd + where sd.rows > 0 + AND sd.starttime >= '{start_time}' + AND sd.starttime < '{end_time}' + group by userid, query, tbl + ) as sd JOIN svv_table_info sti ON sd.tbl = sti.table_id JOIN stl_query sq ON sd.query = sq.query JOIN svl_user_info sui ON sq.userid = sui.usesysid WHERE - sd.starttime >= '{start_time}' - AND sd.starttime < '{end_time}' - AND sd.rows > 0 - AND sq.aborted = 0) + sq.aborted = 0) ORDER BY endtime DESC """.strip() From 265d6bdb534c17b1b370033b81a5c20c434b49d0 Mon Sep 17 00:00:00 2001 From: purnimagarg1 <139125209+purnimagarg1@users.noreply.github.com> Date: Tue, 19 Dec 2023 22:41:18 
+0530 Subject: [PATCH 116/263] Fade recipe section to transparent on Ingestion Run Details (#9404) --- .../ExecutionRequestDetailsModal.tsx | 35 +++++++++++-------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx b/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx index 96dfc05e39153..0799f8af1173d 100644 --- a/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx +++ b/datahub-web-react/src/app/ingest/source/executions/ExecutionRequestDetailsModal.tsx @@ -83,11 +83,11 @@ const ShowMoreButton = styled(Button)` padding: 0px; `; -const LogsContainer = styled.div` +const DetailsContainer = styled.div` margin-bottom: -25px; ${(props) => - props.areLogsExpandable && - !props.showExpandedLogs && + props.areDetailsExpandable && + !props.showExpandedDetails && ` -webkit-mask-image: linear-gradient(to bottom, rgba(0,0,0,1) 50%, rgba(255,0,0,0.5) 60%, rgba(255,0,0,0) 90% ); mask-image: linear-gradient(to bottom, rgba(0,0,0,1) 50%, rgba(255,0,0,0.5) 60%, rgba(255,0,0,0) 90%); @@ -102,9 +102,9 @@ const modalBodyStyle = { padding: 0, }; -type LogsContainerProps = { - showExpandedLogs: boolean; - areLogsExpandable: boolean; +type DetailsContainerProps = { + showExpandedDetails: boolean; + areDetailsExpandable: boolean; }; type Props = { @@ -124,7 +124,7 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => { downloadFile(output, `exec-${urn}.log`); }; - const logs = (showExpandedLogs && output) || output.slice(0, 250); + const logs = (showExpandedLogs && output) || output?.split('\n').slice(0, 5).join('\n'); const result = data?.executionRequest?.result?.status; useEffect(() => { @@ -154,10 +154,10 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => { } catch (e) { recipeYaml = ''; } - const recipe = showExpandedRecipe ? recipeYaml : recipeYaml?.split('\n').slice(0, 1).join('\n'); + const recipe = showExpandedRecipe ? recipeYaml : recipeYaml?.split('\n').slice(0, 5).join('\n'); - const areLogsExpandable = output.length > 250; - const isRecipeExpandable = recipeYaml?.includes('\n'); + const areLogsExpandable = output?.split(/\r\n|\r|\n/)?.length > 5; + const isRecipeExpandable = recipeYaml?.split(/\r\n|\r|\n/)?.length > 5; return ( { Download - +
{`${logs}${!showExpandedLogs && areLogsExpandable ? '...' : ''}`}
-
+ {areLogsExpandable && ( setShowExpandedLogs(!showExpandedLogs)}> {showExpandedLogs ? 'Hide' : 'Show More'} @@ -216,9 +216,14 @@ export const ExecutionDetailsModal = ({ urn, visible, onClose }: Props) => { The recipe used for this ingestion run. - -
{`${recipe}${!showExpandedRecipe && isRecipeExpandable ? '\n...' : ''}`}
-
+ + +
{`${recipe}${!showExpandedRecipe && isRecipeExpandable ? '...' : ''}`}
+
+
{isRecipeExpandable && ( setShowExpandedRecipe((v) => !v)}> {showExpandedRecipe ? 'Hide' : 'Show More'} From 92c9940bbd5fd2109f62b7145cfaf981d40704c3 Mon Sep 17 00:00:00 2001 From: Ellie O'Neil <110510035+eboneil@users.noreply.github.com> Date: Tue, 19 Dec 2023 09:24:03 -0800 Subject: [PATCH 117/263] Allow message_name field for protobuf ingestion (#9480) --- .../java/datahub-protobuf/build.gradle | 9 +++------ .../src/main/java/datahub/protobuf/Proto2DataHub.java | 11 +++++++++++ .../java/datahub/protobuf/ProtobufDatasetTest.java | 6 +++--- .../test/java/datahub/protobuf/ProtobufUtilsTest.java | 4 ++-- .../java/datahub/protobuf/model/ProtobufEnumTest.java | 4 ++-- .../datahub/protobuf/model/ProtobufFieldTest.java | 4 ++-- .../datahub/protobuf/model/ProtobufGraphTest.java | 4 ++-- .../datahub/protobuf/model/ProtobufMessageTest.java | 4 ++-- .../protobuf/model/ProtobufOneOfFieldTest.java | 4 ++-- .../datahub/protobuf/visitors/VisitContextTest.java | 4 ++-- .../protobuf/visitors/dataset/DatasetVisitorTest.java | 4 ++-- .../visitors/dataset/DescriptionVisitorTest.java | 4 ++-- .../protobuf/visitors/dataset/DomainVisitorTest.java | 4 ++-- .../dataset/InstitutionalMemoryVisitorTest.java | 4 ++-- .../dataset/KafkaTopicPropertyVisitorTest.java | 4 ++-- .../visitors/dataset/OwnershipVisitorTest.java | 4 ++-- .../visitors/dataset/PropertyVisitorTest.java | 4 ++-- .../visitors/dataset/TermAssociationVisitorTest.java | 4 ++-- .../field/ProtobufExtensionFieldVisitorTest.java | 4 ++-- .../visitors/field/SchemaFieldVisitorTest.java | 4 ++-- .../datahub/protobuf/visitors/tag/TagVisitorTest.java | 4 ++-- 21 files changed, 53 insertions(+), 45 deletions(-) diff --git a/metadata-integration/java/datahub-protobuf/build.gradle b/metadata-integration/java/datahub-protobuf/build.gradle index 2cb36a14cb9c7..c8082b875d321 100644 --- a/metadata-integration/java/datahub-protobuf/build.gradle +++ b/metadata-integration/java/datahub-protobuf/build.gradle @@ -31,10 +31,10 @@ dependencies { implementation externalDependency.commonsCli implementation externalDependency.httpAsyncClient implementation externalDependency.slf4jApi + implementation externalDependency.jacksonCore compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok - testImplementation externalDependency.junitJupiterApi - testRuntimeOnly externalDependency.junitJupiterEngine + testImplementation externalDependency.testng } import java.nio.file.Paths @@ -61,10 +61,7 @@ jacocoTestReport { dependsOn test // tests are required to run before generating the report } -test { - useJUnit() - finalizedBy jacocoTestReport -} +test.finalizedBy jacocoTestReport task checkShadowJar(type: Exec) { diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java index dcc95222fabf2..429c6d6bfeba4 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java @@ -67,6 +67,13 @@ public class Proto2DataHub { "[Optional if using --directory] The protobuf source file. 
Typically a .proto file.") .build(); + private static final Option OPTION_MESSAGE_NAME = + Option.builder() + .longOpt("message_name") + .hasArg() + .desc("[Optional] The protobuf message name to read from.") + .build(); + private static final Option OPTION_DIR = Option.builder() .longOpt("directory") @@ -166,6 +173,7 @@ static class AppConfig { private final String dataPlatform; private final String protoc; private final String inputFile; + private final String messageName; private final String inputDir; private final TransportOptions transport; private final String filename; @@ -191,6 +199,7 @@ static class AppConfig { dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT); protoc = cli.getOptionValue(OPTION_DESCRIPTOR); inputFile = cli.getOptionValue(OPTION_FILE, null); + messageName = cli.getOptionValue(OPTION_MESSAGE_NAME, null); transport = TransportOptions.valueOf( cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT)); @@ -250,6 +259,7 @@ public static void main(String[] args) throws Exception { .addOption(OPTION_DATAHUB_TOKEN) .addOption(OPTION_DESCRIPTOR) .addOption(OPTION_FILE) + .addOption(OPTION_MESSAGE_NAME) .addOption(OPTION_DIR) .addOption(OPTION_EXCLUDE_PATTERN) .addOption(OPTION_DATAHUB_USER) @@ -354,6 +364,7 @@ public static void main(String[] args) throws Exception { .setGithubOrganization(config.githubOrg) .setSlackTeamId(config.slackId) .setSubType(config.subType) + .setMessageName(config.messageName) .build(); dataset diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java index e96bb63220b04..62f3b0453be09 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java @@ -1,8 +1,8 @@ package datahub.protobuf; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; import com.linkedin.common.FabricType; import com.linkedin.common.GlobalTags; @@ -34,7 +34,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufDatasetTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java index e2599cb4c3f68..9bf649041e035 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java @@ -2,13 +2,13 @@ import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtoc; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos; import com.google.protobuf.ExtensionRegistry; import datahub.protobuf.model.ProtobufGraph; import java.io.IOException; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufUtilsTest { diff 
--git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java index fed9f250b359f..ae539a8e8fa4a 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java @@ -1,6 +1,6 @@ package datahub.protobuf.model; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.EnumDescriptorProto; @@ -11,7 +11,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufEnumTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java index 6d4dc8bc4d585..9508f4778e5c8 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.model; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Set; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufFieldTest { private static final DescriptorProto EXPECTED_MESSAGE_PROTO = diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java index 488222b87766d..6ca0c5b45cb5e 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java @@ -2,14 +2,14 @@ import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufGraphTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java index 1d6b3907d76d9..1126895aec57a 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java @@ -1,6 +1,6 @@ package datahub.protobuf.model; -import static 
org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; @@ -11,7 +11,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufMessageTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java index c8bd8a322aad5..9db06f23a2bdf 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java @@ -1,6 +1,6 @@ package datahub.protobuf.model; -import static org.junit.jupiter.api.Assertions.*; +import static org.testng.Assert.*; import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; @@ -12,7 +12,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufOneOfFieldTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java index 2fc5f3834a749..fe27af7461860 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.testng.Assert.assertNotEquals; import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import datahub.protobuf.model.FieldTypeEdge; @@ -13,7 +13,7 @@ import java.util.Set; import java.util.stream.Collectors; import org.jgrapht.GraphPath; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class VisitContextTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java index de9a0f5ec4abe..6e99599c852b4 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.visitors.dataset; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.data.template.RecordTemplate; @@ -14,7 +14,7 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class DatasetVisitorTest { diff --git 
a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java index 679048fb48a53..42d8f1ad4c83c 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java @@ -1,14 +1,14 @@ package datahub.protobuf.visitors.dataset; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import datahub.protobuf.model.ProtobufGraph; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class DescriptionVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java index c24fc30766f0e..3330c09c49436 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.urn.Urn; import datahub.protobuf.model.ProtobufGraph; @@ -10,7 +10,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class DomainVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java index a57916441bfcb..45be30fe96210 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.visitors.dataset; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; @@ -9,7 +9,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class InstitutionalMemoryVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java index 5f8572cf6ddd8..2da53dad2c0be 100644 --- 
a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; @@ -11,7 +11,7 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class KafkaTopicPropertyVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java index 1b0aff28eb517..adc94487dab3c 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; @@ -14,7 +14,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class OwnershipVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java index 13912100f28a5..be65330954051 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java @@ -3,7 +3,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; import static java.util.Map.entry; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; @@ -11,7 +11,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class PropertyVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java index f734c00bb76e0..79e7075c65209 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java +++ 
b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.GlossaryTermUrn; @@ -10,7 +10,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class TermAssociationVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java index eec397011a4ce..ff1aa643ac8df 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java @@ -1,7 +1,7 @@ package datahub.protobuf.visitors.field; import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -23,7 +23,7 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class ProtobufExtensionFieldVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java index af31a80d3b53a..59d9e0ca6e518 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.schema.NumberType; import com.linkedin.schema.SchemaField; @@ -15,7 +15,7 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class SchemaFieldVisitorTest { diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java index 258d816d9d1da..ab477e19aabe4 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java @@ -2,7 +2,7 @@ import static datahub.protobuf.TestFixtures.getTestProtobufGraph; import static 
datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testng.Assert.assertEquals; import com.linkedin.tag.TagProperties; import datahub.event.MetadataChangeProposalWrapper; @@ -11,7 +11,7 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; +import org.testng.annotations.Test; public class TagVisitorTest { From 8f19138f68ce6376588f4e09617be7e3c325a70f Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 19 Dec 2023 12:00:54 -0600 Subject: [PATCH 118/263] feat(docker-compose): consolidate docker-compose profiles (#9478) --- build.gradle | 1 + .../upgrade/config/NoCodeCleanupConfig.java | 12 + .../upgrade/config/NoCodeUpgradeConfig.java | 12 + .../upgrade/config/RestoreBackupConfig.java | 12 + .../upgrade/config/RestoreIndicesConfig.java | 12 + .../datahub/upgrade/nocode/NoCodeUpgrade.java | 12 +- .../nocodecleanup/NoCodeCleanupUpgrade.java | 12 +- .../upgrade/restorebackup/RestoreBackup.java | 12 +- .../restoreindices/RestoreIndices.java | 9 +- docker/build.gradle | 216 ++++----- docker/profiles/README.md | 104 +++++ docker/profiles/cassandra | 1 + docker/profiles/datahub-actions | 1 + docker/profiles/datahub-frontend | 1 + docker/profiles/datahub-gms | 1 + docker/profiles/datahub-mae-consumer | 1 + docker/profiles/datahub-mce-consumer | 1 + docker/profiles/datahub-upgrade | 1 + docker/profiles/docker-compose.actions.yml | 45 ++ docker/profiles/docker-compose.frontend.yml | 119 +++++ docker/profiles/docker-compose.gms.yml | 429 ++++++++++++++++++ .../profiles/docker-compose.prerequisites.yml | 387 ++++++++++++++++ docker/profiles/docker-compose.yml | 13 + docker/profiles/elasticsearch | 1 + docker/profiles/elasticsearch-setup | 1 + docker/profiles/kafka-broker | 1 + docker/profiles/kafka-setup | 1 + docker/profiles/monitoring | 1 + docker/profiles/mysql | 1 + docker/profiles/mysql-setup | 1 + docker/profiles/neo4j | 1 + docker/profiles/postgres | 1 + docker/profiles/postgres-setup | 1 + 33 files changed, 1288 insertions(+), 136 deletions(-) create mode 100644 docker/profiles/README.md create mode 120000 docker/profiles/cassandra create mode 120000 docker/profiles/datahub-actions create mode 120000 docker/profiles/datahub-frontend create mode 120000 docker/profiles/datahub-gms create mode 120000 docker/profiles/datahub-mae-consumer create mode 120000 docker/profiles/datahub-mce-consumer create mode 120000 docker/profiles/datahub-upgrade create mode 100644 docker/profiles/docker-compose.actions.yml create mode 100644 docker/profiles/docker-compose.frontend.yml create mode 100644 docker/profiles/docker-compose.gms.yml create mode 100644 docker/profiles/docker-compose.prerequisites.yml create mode 100644 docker/profiles/docker-compose.yml create mode 120000 docker/profiles/elasticsearch create mode 120000 docker/profiles/elasticsearch-setup create mode 120000 docker/profiles/kafka-broker create mode 120000 docker/profiles/kafka-setup create mode 120000 docker/profiles/monitoring create mode 120000 docker/profiles/mysql create mode 120000 docker/profiles/mysql-setup create mode 120000 docker/profiles/neo4j create mode 120000 docker/profiles/postgres create mode 120000 docker/profiles/postgres-setup diff --git a/build.gradle b/build.gradle index a7a85db0398e2..bb01a15a7db8d 100644 --- a/build.gradle +++ b/build.gradle @@ -46,6 +46,7 @@ plugins { id 'com.gorylenko.gradle-git-properties' version 
'2.4.1' id 'com.github.johnrengelman.shadow' version '8.1.1' apply false id 'com.palantir.docker' version '0.35.0' apply false + id 'com.avast.gradle.docker-compose' version '0.17.5' id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 24bcec5852b4f..5ba5c8a90fd4a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -7,13 +7,16 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class NoCodeCleanupConfig { @@ -26,6 +29,7 @@ public class NoCodeCleanupConfig { "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -34,4 +38,12 @@ public NoCodeCleanupUpgrade createInstance() { final IndexConvention indexConvention = applicationContext.getBean(IndexConvention.class); return new NoCodeCleanupUpgrade(ebeanServer, graphClient, searchClient, indexConvention); } + + @Bean(name = "noCodeCleanup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeCleanupUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeCleanupUpgrade(null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index 68009d7ed1718..d968e8521867e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -6,12 +6,15 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class NoCodeUpgradeConfig { @@ -19,6 +22,7 @@ public class NoCodeUpgradeConfig { @Bean(name = "noCodeUpgrade") @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) + 
@ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -29,4 +33,12 @@ public NoCodeUpgrade createInstance() { return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); } + + @Bean(name = "noCodeUpgrade") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeUpgrade(null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 743e4ffe84b0e..116d62878f5c6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -8,12 +8,15 @@ import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class RestoreBackupConfig { @Autowired ApplicationContext applicationContext; @@ -27,6 +30,7 @@ public class RestoreBackupConfig { "searchService", "entityRegistry" }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -40,4 +44,12 @@ public RestoreBackup createInstance() { return new RestoreBackup( ebeanServer, entityService, entityRegistry, entityClient, graphClient, searchClient); } + + @Bean(name = "restoreBackup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreBackup createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreBackup(null, null, null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index d258c4a4d1a52..9d229f315d709 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -7,18 +7,22 @@ import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class RestoreIndicesConfig { 
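  // Note: the two @Bean methods below intentionally share the bean name "restoreIndices";
  // Spring's @ConditionalOnProperty selects exactly one of them based on entityService.impl,
  // the ebean variant by default or the cassandra variant that only logs a warning.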
@Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -31,4 +35,12 @@ public RestoreIndices createInstance() { return new RestoreIndices( ebeanServer, entityService, entityRegistry, entitySearchService, graphService); } + + @Bean(name = "restoreIndices") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreIndices createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreIndices(null, null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index 6753d309b9f50..674efb2b8ba78 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; public class NoCodeUpgrade implements Upgrade { @@ -26,12 +27,17 @@ public class NoCodeUpgrade implements Upgrade { // Upgrade requires the Database. public NoCodeUpgrade( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); - _cleanupSteps = buildCleanupSteps(); + if (server != null) { + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index 8a267be6ad808..6d3125423b443 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; import org.opensearch.client.RestHighLevelClient; public class NoCodeCleanupUpgrade implements Upgrade { @@ -18,12 +19,17 @@ public class NoCodeCleanupUpgrade implements Upgrade { // Upgrade requires the Database. 
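  // Note (a sketch of the guard pattern used below): a null Database marks the
  // cassandra-backed configuration, so both step lists are left empty and executing
  // this upgrade becomes a harmless no-op instead of failing on the missing ebean server.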
public NoCodeCleanupUpgrade( - final Database server, + @Nullable final Database server, final GraphService graphClient, final RestHighLevelClient searchClient, final IndexConvention indexConvention) { - _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); - _cleanupSteps = buildCleanupSteps(); + if (server != null) { + _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index b11abb2d6bc23..4ac295b4fdfb7 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -16,20 +16,26 @@ import io.ebean.Database; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; public class RestoreBackup implements Upgrade { private final List _steps; public RestoreBackup( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { - _steps = - buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); + if (server != null) { + _steps = + buildSteps( + server, entityService, entityRegistry, entityClient, graphClient, searchClient); + } else { + _steps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 8bb3b0073710a..d38685553dff2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -13,6 +13,7 @@ import io.ebean.Database; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; public class RestoreIndices implements Upgrade { public static final String BATCH_SIZE_ARG_NAME = "batchSize"; @@ -29,12 +30,16 @@ public class RestoreIndices implements Upgrade { private final List _steps; public RestoreIndices( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, final GraphService graphService) { - _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + if (server != null) { + _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + } else { + _steps = List.of(); + } } @Override diff --git a/docker/build.gradle b/docker/build.gradle index bc79be501b395..190202620c382 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -1,6 +1,9 @@ plugins { id 'java' // required by versioning + id 'docker-compose' } +import com.avast.gradle.dockercompose.tasks.ComposeUp +import com.avast.gradle.dockercompose.tasks.ComposeDownForced apply from: "../gradle/versioning/versioning.gradle" @@ -18,144 +21,107 @@ ext { debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job', 
':metadata-jobs:mae-consumer-job'] - debug_compose_args = [ - '-f', 'docker-compose-without-neo4j.yml', - '-f', 'docker-compose-without-neo4j.override.yml', - '-f', 'docker-compose-without-neo4j.m1.yml', // updates to mariadb - '-f', 'docker-compose.dev.yml' - ] + compose_args = ['-f', 'profiles/docker-compose.yml'] debug_reloadable = [ - 'datahub-gms', - 'datahub-frontend-react' + 'datahub-gms-debug', + 'system-update-debug', + 'frontend-debug' ] - // Postgres pg_quickstart_modules = quickstart_modules - [':docker:mysql-setup'] + [':docker:postgres-setup'] - pg_compose_args = [ - '-f', 'docker-compose-without-neo4j.yml', - '-f', 'docker-compose-without-neo4j.postgres.override.yml' - ] } -task quickstart(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(quickstart_modules.collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - // environment "ACTIONS_VERSION", 'alpine3.18-slim' - // environment "DATAHUB_ACTIONS_IMAGE", 'nginx' - - // Elastic - // environment "DATAHUB_SEARCH_IMAGE", 'elasticsearch' - // environment "DATAHUB_SEARCH_TAG", '7.10.1' - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] +tasks.register('quickstart') {} +tasks.register('quickstartSlim') {} +tasks.register('quickstartDebug') {} +tasks.register('quickstartPg') {} - commandLine 'bash', '-c', cmd.join(" ") +tasks.withType(ComposeDownForced) { + removeVolumes = true } - -task quickstartSlim(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(([':docker:datahub-ingestion'] + quickstart_modules).collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - environment "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" - environment "ACTIONS_VERSION", "v${version}-slim" - environment "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions' - environment "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] - - commandLine 'bash', '-c', cmd.join(" ") +task quickstartNuke { + finalizedBy(tasks.withType(ComposeDownForced)) } -task quickstartNuke(type: Exec, dependsOn: ":metadata-ingestion:install") { - shouldRunAfter(':metadata-ingestion:clean') - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker nuke' - ] - commandLine 'bash', '-c', cmd.join(" ") +dockerCompose { + quickstart { + isRequiredBy(tasks.named('quickstart')) + composeAdditionalArgs = ['--profile', 'quickstart-consumers'] + + environment.put 'DATAHUB_VERSION', "v${version}" + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' 
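+        // Lifecycle flags (shared by every profile block below): images come from the dockerTag*
+        // Gradle task dependencies rather than compose builds, and containers/volumes are left
+        // in place so that quickstartNuke, via ComposeDownForced with removeVolumes = true,
+        // owns the teardown.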
+ buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } + + quickstartPg { + isRequiredBy(tasks.named('quickstartPg')) + composeAdditionalArgs = ['--profile', 'quickstart-postgres'] + + environment.put 'DATAHUB_VERSION', "v${version}" + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } + + quickstartSlim { + isRequiredBy(tasks.named('quickstartSlim')) + composeAdditionalArgs = ['--profile', 'quickstart-consumers'] + + environment.put 'DATAHUB_VERSION', "v${version}" + environment.put "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" + environment.put "ACTIONS_VERSION", "v${version}-slim" + environment.put "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions' + environment.put "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml' + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } + + quickstartDebug { + isRequiredBy(tasks.named('quickstartDebug')) + composeAdditionalArgs = ['--profile', 'debug'] + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } } - -task quickstartDebug(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(debug_modules.collect { it + ':dockerTagDebug' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - - // Elastic - // environment "DATAHUB_SEARCH_IMAGE", 'elasticsearch' - // environment "DATAHUB_SEARCH_TAG", '7.10.1' - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--version', "debug", - '--dump-logs-on-failure' - ] + debug_compose_args - commandLine 'bash', '-c', cmd.join(" ") +tasks.getByName('quickstartComposeUp').dependsOn( + quickstart_modules.collect { it + ':dockerTag' }) +tasks.getByName('quickstartPgComposeUp').dependsOn( + pg_quickstart_modules.collect { it + ':dockerTag' }) +tasks.getByName('quickstartSlimComposeUp').dependsOn( + ([':docker:datahub-ingestion'] + quickstart_modules) + .collect { it + ':dockerTag' }) +tasks.getByName('quickstartDebugComposeUp').dependsOn( + debug_modules.collect { it + ':dockerTagDebug' } +) +tasks.withType(ComposeUp).configureEach { + shouldRunAfter('quickstartNuke') } + task debugReload(type: Exec) { - def cmd = ['docker compose -p datahub'] + debug_compose_args + ['restart'] + debug_reloadable + def cmd = ['docker compose -p datahub --profile debug'] + compose_args + ['restart'] + debug_reloadable commandLine 'bash', '-c', cmd.join(" ") } - -task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(pg_quickstart_modules.collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - 
environment "DATAHUB_TELEMETRY_ENABLED", "false"
-    environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}"
-    environment "DATAHUB_POSTGRES_VERSION", "15.5"
-
-    // OpenSearch
-    environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch'
-    environment "DATAHUB_SEARCH_TAG", '2.9.0'
-    environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true'
-    environment "USE_AWS_ELASTICSEARCH", 'true'
-
-    def cmd = [
-        'source ../metadata-ingestion/venv/bin/activate && ',
-        'datahub docker quickstart',
-        '--no-pull-images',
-        '--standalone_consumers',
-        '--version', "v${version}",
-        '--dump-logs-on-failure'
-    ] + pg_compose_args
-
-    commandLine 'bash', '-c', cmd.join(" ")
-}
\ No newline at end of file
diff --git a/docker/profiles/README.md b/docker/profiles/README.md
new file mode 100644
index 0000000000000..df09f15cd85ce
--- /dev/null
+++ b/docker/profiles/README.md
@@ -0,0 +1,104 @@
+# Docker Compose Profiles
+
+This directory contains a set of docker compose definitions which are designed to run several configurations
+for quickstart use-cases as well as development use-cases. These configurations cover a few of the wide variety of
+infrastructure configurations that DataHub can operate on.
+
+Requirements:
+* Using the profiles requires a modern version of docker.
+* If using the debug/development profiles, you will need to have built the `debug` docker images locally. See the Development Profiles section for more details.
+
+```bash
+$ cd docker/profiles
+$ docker compose --profile <profile name> up
+```
+
+Use Control-c (`^c`) to terminate the running system. This will automatically stop all running containers.
+
+To remove the containers use the following:
+
+```bash
+docker compose --profile <profile name> rm
+```
+
+Please refer to docker's documentation for more details.
+
+The following sections detail a few of the profiles and their intended use-cases. For a complete list of profiles
+and their configuration please see the table at the end of each section.
+
+## Quickstart Profiles
+
+Quickstart profiles are primarily a way to test drive DataHub features before committing to a production-ready deployment.
+A couple of these profiles are also used in our continuous integration (CI) tests.
+
+Note: Quickstart profiles use docker images with the `head` tag. These images are updated when changes are committed
+to the DataHub github repository. This can be overridden to use a stable release tag by prefixing the commands with
+`DATAHUB_VERSION=v0.12.1` for example.
+
+### `quickstart`
+
+This is the default configuration: MySQL and OpenSearch for storage, with GMS running integrated consumers.
+
+### `quickstart-consumers`
+
+This configuration is identical to `quickstart` except that it runs standalone consumers instead of consumers integrated with the GMS container.
+
+### `quickstart-postgres`
+
+Identical to `quickstart` with Postgres instead of MySQL.
+
+### `quickstart-cassandra`
+
+Uses Cassandra as the primary data store along with Neo4j as the graph database.
+
+### `quickstart-storage`
+
+Runs just the `quickstart` data stores without the DataHub components. This mode is useful for debugging when running the frontend and GMS components outside
+of docker.
+
+### Quickstart Profiles Table
+| Profile Name         | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch |
+|----------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------|
+| quickstart           | X     |          |           |       | X        | X   | X       | X            |     |     | X     | X          |
+| quickstart-frontend  | X     |          |           |       | X        |     |         | X            |     |     | X     | X          |
+| quickstart-backend   | X     |          |           |       |          | X   | X       | X            |     |     | X     | X          |
+| quickstart-postgres  |       | X        |           |       | X        | X   | X       | X            |     |     | X     | X          |
+| quickstart-cassandra |       |          | X         | X     | X        | X   | X       | X            |     |     | X     | X          |
+| quickstart-consumers | X     |          |           |       | X        | X   | X       | X            | X   | X   | X     | X          |
+| quickstart-storage   | X     |          |           |       |          |     |         |              |     |     | X     | X          |
+
+## Development Profiles
+
+* Runs `debug` tagged images
+* JVM Debug Mode Enabled
+* Exposes local jars and scripts to the containers
+* Can run non-default one-off configurations (neo4j, cassandra, elasticsearch)
+
+The docker images used are the `debug` images, which are created by building locally. These images are
+created by running the gradle command:
+
+```bash
+./gradlew dockerTagDebug
+```
+
+For a complete list of profiles see the table at the end of this section.
+
+### `quickstart-backend`
+
+Runs everything except for the `frontend` component. Useful for running just a local (non-docker) frontend.
+
+### `quickstart-frontend`
+
+Runs everything except for the GMS. Useful for running just a local (non-docker) GMS instance.
+
+### Development Profiles Table
+| Profile Name        | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch | Elasticsearch |
+|---------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------|---------------|
+| debug               | X     |          |           |       | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-frontend      | X     |          |           |       | X        |     |         | X            |     |     | X     | X          |               |
+| debug-backend       | X     |          |           |       |          | X   | X       | X            |     |     | X     | X          |               |
+| debug-postgres      |       | X        |           |       | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-cassandra     |       |          | X         |       | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-consumers     | X     |          |           |       | X        | X   | X       | X            | X   | X   | X     | X          |               |
+| debug-neo4j         | X     |          |           | X     | X        | X   | X       | X            |     |     | X     | X          |               |
+| debug-elasticsearch | X     |          |           |       | X        | X   | X       | X            |     |     | X     |            | X             |
\ No newline at end of file
diff --git a/docker/profiles/cassandra b/docker/profiles/cassandra
new file mode 120000
index 0000000000000..d9af9adbce5ca
--- /dev/null
+++ b/docker/profiles/cassandra
@@ -0,0 +1 @@
+../cassandra
\ No newline at end of file
diff --git a/docker/profiles/datahub-actions b/docker/profiles/datahub-actions
new file mode 120000
index 0000000000000..fea4275be45ff
--- /dev/null
+++ b/docker/profiles/datahub-actions
@@ -0,0 +1 @@
+../datahub-actions/
\ No newline at end of file
diff --git a/docker/profiles/datahub-frontend b/docker/profiles/datahub-frontend
new file mode 120000
index 0000000000000..74a18b81b7e3b
--- /dev/null
+++ b/docker/profiles/datahub-frontend
@@ -0,0 +1 @@
+../datahub-frontend
\ No newline at end of file
diff --git
+ +### Quickstart Profiles Table +| Profile Name | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch | +|----------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------| +| quickstart | X | | | | X | X | X | X | | | X | X | +| quickstart-frontend | X | | | | X | | | X | | | X | X | +| quickstart-backend | X | | | | | X | X | X | | | X | X | +| quickstart-postgres | | X | | | X | X | X | X | | | X | X | +| quickstart-cassandra | | | X | X | X | X | X | X | | | X | X | +| quickstart-consumers | X | | | | X | X | X | X | X | X | X | X | +| quickstart-storage | X | | | | | | | | | | X | X | + +## Development Profiles + +* Runs `debug` tagged images +* JVM Debug Mode Enabled +* Exposes local jars and scripts to the containers +* Can run non-default one-off configurations (neo4j, cassandra, elasticsearch) + +The docker images used are the `debug` images which are created by building locally. These images are +created by running the gradle command. + +```bash +./gradlew dockerTagDebug +``` + +For a complete list of profiles see the table at the end of this section. + +### `quickstart-backend` + +Run everything except for the `frontend` component. Useful for running just a local (non-docker) frontend. + +### `quickstart-frontend` + +Runs everything except for the GMS. Useful for running just a local (non-docker) GMS instance. + +### Development Profiles Table +| Profile Name | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch | Elasticsearch | +|---------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------|---------------| +| debug | X | | | | X | X | X | X | | | X | X | | +| debug-frontend | X | | | | X | | | X | | | X | X | | +| debug-backend | X | | | | | X | X | X | | | X | X | | +| debug-postgres | | X | | | X | X | X | X | | | X | X | | +| debug-cassandra | | | X | | X | X | X | X | | | X | X | | +| debug-consumers | X | | | | X | X | X | X | X | X | X | X | | +| debug-neo4j | X | | | X | X | X | X | X | | | X | X | | +| debug-elasticsearch | X | | | | X | X | X | X | | | X | | X | \ No newline at end of file diff --git a/docker/profiles/cassandra b/docker/profiles/cassandra new file mode 120000 index 0000000000000..d9af9adbce5ca --- /dev/null +++ b/docker/profiles/cassandra @@ -0,0 +1 @@ +../cassandra \ No newline at end of file diff --git a/docker/profiles/datahub-actions b/docker/profiles/datahub-actions new file mode 120000 index 0000000000000..fea4275be45ff --- /dev/null +++ b/docker/profiles/datahub-actions @@ -0,0 +1 @@ +../datahub-actions/ \ No newline at end of file diff --git a/docker/profiles/datahub-frontend b/docker/profiles/datahub-frontend new file mode 120000 index 0000000000000..74a18b81b7e3b --- /dev/null +++ b/docker/profiles/datahub-frontend @@ -0,0 +1 @@ +../datahub-frontend \ No newline at end of file diff --git a/docker/profiles/datahub-gms b/docker/profiles/datahub-gms new file mode 120000 index 0000000000000..de2f067e4c0e0 --- /dev/null +++ b/docker/profiles/datahub-gms @@ -0,0 +1 @@ +../datahub-gms \ No newline at end of file diff --git a/docker/profiles/datahub-mae-consumer b/docker/profiles/datahub-mae-consumer new file mode 120000 index 0000000000000..90974047792c5 --- /dev/null +++ b/docker/profiles/datahub-mae-consumer @@ -0,0 +1 @@ +../datahub-mae-consumer \ No newline at end of file diff --git 
a/docker/profiles/datahub-mce-consumer b/docker/profiles/datahub-mce-consumer new file mode 120000 index 0000000000000..288c9d91c28b3 --- /dev/null +++ b/docker/profiles/datahub-mce-consumer @@ -0,0 +1 @@ +../datahub-mce-consumer \ No newline at end of file diff --git a/docker/profiles/datahub-upgrade b/docker/profiles/datahub-upgrade new file mode 120000 index 0000000000000..8ff77fd5562e7 --- /dev/null +++ b/docker/profiles/datahub-upgrade @@ -0,0 +1 @@ +../datahub-upgrade \ No newline at end of file diff --git a/docker/profiles/docker-compose.actions.yml b/docker/profiles/docker-compose.actions.yml new file mode 100644 index 0000000000000..a509a6a67d270 --- /dev/null +++ b/docker/profiles/docker-compose.actions.yml @@ -0,0 +1,45 @@ + +x-datahub-actions-service: &datahub-actions-service + hostname: actions + image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} + env_file: datahub-actions/env/docker.env + environment: + ACTIONS_EXTRA_PACKAGES: ${ACTIONS_EXTRA_PACKAGES:-} + ACTIONS_CONFIG: ${ACTIONS_CONFIG:-} + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + SCHEMA_REGISTRY_URL: http://datahub-gms:8080/schema-registry/api/ + +services: + datahub-actions-quickstart: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart + - quickstart-backend + depends_on: + datahub-gms-quickstart: + condition: service_healthy + datahub-actions-quickstart-cassandra: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart-cassandra + depends_on: + datahub-gms-quickstart-cassandra: + condition: service_healthy + datahub-actions-quickstart-postgres: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart-postgres + depends_on: + datahub-gms-quickstart-postgres: + condition: service_healthy + datahub-actions-quickstart-consumers: + <<: *datahub-actions-service + container_name: actions + profiles: + - quickstart-consumers + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml new file mode 100644 index 0000000000000..2b82829648dac --- /dev/null +++ b/docker/profiles/docker-compose.frontend.yml @@ -0,0 +1,119 @@ + +x-datahub-frontend-service: &datahub-frontend-service + hostname: datahub-frontend-react + image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + ports: + - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 + env_file: datahub-frontend/env/docker.env + environment: &datahub-frontend-service-env + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + volumes: + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +x-datahub-frontend-service-dev: &datahub-frontend-service-dev + <<: *datahub-frontend-service + image: linkedin/datahub-frontend-react:debug + ports: + - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 + - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 + environment: + <<: *datahub-frontend-service-env + JAVA_TOOL_OPTIONS: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002 + DATAHUB_ANALYTICS_ENABLED: ${DATAHUB_ANALYTICS_ENABLED:-true} + volumes: + - ../../datahub-frontend/build/stage/playBinary:/datahub-frontend + +services: + frontend-quickstart: + <<: *datahub-frontend-service + container_name: frontend + profiles: + - quickstart + - quickstart-frontend + depends_on: + system-update-quickstart: + condition: service_completed_successfully + frontend-quickstart-cassandra: + <<: *datahub-frontend-service + container_name: 
frontend + profiles: + - quickstart-cassandra + depends_on: + system-update-quickstart-cassandra: + condition: service_completed_successfully + frontend-quickstart-postgres: + <<: *datahub-frontend-service + container_name: frontend + profiles: + - quickstart-postgres + depends_on: + system-update-quickstart-postgres: + condition: service_completed_successfully + frontend-quickstart-consumers: + <<: *datahub-frontend-service + container_name: frontend + profiles: + - quickstart-consumers + depends_on: + system-update-quickstart: + condition: service_completed_successfully + frontend-debug: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug + depends_on: + system-update-debug: + condition: service_completed_successfully + frontend-debug-frontend: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-frontend + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + frontend-debug-postgres: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-postgres + depends_on: + system-update-debug-postgres: + condition: service_completed_successfully + frontend-debug-cassandra: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-cassandra + depends_on: + system-update-debug-cassandra: + condition: service_completed_successfully + frontend-debug-consumers: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-consumers + depends_on: + system-update-debug: + condition: service_completed_successfully + frontend-debug-neo4j: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-neo4j + depends_on: + system-update-debug-neo4j: + condition: service_completed_successfully + frontend-debug-elasticsearch: + <<: *datahub-frontend-service-dev + container_name: datahub-frontend-dev + profiles: + - debug-elasticsearch + depends_on: + system-update-debug-elasticsearch: + condition: service_completed_successfully \ No newline at end of file diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml new file mode 100644 index 0000000000000..01602c8b906b9 --- /dev/null +++ b/docker/profiles/docker-compose.gms.yml @@ -0,0 +1,429 @@ +################################# +# Common Environment Variables +################################# +x-primary-datastore-mysql-env: &primary-datastore-mysql-env + EBEAN_DATASOURCE_HOST: mysql:3306 + EBEAN_DATASOURCE_URL: 'jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8&enabledTLSProtocols=TLSv1.2' + EBEAN_DATASOURCE_DRIVER: com.mysql.jdbc.Driver + +x-primary-datastore-postgres-env: &primary-datastore-postgres-env + EBEAN_DATASOURCE_HOST: postgres:5432 + EBEAN_DATASOURCE_URL: 'jdbc:postgresql://postgres:5432/datahub' + EBEAN_DATASOURCE_DRIVER: org.postgresql.Driver + EBEAN_POSTGRES_USE_AWS_IAM_AUTH: ${EBEAN_POSTGRES_USE_AWS_IAM_AUTH:-false} + +x-primary-datastore-cassandra-env: &primary-datastore-cassandra-env + CASSANDRA_DATASOURCE_USERNAME: cassandra + CASSANDRA_DATASOURCE_PASSWORD: cassandra + CASSANDRA_HOSTS: cassandra + CASSANDRA_PORT: 9042 + CASSANDRA_DATASOURCE_HOST: 'cassandra:9042' + ENTITY_SERVICE_IMPL: cassandra + +x-graph-datastore-neo4j-env: &graph-datastore-neo4j-env + 
GRAPH_SERVICE_IMPL: neo4j + NEO4J_HOST: 'http://neo4j:7474' + NEO4J_URI: 'bolt://neo4j' + NEO4J_USERNAME: neo4j + NEO4J_PASSWORD: datahub +x-graph-datastore-search-env: &graph-datastore-search-env + GRAPH_SERVICE_IMPL: elasticsearch + +x-search-datastore-elasticsearch-env: &search-datastore-env + ELASTICSEARCH_HOST: search + ELASTICSEARCH_PORT: 9200 + ELASTICSEARCH_PROTOCOL: http + ELASTICSEARCH_USE_SSL: ${ELASTICSEARCH_USE_SSL:-false} + +x-kafka-env: &kafka-env + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + # KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 + SCHEMA_REGISTRY_TYPE: INTERNAL + KAFKA_SCHEMAREGISTRY_URL: http://datahub-gms:8080/schema-registry/api/ + +x-datahub-quickstart-telemetry-env: &datahub-quickstart-telemetry-env + DATAHUB_SERVER_TYPE: ${DATAHUB_SERVER_TYPE:-quickstart} + DATAHUB_TELEMETRY_ENABLED: ${DATAHUB_TELEMETRY_ENABLED:-true} + +x-datahub-dev-telemetry-env: &datahub-dev-telemetry-env + DATAHUB_SERVER_TYPE: ${DATAHUB_SERVER_TYPE:-dev} + DATAHUB_TELEMETRY_ENABLED: ${DATAHUB_TELEMETRY_ENABLED:-true} + +################################# +# System Update +################################# +x-datahub-system-update-service: &datahub-system-update-service + hostname: datahub-system-update + image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} + command: + - -u + - SystemUpdate + env_file: datahub-upgrade/env/docker.env + environment: &datahub-system-update-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] + SCHEMA_REGISTRY_SYSTEM_UPDATE: ${SCHEMA_REGISTRY_SYSTEM_UPDATE:-true} + SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS: ${SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS:-true} + SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION: ${SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION:-true} + +x-datahub-system-update-service-dev: &datahub-system-update-service-dev + <<: *datahub-system-update-service + image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:debug + ports: + - ${DATAHUB_MAPPED_UPGRADE_DEBUG_PORT:-5003}:5003 + environment: &datahub-system-update-dev-env + <<: [*datahub-dev-telemetry-env, *datahub-system-update-env] + SKIP_ELASTICSEARCH_CHECK: false + REPROCESS_DEFAULT_BROWSE_PATHS_V2: ${REPROCESS_DEFAULT_BROWSE_PATHS_V2:-false} + JAVA_TOOL_OPTIONS: '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5003' + volumes: + - ../../datahub-upgrade/build/libs/:/datahub/datahub-upgrade/bin/ + - ../../metadata-models/src/main/resources/:/datahub/datahub-gms/resources + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +################################# +# GMS +################################# +x-datahub-gms-service: &datahub-gms-service + hostname: datahub-gms + image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + ports: + - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 + env_file: datahub-gms/env/docker.env + environment: &datahub-gms-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] + healthcheck: + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health + start_period: 90s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +x-datahub-gms-service-dev: &datahub-gms-service-dev + <<: *datahub-gms-service + image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:debug + ports: + - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 + - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 + environment: &datahub-gms-dev-env + <<: 
[*datahub-dev-telemetry-env, *datahub-gms-env] + SKIP_ELASTICSEARCH_CHECK: false + METADATA_SERVICE_AUTH_ENABLED: false + JAVA_TOOL_OPTIONS: '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5001' + BOOTSTRAP_SYSTEM_UPDATE_WAIT_FOR_SYSTEM_UPDATE: false + SEARCH_SERVICE_ENABLE_CACHE: false + LINEAGE_SEARCH_CACHE_ENABLED: false + SHOW_BROWSE_V2: true + volumes: + - ./datahub-gms/start.sh:/datahub/datahub-gms/scripts/start.sh + - ./datahub-gms/jetty.xml:/datahub/datahub-gms/scripts/jetty.xml + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-gms/scripts/prometheus-config.yaml + - ../../metadata-models/src/main/resources/:/datahub/datahub-gms/resources + - ../../metadata-service/war/build/libs/:/datahub/datahub-gms/bin + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +################################# +# MAE Consumer +################################# +x-datahub-mae-consumer-service: &datahub-mae-consumer-service + hostname: datahub-mae-consumer + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + ports: + - 9091:9091 + env_file: datahub-mae-consumer/env/docker.env + environment: &datahub-mae-consumer-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] + +x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev + <<: *datahub-mae-consumer-service + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:debug + environment: + <<: [*datahub-dev-telemetry-env, *datahub-mae-consumer-env] + volumes: + - ./datahub-mae-consumer/start.sh:/datahub/datahub-mae-consumer/scripts/start.sh + - ../../metadata-models/src/main/resources/:/datahub/datahub-mae-consumer/resources + - ../../metadata-jobs/mae-consumer-job/build/libs/:/datahub/datahub-mae-consumer/bin/ + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-mae-consumer/scripts/prometheus-config.yaml + +################################# +# MCE Consumer +################################# +x-datahub-mce-consumer-service: &datahub-mce-consumer-service + hostname: datahub-mce-consumer + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + ports: + - 9090:9090 + env_file: datahub-mce-consumer/env/docker.env + environment: &datahub-mce-consumer-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] + +x-datahub-mce-consumer-service-dev: &datahub-mce-consumer-service-dev + <<: *datahub-mce-consumer-service + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:debug + environment: + <<: [*datahub-dev-telemetry-env, *datahub-mce-consumer-env] + volumes: + - ./datahub-mce-consumer/start.sh:/datahub/datahub-mce-consumer/scripts/start.sh + - ../../metadata-jobs/mce-consumer-job/build/libs/:/datahub/datahub-mce-consumer/bin + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-mce-consumer/scripts/prometheus-config.yaml + +services: + ################################# + # System Update + ################################# + system-update-quickstart: + <<: *datahub-system-update-service + container_name: system-update + profiles: + - quickstart + - quickstart-storage + - quickstart-consumers + - quickstart-frontend + - quickstart-backend + depends_on: + mysql-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + 
system-update-quickstart-cassandra: + <<: *datahub-system-update-service + container_name: system-update + profiles: + - quickstart-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *graph-datastore-neo4j-env, *datahub-system-update-env] + depends_on: + neo4j: + condition: service_healthy + cassandra-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + system-update-quickstart-postgres: + <<: *datahub-system-update-service + container_name: system-update + profiles: + - quickstart-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-system-update-env] + depends_on: + postgres-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + system-update-debug: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug + - debug-backend + - debug-consumers + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-elasticsearch: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-elasticsearch + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + elasticsearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-postgres: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-system-update-dev-env] + depends_on: + postgres-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-cassandra: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *datahub-system-update-dev-env] + depends_on: + cassandra-setup: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-neo4j: + <<: *datahub-system-update-service-dev + container_name: system-update-dev + profiles: + - debug-neo4j + environment: + <<: [*graph-datastore-neo4j-env, *datahub-system-update-dev-env] + depends_on: + neo4j: + condition: service_healthy + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + ################################# + # GMS + ################################# + datahub-gms-quickstart: + <<: *datahub-gms-service + profiles: + - quickstart + - quickstart-backend + container_name: datahub-gms + depends_on: + system-update-quickstart: + condition: service_completed_successfully + datahub-gms-quickstart-cassandra: + <<: *datahub-gms-service + profiles: + - quickstart-cassandra + container_name: datahub-gms + environment: + <<: [*primary-datastore-cassandra-env, *graph-datastore-neo4j-env, *datahub-gms-env] + depends_on: + system-update-quickstart-cassandra: + condition: service_completed_successfully + 
datahub-gms-quickstart-postgres: + <<: *datahub-gms-service + profiles: + - quickstart-postgres + container_name: datahub-gms + environment: + <<: [*primary-datastore-postgres-env, *datahub-gms-env] + depends_on: + system-update-quickstart-postgres: + condition: service_completed_successfully + datahub-gms-quickstart-consumers: + <<: *datahub-gms-service + profiles: + - quickstart-consumers + container_name: datahub-gms + environment: + <<: *datahub-gms-env + MAE_CONSUMER_ENABLED: false + MCE_CONSUMER_ENABLED: false + depends_on: + system-update-quickstart: + condition: service_completed_successfully + datahub-gms-debug: + <<: *datahub-gms-service-dev + profiles: + - debug + - debug-backend + container_name: datahub-gms-dev + depends_on: + system-update-debug: + condition: service_completed_successfully + datahub-gms-debug-postgres: + <<: *datahub-gms-service-dev + profiles: + - debug-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-gms-dev-env] + container_name: datahub-gms-dev + depends_on: + system-update-debug-postgres: + condition: service_completed_successfully + datahub-gms-debug-cassandra: + <<: *datahub-gms-service-dev + profiles: + - debug-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *datahub-gms-dev-env] + container_name: datahub-gms-dev + depends_on: + system-update-debug-cassandra: + condition: service_completed_successfully + datahub-gms-debug-consumers: + <<: *datahub-gms-service-dev + profiles: + - debug-consumers + environment: + <<: *datahub-gms-dev-env + MAE_CONSUMER_ENABLED: false + MCE_CONSUMER_ENABLED: false + container_name: datahub-gms-dev + depends_on: + system-update-debug: + condition: service_completed_successfully + datahub-gms-debug-neo4j: + <<: *datahub-gms-service-dev + profiles: + - debug-neo4j + environment: + <<: [*graph-datastore-neo4j-env, *datahub-gms-dev-env] + container_name: datahub-gms-dev + depends_on: + system-update-debug-neo4j: + condition: service_completed_successfully + datahub-gms-debug-elasticsearch: + <<: *datahub-gms-service-dev + profiles: + - debug-elasticsearch + container_name: datahub-gms-dev + depends_on: + system-update-debug-elasticsearch: + condition: service_completed_successfully + ################################# + # MAE Consumer + ################################# + datahub-mae-consumer-quickstart-consumers: + <<: *datahub-mae-consumer-service + profiles: + - quickstart-consumers + container_name: datahub-mae-consumer + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy + datahub-mae-consumer-quickstart-consumers-dev: + <<: *datahub-mae-consumer-service-dev + profiles: + - debug-consumers + container_name: datahub-mae-consumer-dev + depends_on: + datahub-gms-debug-consumers: + condition: service_healthy + ################################# + # MCE Consumer + ################################# + datahub-mce-consumer-quickstart-consumers: + <<: *datahub-mce-consumer-service + profiles: + - quickstart-consumers + container_name: datahub-mce-consumer + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy + datahub-mce-consumer-quickstart-consumers-dev: + <<: *datahub-mce-consumer-service-dev + profiles: + - debug-consumers + container_name: datahub-mce-consumer-dev + depends_on: + datahub-gms-debug-consumers: + condition: service_healthy \ No newline at end of file diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml new file mode 100644 index 0000000000000..d90d4a252f993 --- 
/dev/null +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -0,0 +1,387 @@ +# Common environment +x-search-datastore-search: &search-datastore-environment + ELASTICSEARCH_HOST: search + ELASTICSEARCH_PORT: 9200 + ELASTICSEARCH_PROTOCOL: http + ELASTICSEARCH_USE_SSL: ${ELASTICSEARCH_USE_SSL:-false} + +# Primary Storage Profiles +x-mysql-profiles-quickstart: &mysql-profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-consumers +x-mysql-profiles-dev: &mysql-profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-consumers + - debug-neo4j + - debug-elasticsearch +x-mysql-profiles: &mysql-profiles + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-consumers + - debug + - debug-frontend + - debug-backend + - debug-consumers + - debug-neo4j + - debug-elasticsearch + +x-postgres-profiles-quickstart: &postgres-profiles-quickstart + - quickstart-postgres +x-postgres-profiles-dev: &postgres-profiles-dev + - debug-postgres +x-postgres-profiles: &postgres-profiles + - quickstart-postgres + - debug-postgres + +x-cassandra-profiles: &cassandra-profiles + - quickstart-cassandra + - debug-cassandra + +# Graph Storage Profiles +x-neo4j-profiles: &neo4j-profiles + - quickstart-cassandra + - debug-neo4j + +# Search Storage Profiles +x-elasticsearch-profiles: &elasticsearch-profiles + - debug-elasticsearch + +x-opensearch-profiles-quickstart: &opensearch-profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers +x-opensearch-profiles-dev: &opensearch-profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j +x-opensearch-profiles: &opensearch-profiles + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j + +# Debug vs Quickstart Profiles +x-profiles-quickstart: &profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers +x-profiles-dev: &profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j + - debug-elasticsearch + +services: + mysql: + container_name: mysql + profiles: *mysql-profiles + hostname: mysql + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} + command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=caching_sha2_password + ports: + - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 + env_file: mysql/env/docker.env + restart: on-failure + healthcheck: + test: mysqladmin ping -h mysql -u $$MYSQL_USER --password=$$MYSQL_PASSWORD + start_period: 10s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - ./mysql/init.sql:/docker-entrypoint-initdb.d/init.sql + - mysqldata:/var/lib/mysql + mysql-setup: &mysql-setup + container_name: mysql-setup + profiles: *mysql-profiles-quickstart + hostname: mysql-setup + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:${DATAHUB_VERSION:-head} + env_file: mysql-setup/env/docker.env + depends_on: + mysql: + condition: service_healthy + labels: + datahub_setup_job: true 
+ mysql-setup-dev: + <<: *mysql-setup + container_name: mysql-setup-dev + profiles: *mysql-profiles-dev + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:debug + postgres: + container_name: postgres + profiles: *postgres-profiles + hostname: postgres + image: postgres:${DATAHUB_POSTGRES_VERSION:-15.5} + env_file: postgres/env/docker.env + ports: + - '5432:5432' + restart: on-failure + healthcheck: + test: [ "CMD-SHELL", "pg_isready" ] + start_period: 20s + interval: 2s + timeout: 10s + retries: 5 + volumes: + - ./postgres/init.sql:/docker-entrypoint-initdb.d/init.sql + - postgresdata:/var/lib/postgresql/data + postgres-setup: &postgres-setup + container_name: postgres-setup + profiles: *postgres-profiles-quickstart + hostname: postgres-setup + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:${DATAHUB_VERSION:-head} + env_file: postgres-setup/env/docker.env + depends_on: + postgres: + condition: service_healthy + labels: + datahub_setup_job: true + postgres-setup-dev: + <<: *postgres-setup + container_name: postgres-setup-dev + profiles: *postgres-profiles-dev + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:debug + cassandra: + container_name: cassandra + profiles: *cassandra-profiles + hostname: cassandra + image: cassandra:4.1 + ports: + - 9042:9042 + healthcheck: + test: cqlsh -u cassandra -p cassandra -e 'describe keyspaces' + interval: 15s + timeout: 10s + retries: 10 + volumes: + - cassandradata:/var/lib/cassandra + cassandra-setup: + container_name: cassandra-setup + profiles: *cassandra-profiles + hostname: cassandra-setup + image: cassandra:4.1 + command: /bin/bash -c "cqlsh cassandra -f /init.cql" + depends_on: + cassandra: + condition: service_healthy + volumes: + - ./cassandra/init.cql:/init.cql + labels: + datahub_setup_job: true + neo4j: + container_name: neo4j + profiles: *neo4j-profiles + hostname: neo4j + image: neo4j:4.4.28-community + ports: + - ${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474}:7474 + - ${DATAHUB_MAPPED_NEO4J_BOLT_PORT:-7687}:7687 + env_file: neo4j/env/docker.env + healthcheck: + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} + start_period: 5s + interval: 1s + retries: 5 + timeout: 5s + volumes: + - neo4jdata:/data + kafka-broker: + container_name: kafka-broker + hostname: kafka-broker + image: confluentinc/cp-kafka:7.4.0 + command: + - /bin/bash + - -c + - | + # Generate KRaft clusterID + file_path="/var/lib/kafka/data/clusterID" + + if [ ! -f "$$file_path" ]; then + /bin/kafka-storage random-uuid > $$file_path + echo "Cluster id has been created..." 
+ # KRaft required step: Format the storage directory with a new cluster ID + kafka-storage format --ignore-formatted -t $$(cat "$$file_path") -c /etc/kafka/kafka.properties + fi + + export CLUSTER_ID=$$(cat "$$file_path") + echo "CLUSTER_ID=$$CLUSTER_ID" + + /etc/confluent/docker/run + ports: + - ${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092}:9092 + env_file: kafka-broker/env/docker.env + environment: + KAFKA_NODE_ID: 1 + KAFKA_ADVERTISED_LISTENERS: BROKER://kafka-broker:29092,EXTERNAL://kafka-broker:9092 + KAFKA_LISTENERS: BROKER://kafka-broker:29092,EXTERNAL://kafka-broker:9092,CONTROLLER://kafka-broker:39092 + KAFKA_INTER_BROKER_LISTENER_NAME: BROKER + KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,BROKER:PLAINTEXT,EXTERNAL:PLAINTEXT + KAFKA_PROCESS_ROLES: controller, broker + KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka-broker:39092 + # https://github.com/confluentinc/cp-all-in-one/issues/120 + KAFKA_LOG4J_LOGGERS: 'org.apache.kafka.image.loader.MetadataLoader=WARN' + KAFKA_ZOOKEEPER_CONNECT: null + healthcheck: + test: nc -z kafka-broker $${DATAHUB_KAFKA_BROKER_PORT:-9092} + start_period: 60s + interval: 1s + retries: 5 + timeout: 5s + volumes: + - broker:/var/lib/kafka/data/ + kafka-setup: &kafka-setup + container_name: kafka-setup + profiles: *profiles-quickstart + hostname: kafka-setup + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + env_file: kafka-setup/env/docker.env + environment: &kafka-setup-env + DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-false} + KAFKA_BOOTSTRAP_SERVER: kafka-broker:29092 + USE_CONFLUENT_SCHEMA_REGISTRY: false + depends_on: + kafka-broker: + condition: service_healthy + labels: + datahub_setup_job: true + kafka-setup-dev: + <<: *kafka-setup + container_name: kafka-setup-dev + profiles: *profiles-dev + environment: + <<: *kafka-setup-env + DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-true} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:debug + elasticsearch: + container_name: elasticsearch + profiles: *elasticsearch-profiles + hostname: search + image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} + ports: + - ${DATAHUB_MAPPED_ELASTIC_PORT:-9200}:9200 + env_file: elasticsearch/env/docker.env + environment: + - discovery.type=single-node + - ${XPACK_SECURITY_ENABLED:-xpack.security.enabled=false} + deploy: + resources: + limits: + memory: 1G + healthcheck: + test: curl -sS --fail http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + start_period: 20s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - esdata:/usr/share/elasticsearch/data + elasticsearch-setup-dev: &elasticsearch-setup-dev + container_name: elasticsearch-setup-dev + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + profiles: *elasticsearch-profiles + hostname: elasticsearch-setup + env_file: elasticsearch-setup/env/docker.env + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-false} + depends_on: + elasticsearch: + condition: service_healthy + labels: + datahub_setup_job: true + opensearch: + container_name: opensearch + profiles: *opensearch-profiles + hostname: search + image: ${DATAHUB_SEARCH_IMAGE:-opensearchproject/opensearch}:${DATAHUB_SEARCH_TAG:-2.9.0} + ports: + - ${DATAHUB_MAPPED_ELASTIC_PORT:-9200}:9200 + env_file: elasticsearch/env/docker.env + environment: + - discovery.type=single-node + - 
${XPACK_SECURITY_ENABLED:-plugins.security.disabled=true} + deploy: + resources: + limits: + memory: 1G + healthcheck: + test: curl -sS --fail http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s + start_period: 20s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - osdata:/usr/share/elasticsearch/data + opensearch-setup: &opensearch-setup + <<: *elasticsearch-setup-dev + container_name: opensearch-setup + profiles: *opensearch-profiles-quickstart + hostname: opensearch-setup + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} + depends_on: + opensearch: + condition: service_healthy + labels: + datahub_setup_job: true + opensearch-setup-dev: + <<: *opensearch-setup + container_name: opensearch-setup-dev + profiles: *opensearch-profiles-dev + hostname: opensearch-setup-dev + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} + depends_on: + opensearch: + condition: service_healthy + +networks: + default: + name: datahub_network + +volumes: + neo4jdata: + esdata: + osdata: + broker: + mysqldata: + cassandradata: + postgresdata: diff --git a/docker/profiles/docker-compose.yml b/docker/profiles/docker-compose.yml new file mode 100644 index 0000000000000..534ca9702e2d7 --- /dev/null +++ b/docker/profiles/docker-compose.yml @@ -0,0 +1,13 @@ +--- +version: '3.9' +name: datahub + +include: + # Contains storage layers: i.e. mysql, kafka, elasticsearch + - docker-compose.prerequisites.yml + # Actions pod + - docker-compose.actions.yml + # Frontend + - docker-compose.frontend.yml + # Remaining components: i.e. 
gms, system-update, consumers + - docker-compose.gms.yml diff --git a/docker/profiles/elasticsearch b/docker/profiles/elasticsearch new file mode 120000 index 0000000000000..7712783b3e8d6 --- /dev/null +++ b/docker/profiles/elasticsearch @@ -0,0 +1 @@ +../elasticsearch \ No newline at end of file diff --git a/docker/profiles/elasticsearch-setup b/docker/profiles/elasticsearch-setup new file mode 120000 index 0000000000000..670a10e8c3786 --- /dev/null +++ b/docker/profiles/elasticsearch-setup @@ -0,0 +1 @@ +../elasticsearch-setup \ No newline at end of file diff --git a/docker/profiles/kafka-broker b/docker/profiles/kafka-broker new file mode 120000 index 0000000000000..23b248a4e0bbd --- /dev/null +++ b/docker/profiles/kafka-broker @@ -0,0 +1 @@ +../broker \ No newline at end of file diff --git a/docker/profiles/kafka-setup b/docker/profiles/kafka-setup new file mode 120000 index 0000000000000..35b9c167ac26e --- /dev/null +++ b/docker/profiles/kafka-setup @@ -0,0 +1 @@ +../kafka-setup \ No newline at end of file diff --git a/docker/profiles/monitoring b/docker/profiles/monitoring new file mode 120000 index 0000000000000..1371b42ae4593 --- /dev/null +++ b/docker/profiles/monitoring @@ -0,0 +1 @@ +../monitoring \ No newline at end of file diff --git a/docker/profiles/mysql b/docker/profiles/mysql new file mode 120000 index 0000000000000..057b59f760165 --- /dev/null +++ b/docker/profiles/mysql @@ -0,0 +1 @@ +../mysql \ No newline at end of file diff --git a/docker/profiles/mysql-setup b/docker/profiles/mysql-setup new file mode 120000 index 0000000000000..f9199ec3fc58f --- /dev/null +++ b/docker/profiles/mysql-setup @@ -0,0 +1 @@ +../mysql-setup \ No newline at end of file diff --git a/docker/profiles/neo4j b/docker/profiles/neo4j new file mode 120000 index 0000000000000..0d4849d989d43 --- /dev/null +++ b/docker/profiles/neo4j @@ -0,0 +1 @@ +../neo4j \ No newline at end of file diff --git a/docker/profiles/postgres b/docker/profiles/postgres new file mode 120000 index 0000000000000..be56a57bd0ab8 --- /dev/null +++ b/docker/profiles/postgres @@ -0,0 +1 @@ +../postgres \ No newline at end of file diff --git a/docker/profiles/postgres-setup b/docker/profiles/postgres-setup new file mode 120000 index 0000000000000..38f51721feacb --- /dev/null +++ b/docker/profiles/postgres-setup @@ -0,0 +1 @@ +../postgres-setup/ \ No newline at end of file From a29fce9d823dee31480e2efee1dc1bf16fd4c739 Mon Sep 17 00:00:00 2001 From: Nate Bryant Date: Tue, 19 Dec 2023 15:08:55 -0500 Subject: [PATCH 119/263] Adds urnBasedPagination option to datahub-upgrade RestoreIndices (#9232) Co-authored-by: RyanHolstien --- .../restoreindices/RestoreIndices.java | 1 + .../upgrade/restoreindices/SendMAEStep.java | 62 ++++++++++++++++--- docker/datahub-upgrade/README.md | 12 +++- .../metadata/entity/EntityServiceImpl.java | 2 + .../metadata/entity/ebean/EbeanAspectDao.java | 22 ++++++- .../restoreindices/RestoreIndicesArgs.java | 8 +++ .../restoreindices/RestoreIndicesResult.java | 2 + 7 files changed, 96 insertions(+), 13 deletions(-) diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index d38685553dff2..f46bb9b05624d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -24,6 +24,7 @@ public class RestoreIndices implements 
Upgrade {
   public static final String WRITER_POOL_SIZE = "WRITER_POOL_SIZE";
   public static final String URN_ARG_NAME = "urn";
   public static final String URN_LIKE_ARG_NAME = "urnLike";
+  public static final String URN_BASED_PAGINATION_ARG_NAME = "urnBasedPagination";

   public static final String STARTING_OFFSET_ARG_NAME = "startingOffset";

diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
index ce59cf2edb84e..574b1f08b5f54 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
@@ -31,6 +31,7 @@ public class SendMAEStep implements UpgradeStep {

   private static final int DEFAULT_STARTING_OFFSET = 0;
   private static final int DEFAULT_THREADS = 1;
+  private static final boolean DEFAULT_URN_BASED_PAGINATION = false;

   private final Database _server;
   private final EntityService _entityService;
@@ -89,6 +90,7 @@ private RestoreIndicesArgs getArgs(UpgradeContext context) {
     result.numThreads = getThreadCount(context.parsedArgs());
     result.batchDelayMs = getBatchDelayMs(context.parsedArgs());
     result.start = getStartingOffset(context.parsedArgs());
+    result.urnBasedPagination = getUrnBasedPagination(context.parsedArgs());
     if (containsKey(context.parsedArgs(), RestoreIndices.ASPECT_NAME_ARG_NAME)) {
       result.aspectName = context.parsedArgs().get(RestoreIndices.ASPECT_NAME_ARG_NAME).get();
     }
@@ -140,18 +142,49 @@ public Function<UpgradeContext, UpgradeStepResult> executable() {
     List<Future<RestoreIndicesResult>> futures = new ArrayList<>();

     startTime = System.currentTimeMillis();
-    while (start < rowCount) {
-      args = args.clone();
-      args.start = start;
-      futures.add(executor.submit(new KafkaJob(context, args)));
-      start = start + args.batchSize;
-    }
-    while (futures.size() > 0) {
-      List<RestoreIndicesResult> tmpResults = iterateFutures(futures);
-      for (RestoreIndicesResult tmpResult : tmpResults) {
-        reportStats(context, finalJobResult, tmpResult, rowCount, startTime);
+    if (args.urnBasedPagination) {
+      RestoreIndicesResult previousResult = null;
+      int rowsProcessed = 1;
+      while (rowsProcessed > 0) {
+        args = args.clone();
+        if (previousResult != null) {
+          args.lastUrn = previousResult.lastUrn;
+          args.lastAspect = previousResult.lastAspect;
+        }
+        args.start = start;
+        context
+            .report()
+            .addLine(
+                String.format(
+                    "Getting next batch of urns + aspects, starting with %s - %s",
+                    args.lastUrn, args.lastAspect));
+        Future<RestoreIndicesResult> future = executor.submit(new KafkaJob(context, args));
+        try {
+          RestoreIndicesResult result = future.get();
+          reportStats(context, finalJobResult, result, rowCount, startTime);
+          previousResult = result;
+          rowsProcessed = result.rowsMigrated + result.ignored;
+          context.report().addLine(String.format("Rows processed this loop %d", rowsProcessed));
+          start += args.batchSize;
+        } catch (InterruptedException | ExecutionException e) {
+          return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED);
+        }
+      }
+    } else {
+      while (start < rowCount) {
+        args = args.clone();
+        args.start = start;
+        futures.add(executor.submit(new KafkaJob(context, args)));
+        start = start + args.batchSize;
+      }
+      while (futures.size() > 0) {
+        List<RestoreIndicesResult> tmpResults = iterateFutures(futures);
+        for (RestoreIndicesResult tmpResult : tmpResults) {
+          reportStats(context, finalJobResult, tmpResult, rowCount, startTime);
+        }
       }
     }
+    executor.shutdown();
     if (finalJobResult.rowsMigrated != rowCount) {
       float percentFailed =
0.0f;
@@ -233,6 +266,15 @@ private int getThreadCount(final Map<String, Optional<String>> parsedArgs) {
     return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME);
   }

+  private boolean getUrnBasedPagination(final Map<String, Optional<String>> parsedArgs) {
+    boolean urnBasedPagination = DEFAULT_URN_BASED_PAGINATION;
+    if (containsKey(parsedArgs, RestoreIndices.URN_BASED_PAGINATION_ARG_NAME)) {
+      urnBasedPagination =
+          Boolean.parseBoolean(parsedArgs.get(RestoreIndices.URN_BASED_PAGINATION_ARG_NAME).get());
+    }
+    return urnBasedPagination;
+  }
+
   private int getInt(
       final Map<String, Optional<String>> parsedArgs, int defaultVal, String argKey) {
     int result = defaultVal;
diff --git a/docker/datahub-upgrade/README.md b/docker/datahub-upgrade/README.md
index 0d019971604d6..9c96114cdb2dd 100644
--- a/docker/datahub-upgrade/README.md
+++ b/docker/datahub-upgrade/README.md
@@ -15,8 +15,16 @@ to metadata_aspect_v2 table. Arguments:
 2. **NoCodeDataMigrationCleanup**: Cleanses graph index, search index, and key-value store of legacy DataHub data (metadata_aspect table) once
 the No Code Data Migration has completed successfully. No arguments.
-3. **RestoreIndices**: Restores indices by fetching the latest version of each aspect and producing MAE
-
+3. **RestoreIndices**: Restores indices by fetching the latest version of each aspect and producing MAE. Arguments:
+   - *batchSize* (Optional): The number of rows to migrate at a time. Defaults to 1000.
+   - *batchDelayMs* (Optional): The number of milliseconds of delay between migrated batches. Used for rate limiting. Defaults to 250.
+   - *numThreads* (Optional): The number of threads to use, defaults to 1. Note that this is not used if `urnBasedPagination` is true.
+   - *aspectName* (Optional): The aspect name for producing events.
+   - *urn* (Optional): The urn for producing events.
+   - *urnLike* (Optional): The urn pattern for producing events, using `%` as a wildcard.
+   - *urnBasedPagination* (Optional): Paginate the SQL results using the urn + aspect string instead of `OFFSET`. Defaults to false,
+     though enabling it should improve performance for large amounts of data.
+
 4. **RestoreBackup**: Restores the storage stack from a backup of the local database

 ## Environment Variables
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
index a333839416556..7bd8e763cdc27 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
@@ -1161,6 +1161,7 @@ public RestoreIndicesResult restoreIndices(
       Urn urn;
       try {
         urn = Urn.createFromString(aspect.getKey().getUrn());
+        result.lastUrn = urn.toString();
       } catch (Exception e) {
         logger.accept(
             String.format(
@@ -1188,6 +1189,7 @@ public RestoreIndicesResult restoreIndices(
       result.timeEntityRegistryCheckMs += System.currentTimeMillis() - startTime;
       startTime = System.currentTimeMillis();
       final String aspectName = aspect.getKey().getAspect();
+      result.lastAspect = aspectName;

       // 3.
Verify that the aspect is a valid aspect associated with the entity
       AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName);
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
index b2b47c1d5ba32..26946890daa3b 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
@@ -477,11 +477,31 @@ public PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args) {
     if (args.urnLike != null) {
       exp = exp.like(EbeanAspectV2.URN_COLUMN, args.urnLike);
     }
+
+    int start = args.start;
+    if (args.urnBasedPagination) {
+      start = 0;
+      if (args.lastUrn != null && !args.lastUrn.isEmpty()) {
+        exp = exp.where().ge(EbeanAspectV2.URN_COLUMN, args.lastUrn);
+
+        // To prevent processing the same aspect multiple times in a restore, it compares against
+        // the last aspect if the urn matches the last urn
+        if (args.lastAspect != null && !args.lastAspect.isEmpty()) {
+          exp =
+              exp.where()
+                  .and()
+                  .or()
+                  .ne(EbeanAspectV2.URN_COLUMN, args.lastUrn)
+                  .gt(EbeanAspectV2.ASPECT_COLUMN, args.lastAspect);
+        }
+      }
+    }
+
     return exp.orderBy()
         .asc(EbeanAspectV2.URN_COLUMN)
         .orderBy()
         .asc(EbeanAspectV2.ASPECT_COLUMN)
-        .setFirstRow(args.start)
+        .setFirstRow(start)
         .setMaxRows(args.batchSize)
         .findPagedList();
   }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java
index d8fcbe0b7d44d..e50b44b7f0eca 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java
@@ -11,6 +11,9 @@ public class RestoreIndicesArgs implements Cloneable {
   public String aspectName;
   public String urn;
   public String urnLike;
+  public Boolean urnBasedPagination = false;
+  public String lastUrn = "";
+  public String lastAspect = "";

   @Override
   public RestoreIndicesArgs clone() {
@@ -51,4 +54,9 @@ public RestoreIndicesArgs setBatchSize(Integer batchSize) {
     }
     return this;
   }
+
+  public RestoreIndicesArgs setUrnBasedPagination(Boolean urnBasedPagination) {
+    this.urnBasedPagination = urnBasedPagination;
+    return this;
+  }
 }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/
.../src/datahub/cli/quickstart_versioning.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docker/quickstart/quickstart_version_mapping.yaml b/docker/quickstart/quickstart_version_mapping.yaml index 9948bd55fdc0b..b08cfda175aa9 100644 --- a/docker/quickstart/quickstart_version_mapping.yaml +++ b/docker/quickstart/quickstart_version_mapping.yaml @@ -23,7 +23,7 @@ quickstart_version_map: default: composefile_git_ref: master docker_tag: head - mysql_tag: 5.7 + mysql_tag: "5.7" # default: # Use this to pin default to a specific version. # composefile_git_ref: fd1bd51541a132017a648f4a2f037eec8f70ba26 # v0.10.0 + quickstart compose file fixes # docker_tag: v0.10.0 @@ -31,19 +31,19 @@ quickstart_version_map: head: composefile_git_ref: master docker_tag: head - mysql_tag: 5.7 + mysql_tag: "5.7" # v0.13.0 we upgraded MySQL image for EOL v0.13.0: composefile_git_ref: master docker_tag: head - mysql_tag: 8.2 + mysql_tag: "8.2" # v0.9.6 images contain security vulnerabilities v0.9.6: composefile_git_ref: v0.9.6.1 docker_tag: v0.9.6.1 - mysql_tag: 5.7 + mysql_tag: "5.7" # If stable is not defined the latest released version will be used. # stable: diff --git a/metadata-ingestion/src/datahub/cli/quickstart_versioning.py b/metadata-ingestion/src/datahub/cli/quickstart_versioning.py index be7439f330dfb..1c3ce93c1f788 100644 --- a/metadata-ingestion/src/datahub/cli/quickstart_versioning.py +++ b/metadata-ingestion/src/datahub/cli/quickstart_versioning.py @@ -94,7 +94,7 @@ def fetch_quickstart_config(cls) -> "QuickstartVersionMappingConfig": try: release = cls._fetch_latest_version() config.quickstart_version_map["stable"] = QuickstartExecutionPlan( - composefile_git_ref=release, docker_tag=release, mysql_tag=release + composefile_git_ref=release, docker_tag=release, mysql_tag="5.7" ) except Exception: click.echo( @@ -123,7 +123,7 @@ def get_quickstart_execution_plan( QuickstartExecutionPlan( composefile_git_ref=composefile_git_ref, docker_tag=docker_tag, - mysql_tag=mysql_tag, + mysql_tag=str(mysql_tag), ), ) # new CLI version is downloading the composefile corresponding to the requested version From 76be5173b292b936216aad1409090b70615a78f8 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Tue, 19 Dec 2023 15:52:59 -0600 Subject: [PATCH 121/263] fix(docker): fix frontend dev docker path (#9488) --- docker/docker-compose.dev.yml | 2 +- docker/profiles/docker-compose.frontend.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index 774c4e17bee21..a69fb977a3417 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -24,7 +24,7 @@ services: - JAVA_TOOL_OPTIONS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002 - DATAHUB_ANALYTICS_ENABLED=${DATAHUB_ANALYTICS_ENABLED:-true} volumes: - - ../datahub-frontend/build/stage/playBinary:/datahub-frontend + - ../datahub-frontend/build/stage/main:/datahub-frontend datahub-gms: image: linkedin/datahub-gms:debug ports: diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml index 2b82829648dac..80cb4e7b4b596 100644 --- a/docker/profiles/docker-compose.frontend.yml +++ b/docker/profiles/docker-compose.frontend.yml @@ -21,7 +21,7 @@ x-datahub-frontend-service-dev: &datahub-frontend-service-dev JAVA_TOOL_OPTIONS: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002 DATAHUB_ANALYTICS_ENABLED: 
${DATAHUB_ANALYTICS_ENABLED:-true}
   volumes:
-    - ../../datahub-frontend/build/stage/playBinary:/datahub-frontend
+    - ../../datahub-frontend/build/stage/main:/datahub-frontend

 services:
   frontend-quickstart:

From 16d3df620f07c4d41118be9c8f38dc0cf46df76f Mon Sep 17 00:00:00 2001
From: Salman-Apptware <101426513+Salman-Apptware@users.noreply.github.com>
Date: Wed, 20 Dec 2023 16:32:52 +0530
Subject: [PATCH 122/263] fix(ui): Tab doesn't represent the page you are on
 for non-data asset pages (#9468)

---
 datahub-web-react/src/app/AppProviders.tsx    | 13 ++++----
 .../src/app/entity/group/GroupInfoSideBar.tsx | 17 +++++++++++
 .../src/app/entity/user/UserInfoSideBar.tsx   | 19 +++++++++++-
 .../src/app/search/SearchablePage.tsx         | 27 +++++++++++++++++
 .../src/app/shared/BrowserTabTitleContext.tsx | 30 +++++++++++++++++++
 5 files changed, 100 insertions(+), 6 deletions(-)
 create mode 100644 datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx

diff --git a/datahub-web-react/src/app/AppProviders.tsx b/datahub-web-react/src/app/AppProviders.tsx
index 81a8ddbfc9bac..00597e1cf7640 100644
--- a/datahub-web-react/src/app/AppProviders.tsx
+++ b/datahub-web-react/src/app/AppProviders.tsx
@@ -5,6 +5,7 @@ import UserContextProvider from './context/UserContextProvider';
 import QuickFiltersProvider from '../providers/QuickFiltersProvider';
 import SearchContextProvider from './search/context/SearchContextProvider';
 import EntityRegistryProvider from './EntityRegistryProvider';
+import { BrowserTitleProvider } from './shared/BrowserTabTitleContext';

 interface Props {
     children: React.ReactNode;
@@ -15,11 +16,13 @@ export default function AppProviders({ children }: Props) {
         <HelmetProvider>
             <CustomThemeProvider>
                 <UserContextProvider>
                     <EntityRegistryProvider>
-                        <QuickFiltersProvider>
-                            <SearchContextProvider>
-                                {children}
-                            </SearchContextProvider>
-                        </QuickFiltersProvider>
+                        <BrowserTitleProvider>
+                            <QuickFiltersProvider>
+                                <SearchContextProvider>
+                                    {children}
+                                </SearchContextProvider>
+                            </QuickFiltersProvider>
+                        </BrowserTitleProvider>
                     </EntityRegistryProvider>
                 </UserContextProvider>
diff --git a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx
index 07885a4d0f630..044b09dc185e5 100644
--- a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx
+++ b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx
@@ -21,6 +21,7 @@ import {
 } from '../shared/SidebarStyledComponents';
 import GroupMembersSideBarSection from './GroupMembersSideBarSection';
 import { useUserContext } from '../../context/useUserContext';
+import { useBrowserTitle } from '../../shared/BrowserTabTitleContext';
 import StripMarkdownText, { removeMarkdown } from '../shared/components/styled/StripMarkdownText';
 import { Editor } from '../shared/tabs/Documentation/components/editor/Editor';
 import EditGroupDescriptionModal from './EditGroupDescriptionModal';
@@ -157,6 +158,22 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) {
     const { url } = useRouteMatch();
     const history = useHistory();

+    const { updateTitle } = useBrowserTitle();
+
+    useEffect(() => {
+        // Update the browser tab title once this group's name is available
+        // (runs on mount and whenever the name changes)
+        if (name) {
+            updateTitle(`Group | ${name}`);
+        }
+        // Clean up the title when the component unmounts
+        return () => {
+            if (name) { // the condition guards against clearing the title on unrelated re-renders
+                updateTitle('');
+            }
+        };
+    }, [name, updateTitle]);
+
     /* eslint-disable @typescript-eslint/no-unused-vars */
     const [editGroupModal, showEditGroupModal] = useState(false);
     const me = useUserContext();
diff --git a/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx b/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx
index c01dd3a635924..71bfbfcd49a16 100644
--- a/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx
+++ 
b/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx @@ -1,5 +1,5 @@ import { Divider, message, Space, Button, Typography, Tag } from 'antd'; -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { EditOutlined, MailOutlined, PhoneOutlined, SlackOutlined } from '@ant-design/icons'; import { useUpdateCorpUserPropertiesMutation } from '../../../graphql/user.generated'; import { EntityRelationship, DataHubRole } from '../../../types.generated'; @@ -21,6 +21,7 @@ import { import EntityGroups from '../shared/EntityGroups'; import { mapRoleIcon } from '../../identity/user/UserUtils'; import { useUserContext } from '../../context/useUserContext'; +import { useBrowserTitle } from '../../shared/BrowserTabTitleContext'; const { Paragraph } = Typography; @@ -61,6 +62,22 @@ export default function UserInfoSideBar({ sideBarData, refetch }: Props) { const me = useUserContext(); const isProfileOwner = me?.user?.urn === urn; + const { updateTitle } = useBrowserTitle(); + + useEffect(()=>{ + // You can use the title and updateTitle function here + // For example, updating the title when the component mounts + if(name){ + updateTitle(`User | ${name}`); + } + // // Don't forget to clean up the title when the component unmounts + return () => { + if(name){ // added to condition for rerendering issue + updateTitle(''); + } + }; + }, [name, updateTitle]); + const getEditModalData = { urn, name, diff --git a/datahub-web-react/src/app/search/SearchablePage.tsx b/datahub-web-react/src/app/search/SearchablePage.tsx index 9d02d85d3634c..53dfc866b9b64 100644 --- a/datahub-web-react/src/app/search/SearchablePage.tsx +++ b/datahub-web-react/src/app/search/SearchablePage.tsx @@ -3,6 +3,7 @@ import { useHistory, useLocation } from 'react-router'; import { debounce } from 'lodash'; import * as QueryString from 'query-string'; import { useTheme } from 'styled-components'; +import { Helmet } from 'react-helmet-async'; import { SearchHeader } from './SearchHeader'; import { useEntityRegistry } from '../useEntityRegistry'; import { EntityType, FacetFilterInput } from '../../types.generated'; @@ -19,6 +20,7 @@ import { useQuickFiltersContext } from '../../providers/QuickFiltersContext'; import { useUserContext } from '../context/useUserContext'; import { useSelectedSortOption } from './context/SearchContext'; import { HALF_SECOND_IN_MS } from '../entity/shared/tabs/Dataset/Queries/utils/constants'; +import { useBrowserTitle } from '../shared/BrowserTabTitleContext'; const styles = { children: { @@ -68,6 +70,28 @@ export const SearchablePage = ({ onSearch, onAutoComplete, children }: Props) => const { user } = userContext; const viewUrn = userContext.localState?.selectedViewUrn; + const { title, updateTitle } = useBrowserTitle(); + + useEffect(() => { + // Update the title only if it's not already set and there is a valid pathname + if (!title && location.pathname) { + const formattedPath = location.pathname + .split('/') + .filter(word => word !== '') + .map(word => word.charAt(0).toUpperCase() + word.slice(1)) + .join(' | '); + + if (formattedPath) { + return updateTitle(formattedPath); + } + } + + // Clean up the title when the component unmounts + return () => { + updateTitle(''); + }; + }, [location.pathname, title, updateTitle]); + useEffect(() => { if (suggestionsData !== undefined) { setNewSuggestionData(suggestionsData); @@ -140,6 +164,9 @@ export const SearchablePage = ({ onSearch, onAutoComplete, children }: Props) => 
                authenticatedUserPictureLink={user?.editableProperties?.pictureLink}
                entityRegistry={entityRegistry}
            />
+            <Helmet>
+                <title>{title}</title>
+            </Helmet>
            {children}
        );
diff --git a/datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx b/datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx
new file mode 100644
index 0000000000000..284e2771124c8
--- /dev/null
+++ b/datahub-web-react/src/app/shared/BrowserTabTitleContext.tsx
@@ -0,0 +1,30 @@
+import React, { createContext, ReactNode, useContext } from 'react';
+
+interface BrowserTitleContextProps {
+    title: string;
+    updateTitle: (newTitle: string) => void;
+}
+
+const BrowserTitleContext = createContext<BrowserTitleContextProps | undefined>(undefined);
+
+export const BrowserTitleProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
+    const [title, setTitle] = React.useState('');
+
+    const updateTitle = (newTitle: string) => {
+        setTitle(newTitle);
+    };
+
+    return (
+        <BrowserTitleContext.Provider value={{ title, updateTitle }}>
+            {children}
+        </BrowserTitleContext.Provider>
+    );
+};
+
+export const useBrowserTitle = () => {
+    const context = useContext(BrowserTitleContext);
+    if (!context) {
+        throw new Error('useBrowserTitle must be used within a BrowserTitleProvider');
+    }
+    return context;
+};

From c8e59aabedb9a6f43f4bcfbf20bdffad6abc85d5 Mon Sep 17 00:00:00 2001
From: noggi
Date: Wed, 20 Dec 2023 12:33:23 -0800
Subject: [PATCH 123/263] Do not sync demo in downstream repos (#9493)

---
 .github/workflows/docker-unified.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml
index 169a86000adcc..7cef38b1cd47c 100644
--- a/.github/workflows/docker-unified.yml
+++ b/.github/workflows/docker-unified.yml
@@ -911,13 +911,13 @@ jobs:
      ]
    steps:
      - uses: aws-actions/configure-aws-credentials@v1
-        if: ${{ needs.setup.outputs.publish != 'false' }}
+        if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }}
        with:
          aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }}
          aws-region: us-west-2
      - uses: isbang/sqs-action@v0.2.0
-        if: ${{ needs.setup.outputs.publish != 'false' }}
+        if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }}
        with:
          sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }}
          message: '{ "command": "git-sync", "args" : {"repoName": "${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}'

From bf813d1d24107d858260dc2852489e034eb4cf8c Mon Sep 17 00:00:00 2001
From: Harshal Sheth
Date: Wed, 20 Dec 2023 15:49:03 -0500
Subject: [PATCH 124/263] fix(ingest): update ingest_stats event with transformer types (#9487)

---
 metadata-ingestion/src/datahub/ingestion/run/pipeline.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py
index 25e17d692109a..d7c70dbea0b14 100644
--- a/metadata-ingestion/src/datahub/ingestion/run/pipeline.py
+++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline.py
@@ -528,6 +528,9 @@ def log_ingestion_stats(self) -> None:
             {
                 "source_type": self.config.source.type,
                 "sink_type": self.config.sink.type,
+                "transformer_types": [
+                    transformer.type for transformer in self.config.transformers or []
+                ],
                 "records_written": stats.discretize(
                     self.sink.get_report().total_records_written
                 ),

From 50be329492048534cb83c6f81bad87c5c49ee05c Mon Sep 17 00:00:00 2001
From: Sumit Patil
<91715217+sumitappt@users.noreply.github.com> Date: Thu, 21 Dec 2023 13:24:33 +0530 Subject: [PATCH 125/263] feat(ui/glossary): Keep the same tab selected when browsing Glossary (#9469) --- .../shared/EntityDropdown/EntityDropdown.tsx | 1 + .../containers/profile/header/EntityTabs.tsx | 1 + .../entity/shared/containers/profile/utils.ts | 16 ++++++++ .../app/glossary/GlossaryBrowser/NodeItem.tsx | 2 +- .../app/glossary/GlossaryBrowser/TermItem.tsx | 9 ++++- .../e2e/glossary/glossary_navigation.js | 38 +++++++++++++++++++ 6 files changed, 64 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx index 8d7f1cca9c1cb..664a77a731d34 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx @@ -180,6 +180,7 @@ function EntityDropdown(props: Props) { )} {menuItems.has(EntityMenuItems.ADD_TERM) && ( setIsCreateTermModalVisible(true)} diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index 58693eca8af0e..25e044259f240 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -39,6 +39,7 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { return ( ( - + ))} )} diff --git a/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx b/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx index 6980c15a1c256..56495b53eded3 100644 --- a/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx +++ b/datahub-web-react/src/app/glossary/GlossaryBrowser/TermItem.tsx @@ -5,6 +5,7 @@ import { useEntityRegistry } from '../../useEntityRegistry'; import { ANTD_GRAY } from '../../entity/shared/constants'; import { ChildGlossaryTermFragment } from '../../../graphql/glossaryNode.generated'; import { useGlossaryEntityData } from '../../entity/shared/GlossaryEntityContext'; +import { useGlossaryActiveTabPath } from '../../entity/shared/containers/profile/utils'; const TermWrapper = styled.div` font-weight: normal; @@ -47,13 +48,15 @@ interface Props { term: ChildGlossaryTermFragment; isSelecting?: boolean; selectTerm?: (urn: string, displayName: string) => void; + includeActiveTabPath?: boolean; } function TermItem(props: Props) { - const { term, isSelecting, selectTerm } = props; + const { term, isSelecting, selectTerm, includeActiveTabPath } = props; const { entityData } = useGlossaryEntityData(); const entityRegistry = useEntityRegistry(); + const activeTabPath = useGlossaryActiveTabPath(); function handleSelectTerm() { if (selectTerm) { @@ -68,7 +71,9 @@ function TermItem(props: Props) { {!isSelecting && ( {entityRegistry.getDisplayName(term.type, isOnEntityPage ? 
entityData : term)} diff --git a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js index 7ddf36aa87c2d..dd3b0a567c75f 100644 --- a/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js +++ b/smoke-test/tests/cypress/cypress/e2e/glossary/glossary_navigation.js @@ -1,4 +1,5 @@ const glossaryTerm = "CypressGlosssaryNavigationTerm"; +const glossarySecondTerm = "CypressGlossarySecondTerm"; const glossaryTermGroup = "CypressGlosssaryNavigationGroup"; const glossaryParentGroup = "CypressNode"; @@ -30,6 +31,39 @@ describe("glossary sidebar navigation test", () => { cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryTermGroup).click().wait(3000); cy.get('*[class^="GlossaryEntitiesList"]').contains(glossaryTerm).should("be.visible"); + // Create another term and move it to the same term group + cy.clickOptionWithText(glossaryTermGroup); + cy.openThreeDotDropdown(); + cy.clickOptionWithTestId("entity-menu-add-term-button"); + + // Wait for the create term modal to be visible + cy.waitTextVisible("Create Glossary Term"); + cy.enterTextInTestId("create-glossary-entity-modal-name", glossarySecondTerm); + cy.clickOptionWithTestId("glossary-entity-modal-create-button"); + + // Wait for the new term to be visible in the sidebar + cy.clickOptionWithText(glossarySecondTerm).wait(3000); + + // Move the term to the created term group + cy.openThreeDotDropdown(); + cy.clickOptionWithTestId("entity-menu-move-button"); + cy.get('[data-testid="move-glossary-entity-modal"]').contains(glossaryTermGroup).click({ force: true }); + cy.get('[data-testid="move-glossary-entity-modal"]').contains(glossaryTermGroup).should("be.visible"); + cy.clickOptionWithTestId("glossary-entity-modal-move-button"); + cy.waitTextVisible("Moved Glossary Term!"); + + // Ensure the new term is under the parent term group in the navigation sidebar + cy.get('[data-testid="glossary-browser-sidebar"]').contains(glossaryTermGroup).click(); + cy.get('*[class^="GlossaryEntitiesList"]').contains(glossarySecondTerm).should("be.visible"); + + + // Switch between terms and ensure the "Properties" tab is active + cy.clickOptionWithText(glossaryTerm); + cy.get('[data-testid="entity-tab-headers-test-id"]').contains("Properties").click({ force: true }); + cy.get('[data-node-key="Properties"]').contains("Properties").should("have.attr", "aria-selected", "true"); + cy.clickOptionWithText(glossarySecondTerm); + cy.get('[data-node-key="Properties"]').contains("Properties").should("have.attr", "aria-selected", "true"); + // Move a term group from the root level to be under a parent term group cy.goToGlossaryList(); cy.clickOptionWithText(glossaryTermGroup); @@ -52,6 +86,10 @@ describe("glossary sidebar navigation test", () => { cy.clickOptionWithText(glossaryTerm).wait(3000); cy.deleteFromDropdown(); cy.waitTextVisible("Deleted Glossary Term!"); + cy.clickOptionWithText(glossaryTermGroup); + cy.clickOptionWithText(glossarySecondTerm).wait(3000); + cy.deleteFromDropdown(); + cy.waitTextVisible("Deleted Glossary Term!"); cy.clickOptionWithText(glossaryParentGroup); cy.clickOptionWithText(glossaryTermGroup).wait(3000); cy.deleteFromDropdown(); From 80fb145a7b85b323f339d7901658dd9fde5bd4db Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 21 Dec 2023 17:57:41 +0530 Subject: [PATCH 126/263] style(search): Tag overflow add padding (#9497) --- datahub-web-react/src/app/preview/DefaultPreviewCard.tsx 
| 1 + 1 file changed, 1 insertion(+) diff --git a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx index 36c4c020e7131..a6d8422f827d5 100644 --- a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx +++ b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx @@ -114,6 +114,7 @@ const TagContainer = styled.div` margin-left: 0px; margin-top: 3px; flex-wrap: wrap; + margin-right: 8px; `; const TagSeparator = styled.div` From a49a435eef92b20cdc9878c8189b8ca0288e8b7f Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Thu, 21 Dec 2023 19:38:46 +0530 Subject: [PATCH 127/263] feat(analytics): change MAU chart to be until last month (#9499) --- .../datahub/graphql/analytics/resolver/GetChartsResolver.java | 3 ++- .../main/java/com/linkedin/datahub/graphql/util/DateUtil.java | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index 3f635872747a5..6ba3c5090f1c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -91,6 +91,7 @@ private List getProductAnalyticsCharts(Authentication authentica final List charts = new ArrayList<>(); DateUtil dateUtil = new DateUtil(); final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek(); + final DateTime startOfThisMonth = dateUtil.getStartOfThisMonth(); final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth(); final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange(); @@ -103,7 +104,7 @@ private List getProductAnalyticsCharts(Authentication authentica charts.add( getActiveUsersTimeSeriesChart( startOfNextMonth.minusMonths(12), - startOfNextMonth.minusMillis(1), + startOfThisMonth.minusMillis(1), "Monthly Active Users", DateInterval.MONTH)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java index 4b837605d4e31..677ad8afbaca3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java @@ -13,6 +13,10 @@ public DateTime getStartOfNextWeek() { return setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1)); } + public DateTime getStartOfThisMonth() { + return setTimeToZero(getNow().withDayOfMonth(1)); + } + public DateTime getStartOfNextMonth() { return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1)); } From 55cb56821c00ec993ee5a4c560d7b49d8d71258b Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Thu, 21 Dec 2023 10:33:25 -0600 Subject: [PATCH 128/263] fix(kafka): fix infinite deserialization logging (#9494) --- docker/docker-compose-without-neo4j.yml | 2 ++ ...docker-compose.consumers-without-neo4j.yml | 3 ++ docker/docker-compose.consumers.yml | 3 ++ docker/docker-compose.dev.yml | 1 + docker/docker-compose.yml | 2 ++ .../docker-compose-m1.quickstart.yml | 1 + ...er-compose-without-neo4j-m1.quickstart.yml | 1 + ...ocker-compose-without-neo4j.quickstart.yml | 1 + ...ose.consumers-without-neo4j.quickstart.yml | 2 ++ .../docker-compose.consumers.quickstart.yml | 2 ++ 
.../quickstart/docker-compose.quickstart.yml | 1 + .../config/kafka/ConsumerConfiguration.java | 1 + .../src/main/resources/application.yml | 1 + .../kafka/KafkaEventConsumerFactory.java | 30 ++++++++++++++++--- 14 files changed, 47 insertions(+), 4 deletions(-) diff --git a/docker/docker-compose-without-neo4j.yml b/docker/docker-compose-without-neo4j.yml index 6191994eaa1ea..0d58a1d91b70b 100644 --- a/docker/docker-compose-without-neo4j.yml +++ b/docker/docker-compose-without-neo4j.yml @@ -43,6 +43,8 @@ services: context: ../ dockerfile: docker/datahub-gms/Dockerfile env_file: datahub-gms/env/docker-without-neo4j.env + environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} healthcheck: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 90s diff --git a/docker/docker-compose.consumers-without-neo4j.yml b/docker/docker-compose.consumers-without-neo4j.yml index 8228951d9385f..f1be585232a1a 100644 --- a/docker/docker-compose.consumers-without-neo4j.yml +++ b/docker/docker-compose.consumers-without-neo4j.yml @@ -15,6 +15,8 @@ services: context: ../ dockerfile: docker/datahub-mae-consumer/Dockerfile env_file: datahub-mae-consumer/env/docker-without-neo4j.env + environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} datahub-mce-consumer: container_name: datahub-mce-consumer hostname: datahub-mce-consumer @@ -28,3 +30,4 @@ services: environment: - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} diff --git a/docker/docker-compose.consumers.yml b/docker/docker-compose.consumers.yml index 2d37094035859..8d331cea2f0b9 100644 --- a/docker/docker-compose.consumers.yml +++ b/docker/docker-compose.consumers.yml @@ -15,6 +15,8 @@ services: context: ../ dockerfile: docker/datahub-mae-consumer/Dockerfile env_file: datahub-mae-consumer/env/docker.env + environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} depends_on: neo4j: condition: service_healthy @@ -36,6 +38,7 @@ services: - NEO4J_USERNAME=neo4j - NEO4J_PASSWORD=datahub - GRAPH_SERVICE_IMPL=neo4j + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} depends_on: neo4j: condition: service_healthy diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index a69fb977a3417..7067b68fba3f9 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -45,6 +45,7 @@ services: - SEARCH_SERVICE_ENABLE_CACHE=false - LINEAGE_SEARCH_CACHE_ENABLED=false - SHOW_BROWSE_V2=true + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} volumes: - ./datahub-gms/start.sh:/datahub/datahub-gms/scripts/start.sh - ./datahub-gms/jetty.xml:/datahub/datahub-gms/scripts/jetty.xml diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 95f56fe47e3cc..146055830d04e 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -36,6 +36,8 @@ services: container_name: datahub-gms hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 
build: diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 7b7ca4052f324..8b87001915283 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index 53dacaf6ef63b..5373e93da6bcb 100644 --- a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 1ca91aa19206d..51a40395e3459 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml index d05933df96a43..4ed57dca1f080 100644 --- a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml @@ -6,6 +6,7 @@ services: datahub-mae-consumer: container_name: datahub-mae-consumer environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -44,6 +45,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose.consumers.quickstart.yml b/docker/quickstart/docker-compose.consumers.quickstart.yml index f0bd3a0f927c8..ba8432d8a89af 100644 --- a/docker/quickstart/docker-compose.consumers.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers.quickstart.yml @@ -9,6 +9,7 @@ services: neo4j: condition: service_healthy environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - 
DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -54,6 +55,7 @@ services: - GRAPH_SERVICE_IMPL=neo4j - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=true diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index c77b4418b6f36..56071cfe1e9e6 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -97,6 +97,7 @@ services: - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java index b505674f2ed9c..61b9d5c816790 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java @@ -6,4 +6,5 @@ public class ConsumerConfiguration { private int maxPartitionFetchBytes; + private boolean stopOnDeserializationError; } diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 0ea6b8712953e..36498f7c45fea 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -236,6 +236,7 @@ kafka: maxRequestSize: ${KAFKA_PRODUCER_MAX_REQUEST_SIZE:5242880} # the max bytes sent by the producer, also see kafka-setup MAX_MESSAGE_BYTES for matching value consumer: maxPartitionFetchBytes: ${KAFKA_CONSUMER_MAX_PARTITION_FETCH_BYTES:5242880} # the max bytes consumed per partition + stopOnDeserializationError: ${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:true} # Stops kafka listener container on deserialization error, allows user to fix problems before moving past problematic offset. 
If false will log and move forward past the offset schemaRegistry: type: ${SCHEMA_REGISTRY_TYPE:KAFKA} # INTERNAL or KAFKA or AWS_GLUE url: ${KAFKA_SCHEMAREGISTRY_URL:http://localhost:8081} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java index 2a6338ac15e93..4c0308546d857 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java @@ -21,6 +21,11 @@ import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; import org.springframework.kafka.config.KafkaListenerContainerFactory; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; +import org.springframework.kafka.listener.CommonContainerStoppingErrorHandler; +import org.springframework.kafka.listener.CommonDelegatingErrorHandler; +import org.springframework.kafka.listener.DefaultErrorHandler; +import org.springframework.kafka.support.serializer.DeserializationException; +import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer; @Slf4j @Configuration @@ -66,8 +71,6 @@ private static Map buildCustomizedProperties( SchemaRegistryConfig schemaRegistryConfig) { KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer(); - // Specify (de)serializers for record keys and for record values. - consumerProps.setKeyDeserializer(StringDeserializer.class); // Records will be flushed every 10 seconds. consumerProps.setEnableAutoCommit(true); consumerProps.setAutoCommitInterval(Duration.ofSeconds(10)); @@ -81,7 +84,13 @@ private static Map buildCustomizedProperties( Map customizedProperties = baseKafkaProperties.buildConsumerProperties(); customizedProperties.put( - ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer()); + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class); + customizedProperties.put( + ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, StringDeserializer.class); + customizedProperties.put( + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class); + customizedProperties.put( + ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, schemaRegistryConfig.getDeserializer()); // Override KafkaProperties with SchemaRegistryConfig only for non-empty values schemaRegistryConfig.getProperties().entrySet().stream() @@ -98,7 +107,8 @@ private static Map buildCustomizedProperties( @Bean(name = "kafkaEventConsumer") protected KafkaListenerContainerFactory createInstance( @Qualifier("kafkaConsumerFactory") - DefaultKafkaConsumerFactory kafkaConsumerFactory) { + DefaultKafkaConsumerFactory kafkaConsumerFactory, + @Qualifier("configurationProvider") ConfigurationProvider configurationProvider) { ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); @@ -106,6 +116,18 @@ protected KafkaListenerContainerFactory createInstance( factory.setContainerCustomizer(new ThreadPoolContainerCustomizer()); factory.setConcurrency(kafkaEventConsumerConcurrency); + /* Sets up a delegating error handler for Deserialization errors, if disabled will + use DefaultErrorHandler (does back-off retry and then logs) rather than stopping the container. 
Stopping the container + prevents lost messages until the error can be examined, disabling this will allow progress, but may lose data + */ + if (configurationProvider.getKafka().getConsumer().isStopOnDeserializationError()) { + CommonDelegatingErrorHandler delegatingErrorHandler = + new CommonDelegatingErrorHandler(new DefaultErrorHandler()); + delegatingErrorHandler.addDelegate( + DeserializationException.class, new CommonContainerStoppingErrorHandler()); + factory.setCommonErrorHandler(delegatingErrorHandler); + } + log.info( String.format( "Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s", From b80d2f471c559cd31cedb47a79cf07e779b065b9 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 21 Dec 2023 13:35:34 -0500 Subject: [PATCH 129/263] fix(ingest/fivetran): only materialize upstream lineage (#9490) --- .../ingestion/source/fivetran/fivetran.py | 19 +++++++---- .../integration/fivetran/fivetran_golden.json | 32 ------------------- 2 files changed, 12 insertions(+), 39 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py index c0395b4e4e796..12e362fa8a3e3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py +++ b/metadata-ingestion/src/datahub/ingestion/source/fivetran/fivetran.py @@ -7,6 +7,7 @@ DataProcessInstance, InstanceRunResult, ) +from datahub.emitter.mcp import MetadataChangeProposalWrapper from datahub.ingestion.api.common import PipelineContext from datahub.ingestion.api.decorators import ( SourceCapability, @@ -248,13 +249,17 @@ def _get_connector_workunits( # Map Fivetran's connector entity with Datahub's datajob entity datajob = self._generate_datajob_from_connector(connector) - for mcp in datajob.generate_mcp(materialize_iolets=True): - if mcp.entityType == "dataset" and isinstance(mcp.aspect, StatusClass): - # While we "materialize" the referenced datasets, we don't want them - # to be tracked by stateful ingestion. - yield mcp.as_workunit(is_primary_source=False) - else: - yield mcp.as_workunit() + for mcp in datajob.generate_mcp(materialize_iolets=False): + yield mcp.as_workunit() + + # Materialize the upstream referenced datasets. + # We assume that the downstreams are materialized by other ingestion sources. + for iolet in datajob.inlets: + # We don't want these to be tracked by stateful ingestion. 
+ yield MetadataChangeProposalWrapper( + entityUrn=str(iolet), + aspect=StatusClass(removed=False), + ).as_workunit(is_primary_source=False) # Map Fivetran's job/sync history entity with Datahub's data process entity for job in connector.jobs: diff --git a/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json b/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json index a72c960a72296..b8f05fa6e93aa 100644 --- a/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json +++ b/metadata-ingestion/tests/integration/fivetran/fivetran_golden.json @@ -178,38 +178,6 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.employee,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "powerbi-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:snowflake,test_database.postgres_public.company,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1654621200000, - "runId": "powerbi-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataJob", "entityUrn": "urn:li:dataJob:(urn:li:dataFlow:(fivetran,calendar_elected,PROD),calendar_elected)", From a18c72083d763b08282b67146881d4f918b257de Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 21 Dec 2023 13:50:39 -0500 Subject: [PATCH 130/263] feat(ingest): handle multiline string coercion (#9484) --- docs-website/download_historical_versions.py | 4 +- docs/developers.md | 6 +-- .../src/datahub/configuration/git.py | 12 +---- .../validate_multiline_string.py | 31 ++++++++++++ .../ingestion/source/bigquery_v2/lineage.py | 2 +- .../ingestion/source/looker/lookml_source.py | 7 ++- .../source_config/usage/bigquery_usage.py | 3 ++ .../src/datahub/utilities/logging_manager.py | 1 + .../unit/config/test_pydantic_validators.py | 50 +++++++++++++++---- 9 files changed, 86 insertions(+), 30 deletions(-) create mode 100644 metadata-ingestion/src/datahub/configuration/validate_multiline_string.py diff --git a/docs-website/download_historical_versions.py b/docs-website/download_historical_versions.py index 53ee9cf1e63ef..7493210ffa2a5 100644 --- a/docs-website/download_historical_versions.py +++ b/docs-website/download_historical_versions.py @@ -37,9 +37,9 @@ def fetch_urls( except Exception as e: if attempt < max_retries: print(f"Attempt {attempt + 1}/{max_retries}: {e}") - time.sleep(retry_delay) + time.sleep(retry_delay * 2**attempt) else: - print(f"Max retries reached. Unable to fetch data.") + print("Max retries reached. Unable to fetch data.") raise diff --git a/docs/developers.md b/docs/developers.md index 60d31f5e4523f..fe007a56ddc68 100644 --- a/docs/developers.md +++ b/docs/developers.md @@ -17,10 +17,8 @@ title: "Local Development" On macOS, these can be installed using [Homebrew](https://brew.sh/). 
```shell -# Install Java 8 and 11 -brew tap homebrew/cask-versions -brew install java11 -brew install --cask zulu8 +# Install Java +brew install openjdk@17 # Install Python brew install python@3.10 # you may need to add this to your PATH diff --git a/metadata-ingestion/src/datahub/configuration/git.py b/metadata-ingestion/src/datahub/configuration/git.py index a5f88744661a4..3c76c8da0d571 100644 --- a/metadata-ingestion/src/datahub/configuration/git.py +++ b/metadata-ingestion/src/datahub/configuration/git.py @@ -1,4 +1,3 @@ -import os import pathlib from typing import Any, Dict, Optional, Union @@ -6,6 +5,7 @@ from datahub.configuration.common import ConfigModel from datahub.configuration.validate_field_rename import pydantic_renamed_field +from datahub.configuration.validate_multiline_string import pydantic_multiline_string _GITHUB_PREFIX = "https://github.com/" _GITLAB_PREFIX = "https://gitlab.com/" @@ -92,15 +92,7 @@ class GitInfo(GitReference): description="The url to call `git clone` on. We infer this for github and gitlab repos, but it is required for other hosts.", ) - @validator("deploy_key_file") - def deploy_key_file_should_be_readable( - cls, v: Optional[FilePath] - ) -> Optional[FilePath]: - if v is not None: - # pydantic does existence checks, we just need to check if we can read it - if not os.access(v, os.R_OK): - raise ValueError(f"Unable to read deploy key file {v}") - return v + _fix_deploy_key_newlines = pydantic_multiline_string("deploy_key") @validator("deploy_key", pre=True, always=True) def deploy_key_filled_from_deploy_key_file( diff --git a/metadata-ingestion/src/datahub/configuration/validate_multiline_string.py b/metadata-ingestion/src/datahub/configuration/validate_multiline_string.py new file mode 100644 index 0000000000000..0baaf4f0264b9 --- /dev/null +++ b/metadata-ingestion/src/datahub/configuration/validate_multiline_string.py @@ -0,0 +1,31 @@ +from typing import Optional, Type, Union + +import pydantic + + +def pydantic_multiline_string(field: str) -> classmethod: + """If the field is present and contains an escaped newline, replace it with a real newline. + + This makes the assumption that the field value is never supposed to have a + r"\n" in it, and instead should only have newline characters. This is generally + a safe assumption for SSH keys and similar. + + The purpose of this helper is to make us more forgiving of small formatting issues + in recipes, without sacrificing correctness across the board. + """ + + def _validate_field( + cls: Type, v: Union[None, str, pydantic.SecretStr] + ) -> Optional[str]: + if v is not None: + if isinstance(v, pydantic.SecretStr): + v = v.get_secret_value() + v = v.replace(r"\n", "\n") + + return v + + # Hack: Pydantic maintains unique list of validators by referring its __name__. + # https://github.com/pydantic/pydantic/blob/v1.10.9/pydantic/main.py#L264 + # This hack ensures that multiple field deprecated do not overwrite each other. 
+ _validate_field.__name__ = f"{_validate_field.__name__}_{field}" + return pydantic.validator(field, pre=True, allow_reuse=True)(_validate_field) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py index eddd08c92b808..b44b06feb95af 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/lineage.py @@ -175,7 +175,7 @@ def make_lineage_edges_from_parsing_result( table_name = str( BigQueryTableRef.from_bigquery_table( BigqueryTableIdentifier.from_string_name( - DatasetUrn.create_from_string(table_urn).get_dataset_name() + DatasetUrn.from_string(table_urn).name ) ) ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py index b76bef49a7e6f..33079f3fd9ac1 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/lookml_source.py @@ -2060,10 +2060,9 @@ def get_internal_workunits(self) -> Iterable[MetadataWorkUnit]: # noqa: C901 ) logger.debug("Failed to process explore", exc_info=e) - processed_view_files = processed_view_map.get(model.connection) - if processed_view_files is None: - processed_view_map[model.connection] = set() - processed_view_files = processed_view_map[model.connection] + processed_view_files = processed_view_map.setdefault( + model.connection, set() + ) project_name = self.get_project_name(model_name) logger.debug(f"Model: {model_name}; Includes: {model.resolved_includes}") diff --git a/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py b/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py index 5eb9c83236e4f..13abe73cc4e09 100644 --- a/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source_config/usage/bigquery_usage.py @@ -11,6 +11,7 @@ from datahub.configuration.common import AllowDenyPattern, ConfigurationError from datahub.configuration.source_common import EnvConfigMixin from datahub.configuration.validate_field_removal import pydantic_removed_field +from datahub.configuration.validate_multiline_string import pydantic_multiline_string from datahub.ingestion.source.usage.usage_common import BaseUsageConfig from datahub.ingestion.source_config.bigquery import BigQueryBaseConfig @@ -44,6 +45,8 @@ class BigQueryCredential(ConfigModel): description="If not set it will be default to https://www.googleapis.com/robot/v1/metadata/x509/client_email", ) + _fix_private_key_newlines = pydantic_multiline_string("private_key") + @pydantic.root_validator(skip_on_failure=True) def validate_config(cls, values: Dict[str, Any]) -> Dict[str, Any]: if values.get("client_x509_cert_url") is None: diff --git a/metadata-ingestion/src/datahub/utilities/logging_manager.py b/metadata-ingestion/src/datahub/utilities/logging_manager.py index a8eacb0a9938d..62aa1ca7ab791 100644 --- a/metadata-ingestion/src/datahub/utilities/logging_manager.py +++ b/metadata-ingestion/src/datahub/utilities/logging_manager.py @@ -199,6 +199,7 @@ def configure_logging(debug: bool, log_file: Optional[str] = None) -> Iterator[N for handler in handlers: root_logger.removeHandler(handler) for lib in DATAHUB_PACKAGES: + lib_logger = logging.getLogger(lib) lib_logger.removeHandler(handler) lib_logger.propagate 
= True diff --git a/metadata-ingestion/tests/unit/config/test_pydantic_validators.py b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py index 399245736805c..f687a2776f6e2 100644 --- a/metadata-ingestion/tests/unit/config/test_pydantic_validators.py +++ b/metadata-ingestion/tests/unit/config/test_pydantic_validators.py @@ -1,12 +1,14 @@ from typing import Optional +import pydantic import pytest from pydantic import ValidationError -from datahub.configuration.common import ConfigModel +from datahub.configuration.common import ConfigModel, ConfigurationWarning from datahub.configuration.validate_field_deprecation import pydantic_field_deprecated from datahub.configuration.validate_field_removal import pydantic_removed_field from datahub.configuration.validate_field_rename import pydantic_renamed_field +from datahub.configuration.validate_multiline_string import pydantic_multiline_string from datahub.utilities.global_warning_util import ( clear_global_warnings, get_global_warnings, @@ -22,8 +24,9 @@ class TestModel(ConfigModel): v = TestModel.parse_obj({"b": "original"}) assert v.b == "original" - v = TestModel.parse_obj({"a": "renamed"}) - assert v.b == "renamed" + with pytest.warns(ConfigurationWarning, match="a is deprecated"): + v = TestModel.parse_obj({"a": "renamed"}) + assert v.b == "renamed" with pytest.raises(ValidationError): TestModel.parse_obj({"a": "foo", "b": "bar"}) @@ -44,9 +47,10 @@ class TestModel(ConfigModel): assert v.b == "original" assert v.b1 == "original" - v = TestModel.parse_obj({"a": "renamed", "a1": "renamed"}) - assert v.b == "renamed" - assert v.b1 == "renamed" + with pytest.warns(ConfigurationWarning, match=r"a.* is deprecated"): + v = TestModel.parse_obj({"a": "renamed", "a1": "renamed"}) + assert v.b == "renamed" + assert v.b1 == "renamed" with pytest.raises(ValidationError): TestModel.parse_obj({"a": "foo", "b": "bar", "b1": "ok"}) @@ -74,8 +78,9 @@ class TestModel(ConfigModel): v = TestModel.parse_obj({"b": "original"}) assert v.b == "original" - v = TestModel.parse_obj({"b": "original", "r1": "removed", "r2": "removed"}) - assert v.b == "original" + with pytest.warns(ConfigurationWarning, match=r"r\d was removed"): + v = TestModel.parse_obj({"b": "original", "r1": "removed", "r2": "removed"}) + assert v.b == "original" def test_field_deprecated(): @@ -92,7 +97,10 @@ class TestModel(ConfigModel): v = TestModel.parse_obj({"b": "original"}) assert v.b == "original" - v = TestModel.parse_obj({"b": "original", "d1": "deprecated", "d2": "deprecated"}) + with pytest.warns(ConfigurationWarning, match=r"d\d.+ deprecated"): + v = TestModel.parse_obj( + {"b": "original", "d1": "deprecated", "d2": "deprecated"} + ) assert v.b == "original" assert v.d1 == "deprecated" assert v.d2 == "deprecated" @@ -100,3 +108,27 @@ class TestModel(ConfigModel): assert any(["d2 is deprecated" in warning for warning in get_global_warnings()]) clear_global_warnings() + + +def test_multiline_string_fixer(): + class TestModel(ConfigModel): + s: str + m: Optional[pydantic.SecretStr] = None + + _validate_s = pydantic_multiline_string("s") + _validate_m = pydantic_multiline_string("m") + + v = TestModel.parse_obj({"s": "foo\nbar"}) + assert v.s == "foo\nbar" + + v = TestModel.parse_obj({"s": "foo\\nbar"}) + assert v.s == "foo\nbar" + + v = TestModel.parse_obj({"s": "normal", "m": "foo\\nbar"}) + assert v.s == "normal" + assert v.m + assert v.m.get_secret_value() == "foo\nbar" + + v = TestModel.parse_obj({"s": "normal", "m": pydantic.SecretStr("foo\\nbar")}) + assert v.m + assert 
v.m.get_secret_value() == "foo\nbar" From cfc641f0d03408b85ae75c2e4830c5f307ce6a68 Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Thu, 21 Dec 2023 20:32:51 +0100 Subject: [PATCH 131/263] fix(ingest/databricks): Pinning databricks sdk to not fail on mypy issues (#9500) --- metadata-ingestion/setup.py | 4 +++- .../src/datahub/ingestion/source/aws/aws_common.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 13c9d3c99aaca..0dcac7a7fc1b4 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -274,7 +274,9 @@ databricks = { # 0.1.11 appears to have authentication issues with azure databricks - "databricks-sdk>=0.9.0", + # 0.16.0 added py.typed support which caused mypy to fail. The databricks sdk is pinned until we resolve mypy issues. + # https://github.com/databricks/databricks-sdk-py/pull/483 + "databricks-sdk>=0.9.0,<0.16.0", "pyspark~=3.3.0", "requests", # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes diff --git a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py index 0fb211a5d7b16..421991a0966c3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py +++ b/metadata-ingestion/src/datahub/ingestion/source/aws/aws_common.py @@ -167,7 +167,7 @@ def get_session(self) -> Session: return session - def get_credentials(self) -> Dict[str, str]: + def get_credentials(self) -> Dict[str, Optional[str]]: credentials = self.get_session().get_credentials() if credentials is not None: return { From ca518d6c78d994d59879b29f5afa8ffd1cff56df Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 21 Dec 2023 20:28:45 -0500 Subject: [PATCH 132/263] feat(ingest): remove librdkafka hacks (#9507) --- docker/datahub-ingestion-base/Dockerfile | 9 ----- .../base-requirements.txt | 2 +- metadata-ingestion/developing.md | 3 +- .../scripts/datahub_preflight.sh | 19 +++------- metadata-ingestion/scripts/install_deps.sh | 5 ++- metadata-ingestion/setup.py | 35 ++++--------------- 6 files changed, 14 insertions(+), 59 deletions(-) diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index 81fec61ea5073..558a5afe2c2cf 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -30,9 +30,6 @@ ARG DEBIAN_REPO_URL ARG PIP_MIRROR_URL ARG GITHUB_REPO_URL -ENV LIBRDKAFKA_VERSION=1.6.2 -ENV CONFLUENT_KAFKA_VERSION=1.6.1 - ENV DEBIAN_FRONTEND noninteractive # Optionally set corporate mirror for apk and pip @@ -40,7 +37,6 @@ RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi RUN apt-get update && apt-get install -y -qq \ - make \ python3-ldap \ libldap2-dev \ libsasl2-dev \ @@ -53,11 +49,6 @@ RUN apt-get update && apt-get install -y -qq \ unzip \ ldap-utils \ && python -m pip install --no-cache --upgrade pip wheel setuptools \ - && wget -q ${GITHUB_REPO_URL}/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz -O - | \ - tar -xz -C /root \ - && cd /root/librdkafka-${LIBRDKAFKA_VERSION} \ - && ./configure --prefix /usr && make && make install && cd .. 
&& rm -rf /root/librdkafka-${LIBRDKAFKA_VERSION} \ - && apt-get remove -y make \ && rm -rf /var/lib/apt/lists/* /var/cache/apk/* # compiled against newer golang for security fixes diff --git a/docker/datahub-ingestion-base/base-requirements.txt b/docker/datahub-ingestion-base/base-requirements.txt index eb082d50b3020..141382466ab9f 100644 --- a/docker/datahub-ingestion-base/base-requirements.txt +++ b/docker/datahub-ingestion-base/base-requirements.txt @@ -65,7 +65,7 @@ colorlog==4.8.0 comm==0.1.4 confection==0.1.3 ConfigUpdater==3.1.1 -confluent-kafka==1.8.2 +confluent-kafka==2.3.0 connexion==2.14.2 cron-descriptor==1.4.0 croniter==2.0.1 diff --git a/metadata-ingestion/developing.md b/metadata-ingestion/developing.md index d5f834936cdcf..d1eef21974f1d 100644 --- a/metadata-ingestion/developing.md +++ b/metadata-ingestion/developing.md @@ -11,8 +11,7 @@ Also take a look at the guide to [adding a source](./adding-source.md). 1. Python 3.7+ must be installed in your host environment. 2. Java8 (gradle won't work with newer versions) -3. On MacOS: `brew install librdkafka` -4. On Debian/Ubuntu: `sudo apt install librdkafka-dev python3-dev python3-venv` +4. On Debian/Ubuntu: `sudo apt install python3-dev python3-venv` 5. On Fedora (if using LDAP source integration): `sudo yum install openldap-devel` ### Set up your Python environment diff --git a/metadata-ingestion/scripts/datahub_preflight.sh b/metadata-ingestion/scripts/datahub_preflight.sh index e82be9d7b27b7..9676964f4d49d 100755 --- a/metadata-ingestion/scripts/datahub_preflight.sh +++ b/metadata-ingestion/scripts/datahub_preflight.sh @@ -45,8 +45,6 @@ arm64_darwin_preflight() { pip3 install --no-use-pep517 scipy fi - printf "✨ Setting up librdkafka prerequisities\n" - brew_install "librdkafka" "1.9.1" brew_install "openssl@1.1" brew install "postgresql@14" @@ -69,25 +67,16 @@ arm64_darwin_preflight() { export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1 export GRPC_PYTHON_BUILD_SYSTEM_ZLIB - CPPFLAGS="-I$(brew --prefix openssl@1.1)/include -I$(brew --prefix librdkafka)/include" + CPPFLAGS="-I$(brew --prefix openssl@1.1)/include" export CPPFLAGS - LDFLAGS="-L$(brew --prefix openssl@1.1)/lib -L$(brew --prefix librdkafka)/lib" + LDFLAGS="-L$(brew --prefix openssl@1.1)/lib" export LDFLAGS - CPATH="$(brew --prefix librdkafka)/include" - export CPATH - C_INCLUDE_PATH="$(brew --prefix librdkafka)/include" - export C_INCLUDE_PATH - LIBRARY_PATH="$(brew --prefix librdkafka)/lib" - export LIBRARY_PATH cat << EOF export GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=1 export GRPC_PYTHON_BUILD_SYSTEM_ZLIB=1 - export CPPFLAGS="-I$(brew --prefix openssl@1.1)/include -I$(brew --prefix librdkafka)/include" - export LDFLAGS="-L$(brew --prefix openssl@1.1)/lib -L$(brew --prefix librdkafka)/lib -L$(brew --prefix postgresql@14)/lib/postgresql@14" - export CPATH="$(brew --prefix librdkafka)/include" - export C_INCLUDE_PATH="$(brew --prefix librdkafka)/include" - export LIBRARY_PATH="$(brew --prefix librdkafka)/lib" + export CPPFLAGS="-I$(brew --prefix openssl@1.1)/include" + export LDFLAGS="-L$(brew --prefix openssl@1.1)/lib -L$(brew --prefix postgresql@14)/lib/postgresql@14" EOF diff --git a/metadata-ingestion/scripts/install_deps.sh b/metadata-ingestion/scripts/install_deps.sh index 7e6b6956d8bb8..bae0278056ebb 100755 --- a/metadata-ingestion/scripts/install_deps.sh +++ b/metadata-ingestion/scripts/install_deps.sh @@ -2,7 +2,8 @@ set -euxo pipefail if [ "$(uname)" == "Darwin" ]; then - brew install librdkafka + # None + true else sudo_cmd="" if 
command -v sudo; then @@ -11,7 +12,6 @@ else if command -v yum; then $sudo_cmd yum install -y \ - librdkafka-devel \ openldap-devel \ cyrus-sasl-devel \ openldap-clients \ @@ -21,7 +21,6 @@ else libxslt-devel else $sudo_cmd apt-get update && $sudo_cmd apt-get install -y \ - librdkafka-dev \ python3-ldap \ libldap2-dev \ libsasl2-dev \ diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 0dcac7a7fc1b4..c834700388d62 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -69,35 +69,12 @@ rest_common = {"requests", "requests_file"} kafka_common = { - # The confluent_kafka package provides a number of pre-built wheels for - # various platforms and architectures. However, it does not provide wheels - # for arm64 (including M1 Macs) or aarch64 (Docker's linux/arm64). This has - # remained an open issue on the confluent_kafka project for a year: - # - https://github.com/confluentinc/confluent-kafka-python/issues/1182 - # - https://github.com/confluentinc/confluent-kafka-python/pull/1161 - # - # When a wheel is not available, we must build from source instead. - # Building from source requires librdkafka to be installed. - # Most platforms have an easy way to install librdkafka: - # - MacOS: `brew install librdkafka` gives latest, which is 1.9.x or newer. - # - Debian: `apt install librdkafka` gives 1.6.0 (https://packages.debian.org/bullseye/librdkafka-dev). - # - Ubuntu: `apt install librdkafka` gives 1.8.0 (https://launchpad.net/ubuntu/+source/librdkafka). - # - # Moreover, confluent_kafka 1.9.0 introduced a hard compatibility break, and - # requires librdkafka >=1.9.0. As such, installing confluent_kafka 1.9.x on - # most arm64 Linux machines will fail, since it will build from source but then - # fail because librdkafka is too old. Hence, we have added an extra requirement - # that requires confluent_kafka<1.9.0 on non-MacOS arm64/aarch64 machines, which - # should ideally allow the builds to succeed in default conditions. We still - # want to allow confluent_kafka >= 1.9.0 for M1 Macs, which is why we can't - # broadly restrict confluent_kafka to <1.9.0. - # - # Note that this is somewhat of a hack, since we don't actually require the - # older version of confluent_kafka on those machines. Additionally, we will - # need monitor the Debian/Ubuntu PPAs and modify this rule if they start to - # support librdkafka >= 1.9.0. - "confluent_kafka>=1.5.0", - 'confluent_kafka<1.9.0; platform_system != "Darwin" and (platform_machine == "aarch64" or platform_machine == "arm64")', + # Note that confluent_kafka 1.9.0 introduced a hard compatibility break, and + # requires librdkafka >=1.9.0. This is generally not an issue, since they + # now provide prebuilt wheels for most platforms, including M1 Macs and + # Linux aarch64 (e.g. Docker's linux/arm64). Installing confluent_kafka + # from source remains a pain. + "confluent_kafka>=1.9.0", # We currently require both Avro libraries. The codegen uses avro-python3 (above) # schema parsers at runtime for generating and reading JSON into Python objects. 
# At the same time, we use Kafka's AvroSerializer, which internally relies on From be329986ab4b177899d16990fec31597ae765c58 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 21 Dec 2023 20:30:36 -0500 Subject: [PATCH 133/263] feat(ingest): rename custom package path from models to metadata (#9502) --- docs/modeling/extending-the-metadata-model.md | 8 ++++++++ metadata-ingestion/scripts/custom_package_codegen.py | 10 +++++----- metadata-ingestion/src/datahub/telemetry/telemetry.py | 5 +++++ 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/docs/modeling/extending-the-metadata-model.md b/docs/modeling/extending-the-metadata-model.md index dc4edd3306f95..8b308fb65d243 100644 --- a/docs/modeling/extending-the-metadata-model.md +++ b/docs/modeling/extending-the-metadata-model.md @@ -289,6 +289,14 @@ Alternatively, publish it to PyPI with `twine upload custom-package/my-company-d This will generate some Python build artifacts, which you can distribute within your team or publish to PyPI. The command output contains additional details and exact CLI commands you can use. +Once this package is installed, you can use the DataHub CLI as normal, and it will use your custom models. +You'll also be able to import those models, with IDE support, by changing your imports. + +```diff +- from datahub.metadata.schema_classes import DatasetPropertiesClass ++ from my_company_datahub_models.metadata.schema_classes import DatasetPropertiesClass +``` + diff --git a/metadata-ingestion/scripts/custom_package_codegen.py b/metadata-ingestion/scripts/custom_package_codegen.py index 3f59fdf2cc548..714728087d4b6 100644 --- a/metadata-ingestion/scripts/custom_package_codegen.py +++ b/metadata-ingestion/scripts/custom_package_codegen.py @@ -62,7 +62,7 @@ def generate( entity_registry=entity_registry, pdl_path=pdl_path, schemas_path=schemas_path, - outdir=str(src_path / "models"), + outdir=str(src_path / "metadata"), enable_custom_loader=False, ) @@ -91,13 +91,13 @@ def generate( ], package_data={{ "{python_package_name}": ["py.typed"], - "{python_package_name}.models": ["schema.avsc"], - "{python_package_name}.models.schemas": ["*.avsc"], + "{python_package_name}.metadata": ["schema.avsc"], + "{python_package_name}.metadata.schemas": ["*.avsc"], }}, entry_points={{ "datahub.custom_packages": [ - "models={python_package_name}.models.schema_classes", - "urns={python_package_name}.models._urns.urn_defs", + "models={python_package_name}.metadata.schema_classes", + "urns={python_package_name}.metadata._urns.urn_defs", ], }}, ) diff --git a/metadata-ingestion/src/datahub/telemetry/telemetry.py b/metadata-ingestion/src/datahub/telemetry/telemetry.py index 615be00d5455f..c399f2e1a27e5 100644 --- a/metadata-ingestion/src/datahub/telemetry/telemetry.py +++ b/metadata-ingestion/src/datahub/telemetry/telemetry.py @@ -16,6 +16,7 @@ from datahub.cli.cli_utils import DATAHUB_ROOT_FOLDER, get_boolean_env_variable from datahub.configuration.common import ExceptionWithProps from datahub.ingestion.graph.client import DataHubGraph +from datahub.metadata.schema_classes import _custom_package_path from datahub.utilities.perf_timer import PerfTimer logger = logging.getLogger(__name__) @@ -89,6 +90,10 @@ if any(var in os.environ for var in CI_ENV_VARS): ENV_ENABLED = False +# Also disable if a custom metadata model package is in use. 
+if _custom_package_path: + ENV_ENABLED = False + TIMEOUT = int(os.environ.get("DATAHUB_TELEMETRY_TIMEOUT", "10")) MIXPANEL_ENDPOINT = "track.datahubproject.io/mp" MIXPANEL_TOKEN = "5ee83d940754d63cacbf7d34daa6f44a" From 4fe1df6892a7e45fe59a26990b441a67dd4faf93 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Fri, 22 Dec 2023 11:57:24 +0530 Subject: [PATCH 134/263] feat(ui): edit link option (#9498) --- .../Documentation/components/LinkList.tsx | 119 ++++++++++++++++-- 1 file changed, 110 insertions(+), 9 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx index bcce994c3f0f8..1b5c3d54009da 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx @@ -1,14 +1,15 @@ -import React from 'react'; +import React, { useState } from 'react'; import { Link } from 'react-router-dom'; import styled from 'styled-components/macro'; -import { message, Button, List, Typography } from 'antd'; -import { LinkOutlined, DeleteOutlined } from '@ant-design/icons'; +import { message, Button, List, Typography, Modal, Form, Input } from 'antd'; +import { LinkOutlined, DeleteOutlined, EditOutlined } from '@ant-design/icons'; import { EntityType, InstitutionalMemoryMetadata } from '../../../../../../types.generated'; -import { useEntityData } from '../../../EntityContext'; +import { useEntityData, useMutationUrn } from '../../../EntityContext'; import { useEntityRegistry } from '../../../../../useEntityRegistry'; import { ANTD_GRAY } from '../../../constants'; import { formatDateString } from '../../../containers/profile/utils'; -import { useRemoveLinkMutation } from '../../../../../../graphql/mutations.generated'; +import { useAddLinkMutation, useRemoveLinkMutation } from '../../../../../../graphql/mutations.generated'; +import analytics, { EntityActionType, EventType } from '../../../../../analytics'; const LinkListItem = styled(List.Item)` border-radius: 5px; @@ -33,10 +34,15 @@ type LinkListProps = { }; export const LinkList = ({ refetch }: LinkListProps) => { - const { urn: entityUrn, entityData } = useEntityData(); + const [editModalVisble, setEditModalVisible] = useState(false); + const [linkDetails, setLinkDetails] = useState(undefined); + const { urn: entityUrn, entityData, entityType } = useEntityData(); const entityRegistry = useEntityRegistry(); const [removeLinkMutation] = useRemoveLinkMutation(); const links = entityData?.institutionalMemory?.elements || []; + const [form] = Form.useForm(); + const [addLinkMutation] = useAddLinkMutation(); + const mutationUrn = useMutationUrn(); const handleDeleteLink = async (metadata: InstitutionalMemoryMetadata) => { try { @@ -53,8 +59,98 @@ export const LinkList = ({ refetch }: LinkListProps) => { refetch?.(); }; + const handleEditLink = (metadata: InstitutionalMemoryMetadata) => { + form.setFieldsValue({ + url: metadata.url, + label: metadata.description, + }); + setLinkDetails(metadata); + setEditModalVisible(true); + }; + + const handleClose = () => { + form.resetFields(); + setEditModalVisible(false); + }; + + const handleEdit = async (formData: any) => { + if (!linkDetails) return; + try { + await removeLinkMutation({ + variables: { input: { linkUrl: linkDetails.url, resourceUrn: linkDetails.associatedUrn || entityUrn } }, + }); + await 
addLinkMutation({ + variables: { input: { linkUrl: formData.url, label: formData.label, resourceUrn: mutationUrn } }, + }); + + message.success({ content: 'Link Updated', duration: 2 }); + + analytics.event({ + type: EventType.EntityActionEvent, + entityType, + entityUrn: mutationUrn, + actionType: EntityActionType.UpdateLinks, + }); + + refetch?.(); + handleClose(); + } catch (e: unknown) { + message.destroy(); + + if (e instanceof Error) { + message.error({ content: `Error updating link: \n ${e.message || ''}`, duration: 2 }); + } + } + }; + return entityData ? ( <> + + Cancel + , + , + ]} + > +
+ + + + + + +
+
{links.length > 0 && ( { renderItem={(link) => ( handleDeleteLink(link)} type="text" shape="circle" danger> - - + <> + + + } > Date: Fri, 22 Dec 2023 02:18:22 -0500 Subject: [PATCH 135/263] feat(ingest): support CLL for redshift materialized views with auto refresh (#9508) --- metadata-ingestion/setup.py | 2 +- .../src/datahub/utilities/sqlglot_lineage.py | 122 ++++++++++++------ ...dshift_materialized_view_auto_refresh.json | 54 ++++++++ .../tests/unit/sql_parsing/test_sql_detach.py | 46 +++++++ .../unit/sql_parsing/test_sqlglot_lineage.py | 72 ++++------- 5 files changed, 207 insertions(+), 89 deletions(-) create mode 100644 metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json create mode 100644 metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index c834700388d62..4632c20cd3b96 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -98,7 +98,7 @@ sqlglot_lib = { # Using an Acryl fork of sqlglot. # https://github.com/tobymao/sqlglot/compare/main...hsheth2:sqlglot:hsheth?expand=1 - "acryl-sqlglot==19.0.2.dev10", + "acryl-sqlglot==20.4.1.dev14", } sql_common = ( diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index fc3efef2ba532..f84b3f8b94a2e 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -5,7 +5,7 @@ import logging import pathlib from collections import defaultdict -from typing import Any, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union import pydantic.dataclasses import sqlglot @@ -60,6 +60,8 @@ ), ) ) +# Quick check that the rules were loaded correctly. +assert 0 < len(RULES_BEFORE_TYPE_ANNOTATION) < len(sqlglot.optimizer.optimizer.RULES) class GraphQLSchemaField(TypedDict): @@ -150,12 +152,16 @@ class _TableName(_FrozenModel): def as_sqlglot_table(self) -> sqlglot.exp.Table: return sqlglot.exp.Table( - catalog=self.database, db=self.db_schema, this=self.table + catalog=sqlglot.exp.Identifier(this=self.database) + if self.database + else None, + db=sqlglot.exp.Identifier(this=self.db_schema) if self.db_schema else None, + this=sqlglot.exp.Identifier(this=self.table), ) def qualified( self, - dialect: str, + dialect: sqlglot.Dialect, default_db: Optional[str] = None, default_schema: Optional[str] = None, ) -> "_TableName": @@ -271,7 +277,9 @@ def make_from_error(cls, error: Exception) -> "SqlParsingResult": ) -def _parse_statement(sql: sqlglot.exp.ExpOrStr, dialect: str) -> sqlglot.Expression: +def _parse_statement( + sql: sqlglot.exp.ExpOrStr, dialect: sqlglot.Dialect +) -> sqlglot.Expression: statement: sqlglot.Expression = sqlglot.maybe_parse( sql, dialect=dialect, error_level=sqlglot.ErrorLevel.RAISE ) @@ -279,8 +287,7 @@ def _parse_statement(sql: sqlglot.exp.ExpOrStr, dialect: str) -> sqlglot.Express def _table_level_lineage( - statement: sqlglot.Expression, - dialect: str, + statement: sqlglot.Expression, dialect: sqlglot.Dialect ) -> Tuple[Set[_TableName], Set[_TableName]]: # Generate table-level lineage. 
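# (Illustration with a hypothetical statement: for
#  "INSERT INTO db.t1 SELECT * FROM db.t2", `modified` collects the write
#  target db.t1, while db.t2, which is only read, lands in the other
#  returned set of _TableNames.)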
modified = { @@ -482,6 +489,26 @@ def close(self) -> None: ] _SupportedColumnLineageTypesTuple = (sqlglot.exp.Subqueryable, sqlglot.exp.DerivedTable) +DIALECTS_WITH_CASE_INSENSITIVE_COLS = { + # Column identifiers are case-insensitive in BigQuery, so we need to + # do a normalization step beforehand to make sure it's resolved correctly. + "bigquery", + # Our snowflake source lowercases column identifiers, so we are forced + # to do fuzzy (case-insensitive) resolution instead of exact resolution. + "snowflake", + # Teradata column names are case-insensitive. + # A name, even when enclosed in double quotation marks, is not case sensitive. For example, CUSTOMER and Customer are the same. + # See more below: + # https://documentation.sas.com/doc/en/pgmsascdc/9.4_3.5/acreldb/n0ejgx4895bofnn14rlguktfx5r3.htm + "teradata", +} +DIALECTS_WITH_DEFAULT_UPPERCASE_COLS = { + # In some dialects, column identifiers are effectively case insensitive + # because they are automatically converted to uppercase. Most other systems + # automatically lowercase unquoted identifiers. + "snowflake", +} + class UnsupportedStatementTypeError(TypeError): pass @@ -495,8 +522,8 @@ class SqlUnderstandingError(Exception): # TODO: Break this up into smaller functions. def _column_level_lineage( # noqa: C901 statement: sqlglot.exp.Expression, - dialect: str, - input_tables: Dict[_TableName, SchemaInfo], + dialect: sqlglot.Dialect, + table_schemas: Dict[_TableName, SchemaInfo], output_table: Optional[_TableName], default_db: Optional[str], default_schema: Optional[str], @@ -515,19 +542,9 @@ def _column_level_lineage( # noqa: C901 column_lineage: List[_ColumnLineageInfo] = [] - use_case_insensitive_cols = dialect in { - # Column identifiers are case-insensitive in BigQuery, so we need to - # do a normalization step beforehand to make sure it's resolved correctly. - "bigquery", - # Our snowflake source lowercases column identifiers, so we are forced - # to do fuzzy (case-insensitive) resolution instead of exact resolution. - "snowflake", - # Teradata column names are case-insensitive. - # A name, even when enclosed in double quotation marks, is not case sensitive. For example, CUSTOMER and Customer are the same. - # See more below: - # https://documentation.sas.com/doc/en/pgmsascdc/9.4_3.5/acreldb/n0ejgx4895bofnn14rlguktfx5r3.htm - "teradata", - } + use_case_insensitive_cols = _is_dialect_instance( + dialect, DIALECTS_WITH_CASE_INSENSITIVE_COLS + ) sqlglot_db_schema = sqlglot.MappingSchema( dialect=dialect, @@ -537,14 +554,16 @@ def _column_level_lineage( # noqa: C901 table_schema_normalized_mapping: Dict[_TableName, Dict[str, str]] = defaultdict( dict ) - for table, table_schema in input_tables.items(): + for table, table_schema in table_schemas.items(): normalized_table_schema: SchemaInfo = {} for col, col_type in table_schema.items(): if use_case_insensitive_cols: col_normalized = ( # This is required to match Sqlglot's behavior. 
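# (Illustration: Snowflake is in DIALECTS_WITH_DEFAULT_UPPERCASE_COLS, so an
#  unquoted "Col1" is folded to "COL1" here, while BigQuery and Teradata fall
#  through to the lowercased "col1".)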
col.upper() - if dialect in {"snowflake"} + if _is_dialect_instance( + dialect, DIALECTS_WITH_DEFAULT_UPPERCASE_COLS + ) else col.lower() ) else: @@ -561,7 +580,7 @@ def _column_level_lineage( # noqa: C901 if use_case_insensitive_cols: def _sqlglot_force_column_normalizer( - node: sqlglot.exp.Expression, dialect: "sqlglot.DialectType" = None + node: sqlglot.exp.Expression, ) -> sqlglot.exp.Expression: if isinstance(node, sqlglot.exp.Column): node.this.set("quoted", False) @@ -572,9 +591,7 @@ def _sqlglot_force_column_normalizer( # "Prior to case normalization sql %s", # statement.sql(pretty=True, dialect=dialect), # ) - statement = statement.transform( - _sqlglot_force_column_normalizer, dialect, copy=False - ) + statement = statement.transform(_sqlglot_force_column_normalizer, copy=False) # logger.debug( # "Sql after casing normalization %s", # statement.sql(pretty=True, dialect=dialect), @@ -595,7 +612,8 @@ def _schema_aware_fuzzy_column_resolve( # Optimize the statement + qualify column references. logger.debug( - "Prior to qualification sql %s", statement.sql(pretty=True, dialect=dialect) + "Prior to column qualification sql %s", + statement.sql(pretty=True, dialect=dialect), ) try: # Second time running qualify, this time with: @@ -678,7 +696,7 @@ def _schema_aware_fuzzy_column_resolve( # Otherwise, we can't process it. continue - if dialect == "bigquery" and output_col.lower() in { + if _is_dialect_instance(dialect, "bigquery") and output_col.lower() in { "_partitiontime", "_partitiondate", }: @@ -923,7 +941,7 @@ def _translate_sqlglot_type( def _translate_internal_column_lineage( table_name_urn_mapping: Dict[_TableName, str], raw_column_lineage: _ColumnLineageInfo, - dialect: str, + dialect: sqlglot.Dialect, ) -> ColumnLineageInfo: downstream_urn = None if raw_column_lineage.downstream.table: @@ -956,18 +974,44 @@ def _translate_internal_column_lineage( ) -def _get_dialect(platform: str) -> str: +def _get_dialect_str(platform: str) -> str: # TODO: convert datahub platform names to sqlglot dialect if platform == "presto-on-hive": return "hive" - if platform == "mssql": + elif platform == "mssql": return "tsql" - if platform == "athena": + elif platform == "athena": return "trino" + elif platform == "mysql": + # In sqlglot v20+, MySQL is now case-sensitive by default, which is the + # default behavior on Linux. However, MySQL's default case sensitivity + # actually depends on the underlying OS. + # For us, it's simpler to just assume that it's case-insensitive, and + # let the fuzzy resolution logic handle it. 
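# (The ", normalization_strategy = lowercase" suffix is parsed by
#  sqlglot.Dialect.get_or_raise() in _get_dialect below as a dialect setting,
#  so the string still resolves to a MySQL dialect instance.)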
+ return "mysql, normalization_strategy = lowercase" else: return platform +def _get_dialect(platform: str) -> sqlglot.Dialect: + return sqlglot.Dialect.get_or_raise(_get_dialect_str(platform)) + + +def _is_dialect_instance( + dialect: sqlglot.Dialect, platforms: Union[str, Iterable[str]] +) -> bool: + if isinstance(platforms, str): + platforms = [platforms] + else: + platforms = list(platforms) + + dialects = [sqlglot.Dialect.get_or_raise(platform) for platform in platforms] + + if any(isinstance(dialect, dialect_class.__class__) for dialect_class in dialects): + return True + return False + + def _sqlglot_lineage_inner( sql: sqlglot.exp.ExpOrStr, schema_resolver: SchemaResolver, @@ -975,7 +1019,7 @@ def _sqlglot_lineage_inner( default_schema: Optional[str] = None, ) -> SqlParsingResult: dialect = _get_dialect(schema_resolver.platform) - if dialect == "snowflake": + if _is_dialect_instance(dialect, "snowflake"): # in snowflake, table identifiers must be uppercased to match sqlglot's behavior. if default_db: default_db = default_db.upper() @@ -1064,7 +1108,7 @@ def _sqlglot_lineage_inner( column_lineage = _column_level_lineage( select_statement, dialect=dialect, - input_tables=table_name_schema_mapping, + table_schemas=table_name_schema_mapping, output_table=downstream_table, default_db=default_db, default_schema=default_schema, @@ -1204,13 +1248,13 @@ def replace_cte_refs(node: sqlglot.exp.Expression) -> sqlglot.exp.Expression: full_new_name, dialect=dialect, into=sqlglot.exp.Table ) - # We expect node.parent to be a Table or Column. - # Either way, it should support catalog/db/name. parent = node.parent - if "catalog" in parent.arg_types: + # We expect node.parent to be a Table or Column, both of which support catalog/db/name. + # However, we check the parent's arg_types to be safe. 
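# (Illustration, mirroring the detach_ctes tests added further down: when
#  "__cte_0" is replaced by "my_db.my_schema.my_table", this sets
#  catalog="my_db" and db="my_schema" on the parent expression, and the
#  identifier node itself is renamed to "my_table".)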
+ if "catalog" in parent.arg_types and table_expr.catalog: parent.set("catalog", table_expr.catalog) - if "db" in parent.arg_types: + if "db" in parent.arg_types and table_expr.db: parent.set("db", table_expr.db) new_node = sqlglot.exp.Identifier(this=table_expr.name) diff --git a/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json new file mode 100644 index 0000000000000..fce65056a32f7 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/goldens/test_redshift_materialized_view_auto_refresh.json @@ -0,0 +1,54 @@ +{ + "query_type": "CREATE", + "in_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:redshift,customer,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:redshift,orders,PROD)" + ], + "out_tables": [ + "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)" + ], + "column_lineage": [ + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)", + "column": "cust_id", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,customer,PROD)", + "column": "cust_id" + } + ] + }, + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)", + "column": "first_name", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,customer,PROD)", + "column": "first_name" + } + ] + }, + { + "downstream": { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,mv_total_orders,PROD)", + "column": "total_amount", + "column_type": null, + "native_column_type": null + }, + "upstreams": [ + { + "table": "urn:li:dataset:(urn:li:dataPlatform:redshift,orders,PROD)", + "column": "amount" + } + ] + } + ] +} \ No newline at end of file diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py b/metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py new file mode 100644 index 0000000000000..c99b05c35e0f5 --- /dev/null +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sql_detach.py @@ -0,0 +1,46 @@ +from datahub.utilities.sqlglot_lineage import detach_ctes + + +def test_detach_ctes_simple(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "_my_cte_table"}, + ) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table ON table2.id = _my_cte_table.id" + ) + + +def test_detach_ctes_with_alias(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 AS tablealias ON table2.id = tablealias.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "_my_cte_table"}, + ) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table AS tablealias ON table2.id = tablealias.id" + ) + + +def test_detach_ctes_with_multipart_replacement(): + original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" + detached_expr = detach_ctes( + original, + platform="snowflake", + cte_mapping={"__cte_0": "my_db.my_schema.my_table"}, + 
) + detached = detached_expr.sql(dialect="snowflake") + + assert ( + detached + == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN my_db.my_schema.my_table ON table2.id = my_db.my_schema.my_table.id" + ) diff --git a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py index 7f69e358f8f11..eb1ba06669112 100644 --- a/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py +++ b/metadata-ingestion/tests/unit/sql_parsing/test_sqlglot_lineage.py @@ -3,59 +3,11 @@ import pytest from datahub.testing.check_sql_parser_result import assert_sql_result -from datahub.utilities.sqlglot_lineage import ( - _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT, - detach_ctes, -) +from datahub.utilities.sqlglot_lineage import _UPDATE_ARGS_NOT_SUPPORTED_BY_SELECT RESOURCE_DIR = pathlib.Path(__file__).parent / "goldens" -def test_detach_ctes_simple(): - original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" - detached_expr = detach_ctes( - original, - platform="snowflake", - cte_mapping={"__cte_0": "_my_cte_table"}, - ) - detached = detached_expr.sql(dialect="snowflake") - - assert ( - detached - == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table ON table2.id = _my_cte_table.id" - ) - - -def test_detach_ctes_with_alias(): - original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 AS tablealias ON table2.id = tablealias.id" - detached_expr = detach_ctes( - original, - platform="snowflake", - cte_mapping={"__cte_0": "_my_cte_table"}, - ) - detached = detached_expr.sql(dialect="snowflake") - - assert ( - detached - == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN _my_cte_table AS tablealias ON table2.id = tablealias.id" - ) - - -def test_detach_ctes_with_multipart_replacement(): - original = "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN __cte_0 ON table2.id = __cte_0.id" - detached_expr = detach_ctes( - original, - platform="snowflake", - cte_mapping={"__cte_0": "my_db.my_schema.my_table"}, - ) - detached = detached_expr.sql(dialect="snowflake") - - assert ( - detached - == "WITH __cte_0 AS (SELECT * FROM table1) SELECT * FROM table2 JOIN my_db.my_schema.my_table ON table2.id = my_db.my_schema.my_table.id" - ) - - def test_select_max(): # The COL2 should get normalized to col2. 
assert_sql_result( @@ -1023,3 +975,25 @@ def test_postgres_complex_update(): }, expected_file=RESOURCE_DIR / "test_postgres_complex_update.json", ) + + +def test_redshift_materialized_view_auto_refresh(): + # Example query from the redshift docs: https://docs.aws.amazon.com/prescriptive-guidance/latest/materialized-views-redshift/refreshing-materialized-views.html + assert_sql_result( + """ +CREATE MATERIALIZED VIEW mv_total_orders +AUTO REFRESH YES -- Add this clause to auto refresh the MV +AS + SELECT c.cust_id, + c.first_name, + sum(o.amount) as total_amount + FROM orders o + JOIN customer c + ON c.cust_id = o.customer_id + GROUP BY c.cust_id, + c.first_name; +""", + dialect="redshift", + expected_file=RESOURCE_DIR + / "test_redshift_materialized_view_auto_refresh.json", + ) From db55fadb734546b796352aeb38ec2719ce770cf9 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Fri, 22 Dec 2023 19:48:30 +0530 Subject: [PATCH 136/263] feat(ui): add custom cron option for UI based ingestion (#9510) --- .../source/builder/CreateScheduleStep.tsx | 38 ++++++++++++++----- .../source/builder/SelectTemplateStep.tsx | 4 +- 2 files changed, 32 insertions(+), 10 deletions(-) diff --git a/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx b/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx index 7a14b6a794189..3745ee0f44dc0 100644 --- a/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx +++ b/datahub-web-react/src/app/ingest/source/builder/CreateScheduleStep.tsx @@ -1,4 +1,4 @@ -import { Button, Form, Switch, Typography } from 'antd'; +import { Button, Checkbox, Form, Input, Switch, Typography } from 'antd'; import React, { useMemo, useState } from 'react'; import { Cron } from 'react-js-cron'; import 'react-js-cron/dist/styles.css'; @@ -31,6 +31,10 @@ const CronText = styled(Typography.Paragraph)` color: ${ANTD_GRAY[7]}; `; +const AdvancedCheckBox = styled(Typography.Text)` + margin-right: 10px; + margin-bottom: 8px; +`; const CronSuccessCheck = styled(CheckCircleOutlined)` color: ${REDESIGN_COLORS.BLUE}; margin-right: 4px; @@ -68,8 +72,8 @@ export const CreateScheduleStep = ({ state, updateState, goTo, prev }: StepProps const { schedule } = state; const interval = schedule?.interval?.replaceAll(', ', ' ') || DAILY_MIDNIGHT_CRON_INTERVAL; const timezone = schedule?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone; - const [scheduleEnabled, setScheduleEnabled] = useState(!!schedule); + const [advancedCronCheck, setAdvancedCronCheck] = useState(false); const [scheduleCronInterval, setScheduleCronInterval] = useState(interval); const [scheduleTimezone, setScheduleTimezone] = useState(timezone); @@ -137,13 +141,29 @@ export const CreateScheduleStep = ({ state, updateState, goTo, prev }: StepProps )}
Schedule}> - +
+ Advanced + setAdvancedCronCheck(event.target.checked)} + /> +
+ {advancedCronCheck ? ( + setScheduleCronInterval(e.target.value)} + /> + ) : ( + + )} {cronAsText.error && <>Invalid cron schedule. Cron must be of UNIX form:} {!cronAsText.text && ( diff --git a/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx b/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx index 8aaa4f3448686..6b771d459c4ef 100644 --- a/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx +++ b/datahub-web-react/src/app/ingest/source/builder/SelectTemplateStep.tsx @@ -70,7 +70,9 @@ export const SelectTemplateStep = ({ state, updateState, goTo, cancel, ingestion }; const filteredSources = ingestionSources.filter( - (source) => source.displayName.includes(searchFilter) || source.name.includes(searchFilter), + (source) => + source.displayName.toLocaleLowerCase().includes(searchFilter.toLocaleLowerCase()) || + source.name.toLocaleLowerCase().includes(searchFilter.toLocaleLowerCase()), ); return ( From 0d8568e087b5489b49161423ed299dec84e32f1e Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 22 Dec 2023 14:59:14 -0500 Subject: [PATCH 137/263] fix(ingest): update dbt type inference (#9512) --- .../integration/dbt/dbt_enabled_with_schemas_mces_golden.json | 2 +- .../integration/dbt/dbt_test_column_meta_mapping_golden.json | 2 +- .../dbt/dbt_test_with_complex_owner_patterns_mces_golden.json | 2 +- .../dbt/dbt_test_with_data_platform_instance_mces_golden.json | 2 +- .../dbt/dbt_test_with_non_incremental_lineage_mces_golden.json | 2 +- .../dbt/dbt_test_with_target_platform_instance_mces_golden.json | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json index 4deb725ed2b44..fa26a93479a4f 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_enabled_with_schemas_mces_golden.json @@ -153,7 +153,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json index 588470ef41631..f2208fd98c203 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_column_meta_mapping_golden.json @@ -87,7 +87,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json index 926e8b8c8ed84..a27eeb3775960 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_complex_owner_patterns_mces_golden.json @@ -117,7 +117,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json 
b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json index 3727603266f25..43336ca585bcc 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_data_platform_instance_mces_golden.json @@ -118,7 +118,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json index ec879e6af766a..27ea568d010fa 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_non_incremental_lineage_mces_golden.json @@ -118,7 +118,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, diff --git a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json index e25c5e4faf6af..07296e175d9ec 100644 --- a/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json +++ b/metadata-ingestion/tests/integration/dbt/dbt_test_with_target_platform_instance_mces_golden.json @@ -118,7 +118,7 @@ "com.linkedin.pegasus2avro.schema.StringType": {} } }, - "nativeDataType": "VARCHAR", + "nativeDataType": "TEXT", "recursive": false, "isPartOfKey": false }, From ed5bdfc5aec65978145a72d2701941ed21b35554 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 22 Dec 2023 17:12:31 -0500 Subject: [PATCH 138/263] feat(ingest/redshift): merge CLL instead of overwriting (#9513) --- .../ingestion/source/redshift/lineage.py | 74 ++++++++++++------- .../src/datahub/utilities/sqlglot_lineage.py | 5 +- 2 files changed, 49 insertions(+), 30 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py index abed8505f168b..8135e1d44c102 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/lineage.py @@ -41,6 +41,7 @@ UpstreamLineageClass, ) from datahub.utilities import memory_footprint +from datahub.utilities.dedup_list import deduplicate_list from datahub.utilities.urns import dataset_urn logger: logging.Logger = logging.getLogger(__name__) @@ -85,6 +86,30 @@ def __post_init__(self): else: self.dataset_lineage_type = DatasetLineageTypeClass.TRANSFORMED + def merge_lineage( + self, + upstreams: Set[LineageDataset], + cll: Optional[List[sqlglot_l.ColumnLineageInfo]], + ) -> None: + self.upstreams = self.upstreams.union(upstreams) + + # Merge CLL using the output column name as the merge key. + self.cll = self.cll or [] + existing_cll: Dict[str, sqlglot_l.ColumnLineageInfo] = { + c.downstream.column: c for c in self.cll + } + for c in cll or []: + if c.downstream.column in existing_cll: + # Merge using upstream + column name as the merge key. 
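# (Illustration with hypothetical columns: if self.cll already maps
#    colA <- [t1.colA]
#  and the incoming cll carries
#    colA <- [t2.colA]  and  colB <- [t3.colB],
#  then colA's upstreams become the deduplicated [t1.colA, t2.colA] and
#  colB is appended as a new entry.)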
+ existing_cll[c.downstream.column].upstreams = deduplicate_list( + [*existing_cll[c.downstream.column].upstreams, *c.upstreams] + ) + else: + # New output column, just add it as is. + self.cll.append(c) + + self.cll = self.cll or None + class RedshiftLineageExtractor: def __init__( @@ -161,7 +186,12 @@ def _get_sources_from_query( ) sources.append(source) - return sources, parsed_result.column_lineage + return ( + sources, + parsed_result.column_lineage + if self.config.include_view_column_lineage + else None, + ) def _build_s3_path_from_row(self, filename: str) -> str: path = filename.strip() @@ -208,7 +238,7 @@ def _get_sources( "Only s3 source supported with copy. The source was: {path}." ) self.report.num_lineage_dropped_not_support_copy_path += 1 - return sources, cll + return [], None path = strip_s3_prefix(self._get_s3_path(path)) urn = make_dataset_urn_with_platform_instance( platform=platform.value, @@ -284,7 +314,6 @@ def _populate_lineage_map( ddl=lineage_row.ddl, filename=lineage_row.filename, ) - target.cll = cll target.upstreams.update( self._get_upstream_lineages( @@ -294,13 +323,13 @@ def _populate_lineage_map( raw_db_name=raw_db_name, ) ) + target.cll = cll - # Merging downstreams if dataset already exists and has downstreams + # Merging upstreams if dataset already exists and has upstreams if target.dataset.urn in self._lineage_map: - self._lineage_map[target.dataset.urn].upstreams = self._lineage_map[ - target.dataset.urn - ].upstreams.union(target.upstreams) - + self._lineage_map[target.dataset.urn].merge_lineage( + upstreams=target.upstreams, cll=target.cll + ) else: self._lineage_map[target.dataset.urn] = target @@ -420,7 +449,10 @@ def populate_lineage( ) -> None: populate_calls: List[Tuple[str, LineageCollectorType]] = [] - if self.config.table_lineage_mode == LineageMode.STL_SCAN_BASED: + if self.config.table_lineage_mode in { + LineageMode.STL_SCAN_BASED, + LineageMode.MIXED, + }: # Populate table level lineage by getting upstream tables from stl_scan redshift table query = RedshiftQuery.stl_scan_based_lineage_query( self.config.database, @@ -428,15 +460,10 @@ def populate_lineage( self.end_time, ) populate_calls.append((query, LineageCollectorType.QUERY_SCAN)) - elif self.config.table_lineage_mode == LineageMode.SQL_BASED: - # Populate table level lineage by parsing table creating sqls - query = RedshiftQuery.list_insert_create_queries_sql( - db_name=database, - start_time=self.start_time, - end_time=self.end_time, - ) - populate_calls.append((query, LineageCollectorType.QUERY_SQL_PARSER)) - elif self.config.table_lineage_mode == LineageMode.MIXED: + if self.config.table_lineage_mode in { + LineageMode.SQL_BASED, + LineageMode.MIXED, + }: # Populate table level lineage by parsing table creating sqls query = RedshiftQuery.list_insert_create_queries_sql( db_name=database, @@ -445,15 +472,7 @@ def populate_lineage( ) populate_calls.append((query, LineageCollectorType.QUERY_SQL_PARSER)) - # Populate table level lineage by getting upstream tables from stl_scan redshift table - query = RedshiftQuery.stl_scan_based_lineage_query( - db_name=database, - start_time=self.start_time, - end_time=self.end_time, - ) - populate_calls.append((query, LineageCollectorType.QUERY_SCAN)) - - if self.config.include_views: + if self.config.include_views and self.config.include_view_lineage: # Populate table level lineage for views query = RedshiftQuery.view_lineage_query() populate_calls.append((query, LineageCollectorType.VIEW)) @@ -540,7 +559,6 @@ def get_lineage( dataset_urn: str, 
schema: RedshiftSchema, ) -> Optional[Tuple[UpstreamLineageClass, Dict[str, str]]]: - upstream_lineage: List[UpstreamClass] = [] cll_lineage: List[FineGrainedLineage] = [] diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index f84b3f8b94a2e..b43c8de4c8f3d 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -193,7 +193,7 @@ class _ColumnRef(_FrozenModel): column: str -class ColumnRef(_ParserBaseModel): +class ColumnRef(_FrozenModel): table: Urn column: str @@ -929,6 +929,7 @@ def _translate_sqlglot_type( TypeClass = ArrayTypeClass elif sqlglot_type in { sqlglot.exp.DataType.Type.UNKNOWN, + sqlglot.exp.DataType.Type.NULL, }: return None else: @@ -1090,7 +1091,7 @@ def _sqlglot_lineage_inner( table_schemas_resolved=total_schemas_resolved, ) logger.debug( - f"Resolved {len(table_name_schema_mapping)} of {len(tables)} table schemas" + f"Resolved {total_schemas_resolved} of {total_tables_discovered} table schemas" ) # Simplify the input statement for column-level lineage generation. From 4448cf1f2d777c82d913e5ee0aeabd0e2785fad3 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 26 Dec 2023 16:30:24 +0530 Subject: [PATCH 139/263] fix(ui/ingestion): add debounce on search on ingestion listing page (#9516) --- .../entity/shared/tabs/Dataset/Queries/utils/constants.ts | 1 + datahub-web-react/src/app/ingest/secret/SecretsList.tsx | 8 +++++++- .../src/app/ingest/source/IngestionSourceList.tsx | 8 +++++++- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts index 5176c1207874c..025705abc580e 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/utils/constants.ts @@ -16,5 +16,6 @@ export const DEFAULT_MAX_RECENT_QUERIES = 9; */ export const MAX_ROWS_BEFORE_DEBOUNCE = 50; export const HALF_SECOND_IN_MS = 500; +export const ONE_SECOND_IN_MS = 1000; export const ADD_UNAUTHORIZED_MESSAGE = 'You are not authorized to add Queries to this entity.'; diff --git a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx index 2728fff0ccba3..1a960997e6bee 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretsList.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretsList.tsx @@ -1,5 +1,6 @@ import React, { useEffect, useState } from 'react'; import { Button, Empty, message, Modal, Pagination, Typography } from 'antd'; +import { debounce } from 'lodash'; import { DeleteOutlined, PlusOutlined } from '@ant-design/icons'; import * as QueryString from 'query-string'; import { useLocation } from 'react-router'; @@ -18,6 +19,7 @@ import { SearchBar } from '../../search/SearchBar'; import { useEntityRegistry } from '../../useEntityRegistry'; import { scrollToTop } from '../../shared/searchUtils'; import { addSecretToListSecretsCache, removeSecretFromListSecretsCache } from './cacheUtils'; +import { ONE_SECOND_IN_MS } from '../../entity/shared/tabs/Dataset/Queries/utils/constants'; const DeleteButtonContainer = styled.div` display: flex; @@ -84,6 +86,10 @@ export const SecretsList = () => { setPage(newPage); }; + const debouncedSetQuery = debounce((newQuery: 
string | undefined) => { + setQuery(newQuery); + }, ONE_SECOND_IN_MS); + const onSubmit = (state: SecretBuilderState, resetBuilderState: () => void) => { createSecretMutation({ variables: { @@ -199,7 +205,7 @@ export const SecretsList = () => { onSearch={() => null} onQueryChange={(q) => { setPage(1); - setQuery(q); + debouncedSetQuery(q); }} entityRegistry={entityRegistry} hideRecommendations diff --git a/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx b/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx index 6188845694f9e..e6db6bfcc9a61 100644 --- a/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx +++ b/datahub-web-react/src/app/ingest/source/IngestionSourceList.tsx @@ -1,5 +1,6 @@ import { PlusOutlined, RedoOutlined } from '@ant-design/icons'; import React, { useCallback, useEffect, useState } from 'react'; +import { debounce } from 'lodash'; import * as QueryString from 'query-string'; import { useLocation } from 'react-router'; import { Button, message, Modal, Pagination, Select } from 'antd'; @@ -30,6 +31,7 @@ import { INGESTION_CREATE_SOURCE_ID, INGESTION_REFRESH_SOURCES_ID, } from '../../onboarding/config/IngestionOnboardingConfig'; +import { ONE_SECOND_IN_MS } from '../../entity/shared/tabs/Dataset/Queries/utils/constants'; const PLACEHOLDER_URN = 'placeholder-urn'; @@ -133,6 +135,10 @@ export const IngestionSourceList = () => { setLastRefresh(new Date().getTime()); }, [refetch]); + const debouncedSetQuery = debounce((newQuery: string | undefined) => { + setQuery(newQuery); + }, ONE_SECOND_IN_MS); + function hasActiveExecution() { return !!filteredSources.find((source) => source.executions?.executionRequests.find((request) => isExecutionRequestActive(request)), @@ -401,7 +407,7 @@ export const IngestionSourceList = () => { onSearch={() => null} onQueryChange={(q) => { setPage(1); - setQuery(q); + debouncedSetQuery(q); }} entityRegistry={entityRegistry} hideRecommendations From d399a530576974da9beb1af24d7ea5f98922b6d3 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Tue, 26 Dec 2023 18:26:40 +0530 Subject: [PATCH 140/263] fix(ui): correct the color of edit links (#9517) --- .../entity/shared/tabs/Documentation/components/LinkList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx index 1b5c3d54009da..9f94a830ac1cf 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx @@ -159,7 +159,7 @@ export const LinkList = ({ refetch }: LinkListProps) => { - + + } + trigger={['click']} + > + + ), }, From b7a0bbcb3d6000d3d9827ab19f13c3118d0bfc19 Mon Sep 17 00:00:00 2001 From: Fernando Marino` Date: Thu, 28 Dec 2023 01:24:25 +0100 Subject: [PATCH 146/263] feat(ingest/openapi): support proxies and alternate auth schemes (#9492) Co-authored-by: Fernando Marino Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/openapi.py | 41 +++++++++++++++---- .../ingestion/source/openapi_parser.py | 26 ++++++++---- 2 files changed, 51 insertions(+), 16 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi.py b/metadata-ingestion/src/datahub/ingestion/source/openapi.py index 3925ba51c16dd..ad62ef7362aeb 100755 --- 
a/metadata-ingestion/src/datahub/ingestion/source/openapi.py +++ b/metadata-ingestion/src/datahub/ingestion/source/openapi.py @@ -52,6 +52,13 @@ class OpenApiConfig(ConfigModel): ignore_endpoints: list = Field(default=[], description="") username: str = Field(default="", description="") password: str = Field(default="", description="") + proxies: Optional[dict] = Field( + default=None, + description="Eg. " + "`{'http': 'http://10.10.1.10:3128', 'https': 'http://10.10.1.10:1080'}`." + "If authentication is required, add it to the proxy url directly e.g. " + "`http://user:pass@10.10.1.10:3128/`.", + ) forced_examples: dict = Field(default={}, description="") token: Optional[str] = Field(default=None, description="") get_token: dict = Field(default={}, description="") @@ -87,9 +94,13 @@ def get_swagger(self) -> Dict: password=self.password, tok_url=url4req, method=self.get_token["request_type"], + proxies=self.proxies, ) sw_dict = get_swag_json( - self.url, token=self.token, swagger_file=self.swagger_file + self.url, + token=self.token, + swagger_file=self.swagger_file, + proxies=self.proxies, ) # load the swagger file else: # using basic auth for accessing endpoints @@ -98,6 +109,7 @@ def get_swagger(self) -> Dict: username=self.username, password=self.password, swagger_file=self.swagger_file, + proxies=self.proxies, ) return sw_dict @@ -258,10 +270,15 @@ def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901 tot_url = clean_url(config.url + self.url_basepath + endpoint_k) if config.token: - response = request_call(tot_url, token=config.token) + response = request_call( + tot_url, token=config.token, proxies=config.proxies + ) else: response = request_call( - tot_url, username=config.username, password=config.password + tot_url, + username=config.username, + password=config.password, + proxies=config.proxies, ) if response.status_code == 200: fields2add, root_dataset_samples[dataset_name] = extract_fields( @@ -281,10 +298,15 @@ def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901 url_guess = try_guessing(endpoint_k, root_dataset_samples) tot_url = clean_url(config.url + self.url_basepath + url_guess) if config.token: - response = request_call(tot_url, token=config.token) + response = request_call( + tot_url, token=config.token, proxies=config.proxies + ) else: response = request_call( - tot_url, username=config.username, password=config.password + tot_url, + username=config.username, + password=config.password, + proxies=config.proxies, ) if response.status_code == 200: fields2add, _ = extract_fields(response, dataset_name) @@ -304,10 +326,15 @@ def get_workunits_internal(self) -> Iterable[ApiWorkUnit]: # noqa: C901 ) tot_url = clean_url(config.url + self.url_basepath + composed_url) if config.token: - response = request_call(tot_url, token=config.token) + response = request_call( + tot_url, token=config.token, proxies=config.proxies + ) else: response = request_call( - tot_url, username=config.username, password=config.password + tot_url, + username=config.username, + password=config.password, + proxies=config.proxies, ) if response.status_code == 200: fields2add, _ = extract_fields(response, dataset_name) diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py index 1ab40bc8be73d..84bb3ad452611 100755 --- a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py +++ b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py @@ -51,6 +51,7 @@ def 
request_call( token: Optional[str] = None, username: Optional[str] = None, password: Optional[str] = None, + proxies: Optional[dict] = None, ) -> requests.Response: headers = {"accept": "application/json"} @@ -60,8 +61,8 @@ def request_call( ) elif token is not None: - headers["Authorization"] = f"Bearer {token}" - return requests.get(url, headers=headers) + headers["Authorization"] = f"{token}" + return requests.get(url, proxies=proxies, headers=headers) else: return requests.get(url, headers=headers) @@ -72,12 +73,15 @@ def get_swag_json( username: Optional[str] = None, password: Optional[str] = None, swagger_file: str = "", + proxies: Optional[dict] = None, ) -> Dict: tot_url = url + swagger_file if token is not None: - response = request_call(url=tot_url, token=token) + response = request_call(url=tot_url, token=token, proxies=proxies) else: - response = request_call(url=tot_url, username=username, password=password) + response = request_call( + url=tot_url, username=username, password=password, proxies=proxies + ) if response.status_code != 200: raise Exception(f"Unable to retrieve {tot_url}, error {response.status_code}") @@ -251,7 +255,7 @@ def compose_url_attr(raw_url: str, attr_list: list) -> str: attr_list=["2",]) asd2 == "http://asd.com/2" """ - splitted = re.split(r"\{[^}]+\}", raw_url) + splitted = re.split(r"\{[^}]+}", raw_url) if splitted[-1] == "": # it can happen that the last element is empty splitted = splitted[:-1] composed_url = "" @@ -265,7 +269,7 @@ def compose_url_attr(raw_url: str, attr_list: list) -> str: def maybe_theres_simple_id(url: str) -> str: - dets = re.findall(r"(\{[^}]+\})", url) # searching the fields between parenthesis + dets = re.findall(r"(\{[^}]+})", url) # searching the fields between parenthesis if len(dets) == 0: return url dets_w_id = [det for det in dets if "id" in det] # the fields containing "id" @@ -349,6 +353,7 @@ def get_tok( password: str = "", tok_url: str = "", method: str = "post", + proxies: Optional[dict] = None, ) -> str: """ Trying to post username/password to get auth. 
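Taken together, the two code paths this patch touches compose as follows. A minimal sketch (not part of the patch itself), assuming a hypothetical token endpoint, proxy addresses, and credentials; it uses only the `requests` calls shown above:

```python
import requests

# Hypothetical values, for illustration only.
proxies = {"http": "http://10.10.1.10:3128", "https": "http://10.10.1.10:1080"}
base_url = "https://api.example.com/"

# 1. Fetch a token (the method == "post" branch of get_tok above).
resp = requests.post(
    base_url + "api/authenticate/",
    proxies=proxies,
    json={"username": "user", "password": "pass", "maxDuration": True},
)
cont = resp.json()
# Some services return a bare "token"; others nest it and expect a Bearer prefix.
token = cont["token"] if "token" in cont else f"Bearer {cont['tokens']['access']}"

# 2. Pass the token verbatim as the Authorization header, as request_call() now does.
response = requests.get(
    base_url + "api/datasets/",
    proxies=proxies,
    headers={"accept": "application/json", "Authorization": token},
)
response.raise_for_status()
```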
@@ -357,12 +362,15 @@ def get_tok( url4req = url + tok_url if method == "post": # this will make a POST call with username and password - data = {"username": username, "password": password} + data = {"username": username, "password": password, "maxDuration": True} # url2post = url + "api/authenticate/" - response = requests.post(url4req, data=data) + response = requests.post(url4req, proxies=proxies, json=data) if response.status_code == 200: cont = json.loads(response.content) - token = cont["tokens"]["access"] + if "token" in cont: # other authentication scheme + token = cont["token"] + else: # works only for bearer authentication scheme + token = f"Bearer {cont['tokens']['access']}" elif method == "get": # this will make a GET call with username and password response = requests.get(url4req) From 754d8814477d050e907aeca6c561d98372b60dc5 Mon Sep 17 00:00:00 2001 From: cburroughs Date: Wed, 27 Dec 2023 19:33:41 -0500 Subject: [PATCH 147/263] build(ingest/feast): upgrade to latest feast version (#9439) --- metadata-ingestion/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 4632c20cd3b96..32d49ffc73fa3 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -316,7 +316,7 @@ # https://github.com/elastic/elasticsearch-py/issues/1639#issuecomment-883587433 "elasticsearch": {"elasticsearch==7.13.4"}, "feast": { - "feast~=0.31.1", + "feast~=0.34.1", "flask-openid>=1.3.0", # typeguard 3.x, released on 2023-03-14, seems to cause issues with Feast. "typeguard<3", From 9f79f44dd69a5a86864ccc31473305bdf1c2f4bb Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 27 Dec 2023 20:05:17 -0500 Subject: [PATCH 148/263] build: enable gradle caching (#9525) --- .github/workflows/airflow-plugin.yml | 1 + .github/workflows/build-and-test.yml | 1 + .github/workflows/check-datahub-jars.yml | 1 + .github/workflows/docker-unified.yml | 27 ++++++++++++--------- .github/workflows/documentation.yml | 1 + .github/workflows/metadata-ingestion.yml | 1 + .github/workflows/metadata-io.yml | 2 ++ .github/workflows/metadata-model.yml | 2 ++ .github/workflows/publish-datahub-jars.yml | 2 ++ .github/workflows/spark-smoke-test.yml | 2 ++ gradle.properties | 2 +- gradle/wrapper/gradle-wrapper.jar | Bin 61624 -> 61608 bytes gradlew | 4 +-- 13 files changed, 32 insertions(+), 14 deletions(-) diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 70816e5f093d1..97a0da8546ed1 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -55,6 +55,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index dab64cf2dca5e..6daf1904ba3ae 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -42,6 +42,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 46d97ffec8861..556cd87f12df0 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -33,6 +33,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: 
actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 7cef38b1cd47c..454e766140245 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -84,6 +84,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -145,6 +146,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -206,6 +208,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -267,6 +270,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -328,6 +332,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -567,6 +572,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -653,6 +659,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -731,12 +738,13 @@ jobs: strategy: fail-fast: false matrix: - test_strategy: [ - "no_cypress_suite0", - "no_cypress_suite1", - "cypress_suite1", - "cypress_rest" - ] + test_strategy: + [ + "no_cypress_suite0", + "no_cypress_suite1", + "cypress_suite1", + "cypress_rest", + ] needs: [ setup, @@ -760,6 +768,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" @@ -904,11 +913,7 @@ jobs: deploy_datahub_head: name: Deploy to Datahub HEAD runs-on: ubuntu-latest - needs: - [ - setup, - smoke_test - ] + needs: [setup, smoke_test] steps: - uses: aws-actions/configure-aws-credentials@v1 if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 29953b8b70d91..e1671cc021919 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -32,6 +32,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index 4e04fef3b3980..af73db483f9ae 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -49,6 +49,7 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 2188fcb07c77a..96229642244b6 100644 --- 
a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -34,9 +34,11 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Gradle build (and test) # there is some race condition in gradle build, which makes gradle never terminate in ~30% of the runs # running build first without datahub-web-react:yarnBuild and then with it is 100% stable diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index d0112f1b14e7a..265a66aa236ae 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -34,10 +34,12 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Run model generation diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index 24d1c5436b315..0a311be33cd30 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -54,9 +54,11 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: checkout upstream repo run: | git remote add upstream https://github.com/datahub-project/datahub.git diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 60e183cce5179..94692bd3c2336 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -35,9 +35,11 @@ jobs: with: distribution: "zulu" java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Remove images diff --git a/gradle.properties b/gradle.properties index 1cd349344b432..f410ff01bf397 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,7 +1,7 @@ org.gradle.daemon=false org.gradle.configureondemand=true org.gradle.parallel=true -org.gradle.caching=false +org.gradle.caching=true # Increase gradle JVM memory to 3GB to allow tests to run locally org.gradle.jvmargs=-Xmx3000m diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index afba109285af78dbd2a1d187e33ac4f87c76e392..ccebba7710deaf9f98673a68957ea02138b60d0a 100644 GIT binary patch delta 11632 zcmZvCWmsHIvn}otoWWsmcPF@o;2JczyX!!3mq9{scP9i11PcU$4GzKGB{&IihkW_o zbKd)$`O#H-t*YJKyL-C(sjk*_`0{mlL^UON1bi?o20=0j9xOh%ei@J~uLo722sQ!? 
From [commit hash and author lost in extraction]
Date: Thu, 28 Dec 2023 04:06:41 -0500
Subject: [PATCH 149/263] chore(build): update base-requirements + add script for regeneration (#9524)

---
 .../base-requirements.txt                    | 317 +++++++++---------
 .../regenerate-base-requirements.sh          |  37 ++
 2 files changed, 195 insertions(+), 159 deletions(-)
 create mode 100755 docker/datahub-ingestion-base/regenerate-base-requirements.sh

diff --git a/docker/datahub-ingestion-base/base-requirements.txt b/docker/datahub-ingestion-base/base-requirements.txt
index 141382466ab9f..9092875902794 100644
--- a/docker/datahub-ingestion-base/base-requirements.txt
+++ b/docker/datahub-ingestion-base/base-requirements.txt
@@ -1,149 +1,147 @@
-# Excluded for slim
-# pyspark==3.0.3
-# pydeequ==1.0.1
-
+# Generated requirements file. Run ./regenerate-base-requirements.sh to regenerate.
 acryl-datahub-classify==0.0.8
-acryl-PyHive==0.6.14
-acryl-sqlglot==18.5.2.dev45
+acryl-PyHive==0.6.16
+acryl-sqlglot==20.4.1.dev14
 aenum==3.1.15
-aiohttp==3.8.6
+aiohttp==3.9.1
 aiosignal==1.3.1
-alembic==1.12.0
+alembic==1.13.1
 altair==4.2.0
+annotated-types==0.6.0
 anyio==3.7.1
-apache-airflow==2.7.2
-apache-airflow-providers-common-sql==1.7.2
-apache-airflow-providers-ftp==3.5.2
-apache-airflow-providers-http==4.5.2
-apache-airflow-providers-imap==3.3.2
-apache-airflow-providers-sqlite==3.4.3
-apispec==6.3.0
+apache-airflow==2.7.3
+apache-airflow-providers-common-sql==1.9.0
+apache-airflow-providers-ftp==3.7.0
+apache-airflow-providers-http==4.8.0
+apache-airflow-providers-imap==3.5.0
+apache-airflow-providers-sqlite==3.6.0
+apispec==6.3.1
 appdirs==1.4.4
 appnope==0.1.3
-argcomplete==3.1.2
+argcomplete==3.2.1
 argon2-cffi==23.1.0
 argon2-cffi-bindings==21.2.0
 asgiref==3.7.2
 asn1crypto==1.5.1
-asttokens==2.4.0
+asttokens==2.4.1
 async-timeout==4.0.3
-asynch==0.2.2
+asynch==0.2.3
 attrs==23.1.0
-avro==1.10.2
+avro==1.11.3
 avro-gen3==0.7.11
-Babel==2.13.0
-backcall==0.2.0
+Babel==2.14.0
 backoff==2.2.1
 beautifulsoup4==4.12.2
 bleach==6.1.0
-blinker==1.6.3
+blinker==1.7.0
 blis==0.7.11
-boto3==1.28.62
-botocore==1.31.62
+boto3==1.34.8
+botocore==1.34.8
 bowler==0.9.0
 bracex==2.4
 cached-property==1.5.2
 cachelib==0.9.0
-cachetools==5.3.1
+cachetools==5.3.2
 catalogue==2.0.10
-cattrs==23.1.2
-certifi==2023.7.22
+cattrs==23.2.3
+certifi==2023.11.17
 cffi==1.16.0
 chardet==5.2.0
-charset-normalizer==3.3.0
-ciso8601==2.3.0
+charset-normalizer==3.3.2
+ciso8601==2.3.1
 click==8.1.7
 click-default-group==1.2.4
 click-spinner==0.1.10
 clickclick==20.10.2
-clickhouse-cityhash==1.0.2.4
 clickhouse-driver==0.2.6
 clickhouse-sqlalchemy==0.2.4
-cloudpickle==2.2.1
+cloudpickle==3.0.0
 colorama==0.4.6
 colorlog==4.8.0
-comm==0.1.4
-confection==0.1.3
-ConfigUpdater==3.1.1
+comm==0.2.0
+confection==0.1.4
+ConfigUpdater==3.2
confluent-kafka==2.3.0 connexion==2.14.2 cron-descriptor==1.4.0 croniter==2.0.1 -cryptography==41.0.4 +cryptography==41.0.7 cx-Oracle==8.3.0 cymem==2.0.8 -dask==2023.9.3 +dask==2023.12.1 databricks-cli==0.18.0 databricks-dbapi==0.6.0 -databricks-sdk==0.10.0 +databricks-sdk==0.15.0 +databricks-sql-connector==2.9.3 debugpy==1.8.0 decorator==5.1.1 defusedxml==0.7.1 -deltalake==0.11.0 +deltalake==0.14.0 Deprecated==1.2.14 dill==0.3.7 dnspython==2.4.2 -docker==6.1.3 +docker==7.0.0 docutils==0.20.1 ecdsa==0.18.0 elasticsearch==7.13.4 email-validator==1.3.1 entrypoints==0.4 et-xmlfile==1.1.0 -exceptiongroup==1.1.3 -executing==2.0.0 -expandvars==0.11.0 -fastapi==0.103.2 -fastavro==1.8.4 -fastjsonschema==2.18.1 +exceptiongroup==1.2.0 +executing==2.0.1 +expandvars==0.12.0 +fastapi==0.108.0 +fastavro==1.9.2 +fastjsonschema==2.19.0 feast==0.31.1 -filelock==3.12.4 +filelock==3.13.1 fissix==21.11.13 Flask==2.2.5 flatdict==4.0.1 -frozenlist==1.4.0 -fsspec==2023.9.2 +frozenlist==1.4.1 +fsspec==2023.12.2 future==0.18.3 -GeoAlchemy2==0.14.1 -gitdb==4.0.10 -GitPython==3.1.37 -google-api-core==2.12.0 -google-auth==2.23.3 -google-cloud-appengine-logging==1.3.2 +GeoAlchemy2==0.14.3 +gitdb==4.0.11 +GitPython==3.1.40 +google-api-core==2.15.0 +google-auth==2.25.2 +google-cloud-appengine-logging==1.4.0 google-cloud-audit-log==0.2.5 -google-cloud-bigquery==3.12.0 -google-cloud-core==2.3.3 +google-cloud-bigquery==3.14.1 +google-cloud-core==2.4.1 google-cloud-datacatalog-lineage==0.2.2 google-cloud-logging==3.5.0 google-crc32c==1.5.0 google-re2==1.1 -google-resumable-media==2.6.0 -googleapis-common-protos==1.60.0 +google-resumable-media==2.7.0 +googleapis-common-protos==1.62.0 gql==3.4.1 graphql-core==3.2.3 graphviz==0.20.1 great-expectations==0.15.50 -greenlet==3.0.0 -grpc-google-iam-v1==0.12.6 -grpcio==1.59.0 -grpcio-reflection==1.59.0 -grpcio-status==1.59.0 -grpcio-tools==1.59.0 +greenlet==3.0.3 +grpc-google-iam-v1==0.13.0 +grpcio==1.60.0 +grpcio-reflection==1.60.0 +grpcio-status==1.60.0 +grpcio-tools==1.60.0 gssapi==1.8.3 gunicorn==21.2.0 h11==0.14.0 -httpcore==0.18.0 -httptools==0.6.0 -httpx==0.25.0 +hdbcli==2.19.20 +httpcore==1.0.2 +httptools==0.6.1 +httpx==0.26.0 humanfriendly==10.0 -idna==3.4 +idna==3.6 ijson==3.2.3 -importlib-metadata==6.8.0 -importlib-resources==6.1.0 +importlib-metadata==6.11.0 +importlib-resources==6.1.1 inflection==0.5.1 ipaddress==1.0.23 ipykernel==6.17.1 -ipython==8.16.1 +ipython==8.19.0 ipython-genutils==0.2.0 ipywidgets==8.1.1 iso3166==2.1.1 @@ -152,34 +150,34 @@ itsdangerous==2.1.2 jedi==0.19.1 Jinja2==3.1.2 jmespath==1.0.1 -JPype1==1.4.1 +JPype1==1.5.0 jsonlines==4.0.0 jsonpatch==1.33 jsonpointer==2.4 jsonref==1.1.0 -jsonschema==4.19.1 -jsonschema-specifications==2023.7.1 +jsonschema==4.20.0 +jsonschema-specifications==2023.12.1 jupyter-server==1.24.0 jupyter_client==7.4.9 jupyter_core==4.12.0 -jupyterlab-pygments==0.2.2 jupyterlab-widgets==3.0.9 +jupyterlab_pygments==0.3.0 langcodes==3.3.0 lark==1.1.4 -lazy-object-proxy==1.9.0 +lazy-object-proxy==1.10.0 leb128==1.0.5 -limits==3.6.0 +limits==3.7.0 linear-tsv==1.1.0 linkify-it-py==2.0.2 -lkml==1.3.1 +lkml==1.3.3 locket==1.0.0 lockfile==0.12.2 looker-sdk==23.0.0 -lxml==4.9.3 +lxml==4.9.4 lz4==4.3.2 -makefun==1.15.1 -Mako==1.2.4 -Markdown==3.5 +makefun==1.15.2 +Mako==1.3.0 +Markdown==3.5.1 markdown-it-py==3.0.0 MarkupSafe==2.1.3 marshmallow==3.20.1 @@ -190,26 +188,26 @@ mdit-py-plugins==0.4.0 mdurl==0.1.2 mistune==3.0.2 mixpanel==4.10.0 -mlflow-skinny==2.7.1 +mlflow-skinny==2.9.2 mmh3==4.0.1 mmhash3==3.0.1 more-itertools==10.1.0 
moreorless==0.4.0 -moto==4.2.5 +moto==4.2.12 msal==1.22.0 multidict==6.0.4 murmurhash==1.0.10 -mypy==1.6.0 +mypy==1.8.0 mypy-extensions==1.0.0 nbclassic==1.0.0 nbclient==0.6.3 -nbconvert==7.9.2 +nbconvert==7.13.1 nbformat==5.9.1 nest-asyncio==1.5.8 -networkx==3.1 +networkx==3.2.1 notebook==6.5.6 notebook_shim==0.2.3 -numpy==1.26.0 +numpy==1.26.2 oauthlib==3.2.2 okta==1.7.0 openlineage-airflow==1.2.0 @@ -217,110 +215,107 @@ openlineage-integration-common==1.2.0 openlineage-python==1.2.0 openlineage_sql==1.2.0 openpyxl==3.1.2 -opentelemetry-api==1.20.0 -opentelemetry-exporter-otlp==1.20.0 -opentelemetry-exporter-otlp-proto-common==1.20.0 -opentelemetry-exporter-otlp-proto-grpc==1.20.0 -opentelemetry-exporter-otlp-proto-http==1.20.0 -opentelemetry-proto==1.20.0 -opentelemetry-sdk==1.20.0 -opentelemetry-semantic-conventions==0.41b0 +opentelemetry-api==1.22.0 +opentelemetry-exporter-otlp==1.22.0 +opentelemetry-exporter-otlp-proto-common==1.22.0 +opentelemetry-exporter-otlp-proto-grpc==1.22.0 +opentelemetry-exporter-otlp-proto-http==1.22.0 +opentelemetry-proto==1.22.0 +opentelemetry-sdk==1.22.0 +opentelemetry-semantic-conventions==0.43b0 ordered-set==4.1.0 -oscrypto==1.3.0 packaging==23.2 pandas==1.5.3 pandavro==1.5.2 pandocfilters==1.5.0 -parse==1.19.1 +parse==1.20.0 parso==0.8.3 partd==1.4.1 -pathspec==0.11.2 -pathy==0.10.2 +pathspec==0.12.1 +pathy==0.10.3 pendulum==2.1.2 -pexpect==4.8.0 +pexpect==4.9.0 phonenumbers==8.13.0 -pickleshare==0.7.5 platformdirs==3.11.0 pluggy==1.3.0 preshed==3.0.9 prison==0.2.1 -progressbar2==4.2.0 -prometheus-client==0.17.1 -prompt-toolkit==3.0.39 -proto-plus==1.22.3 -protobuf==4.24.4 -psutil==5.9.5 +progressbar2==4.3.2 +prometheus-client==0.19.0 +prompt-toolkit==3.0.43 +proto-plus==1.23.0 +protobuf==4.25.1 +psutil==5.9.7 psycopg2-binary==2.9.9 ptyprocess==0.7.0 pure-eval==0.2.2 pure-sasl==0.6.2 -py-partiql-parser==0.3.7 +py-partiql-parser==0.5.0 pyarrow==11.0.0 -pyasn1==0.5.0 +pyasn1==0.5.1 pyasn1-modules==0.3.0 -pyathena==2.4.1 -pycountry==22.3.5 +pyathena==2.25.2 +pycountry==23.12.11 pycparser==2.21 pycryptodome==3.19.0 -pycryptodomex==3.19.0 pydantic==1.10.13 +pydantic_core==2.14.6 pydash==7.0.6 -pydruid==0.6.5 -Pygments==2.16.1 +pydruid==0.6.6 +Pygments==2.17.2 pyiceberg==0.4.0 -pymongo==4.5.0 +pymongo==4.6.1 PyMySQL==1.1.0 -pyOpenSSL==23.2.0 +pyOpenSSL==23.3.0 pyparsing==3.0.9 pyspnego==0.10.2 python-daemon==3.0.1 python-dateutil==2.8.2 python-dotenv==1.0.0 python-jose==3.3.0 -python-ldap==3.4.3 +python-ldap==3.4.4 python-nvd3==0.15.0 python-slugify==8.0.1 python-stdnum==1.19 -python-tds==1.13.0 +python-tds==1.14.0 python-utils==3.8.1 python3-openid==3.2.0 pytz==2023.3.post1 pytzdata==2020.1 PyYAML==6.0.1 pyzmq==24.0.1 -ratelimiter==1.2.0.post0 redash-toolbelt==0.1.9 -redshift-connector==2.0.914 -referencing==0.30.2 -regex==2023.10.3 +redshift-connector==2.0.918 +referencing==0.32.0 +regex==2023.12.25 requests==2.31.0 requests-file==1.5.1 requests-gssapi==1.2.3 requests-ntlm==1.2.0 requests-toolbelt==0.10.1 -responses==0.23.3 +responses==0.24.1 rfc3339-validator==0.1.4 rfc3986==2.0.0 -rich==13.6.0 -rich-argparse==1.3.0 -rpds-py==0.10.6 +rich==13.7.0 +rich-argparse==1.4.0 +rpds-py==0.15.2 rsa==4.9 ruamel.yaml==0.17.17 ruamel.yaml.clib==0.2.8 -s3transfer==0.7.0 -schwifty==2023.9.0 -scipy==1.11.3 +s3transfer==0.10.0 +schwifty==2023.11.2 +scipy==1.11.4 scramp==1.4.4 Send2Trash==1.8.2 -sentry-sdk==1.32.0 +sentry-sdk==1.39.1 setproctitle==1.3.3 simple-salesforce==1.12.5 six==1.16.0 smart-open==6.4.0 smmap==5.0.1 sniffio==1.3.0 -snowflake-connector-python==3.2.1 
-snowflake-sqlalchemy==1.5.0 +snowflake-connector-python==3.6.0 +snowflake-sqlalchemy==1.5.1 sortedcontainers==2.4.0 soupsieve==2.5 spacy==3.4.3 @@ -328,67 +323,71 @@ spacy-legacy==3.0.12 spacy-loggers==1.0.5 sql-metadata==2.2.2 SQLAlchemy==1.4.44 -sqlalchemy-bigquery==1.8.0 -SQLAlchemy-JSONField==1.0.1.post0 +sqlalchemy-bigquery==1.9.0 +sqlalchemy-hana==1.1.1 +SQLAlchemy-JSONField==1.0.2 sqlalchemy-pytds==0.3.5 sqlalchemy-redshift==0.8.14 SQLAlchemy-Utils==0.41.1 -sqlalchemy2-stubs==0.0.2a35 +sqlalchemy2-stubs==0.0.2a37 sqllineage==1.3.8 sqlparse==0.4.4 srsly==2.4.8 stack-data==0.6.3 -starlette==0.27.0 +starlette==0.32.0.post1 strictyaml==1.7.3 tableauserverclient==0.25 tableschema==1.20.2 tabulate==0.9.0 tabulator==1.53.5 tenacity==8.2.3 -termcolor==2.3.0 -terminado==0.17.1 +teradatasql==20.0.0.2 +teradatasqlalchemy==17.20.0.0 +termcolor==2.4.0 +terminado==0.18.0 text-unidecode==1.3 thinc==8.1.12 -thrift==0.13.0 +thrift==0.16.0 thrift-sasl==0.4.3 tinycss2==1.2.1 toml==0.10.2 tomli==2.0.1 -tomlkit==0.12.1 +tomlkit==0.12.3 toolz==0.12.0 -tornado==6.3.3 +tornado==6.4 tqdm==4.66.1 traitlets==5.2.1.post0 trino==0.327.0 typeguard==2.13.3 typer==0.7.0 -types-PyYAML==6.0.12.12 typing-inspect==0.9.0 -typing_extensions==4.8.0 -tzlocal==5.1 +typing_extensions==4.9.0 +tzlocal==5.2 uc-micro-py==1.0.2 -ujson==5.8.0 +ujson==5.9.0 unicodecsv==0.14.1 -urllib3==1.26.17 -uvicorn==0.23.2 -uvloop==0.17.0 -vertica-python==1.3.5 -vertica-sqlalchemy-dialect==0.0.8 +universal-pathlib==0.1.4 +urllib3==1.26.18 +uvicorn==0.25.0 +uvloop==0.19.0 +vertica-python==1.3.8 +vertica-sqlalchemy-dialect==0.0.8.1 vininfo==1.7.0 volatile==2.1.0 wasabi==0.10.1 -watchfiles==0.20.0 +watchfiles==0.21.0 wcmatch==8.5 -wcwidth==0.2.8 +wcwidth==0.2.12 webencodings==0.5.1 -websocket-client==1.6.4 -websockets==11.0.3 +websocket-client==1.7.0 +websockets==12.0 Werkzeug==2.2.3 widgetsnbextension==4.0.9 -wrapt==1.15.0 -WTForms==3.1.0 +wrapt==1.16.0 +WTForms==3.0.1 xlrd==2.0.1 xmltodict==0.13.0 -yarl==1.9.2 +yarl==1.9.4 zeep==4.2.1 -zstd==1.5.5.1 \ No newline at end of file +zipp==3.17.0 +zstd==1.5.5.1 diff --git a/docker/datahub-ingestion-base/regenerate-base-requirements.sh b/docker/datahub-ingestion-base/regenerate-base-requirements.sh new file mode 100755 index 0000000000000..6fb331afa484a --- /dev/null +++ b/docker/datahub-ingestion-base/regenerate-base-requirements.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +# This script is used to regenerate the base-requirements.txt file + +set -euxo pipefail +cd "$( dirname "${BASH_SOURCE[0]}" )" + +SCRIPT_NAME=$(basename "$0") +DATAHUB_DIR=$(pwd)/../.. + +# Create a virtualenv. +VENV_DIR=$(mktemp -d) +python -c "import sys; assert sys.version_info >= (3, 9), 'Python 3.9 or higher is required.'" +python -m venv $VENV_DIR +source $VENV_DIR/bin/activate +pip install --upgrade pip setuptools wheel +echo "Using virtualenv at $VENV_DIR" + +# Install stuff. +pushd $DATAHUB_DIR/metadata-ingestion +pip install -e . +pip install -e '../metadata-ingestion-modules/airflow-plugin/[plugin-v2]' +pip install -e '.[all]' +popd + +# Generate the requirements file. +# Removing Flask deps due as per https://github.com/datahub-project/datahub/pull/6867/files +# Removing py4j and PyJWT due to https://github.com/datahub-project/datahub/pull/6868/files +# Removing pyspark and pydeequ because we don't want them in the slim image, so they can be added separately. +# TODO: It's unclear if these removals are still actually needed. +echo "# Generated requirements file. Run ./$SCRIPT_NAME to regenerate." 
> base-requirements.txt +pip freeze \ + | grep -v -E "^-e" \ + | grep -v "Flask-" \ + | grep -v -E "(py4j|PyJWT)==" \ + | grep -v -E "(pyspark|pydeequ)==" \ + >> base-requirements.txt From 4efa46f8c91dfdedc21b7081143d196c7a0be0da Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Thu, 28 Dec 2023 15:05:14 +0530 Subject: [PATCH 150/263] test(cypress/users): add automatic reset password test (#9515) --- .../src/app/identity/user/UserListItem.tsx | 17 ++- .../app/identity/user/ViewResetTokenModal.tsx | 7 +- .../cypress/e2e/mutations/add_users.js | 135 +++++++++++++----- 3 files changed, 114 insertions(+), 45 deletions(-) diff --git a/datahub-web-react/src/app/identity/user/UserListItem.tsx b/datahub-web-react/src/app/identity/user/UserListItem.tsx index 69b8a6c2d1355..8ad3d7d93d657 100644 --- a/datahub-web-react/src/app/identity/user/UserListItem.tsx +++ b/datahub-web-react/src/app/identity/user/UserListItem.tsx @@ -98,8 +98,8 @@ export default function UserListItem({ user, canManageUserCredentials, selectRol
{displayName}
[JSX markup stripped during extraction: the removed and added lines in this hunk differ only in the tags wrapping {user.username}]
{userStatus && ( @@ -121,8 +121,12 @@ export default function UserListItem({ user, canManageUserCredentials, selectRol trigger={['click']} overlay={ - setIsViewingResetToken(true)}> -   Reset user password + setIsViewingResetToken(true)} + data-testid="reset-menu-item" + > +   Reset user password  Delete @@ -130,7 +134,10 @@ export default function UserListItem({ user, canManageUserCredentials, selectRol } > - + Generate a new reset link! Note, any old links will cease to be active. - + diff --git a/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js b/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js index e19c6065d4274..ba225ba37884b 100644 --- a/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js +++ b/smoke-test/tests/cypress/cypress/e2e/mutations/add_users.js @@ -1,47 +1,104 @@ const tryToSignUp = () => { - let number = Math.floor(Math.random() * 100000); - let name = `Example Name ${number}`; - cy.enterTextInTestId("email", `example${number}@example.com`); - cy.enterTextInTestId("name", name); - cy.enterTextInTestId("password", "Example password"); - cy.enterTextInTestId("confirmPassword", "Example password"); - - cy.mouseover("#title").click(); - cy.waitTextVisible("Other").click(); - - cy.get("[type=submit]").click(); - return name; + let number = Math.floor(Math.random() * 100000); + let name = `Example Name ${number}`; + let email = `example${number}@example.com`; + cy.enterTextInTestId("email", email); + cy.enterTextInTestId("name", name); + cy.enterTextInTestId("password", "Example password"); + cy.enterTextInTestId("confirmPassword", "Example password"); + + cy.mouseover("#title").click(); + cy.waitTextVisible("Other").click(); + + cy.get("[type=submit]").click(); + return { name, email }; }; describe("add_user", () => { - it("go to user link and invite a user", () => { - cy.login(); + let registeredEmail = ""; + it("go to user link and invite a user", () => { + cy.login(); + + cy.visit("/settings/identities/users"); + cy.waitTextVisible("Invite Users"); + cy.clickOptionWithText("Invite Users"); + + cy.waitTextVisible(/signup\?invite_token=\w{32}/) + .then(($elem) => { + const inviteLink = $elem.text(); + cy.log(inviteLink); cy.visit("/settings/identities/users"); - cy.waitTextVisible("Invite Users"); - - cy.clickOptionWithText("Invite Users"); - - cy.waitTextVisible(/signup\?invite_token=\w{32}/).then(($elem) => { - const inviteLink = $elem.text(); - cy.log(inviteLink); - cy.visit("/settings/identities/users"); - cy.logout(); - cy.visit(inviteLink); - let name = tryToSignUp(); - cy.waitTextVisible("Welcome to DataHub"); - cy.hideOnboardingTour(); - cy.waitTextVisible(name); - }).then(() => { - cy.logout(); - cy.visit("/signup?invite_token=bad_token"); - tryToSignUp(); - cy.waitTextVisible("Failed to log in! An unexpected error occurred."); - }); + cy.logout(); + cy.visit(inviteLink); + const { name, email } = tryToSignUp(); + registeredEmail = email; + cy.waitTextVisible("Welcome to DataHub"); + cy.hideOnboardingTour(); + cy.waitTextVisible(name); + }) + .then(() => { + cy.logout(); + cy.visit("/signup?invite_token=bad_token"); + tryToSignUp(); + cy.waitTextVisible("Failed to log in! 
An unexpected error occurred."); + }); + }); + + it("Verify you can’t generate a reset password link for a non-native user", () => { + cy.login(); + cy.visit("/settings/identities/users"); + cy.waitTextVisible("Invite Users"); + cy.get("[data-testid=userItem-non-native]").first().click(); + cy.get('[data-testid="reset-menu-item"]').should( + "have.attr", + "aria-disabled", + "true" + ); + }); + + it("Generate a reset password link for a native user", () => { + cy.login(); + cy.visit("/settings/identities/users"); + cy.waitTextVisible("Invite Users"); + cy.get(`[data-testid="email-native"]`) + .contains(registeredEmail) + .should("exist") + .parents(".ant-list-item") + .find('[data-testid="userItem-native"]') + .should("be.visible") + .click(); + + cy.get("[data-testid=resetButton]").first().click(); + cy.get("[data-testid=refreshButton]").click(); + cy.waitTextVisible("Generated new link to reset credentials"); + + cy.window().then((win) => { + cy.stub(win, "prompt"); }); -}); + cy.get(".ant-typography-copy").should("be.visible").click(); + cy.get(".ant-modal-close").should("be.visible").click(); -// Verify you can’t generate a reset password link for a non-native user (root, for example) -// Generate a reset password link for a native user -// Log out, then verify that using a bad reset token in the URL doesn’t allow you to reset password -// Use the correct reset link to reset native user credentials \ No newline at end of file + cy.waitTextVisible(/reset\?reset_token=\w{32}/) + .then(($elem) => { + const inviteLink = $elem.text(); + cy.logout(); + cy.visit(inviteLink); + cy.enterTextInTestId("email", registeredEmail); + cy.enterTextInTestId("password", "Example Reset Password"); + cy.enterTextInTestId("confirmPassword", "Example Reset Password"); + cy.get("[type=submit]").click(); + cy.waitTextVisible("Welcome back"); + cy.hideOnboardingTour(); + }) + .then(() => { + cy.logout(); + cy.visit("/reset?reset_token=bad_token"); + cy.enterTextInTestId("email", registeredEmail); + cy.enterTextInTestId("password", "Example Reset Password"); + cy.enterTextInTestId("confirmPassword", "Example Reset Password"); + cy.get("[type=submit]").click(); + cy.waitTextVisible("Failed to log in!"); + }); + }); +}); From 3635c1c2213cfb8421d89b7cc106ab236d72c7ec Mon Sep 17 00:00:00 2001 From: Shubham Jagtap <132359390+shubhamjagtap639@users.noreply.github.com> Date: Thu, 28 Dec 2023 15:24:26 +0530 Subject: [PATCH 151/263] feat(ingestion/bigquery): Use sqlglot_lineage for usage and add more perf timers (#9247) Co-authored-by: Andrew Sikowitz --- metadata-ingestion/setup.py | 2 - .../ingestion/source/bigquery_v2/bigquery.py | 22 +- .../source/bigquery_v2/bigquery_audit.py | 16 +- .../source/bigquery_v2/bigquery_config.py | 5 + .../source/bigquery_v2/bigquery_report.py | 12 +- .../ingestion/source/bigquery_v2/usage.py | 86 ++--- .../datahub/utilities/bigquery_sql_parser.py | 92 ----- .../src/datahub/utilities/sqlglot_lineage.py | 8 +- .../bigquery/test_bigquery_usage.py | 8 +- .../tests/unit/test_bigquery_sql_lineage.py | 66 +++- .../tests/unit/test_bigquery_sql_parser.py | 327 ------------------ .../tests/unit/test_bigquery_usage.py | 14 +- .../unit/test_bigqueryv2_usage_source.py | 6 +- 13 files changed, 159 insertions(+), 505 deletions(-) delete mode 100644 metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py delete mode 100644 metadata-ingestion/tests/unit/test_bigquery_sql_parser.py diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 32d49ffc73fa3..8e4791e253c7c 100644 --- 
a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -295,8 +295,6 @@ "bigquery": sql_common | bigquery_common | { - # TODO: I doubt we need all three sql parsing libraries. - *sqllineage_lib, *sqlglot_lib, "sqlalchemy-bigquery>=1.4.1", "google-cloud-datacatalog-lineage==0.2.2", diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py index 9813945683289..3704eae96aece 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery.py @@ -221,6 +221,7 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): self.bigquery_data_dictionary = BigQuerySchemaApi( self.report.schema_api_perf, self.config.get_bigquery_client() ) + self.sql_parser_schema_resolver = self._init_schema_resolver() redundant_lineage_run_skip_handler: Optional[ RedundantLineageRunSkipHandler @@ -253,6 +254,7 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): self.usage_extractor = BigQueryUsageExtractor( config, self.report, + schema_resolver=self.sql_parser_schema_resolver, dataset_urn_builder=self.gen_dataset_urn_from_ref, redundant_run_skip_handler=redundant_usage_run_skip_handler, ) @@ -283,8 +285,6 @@ def __init__(self, ctx: PipelineContext, config: BigQueryV2Config): # Maps view ref -> actual sql self.view_definitions: FileBackedDict[str] = FileBackedDict() - self.sql_parser_schema_resolver = self._init_schema_resolver() - self.add_config_to_report() atexit.register(cleanup, config) @@ -371,7 +371,10 @@ def usage_capability_test( report: BigQueryV2Report, ) -> CapabilityReport: usage_extractor = BigQueryUsageExtractor( - connection_conf, report, lambda ref: "" + connection_conf, + report, + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda ref: "", ) for project_id in project_ids: try: @@ -447,7 +450,9 @@ def _init_schema_resolver(self) -> SchemaResolver: self.config.lineage_parse_view_ddl or self.config.lineage_use_sql_parser ) schema_ingestion_enabled = ( - self.config.include_views and self.config.include_tables + self.config.include_schema_metadata + and self.config.include_tables + and self.config.include_views ) if schema_resolution_required and not schema_ingestion_enabled: @@ -545,10 +550,11 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: if not projects: return - for project_id in projects: - self.report.set_ingestion_stage(project_id.id, METADATA_EXTRACTION) - logger.info(f"Processing project: {project_id.id}") - yield from self._process_project(project_id) + if self.config.include_schema_metadata: + for project_id in projects: + self.report.set_ingestion_stage(project_id.id, METADATA_EXTRACTION) + logger.info(f"Processing project: {project_id.id}") + yield from self._process_project(project_id) if self.config.include_usage_statistics: yield from self.usage_extractor.get_usage_workunits( diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py index 55366d6c57cf8..8cef10ca23448 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_audit.py @@ -12,6 +12,7 @@ get_first_missing_key, get_first_missing_key_any, ) +from datahub.utilities.urns.dataset_urn import DatasetUrn AuditLogEntry = Any @@ 
-178,6 +179,17 @@ def from_string_name(cls, ref: str) -> "BigQueryTableRef": raise ValueError(f"invalid BigQuery table reference: {ref}") return cls(BigqueryTableIdentifier(parts[1], parts[3], parts[5])) + @classmethod + def from_urn(cls, urn: str) -> "BigQueryTableRef": + """Raises: ValueError if urn is not a valid BigQuery table URN.""" + dataset_urn = DatasetUrn.create_from_string(urn) + split = dataset_urn.get_dataset_name().rsplit(".", 3) + if len(split) == 3: + project, dataset, table = split + else: + _, project, dataset, table = split + return cls(BigqueryTableIdentifier(project, dataset, table)) + def is_temporary_table(self, prefixes: List[str]) -> bool: for prefix in prefixes: if self.table_identifier.dataset.startswith(prefix): @@ -566,7 +578,7 @@ def from_query_event( query_event: QueryEvent, debug_include_full_payloads: bool = False, ) -> "ReadEvent": - readEvent = ReadEvent( + return ReadEvent( actor_email=query_event.actor_email, timestamp=query_event.timestamp, resource=read_resource, @@ -577,8 +589,6 @@ def from_query_event( from_query=True, ) - return readEvent - @classmethod def from_exported_bigquery_audit_metadata( cls, row: BigQueryAuditMetadata, debug_include_full_payloads: bool = False diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py index c13b08a6d9656..58f2a600c2ff7 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_config.py @@ -94,6 +94,11 @@ class BigQueryV2Config( description="Regex patterns for project_id to filter in ingestion.", ) + include_schema_metadata: bool = Field( + default=True, + description="Whether to ingest the BigQuery schema, i.e. 
projects, schemas, tables, and views.", + ) + usage: BigQueryUsageConfig = Field( default=BigQueryUsageConfig(), description="Usage related configs" ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py index 9d92b011ee285..69913b383af87 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/bigquery_report.py @@ -33,6 +33,13 @@ class BigQueryAuditLogApiPerfReport(Report): list_log_entries: PerfTimer = field(default_factory=PerfTimer) +@dataclass +class BigQueryProcessingPerfReport(Report): + sql_parsing_sec: PerfTimer = field(default_factory=PerfTimer) + store_usage_event_sec: PerfTimer = field(default_factory=PerfTimer) + usage_state_size: Optional[str] = None + + @dataclass class BigQueryV2Report(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowReport): num_total_lineage_entries: TopKDict[str, int] = field(default_factory=TopKDict) @@ -120,8 +127,6 @@ class BigQueryV2Report(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowR read_reasons_stat: Counter[str] = field(default_factory=collections.Counter) operation_types_stat: Counter[str] = field(default_factory=collections.Counter) - usage_state_size: Optional[str] = None - exclude_empty_projects: Optional[bool] = None schema_api_perf: BigQuerySchemaApiPerfReport = field( @@ -130,6 +135,9 @@ class BigQueryV2Report(ProfilingSqlReport, IngestionStageReport, BaseTimeWindowR audit_log_api_perf: BigQueryAuditLogApiPerfReport = field( default_factory=BigQueryAuditLogApiPerfReport ) + processing_perf: BigQueryProcessingPerfReport = field( + default_factory=BigQueryProcessingPerfReport + ) lineage_start_time: Optional[datetime] = None lineage_end_time: Optional[datetime] = None diff --git a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py index 65b559550ffc5..ccc64184f3346 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/bigquery_v2/usage.py @@ -35,7 +35,6 @@ AuditEvent, AuditLogEntry, BigQueryAuditMetadata, - BigqueryTableIdentifier, BigQueryTableRef, QueryEvent, ReadEvent, @@ -60,9 +59,9 @@ USAGE_EXTRACTION_USAGE_AGGREGATION, ) from datahub.metadata.schema_classes import OperationClass, OperationTypeClass -from datahub.utilities.bigquery_sql_parser import BigQuerySQLParser from datahub.utilities.file_backed_collections import ConnectionWrapper, FileBackedDict from datahub.utilities.perf_timer import PerfTimer +from datahub.utilities.sqlglot_lineage import SchemaResolver, sqlglot_lineage logger: logging.Logger = logging.getLogger(__name__) @@ -284,7 +283,7 @@ def delete_original_read_events_for_view_query_events(self) -> None: ) def report_disk_usage(self, report: BigQueryV2Report) -> None: - report.usage_state_size = str( + report.processing_perf.usage_state_size = str( { "main": humanfriendly.format_size(os.path.getsize(self.conn.filename)), "queries": humanfriendly.format_size( @@ -310,11 +309,14 @@ def __init__( self, config: BigQueryV2Config, report: BigQueryV2Report, + *, + schema_resolver: SchemaResolver, dataset_urn_builder: Callable[[BigQueryTableRef], str], redundant_run_skip_handler: Optional[RedundantUsageRunSkipHandler] = None, ): self.config: BigQueryV2Config = config self.report: BigQueryV2Report = report + 
self.schema_resolver = schema_resolver self.dataset_urn_builder = dataset_urn_builder # Replace hash of query with uuid if there are hash conflicts self.uuid_to_query: Dict[str, str] = {} @@ -415,10 +417,11 @@ def generate_read_events_from_query( ) -> Iterable[AuditEvent]: try: tables = self.get_tables_from_query( - query_event_on_view.project_id, query_event_on_view.query, + default_project=query_event_on_view.project_id, + default_dataset=query_event_on_view.default_dataset, ) - assert tables is not None and len(tables) != 0 + assert len(tables) != 0 for table in tables: yield AuditEvent.create( ReadEvent.from_query_event(table, query_event_on_view) @@ -462,12 +465,15 @@ def _ingest_events( self.report.num_view_query_events += 1 for new_event in self.generate_read_events_from_query(query_event): - num_generated += self._store_usage_event( - new_event, usage_state, table_refs - ) - num_aggregated += self._store_usage_event( - audit_event, usage_state, table_refs - ) + with self.report.processing_perf.store_usage_event_sec: + num_generated += self._store_usage_event( + new_event, usage_state, table_refs + ) + with self.report.processing_perf.store_usage_event_sec: + num_aggregated += self._store_usage_event( + audit_event, usage_state, table_refs + ) + except Exception as e: logger.warning( f"Unable to store usage event {audit_event}", exc_info=True @@ -905,54 +911,38 @@ def _generate_filter(self, corrected_start_time, corrected_end_time): ) def get_tables_from_query( - self, default_project: str, query: str - ) -> Optional[List[BigQueryTableRef]]: + self, query: str, default_project: str, default_dataset: Optional[str] = None + ) -> List[BigQueryTableRef]: """ This method attempts to parse bigquery objects read in the query """ if not query: - return None + return [] - parsed_tables = set() try: - parser = BigQuerySQLParser( - query, - self.config.sql_parser_use_external_process, - use_raw_names=self.config.lineage_sql_parser_use_raw_names, - ) - tables = parser.get_tables() - except Exception as ex: + with self.report.processing_perf.sql_parsing_sec: + result = sqlglot_lineage( + query, + self.schema_resolver, + default_db=default_project, + default_schema=default_dataset, + ) + except Exception: logger.debug( - f"Sql parsing failed on this query on view: {query}. " - f"Usage won't be added. The error was {ex}." + f"Sql parsing failed on this query on view: {query}. Usage won't be added." 
) - return None + logger.debug(result.debug_info) + return [] - for table in tables: - parts = table.split(".") - if len(parts) == 2: - parsed_tables.add( - BigQueryTableRef( - BigqueryTableIdentifier( - project_id=default_project, dataset=parts[0], table=parts[1] - ) - ).get_sanitized_table_ref() - ) - elif len(parts) == 3: - parsed_tables.add( - BigQueryTableRef( - BigqueryTableIdentifier( - project_id=parts[0], dataset=parts[1], table=parts[2] - ) - ).get_sanitized_table_ref() - ) - else: - logger.debug( - f"Invalid table identifier {table} when parsing query on view {query}" - ) + parsed_table_refs = [] + for urn in result.in_tables: + try: + parsed_table_refs.append(BigQueryTableRef.from_urn(urn)) + except ValueError: + logger.debug(f"Invalid urn {urn} when parsing query on view {query}") self.report.num_view_query_events_failed_table_identification += 1 - return list(parsed_tables) + return parsed_table_refs def _report_error( self, label: str, e: Exception, group: Optional[str] = None diff --git a/metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py b/metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py deleted file mode 100644 index 4ad41f1fe23c9..0000000000000 --- a/metadata-ingestion/src/datahub/utilities/bigquery_sql_parser.py +++ /dev/null @@ -1,92 +0,0 @@ -import re -from typing import List - -import sqlparse - -from datahub.utilities.sql_parser import SqlLineageSQLParser, SQLParser - - -class BigQuerySQLParser(SQLParser): - parser: SQLParser - - def __init__( - self, - sql_query: str, - use_external_process: bool = False, - use_raw_names: bool = False, - ) -> None: - super().__init__(sql_query) - - self._parsed_sql_query = self.parse_sql_query(sql_query) - self.parser = SqlLineageSQLParser( - self._parsed_sql_query, use_external_process, use_raw_names - ) - - def parse_sql_query(self, sql_query: str) -> str: - sql_query = BigQuerySQLParser._parse_bigquery_comment_sign(sql_query) - sql_query = BigQuerySQLParser._escape_keyword_from_as_field_name(sql_query) - sql_query = BigQuerySQLParser._escape_cte_name_after_keyword_with(sql_query) - - sql_query = sqlparse.format( - sql_query.strip(), - reindent_aligned=True, - strip_comments=True, - ) - - sql_query = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - sql_query - ) - sql_query = BigQuerySQLParser._escape_object_name_after_keyword_from(sql_query) - sql_query = BigQuerySQLParser._remove_comma_before_from(sql_query) - - return sql_query - - @staticmethod - def _parse_bigquery_comment_sign(sql_query: str) -> str: - return re.sub(r"#(.*)", r"-- \1", sql_query, flags=re.IGNORECASE) - - @staticmethod - def _escape_keyword_from_as_field_name(sql_query: str) -> str: - return re.sub(r"(\w*\.from)", r"`\1`", sql_query, flags=re.IGNORECASE) - - @staticmethod - def _escape_cte_name_after_keyword_with(sql_query: str) -> str: - """ - Escape the first cte name in case it is one of reserved words - """ - return re.sub(r"(with\s)([^`\s()]+)", r"\1`\2`", sql_query, flags=re.IGNORECASE) - - @staticmethod - def _escape_table_or_view_name_at_create_statement(sql_query: str) -> str: - """ - Reason: in case table name contains hyphens which breaks sqllineage later on - """ - return re.sub( - r"(create.*\s)(table\s|view\s)([^`\s()]+)(?=\sas)", - r"\1\2`\3`", - sql_query, - flags=re.IGNORECASE, - ) - - @staticmethod - def _remove_comma_before_from(sql_query: str) -> str: - return re.sub(r",(\s*?)(?=from)", r" ", sql_query, flags=re.IGNORECASE) - - @staticmethod - def 
_escape_object_name_after_keyword_from(sql_query: str) -> str: - """ - Reason: in case table name contains hyphens which breaks sqllineage later on - Note: ignore cases of having keyword FROM as part of datetime function EXTRACT - """ - return re.sub( - r"(? List[str]: - return self.parser.get_tables() - - def get_columns(self) -> List[str]: - return self.parser.get_columns() diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py index b43c8de4c8f3d..0f84871d6c96a 100644 --- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py +++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py @@ -333,6 +333,9 @@ def _table_level_lineage( return tables, modified +TABLE_CASE_SENSITIVE_PLATFORMS = {"bigquery"} + + class SchemaResolver(Closeable): def __init__( self, @@ -402,7 +405,10 @@ def resolve_table(self, table: _TableName) -> Tuple[str, Optional[SchemaInfo]]: if schema_info: return urn_lower, schema_info - return urn_lower, None + if self.platform in TABLE_CASE_SENSITIVE_PLATFORMS: + return urn, None + else: + return urn_lower, None def _resolve_schema_info(self, urn: str) -> Optional[SchemaInfo]: if urn in self._schema_cache: diff --git a/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py b/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py index bbc3378450bff..9bbe9c45887a8 100644 --- a/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py +++ b/metadata-ingestion/tests/performance/bigquery/test_bigquery_usage.py @@ -14,6 +14,7 @@ from datahub.ingestion.source.bigquery_v2.bigquery_report import BigQueryV2Report from datahub.ingestion.source.bigquery_v2.usage import BigQueryUsageExtractor from datahub.utilities.perf_timer import PerfTimer +from datahub.utilities.sqlglot_lineage import SchemaResolver from tests.performance.bigquery.bigquery_events import generate_events, ref_from_table from tests.performance.data_generation import ( NormalDistribution, @@ -47,7 +48,10 @@ def run_test(): usage_extractor = BigQueryUsageExtractor( config, report, - lambda ref: make_dataset_urn("bigquery", str(ref.table_identifier)), + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda ref: make_dataset_urn( + "bigquery", str(ref.table_identifier) + ), ) report.set_ingestion_stage("All", "Event Generation") @@ -83,7 +87,7 @@ def run_test(): print( f"Peak Memory Used: {humanfriendly.format_size(peak_memory_usage - pre_mem_usage)}" ) - print(f"Disk Used: {report.usage_state_size}") + print(f"Disk Used: {report.processing_perf.usage_state_size}") print(f"Hash collisions: {report.num_usage_query_hash_collisions}") diff --git a/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py b/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py index f807be747a193..755e9081dda39 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py +++ b/metadata-ingestion/tests/unit/test_bigquery_sql_lineage.py @@ -1,4 +1,35 @@ -from datahub.utilities.bigquery_sql_parser import BigQuerySQLParser +from typing import List + +from datahub.ingestion.source.bigquery_v2.bigquery_audit import BigQueryTableRef +from datahub.utilities.sqlglot_lineage import SchemaResolver, sqlglot_lineage + + +class BigQuerySQLParser: + def __init__(self, sql_query: str, schema_resolver: SchemaResolver) -> None: + self.result = sqlglot_lineage(sql_query, schema_resolver) + + def get_tables(self) -> List[str]: + ans = [] + for urn in self.result.in_tables: + table_ref 
= BigQueryTableRef.from_urn(urn) + ans.append(str(table_ref.table_identifier)) + return ans + + def get_columns(self) -> List[str]: + ans = [] + for col_info in self.result.column_lineage or []: + for col_ref in col_info.upstreams: + ans.append(col_ref.column) + return ans + + +def test_bigquery_sql_lineage_basic(): + parser = BigQuerySQLParser( + sql_query="""SELECT * FROM project_1.database_1.view_1""", + schema_resolver=SchemaResolver(platform="bigquery"), + ) + + assert parser.get_tables() == ["project_1.database_1.view_1"] def test_bigquery_sql_lineage_hash_as_comment_sign_is_accepted(): @@ -14,7 +45,8 @@ def test_bigquery_sql_lineage_hash_as_comment_sign_is_accepted(): -- this comment will not break sqllineage either # this comment will not break sqllineage either FROM `project.dataset.src_tbl` - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.dataset.src_tbl"] @@ -39,7 +71,7 @@ def test_bigquery_sql_lineage_camel_case_table(): # this comment will not break sqllineage either FROM `project.dataset.CamelCaseTable` """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.dataset.CamelCaseTable"] @@ -64,7 +96,7 @@ def test_bigquery_sql_lineage_camel_case_dataset(): # this comment will not break sqllineage either FROM `project.DataSet.table` """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.DataSet.table"] @@ -89,7 +121,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset(): # this comment will not break sqllineage either FROM `project.DataSet.CamelTable` """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.DataSet.CamelTable"] @@ -117,7 +149,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset_subquery(): SELECT * FROM `project.DataSet.CamelTable` ) """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.DataSet.CamelTable"] @@ -146,7 +178,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset_joins(): LEFT JOIN `project.DataSet3.CamelTable3` on c.id = b.id """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -179,7 +211,7 @@ def test_bigquery_sql_lineage_camel_case_table_and_dataset_joins_and_subquery(): LEFT JOIN (SELECT * FROM `project.DataSet3.CamelTable3`) c ON c.id = b.id """, - use_raw_names=True, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -199,7 +231,8 @@ def test_bigquery_sql_lineage_keyword_data_is_accepted(): FROM `project.example_dataset.example_table` ) SELECT * FROM data - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.example_dataset.example_table"] @@ -213,7 +246,8 @@ def test_bigquery_sql_lineage_keyword_admin_is_accepted(): FROM `project.example_dataset.example_table` ) SELECT * FROM admin - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.example_dataset.example_table"] @@ -238,7 +272,8 @@ def test_bigquery_sql_lineage_cte_alias_as_keyword_is_accepted(): ) SELECT * FROM map - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -255,7 +290,8 @@ def test_bigquery_sql_lineage_create_or_replace_view_name_with_hyphens_is_accept 
FROM project.dataset.src_table_a UNION SELECT * FROM `project.dataset.src_table_b` - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == [ @@ -270,7 +306,8 @@ def test_bigquery_sql_lineage_source_table_name_with_hyphens_is_accepted(): CREATE OR REPLACE VIEW `project.dataset.test_view` AS SELECT * FROM test-project.dataset.src_table - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["test-project.dataset.src_table"] @@ -282,7 +319,8 @@ def test_bigquery_sql_lineage_from_as_column_name_is_accepted(): CREATE OR REPLACE VIEW `project.dataset.test_view` AS SELECT x.from AS col FROM project.dataset.src_table AS x - """ + """, + schema_resolver=SchemaResolver(platform="bigquery"), ) assert parser.get_tables() == ["project.dataset.src_table"] diff --git a/metadata-ingestion/tests/unit/test_bigquery_sql_parser.py b/metadata-ingestion/tests/unit/test_bigquery_sql_parser.py deleted file mode 100644 index 2a73bfc5e8b68..0000000000000 --- a/metadata-ingestion/tests/unit/test_bigquery_sql_parser.py +++ /dev/null @@ -1,327 +0,0 @@ -import pytest - -from datahub.utilities.bigquery_sql_parser import BigQuerySQLParser - - -def test_bigquery_sql_parser_comments_are_removed(): - parser = BigQuerySQLParser( - sql_query=""" -/* -HERE IS A STANDARD COMMENT BLOCK -THIS WILL NOT BREAK sqllineage -*/ -CREATE OR REPLACE TABLE `project.dataset.test_view` AS -#This, comment will not break sqllineage -SELECT foo --- this comment will not break sqllineage either -# this comment will not break sqllineage either - FROM `project.dataset.src_table` -""" - ) - - assert ( - parser._parsed_sql_query - == """CREATE OR REPLACE TABLE `project.dataset.test_view` AS SELECT foo - FROM `project.dataset.src_table`""" - ) - - assert parser.get_tables() == ["project.dataset.src_table"] - - -def test_bigquery_sql_parser_formats_input_sql(): - parser = BigQuerySQLParser( - sql_query=""" -CREATE OR REPLACE TABLE `project.dataset.test_view` AS -SELECT foo FROM `project.dataset.src_table_a` AS a -INNER JOIN `project.dataset.src_table_b` AS b ON a.key_field = b.key_field -""" - ) - - assert ( - parser._parsed_sql_query - == """CREATE OR REPLACE TABLE `project.dataset.test_view` AS SELECT foo - FROM `project.dataset.src_table_a` AS a - INNER JOIN `project.dataset.src_table_b` AS b - ON a.key_field = b.key_field""" - ) - - assert parser.get_tables() == [ - "project.dataset.src_table_a", - "project.dataset.src_table_b", - ] - - -def test_remove_comma_before_from(): - assert ( - BigQuerySQLParser._remove_comma_before_from( - """ -select a, b,from `project.dataset.table_name_1` -""" - ) - == """ -select a, b from `project.dataset.table_name_1` -""" - ) - - assert ( - BigQuerySQLParser._remove_comma_before_from( - """ -select a, b from `project.dataset.table_name_1` -""" - ) - == """ -select a, b from `project.dataset.table_name_1` -""" - ) - - assert ( - BigQuerySQLParser._remove_comma_before_from( - """ -select - a, - b, -from `project.dataset.table_name_1` -""" - ) - == """ -select - a, - b from `project.dataset.table_name_1` -""" - ) - - -def test_bigquery_sql_parser_subquery(): - parser = BigQuerySQLParser( - sql_query=""" - create or replace table smoke_test_db.table_from_view_and_table - as (select b.date_utc, v.revenue from smoke_test_db.base_table b, smoke_test_db.view_from_table v - """ - ) - assert parser.get_tables() == [ - "smoke_test_db.base_table", - "smoke_test_db.view_from_table", - ] - - -def 
test_bigquery_sql_parser_comment_sign_switched_correctly(): - sql_query = BigQuerySQLParser._parse_bigquery_comment_sign( - """ -#upper comment -SELECT * FROM hello -# lower comment -""" - ) - - assert ( - sql_query - == """ --- upper comment -SELECT * FROM hello --- lower comment -""" - ) - - -def test_bigquery_sql_parser_keyword_from_is_escaped_if_used_as_fieldname(): - sql_query = BigQuerySQLParser._escape_keyword_from_as_field_name( - """ -SELECT hello.from AS col FROM hello -""" - ) - - assert ( - sql_query - == """ -SELECT `hello.from` AS col FROM hello -""" - ) - - -def test_bigquery_sql_parser_first_cte_name_is_escaped(): - sql_query = BigQuerySQLParser._escape_cte_name_after_keyword_with( - """ -CREATE OR REPLACE VIEW `test_view` AS -WITH cte_1 AS ( - SELECT * FROM foo -), -cte_2 AS ( - SELECT * FROM bar -) -SELECT * FROM cte_1 UNION ALL -SELECT * FROM cte_2 -""" - ) - - assert ( - sql_query - == """ -CREATE OR REPLACE VIEW `test_view` AS -WITH `cte_1` AS ( - SELECT * FROM foo -), -cte_2 AS ( - SELECT * FROM bar -) -SELECT * FROM cte_1 UNION ALL -SELECT * FROM cte_2 -""" - ) - - -def test_bigquery_sql_parser_table_name_is_escaped_at_create_statement(): - sql_query_create = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE TABLE project.dataset.test_table AS -col_1 STRING, -col_2 STRING -""" - ) - - sql_query_create_or_replace = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE OR REPLACE TABLE project.dataset.test_table AS -col_1 STRING, -col_2 STRING -""" - ) - - assert ( - sql_query_create - == """ -CREATE TABLE `project.dataset.test_table` AS -col_1 STRING, -col_2 STRING -""" - ) - assert ( - sql_query_create_or_replace - == """ -CREATE OR REPLACE TABLE `project.dataset.test_table` AS -col_1 STRING, -col_2 STRING -""" - ) - - -def test_bigquery_sql_parser_view_name_is_escaped_at_create_statement(): - sql_query_create = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE VIEW project.dataset.test_view AS -SELECT * FROM project.dataset.src_table -""" - ) - - sql_query_create_or_replace = BigQuerySQLParser._escape_table_or_view_name_at_create_statement( - """ -CREATE OR REPLACE VIEW project.dataset.test_view AS -SELECT * FROM project.dataset.src_table -""" - ) - - assert ( - sql_query_create - == """ -CREATE VIEW `project.dataset.test_view` AS -SELECT * FROM project.dataset.src_table -""" - ) - assert ( - sql_query_create_or_replace - == """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT * FROM project.dataset.src_table -""" - ) - - -def test_bigquery_sql_parser_object_name_is_escaped_after_keyword_from(): - sql_query = BigQuerySQLParser._escape_object_name_after_keyword_from( - """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT * FROM src-project.dataset.src_table_a UNION ALL -SELECT * FROM project.dataset.src_table_b -""" - ) - - assert ( - sql_query - == """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT * FROM `src-project.dataset.src_table_a` UNION ALL -SELECT * FROM `project.dataset.src_table_b` -""" - ) - - -def test_bigquery_sql_parser_field_name_is_not_escaped_after_keyword_from_in_datetime_functions(): - sql_query = BigQuerySQLParser._escape_object_name_after_keyword_from( - """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT -EXTRACT(MICROSECOND FROM time_field) AS col_1, -EXTRACT(MILLISECOND FROM time_field) AS col_2, -EXTRACT(SECOND FROM time_field) AS col_3, -EXTRACT(MINUTE FROM time_field) AS col_4, -EXTRACT(HOUR FROM 
time_field) AS col_5, -EXTRACT(DAYOFWEEK FROM time_field) AS col_6, -EXTRACT(DAY FROM time_field) AS col_7, -EXTRACT(DAYOFYEAR FROM time_field) AS col_8, -EXTRACT(WEEK FROM time_field) AS col_9, -EXTRACT(WEEK FROM time_field) AS col_10, -EXTRACT(ISOWEEK FROM time_field) AS col_11, -EXTRACT(MONTH FROM time_field) AS col_12, -EXTRACT(QUARTER FROM time_field) AS col_13, -EXTRACT(YEAR FROM time_field) AS col_14, -EXTRACT(ISOYEAR FROM time_field) AS col_15, -EXTRACT(DATE FROM time_field) AS col_16, -EXTRACT(TIME FROM time_field) AS col_17 -FROM src-project.dataset.src_table_a -""" - ) - - assert ( - sql_query - == """ -CREATE OR REPLACE VIEW `project.dataset.test_view` AS -SELECT -EXTRACT(MICROSECOND FROM time_field) AS col_1, -EXTRACT(MILLISECOND FROM time_field) AS col_2, -EXTRACT(SECOND FROM time_field) AS col_3, -EXTRACT(MINUTE FROM time_field) AS col_4, -EXTRACT(HOUR FROM time_field) AS col_5, -EXTRACT(DAYOFWEEK FROM time_field) AS col_6, -EXTRACT(DAY FROM time_field) AS col_7, -EXTRACT(DAYOFYEAR FROM time_field) AS col_8, -EXTRACT(WEEK FROM time_field) AS col_9, -EXTRACT(WEEK FROM time_field) AS col_10, -EXTRACT(ISOWEEK FROM time_field) AS col_11, -EXTRACT(MONTH FROM time_field) AS col_12, -EXTRACT(QUARTER FROM time_field) AS col_13, -EXTRACT(YEAR FROM time_field) AS col_14, -EXTRACT(ISOYEAR FROM time_field) AS col_15, -EXTRACT(DATE FROM time_field) AS col_16, -EXTRACT(TIME FROM time_field) AS col_17 -FROM `src-project.dataset.src_table_a` -""" - ) - - -def test_bigquery_sql_parser_with_semicolon_in_from(): - sql_query = """CREATE VIEW `acryl-staging.smoke_test_db.view_from_table`\nAS select * from smoke_test_db.base_table;""" - - table_list = BigQuerySQLParser(sql_query).get_tables() - table_list.sort() - assert table_list == ["smoke_test_db.base_table"] - - -@pytest.mark.xfail -def test_bigquery_sql_parser_with_parenthesis_in_from(): - sql_query = """ - CREATE VIEW `acryl-staging.smoke_test_db.view_from_table` AS - select * from smoke_test_db.base_table LEFT JOIN UNNEST(my_array) ON day1 = day2; - """ - - table_list = BigQuerySQLParser(sql_query).get_tables() - table_list.sort() - assert table_list == ["smoke_test_db.base_table"] diff --git a/metadata-ingestion/tests/unit/test_bigquery_usage.py b/metadata-ingestion/tests/unit/test_bigquery_usage.py index c0055763bc15b..664d3112810ff 100644 --- a/metadata-ingestion/tests/unit/test_bigquery_usage.py +++ b/metadata-ingestion/tests/unit/test_bigquery_usage.py @@ -35,6 +35,7 @@ TimeWindowSizeClass, ) from datahub.testing.compare_metadata_json import diff_metadata_json +from datahub.utilities.sqlglot_lineage import SchemaResolver from tests.performance.bigquery.bigquery_events import generate_events, ref_from_table from tests.performance.data_generation import generate_data, generate_queries from tests.performance.data_model import Container, FieldAccess, Query, Table, View @@ -202,7 +203,10 @@ def usage_extractor(config: BigQueryV2Config) -> BigQueryUsageExtractor: return BigQueryUsageExtractor( config, report, - lambda ref: make_dataset_urn("bigquery", str(ref.table_identifier)), + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda ref: make_dataset_urn( + "bigquery", str(ref.table_identifier) + ), ) @@ -961,21 +965,21 @@ def test_operational_stats( def test_get_tables_from_query(usage_extractor): assert usage_extractor.get_tables_from_query( - PROJECT_1, "SELECT * FROM project-1.database_1.view_1" + "SELECT * FROM project-1.database_1.view_1", default_project=PROJECT_1 ) == [ 
BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "view_1")) ] assert usage_extractor.get_tables_from_query( - PROJECT_1, "SELECT * FROM database_1.view_1" + "SELECT * FROM database_1.view_1", default_project=PROJECT_1 ) == [ BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "view_1")) ] assert sorted( usage_extractor.get_tables_from_query( - PROJECT_1, "SELECT v.id, v.name, v.total, t.name as name1 FROM database_1.view_1 as v inner join database_1.table_1 as t on v.id=t.id", + default_project=PROJECT_1, ) ) == [ BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "table_1")), @@ -984,8 +988,8 @@ def test_get_tables_from_query(usage_extractor): assert sorted( usage_extractor.get_tables_from_query( - PROJECT_1, "CREATE TABLE database_1.new_table AS SELECT v.id, v.name, v.total, t.name as name1 FROM database_1.view_1 as v inner join database_1.table_1 as t on v.id=t.id", + default_project=PROJECT_1, ) ) == [ BigQueryTableRef(BigqueryTableIdentifier("project-1", "database_1", "table_1")), diff --git a/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py b/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py index 44fd840f28d59..25e849a509293 100644 --- a/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py +++ b/metadata-ingestion/tests/unit/test_bigqueryv2_usage_source.py @@ -10,6 +10,7 @@ from datahub.ingestion.source.bigquery_v2.bigquery_config import BigQueryV2Config from datahub.ingestion.source.bigquery_v2.bigquery_report import BigQueryV2Report from datahub.ingestion.source.bigquery_v2.usage import BigQueryUsageExtractor +from datahub.utilities.sqlglot_lineage import SchemaResolver FROZEN_TIME = "2021-07-20 00:00:00" @@ -114,7 +115,10 @@ def test_bigqueryv2_filters(): corrected_start_time = config.start_time - config.max_query_duration corrected_end_time = config.end_time + config.max_query_duration filter: str = BigQueryUsageExtractor( - config, BigQueryV2Report(), lambda x: "" + config, + BigQueryV2Report(), + schema_resolver=SchemaResolver(platform="bigquery"), + dataset_urn_builder=lambda x: "", )._generate_filter(corrected_start_time, corrected_end_time) assert filter == expected_filter From 60347d6735ea2136d721bbf6644ae82df6519d9c Mon Sep 17 00:00:00 2001 From: Diego Reiriz Cores Date: Thu, 28 Dec 2023 12:09:10 +0100 Subject: [PATCH 152/263] fix(ingest/mongodb): support disabling schemaSamplingSize (#9295) Co-authored-by: Harshal Sheth --- .../src/datahub/ingestion/source/mongodb.py | 8 +++++--- .../tests/integration/mongodb/test_mongodb.py | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py index 2aa8b1d37d477..283ab652f23c6 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py @@ -102,7 +102,7 @@ class MongoDBConfig( ) schemaSamplingSize: Optional[PositiveInt] = Field( default=1000, - description="Number of documents to use when inferring schema size. If set to `0`, all documents will be scanned.", + description="Number of documents to use when inferring schema size. 
If set to `null`, all documents will be scanned.", ) useRandomSampling: bool = Field( default=True, @@ -225,13 +225,15 @@ def construct_schema_pymongo( ] if use_random_sampling: # get sample documents in collection - aggregations.append({"$sample": {"size": sample_size}}) + if sample_size: + aggregations.append({"$sample": {"size": sample_size}}) documents = collection.aggregate( aggregations, allowDiskUse=True, ) else: - aggregations.append({"$limit": sample_size}) + if sample_size: + aggregations.append({"$limit": sample_size}) documents = collection.aggregate(aggregations, allowDiskUse=True) return construct_schema(list(documents), delimiter) diff --git a/metadata-ingestion/tests/integration/mongodb/test_mongodb.py b/metadata-ingestion/tests/integration/mongodb/test_mongodb.py index 56fb471d4c9f1..0a0ba55ff5b80 100644 --- a/metadata-ingestion/tests/integration/mongodb/test_mongodb.py +++ b/metadata-ingestion/tests/integration/mongodb/test_mongodb.py @@ -26,6 +26,7 @@ def test_mongodb_ingest(docker_compose_runner, pytestconfig, tmp_path, mock_time "password": "examplepass", "maxDocumentSize": 25000, "platform_instance": "instance", + "schemaSamplingSize": None, }, }, "sink": { From 2cd38a469d5ac607bd510a0ca045d151b4657afd Mon Sep 17 00:00:00 2001 From: Tony Ouyang Date: Thu, 28 Dec 2023 03:09:30 -0800 Subject: [PATCH 153/263] fix(ingest): Fix mongodb ingestion when platform_instance is missing from recipe (#9486) Co-authored-by: Harshal Sheth --- metadata-ingestion/src/datahub/ingestion/source/mongodb.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py index 283ab652f23c6..577da91ee82da 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/mongodb.py +++ b/metadata-ingestion/src/datahub/ingestion/source/mongodb.py @@ -379,6 +379,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: platform_instance=self.config.platform_instance, ) + # Initialize data_platform_instance with a default value + data_platform_instance = None if self.config.platform_instance: data_platform_instance = DataPlatformInstanceClass( platform=make_data_platform_urn(platform), From e343b69ce4881ceefdf4af0cafea29188092de52 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Thu, 28 Dec 2023 16:50:13 +0530 Subject: [PATCH 154/263] fix(ingest/snowflake): explicit set schema if public schema is absent (#9526) --- .../source/snowflake/snowflake_profiler.py | 14 ++++++++++++++ .../ingestion/source/snowflake/snowflake_query.py | 4 ++++ .../source/state/stateful_ingestion_base.py | 2 +- 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py index 4bda7da422e9d..9a37f779bbcd5 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_profiler.py @@ -24,6 +24,8 @@ logger = logging.getLogger(__name__) +PUBLIC_SCHEMA = "PUBLIC" + class SnowflakeProfiler(GenericProfiler, SnowflakeCommonMixin): def __init__( @@ -36,6 +38,7 @@ def __init__( self.config: SnowflakeV2Config = config self.report: SnowflakeV2Report = report self.logger = logger + self.database_default_schema: Dict[str, str] = dict() def get_workunits( self, database: SnowflakeDatabase, db_tables: Dict[str, 
List[SnowflakeTable]] @@ -47,6 +50,10 @@ def get_workunits( "max_overflow", self.config.profiling.max_workers ) + if PUBLIC_SCHEMA not in db_tables: + # If PUBLIC schema is absent, we use any one of schemas as default schema + self.database_default_schema[database.name] = list(db_tables.keys())[0] + profile_requests = [] for schema in database.schemas: for table in db_tables[schema.name]: @@ -136,9 +143,16 @@ def get_profiler_instance( ) def callable_for_db_connection(self, db_name: str) -> Callable: + schema_name = self.database_default_schema.get(db_name) + def get_db_connection(): conn = self.config.get_connection() conn.cursor().execute(SnowflakeQuery.use_database(db_name)) + + # As mentioned here - https://docs.snowflake.com/en/sql-reference/sql/use-database#usage-notes + # no schema is selected if PUBLIC schema is absent. We need to explicitly call `USE SCHEMA ` + if schema_name: + conn.cursor().execute(SnowflakeQuery.use_schema(schema_name)) return conn return get_db_connection diff --git a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py index 267f7cf074909..724e4392f1d61 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py +++ b/metadata-ingestion/src/datahub/ingestion/source/snowflake/snowflake_query.py @@ -80,6 +80,10 @@ def show_tags() -> str: def use_database(db_name: str) -> str: return f'use database "{db_name}"' + @staticmethod + def use_schema(schema_name: str) -> str: + return f'use schema "{schema_name}"' + @staticmethod def get_databases(db_name: Optional[str]) -> str: db_clause = f'"{db_name}".' if db_name is not None else "" diff --git a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py index 8a448f40e95b4..61d39b18f523d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py +++ b/metadata-ingestion/src/datahub/ingestion/source/state/stateful_ingestion_base.py @@ -98,7 +98,7 @@ class StatefulIngestionConfigBase(GenericModel, Generic[CustomConfig]): ) -class StatefulLineageConfigMixin: +class StatefulLineageConfigMixin(ConfigModel): enable_stateful_lineage_ingestion: bool = Field( default=True, description="Enable stateful lineage ingestion." From 4de2c24249697fa68831f880fda216ddb46fba3d Mon Sep 17 00:00:00 2001 From: Sumit Patil <91715217+sumitappt@users.noreply.github.com> Date: Thu, 28 Dec 2023 21:37:57 +0530 Subject: [PATCH 155/263] style(search): Border is too thick for sidebar (#9528) --- .../src/app/search/sidebar/BrowseSidebar.tsx | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx index c16bcdcaf6c72..1731727c14cfc 100644 --- a/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx +++ b/datahub-web-react/src/app/search/sidebar/BrowseSidebar.tsx @@ -9,7 +9,6 @@ import useSidebarEntities from './useSidebarEntities'; import { ANTD_GRAY_V2 } from '../../entity/shared/constants'; import { ProfileSidebarResizer } from '../../entity/shared/containers/profile/sidebar/ProfileSidebarResizer'; - export const MAX_BROWSER_WIDTH = 500; export const MIN_BROWSWER_WIDTH = 200; @@ -18,7 +17,6 @@ export const SidebarWrapper = styled.div<{ visible: boolean; width: number }>` width: ${(props) => (props.visible ? 
`${props.width}px` : '0')}; min-width: ${(props) => (props.visible ? `${props.width}px` : '0')}; transition: width 250ms ease-in-out; - border-right: 1px solid ${(props) => props.theme.styles['border-color-base']}; background-color: ${ANTD_GRAY_V2[1]}; background: white; `; @@ -53,7 +51,12 @@ const BrowseSidebar = ({ visible }: Props) => { return ( <> - + Navigate From 5321352852a511bf92685290fc8a4371faaed876 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Fri, 29 Dec 2023 12:53:58 +0530 Subject: [PATCH 156/263] style(ui): humanise duration shown on ingestion page (#9530) --- .../executions/IngestionExecutionTable.tsx | 8 +++---- .../src/app/shared/formatDuration.ts | 21 +++++++++++++++++++ 2 files changed, 24 insertions(+), 5 deletions(-) create mode 100644 datahub-web-react/src/app/shared/formatDuration.ts diff --git a/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx b/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx index 8c81cc36ae3f9..a9d9283ef1377 100644 --- a/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx +++ b/datahub-web-react/src/app/ingest/source/executions/IngestionExecutionTable.tsx @@ -4,6 +4,7 @@ import { StyledTable } from '../../../entity/shared/components/styled/StyledTabl import { ExecutionRequest } from '../../../../types.generated'; import { ButtonsColumn, SourceColumn, StatusColumn, TimeColumn } from './IngestionExecutionTableColumns'; import { SUCCESS } from '../utils'; +import { formatDuration } from '../../../shared/formatDuration'; interface Props { executionRequests: ExecutionRequest[]; @@ -34,13 +35,10 @@ export default function IngestionExecutionTable({ render: TimeColumn, }, { - title: 'Duration (s)', + title: 'Duration', dataIndex: 'duration', key: 'duration', - render: (durationMs: number) => { - const seconds = (durationMs && `${durationMs / 1000}s`) || 'None'; - return seconds; - }, + render: (durationMs: number) => formatDuration(durationMs), }, { title: 'Status', diff --git a/datahub-web-react/src/app/shared/formatDuration.ts b/datahub-web-react/src/app/shared/formatDuration.ts new file mode 100644 index 0000000000000..1028b46f70b31 --- /dev/null +++ b/datahub-web-react/src/app/shared/formatDuration.ts @@ -0,0 +1,21 @@ +export const formatDuration = (durationMs: number): string => { + if (!durationMs) return 'None'; + + const seconds = durationMs / 1000; + + if (seconds < 60) { + return `${seconds.toFixed(1)} s`; + } + + const minutes = Math.floor(seconds / 60); + const remainingSeconds = Math.round(seconds % 60); + + if (minutes < 60) { + return `${minutes} min ${remainingSeconds} s`; + } + + const hours = Math.floor(minutes / 60); + const remainingMinutes = Math.round(minutes % 60); + + return `${hours} hr ${remainingMinutes} min`; +}; From 06bd9b988d3006d57350476ccec18b2a5e7aac37 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Fri, 29 Dec 2023 21:34:06 +0530 Subject: [PATCH 157/263] fix(cli): upsert for data product external url (#9534) --- metadata-ingestion/src/datahub/specific/dataproduct.py | 2 +- .../entities/dataproducts/golden_dataproduct_out_upsert.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/src/datahub/specific/dataproduct.py b/metadata-ingestion/src/datahub/specific/dataproduct.py index 301a0ff63f2f0..bb49ac47b3ef8 100644 --- a/metadata-ingestion/src/datahub/specific/dataproduct.py +++ b/metadata-ingestion/src/datahub/specific/dataproduct.py @@ -152,7 
+152,7 @@ def set_external_url(self, external_url: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, "replace", - path="/external_url", + path="/externalUrl", value=external_url, ) return self diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json index ca4aafe848f60..97c2330f58bc7 100644 --- a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json +++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json @@ -5,7 +5,7 @@ "changeType": "PATCH", "aspectName": "dataProductProperties", "aspect": { - "value": "[{\"op\": \"replace\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"replace\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"replace\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"replace\", \"path\": \"/external_url\", \"value\": \"https://github.com/datahub-project/datahub\"}]", + "value": "[{\"op\": \"replace\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"replace\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"replace\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"replace\", \"path\": \"/externalUrl\", \"value\": \"https://github.com/datahub-project/datahub\"}]", "contentType": "application/json-patch+json" } }, From 31f9c796763677a4d452066d9b49b4088e65da19 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 2 Jan 2024 13:22:22 +0530 Subject: [PATCH 158/263] fix posts are failing to be created as Admin user (#9533) --- datahub-web-react/src/app/settings/posts/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/settings/posts/utils.ts b/datahub-web-react/src/app/settings/posts/utils.ts index ce48c7400738c..9958a0e8d9f0e 100644 --- a/datahub-web-react/src/app/settings/posts/utils.ts +++ b/datahub-web-react/src/app/settings/posts/utils.ts @@ -16,7 +16,7 @@ export const addToListPostCache = (client, newPost, pageSize) => { }); // Add our new post into the existing list. 
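        // Inferred rationale, not spelled out in the commit message: the
        // optimistically prepended `newPost` object lacked fields the list
        // query selects, which broke post creation for some users; the fix
        // below rewrites the cached list without it and relies on the refetch
        // that follows creation to surface the new post.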
- const newPosts = [newPost, ...(currData?.listPosts?.posts || [])]; + const newPosts = [...(currData?.listPosts?.posts || [])]; // Write our data back to the cache. client.writeQuery({ From 0bb838b904807c8fdc8266b6395023079b4dce4f Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Tue, 2 Jan 2024 21:45:55 +0530 Subject: [PATCH 159/263] fix(ui): while creating secrets via UI validate validate characters (#9548) --- datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx index 30f04d61b8fc9..c099d9a580efa 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx @@ -81,7 +81,7 @@ export const SecretBuilderModal = ({ initialState, visible, onSubmit, onCancel } }, { whitespace: false }, { min: 1, max: 50 }, - { pattern: /^[^\s\t${}\\,'"]+$/, message: 'This secret name is not allowed.' }, + { pattern: /^[a-zA-Z_]+[a-zA-Z0-9_]*$/, message: 'Please start the secret name with a letter, followed by letters, digits, or underscores only.' }, ]} hasFeedback > From 6d72640e9149343363885ec275d89fb48d9a9626 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:47:58 +0530 Subject: [PATCH 160/263] feat(ui): add databricks logo (#9473) --- datahub-web-react/src/app/ingest/source/builder/constants.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/datahub-web-react/src/app/ingest/source/builder/constants.ts b/datahub-web-react/src/app/ingest/source/builder/constants.ts index 08538729de40b..bd792d78856d5 100644 --- a/datahub-web-react/src/app/ingest/source/builder/constants.ts +++ b/datahub-web-react/src/app/ingest/source/builder/constants.ts @@ -103,6 +103,8 @@ export const CUSTOM = 'custom'; export const CUSTOM_URN = `urn:li:dataPlatform:${CUSTOM}`; export const UNITY_CATALOG = 'unity-catalog'; export const UNITY_CATALOG_URN = `urn:li:dataPlatform:${UNITY_CATALOG}`; +export const DATABRICKS = 'databricks'; +export const DATABRICKS_URN = `urn:li:dataPlatform:${DATABRICKS}`; export const DBT_CLOUD = 'dbt-cloud'; export const DBT_CLOUD_URN = `urn:li:dataPlatform:dbt`; export const VERTICA = 'vertica'; @@ -143,6 +145,7 @@ export const PLATFORM_URN_TO_LOGO = { [TRINO_URN]: trinoLogo, [SUPERSET_URN]: supersetLogo, [UNITY_CATALOG_URN]: databricksLogo, + [DATABRICKS_URN]: databricksLogo, [VERTICA_URN]: verticaLogo, [FIVETRAN_URN]: fivetranLogo, [CSV_URN]: csvLogo, From 29f2142a2c128f7f165f9011eff3bc647ae92185 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:48:43 +0530 Subject: [PATCH 161/263] feat(databricks): add hive metastore analyze profiling (#9511) --- metadata-ingestion/setup.py | 4 +- .../ingestion/source/sql/sql_config.py | 8 +- .../datahub/ingestion/source/unity/config.py | 16 +- .../source/unity/hive_metastore_proxy.py | 109 ++++++++- .../datahub/ingestion/source/unity/proxy.py | 125 +++++++--- .../ingestion/source/unity/proxy_profiling.py | 50 ++-- .../ingestion/source/unity/proxy_types.py | 24 +- .../datahub/ingestion/source/unity/report.py | 6 + .../datahub/ingestion/source/unity/source.py | 27 ++- .../datahub/ingestion/source/unity/usage.py | 5 +- .../unity/test_unity_catalog_ingest.py | 104 +++++++- 
.../unity/unity_catalog_mces_golden.json | 228 ++++++++++++++++-- 12 files changed, 600 insertions(+), 106 deletions(-) diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py index 8e4791e253c7c..10db019b51381 100644 --- a/metadata-ingestion/setup.py +++ b/metadata-ingestion/setup.py @@ -251,9 +251,7 @@ databricks = { # 0.1.11 appears to have authentication issues with azure databricks - # 0.16.0 added py.typed support which caused mypy to fail. The databricks sdk is pinned until we resolve mypy issues. - # https://github.com/databricks/databricks-sdk-py/pull/483 - "databricks-sdk>=0.9.0,<0.16.0", + "databricks-sdk>=0.9.0", "pyspark~=3.3.0", "requests", # Version 2.4.0 includes sqlalchemy dialect, 2.8.0 includes some bug fixes diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py index 54edab6f3b84b..c0dc70301ba34 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_config.py @@ -112,7 +112,13 @@ def ensure_profiling_pattern_is_passed_to_profiling( cls, values: Dict[str, Any] ) -> Dict[str, Any]: profiling: Optional[GEProfilingConfig] = values.get("profiling") - if profiling is not None and profiling.enabled: + # Note: isinstance() check is required here as unity-catalog source reuses + # SQLCommonConfig with different profiling config than GEProfilingConfig + if ( + profiling is not None + and isinstance(profiling, GEProfilingConfig) + and profiling.enabled + ): profiling._allow_deny_patterns = values["profile_pattern"] return values diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py index 96971faeea69f..df36153af9d83 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py @@ -95,14 +95,6 @@ class UnityCatalogAnalyzeProfilerConfig(UnityCatalogProfilerConfig): description="Number of worker threads to use for profiling. 
Set to 1 to disable.", ) - @pydantic.root_validator(skip_on_failure=True) - def warehouse_id_required_for_profiling( - cls, values: Dict[str, Any] - ) -> Dict[str, Any]: - if values.get("enabled") and not values.get("warehouse_id"): - raise ValueError("warehouse_id must be set when profiling is enabled.") - return values - @property def include_columns(self): return not self.profile_table_level_only @@ -254,6 +246,7 @@ class UnityCatalogSourceConfig( description="Generate usage statistics.", ) + # TODO: Remove `type:ignore` by refactoring config profiling: Union[UnityCatalogGEProfilerConfig, UnityCatalogAnalyzeProfilerConfig] = Field( # type: ignore default=UnityCatalogGEProfilerConfig(), description="Data profiling configuration", @@ -316,7 +309,9 @@ def include_metastore_warning(cls, v: bool) -> bool: @pydantic.root_validator(skip_on_failure=True) def set_warehouse_id_from_profiling(cls, values: Dict[str, Any]) -> Dict[str, Any]: - profiling: Optional[UnityCatalogProfilerConfig] = values.get("profiling") + profiling: Optional[ + Union[UnityCatalogGEProfilerConfig, UnityCatalogAnalyzeProfilerConfig] + ] = values.get("profiling") if not values.get("warehouse_id") and profiling and profiling.warehouse_id: values["warehouse_id"] = profiling.warehouse_id if ( @@ -337,6 +332,9 @@ def set_warehouse_id_from_profiling(cls, values: Dict[str, Any]) -> Dict[str, An if values.get("warehouse_id") and profiling and not profiling.warehouse_id: profiling.warehouse_id = values["warehouse_id"] + if profiling and profiling.enabled and not profiling.warehouse_id: + raise ValueError("warehouse_id must be set when profiling is enabled.") + return values @pydantic.validator("schema_pattern", always=True) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py index 99b2ff998662c..814d86a2f3234 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py @@ -12,11 +12,14 @@ from datahub.ingestion.source.unity.proxy_types import ( Catalog, Column, + ColumnProfile, CustomCatalogType, HiveTableType, Metastore, Schema, Table, + TableProfile, + TableReference, ) logger = logging.getLogger(__name__) @@ -38,6 +41,18 @@ "binary": ColumnTypeName.BINARY, } +NUM_NULLS = "num_nulls" +DISTINCT_COUNT = "distinct_count" +MIN = "min" +MAX = "max" +AVG_COL_LEN = "avg_col_len" +MAX_COL_LEN = "max_col_len" +VERSION = "version" + +ROWS = "rows" +BYTES = "bytes" +TABLE_STAT_LIST = {ROWS, BYTES} + class HiveMetastoreProxy(Closeable): # TODO: Support for view lineage using SQL parsing @@ -67,7 +82,7 @@ def get_inspector(sqlalchemy_url: str, options: dict) -> Inspector: def hive_metastore_catalog(self, metastore: Optional[Metastore]) -> Catalog: return Catalog( - id=HIVE_METASTORE, + id=f"{metastore.id}.{HIVE_METASTORE}" if metastore else HIVE_METASTORE, name=HIVE_METASTORE, comment=None, metastore=metastore, @@ -95,9 +110,14 @@ def hive_metastore_tables(self, schema: Schema) -> Iterable[Table]: continue yield self._get_table(schema, table_name, False) - def _get_table(self, schema: Schema, table_name: str, is_view: bool) -> Table: + def _get_table( + self, + schema: Schema, + table_name: str, + is_view: bool = False, + ) -> Table: columns = self._get_columns(schema, table_name) - detailed_info = self._get_table_info(schema, table_name) + detailed_info = self._get_table_info(schema.name, table_name) comment = 
detailed_info.pop("Comment", None) storage_location = detailed_info.pop("Location", None) @@ -129,6 +149,74 @@ def _get_table(self, schema: Schema, table_name: str, is_view: bool) -> Table: comment=comment, ) + def get_table_profile( + self, ref: TableReference, include_column_stats: bool = False + ) -> TableProfile: + columns = self._get_columns( + Schema( + id=ref.schema, + name=ref.schema, + # This is okay, as none of this is used in profiling + catalog=self.hive_metastore_catalog(None), + comment=None, + owner=None, + ), + ref.table, + ) + detailed_info = self._get_table_info(ref.schema, ref.table) + + table_stats = ( + self._get_cached_table_statistics(detailed_info["Statistics"]) + if detailed_info.get("Statistics") + else {} + ) + + return TableProfile( + num_rows=int(table_stats[ROWS]) + if table_stats.get(ROWS) is not None + else None, + total_size=int(table_stats[BYTES]) + if table_stats.get(BYTES) is not None + else None, + num_columns=len(columns), + column_profiles=[ + self._get_column_profile(column.name, ref) for column in columns + ] + if include_column_stats + else [], + ) + + def _get_column_profile(self, column: str, ref: TableReference) -> ColumnProfile: + + props = self._column_describe_extended(ref.schema, ref.table, column) + col_stats = {} + for prop in props: + col_stats[prop[0]] = prop[1] + return ColumnProfile( + name=column, + null_count=int(col_stats[NUM_NULLS]) + if col_stats.get(NUM_NULLS) is not None + else None, + distinct_count=int(col_stats[DISTINCT_COUNT]) + if col_stats.get(DISTINCT_COUNT) is not None + else None, + min=col_stats.get(MIN), + max=col_stats.get(MAX), + avg_len=col_stats.get(AVG_COL_LEN), + max_len=col_stats.get(MAX_COL_LEN), + version=col_stats.get(VERSION), + ) + + def _get_cached_table_statistics(self, statistics: str) -> dict: + # statistics is in format "xx bytes" OR "1382 bytes, 2 rows" + table_stats = dict() + for prop in statistics.split(","): + value_key_list = prop.strip().split(" ") # value_key_list -> [value, key] + if len(value_key_list) == 2 and value_key_list[1] in TABLE_STAT_LIST: + table_stats[value_key_list[1]] = value_key_list[0] + + return table_stats + def _get_created_at(self, created_at: Optional[str]) -> Optional[datetime]: return ( datetime.strptime(created_at, "%a %b %d %H:%M:%S %Z %Y") @@ -171,8 +259,8 @@ def _get_table_type(self, type: Optional[str]) -> HiveTableType: else: return HiveTableType.UNKNOWN - def _get_table_info(self, schema: Schema, table_name: str) -> dict: - rows = self._describe_extended(schema.name, table_name) + def _get_table_info(self, schema_name: str, table_name: str) -> dict: + rows = self._describe_extended(schema_name, table_name) index = rows.index(("# Detailed Table Information", "", "")) rows = rows[index + 1 :] @@ -235,6 +323,17 @@ def _describe_extended(self, schema_name: str, table_name: str) -> List[Row]: """ return self._execute_sql(f"DESCRIBE EXTENDED `{schema_name}`.`{table_name}`") + def _column_describe_extended( + self, schema_name: str, table_name: str, column_name: str + ) -> List[Row]: + """ + Rows are structured as shown in examples here + https://docs.databricks.com/en/sql/language-manual/sql-ref-syntax-aux-describe-table.html#examples + """ + return self._execute_sql( + f"DESCRIBE EXTENDED `{schema_name}`.`{table_name}` {column_name}" + ) + def _execute_sql(self, sql: str) -> List[Row]: return self.inspector.bind.execute(sql).fetchall() diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py 
b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py index 13baa8b57a639..b414f3f188c23 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy.py @@ -4,7 +4,7 @@ import dataclasses import logging from datetime import datetime, timezone -from typing import Any, Dict, Iterable, List, Optional, Union +from typing import Any, Dict, Iterable, List, Optional, Union, cast from unittest.mock import patch from databricks.sdk import WorkspaceClient @@ -49,16 +49,19 @@ logger: logging.Logger = logging.getLogger(__name__) +@dataclasses.dataclass class TableInfoWithGeneration(TableInfo): generation: Optional[int] = None - @classmethod def as_dict(self) -> dict: return {**super().as_dict(), "generation": self.generation} @classmethod def from_dict(cls, d: Dict[str, Any]) -> "TableInfoWithGeneration": - table_info = super().from_dict(d) + table_info: TableInfoWithGeneration = cast( + TableInfoWithGeneration, + super().from_dict(d), + ) table_info.generation = d.get("generation") return table_info @@ -72,7 +75,10 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, Any]) -> "QueryFilterWithStatementTypes": - v = super().from_dict(d) + v: QueryFilterWithStatementTypes = cast( + QueryFilterWithStatementTypes, + super().from_dict(d), + ) v.statement_types = d["statement_types"] return v @@ -104,7 +110,7 @@ def __init__( def check_basic_connectivity(self) -> bool: return bool(self._workspace_client.catalogs.list()) - def assigned_metastore(self) -> Metastore: + def assigned_metastore(self) -> Optional[Metastore]: response = self._workspace_client.metastores.summary() return self._create_metastore(response) @@ -117,7 +123,9 @@ def catalogs(self, metastore: Optional[Metastore]) -> Iterable[Catalog]: logger.info("Catalogs not found") return [] for catalog in response: - yield self._create_catalog(metastore, catalog) + optional_catalog = self._create_catalog(metastore, catalog) + if optional_catalog: + yield optional_catalog def catalog( self, catalog_name: str, metastore: Optional[Metastore] @@ -126,7 +134,11 @@ def catalog( if not response: logger.info(f"Catalog {catalog_name} not found") return None - return self._create_catalog(metastore, response) + optional_catalog = self._create_catalog(metastore, response) + if optional_catalog: + return optional_catalog + + return None def schemas(self, catalog: Catalog) -> Iterable[Schema]: if ( @@ -140,7 +152,9 @@ def schemas(self, catalog: Catalog) -> Iterable[Schema]: logger.info(f"Schemas not found for catalog {catalog.id}") return [] for schema in response: - yield self._create_schema(catalog, schema) + optional_schema = self._create_schema(catalog, schema) + if optional_schema: + yield optional_schema def tables(self, schema: Schema) -> Iterable[Table]: if ( @@ -158,28 +172,38 @@ def tables(self, schema: Schema) -> Iterable[Table]: return [] for table in response: try: - yield self._create_table(schema, table) + optional_table = self._create_table( + schema, cast(TableInfoWithGeneration, table) + ) + if optional_table: + yield optional_table except Exception as e: logger.warning(f"Error parsing table: {e}") self.report.report_warning("table-parse", str(e)) def service_principals(self) -> Iterable[ServicePrincipal]: for principal in self._workspace_client.service_principals.list(): - yield self._create_service_principal(principal) + optional_sp = self._create_service_principal(principal) + if optional_sp: + yield optional_sp def 
workspace_notebooks(self) -> Iterable[Notebook]: for obj in self._workspace_client.workspace.list("/", recursive=True): - if obj.object_type == ObjectType.NOTEBOOK: + if obj.object_type == ObjectType.NOTEBOOK and obj.object_id and obj.path: yield Notebook( id=obj.object_id, path=obj.path, language=obj.language, created_at=datetime.fromtimestamp( obj.created_at / 1000, tz=timezone.utc - ), + ) + if obj.created_at + else None, modified_at=datetime.fromtimestamp( obj.modified_at / 1000, tz=timezone.utc - ), + ) + if obj.modified_at + else None, ) def query_history( @@ -204,7 +228,9 @@ def query_history( ) for query_info in self._query_history(filter_by=filter_by): try: - yield self._create_query(query_info) + optional_query = self._create_query(query_info) + if optional_query: + yield optional_query except Exception as e: logger.warning(f"Error parsing query: {e}") self.report.report_warning("query-parse", str(e)) @@ -229,15 +255,16 @@ def _query_history( "max_results": max_results, # Max batch size } - response: dict = self._workspace_client.api_client.do( + response: dict = self._workspace_client.api_client.do( # type: ignore method, path, body={**body, "filter_by": filter_by.as_dict()} ) + # we use default raw=False in above request, therefore will always get dict while True: if "res" not in response or not response["res"]: return for v in response["res"]: yield QueryInfo.from_dict(v) - response = self._workspace_client.api_client.do( + response = self._workspace_client.api_client.do( # type: ignore method, path, body={**body, "page_token": response["next_page_token"]} ) @@ -245,7 +272,7 @@ def list_lineages_by_table( self, table_name: str, include_entity_lineage: bool ) -> dict: """List table lineage by table name.""" - return self._workspace_client.api_client.do( + return self._workspace_client.api_client.do( # type: ignore method="GET", path="/api/2.0/lineage-tracking/table-lineage", body={ @@ -256,7 +283,7 @@ def list_lineages_by_table( def list_lineages_by_column(self, table_name: str, column_name: str) -> dict: """List column lineage by table name and column name.""" - return self._workspace_client.api_client.do( + return self._workspace_client.api_client.do( # type: ignore "GET", "/api/2.0/lineage-tracking/column-lineage", body={"table_name": table_name, "column_name": column_name}, @@ -325,7 +352,9 @@ def _escape_sequence(value: str) -> str: @staticmethod def _create_metastore( obj: Union[GetMetastoreSummaryResponse, MetastoreInfo] - ) -> Metastore: + ) -> Optional[Metastore]: + if not obj.name: + return None return Metastore( name=obj.name, id=UnityCatalogApiProxy._escape_sequence(obj.name), @@ -339,7 +368,10 @@ def _create_metastore( def _create_catalog( self, metastore: Optional[Metastore], obj: CatalogInfo - ) -> Catalog: + ) -> Optional[Catalog]: + if not obj.name: + self.report.num_catalogs_missing_name += 1 + return None catalog_name = self._escape_sequence(obj.name) return Catalog( name=obj.name, @@ -350,7 +382,10 @@ def _create_catalog( type=obj.catalog_type, ) - def _create_schema(self, catalog: Catalog, obj: SchemaInfo) -> Schema: + def _create_schema(self, catalog: Catalog, obj: SchemaInfo) -> Optional[Schema]: + if not obj.name: + self.report.num_schemas_missing_name += 1 + return None return Schema( name=obj.name, id=f"{catalog.id}.{self._escape_sequence(obj.name)}", @@ -359,11 +394,14 @@ def _create_schema(self, catalog: Catalog, obj: SchemaInfo) -> Schema: owner=obj.owner, ) - def _create_column(self, table_id: str, obj: ColumnInfo) -> Column: + def 
_create_column(self, table_id: str, obj: ColumnInfo) -> Optional[Column]: + if not obj.name: + self.report.num_columns_missing_name += 1 + return None return Column( name=obj.name, id=f"{table_id}.{self._escape_sequence(obj.name)}", - type_text=obj.type_text, + type_text=obj.type_text or "", type_name=obj.type_name, type_scale=obj.type_scale, type_precision=obj.type_precision, @@ -372,7 +410,12 @@ def _create_column(self, table_id: str, obj: ColumnInfo) -> Column: comment=obj.comment, ) - def _create_table(self, schema: Schema, obj: TableInfoWithGeneration) -> Table: + def _create_table( + self, schema: Schema, obj: TableInfoWithGeneration + ) -> Optional[Table]: + if not obj.name: + self.report.num_tables_missing_name += 1 + return None table_id = f"{schema.id}.{self._escape_sequence(obj.name)}" return Table( name=obj.name, @@ -381,26 +424,40 @@ def _create_table(self, schema: Schema, obj: TableInfoWithGeneration) -> Table: schema=schema, storage_location=obj.storage_location, data_source_format=obj.data_source_format, - columns=[ - self._create_column(table_id, column) for column in obj.columns or [] - ], + columns=list(self._extract_columns(obj.columns, table_id)) + if obj.columns + else [], view_definition=obj.view_definition or None, properties=obj.properties or {}, owner=obj.owner, generation=obj.generation, - created_at=datetime.fromtimestamp(obj.created_at / 1000, tz=timezone.utc), + created_at=datetime.fromtimestamp(obj.created_at / 1000, tz=timezone.utc) + if obj.created_at + else None, created_by=obj.created_by, updated_at=datetime.fromtimestamp(obj.updated_at / 1000, tz=timezone.utc) if obj.updated_at + else None + if obj.updated_at else None, updated_by=obj.updated_by, table_id=obj.table_id, comment=obj.comment, ) + def _extract_columns( + self, columns: List[ColumnInfo], table_id: str + ) -> Iterable[Column]: + for column in columns: + optional_column = self._create_column(table_id, column) + if optional_column: + yield optional_column + def _create_service_principal( self, obj: DatabricksServicePrincipal - ) -> ServicePrincipal: + ) -> Optional[ServicePrincipal]: + if not obj.display_name or not obj.application_id: + return None return ServicePrincipal( id=f"{obj.id}.{self._escape_sequence(obj.display_name)}", display_name=obj.display_name, @@ -408,8 +465,14 @@ def _create_service_principal( active=obj.active, ) - @staticmethod - def _create_query(info: QueryInfo) -> Query: + def _create_query(self, info: QueryInfo) -> Optional[Query]: + if ( + not info.query_text + or not info.query_start_time_ms + or not info.query_end_time_ms + ): + self.report.num_queries_missing_info += 1 + return None return Query( query_id=info.query_id, query_text=info.query_text, diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py index ab38119d01a9b..5992f103ccac3 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_profiling.py @@ -14,6 +14,10 @@ StatementStatus, ) +from datahub.ingestion.source.unity.hive_metastore_proxy import ( + HIVE_METASTORE, + HiveMetastoreProxy, +) from datahub.ingestion.source.unity.proxy_types import ( ColumnProfile, TableProfile, @@ -30,6 +34,7 @@ class UnityCatalogProxyProfilingMixin: _workspace_client: WorkspaceClient report: UnityCatalogReport warehouse_id: str + hive_metastore_proxy: Optional[HiveMetastoreProxy] def check_profiling_connectivity(self): 
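        # The optional hive_metastore_proxy attribute added just above lets
        # _get_table_profile (changed later in this file) divert TableReferences
        # in the HIVE_METASTORE catalog to DESCRIBE-EXTENDED-based profiling,
        # since the workspace-client tables.get() path only covers Unity
        # Catalog tables.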
self._workspace_client.warehouses.get(self.warehouse_id) @@ -136,6 +141,8 @@ def _analyze_table( def _check_analyze_table_statement_status( self, execute_response: ExecuteStatementResponse, max_wait_secs: int ) -> bool: + if not execute_response.statement_id or not execute_response.status: + return False statement_id: str = execute_response.statement_id status: StatementStatus = execute_response.status @@ -152,13 +159,15 @@ def _check_analyze_table_statement_status( statement_id ) self._raise_if_error(response, "get-statement") - status = response.status + status = response.status # type: ignore return status.state == StatementState.SUCCEEDED def _get_table_profile( self, ref: TableReference, include_columns: bool ) -> TableProfile: + if self.hive_metastore_proxy and ref.catalog == HIVE_METASTORE: + return self.hive_metastore_proxy.get_table_profile(ref, include_columns) table_info = self._workspace_client.tables.get(ref.qualified_table_name) return self._create_table_profile(table_info, include_columns=include_columns) @@ -166,7 +175,12 @@ def _create_table_profile( self, table_info: TableInfo, include_columns: bool ) -> TableProfile: # Warning: this implementation is brittle -- dependent on properties that can change - columns_names = [column.name for column in table_info.columns] + columns_names = ( + [column.name for column in table_info.columns if column.name] + if table_info.columns + else [] + ) + return TableProfile( num_rows=self._get_int(table_info, "spark.sql.statistics.numRows"), total_size=self._get_int(table_info, "spark.sql.statistics.totalSize"), @@ -182,6 +196,7 @@ def _create_table_profile( def _create_column_profile( self, column: str, table_info: TableInfo ) -> ColumnProfile: + tblproperties = table_info.properties or {} return ColumnProfile( name=column, null_count=self._get_int( @@ -190,25 +205,18 @@ def _create_column_profile( distinct_count=self._get_int( table_info, f"spark.sql.statistics.colStats.{column}.distinctCount" ), - min=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.min" - ), - max=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.max" - ), - avg_len=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.avgLen" - ), - max_len=table_info.properties.get( - f"spark.sql.statistics.colStats.{column}.maxLen" - ), - version=table_info.properties.get( + min=tblproperties.get(f"spark.sql.statistics.colStats.{column}.min"), + max=tblproperties.get(f"spark.sql.statistics.colStats.{column}.max"), + avg_len=tblproperties.get(f"spark.sql.statistics.colStats.{column}.avgLen"), + max_len=tblproperties.get(f"spark.sql.statistics.colStats.{column}.maxLen"), + version=tblproperties.get( f"spark.sql.statistics.colStats.{column}.version" ), ) def _get_int(self, table_info: TableInfo, field: str) -> Optional[int]: - value = table_info.properties.get(field) + tblproperties = table_info.properties or {} + value = tblproperties.get(field) if value is not None: try: return int(value) @@ -223,14 +231,18 @@ def _get_int(self, table_info: TableInfo, field: str) -> Optional[int]: def _raise_if_error( response: Union[ExecuteStatementResponse, GetStatementResponse], key: str ) -> None: - if response.status.state in [ + if response.status and response.status.state in [ StatementState.FAILED, StatementState.CANCELED, StatementState.CLOSED, ]: raise DatabricksError( - response.status.error.message, - error_code=response.status.error.error_code.value, + response.status.error.message + if response.status.error and 
response.status.error.message + else "Unknown Error", + error_code=response.status.error.error_code.value + if response.status.error and response.status.error.error_code + else "Unknown Error Code", status=response.status.state.value, context=key, ) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py index e5951cb0fa4ff..c66189d99f738 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/proxy_types.py @@ -96,8 +96,8 @@ class CommonProperty: @dataclass class Metastore(CommonProperty): - global_metastore_id: str # Global across clouds and regions - metastore_id: str + global_metastore_id: Optional[str] # Global across clouds and regions + metastore_id: Optional[str] owner: Optional[str] cloud: Optional[str] region: Optional[str] @@ -107,7 +107,7 @@ class Metastore(CommonProperty): class Catalog(CommonProperty): metastore: Optional[Metastore] owner: Optional[str] - type: Union[CatalogType, CustomCatalogType] + type: Optional[Union[CatalogType, CustomCatalogType]] @dataclass @@ -224,14 +224,14 @@ class Table(CommonProperty): columns: List[Column] storage_location: Optional[str] data_source_format: Optional[DataSourceFormat] - table_type: Union[TableType, HiveTableType] + table_type: Optional[Union[TableType, HiveTableType]] owner: Optional[str] generation: Optional[int] created_at: Optional[datetime] created_by: Optional[str] updated_at: Optional[datetime] updated_by: Optional[str] - table_id: str + table_id: Optional[str] view_definition: Optional[str] properties: Dict[str, str] upstreams: Dict[TableReference, Dict[str, List[str]]] = field(default_factory=dict) @@ -252,16 +252,16 @@ def __post_init__(self): @dataclass class Query: - query_id: str + query_id: Optional[str] query_text: str - statement_type: QueryStatementType + statement_type: Optional[QueryStatementType] start_time: datetime end_time: datetime # User who ran the query - user_id: int + user_id: Optional[int] user_name: Optional[str] # Email or username # User whose credentials were used to run the query - executed_as_user_id: int + executed_as_user_id: Optional[int] executed_as_user_name: Optional[str] @@ -310,9 +310,9 @@ def __bool__(self): class Notebook: id: NotebookId path: str - language: Language - created_at: datetime - modified_at: datetime + language: Optional[Language] + created_at: Optional[datetime] + modified_at: Optional[datetime] upstreams: FrozenSet[TableReference] = field(default_factory=frozenset) diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py index 0770d9d27055c..02eedb67f4cc2 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/report.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/report.py @@ -39,3 +39,9 @@ class UnityCatalogReport(IngestionStageReport, ProfilingSqlReport): num_profile_missing_size_in_bytes: int = 0 num_profile_failed_unsupported_column_type: int = 0 num_profile_failed_int_casts: int = 0 + + num_catalogs_missing_name: int = 0 + num_schemas_missing_name: int = 0 + num_tables_missing_name: int = 0 + num_columns_missing_name: int = 0 + num_queries_missing_info: int = 0 diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py index 43c5e24439377..1bc47c6307849 100644 --- 
a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py @@ -304,22 +304,28 @@ def process_notebooks(self) -> Iterable[MetadataWorkUnit]: yield from self._gen_notebook_workunits(notebook) def _gen_notebook_workunits(self, notebook: Notebook) -> Iterable[MetadataWorkUnit]: + + properties = {"path": notebook.path} + if notebook.language: + properties["language"] = notebook.language.value + mcps = MetadataChangeProposalWrapper.construct_many( entityUrn=self.gen_notebook_urn(notebook), aspects=[ DatasetPropertiesClass( name=notebook.path.rsplit("/", 1)[-1], - customProperties={ - "path": notebook.path, - "language": notebook.language.value, - }, + customProperties=properties, externalUrl=urljoin( self.config.workspace_url, f"#notebook/{notebook.id}" ), - created=TimeStampClass(int(notebook.created_at.timestamp() * 1000)), + created=TimeStampClass(int(notebook.created_at.timestamp() * 1000)) + if notebook.created_at + else None, lastModified=TimeStampClass( int(notebook.modified_at.timestamp() * 1000) - ), + ) + if notebook.modified_at + else None, ), SubTypesClass(typeNames=[DatasetSubTypes.NOTEBOOK]), BrowsePathsClass(paths=notebook.path.split("/")), @@ -352,6 +358,9 @@ def process_metastores(self) -> Iterable[MetadataWorkUnit]: metastore: Optional[Metastore] = None if self.config.include_metastore: metastore = self.unity_catalog_api_proxy.assigned_metastore() + if not metastore: + self.report.report_failure("Metastore", "Not found") + return yield from self.gen_metastore_containers(metastore) yield from self.process_catalogs(metastore) if metastore and self.config.include_metastore: @@ -705,13 +714,15 @@ def _create_table_property_aspect(self, table: Table) -> DatasetPropertiesClass: if table.generation is not None: custom_properties["generation"] = str(table.generation) - custom_properties["table_type"] = table.table_type.value + if table.table_type: + custom_properties["table_type"] = table.table_type.value if table.created_by: custom_properties["created_by"] = table.created_by if table.properties: custom_properties.update({k: str(v) for k, v in table.properties.items()}) - custom_properties["table_id"] = table.table_id + if table.table_id: + custom_properties["table_id"] = table.table_id if table.owner: custom_properties["owner"] = table.owner if table.updated_by: diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py b/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py index ab21c1a318659..f07e7a92d8762 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py +++ b/metadata-ingestion/src/datahub/ingestion/source/unity/usage.py @@ -117,7 +117,10 @@ def _get_workunits_internal( def _generate_operation_workunit( self, query: Query, table_info: QueryTableInfo ) -> Iterable[MetadataWorkUnit]: - if query.statement_type not in OPERATION_STATEMENT_TYPES: + if ( + not query.statement_type + or query.statement_type not in OPERATION_STATEMENT_TYPES + ): return None # Not sure about behavior when there are multiple target tables. This is a best attempt. 
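The hunks above all make the same defensive change: fields on the Databricks SDK response objects are now treated as Optional, so each access is guarded or given a fallback before use. A minimal, self-contained sketch of that pattern follows; the TableInfo dataclass here is a hypothetical stand-in for illustration, not the databricks-sdk class:

# Sketch of the None-guarding pattern applied throughout this patch.
# This TableInfo is a stand-in dataclass, not the databricks-sdk type.
from dataclasses import dataclass
from typing import Dict, List, Optional

@dataclass
class TableInfo:
    columns: Optional[List[str]]
    properties: Optional[Dict[str, str]]

def safe_column_names(table_info: TableInfo) -> List[str]:
    # Guard both the container and each element, as _create_table_profile now does.
    if not table_info.columns:
        return []
    return [name for name in table_info.columns if name]

def safe_property(table_info: TableInfo, key: str) -> Optional[str]:
    # Fall back to an empty dict so lookups never raise AttributeError,
    # mirroring the `tblproperties = table_info.properties or {}` change.
    return (table_info.properties or {}).get(key)

if __name__ == "__main__":
    empty = TableInfo(columns=None, properties=None)
    assert safe_column_names(empty) == []
    assert safe_property(empty, "spark.sql.statistics.numRows") is None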
diff --git a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py index aab7630d57f46..05f1db0b932f8 100644 --- a/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py +++ b/metadata-ingestion/tests/integration/unity/test_unity_catalog_ingest.py @@ -186,6 +186,8 @@ def register_mock_data(workspace_client): "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", }, "generation": 2, "metastore_id": "2c983545-d403-4f87-9063-5b7e3b6d3736", @@ -200,6 +202,57 @@ def register_mock_data(workspace_client): ) ] + workspace_client.tables.get = lambda *args, **kwargs: databricks.sdk.service.catalog.TableInfo.from_dict( + { + "name": "quickstart_table", + "catalog_name": "quickstart_catalog", + "schema_name": "quickstart_schema", + "table_type": "MANAGED", + "data_source_format": "DELTA", + "columns": [ + { + "name": "columnA", + "type_text": "int", + "type_json": '{"name":"columnA","type":"integer","nullable":true,"metadata":{}}', + "type_name": "INT", + "type_precision": 0, + "type_scale": 0, + "position": 0, + "nullable": True, + }, + { + "name": "columnB", + "type_text": "string", + "type_json": '{"name":"columnB","type":"string","nullable":true,"metadata":{}}', + "type_name": "STRING", + "type_precision": 0, + "type_scale": 0, + "position": 1, + "nullable": True, + }, + ], + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "properties": { + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + }, + "generation": 2, + "metastore_id": "2c983545-d403-4f87-9063-5b7e3b6d3736", + "full_name": "quickstart_catalog.quickstart_schema.quickstart_table", + "data_access_configuration_id": "00000000-0000-0000-0000-000000000000", + "created_at": 1666185698688, + "created_by": "abc@acryl.io", + "updated_at": 1666186049633, + "updated_by": "abc@acryl.io", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + } + ) + workspace_client.service_principals.list.return_value = [ ServicePrincipal.from_dict(d) for d in [ @@ -220,7 +273,50 @@ def register_mock_data(workspace_client): def mock_hive_sql(query): - if query == "DESCRIBE EXTENDED `bronze_kambi`.`bet`": + + if query == "DESCRIBE EXTENDED `bronze_kambi`.`bet` betStatusId": + return [ + ("col_name", "betStatusId"), + ("data_type", "bigint"), + ("comment", None), + ("min", None), + ("max", None), + ("num_nulls", 0), + ("distinct_count", 1), + ("avg_col_len", 8), + ("max_col_len", 8), + ("histogram", None), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`bet` channelId": + return [ + ("col_name", "channelId"), + ("data_type", "bigint"), + ("comment", None), + ("min", None), + ("max", None), + ("num_nulls", 0), + ("distinct_count", 1), + ("avg_col_len", 8), + ("max_col_len", 8), + ("histogram", None), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`bet` combination": + return [ + ("col_name", "combination"), + ( + "data_type", + 
"struct>,eventId:bigint,eventName:string,eventStartDate:string,live:boolean,odds:double,outcomeIds:array,outcomeLabel:string,sportId:string,status:string,voidReason:string>>,payout:double,rewardExtraPayout:double,stake:double>", + ), + ("comment", None), + ("min", None), + ("max", None), + ("num_nulls", None), + ("distinct_count", None), + ("avg_col_len", None), + ("max_col_len", None), + ("histogram", None), + ] + elif query == "DESCRIBE EXTENDED `bronze_kambi`.`bet`": return [ ("betStatusId", "bigint", None), ("channelId", "bigint", None), @@ -237,6 +333,7 @@ def mock_hive_sql(query): ("Created Time", "Wed Jun 22 05:14:56 UTC 2022", ""), ("Last Access", "UNKNOWN", ""), ("Created By", "Spark 3.2.1", ""), + ("Statistics", "1024 bytes, 3 rows", ""), ("Type", "MANAGED", ""), ("Location", "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", ""), ("Provider", "delta", ""), @@ -312,6 +409,11 @@ def test_ingestion(pytestconfig, tmp_path, requests_mock): "include_ownership": True, "include_hive_metastore": True, "warehouse_id": "test", + "profiling": { + "enabled": True, + "method": "analyze", + "call_analyze": False, + }, }, }, "sink": { diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 98a6615dd2b52..383f94144ffdc 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -504,7 +504,7 @@ "Last Access": "UNKNOWN", "Created By": "Spark 3.2.1", "Owner": "root", - "table_id": "hive_metastore.bronze_kambi.view1", + "table_id": "acryl_metastore.hive_metastore.bronze_kambi.view1", "created_at": "2022-06-22 05:14:56" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/view1", @@ -638,7 +638,7 @@ "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "hive_metastore.bronze_kambi.view1", + "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.view1", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1172,10 +1172,11 @@ "Table": "bet", "Last Access": "UNKNOWN", "Created By": "Spark 3.2.1", + "Statistics": "1024 bytes, 3 rows", "Owner": "root", "Is_managed_location": "true", "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", - "table_id": "hive_metastore.bronze_kambi.bet", + "table_id": "acryl_metastore.hive_metastore.bronze_kambi.bet", "created_at": "2022-06-22 05:14:56" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", @@ -1275,7 +1276,7 @@ "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "hive_metastore.bronze_kambi.bet", + "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.bet", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1731,15 +1732,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 
13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", "name": "quickstart_table", @@ -2061,15 +2064,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", "name": "quickstart_table", @@ -2527,15 +2532,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", "name": "quickstart_table", @@ -2857,15 +2864,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", "name": "quickstart_table", @@ -3323,15 +3332,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": "1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", "name": "quickstart_table", @@ -3653,15 +3664,17 @@ "generation": "2", "table_type": "MANAGED", "created_by": "abc@acryl.io", - "created_at": "2022-10-19 13:21:38.688000+00:00", "delta.lastCommitTimestamp": "1666185711000", "delta.lastUpdateVersion": "1", "delta.minReaderVersion": 
"1", "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", "owner": "account users", "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00" + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/quickstart_schema/quickstart_table", "name": "quickstart_table", @@ -3813,6 +3826,69 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920011, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703581191932, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 3, + "columnCount": 3, + "fieldProfiles": [ + { + "fieldPath": "betStatusId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + }, + { + "fieldPath": "channelId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + } + ], + "sizeInBytes": 1024 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", @@ -3829,6 +3905,30 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580406273, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", @@ -3845,6 +3945,78 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920008, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + 
"systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920011, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920012, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", @@ -3877,6 +4049,30 @@ "lastRunId": "no-run-id-provided" } }, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", + "aspect": { + "json": { + "timestampMillis": 1703580920010, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, { "entityType": "dataset", "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", From 2d302fe754969a4ec64b678d6a4002558eee66b3 Mon Sep 17 00:00:00 2001 From: Kunal-kankriya <127090035+Kunal-kankriya@users.noreply.github.com> Date: Wed, 3 Jan 2024 13:59:20 +0530 Subject: [PATCH 162/263] fix(cypress): make setting manage policy test not flaky (#9547) --- .../cypress/e2e/settings/manage_policies.js | 247 ++++++++---------- 1 file changed, 104 insertions(+), 143 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js b/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js index 6515d92285e2e..0e69a4e7f287a 100644 --- a/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js +++ b/smoke-test/tests/cypress/cypress/e2e/settings/manage_policies.js @@ -4,149 +4,110 @@ const platform_policy_edited = `Platform test policy ${test_id} EDITED`; const metadata_policy_name = `Metadata test policy ${test_id}`; const metadata_policy_edited = `Metadata test policy ${test_id} EDITED`; + + +function searchAndToggleMetadataPolicyStatus(metadataPolicyName, targetStatus) { + cy.get('[data-testid="search-input"]').should('be.visible'); + cy.get('[data-testid="search-input"]').eq(1).type(metadataPolicyName); + cy.contains('tr', metadataPolicyName).as('metadataPolicyRow'); + 
cy.contains(targetStatus).click(); +} + +function clickFocusAndType(Id, text) { + cy.clickOptionWithTestId(Id) + .focused().clear() + .type(text); +} + +function updateAndSave(Id, groupName, text) { + cy.clickOptionWithTestId(Id).type(groupName); + cy.get(`[title='${text}']`).click(); + cy.focused().blur(); +} + +function clickOnButton(buttonId) { + cy.get(`#${buttonId}`).click(); +} + +function createPolicy(description, policyName) { + clickFocusAndType("policy-description", description) + clickOnButton("nextButton"); + updateAndSave("privileges", "All", "All Privileges") + clickOnButton("nextButton"); + updateAndSave("users", "All", "All Users") + updateAndSave("groups", "All", "All Groups") + clickOnButton("saveButton"); + cy.waitTextVisible("Successfully saved policy."); + cy.waitTextVisible(policyName); +} + +function editPolicy(policyName, newPolicyName, description, policyEdited, visibleDescription) { + searchAndToggleMetadataPolicyStatus(policyName, 'EDIT') + cy.clickOptionWithTestId("policy-name") + cy.focused().clear().type(newPolicyName); + cy.clickOptionWithTestId("policy-description"); + cy.focused().clear().type(description); + clickOnButton("nextButton"); + clickOnButton("nextButton"); + clickOnButton("saveButton"); + cy.waitTextVisible("Successfully saved policy."); + cy.waitTextVisible(policyEdited); + cy.waitTextVisible(visibleDescription); +} + +function deletePolicy(policyName, deleteTitle, policyEdited) { + searchAndToggleMetadataPolicyStatus(policyName, 'DEACTIVATE') + cy.waitTextVisible("Successfully deactivated policy.") + cy.contains('DEACTIVATE').should('not.exist') + cy.contains('ACTIVATE').click(); + cy.waitTextVisible("Successfully activated policy.") + cy.get("[data-icon='delete']").click(); + cy.waitTextVisible(deleteTitle); + cy.clickOptionWithText("Yes"); + cy.waitTextVisible("Successfully removed policy."); + cy.ensureTextNotPresent(policyEdited); +} + describe("create and manage platform and metadata policies", () => { + beforeEach(() => { + cy.loginWithCredentials(); + cy.visit("/settings/permissions/policies"); + }); + + it("create platform policy", () => { + cy.waitTextVisible("Manage Permissions"); + cy.clickOptionWithText("Create new policy"); + clickFocusAndType("policy-name", platform_policy_name) + cy.get('[data-testid="policy-type"] [title="Metadata"]').click(); + cy.clickOptionWithTestId("platform"); + createPolicy(`Platform policy description ${test_id}`, platform_policy_name) + }); + + it("edit platform policy", () => { + editPolicy(`${platform_policy_name}`, platform_policy_edited, + `Platform policy description ${test_id} EDITED`, + platform_policy_edited, `Platform policy description ${test_id} EDITED`) + }); + + it("deactivate and activate platform policy", () => { + deletePolicy(`${platform_policy_edited}`, `Delete ${platform_policy_edited}`, `${platform_policy_edited}`) + }); + + it("create metadata policy", () => { + cy.clickOptionWithText("Create new policy"); + clickFocusAndType("policy-name", metadata_policy_name) + cy.get('[data-testid="policy-type"]').should('have.text', 'Metadata'); + createPolicy(`Metadata policy description ${test_id}`, metadata_policy_name) + }); + + it("edit metadata policy", () => { + editPolicy(`${metadata_policy_name}`, metadata_policy_edited, + `Metadata policy description ${test_id} EDITED`, + metadata_policy_edited, `Metadata policy description ${test_id} EDITED`) + }); + + it("deactivate and activate metadata policy", () => { + deletePolicy(`${metadata_policy_name}`, `Delete ${metadata_policy_name}`,
`${metadata_policy_edited}`) + }); - it("create platform policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.waitTextVisible("Manage Permissions"); - cy.clickOptionWithText("Create new policy"); - cy.clickOptionWithTestId("policy-name") - .focused() - .type(platform_policy_name); - cy.get('[data-testid="policy-type"] [title="Metadata"]').click(); - cy.clickOptionWithTestId("platform"); - cy.clickOptionWithTestId("policy-description") - .focused() - .type(`Platform policy description ${test_id}`); - cy.get("#nextButton").click(); - cy.get('[data-testid="privileges"]').type("All"); - cy.clickOptionWithText("All Privileges").focused().blur(); - cy.get("#nextButton").click(); - cy.get('[data-testid="users"]').type("All"); - cy.get("[title='All Users']").click(); - cy.focused().blur(); - cy.get('[data-testid="groups"]').type("All"); - cy.get("[title='All Groups']").click(); - cy.focused().blur(); - cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(platform_policy_name); - }); - - it("edit platform policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${platform_policy_name}` ) - .contains('EDIT') - .click(); - cy.clickOptionWithTestId("policy-name"); - cy.focused().clear().type(platform_policy_edited); - cy.clickOptionWithTestId("policy-description"); - cy.focused().clear().type(`Platform policy description ${test_id} EDITED`); - cy.get("#nextButton").click(); - cy.get("#nextButton").click(); - cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(platform_policy_edited); - cy.waitTextVisible(`Platform policy description ${test_id} EDITED`); - }); - - it("deactivate and activate platform policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${platform_policy_edited}` ) - .contains('DEACTIVATE') - .click(); - cy.waitTextVisible("Successfully deactivated policy.") - cy.contains('tr', `${platform_policy_edited}` ) - .contains('INACTIVE') - .should("be.visible"); - cy.contains('tr', `${platform_policy_edited}` ) - .contains('ACTIVATE') - .click(); - cy.waitTextVisible("Successfully activated policy.") - cy.contains('tr', `${platform_policy_edited}` ) - .contains('ACTIVE') - .should("be.visible"); - cy.contains('tr', `${platform_policy_edited}` ) - .find("[data-icon='delete']") - .click(); - cy.waitTextVisible(`Delete ${platform_policy_edited}`); - cy.clickOptionWithText("Yes"); - cy.waitTextVisible("Successfully removed policy."); - cy.ensureTextNotPresent(`${platform_policy_edited}`); - - }); - - it("create metadata policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.clickOptionWithText("Create new policy"); - cy.clickOptionWithTestId("policy-name") - .focused() - .type(metadata_policy_name); - cy.get('[data-testid="policy-type"]').should('have.text', 'Metadata'); - cy.clickOptionWithTestId("policy-description") - .focused() - .type(`Metadata policy description ${test_id}`); - cy.get("#nextButton").click(); - cy.get('[data-testid="privileges"]').type("All"); - cy.clickOptionWithText("All Privileges").focused().blur(); - cy.get("#nextButton").click(); - cy.get('[data-testid="users"]').type("All"); - cy.get("[title='All Users']").click(); - cy.focused().blur(); - cy.get('[data-testid="groups"]').type("All"); - cy.get("[title='All Groups']").click(); - cy.focused().blur(); - 
cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(metadata_policy_name); - }); - - it("edit metadata policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${metadata_policy_name}` ) - .contains('EDIT') - .click(); - cy.clickOptionWithTestId("policy-name") - cy.focused().clear().type(metadata_policy_edited); - cy.clickOptionWithTestId("policy-description"); - cy.focused().clear().type(`Metadata policy description ${test_id} EDITED`); - cy.get("#nextButton").click(); - cy.get("#nextButton").click(); - cy.get("#saveButton").click(); - cy.waitTextVisible("Successfully saved policy."); - cy.waitTextVisible(metadata_policy_edited); - cy.waitTextVisible(`Metadata policy description ${test_id} EDITED`); - }); - - it("deactivate and activate metadata policy", () => { - cy.loginWithCredentials(); - cy.visit("/settings/permissions/policies"); - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('DEACTIVATE') - .click(); - cy.waitTextVisible("Successfully deactivated policy.") - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('INACTIVE') - .should("be.visible"); - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('ACTIVATE') - .click(); - cy.waitTextVisible("Successfully activated policy.") - cy.contains('tr', `${metadata_policy_edited}` ) - .contains('ACTIVE') - .should("be.visible"); - cy.contains('tr', `${metadata_policy_edited}` ) - .find("[data-icon='delete']") - .click(); - cy.waitTextVisible(`Delete ${metadata_policy_edited}`); - cy.clickOptionWithText("Yes"); - cy.waitTextVisible("Successfully removed policy."); - cy.ensureTextNotPresent(`${metadata_policy_edited}`); - }); - }); \ No newline at end of file From c395d86139c773cd374fa6a52587614787580192 Mon Sep 17 00:00:00 2001 From: kushagra-apptware <81357546+kushagra-apptware@users.noreply.github.com> Date: Wed, 3 Jan 2024 14:00:28 +0530 Subject: [PATCH 163/263] fix(ui): search user incorrect role shown (#9532) --- datahub-web-react/src/app/identity/user/SelectRole.tsx | 6 +++++- datahub-web-react/src/app/identity/user/UserList.tsx | 9 ++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/datahub-web-react/src/app/identity/user/SelectRole.tsx b/datahub-web-react/src/app/identity/user/SelectRole.tsx index 011eae0fbd8b3..deaa85f14b088 100644 --- a/datahub-web-react/src/app/identity/user/SelectRole.tsx +++ b/datahub-web-react/src/app/identity/user/SelectRole.tsx @@ -1,4 +1,4 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { UserOutlined } from '@ant-design/icons'; import { Select } from 'antd'; import { useApolloClient } from '@apollo/client'; @@ -49,6 +49,10 @@ export default function SelectRole({ user, userRoleUrn, selectRoleOptions, refet const [currentRoleUrn, setCurrentRoleUrn] = useState(defaultRoleUrn); const [isViewingAssignRole, setIsViewingAssignRole] = useState(false); + useEffect(() => { + setCurrentRoleUrn(defaultRoleUrn); + }, [defaultRoleUrn]); + const onSelectRole = (roleUrn: string) => { setCurrentRoleUrn(roleUrn); setIsViewingAssignRole(true); diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index 8e2bc21f0693f..22b44e5f2d625 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -52,6 +52,7 @@ export const UserList = () => { const params = 
QueryString.parse(location.search, { arrayFormat: 'comma' }); const paramsQuery = (params?.query as string) || undefined; const [query, setQuery] = useState(undefined); + const [usersList, setUsersList] = useState<Array<any>>([]); useEffect(() => setQuery(paramsQuery), [paramsQuery]); const [page, setPage] = useState(1); @@ -81,8 +82,9 @@ export const UserList = () => { }); const totalUsers = usersData?.listUsers?.total || 0; - const users = usersData?.listUsers?.users || []; - + useEffect(()=> { + setUsersList(usersData?.listUsers?.users || []); + }, [usersData]); const onChangePage = (newPage: number) => { scrollToTop(); setPage(newPage); @@ -145,6 +147,7 @@ export const UserList = () => { onQueryChange={(q) => { setPage(1); setQuery(q); + setUsersList([]); }} entityRegistry={entityRegistry} hideRecommendations @@ -155,7 +158,7 @@ export const UserList = () => { locale={{ emptyText: , }} - dataSource={users} + dataSource={usersList} renderItem={(item: any) => ( handleDelete(item.urn as string)} From 21075e606707df42f25c4ab2d37ef6b2d97daf0d Mon Sep 17 00:00:00 2001 From: Shirshanka Das Date: Wed, 3 Jan 2024 00:39:58 -0800 Subject: [PATCH 164/263] fix(ci): make test flexible to allow sha-based cli versions (#9551) --- smoke-test/tests/read_only/test_services_up.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/smoke-test/tests/read_only/test_services_up.py b/smoke-test/tests/read_only/test_services_up.py index cbe92625f4689..b1b3b1d6f4bd7 100644 --- a/smoke-test/tests/read_only/test_services_up.py +++ b/smoke-test/tests/read_only/test_services_up.py @@ -2,6 +2,7 @@ import pytest import requests +import re from tests.utils import get_gms_url, wait_for_healthcheck_util @@ -13,6 +14,8 @@ def test_services_up(): wait_for_healthcheck_util() +def looks_like_a_short_sha(sha: str) -> bool: + return len(sha) == 7 and re.match(r"[0-9a-f]{7}", sha) is not None @pytest.mark.read_only def test_gms_config_accessible(): @@ -30,4 +33,4 @@ def test_gms_config_accessible(): default_cli_version: str = gms_config["managedIngestion"]["defaultCliVersion"] print(f"Default CLI version: {default_cli_version}") assert not default_cli_version.startswith("@") - assert "." 
in default_cli_version or looks_like_a_short_sha(default_cli_version), "Default CLI version does not look like a version string" From 2e3141e1db5be0b24c343812a885dc494168a7de Mon Sep 17 00:00:00 2001 From: Kunal-kankriya <127090035+Kunal-kankriya@users.noreply.github.com> Date: Wed, 3 Jan 2024 18:59:16 +0530 Subject: [PATCH 165/263] tests(cypress): add navigation in search test (#9545) --- .../e2e/search/query_and_filter_search.js | 156 ++++++++++++------ 1 file changed, 102 insertions(+), 54 deletions(-) diff --git a/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js b/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js index 4637310b86496..59105be587803 100644 --- a/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js +++ b/smoke-test/tests/cypress/cypress/e2e/search/query_and_filter_search.js @@ -1,57 +1,105 @@ +const datasetNames = { + dashboardsType: "Baz Dashboard", + pipelinesType: "Users", + MlmoduleType: "cypress-model", + glossaryTermsType: "CypressColumnInfoType", + tags: "some-cypress-feature-1", + hivePlatform: "cypress_logging_events", + airflowPlatform: "User Creations", + awsPlatform: "project/root/events/logging_events_bckp", + hdfsPlatform: "SampleHdfsDataset" +}; + +const searchToExecute = (value) => { + cy.get("input[data-testid=search-input]").eq(0).type(`${value}{enter}`); + cy.waitTextPresent("Type"); +}; + +const selectFilteredEntity = (textToClick, entity, url) => { + cy.get(`[data-testid=filter-dropdown-${textToClick}]`).click({ force: true }); + cy.get(`[data-testid="filter-option-${entity}"]`).click({ force: true }); + cy.get("[data-testid=update-filters]").click({ force: true }); + cy.url().should("include", `${url}`); + cy.get("[data-testid=update-filters]").should("not.be.visible"); + cy.get('.ant-pagination-next').scrollIntoView().should('be.visible'); +}; + +const verifyFilteredEntity = (text) => { + cy.get('.ant-typography').contains(text).should('be.visible'); +}; + describe("auto-complete dropdown, filter plus query search test", () => { + beforeEach(() => { + cy.loginWithCredentials(); + cy.visit('/'); + }); + + it.skip("Verify the 'filter by type' section + query", () => { + + //Dashboard + searchToExecute("*"); + selectFilteredEntity("Type", "Dashboards", "filter__entityType"); + cy.clickOptionWithText(datasetNames.dashboardsType); + verifyFilteredEntity('Dashboard'); + + //ML Models + searchToExecute("*"); + selectFilteredEntity("Type", "ML Models", "filter__entityType"); + cy.clickOptionWithText(datasetNames.MlmoduleType); + verifyFilteredEntity('ML Model'); + + //Pipelines + searchToExecute("*"); + selectFilteredEntity("Type", "Pipelines", "filter__entityType"); + cy.clickOptionWithText(datasetNames.pipelinesType); + verifyFilteredEntity('Pipeline'); + + }); + + it("Verify the 'filter by Glossary term' section + query", () => { + + //Glossary Term + searchToExecute("*"); + selectFilteredEntity("Type", "Glossary Terms", "filter__entityType"); + cy.clickOptionWithText(datasetNames.glossaryTermsType); + verifyFilteredEntity('Glossary Term'); +}); + + it("Verify the 'filter by platform' section + query", () => { + + //Hive + searchToExecute("*"); + selectFilteredEntity("Platform", "Hive", "filter_platform"); + cy.clickOptionWithText(datasetNames.hivePlatform); + verifyFilteredEntity('Hive'); + + //AWS S3 + searchToExecute("*"); + selectFilteredEntity("Platform", "AWS S3", "filter_platform"); + cy.clickOptionWithText(datasetNames.awsPlatform); + verifyFilteredEntity('AWS S3'); + + //HDFS + 
searchToExecute("*"); + selectFilteredEntity("Platform", "HDFS", "filter_platform"); + cy.clickOptionWithText(datasetNames.hdfsPlatform); + verifyFilteredEntity('HDFS'); + + //Airflow + searchToExecute("*"); + selectFilteredEntity("Platform", "Airflow", "filter_platform"); + cy.clickOptionWithText(datasetNames.airflowPlatform); + verifyFilteredEntity('Airflow'); + }); - const platformQuerySearch = (query,test_id,active_filter) => { - cy.visit("/"); - cy.get("input[data-testid=search-input]").type(query); - cy.get(`[data-testid="quick-filter-urn:li:dataPlatform:${test_id}"]`).click(); - cy.focused().type("{enter}").wait(3000); - cy.url().should( - "include", - `?filter_platform___false___EQUAL___0=urn%3Ali%3AdataPlatform%3A${test_id}` - ); - cy.get('[data-testid="search-input"]').should("have.value", query); - cy.get(`[data-testid="active-filter-${active_filter}"]`).should("be.visible"); - cy.contains("of 0 results").should("not.exist"); - cy.contains(/of [0-9]+ results/); - } - - const entityQuerySearch = (query,test_id,active_filter) => { - cy.visit("/"); - cy.get("input[data-testid=search-input]").type(query); - cy.get(`[data-testid="quick-filter-${test_id}"]`).click(); - cy.focused().type("{enter}").wait(3000); - cy.url().should( - "include", - `?filter__entityType___false___EQUAL___0=${test_id}` - ); - cy.get('[data-testid="search-input"]').should("have.value", query); - cy.get(`[data-testid="active-filter-${active_filter}"]`).should("be.visible"); - cy.contains("of 0 results").should("not.exist"); - cy.contains(/of [0-9]+ results/); - } - - it("verify the 'filter by' section + query (result in search page with query applied + filter applied)", () => { - // Platform query plus filter test - cy.loginWithCredentials(); - // Airflow - platformQuerySearch ("cypress","airflow","Airflow"); - // BigQuery - platformQuerySearch ("cypress","bigquery","BigQuery"); - // dbt - platformQuerySearch ("cypress","dbt","dbt"); - // Hive - platformQuerySearch ("cypress","hive","Hive"); - - // Entity type query plus filter test - // Datasets - entityQuerySearch ("cypress","DATASET","Datasets"); - // Dashboards - entityQuerySearch ("cypress","DASHBOARD","Dashboards"); - // Pipelines - entityQuerySearch ("cypress","DATA_FLOW","Pipelines"); - // Domains - entityQuerySearch ("Marketing","DOMAIN","Domains"); - // Glossary Terms - entityQuerySearch ("cypress","GLOSSARY_TERM","Glossary Terms"); + it("Verify the 'filter by tag' section + query", () => { + + //CypressFeatureTag + searchToExecute("*"); + selectFilteredEntity("Tag", "CypressFeatureTag", "filter_tags"); + cy.clickOptionWithText(datasetNames.tags); + cy.mouseover('[data-testid="tag-CypressFeatureTag"]'); + verifyFilteredEntity('Feature'); }); -}); \ No newline at end of file +}); From ff78e3c172fee880cdbe1aa3333cf4a73926c910 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Wed, 3 Jan 2024 19:47:19 +0530 Subject: [PATCH 166/263] docs(acryl cloud): release notes for 0.2.14.1 (#9554) --- docs-website/sidebars.js | 1 + docs/managed-datahub/release-notes/v_0_2_14.md | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 docs/managed-datahub/release-notes/v_0_2_14.md diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 5d7c6b06adad4..2b8873c678778 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -177,6 +177,7 @@ module.exports = { }, { "Managed DataHub Release History": [ + "docs/managed-datahub/release-notes/v_0_2_14", "docs/managed-datahub/release-notes/v_0_2_13", 
"docs/managed-datahub/release-notes/v_0_2_12", "docs/managed-datahub/release-notes/v_0_2_11", diff --git a/docs/managed-datahub/release-notes/v_0_2_14.md b/docs/managed-datahub/release-notes/v_0_2_14.md new file mode 100644 index 0000000000000..8ad1f19503e06 --- /dev/null +++ b/docs/managed-datahub/release-notes/v_0_2_14.md @@ -0,0 +1,17 @@ +# v0.2.14.1 +--- + +Release Availability Date +--- +02-Jan-2023 + +Recommended CLI/SDK +--- +- `v0.12.1.3` with release notes at https://github.com/acryldata/datahub/releases/tag/v0.12.1.3 + +If you are using an older CLI/SDK version then please upgrade it. This applies for all CLI/SDK usages, if you are using it through your terminal, github actions, airflow, in python SDK somewhere, Java SKD etc. This is a strong recommendation to upgrade as we keep on pushing fixes in the CLI and it helps us support you better. + +## Release Changelog +--- +- Since `v0.2.13` these changes from OSS DataHub https://github.com/datahub-project/datahub/compare/d9de854d276c118afc55264ecc9e2712b91b4ab2...31f9c796763677a4d452066d9b49b4088e65da19 have been pulled in. + From c3c4bef1ad746a57a1a6cff821a732fe8114f695 Mon Sep 17 00:00:00 2001 From: Aseem Bansal Date: Wed, 3 Jan 2024 22:59:39 +0530 Subject: [PATCH 167/263] ci(doc): tweak build rule to avoid docker build for docs (#9555) --- .github/workflows/docker-unified.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 454e766140245..8afce059572c7 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -4,12 +4,14 @@ on: branches: - master paths-ignore: + - "docs-website/**" - "docs/**" - "**.md" pull_request: branches: - "**" paths-ignore: + - "docs-website/**" - "docs/**" - "**.md" release: From c9613043c86e169a888d5ac60f0efdcd1551a2b0 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 14:28:22 -0500 Subject: [PATCH 168/263] fix(ingest): improve kafka-connect test stability (#9519) --- .../tests/integration/kafka/docker-compose.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/metadata-ingestion/tests/integration/kafka/docker-compose.yml b/metadata-ingestion/tests/integration/kafka/docker-compose.yml index 43f30cbe1e665..0a4422e07515c 100644 --- a/metadata-ingestion/tests/integration/kafka/docker-compose.yml +++ b/metadata-ingestion/tests/integration/kafka/docker-compose.yml @@ -1,5 +1,5 @@ --- -version: '3.8' +version: "3.8" services: zookeeper: image: confluentinc/cp-zookeeper:7.2.2 @@ -9,7 +9,8 @@ services: ports: - "52181" volumes: - - test_zkdata:/var/opt/zookeeper + - test_zkdata:/var/lib/zookeeper/data + - test_zklogs:/var/lib/zookeeper/log broker: image: confluentinc/cp-kafka:7.2.2 @@ -34,3 +35,4 @@ services: volumes: test_zkdata: + test_zklogs: From 83b904e379b0e9a13d22659e483c6d3d4c9b29ba Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 14:28:32 -0500 Subject: [PATCH 169/263] fix(ingest/looker): add user stats to report (#9505) --- .../ingestion/source/looker/looker_common.py | 5 +++++ .../ingestion/source/looker/looker_config.py | 5 ----- .../ingestion/source/looker/looker_source.py | 13 +++++-------- 3 files changed, 10 insertions(+), 13 deletions(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py index 53533a8d27c9b..94a56bb9281cb 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py +++ 
b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_common.py @@ -1059,6 +1059,7 @@ class LookerDashboardSourceReport(StaleEntityRemovalSourceReport): dashboards_scanned_for_usage: int = 0 charts_scanned_for_usage: int = 0 charts_with_activity: LossySet[str] = dataclasses_field(default_factory=LossySet) + accessed_dashboards: int = 0 dashboards_with_activity: LossySet[str] = dataclasses_field( default_factory=LossySet ) @@ -1066,6 +1067,10 @@ class LookerDashboardSourceReport(StaleEntityRemovalSourceReport): _looker_explore_registry: Optional[LookerExploreRegistry] = None total_explores: int = 0 explores_scanned: int = 0 + + resolved_user_ids: int = 0 + email_ids_missing: int = 0 # resolved users with missing email addresses + _looker_api: Optional[LookerAPI] = None query_latency: Dict[str, datetime.timedelta] = dataclasses_field( default_factory=dict diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py index 514f22b4f2158..52a21e8f12259 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_config.py @@ -160,11 +160,6 @@ class LookerDashboardSourceConfig( description="When enabled, extracts ownership from Looker directly. When disabled, ownership is left empty " "for dashboards and charts.", ) - actor: Optional[str] = Field( - None, - description="This config is deprecated in favor of `extract_owners`. Previously, was the actor to use in " - "ownership properties of ingested metadata.", - ) strip_user_ids_from_email: bool = Field( False, description="When enabled, converts Looker user emails of the form name@domain.com to urn:li:corpuser:name " diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index 7e8fbfde12042..0cce267bf5579 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -129,9 +129,6 @@ class LookerDashboardSource(TestableSource, StatefulIngestionSourceBase): source_config: LookerDashboardSourceConfig reporter: LookerDashboardSourceReport user_registry: LookerUserRegistry - accessed_dashboards: int = 0 - resolved_user_ids: int = 0 - email_ids_missing: int = 0 # resolved users with missing email addresses reachable_look_registry: Set[ str ] # Keep track of look-id which are reachable from Dashboard @@ -866,7 +863,7 @@ def _get_folder_path(self, folder: FolderBase, client: LookerAPI) -> str: def _get_looker_dashboard( self, dashboard: Dashboard, client: LookerAPI ) -> LookerDashboard: - self.accessed_dashboards += 1 + self.reporter.accessed_dashboards += 1 if dashboard.folder is None: logger.debug(f"{dashboard.id} has no folder") dashboard_folder_path = None @@ -928,9 +925,9 @@ def _get_looker_user(self, user_id: Optional[str]) -> Optional[LookerUser]: if user is not None and self.source_config.extract_owners: # Keep track of how many user ids we were able to resolve - self.resolved_user_ids += 1 + self.reporter.resolved_user_ids += 1 if user.email is None: - self.email_ids_missing += 1 + self.reporter.email_ids_missing += 1 return user @@ -1313,8 +1310,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]: if ( self.source_config.extract_owners - and self.resolved_user_ids > 0 - and self.email_ids_missing == self.resolved_user_ids + and 
self.reporter.resolved_user_ids > 0 + and self.reporter.email_ids_missing == self.reporter.resolved_user_ids ): # Looks like we tried to extract owners and could not find their email addresses. This is likely a permissions issue self.reporter.report_warning( From 186b6f942d3fa7f0ce379add72cbcb57bccd4bb0 Mon Sep 17 00:00:00 2001 From: Shirshanka Das Date: Wed, 3 Jan 2024 12:21:06 -0800 Subject: [PATCH 170/263] perf(lineage): Rewrite lineage query for Elastic graph store (#9552) --- .../graph/elastic/ESGraphQueryDAO.java | 82 ++++--- .../graph/search/ESGraphQueryDAOTest.java | 94 ++++++- ...1.json => lineage_query_filters_full.json} | 98 ++++---- ...eage_query_filters_full_empty_filters.json | 60 +++++ ...e_query_filters_full_multiple_filters.json | 229 ++++++++++++++++++ .../lineage_query_filters_limited.json | 32 +++ 6 files changed, 508 insertions(+), 87 deletions(-) rename metadata-io/src/test/resources/elasticsearch/sample_filters/{lineage_query_filters_1.json => lineage_query_filters_full.json} (81%) create mode 100644 metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json create mode 100644 metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json create mode 100644 metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java index 92960bc9222ab..97cb186ce948c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java @@ -336,17 +336,10 @@ private List<LineageRelationship> getLineageRelationships( Collectors.toMap( Function.identity(), entityType -> lineageRegistry.getLineageRelationships(entityType, direction))); - BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); - // Get all relation types relevant to the set of urns to hop from - urnsPerEntityType.forEach( - (entityType, urns) -> - finalQuery.should( - getQueryForLineage( - urns, - edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()), - graphFilters, - startTimeMillis, - endTimeMillis))); + + QueryBuilder finalQuery = + getLineageQuery( + urnsPerEntityType, edgesPerEntityType, graphFilters, startTimeMillis, endTimeMillis); SearchResponse response = executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult()); Set<Urn> entityUrnSet = new HashSet<>(entityUrns); @@ -361,18 +354,53 @@ private List<LineageRelationship> getLineageRelationships( entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); } - // Get search query for given list of edges and source urns @VisibleForTesting - public static QueryBuilder getQueryForLineage( - @Nonnull List<Urn> urns, - @Nonnull List<EdgeInfo> lineageEdges, + public static QueryBuilder getLineageQuery( + @Nonnull Map<String, List<Urn>> urnsPerEntityType, + @Nonnull Map<String, List<EdgeInfo>> edgesPerEntityType, @Nonnull GraphFilters graphFilters, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - BoolQueryBuilder query = QueryBuilders.boolQuery(); - if (lineageEdges.isEmpty()) { - return query; + BoolQueryBuilder entityTypeQueries = QueryBuilders.boolQuery(); + // Get all relation types relevant to the set of urns to hop from + urnsPerEntityType.forEach( + (entityType, urns) -> { + if (edgesPerEntityType.containsKey(entityType) + && !edgesPerEntityType.get(entityType).isEmpty()) {
entityTypeQueries.should( + getLineageQueryForEntityType( + urns, edgesPerEntityType.get(entityType), graphFilters)); + } + }); + + BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); + + finalQuery.filter(entityTypeQueries); + finalQuery.filter(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); + finalQuery.filter(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); + + /* + * Optional - Add edge filtering based on time windows. + */ + if (startTimeMillis != null && endTimeMillis != null) { + finalQuery.filter(TimeFilterUtils.getEdgeTimeFilterQuery(startTimeMillis, endTimeMillis)); + } else { + log.debug( + String.format( + "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", + startTimeMillis, endTimeMillis)); } + + return finalQuery; + } + + // Get search query for given list of edges and source urns + @VisibleForTesting + public static QueryBuilder getLineageQueryForEntityType( + @Nonnull List<Urn> urns, + @Nonnull List<EdgeInfo> lineageEdges, + @Nonnull GraphFilters graphFilters) { + BoolQueryBuilder query = QueryBuilders.boolQuery(); } Map<RelationshipDirection, List<EdgeInfo>> edgesByDirection = lineageEdges.stream().collect(Collectors.groupingBy(EdgeInfo::getDirection)); @@ -388,18 +416,6 @@ public static QueryBuilder getQueryForLineage( query.should(getIncomingEdgeQuery(urns, incomingEdges, graphFilters)); } - /* - * Optional - Add edge filtering based on time windows. - */ - if (startTimeMillis != null && endTimeMillis != null) { - query.must(TimeFilterUtils.getEdgeTimeFilterQuery(startTimeMillis, endTimeMillis)); - } else { - log.debug( - String.format( - "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", - startTimeMillis, endTimeMillis)); - } - return query; } @@ -601,9 +617,6 @@ private static BoolQueryBuilder getOutGoingEdgeQuery( BoolQueryBuilder outgoingEdgeQuery = QueryBuilders.boolQuery(); outgoingEdgeQuery.must(buildUrnFilters(urns, SOURCE)); outgoingEdgeQuery.must(buildEdgeFilters(outgoingEdges)); - outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - outgoingEdgeQuery.must( - buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return outgoingEdgeQuery; } @@ -612,9 +625,6 @@ private static BoolQueryBuilder getIncomingEdgeQuery( BoolQueryBuilder incomingEdgeQuery = QueryBuilders.boolQuery(); incomingEdgeQuery.must(buildUrnFilters(urns, DESTINATION)); incomingEdgeQuery.must(buildEdgeFilters(incomingEdges)); - incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - incomingEdgeQuery.must( - buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return incomingEdgeQuery; } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java index 9fc9490bfd7ef..5b7f880e6d83a 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java @@ -23,16 +23,40 @@ public class ESGraphQueryDAOTest { - private static final String TEST_QUERY_FILE = - "elasticsearch/sample_filters/lineage_query_filters_1.json"; + private static final String TEST_QUERY_FILE_LIMITED = + "elasticsearch/sample_filters/lineage_query_filters_limited.json"; + private static final String TEST_QUERY_FILE_FULL = + 
"elasticsearch/sample_filters/lineage_query_filters_full.json"; + private static final String TEST_QUERY_FILE_FULL_EMPTY_FILTERS = + "elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json"; + private static final String TEST_QUERY_FILE_FULL_MULTIPLE_FILTERS = + "elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json"; @Test private static void testGetQueryForLineageFullArguments() throws Exception { - URL url = Resources.getResource(TEST_QUERY_FILE); - String expectedQuery = Resources.toString(url, StandardCharsets.UTF_8); - - List<Urn> urns = new ArrayList<>(); + URL urlLimited = Resources.getResource(TEST_QUERY_FILE_LIMITED); + String expectedQueryLimited = Resources.toString(urlLimited, StandardCharsets.UTF_8); + URL urlFull = Resources.getResource(TEST_QUERY_FILE_FULL); + String expectedQueryFull = Resources.toString(urlFull, StandardCharsets.UTF_8); + URL urlFullEmptyFilters = Resources.getResource(TEST_QUERY_FILE_FULL_EMPTY_FILTERS); + String expectedQueryFullEmptyFilters = + Resources.toString(urlFullEmptyFilters, StandardCharsets.UTF_8); + URL urlFullMultipleFilters = Resources.getResource(TEST_QUERY_FILE_FULL_MULTIPLE_FILTERS); + String expectedQueryFullMultipleFilters = + Resources.toString(urlFullMultipleFilters, StandardCharsets.UTF_8); + + List<Urn> urns = List.of(Urn.createFromString("urn:li:dataset:test-urn")); + List<Urn> urnsMultiple1 = + ImmutableList.of( + UrnUtils.getUrn("urn:li:dataset:test-urn"), + UrnUtils.getUrn("urn:li:dataset:test-urn2"), + UrnUtils.getUrn("urn:li:dataset:test-urn3")); + List<Urn> urnsMultiple2 = + ImmutableList.of( + UrnUtils.getUrn("urn:li:chart:test-urn"), + UrnUtils.getUrn("urn:li:chart:test-urn2"), + UrnUtils.getUrn("urn:li:chart:test-urn3")); List<LineageRegistry.EdgeInfo> edgeInfos = new ArrayList<>( ImmutableList.of( new LineageRegistry.EdgeInfo( "DownstreamOf", RelationshipDirection.INCOMING, Constants.DATASET_ENTITY_NAME))); + List<LineageRegistry.EdgeInfo> edgeInfosMultiple1 = + ImmutableList.of( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.OUTGOING, Constants.DATASET_ENTITY_NAME), + new LineageRegistry.EdgeInfo( + "Consumes", RelationshipDirection.OUTGOING, Constants.DATASET_ENTITY_NAME)); + List<LineageRegistry.EdgeInfo> edgeInfosMultiple2 = + ImmutableList.of( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.OUTGOING, Constants.DATA_JOB_ENTITY_NAME), + new LineageRegistry.EdgeInfo( + "Consumes", RelationshipDirection.OUTGOING, Constants.DATA_JOB_ENTITY_NAME)); String entityType = "testEntityType"; + Map<String, List<Urn>> urnsPerEntityType = Map.of(entityType, urns); + Map<String, List<Urn>> urnsPerEntityTypeMultiple = + Map.of( + Constants.DATASET_ENTITY_NAME, + urnsMultiple1, + Constants.CHART_ENTITY_NAME, + urnsMultiple2); + Map<String, List<LineageRegistry.EdgeInfo>> edgesPerEntityType = Map.of(entityType, edgeInfos); + Map<String, List<LineageRegistry.EdgeInfo>> edgesPerEntityTypeMultiple = + Map.of( + Constants.DATASET_ENTITY_NAME, edgeInfosMultiple1, + Constants.DATA_JOB_ENTITY_NAME, edgeInfosMultiple2); GraphFilters graphFilters = new GraphFilters(ImmutableList.of(Constants.DATASET_ENTITY_NAME)); + GraphFilters graphFiltersMultiple = + new GraphFilters( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME)); Long startTime = 0L; Long endTime = 1L; - QueryBuilder builder = - ESGraphQueryDAO.getQueryForLineage(urns, edgeInfos, graphFilters, startTime, endTime); - - Assert.assertEquals(builder.toString(), expectedQuery); + QueryBuilder limitedBuilder = + ESGraphQueryDAO.getLineageQueryForEntityType(urns, 
edgeInfos, graphFilters); + + QueryBuilder fullBuilder = + ESGraphQueryDAO.getLineageQuery( + urnsPerEntityType, edgesPerEntityType, graphFilters, startTime, endTime); + + QueryBuilder fullBuilderEmptyFilters = + ESGraphQueryDAO.getLineageQuery( + urnsPerEntityType, edgesPerEntityType, GraphFilters.emptyGraphFilters, null, null); + + QueryBuilder fullBuilderMultipleFilters = + ESGraphQueryDAO.getLineageQuery( + urnsPerEntityTypeMultiple, + edgesPerEntityTypeMultiple, + graphFiltersMultiple, + startTime, + endTime); + + Assert.assertEquals(limitedBuilder.toString(), expectedQueryLimited); + Assert.assertEquals(fullBuilder.toString(), expectedQueryFull); + Assert.assertEquals(fullBuilderEmptyFilters.toString(), expectedQueryFullEmptyFilters); + Assert.assertEquals(fullBuilderMultipleFilters.toString(), expectedQueryFullMultipleFilters); } @Test diff --git a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_1.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full.json similarity index 81% rename from metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_1.json rename to metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full.json index eb84638f0ccd0..0a1cee08414a9 100644 --- a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_1.json +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full.json @@ -1,6 +1,62 @@ { "bool" : { - "must" : [ + "filter" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "destination.urn" : [ + "urn:li:dataset:test-urn" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "terms" : { + "source.entityType" : [ + "dataset" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "destination.entityType" : [ + "dataset" + ], + "boost" : 1.0 + } + }, { "bool" : { "should" : [ @@ -160,46 +216,6 @@ } } ], - "should" : [ - { - "bool" : { - "must" : [ - { - "terms" : { - "destination.urn" : [ ], - "boost" : 1.0 - } - }, - { - "terms" : { - "relationshipType" : [ - "DownstreamOf" - ], - "boost" : 1.0 - } - }, - { - "terms" : { - "source.entityType" : [ - "dataset" - ], - "boost" : 1.0 - } - }, - { - "terms" : { - "destination.entityType" : [ - "dataset" - ], - "boost" : 1.0 - } - } - ], - "adjust_pure_negative" : true, - "boost" : 1.0 - } - } - ], "adjust_pure_negative" : true, "boost" : 1.0 } diff --git a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json new file mode 100644 index 0000000000000..ab2841d6602d8 --- /dev/null +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_empty_filters.json @@ -0,0 +1,60 @@ +{ + "bool" : { + "filter" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "destination.urn" : [ + "urn:li:dataset:test-urn" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 
1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "terms" : { + "source.entityType" : [ ], + "boost" : 1.0 + } + }, + { + "terms" : { + "destination.entityType" : [ ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } +} \ No newline at end of file diff --git a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json new file mode 100644 index 0000000000000..39f595e0e8dd2 --- /dev/null +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_full_multiple_filters.json @@ -0,0 +1,229 @@ +{ + "bool" : { + "filter" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "source.urn" : [ + "urn:li:dataset:test-urn", + "urn:li:dataset:test-urn2", + "urn:li:dataset:test-urn3" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf", + "Consumes" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "terms" : { + "source.entityType" : [ + "dataset", + "dashboard", + "dataJob" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "destination.entityType" : [ + "dataset", + "dashboard", + "dataJob" + ], + "boost" : 1.0 + } + }, + { + "bool" : { + "should" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "exists" : { + "field" : "createdOn", + "boost" : 1.0 + } + }, + { + "range" : { + "createdOn" : { + "from" : 0, + "to" : 1, + "include_lower" : true, + "include_upper" : true, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "exists" : { + "field" : "updatedOn", + "boost" : 1.0 + } + }, + { + "range" : { + "updatedOn" : { + "from" : 0, + "to" : 1, + "include_lower" : true, + "include_upper" : true, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "bool" : { + "should" : [ + { + "bool" : { + "must_not" : [ + { + "exists" : { + "field" : "createdOn", + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "term" : { + "createdOn" : { + "value" : 0, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "should" : [ + { + "bool" : { + "must_not" : [ + { + "exists" : { + "field" : "updatedOn", + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "bool" : { + "must" : [ + { + "term" : { + "updatedOn" : { + "value" : 0, + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + }, + { + "term" : { + "properties.source" : { + "value" : "UI", + "boost" : 1.0 + } + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } +} \ No newline at end of file diff --git 
a/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json new file mode 100644 index 0000000000000..95d468ec3dac8 --- /dev/null +++ b/metadata-io/src/test/resources/elasticsearch/sample_filters/lineage_query_filters_limited.json @@ -0,0 +1,32 @@ +{ + "bool" : { + "should" : [ + { + "bool" : { + "must" : [ + { + "terms" : { + "destination.urn" : [ + "urn:li:dataset:test-urn" + ], + "boost" : 1.0 + } + }, + { + "terms" : { + "relationshipType" : [ + "DownstreamOf" + ], + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } + } + ], + "adjust_pure_negative" : true, + "boost" : 1.0 + } +} \ No newline at end of file From f06b5c782099ace00116fd33dda73af5a48e4184 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 15:30:11 -0500 Subject: [PATCH 171/263] feat(ingest): improve config loading helpers (#9477) --- .../datahub/configuration/config_loader.py | 48 ++++++++------ .../datahub/ingestion/run/pipeline_config.py | 3 +- .../src/datahub/secret/__init__.py | 0 .../datahub/secret/datahub_secret_store.py | 66 +++++++++++++++++++ .../datahub/secret/datahub_secrets_client.py | 45 +++++++++++++ .../src/datahub/secret/secret_common.py | 59 +++++++++++++++++ .../src/datahub/secret/secret_store.py | 43 ++++++++++++ 7 files changed, 244 insertions(+), 20 deletions(-) create mode 100644 metadata-ingestion/src/datahub/secret/__init__.py create mode 100644 metadata-ingestion/src/datahub/secret/datahub_secret_store.py create mode 100644 metadata-ingestion/src/datahub/secret/datahub_secrets_client.py create mode 100644 metadata-ingestion/src/datahub/secret/secret_common.py create mode 100644 metadata-ingestion/src/datahub/secret/secret_store.py diff --git a/metadata-ingestion/src/datahub/configuration/config_loader.py b/metadata-ingestion/src/datahub/configuration/config_loader.py index 2f41af6f7286e..4266bac0c79ab 100644 --- a/metadata-ingestion/src/datahub/configuration/config_loader.py +++ b/metadata-ingestion/src/datahub/configuration/config_loader.py @@ -1,56 +1,59 @@ import io +import os import pathlib import re import sys import tempfile import unittest.mock -from typing import Any, Dict, Set, Union +from typing import Any, Dict, Mapping, Optional, Set, Union from urllib import parse import requests -from expandvars import UnboundVariable, expandvars +from expandvars import UnboundVariable, expand from datahub.configuration.common import ConfigurationError, ConfigurationMechanism from datahub.configuration.json_loader import JsonConfigurationMechanism from datahub.configuration.toml import TomlConfigurationMechanism from datahub.configuration.yaml import YamlConfigurationMechanism +Environ = Mapping[str, str] -def _resolve_element(element: str) -> str: + +def _resolve_element(element: str, environ: Environ) -> str: if re.search(r"(\$\{).+(\})", element): - return expandvars(element, nounset=True) + return expand(element, nounset=True, environ=environ) elif element.startswith("$"): try: - return expandvars(element, nounset=True) + return expand(element, nounset=True, environ=environ) except UnboundVariable: return element else: return element -def _resolve_list(ele_list: list) -> list: +def _resolve_list(ele_list: list, environ: Environ) -> list: new_v: list = [] for ele in ele_list: if isinstance(ele, str): - new_v.append(_resolve_element(ele)) + new_v.append(_resolve_element(ele, environ=environ)) elif isinstance(ele, list): - 
new_v.append(_resolve_list(ele)) + new_v.append(_resolve_list(ele, environ=environ)) elif isinstance(ele, dict): - new_v.append(resolve_env_variables(ele)) + new_v.append(resolve_env_variables(ele, environ=environ)) else: new_v.append(ele) return new_v -def resolve_env_variables(config: dict) -> dict: +def resolve_env_variables(config: dict, environ: Environ) -> dict: new_dict: Dict[Any, Any] = {} for k, v in config.items(): if isinstance(v, dict): - new_dict[k] = resolve_env_variables(v) + new_dict[k] = resolve_env_variables(v, environ=environ) elif isinstance(v, list): - new_dict[k] = _resolve_list(v) + new_dict[k] = _resolve_list(v, environ=environ) elif isinstance(v, str): - new_dict[k] = _resolve_element(v) + new_dict[k] = _resolve_element(v, environ=environ) else: new_dict[k] = v return new_dict @@ -60,13 +63,20 @@ def list_referenced_env_variables(config: dict) -> Set[str]: # This is a bit of a hack, but expandvars does a bunch of escaping # and other logic that we don't want to duplicate here. - with unittest.mock.patch("expandvars.getenv") as mock_getenv: - mock_getenv.return_value = "mocked_value" + vars = set() + + def mock_get_env(key: str, default: Optional[str] = None) -> str: + vars.add(key) + if default is not None: + return default + return "mocked_value" + + mock = unittest.mock.MagicMock() + mock.get.side_effect = mock_get_env - resolve_env_variables(config) + resolve_env_variables(config, environ=mock) - calls = mock_getenv.mock_calls - return set([call[1][0] for call in calls]) + return vars WRITE_TO_FILE_DIRECTIVE_PREFIX = "__DATAHUB_TO_FILE_" @@ -147,7 +157,7 @@ def load_config_file( config = raw_config.copy() if resolve_env_vars: - config = resolve_env_variables(config) + config = resolve_env_variables(config, environ=os.environ) if process_directives: config = _process_directives(config) diff --git a/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py b/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py index f22f94c9e9351..c0f6add6df006 100644 --- a/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py +++ b/metadata-ingestion/src/datahub/ingestion/run/pipeline_config.py @@ -1,5 +1,6 @@ import datetime import logging +import os import uuid from typing import Any, Dict, List, Optional @@ -112,7 +113,7 @@ def default_sink_is_datahub_rest(cls, values: Dict[str, Any]) -> Any: } # resolve env variables if present default_sink_config = config_loader.resolve_env_variables( - default_sink_config + default_sink_config, environ=os.environ ) values["sink"] = default_sink_config diff --git a/metadata-ingestion/src/datahub/secret/__init__.py b/metadata-ingestion/src/datahub/secret/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/secret/datahub_secret_store.py b/metadata-ingestion/src/datahub/secret/datahub_secret_store.py new file mode 100644 index 0000000000000..8301ff2d9dc1a --- /dev/null +++ b/metadata-ingestion/src/datahub/secret/datahub_secret_store.py @@ -0,0 +1,66 @@ +import logging +from typing import Any, Dict, List, Optional, Union + +from pydantic import BaseModel, validator + +from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph +from datahub.secret.datahub_secrets_client import DataHubSecretsClient +from datahub.secret.secret_store import SecretStore + +logger = logging.getLogger(__name__) + + +class DataHubSecretStoreConfig(BaseModel): + graph_client: Optional[DataHubGraph] = None + graph_client_config: Optional[DatahubClientConfig] = None + + 
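+    # Editor's note (assumption, not part of the PR): callers supply at least
+    # one of graph_client or graph_client_config; DataHubSecretStore.__init__
+    # below prefers graph_client and raises if neither is set.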
class Config: + arbitrary_types_allowed = True + + @validator("graph_client") + def check_graph_connection(cls, v: DataHubGraph) -> DataHubGraph: + if v is not None: + v.test_connection() + return v + + +# An implementation of SecretStore that fetches secrets from DataHub +class DataHubSecretStore(SecretStore): + # Client for fetching secrets from DataHub GraphQL API + client: DataHubSecretsClient + + def __init__(self, config: DataHubSecretStoreConfig): + # Attempt to establish an outbound connection to DataHub and create a client. + if config.graph_client is not None: + self.client = DataHubSecretsClient(graph=config.graph_client) + elif config.graph_client_config is not None: + graph = DataHubGraph(config.graph_client_config) + self.client = DataHubSecretsClient(graph) + else: + raise Exception( + "Invalid configuration provided: unable to construct DataHub Graph Client." + ) + + def get_secret_values(self, secret_names: List[str]) -> Dict[str, Union[str, None]]: + # Fetch the secret from DataHub, using the credentials provided in the configuration. + # Use the GraphQL API. + try: + return self.client.get_secret_values(secret_names) + except Exception: + # Failed to resolve secrets, return empty. + logger.exception( + f"Caught exception while attempting to fetch secrets from DataHub. Secret names: {secret_names}" + ) + return {} + + def get_secret_value(self, secret_name: str) -> Union[str, None]: + secret_value_dict = self.get_secret_values([secret_name]) + return secret_value_dict.get(secret_name) + + def get_id(self) -> str: + return "datahub" + + @classmethod + def create(cls, config: Any) -> "DataHubSecretStore": + config = DataHubSecretStoreConfig.parse_obj(config) + return cls(config) diff --git a/metadata-ingestion/src/datahub/secret/datahub_secrets_client.py b/metadata-ingestion/src/datahub/secret/datahub_secrets_client.py new file mode 100644 index 0000000000000..c60aeff5db2f3 --- /dev/null +++ b/metadata-ingestion/src/datahub/secret/datahub_secrets_client.py @@ -0,0 +1,45 @@ +from typing import Dict, List, Optional + +from datahub.ingestion.graph.client import DataHubGraph + + +class DataHubSecretsClient: + """Class used to fetch secrets from DataHub.""" + + graph: DataHubGraph + + def __init__(self, graph: DataHubGraph): + self.graph = graph + + def get_secret_values(self, secret_names: List[str]) -> Dict[str, Optional[str]]: + if len(secret_names) == 0: + return {} + + request_json = { + "query": """query getSecretValues($input: GetSecretValuesInput!) {\n + getSecretValues(input: $input) {\n + name\n + value\n + }\n + }""", + "variables": {"input": {"secrets": secret_names}}, + } + # TODO: Use graph.execute_graphql() instead. 
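+        # Illustrative response shape for the POST below (editor's sketch,
+        # derived from the parsing code that follows, not from the PR text):
+        #   {"data": {"getSecretValues": [{"name": "...", "value": "..."}]}}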
+
+        # Fetch secrets using the GraphQL API
+        response = self.graph._session.post(
+            f"{self.graph.config.server}/api/graphql", json=request_json
+        )
+        response.raise_for_status()
+
+        # Verify response
+        res_data = response.json()
+        if "errors" in res_data:
+            raise Exception("Failed to retrieve secrets from DataHub.")
+
+        # Convert list of name, value secret pairs into a dict and return
+        secret_value_list = res_data["data"]["getSecretValues"]
+        secret_value_dict = dict()
+        for secret_value in secret_value_list:
+            secret_value_dict[secret_value["name"]] = secret_value["value"]
+        return secret_value_dict
diff --git a/metadata-ingestion/src/datahub/secret/secret_common.py b/metadata-ingestion/src/datahub/secret/secret_common.py
new file mode 100644
index 0000000000000..2f7a584d87538
--- /dev/null
+++ b/metadata-ingestion/src/datahub/secret/secret_common.py
@@ -0,0 +1,59 @@
+import json
+import logging
+from typing import List
+
+from datahub.configuration.config_loader import (
+    list_referenced_env_variables,
+    resolve_env_variables,
+)
+from datahub.secret.secret_store import SecretStore
+
+logger = logging.getLogger(__name__)
+
+
+def resolve_secrets(secret_names: List[str], secret_stores: List[SecretStore]) -> dict:
+    # Attempt to resolve each secret by checking the configured secret stores in order.
+    final_secret_values = dict({})
+
+    for secret_store in secret_stores:
+        try:
+            # Retrieve secret values from the store.
+            secret_values_dict = secret_store.get_secret_values(secret_names)
+            # Overlay secret values from each store, if not None.
+            for secret_name, secret_value in secret_values_dict.items():
+                if secret_value is not None:
+                    # HACK: We previously, incorrectly replaced newline characters with
+                    # a r'\n' string. This was a lossy conversion, since we can no longer
+                    # distinguish between a newline character and the literal '\n' in
+                    # the secret value. For now, we assume that all r'\n' strings are
+                    # actually newline characters. This will break if a secret value
+                    # genuinely contains the string r'\n'.
+                    # Once this PR https://github.com/datahub-project/datahub/pull/9484
+                    # has baked for a while, we should be able to remove this hack.
+                    # TODO: This logic should live in the DataHub secret client/store,
+                    # not the general secret resolution logic.
+                    secret_value = secret_value.replace(r"\n", "\n")
+
+                    final_secret_values[secret_name] = secret_value
+        except Exception:
+            logger.exception(
+                f"Failed to fetch secret values from secret store with id {secret_store.get_id()}"
+            )
+    return final_secret_values
+
+
+def resolve_recipe(recipe: str, secret_stores: List[SecretStore]) -> dict:
+    json_recipe_raw = json.loads(recipe)
+
+    # 1. Extract all secrets that need to be resolved.
+    secrets_to_resolve = list_referenced_env_variables(json_recipe_raw)
+
+    # 2. Resolve secret values
+    secret_values_dict = resolve_secrets(list(secrets_to_resolve), secret_stores)
+
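+    # Illustrative walk-through (editor's sketch; the secret name below is an
+    # assumption, not taken from the PR): given the recipe
+    #   '{"source": {"config": {"password": "${MYSQL_PASSWORD}"}}}'
+    # and a store that can resolve MYSQL_PASSWORD, steps 1-2 produce
+    # {"MYSQL_PASSWORD": "<plaintext>"}, which is substituted below.
+
+    # 3. 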
Substitute secrets into recipe file
+    json_recipe_resolved = resolve_env_variables(
+        json_recipe_raw, environ=secret_values_dict
+    )
+
+    return json_recipe_resolved
diff --git a/metadata-ingestion/src/datahub/secret/secret_store.py b/metadata-ingestion/src/datahub/secret/secret_store.py
new file mode 100644
index 0000000000000..d6d61d8c3c924
--- /dev/null
+++ b/metadata-ingestion/src/datahub/secret/secret_store.py
@@ -0,0 +1,43 @@
+from abc import abstractmethod
+from typing import Dict, List, Optional
+
+from datahub.configuration.common import ConfigModel
+
+
+class SecretStoreConfig(ConfigModel):
+    type: str
+    config: Dict
+
+
+class SecretStore:
+    """
+    Abstract base class for a Secret Store, or a class that resolves "secret" values by name.
+    """
+
+    @classmethod
+    @abstractmethod
+    def create(cls, configs: dict) -> "SecretStore":
+        pass
+
+    @abstractmethod
+    def get_secret_values(self, secret_names: List[str]) -> Dict[str, Optional[str]]:
+        """
+        Attempt to fetch a group of secrets, returning a dictionary that maps each
+        secret name to its value, or to None if it cannot be resolved by the store.
+        """
+
+    def get_secret_value(self, secret_name: str) -> Optional[str]:
+        secret_value_dict = self.get_secret_values([secret_name])
+        return secret_value_dict.get(secret_name)
+
+    @abstractmethod
+    def get_id(self) -> str:
+        """
+        Get a unique name or id associated with the Secret Store.
+        """
+
+    @abstractmethod
+    def close(self) -> None:
+        """
+        Close the secret store, releasing any resources it holds.
+        """
From 822d0eb014080fef030cdee84731878787c38c61 Mon Sep 17 00:00:00 2001
From: RyanHolstien
Date: Wed, 3 Jan 2024 15:11:07 -0600
Subject: [PATCH 172/263] feat(patch): add dashboardInfo and chartInfo support
 for patch (#9536)

---
 .../registry/SnapshotEntityRegistry.java      |   4 +
 .../template/AspectTemplateEngine.java        |   4 +-
 .../template/chart/ChartInfoTemplate.java     |  82 ++++
 .../dashboard/DashboardInfoTemplate.java      | 105 +++++
 .../datajob/DataJobInputOutputTemplate.java   |   2 -
 .../registry/patch/ChartInfoTemplateTest.java |  41 ++
 .../patch/DashboardInfoTemplateTest.java      |  41 ++
 .../UpstreamLineageTemplateTest.java          |   2 +-
 .../src/datahub/specific/chart.py             | 316 ++++++++++++++
 .../src/datahub/specific/dashboard.py         | 410 ++++++++++++++++++
 .../src/datahub/specific/datajob.py           |  12 +-
 .../src/datahub/specific/dataproduct.py       |  10 +-
 .../src/datahub/specific/dataset.py           |   8 +-
 .../src/datahub/specific/ownership.py         |   2 +-
 .../golden_dataproduct_out_upsert.json        |   2 +-
 .../unit/patch/complex_dataset_patch.json     |   2 +-
 .../tests/unit/patch/test_patch_builder.py    |  47 +-
 .../patch/chart/ChartInfoPatchBuilder.java    |  41 ++
 .../client/patch/common/PatchUtil.java        |  84 ++++
 .../dashboard/DashboardInfoPatchBuilder.java  | 103 +++++
 .../DataJobInputOutputPatchBuilder.java       |  73 +---
 .../java/datahub/client/patch/PatchTest.java  |  89 ++++
 22 files changed, 1385 insertions(+), 95 deletions(-)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java
 rename entity-registry/src/test/java/com/linkedin/metadata/models/registry/{ => patch}/UpstreamLineageTemplateTest.java (99%)
 create mode 100644 metadata-ingestion/src/datahub/specific/chart.py
 create
mode 100644 metadata-ingestion/src/datahub/specific/dashboard.py create mode 100644 metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java create mode 100644 metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java create mode 100644 metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java index cfc2c0901ce0d..bb0113abc9ed6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java @@ -12,9 +12,11 @@ import com.linkedin.metadata.models.EventSpec; import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.metadata.models.registry.template.Template; +import com.linkedin.metadata.models.registry.template.chart.ChartInfoTemplate; import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate; import com.linkedin.metadata.models.registry.template.common.GlossaryTermsTemplate; import com.linkedin.metadata.models.registry.template.common.OwnershipTemplate; +import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate; import com.linkedin.metadata.models.registry.template.dataflow.DataFlowInfoTemplate; import com.linkedin.metadata.models.registry.template.datajob.DataJobInfoTemplate; import com.linkedin.metadata.models.registry.template.datajob.DataJobInputOutputTemplate; @@ -79,6 +81,8 @@ private AspectTemplateEngine populateTemplateEngine(Map aspe aspectSpecTemplateMap.put(DATA_JOB_INFO_ASPECT_NAME, new DataJobInfoTemplate()); aspectSpecTemplateMap.put( DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate()); + aspectSpecTemplateMap.put(CHART_INFO_ASPECT_NAME, new ChartInfoTemplate()); + aspectSpecTemplateMap.put(DASHBOARD_INFO_ASPECT_NAME, new DashboardInfoTemplate()); aspectSpecTemplateMap.put(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, new DataJobInputOutputTemplate()); return new AspectTemplateEngine(aspectSpecTemplateMap); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java index 95849a94bae29..029eb688c5291 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java @@ -32,7 +32,9 @@ public class AspectTemplateEngine { DATA_FLOW_INFO_ASPECT_NAME, DATA_JOB_INFO_ASPECT_NAME, DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + CHART_INFO_ASPECT_NAME, + DASHBOARD_INFO_ASPECT_NAME) .collect(Collectors.toSet()); private final Map> _aspectTemplateMap; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java new file mode 100644 index 0000000000000..654f923e7322d --- /dev/null +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java @@ -0,0 +1,82 @@ +package com.linkedin.metadata.models.registry.template.chart; + +import static com.linkedin.metadata.Constants.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.linkedin.chart.ChartDataSourceTypeArray; +import com.linkedin.chart.ChartInfo; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.ChangeAuditStamps; +import com.linkedin.common.EdgeArray; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import java.util.Collections; +import javax.annotation.Nonnull; + +public class ChartInfoTemplate implements ArrayMergingTemplate { + + private static final String INPUT_EDGES_FIELD_NAME = "inputEdges"; + private static final String INPUTS_FIELD_NAME = "inputs"; + private static final String DESTINATION_URN_FIELD_NAME = "destinationUrn"; + + @Override + public ChartInfo getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + if (recordTemplate instanceof ChartInfo) { + return (ChartInfo) recordTemplate; + } + throw new ClassCastException("Unable to cast RecordTemplate to DataJobInputOutput"); + } + + @Override + public Class getTemplateType() { + return ChartInfo.class; + } + + @Nonnull + @Override + public ChartInfo getDefault() { + ChartInfo chartInfo = new ChartInfo(); + chartInfo.setDescription(""); + chartInfo.setTitle(""); + ChangeAuditStamps changeAuditStamps = new ChangeAuditStamps(); + AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + changeAuditStamps.setCreated(auditStamp).setLastModified(auditStamp); + chartInfo.setLastModified(changeAuditStamps); + chartInfo.setInputEdges(new EdgeArray()); + + // Deprecated fields + chartInfo.setInputs(new ChartDataSourceTypeArray()); + + return chartInfo; + } + + @Nonnull + @Override + public JsonNode transformFields(JsonNode baseNode) { + JsonNode transformedNode = + arrayFieldToMap( + baseNode, + INPUT_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + transformedNode = arrayFieldToMap(transformedNode, INPUTS_FIELD_NAME, Collections.emptyList()); + + return transformedNode; + } + + @Nonnull + @Override + public JsonNode rebaseFields(JsonNode patched) { + JsonNode rebasedNode = + transformedMapToArray( + patched, INPUT_EDGES_FIELD_NAME, Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + rebasedNode = transformedMapToArray(rebasedNode, INPUTS_FIELD_NAME, Collections.emptyList()); + + return rebasedNode; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java new file mode 100644 index 0000000000000..eae04b5285adf --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java @@ -0,0 +1,105 @@ +package com.linkedin.metadata.models.registry.template.dashboard; + +import static com.linkedin.metadata.Constants.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.ChangeAuditStamps; +import com.linkedin.common.ChartUrnArray; +import com.linkedin.common.EdgeArray; +import com.linkedin.common.UrnArray; +import 
com.linkedin.common.urn.UrnUtils; +import com.linkedin.dashboard.DashboardInfo; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import java.util.Collections; +import javax.annotation.Nonnull; + +public class DashboardInfoTemplate implements ArrayMergingTemplate { + + private static final String CHART_EDGES_FIELD_NAME = "chartEdges"; + private static final String DATASET_EDGES_FIELD_NAME = "datasetEdges"; + private static final String DATASETS_FIELD_NAME = "datasets"; + private static final String CHARTS_FIELD_NAME = "charts"; + private static final String DESTINATION_URN_FIELD_NAME = "destinationUrn"; + + @Override + public DashboardInfo getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + if (recordTemplate instanceof DashboardInfo) { + return (DashboardInfo) recordTemplate; + } + throw new ClassCastException("Unable to cast RecordTemplate to DataJobInputOutput"); + } + + @Override + public Class getTemplateType() { + return DashboardInfo.class; + } + + @Nonnull + @Override + public DashboardInfo getDefault() { + DashboardInfo dashboardInfo = new DashboardInfo(); + dashboardInfo.setTitle(""); + dashboardInfo.setDescription(""); + ChangeAuditStamps changeAuditStamps = new ChangeAuditStamps(); + AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + changeAuditStamps.setCreated(auditStamp).setLastModified(auditStamp); + dashboardInfo.setLastModified(changeAuditStamps); + dashboardInfo.setChartEdges(new EdgeArray()); + dashboardInfo.setDatasetEdges(new EdgeArray()); + + // Deprecated fields + dashboardInfo.setDatasets(new UrnArray()); + dashboardInfo.setCharts(new ChartUrnArray()); + + return dashboardInfo; + } + + @Nonnull + @Override + public JsonNode transformFields(JsonNode baseNode) { + JsonNode transformedNode = + arrayFieldToMap( + baseNode, + CHART_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + transformedNode = + arrayFieldToMap( + transformedNode, + DATASET_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + transformedNode = + arrayFieldToMap(transformedNode, DATASETS_FIELD_NAME, Collections.emptyList()); + + transformedNode = arrayFieldToMap(transformedNode, CHARTS_FIELD_NAME, Collections.emptyList()); + + return transformedNode; + } + + @Nonnull + @Override + public JsonNode rebaseFields(JsonNode patched) { + JsonNode rebasedNode = + transformedMapToArray( + patched, + DATASET_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + rebasedNode = + transformedMapToArray( + rebasedNode, + CHART_EDGES_FIELD_NAME, + Collections.singletonList(DESTINATION_URN_FIELD_NAME)); + + rebasedNode = transformedMapToArray(rebasedNode, DATASETS_FIELD_NAME, Collections.emptyList()); + rebasedNode = transformedMapToArray(rebasedNode, CHARTS_FIELD_NAME, Collections.emptyList()); + + return rebasedNode; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java index 889297734e977..6761892b1b31b 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java @@ -23,8 
+23,6 @@ public class DataJobInputOutputTemplate implements ArrayMergingTemplate patchOperations = new ArrayList<>(); + ObjectNode edgeNode = instance.objectNode(); + edgeNode.put( + "destinationUrn", "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/inputEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"), + edgeNode); + patchOperations.add(operation); + JsonPatch patch = new JsonPatch(patchOperations); + ChartInfo result = chartInfoTemplate.applyPatch(dashboardInfo, patch); + + Assert.assertEquals( + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"), + result.getInputEdges().get(0).getDestinationUrn()); + } +} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java new file mode 100644 index 0000000000000..962ff1d40d873 --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java @@ -0,0 +1,41 @@ +package com.linkedin.metadata.models.registry.patch; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.fge.jackson.jsonpointer.JsonPointer; +import com.github.fge.jsonpatch.AddOperation; +import com.github.fge.jsonpatch.JsonPatch; +import com.github.fge.jsonpatch.JsonPatchOperation; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.dashboard.DashboardInfo; +import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate; +import java.util.ArrayList; +import java.util.List; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class DashboardInfoTemplateTest { + + @Test + public void testDashboardInfoTemplate() throws Exception { + DashboardInfoTemplate dashboardInfoTemplate = new DashboardInfoTemplate(); + DashboardInfo dashboardInfo = dashboardInfoTemplate.getDefault(); + List patchOperations = new ArrayList<>(); + ObjectNode edgeNode = instance.objectNode(); + edgeNode.put( + "destinationUrn", "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"); + JsonPatchOperation operation = + new AddOperation( + new JsonPointer( + "/datasetEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"), + edgeNode); + patchOperations.add(operation); + JsonPatch patch = new JsonPatch(patchOperations); + DashboardInfo result = dashboardInfoTemplate.applyPatch(dashboardInfo, patch); + + Assert.assertEquals( + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"), + result.getDatasetEdges().get(0).getDestinationUrn()); + } +} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java similarity index 99% rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java rename to entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java index 07982a87be56c..8f410ae8da085 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/UpstreamLineageTemplateTest.java +++ 
b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry; +package com.linkedin.metadata.models.registry.patch; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; diff --git a/metadata-ingestion/src/datahub/specific/chart.py b/metadata-ingestion/src/datahub/specific/chart.py new file mode 100644 index 0000000000000..5dc394e8ebe0f --- /dev/null +++ b/metadata-ingestion/src/datahub/specific/chart.py @@ -0,0 +1,316 @@ +import time +from typing import Dict, List, Optional, TypeVar, Union +from urllib.parse import quote + +from datahub.emitter.mcp_patch_builder import MetadataPatchProposal +from datahub.metadata.schema_classes import ( + AuditStampClass, + ChartInfoClass as ChartInfo, + EdgeClass as Edge, + GlobalTagsClass as GlobalTags, + GlossaryTermAssociationClass as Term, + GlossaryTermsClass as GlossaryTerms, + KafkaAuditHeaderClass, + OwnerClass as Owner, + OwnershipTypeClass, + SystemMetadataClass, + TagAssociationClass as Tag, +) +from datahub.specific.custom_properties import CustomPropertiesPatchHelper +from datahub.specific.ownership import OwnershipPatchHelper +from datahub.utilities.urns.tag_urn import TagUrn +from datahub.utilities.urns.urn import Urn + +T = TypeVar("T", bound=MetadataPatchProposal) + + +class ChartPatchBuilder(MetadataPatchProposal): + def __init__( + self, + urn: str, + system_metadata: Optional[SystemMetadataClass] = None, + audit_header: Optional[KafkaAuditHeaderClass] = None, + ) -> None: + """ + Initializes a ChartPatchBuilder instance. + + Args: + urn: The URN of the chart + system_metadata: The system metadata of the chart (optional). + audit_header: The Kafka audit header of the chart (optional). + """ + super().__init__( + urn, "chart", system_metadata=system_metadata, audit_header=audit_header + ) + self.custom_properties_patch_helper = CustomPropertiesPatchHelper( + self, ChartInfo.ASPECT_NAME + ) + self.ownership_patch_helper = OwnershipPatchHelper(self) + + def _mint_auditstamp(self, message: Optional[str] = None) -> AuditStampClass: + """ + Creates an AuditStampClass instance with the current timestamp and other default values. + + Args: + message: The message associated with the audit stamp (optional). + + Returns: + An instance of AuditStampClass. + """ + return AuditStampClass( + time=int(time.time() * 1000.0), + actor="urn:li:corpuser:datahub", + message=message, + ) + + def _ensure_urn_type( + self, entity_type: str, edges: List[Edge], context: str + ) -> None: + """ + Ensures that the destination URNs in the given edges have the specified entity type. + + Args: + entity_type: The entity type to check against. + edges: A list of Edge objects. + context: The context or description of the operation. + + Raises: + ValueError: If any of the destination URNs is not of the specified entity type. + """ + for e in edges: + urn = Urn.create_from_string(e.destinationUrn) + if not urn.get_type() == entity_type: + raise ValueError( + f"{context}: {e.destinationUrn} is not of type {entity_type}" + ) + + def add_owner(self, owner: Owner) -> "ChartPatchBuilder": + """ + Adds an owner to the ChartPatchBuilder. + + Args: + owner: The Owner object to add. + + Returns: + The ChartPatchBuilder instance. 
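+
+        Example (illustrative; the owner urn is an assumption):
+
+            builder.add_owner(
+                Owner(owner="urn:li:corpuser:jdoe", type=OwnershipTypeClass.TECHNICAL_OWNER)
+            )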
+ """ + self.ownership_patch_helper.add_owner(owner) + return self + + def remove_owner( + self, owner: str, owner_type: Optional[OwnershipTypeClass] = None + ) -> "ChartPatchBuilder": + """ + Removes an owner from the ChartPatchBuilder. + + Args: + owner: The owner to remove. + owner_type: The ownership type of the owner (optional). + + Returns: + The ChartPatchBuilder instance. + + Notes: + `owner_type` is optional. + """ + self.ownership_patch_helper.remove_owner(owner, owner_type) + return self + + def set_owners(self, owners: List[Owner]) -> "ChartPatchBuilder": + """ + Sets the owners of the ChartPatchBuilder. + + Args: + owners: A list of Owner objects. + + Returns: + The ChartPatchBuilder instance. + """ + self.ownership_patch_helper.set_owners(owners) + return self + + def add_input_edge(self, input: Union[Edge, Urn, str]) -> "ChartPatchBuilder": + """ + Adds an input to the ChartPatchBuilder. + + Args: + input: The input, which can be an Edge object, Urn object, or a string. + + Returns: + The ChartPatchBuilder instance. + + Notes: + If `input` is an Edge object, it is used directly. If `input` is a Urn object or string, + it is converted to an Edge object and added with default audit stamps. + """ + if isinstance(input, Edge): + input_urn: str = input.destinationUrn + input_edge: Edge = input + elif isinstance(input, (Urn, str)): + input_urn = str(input) + + input_edge = Edge( + destinationUrn=input_urn, + created=self._mint_auditstamp(), + lastModified=self._mint_auditstamp(), + ) + + self._ensure_urn_type("dataset", [input_edge], "add_dataset") + self._add_patch( + ChartInfo.ASPECT_NAME, + "add", + path=f"/inputEdges/{quote(input_urn, safe='')}", + value=input_urn, + ) + return self + + def remove_input_edge(self, input: Union[str, Urn]) -> "ChartPatchBuilder": + """ + Removes an input from the ChartPatchBuilder. + + Args: + input: The input to remove, specified as a string or Urn object. + + Returns: + The ChartPatchBuilder instance. + """ + self._add_patch( + ChartInfo.ASPECT_NAME, + "remove", + path=f"/inputEdges/{input}", + value={}, + ) + return self + + def set_input_edges(self, inputs: List[Edge]) -> "ChartPatchBuilder": + """ + Sets the input edges for the ChartPatchBuilder. + + Args: + inputs: A list of Edge objects representing the input edges. + + Returns: + The ChartPatchBuilder instance. + + Notes: + This method replaces all existing inputs with the given inputs. + """ + self._add_patch( + ChartInfo.ASPECT_NAME, + "add", + path="/inputEdges", + value=inputs, + ) + return self + + def add_tag(self, tag: Tag) -> "ChartPatchBuilder": + """ + Adds a tag to the ChartPatchBuilder. + + Args: + tag: The Tag object representing the tag to be added. + + Returns: + The ChartPatchBuilder instance. + """ + self._add_patch( + GlobalTags.ASPECT_NAME, "add", path=f"/tags/{tag.tag}", value=tag + ) + return self + + def remove_tag(self, tag: Union[str, Urn]) -> "ChartPatchBuilder": + """ + Removes a tag from the ChartPatchBuilder. + + Args: + tag: The tag to remove, specified as a string or Urn object. + + Returns: + The ChartPatchBuilder instance. + """ + if isinstance(tag, str) and not tag.startswith("urn:li:tag:"): + tag = TagUrn.create_from_id(tag) + self._add_patch(GlobalTags.ASPECT_NAME, "remove", path=f"/tags/{tag}", value={}) + return self + + def add_term(self, term: Term) -> "ChartPatchBuilder": + """ + Adds a glossary term to the ChartPatchBuilder. + + Args: + term: The Term object representing the glossary term to be added. + + Returns: + The ChartPatchBuilder instance. 
+ """ + self._add_patch( + GlossaryTerms.ASPECT_NAME, "add", path=f"/terms/{term.urn}", value=term + ) + return self + + def remove_term(self, term: Union[str, Urn]) -> "ChartPatchBuilder": + """ + Removes a glossary term from the ChartPatchBuilder. + + Args: + term: The term to remove, specified as a string or Urn object. + + Returns: + The ChartPatchBuilder instance. + """ + if isinstance(term, str) and not term.startswith("urn:li:glossaryTerm:"): + term = "urn:li:glossaryTerm:" + term + self._add_patch( + GlossaryTerms.ASPECT_NAME, "remove", path=f"/terms/{term}", value={} + ) + return self + + def set_custom_properties( + self, custom_properties: Dict[str, str] + ) -> "ChartPatchBuilder": + """ + Sets the custom properties for the ChartPatchBuilder. + + Args: + custom_properties: A dictionary containing the custom properties to be set. + + Returns: + The ChartPatchBuilder instance. + + Notes: + This method replaces all existing custom properties with the given dictionary. + """ + self._add_patch( + ChartInfo.ASPECT_NAME, + "add", + path="/customProperties", + value=custom_properties, + ) + return self + + def add_custom_property(self, key: str, value: str) -> "ChartPatchBuilder": + """ + Adds a custom property to the ChartPatchBuilder. + + Args: + key: The key of the custom property. + value: The value of the custom property. + + Returns: + The ChartPatchBuilder instance. + """ + self.custom_properties_patch_helper.add_property(key, value) + return self + + def remove_custom_property(self, key: str) -> "ChartPatchBuilder": + """ + Removes a custom property from the ChartPatchBuilder. + + Args: + key: The key of the custom property to remove. + + Returns: + The ChartPatchBuilder instance. + """ + self.custom_properties_patch_helper.remove_property(key) + return self diff --git a/metadata-ingestion/src/datahub/specific/dashboard.py b/metadata-ingestion/src/datahub/specific/dashboard.py new file mode 100644 index 0000000000000..855dcc5685cea --- /dev/null +++ b/metadata-ingestion/src/datahub/specific/dashboard.py @@ -0,0 +1,410 @@ +import time +from typing import Dict, List, Optional, TypeVar, Union +from urllib.parse import quote + +from datahub.emitter.mcp_patch_builder import MetadataPatchProposal +from datahub.metadata.schema_classes import ( + AuditStampClass, + DashboardInfoClass as DashboardInfo, + EdgeClass as Edge, + GlobalTagsClass as GlobalTags, + GlossaryTermAssociationClass as Term, + GlossaryTermsClass as GlossaryTerms, + KafkaAuditHeaderClass, + OwnerClass as Owner, + OwnershipTypeClass, + SystemMetadataClass, + TagAssociationClass as Tag, +) +from datahub.specific.custom_properties import CustomPropertiesPatchHelper +from datahub.specific.ownership import OwnershipPatchHelper +from datahub.utilities.urns.tag_urn import TagUrn +from datahub.utilities.urns.urn import Urn + +T = TypeVar("T", bound=MetadataPatchProposal) + + +class DashboardPatchBuilder(MetadataPatchProposal): + def __init__( + self, + urn: str, + system_metadata: Optional[SystemMetadataClass] = None, + audit_header: Optional[KafkaAuditHeaderClass] = None, + ) -> None: + """ + Initializes a DashboardPatchBuilder instance. + + Args: + urn: The URN of the dashboard + system_metadata: The system metadata of the dashboard (optional). + audit_header: The Kafka audit header of the dashboard (optional). 
+ """ + super().__init__( + urn, "dashboard", system_metadata=system_metadata, audit_header=audit_header + ) + self.custom_properties_patch_helper = CustomPropertiesPatchHelper( + self, DashboardInfo.ASPECT_NAME + ) + self.ownership_patch_helper = OwnershipPatchHelper(self) + + def _mint_auditstamp(self, message: Optional[str] = None) -> AuditStampClass: + """ + Creates an AuditStampClass instance with the current timestamp and other default values. + + Args: + message: The message associated with the audit stamp (optional). + + Returns: + An instance of AuditStampClass. + """ + return AuditStampClass( + time=int(time.time() * 1000.0), + actor="urn:li:corpuser:datahub", + message=message, + ) + + def _ensure_urn_type( + self, entity_type: str, edges: List[Edge], context: str + ) -> None: + """ + Ensures that the destination URNs in the given edges have the specified entity type. + + Args: + entity_type: The entity type to check against. + edges: A list of Edge objects. + context: The context or description of the operation. + + Raises: + ValueError: If any of the destination URNs is not of the specified entity type. + """ + for e in edges: + urn = Urn.create_from_string(e.destinationUrn) + if not urn.get_type() == entity_type: + raise ValueError( + f"{context}: {e.destinationUrn} is not of type {entity_type}" + ) + + def add_owner(self, owner: Owner) -> "DashboardPatchBuilder": + """ + Adds an owner to the DashboardPatchBuilder. + + Args: + owner: The Owner object to add. + + Returns: + The DashboardPatchBuilder instance. + """ + self.ownership_patch_helper.add_owner(owner) + return self + + def remove_owner( + self, owner: str, owner_type: Optional[OwnershipTypeClass] = None + ) -> "DashboardPatchBuilder": + """ + Removes an owner from the DashboardPatchBuilder. + + Args: + owner: The owner to remove. + owner_type: The ownership type of the owner (optional). + + Returns: + The DashboardPatchBuilder instance. + + Notes: + `owner_type` is optional. + """ + self.ownership_patch_helper.remove_owner(owner, owner_type) + return self + + def set_owners(self, owners: List[Owner]) -> "DashboardPatchBuilder": + """ + Sets the owners of the DashboardPatchBuilder. + + Args: + owners: A list of Owner objects. + + Returns: + The DashboardPatchBuilder instance. + """ + self.ownership_patch_helper.set_owners(owners) + return self + + def add_dataset_edge( + self, dataset: Union[Edge, Urn, str] + ) -> "DashboardPatchBuilder": + """ + Adds an dataset to the DashboardPatchBuilder. + + Args: + dataset: The dataset, which can be an Edge object, Urn object, or a string. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If the dataset is not a Dataset urn. + + Notes: + If `dataset` is an Edge object, it is used directly. If `dataset` is a Urn object or string, + it is converted to an Edge object and added with default audit stamps. 
+ """ + if isinstance(dataset, Edge): + dataset_urn: str = dataset.destinationUrn + dataset_edge: Edge = dataset + elif isinstance(dataset, (Urn, str)): + dataset_urn = str(dataset) + if not dataset_urn.startswith("urn:li:dataset:"): + raise ValueError(f"Input {dataset} is not a Dataset urn") + + dataset_edge = Edge( + destinationUrn=dataset_urn, + created=self._mint_auditstamp(), + lastModified=self._mint_auditstamp(), + ) + + self._ensure_urn_type("dataset", [dataset_edge], "add_dataset") + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path=f"/datasetEdges/{quote(dataset_urn, safe='')}", + value=dataset_edge, + ) + return self + + def remove_dataset_edge(self, dataset: Union[str, Urn]) -> "DashboardPatchBuilder": + """ + Removes a dataset edge from the DashboardPatchBuilder. + + Args: + dataset: The dataset to remove, specified as a string or Urn object. + + Returns: + The DashboardPatchBuilder instance. + """ + self._add_patch( + DashboardInfo.ASPECT_NAME, + "remove", + path=f"/datasetEdges/{dataset}", + value={}, + ) + return self + + def set_dataset_edges(self, datasets: List[Edge]) -> "DashboardPatchBuilder": + """ + Sets the dataset edges for the DashboardPatchBuilder. + + Args: + datasets: A list of Edge objects representing the dataset edges. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If any of the input edges are not of type 'Datset'. + + Notes: + This method replaces all existing datasets with the given inputs. + """ + self._ensure_urn_type("dataset", datasets, "dataset edges") + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path="/datasetEdges", + value=datasets, + ) + return self + + def add_chart_edge(self, chart: Union[Edge, Urn, str]) -> "DashboardPatchBuilder": + """ + Adds a chart edge to the DashboardPatchBuilder. + + Args: + chart: The dataset, which can be an Edge object, Urn object, or a string. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If the edge is not a Chart urn. + + Notes: + If `chart` is an Edge object, it is used directly. If `chart` is a Urn object or string, + it is converted to an Edge object and added with default audit stamps. + """ + if isinstance(chart, Edge): + chart_urn: str = chart.destinationUrn + chart_edge: Edge = chart + elif isinstance(chart, (Urn, str)): + chart_urn = str(chart) + if not chart_urn.startswith("urn:li:chart:"): + raise ValueError(f"Input {chart} is not a Chart urn") + + chart_edge = Edge( + destinationUrn=chart_urn, + created=self._mint_auditstamp(), + lastModified=self._mint_auditstamp(), + ) + + self._ensure_urn_type("dataset", [chart_edge], "add_chart_edge") + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path=f"/chartEdges/{quote(chart_urn, safe='')}", + value=chart_edge, + ) + return self + + def remove_chart_edge(self, chart: Union[str, Urn]) -> "DashboardPatchBuilder": + """ + Removes an chart edge from the DashboardPatchBuilder. + + Args: + chart: The chart to remove, specified as a string or Urn object. + + Returns: + The DashboardPatchBuilder instance. + """ + self._add_patch( + DashboardInfo.ASPECT_NAME, + "remove", + path=f"/chartEdges/{chart}", + value={}, + ) + return self + + def set_chart_edges(self, charts: List[Edge]) -> "DashboardPatchBuilder": + """ + Sets the chart edges for the DashboardPatchBuilder. + + Args: + charts: A list of Edge objects representing the chart edges. + + Returns: + The DashboardPatchBuilder instance. + + Raises: + ValueError: If any of the edges are not of type 'chart'. 
+ + Notes: + This method replaces all existing charts with the given charts. + """ + self._ensure_urn_type("chart", charts, "set_charts") + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path="/chartEdges", + value=charts, + ) + return self + + def add_tag(self, tag: Tag) -> "DashboardPatchBuilder": + """ + Adds a tag to the DashboardPatchBuilder. + + Args: + tag: The Tag object representing the tag to be added. + + Returns: + The DashboardPatchBuilder instance. + """ + self._add_patch( + GlobalTags.ASPECT_NAME, "add", path=f"/tags/{tag.tag}", value=tag + ) + return self + + def remove_tag(self, tag: Union[str, Urn]) -> "DashboardPatchBuilder": + """ + Removes a tag from the DashboardPatchBuilder. + + Args: + tag: The tag to remove, specified as a string or Urn object. + + Returns: + The DashboardPatchBuilder instance. + """ + if isinstance(tag, str) and not tag.startswith("urn:li:tag:"): + tag = TagUrn.create_from_id(tag) + self._add_patch(GlobalTags.ASPECT_NAME, "remove", path=f"/tags/{tag}", value={}) + return self + + def add_term(self, term: Term) -> "DashboardPatchBuilder": + """ + Adds a glossary term to the DashboardPatchBuilder. + + Args: + term: The Term object representing the glossary term to be added. + + Returns: + The DashboardPatchBuilder instance. + """ + self._add_patch( + GlossaryTerms.ASPECT_NAME, "add", path=f"/terms/{term.urn}", value=term + ) + return self + + def remove_term(self, term: Union[str, Urn]) -> "DashboardPatchBuilder": + """ + Removes a glossary term from the DashboardPatchBuilder. + + Args: + term: The term to remove, specified as a string or Urn object. + + Returns: + The DashboardPatchBuilder instance. + """ + if isinstance(term, str) and not term.startswith("urn:li:glossaryTerm:"): + term = "urn:li:glossaryTerm:" + term + self._add_patch( + GlossaryTerms.ASPECT_NAME, "remove", path=f"/terms/{term}", value={} + ) + return self + + def set_custom_properties( + self, custom_properties: Dict[str, str] + ) -> "DashboardPatchBuilder": + """ + Sets the custom properties for the DashboardPatchBuilder. + + Args: + custom_properties: A dictionary containing the custom properties to be set. + + Returns: + The DashboardPatchBuilder instance. + + Notes: + This method replaces all existing custom properties with the given dictionary. + """ + self._add_patch( + DashboardInfo.ASPECT_NAME, + "add", + path="/customProperties", + value=custom_properties, + ) + return self + + def add_custom_property(self, key: str, value: str) -> "DashboardPatchBuilder": + """ + Adds a custom property to the DashboardPatchBuilder. + + Args: + key: The key of the custom property. + value: The value of the custom property. + + Returns: + The DashboardPatchBuilder instance. + """ + self.custom_properties_patch_helper.add_property(key, value) + return self + + def remove_custom_property(self, key: str) -> "DashboardPatchBuilder": + """ + Removes a custom property from the DashboardPatchBuilder. + + Args: + key: The key of the custom property to remove. + + Returns: + The DashboardPatchBuilder instance. 
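+
+        Example (illustrative; the property key is an assumption):
+
+            patch_builder.remove_custom_property("deprecated")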
+ """ + self.custom_properties_patch_helper.remove_property(key) + return self diff --git a/metadata-ingestion/src/datahub/specific/datajob.py b/metadata-ingestion/src/datahub/specific/datajob.py index 7ebaee6b918c1..0338a1320c15b 100644 --- a/metadata-ingestion/src/datahub/specific/datajob.py +++ b/metadata-ingestion/src/datahub/specific/datajob.py @@ -207,7 +207,7 @@ def set_input_datajobs(self, inputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("dataJob", inputs, "input datajobs") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/inputDatajobEdges", value=inputs, ) @@ -290,7 +290,7 @@ def set_input_datasets(self, inputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("dataset", inputs, "set_input_datasets") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/inputDatasetEdges", value=inputs, ) @@ -375,7 +375,7 @@ def set_output_datasets(self, outputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("dataset", outputs, "set_output_datasets") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/outputDatasetEdges", value=outputs, ) @@ -463,7 +463,7 @@ def set_input_dataset_fields(self, inputs: List[Edge]) -> "DataJobPatchBuilder": self._ensure_urn_type("schemaField", inputs, "set_input_dataset_fields") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/inputDatasetFields", value=inputs, ) @@ -551,7 +551,7 @@ def set_output_dataset_fields(self, outputs: List[Edge]) -> "DataJobPatchBuilder self._ensure_urn_type("schemaField", outputs, "set_output_dataset_fields") self._add_patch( DataJobInputOutput.ASPECT_NAME, - "replace", + "add", path="/outputDatasetFields", value=outputs, ) @@ -636,7 +636,7 @@ def set_custom_properties( """ self._add_patch( DataJobInfo.ASPECT_NAME, - "replace", + "add", path="/customProperties", value=custom_properties, ) diff --git a/metadata-ingestion/src/datahub/specific/dataproduct.py b/metadata-ingestion/src/datahub/specific/dataproduct.py index bb49ac47b3ef8..2c174e0c9a6cb 100644 --- a/metadata-ingestion/src/datahub/specific/dataproduct.py +++ b/metadata-ingestion/src/datahub/specific/dataproduct.py @@ -85,7 +85,7 @@ def remove_term(self, term: Union[str, Urn]) -> "DataProductPatchBuilder": def set_name(self, name: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/name", value=name, ) @@ -94,7 +94,7 @@ def set_name(self, name: str) -> "DataProductPatchBuilder": def set_description(self, description: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/description", value=description, ) @@ -105,7 +105,7 @@ def set_custom_properties( ) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/customProperties", value=custom_properties, ) @@ -124,7 +124,7 @@ def set_assets( ) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/assets", value=assets, ) @@ -151,7 +151,7 @@ def remove_asset(self, asset_urn: str) -> "DataProductPatchBuilder": def set_external_url(self, external_url: str) -> "DataProductPatchBuilder": self._add_patch( DataProductProperties.ASPECT_NAME, - "replace", + "add", path="/externalUrl", value=external_url, ) diff --git a/metadata-ingestion/src/datahub/specific/dataset.py b/metadata-ingestion/src/datahub/specific/dataset.py index 294a80572669b..62ee4fc57b61b 100644 
--- a/metadata-ingestion/src/datahub/specific/dataset.py +++ b/metadata-ingestion/src/datahub/specific/dataset.py @@ -143,7 +143,7 @@ def remove_upstream_lineage( def set_upstream_lineages(self, upstreams: List[Upstream]) -> "DatasetPatchBuilder": self._add_patch( - UpstreamLineage.ASPECT_NAME, "replace", path="/upstreams", value=upstreams + UpstreamLineage.ASPECT_NAME, "add", path="/upstreams", value=upstreams ) return self @@ -297,7 +297,7 @@ def set_description( DatasetProperties.ASPECT_NAME if not editable else EditableDatasetProperties.ASPECT_NAME, - "replace", + "add", path="/description", value=description, ) @@ -308,7 +308,7 @@ def set_custom_properties( ) -> "DatasetPatchBuilder": self._add_patch( DatasetProperties.ASPECT_NAME, - "replace", + "add", path="/customProperties", value=custom_properties, ) @@ -326,7 +326,7 @@ def set_display_name(self, display_name: str) -> "DatasetPatchBuilder": if display_name is not None: self._add_patch( DatasetProperties.ASPECT_NAME, - "replace", + "add", path="/name", value=display_name, ) diff --git a/metadata-ingestion/src/datahub/specific/ownership.py b/metadata-ingestion/src/datahub/specific/ownership.py index 334b45a67437f..c2a3874a3a33f 100644 --- a/metadata-ingestion/src/datahub/specific/ownership.py +++ b/metadata-ingestion/src/datahub/specific/ownership.py @@ -43,6 +43,6 @@ def remove_owner( def set_owners(self, owners: List[OwnerClass]) -> "OwnershipPatchHelper": self._parent._add_patch( - OwnershipClass.ASPECT_NAME, "replace", path="/owners", value=owners + OwnershipClass.ASPECT_NAME, "add", path="/owners", value=owners ) return self diff --git a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json index 97c2330f58bc7..66bc2ce0c2a0c 100644 --- a/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json +++ b/metadata-ingestion/tests/unit/api/entities/dataproducts/golden_dataproduct_out_upsert.json @@ -5,7 +5,7 @@ "changeType": "PATCH", "aspectName": "dataProductProperties", "aspect": { - "value": "[{\"op\": \"replace\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"replace\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"replace\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"replace\", \"path\": \"/externalUrl\", \"value\": \"https://github.com/datahub-project/datahub\"}]", + "value": "[{\"op\": \"add\", \"path\": \"/name\", \"value\": \"Pet of the Week Campaign\"}, {\"op\": \"add\", \"path\": \"/assets\", \"value\": [{\"destinationUrn\": \"urn:li:container:DATABASE\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": \"urn:li:container:SCHEMA\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}, {\"destinationUrn\": 
\"urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,test_feature_table_all_feature_dtypes)\", \"created\": {\"time\": 1681455600000, \"actor\": \"urn:li:corpuser:datahub\", \"message\": \"yaml\"}}]}, {\"op\": \"add\", \"path\": \"/customProperties\", \"value\": {\"version\": \"2.0\", \"classification\": \"pii\"}}, {\"op\": \"add\", \"path\": \"/externalUrl\", \"value\": \"https://github.com/datahub-project/datahub\"}]", "contentType": "application/json-patch+json" } }, diff --git a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json index ed5a7723ac2bf..bcc619a09401e 100644 --- a/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json +++ b/metadata-ingestion/tests/unit/patch/complex_dataset_patch.json @@ -7,7 +7,7 @@ "aspect": { "json": [ { - "op": "replace", + "op": "add", "path": "/description", "value": "test description" }, diff --git a/metadata-ingestion/tests/unit/patch/test_patch_builder.py b/metadata-ingestion/tests/unit/patch/test_patch_builder.py index f05c4978f8644..e68f948be8aa0 100644 --- a/metadata-ingestion/tests/unit/patch/test_patch_builder.py +++ b/metadata-ingestion/tests/unit/patch/test_patch_builder.py @@ -3,7 +3,12 @@ import pytest -from datahub.emitter.mce_builder import make_dataset_urn, make_tag_urn +from datahub.emitter.mce_builder import ( + make_chart_urn, + make_dashboard_urn, + make_dataset_urn, + make_tag_urn, +) from datahub.ingestion.sink.file import write_metadata_file from datahub.metadata.schema_classes import ( DatasetLineageTypeClass, @@ -15,6 +20,8 @@ TagAssociationClass, UpstreamClass, ) +from datahub.specific.chart import ChartPatchBuilder +from datahub.specific.dashboard import DashboardPatchBuilder from datahub.specific.dataset import DatasetPatchBuilder @@ -80,3 +87,41 @@ def test_complex_dataset_patch( pytestconfig.rootpath / "tests/unit/patch/complex_dataset_patch.json" ).read_text() ) + + +def test_basic_chart_patch_builder(): + patcher = ChartPatchBuilder( + make_chart_urn(platform="hive", name="fct_users_created") + ).add_tag(TagAssociationClass(tag=make_tag_urn("test_tag"))) + + assert patcher.build() == [ + MetadataChangeProposalClass( + entityType="chart", + entityUrn="urn:li:chart:(hive,fct_users_created)", + changeType="PATCH", + aspectName="globalTags", + aspect=GenericAspectClass( + value=b'[{"op": "add", "path": "/tags/urn:li:tag:test_tag", "value": {"tag": "urn:li:tag:test_tag"}}]', + contentType="application/json-patch+json", + ), + ), + ] + + +def test_basic_dashboard_patch_builder(): + patcher = DashboardPatchBuilder( + make_dashboard_urn(platform="hive", name="fct_users_created") + ).add_tag(TagAssociationClass(tag=make_tag_urn("test_tag"))) + + assert patcher.build() == [ + MetadataChangeProposalClass( + entityType="dashboard", + entityUrn="urn:li:dashboard:(hive,fct_users_created)", + changeType="PATCH", + aspectName="globalTags", + aspect=GenericAspectClass( + value=b'[{"op": "add", "path": "/tags/urn:li:tag:test_tag", "value": {"tag": "urn:li:tag:test_tag"}}]', + contentType="application/json-patch+json", + ), + ), + ] diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java new file mode 100644 index 0000000000000..0655d2b3eb8eb --- /dev/null +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java @@ -0,0 +1,41 
@@ +package datahub.client.patch.chart; + +import static com.linkedin.metadata.Constants.*; +import static datahub.client.patch.common.PatchUtil.*; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.linkedin.common.urn.Urn; +import datahub.client.patch.AbstractMultiFieldPatchBuilder; +import datahub.client.patch.PatchOperationType; +import javax.annotation.Nonnull; +import org.apache.commons.lang3.tuple.ImmutableTriple; + +public class ChartInfoPatchBuilder extends AbstractMultiFieldPatchBuilder { + private static final String INPUT_EDGES_PATH_START = "/inputEdges/"; + + // Simplified with just Urn + public ChartInfoPatchBuilder addInputEdge(@Nonnull Urn urn) { + ObjectNode value = createEdgeValue(urn); + + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_EDGES_PATH_START + urn, value)); + return this; + } + + public ChartInfoPatchBuilder removeInputEdge(@Nonnull Urn urn) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), INPUT_EDGES_PATH_START + urn, null)); + return this; + } + + @Override + protected String getAspectName() { + return CHART_INFO_ASPECT_NAME; + } + + @Override + protected String getEntityType() { + return CHART_ENTITY_NAME; + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java new file mode 100644 index 0000000000000..69db36c6e038c --- /dev/null +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java @@ -0,0 +1,84 @@ +package datahub.client.patch.common; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.linkedin.common.Edge; +import com.linkedin.common.urn.Urn; +import javax.annotation.Nonnull; + +public class PatchUtil { + private PatchUtil() {} + + private static final String TIME_KEY = "time"; + private static final String ACTOR_KEY = "actor"; + private static final String IMPERSONATOR_KEY = "impersonator"; + private static final String MESSAGE_KEY = "message"; + private static final String LAST_MODIFIED_KEY = "lastModified"; + private static final String CREATED_KEY = "created"; + private static final String DESTINATION_URN_KEY = "destinationUrn"; + private static final String SOURCE_URN_KEY = "sourceUrn"; + + private static final String PROPERTIES_KEY = "properties"; + + public static ObjectNode createEdgeValue(@Nonnull Edge edge) { + ObjectNode value = instance.objectNode(); + + ObjectNode created = instance.objectNode(); + if (edge.getCreated() == null) { + created.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); + } else { + created + .put(TIME_KEY, edge.getCreated().getTime()) + .put(ACTOR_KEY, edge.getCreated().getActor().toString()); + if (edge.getCreated().getImpersonator() != null) { + created.put(IMPERSONATOR_KEY, edge.getCreated().getImpersonator().toString()); + } + if (edge.getCreated().getMessage() != null) { + created.put(MESSAGE_KEY, edge.getCreated().getMessage()); + } + } + value.set(CREATED_KEY, created); + + ObjectNode lastModified = instance.objectNode(); + if (edge.getLastModified() == null) { + lastModified.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); + } else { + lastModified + .put(TIME_KEY, edge.getLastModified().getTime()) + .put(ACTOR_KEY, 
edge.getLastModified().getActor().toString()); + if (edge.getLastModified().getImpersonator() != null) { + lastModified.put(IMPERSONATOR_KEY, edge.getLastModified().getImpersonator().toString()); + } + if (edge.getLastModified().getMessage() != null) { + lastModified.put(MESSAGE_KEY, edge.getLastModified().getMessage()); + } + } + value.set(LAST_MODIFIED_KEY, lastModified); + + if (edge.getProperties() != null) { + ObjectNode propertiesNode = instance.objectNode(); + edge.getProperties().forEach((k, v) -> propertiesNode.set(k, instance.textNode(v))); + value.set(PROPERTIES_KEY, propertiesNode); + } + + value.put(DESTINATION_URN_KEY, edge.getDestinationUrn().toString()); + if (edge.getSourceUrn() != null) { + value.put(SOURCE_URN_KEY, edge.getSourceUrn().toString()); + } + + return value; + } + + public static ObjectNode createEdgeValue(@Nonnull Urn urn) { + ObjectNode value = instance.objectNode(); + ObjectNode auditStamp = instance.objectNode(); + auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); + + value.put(DESTINATION_URN_KEY, urn.toString()).set(LAST_MODIFIED_KEY, auditStamp); + value.set(CREATED_KEY, auditStamp); + + return value; + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java new file mode 100644 index 0000000000000..cadde582f1c64 --- /dev/null +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java @@ -0,0 +1,103 @@ +package datahub.client.patch.dashboard; + +import static com.linkedin.metadata.Constants.*; +import static datahub.client.patch.common.PatchUtil.*; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.linkedin.common.Edge; +import com.linkedin.common.urn.ChartUrn; +import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.common.urn.Urn; +import datahub.client.patch.AbstractMultiFieldPatchBuilder; +import datahub.client.patch.PatchOperationType; +import javax.annotation.Nonnull; +import org.apache.commons.lang3.tuple.ImmutableTriple; + +public class DashboardInfoPatchBuilder + extends AbstractMultiFieldPatchBuilder { + private static final String CHART_EDGES_PATH_START = "/chartEdges/"; + private static final String DATASET_EDGES_PATH_START = "/datasetEdges/"; + + // Simplified with just Urn + public DashboardInfoPatchBuilder addChartEdge(@Nonnull ChartUrn urn) { + ObjectNode value = createEdgeValue(urn); + + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), CHART_EDGES_PATH_START + urn, value)); + return this; + } + + public DashboardInfoPatchBuilder removeChartEdge(@Nonnull ChartUrn urn) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), CHART_EDGES_PATH_START + urn, null)); + return this; + } + + public DashboardInfoPatchBuilder addDatasetEdge(@Nonnull DatasetUrn urn) { + ObjectNode value = createEdgeValue(urn); + + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), DATASET_EDGES_PATH_START + urn, value)); + return this; + } + + public DashboardInfoPatchBuilder removeDatasetEdge(@Nonnull DatasetUrn urn) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), DATASET_EDGES_PATH_START + urn, null)); + return this; + } + + // Full Edge modification + public DashboardInfoPatchBuilder addEdge(@Nonnull Edge edge) { + ObjectNode value = 
createEdgeValue(edge); + String path = getEdgePath(edge); + + pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), path, value)); + return this; + } + + public DashboardInfoPatchBuilder removeEdge(@Nonnull Edge edge) { + String path = getEdgePath(edge); + + pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), path, null)); + return this; + } + + /** + * Determines Edge path based on supplied Urn, if not a valid entity type throws + * IllegalArgumentException + * + * @param edge + * @return + * @throws IllegalArgumentException if destinationUrn is an invalid entity type + */ + private String getEdgePath(@Nonnull Edge edge) { + Urn destinationUrn = edge.getDestinationUrn(); + + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType())) { + return DATASET_EDGES_PATH_START + destinationUrn; + } + + if (CHART_ENTITY_NAME.equals(destinationUrn.getEntityType())) { + return CHART_EDGES_PATH_START + destinationUrn; + } + + // TODO: Output Data Jobs not supported by aspect, add here if this changes + + throw new IllegalArgumentException( + String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); + } + + @Override + protected String getAspectName() { + return DASHBOARD_INFO_ASPECT_NAME; + } + + @Override + protected String getEntityType() { + return DASHBOARD_ENTITY_NAME; + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java index 0fb0454533fc0..fc250daffe916 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java @@ -2,6 +2,7 @@ import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; import static com.linkedin.metadata.Constants.*; +import static datahub.client.patch.common.PatchUtil.*; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; @@ -20,21 +21,9 @@ public class DataJobInputOutputPatchBuilder private static final String INPUT_DATA_JOB_EDGES_PATH_START = "/inputDatajobEdges/"; private static final String INPUT_DATASET_EDGES_PATH_START = "/inputDatasetEdges/"; private static final String OUTPUT_DATASET_EDGES_PATH_START = "/outputDatasetEdges/"; - - private static final String DESTINATION_URN_KEY = "destinationUrn"; - private static final String SOURCE_URN_KEY = "sourceUrn"; - private static final String LAST_MODIFIED_KEY = "lastModified"; - private static final String CREATED_KEY = "created"; - private static final String PROPERTIES_KEY = "properties"; - private static final String INPUT_DATASET_FIELDS_PATH_START = "/inputDatasetFields/"; private static final String OUTPUT_DATASET_FIELDS_PATH_START = "/outputDatasetFields/"; - private static final String TIME_KEY = "time"; - private static final String ACTOR_KEY = "actor"; - private static final String IMPERSONATOR_KEY = "impersonator"; - private static final String MESSAGE_KEY = "message"; - // Simplified with just Urn public DataJobInputOutputPatchBuilder addInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { ObjectNode value = createEdgeValue(dataJobUrn); @@ -144,66 +133,6 @@ public DataJobInputOutputPatchBuilder removeEdge( return this; } - private ObjectNode createEdgeValue(@Nonnull Urn urn) { - ObjectNode 
value = instance.objectNode(); - ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - - value.put(DESTINATION_URN_KEY, urn.toString()).set(LAST_MODIFIED_KEY, auditStamp); - value.set(CREATED_KEY, auditStamp); - - return value; - } - - private ObjectNode createEdgeValue(@Nonnull Edge edge) { - ObjectNode value = instance.objectNode(); - - ObjectNode created = instance.objectNode(); - if (edge.getCreated() == null) { - created.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - } else { - created - .put(TIME_KEY, edge.getCreated().getTime()) - .put(ACTOR_KEY, edge.getCreated().getActor().toString()); - if (edge.getCreated().getImpersonator() != null) { - created.put(IMPERSONATOR_KEY, edge.getCreated().getImpersonator().toString()); - } - if (edge.getCreated().getMessage() != null) { - created.put(MESSAGE_KEY, edge.getCreated().getMessage()); - } - } - value.set(CREATED_KEY, created); - - ObjectNode lastModified = instance.objectNode(); - if (edge.getLastModified() == null) { - lastModified.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - } else { - lastModified - .put(TIME_KEY, edge.getLastModified().getTime()) - .put(ACTOR_KEY, edge.getLastModified().getActor().toString()); - if (edge.getLastModified().getImpersonator() != null) { - lastModified.put(IMPERSONATOR_KEY, edge.getLastModified().getImpersonator().toString()); - } - if (edge.getLastModified().getMessage() != null) { - lastModified.put(MESSAGE_KEY, edge.getLastModified().getMessage()); - } - } - value.set(LAST_MODIFIED_KEY, lastModified); - - if (edge.getProperties() != null) { - ObjectNode propertiesNode = instance.objectNode(); - edge.getProperties().forEach((k, v) -> propertiesNode.set(k, instance.textNode(v))); - value.set(PROPERTIES_KEY, propertiesNode); - } - - value.put(DESTINATION_URN_KEY, edge.getDestinationUrn().toString()); - if (edge.getSourceUrn() != null) { - value.put(SOURCE_URN_KEY, edge.getSourceUrn().toString()); - } - - return value; - } - /** * Determines Edge path based on supplied Urn, if not a valid entity type throws * IllegalArgumentException diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index 563742990f546..5bd10245899e4 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -8,6 +8,7 @@ import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.OwnershipType; import com.linkedin.common.TagAssociation; +import com.linkedin.common.urn.ChartUrn; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DataPlatformUrn; @@ -22,7 +23,9 @@ import datahub.client.MetadataWriteResponse; import datahub.client.file.FileEmitter; import datahub.client.file.FileEmitterConfig; +import datahub.client.patch.chart.ChartInfoPatchBuilder; import datahub.client.patch.common.OwnershipPatchBuilder; +import datahub.client.patch.dashboard.DashboardInfoPatchBuilder; import datahub.client.patch.dataflow.DataFlowInfoPatchBuilder; import datahub.client.patch.datajob.DataJobInfoPatchBuilder; import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; @@ -551,4 +554,90 @@ public void testLocalDataJobInputAddEdge() { 
       System.out.println(Arrays.asList(e.getStackTrace()));
     }
   }
+
+  @Test
+  @Ignore
+  public void testLocalChartInfoAdd() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal chartInfoPatch =
+          new ChartInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:chart:(dashboardTool,chartId)"))
+              .addInputEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(chartInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
+
+  @Test
+  @Ignore
+  public void testLocalChartInfoRemove() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal chartInfoPatch =
+          new ChartInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:chart:(dashboardTool,chartId)"))
+              .removeInputEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(chartInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
+
+  @Test
+  @Ignore
+  public void testLocalDashboardInfoAdd() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal dashboardInfoPatch =
+          new DashboardInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:dashboard:(dashboardTool,dashboardId)"))
+              .addDatasetEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .addChartEdge(ChartUrn.createFromString("urn:li:chart:(dashboardTool,chartId)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(dashboardInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
+
+  @Test
+  @Ignore
+  public void testLocalDashboardInfoRemove() {
+    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build());
+    try {
+      MetadataChangeProposal dashboardInfoPatch =
+          new DashboardInfoPatchBuilder()
+              .urn(UrnUtils.getUrn("urn:li:dashboard:(dashboardTool,dashboardId)"))
+              .removeDatasetEdge(
+                  DatasetUrn.createFromString(
+                      "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)"))
+              .removeChartEdge(ChartUrn.createFromString("urn:li:chart:(dashboardTool,chartId)"))
+              .build();
+      Future<MetadataWriteResponse> response = restEmitter.emit(dashboardInfoPatch);
+
+      System.out.println(response.get().getResponseContent());
+
+    } catch (URISyntaxException | IOException | ExecutionException | InterruptedException e) {
+      System.out.println(Arrays.asList(e.getStackTrace()));
+    }
+  }
 }

From 296e41dfed325116c2a5661c32ae27790b28aafd Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Wed, 3 Jan 2024 15:58:50 -0600
Subject: [PATCH 173/263] feat(docker): docker compose profiles updates (#9514)

Co-authored-by: Harshal Sheth
---
 docker/build.gradle | 7 ++++++-
 docker/profiles/README.md | 2 +-
 docker/profiles/docker-compose.actions.yml | 2 +-
 docker/profiles/docker-compose.frontend.yml | 4 ++--
docker/profiles/docker-compose.gms.yml | 16 ++++++++-------- .../profiles/docker-compose.prerequisites.yml | 18 +++++++++--------- docs/developers.md | 2 +- docs/how/updating-datahub.md | 3 ++- 8 files changed, 30 insertions(+), 24 deletions(-) diff --git a/docker/build.gradle b/docker/build.gradle index 190202620c382..189c4959e0442 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -31,6 +31,11 @@ ext { pg_quickstart_modules = quickstart_modules - [':docker:mysql-setup'] + [':docker:postgres-setup'] } +tasks.register('minDockerCompose2.20', Exec) { + executable 'bash' + args '-c', 'echo -e "$(docker compose version --short)\n2.20"|sort --version-sort --check=quiet --reverse' +} + tasks.register('quickstart') {} tasks.register('quickstartSlim') {} tasks.register('quickstartDebug') {} @@ -118,9 +123,9 @@ tasks.getByName('quickstartDebugComposeUp').dependsOn( ) tasks.withType(ComposeUp).configureEach { shouldRunAfter('quickstartNuke') + dependsOn tasks.named("minDockerCompose2.20") } - task debugReload(type: Exec) { def cmd = ['docker compose -p datahub --profile debug'] + compose_args + ['restart'] + debug_reloadable commandLine 'bash', '-c', cmd.join(" ") diff --git a/docker/profiles/README.md b/docker/profiles/README.md index df09f15cd85ce..fb3c9e3c84a7a 100644 --- a/docker/profiles/README.md +++ b/docker/profiles/README.md @@ -5,7 +5,7 @@ for quickstart use-cases as well as development use-cases. These configurations infrastructure configurations that DataHub can operate on. Requirements: -* Use the profiles requires a modern version of docker. +* Using profiles requires docker compose >= 2.20. * If using the debug/development profiles, you will need to have built the `debug` docker images locally. See the Development Profiles section for more details. 
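As an aside on the `minDockerCompose2.20` task above: the reversed version sort is what implements the `>=` comparison. The same check can be run standalone; a sketch assuming GNU coreutils `sort` and Docker Compose v2 on the PATH:

```bash
# Exits 0 only when the installed Compose version sorts at or above 2.20.
# `sort --check=quiet --reverse` fails unless the two lines are already in
# descending version order, i.e. unless installed >= 2.20.
printf '%s\n2.20\n' "$(docker compose version --short)" \
  | sort --version-sort --check=quiet --reverse \
  && echo "Compose >= 2.20: OK" \
  || echo "Compose >= 2.20 required"
```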
```bash diff --git a/docker/profiles/docker-compose.actions.yml b/docker/profiles/docker-compose.actions.yml index a509a6a67d270..676a72bae3201 100644 --- a/docker/profiles/docker-compose.actions.yml +++ b/docker/profiles/docker-compose.actions.yml @@ -1,7 +1,7 @@ x-datahub-actions-service: &datahub-actions-service hostname: actions - image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} + image: ${DATAHUB_ACTIONS_IMAGE:-${DATAHUB_ACTIONS_REPO:-acryldata}/datahub-actions}:${ACTIONS_VERSION:-head} env_file: datahub-actions/env/docker.env environment: ACTIONS_EXTRA_PACKAGES: ${ACTIONS_EXTRA_PACKAGES:-} diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml index 80cb4e7b4b596..6e1bbc0be70f5 100644 --- a/docker/profiles/docker-compose.frontend.yml +++ b/docker/profiles/docker-compose.frontend.yml @@ -1,7 +1,7 @@ x-datahub-frontend-service: &datahub-frontend-service hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 env_file: datahub-frontend/env/docker.env @@ -12,7 +12,7 @@ x-datahub-frontend-service: &datahub-frontend-service x-datahub-frontend-service-dev: &datahub-frontend-service-dev <<: *datahub-frontend-service - image: linkedin/datahub-frontend-react:debug + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-frontend-react}:debug ports: - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml index 01602c8b906b9..93072a76d4041 100644 --- a/docker/profiles/docker-compose.gms.yml +++ b/docker/profiles/docker-compose.gms.yml @@ -54,7 +54,7 @@ x-datahub-dev-telemetry-env: &datahub-dev-telemetry-env ################################# x-datahub-system-update-service: &datahub-system-update-service hostname: datahub-system-update - image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_UPGRADE_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-upgrade}:${DATAHUB_VERSION:-head} command: - -u - SystemUpdate @@ -67,7 +67,7 @@ x-datahub-system-update-service: &datahub-system-update-service x-datahub-system-update-service-dev: &datahub-system-update-service-dev <<: *datahub-system-update-service - image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:debug + image: ${DATAHUB_UPGRADE_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-upgrade}:debug ports: - ${DATAHUB_MAPPED_UPGRADE_DEBUG_PORT:-5003}:5003 environment: &datahub-system-update-dev-env @@ -85,7 +85,7 @@ x-datahub-system-update-service-dev: &datahub-system-update-service-dev ################################# x-datahub-gms-service: &datahub-gms-service hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 env_file: datahub-gms/env/docker.env @@ -102,7 +102,7 @@ x-datahub-gms-service: &datahub-gms-service x-datahub-gms-service-dev: &datahub-gms-service-dev <<: *datahub-gms-service - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:debug + image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-gms}:debug ports: - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 - 
${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 @@ -128,7 +128,7 @@ x-datahub-gms-service-dev: &datahub-gms-service-dev ################################# x-datahub-mae-consumer-service: &datahub-mae-consumer-service hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 env_file: datahub-mae-consumer/env/docker.env @@ -137,7 +137,7 @@ x-datahub-mae-consumer-service: &datahub-mae-consumer-service x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev <<: *datahub-mae-consumer-service - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:debug + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mae-consumer}:debug environment: <<: [*datahub-dev-telemetry-env, *datahub-mae-consumer-env] volumes: @@ -151,7 +151,7 @@ x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev ################################# x-datahub-mce-consumer-service: &datahub-mce-consumer-service hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 env_file: datahub-mce-consumer/env/docker.env @@ -160,7 +160,7 @@ x-datahub-mce-consumer-service: &datahub-mce-consumer-service x-datahub-mce-consumer-service-dev: &datahub-mce-consumer-service-dev <<: *datahub-mce-consumer-service - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:debug + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mce-consumer}:debug environment: <<: [*datahub-dev-telemetry-env, *datahub-mce-consumer-env] volumes: diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml index d90d4a252f993..232239c6c70d0 100644 --- a/docker/profiles/docker-compose.prerequisites.yml +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -128,7 +128,7 @@ services: container_name: mysql-setup profiles: *mysql-profiles-quickstart hostname: mysql-setup - image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mysql-setup}:${DATAHUB_VERSION:-head} env_file: mysql-setup/env/docker.env depends_on: mysql: @@ -139,7 +139,7 @@ services: <<: *mysql-setup container_name: mysql-setup-dev profiles: *mysql-profiles-dev - image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:debug + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mysql-setup}:debug postgres: container_name: postgres profiles: *postgres-profiles @@ -162,7 +162,7 @@ services: container_name: postgres-setup profiles: *postgres-profiles-quickstart hostname: postgres-setup - image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-postgres-setup}:${DATAHUB_VERSION:-head} env_file: postgres-setup/env/docker.env depends_on: postgres: @@ -173,7 +173,7 @@ services: <<: *postgres-setup container_name: postgres-setup-dev profiles: *postgres-profiles-dev - image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:debug + image: 
${DATAHUB_POSTGRES_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-postgres-setup}:debug cassandra: container_name: cassandra profiles: *cassandra-profiles @@ -267,7 +267,7 @@ services: container_name: kafka-setup profiles: *profiles-quickstart hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-kafka-setup}:${DATAHUB_VERSION:-head} env_file: kafka-setup/env/docker.env environment: &kafka-setup-env DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-false} @@ -285,7 +285,7 @@ services: environment: <<: *kafka-setup-env DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-true} - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:debug + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-kafka-setup}:debug elasticsearch: container_name: elasticsearch profiles: *elasticsearch-profiles @@ -311,7 +311,7 @@ services: - esdata:/usr/share/elasticsearch/data elasticsearch-setup-dev: &elasticsearch-setup-dev container_name: elasticsearch-setup-dev - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:debug profiles: *elasticsearch-profiles hostname: elasticsearch-setup env_file: elasticsearch-setup/env/docker.env @@ -351,7 +351,7 @@ services: container_name: opensearch-setup profiles: *opensearch-profiles-quickstart hostname: opensearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} environment: <<: *search-datastore-environment USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} @@ -365,7 +365,7 @@ services: container_name: opensearch-setup-dev profiles: *opensearch-profiles-dev hostname: opensearch-setup-dev - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:debug + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-elasticsearch-setup}:debug environment: <<: *search-datastore-environment USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} diff --git a/docs/developers.md b/docs/developers.md index fe007a56ddc68..4e31aceeb4382 100644 --- a/docs/developers.md +++ b/docs/developers.md @@ -9,7 +9,7 @@ title: "Local Development" - [Java 17 JDK](https://openjdk.org/projects/jdk/17/) - [Python 3.10](https://www.python.org/downloads/release/python-3100/) - [Docker](https://www.docker.com/) -- [Docker Compose](https://docs.docker.com/compose/) +- [Docker Compose >=2.20](https://docs.docker.com/compose/) - Docker engine with at least 8GB of memory to run tests. ::: diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index 61ad2d623d72a..fb082bea7d151 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -8,7 +8,8 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances. 
- Neo4j 5.x, may require migration from 4.x -- Build now requires JDK17 (Runtime Java 11) +- Build requires JDK17 (Runtime Java 11) +- Build requires Docker Compose > 2.20 ### Potential Downtime From 424057862790b520e6d6e7d9d0a04f52aa46e500 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Wed, 3 Jan 2024 17:16:16 -0500 Subject: [PATCH 174/263] feat(ui): switch to vite and vitest (#9451) --- .github/workflows/metadata-io.yml | 3 - .github/workflows/spark-smoke-test.yml | 3 +- build.gradle | 4 + datahub-frontend/build.gradle | 18 - datahub-frontend/conf/routes | 11 +- datahub-web-react/.env | 4 +- datahub-web-react/.eslintrc.js | 3 +- datahub-web-react/build.gradle | 66 +- datahub-web-react/craco.config.js | 75 - datahub-web-react/datahub-frontend.graphql | 389 - datahub-web-react/{public => }/index.html | 14 +- datahub-web-react/package.json | 59 +- .../public/{ => assets}/favicon.ico | Bin .../public/{ => assets}/logo.png | Bin datahub-web-react/public/manifest.json | 2 +- datahub-web-react/src/App.less | 5 +- datahub-web-react/src/App.test.tsx | 15 +- datahub-web-react/src/App.tsx | 35 +- datahub-web-react/src/Mocks.tsx | 12 + datahub-web-react/src/app/Routes.tsx | 4 +- .../src/app/analytics/analytics.ts | 2 +- .../src/app/domain/DomainIcon.tsx | 2 +- .../src/app/entity/dataJob/tabs/RunsTab.tsx | 2 +- .../entity/dataset/profile/OperationsTab.tsx | 2 +- .../dataset/profile/__tests__/Schema.test.tsx | 70 +- .../__tests__/SchemaDescriptionField.test.tsx | 4 +- .../__tests__/PlatformContent.test.tsx | 6 +- .../embed/UpstreamHealth/FailingEntity.tsx | 2 +- .../embed/UpstreamHealth/UpstreamHealth.tsx | 2 +- .../__tests__/DocumentationTab.test.tsx | 8 +- .../editor/__tests__/Editor.test.tsx | 2 +- .../Entity/__tests__/DataJobFlowTab.test.tsx | 6 +- .../entity/user/__tests__/UserHeader.test.tsx | 11 - .../ingest/source/builder/RecipeBuilder.tsx | 8 +- .../source/builder/RecipeForm/FormField.tsx | 8 +- .../source/builder/RecipeForm/RecipeForm.tsx | 8 +- .../RecipeForm/SecretField/SecretField.tsx | 16 +- .../TestConnection/TestConnectionModal.tsx | 2 +- .../app/ingest/source/builder/YamlEditor.tsx | 3 +- .../lineage/__tests__/LineageEdges.test.tsx | 22 +- .../__tests__/LineageEntityView.test.tsx | 2 +- .../lineage/__tests__/LineageTree.test.tsx | 12 +- .../policy/_tests_/policyUtils.test.tsx | 175 +- .../src/app/preview/DefaultPreviewCard.tsx | 4 +- .../__tests__/Recommendations.test.tsx | 1 + .../src/app/search/ToggleSidebarButton.tsx | 4 +- .../__tests__/FilterRendererRegistry.test.tsx | 6 +- .../src/app/search/filters/utils.tsx | 2 +- .../src/app/search/sidebar/EntityLink.tsx | 2 +- .../app/search/sorting/SearchSortSelect.tsx | 2 +- datahub-web-react/src/conf/Global.ts | 1 - .../src/conf/theme/global-variables.less | 26 +- .../src/graphql-mock/createServer.ts | 12 - datahub-web-react/src/graphql-mock/server.ts | 84 - datahub-web-react/src/index.tsx | 3 +- datahub-web-react/src/react-app-env.d.ts | 1 - datahub-web-react/src/setupProxy.js | 37 - datahub-web-react/src/setupTests.ts | 21 +- .../utils/test-utils/TestPageContainer.tsx | 2 +- datahub-web-react/src/vite-env.d.ts | 2 + datahub-web-react/tsconfig.json | 5 +- datahub-web-react/vite.config.ts | 100 + datahub-web-react/yarn.lock | 8860 +++-------------- smoke-test/tests/cypress/package-lock.json | 2031 ---- .../tests/read_only/test_services_up.py | 2 +- 65 files changed, 1905 insertions(+), 10400 deletions(-) delete mode 100644 datahub-web-react/craco.config.js delete mode 100644 datahub-web-react/datahub-frontend.graphql rename 
datahub-web-react/{public => }/index.html (66%) rename datahub-web-react/public/{ => assets}/favicon.ico (100%) rename datahub-web-react/public/{ => assets}/logo.png (100%) delete mode 100644 datahub-web-react/src/graphql-mock/createServer.ts delete mode 100644 datahub-web-react/src/graphql-mock/server.ts delete mode 100644 datahub-web-react/src/react-app-env.d.ts delete mode 100644 datahub-web-react/src/setupProxy.js create mode 100644 datahub-web-react/src/vite-env.d.ts create mode 100644 datahub-web-react/vite.config.ts delete mode 100644 smoke-test/tests/cypress/package-lock.json diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 96229642244b6..c964352c3e129 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -40,9 +40,6 @@ jobs: python-version: "3.10" cache: "pip" - name: Gradle build (and test) - # there is some race condition in gradle build, which makes gradle never terminate in ~30% of the runs - # running build first without datahub-web-react:yarnBuild and then with it is 100% stable - # datahub-frontend:unzipAssets depends on datahub-web-react:yarnBuild but gradle does not know about it run: | ./gradlew :metadata-io:test - uses: actions/upload-artifact@v3 diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 94692bd3c2336..bd99905a513d6 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -51,8 +51,7 @@ jobs: -x :datahub-web-react:yarnLint \ -x :datahub-web-react:yarnGenerate \ -x :datahub-web-react:yarnInstall \ - -x :datahub-web-react:yarnQuickBuild \ - -x :datahub-web-react:copyAssets \ + -x :datahub-web-react:yarnBuild \ -x :datahub-web-react:distZip \ -x :datahub-web-react:jar - uses: actions/upload-artifact@v3 diff --git a/build.gradle b/build.gradle index bb01a15a7db8d..4680598165d28 100644 --- a/build.gradle +++ b/build.gradle @@ -325,6 +325,10 @@ subprojects { } plugins.withType(JavaPlugin).configureEach { + if (project.name == 'datahub-web-react') { + return + } + dependencies { implementation externalDependency.annotationApi constraints { diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 437c72e6394ea..1174c5c5cfd5d 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -1,5 +1,4 @@ plugins { - id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" id 'scala' id 'com.palantir.docker' id 'org.gradle.playframework' @@ -39,23 +38,6 @@ artifacts { archives myTar } -graphqlCodegen { - // For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md - graphqlSchemaPaths = ["$projectDir/conf/datahub-frontend.graphql".toString()] - outputDir = new File("$projectDir/app/graphql") - packageName = "generated" - generateApis = true - modelValidationAnnotation = "" - customTypesMapping = [ - Long: "Long", - ] -} - -tasks.withType(Checkstyle) { - exclude "**/generated/**" -} - - /* PLAY UPGRADE NOTE Generates the distribution jars under the expected names. 
The playFramework plugin only accepts certain name values diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index 3102c26497fed..6b53a2789e7cc 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -36,11 +36,14 @@ PUT /openapi/*path c HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request) PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request) -# Map static resources from the /public folder to the /assets URL path -GET /assets/*file controllers.Assets.at(path="/public", file) - # Analytics route POST /track controllers.TrackingController.track(request: Request) -# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle +# Known React asset routes +GET /assets/*file controllers.Assets.at(path="/public/assets", file) +GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file) +GET /manifest.json controllers.Assets.at(path="/public", file="manifest.json") +GET /robots.txt controllers.Assets.at(path="/public", file="robots.txt") + +# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle's index.html GET /*path controllers.Application.index(path) diff --git a/datahub-web-react/.env b/datahub-web-react/.env index e5529bbdaa56d..7c02340752104 100644 --- a/datahub-web-react/.env +++ b/datahub-web-react/.env @@ -1,5 +1,3 @@ -PUBLIC_URL=/assets REACT_APP_THEME_CONFIG=theme_light.config.json SKIP_PREFLIGHT_CHECK=true -BUILD_PATH=build/yarn -REACT_APP_PROXY_TARGET=http://localhost:9002 \ No newline at end of file +REACT_APP_PROXY_TARGET=http://localhost:9002 diff --git a/datahub-web-react/.eslintrc.js b/datahub-web-react/.eslintrc.js index 2806942dd1053..e48dfdb23a4e7 100644 --- a/datahub-web-react/.eslintrc.js +++ b/datahub-web-react/.eslintrc.js @@ -5,7 +5,7 @@ module.exports = { 'airbnb-typescript', 'airbnb/hooks', 'plugin:@typescript-eslint/recommended', - 'plugin:jest/recommended', + 'plugin:vitest/recommended', 'prettier', ], plugins: ['@typescript-eslint'], @@ -46,6 +46,7 @@ module.exports = { argsIgnorePattern: '^_', }, ], + 'vitest/prefer-to-be': 'off', }, settings: { react: { diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index 72821d8b97dc0..c0355b935137a 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -19,7 +19,7 @@ node { version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.1' + yarnVersion = '1.22.21' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). if (project.hasProperty('nodeDistBaseUrl')) { @@ -44,10 +44,33 @@ node { */ task yarnInstall(type: YarnTask) { args = ['install'] + + // The node_modules directory can contain built artifacts, so + // it's not really safe to cache it. 
+ outputs.cacheIf { false } + + inputs.files( + file('yarn.lock'), + file('package.json'), + ) + outputs.dir('node_modules') } task yarnGenerate(type: YarnTask, dependsOn: yarnInstall) { args = ['run', 'generate'] + + outputs.cacheIf { true } + + inputs.files( + yarnInstall.inputs.files, + file('codegen.yml'), + project.fileTree(dir: "../datahub-graphql-core/src/main/resources/", include: "*.graphql"), + project.fileTree(dir: "src", include: "**/*.graphql"), + ) + + outputs.files( + project.fileTree(dir: "src", include: "**/*.generated.ts"), + ) } task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -55,7 +78,8 @@ task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { } task yarnTest(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - args = ['run', 'test', '--watchAll', 'false'] + // Explicitly runs in non-watch mode. + args = ['run', 'test', 'run'] } task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -68,13 +92,24 @@ task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { args = ['run', 'lint-fix'] } -task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnTest, yarnLint]) { - args = ['run', 'build'] -} - -task yarnQuickBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { +task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"] args = ['run', 'build'] + + outputs.cacheIf { true } + inputs.files( + file('index.html'), + project.fileTree(dir: "src"), + project.fileTree(dir: "public"), + + yarnInstall.inputs.files, + yarnGenerate.outputs.files, + + file('.env'), + file('vite.config.ts'), + file('tsconfig.json'), + ) + outputs.dir('dist') } task cleanExtraDirs { @@ -82,9 +117,8 @@ task cleanExtraDirs { delete 'dist' delete 'tmp' delete 'just' - delete 'src/types.generated.ts' delete fileTree('../datahub-frontend/public') - delete fileTree(dir: 'src/graphql', include: '*.generated.ts') + delete fileTree(dir: 'src', include: '*.generated.ts') } clean.finalizedBy(cleanExtraDirs) @@ -93,24 +127,16 @@ configurations { } distZip { - dependsOn yarnQuickBuild + dependsOn yarnBuild archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}" from 'dist' } -task copyAssets(dependsOn: distZip) { - doLast { - copy { - from zipTree(distZip.outputs.files.first()) - into "../datahub-frontend/public" - } - } -} - jar { - dependsOn distZip, copyAssets + dependsOn distZip into('public') { from zipTree(distZip.outputs.files.first()) } archiveClassifier = 'assets' } +build.dependsOn jar diff --git a/datahub-web-react/craco.config.js b/datahub-web-react/craco.config.js deleted file mode 100644 index 6ede45902128f..0000000000000 --- a/datahub-web-react/craco.config.js +++ /dev/null @@ -1,75 +0,0 @@ -/* eslint-disable @typescript-eslint/no-var-requires */ -require('dotenv').config(); -const { whenProd } = require('@craco/craco'); -const CracoAntDesignPlugin = require('craco-antd'); -const path = require('path'); -const CopyWebpackPlugin = require('copy-webpack-plugin'); - -// eslint-disable-next-line import/no-dynamic-require -const themeConfig = require(`./src/conf/theme/${process.env.REACT_APP_THEME_CONFIG}`); - -function addLessPrefixToKeys(styles) { - const output = {}; - Object.keys(styles).forEach((key) => { - output[`@${key}`] = styles[key]; - }); - return output; -} - -module.exports = { - webpack: { - configure: { - optimization: whenProd(() => ({ - splitChunks: { - cacheGroups: { - 
vendor: { - test: /[\\/]node_modules[\\/]/, - name: 'vendors', - chunks: 'all', - }, - }, - }, - })), - // Webpack 5 no longer automatically pollyfill core Node.js modules - resolve: { fallback: { fs: false } }, - // Ignore Webpack 5's missing source map warnings from node_modules - ignoreWarnings: [{ module: /node_modules/, message: /source-map-loader/ }], - }, - plugins: { - add: [ - // Self host images by copying them to the build directory - new CopyWebpackPlugin({ - patterns: [{ from: 'src/images', to: 'platforms' }], - }), - // Copy monaco-editor files to the build directory - new CopyWebpackPlugin({ - patterns: [ - { from: 'node_modules/monaco-editor/min/vs/', to: 'monaco-editor/vs' }, - { from: 'node_modules/monaco-editor/min-maps/vs/', to: 'monaco-editor/min-maps/vs' }, - ], - }), - ], - }, - }, - plugins: [ - { - plugin: CracoAntDesignPlugin, - options: { - customizeThemeLessPath: path.join(__dirname, 'src/conf/theme/global-variables.less'), - customizeTheme: addLessPrefixToKeys(themeConfig.styles), - }, - }, - ], - jest: { - configure: { - // Use dist files instead of source files - moduleNameMapper: { - '^d3-interpolate-path': `d3-interpolate-path/build/d3-interpolate-path`, - '^d3-(.*)$': `d3-$1/dist/d3-$1`, - '^lib0/((?!dist).*)$': 'lib0/dist/$1.cjs', - '^y-protocols/(.*)$': 'y-protocols/dist/$1.cjs', - '\\.(css|less)$': '/src/__mocks__/styleMock.js', - }, - }, - }, -}; diff --git a/datahub-web-react/datahub-frontend.graphql b/datahub-web-react/datahub-frontend.graphql deleted file mode 100644 index 6df3c387e14fe..0000000000000 --- a/datahub-web-react/datahub-frontend.graphql +++ /dev/null @@ -1,389 +0,0 @@ -scalar Long - -schema { - query: Query - mutation: Mutation -} - -type Query { - dataset(urn: String!): Dataset - user(urn: String!): CorpUser - search(input: SearchInput!): SearchResults - autoComplete(input: AutoCompleteInput!): AutoCompleteResults - browse(input: BrowseInput!): BrowseResults - browsePaths(input: BrowsePathsInput!): [[String!]!] -} - -type Mutation { - logIn(username: String!, password: String!): CorpUser - updateDataset(input: DatasetUpdateInput!): Dataset -} - -input DatasetUpdateInput { - urn: String! - ownership: OwnershipUpdate -} - -input OwnershipUpdate { - owners: [OwnerUpdate!] -} - -input OwnerUpdate { - # The owner URN, eg urn:li:corpuser:1 - owner: String! - - # The owner role type - type: OwnershipType! -} - -enum OwnershipSourceType { - AUDIT - DATABASE - FILE_SYSTEM - ISSUE_TRACKING_SYSTEM - MANUAL - SERVICE - SOURCE_CONTROL - OTHER -} - -type OwnershipSource { - """ - The type of the source - """ - type: OwnershipSourceType! - - """ - A reference URL for the source - """ - url: String -} - -enum OwnershipType { - """ - A person or group that is in charge of developing the code - """ - DEVELOPER - - """ - A person or group that is owning the data - """ - DATAOWNER - - """ - A person or a group that overseas the operation, e.g. a DBA or SRE. - """ - DELEGATE - - """ - A person, group, or service that produces/generates the data - """ - PRODUCER - - """ - A person, group, or service that consumes the data - """ - CONSUMER - - """ - A person or a group that has direct business interest - """ - STAKEHOLDER -} - -type Owner { - """ - Owner object - """ - owner: CorpUser! - - """ - The type of the ownership - """ - type: OwnershipType - - """ - Source information for the ownership - """ - source: OwnershipSource -} - -type Ownership { - owners: [Owner!] - - lastModified: Long! 
-} - -enum FabricType { - """ - Designates development fabrics - """ - DEV - - """ - Designates early-integration (staging) fabrics - """ - EI - - """ - Designates production fabrics - """ - PROD - - """ - Designates corporation fabrics - """ - CORP -} - -enum PlatformNativeType { - """ - Table - """ - TABLE - - """ - View - """ - VIEW - - """ - Directory in file system - """ - DIRECTORY - - """ - Stream - """ - STREAM - - """ - Bucket in key value store - """ - BUCKET -} - -type PropertyTuple { - key: String! - value: String -} - -type SubTypes { - typeNames: [String!] -} - -type Dataset { - urn: String! - - platform: String! - - name: String! - - origin: FabricType! - - description: String - - uri: String - - platformNativeType: PlatformNativeType - - tags: [String!]! - - properties: [PropertyTuple!] - - createdTime: Long! - - modifiedTime: Long! - - ownership: Ownership - - subTypes: SubTypes -} - -type CorpUserInfo { - active: Boolean! - - displayName: String - - email: String! - - title: String - - manager: CorpUser - - departmentId: Long - - departmentName: String - - firstName: String - - lastName: String - - fullName: String - - countryCode: String -} - -type CorpUserEditableInfo { - aboutMe: String - - teams: [String!] - - skills: [String!] - - pictureLink: String -} - -type CorpUser { - urn: String! - - username: String! - - info: CorpUserInfo - - editableInfo: CorpUserEditableInfo -} - -type CorpGroup implements Entity { - """ - The unique user URN - """ - urn: String! - - """ - GMS Entity Type - """ - type: EntityType! - - """ - group name e.g. wherehows-dev, ask_metadata - """ - name: String - - """ - Information of the corp group - """ - info: CorpGroupInfo -} - - -type CorpGroupInfo { - """ - email of this group - """ - email: String! - - """ - owners of this group - """ - admins: [String!]! - - """ - List of ldap urn in this group. - """ - members: [String!]! - - """ - List of groups in this group. - """ - groups: [String!]! -} - -enum EntityType { - DATASET - USER - DATA_FLOW - DATA_JOB - CORP_USER - CORP_GROUP -} - -# Search Input -input SearchInput { - type: EntityType! - query: String! - start: Int - count: Int - filters: [FacetFilterInput!] -} - -input FacetFilterInput { - field: String! # Facet Field Name - value: String! # Facet Value -} - -# Search Output -type SearchResults { - start: Int! - count: Int! - total: Int! - elements: [SearchResult!]! - facets: [FacetMetadata!] -} - -union SearchResult = Dataset | CorpUser - -type FacetMetadata { - field: String! - aggregations: [AggregationMetadata!]! -} - -type AggregationMetadata { - value: String! - count: Long! -} - -# Autocomplete Input -input AutoCompleteInput { - type: EntityType! - query: String! - field: String # Field name - limit: Int - filters: [FacetFilterInput!] -} - -# Autocomplete Output -type AutoCompleteResults { - query: String! - suggestions: [String!]! -} - -# Browse Inputs -input BrowseInput { - type: EntityType! - path: [String!] - start: Int - count: Int - filters: [FacetFilterInput!] -} - -# Browse Output -type BrowseResults { - entities: [BrowseResultEntity!]! - start: Int! - count: Int! - total: Int! - metadata: BrowseResultMetadata! -} - -type BrowseResultEntity { - name: String! - urn: String! -} - -type BrowseResultMetadata { - path: [String!] - groups: [BrowseResultGroup!]! - totalNumEntities: Long! -} - -type BrowseResultGroup { - name: String! - count: Long! -} - -# Browse Paths Input -input BrowsePathsInput { - type: EntityType! - urn: String! 
-} diff --git a/datahub-web-react/public/index.html b/datahub-web-react/index.html similarity index 66% rename from datahub-web-react/public/index.html rename to datahub-web-react/index.html index ead3a0aba82cb..9490881246e12 100644 --- a/datahub-web-react/public/index.html +++ b/datahub-web-react/index.html @@ -2,7 +2,7 @@ - + @@ -10,21 +10,13 @@ manifest.json provides metadata used when your web app is installed on a user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/ --> - - + DataHub
[GIT binary patch: base85-encoded binary data omitted]
z;mRB0A%L6`jEP>E7=sW%2{%Yupot*eJphJsY)cQj-R*XK#(2gv=lAzt@9x_Mh$fwD z{l0U~=NZo!{~l`(105x4L)##oV*Ge0O@LS5`*P^FMiF>cKiRf;)TG^4M^X z&yT&TJF9 zXaCVt{I)nftv6^!$Be8syw>Hv|Bc`L#;^GJhkoAQ=J?0C;O7_o+dp2vO5}6;ri)0dH+%{Rf4tx9VF9I@*qvi+$I(Av)R3ig7bfc<%h!sQv{d! zbeX1NQE|rPE#!m(B1e#yOM^UX#Ct$c7zBQRp12t%1YwMgPcFj6N0T<7qN@5wR6#d; zSW_K`k=C_6)<0=O@DOb3=BiM`_CFeK=4z-k)*;@ak`W`UJ(zUQYAQW(NiHjIb?HM4 zKBN63Q*3YiL4myUifMc%j<5H%9Hi=RVIsJ7kqDku zM(ao1ll>RKEW&W++H=7M9_@2PeUA7)=EF33Z^#X%`lP3ARs+Mom$_(N&4FmIk2N3w zpM$f9Ay6noSmQB>i=25{C+&LaMG zwaIwFk88&y6S(+i5uRz~4(lh(`2oQnc>JaQ*IG6^dMfMrqcHNvoAMj;YTk3W8#C#D z?vugdQO^EvTgsJ-8b6mQwueH&6qiPKH;MT_(L<6l3yPGOkIoAUxe003Aq%I*IGO-v z!!CC6te{{wZ&pL6#>iZtFdu4y`-UpmwKX0FazLqz1!F^(LcA&1hl4&Cdz)g*5&LIv zNNR?)LGz#iZN_ZaI~amouWI{_7uSJhOIe80o+FByKo^p1S%NnY4+@D^4gpZG-j4qi zB|+XFR8B46av5t-a3TYjny^;DCBtVf(Zv(E!D`iU?qzbuerSKRR3(^u1><#?i1C5u zJLM~F6#iw@H5?37Gsi1|_%`3w|3lP@Xihy;D;_W2qctBwc*V?UFY!N`%Mnbj@u_0h z*pkXLT?=D{xuoEb&%Nb3}PKF`yTp)oi_F z%9;Cb$Nt5Ozx&sJ)7#hl{=XW!k9p;vTz=8N_m%pmm;5rr_J;!uSPP?Fvr6*vuYfU1 za#rLA!xV?qWyj32C4w0B_y1!b@3#-{Ppq4O=jCXO)5^yzIDvX@JYNL%`M(JiJ^s^w z-X(JuRKkU38)L_YPY1~cV_erJ#*3m(_~$?Z#T)@sxx5M zn2F*&5Z1{qXMl;K^YBxdR!u$B?h6?TD4cOWtb|Ba)w4w=p~>X{?YFE17*Q&{xxAS- zm-~02(XJMuBNKM-7eN&W1;>?WruX`fe_hK{ux9#wgdw;|MEzd>=DML;gWHGI8a(vq zdSjc^fr%Hq^E%^5CvMyjq-Lbwy5utb;gFiNR zp1U`M>EFJvf|q)jx%ke@*M)Gc=?pooGJOBK!s^apZW3rxYVE3p8$66 zhoBC3&hvXY;27nPi9u;jZRT>BA+2s`SbZibhvD)MA6obOcY&=wfmB1$|BN@bOd*vu z2I)BVZ^3mTJm(8aYRr{vZ-Qaql7kcMx&}Ac`>^uH0oXC#&S{v!H9+i97|r%VXyTqn zqc^NM%MW%Rb`#WbxVy^%L(DKROI_j0P`}!MU7cpGSY%W*-r9fVQ#*Z_TzvGY(HvoW zQy^!Se4>jyeyAS~&+EDMA-}zVD{Ol*EcYK3hAnEQ>-JB)p6_*I)=?q*YJw{ljna{G zFx&g|sHwPO=EMgdFX5Z}q55jCCIF3S#4qa`7sTl$4#u;R(JyMa0$Luj<v z#C_i0<7fJBkESTlHQB^V@Hn#)2c?DeN8A0c5(Go0JYMQ?JqvQymw)0@p6J|Vk&6 zuQ#7RQsl=|yuRaKwc%AY1aPV5XZ^EZ_Vy#baDOU-#7B-#f9-eXoO{-nW!JR5gmcM5O~iB5(KCpNY6I@DN5m%D`Iap#+i|!I)fa; zel+hxLhz-TU^tWGDMl{;6{da$c!sq`-dtMS|JDEcA0BYQPQd+_aq7*T6G?xAC2rxc zT1)*i!|Bm!_ILmH+BB`vrk?KEe6{Hfxba9Ft(mQ_wLQ_llcb*n;Hm*-p8qjTV21g$ zzC909bd|H#7%(SmQRPTkm|z%LXIXDId)cjkuG4}1@ZS^Roc3gPtvldzo8#9S z9HB&7@;v`}Xct>v4xCzDmN2>2_X{F>$CM6&x{P~Yf2}tH89=V}MRFCqs*Tfr=9}YQ zkL;ODF?>T;{HowT1~Ru=_ zUkQkCFNV|i244G0ZmM01hn(%419kO3K=RaswdXPFb^Xt5+|*kWj%(cgkAX-3K;g|j zA(cZ}uIdoi`U9*9l%NlW%j;kGtG6tx6=EM#{fA*lAVlq)6Kg)L)tPfrz2&vOY=o`c zlW^Ir(Po(UswAFs#lHZ?>7sT05GFy&JyMg9<9Q&29&g|Gy`Ovg?|#iUyzwV)e=y?< zKJ|&WfALp-_1jPV$N4|C;lrDknWcKp*beMtAXhG`ZbwP`*KFK0`4j(vFMj){e`$VO z@W=7^HNP+4K+YMd3PN4-{G|UxxaO>y3-t2uKqx-AH_n~@dk)-z(|{I(Zn5xv)qRTwj6UPLUF{pZZrk{tpCgKH^_-*S-+6zQnFUQI)L8Pyaw# zJ&3bk9x}mgy$ITWvbj!)HuLao15x41i?GlCHb4-9>ZZgMXUR%0`_Gc|$cU!o5OFA+kXg*ODpl|a7{RyOV2Mlim8b8?x(GKV!L?+EOL8( z_TRca9uc(O<$8!#E0(r@Q``U!78ReQm!|+CdqiLDDnOT>$6=}FT z%{I-U7m15_JrP#Kj&If}F+n%Rfu5-2sU|4*BnFT^wRec%4ClkpsIe}-2uze3dvTt9 z{14ksRQd}6d42FOuF*7O>?OaoHZ6h={g2P9K1I)XI1`(L%Q=jKz0?f0R!9JfiOomf zM5$SVcr*0<{y~N}HF+~VeI^5NFcYvQFdEDbWj10ixhHHW zd9VMl&V3WZ-G605W`g)!cB(b@8PPgIMttDkU{d1p$eB`ZtsOh~K@ymQ$`mU@X<6bXXQvI^7<|M?@saY$ zdor#G(zO#^4ysY=m*85Tcx%$1up)Cw=c%{GteKW|Vl+|zXHE~Y7~b*adM;?_bAyu)#{97+=Sn2 z1cob1=d>Jb@1K>7W#v&{Agw_aBXhmrtv%)1;bIs~eeUh=`G@9`9d-uYy8pQ`>k((;hR6>UeZ5Wr~kR_qob5P_24pd4yj7s)hgVxQl6G5 zeZBt*59Q%%|E=%r>cA+=eH`Lqn%@5t-H9;H!MgTgq=G(7r_GwCkzqUWF5%5u*WovW zF0Zi<`4sZTwvKTV6n;I-Iy4+&$?~QOa1J+A$DW{uUw-cX0ao$&N3a?tVBZI^eG(fF zWY-trJTM?gK-aYOofw&y{=r&)Jj8zq?*0ReQu@Ezw8Bsfk zGN}P3Z=LCH)9AGU${zi{@@5~1ypK%Fp|bFj-_J{AlA6NNy8Hj$@BaR`-~1~+{r2fU z{_*~BQ#5n^r+&#-y!~sx@h850)ld0i+ZeY>YK1UHN18>v5Dt>qmwx$=c>DSP=HL1D zFa6rDe*2MMqTfgVF~?`W=lkD&&+nK&>09EdE&Ix#4JMcM1^g~`9f6x*&J19&`&=%; 
zl{88O3i<1Ai?;gNKb{sM!>m8?i+=k5q?H*mO(#4oEaM4ioo3ylJe6cr{rl22E<5Ev(>{l{2yc>d9Kd|O9(M15+v~h!n`SbPv^f&%# zN~2WkpZMs%c9c|Zn#*;0tv9Cq-2_%EH3>%0tjd#h3pMWnvIYn?@oME^o4i0i3q#Kr zWM!xP^Y?$w3MU_qoYTYu_J&jGn3c|3)AwCjc7tj);@>GRfV-Oln!2?KBMVa;V8Fh&N)diuZ5YCS7&2xp;{dz(H* zuraUi`1p8eA=~D>?nSJj8ZE^mnuXTelW;4u<>{Xil3@9J8vp9A|4DCO{Zl{Q5AKz! z^Vj|1Z+rXS^*2Sn{*Qf!{=NwM&3tGS^}pjMe$m^{{CPk6?Pve|FaOv->rqDL-}5`a zw2$(d@i1dtUTwE23EEavi3!UTaS zjz8|7p6lUtnR&sg;(P;vK4v21?FW~TMT(@ccr_`Jm|%{?U`R2JgX~qf29;}#c}gXf z)jfdJ`&wY?>*;s_6by*FzhHAfrpDi(z0l>xKm3!>XiG?>+EM3vDTY3-!}8j_jmo3G zn;x$8T#n`t*S|S88eFyGpNgy+@7(h@Cj#$`t^UCOcpB(REq)}0pTAU5$4-UREjiimYp z6Vle&rJ147^Z%TDD?|o7M_u{<=Xrl$k;CutQ_|C&PB?5Xi+CUG+ zJ69iGp?$+0PEHK{LeYlbdtlS(JN6H`litF7dCGB}Z!|~!_@$1K2e#|X=iJ7B11fU= zt}C_}_8~nM+v{MBNbF#9!(Hw2aL~~0tqh!Z&Xk#cJncF-IGXH9PDTa!BD{>VIJhHG z?9m+N7W@9=C)H#%R=&NS@Fw@HUi-fY29xUHSpUb9Gspg%Tke0Jt??z`KCf#vWFQ&jy|tPz)|zDG`FR$G6cgHl)f+QqWxyk74(VcwLSn6f3?x{u&uNI&fxcd=MHGS?;!t9;Zm0A~w~U_mH4aNzWr@p*|*=q#Atu zhdZn(Q{hL)-P_zyCxo@0&t2}yZ$`s5+vl`&T)4vZ3lj1J=@F2b%hR=Ag)>67{Brf^ z*gvR(q2l-Zxi%49(+@_Q>!X`)6YhHlBC#gria1hz&=SHpSHV}`#E(t=Bx#?PKJ^s1 zyi7U1M%M#74=;Ez6V={c(*cYPuObguA8v=hl%4M#?06ES48A)mHcvoLLOpZpr@FjQ zPNIiUg3onw7|YS5!%`7wtmG72VZ+)uTwmqabp-q-W^qyVl22v)NFq<)Z#%sL&AHFE zAp9NQRk>f2p52RvgMQyCj)ii#418}C+nHCHT?w#5y9hoGziq}SAd%FD_+TWcWEV^{ z*=DIeN(WtDs#pB=AEo!*e1{Qpwq3}z8etJLT;OC^WC^=r2ftyyWrjQLqD+sIIrUBd zaP@m}wc_`|a$@Ck7rG6x2D8j&o$#s#5Yz8#eHM z{uxIi=a7DKV_C&dVd{g|>de~AuQG&603)Bqr&D^ITHm8IH$;=1C)j z1bGxt8%NZsw4ADx?}?j9nu?#=F@GAIb*~O@7WVu$p$`xe9%;Q$HnzzEV%^Ps^5b80hXB zgnu`1a=obYuPWF#U{NGXK4gd>4B|1^z({IuAF#kjf!R^7Vx14N3MM&*!y^4?9nBRdw&UW)&q$&)Xh8 zvj$;2VHWz8@Z7#xUXW5_T6IY$G zND)0Tw2}5TlA?2Mq20X0zR?0>b|fWVLZA!+a1P0CpF}>dPIB<#y_ET@k=3iy-;l<8 zHrO{J59|C|4Ll5rj1zSp-^$Lr(H1k$D7_|!3qzA?+HawTAn9ma@%Gef`xVc1y87e& zOm_Rf#-Vd0f_Q7q0}h;s=gLLC#Vf;zlVLNTQ)^(}=%)_B zXr(8Tdn1qvU|I59;oxNu^t=Lk7UO-lXm^vadVTpF`mz1>mu?iFmykSFee~ruZ(&TI zisR`1k)(M^dZ$sP9D6~kx1ArpGZlt?8*Ye|d={eOPkP=_&=7@1$vwwwQ>!Ycrm<-2 z7MSs0$5$z+jEJzOS$!-YiH!>dV=2$5Imrn4_;!l|4HbPXgJdE)bq|6buii&w4%S-j zmku0+p8qt33doj}!0?mlQ>9uNcM>RU_=yxt7gC0_TPBJ{>z~)YWaRH2r!=?0Qz5Td z%aInxPI4L2nHq-EM!68(-{foxV+;KdUVZqiZHs2tU@x`_We#6{e35)<7td3BHb1F( zBGFLs!Zbek*g&X9MQ~tk7NGv2Db)!0yMaf0PN*u2PgC{R-<5jujV;r3U9?{oKw0_~ zPBW4!BXpovMVXgQ586YV_y4p9*v(O|1S@OG^Glv9Fx_0m(X{G2MJI5=Xj0Q&?q0qX zJ#BT$gaUTmiRpIvDMCv?6?N$ay{g|e0Q+<=8cadIAcd^GGkpi|LH8i&`e8k~#Ln~1 z7R6C_X#hE`cKc1N`)2HhZOTL#p5P=5Pmc8Ba=(RpkAM45claBHlf8na$8T0VH@gNcNNPRtO%)kD8$A6lWIyS6x zcNx~3u*g*aN0h*!Gsqb>J`85h99%(9%;ddbABysHKx2*i2;%B zWSSv}NtJ|A6WVEZ%fr98qy=Z4Ybi(EYg(RqK?;Cap@ORbN|S0H`#eRDTcO07l7iA~ zvE+*5`6?5s#U(y(VQLp<1A=0Th1mQQ8EN2GSvYFm@BJJ1xZKx%K z3vzVh6BFRE?tgO7Y(21D?;GLUKV$Ch=~(r>U?r@-jpz98Slk1+|1_R!Tj^Dm+Xz=3 zV57W2&fWGn0jMJ$|D3BEEMr!4#WaQs!W-$Xtx=ixYc5McgUh1+v!3s56tiL5*&Lis zM8V@<{oub6*VJ4DA`RIlB0T$ozV4Z_1c^)JDf|5HTv68%4~F||1jm7`I{DLOoViC5 z>YUtHkdn9!u-aXb##?4^Q*U3+66rv);HAYG{fs<3r!b^AxPApk=A}W9pK%;@t3C=; zagEfNN^rdK;Il-0p)rSgmd@_fhD8yoW~9W-3;{0sLriNZoa)mnGs&0xcd!WS)mDtL z8tUnPZ(PU1oKX~?u=pf0+e4FDh1k6(0<4g~(iSNUy(Wv>5K+c`OL?JYyswa=kB`vh zIJP>X|MXliqIpnn#muzaya$qe_75{}OeK#F!68=%P33R)5s%6IiRX8PUL~-l+z@FKQXFNC}Jn>mrMR_AW;mW_jKU&zCXMvA5ZN_C!1ew`!RT_(|BEw-~O%(rK1$+?SMT zEESfa5f1Rs%me}FAHeM^2TIp`S@7;ws9_6^kX;@>mLHbAksW4(S7dIi_brDjj}xmn zc!TJ;k%jl43$Ki|qPMUkS$8qxyD@amLayIcioA48@{nEE%Frf)J@4E1(><6R&kk8u#JP(EOZ|+31wxdvtyK8N3~S$hFIbhi4SjisBhqPg@n`C8d->0E)+&RHKo{t7n@nkMSBQq0n zkBOEfRUI}>8wqT;Ycd|pi>?`_*}F_MGsb%B@$=odquJ)4*!1+pwvkbziveCucBQZ6 zc449JAlE%Y_v6bbI@0~>M6Op_PkuD7x2O3$$ucGh2~PRV;xL+n$~@0NuOhi%I#ag$ 
zPU@t>XfN*6Y+V4^BX;Qu^CXLsW5+Pt(x1fj(v!%6N*WF}OVUu{3goJ=8rN{J7FNC@ zDk>m*qxcXpf^+RDY@=C(eoYh3mHE*C*&o>V{(CBcp@E!GjA)|{e<;azy)qoNlZ*!q`! z;nas$eWy^1=ZGdTRf~R>QtN48DQQZUd*@24WDt7~8m-wj=*n(zCuz=pVA~PH;jncR zv*SZoiPALv$(Y>st7CAIkhDxm)6!0Mx|j;kNjBdey=t9DzkG(vmU(sfmgW8;5B#cj z$s)1JF4e?+!CK%_JusH!vV~V=K9TUP$LDkA+)-Iwo{H><>imyt)dMsJu7m8fqcAIBfo2OOm>4X?(q*&{)OqbjelgbJ=93B=pJTY93K?%E<5z1~>CazW@x5)h)xB_J zGFCEfIhjfczN|!wCy8s`D-47JcC4Ox+fSvv1uLnP`dI|u zt@!*W3-Ay9O-W58y-UN)wE;?q(H{i-ax`?c|2B zd@OBWpYw0sZgGeh%~46cliqlvLzk;D{On?n$H+uU6g!^wmvBtd2c}I&&OE)TyV22k z?(eYgx0w%S31RC%O+C7bY^Q*0ZJ1cpsI;hFi=YY98Ou?(OH*jN=VlN1hL*9468UW8 z6w{;uk&rgUikt{qFN28jw3^Cp)WP=~!X_=$RDTU=qaO?;TZKTkZBGLsUGEehKW z>qElHbIc+wCiGJ)IKM@E%xHXvVHK2P4bf21qiDqlxl%U5b=3{c=#&go;P z<%2YbM(l{M3E0LK+N87t+2wYSkZ<>8EWJ%3?X?xz`k5n{ui|9ecF5d_w>5#Gz`CSZ zJCaSwKh~?-aGF^Qum>jHr6h_hhZ+Cyt7uU$cYRU~9b;DarW3NM`b6xOqB=pXO?O&2 z`OeOxS8Q<_!6dvt@y+=;$J^FXBJsD6t;gQCq?qdoOsH!_d~CG$VK-GG)72wCXgU>t zmr_Ev%?7pag$BRDRt!8JV}GkBUx+_BjUcPV$WVpikx;Zi$`IrM)+#BjRM8Asf-_;0 ze2-zrt7kZDw4XJJf!^Sq8@;c|+_Wlh{#qGzf8i1z*Rq8^AUh*V}Xq} zHOyD3XTh_lY8%|cqe|35wC=evbnXtP&q)#KxIUc5+PF@)V-=LmzSoU+@)Tl$?sMkS zU6x!MdtLa+Us1#wo!italX+unk^WMdhnZhDIvb)pUMm9_a%`wM1$Ky?R+JqcXI5Pn z*D0lix9uGr))myL%xucBw(#zd%wnrR&uyjt6Q%p8b>5{XT{(yp?KZ zYVdq*V=X~#O_fK&p7}{ z<#zoUjT zvI^Il3Uf9m2JNLk^ka|esM4J2&n1Z11^J<}oTdL{nPy;jpPkbw#1PS0-&Pa1@z-M6 zhJptAZjVp!JbgZ}d(MX{=5onvUtis6FE?WK8dR;L(jpQ4{A=HyWMfarCmW;yJVu(U zaD`XHe<1R3^=<6V(o#tp6HGrD0?fa9UOTPico!3bn#U{&KNTr3xI1p_1dM=PkU4Ya z51)xBV|@sig!F14Q#m>D}6fy?CHBD9Y!xtIM| zL7%*B?ovAfO?He@HBJb3%cC*H5)synF@OgVw?uT3;KGeO5TmAuh!-bKY%Km4c28#f z(;eaAii%oaR%|XCaeyXfLrIE_v$MA8u_B?boR}hnSQ%(0|9A|*fD85E!_e~~ zjHdCQm&fV4RI5XO;Q-bbhskh)HQJ;@_SNr(#9G|lPtuv-Iig9tnLjmsJ`C^^KCc z=FHjlv^#Gnea5tzgfQtzpMEf>erh^U`AAN8rY&B@hJRl6!baS9un&4#2$cBmQ@>bk z$1YF1DCN5*je1gPgb`A)zGpyk|0T*#Wvpgl#bz#8p=vTB<$NKCVy0P`CNu_ezpE}j z*)Uin`Jm^Dd6_F!8F6)-l~oAke2zwWq-nDdtp5plXfvx-Sb3Sgh#g5|kOX7mgvf@z zWl2#ny~ehe$}4+PS_s7|T#6?^t{Aj4}W7#?{w#{Hlukr4>0zvm6F zqH0=GJy+2sO#9sDv1u$gvMIdwfZ&5aNY8n)8VNkpZ|YXYYcbHXz&{uckRGaQHc!$#SXEsi z-7@8T&z7_VzUS;(HwIjI!uSrhRdYL=^p-}J5&KZmsJXv`ia^W2$vGghkj2+0-6~uK zLPP=$Qw2-Bxub-oI0`agsJ_{JsG5(HnFEWb>lh%WW1#y;$H*H|>vPpRN58LmA^NvQ zdbn)KJhvd_UX4}mFv=%9AQx}#m-9~FeSU3X0q^iKB~%_zyv#T5`#sW(UC{y4$6|z6 zP5*Y~%!3m8(;l4p8=_MDnJ5Z}Ter;|xgk;i;RM4}BtFq-ld@4(@UXO3wgUYG0cT3O zP72x3o`tx5&SB;}uSnP)8>+C$!4~(9CdI!*bc_eX2%(bP`Iq6H! z%y4K$DPQEPmuGX;6&*AfuF)BJ35Wnek!Pts$tRz8C46`%G&1T6fw+3;&O%<6#Aad2 z`&JqS_Xt}5hYs9P((iqP?w_z!=_)JKu<3gvPgC?Nj37-7mt*N(oMi%slWT?r6)m}& zsh#zhbrmCZ%}#RtJ2UO5(&Vi&AKdq|;|>3c4Oq{5*`yFL51nfVRn=|MuX5YU9gqeU zv;KR3`2sTSS5OpAV>>Fq-sTDA%kcAic-m8qQt)C)t+~4H_7_cJca^4T!X@N@XjmEb z|9`N+_O|k2+miZQH9Kigs1|k!k3h&*E z0EVQEg~jtLz1)TpZN3BZ(LQTq_&jHj#~lnkR9k+Erif(edCY&a5@z?B-PQ(t}^Wy1sUFL?<$GSdzB@%KpqJh7A>~MsTKM`oIrXsjb-jP zFd`Ue(kRU5H52n#-%;vW+^i|Tk-bHi&G0^Unnk~4R%E&V{pB&XfGn@IokpC8#lb2m zp+|xOe=&yA^FyS;tJI!4+@x6bJeTh=s>kIF?!xNjAw@=a<_~mUA=nD>)B4Mg$QUl} zJs7!q82A5+64UZTOz#>I@vym@DnYib}e0ji^Q3f#kd`Q?Tspku^epW@PQ zFLd-ho=|}s_*9Z$(2WwBa961o6C@Yaez_{?vm+sla}qPe#}`|9@~^ei|ND|9^CT$*=_CsckI4eeabLF*`#WcKm|2 z_=(?G=X^IQ>o11a*GHuWN^2r~!hF%812(d&zAM>UuxoW2GXyR7W(V)A zq{s^R;)N1MHkLoBwJba_#Dz0|++ZIO4N4$SBYtq_XWp`!_CA*Pkn!3kTPdT+e#P-z z>9E~e=$iXyU>UiYiNet17b80aH8no*3IsZhYm#RRh0FuQsfD#_{xy$jEmF7(KU-<0 z3F{YGDe;i({Z3%hzOlL$A>D7i1K|pP8gUMf`!IcD4hVuFvo6SVts_fsX;O@OQSY6n zvSP&kj#t80yho9;m!?@J*u0lqesdU4H@gk!4Xj#;TX`nZ%}F?kovflYF}2dh#y%N? 
zCaOj@L#gmC&cY~;J$!cpr=6cpUpxhJ8)-v^nItB{pgS#(dH@S$oIlyT%ZUohshc0I z&w|88iI>cO@ZEgoIN9BKNDPcHGz&i&j~KIh>i_j|mrSc5GD~d5z450tVlEzYl=ob8 zcaqd0!;E{v+1X`)@AYv4rovCOQsvSYf1$T#iEns?`A)QM!Gkd*w z8g=uSB*g`@Olfz(NA^_v5jA5Lg-1pe9~{a}V81h#@i(YB3$OQui&b%Oe~wa26N%8h zG0zgdNNMdXo!OxeH>11a^Y@yAzR@H4EOD<^u-qj;kb$yaP!Ma8D9ck3sF4lIpuSiO zKl~lS(P<9snXc%vH;oO=Fghcq+E0;%jRbYEA%`7uD+o#TYs&OP(`7Df7n`=U_f^wl zVqH6N{lT}iXI0TIV1hah!X!awN-la3HE2l4VKanYAdA0%e6`XxY}vWY4zT{_i4nbK zZ;9{`o@dKX-7lc(i2rz^LrGDao&^uTN~{h=7^ zYw*Rb8;V=(cC2+z$&UqHTJVDI*RG;S?s@)^JAbR<(4mo;B%yNX?s{f}y*S^_=BxB4 z%FjOCQSx0LPZw)f81eoRo${Zu+Q)B2WVZBTzG-Gt16Ldjb3eK(R5oEQ58siH$jhi+ z4G^G_L8J_-J_E-KgZ`Z)(odkC~6NUK!y z(7MyH3aYT~JH3jLb_!3?C^5z7V|(j4ua8#WTo5F%tpWkzvLPO9HujBfQBq%{uMs>_|TjEd8VHy}IJJpVYm%-N{Gry)} zbscUhrhRflDLW)M4lSgm?sAWi+l^An-Vd_(!Hd2v`%Ce%f=s-t8$^1 zYfPkE!$WX)Z8(Z6__dt`e<6F`ke;F`(W*iVRj#AiU459~Zt}6?v2M>*M$bfm|cl-6{43r{($L9+~+H#|p|`wZ<97 z5rQ3xHrTqAE*E^t}Fkm z<7rUF^;JN*gBt*P^U|@ne$h$XCPw=LoffaUZj;RYNqUTGfuSR}UjZ;g9PZ*SZ2af> zS48$n2^^u{*aal_$lhIf<~{F6UrOGXG^?vPkpgTfhq&8n*L0msKT%_%X{0T4z%BKY zQjWiZa5KgUzgI){>WR8If^K)zHWNGiuVe4f-LlWbA7;<@Hm@}@j>w~;&T)F#)`$p- zL(e|x4)uRraPGNG53vxlFb5&|=NX)R7;Jk6L5FJgO0g&fz8bVUZ!WGT5VCK4s1aR6 z_`9Nw%~=G+3hf?fRHksCW>Es-r`;{RlfmwqTo_1(hc%|ICF1$=8=c?)k^F1FPx_vm zk0Cy45-n}BcgY2nRx_KpGmAiaH*vbl&4@K7)$hXh+KyKc++vPdoDw- zR=9*E98bhz$67vI#pihL`nwrqO0aHzxA_J09(t>#&{l?MZ$Z+PQ3^^MQR1)n^P%EW zui9^Dy-FnAN@}IXjNDn$Gu*+lW2j$aBC`e8_dFNZp5{GnR+E)dlH+q+UX8?2L#fX! z-O1e!u9-&vSy^r`y&apQ!ho|(Apgny?e%U-HoYXc92rzB!8K;Wp>MD zHt~Z&(Cnm_wA@PYj(_%yg}JVvD)c~amEPM^%gOBXjA0c1xnl}iucG0ODA3;Q_x2om z`ERm~-MSz5?ay9~+hb8-zJDX0P8%0?NDK}u8?FydpfD9Xj+)A2LodLF-M6933u&x}$tOH*k}HmA$9GQ=-y5ty8yjkI-LMJ?W1S?w@wg zAX%b*dMkJG9{9hKnfOkxH3w#N63an6{pyNriaS_Jz$^v@)&#Y|lGDCksf&o{m5tJz zE3=0&QyST433#|rZ{(b;wOH1>WNfy(LS}~kQ&Z~wGqbaWd#&F zXs5!?#r?$HsT&e0YSvSP$6QB4JHC&4$T8iXAQWAyLxPawwITYwqkUh^2&a}5K?+@Bt7PIiQA;n`rucq~^WWo3weD}F@v>idAo^)&#d%v62?@JgZ zsJib`5^)5w$iuUQXohAybqi2nO|S&Zf=Lm zeeuDbq^Oq(>s-+w0||>~-EB$A;6Ub7Rj5S)v_*|!AIaZLfJY7#c7eLp!Bi`>vKx&0 zkRWoN)c^`0i8;T^zk;jd!Lbj#dTf}szze%l!N9_D(HHP&;KU@0B_B;!j{?4pCcLJ# zK#FMRY675=H~k&|ckp@bUw1{KptP~y)+2?$+vK-jZBnGawy)Sc#|j+(rsOvMACupz zDHw&7f`h=0_CA)xFb`tLoADBWA|MNnVMfv7Th$wiK)OJ_KhzfMCvm`xb|D z`{Zo2(^}u8Y8AcqN(!H%Pqb|#_J_QiWGnQrS)IE!C&cAtYWF}C_b4oAJ@;MPmo7p> z^v9nhZmss8qg2|l9~AQVxy9TTSScB_NiH`vM|but;t^*{I@J>W`opi0`$)FB8Cv@% zWQO1ipIohlfs!T#Z@-u|5<~i0r#|oM(lQ2l2$oLzx+3NegF;A5;>uXJa>V4$={YOTFEo%vfxzE1HcCoKs zR2H#z8B^7EZysFu-wt0wIs)DK9V-triOl2P5qUtpL=ZCCunsbV^goxRT9w}ugta5B zlnoL!%LFl-Dhr|fR2g5IsmD_#$Z+c8)>=LS)9;10;~m#;oqfE&ygf?7#jINYI4g#W zAE?j9_PpX+$p8G*Y)eZ;-oiPRUar+8DvL#4@ALwM5$J?p@O6#@doGXH3oG&W@N|JF zkFwpvtoTKD;{!glo@ejptL7Wqx4HgQ23pt7j#DfXW#*h7=id;0jyY^E)Lxs%*Zw`- zCVlrrFuJ_IO~Bg_A0c`lGMuf5Ay1RmeR=@>5o$~+5F8>g)H83H5rJfIEg~KHpUFM~ zS2Z z&Ad%6Fr$Ze)2!S>aT-c%R`1~DyfXGzP#37X!d&1uOl1{1A_6eb92|Z97y4ZFIGxji zNnEbbHOdl{Ggj|XfhJtN!-OZa-(l*|y)TaxJ}s@ZVnKcoLzR-eu`dP94|P05lqH(q z(G#j|%Z7ea!BSiiomwrJu*Ui&ehb|mk@z2%moZ&uYG37w{b)rX|FC)bSfWqPzPpOr zf-Vy&x7?saCnhIQ$ZvkU|K0D~@185&7Js{2o^MsD5+~3v$puJjK68pUWYg2`3jXV# zpalL`wu(N@2~S*N%FHK1T^BQGw^{_mp1nL?wqSw^^mno*pG~h@BI8o{(n4o$j9-$g z_g-HZITZ=|C5yjGhrM`mdDPzU#EE74>%iBd0=hst;ZPQlN_FX3&ok1zmo&#Ctw@@o zI zFaQ(K?bDjup(?+QS#i{F8l~W3TDxMzWX z@-)77s%DB-1i#(wo$NPzs;|MenTyV^4$x$Or+b4d@j@FK4}M+bY_P4HiP;@@2J0LdpOxWC$tmNfA$MK&-CsA$9ox$wl(`wcXfH7Su_nM-7j91U0oVD zO`E?s2=7~0CBa-Z=P%VM@cnXv4lY=3f(=DFw?BTq6WoP>G8Fz1MONQ_FJg?*irjyQ z0sBNlYvqczUe-_SNn#s)rCaR?BR@W~{Y9{KBt5e^Ct`$kykY5g{s^~En5)InpgO}R zean;a;n_`SyoD*5f_98X^KOY;E}jEhy_a7A?dW2U~qw zbP{)=-9Gqg)tbSFU|Y($&M@sO(d}j@Dv 
zyK-=(<)fef%*ej~4Khw=HEb`#4O{lxRN`TZFD}(sXQ-UPN{izjTab49jS<@M7Wy>Z#Q}mPP+(8a6ijr-#3MU_WBm^!M(w#mLG#p)c*Yp|#LxD@qvrH&7`_M6MgA?keRVqXmyA^Yz$| z(F37~3rL>!C{7mT0X6dnqcO=J`jA$XYNh7gF)@kgRzCCPg=Ft58>uD3+o%7CKmitC z9B(zt5}3nm&*(EBJn{L)mrJ$(+E{MN+K~CGH8SI9vS<+P!RNoXskvm|#Kcb#CJ}W% z3;*-PPqssbBtpy!(v?1Rs103zQjXsmN6LfXvvWY z5cD{FazJPCzkQm=PhK+aWJg1+UwKuO$2fYg8Q`r?$v><@f`$==NWRD+(_?8I_A_7C zr=VHhjjTFzKew&c*ToInsR0<30bAW^3oVwPjZYP8B>X$I=Gcg8e~MUeIpe?5?qrI0 z;&3t+c~Oq5T#Mqv2q?6|8o-G#`6Q~uhN*-!|{+V!W2Q!KFqRpAE`f56vWC?MVc4KVZPC-cP zmO+5{6G=Vz%_-IG(UDipn56msxq0^b-KLkw75}=J7D;euiO<;C@Gwe92TVQkn(C^x z8pbs^in=;C(X~o1s2al=OMVP7|L2Z=@!K4sxedUfkgfQA6B4!lJ`AOE@!{H$`H@bc z4i85$5e|6~_f2nm%#-4HK5RqC7mIU_zR)@Ds&>2|a5DqOwZUT-Z~v@mBq!~XMVvv# zK-vsGj@(N4fi5_@@Mm&rO~4#A8hT+Vi#29ZA|){ja;hyBK@2u6>gcEkdqatcataU zLg(`Qz5(xLVxYGC_A5hu)`K`r^elgvjp`e~Kt+rs?PyQC~83 z{(j!CzgEgO`^}#%t(+=94+_)$u^N5(6d7NJpObs0;rs+RzA_*Z(5MGaAI<<L<07 z0MoSqQtE5(jRd;b|2YDH$ev`OGd&ol{b$vH$=_YJ`IDS$v_H+i@NV1 zK5hTDcH$i6oUDBCWXOPN;!(ml)}4`cQVk7H!2|M0=zi`ma+(kDkwJohZ9H}zd*g{| zbh9AZ=uxYTOlx)a13W0u73T9-VN_B7ErW(x}CuBECmIyS`nR5P=ECia!7 zGlBhI@kQPcsT$u^^_VWlW)O$)r$*|xsJF4Z7Rql7?`BuN1{S%B!?)^6|H?iT?!arD zMbb(8{GY$|Mt|y;RC#9ONii|`oJwffg}&!n>OA8tLGaT*A4nI@%PcO%eEs8b{RI!GiP$Y!Lp=m$(6VSiw2H6@} z*EvXjt70GTBYIhYA(Qxs*4>ePR&^vDxXt6`5%$zN9GzIvd;D$LaQZfdBmzf zs~X>~r;3J2tu@9N&wArOd}4_&#vsWA%X)h~@(9=RJEMbnVXMEBfth9lt};a zWlGel<+&YvhGx0ZOC}jDuP>k~@9N3kc7EpKfPh2|mD0B#;=DNO|2nX7)Q*nD9nLaL ze0>l|$<`XFvkbZe7g_2$Pq*f`ZNtDc{3m0SN=jNHtXR3l<;Sw!eDcCXpBKjtSBtln zM|L+Oj96V(o^*FOoIE5Ao)gv{6nsV%Gdo^Eubw(5q?G%T*IYO;m*1PWm_*WpisNyJ zSNr}*Q~h3@B^sALNa-lhQUxsVbn(je`hsg3iv@hSXW{nzU&$n1()_-Z{8T};^l&1^ z-d*^pczMHLdvU?_9YzBe?u4!;42JcO3`X0pe9htwK&;-FPeamlJXdSp!FwWpg+2lSql%DVz7i#FF9ZILod#U zW}z7sgToo@tS3?K95DCOD^Zd@KFZd8ob*Oa&w71n!#?%Wn$1zV@9+ZP(jwgH-ivD^ zf7ShQlnB$(kVP|qK!3*??1zfA%|S)!fHUe5x#eUbGe0|SdnUoJ-t&v9HSVJW?%;~q zg{yD< zMH3cW$;iY?lCoV9Qoiraqc$nT-xkReVB)qAC0Bu|LorMVc7C>CaKVN3gr;~qHTO45 zxl9HX5&Foebh5cjA6-mlJnG-1D#3Bq`%@@!+`fRrv=8Wg1a=slOD5i_{cImTpA}dm zsy#YGk*Z2QxoxJ{0}tZ)mNX;M{W#NPMf`$)1V+_zXF&#LH&}5bCil; zO&6WJ(Z86THCki72*pl&=6==4VIOByZV$}~k#$r{v*G1c(DHbR%rwW4=xU zVmR1*`>8<(|LX2%`VFIU7&cmqBk83n17%vYmI|DQx~%+487LI%{go#lUSb-st5ikZ z2pue4B4Yl)!0EZ8Z$0)kU1-9^n6uxtG;fFg3Z#6CKXICA+;usoL2qK-8cYB?XBkLV zrl)(N5c&MtVjGXTm2Da$nT$qv|)kQDoQK&|s%s5iWe%7bt|a`3=mNclG=@e*muGEW_-#wDR$9bddnH#EvQelA2>hR$W6?Rr*rF}V(c5U z<=Y!}zN?tQKs}V+1)C5Fjdc?^7 z@x8R#5(8Kbg>{IkDRy5Y@O{4zcE)oAYVZ@$Z+J|1^7V0IJdZ;~@ZY+C7v zg)z8C66^ z$wsO9YpyL`nS>r>#I}R@PwN5cXHfPyE{5x0Q}`yHNf`eF-U8|w!&wWLvaKQ?hr=Yl zM>qabEwnf2&}qNVIiJ6dbGa{!PQ3&Jflww3pUqt22*iKIH7C>lL(5!{-#zvE*ogMt zQ8&ITC=Fhz2sXpPc{?%b`;bX80&1usenXN=?Jk$rx6;yWs6_=mW`JnU~U@U&_N_jAn zfb}`Z>p-z$i-MaS<2vu}h~SbGC7TK-IOzO7Q`eAm3&F|628M9!(BqHN*Df66LI04= z<;+vDCylKz5QvAMS_qe#w^UDw1qNCN~xk@aJ>Q9+|QW^E0umWXXOhggxY~ccqa~`7c89|DmOgHD0X$L;N8y&OBM(CU@ABAwCxeyacO^UEH(xFyL!CbO)L@Im zo$?AKYG-$9Nn8QzV7A9Mlh)_@K*v!-<~-YN2jKVxM?78o#F3AWY#d!jS4gK>Y0<1( zJ?u~PxqAA#v5N8?wW2-@&9iZ5{^+*mB##)W8JRyn;zd4!#saDt4L>>-6ukJYGi9!& zZB5+CR?i~J`iE_>%)oA^(3dndN4D&ASeL4vV&6Mc(@%CZ{h^LV7nNphWfj~MY51ew z#$#Atmwi|Dft1L7Kz5!hRUoqExT$;0 zM$)Qu7xq!tn6az+NIgnYcY7@K!M*jdkUop+6{KFL+bXyy#jMVPOG$H1g1!^v2__9{ zM=FF<7&(z}y6r!B(XCiZ7gH}G66yNASVN`2zo&sF1M}@#UI1%TkeM~Ks)?(FG6jKS z=R+6BXJKV1uGbji>BH%oMkqK_dE=)mBLWvUUv6}{TDtN;q(}HP@82c<)SD&j&{_m8 zAVtRpRQBM$k*Jo|bR>h)fhRgPMPf~aC84hTsll+tAP&#nQ6wBBXWys1`gNouiON+j zgw=%U`|E#Oj+mpREl!5z(dG*HRTBYvZuj6;*$uy{n?;Va>&lZy0lH%qo4le*9UodH zdXJVzqyK?}Co&QSGymOZ?lOfcnShA|6?#VK{JJd(KJ*CMj$(#>=r)CuwlQ}9(bv`a=jx6e>$D0XwDm*6P(T9a1N 
zkzRn@Pr(@rT*8!rd2&8tpBrH0d;ZS7Tr7}SaKn&kz{fznVQkzAqm_9w4}(Jx9DQZR zlZl^=`To1MmpOn_@2Y6clIc@+LJ|D>@PmXG5#&upLN5v0G;ytySKX{3V^pyI{7}H} zT&Splvk&d#!BGw!vL5Rz1l5JITJcBCi5V9eMV+y%na6YMX)j|usm{NLN*>mKeMMyY z2PnovfPpyITu##784G02@4|pJZel&1Q1>)o$@+l}$a3u|zeg*aLp&xhQ+<~>Ju6M} zZ?kSxd!0Z^oU3l`gMhXK79`XzKEFpy1!V$TE}xmOE;k|sQ;rKL$6NSEEHw(P7Nmn8 z^SOT^7l*;Alh!$Z6s~8P-0++~=kd|~Q;XmG{?}7|+_?c;fm10qCq%RKgeT3%fWql7 z?M$!gY3o^!?>iQz_}lpC=1knd`oBF5U9?9MG*;lE-kKlLfAs{kd-pVT^;L|y_?=MZ zHj*;S5wou`RFCnWuDw>?B|X3)usUnSeY9Gye+SY_Je@Nwjn&5s454+?+G+L@fQDsE zs%7T(@7?sz+7;s|y(s2-h=lMaQ_xZ~$?e~PcrqcXit!nv(otfFLHFEP_?E%NI1z9BS^rdSqy1n3B9f_$N&%yQFB8({Oizo8h_vbZ@mm#FbvBx zN|~si#-?s2rH@u(V&E{Y`e=hwbP_&%s;4~RToW3gF}`(XT->XFXWQ10P7FinETa}Z zzlZ(azcP4P5dD?#;3YU(+2kHv6gVxneX(QcJVHdJoQ_Z^3`hVze+2z0mHbB+` z5A{)B%`x|$H0FJcwlGF=HU+|at#>#5lS`gHvoiNx^zQoV2M%FA=)b;{fAl1*_x4|N z;b3=8wmBDclNtZ2N3*&GJkEc;E9>7(JGfcuF=6xP0#;meIRb{RcW=)Qc%-YNOL@Z?tjMHN|Hpi z_kqkcp%mmM=ewYdiOuCId`S|vxO0NC+xx{BVz$0>gm_8uKHM`FFj zKN|TkC$hKyF;I+BMhAO=5!wDhWw;z=H#-<(CnOjcr(MD>Ds#`Bvwu#G%@t^`JpI(AbZZ;FXLnWK^_Jmj@{U{ez2Sv%OSpivfF zUpyPuB{gbZ&YGNCO7LJI&^XTEL^MXqY1o4DJj;PUR&ThhHGZ;20i7ivFy7NYUr4Lv zgb{$USkA^t@JU{;yMJ6wl~!4s8~GXR{V|>!XSUY(i7r9C#(v(1gw6*!YlMJnYQ_pk zdo)qfEL|fH?cWlYrzs!q(*S&_A06uj9WB!Nuo|3G?n!%pc#fZk*4%#ytB!l)$@3Dy zKtgliVL0dDROE$89YnT1{Xbm$-C5OqFjm652}~YD^+>z^ zwSO=dXjn&6JLis8c%qyC3QK_YumY;e!$dWs^OztaRieD8vk9#`&i?{wgqo>u;V|T2 z=4KWS#v|rg`awm@YbSjvB0iMmbNm^wTzxZ(iM(;Qe-13*)7{Y_6|ag>KdHv>(O7xr zJ{#G3Fp=>}yHxsNFf#dYwL22^KRP4k`$yUY3i25NqSrX}_D{WDIX7J7Y*Roo&a#Jd zunL4f80+GVcwYiL`?SC3clwWRjF?!LXnA4QtibfFS+~VKH!ZgBNxsN$y>T@=1noaz zyRUF&g{-gn^j}X;{Q9^VV|7reY?8z5rUxqmgHhc%|g>Hc~&kcBHq7xR+u5Ns1 z+F3#Oe+QBunu2-s#Qay)i=6*>GX2-!9HfioH>o|V!wBc#)bNWnfV5qRK7}3T`|nd| zav!s`>H`M_b9*4pMV);QblO2fmW!>Ua1P6@WZHiYz4=|$?Py70XGFf$!)CNhqK~{b z>Ta)fyeSWjajwn3f1d^>J~MP6qD6ijOSiY{v{KysJi|c$`3HpatYd}Xrm1YPqmaJ$ zR^i5b?PlKV>A4*5{FFl?30Lk>cj53h|9}5|B6|qCGc%0CR5njBtGI>75MU zll?s0(|bj0+yjur{rtC2&F@@q%NAmu_kWm8CJuO&dHQGe$@Pn7zYHh;vhms52Lz3N z=%VdU*J*oEpJm(U9}ja_=&}!5N3(S|u>qY;@@p-ubGoJ5GyjR$6Ugr`L2Mcy)B3aG zGLrf>w{_=nJ>qiA$hNP)Sk`*pzuL`{UXMw&_vgP}U^jQ4H$OdS)yc-Sof#A^|F5tj zAH4w7ks9^ARHIXV5~yG!fu#u7{NY9drrt*jit86dT;niL0CSv`#5>@0<0%ekUyna~ ztH6Q#xvKA4axIJVpSBkDq)*vb`&M;Bg?cCdfDh+4zr~pMVmT4Mn%vg+%vK*ho^X(& zi}%TYV9lnzEnUwl*1B)T^1U_W-}gUyDap@1)uy^-1v{$esc=raA_hQ{-r zUE!BwI^Kd&!RIeZSXgRBeIP&giH~k_g=E^z3;TXy2oM}0OJ#YBPwC@eQeMYo)PBl69mHTsn_3YENfQw6Il~SoWuR~_`&A*v|+8yEY zcr3^Lt4vgNgS(Y^>_B;mSrfn^SwPT5ZyyexK8UnFcZU_pPx93B1G4=IVC=%0M_&Q1 zXG2uv-C0H0WC|~^>URL2O^Fu^QaBGwv!1LQ=y7#Av?#kY52Akw5l9dPAyHbSBEk=IVyMi@}a@*wgZ}ST(^- zD%djp(5(XV^yfecws z6Tu|V;#~J#ruWzT2OKXintyA$*av|pZvR}hHcD80%ptcfpNXEyN+E0OJ@dO!4r{(V z9W|tmIdyH=;Xzu#V()yUtY4(!kiB{h=!f*kwYCcKa4C zK4+mR!2aU}C|Pi^W@N3GZU-TV10WL4sW{7YGXKdS7PzR0s4F>`f006D(4> zr?PrHBeubet0oFriorsUafcg^XKCl@(>XJV3wej6Wm=cz{I2Ii_CTr1`{S;PzDoDg zH(~k&J#{U0l|LEb{>*S$>LX6N9L$NbR~hU=X_oPw+A_a_fuJbG^4v&OUim%m!xztN#+ zplGHBXFNDwGb=-(KfgZT_es+}i$|6nMxVp)dGxk;{l}GUCG-BaBa(6~k{5WkfAX7O z1qc(1l5XM+@H#Ff;a{FH^?nen_yXzFr+RFU?! 
zp`72X%Q{+f&5(5mr$YJ23 z4>wr&n?D-K5?vC_E4AgAt`Ajr`=hiffafW~Rxkb70C z#wj|e#I4gO9nWP5LQv|}1`qQ4W{3kg6ZnkFYM1&Q*b(t6Iu3~(QE_g7v$mh*tf1AM zKhn$*zn4yhGJa3gV3+BA3cE~aMSmFjc7W9YkAjM>gFkaQAc^8|o(5f~OjIq4aXsYd=A3cctVsXd0vdY~vx3Em_)2+okxMoa%y|kX+ddURk{QOo2{du_m zIjfrJ0cQPGD{*J+{B{-IUZUklS%}GI*D{Fbs~)WF*DUY*yY!Hu_#Lr5UKWU7i(t6k z`K@4q5JAk#YcnPj{wII?H}+V2|9tMyWFDLUL#OQgXbEI&*N511p#0AN=KuU{yqy)i z#oztbd!b-BlpPR#LUGwZ&+Vyg*zxy)W}k1Pe#lWkZ^If3bQi*C;Px0k%S`V89BlB= z&jDMD92Lz8-Mz}|nGe^{%x~JbmB=GS=a)d9|NZxg7E+K19R)ezp3XzYdgr z-p!sl`Om#LUcm7ipf7g%yrWzSj)cKpHI&;PI{xD&4SM=-sUzdw=5AO6_!uYEVm zH@?fBM=R5#aeh&~KMcmH-{-6GuIGE64%7j>?)eGOYR*3abT0@2E#Ljt$M0wU^^f;w z(Vttn!apvaCqP%-NV4shQ^Yg>EAZ+a-Fokxv)y|6ryIW9Opb@?+iL@sJbnu3)-ylk zR!!w*%-M-)gPlXV`~8)j6h5TyzI0*Awf6n}_2nMpqGJD7{?FgWyH5qf_=8ApM#0nU zKDAkp*9r)id)l16YCW&>41Wl@f%l1Az7=erJj(@Z2ZA3)$?J~&S#kNe06Tzp_Br_n z6m}VxE&IHP(Z3j5WKL(ue)Bt6Gx-0)utyB%F$L>#V_xMAigO0RjY^(gtwh^k-5N65 zkGrrr+v*B;JhV1_GPvw;-MI7HvS5EM@a}~*N?@#QT=vTyX#DJPyiHDi3BJme0<}o@hMRJHNA+XNW^l-8(^AMgDA{ z`B1d|4;CTvS1a`8r-eYSEXR+qkExhRG=NB%f0=w1p*f2^JHNSFPh-!W?Pr`eZ*8LF zBzxv3mZKT38+(1>0>fC{JtLfyT(<~l3Ahf9K6e|lgLK~?-C&b}eOTM0cj?Lh^2d%2 z4)6INHNoiF{$TX4v^`)?4gw6r3T{SfpRE(!IxFzzupuRMaBTI>di$2C($oecz0jUj(X{KtW3bQegdz52gCcU;M9t zMUbsJ)c@`a^8 z8&TaQWPAF=`hVshuhE@teA9J+$J>TLVrpq;W_9a6%+@#mI~wc8)H7r& zI~A|DzvcCnPDQ?<@*T`lqb)4`PiE>hhG3G?|24mQ##(?C?|5XW__GI4mjJTUx?5vi zG(A<+`7#^-rCf{W*+$Fs1Oo&^vWC z{F0Cbqvx>3(PzP>{sul6b9NBHo=oOIiURufvRM4?d^66olw>NN0Om^v+uWN^v~T`- zKKaF%%N35MkFQa0rg`v;K9lYuLp(Eo^2A}N_03_kv#1Cf#=5>8H~{=`P}`NPx5A2A zWG5d=6C8ftN~;bYwj?C*+FPfV-**`-t!0QOs(e%3Q%ha8x43umISJh7X13~hagz2N z?BuWd>W3{))&9*N;P?FIMy^cY#LG0VwCwOXHLQ-2+Tf`5>yzK?_M6%Ld9B~)doE<# zemM{PXr&TOt*Mh|PZrw$^2zG!nH4Y7@I?llbLE||rT*9aJAs{5o)P%f@{4-(Svt{s zE-|veKlvNq!v*0?(CxKn>^=V|@#=0FT+AEvxr?_J682xVVh=+5%!el52KaeAUorK4 zey7GFXHNZ`)zAFaDu*clQ9l(0giIV%)S1U5jk`PA=f3t7azvho$kaehMc&-VQL(YphH6d|{1~mG9 zBvTtL0ch6Dy#Y?<(>Jo%%gOtdngU!OlOXsw(nvp3-9AI)EThRB=$w;6S6`dMxmtVY z7vOSO9i!;wy--hM?f7X&4n+La>K}pVkDtESWD%AuQ1(3e$u&=8@z6Dn{pPhN{UOoy z+KXsqX>To0|8T(1$Y#w2(jKt5@>~`u`#df)sIAeTtU2)YXZ_n;CfL9ID3}AC^FHS@ z_3+;Ne$K~pFY%pfKev>e@|Eh^`8z))IKHTbNFV~AO!SuzA-e|-LD@_7aaA5X45lZzrb@ zJ4iZ|O&OL=@>{_DXc~E!@F>-14QlZ3e~FHu#|an?*AqEs14e&xaS)Q4cGpz6eY9m_|K|06CsFT({NkdY`!jF^xR2)f!JYGW_j}`rqMx7hbG|m1%a0EZ zY1J)#`}E9D8@mkv?fHECn=3kjC`oNbPx2PMc|R|)iElCW`qw8)zC-AE!BruT0`L6G z#{5ki0qdxuXQ<~SpYx}gu7x|;|MH8^2ZN&e3;foQ^Djty^kpHo(?M6?ADsHub|YAa zrY;`(-goU#y3PMeTi9)C*xI-HG|sr~p<8tp?!R8?UoVTIZD}Kuj~c~vI_{IdA%h8S z=j6YKinE0qVb8n#nO_F>un7X5%?Wk@D5#xU@|^Q~Fi?$KCNbb7+fKukQ{P$pT_d1t zwfsF=8Q=LEvd0wB^Y@5}5LLp(ixHh!3(!HtzT>N2 z9mrPT8MN;nW0>RM*^G41P=CGd`A0Leu;!ersfT??XRl$g=+7Kzfvitk*UrC(_~{jV zbcg+Oe*OZc1nBv^XJymlmHdFO|C`DE9xPrbzb!UP-orB4TYAoRju+oMzY{qMfL!D6 z$>FVge1EXXCzQ2mlF1L(oVkI%`Fkb>o3anksi7NTpODte5$K&Cv*n%)wMr?$a(QU) ziFMA}Hg-WNQ;O_d9xGmSsL9MI^R43&4Mfi)Xkjh+=650aEQqUW{Ap;rTkG`_KsEHv ze+NQ)2lEc(oG2!;9~PzbUfSpIl*D0pba78}6*JrQ`hiqcY!88MN(b-7H$d_~I53B= zgQwN~(FwfwJO9BQ;Pr}x8Y-Q(6p|5~s%>okZ~io+cojIixumN5Z~i7tdFU4X{*HBG zl<%?Wt}5 zmQ-7g<+5yVdnc=Tzn~*pm;A5rQj6*SK$(DDD$JRc>4mxb=lbzLLzw&dZ*RHO|I8$; z_s*|oufgxho(_cXr9E@vIo|<(=U@G=`Pa+z9TcyhnNR)$`3a=@avc8+G?UL$iq4N! 
zML*izn4@*=PPOyP6k{tK9lyYs4R883Y6bGnAa?V*hwQw)_y*uSz|nHs3$Hv^sM&kn z+pT2&2sHm`;+0JzXDP)nKq%7-t3;eWf@4X#??Pj032E4pnXl@~mIV?xszF>uc=2srLUu z$m~2);A`|xewWivHew8`=fIedO(OfSSud*1RgAGWDjZ6=tUyIVt!SAuW}owBb@Lb< zcxR&TOMA4Je9>6`R#(b4^MnD5&sMbHpLcE#w1RwH!XiVyk001jY_+wEONp7f z^!CkX+rG3C(yOiMd8Ab#a`ox_sYIvqPt&ji%KS>;uX#+%Ud@xWjdVK#c0!ZBF?>dv zWjX=*9t@h<>JAxxmhb$e%C&s)vHd#u@-@%adAxbNc2T#+-|vwsScl%xnr5?w&S!-D z%Z5=o!teKmEyfQ7dbR21vX1%MPqI9EiJF9F&@0^!?fvEFH2XAGD4t!vr6Ys_4g3sG zkb4epSro750KMa8>{r}1GDg|-Ze*Oiv!w~bs%{u-3gZn?P Wh@#)nB!!{?0000Awxkyp()5qe}#gAPW*2Hz(9V%STD?hY@l7g%1J_1 zPm&x%zDQf>C|IhfKrun~0Z`Bp-=N_B%K}-5APW=}YymVBEMyD)-){vl|1T9fu>kh} z@Bf#Pn8R@#3Q8PGL0Uq?3;Ns-@jLav^1EW4o$h=`-pjJ*(7FT#5~d;pV}m#oCDM;j zaf;AO6r_(td4v2)Ixc(UxF%B-YI889E^Iv2uJwp90G1eO*&60Z1C zNLPox7FulH%%0tF&ME`)l{(XQm#1g%T%3ReB>KoKp)N?DPhuASYpyGi^36kfdD>vs z0LSu)%^@`{lFS>G*(om9$UO@lq_A=25l7Z3Afl|y0dj0)e$NbrD*g)$iU>5O!Oh_j zI%RhlqzU3HFOR~ZGZ{EC+tn`t($}?OY+)RPE$LX~Ai-7+Vu|{${g{ zF29Y0DN>Xe*r260H)TLSrHN08yZ#!jpe7o0Wa1gIeLU>hWSY4x6XvYp?C*t#SdCEJ zedGX^@s8vCw?&d8AoaXjVXF+J+npDN7_9k4_T8f+dX3h$sEwTpH5TfslhYY=224!6 zx}xtjPPhCy5H(~cf%F+0RDu2_9b|8G&lU_; zc)2zfq;K!{48|LcT<7iV3hP`m8VjZogmlmTz4`0Pr9L%bx`LZcj|(QScDYLgDAfjq zy-SQBTy1@VTB(IPJ9HuNL$?b~Z5O7&dJ(G8Jj8yrzl`n8(aTzOMf{zV+<{0-h|NS;-XHn~amVdTt5WeKNrF*-B+ zG(Y4E+mmDhg}l!mLufqPn+;e+dBW8?+KKt3m1I>_w{n7FhxP=I2G5R^yu8>@7g8_y zVQj1?Gu^i$Md5jet=Wg?`<<5Fr+kvt^(_CIQPYI~XtRaA%M}D&zD@Ca14`M5z?EuV zTwV){&S6?c{?`Xm)mt#`O&!mtPuzP_G7Ad^i7jaAo4f&ava{}QRN|quW@lIDz2D0Z zTyZfg#=K&Mm@PlQKX^~$h|1)h=mdV@_&29F#lWgPl<~c0Qij-um0hBW9A`ewyxtM0 zVT0EsjGyi0#i@ryx`zsBJqA)pl&Z@SF%p{78e2YrLO7Pnor|xXiwECpo)4R*%))!$ zeqC?{7aVS6%B`&8q(J#99Il>Cm&-8O)s|HL6MMhftIOv_bN>jhJcq(s&{N*2N%b%I zb)hu>#Ix{7{5#KVfcrMt@!+y^d2#re2B9Au$ip_a2nhin|ym@Ei12?FpYJ z-d!iv#yYs!m97}fzpB+sVvh&AR*;Tsx>XW;>>|}6!>H6?_a+nLD^yYT@VBCSaDNoA zk~*gis|RE~h>D85BnZ@zDnQj@XMp9|gjxvdWX^bH$3m2yR%gyby+0+A2vPJ#lTVl5 zj-GZJq^)=eb<(SUo{D{+6uRXj_p{nbnphIJQxex#+YDQ>-8!U(?IIb`(bRBC$D-)< zv4sM9xXxjmm>@hZ&|Xmga*lu75PB%U_YZ0zFN#&93LNmAnQ=I4mh_e!arq5op%mw= z?D!dEA#-{k7sleq!pa|kRCHIEGen`dSSLzvW$g9?jjDHqzRZ0c-lm-fQsGq6YQ}4B zpJNP!_#cr7+MPe?X(ThKBS6MvH8@CbhqgH-zO+ss&|0!3e;fd;B{Fj&XaFGNyc3m) zTQAeE2}J!KoeUf{hQ~X9Pp4;Eax4E)S;*LpOx67&qX||#{{)ZQALq&IS`MZbg|XD6 z?#e1S7dwxoXzaHJ5M+XtC2t$u;qMKag4D`ZZcB_FRoPcwB;IJ@xWfXSkt$BsTy%Wp z(Gd>RGj-73Z8RwK^%TGiu#|})I8;TQr}+i<3@t@d6xXJBlr}SwB6(3sLI^OSa0L*g zf=^g$(oxND2+%Hsn{EB^vt}$W?tdmDL3;xTlE4K+YCe)!#PrtH&H3vJ*OyBSjQ;p5 zCr(kcN(0i@yuZ}8Zj=G-*L?_}!<5%~<)E*1ptRh9qMKzhn#cu;pjAN5iXiBx$jZe$EkLrwmx^d-%HRTYA zT&8cpDl}6~2xAykw-of&4s1C4XArbSzCU7`OmaU)PiwO-QpO8oobr7?3mS=%lS#<9 zDl_u&N;x^vQv&kB|A>s4nSxK4r&!0Bd2_9)1c#maNJUCv72r_i zbeib!KvAk*$Dd{3wkk=tnEte-E<<=M=v(d2aS@GHhp8pu+wZ;Er~cj_XU=my`3yEbj@p!V+b?$6V-lT}^n ze>5IyGvR$#qxUke&oW^S`CLuLLk zH69V9Ux<>}tH;u|u;F4YF!f_z5q7;+u+AtTVEz)VAyJZ)?b)C&eG|Grr~i5O7rUw3 z?W_|C0mhA)64I%z!5aC-7sA720cn$IQc%GUqxG2&3?R^!&yQ8&%&?mTTrPHMXCYXU zK^Ce?FQ==DKkrs(-n2C9F>l;+PY_82baO3n(U+rf|#dRS>uhI7E!T(&VL2E+CdbtfOD`q@#B#RCrfBQ8 ze9B-WHAyD(5yEM^-F66q|lc_yG-LHRyoOGe1@>#y?irXD_%^ATz9(iL8Kel`eLn z+q%zgN{OSh>P5V9Yw5X0aqE|cB-0;vikLM0=Ss!v)l19@p|sGn#d>dIgk&wYB(2)) z(;T|GC?rvJS`HzP(eW;n+jia_%m2u`GL3%R+UFQQ?5kpKy)yki)?^octT0G4Qixg2 zr`tRi-bAeyhTtQ1_@NA+PA-18Rf321QBi~OjfWj3XIh-sY~Oe{$|x(!9td#0=<*ME zfqAumUOmhxbsIO%G|IZs-4lHX*yuJ%BxjyD0KVvO=>L#u@F*1vgTyq+&@`PU>zhPJ zT@J-JcVGF*!0AlUdAj2Azm|Tf16K`Lol2J@o#7>W*rQ zMl;p6iHsXYOQVsF=K~wl@dy_N4?`_6{_Z3qWiMZ~lpVvgYC)E|0G|I8`Z0xyQZp|OC=-8=wGsJMR5Ov#oPJA2b75)@YN zj2=^^`mwQ2OldRIY&4cBmI{j92)*&Pq$wDP-5ED&P|gC$zzTNI50 z4B0NLKo2!%#zJmtHr$e3{f|#_+u(GGWldfX$R|uC%9_#^gPn`C^_$(fyND5`bKCPz 
z+0YX91GV}aQQk1P0~hiWID7B-j*_FxJG0e)|l@mQPFLdCn#4~3R zLwaXlUfCD`v-+=)7D)UhAqgLLYHz~orQ^}r*W-bc`+H%SZSm{y&X3T2+`rDsH4c+e z6B0M;c~@w|uQF7X<7TmG^B3^Ng~kaIprP85Y4^xJKk%G)qTyJro_b>cyph4kf;>@& zCl!BFF~Uw(O-TQF))2o08l*RikF3!Gn*3H@<)!!%dW?n^)B=wp9$3O2-OjYFovps- zCw1AnWQ4-sKb4RJO;a&IAJbI=oRCf^WLCiC11a;*D^3&)lq=JC?&~8X3oXhotWHGp{hg;W^T4-2%`d3aNJPb4N$(q#4MlOUhAqE2Y#NH zHmPH(*a$LS-wP93L#H7a;*d!bVJGOu-Cqj*1 zU>3|T-uUNUkMA~YNr^^rYa+FK6G_LoQL9fz(aQT&y~#nqv&T`fxY~@U$^B4mp&H`M)H)s9)7ZpA&z?HCt2tQZBue_5-+Tu&{&a%ULa-Z6;S})=vt zpzzNPEqsS^0bHzn;g+)cC3@SCH=deQv7x6oj}Y_T^3sZ2a85Z=CXT2e{HE`j+?{{( z!|i`#f7{^wj$5J?aB1b70s1`v*^qGYT}CHm^mCer(M*OUs!E=NjQ|D~-r|wQ5B>OW zpR~0k0w?)Mx~Y3q7*ZuW5>#S(=v*#p!1G!z!md91`edCfp2gb9J03i81YPkHVwE|Y z9Y>q2t3SiD&o$av4g;2Y9I4NkLYO?qIr@f z-LDoVbAeUrekCbjNNN z^bUJ5>VS=xjj&Nw4NS$ZqXpmDUp{Z%p_m5tJ{bz;dGt&%1@l&)f4f6|{eiwNN+vQ9 z1beuOqr_gn)Mzd_59DQ>(k~){DJ0?2`NTRx5yy-~fBGj`iFUxzk zug$}7?Pn?pDeu(ty_wafHM2jts}E1T@OLX!m6|K9@$>222KnF39NR)Q6EWe`!2TDr zSm|ZNm}J>G6qJF=B?&k3m za*fosp1i>qu8!H(3F{7rM=cgfJ}w`$#^2}Ut7z2xvU!@#$}r=FN@8jnul^XH-Flxy zVrufiu9`@K0zps%AE4x?4jcnSA>NI7)H!yKrax_73ENJkx6FPuWUZvxm^u;^09OOD+{yN$v3rVA zC_u}fCZQQ-HNv3Zu6L>E{8-%>dxJuMjCk7xmde9<$*o3OGA1IqfU!H>Q?iQj|G@W? zr26MM7vH{#H8WElgN@8`dQ?ahm2-Q`a2$GnLKK?Wy3vH zM_GsEJt^7Oy{vGA*%Uoi{*?zT!mm$%Pan_fBaCI}i$=AYU$(e+!l%ARwEsQHcF$N8 z28HJPuLu2Qu(v1Y4qq7Iy^D9VD=!EyUB2ip+W2aSVT`>Qd0~H74okI~);Crq@1b7E z)tyhO?qvp5)&N^pQ0)66KL(lvIqBSeE1PCLERh_v1xaL~W`6!WwSJ+xQ(oT@kptSg zpsd|MIKzd5e;TeSB3V;#@X@4eRF(8=tdfpS_W6@3TdM7j(NDNC%fjdS{g*d?5t^S! z7ergwub+Bda%R4Gv64K+ZKo0t2Fh;)&oeC{a{)(k!2;{vs#dI7NlG4BNj=@6w1A%L zl6aI>0nbJXm%(`dK0aF>+>2C>2IP(8H4d_usdzlA5>N>Rcp# z8kCaIEHo7zaq~_y(4ve;t}(CmrbgZupKf(x2>mpPw?yBsSBC!saTx@juomuArT#a2 z7acTRuD|3Ho(L@zQc+DO*mKwQHZH<=_C&)3l&dEE0cMX)G(Q(k(BD_YEFeO@{P`l* zSa>te8D5r6;w>S4oc8x%Yl6FDhtxi1u5SCml5APhgnh~1vaFI_zCCW*Rdn|PdoYfu zU8w3qMruuFMBPh`ux&GjUCW@3ePpL01G=0b$A7ig0Y4N-IFH(!?RRnS%|Es%G<;Q# z(Bwj9ZM7TKwv^)_=9j?H(Ny0<*^7NG0krr6GkOjufUbTNqbkb7<#PCV*5&rw1b6Yr zl6v=!=wCu#TK%DE#r6BAi{*eLT+lG)io>^1qzsxsHOysaA1l??`Si0ka|8&_6dyaV zzOVSrkU)VOYtsC+N8xf;9rIAha={;gzaca|(PIVawB?+96D^MBoI9$XR*k@+?e~Z) zKcc=0TXTNxP+)vDe^NuP@w32Wr9c*1+t|x`8yOKt^qp#~x>G7jb>cdr5EdN;JLd3_ zoW3(gz}&H$Vgx^#TgLPYCaFy{TlaW6@wX>yH%hVX$-`ZB4C z!#pxnf(AnqGwp~FIey(X^s|M&B4J6{t!pgmub0L79ZEeQm+SrvSauUy8O`}ampZ0r z$)OcJJ+HX`VCtE}DExkLzTTdlIxckOeDbsBvGe;cy5VrT%2LOg5D(3RtwEZKBON~M zPy>Js#%9n~O$*=aiZjg6McYVei;6i}0K%E-Z=XdVx3NBAnN)IGFOK=plVRuL?7ca& zZqf!9K?WFeq~-3DmQKg;w4?*)`xVUtCgFuCjWNvYxM>^OxAl@VHQjJTetx7Z|8IEu zi^)@#9DT2o@OdmOJo8V@egj5XmjPw8RyZAyy(CrRELBUEp{C!-`^o;vpM;775}TppYdvr{Wdkk zc_I*mlM(Em9x&e3Y_uR}Ot)k5#}Z;a$$M&J6+u^cP`yS)Sf{P*Dk?o$;5M za4;Vts)*Gk+pDk(H?u#Gk=nYJY`ev=XNPH~K+?T>{|3s5ERVb+vpl}LVCcJIz_|Z` z>d#odM{k6IiRh=L$+WE%BOpKy{5rr*f*K2_>2w*VP$EWIT@h|A=*BKO>eG{|(qgqhppW=e0dySG0&rV6g`l=!V*%f+a>!y7{DIi15KqK$E9C`98Rl&7;NI-b>8Atrl^y~c?GD2v=%66={bqV*~c%uYy;i-=Az zv;T-8{^V$*H$;kJ1w{B6$c66HDZ>BY%SH{mt{4t=T9I=`XlVS8q-R#RpT_j+Iq8!i zK(HHw4jb-PtsTZ2B4>e?GbO(e`oN{&<7&nK=Gy>*ck5N(W#*EPqd%uE1Zb}8@?IUc zSw>2_7r*8~Xwfr$ou(VLmR?0B3+`(QGC42_O(mwxfgsuV{@=)3AWIv&2k9O-y>?@8 zq|4e<1aJ?}z;V^i`?iSIm%;s6buXlE(op7&LC0`VG#jx0;M*Q5AU z95FqbeZkp|B@~lIpCFvS4`}(*&(7WH5s>Sr^3x|x4J|B~;j`L&VU8@Kl3hEx z!YMZiXdkzpTZ{`ti}8)GcOZ4$Z zqRg@Oi#McOeg{hG#r43)s5WtN+9QKaW>Nbr^BB3>F&_Lq)u=bU4f&{djj<;R`wS8# zzKQeL@5;18&AVnCC#!;*h}E&1kokq97~E?XPj*omLbn>&vYM&-`+%yJPzaULdjk1iUqF zJj#NuxBvKVbX*?m%iM}`WO_=K6LDGSbhsX^(fiQFk#oSj*e{I(P8-=}%e z{?wmhG7z3b58mwBS7d{pyOtQf&33cGRet7S`xIo$0Or~26JQcXKhQpm$=B4fElC?X zMz%69uy!vem6p%?@?+rv5rnzn;fkX_#4}5JxjP3Su2wlFhIL}ur0Uh<5u~N}eh$d4 
znJ_`b+>lbh{iYeCtqS%JC6-KTk7mL1By;OAkxe;libQ-7?FqCc|J#e()X9)udqrIu z<{`TR<-8hh%tx6UYM#Q*vF55t2fMQ#nJ*Da9*AXzUIgw9c2b~(Ie7Y z%m70>RC_10twiKN|EY>St(P4QPQRz!;06?$7#P-)u2MkLR&WiuwJ1bD*RPj14-rW` zx=t1CG;t^gu3SemZ=4t=Z-y`W(H17mX1nVXkE!3uEU|^A&c_=rYR$P^gkZ6>X0Hc2 zS4(~M3!?J45!dB0pc#_Dv3in{3nG8aw8C06ds=x!hZo#P+M3U;JWIa`bd_qfVkSU@ zVPUG;L-vg2%g%HNk$s!lG$<-RBn$w<>u`PC^$-M><)ZEf_1omvJ%m`3J;0$XDj{};FBrY( z{^;RQk+VzT;ntf}4jteTWG7jDK0^2-X~_cmqgbTuixpVU-c&NPH>b-b5U7rJq%U!* z=+dyI-t`tc8QP;rlV5_$_@8=-vZBb)w*uj`TQ)|r@vrc#9>1U6Q^5SNU>*fFEb*@FsIjKrM_{DbgZnwO0er#_bJSTv zx(Q7P>$e%o`jKeF;Um%fNiGn;j4Qug=Xd(kzN*V^>xopgB-kKe&^0y29xU=?jwk%5 zuf6?#P}a*dF_gS5=={)))eT!;L9)aN(q1&6d60De+WCWVOTiaXIzcR7t0*$^&1w+0 zw-GtiH7~?Gw69R$KqclK(-vW!xV>&l2gRt-)Z#+M6%=8(1XCDxXI%dU5s$T`Z$CU= zDf#!uIxRiyZn?c;qm*Z6fbgF8iy@dFeVz<$npiZ21D&^AhSS!I6ku6v>$8MACH7_W%`6NftETITkx7e;H|{weYtz#1%e7PJh*XEOeuJz?Om)=g7;O+?VyBq zmFn=Qb129VTySDDMniFc7H;QGjFTIN*Vo2+l8$T6C&B2(C<3P==60M`xm$MqwbQ?M zGO2A)J<~>}6|Jlw%_`0Q*?w%*XzA}&9Feso*gf72HGRAkHe$yt>m}qRS9%P{KX4XF zANead8KY0`_Cm?0)Oa?LSz?6u&>SHfL`$X>8hG(ENwHJNYh)PHjTyz|Y%oy+*2>cj zr;+{o*yi{gru#U>U3Y-~Cx_HnYU1AHZP5m^R@a5#x~=ggz_%=Ktaq@)d^I*b2oS@w zBGv1O$zvFRTf8mZwE&2TqB_vvA5TkD(u;i%P+ zWPD4oE15}zTSCwuL1eUfe2qXb!9?W7$-AYPe|1rrTcSI}HD-qY_5&z*G=>3?;$mA*Az}i_&OAL$@G$H z808bf8`7{}u$fX~*tYt!cQBf_{zg zd&qZ{Pp@FHpvO3V%tht6Wk{IWL&u6SNti|uNKHU4Hn&V|YABzh?yhinU|&JW z3z& zRJiwX9eNnUoYp~>>+gV2zUQ34;Er5v*I61r^t5wDy89+Zs#R{pB6nsGWd ziDNq?hxDW%ufq^(7;?rw`R0T^;~y{VWiRab1`bZnpCF8<* zE}9HEbMlDIDlnhItZux$5v8QbO5!vpho-8ui-%5^RI|tL=&530Fm2NO)7f1+;IL_M zv{Zw+8m~q>Vfm8jej8IOEmF5vPhzdDEb{Zp2UDGXC3Cfbj!o>G6?vlvip~HT*#DOx2!v@n7EKNGv zqRw^$X;di10+@+ew*ymYusGZ-<2$w9`?oBlU!48mG#~e!q=`k_X+rTN*^%_}^wGC* z6mNH&tvgegk)^JEGojV{dQ)AXT?&M2uO}_P_-dUvK5V+Sv7X6x6?4w|*;7)8G_2S- zHtDOsME->=JzVuhb{cd zu}v@S`bLB2v^?MSO=o{kx(C{2mg2T!Y6_M#<*xW##-5#~$~Fz|y9n*dXJ#98FCS`y ziC-&QE<9`E%u%>l2Df~ zbWqaP5Z!+3QO_B~T{aLSc4kKeh0X z85x<&?3pNymJX7n53Mfx_vk1)o^QbXzscYu=8(H!W8=3G16wSQ#F8`zY~f1}N(q1N z>*2o;4+6kAw`K6C$UO2XW@7?1F{SB^!ULA0naM86l35zVR;(T;zRwMsl!ucLqD_w^ z>bjHG{_yUF*(C%jLnJ=&-V<17Bzm$Rl4?hq7K8_I1+}=wbDvBk=8&JY9?)hl++d_6 zz__`jLvBmbn{q}e`P$`}N>A*F8Hjrs zYErikb}GBIV?aZwZr*xA=h@nchKGsjM{}g9A~W*h>kkfi@nAZ*yO!;rs;DZD!JmJ3 zygrU{yT~R!sGeg@XW{SVO2!t)pcKYq&hPoig@d5cKe zlgR?qgzQnQp%ry|0VpEJ4%T#RITu${Cj7lOT>?|x2-R_BJXNNyl98P+vw3`~7nz|a zYAY*gkw!`HsVq(2D}uaA2+At(QUILAkIFo&f3WFN)9ASCCQcuk-=lHs$>`9+*M+yk z)#73yWsOWE_#@elJ7e)V9kFYvjrHQ`@?{mPfSvSBN@s(vf(o&^4c$dYNq1*P12u0# z@;2eblsbLBt9k|K~_(M-^;|o!w=u?ejGl-uKh&FmyYw zr@<17s!7WScwBlLNGl{EZxs{RMw$AB=)ZR1Pd`Lv5_XHbNV>=arY@Ir8sG$-`CsOc zFW(;wdGt~9e}tk02LuoaJ-hMu|NrD+dwWzx36|HFpdCxkp}U$*UA zv|S>OC$PT6QBQ7EN5K70F@jcylLPrm1l!kJE9jf;lR$a-96#%Q*j}zcEM|+3eJRP| zja)V3C2a7!b~(?>1;hj4=&Vr|jmQW&5Nl1g&1%@eGUQAvcMN}bmf9z z0zamI=+Rm4GCc=%mg4Nd2qjd7)NUTgmLF z@4yl31KTM*v=F{{lc*#+DfU^QG7j(hYTfOc>aX%K8-`NHBmx1<7z*zALjZlO##s)s z_Fd^fvWtG;fDwkOh)_%zF@4^2Qy8el?EpOOF4OgogRp}4G12-RWO$5N9A*j;!mH!65tqK|8!sd~P2y^r`E21XhZJodOr3<$pg8oY+R- z*c9O;C{tB+q$z1HL90`Cgsaeg4yc%9tZ0i3fpBt%tFUHWR_8_ini0kY;g0h+aDOFi zJp&3O#B}ToWZbfm;QrHQc9!n%{rgiB@;K4KpS$)G?1dG@V0kb%_-7Pbp3YwhCcu3;dr3Pb0N4lG1MG0ro@ck5FQMKoQK zX*Sw~3ZPTt+#7N17QQNX*sMvMaVC4|`M)5PfySreqjei%R*?Bjh?q&&XFvG<+0H~L zGl8Y>!-{$rWB<{Ozp2YdfR`HtO5Eas<|VPyoL#EDBe@7&ciEk96u#&)Bj@hr&^Ojt zlkwA{GGWp>QrWDExlm;nCvJC6Rr+L&r$+Y+IdlLAUhwPgu;FgVVgkEOnf|h5_faVP z8i?l4?wv^PEpTsWgZyvs7Ea4HG0zijd#m0ukqOVKMGa!@Q@4j#;PK~M9(Q5UQV{<9 z#~jFG5t9V253bzi$yr2F1I#tF0r7n_914ZhY9Hwc0SL5@$b=BChNfRSdhW#>BRhsg z!&6d5xyC5$gF5H0&Uc+YPfq6FdSW33SFue#upuAnvWQhhWChWvO&5k3exfFRrPIze zik<VATffP`u4c2{0F?`Ts>$TFRQCkAcMCC+VDz`0AwGe) 
zO{1X`K$}Br4hYUVoVNQf;RUmRXM8eR#ETj|b}ZZwd0uc3NN_SBMYHhw^z44?tirYD zy8pck0&Vpjt-E72>q2k>GJ{9ciZ$ns{BRy|lYlI`;n8!7;M~xF8H0z9l*-%eE{8ip zyzfmteiqqwxTF@Lufe~E7UWi&UJKED5|(&kHb&23D|-zhzy^VI`WU7Tt3K+~3H)m> z{ceXJyHr@474O{IM_X;rV->Ex!)_}=AvUdYvM3b@1OobF=JMtO5rhYGd40L;!OVgy z7A%wc*@i}=-#&_*-p2Dl?g6e)tm3=VAz62Wt2r#_>Gb=<)HxM> zz=RuT7G2*H;b|-qG?((@D!<%na{%;z)1o*v8Wv)BkO1Ez+;aubL*vlLk4p3NgP8dh zJkd|M6?*hm{O1`El2mRGGeAzgIG%HzGfmeQc*+wy)iokvu;KAj?zj3BMQ{v8q>sIH zP3ljLxC4gJVh&>No;L53JJChzC7A3N$>5VpPKY=ufd0_3T*&N&0DFy$#ov0KUK;Y= zvUz~0zdc_JF;V8#Fn``&o(O?&d~r%Q+*rA%P)6t5m>Hk*`(p`_&d7A_;JFL1h117d z*=fY7HNH}#hMA1jJ+1f**Vm!2&6C9hN@RoPlgL<5*6zwW{^upM)ew+*;l=9D%J-x)hQ?hGDT-Qbes&qDzg`QuR zw&{ay($7xLx()>=FT+*x%_kcPF;7}`e56*&GB+wzONcz=P%x?w#va@Gm2V0$-3tPIWzw-?dNXhelOCU?(lAuuC{K)i} zoN8tp_hgk65)O0G@Hc;RzfT0wR($FtG!g?@-LdjgZ8by;sX{qmtxg>)T0OOIH~Fy= z_QI5Egz&{Z8|m+7x4iyMreok>&md&?f4_WLdAg7ODN+)mOhNI+oDrp0uTN*mT|%z} zP78SXJ_K(F=)_*t0HVsgH0H>SeP$kTpqOfAeQ+KzJ2LV~j9+VO@6AQu^ayx)eN|Ok zf+_WN5*cRmO0QH+LRFKyB1zNm31c!{44LM~H*C>TxO9Q~_>r#~Q>AtfnXYgV@&gi% z^zN~P#)+-nK?93dD(S~moPP}rmeJgipXZ=pWOG41ZSwoaz>GE>6}yke3v(Z-q%VDn zJ+}_{{zWnI&eum)%$y4*69mcFc!fGx0HFIOz*ix< z9*VQ~C~AY)P}R%Jc42l~=#`v_nB9b|>7!y=RAc!rHhoBzp$VU87KOBtjHRiF^(DE9 z{*W1G>jifMn02qA%2HlfRat)87K+R3`pGx-a7FU;YI)OovmVxIbn^3>y?V@DMRm1y@nSw0xzU%rX{rgLw2>aDsxlL-Xw=a<%wIq?&!ZSS15-a zqT}zyg)fD>tKWl@DjFHhIgweFe(~ zm8#KYMxJ+Jx{#CY5}ctnWIdO8aV|lC!K0sOFjE!SGt^;^k-#*Nqw@Fy&vk&LyTsjG^4EgCO3>O;Z~ z_E%zK;zvD(UnyG9aSs;f(4}0{p11b;7R>d$Ioa9;aV3`S8rsMZ%BBjY>MYgtPosf5 zqc1%>ORlWXb(J5Kbq7Ril5+mErwN5NyYii&ZHs_NCpmm=^g+r+WVl-O?_>wq_rcip z%+6*bEXOkL^S60vsd{%TP!$o8&Z@};)a|aOHjiuYrMEdQ;U?8mLF(Qz zhr>90rj2}3m!JbuNd6bOWCtTMn@T&zx%w6U#`|iv^Is`p%`X{g7Mvm^UE&&#x+!l~ zVv@BXx-yibck=SzaVTQO{z(P_(X@+S%c|wfco5P|hg*wOpX8uPKZjs4KzpBeTmkvH zs6&i^=qau#S3zP3?F)O+q#+uYPwOQE7M?Aj@E=^nexzx>5Z+kjh2mxix#;2QepTvA zfFLTO{;W(zCalrd=J(%e{i#fn)jFEeIO`sXhSFY-yON{0elOmcT618;m@ zyA0T#53yGLY-)XcHd|7XIn|?_8v$?H5SE1}9L%};+Wq`ov%sGF)djcsR=Ms@hNHIk zPEklCymvHR%>I{fy|R!>QTi~1Qj5Xo7zY;QyLYD$kBS*3$H((K`mKy1)`bVtTD8eu zWCb2YbGeq*_Xu&l0wntx5;RS~4AhPw2YOiqGn|PY;o0jm-Y}Vu76I zX@BEH$`I95SG_3SLo5!1{(Kz&!nM|B6nG3C{D2Y5Q{D1TQ~191Je#2*>e9Ku4{63+ zin`UTD+d~0mfy(qaB@QU3P+tbz}C){gc<{b&}U777KHxghiVIuRoOg;9(%rvB`aV}DaFwG+6l`$0Oa z1qFoz{oj89V6awepsiZr86XNMs$O-w!wKOb5G(MDo{!tq z6?=$lWuIPY(XW*6u~K5DwQ6mO#CD)+ucq#!2}KuOENQIKq=Ipvugp7Llj`nJ4)zm>5VSsMdL5( z&p|~rS;cHIH<;=ekN@%;dpKP>-f9Y=E|QPZ5Y{wh1zK|V5)U1(QIQG!I& zXzQZs-x!6cW~#dxT9?)Pm65S4AUcbx$WN9jgDipu66O?$UR407MoL;3b{stI)HxSi z>KPq%#%PO{*E~!&NZ!^)r&&gU6cho{}@MR|m6-kpQf?)wn#kauEe7 z@^45kf(3CAyA5z7Uw50z9hY38`_g((m|RjP5V_%`p78YbDL$R#I85jnQ=) zAm$OOZAmZ2vrP_+gKWR~%aoqDILotoN%#rYY>29=5iC z!`|0PZ@6Aej0d0z70yJi86x0;dXQsPC$g$To}nJ~_$Qlp2Dp2u64`c%q;J}Kow7HX zYX<~6Stg_zuZMV%hVasMjMMi_%3`z~Y2ps^-PhDeNPtXZ;TUm~Hh4u&vN_AOg> zS%#vlV;|YGB|9~iqGZkfT+e^-{QmrW?>+aN^E$6{zUTY>ygzchIMWTXjzVbCas@Z7 zBi$p-R6T6U5k3NLOQGt08ohc=G{}jS@bzKeYAxh$OJ9m8!ZK#Sa56RG`7JB~Pop8W z?r7ukSu~`IM_PI~N}!c|0qM>5fVtb*t4Kg-4R{X!k%8Bh|IXq+#3mRJXM`KGf1H`W zDi{(=WW(vHV3b`^^zDXM2hu0~m44IJ+viIwob_I=JX?PK`K6Ii#Rnn+jw1Zf40oj& zPu4Yn`|z&}q_#_T9oieN#SQ)&rIs)P!FQ>e4N%Ydvw9e`i+|?%$YTJn3Ht~PY3g4@ zl1{n{rXlu~^aaupA!bU>`hl7}x>O|hP0q8RX+rihH^_~KXJIsYhxZ~Shkz=k&Q!@I zXI5U)HqWNaV#V#}4raR8vvJ_IQ=F5P5fH+LZl<5m=yqcPjvV@UaI3}g!OsTHrK{w3 zrI)J4{G!K#@ew?+{UF|^qN%m?V8=%*{AzgC^`Jh`HRo*>yE3`toj|sPE*CSAeEbCU zv`EgsW;~!Ga2ZZ*ie!@%R z28sW>*kSCugD+wVk%9d3XV8{q(i&_L7dg)gS@V3Ytr4}Lb;@dhO(7vVRqO%!9$m<5 zcpvY@oB=*@T(B~qG8QOj|FOpJOT6}CnrOJgS%(_9M+5mPDFpX~C?yvHJh|YeXX<^- 
zX>4UHncFe|ktqB%Qke|zVY%r&vTw1$K4GP$AN<#7e40o_mrat!`Y=4OXObi3SNW7R#f*>*iK*CyLbvcAojqc+H$cKrHI?KN>QIQ=Shi=JLm|HxDq(%YVP zS2*$VsG-2vnX_rWrf35&Scdld*6EN_o56YCX9^gn)s(b0^p$p z7XKWn6*-{fupPyoW1uiMCUZ6-9HilDxx4g(CAdCfu->Qz*&syp9Wn zVmf(#PVjJPE>|E}Ac#Ib)5zzvY?pSmuc(y?@X=w|sSJ#G(P)3h8wDT*={T zR3jPI16+3YPM$`R@_6;o8r)1ms{zdRqBam5fUi;^OOJdT988}@g;P2TGfj3q9Gkyg z!q&(R%Y91^c(1g@Tx7YM{?HS8roJ1jX}0|CpYU#JixIDFxA2}T(n;|iWSHJQd!Ct& z<8SdGby4J+$TxG3+SEfYpoEB+C3rr*rfmUA$b7=*AgwCiJ%ZhTsc_s=4!|bJi9kTZ zP?(XnD(0qFZ*$zWhl_gi`F!z{{Gonh0rN$T;rcs!rqoIpxm~*qwcI7rwr>yB{9w(y zXskG2ctBW#?Fs$nmTzC7bBb0TzPm@vTm0SGHYY@OT#~c@&1_p_i=0FV!P!$0$YPxg zBQ_}s9QPcYupn`s{_zKBT5waH-!vfw&(GmX5tkC0whJP|QG?eB41g=>VuMdd}G$V&h5!*sjQ&Y>@n<2swqocBBk z)k&clrX>htF!x2tz+rww);3~vx5cksWHJiMcUy^OqG4xt=$!B4lYjA#QLR&sJ@b)Z zs@}PW)gzDDpe{#9-oQ|%Ur3<}ID?_b1keZHkbm)r{oQ$cMXyhFw8sH8Ixyv^%Y=i# zh-%*Ymha(@u(xb-k{&4it3MQv(~wayX<=~RmvGaK*Tau7&(Nn!^&TN(gHz*EwA!Mv zk$G7xukN5~1p*d)ZS<_-o>A|n%jv5szhCVSuI4>jslo*|ztTrI{QSV@AW$|f*(;IN zYG&s5h_gvsjG6H?ATImG@A4?jddj3{xk+($?BdgB=Qq7DsVr<{6Sw2Pb3v~}jovEPCv_IL?i3ZZ1 znX%q7!~KiMf!d49v0CUGN&VKc(kMx3bX~1WJYD}h>vPq^1 zjrjS{?-5NLNsdLCw@84sfmKIG)ku_O<~7P?Bpu7!A?X89)EL@PT?=pe{^PgBx%3ir zUx5mRNdC_^KA&h4uW?(IarB>TmTYR1nUS)Pu;N%u7%dojU3XjIJVf>Fm#w@@zuuhX zzKWZd4O3z_2B$nTY zb70j4&NqkLuJ;~O6vuer`K%8JDXjd`Bu4bi@oE0j%$>=qm?fMi_6sf8e2QyZac;(5 za+L&Ep@39`x*!m>msjBo9q>7}I^7Z?XFwmv=v zKhMPo=eJGnR1gl5IQV&`OOfxi<>?8zmv@ls9`6k!T5_>)`{9oOuR45Bil@JlHNX6+ z%`~NvQ3E47L>{<_#NOsEANET#E7|HP5cu>dGbq4mN3qN_a54`_znv=+)5`Vfm%Lp5ajxjm-NPVKLBQJ+g<7(Cg zyiG2fd&vRbCTatp`?{VouJAGq8xYna6ra{-Ikj%brGS^89OU1fa%z0#jfB8Xhj6VN zp>k9;S3FY+8H+PvfMBZtf(SY#O`P-z@y<26{^A4;Ll+tt(QBG{jd% zRz1DZW<-N_@6+>}_{c?3LB1qa{D;_?ep1z{>3jn?l5y^@w>-sjEi~lpBfBKdl~HxUP$9v6)#QOdadU-e03i0K9#y)*!a7EIhQ-J>l&&Fk$^q)(YUs zL=_Gvp&lDGw5M9fNC$sZrJ79@DG6>q>6CMNFgx|9891RaQ`1epS|vbVik3ti5p-} z?xGHzPzmaTY^(FbDrwytAAU3p;3cRv!*bNowlGJKfsY`x#F$Y({`8T32xBBP8Jxoa zgo@${I^JZAB!N;H4W0t?{0wQGad&Grjl656F_B1v@LnU+ghCmc277r-G{6w z&`h*A^}BsKp$>EiDz7C{9d0W{Ow}Oil`ubKY727vxz4#px=WCA--1kX82cQ)2IN!) 
z64UX`3!6%_AV;Ck7S2ecTTO8K%sX8Su&q>>9b2n`~<( zYNd|_*5DiDP7iWAD|Bx{N>ZKZfG8Rz*l#3NY!S)rfmaSAkPF}AH!(tekOX2mdh=7R6i^|DBt}H z!QGCpO!EZm!>Ku_v3^vha(W^4T1UK3UD5gnPLlq&%4}3=lJ>3_Pu8APx;-r_B6cS% zzs>Xq95wbQcc1Gvwx|V>;0E25JxEOJwK<%uZBB9Mb}p8*7S3d2OD#@a6uyyYt0gRB zj4h<5xV&LOq1reYrY==|{d!|{KOE|}kNpyWFP$aYJb#{}a!+Ee;cXV*j;p`c?vu@QPCHz5t@_vz|B$6b%QENrQJX ssQSHO8sP_m)j(y$Hx4^iyZ;{X5v literal 60888 zcmeEvc|29y8~5(kO#^AB2&JefM+s$2rAg7K!R&_07#T91QmK$`10qAIR5B%;2q!{i zER~sqL}iLN#NmC`S_gH1_w)Y$zMp&kh-2-wp85Mc>)C7XF6!*u%r|BJ6pEtwwr<(D zo1&&ND2k_d(nR=+SM!e<@ME6CrUMRpY|I>-j87b)*8gQ=azt#am9hDe-A9c7ay+%S4$r~>H6#*>fVqP@I%Vvd7j}ip!MJ9KMnk+f&Vn{p9cQZz<(O}|3?E%eGk(= zkT`;Q;X_@;YM#zRKRNtrLq+qcD?K$%nJYX8vpa@+>IeJ{J%_%i%^e4q(uXFrqbW3c zU_8f0w7v0g#&G@SNJ_nYJ8S6sfUNcKy-79b!U(0)O6Wp|AL}e_V9pixCRQzXXf=1p zb{Tg_b987H8M@VSsJwPk^*FJPP-gm!)bX$o^EIJtd**OOl zEf4&lN4Ro_oJNPT$gJs|7Tg6-k1VKufr%$i`(eo)x;Qe#e8HtfkIW*pct#hT#-$}G zydYWO4=3uC63y1;ELKYAV#2+$*c>u6b##`4M2R~zI;-;%8G55=>?&@{Lw`Q*54L8@G^9fr z!iJ^Q)A;lmjPdHV>Ze`B9gtd$TtQ6uB+DHmin3vjFdx5yNPKvn3L&LnJ7UW6%a{~> zMZrv>j1aLA1}!&$`M{#@%%)Ze!2+Ll%w{NPU>X6ESWiY<6VJE#!2(?pywyUtu_xuv zWFS9?zh1#r;Vd4m{pn5MDll{)YdKe}KS#u3t|DS-StW7B(ij!%6cOwG(-E;oZ9?OU zwdWvLtdvo)g1BNWYvqbnH7b@CSF938u2@#1Vx8xTwIz)!mi(w#*+eYb^%|~NLZf2U z6R`x(#B+`#Pn*T?lpq!nB&?^-OwQbaLg5d#4T-ImomjQ%0$IK6$q+xcOF_J7{Y}yh_we!e)6WXAQ0`o0*(^fn+l!OME|SHJvm8YhV(lwAkqEn`ju)E_Q|6H=-C}r(I?+DMD9iCO zXcY{mq)nzwAXKKolr3B;(+QQ+go^gefy9Sct_>Ce{xCBAQ$x z^RSTde`3DkrHz$VgHs{CwqoF;-^2b2X{G8Q5*JWRYbO>(8aWn4NpC1G+6MFh5zS{{ z>5Z1b(vT`oWjXGLq>U`;Ljr$0xDSk!&P-0^igJ*M5=2A^0#T4pAyFELDA$Q7a%bCG zxuVDuQSK8_&Vwji-%li>(1<8B5JibA$_^sRZ6Zqjq$*@b^XZJlwODu>sa0sa#!7?X zM8?jGSjB>*MrhetKCKR*5!A9EgE-Q8Vy@H~$=OGUCbL6SbK82dIbGk1)i_ncASbhe zF#qLjn{6mr*LEMr)>8mOmpQ@OC|IG5=+8Uk16iC~!!^J>HDdC5>m=+ZbQ-qPba6}X zJmPZG_@g&rdz{5SN6(&IRSyP788W-{7|}Zq!3l35Wyd9z{hpBGmT+Mbmfnrr1--co zDiZ@MYn4x1OBURqwL$}#mbD(f!w>5$M<9dM(!eF;Y0z&0S>aeUb#iwEPFezWG>3%sAkZVh8ie8&2-(S8q2jSnG!Tjltd?;^ zvAn2~qNwpRlE>pRc0xPThiH$+i?zoKgr1YkrqsEOD1uwOEtp(&dOKm9XC+q=HJI^; z@y1HFBkRo}ieNclXHon+egWeKF%;8}%idAUo-JUttqf7$X(g^ENI5nENNwhZfbs*v z`b$ho4ER$fC6p6uE0>flA$28wfg%D4z(wYIE-7O|iULw{TvA?KQlyMyP0n%@+eExs z>@FdttcA72KRtQ;QQ|J+jk%<(F)4-@km8mM%&lBfg@hE+g*=zkH7==WJk&YOSgC|N zOI?z?ASvdauP>kWgRAzEgGB9ps8Xe<3^7X{O>W|*T_DElBvs?3_yvO8`haOfxS*ps z^mNeQF0TG~9}%I1F!01LF;=3X${GUk^b5kC(FQD3@*+zfvk^iU2_aI`G3TCbV_Jd? zkXU#mi2Kp`usse0$1eyPvF}~NLp%Y-?K6|dCvqVvNRP-g7w;U6mSs5_&Eaa+hqw~A zu#*NwJjUDTKb(wYiH;5sp&o)z`?x|$bA_6Pg`$B_@gqXzaD~#qLfr?U9JxZpbA=++ z9dkAag?1lED0!|>ld({OAXNE?Q1^*YG$Pan5Gsu;R5@3uiA1QSS&sB!Zj7?75&iMv zLXtWCY#U|BoyC$OQV9~NVqwT;WM~H&B2^x2mK^Ea-O{wqSiIn1VEVupzeRVUkx*PgyDHO*I^pNZ(^n=RE=QJ_TgPIt8qQRuMJ)VsfU z>fMhz7w#RJx>_r(-?aK}>CJ@i8!JETJvmOk$U$?Xhw8gX$*YYW8x=EuE@95bs!-em zOf-U2OmLWSmzxZ5foyvoJM^u|#!AfBaGeo^93^=|2IoaZQtnH9eN#5v{ZnRcCaCNd zswgD@U^@@CbE+CWhkJT|R&I|3_cog8VLSnTYHhn}7M(p*Yc@Rv)g^M9(i9KJ>~yMF z3SWXIfWuPMtiSIb9K32Tb+$S%6uwf0PY&JMVG6$(&BW~Z)`&LtidmyB3PrtzttzEI?1aN00O|%CRHw6Y7|5ze{R!+>HiS)1&ZJa5l`7Er4&RGhFLt|f| zm@Wo*fIKhP5O^ax7foQgR>tg1vEqea-KJp~! 
z7Z*0<#_YI;E?@=4&Nbiu3319D4Om?G#_zE4QQ@> zi^jQV^w!?A4__{!Mf(E4~Rl52uMXpih8L&Tj?ZA#I@E2l)t z2q4oNy#r&gFC(Y2B{9zrY=>0kw83AfTlZ`Zg<4mknHTWN*RRT@?a{Nsx-^<1e<(CI z2k8x4VrgTqqn*1cb~mlJ0m%3i5|!z@{hhSOQHC)YsW>5Mb;3{{ZpVUKAw>lld)o~J zU`cdl3>o|h{@93#tp@i#A--SGDi^g?>5mRrolNzxn+{BCV#}K8R-a8ehKxn+)d9=Y z%0igXE3o#1)+6^lfZdmR)Ecrss!AS@;*vi|c586W%GhK=bi5Pz>Lxx|W;Ip;ZNpW$ z;{s*+Sjq?q5*u;zfib2f4xWMw;8ORc>`-u1CufPX3-?E*S5!UGGGe<}6u9ESS`q;txG4o;Ph zL!}Vw$|4>pYes*=VXN*{4H~h+GbQlM4NH#BRXCWm1M6$zrN7e*SAYXq;GMks_$5aj z6B+Hn*roWA^lrs~OMXB&6^!Z#s)PnAE8IsQK|MR?m3!icFDr#WY;}8V;Q5Qe9`^eL z-JgNgC~Aj!1$eu9HW|~Xh9$t(so=?*+Tm9|Tt8b}?psp(2)P$U^BRr&9;_w{`r<;9 zY7Y|jLB=@jU%`dPaMMl4{ulVt0rYADT}I?$&sYP9P`xFIc$Chel|}obOZBl!oxji<1va<%A?=}0?c4C%a=^J+++q?$0*Q) z-Q_`e-@R%g1XHrcnyd2%{8;CHi;vE2>?N+mOt5;ouI3wpIURmXFL=wl$NG9H*do+U zOVH@geH#%U*pc3=Uo|W9Ii9^3@xn@!_TKd=1N+ej44MxZU~RL91iI*Zo*lZi;&0?2 zN!b0DOM^KQ_X=7C;kynyC0AY}(Debk%7X{l40kDC`Ryposm8a!_9CQ#OIX&ux`S7( zh?5z$T94$Fy{IaqBF_ARTt+4k_zp19N-vjf03kuF6R%Y&!Qg7kMPb zvW!Vyr=R&4tfMmyPds)(=+1sWEX@7;HD4V%1i^w#2ds+wpbEykZP*2=QuYom5;K2b zmhLazny8fXL4Gn?_<=7V@$rQyn6HyjV0aX_bk-imO8SD$qqQDzG(Uk40|9;8p;1|E zld&6`x6-WZR*_}pJoA8`Ak?a%nDfBPst0Fl3Pm82^kat+a$TGOV)0^ri$0nOT6th4 ze7y~_*r2?@tiVKXK|*V|A4Ni(O0rM+@Ff|EjcXN(G8Y3;>mQP=u5(!S1pAvW(3piC zCJ5l6GUsF&_NR!CzarNsEUW~mg7J1MQB>jdUy$n>4%eV7kWJ^7?H6?a0vdvHW`o|b zgrf*zx#JMBZ+7flPZrqWkpAI|HMT<^mPTnc9VIaZb_irH$18p7N6 z_T+UnAt+F2s@S};+uliATOK1xz5p5NTbVG3ol78D0+lN}oJbs*y*Xi*3|@&d;w@;X zfOdGnXN7|0iD#KzMcKzl3lollOxxcCuyqlOR3NdP24r(_d(UYY%oEC816)b{Lg~VrfF{zPgRx-2TKJXV0%tnpY$qF3%#^fXk`a0D3n24pYgCU!N9Kz;L24&D|* zG@-@(&|rBH6;DIgk;Pb&DBzc#dn>U7i9_FdVY__$d=v-%*q3VU1ygL!y`9J%Jq8%1 zgrx06tvZS>>LYZG3kQngb%46KF!sd;ng(fb1d=Ip%g7I34m}6D(=pTceidn1#mr|w z63;TJ7Ri(w5#~}rw#7A4awXzjb-`1B(|qi_^eLU}ZQx#?5ufy%H|`#MHe;Euyf}~m zG<$I@y0pg!AcI+ekv37Rw8bQ7utzsDi<(WH==%z`ZtPwTkiL}&sO08rxs zKM1$Oj3gExbxdL;lYn_}oAKPV6lxCoLEMy_F9W$HdzYZqIt)?|B4JxnCbJj5_~F!T zw%)f&mw@quXCpO=`ZcT&O(^%Q^C zMxos9t7dHtsmICG@hb$(qd$BxSOfkZfqO6NjgV{=1TIz~RnsMW{vLo#jT3*wRwT80 zBoQjBB_BKtUy`pXkNC8hiGaC8aBJ`lA)hlOx`Gyz9S&_jkBXBW82y45kHS|$&`3F7 zG6DHp#0-QlmtqM%(IQb&#Cy1}xLA2_fEeeZ7p>9cQ@id;@Qp(_q?2&9@|097l<~9C z4I(`idq@&^Q1!;ecmqiFiKCF$K{qI=hyVzY%)65#WsW0hU@U(<2vtdDiE}614T;&Q zi%OjLxbgQOvhQ92DOM;qv|+)}^#Gp)(2;_OWN^r~ffCLi33qjU6an-~T&$4lo>Kf9 zs@iY5vPcY!BS`=i)s-1sF(k+KB>S*SEC-ii9Na=OLudcUK5H-PT2l|6Lf$CUFn($b z2^~Zz76`mpjo>xn4Yu2n1-Wgo2|GweOJSp{kJWGxtKm&UpfoqF(DtVsCOZ=Pa;%0M zfbOFJMw~cPfa{M7vat zL*&5}JdE&b$9gV)*_we!`6xslVRhU9bv#?P)K`8Yj(c0j{jiHL{0P3h9NikAv}%m2 zMHD9JuR$9NivG}I1i-XHutv_L zHz2bS;&~7D{30lrn?8rrDk&fXadZ+_UoQdJZ!JSKZ9g{ThVL^)W7LClBLUAmh#L*& z5@7i+n~CLhWMDUiw~X{e3?)UdRdu?=Z9`B=v&EgJYJom3;?S5Ch;n6yEY2YGN>a(E zAA_wFb1?>!i>NI4Q9)7+z4DicH`gp26e%)We|&rmrz4h`z{#SL>Z=S^TvMok`t33ptpz8z2T1;dgq z3~dIEo7Al!kz$xzIloKC4#D=!$u6G7>J-bjpit)QJsBBqpTJ>>$FrODzR#ok&ti0k z8QR3faUzj7HsV%W?#iu+eh4L+e4>~OSYBOTPR8Y~`zE%1pGO5{2tE`2v!huclQx|m zzR;$a^Z@MN($M}*?YZ-m>Tw>QEH=1`bgZvary{aeO2?e_$rQG7 zG?We-Cpd%AJxj*N-qh8-?JzQaE_O)Y9SEZ!BY6C1SZ2#(rms3Fn^SXzU3cEtCS{P9 zdaqsmd1%PdiGEPRw%PpUD~?S{S(yRa_Szo8&Lw?S?lA0X8>B-fs-5~in=yX%QqkTG zwL(_M{DrE<(amm#7%mlON3uAe0Hc6O)%|qF3(k2|i7Cdc6$d_cN^MPyhT4(oo6leC zcc!;Cx@#U~5$iSN;ryKw*|kyDA=6nfEpJB$MT=~hdA)CHxFOnp+Rm3p{j8{~5b2es z)fva;54QS5@wKaaEwHjEZk+dnZvr(*AYgyiJ*l!cEdADLayR(e-Llf#dXf&KiotCq zcH!k*8+N`B&_1nHLfgE4AU;J%LzOn2VtJXcL{if7SnhU5bJXd%(;4=>DamDJ?el0O~uXx)o~kKfj(}I5ZtP0B-bX3#({y4qv;%MkKyn;XDWjTRl+X1O4i0>h4WmwCXxO6bQZS zH^l(BUA;X!UBXm64p`g!htxElXESORP{)?9+)=T#s7w5BzHPu$97ym}e;RIDv|s0WeH`p>O8;h*5@TU$IQeBAwXK8K0)LIom+wZ 
[GIT binary patch payload omitted: base85-encoded literal sections (new file, and old file of 148827 bytes); binary diffs are not human-readable]
zz@)_F|6I3|lz;Zxa$Q?=5tUywEk+aN-?o|X!uiXl%1UcbyHvclnXKw*FduH47A614 z-2B#rGwK$EOpjV}txYmmyE^q);XzksSiAT~f@xl4Lovq&bq2V*jiWNyMF4OO$w_`d z=u?6{Or=&jsMIUf#+GMq8o=oD3MH-{REmJvEoH997~W0*lgkaR=TXB&^vTbO=2$h- zbTo*W2@u39y4I#LJf7&M)qQf?#?bdXz0cto4h@St+4k``rY70vyrD8HsFUGc%4Jj4 zgHz;e(*_fYKTSbyUbQd^;vjV*$h&BKqep`RG2U6<68W~`_|5@)-OVEGciXoS&c^$# zGUAl(&geUM>b6Z^HE;oDznpJEpX{W`@RaFRqv^1EEa<1PdKAlew2kE@DlW7_WjC7a zed}|hFe>&=2^I6QepKtCMLRrLWizqNi*+*Jmg1d+ZH)gPpYD^4=|1bSjm=8{_ zMc;dF!0x!Gwh^K2VDsp?MrG=P-^>HJogsqHi0$|WyUJ=yM5C%F**3^!U0+iBzu-XP zduh>M1m9PJNS{Qi22ef*V;Asy*4V6FMCvt)9DAG9jfl(mXDs&M%mMvYnHjW_6D>(zPH^J(UVHmkA2|9;iV#t@}SII!r`Omy2><%(E>B0O$) z>^)DY$&`n}M;_*IHy4?X>yuHo(;Sm^2#8TWw;9p-mHMRebU0@v* zP;w23bLYb6(Wkw?Z2Ml*<>Gn|L86Ne;L(OTYs}#Cx7q31;|h7gnq3u z0C2^ieWCUacH8irRIFa!18iRDUkin*^&q;CT1*lic%&Zc&=!(T`7DpJLdAK8wbE zw9+lpYru{s2E+o7QA0AnSOQUExO`VKv0OzURThbCIg8;HR@+f>vrfr8s(*k3v94mq zXEYxRjoCx$nL0UF%?1`H&6nVr#si11hAb$r=M zWn|xz=VrS8;?P5Ge=w9zwg_-&+kFbJKmOa zw2Q_FOD;o1-b1P?%`)z!J(DB*<uc94PmQMW&*YQTul>R^tKCz|Raf3yWoTzNAB#&t&pmZ4ZEQ()(^gry zO@;-Ui;aU+)vNxBKrRjPH|c&|$jW5buBLma%WFq)vK*aH%PrGQ=(qPA=pmE;tm1M2 zhN0x893sDbRYTV3Dm>LHS}Brj-`zD=Xhc|nj2;vHZY2L^*!zTuX5l}&_Y6-bt3(O3 z(pL_4k272_U$?!quZx1I<6bMsk zt%B9rP-aWIF;Cln+obZxDxUwIQLAlD2Kj&FW4K>W^E`iC76STOwP3htnipAOjoqSS z$|io78=Uo^yI;O9m(ia(Hc%(iGIN(70moeKFJ}~mA5Y~1RwMuWQN^C&f8zfpM7$tL zYIf5gWn>9Iw}{`Z+Whp9=v(w)4P9uaIHm4J8M!gOyJA9>*`!bQBq41w1#3&}2hX$Z zH$--rP=1}s{jV{g48|1>nI`;J9|$Sv`>rg4yhz zXp}-`SG@Zjk_C}GVqc9Q0||dwcjqC#0pXY$E$6U&aMk}ojr6vW%lRub)Vf~pn{FrActI#bthEJdyl}{$j!47 z;dyQJ0@?hmZ`Mf)yO{0BeA{JNy?dPXq7KgN<@96dRPHdE)HziMlca zq%6w;*=>bPYLl*w=xjzRiP4s>wQI(Z-cH-ETXPlC;7G1b|$*W$jj z^fq9IbDf*&s@7GlKXDqpGX!<3AN;R-K^yg3L)MRS zqWW7>Ql_8c+rHxxF<`+7H`KAcJmzU}fxGRb z)yeK9Xoo|3d|}Tv0o6>3gTa+4s`pAa))@a+k7+t3D8y+~n*8M=y!}iKE(iC)bkiw? 
zdwx#f6{97%|ICa}bz#erp*6-PjuY9lL)R*iHRd2`Oo|Kgeh*VQ>p=gFyo-GUu6zko zM|9kdeVgX{IGu5w<8{e`d8deZ+oODmO2O``U&F7XYcpa}m%{#I<2jcR;gu%P6>OxJ z#cOa~v=2U7|8`4SD5rD1#@wMz?5Pj)I*m4dHYzFo7A`Ht+j!XRakj&W@g~V@(>*<6 zU-d~ki|5t?8~VAZe1W&Ybk^$7nUIpTegmC3?E;(k(Es`?df8{Ky#CsXFH^&jG# zvTb*Nu9Lq&#%V$xG({Fpzx5^pnxY#Yu$m7Yj-4>KI}9Fe-*zz;=e;S%ZO&c(6yuP$ zCioq{a*(C$UzI;Em4kesM;?wTnl|*Vh|pc^GyLp)BAV-3|6+2c#jUe)H!^ppd`gyh zZX+i~_-qxAtufoZA$6$k;Goe>9|B*SJHe`CRJ*+_!LA}NyNh37UeyL9Sv`;$XVsB= zu~i7soO<#FSSpIRU4xv2%lL1p-cg>JK~P)*Z9k>3g)&I)W$A;trVGT%*a6NGtfEIEr7OhA1`%icJ^CowuScOQn4Fic9qR{hr5J$t$Ex^GI_>;?cooE;)MT})XU&BzJbdx8W4#U zEn7u?ssSDa&n2@$P2HJu;THZQ|Mg!I(KKcM5+009S`& zi$MBn`kq%6_m#C?`_noU<*$yw`| ziKAzNv>g<_9W#F$<}SmO7s2lsf_PAI`|jo9*$rRIoI5tN_TyG>vf+cJ+Ns}tDK5wx z!iA{|e{9+sVai52Vh@6{itvkNTucfowE5mgLFM~=6S9xBn7X{TRb_< zJo=7|l{{gPu!%NaUIt!4P{A7}a76Gi>|8R8A#sfBg=P#{eIi9_hAW+fTo+Bz2iiPY z>SMV;BOI|Pg;vEofmuJ{r!Q%pZJtPJR4zmh(KIGK>V13e>ZjGeiyZ(b9HNN_yK zZh=%00Q8A&fMt+t%Ik=b>XB^J1GiC|Q41iGcs3?9r|;{A4T^rNdN{abw8cf3Xvpn% zrk2x#j&DB87c@B~9O3DoJY&1z^n4vppLiPk8Nrb9U4&z~*`7h=i*}TBcnvBn2s=k5 zMUTy(@4-jx69P1}yZ}08bWO||b8?`dzbU0W3udE*XI{=E9@59P)bC`-Z?XH9+~uxg z#?iOY{q7@-pLZb7>-FiWoWQFjbY|`*L*4Y+0O_$a|4PZIxgm*3Kmxbluoc1BL9G!G zMR3w_Gk{27ys2b0-C%=^3|gSkEF8kLCUIa%K_lKE$;S$Z zJmz0HK6BE@nr2)gPs%;)gX#559T$}VaEYAzaC2AhQbwAm8-+7>EY7QgBCcN;DTO?Vzjywq_!7(H`>ClTLeRw7pdmhJ zpG}%<{B0%qQ9$tN@o|rsUJX9usNdFhQ+JIk{NFBH-0Q51y@==72z=Dc_?C8>Ix9H8g-}9({?jG2V_y|D(k?n)-%TNewlw;%OC;%8UWeL7f6A% zB38uyU0L7%s0pMoDIL?9dan+=$(nH%40n-Zen-DFH1SAiT}?|Sz)9#saTy%dR|AT^ zL@OKKe0bVB<@LLh2a)~CqR=e@p)ZltP3lKS0 znD`MsQNGl!R+lp{9l=wjkL*SJCp=)117C$Ztr%ZLa$inK!>=F7pFsLp>7kt7Szw*n zf|kR{qfbJ{9!OMA(xx^Upn%cqbd~dx8x5E!0FN|-BM-i;xgUAlD|jb5DW_A;PB%0@ zBSgugUt>XJ+hVva3nGzG5tHmSb0!?&k!cnr$n}@QUH3l?4Dq@7iyvb^>#UyWC~k2S zLf;j?V`^%fs~NO7B&)z<{L3m5Cn0poA*o#4b58#^1r zr3p(Up;;!W{9|W6DF=|;u!i43ef=g6Y@a}72vljrKLrJ<2L zzcX!FxDvdDK-mp(nKWT(CFMUAW$nMlXRN*tnBn~0_yId+nKjJXUUx1x-y}GQ&|;s# zW_j5(VOP^HKgAY9w(0a$sOrkIV@|vepz-gl?kb|-{5nH5j_k*12dH7*ecmZ;~98>HbqG$@_S zQ-qeI7dG{1F3;_~@-XF_saPNUo(Nj?KmW?}hOh1oq_a+? 
zEef|)Vcqv)IoLXq?7U7yEM5@5YpqH4D=$ZzQ00VXiS1=hgcpvwN;Kd6{h^XxMd`?L zw?bDL!Df+j?^1MGB*JT930lPGWHzSxVpxk5{M;)grblG~Q^FCi6!fx}l}@6ZqllGx zq{QW5FRfZ#bE?FoaKz!?U;?@_WnjO#5ucL(KwEnkw)%Vv>iLf40T}_rL4x}4JiYI zGf8%x0-`@0l#F_dloq$djq>^AnZ-T6?)cby#gg=pg(-O8(F=vo(Jnk}8@E9^OA;qy z@IC=TuV+S<#;*ENbgs18%~LP?$4|Z~UCe0`zr5bHHJB}e%y(xTeSS0{q9ZaD7REGT zseBEJ8m*5tL7UKm#C$gEK1beXjWm&ysS)gy(ORWvZ{zwP=lgIBGz^H1A-Bif-L7s( zXs$1rNdE&rP=l+sDs4t0wBR#S=(!=oFv#{$z zUof=X_{6m+dHS$)^91et-DZaT(Szee>uqL^Jj@r*KF-@{0nia3u+NsxJ4__^w|nxC z#_ZcHm_^leV2SFVXL{lSnRGF)cCkJB-koJu(XQ$2s{Pv)-+m~XOoe|T&gu<9a9GQv zay(kHSu{0Q80e7=boI5u3=f$;)dI!vIJBA)(BLD2ijXm2lWF%-Y{ScZw}_PGaSx0w zmjp4M&BVlGPg{Bqdkz)oSIv!AS%U=fbJ>RP4FVQa55c;>q(_e+sNYGFNYQD~old<2 z6dRk)0_jCqLR-vkhafwHN92&Bw&{b#-A5rWFlt}e1@X5O{jOI>*FsRjYr6Ss=1H*= zmo;GPxxdMXnFXD!MXxVcf6)*+;~4+S`?tSSW!rL7m9X?5Eb?~S;~DgDmXmM#*6ZdA z)9Z~48r(ZC4L;ZHnF}x>CfIFz=BlMY3R(mCp|f@o>5I{)Q96v1&H(Q$k4yV#01%yj zhWrca(=Pv`_b2Q2AeO)i!W3X*hf**v*)uz!$BDF*2z zS9UdJ?t%yJYm|Zt35L-PHPVXV%8&Tp3|qUtN9l^w75~c{G(iQAKzE>-u#Dj+Usl~%TT6(PfiRy z-1yF9ppTwT0x?o)KcQ#xFc?sYOzPoYb(`{w!_sVWIL>^HP0Vuzw+=x0UX}d^M?3mK_zFiW<(YNk) z@hEGbnD^+FUtV9^3+SZEK)*;77_Urc1`vLp+G2ub`vHF)(1a(*-!~nW3Qe@;Lx^bn zt-8+rem%8$4D~AKXKOQRDB77$@v6&cEz!xn9q7JfzF}p-|IR?)xOC|8CqO-Tf{yjO zUWqJ#txv-~ONDBNfn(5tYM=I@xLn5kcVq{`#pcgz8OHdqTJYvaYTCeIU}Y&u&s@_J zg9A~5hECaE>VkZeffMA;Ww7rL4*yqkX9$kUFSRr|sBraYmQeSOc?y;9qYEmB>E1iC zs9ClWe9X3MS=Md|_?XUxMk30#n;h-TLJT5Ya+Z ztQN1}tAB&F&*zoU!Wq9Do z`XW*9wyz_ZlV+?cM#JcQTrtehuKbJOt^H?wqp-^356ZTefR*1}*79C5%Qy*`SA|RB zn=Kd`*0$RM$t7#cp0krdFIjx~saGQD=JYdoH25MeXquYmg@yTSx2z3@D!;^itk3e> ze^Q}Djt19J$nzTqrUGZJgm94Eb?haz6EC7qPk-CZ=(6EL*V1t|8~^zf93U$0Uh7Ef zFj)p$52L4I`f0?S#37r%Lbvq4rhY!7qU!4VyOv7=(Z7yNi6a&I)nfC&07js3L~Nk8 zN=An8wtE@(R=nowABn7wH~e626AmE{rY9R!cFQ0A_B)FI?1~n88Bh*wa0Vvyi@a9p z$h4a`$nK5xY>-&ig$KhLG&Hj_EEcKpB94q;$%-bPBQccE_PQ;Oh3hI&T`yCps5Z78 zAbvL_BM$^ndjZHTPD=x7$44uxC}na-r%66TsWk*Ow9V@F_t^IB&IEYp1+S&Y+5-BIQPk-rvJrCR z+CPdwXU?s`T5q+Pim2biW_&M@1kvZViZW6gjZDp=?!Hv^+xkD9o%d3@t<;4`(togy zMIXNQZH}TR^AR15oA9@Dw$maCqjtO~A*g*|{Iy{vSWWlaSKtImV%zlh0>7&KUR>B9rGxI{ufeLCDb{uMDBezx`VO|2Dc zu)CPlRvJ;%&#eQ_{ZdK)y;H4(T9Vvqy!@BeB2_V=Q|CXx!5z;A5`&ctGE4fz7}r<^ zzLY$MV1Te0;Fm9_1&rh8Bao9u^p!RQR^Zx%@0HMzri4wbVxr<%@j|D!#Tp9~xxLfk zE>H3HOJkcahxLsPqV}Q|mT|h7Q~85$3E2vmk0G4yuS-a{L*_MRZbu(0^CnM{F5Er< z89(i)TbUqxjF_gf(5qFkY|9%9%q^*OvBTs`xEvH6!S1pUpXSEhj|VXK*GFKKGJ>#h z&^D91}x%FnLln(fn*pKF)3VT(-HsA=B-}c_AYeX4D@Dp#T;Jw zH7ai>(a!n$I0qII#g&HtEi)L}_=qS&fB5ue&(Dmk>8Ji}Vyy1J=*cwxrSh^g741Op z7?+TtqPhsafT_=FZd~9yQVR|Z^T`l}D~8S3zTKyCHl+V)bzg6e1DegLnjO2>|DzsJ z6?s>>ohwpKH<0-5KkjRdxDJ|z?e{TcPC=VBA4GO^Ylpp`{pu3tF4?Fg&q&%nh-wrx z1+-3f_FkrQ$;2@>(8JrjSa%S=>{-%TZFOB@e(ScVV7l3$vT3J?3CIBihy&dR8WtGV z`1jNs0^N0TpJc8)ZV-2$>wk$(Y}o95mFYk?aX&*t`{i1&0an6F1}}odLp|f%TQr;# zx$|gD+^HW_M#PS+d@>PRdg1l+p@F7S5KVbGY|Hn%!e6bHWA++>qge4`{mj0>V90j9 zd{zCCdfWcN5&FWfgINfQ+i1Q7;0C0;lF$b9niA#Ur3v;2z27dkJoY1Y!UBpgZzQd@ zD@5yU2C%oNki#C5T(}LL+jR$i*o*LShD!jYlkdmal&vh-^GF+`(`SVw9v6$VGA)Oy z`=WH2Y12E6X8*tyGxRYVZ$1Kwi>t+~coIZKEJKqYGHI&4)NR~}2Y8rt=?fs|`o!qT z8({kHj)~MH&ydTdUsME^3g`t~hD^8Uev1>Qb?!)cEA;4HQxy|tL;)~}8L8eKt7|42 zmPP;zf7{>qC{#a;qr^LHEU2sUa)2#!hj}cQCngo?D?0L(gtiH9;s^oU9+BXQ_c&Ge zq8-{(BvU+8CBOpj6V~k^msVh|Rg@73Stfc@%d}7VWY5sNV01RD;v@H;`yRee=I^F_ zb**qNy;1&`J*2wDO!ALQOnYgZbNDs;$W!-?TvPujCe}TqCDZBCgZ#29Om+Vf`Ic8J z?_)jEbNq)fl2Y{c9j09wos}x1i`VQgM=aN5^y!`<~@op~I@$DQh(7?P2T2&mvIyoQe@U(P5SDCV0=U7$!$!^35_96{6R#y=V< zhR2Y9QDpV%Z0MYX)ju4Tnhs3DPApfP=@`eXN_9^CpBfcZhPLlb)c;24!>!cV*{d#I zj1yq%JdK^6drL(8JW@a27Ic^-@L)2Fu2ot{DnUsA3OdjswxF)+f6BVwrvM*oj4s=h 
zc>jCz_dA}|x1bQo*+$EMK@=hLEi+M<`EG<^0i%qu-GKcs@o4~tw43c-bOCn5_wtkc z#mvA6c)8yf{M0a0Mr*I$-*&jZ(IXv>%^FoX!kh_;o!<740`}5q%!?$|e>@j-; zhPDP{!1d$cM1xY|Nb@oviVCl+Jj@JkpmWEzh%&tUyo`X{XuYO&ewnFmxThW?(o0sg ze)GdYnn}?_I$@H5Qu&6N-=miu9?8zA<7#1XTs>v!!Fy;kDdtLXn43E!Mf{rwUBEP@ z3$uDNkGmt`bZPol5jMN%?QcQs^po}AE^lypZ@TBHT6CJH(Flo!VUH-1J5TjF8QJR} z@y)K?zgrTAK1UiM;;7KYcn`F689EdZMUKa+mQ0Fu&+}NjyncDJ7Plt`H~I_ydP8QT zt2E(m-m)L%x!o6b+9t(EaHjs0Y9yR5KUPfh(%OW`q9~l%MEeuW+f!7a7<^rpaMWP8 zK|8{|QjxocNl;{)FTfU+oz-2w(|t$8^(i+T%%@I$cN0}Dwx{d_a1y^I1)j^u!rfa{ zpcc3w9U~(&^&~o6#?|tfYTA&{^0_|DsVeEX9Ci)qF_<@FuBq;p^435EEWac7Y5j<+ z5NmlZBsVjYa_*ywn`}?5AN*6@`B|83@`&00lE#t3ded!2HQl<@?q!fdz44LQw@EJ~P*M)Xo6QIf~$DFd8$z&IP%;aCY=ePkMmA4brv(bwX zLN8=kM%WZu`+!6gtTyTjq#MZzg{=H$>71d+ zD_QS-1&*j2d;j0UaBOzyTlEls_WPvc9rKZ$kcsX`8(UMREyR#|M8Aa^)=`-DLvq#dtf2SBQ&>Ca*^lZQlfGXtFIu zM?kROl>jGO-lsS&%C~k8OBT|>yQ4=&PU1I7p@;A$<2(SO^|BZ}^KDQr4a@fLBKCvc z%@63MuO)wd;48~3iMA;Az}*Uy*G+We0Ypy-E(8v1?tK)yTnuMqtjc{+&`+VywCpDY z;`<3y%whoFKfD0UGm%B3T8mt z-wl6wb$)=K4uASz#wvcPJn!wgt$lB|cdV0bz{TJMYf9Ta71#>!DO=;vdB506_&LAx zU~K|duo)-@5DG-|C-OcXdp!r2G3Lm+ND`(Egl^*c|$SF z(NMTqltaq8N?!B)u)kB`mGnSZdOca+=ihpdU(kRzk+$^alwWw2^PwanA_Ddb5HaWg z{zdEgiI^olil8`Pv1O7xu}k=rwaUhRu^y)pb6v!=~tW%HD@Z zc=%@$cQi?W_pTLUjWA(!l2`oy7PuQE8oLbS2xxEzAKMOvAKpdaUEhC4HpYfc9&r%! zVqEu&f4+yUA6oS6o~W-`1LBeNpiQ$=J)A`kW`ikNF(wrWTVR1)k;}S%S zPX68qk``^?nqZv`BKCR5BqQVlcpYejcSxS(<*e)$IO?td<7yaKJI{zxbk#}QxOy1v zcHI!7!}|zPzXY+PZy# zObeO?iEDt6&2wCD84E9iSMa}SH;KC{>AZejye;&Wg7yDr0kBj&Hvx#bO3L>h$6fgH zNlM-F1Kxyk&N687S^Xyg&h7t5!{+yb_xWb69)DHN#0R$aBL&Ro_9{0Wnc&xIURER* zD>pK0J;M}CO8)DR32RVG>)p^OP8i^X-oL#x@CDRLW#cxNciz1-nvW?#3s}SGKj>G~ z^HG+41gO-@dwg%E*>l3hL;a&mIK&zxJm;M5BFh*)B+y>Jz@YoHqgUrPzsD&3=F3^| zMgF;DrL#Yzqe9w7>QFlxb zf^8;{I$}uK5L~ej`;6*DRUg=eszAHTDQlutKxdci}aiNW~O!8BTkMB zHJv`PL#p+Dy;%whrAZy5k^2(@Y9ApPIcl}mPuTGaD%&#B?sDhdRsRPB$*9*ww?FEH zNs5M?sBa!uPXYN;*Gt|qK2nC;%hcpUy7xXoyhxXuvw95H!CR2)j2IAOr()Y7Fx6G_ z>Gt|&%XThRvRa0w#w8c)lgbg*OT;RzGg_#;{mHsU%e+1jYET0i8>hIEoaYj8^6B0Y z9Yig-$qhVnpkEAO5E(lV2yaruUzz6&o}ggZ?_~$M=>gA3Jp8uWgQxZAN2a zs~6b5d(hjk3OBdO8?{pR8htFCg2;i{W4dQjqWMFIBn`w1J2Kdp3$Mx^?$i$)N$#G( zHihHL2{4!7!8swo1jieQ5|;3lj5$Nwyd8P55cw+bu-R@Vr|osPAYdAV&p;xd`}%9J z-jT_#)yC1lqm=12Uy>X>hOXDizD~2Ltc#}rP%vQsho?MMa@YfzCqIbMsS2TGhQEIL z*0E6a20b&ner5od;#k&AiGE%unsaeQTee|!Oimi@kSt{Hn6bs>#FmYm+AdVb4BxtD zn9(w7jdo1D>W!Shn~j$u@0IFofKxwu^m;1BdFR93_(Ph*9l*d-)$cyS0t2+9y&jt^#YH$P)kAT-Aq|4Nn;er{25lTtNniTRTAG*gAS zZ)hHQ+k&r8QVO+arUqE1(vu}~Ong&+p_bM6aMdIYJ%pPTUYZd#vbjYqgO7<$PNAWy zdCyIG^1!3QGv|%4SB=oiXlA`ZIgDTQ#oEitcH1|>GR0U0Di@SwZR7ti_114qzi-?> zDJ7wFBM7K8NXKYUN+*p2UccgdZ1R2tBF-ppwPTaWXef5wcAU$Hzwwx)E zp{DL5GK}_j2KD+IP9evb*Ue@al0|E8vHOZ^X^Ow8+9^hVcc)vibJ`^tmlt1vK77*l zROJ>BsbHd}>@Xi5k0QoZ9~c~+2J?tKpRi6hz!Nbg z2>*pisf8~Rjmx-|u)?r?;u_n6Yj?%DWPBY-4%s8#LsK;ZFQVsN8d_6xE(n6zU--_t z=s&E_wRy}@92j-iP<-CCx@xiAC#b{%|V&@ccr*fU%2iDoQc41eNE!?mitUyjHszVwkQ1&s4-yyE3k zXEB?BZ>2~zH^WD{{O4ELDtDvwB5R}1NIql57$I^0GzpOX3y}_M92At0vyiSB8C7zT zt&ekMr&8IXt8tJD6_Yscxv_O1;42xE)85Nnd)6tS5kL=C<1tjZ?DU^W!*)xp$ylyB z(Q73v~A z2NdxrfXl(}!k2Hys@Izf>^_+KVUbUskJ*~+P5<8V!RS)lkG}YDjg_dBL=J`>+ z*j40ivygpNzFHNE5#8P~L+PKbsHY*sAn4tqOqEr_5Z3cg+*_I7INO1%r zuIJgG$?10dO0AyMn!5VgG@Lw06CJ*x_=`tp0dI~bucdzhl4&CT{f2sOfh*;nS;tLK zpkyn8DaEp7zMisva`&gzuBd$|sQi5~&y2Qh8Igz_LN-49I{ejw5l9TC{sRwL;Xj(R z1d=)aJ#;_P4Mi4u9q~SZx9lF?0rec+T zZ)J7Yda+%MXdzC`8zpZT8f2tl?mK%Hpvn^V^1F?d?(+lRxkY9FOu2|n#rI*3PYj(Z zjNLG(aH{F$u{iAsb@yz37;%!OQ0Xo?Qq_H|b68P9B~yGUnTf@X6> ze&mv;b4R5Xoq7eFXCFI3=UsrtJz7F%RE_#89q<*NWvdHnm$aJSv0MuQPC4hgiLi!h z!?mFa5KP~oZ}IOUfRpmh0%^7Kk^$dz1u&4#*E#yEr7C8kjA_8TIl%G-3+aen5{0)% 
zl0wb)LDdz#RjlnK%zmyJ=+n0P`)28XQ3~B1UcWZqP?2svI&)dRhQPI65&jhU9J*{T zoGSG~N1t*rw6o?(u&pV-?riI&)mD>b&jJ+3PQsrEaLT#)FSy@jisxK;)Du*8hwb)z zd&=XgcdYpnQj8IJ?F9@oG|zD)4g8r=CNhP;6TZsB082X$${>zbYCcsw$BK z8|me2aJg+6fA%q6(?neO6tVHW)(+prb+Y(K&D+tzUnM+zAc5I~PsK?A`y)sAa&eF!T0{7ZBB<9G?Iu^4NQwcX{`A$anLIyoDDn2nC&w zBvmMUSLe5tM*_N0!=HmHkFKcyc*3pxx8v4iLKjuqc+0U;5Vp)W*D7BQ@yJhlYGRz( zUk&nbm)jKT0cTh+o48C*T@ z#$?5G8{OZ7Ov8qAy-S3KUjsN@pX=0r10%avoue7G_I+hDnNgd9b@cs?iw7wHQ}Cha zNKvCimK!0wS#~5Gs=2h7F}IhA{g3=FIcbi#D*NZ6FyqIiF#qv9)Z}WybyWATnNR*2 z*8Ib-X$-A|wv(|nj9?QR<87wv&d*GlBeaLXs_?bGTftBk(q`I|Vg{#d_le+%vD+~W z>Pt`a1!PSnD}FIZkN2(=N~HQbo@sS!4APmPN2KFgCw(-iisZZB9In?wcd21Z#NN|G zj!vQ*MC-X>TD>82&K|QTwXah|O0NsiJEhBiX{%>|7bVoQ+M(vm=Niv2FM1|~Gn)Cw zfB|sjn^jX+-JHBPwYRcAX6TY4AC!bUBzeW-dp;egwC1!Gxp@bXvm_QnO9u_5Mt2_g zpLPeLsv9!gW?Ya;VsB^CxVmzk$l|ziex9HjDnd!hw}&)$pyGHv%D57wj8jr=PmKwU z*OeB(hI`E7xX;Gf$0x7Q&taG#{)ovS@VTICz21?;dEYGMUU+idRY54!(i?A;!8>?hx6q;Mwu7CP}{87IgipK zuK%P*r-;$ZJeu7EiMNb z5*MY9DR}cX?Z*fFNawkze>PnVQFY?d;ZS=cHej-iJMF z9m_@K#^()Cl@H~ROJQ=__CH#e z!29&m*50a7f>@#t&ABblKhvmg2==cUZ04=u3>u%W_t)leX-Q_9iXbt|S`fNcn~CI` zgSe+mFz>YL*aCvp#?4n)mgI~&In20WHBN)ne;iX8KS@yi4}d(RpEiDNc{tQ12ndrD zsJK^YaL}?vj_LapRnUK9^j|_eVAmz*la{e>9CYW7N~s~c`Ml?kZi{m|X>e~M!eZ$2 z0u`%EzDWy|3w)g;&hWt*0SfBaT1gbZ`d0-Cys#?=<+RJlf0f4ot$R6++{S^sRd%@D zd*Yr{DNe`TE&HN$$ex`qrn&$>)3!-Fu(&#Y*K=dHr^tFyTbjOxN>*0Q65jZdI2rz5 z9QNd29lxAZB(KkjeK3WH`Z>Da<86gy52>{lMch$2W759YbfvS-xM&i9PA_Oyw=}^S8UgR*g4qs~E zcl>L3)X%kZv86Wym6EdlnY!V-dc>*coy40)BKLpPP7~Ck#NHeyNFxMFamuuA`c7`v z`g-j@-j)H&_n=q5k1yMIhjid0-7ak??10F{`2MTq1ltC_iNTT4YH8cHdLcJ)2hVb9 zlL^mLU&b6x7eD!pgNfcRRr5L$Hps~*jvPnG?hB7SNJ?Q9A#1=6k>|OPn6&J$f}Tfn z{S0)#uyFUdbGR5!f(cGj4d4YwiMz|o+M`c;ot@lErC!Gj|qYRn>RsN3cr_6L0t*WMv* ziH@&T=A%Bp2p~(66{5jXMD^goEdS&&Vjgp@imD=$O-V@X&1-Gu<*ll#1pJkem2|Rj z#+s{nyoobk%Xb{Ajo@1zFS3xvYyJYxtVnJX&-KC2G5BIrT=V{&HN|E#vvI>qCd5uipmILwc-?%Wv<+ z$c7?tH*^A$Ilsd_#xpe+i^b49kr>~;TdHV(v~E(Y-n*lt<|fXhCVIi2ES1(Yd@>b0jkH~zZ~n!9q{34-m*tJ< z^PZ+Y9f&>tW<#?-|NHeUb?TIICI>>O61%#BSsg}m=f_Fq1bU-9d)n?<&FIC$@XIHk zUhk>}QaypK##xSij^?5u+`NDIY7*Hg>d`5wpR|Ir%m061M`-Eg+`o`pqyF63tMZ0U z%~%voSlCq6G+)H1qy~p8a{H)N-1g}Bg_w{0+k_BnH%~ALpTXypfN~2dopE0h6;zNd zes?_ebNL^UzwgX|eqY(8Zr>9<|Hb5+qDmp4CI zh}_0)+GSOw77>Y<1krnP-Q^vODT?gGxwr~*{!Lt^ad1>{ldu`wqhJJdNBU8%XKsqr zx?9T)Nu~ZS#kDJSB_+HHSLu2Bcd&ObBvT9CrI2sOXxX79)15}1plxHa#Ee3K#}PcP zF5V|;B%-#@IV(>8gBvE7Us1&q=j){cjszdW{Qn3K?6^NzAgisaiv)fJz8awI`p~Uu zjHO_n9YpTZ8dY+lS@&h=PMd{V?&0t4pVJ3A0RzXS`s3z3vqoeR)QvdivKe(H%KQ&? 
zt3mp4gV271@_03V?V1n4)yB@zCb0EjoT0i+k5EBwcDlI+!uU;2DESj+W_9mzM7%(l z!5yxd4{6LFJk0jqNK5yxX)TU4F_is@w#XC@dNx4)Ps#;n%jEWOy6rF45q&w?)02r$ zPK2D01z&?}?-=X}BdA8tZg=?*pU#g^sT^nhD=(2fM`&L_yT=>JG|CbD#Qba;mge`3ee2GVgOyqWvD{lxfPb5 zAcNjd)J6<5?%>jhK?E;azVhbH#vr-o4M$ZB^HrpFW)>h``yDc@A%F+rf08G;`VGec zv3T01?V{}$P;uw|(M9_ngzjhD;A<-TZZg+dgeC2Q$$hoZ_8YYrx^r$E5_9T9K0`h` z_2-Hoj7RJ>U-pjtsrI&za`tzSdF^CBp@UOm$gioj^PMAve(9Y+t^cl?)TM`N`ansn z;~JT42snGce!7AZ;{Pe8ocVGeA}4r->Ob`kEL$_tb*w!O%5s@0R>qx{j!s?X>F7+1 z&vZaPKv=rG6>KGvMYNmqJYlAtvQMmBgVHi~qdXmQ&m}?d%>BbLz*?!LFA7u=^;8+U4(P1JtWP%q*_rp_GX# zbXGY3?!7~KFp9<;k4GV}!?Mw6R*I+fVo)EvsjSoq)wvN*Ahd1PS{qN%Z zj(r^zG`gZoM1DW%wl9}5ZM47>6}5c!eMtlh=H8lWr<~7BulCvrdzHB4`6&NIipbVP z*uahvX`~O4&?cin!<5wPAFUu+Y5s#3<#VFutOl1|+5RK$qYpxO0+QG6y%~OR+^2M9 zpb$0O#e&v!veHG)2$$^J2-kb=6D5y;9kXow>Yozwp{%2o2bo# zf)ev-rTo2@xGUgCgLV>XGTsP_O0V}b8p#nW(E%zVk*56e3a+}-$()N)(KBU$( zbiY`=3n6`^CJt@(s=&aQg55^CaBjnPG<#GMK3 zHOq!#HH_RgQW5ge089xg3Fo_KcJ*cpI9=z7 zKfY1M`?%X{(p4%MTGR)=*b~@zcj}r;QOiUocyzHu#d4C_B2j+m6l1TpdRr}}%km$gw}>DK2p zVQHB?M7aIU!_|`Z?7SggLziDdh?a-7oPcO)gfM5B-0QFaFHVU^AJmUCk67_%$%1yx z?SZ8TIVFhqcUX2>uIEQ2d#6G*SxRP9Fcm9MJTe!PI_lc*(>bdH zr#i%p)Gjw@2lNf`Qg2Gun(@rVO%LFb)MY7q2Av^kOWpT_;lT~lJ6NE6?-Nv5Snf|W zi0tntOd41?58_xIJk4ZJd!D~72)%)hTy1C|?dt_72wom)`<)D#(lvrETQ7WvE;px3=a}QWg zv<+XqVj4X~zf$A(ZH_76{m`t!>-Nh{n7mTi!I7EexY-i3lY7nlvr5^-$P~Q!9+SA* zXEy>`>O(JHnI9(Ep|ORdmFk-t>>8G9cwHW#Imc|S5I8fcV+mRu+o1%K{B9IN9=aBw ziPk(W&bCNsR+1fNQh{m4bk;!TQ!2R@zvFz%utl zGpRx}Uw+QNvI-dIx*G3k&6(C}de8#iM>gaj1&|59~{9S=v6-_Au@p>GR=!45_|1ZC`jN9vmeoOS9bJa*Wbn+fTiaM>J-x zvxknTjko%w5&t&&9eR)CUW1~W`o`{0-A|Bi5$zf~IjDEtpZ^~Ew4dhKK1V;hK8hr8 z6hZY<=++H$Ng1JVdqj!i@Ew+yu^j!4)wcF9Q|LpQ963g1hxcE2)yEL7xJrnYn|OBy zyBXz=m1n*xPi-b*QmwpJ0N14yP?kK4?M-}xG6@$3OO&g%RW<}TjfvCDUyrO8&n{e> zye&RnzFNyxpQLAVJng;aeiALt{#b{60$DM{4^#KK!s+X}a5d$`iz&9Vc>+I1uhf8X zPWA#OVJ>`NX6djzL(Ln#>V_t-;raR~4$cV+^xsV1&x#&5S{U%zA=+SaS42boP49=( z)bi|3!><4$Z+`u6lP`-bkldy&IeDI6^ho*C!t2bq3a-`GR3Lj>Lns4efyo}?fjKJ zjk7Wnd(;=9_f9d(cl=MtAmv%uhNP!7TEQES`FHO)J6vEA+f1S+qJnt!J5nxgQR3(+faq`t?)&N+y?ififfG;`s{+J;p}?8Kx} z4^-h_@F$XW1O(6!2Ohyw`I!=W5i^ zBT?56ekl)AtbEwLSbq=mknoWw);uTTz^mVU0ML^LFw8Iv4Z~`vxZ^!oPq_umH`3Z86g-I)_Qp zVIY}?>f}a8%8FSaMJJP(VQQR?MNt>C+~;uxp9dmtH%AurUESe$U8_|(vO;-|j(K@a z%J@H)jOWA5g_~O`3R}p7^mTy9i3_WSI5ib+l3`6j_+-sRVegD8be*3I3=XY!ectr&W zxY|GL?b|8di{n;_aQ>RPMFaO(k+gl7p<`>$TsA!(+)DQKPX|{M{)n=`o_!8~R$hZi z;;&AKH?wCC+A*N$clxrH283(#wINaCh>i)F4MgzT_4uQ5l=R_ZYN_8EZwvOGVnn%F zOU@U4fZhat$ayNK3||<1eTu-{l6w3foKcMayqAKqjQU7YVw)MYeE#7F`Ns#p;B$o7 zkp>pzt&2aPclW-J(2ib?Kkj>6ZXMot3LwJ16rOsrsR};Vlv-kZ+dV_?g0=HZ8b3nS z>W^@EE%XPRqs7wM+NGY^`1wsy+J^h*o?xIKQyw+iiVZ5b$5Kk0>?wWU01v+@v*1ex zJap?YO4&t+6V1`E?+3e`^F2%&Czp{5^nX%+E#Jk9gl96ra)Jw2>x>>ty2hy(!#>%z zS)D%*5&Jpbw0XQBg9YzJ;4hC)13(w7qc=d8JSRLVCXKQs0HTVD^r*i|I+WCfz)!EY z+{M%T!$0xHFb#h9h_oS5jyf*-4SA zw|Ta+OckqQr2OYc35LwQ`R)U#?*^++TKk4WjtAGx3B)V(y$Oiu&r?fkx>$ML$`;GIQ;`5~xpk;JX_&%lfBL=SMV%>Z)v z95*pT5KTq(_9Zq=Fq`_=?`0)3I-x4}QIa>Y&)$BkaC8o!!v^oXSk*sNk{#)3%4|7P zXN%-wSXOV3f<8|jn6on9NRIA zR23t`;V$P%@)NUTFF5etRjh3wB)^bjV0<{vXHgHmtvG?)6%OXgDEv9khumy&Sv&iT z#$pcSvWmTlmDzR2FjEAV-{^mn6OYNWZpz1GiL!t4wH@CWJ@EN2ods9U#d(O3cUTHa zq2wRk#`xDwjh2r-)PKG46UuFBQPJawU$zesi=u0*=OSQ5r#fp4#+Q$5R{B>aI-c9L zBcr&)y8+^T3|!76GHx?qw(F;7S>40)7=!Ky&J{Ht11u^G>B`i$Pgl_bA7&00DA=r! 
z0^2p({*p4trZ4Yxk5;!UkXz6;O7MoxbG4{=FDHa=EBdo9Ld(-XsnBGuA4#};m*YJ> zQUalMKrfk&wT6^XQzFJ?zu3^m3j)G01S@z)8Q$VmKIIKEXB1_zd6U{lMf1dkj&!f~ z6Hk!PQNB%_kv!HnHW45-I>X%OE6M^}IZRZ^8`dtP=4IgaD${5RD?p-V0@dEWmmry( zzC2)VN-|D&**B5BcvOm3c-|HnOz@p z77mQc3Am3;(O7hDz*qp9N)Jc$gmpS-Q1zokSGf5eeA*j~Am?jMz zXQVR~Y?w^}qeD$Khxs1*%5xFui^A`EOcm~=A{q^)98Z*)W9G!me4iMN`7s^yXqj*P z?(B}fes0%4CC4)}e(<>>9DhGEZ`>0B6zk+gI@q#r!b)r1T%pefXTykryq&aL5@Y2U z{&j*vf{M_?arCy(e<8&`7nMf-1*XezogXS*cGy~bwc_uf^TStVt|u~-3#CD3PFT15 zU_7j8<&n;!+}18fKOugdVGio_>uom6UG6`WC3F@GTaN{II%IFKeSvJpK|Yn;b8DZi%7!xfbSPgpato?FS;cD#%Fg_tH5>=MarjXAA7xNA}Sn1 zdTM8(_! zIt_qqZs&aA%N099nPOfp-J~sn$?}6NKYVJMevv(6p3pFM1)Z9x z#T<&c^x5Z<3LF?5XuadiL9)35P^4fC%OZda`owB6GG~TC#DbqR#W&A+nL~>f{Dmnj zO$(eb*i|>yOMkn2Cu&`p&XhSVXOTX~mm%I_m)21bu*V5Kym8C9Y7Y+BI5-X1;ad*a zh(4{Z2(7Q@9mbBlazdwg7uQ1$=g4!01qF;Bd6goPj(A zN6r%2ll*ZM^Uqk4*5mPjU8xu=LmBPXzHsY;3u~_3DlwNxIre=nf@o^LC(^h&e;g3* zAa&<3gO0RNxL!)KynKF1CyeX56TxpfpTQII?&)Qy^r!5NcDS-8AM-hZyCj#^=AR$bTJxIh?tetpbu3Qo)0M0nlGffIx^Z6eA2Wz4S7ttw zG#zQ%n)9$(qyAMsEboBv*J*5Vohnc;Q`p=h?Yw7Bz(Obi#D4o1uCk80l_c`*Two5o zCqy-9T$Sd(feEJM(#c?qy=GtU_>jFZmOOt;y4Hm;iP>Czz4+|rjvU)0J`ZBWYOOAA zY3~2})P4L>44_n4J+qI)(3T-Xa@*#db61#y@{(D7Acy+iKl&+?^ZRSUV(vcBvmL<* z6s38Vy@%VfeKEHpYo;m-o7_?PEi?e_=-Q`u%3&XB72}c zI2qmWtt^ylr|9ZPrUS40`UjivVCPA*4~lwkH{yxNAp{PTkTN0~Azb{1G@q8@Hp#Bx zohv^gVDX%s@I2Uza93UhY}q2D;N4 zvce4PxWAT>c76mY8m9TZVkRrS&Bd(SOtmiMfJLTCx-b4*T|)P~rDQsK&T0jv|4WJb zbQ)e2ifw>6I?t zShRcdoH$Cy&}21QC-CWk-3+>A?K*pD4F&oyUm_Z+eX-~7)>f#k>qD=Yx7^w{ZV3ei z$|x7>w$?OWNrWCl5u5==S3RLq#W)QDth_Pr-! z@BQt+jhh@>hh~9%-oj+-EdXO}Ip_A?7em63mR1~UCrx*cY||) zuP9E6Co|EBHn2Cc&i0)xUcwH?q4Z~6jFwylLm<;w_Lut1aPjR)3A}rz#GR(7*c}H# z2AQE26}-{IlPxG-j-r}kXEfU)>d3)dn9>^knw;qLV)o2K(dlnicba=I4`%R9Me^In z=d!~ZLv*0IAR4dY)BvkL`aC?Z22yu69T{ZW2VuQ(wKCZl)mRu~EEO$YH#6y_b$wbM zFD4w0DioLl9Bvzb*FDrYsS^1)V({!7sWEx86z|aZM875-dqP*3iXa+z_s~-;nTdr- z@Wopv7b?X`LfMS~p?S<*ib1g~VfbMJvM2okw}t>Lc<|-3Mps97&~Mjz8|FL*Pk*8b z>T+La2hH$5Q8gi6F_ zRDV@Zy?(F~={$y9?f3X&AB3bNH&HwT8#~H|&>XL!h*^vDV*o@cEy4NvrVfnAQ|7rs z>{r+9ljKn0m)%92JQpAZ%t?dD2Dw|O02gd#D!s=u?w9|nE|aFB%=#Zg;zVn zf5G3nEOnH8+Zy3upNZe$mg{E@7h;1^Y;nJAU(yfds#|cfFS&XIeLa%GwR0%18v$K1 z=)9)mSDfGOdpoXPZnYNm_fM)o98+aY>;d*p<=ZjT(sWyLpfOXoj9&&71X>FF#pO=JI)IzN-1!tkkRi;mQzpSO7U7dhZA{^tW3*ayf(m~hhJq>X_i#-;M&uq zw{dD$yr}*}QYM!Zh}FEbI)0Eipf;tGk}Jw*$6>6~lvMxLQ)^;Yp9#8=^>_U15D2Z> z^p*b&+|}iA0y{EzwRjIe$y1KhmOu0zida3OsHab zqbwP4ipF3zh>%RWXuB6+K=J5NMy^j8UTH*|w?oXi*34R(pOA4*^L8;&7&VPq)L8GM zO76qV8vDUXN_dG(Pir5pkqTOK;o(XdarUWvAaIN&U{gLZ>uep?G6h7iZXPP8H+MWnIt58vOZvlj&PxgWyg0vdqq<8B+c6m)D(oZxo&FF0s`{WkTc;s2 z{gv5+Nw;UkCxIO%DReL&*JFVgiT+dh1YH4+g6L84O_z&sGY~v`-JA?Ag`|fd^yvU5 zlK%BwVyja2W9oTEay09WeCAihB%iM<9_Map1$li)c z3h?0-X@~`SrtshOAdc=bMg|=gPR)$R!J77u6w#AuOChbXA45^A=xm-zl^mc@=rHFr z*4QAi_CP~>dZc3cjWFf|k?yg#M5iG+bg%lRJ_qHgkt5cfX z1cD>N?2gx;c~fAP%jTWq0~)`i#Mj8yz1WNhH!+VsXr@`s`u~x-;H7@A)J8CS+{%Ri za;5S+<3KC1`;tl(6-zp!6R*R%)m~CFD$?bx_wYd{75%K!hb)k1Bcdv&}IW)sM-I z2H6}m!CmHeuFO3eK*{HmD0iF|$S0LGnI~LlmPMqaGJBq3y&rR=Om0cFk`_2_vX|)F z&+5jnp4F}0=HE#&yH2bPu+iygh@-cg5I#x_XBX}Wt#KdCY7qwB_j=J^9dEzJ{Z~hQ z9Q-U|;rk4Q0`bl{faac4ffg<(RAl$sRg_z(N=k?$e`4~YVh=()aOvQZyoPMB!H@HT z9u4jos-vk(iQKCoj5?@X z{$I7GB)Vh$n`-$X=ULQ1>aMzi8gP?Dz!Fg#;znbP!Y*OLoL9w)S9KBpJ~cyDt*;D} z;mbaf*90#SlJ;u79zC5>S#PgeqRs>$w$|V`?{d!N@Pgl;AS!0Z1o)Bm)v?o1T_wQL z+K0mndSC?ve&xhf~K{Orm(v~qv|6leIJ9pTa&HgL0vlZ-o2OkZa?{xn~2?`1E3n2 z>%!r8&aXV8&X43&6o7=QGYTKb1??RqEg0nMkK(51Xk>uD2#kJHW7@B}1VdrA{bxZX zdVB5y84eFV5)Zd_nrPRi$2uy=vaI`23f?iuT7?o-2vPeqehEh3-Qk9#pWq_t0nSrA`QFV9uB3J5)1!*`Bhkk zr-}a|Vb8&NgOHb)*NDlGu;*aRB+UPWtjtm9I)9ij6P%J!qki 
zCn<27IWoKtYWVF)J`%uUMPQexzqEfa@++`YdN$TL%VRr`h#W}Y<4@%h7eE&Mn#+tu zX?nFhUChmliAuAi1pRYvNk(`x(&A9TdoE8Ft%P`Zk#~Cjk@Ru8Ph-}x2#r)uygt5Z zKRg#gbr+(uVsj9jr>)3t7vGY)jWQ;)WDJsDbwL_Q%u)*7d!uM!OsziKv9v;(Z+Mv@jJS|ULN7YwM#6Ba98WFKb)1w_GUbl?6nvWN%X;o;|kN8_m9Q*gNX^2#JX2K3_SV#*1aTM`h-hNnK zeXsuSV#kt6t6Tz!tpUz~kw?DIoYd6ojW-qr$-34-?)%xAM0V(S=jF1(tBmZO(Yrlp zQ#1(9$VWwTPcLz^p^2WUjWKy*X*5b3U)Yx`|5hJeg>v9YUB>VWQd5-C#is>5v=N19 zPouFXtv}dyS5S{lM+p_Oq#E`krUcqrXm^r(9N#a!)eDkdjwz&L?g+;Ij-F{~-W>-c zvka|xu2U50Rb}fw)%1(ZRkMAy+@BPet?r#BC zsQJyk1@QQMyAm_o0~hQ?dAP-xxb`?HEvMuqcB1!t`wW?{n1gM2YCbk>n08{DpX%Bs zamUPRGF1aFKDK9K?mtVL()|m&yc1CO|Nd9xd9oy5)A%LPbHs4)hZO6t7`-`dMT94+->y_p4@rUe?RSrGI|#| z?9rBLGIDLhsJdhBeq4i0jAp-%S33J)L%PPR<+z46&7y3}UOsDp*!08^6a?`#w<$hm z&L_yM*uof~|8&Jl`(*{vWWAuXA}u9g0nC(0bae(`DYyy)O2%k^T}*(DFhWn=?2b5n zxkEuAJ5-jQYtZpt)-5gl)uY^if6&B_OAx80P{L&N`HnYXW=-~v6Nb=D zS0?+D7`X}B))xZ}!B0F_;Em&U7qjjp;-aF<$y)zE2jDt|AhO^EuZlJ#Mfo%PqLq4p0(n8&(+{u!!KH7^t{PQVW%8lB&MN7=h)ejl?}fZm8sDxSuU$8*It+dj|9U+AlAI49n#i|RffH7kbo-L5od0_9(pY@yyNXp< zL-s$t928Qg(g%x+^sAJYrSs~}9(_+xi_KlbKujdY^T-Btlc|lXZ(~#*=MA>yv2@jc z7$EEzC3@=jAW-rU*`~$lk8@Ip9Gri`+v#1|;T?b9k4P!1633u)jim*#LouGz)h(0k z1Su+Wu|!e4PT}kFL3#1=<6rcx?rgoQL*GEoUOb*LDOljCTM49E%PAts&HBqjO$zoH z5N2H>e((8sQFWN=yK^A{?T_#%_pu!k4Upl=q)a3HmipP-`nh+K0H7ygk0X+ADs|C^O50E>`+K zM1Y`gpu+cL<$_tYTv!?@ItvPXWCHW$>=8T=3X-_jXwr~hVc?b@6jYLl8Yt^B@9HKU zxmMB_AY2}P^BU~-D03%^ZBR01K~>HABN2$x8AyFS{36Tyfmw{FN!#D_A=oyKuT3QO zn)E2@-bQm08yy+>W`AQQNWW|`8!P=3c7EV?^`m~6a+qNNgHkPeeAbdGV%4q=5_O9r zR{A{jxkgtrDu1C6xW)Q*82hP+o-+V}gnHZuLNBSTvfb}~j^9lz!2r8ECDcT^cG!(6 zr#BEX#KRkje-!y5NQ)o(Era6npf%{}<(KbDl|9tSy>@fD$hjey3}UANa2EF8XBF~b z>e7M)0?uOakjB%0E}+DX8MkX@XA^rAdpC+vb>&T`(s+0pX;OaKvw7psKS;ajGb3HJ z;*CwZ);skdTF6-O`lq)kFDmEx{{iGIr_o!eE%EEEwTz}z&_kqS8!0OrD&M+dE}hJG z@{f#JJ^I{d258Nx*(QE^7|8>LPs6@aXdJ>?Krki1uHBAUwE2Es znIAU!jN6zC3tO36l>GJaBM?z4A4xDfhg?CuqZ-W+t~BIJbuK)6b*!I{eFJUgY|$Ze z*urK}mvYycuVfxl?0S1xIuR#NkEWns>(yJ~jp;*VHK${GZO|vk7h(U`1%P8)Iw;m+ zxeB^R@F5SD_|OR`+z!sC5S){o*~k1T#%Zp$dFY5<4Jk!ojA%Po!s;P!beiK7)fFGM zf1n8~4~D z`FL|*Z!`b(%Cscj3k4tT>A%;<{&oF(+!PN_y6s=nI|(s@3{Yl)USDL}=Q@{t0&W{& zdD)7-$Mv)sn%#0%O#vAwTT)10r zjht1#F$HiihFoS3+P@080p9+3I*34Asws^z_E-BSH1#Q5ZbrK0E~ku`FYk`hTDE** z6eX=Tx2a*_72!y+x4EqaF={Bm5JY$ZM+6UKid??4qnHF132(b+vn9CZ=`DK9k0Jga zyYF`1N%>>W3=n&~#Prudn_0eJi^?3N;IJl*?(R*0{>9MiE%ot!+Sf{vKXtH;4Omfl zp`T3!`_G!irbApwGrNxEUJG8;`yX_18`tsH5p zt5k9zmi1BG+sNx@jr$irAc#4M?He1O@IS+)DdGG#`x%|$b=+g4R_4Wr!l*5bCdsj2M3>XWSWp^}+Nzs7}fP`cq9G9$gE8($o_tenYyg!#*DoC_J2 zK5F*DhR^#z%7Qp2o|TFNc-Tu+ZVZ{kI^g#CDI(V-zNcqgF*B_ERI4)^uK=XIL!IxiXfG>Wea4y%lu}4R?y7fY&9bj-GtEk1piirhu)u<=kVT2Y z2Dp*6vK?(W66PitT2?_$zT_~J$)}o$uuQ?%t8u^U$OL;xuf)8_8NA)sFl-NLx_T*d zdJMro6OicS)fS{M3BTEQ{nt8Hbw!ZA)I2uO`kx+hAF5dtC- z0#XtRQqnIvM~8F`327KGnlWHwyUt$k_vd@QKb&*^g7?rdJ%zlHQealdEeT=2Zia%thZN1TO~BD{KGp1;bZ@linIZ}J z+=VR+PYI&d@FWu0^{r2B(_ajWRyjNUi@~Y=)tB@RbQ>8#2%4tjC9B}<2)BvE=%t{zI@Mag9a%B z%6P0t<^u~;87~Ql`Oq^SJ99mtm(g#9sfx9_r{@=zd2D@WO)V-t6Is{Zc5|?6408F+ zB2{xl@-l|(zZpV&Io)Yr%aGBjpu3mw4D(!C?75kDMWr*yZR-j z{DwgstcIg&Iel#PzL2avRjI1nwXi7~h^lr?a4Pfrw?_Bv+L&YAXi&q(@a3#;KKl1D zsg8o*PrN`-z^Lz45sXzDSkNcT?I{z@iB^Te{)^l7K-(FDP!8^2%2MYPfWWGd&zK(K zHN^GV-aQcA6-Lujr~W%a-25Q9wl7#`afdUUba+eLht;TuFe+{`gPpfxYsI*}r+Tr6R;^rzbxAhIe9iF1pjfoi57Hl?L} zaa|H|zLmPg*`;)q#)m9>&z`GDeh1mUy704))Km~NY1@3tcUp?#t-+1%SzOhbaa7)D zgx|7M9^-p~e{W5Ybr*mH0BfW0t6>QGF~;Y}UYSXTdAfByRvC=i7!4*tDHUP9!DXmy z#&JjA-L+%*kXEYt!JwZH)~8sm+B0&qmhY!P-E!HTUIbNw#=R)2g41GckD}5QC|B0% zH&RuW`4dk0N&yqt9F`WwY9P$&GLgD@a#e|{{lZ2cvhj&$i(yqr{N~|3_#)3zj8pv3 
z^T5!YBNOJ57Q2iiM&Cs`(+r_W$m>c;PQ9k#LG|HRodU2OEcM=y*)T)@)n%bGn&6XU)XCMxH z=PO3R)|SAZ%CFn+%(L%`jctiyg?QKgW0=!wfQ&uNHZ<37z zF$@{^HD+EJFz0Qaq~HEsMD5CVmhT{-UzcqtU(l;`xxM6aX3lD~9gc#lB!6u*-!LT0 zYx)Vc=b@KVUG_X)cX;rA2}pNz*FE*T+=f`LH@rJ{i|}sopEwCI7NMaq=NkH<$$5_c z6X-*%s6;Z>ijoNg_^(Rq`c2a_-kI$8kj(F%3Aac<`DQv#4Hw^`@sCxZ->Hy3vj5z` z0Ma>?FszoU_kAZ9Fr!Kxqg=8zL9OgPx?x(Vx2=8S_DAcMlj9 zFI_!=pYTg~An6SoWJEN5|0x^5;fcJYvdckGgyO1!Xpp9^vv19ZCHn`>6~Wi1Hiw3m z6BS*`OsB1R2|})67&%tN#`~64Y;?|1=w`t04Sd@MyDwYNBQaDmT>(y^GdN|7Uwu)T zdxjV#@Thkm)aD4#Q%8HfQRP|BhVsgcb*z-p9!ef8-7G&oHmc6P{+4El%*c`;pGq0l zVGuD`4YyCJZ>ca<^>cMs#kLf5k8SLU-t=CAIF*ML$BVa30Qt$ovuT4Me_)BVXGV$9 zv8;8k5N@@mYu08eLp{cpY~0T?y{h6D%aC*csv6`7u_I+W<`24g468jmedx!+;~Pyp zsPKm-I&$l|UX0+B02Zg)@)`Ec2zu zBW(?pKk%6=H%c}IH@>`&Ht59OHLiz_NGy~Rd@SS(uh4UjYuEJz{{%I`xh_92Wl)k=w5%ozBp^tr3TX{eS-mP-~r~0Va>;k+V zcB7x_4u89**Aa`!v3R=7Nu10|;0WE`1lh@4_gogG+gIwY?-NaOd0lr72KXHefsNpsv&i@SQIj_oTOq765O*YO!ySC(DgvM`$&J+fFP z>)agKrdNPJ~-F4pHRPUzmkA3p?NOz%uzu_D-!n6OMY{ALLw();s z1OG>~F!z6r7D(7YaP2x(#$L|!mfU8T+4Z%{Yw6#};=PU5cwO%dg{N2}j^iK**dF{8 z1K<2WI&TQV1%OwgFfG1XWq|TO*@B!TL*3Qk?WOiy5Z1G3;sy1Y0IYL4YQg&C%Rx9U zsaO6oKWCcrP=8MvnKhKc!s9>UmIW7!mG(7>|6-c~U8L-|$6fdzn_Trl-3~hWTC)>%{pC#m4|PD*C`vaV z;Ujdqnrr1H$&PmdKWu(?(Wft&b#Q>i%}TZWThhmLrK8|-I#HksRtw?^m#0AhC}b$62|K` zo2uC~=*G^Ygk{0!e?40X{otT)XZnqYq&p-xK3tX}=#atGZdUu_CZ7{qJo{RQlkaKH z#2&TIll>BrLThh#2mzP6#a>Tf2gSh4FSHP6sU84&DR5pB@cYqEe6{PyZ<`G=sgaAp z*J81Xgfkv>@Eq9j3VXHW&{zG~2hlPC){B&>6UAA6$t}_BDVwl36kPa&5YdPQxIcp& zMW;Oj8s!le{sL&G)9(sLzMHf@ralt*-Sf=GgOlga_C;I?Y{-Qmuud0aMcbvM_gU-j z*nU|BLpDjIlY**c0c+>$Fm0w`C}ns&hwtJ)c-tlsZ$-b}e~QW)A9(^86Tx?(%&7`| zyOd99xOtl;_8?c_l zn8IZJ4V^>Nu8YA}*9XYD6jYh11o)6U&z5s)eH0DZ?$V^Ws)~m38~zS>Pk>KD{78|_ zG#F|~;b)IwSF8~`p!odknkuxy%6nOdgr@gB(CT_Faja;BT%Uj&=HPoO5JyKX`kX-v z2Q!4nFuOHx{pH_jdVJ^cISWP!{&B0Z1OXd+uhF?cdQ6SsMqI(b39%Vrz~H*ySQ2ac zhDp}(=N{Ixwvnup&e|wZ_S+E{XY~DcSJLzU#vBo zdrWJquw}eM!@T@V0C<7Csf|ZGXm^4XDww4QhgC%3_S)272C7lvw}^w@)7d7TqeN1( zq6xNIcoNU@gogS2XxExG4MeUMuZ4rKxexRTXPkf*X&r?kw8s~LMSZ+FsjH69*cMLp zJky#Y8PwL4g_=iPFD-5YNF-Hd@0_D{x5(Mx^M=Fw9$t!UJ;ovKOIgsAYX)@Hzp6v# z?GN&*Pum?8*4|voS^tX7jC`Q0lPyjZE2RGy9Owb96hkEUSfCx>D_FD`BXc-=#hLFu z_(P~?xFq<0%zgnw;!*6h0u&h=1GA?Hw;Z8F#MAy_Xs%kZYm&R02m38JG%{K8o_#9l zw@WRxju~Yy-K3%t)J!<8WqcaM%Rpvj_X?}d!c=z)t^6bA%|6Z8m-H`jp7-`~@(~;U zU0W;TsiUFYvx2oj#e6y~ifO(Zj@HZq?$aNTyo!ja=1?Ssx`n+zDN|rG^(Av9khzlI ziHz~bvlF+(2DiDT6ULYdNKPb)ZA5ku+nVEVs$uT&|6&EmxBI8>@Yxl3MbhVuvKR5^ zQ6CW#wY_G0n)92FU@w>z2dC}TT=n=oY3O;H$YoV1Z~7TNnJ#sD>N>7iXd2ue;|a%r zga3O70QxF`2*bc$iUq86_Kb{OHzjpl0(XgLLlHsJ{f4vjw6VCG=h)O%Ze}Pm0vwVYr&2b^~%tF{UecGk*%3gj*6w> z#@9RqwG(bpYW~x>u*iSwbLF3Sz>8&p7!tqv^IJ811}*T#q|okQPUvX(D-k9*789y2 z#}J5JeLTax@fAeLF;}$5gU9hjxy^_Bi-lDOv|$mex~?9o`6voQ*Rof{!qDEwWVF*n z0qg&=1$qBr3$87xyL|Tg81G47zo1!={!@7F^MGY~ot2&YwUp;iO>PP`5St>Ige7<6 zIxm^z{W&<2y@iPVm<+H@9@T-)g(_(ivt?N7&jdzc(cuA)GT6g~ZlSVNXEy&~L)EIBOC_TYG{yxuImyPHpEANf zwxcP$rd~ZfR~(CzR}Y$A%jWyWAiYpp2vi|fH?5D~6Z**RJ1CFlN+WY(N1u#Mc3~Yq z9VRock0ap0k6c0}EITEDaHrO#)_9CW=&eHF21@=7>xZ$Qa?;$>V>+#rviTuVQ@+U| zzMttR9FW~RMf@N)pH{N5A@M@LvgG<4lTaW;EJ~ZeL=jgZX9hT31tG|(sPd=x9OHU8Mo)IddcD7jEQ$IP$3NNMF?_&EhrBnm<#R9sxl>z8POfx#rUlQB^s z30?ixIRr0sVIEs(F6w-LYs$W}H{&hMUB!Fe9uk-()YOLM$Hs!QtgLEVbN@PjVm*T2 zpn%Yf%k}pP4Ugp4`E}i(<{ugM_i8am8AEoyidCp%c#C7cbB>%O`%ZBRHG+Mxnn5&w z9hPTE-$o3g4>5Ecxwe3S8$H9X)NTqABzRu)9?m&ZWC$V8JW&zf7dCUBwZiFRsN_53 zR9$7&*+;n^+S4lC?d3Xh<0@=_R}-1|B)ZWP6A8{NHa|UV`W<6aT~n7Lf^ca!rBNyy z7XXQoSOav#4aBnzM}Nao*BPUN6|S8XD`Ju+vs<5RPJ?|ih#+A}AK%EVh0gp0b5*Qh z<#+Y;C+EX@Ry5AQY^K%Y$TOLr_hTuS%snT%yS*?7uP`ed2y*_1!+jS{VhwuWm(~>> 
zFf{yRmvjvlC29>+8dEXxSJ2}rY)PG$r03l-aUVTXaY`BR#lfi#>Dc zxEd$lPMnt~*(sLpSjXwHbSne~^{}ccX7p=RrXF53Dn+wq%(snpC{&KUDUh3vzQ7p! zElwU&ZSNe(KA>jLps8!CR_1cPX3gX+r7Of^>WlJ!EBZn=)9J5S4txSDErZ`NL!8RU zJ7nhpFEwG76o1jjzsAgl{ha=06i>@{@x9X5mC_rDNb@&d&k?$}=uh>ju-C$9afp1} z7-sl$oU4z8)Csr{E2X$i#sgC_%FNSKVc7TVE>gJ6_|7V~(>awMAf~zefYLuV(|tf% zpA;TAN7lmpvZ?c?DO6xRNmuX&|FoVL)*yb(114gDzj{Ob`#!$$j4^!bkKvs7gjN|Dq9MNtUs`hu^zK3a44%>$1S_z2{1vVa7Cep}!Wk6Lefc z2AW430g+hqQ(+^b6#~ay??|9o&*;>`V##bK{s^*I~cAW1IGawL4V zUe|dhRGD^j$j$dQ5<0Lr*W~A+PB}LZ+~Et`%V~m%tFX7H2AQ_^FF#&NC3*1fqyYWu z@`Aqp6~oZ7vAjIO{pgVZcw{FjJIFCE&6>%Td90PXwp8X8yi;fU-0Y7Sjw;kEo_#ZpMV_o^xzC4BPt9c!V} z^w@^JO>riuhPW7&8`bEmKR38`zg5dwlYoO?RoRsQ{;YKm8{c-aO}Y)J!KR?llK9wI zhG8_#XW72)WFEBUqwzU*MTjk5^c=W&^wO=UY1{_ z=))ZFaP+Ch);rZ^tI!4x``q03!cr50i55`>&jmY6y^lUnzx|jvc8HX5l*`Ps4e=^} z@v4OuHFvGSl!968%WAwtJPAMePKBS9yNhAKj|vE*ZBMYe5BhZU(_-tfhI4|IKuy-yM(WMoJah?v3wR5`_-t(=sd8~1KKvr*RBd+Ca!gl4 z%@O=rjOP)E&NB*Hk=oy5)U?z%_~TVD&MLtbSk9>cs^01B7!vRc_us8#>&8Mh6x&rC z@AhY*2}X9?$s4W!IwzoxbhsS_Pk=zp2f#GhSyBBcm*u8f=?S7)BJCFld<9bhFJC_) z?na?=#Aegq5Wn;TiatXkmDlOhx8${ZQ~XrNEuwtWUtvQVEysdG~=LabWnM;Y4)U& z|0<$3juI^m8$4>-fp)R9#Ij zQzG1S{6oG0PyYkA%*ZRW7n729)aAlHwo%EX7lf9Y3Y5ILuQJY?02P11Dc^c}e!svK z737iB!3~+tq`T`H3##Cqdb4|Wh5PDBf`#Qq_Y;eWPCjl9y&0O(aI>cOZ$`H(sU zGh?XDO9jsYoVG!u@Y(HBgbMeSzv>-rTUab)og^i2$(2xneINxlNu(5Q5d1;&6Vf|` zR%D7ebD~d+lFB#I^V;#O*AMP0@dSyN6~=iIk8>JZ31x_2e_~g?eo=teToO+Hs?~n% z>W6pWCs(z{6brh>%{7~q4XTVQ-^{^WOC$jfE3 zUZ`XYeW|Xq!;e@^98(%0M3iTI9BfZlP8{F(U}GlVq4v>fXi8JR=+sCtO>UNZf7c!- zuzfKLIt{2T3>14XxK~?c-YCx3U|t`|NJ(+8)&B>5hW1j2&3!w*o%6fXZ)u;zP?C1a ztp1Tcxpn(|>!Qp4u$mO<7XrmIf7K3UC8FWlpU-V=_ympT)WQ~p-YvlmGx5|xTiPBu z*o%r+f7s10Nuecv4^FIu)QkkUA#)e?d_m(%K-cn)bKLlZ8pLFt=yb&A3LhU(&9C2^ ztM8Z>EUH#IfLm=|Xla`z$?*GpnW;NnPn7TVwTV9i(;~{VFF${oy_zbNY1l(z5%!p~ z2(G;TlE)v!x9qGuxvaPt3J6JM5fKKUy}&z+>tXd6PZ23~gBy00DyNZLuKKfb4FRxH zfWGFDtkw<28V6qDUxa&&N065sFFPPGDRJw0!7q+3VU2tFO%Lhz+qncOPn;qs7`zM8 zoslo7CeNdvrOUR{PTo~c&AE@<$cV_J&s#5cf6E-veeHv?xo$j*{d3dz8#5!o z%>g_jWHrielUlO;PFkS!8=m7NYKOBPRufG@f zD$*Q+fUSOrIm@(Vi*# zAV}SjyR0g7yjXr?)wT5IeCH9R!-|vcX?6qh#3tP^+geC<$asZ+KKbLT7(6jNQ~IeN zXZ1BfXL~ZAtQl%jJd6jW=mguOK695i@B=$X6Ry((pf`c-aP0h%_*9Q**J5!MHR|NCli&%p%Cy%;`Txr2(j z(t3`h&m=JyUrrMw4tK%(N!GqH2S2Tya3hE#DDHd_!s>>QaGEU3btPvD_>l**3%m|A zu~FzrtX&3_Ggs{D%vXD!2`dt2uGEhZj4pO?D*Jol>gI1W@X0^lkSY&;tTx>M@IRG@KP zftYwLXq>@QxSqo@Cp_P9n;W?ussH17x43I3kYOX>G62An3{P z$VBfy*FDIUN5kJJbLfW4D$!ORx*Shz4D|5~pvkDiT$!dG^FMw2ppdN+<~qs~@XJcS z+FJRF2BoERklMA>E=5Z7EJ68DpW~-5yafJV^wPc;fiNx@#k>_(@+nU_v%!MWoIz1!uPhOVqiiu^4p z{P)Ui1^O8w$PwaX=3jNqMfEkVB8NW(*X4iSCTlBkw)%7<@9r7cck}l7hogd=^RM&o z3A)6GuU%eL?>-7L*&L_U0n!EzK+ z82^HrmGCeYPC|WeO;RD`YNJZb zHW0>myTK>61Id%H-0jGcu?n_`Dg<0`?AOi%8nU314;gvwuojcTTx4$*9XmU`?_>;| z$tqJ=sy)e@eeE(vQGJzCDB9;mmj8$O!?0gN!@s}6(!D&MUaT0ldL}>C@OW@1#&Z?P z8O{!LZGVGe;6K7~7_iD#jnK?Y&gq87x5}j*!?jvJWrZs43tR-;ljc*Du__9Z{S(8sM2{9q4}joE)yHY>$}JFTd{# z5;vw*UN#y#%FH;uV-dni=vi3lMex7N|Lj1JII!p(1vurmH3UYg;dm1zV$Z)x0Akxn znP4BT1g@4jNYWesPZ&@#J=?!RGS;BB5A8kImJlJkU0-P3b(p>lceitDc17N!l!30& zHNJC50yzk2ap>QVcNi{Dk{QHOB&4E+e3|eQ?F;k*4h0MmNB}c$%-!ar`Usz=8b5Mgxx0)7us#K^&lo#psFJoY{^;-DL z7--DSc@ff~{w8b%nKHkw)$u<3C7r1vhc-vPn?nRC0{BXBvSy~8<#@eu zok6WmNG!IxPn1Hfe3^4Fw{A0$6%WQih=o_YI`rBJsop!o?t+s`HqjOerN|p$jlmQj zLEjw#)Z7DAQTLtg%#L%vv)c=P-G-t8b!~RoM}Yn9Ge?$?lE_>v%3Q9`X&2L?*AXRY z4h4n>%o}-x>97B@o%W`+k#nQ%Xb{7EUCn#p3|z0@lqY}nmp9hapQZa}xjx_0F)O&b z(%^Mvh^*u9Ik}tCPfljlWQ~{0taq1a-^_j9gb{CZP7Y)NDd(4(_qo_)x4&}+y=ZNW z4VC+oo6hS^@pCjFy9ASR4>nYdjMPnlhHM#nP*~=gjIOMsQt7%U{X1GYK+Km>9?*PZ z2LU9#+7ZcIW*6GcDfdqkX362|BP+bT5MvS)B&bm5h;rWMEJ@f5kQT~*g7AgKMYC&* 
zJHEVW%r6`N_b<2l5d13nHCsqkxs?3KZ6*d=hO0M3{VtSaf*=YV&*ka{;R?-Rdj{KZ;-W676)VYh!+NDUy0%JCgjVyy7%YK=V&+ z+qp@tKy`VqFc_+u0G^>=qFT5q)v|aVy?NOeG+KEMC0T>M$!p*}7i*ctVF@zvU$=#9 zWhmxej1Sh3eTz6!zvk^n zI`q9eHAqA+-ssgYv)x&OtT=BFc$S>qvu1Gok)bZmS#?-{(SN*Wtt03?ceYobHr{77 zF9*GbG5L_j{SLMG7WTjJ8UeK=Hh-imqI;CSQQsQkCHqDM-o3}ocx1e`Z-jjvdvI~8T5483_u?Yt_C zy8JYc`fvp#s~8ckh_>M6HqJ&d=gm^u_m&g~*&8;8CD7MkK0FX*Fn0a9*_P>-4Z1XU zPB`4Qo;`1K?72zliie!_8aBh7I=uQ!TUD>m3kK{)xHhbp#}`_C{jS;s|8S0sXl-k; zmN_ku4=e&PNb4+GM}6OsJ^ERP-Ckihxp_ z!8hKaxses^J_=n%sMB4Y)@gH15;+h{oxi?rTLs4kv2zT=Dx;bI9tZEiO}_p6^myn!g|bws!4!w#&M$@pfp+#jZ= zADK05m-(GmaypwQdA|#tg{-dC*9_?^^11M-%>lCL8N!Y#?kI3{rDrxKE8ykbY{RYf z;@n3^hcpfL&p~1>vrryU!2U`x$@;^7>f$_ue`Gh8qep&>&v}zXH4<#(d&l$lEPLH< zj(8OFuiE*eXMyIY4TXJo$B~lqG28Gah_}f0jf>(6)iA0w`drTNDx#BJ&c?Pa-gRZ9 zxjqjLToz#yXRjQdf^(kI{6d?^0S)+cVT94-wZHCWUn19F$+UG>fS+6nX=Ks+Iu541Dkms%IuC4GGEPzT_)YkqZBH2k}+#X;nBi>r@Se zG8j}%oj3aZ{Es^57`U)wGss}Cc6<;q_o-By@iUXMLluy{zVH<0&Uq!S*4~54U zGt_c=WuR|v=g2Yz<+%R6>$CYE9su`B@`ZxSoxeH{HCDweRGvfjBS;|vX?FpCrH)E^ z>(50Sk*r24aWh}tMZ~Nkx@fR6WF{&qCBO_-yF$+EN6RQw*O{BAV{x?>J; zyJ+nz`GB!7Yc4)jw`nans-lS&8n1!fNTTgiaXk$Q34TWq;1$G3KFIEPC#vft^mGW8 zXIeJmCw{}aqLoHYd&ytSGMDn@r{h{NZW;2JWNZBcL^F`L9 zfBNx3tK~l~x8O%Mdmfk-r>x1U|DXV`NvfdsE)rJx5FILz@{qyS?~F3X_4}Wn(a{}; z-Ryev+Q6KK{zb3Tn$13V@GAxWL+HD!I0rMClxS%eFzSYzk7DR%crejbQS-hoY;Ar= zGufHn+s1IU#}EYE{kSz14jD9w*JF`1^t3uTA^J|SqF#B3`-KZ5Pw6()9)8lg7c`!W z_S5&ZQ&VLX7n@F&xGd*`b4DBEWK9f|-tKIgxySPuVN)1ro zd*gterhVXYCUwF?s8Bie-eW>GwW!&sF<4;rjia`3x!`8qcWHe9DgvZu(_x|I3|kVBN2 z3Whb?pda6_vITd}86AXvY%$;{SL{8fy{L_ajhsFK*5oKRUpzXIEESxCCF&sS%408x z%JYLf?d3TROmqUi_1ix0#&4u`Pi5wp3wqs{%31BP-baLMO%xr5yIzz(X(ON7NPjEz zNpog*LJC$%CVm$2@Y7rGteM@8H*)P8w?V+g%XQm7N@&6}LP&}PRx7CPnBTTXS!i(w z$Go)4?vz8%YLt^-(c6iJ&IL)y1?nE?uFZs}e)UxnJyyS+^6j-D>ue1_|GK%ogrSCE zqjSL&Rhg56t&U~m;MQ*Bs@UbDD;GOHCSHl70Yhl}{3{8WK?H*eS=sRn*+-jvdb+~! zeWFJD(N-+&#U1(Op5_s{q%Rm^O6@n~zAYee8(y>F;5>t3|2>U}xEo0w)H}J4u->DkA$c!y;AJoz znQ=?)!$^0qU zdYz|;u2;m$M~|d)uP}SSllQv8L^d#%fK95ealD6q330oUl&4l`<;j0*6MqGqhMa82 z#D7eU6@DC>HwhrMo(W=1c~sGZ0b}2$DCfb?Ka6F((9as>F>{SBb!;Mvh*I`8CFT!? 
zEpS?Fd#e@oz|D^o zaHT`4l8zuN5TTu4hZ(*-!%l>muR+X6HGR9T4)H&Co<}bGdIkN`s&1kZg^|C1AhNu+ z^m9A7{ex*18_t@@{(8oo{ST%Y6Hkn7wCr~JAH&ZbP$TPf@V2ENXsIEH)rj8XsntKD zXzeQVE0L4P;4mjo6xzwWI)6zT7-F>#uS1Kzx?4Bu+Z^lMfkPd3S)mQARh zg>_SGL6?C)|85buQ&5r1bfKbuO`R`PZ=jfcEl{!c5eGoo=xIs?Hf8+kxaY_pW-MQI z0325sZzz`C$!jRl*MD9I^o;-?a-X((XTIR(xZd$_Chx$v`ZuSb-Mc=leU2OYK1($L$fK^^vN@^0tTmrGfe%ayWPrTf!?~voA>O=#xKXkM?1em{p%7l(`;;F9W$sl zBqReiwJvB1uR!u6EFlKi%Ne)nt*~mqg{1KjQ3&VBmQ#-C<@s*&9^g;gx|X%r$wywq zIZ((vl+`98Ydm6=4QKBw5zDB+Wt4TaRbiIy7C+l>7cI40>&td&kjdv-Fggxm{m-;K zk7YN2^gj7*d2o!aNbmkjHY06_r79)wn@|iLwi2(D%TJHMdaDtOL8Y~EM10I6M-Gni zWMcFZSBLtAUCum*oi4w%^go@DD5*)WN7Q=`ir+C0lUu6U$(@Hf%mSw-zK`9daXh{U zp*4>;lrnfC0=zCEr03Y0w_dQ9AI85d;szAVgpV@@Q{^2rNee}W^=XU|)5{!n z0`2r@XVzcwYOIt%qJ}V@wK#cjDt-saoxn!2z5c7{S@~{%VY(aypJLH_jQVo+C8bt> zb?lh)Uc2dKl8)F+6al4bUB)sg?Sp+ICNAUPEG9|knJJe_D(yK^Q`@?i0A?h)Fiy?@ z`u^1GVpM&?_d~;Kj~U^~ci4seqa;v{qL6#(l{Pzb@mBs-zpNZsRmTR`yFqb^1ONEH z{o!#BFN2d_$#{=-==}9lU@nsX$y(&A3+3_QIhd~V$1uzH+;-fHiah+#+mo@A$46!k z4rz;O3wZzv(38!v6<9K(tHwNt{e(D+9m2@IK2ad9pqpmt0r}>I4 z-KhKFGZ3@=NIod!h-jV>6b~DvSoC;U`f1hJU0m-hhfNG-e0too>ej#|X zFL(nRSKo@qzH41PGHE4VGDV_~PM<>MJ5 zM1^1Odoi(ZwYNs0TQkRIs}|WAqy=2X{`2jDc*gkM$6_r_1&?@D}x=4YK%t-^9kkiVD9jg`pX}7cFQOBP(PHgx!6zVP_(YR z`}waP&OfQEZR39pYIjQ5JDeYa(ZF_O_DfxQM=DBiFP?F}6LG)L1kD*8;}t6TZr((f^c_tI@KDT@tj5~ZK^H?-(M8)Q~kvjhWG)fsi-5BTcI<7w)Zg#L->gueq<9ct#~0Vp*L}$pMeKbjnOmb)Va04LPTUrFDhA6FMab%2SH$PGw~eJ$UCMv=zW{p`Z7`3Q(U}iP z+kd{GJi7ZXk3+iid?bxAkXl$zrqsl@QH9xL!0zTN>a`;0x13=vy^|^mdWMbE3eWd! z*`1qn^JuGV*DUb9GL~84Nmu{=6g<4{1at>h`63z$6eAahRO5*Jz&r8@VRb({5i6v!`-; zBLT-h&(c|wZr$krru{|i9O;iymDuJEAZBl#4__?gyGYm$K=shR<n-*XXz2^FwS8GPUP5n-Qm z&B4rV>#}(@@YG|B8P|;|1`|sFfjE|ipFTz{3_dyVlHdnamTkZyEvUc=fx4~BG>5R zgvNC;2bsz`BJv%58EE>brQSweZ+M&nRx+Ep27yVlxS{Ix-9X})jA43H`C?h3XsiTB@R|&Y zU{#f{j*vE(FCp0woi8O+cTDjLUW4X-aRB}#Rj0SI`7|?ws{0v^s|0P#L961bKYTP- z-Aq3Q2*#a@2@D$#yOl}~Zfi6KwaNX(=>sd?O^4~D&!TD}%1_vNKtxa5bea(%sxT@R zyVQ;>s~r!gynssS zp&9AR;3no21xZyyxie}oaO?P&p+Fc z0~T?ybk#Bn$S$&Ol{{#RFIr*y1oAkVq2Q+s=3Ao{=gWOi#>&KkXr0&9xB_cUv zEi4Ps&t7wiB$Tzc&C@a0Il0nZwBfa4S_Un|&h)`dhJ)maQD>*%yPYZ5Hu4nPc)t~p zXQMDjq*9eQq-^U?k|CZLPj@7LfZ(MJVebvZAEYzVh+#~VB1tX#5501OOox2EF8Phw zrNY7J)Je6ECr82(Pu{E-aJc02XPz2eGktbjU+{BdgZ6iaR0*hqCWgADgxmUUAzH$0c~J7m6(YaaO4wF zn{Q23Y)4%3g0cJh1Y$fuBD#@($tHV&4odXCsnl<89`nZ=JlxXeBFC*TAiI%w7tI0a78Aj+FktI)35Sz~dH z(D3cw!?*R+4ot;cTvtDbZpPfBe)!>5m+BUOqW7!&Ri##&HjG(8lnJ}+TfX-LR&DsW z$w!YD>Fv$hnk8&1|L{ab({LYTa26mC02yVKn#?-qjm_1MucRX^2ajlS9LiPbTHjWe z!7mV%El=Z{$`I8ztuMX}hH)|Le<<#_HscQpKt8k7m-nlDt%GPe9Wft#e)=YSrTG@Q z{j%vLuEPJ!lZc1l_jB;Fkkfsm88~zH*vM?+zDW#FTft)rnxXLZ#VMsnYmIbISX)NvLpXn_`5a9bIj}JGpWYk_ zjP$uuDkJVsm67^S55$Fyp%#6!%52sR$*qA)#wHm;uHFW#=<HxiS|cz%LWx^vYYt;@1C*Di$JTFCPDGL2b>viC{he9NoC>?Fcf#DP1N#}@?~&kq z78|sdR-Imbdy&}6CiOBg+QcPGLZx#GZks`Z7r*s7wq3ijtsd@22rcVt;eL1n3L4^~ z@PLI8p<8S)Eb}PQewKV%8i4|`i8G`{CN)oVcqEqyS#W(2>7VaPEFNvarvhIghx;?c z62$UfFyM;7{Vl|#0&RcR@W9*>{48q;_kM|R;fYs1AdWB-1g)bPUSVc&-8a`iCl7!c zTQ8FW8dIS}f%8(|0+ZU#li<6H2*Jk}i*Jw%jT+CKs!^lcRfWONHVZQduAISU?-FZR z%HFF1#V<>%=Um0LPLNuDl*hip2fw%BEu63I%U_lJD9phVJQwx0V!61wU)8JiObYtC zdWBr0R2whv&dy(gv^X!FE$uOspz#EGU&`ef{l(XxR-yFk9ydmsvMEDd<2yY8;ZJk} zes_AGI-M48<37zGY*WD}Ixl0Fh-hN+zHrop(s=NGQF7P#?TeTeA#wnoyka}D${0O zQe5_UnDdBP3^g~4+DU!f{@_flDX}ngE#|~ELS`xZqX7OZ183cH*Jla$TG_yb_TZ?5a3jY^do0bejjdV0h;GD3C6 z`qZepP4m5<^AKl}M9}Fh^bgSu*JdoMGw#9s{5Ezh&v{%6=w_DpGf^{wUC(UWFNhnI ziI1m(rh0nfz$2JeT(87DQ;%2F61f}QALuI0aPx|ip^0>fsCZ)TV9ngN&DmU(Yzv`T+JfxnEQ#k}n8=`Z#a~BHbwV^;F72UX zAN2?6SCC_wDURgVj4$FK-kiU{u(5Ss@}Qn(@JkOYdVLk(2)tj-z5~jE<5M8w6b;)2zRIB2o0-uWz}j;B{TZmU 
zRK~9^Yq|5Xwh7U<{%=Y7EZOzq3!v;f_tfWl?A(=y*2vvP5K6=QXJLa_?$ zp~exe_hTilm9Q|FX$@>uVJzoX#BQCkUb`-(pI=4$=ab`izF&S^eRk46#}isy+A;>i zV6=%*^^E%(M0W}M5Z;FMt6g1K7(T|&y0$%la`6yRiYR1nL>#BYG>ydxW20*%^Z9S zuza?qKWlD@Ny2||kQ}E_2fM#y95#O}+4_T|pA&R&l1XXf?K;|_;8&zHr-l8!=3Ljb z+)z$zlYRe;!1f=wj;Xm}1=MqEkekkI1~u$UHBazcBKFTGTkx~0_@}EtfIZ@D#K*j; z`pyPgspXqzhFqQ15`QgLp=fhul;L^0>h!(>37Y~FAHEJCzdvQ(ZfEix|D7?i5V4Cg zFVa)%Noyx2m%uW(Ia8ie43}mfP<$rOIBcX7Z+}<$ou73(f~SSHSJ~k1)sCH3k7F3! zj3RdAjzd|tfqgCY)tD#&&+9AF6J{JbSVZ{*vD+J@r0CiVw-@h{w82XN{SWQWyEl>^ zY9QlKl8x{-fko#lr+KYNkNVX_(>G2pZsyNVSH5Iqboi+l|CCate5g2N8=nI83|2g<@{@b!Aj+ua4e)Hfex*GE-%xOf z+F5!x!jyzE>La7r@gy>w;r2bLFg+d!ao6lzh63hOtnylAd&;LL5Co4KTFwFCEhJ>+ zxARj$SW9E*qp;Yui0|$b7vshrEVr+ehr6figR>2TC@A2@&JPFJ8>R++_Y4fZi(S_gA%@EW;Rd^FKB43SDg>U+SxO1zIcG@t@P%;v;Jz`7Hbv z!}>bmvt66q9s9SfkPa=<-x`LCh9CG}Z(#HxVX8ncnJ(E79%OmD^tm!L?~o;KI7Z2KE|@IU>LUX|2` z7UzWC4@#(f&CxFcuTPjaeF?IiK*c3|Kdm5!G1_P9d$C}#|5f>XbrGxBUwy%&wHRep zytwdIJU}+@Nr9db^Wp_vi#$-Ck*YN#_5BjN$%6fzc#xp zq+`OoyBX>83=Y8uKPe+2$b=ssMs7?UgPx1NXqoCVO~jw@>v*jL+8jwG7W%7893ZUh zSH}}t6sUkyYMjq&8Das6+;4tqljXg%HITvx{SumKO*J2~m7JC#9Pt>4qDRnzMd zI~#I#O?rAgmOPlg2=?1J?0r7r!t{9W7|f@eOX=AgCV%~6U~D`#lSzCom=3#~7B3qIc*BbJUy3(sX%Pu~vavN77q z%91IQi{(FDn3AP?F0MG5H=~;q@LCajWcu@i$LC_8?XZLi?~VpDKuGB1S4;TM@`}5w zRlzXdUWC}Qx9gPhU7)=Erl+F4NePGB5jto-Be(P;;tfaAKFi&S%WBXqPX=~C*=N`g zCIT@Wjh|f}JuZEHACDni-&VDaqYJ!ETe&(&2E%#Pr49TePo=>gkCd2Wr&UbYeTUzk zf0YdPu(xf-i^r_A&{!xXJp|?ZogA96ax(WzX)8OA2Dy|UKEcz!V%vKrc|eB~qgEiT zC|;Er2kUF&F#jE%Eitj>)Vb}uRguF`6T~R6i?VrE+LQL`;#I|$yZ;jQEfe#dgNZG8 z97y)+hVJ0?nwU1`MWBBk3gdwIM}oVx2gEKOU)1KKA*qVDj^rQF1f|6ni9DKf%0TY1 zJxj&!<80{FE`Tq9;EqgHLQDTK)MZGE@?(^(h*c+|603Au?MK8%q15G=oOZQDP_*l|{s^0jy$F6Xgo|_)4^((71A8oRE z@hLdw8c=R`d~7+BOP(sNzC)|YQ4fvJdpHvIMGzw5DgeLDxWCYdEXsb`tQuEyp@Xth zsFfcB2YoJwdcrl|;X(MuHvbJImP*U7+1^o$lLp7(0Klg z>6W=aoMpj=7cd#7NFVm*W}-FN>X)}lV`)c%&{*?RjR(KWRq5?^fvk{7dAV5)3a0S9N;K)6I;H*BK$5$Z2=c!Tg9PUiUjoJ9g~Vek?PClR)e; zx+|poS8(`KpEyCdv3%;oyRv;BFtARtQ8fx6FBrCU%&M-uRs)J}y7R^woo4p#*m;hU z$3lGiV9bwPZ_9PZyPIlH%#*by>HK1B5Dx<$jHQjHnDFgi-lNV)Qm1b_Eltg1rw>tk z?5c`Smq(3T_klCLvK^rgOqbe!IlQ*c@*7c!GJ1t^sE8Bxq~FaJuzMG7&bf&EXEkxj z+UljdRQ)6I5t+2V1WW_9YyFaJ@4BxXo&QkJD&8L&#PlE9w5yjp0h^d*17_=@s8Eh_ z6e;%`5q~5I#aq5oL<=k3aVPQC#Sus1+q#^rqEj-AYmdP8Yy3CVXM>fG`3SS0=9ni> zllk9vU46>gL}eDewP*A`ngxPakm$cYwMFyjbK4Pa2-?Vbcc6M!^asNyM@_*=W1}xE zI|^>vBf=JyxL4yAZ=%|1C|p;K9OzW3$MzrPAUF%f^;0@e1HsgmTZTo572EW(J3=3RS$EN7|QR%K%QHy7d zEk5K@kUf?AS>w@OD5OlK+teB+a|4pPzO?vr-qi=#a&XO0Xw|ROQVGT25=;s+j`&@C zTK|DrZ^K{9l|F1fp6|4pFE961% zoGn$`qpHP;b!_cU(tJfD)+JpJ=KH{JP|BkM-?_Y8roiIK8ourqg_Ssem39fLhR&GIX;U9LSYzd-2_LZaMlzmY z5W|~6+NbR5yDFGOu?s4&Yp?-2f0ELu(?EYfjtT;Yn_T< z(;clS<>jgwWi@)TW5Czcd%uWA6l+~S)$$N5+O2tB>I^tMHzl8=9a9h)7!w1p8P22ycQ!v=@MOOFxe`Z<^ggK6EBWx) zUc-e(jAZ2y(h`kp2p3z(aK!#N9(az>5#V}?c%q0oAxp9-#VLisM*BWuJ zEk}!Pr8c}M+}ma*o^AqxAJaWg!xyi0&|0dzxy|-@0jUi&0{Zrzj?7j(t*kAK)ETk~ z3+K9)Za5A>vH*|^jinoQy4Iq{}Z#!I{?wKzWq{ZdICU2(XlkEI{9`veWbxL8lx>8ug$JumH`EpFsP zFxOdcjI>wTZ!BDuN)&#Yi`AxN^{)^X{Hw^%I{N!4fbaQt&VrMW2zoC622@Tj>{D)4 z=J`*<4qR85HshFTL6CMX)wqvlc{-7V?17IzPM(y7E3L_+Kjw*KY^TfZvd zp~e^34JxZEU`0`S?XZ*==q`p(w0l`49pYevs*W@S&J7YZ2jP#D$Y;#}4*AAMeARSP z&1cmOcc1p4wQjv@-85S8cf*K2;*B+9uY1a=eiQ5CbOX~}YO^)N62Ydjh?KG}I_FqB zr#qL1OWNlL+JcMkl*cM7NpR!BJumNm9{(-3tlb1O;1^k1KQ8$QTZAqi-8yrpX(K*N?umq~bj2??lIM zZKyUTvvR*_5B`5>zvIQOVf81Xa6$2G%dLymPSG)zTOv*0|G@r7;1KN5jw^|YU=cy= z#EUlUJ6Cph?8`l$8v?eZM&y=ItB3Sc!gqdF*F_^oOVx+HvazWV#akPMs`vgDVY6tX zse!`@ME!1+K!gk8Mm5MJy1D}&{j737^#zmkLas;$^cWbI4|ltP7(RU~T|?y<+YIK^ z&!NdFj>?rVVR#vj>BZFNW$<^0#qjs=^<&0xRPOU1s4G<9K 
zSXRR4nv$cn>(a!Rt5cR#y~W^5@mUUMx05ADDeWQM(XYq;ain3DJ$Ar!^{Io1E%T=Q zT_MmP7VqY<@ZgHsBU?56euwTO{UJ70DiM>JpHi5H*_fPeJW>KsHN`uBYDWV2e_j*q zTj8*S>oVDCvVKA-?Z31}MT9RsVy|7qxZhfM=Y^5@Hps)Scb)AWe0wN*9s~7tqeB^x zGP|oQ!uKotb?4$GECwlYF!2n3A9$D^$@?_wz5oAk{qea29kOyJ&Q#;Oz-Jp~jrZSb z*kT+HD*}``u=|$?GO6K~^o6M5`rg)+P7{4gI-14}_6OC?CET{@?56GT!k3!%2=%oW zpJavoB8_@K=*_=2%3R6p16`~|fCD~lw0yq)llKr&;na$sL06#9_1*k@*FaXe0(~wr zQM|*s45yiG+vRJb$aC`2>9_P5Yjy7>oqe^hvCV$-`uK10r3}D7Za9gTN8Y;l4kit9 znIUeV{Gb!NfyOS7$NL{$1iO zyfN%kWHGGx-J$qnHx)N3yZd(2w!mR%xZV8qr_5!amK97WuANtTF0xteBoOh z5xekWkPcsd|0*GMO!u7wr%F-_KvU*b));j@x?+1Z*)gwXj*5R=*$TL8itH+z3Pi(X zznmKLX3ajRun(p|8~a2tvQFb2-Nz5#vsl+7t7v+v`@Qq#wsT3GLBb>7mO=oy0^dGn z=6=}?zR!RP?pDB=gtLVW^G9BeHNISx{(230ugS8@YZ3yR?y5ff{sqvK{2_>)2WbWU zO)IcL)4x|>{f_T~x_@D8)@J2A7*72Xk3yQ?q3T0O=An&e$kB(c8Se2LuArRRFMiWr zy0`2Q^n3#e`Tbn`?W7+nL-22Pa!A91{@N8(_?Nzhsy$8m_7+9#(_nJ5`QiyYhS@IE z;X{G=6p}ryd~g3r>{T_e0?J%k^{Fcx0&Y7Qf5x=ZAGvs;5W>Qtu_=Av!S0CfiqVq@ z+N~CLn;Tu1jjivz08Y40c5teB5@leT&PI~0mwmDsY+9)&;9@5fv{390ua>AYuiZ97 zu}h{JYBL?#u@Ghl6#W|;oB63qrgVjy3-f*PK^&aCKX z`{CiBStgw!rVn6gabDbBqnyiI-?1L1EvvlVxu*jE^fFr{`ljyOdD6H5nxBxj_hy%| z(;DiNmKK>Of(xMHQPVX+Ox-DF{y43gIXVw&$)|de7!I|izO+dfzkeOS)C!IK^|g1N zL!yV@E8HVYl3cIb@%O2${WV|m7*#x1$oB}1cj$_XIzlD3qEa?0v(vaZe1hl!t8QYj>sq#^Z{t0)B$aaozK;zS*joU-;{9{L^8WESCGJ)$Hq_o78d&TuGH) zb5LVrzp(o*r9yEdb1}1vZcg>i*Qo>$y=kZx356HwC&MC?j#F-%%#M8+9dh9D3=9j z)n=@x9}G*nx+PLCdDO1)ZadidcVrLs+u7ir{uXqU3rXj+U{U!*MXC51y!tM&lzpz^ z?o}#w;DNM@wKQ|!`k=eW_3@x@8NvMLSgg!ilos&qLgkHVGa3ID>KX^IirapBlKCE1 zr@90O6|7-&rHCIh=7>TD20hVL+KsSZ{gzMl^6(lFuj)o z3LQx^()))p&+^*{9ZIbPm9jb?!@<~=ZB>(6Kp9ukbSdT8U-p6M)BgKuN+NhXQ(FhM-UnwEDWrI@^$YhYs2Y$TbYPvCY~P(Cw5N+9&g^3ipInmS=9W~+|i!XT|lx6 zWOLm+4&OLCF}q_#fV_0fMc@f&%HjF^%Dc@zGI)(1Et7|d0rR62Jw})QTLc7t z+P=>WN1RAEu)YYZ&7NJj8g6CK;<3_Bveat$t(LPJ{yN>gFw^WgVM1k4VNRIm$@Qsh z++4`}4>1s1`|}0xi3!GiM$iqfQpjBwroP;>>(awozD9Rq8ms3G7n{#lCcn5%Gxl26 zx3BL)u~V%Yklvta*}~eTX2!h)cM0?v(B0|&Xwa6sIsG*`gn~J+u%Iu(8Z)uFbiN)e zxhbTA^u3q&m(PPIW)Fh?Df`RvZ3ap2K<|7=fo3l2hhZ1#Bl$KA- zQw3j{b)JWDx?27E=^T8D?CHvtnVHTx@dVzt5n|TZnNVig%8yv%)tNV$g2G1i7x9d6(KPw*+!s++T(=vR z>sxQlx%Ot7BU=}ywK<6`#h?D@Q}K8fm&Z^n*PylO|9Wrni4ra19Xu+#>`=O#nJT={ z4}&mFM<<$W3cSGhD=4|Pe~^0v&dJaC|GYCo69T%~i4#E$b$NYizJ_L*{BJ+4-wb|_ z`r>D0_?G1Zozx8!{YzjrKA`NC1iG2H5(PcIH#=JNKLP$qDovP<87>uyJ;>`!+*J%& z_q^Y9q!-b|{KJ4(E_a>{Yw`Aq*i?pTpUsDza<3jUyx1h=`V#2#`l)ki==bxCsG9&s z>B`>9 zj<2-c2E%MPGes~r(X(nV??i#?bCHK(ys?z$FTWEuC3SC;AKu(H8&TB$t%~-@(;<~C zbk4xc-Fu=+xgZ6^dVFLMbQ0sqDJZiJw1Rydz~8aE0jSC zjva?qeD+?xW8*7Ts(sv*V-`%Jn?T>@($-c!7IX7~bK{Ss+%R*2gsau>a?0c%95eps zr+o22m9OPx<&gI1K?{s1%rSTu+CdN;Se~6zFUIx$- z!6_ajT2t)k)?Hq$Lo*jm0=)x62+(lHRb>D6(yxk9HR7^UsenMSUgKvG z%J?JRSI@l@bLIAHz&J&AZ4)^HekT*jOd$BVUmjtOC_6}O78e>VFEbOd93Izk%rSVb zfE%xdp*r^ZYFb5WAM|{Nu8HQcUC=eNNaS6UE}(ja3ivl9%JSGQ&7D4r+Ze7JgxdV% z+V$~h0($YP6qVq>y@y0ELa|cj*;ZMkVW3W*{uToGYMZE?xy$zA_x?+N7L{vX)0gHy zug&rf4gEi6u|Wo}tDGo?p~_D*&n=9`^uG@{mOl>o)B7P2fCR(th&2cNoqwJ64T?wY z2xXM2C?`vO>b)K28G|bXxiF(wHR7T zHd}rcGTry89is)XG#mYSn>iB_qUhmiqUg^^QhEPb{&SCT*^R=eTQ=_aItRnt@v4JSUsDDe72Xc0^_#J(B2_v=p9Lj;v~@Qi9d^r3Y~ zm@*`kJsC-O)zq52&X9ZQn8Pt&nj`N~7+2gAzZ)!43V5XL^EuU`78b za;>AxEvzd=ouW-wh%Syq7fVoP3nI)Xa})37=se{x9Iw~hew57%5s+54;*@1Ylze?m zT4#Fj%yGMTJ-y3KLoVur7jDtlaMDL|7We9N58>%8-%3Y=gV2ta;U9EM>F3?iSWW74 zwEgn6IcGFrvz3{Zu>PNd0AxwDIDi;=6#VR;$lQUO$ z^znW*<{ACf_wc;n>frzekI%-8OtR9tQxj!}BIcYeM4Ug+%4>f^Ak&|$4Zf?X5SDef z<6?`(QURE>K;&8_Bw63m10gp;XFT*Wj;amoQ$_I^KOfRqeA!Wm7BgE4ecf` zrq#ESSRn@9Bxva(d&BK)wFD0}37p9S$0=bue)LHl?D0&x#8{lqesKo6rUsu+8b2db 
zXo@2Xy8#KleaQDJDuE(IL*Bldiki+I?j2_4c9Q|DI%SMRTM5)F+)0dhy=t?HzZHxgYZJ9_GMrY$l0QC;GiAC zYHg*K^ll9in8EgSd8+twN7v)J2N5RIAGID>WZ%Ou0qXxyI}?_AirLRdf%_ z4K!$F(6xRDaCW0t@$p3oUjXih+a1dGzyxtC7p%BK(yMstzhB+wFL|C1BggFRt z=iEs+04mQlgn4lFX-fLQL9rN{-#*AM@pAt1M)tV}HC|F?ZqCF6jgp8TtE$gNcA>aU z*W8~_K|lLW=d!>o)~1D?n&IRd)|Hq9vTBsklXG|4;?PxXvbmMsXk;*mOahY*D1Bqx zle(qPUW%xj zr>mcG)^A#`QHvfJWr!8@VvH=LST3%eoZV}~Ol+0#B3hxQ48ni2MT8jQdG~-q z`{G%*__8=!P3`DV%jexwhlwhw*Sep1RH@wGo%^Dn5SwoJqjcSQFn<{hEvDcHG-sks zeD70R!Inp1dt2vUfM+;ju{~zS>XwN3W{!+Ytye)uYB7Dx%K!MITy3>Capr2k5a5cG zRn(<}Ds;m21|q0uJs7+XNj)H*_uHYT<}K#6UlsJ-TbMgM=LBz#`QK!K7iZg$O=jo6 zPTLN4QKg)h`KHCuPAM^!DY@h>&awxx`WQZc^I`l5~J{`Ur1*okzFiaNG`S`OWY5vb#c`0eWcm{ z0PS#f;q<;~L3M|fX)Noswv9oRi#7E^GEQm7X&ypbhDZ5R%9eVR(+iog<6Edc+ggVa zjTrB1rQAa1;*Ff!6rc*Tj(1C(ed?a&#ub#iXwKI6^`Z& zG`FKc51m?pJIl~p`nFzXV1)S=?F}3=-^)=SC+t4#)#NoY-DB%d2s4pvv;O(Y$8Wri zIUcVsyCc*WF!S+B-=C5y{g|xP7c#baK-fL@fgNK7jz0V05>eSSQhyU!b`b_yEdF2H zmpwO#pR!#hi;A7QOoFh=bEmHj%?8VW1h|mHNv|VuPTzda<$(jiCJ|@Mv{esM%!b^( zj(NT~!HHdyJg9(-Y5V|{sPb{TvgJG>PPtm6q8<^SoI3@AutDCiKQ9wwd3GPQrSLD@ z+&=vAKbhY&j34i|jFJqTzER=E&^N*Quh_^OU+bWB26Y*5a)0d2LN_#pB(b(rglo0= zAuT!A6`$w-FUDTFXk_&Zs650EFl%!;tL=|{>UXAfoLnQ*+y^QBR*@jS$sMHHw^@=#|IaP102d^086UT%#AnsL7W^;Q=94$O zsh_H1*U5gB%AjqY4R-;M2@N9cj`r$Odp31j7m-%Brfio8{r;osQ9xgmc^U7*fU@eH zc+TJC1Rw7;^RtoPZ?Dh&SW6=nulHB6tXPc4DLtheL-x&d@bvM&NAsR4N3s#T(UR}l}xFSw=dY^kF~ zw>vN}3%|}RwKk%vm7z&7ihA)0MG)9<8jFVJTS1FJH{IB9;>1ULFJEvSMs#N5D$x5( zW5myXz|es7`IG0S3nrK7dqwpBl4=n;EiNVCl zZKWMd4cGp@NT`H&L!+=+gc_Ow9}Vjd8tm6{=*@Yzzfof-s}>Ikwql%`t--NZ;6~Jd zPPE_C+Dn&r?z)0y_QHJxVnTw8n8S@v^l6xQr*l-IAq5-E6f$XoEllgzlOCN5>(~u8 zWh(@E)^`KZWsvLRd5^-ll&yq1@Kd_&22%&mm&~FQH|bdGjW1a_Kyxe1gu%mtH9Mu_ zG2ky!0<*+Vz&{6-UF1B!1id*2;ia#p^MD^oT}DZUgGc=lC-+0p^^(HD6rVv%m;OUS zd^lWcnzyXp?3(kEldQ!hC1&(oas08*&Rh4%bej&;XTj5^lQFIfLmE+mLYs}d7Myn@ zxEZr!i=T9z6E-;!^?LX9e3fou;A0yfanE!XJTuVtKywyjBhDzrGCM3FY&`wFwR5`; zi{Mfv<>h|V1CPccrTDk}DLlZg7Pyei;Lq=xb&q*E<P2~2xG#?Nc`3HYBc$kr z5NK2cYlUqb9roHfkbGQubNch~MK%OQ?SpJ;g0v}Qo_DMGx6FZi@S2%=gFX8q-Tlba z{9@|H^Kt(`Y{l8DS5ebiwIweCC#XS)dhn$11`BEt2;|%RGsoLhT!LXUDfz7Bw8zrT zx=|#nbhr<|s!7d}zd?qWo<%pF0D3;9CM*fJ&sf6LRgUNve57KGEDFwOXu<;3G$Cg^ z>nZ;0(r$I};=x_ak|{U|w{HQtts@<^HFv&4J73#nEO?>M&=sn#F7)GM(Umjsf;;1a z8-o|33)6Yv+(r+lt5`lu#0qcyQg?;@UD6)&WqoQ3sxM>p(t87NB+?jgA5XYhlXxrTGb?^ z+;W59eQ*RccLe*UZs&BVLP`Uj~D611INLYRuo_7gn}2YMfF4O`^Ozit&>yJ z>1=~ey-~MoZ5~srw`!nu%W0b$FwbAJh72S_)sz7?*_|P`ANmv4ZLO*a<`s>Ko zy(RO~ayhs1URKSH8On5&wbRjeRWV~};2*3whr4sBVB|nx&WLT512?%W$F+W>Ki0Hy zh;IP_@#7Q=3ntd8&JMTH^{R9{+12Tl&`ZWZ+bE<+}UDpgh41XZVha z;2Y=uUkf8gDVhYA33&=53b@48Kax7~@5hD|GEazK{l(fU2yo*T%*##1nf7{T%K2WT z-`0DCVW84vR>ibsfW?J0qC)@1`)w7XsKRLD!z7Z9n}2)t3bf8|`u`7Ue`b1Y@gJ=o z1o=_G9-?t3PlGh;#3`EwN-u({6cKe}sCK~{#g%id8vww{4D_k;9pnA z5Tpe%k(+c?PxSk3+}(py9110xjCVJA4C!NV(%YDCxxRn)!8ZlUb|kZYl>A4g?m^mD$HI_IZ&K|f=qpO{u!yr>#a4MDX;(*`Q{F>eS_@oIBUy&NAL{`=3Hl#$R}N$ z%%Zi@g6#A0&UjInT+hHrx4%r?N>WetFL~wn2~h$ZYDLTetPMOIf~|AH;`Ku7J%--D zvOJv-yrY%r>1c7AAjj4Tx3cp@;eRRMkHy}I_Qmao6mMujRr)-|z!=BFZcEEL7J3#K z&;<|!o2!90g+TOJ#R9c^hiE)-a05m!n<34>0jLHh?(xGJXuLiWdJRS0HiO_u;D&cz zG{G4q54t_!4eVQu=9Yq^AaKUPMJyrjJ_fNAerey1@UR5G?3-B&=>-6WU1AwH6RzoU zt83!_2K8>TrSE7e4ol1*r=5k*SFbGPeMfqP?J%yTGkQnAEt5Y=(g5jD`yy#TCY15PAA{DW&=U=)YN`-A zE5ba>faT2RM5DQwttir7s*BsJin|_Q<@8$ME${9lUR}zBA3+N=e@d+)!d`EG+Mz|f z>Gu+>ZTfIvN&0Re2wy0UuiZ{)>AYPj-D#S@C5AmQ@SdF#)zNaLH7&g&@k3=AS{xJu zl$vx0BsOP>r%Wt}Cw6>xGvPh`rW*&%Alh%Up{|~ zF2RWI7Z_?r@&lD=u1M#aQ)OY87K%8MG^n(Y$iDnunw~Sr0X*-Fss&H3`pDMvMRb@$ z@k5Eiq8y}$xHq~ZUghQ7J`H%KEZCXQ*kcf1|31C~Lu0b*&*8SThsA=4UP-N`M&#E2_w4tU 
[base85-encoded binary patch data omitted]
diff --git a/datahub-web-react/src/images/redshiftlogo.png b/datahub-web-react/src/images/redshiftlogo.png
index 5d7b37930be997e4ff21126cd511413e038d156e..30f7ed4a88c9a5b7fbe13a1cf75c640b4c057b07 100644
GIT binary patch
literal 13749
[base85-encoded binary patch data omitted]

literal 152085
[base85-encoded binary patch data omitted]
zIiZP=@|F)Tm~Jd%{}Tgw3!)-xnno~Y6NKyAiW~C)IQ(XGn`n94O$4}RyCfI`|x4l6>RrfTlUN!^&ceu$DvObs-HlAY@P_NeEfym*Z-w!_4cWHF?mP5 zki7MGZ`*EsM}Nda1JU2rkBX`gJUQ5|p?ldi5=Yk*O&}2>a35Hj-O=;zEIA|U9r@6* zjO@w0nyLIw(Y~0`Qgdmki7P@~PpiJ>XYbfv_Y3u%e)W+@ z`qgsII=E-_u3+7M23q6(cE4*OjEpSs+c@h#BoJ7E%#w>(BkgyeysI5iXkTn6d6i31 znfctpqx`@HL~q$@9xmm|xoA9CbI<)Fe7#y-@dZmR5cJric`VZVUiR|oD;3EZEd-TK zFCt=Ttz{Ek`-VD58jEdA5)L87ddAC*K>fNQq#|5YNJ15h zx|FZpC2TbfWRP^I>T14nBeBbt-H{7|sr@j^VeSdi8cGqpZr{}$J_i9lMcb_t$Cu99SW|w-Ww}>xOC=7!d39Ioqtf|$bKr!Nr~QF_*8#Q z+=p^w6iMq6TZaX|?tgQqevq7cX@&^DZjgF^Whj=Xk)GT4r^zcpDZD@9bSd@ zCYd1hH>>hwBXvf_T{n%zT)fBn|&yXx1w-|)gq&w=HUL9V#w!i^tJ^1jphxsvz3>8|a@x8J?p ze?R@`^U%5|44Vo>Xk8k5&fe&}u`ZmyjaE~IfF7KRcD!m=2>Ec(dH13X0-CmSsAFZ( z|GE_kG2r<}p6ov!=~52S3!hvb(Mvd4xL8+It)&@L=3p8JdZ*ul9K(}C0Ao6}Kxsdq z;*kJMm9x&&bzHf zbUl(wHCK%K*MTiPyfDgCxBT#<&jxrA3=PJHpjLL*m|bO$h%3##FW$>!X@-h(j*b&$ z-86dtRWEA@4)zaV;*Wd&lCLmGU_Q=26)A@`I^oo0UdeA<@1m#y692=B)Xy$oW@lTb@*iLf|=`qkV63<7g%m}DZYr)AQ|g6{LHmW7L8pc$tHqY zH=f4#HQcOOP{f#5zQ!rSo$ow$IA`l8T);%JNyepqrKs*=2CA9j$ko;wG~=cj;JNYP zlD88mujbKgu2mm+WnAjF?d*4Ssh5>R{qLMSslV!Pd|-R_i!a+=QC}Q?^0O~KJJF-` zQty+$=wiKnz<2)fCr%RDC z#cTgYcvY@VC_nx0`iE^Hz$X@NnN@U8y=O&UYo2_$#{MsJ^|@m-Gy(DTzruSj#8B&* zK=YRGc{CLfvE00Wjbd;TDG=2lkZJJqXN{loochzTUwn~XEUtg{$?c*`9{qRb75h0~ zw|(ON?KQ8yV|(jw-nH>f9qLh{m-=IRxG^DYK>DZsp%>Viz^(C0{l_j$5bmeo>UExT zLa>}3Z$Rbiy=chH7A@WYr2!MM6%PfB{;0qN!yRn&mxC9PT+Cb}@ofd0MPZpbB!KBx zY=s#UzX(KzA8l!%X5j`=eqqYC>T-;tOkNv6Y<~n&Hc_@X5!J2pXi^$>2SV=ogk{BW zS5QR2Nk_&N@wU4u4H&qTXY88ukSjSu4}~NjG(@=arMbbkR6L2aekyI#n$yj9wB6mH z8BPPL@y<2c*9nzB+7z=Oh!GAOmjFG=a1J+bDjK3+lg`Ocod5kdzJGiE*ImAS%lAKF zz|C~WKCW5k4J)dhJcwL`$d{w*C$}d){bYTVaZ)cB*Ia+XHLtv%;rSi6 zJh1)V&)>fN;jiAgJ#b$o>-vZOukW z^Y-j7zjXWhzxcTAi@)-+N0{i*!>_0>>OcQwyiJa~Zht^8BsYEFp87WAd$y0&_{opn zx7~ePeeS%j>^bFiHQg)LYv1NM%l?fBno$JqMfT5op}xaY|LB@V<#G@4v$2ZyR9xwB z+P;qH?Co!@6%U{<=d8tU-&=pd*7JX1z0P0rqzmgiqfTy*ukVgxgxh3k#m*5{trJ@4M-w@o$*bgZ_XQ^>PR=}J~eqs`38`{SSj1$%C*BnGD7H$w$ zGjxww>~)#DeJunt1ZTLW)^4Jr3oPBqau{EByBcV}@^rxpPtN35nDgM8hhhLVu_hl3 zYD&0KkVFrCn3-mug0b&x_0KxeC&y&poHO~N^DHB&%e^X_`QvQ968zr!B0FzSs9$OF zvhTfid(k&txt%=q+4*$GS_QwG>zOaOL~pD8^j!~Zx6}*C%^$0`8gIF&#*frnjkkaD ze!sWNCRJ&i-`O|1;Onq{f22{qs$UP<;Nan+O+5IT!tq{k_v6wJCgq#$RCp6qxMPT) zx*C$99sDNS%dS3uyZVV2ZdYH&+liXjTfJypRpX^s7T|N_dhmgVw)eg1biHugQD5V_ zm&VsM`y-jPq<&oLpWSh3Jl9+HPZV|IamK~C_H{ge$-962&h*T9bVQdux`$Zta4-71 zK3r=~<__a|g_;vAWZg>D;=teXHTsO$Vw@QU5BQ4LI>yeUylwMf9_B%T6#s2vL)BY>%IP;|9cr-NU^ui0! zJ$F4+-%Rk(2KJu29@Kdm`;YYQ1yk*lo1*s<*b4U#uc3fOe6WfvfVS+R?V^j%udh4Q zrmMCYXv8&mVUcB_bnc0R0lC=i zD@%ejX>3xsf$3%ouv41?*)jEZcqw!J#&q|mv;6094WY(n56KVJ5jqTA2BG)VcWu4l zS8mbU6Q6!+`-*S9a(m%7T)AEKgwNc!dY(g7XM~)(^n#7I$3NxVf5pbV^>Z@RdC#2> zZui~$(8hbz57u{zJy4%5W8{m!58PjK?0olW^@WcRE~wvI&9_2dP>&dmH9wzu{khf? 
zC;ei<3&+WeIX6c-ea0F-mmR08NALdqySKOe#+~(z5dG0cA0$2bEaLrS3WEW0*Iy< z%9Iy2Yz-IpT)|$RB^-;_BG6K?O_0a$a1>18`1?~;&b^X)FR&b`fApyc#{8fIivaas z^|v4iajtq;kfGj~)JOxM;$JS=N`)jzjj{4qTvBJ0y~4vW;^Bq|F?Ef!a7KXag{j8G zYFDiea=~$^No5qD`YM9i$oNV?CC3Q?nl$t%R5^-cKw5WYvwpdW9-1^karkQLet_wG z^11$|R%uS;yt<>`9;G!tzZ40#R(UQ5D>?&MERwkKE3_&Pv5jX^|_a9 zFL>GI+Y5LxshNx40?88_N)8QdPt|K5+|-O%HtbWgPg?Ae-l&RvH=3ky)mN-Y64h+Vl)nX1RG&{ z;v3~*2%Q8ehS90H<2PpIX=yu3a)O~K`5P2X@e$n7z!ws#F ziS2kiEkJ{VmEUDKALi7LbBQL4);s;sHrKffDyMa<_?3g4(hnf@w~aN}=(56x>b+0C z75A>~m;RTJZ%=*BsqHymUBAlvt1ho^2d-b|y>LMJh3oUL1AKkyy?=0ed*2&RZy$QA zUo@z~ds`>s-~f&DUk8OVg4^ep$7aW2)M2v^KCb<#x9v>#G`X|wLy+g-pH0!0@muwTDGCTwCmToYx4oGSH~9X#3(WAT`r?Z9s9slfjO9?*3n>Z?Cv8Kk<<+gLu*@qSVwx~1twv}FPaoBmu4`v zey02Sr3+Z)N(-#4)+InL-O5|BV(eTUW(=C5A^Hb7&U3s}bN5E`iYwksVPUT8p#IpU zv2OZ%90gPrXe-+v|53e|yt{tF_-8($FYfYU!gp~!>qVE==k>?;WORZr44;b~;0H`^ ze8)Z8``>hT{m|#>`bLHOb-oX^=5ferI`;8f=VAS{H}N`WeSUrZc^)XM&UP`i?=aVK zYoEh@ybXY?TA523D=%YT4cR?^`4sOTzojM55!UDUWZ#Aj7#>^%1wU^X* zT>U%;j9!SlpCd@E9@PkjL612!&gzt3UI6HG{S_Q*!i%o_N*Dcv%z3RMMd4xS?0Vi- zK9eY)lC(~I`h!wwP88^gTx@mI*g_m_7uH5W_<_dCI71nTNA@7gi4}Ggu?!5HmE#owq*l z|FZXPG1_P8Ro~k^@oO&k1?1-Fb0zZ;@}ViPK<;USxH1Bf{=m~as?OU zB9W3P63PXNa)Fd12qhq4B`A@V5JQ4x?8G)Pwz0vE&)CM}@yzsi&OO~dS--WOwfBDC zs;|CkV}q3VUseCT_w%f0t@Z5vf2;cYzOE|$$oCIizUNy$K;-fnpZnb93x30!FTYM- zO!&X5UeG@l|LXNmeh2tBpMRh)82muPpZvQY)9-zG$-nQ#2P=E`zW#4)@8{nSma~5C z`HMrqd761X=Jlut#ZR3}cHIs26PJ0;c>T{ax{zebmz-bY6Y}$HtPL1TDph9n!jH5Q zirqZ_E}sGBo=N}!KmbWZK~x&%=J)d-5p%gl9S)^2E_Bu!-V2q?jyRiJFy6yvF{I0U znP!2Gf+GV$AK7j5j;|^AdBOuO-h{OWs=_iQi5moFM7@G zqmN`ttgdmg<+(ES>gLxPkv{)CXrbH>(^bX&3zwkjXL3Bjbw!rv zIvCL-B`B!<`71RrEdR_F6$M@7HS2O=Qo~BuJois}{RU*9r-rkq;?BG>F=#K!SEHi? zVMwdfl=E=u4Y$-Aki=leCi(Rz5XgEec!|<#*@EJl&)*^94nH~*QC+O4UK9HAtJ&0! zg`C8?X4+;GpC;x%Q@c@7f{6DNc1(_H;=-s%cbo8oIhu|gaZL?(nj}{#@}Jq_fiIfO ztw}+QsUWejJLQU%PkBRCvmOo$;cq4toncg6aS?>2gnkohw{}tUGCBmK!`}nM7Ym0%uTj%5Zzx9Lm1B~x)c-I%c zQNLf}4f>HM`QU$R{nhKW?*RXo)X(vS;>Yua;^%(s6PNe=^hZl^ieFH=b1*_dB^8logoJeW7$G2yO4me6X#}Jh zNJ)i!l5L7+2n>Nk@r>WXzDnMT7Doo(hv+)p(xAh)8L_*YQ&66uSdDa~?=%eVD*? 
zoLHf&>>kr+L3vyrcxT(wE%TClm@?Qmu;`jmSl0}16 zi>|?OZW^K8quR#LjjDUko{T%x*c9WIGnmx9dZeoCeiZO2w+V%0ibBFHKVP_PIQ7f( zuMW@Tr{17cRVz&@@{5QA@&9FBE;n)2tUW?aA2$stAnki4+?hdA>25(fATp(c_QqDe zkMx5f|Grlr01Hi~CI9 zGIh^&S%clE3}MjToS7Uqgg)CLG)c>z#@AfyDt_B6$d#)Ti9emiiWG0W(ll@DgciLz zW=g+$0{|+TnH!Sgi)eOuAsE zQ2-gr5PaWNR0T}Wnwrm99v(f>LM-c!@cfmbRMHch{LJXcFr@0G;NfI(gS)iomdW{V z79KgTx2rGsrI4i!A9=-?_o78`);S#5_>nmhPL0hscJy;Wz@45A(8c#ITRJsKW5WR( z4;2tHk2sbsJ!wiV11+I{Ksz(#BTQb%H>Yg8SA%(orWjv=3t}qFWmUv=e;t!3bQ1*J0@v9*lIP7 zlJp%CQse(dtX-rCp#c^-Sm6ApgL=?b5PNaUBwF0GxxvT97(LDOt5*#6aij_Xjq|)z z9ln@J>cdb>Ti^@3P0?Vq|E~8zG(CMFvZ;=hqucOO*A>oiX>{}ESEp8b-EiK}{!RCi zzPXMUcc|z@DfBs2c)@N$wR?-}W0h8;8h$Rp%qoCj4thsPt}Qz&-p++@`(4bW6xd87 z^NPbW0LRfo?bSeXIlstM1rXF6!gu*>waGv|v+2dd(-s+FPRd{L+w}Rr0 z0QFpa%3pQ9YqtDl2NcNn3=0X_au47)p4kxHIouZ^G7nNO7Xbd8w*i=*g!C7XICL+PW7DV7(?m|g<*OQHP%|zLFGwF!K(%p zU>z;^@)TRoh0aC5gWpv$C8H1o->-vo-%idV?RyVVvXQY@IZ0S>-<~#u>mlFGZcp_Q zPwA^e|6xX=0a4Q|5I2q@#R{BsrALC3a#nyMw8t6!Cu;DrW$xd-nyrkP>8_SFXLFuy z$G^7Q`xV{W#-eM5EkT}A8f{_@;cIG!Iz_l&a))xT)1;`RVWdTma=3kdSo0jC+DR*k z!D-k+bVegXDxVVueJngPNtB_vx7x`~)TU~fh*%)Dp&n1UJps;!bA5~brpnF}Iov4y zZgNiV;p@lFm~ye9hzu6$h(v|v-ak2J5O!(Y$M;(Q=!*Y1kH$rM{%gDhfS;JGz=OKS z*Hu&MeTs8m`L>jf+c8Ja6jgFjl##6JbxtB>fjyxa_e}}W2XD>-6%_NkA~tgZ^0TNr zJ0+)=_uojFNBntlGiT!E2vI@;e4CuVUH+oQv9wXVSVM?j2S}U%MO9Exf}Wr?y{tvh z*#)5Qu$Uf=Ijh@6k9ji;22d>nFq~fsoY-V9QZ;k-qWu6De;M8#O&o}qTQNTnkuKff z8*2Xn&?d#U>*oUAkV=)#75Tcs#O|_72Hyk=RLjhM2eAcVRKwn6ljEdXXZNWOjg1yS zu~sVE>sZCIDrljdQ%><)7tw0v8!@-d89X)Voq!P=vCvKF;s6vCoL+ zm^n!NuS2bXmJ!0ciz<8^ZL@4XfdbqSGv9|MhZU$LqK5RD7cDiv4-`uM5eUv)o8=of z=%+b5W?W2Z4oH#cvKwCC!r%BqY`^ZC+|ti&Cu+t$QMl6?zdtj7dtWN(jiAp?eUSGd zOgT__E`D2Y)w3llL%VxmFQ>bemf?UD{;I3ua7{a)y4Jl-wpoQ`DuH80q9J|TxpH$S zCmw@U)|;^~>rrJk^cPlCZqD&bvUd2go@?*c+*v`6KAwU)?5`iGahZhLbV$qylbcj8 z&kyQ>aNOE{4+ZnYQSJDEW!8UB()i474V&B~g3l<|0^gE$o}bm|y5LSho>^ASzVFEr zDm(&M>KFosgGndD9sjKU(^rn7^iWa1o<^ESQQ01$c^5dRp*f*q-Mjz$72I%fk z%#4_m^OtXc2V*(LfoLpDxEN&9N74&q3)%frO*(x!obQJCyi-bYFC^{tjX~)`r>TQ> zRAkI0L_k-LkRJ3(br%e6_FC*-sG<+aJw;}$(;K77?sh5CV?eq+?ue*D!1r^A7nO$|-f_s=zRvElQ9r|7&*4!nVDDD8LH|=3x$V zMbG>x;j|fznr`N1+BLv#E}Z<7@PAEsaKxbx&NLntPE|m<{>s+oSEX9QfCx#VmVin5 zhx6P)`G29pe_KcrvP1)#)2D#ZZ_y5KrUv2{n{-5Q%{TR8dF`vKwd_EFycKf`@6tQ} zkP=yBf;@K6f1plT?-Cor+U%oQfhTfO0y5St3n)O5tb0H)6&SZuJoI7N8nWf$9Z*Vo zqz^o1OQz7elOdKz6S-ob+_xWlH=7tC^v1iD6wP|2>v-dfR!(i}#dAL-IG|DeLEB|5 zB?bxNT2vyXy52Kaj$a}Al<@aBjTQ@a7O7o0cjvsj3%f4q^V8_=N-j(PXV<~3FNlnbfh znC)RNwF#G0oXkxDK!vVFUSwT@_u_icbaYq7M9o9R?RaC6|DLK(ncB>V{pNnEz_lm~ zYTFCgkMr~=*9~5)8+!CGuStm6k4r`9ffi}`P7kno_mt%U&cV~?`J)d(pYrardL=!- z{!X8KZfLB{i~Ng}C`xK>;0-iL`hA$DoAkK{8X5WW(KkihYniUy6&7Oev7@}Z=%1$5(m9^$`cfG0$(Ne6gsVBw|v<(eG1hj%?WFvk9mv2>Z=l^G=id`ksIIE&9<=8 zbs{4iyLaYbjL8pcIujz1D;Q4(idGtb$!Ewdny(-8*st8&9Y+h6OSuq^{@OGLqj!tb zvH@0x5&QKhUOicYgi`kUot2Qta7!r>9{Z4Q>cv3s^@JiG^O;6UFVF)H2U%qMsCD}Z zUkHP<@_9>){{AB@(&;|GQnl7PPjp82YyAnTr|3k5;0;iTQuO1))g;fK4~#eEPP%!% zP2NkKRwFd{CUsRy{&R-q(?NgSkisU56+u3(mfuYKoxb{Iv+$vqqt@|-)6SasTDbbrn; z=02!I5YT?ytmO1Z;^%B%PAFs~Ss9+vodv#}yG^N_l8pM&8r_3j*e z^1l&9RwXqxSajmruUFs*xBOmj`mH?TeQlo;)+I{C!lC$EZtA5Nb-V-CvPcjChqAB? 
z`%+9)$0q5!hhzjhWGRSf@5Z_?1V%!b;=bK-jERyX+iAn8$Lm0z?i_C&bY(da^PeG) z%E%ehRtQrC0hH1oI)vE?ce~-*oSMNGB}QZW&*U_6ZFr+FU)z2>f8}p^axYYPurS%# z`rF`cr-JypV3UBbBjdtil4w z{M9EiwK0gC`j5f9KCL5h4o>3>xjYLIg`g^iPRue(m{0RH;1&SCd_w%EOc9Kr1CI+Bao9$B zLoGh})_cPIgu$U|s1x9biaC8@jC=;CvWV4+@1NXrbwL_r28=LPHvl{ zmY}L#Q#tI6va#+zfk8cHv^R|pp%i4_DYGF;caBc2EQQ-40ZL&^4qw#+utjDic~5hu z{emm>nKE(#m$ky1f%hT^`tr@1!~x~!w`|jH5$$beAG~H|#56^{CHnZ}e|*S^+C_q9 zJABSQRClS6(nN$X)K>s2+U~E&ywGSMoZ)p_i;`{>8NBkx zW=!8~isU{CC?ehWxS&a~yyUn0ZSfmA*HNM1@|4-W_so|EK#DN!&OC1_-45Y@|AeY^ zyxP}|ebzeFz9B9C^*X$sBh{Gzxq`Fnt3gIcqg-U3nzYwxWB6mUMlzAbM}MsUG#VAO z&6RtgNiRXJ@+X5c@8p7!uWsSdcgD&Ot`%uVuj-zwdKS)?0pL@PyjEr^hf67T%E5Fe z_~w1Sh)#x0yqUUt`U;XapzZ_1G#i)sQw`#!bGZ~Uk8hQ`>g5jcpSF3tF;iqPS%1Wg zB}2HHX~N;8{1eaM3|6ex5sYREoiMr{JK?RIJGeRrCh%o&NY*h=nE)##JR1$8r>C?r z0;8k7UQ7QuY~&Tg`eQ8x518KSM0sr4KjkD#b$sPO2hmVGx_R`pA=4qI31K!Xom{n{f$A{oT8n_dSVJPM14lMn$Zy#AKO1Z@@i|ZBezpR|B z+cL`o-1mI5Nz?~ieBW4k7^+1FKRy&~2oV){zj1zxb+6eGD6p{)Yn}jUS~-iQ^j4yv z+fTjP1{X-fPK73qo!B2;g)d~Y>Xl%Ci=W-aX7ABz*A=g{GbvsjqM0fQ{g>XNxB*su zb0$S1c^Tv3uw-@cqQAV%4WaZA|J!*XK)ze&dn=D*kGCebgLLJbrh`SN7UbdirU6#X z;Ma0P|LUJPS+0vRv=uk2CviB59W-@j>=jwV1ArzxjF=q~@4dRPE#?2*Ip4_H;6-N+ z$0^GN0w^42-QF#GMN$F=D8`~~;{Wr^D6CvNT{IegX`n&7JJ#B)PKwp9DROgn@{PbD zkBeD7&=WSntKb_}6VxvTl8F5K-IjZOVxm_K*D&c)_QtZf1Xpi$qkz_c`s%#asD)z@ z$AyNHZ=uRr^`uVYnP+sOI~t?5G#>&bg)_I)EN- zmtx=&fW0a7waVJ~LcM^%tPbB9Uc%}w-w4}VS#>#WcRVUH#OhBfN_9@>P%Oqf99$Oa z7i@2dxm9BzJ=G`sZUb^kV02{p-nOnCy&=&6tQ(e2U^(Tiiv(!Gi^?NfgHM0G28t&c&M4gE^MBhsMH}i=@f~q~40cOShq}0P z4+c{cbU0-c-n-!;m;4$4G4tvC!yY*gK8&&3&PrXgkH{j|2iN0OA7v-@r)n86&qMY} zXciMSqrT$|QGmm!h28Zkv(0#U9NJcXn85rvJC*9TSChx;oqcpZfkVhb-zlKX+t z!0@LxG=6Xr9MAKU$2Coghy+}P)brqs?4^PUTxIa@KPumoo!r2cL%$=@gYG9Ie!syCQKlwnKU z(;xU-CMSOsPO}Br63K~kVncmR55SmVU$YpN( zQ{NHXI_CzR>n+c2_NWNGu1`ajMzQGoT45*|8D1(dfnrFWiN#_0JSk#(=K$r>y80LO zj@Q&KDN&R5)P5nBipDx11oI`%h!FHadz>>FHN#elTfV&0L0{J%{^_xp-5V2>Zhyyk z_A7$I;>r)e)$w!#e0jx|)OiLkhpC;$wikdn?!9>rIZZu-{C3!RJH%?K@6;ghQx8os z0J#9U+Y^_YkS@v}yHAl4eN_y9m0qQCLeC!;W2>YG%_xqnPgCc=%n~Cd`uF0F(697z z?!t90<5SxhE2o7^MpmC?$%?fAlEto*yDTHJ5rMT!~IXUIHnQ z1p@e^uf)hhl2rIMk4TWXv{b;u5u&~Dh-H&Teub>~*Y~jHcpl-0t*d_wA{^Y2E@}_c z{Z%-YP8ijszKwj$@DDtIhUxa=f4u^TRZEplDR$W?s_e)TadxD3g)dPL^#rea-@WWP z&e;#ZF2Gej|KhOwJla1+^I;@g`3;TtO~nG$(_j8XbYP>DKj~lB#_?qKX5-Wf=koC$ zGA;zoDFQxSSnWgmFPA_zl829CgI~8ALbl%8ObBaI?4>sW|GKd+ zNshIAZAQLZ1mv{-x3_4qc@e&BlvM7mQvUSn1~GfD;bk) zt9C%v_K8?l0lr-y|_c5=S3^_$UTxl_9_dqpU7UxRHe(OOLRy!!~^ z8H8SCa20UR##;c_r)$HYA60wgjJ;2DnwSDy)2kJ3$!yzB{^^qK!g#3Bx@E>xyo+={E#*Pk5yYe=BHZnMyuV5r; zQSNc{61GgH`IPwbYVLs=#oY?*o4saEH{7x0SF1gZT99?JHq#*1> zhYM*;yz_{WQJp&=|5o6ir#z}=3(C{YGDy1w9g=9WY2I^xQw9OY>ASN=83@&zXT)!t zS>GsSj2cT*Ar$FY`=O~yyI&?=GZ}p+jDK_D9k%5W0LJG+ht4A_Q@xsllylTspgEgFuU zav>w>KL&Fz515s$pa->8; z0rQTc^xo$Yt7#L_O#?(z=YI#mI_%N<7A%DY=v&h3eUbfh*%$unuD6cdzI^1#I@%g2 zUw#jn;CP0bH<#!l=>r)uj>s#9C% zyi?5)(~d;v@Ods)SjQRAB|T)uHP3UKTIklgy6$LeloLEIh59(=?hJjX6Y@L|_#XJP zwi2yR8PQO4Z|O*Ax!>g2&Vq!jII}ERyZdcPoMDb93Oyh_4|$m;^jt>Z6!WCrLctzC z$-%D#k3^L#p-x^y)DO#9%6aoY)KN>W$xBoUQ%D+LyfsVQ&XA9szW(7Y%Ft9?Tyyuo z`Dm-6x4j>bdgV5ZUFw1FsMl@;5>EgY-al;Z!p#`=FH8TlixN3Dz<3_Fmf$YfIUPy% z@*e4U8Gd}$=6*3litw{Imr30%P^QKqwTFb(b&n8-%mH}ZCXuzT$uHaw>|2z4d)}{4 zE)Ga~;Y%07u=+ac#Z}Ts2(b!5`>4Q=u&0fNo#3PWQY&7a1xyJ{T>owOy`@O^X`9`2 zxhjCjC5$yvy3g1?_j?Lzb)nAAOJmRan{@#?yg zZS5wVNi;u@{bV9`_;hier%t2H=1L|vHre{%I6O)VtW1Lze)saxkct-nNmx%bJ0CFN zLuwEMviZv~r)IdZ@znXxE~uliU=h&zCvE zk7!e~J(P#qsJ`Vkpv-@t*h3t1KKT`QFngUulN*#)qU$X;G`tT z<@E?|A-nWQK z7ARQb*#jt6B_*&0wtT1McqNYv?dfvG`iH%uJ!}dc3$Tc_$rRZ?^5SEcf0QaBZVr8hKd;bqcsoKn%CU}Va;o_F{OUkgh2EgSt7M%Ws1NL 
z{G{RI`)e6rJ8gaX;EnhKtxMG`rLn22w_itw6h9nf+nxmIa6Jln=pc9K zQKvCZCqd0h&&}Wv^)rWozZkb;i;(#k5Ue+`^7mHMdy%d8kCluh?`n}Y`vaWUrr2`C zcx?`EE0?9R=kr~f2%BZ9R?-d_oyI;B6e(i-BkA4=;x;IsY&djH&5 z*J)dpI&(}&d_Wn*U&xvRbu_E`IKuQ6sJffYo6GEIU>y>2o!0+)22QTzu6X;Gv+{K1 zGI2m%3F&LHZv9uBuz)`3pHKV`EWSPo*MxP$50ct#W=R-EJlFz!akXOY1u@^SJ}W*b zRyh>&f@$L6@B8=;v#!dY%eS%@MEH6*R9yT?WkdE|u%=gl={qc~*L#1C%*5x!optg! zTt0}WLSS~z-Jcz_d05l{wItpq8OiPr(iw`0b1yee@G)q9#W6(LR=J^?+BpeSPm z9|iWHKm8tid)I|f#x}2h-%X-hi$nRJw6z^%TP^tN{_Rmmix2kP0Mz|&v7v**J5CQ0 z>Ba~QonPY82w-Nm2~%m9fl^GNjY!b9frODYA)T*?-aO*!lmX;Y)xEM_gg##9p(nXT zQ<}&u#kXqRHHW1sIU2!aANQ_gtI!rwV3*MkwN(|S1g$3fZYcVdH2=TYmX51EXSZ6i z7Ny1)z`@^c&3oo`&(*Cz*t-{@X>urWCH^&rsKeDSlKl8pllHR1Q?Ny_82C6hGcTL3 z7Wy#SU}ELk7=rnEc<4UnVI#6~RSRpxcb`E|jtf?>Uj|#C>;I7w&nS+!uYH@nx^1m% z+Sto!|3RS7$0H+yjPT%1;z z=Ag%p#3!o4_^gHahWA~4RR}A84e-v~kCC~2$r2!Tf+ldOGgG8|)_Gvb2F7ZUbD^A0 zcKXT6&O69bMwky}FZoBRik(txQZGr&y#7f8Up@@z<*=Rw5*^kO!{< ze3yUCJVBK-#j5zN{AQfYAy9CR(7n6iBP4?;Ns8g1(D%xteb83km~Hy7uVs3c$Qrfy zS$vgDsJscF`*A7{}EnoExA;O}`RH}wUu%?d?k3&TkF-3pJ#W<-d8 z2h-s=Z4rx*BL$`4i=!(nRK-}64t5-6FAT=(!gsE(#7FufFl-S5l!Z+YX+1brAdQw- z5PQM?9v3!lfy4vjvK;8Vx5h`@8~LS@n+4A?K0|$0$tlD= zXQAdO?rmdHk*e45#UmB?rk6J9DU!44J;_pKqXv>2lp4qy7#Jv^kVN%XZ;E<#3f5#^ znK8GqBCx)HJ#!f~LmMZ^NW>@W=S6G|c&!jHem?}#ivO}iEBBSl+)75O{w@}5dgqjL zpLrMJLKViz0X@6tv72;wVyi!G3+~KNZmg@E?fBF8>uoFN0)>}a+==gFs-1JIqcyOX z1`UtdW7v-8uX;soQj9Lkdei4>&qCVi+k1!cHwDiI07aC5)}c3_iXhmzti8P2Tj4)W z+;aSM@>9EoXt{wv52xo=;ubA21~U>TMB3VIGcmHCK25r0(snEZGOUH7RN7V%r|_~z zvBW!ig^YJ9fT)m=m!B5PbLH)&$;=8(U#Ww*vE)lA8a7h5X?S$@dMN${`u5DtO`9zz z(r`mW$O&$r)D@Qw$FQzI1ExT^luH;%I@tLvp9u6&LCN!?(~We5lnEI&6^5>^g!Ccx z^H$)z+mG_AE-xD_+Og2fRiv6o`}&qYVczqJB&>hzAXf1?uqnjaqH1OO^K?~xrPQ_E z_U?|0pHQ0zl8ma*}-REX&B^rnL*l z_Xpqxo+A5YBH-OyLaHw+dX* z@$_29#Lmh49xNZd_zC#zOPcu%$54$3O>u?wOO$bcoLM<#+`92*<8tj*=kx2Qq%ggY z)vLH;Qml7$V*V@Na|9v#+e~;F*PH#cX6{KSZ3uG3%9UXGzPuMxdrF?hA!BoNs=Kth za&dt$U%t3Vy3!}FhvGm%2qg5%Z5buPy#t%^4~~zg_B=UT5Vax1YIjcf^V1Z=tzDIJhNz$48bYSH(ROS_i4pYOifpNqEO8H(*f}U=8+TFj~Wlnhru{>@)u&ow0hk( zHsBxaL?Ffc?74=u7_pka4)0r5|KNk#y>KbH^ov)6-gLophH$wUjYT-_8k%{r3@3J~ z{GkXeClxx@e})mhB!Sy^KoA6S%8(<#XipOw-5xsH3-n*^`$#LaTv?foG&rzZBk=i} zyn&qE-aRVgg8|xV`gp|EU@kv9^mPa7BYs7BmHLIK@1EA5RUxP?gT!Z~In&H{jB5mk z+=?(C$epOOadzIX>fVkljXIB?{K4y%4hICBm-r6M$$;mSwUHLUGxH!4j`P;s;&hZC zc_RN<&4;DcN%m^Vjw7<-$6T!28M4$jsRNx(N7c(NEMNR+y}geBYE&k?Ok#Q0Dm96PqCG#6^w`o@&JO}4hFd9gb%`oR*qS zp9x=r-IcsL+XppDXV_f+gVFI! z1nQsO%p*A$&sDkvvi?k5T$5SH4 z3y!}SxsKM`A*CNogCONzJc8j4y=WqssRQ`4x!t~wwoH|0pq)IWjGbe z4N4i&%W8WsO89Y}a+*NnnlrR8G(#TmFb3~Bg$3SvMx_qeIDG1t!*P7TjvpGf<=_F* z_MFRzNT*Q0B^DX%zv{XX#VG2)lr7fcXUT^m(Zcg1N71QZ_?Omd@CTrT+#%N5kiGcA|jm4YI zJ&j-Us6RZqi4T3?4_+0gmsBP(4E3bu0J`i#fuU!2e zg3s>oa#FsPE*dYa;pK6!C&PrQ`p;&7=+>L@EPblpV?*Q)G$5SzG#|WfVlQ&k#X6A? 
zGyNH8=dMPhOpb6cF(hMuS|XLeh&k5H$LPa>+KT~O?~Lr~$B8Uo?Rxcn7C>t~dVm5c z79gem${ypDe4THHPze*qWJJJE()j@2xufW@cQ+`GX}lV*kd5`@TD|MAPSgsGR^rQ~ z-D%Z)pYXnxJ%GUs$e$mAE|1 zkR*@}!-pkR%&L8BXjfa-`i5%gZA+SfqjZ4XC+?L&>?G&R=ZEvUhYpx!VjJr^YaW}M z?k-GTIDHi?Ul}%N*!S7g`c!=OSyZRgs`@u}v9^}N;QgBw0Xzmp9YzVPb`>7StwjUk z&18d%NaN2<*VKxvK)<@@Qu*#QM@u@&`fwtXBC=W6L)wpp&W`(?(IPLC+rL7Gyg{7) zvX5-8uQs~XN?P9R$k44tPWR z17m~?q;{TK%=}rjWwof7;g1dF3ez-=gsvEiSq*(PyROHJb0lB!0M3__(@&KM3iV|y zh6bGiT-xqg_0LL1XFkcOEy4jcuh zavM@~-AB`JdAsKYXp=V$hlW#918G>g9Ii?*( zwy{R^=G;96%^!FHP_D`WC%ZL@Nkx71Qxx=oh>{Izk0idcdfJk?dXft}e}%Yc zghZ-73~8*JrzjvEM5hzG(LaB7(HHj}gZOY7|4Z15{l=2kJ$M#lr3shH46{WVgI6QR z#P)u~j8tD7Z%$2yoGhRSs3M>{sMevvP!Q3NgYlQW3uX)e54;PU(XiAH71l4;sB zx39-Ugli2U=UHaAwHf0>X_K$`6x1xH$Fuf!C~tMlbfE8w?#Qs~VodV9acW^F;OQNV zZN)WRJyW31syUCGrOnR7&DGAcb7cbR1YnzE_R0iNm0Iae@~77w-}6sm`*Evhc0qPV zYT$7?4(M`icaO@~adKEr(f)4_I1svQ*qX1Rl(VrQ7TyUYZ3Trq>f<%3O+r^y z|Gl5*TQ>qt?k*6ZH&=ox(A=0xKT&*~)RY&*v$xmp&P@;*_y>$~wYt=*4CURYIBX!M zr@8x+4YT<*P^HLxu<0mv6!_7ZYX5QFE92X5#=7#-?`NvEn3b$}A2O%bfOUyl*+TNU z81Kb`hx-fSdSMbQB-6Nn%As|T!l;(UK$Gvx&=F{on_5!wx?o^d0;N#tcpOXj?w1)# z{_!2a8xGrFj3}u}bzSL=9B)Fu$4FU=dSi=Co*P8whahgMQe%b|TB!gIL|=zv4}@C} z(;~Vxt0O*=OtMHv3qFw2(F!AIU=z}GE&ptJ*&Y>B;~?wuTXdZa#JyBTu0iszU5PlG z*W}z+qC5Old$-GA3^y;gQ8H};{QRc9QZe4r0I(@R1t#e{&AcR|&#OF3y-*?zr>}cH z(OD|px@i-ZIHyU+L}hPSW?5CW+n{IoSY&ES}jN&AbxI&CRkMs)GVO zYIL+N1W4(Frb_!B;JWj(ulj>m5SvNj9NcQujISjtw_>CG@Nhmp+E~$dPz!j#4P?mCn&Pm12$+Pa z3a$q(cPZ15Ig9NUJS8>FM(2OlKn>r!sG*|R8LQB$lRlRg{s>;JH3|^!-G|+a^0Lh$?%ymt>Ph+gVH&|O zIRPQ4DYrREiD+m9eQVuu++?yjm8zg`67s7WH#pe?hhuKY(Y{xm$u``aE!&KBB7GRn zzdDV3J9D&uYRF(aoCC)zGn=;wf};0P;1Q_J0{f761YlMXWD}5D?+T$Itt&^0QG+ zL)->+Ci^wZN*i8)vqND(@QOguJTb;7k0O&wK}T1Yb6xJ|$cD$rhj-V)M)hRu8bb?m z0)%?YtwwqFLddO@dV>H)@`eH-PhA%sCJ`Nq&sPdfN|b`MJd!bAo7AHCb}p%g^+=c< z{u7G6OG^Q9K}N>aXGoqRANq#If2Q{Sw^aHQ$XSPJ zpHY5*1duW^L&*a-vs#Xxk%{uZs`zSryW~3SAL|6TRoan~%!*@aoVE;fB5T*!musOV z)cW3vx;5zs2#Z2dN@U{JyNwE2O~%&|U9mVkE8Kdx=n2X@h4!{?<7LdM;AJHA=wEr2c(u^`Sq-`?SteTgZR#lQQnJ7ImdUk%wE;E-)Ai!|O%P&p4_K z5Me&ipb5XqVy@bOn*HnfVq^ix`K0x3D2;bP{Bt*ejS{EK5Bgh`7>lQ%`Al>x8#8N zvxv%%^K!8wf~vqMBW8E$(TCzLLCBHF{CdgxA%s z5M*pLNR~C>8n!uoc7k^$o!XB-`W%*XOX|P>T#4Rx?_In9?c{mcBg+ef&*Dul8|;23 zin)*~xrf2$nd!o?^Yabjz2gNqQJ+WQ81vjA$Z8%7eq%jBN)MUJ7J;3P^bu}tqM(lL zjhSBf3GkT}yV2`|^Ir`}5M{%4x+9Y}1FzO^=p|d;0k>JmWf9RWHa>KxLel6BF>e--?|? 
zqoI6V#;;KS)e$Lw->enm3qTb`C1o&fO5SxlWn+Hh3hAcs)T&3R2rjZIMJqk1@Ji(N z$w#*Kl^3K2qfQUiHrDKDUw^-^Pc6l?b@Cg9VQ{jx76oD=Cyq;@73I$tFL^%~i+2mI zFol7~BN-)Pm>HYWo-m1wL;WmOwVT{8?!6$8Jw=i0lCHt^CoErSton2-foHt#A-7cy zei=3==rD`Ebu-6Kd@In0<2E#`uvGfx==%e5svNK}_QXdWL8bY-SQs`!v1XEbXZu_} zAhV3y9K~Wq7p_r z?&+EY3o6SpGUT@YVENkb$UVZ7M8fhjp1fB#n~@UkTs*wP2C|*L?)c}nwOjGwfe-qW zGM{J-&hQ%mpS{TFj`k^rrxtHDlRaj%$RA~hAM&}N98Qk@@nQ9cLI&noj}%RqE3x_~ zIvE5*Cp14NOFUv@i^8a0RT$a%{Q#LfL|G&RwWY}91M6?F75FfvtuFixJ%stb+`bbRs?~$k~afbub}Lv zi=uUmVPkF&@bQE8_<0nEzdL^I+IuABJ&o3HEo$F~W2j#xeYh%d#W*ed=NroRl>Tc^ z;n7#a1p|2$p$x)$tWH zojeZ@ohym3(%t8?Jqg%mWB!gV*?O8=k_C(O;Tg(GQ*^Z8$@qOcLa8;N_ji`&?P7fz z7DOt!!WXjJmgQlF98+B1sezIf<^U?>eWu?{4p*+oFG)>fQq$ZoJk5}0+$}lLJ0@{o zE$4XNH)jU*wY?eHROw>Uo3Z^N7dk0z@8bU7DBi>Hv_JLnwIZ)A_9xaMe^3Koc3XGx zJcG#0L9%R0X^6TxTaxx-4oKB3(P>5YP0ONgRUvdTTMw&9U4v8ff^8sMC`G^PZpd1Tut;m$%75I(zgk3q#!$)6pfY&PWjZNW7mn|R z5^ts@H4}-X3#%*5Bz_Ow<@>{GRzgD8W%t28(!~$N|FZy+8RHt#Af9al>}`B;UbQcH zGS=<9(gDEb8MwzZyU_C@R#HmIqm{mBwH)a3zDrOBQ4X}sU=9+WzHRTv&tX}>XX0gQ zkFTrpEcQLH^qz~rc)H!S?))$d6X12>67AjD8iTAsuT*4+7Xsx5ihE^Cl3$4Rk` zK&@rAyAy_D-*8_EgH+9hK#79Y$eW+u3w zQy993AZ87^T`0Qkq}d^~2l>Vzkb^2H@mPa2gfa^moKYgiQNzcMG)^K|o3?8ZZm^cy z$)+Ywzw2R==!>8$syb^E7*>e~`PdN9AUCwB6~{>S^@JkhLuSNxXe&BK!o3IL#t6b7SP&=^U&i(;o}9n7Wo-a%iF3ThLb zsV-`s6}u)fXV2M={0eca+dUaHf1B`SQX@V3naO5mOXomA?0I<*TjN?OqfMQ(;b-R= z6W_PE3c%i$biVg`3z2F1KQ@obOOVF;2N$ng_oJ%LhP;8kL+98~_E06< z@t+Ilqen%vbHt)Ff|q>o4_z3~cI{V@sopY6_o(1QXz3F4)7!(A>b{crXNKL9*Yd@g z4n2+V?-bi!0ntE_2G}i8ovwNzYlmkJs>8=g!eUnszXS-ddG%(gSo4Z5HJtZ;nNao) z(B;amG=ETDs^NA9MUti`{1@OUwpm{s?c8rA z;KF?6{B&?-#_h?bMsDy?Th+PYT>E#J74G_ip938@Lut@0-q8Qy=`H-4e8VIFI9dJRdLe(?O4}IAYQ2u|aR)UJ30>UdkJh4xAa!530!FbHhZ;C?xvja$jqQRn0cl zlKLh0?Pjt7X8e2b6*KdumfpSWbp4$1;LJfAcRj|)lw(r=#RS}W+?`?R_TfgRnKB8) zLGC&NGiWa$wbm-gS#V-@U-+-6gg1M1bwLILA7PcqiRVR#F@=6OUf6^NlH3 zAi&dIgIn0D2S!IdCK$fY+9z>@gi-DG32jA36`^V;35n8A2Ug$qA6a+x3}lWsWLwD7 zzGs{LZNu}OH2OEy0uz(`n0bp{BX{|_6P1ZTY{PU-*otJ1%$`SmBj$(~QT}>AkNKh_ z)Fy-vr4RG5EuL$`X$W8MT3^7FdN zG=O*1t+M&~=zj8!$TeN~`_0kLmrjRS-H6F+D%~HUy@dg*K|0R5!d?u-F&(xOW7dB^ zzL4>)`wWpGOnc6t&~;{sWxQVjFc9{}l{|5Ct*=7dgI8HsCu)$1vcz4t7TV@Ji~A#y zkY=5(!rEvyWnq)Vxa`JFn&`H)W<{)o!ZWo1)7rtuV3`6VJy>dZ>kc& z+q1)xxEAE+Y`Vae0Bq_&0KHP1S~umIEMa!Y%`?*)dX9un8e^)E&kd~Z&b8^*zOV13?l6pUaryX#9AZgtj$1>g zcV+3@v)#8REA(Y&4iNQ)p32?L7JJVfuvD4q$>Gh9YC9x>(E2AXywmWyT27sp={38i z!*vP`=T!Dp$NBv2Ql6ZqxBtAG@N*W%5-H5WF?3!i#!n>WD#8B_w;oJ~+)&KW3Sa1p z)o8!HvcA58!qpBzxSj+dzt?t&hI=*ya@!5k(TqRP<>2d;bSm(=2Vix(?3&%Gk`0Hf5&v$&xqz!XWFE-;xn-W#f99%Kx64ZnlAGcvB@0GZ0Qf=YXu3 z!k2xeN_|+y%Z&>8xZNL?%&(>^JxiHof0j@#;@_=JzPD^1And~JOS|q69RnE924>|m zWcu0;na{75a98Z?@=()s`EMdwd3vidlUr^A-ZGH(?>$h4SoH~GlG3Wmf)?5Xh~tMZ zBXt=JdJZMrhq~2o(Ny^3A7lsLf&T^TIW$MUxEJx4c*3s#z1I9+5x>o>g{t#rXh#Ug z0|>gE=K`yyb=-+AckYZsPcLx6NVVl}K=x-v6lf=UD?L>c*UqJyG1_5{X~!U#EWMnk4Oh zd;2olu|S#8RCwBDfS=)$UF2SLEvLz(Mw!T*wlpJbgC*y#F`j9>Xwu^#&P_r77o8Z4 z5$iHms|b7`BiN~>74l&BkfIvKQN@97Y7C9C8Wqdy$cX-uEPWQq{Nlc;N7|rYW#!x< zu#740*isc09~@bAr^~&3F(UV$bF6!T&SHN?n22bblZW2y&sLI>E;rSeS6qj+&)5hB zf^^`|>)OizL77)Fl}A0C$FUeTlD_;EYx(ih4UX>K_l|y})-5B#Sc8ChZL0rp|JE?` zJJ35_KuEw*f+^4*`XdR1Lz(-p?Z{&LshaPGa2K7Km^;Y%y@JFBum+z<%Qte+2k{C3 z>cmzz=k_Rr_X$qLa$3T*Z0xEY+Mwy5Q{i2RSX2$Z_RWJ1`}hxs*DPesWRtC3q$vJ5RBl*kQ)%W1$^^URtR@1bw0>?awpNX0mO4mxVn0 zj%RHXhx7BC(ib01L=2d#TfH=00rTX$8@76JtO@!srK13rBh)^M)P8jtD`Pcb|gcfHc zrIti)FQiuIf9<#-UWdI%{Zu>pRN^T7pYx?V; z0Xr{utr7jG>}%j$?7i5pZGb6n#HI`3$8-eNE)Dzt(hf^3UTrXpCmg@Q?BRMLcL&4c zIGax9;Rd1&Bf~SY0Juyemq8R?wDa8O1%mR+ zkw+kyyM)_j_u|sb-Fu8e(%nI^taJ!A52 zv0QmZ?h7th35BUpiOxy$c(zR5<~)SZ{71knd#U%^0tuYc=6MhsIjhEpwtF^NLu1pD 
z!Jh6kYP#7?=c?7+gf)W5*R}H^FM-+)alFE-mvzAlk&C3Omr=vGyS>%A0%%SO1e>2= ziY*5q#L_|i0WAyYBfZ$$yE(IC(>5sXkDRTI_Vi}%amizWd5T+B*StaQZnrO?8|koBSZe{_~MWhgM--|S%=-%lRfYX?ljCAJ6|tTJ?57i{a{ELk6t-`GmA3A`eN!};K#})VRCNpfN^QC0;SKb1@WYM*~2g?Z+p;Z$C58GK$KZZdfQGNP>yBANP@bfi1DFQ0WQ_Mp8oq;~!<`|{r)v=<0D_6^c?YOpGKo8(; z5Ha6f-Z~vp@nw0ymy+rnoTcJ4+6;Q9KodOkA#abl%GQ~=`53J7WdQJ!*6ne&A2`w}CPkDXLq zD4G_Wxp*H^n?~_|?KF?2R-RJ*A^yTy;;)m$JQ0xzQ!I?3u%C%F7{6$)Th&}|C)B=b z_Ls8@2Vy(wpe#Lpr=c%S3!|G}#X-H_N9+84aMs}39tk9I4`-CMdkx8r8(aSwJkroi zJv)2pA)9jKYYQQhapo`6ZG{mn;%bUr(FSjaK3>WOyH{XyJ-X1JOwa~0uJBH1fZ(rl zDt2l7i0=;}wc(EZNbm5lstw6(g4pK+Mu0k28v&j3*(jm^pWqXejTLcWDO1GuB^SjS1&&Kms0x;P=0U|FvB_YtWAv4ceWT47=LCW7|stRoJ z*dT6?8MClnFcWn;8PiVe`WmKWyv*in8}^=u=S-WQe8S(I95HE>ZH!Z_6uHT z1ks?^^0y|8@@FEiliH@(`7u>VHc3 zE_etE$6lR(my|zgBE{Yy!P^%{b>k*xB{=W9b+j3*pN|cYZ>;>4la%L6+0`HMM4v=ZQ!G5!2>4KVluDzz;wD*3T7$G=cr#bXUcc3b7@LlXd;SV z+us5|H{{xZySA(~0$vuFC69~;n@Q!Ma#PO!MTlJO?UoRIqU0wreP+8=#>UzcBHR%D z#E?9*FQZX8SmIDaSiEm1n4YXMNOGz99Ck|}5)~qwq4mduA{yF9 z?Z`J>ZYoYww3k0(@kzfZItNU;9BWTg#oiYLj}`a5#JZwv;9)avDH z-v{2Edd?0&VZF(RwwSz3=VCTI36jTKqFr0jMC<&w?KgG-`X36h@{FT_OuxrvE-P(u z4|juqc{=3T-}m5R3riUW^#(HcT8{za;C0Mrn!Y^rS#==0xWi z32nt^;ymjLAKYU>g8p{jr%lvC@=HiskD<4H45KermYAM0nvKGq%?*bri6V59y!nrNN)P`|K4Fs z+?|VzaH>oSN%(b{r>!YcA}4bBV1Jy4N!w)a#_+QA9fTD=>D%O zNtE{$n&{dcyf&O~AwE($X=$p#7q4T!Dz9fyQJXNiyZo>0Z_kj&8avppJ!9v=bU;Yh z%PnB3dh+9sh{^hxjMayTC<-ZnYR+_=;S`C^>vWpDa0}h$ifK*v=S@4bV_e|t9n^-> z%n+AX+y`N#h)Lx%)XE{u7}d+l<@*;xuk`9A$$zQLxrNvXe6##p1n#ih(vIp<50Hty!RK=aO*6o`UQSb(MsNh9(93-s}h z{x*lMzg+~#Bl4Qm;-*_v`_Y3ga#g#GcYB4aog6&!(!A*QbJ&`(F)BrBK zS+`IW6fTjDTP1Z%Ua)s=^mk%o3e9WXRbmA{do2+pfdN)VA7N8e1opH&u7{rOif=7I zZ7T}OVvpC#53?lX{&j$}Vl^Mv4g74gKKDjRmcuD(%dFZ}ilh8+`+(0I&?V@r;&a3} zSa>3RKCSY=y5Rj!i8Ny&VTneO0587vX3r1KWJej8es2Ds(#p$tqUZ1{`U&WPGaGWw z0Jna6AHO)cUo0MZu1_@_I(>z<;;v(|BGBHV+%|IPtctpMLm8WVF^G z;N5^yZtK~W0G7pI$fS_H(!bOYDUSm^?x0qKNSo}k*UibRLKTEVg$xR{JLnMkr-YgH z1(9{{iQd_rI9iOY$|lsLu2UEHX^D+CeXRaA+Pzg1>0a~j`W;?t+GQ)V?uw{D2!W(H zpD?D0dxeUjQm-UxH62IXB0?0aIni)sf4rWX%@9&EL3Kdk_fz|UArCX~d+!#Z#z)@c zM)D#8=^!1%g-~csn4jqUh6P~V;vkVnU10`H?f(JC8F|@Z>Oh>Cv0M1|MLGsuy!@mQ zcL=PUq1Y*_9(pN|cQiWV@IZe5@;#2vin-8`;AtM)m^3>5k$1jh@grYVc&Sg4+rxWU z!)4lVwweDjAP;wpL(umxHbm?guoX^4x)0-JnxyuJ?$IXT3dCP6pm39C76=Q(Htd zVfeHh_J?G$L^oO3N|EcemruTT$%U&7GZ1U9r82$RtI7jM`$JFzq^vk!&)cDD^h`#; zQ10n-92PiAe@~p!-V>}o-?%u*Pe}Sd+)rAp(KtX|a3>9?H|`u9{Uz`we!wKR5$u&t zZYY^bw%E`7WVWcPx_@jCney68aE{!VVhi>_mBZZPGJuQ0IVt;pfIsWY@4s1iL;EnQ}M4T`!Z1Q z+oJnx`FH&$4d+b}nd8wjc?Ixo;GLdrRl1Bu`rLFJpz$B~8a7`esSrdcH>az4w<=dWgWBJU{lqZ4SJwn>k;CLXTFl&pM0ahh( zMsa!RNEX#I;~Cd~*@MNKP#fP4hMkm~88VEre>vj4)UiuO_?{+Phyx_&vh%^@4y-u#b_wxMJ?$oyvN-eGi>g4U zUkf(}aV=ht8%X4H(_^pKEEz_XDuTt8U)Z`i(aRRCqwHCTk05bVKF2(@Vx4yhlJxS| zc7QF5_H*RHar{WuRz#<4{P*(Q3ysMuAo_psKQ|7Ft^OUHpV4;=G;#5a_SLMf(KA}h zpn}7;C#F@stKj?QBT_{>LA3gqcj56F{&{`Kcr%8tTn2#4y7>AJe3cm_G;q#z{(Gr4 zGW3!4zQo+~`6!_~jd<*6SL+4=YT(*NOO-$=WGLM zs#ai(b>;m!YU;%>O*GS*3441&!20$kh#Xlm0AhZvXW$bk^+CvJO;Yet1vclwFAVxJ zoyiA&VW&WsWt?~Gz`k=qmmWK_&Ns2~*bqmbrYs24zzVw+y zS+$KkC6HQv;&T{srbr5ux$_z8m*VFX-2wbp7$O)W4JAs5&U*F+A1k5%KW=-9A6~L) z!j+J~qZcftA6ZXP99HYpUqwXDObwt}U3K<$p*!(H^M07?lngSKit@yp`*J1clsQcL zle}Yq&%ff-`qUctx&L$Ab9BaMI5Ekhfg{Q}kFs>HkKlQer-I=B`MLM7-)oij2+c1B z>5)Ud9svoNKYOI;Gd9OAy%x!wzT_V^g6YRqyMv}Nzvblkfd;}En5V5)-LAa*dm=}vV`0~y@45A8T{0WP zY9L7PdC8!^s=S_Vd3St{MJ-&3!YOVwy%4;omjZll@?6)^&f{WM?^NP<#RIP01q~7H zcJqV2Ma%H_a#Jp9dMw|+46rIx4d9?h9b`3#1-%vI=|?97zfaSz6BvIO5tCvazIyjN zQDqB8kxUK_!NgkLEou`oBA&#==wTI?hb0e($na@3T*Me_VmTOes8bRdBsfXp9QO77 zKFF8vRjhpuf_R;ToCdAAs#iYOlRoXhN!y<`%$Ws2a2==URd;iyuY<5?HC4)2^KW1c 
z8oQH=sQYO9QLC^P$Ljkvf_FAt6jdoQ!jwl9>{}D&@O_Pye^-_{Wmrx{vTSnV66Q3r zmqpyLf)6RMQovM$QtWw;8Mni(Q7xga0VC>orjp|b}&we$n5jm9naW`PT+pH+C$-7Pn++6_mqlFpp$$>5uLXCz$r7jaZSAOD^WEt5igF$QqlmH8)mpCl=p;l z02J$E!G=$x{8X%C?)ED&&<*c#qdvRVV=GFCK@0VCIK!%JU(1rZS)6#C_0%@@)c!XI zbz+>|8LI*L-pfRfn&Z8C*IL(+o3+a$yo6TsZj9(%>>*o_^LKxBO>V>A zgD=`7fj~@Ub|U}a_>UfgFJE=vD1_A;=IIzBAC0=Oi*R$(U?MpL_L0HL3_XOyzmq0h#IS>tZNHB;v6-*3A$9*nINWVdg@KWliOIACMn|G14k^4KJNZ1hR%RWZ!e#46J8iguHQ z(nsrN~5U)vv-o^h&h=R3u^%f;{8D6?wgD^ z!*A1B|DZ>9!XO}Nw${L$2Yy1%ehEK&)GmF#q6G!__^tgXq1z>$#Cs@*D%F#3ixw6K zXS9bApL;w4r(C$3jd${u2HYQ2@S-+`d})|m~ByxbH%pW4!e`d&-1d7Hz(sS)@Wy{jMRe_ z8K{3RPzjY(qq=9ZscGrI{IfRmWHLzCTXF(`Dvzcn5G8Z_cyTYk+6~U^-M2DpxYuLd zsZ*6zB;eYD?Q|BtkK5SwZ4!Dg0LauuMY^1K(5<9`XWPEzo+6ek?gKmMnN-Eskds6A(qR~IG!RDN2)+UIbxLyxS_Nt9*zee!8 zCz0&D?&hfD@9oKXG{<0%K@*X{g&xZU5Rb@ligJe$Gski1NlvTFO2}21d`Q=b;6>XEr26yC!#O#0xwc(x*2-~<`Vorf7+$L@5ePwPiLoJQ}X*J34qGTYv3O7FN}Mwb@DO&>EsIBIOp zO>S>g&(cA|dP@dibRmEjB3JCxp)ga(R*$I!qi&7^%&#Ua8#}AaVf*9Z^r_C3T)*ii zb2BD|*jgwEN=xCUBmc>(!Bzl7UnS2ij8#64ION%E%c)eGAV<KH=gOduT{6L3fmDs?q_>~rBRPzsf3p)Vp87*mJ5IlB5R zB>LA}y-k9WZ=zm;k#qAWiRl9${{g>49s+)-JmwI5b93xv?$IJZ_3zs$r2PT_bwXbN z+lH=S0{D{C1>?Dw=q}0rOB;GKi>VE_=?J za>sHG#y+~OIy_LR2ZJhpd5XR+d-^wup@7AFW8;srNif$-{~Lb$1$v;f6}j)34B3Hkmm8*M)oqkjdp|Io z;$sB=@`Mqz>1luKG;ikkAex|-4ZEW>z4 z6{YMxIdnP2TWasUne^(;vl?LDn$KRx(kU)lAm_^gSNRdJ_9x8Y*?SU(~lj<`!9n8qUh2*Y+CSt5)VLL0BGU4UhWH z05Nflv4dQceG|pUN$g&Ze?X7zPQG^5YJRkzy0fj*d%c&h@)=#LL&WP6N;uJQOv#(X zMc>B+o3cujcua;zY20zV4*OLc(J_?MtfecPENx%+SZi$ZIZQ#(IN>mXk@{WS)lB`{#@%1o zYLpOrhy39*C7Rn^3UChJ>)iZv{LLJ+&|LJ_aBpqOl#psUr+cEq0Q#XSqf>7VbznNT z=4DXhMYpZqq}J?}^`B8OUZG&%{K8U+Jo>I!#sjnjT>cYsL}&19`K7R$^fO)DJ6*)q zE}Qqlo1l#{dQD4#@?#s1kp2F!{N27v#Bk94$D}wZ%9h=bH%nIdhz3^u2oqQQ{;fdA zzf;0!d=vO3%{{1>7@e@zt=Il9yUrHbzJ0`?^kJ{&4K%swB#-oOR&m-SIllXOcjSu2 z!o(9a4Q6M*Im~rMzO8)I9JMF+2mQfN<(<9q8{+&|&O*-wKk#t}pw%d9Uo1{Fa7$v1 zSl9||4OVZsk}(tifwo4v%B&(Intc4UN=`>R0lxjzRsUGmyhM-8(7XFmL0oHPM{^HT z(8AN!XNK_pHySL9FQ}Csuw@ZQ9m!n9j4{26Z4k3$?H0+Gwh+7i+1oXaiKoZloMQ)zi@Ag-FlX@BFKQ@S!Bes z4t=nRhY`t~va^{QWyPfEY^}C*Jft-bF%=`(3G^QD-*N_gzEe^&IB>vZwP8DX0)5}6 zGLZ;BqsUh1*&o#fo_&Gp{2UHS!&x{4)4Gh{6aNt@QymUew(DDTQcd#vOT zniHc$vMyMrTe=Wkui0?>r7{Kp`0T34vSMZL1@A+QDf!a*puAA+6N3P5A1Y9nl-EoYTSFa$uPq*KOa_vDm<88SALIoeepA)RJs{Yl7>104atrE3Wx=zRkZJpO}9$k3VR`|+>5U#JB z5=$>$5gT;U0t}kV2Cl50_hmOSSG9Px9T&GXkqgP&w;h#{xnZdJ+%Z)Y8_e;3im;CEN$mq>wQY#p$q5LnVaHzQ)gcP*w>+;$)Lb@8rAG9Bz zYXiVRNA?OfZ8sNnO_xFnK|uq(_O5;hNJ+OT;L@evZ~*5U5Em|SW3FS#3mw2Q6W)faD zYZ6`irBQvR8Mkw`*go^$0!>wIxUQT=CBTsOWfgpmP4`V!p#-8G#m}$#9``=f>rLqG zGd=^dm!^0HqzS`tf*f`T^=K5xIYwb=0Up=j<9Nyxft@+hxP2?QrQ*VOf6nvgfe*A) zzlBqz66q>YM(hf#a3LSO_}6Oa7m8E^ncdyNqqnkD z+m()k0j;)q!M)}Fu#YLfTC9} z8)<49h?kb0fyOteTJJ6Qb9J@CZ7;1s(zI6@ZQWyOcJ2Z?{eB;LJtMlJD1e#nLy6;A zf4P50f0vO2F=;jn%f!CK%kv*jl<2QNj1aSo=MFgbqO~4%U{wLX^E(rckQd2a3!rxk za@2G@T=0EAvD7a%qKnW~byC;NEo?zbw7584g-2c;nl=?~gsY_Lu}RF2Qz7ip^)dMN+RqJqTV6fic?OjGyBFO0 z{0@c|!=}VEt)Y-}^gn{EClh8#URkhIu1VYz>mJk&RDrG&^_I3?~G8`Wd*RwO}92+ZDVga*fI6j*aRs z*71}Ikr4Z45d}GDwu>G3;tO4VxpTJUal0W~jIDA6(BEb+Iq!H$1;{4+k*?+wd6lla zZBDJ0R>}S#!9XccgBxkF8y$^C@nf^ZT94R0>`;rE@Xr`^MejU zzR$U+g-01wqgyZV|OazW?8sl@5wDV8zu%fxZ(B!q_n?i;aV zLh?hwc>#tl4Em!B?TUB9cZoE=(fXXsSICvdGjbtr$MK)qTt}Yv*ThbC=OUmt!$|KPGY1&uw>Vs!>|l$-kTxd_ zyF=Op2$&v?{#^e}mEfPYLL}VIvQdhl7YL#6Q0 zzMOLXw4DixA=THy@vnkAT{^b?h`DOM69om$bXWi6%R4%`A``hg29uINfL}&lNUZ4#LJn>5B4(cmPmlT2DPam7MHA_^GOQiaG{|j zsBi}QJAY&e!cIg8b?`BS=%3!3qD&dF@1ZWWR2ou>-VyY?-4OTOTYE~lx%GCmqyan~ zfGLSsj6KUSk0n>Xb>+mv=*cp<8>lZ$E1cFq*jhJK{9LNhgU%~G52(pcby9v`c8%tJ 
zln%mW;ShwV?_Oq3&q8(0*>P9o_t8I>cij<}i%SIiQ^`$_tqNDS4kNZSIs~`NvDs>_ zLU+-_BiD*%g3VV)$FLV-WpgGB2qQOA}2EU9s5A%ws4m!0#r==ADa-A>mt z1(Q8^?FrwUx-;!pPUNhG*MFWK4;hc*j4d#Cvl$~-CI0ECNe3+xl<=X{=#FIilHbS{ zSkm&~{h04m|m~C=tkAPCzwQoYRou zPnn_S{DoND8Gm@NI`3z4f;bPu<5QIZ5~~m$Vd3?5UXq+S2FBfoGsX*}JMR9`czdF# z-5TzN6_fQKt`?g&ge}p3wwFl0=k$N&>FD_Yb^20rc`Zko{^akCYbliIN9*rDKgz(| z5X3Z;;{cx`GiJN1)Ji;mHo0BP|H@#@iYWXw()z2xXeF7TW#Xw4wF>%5nH8^XyY=FH zkJu{)2vM_%y ztbSa}cAfCZ#0*4D9hSK1HxM})j|%W(XK{)3_$O86SHmLw*v4#PKSIH$Miq~}t#GJ* z(vZ~AnDTH>z{hef!qF2KD3twid&ao`t9KmP*yn#6Xy`NQ?@3ODrxh}bMoS?B-(b&5j?%%eYMqFXx{6soFHx zee=zw-J6FIX(3Vzy!s+5l2)xJd1{?QZTbledyWAPj*lcHJNInlF%2&YXQlaHW0(cz ztDy+p^i??r{}e|{R-XrCP5Ty6%95NC^fO?JGDRLO;4LC>)BI37@Xzrx7RumRV3a9z4}&Uv6bPmw+N2YcrT0X)&OiR; zLaVa6{=Z)!B z`*4#SelsTTNie}6Qe@jpT)W?tFYcUs7k7xYU6Y#*Ld`A$Cmaf2H~U*RKYc``TT;Ii zqukb1v=(OM7fYrl<~P3Sf;s#fK+l4+E{iW8W^G!lGevgG*1?01_IXm$i`1A6@W1 zted@j>{`NfXLx21j82_u%!9xjrX!rEj;^D=%BRj#^2W!{nh5ybb^Z6mU3Cq2)<(ZQ zWq7(a6g%6gA{aHkaE5%tz^cgxUmceACtnL(X9_$U<&xA+VO}$$TaIk_o`W0EGntvc zJv;vQBd^g2>SO`!t7u&xSm0mOj4e#z?v$OjZ`u9$c+b(;Q~8Gy==<^6-U!QU_>O}M@|Gi07L7lO_$yxQJ4Q=3j{ptQu<4_-c z7I{Ay5^nZ~g+e9#+_&G=yGvB`er9glCpER2lY_SgWGt=1^&}OX`SCS$N>X=vbSRM% z2jy5gf?LPFfrjZY+fE_!7uA*O_RLyxmh97VNq-^&!s`mL)9G?NI2lV%zx0h`z4ms0@U1A! z%M3JlEn7XApj@aaQ$ZTWcEA`HGcvPrDgM=pWjl)_w6L(Jd-mrx$5(;dGDdlWmpfNtRR1@k&RWo#8_LR=1e;r1V3FIy}F;=N>(F$@qe?%zvUYw`ADe%I+HZX zA6?5&>;=!{OHygVisHa0+1{peLv}1$XO^xes87MU74|B_{jFOP*1h64|AC#on{(jf z1xwc+=FK$Ee%hD+VRKR?CrkZ#*hJ?j8sGkpnd)6n{H7j-al}&Q@QNRuZ!K$+oUu7n z+j#IJqh{KT`}ieS0Gs=#Qbjah>?!dwM9B>_nUp~9dTZW;K&?%*t(9qW2Y;I6`Qf~F zS$k|20$dGw2Jw8WJ2ukx3gKmTUUN*75ORh|syVk00OQhu%d5qJVRXLVTJ4JOmOPf| zFR=L1_)t4&<1OmrKDHQVIU{u2IxLBOwo$pM?2eidOH(6E`u|t}(GB`ECv8C`1k0DZ z_CPhK*+zJOjsFuoV81QptQC=tjBirO>TMo>)2Z6tV2C1U-V30%RD~T}YDG);EQ)0P z@|JG4vhjnKfsEDleftB-TM@f$?6_BoXvF!F6gKaH%;(_=p|rlPAC{klmge^6Bi-(T zu~|t!_Gh}BU#343-TEf+D&!# z^3uQY;qEzjSw_ApR&Lh5f>R$%`AJ|mfvM6B|R*>3?v{L9~53bissA zk&#thEim`T{X)Wei?`5C3HkV&oNj#r>@CjgE>gWa1rd2$5X06}Gk?PO#=S3l-Tfj+Qf1O#*sb%m{V^AZq6Z0CZypb{#guA&urj$Q0 zR@BoxZnvA=3pwZ^8Rv{y1C2gf62c5bG%dN&uooVdKqZ6EPL5Gj!QMgBLZBHTpNnW< z!1nc7Kg%^#2IP2tKdAm3?{(s`0!TLB@U(7d6N)%U3$_rV&eyT*mp7-Ee(t^LS3~rF zm)G!e@*n%m42m%gOz$Q9$>{{fxa%>xBrE$by@FU>-02}JY-blGb=*z-uBK@^+QvH2 zfces1z4?I`B4h@6R<0|UU`w;Gv!xQ9@yC|N$QaDUjccBQ!n9w7-JbEGWIAF?;~OY% zBC7>q=x;i)yZLl8Lk8Sd>sapwSk!1sL@H%GzZt7El>~ddzpUzm2h*Np(D{^h(uBF)(@G zO$&gZrAu%gCU#4f0FI65QG|CL$knEEdfbi81sk^H;(7`31{`5lwNr4M%r_?gb*kgG z6nqWBoD9WP0f|Z#=E}2nKSaA=ArnY2T(+qi8(-eU130Z=f)w8~+DD-`6TWNmZ3&QE zG$4)*`ARK55|3I{`kOHPg zf?J+x(%d2<6{MinVIj673V$U8v^3A`yYv8@(i{dP#0_>h#&v|C3l3)*GIA{;*5aDP zA$#-}pr(;afT6!rIwl*qx~c8%d9}ATd~SY&n5%wLS0&=Kv|P%+hMc&zJ_xvp(eLFF zlA17Z6wyg`44F`pv`6b-jxPa2Y+|8sEcP1wf*UV@c)Ib77hR%HAh!-p&Lx^PXV(N* zaz$xvY<$KG@84@~ole(r{xkKbWgq#t2m9msFVCON$5R`9>31c zMI#-p$8GK6$yv+_bN|otDFQf@W9oOJP)u-Bo;=s_cFsJ0E5x+XNoHf(ra9Om=n&-} zkIh;WI|lJ=(66`g_#Xl$+J1Ujrd69;jJ?w55k>!I{K%<=@9WN4vw17Nv1<+gMK| zN{tWG%(89o_}!aNBRcNqzx*=&>z#kkgaP{I87VCcD-Ui4T_dlT`hLL$+XL}G5Hg)^ zTyV+R+lBg4+#%!5zkbv9+yCJs?tN9OusmVS`n&q7&&2onoBsd$AH4tIUzp@2RMH@( zoGLl7xqHj~8(ZM}=U*W^T--a>T?aJo4V_Mz0OYLdpeDwy(_UI@c;L-FHuS@*vz9iKc8q`alJ;< zSXWH*W=LGEIVQt>YS`<( z_l;2J2fjk)eMyv!i_?ZYKCHikMl@x10^)K{TEHO}g(N=2zSd>vFP@Qx+`PM%aJG?@{Gr4C=A=V{}($>ZPOnujKiR@R@wFA8vI(f9Xw+vkM_pGi!Ns}EJ=G0soM{* zT)?zl>u-3um^|R^yw+3wSlTjll4{o;4*k$!A20|6Yt{#Z7Ek0*5G+^7f~k#hT0=mr zmQC$qiVv=c4HRsL+NU5#n3p|J1Gf~kwGjUH`8t|h=NT|U4Uzv z_Ab}DwBgON(VT@iL{#;+&$;2(+k4-B>vqeHca!e4<0;R(czeY+UnQI(m$Nz2t0Xl) zxO4Qo9AEq2K33mgzJEXcbXq|V0D!-5a{c@7aL*_M{q&6w#;7|^SvWsBf-g8Qd}IBP 
z!Artle0lz7L>z`4iRg?O;DKg6!kW0{y)I4FP(jwkdOyXE4GHY@XO|jZ}b}o|!WRH9Ux8y7Aj7!cH z+a$@c>Q`_{8}d)69}F_h!@}X{ntE7c4~BJekOte-01)@AtNTs}r1_agHqOkPVNToe zIAyy$qir4DTd;!6E^o)LVB8((MT3%uT`coZ9bPbdk6rHyiv9q*Aw!pP?9?Qf_txLy zl&SIrJCa7>ZaN6uc;Ad2SX3iF`#qXrARdw$seT2w=fGK^Xkm=}d%AX+lwI`$84spP ze^UJg8TE%C3|wjHZ~okecT=6Pd%>4pv3>n_KhA#(W9nCcbrIN$^y(7*p`~yB-Y0HP z(Tg_ze_{Ob8*Z60NAox%w~*@mkvapww+_ze2K}h4hUb6u6|^S^JDh zW_I9#tz-^e;?uc6{IeyZ8mG zCGp15qb{HV@Qqk_=Wddy^nokzz%9KRHbt)M*4TvjTF1VR^yFFx`@T<{o=fQ%x+=it zCA|0KA!q09HtggtPE%>EZwG2PGb^#iV3ThOn|LDVj6FM$i+v<(&%jnpF%PU1I%nh7 z^ay~*4XITHj(D0&EFYF@+?vJo6Qg{cOAGUaEX(jDZ&_T7p@MfleCTRSugx7HcCq){ znyaH^#7pZbg4Dd^bBaaNsKmD9?cAibd4h^Inp<6r0#&#t z1^UhNw*BglU!xyWJ?wuW@IB*&mu`RmM?Z6W(U)JYZ@`~bw@ZY$*TV2)wlDw2E4T0X z;m_C}{p9m4(AF7GS0j3?BdG(+o{6HIdi7V{SP_soS){T58D05V6ko6({D4E{$SPE%!lZ9e&UwL z@6?07u)rPtV2fGHp?}0vV+Ab&z;zgA*9fc=EKBz%sawHNGW%YLlP*M$gNthqbbUIDEG{PFUur)&Mf*72p^SO3=4+n0RP zRonaZhof%z(4E`u`dtaWS<_$ud;4$aulAj-FMVCDziRlH`bEOW ze8xjIekbLD9Q;{c_n0KrCzie^wC~%_?miogHQdvBjNN)u{q9IRPcfhKp!Fp8&aYy) zON{D+l->8b<|Tb>&LeE(W$+4l#C;f$KtVmhtK#`6EH?UR)>5WRZVMD_&lSo zEpd*4Ub4{CW9YYb6Qn%_8Qjh_^>sWMSN&%<`3{t3XyoUBh0xCfyBMokZ;-|meKmAV z(1NNKm(ptyy8)>n?Iv@jS59)%5MKg|!E3aWvQ4F8Os%6e0EFK+W}#xKjfE`@Qr*t6 z)ZbhkH-6V;;qAPA1FZQflP7a=Ag+Vt%aBn2965sT=SYZp1BYVMzNM}@r_jy>t#jya zZJK%a7|l)xsNXrtZ5$X4xh#O+$9bLp7xK){zf`}aeb}q>nm`vcUHzno_@iwf@BH>R z+%!IUz;U-;{r5fk1;KagWt^oSF&Kz3 z>!@qoKjZP+xX`gXWHb-_t>1AaCh2`0xx*XV$+TmIKUO_+k#DIV zKSSos;{iyQy0`Ihb)IV9nxhe|qn{soKd{QP{;mmerYiw?Z1OxHD?LMf)ss6xWMB2_4?eCC0!sgxvyku}ZZ5QXhC86hxYr z05C~k1mqD8m)5q5t394G>+-PwA(gLr~DUuqh z`=3LQvCl%wOCNFA|CV@}`y8T96P_OCL=KOi)Q^jOENsZBK50f$qFJ$aROvU&R_ixH zF@)5#1{me;pr4wFjeOMPcYIJ<>nWJ_0dBVB<$sx#Ok2ZMlWPIAI-51rZOU8+?q^Gw0y2)J5@R~9Q0TiC+=Q%+8%A4! zDQB$*v5gU=W&aD@4Yta}B!Qx>1$de;n*kw1fMvAqZpbP1cP`PGd#N8v;+fO<&b21W znpobgzv@4BT^OfDa#QxdU>52VmoE2cfBOb$-I?j11CF6A0h+r0<@k2$ca35&RqGz% zCa@FSHSf87`-$&;@Aj(ieB3cPA2`$9cip@F@89#T?Pk5SO70oy(!yu4q@CSJ0o2UP znEcrv+`9eg@AJ!e`o$^zZ{%{loXF$!zmv!C@tF_N%cmdq@*<@A^yglvA1b)PcjoL_ z^wfhTw>Mx3LP_iLeXAWaKUsC?Yi`|qgT8DApW?cHpS=al9Gz>NX~iT6Ty!#{Y25!a z(AK_J_*Wn6+-K_=;?oak8RYUB{p_4T^Ktrd2d(rK>7of+>u+67v2!tfSrTK?s1VoS zmlm%_{oGT55q|3DFT5Isg(LT3>Rb5(%ym6~@agM{C|uRgC)eg~Q`*O&r2g*l4oew5 zxo5fO!1P%LU$2G5fsB7)z)fLXx|3z=ZA5^f;J{`vq)l-YJcdcfPC?aRnOvm9BGk>X zT^D5N%{LA~9({0R*MJL*a*P9yXIy7FRvaAO{TS=OOybH{wk3UXj{1`uy4KoSurbV+ zrLSvmh~d3scCc0A5_<9y&5B9C=8P@+YayJ=+N<9-L0gNHSzE{B%-RnSF9iUjx2|6V zvcbm4`SEzZUT>ihx$~~wR*wBYuek`^o>t59=G?YKU{XcU(x4xwTB5bhr1< zddVfdJaMMqyr5!J15Qq=VUwKW@n8P71Mvtru`(cYJpSnn`{9+=&|=guST=)Vx*yky z7R!wHeZ~!iE{PfX1tE7Fm85Le+$NLOeJRJzV~%K%tdG%ZRz+)4{E_>=VtlIB5O_Z( zj@1tYF1>ZmeJw!R*DY>sZ67hNi|X@OBo_XT^WHsc{rmTSdeXcjJWX}DZQ|rS{W7Rd z&LPMHj+u*R2g+C7Zy#pskip4=M!e%<~)V%oGTZ}$Y&oJ%~c9C&77@2zTl%Ve9>e5rH6In8pGW7MI_jfY*J0m~zUGLi7 zrf-^)-zS8Y6qZAjJw3D4l?1owulhwoq^CZA_P5@6@AjKNd%a&Hi~ULC>~qfCUi$S{ zxM#Ew#`KttcJ5be&HWE%=vTx;^a<+)Uv}7wHvZ}F-eOw!Up$P?*|~bQz5E2RA3n6T zO*1lweyxmcymr|Km(aEWI&AD}?hN7anOT7BeUFLOy)0%Jj+4$}eGsvqH_{t^=X;wTyW^`9zNYg;vsO1j3X-&s|Q zbIF&MvLNpn{*ue5J@ZrZ`x+m3=k43~{_Q{6-ti{=#Ffh~)tbDb=X`LFSYGZGuN|@A zOi=OVW17cy-*(&0_ipd|uOxIRTBJ+GxR02zy9r4 z`}bsGIr;jw-|Cl=+0!pxpY0&Vzuy0;|G4wVL!aC}r_Y%?+Iq#6rlv>QcAAsb?s|RC zRsGXbbsW(zgmR)cy=2Z~lGt(o7ZF;64d03df_%oY^>;A^f(!n^skI3rBt#`Un_R(r zFyYtSn#_s*z52Tj?2UyZy<(LS?r0*WcY?-9M7FK3<837kAjgJa9@G+Toc1-Z9P4@{?u98v0c}0y+!_tAoT;IGBj30Q zsOHD7rnh7>XqXKue{=T`cI;jZ`S|Pk9}zSzHfeqe$h^^KB$2P z^muly7^BSnIHvwlBLq1(&*@*hP$*Q)jQjMmE%{{S?P)Gw`S-5co~-YOpo@5TPBo%i z->}rH7JvD-U3u`|2H|B>{`hzL6G*A-L?R8Z?6yfqX~51Hl2mBw?m4~ewt#aLBcP%e 
zKDb9d+y{+?u*5}&f~9Hj;q!YXfxGvWCta4gct(xdk26)waU~)=pB8rvo6#_-92{?wXYvg z)JpGuP{CJ#B;cCQu*?RUC?R-hFuN2bscSsa$lWm`4_t&fIO(;;nt0iX!GbX`&n&wk zG3)9@c3o@{6l;7<1g26)Hxw4x`3vdvsh^y_ZJ}CgB$hYDgNZg9vu^!08&~n^nfR8m z)}P`R=LyTf)p=;jqMuOfXW>I(p|PVM7X%qu?w-Hf)cPA}5)oDX-6!`#bo_maXTSKc zFS`uN`?285RloI`>$ZRJb#L2V`$Hex?!LR4u+*Qb(t*B5P>l36q4cXZn`BLluD)5= z$-_A>Jqq@W?G69o2LH|rY^NLNJm}2r>;L=5Y%lrRhj09z1{{2lVdaJg+ruCGpzYQF z__5nFUwqj2K8SnE>u=g_*Z+*d5S7V74w-jM(oi*a{&ur(pKWXX1!8izvvr@e;eOB- zU(Z+%8$6zBG`V=l6^*PIFSLZZ8O?I}_h;3~+&EbuFd}FRh*WY+!>_mBLV(UZH41Vax zQ3XaDHDgTfj)^54XzG&Me!4k*ZDMhcZ(8o^vo_aQjFQv!S&Mrt{m0Zl^j86ss|!qL z%(vV?&-?$kHOZ{6PV8#iu$^!kr(cYdt@ z+8cePkEzOba;PcMdp74wdwx{dMT~F7`)Zd>!bIb3z z{GUH^trh0dRi_(WzkTjQs>%6NmhdT7o!Yy97TBW6bC!xVL#GOG!FF8N7Dapf(zaiJL@QeSqZiux?}1<( zKOe5;$pr^~D_I4Y*icrLzC$$! z>kI}YryGnn!WO($+umBDU%`!qr^c*Q22zzyy9+mRt@cj5^Pm2o z-{(&T&|UPfbGD21!Aqq-L@&v4yMC+vHvMAUNA*)oAGtO^mYd|!lHQ?T!IZS<@0#F^ zyYv?}M(NkSu}ipZ%K`4*fW(OR^*{CD?Js@tL#>rOhYV`ueyLnY$P$ENg8rNp=<&C@z#d`~P|y8fngJR4z%ACIpg zVBx|KSDBJ8OiqE_4PXj^`ya^8wRJgXr#?cj>moL;vtbMy&TKK6`0u%REK8n%&L@9no6O2PL3^X*@+hV_Pc z-B&4eWHMF{uv=)emRITxRx&!Sn#;qy_pkw>fC ze)Vk7`l(6p&DHmRxx38hhocJU^Y8kRotzQ2`ulOd^Z190Za6)>^avrmXWcf>;Op_* zeuulU=r3Ui(TJ9+zqw5x+|U;R&f77~oBVJ?$b0k+ejK(teCXH9xo5bbT01QP#8`A% z9ed%KFqnWQ#O|v@advYSpRK(Q0ap`PeSEgrT*RSsN+V=Wbnv-(vbLuPR$Zy0@T)_n zb?a26Wi+~}H5c^|VBSS00Bqpag9yclHo|^z60m<`fM3rg@u#^~ZYOvAicjub*8>9> zT&qJ1>yz@+qV%oKxs#eCOw{+n1HXAruGjy}hnppW`d~!2_UEXs6p&++cJS68nq151 z4G-!$NRxeGLUdyNNf3_c3By}{{l@JLuf2Yfe+pN3>Gxj#)%W~`e<}=pNBR_rLs8!a z-DcmpU|hcmx-q-`f&tu0P(8zri_!PDH#WA%meXFQen@jqI!J z=o((uo=J5(pXZ5L>!+R`%hGZFJ&@H&LidtcA3iiiVe@v(7{MAHikJ*GkNoLzrwx`X z_F!r=Ao7}*L`560@st}~hWXN=_E}MLgrLG5*Ll;RsxfJNwI))d!TQDC3AAT$rRl-O zW`}Tw$mGtHD>sBKhMB=w19M|kj?P=a`!%haTAOLPpapT2*H~0mO`WLsLx&pHH*WiO zvCA2on>0@5oV@7alb?OqpP7A|UaRW5_kYasVc@E4iBDBO7%0rkLb1kLYV`PO{rn=KM))I_b@mGL z1Ea_Q_Anr1HuZir$yVW%*^y&L+d90r;3d$!d--v9pcf4iPH*gDq3o>8T`+r(UGEA) z`;q#)Am!MpNm%c#zr_)sb+9986z-;jz=uZ8&O4b7TdkT>{SeSiN$%DQ?SSkFlD&0| zo3sl$R*tf(ejwdd;iOj6h) zq<7%h+T=x?C)FQ>6}bKooUz?~$9>xme&;*4KmViU?|%S$fCd_W{s%v_{o$|P82wz= z`j@;huf4*V`UlM6Rb85EzFBYesRHO<8aVS-dYFZ((_9HgvQsSr_I))VY@ibGnKhC!2S}`3yCL0_+#IsY*~nho z*j8tvnZX8ApLLxlIlSzA0&%moXB14ZM(1O_>S>%|1H@->xbikwi;EbV%bc}Ybr}R0 zN`GM_u5;O81fVPiXWMb`9w8$iZd7HoK5(*;z6N{0nV_u)%%%xZF(jW%NA#CASi!m{ zn#a)6G19<|$l4uRG&MThvjF&b=bOOKed%R`@}$Kz@BNs6>CY_6;Wy8T+R5dcg&g{@ zHc#HpMvB2&(OYbrgxHb)EhhN#rfoO`76&vvvFFysEO`U&g$`~D}=e3hdo za2-H%S;w-BcFjUfu!;*14AY`;$b^U>S~A{uMuS1VbJMSn79Tr?&ua z!MHb73)sXYPX7ZTP{V-S1X%nbq4riH<799Xz}V@FMK}s<9cE23>=JZ5_{NYvxl~LW zYu=4s^DBZNK8hgCXtNpRT!u`}uQ_?ZF_rYWbT*S4X%Tap`tfO`r z-l89J5!9IUZOjv`#M8p7@C!XSncm{^ROz81wMAI2+SH z4Rzw0G+9H z0*T(h!Fo2%=Zn0GHFuwrxJzPUyDwG0vRX0oZ@=dH?a%+d!${KjcgLq*(HVkyuU-I2N`cw{Cf2?w+0DzhTb*Mjrmy2XCMMm6!V$ z1bK1A`!o1b-ygp2#_iXC`ug$8#?D__;PN>|QdKY8(C@N*!PI8rT#am9Pq!RQ1IO=+ zIRG&4JQ{cFk3~jEM9}{T0C)Jvy@jqSC!ypNQF4+ZIdEbd`h&Ku^?Sw+p&QUSFov|_ zlN4^Cb^nA7aM@TvmOb+#XKL$a0ubKm#pT)@ATxc!TUeu<9_uoaPI%e$0lx1&P#wQS zO#Rec!?}4vD!ycATc>B2E)JP-gc~ES`hjz+kQoqc0r#;D3r)n22#Tyi*Qj7C($AKK z)nuH*ISj%N9Ydd`1q-ERRXo(YxfY(*KY|wD2}|=Ksp7|@%rc`2j2{dUP}tO+e~ldNZ*&Uk z!{}MoQ`8UXi(dKg6O>LDoP7DpRIPizemSjPcAq6P6a=AZwU=qSb7t)GAe;5?e8eFZp*B^TK$F_g-kKecbl)kL>gr}XqJ?U8&>JL^u z#J?LUq89@wp&pj9NCKt zjB<2*9P>KMvErTjcVZ%c_N(|)Xz_)@`n@d8fkta$49o?*K}|MPhAa}N8_-M9bFjoXLyqq_`pj7I{Npo{=v>-pWDGJ0~_r5MWWo&$hWL z=Uw`R$v=MM&Hj){gGjxEiN0^dOOf28504OLUDxCFS5D;)(;6}>ij|(3IQnI5$;?%S zXV+BJ?eTr)O&sI#-;qrQhEbfJVma-#+9zDPI=>`I=&C>ae$34066_d%ky4zW{&vFcL0NZ2-JC#;5) zHaRd`hu1tRD_QCACA1&eQ3fP@I$U%H-?3t)uCrfa)>(W}-rxsQ-W@lY} 
z0N1&C28Y@g>PL6YgW)ve*M3_6`>0bwo4MS%Ixob@Vg1V)bd2e{fT~|$o4aN}G%&-Cn zJ6C!JPMFf*%f@T~1Q0T4QpI7gY5B|2>hlmuY|c;!hx>hiV)^`ssGM zndxTWGWS9_lY#+G5b*`!_5qs2tP+@OaLH?wyqN{`z{jv2&EqD)7=f%jfgCq?A4u2ltm1Gk zdV>yxqH#Sy_8P=xQ8PB&7TdWc>^Y6ts&q=d;8lq#`Cr7lAqwCLFxope4o1%2czpFoVlMBvc&cDd0bmWq4 z#RBQBIY;@idvq}cf{Rlbob=iateK#a9fDlJdl;*g=GJ6R^mE3`==&IZV{!Ye7ihSn ziI|R?R3y{Oxko99aZw0^#LT|1&?@U8NaDbH4|osnpyje3?>xGs5P%f41gy)#k#OW| ze+4zadF^XWu{8J4FANnVuCXnEM3Cq*2;=McJh-AKnkvM(b7{6P?Zz8H8qE5z4$bqR zf)CTjc{N29P;^6oQL^vc=|wwA)7XZfMcqCeS&xsvypv+Z^3Z{EZ!n}c;34o8^@6M8(mRvGF^zLoM`5zx8Cy_j2B7)xd42Nz!1ZEKD} zQFSF(ba(wnH6Ck8%E~kIl^J4vw60x-vQA^^?*38#j(!Dj#x@ew`(m&U^G^S>kIeyv z-koyJ^$Cgi;uwX&OTr!!=KW%7cjpH>pTD#*b_vjO5D57?S7t@qXF_*|uhkk`F*^ha@>!r5 zJ_vk!IFHu-wN8Ap!JN8`T8zo*L-JA=i?K?gH6~_iip^|-np;k+g!8pRcAaxLN;`X_ zHG@eO8x^0ujR#&5llT(Qvd7$7dL9}`$46;`tM!vURe=#mm~-`aEiS(7ob8g!9^{uB z=gU`z48BamPhW|mM(M2%jD9H3IKR?GVs!amebr}31{d1c%fQlM4Z z_aug{Kdy8j`IRH#WGex}%QpUjVUG5}W@VS-uz$SLN*SqzOJWk9`s0qTb(vS7^hZ9w>!MKGx?k+~pT9^&3c>5|fnbnDVnn!KqKlggLJMiyn6F_SDb0WPAGOUAjHt=@)M2oj?8!az7Mqn_qSJ zwtG`&1SVeE-Tw#}oL!4M%7&WodaZg3X+M3Jy=x9Ei1dbYvbCaSue$!watcnRSu4Fe zzO6CklZb7A^F-n^P7>#6EjB@}0=TA%qvwykzsZeMhiK0FxcfhB?w*o3e+N0f{PtO< za}OnPM}Os;kAE4^qidgzm2`~ecnO!n5}f;ATJ<*T$9dpre!-VgQd`krCuQd+dCr); z#}H8eNZ%`Z9b?Ylgya4vRrB}SeW(~uML>M(F{W~L{WyI#i`{zJi-_2P*?ImdfO`ps zy#~p(Zhmq)zwGnzyZD{Jk)tNp<|0UKhQS*o+X6Uc&J>?t&OM+gnvEo>YjKD%BY9dg zC`sCZnxS`r72mi-C0qP>CKvQ>uBC8^zarAy&ZD^XAR6Ob!oV6Okd}AoFL}H40tIV$ zrMIRc$j#N6m_y(jz&TsLf;$&HHD;wUkg9arUAR7|9sNyk;M_M8@YdGr9g3=-)Ky8& z+~8sK;t9{VaQotKc+~bp%}4t{z~KXZum72AUB4Sa=;%{Aa9sV}(qwW2m$)olhxo_v z0cO9i8(27J2huxbFVmXSy)5Fmooz`hr$~9+Hx^tE=WlH$sCWRx7#OW9>9DM1Li_$1 zwk29QTE7>Svo&?EZFU%!(jucDJu>0nslNjwSGbC)@Z=;202c<2!;GE!%Xacq{3AXp zi|yF@@pWIKzY4)IG(}?T!alx|(oLzFsHJh-lVg}R&od1*R`H|>c3*$a%p3w)=OEMq zPWEuL1jy7kc%*PVRpzaHQ^WOSb6 zu;Yf4vvpBFqwC(8>t8>Qc+7d*E57N`+tWVx(ttk+*LS}8mhGDN-YzDoTIwJAY4iBs z-_)1(SKr)h#LV;W_-^B_)T8I(`_Q0{m~-F%v?%8q9XTU$Ij$Zp+N6%mtGcj51|>94-DUqI(1w{x(lu~UC@ zQztc4YV#rkRsk60_sMLnQJP}X~r3GCbFHdPQ@vtXNo;Q}4z3RZ7#@11_nmVY z)O@nhkiW-Tf5LIreZAls?|C^7@{Ida_zu~xzwN}cwqJ{$W1pz5AIsn;ZVDzO@m}R( zEWGa1_pMbP40XgmLB2OsPxatC+@gyUZOe zi4GnOZDrjly9sHGGa$}TfBn_Kqq*D^^c$LkOH|b)2L1t%@Ke{{y*l<_O2_JRLidd- z$lqD3zP0`-plUyC8k{t`1{#LF@>-W`oHuT3;%{&N^>2IJ_PJkj*l&|}o5gqxZMc_Q;7%Gy z2|OM2K25e`XdUtwoC7I>^Gnajg_$_pFw7)5C-xfi;irJ&w`+}WYy-+ieBEt}{@8Li zgmUU9kSGmtdTF8B%SFt%*H{aanjA6o?ck_cZ_a03CO3A(lS%K#o&S!uN&{Ae*4RDu z?}3=wTp%Fy#L5f$wAzNd{Dm6)*-Lr_wU}m2SaR9s01KE9**a$*#&Bv;XE#jVx6lrZ z5I1}P;q8qV4WgGcy#2I3K64k$-ecEGJ#Ywfr2f_=|4vQ9dT;$LE*x`r`c>Z;owp-) z`aiVG>vb92e!aGh{ucMQ{IjP#V4pl%6Z7Lf^TO@#e&18Ji}d%--2v6_zE$6QO0V{W zSG$K5{mc1d@AF%4mc~20H)OY=NVRu#otJ&W2HuOp76%%g-jQ_~@r^Ao^fji1c;PT8 zuBJi}A7m)CE@%Owe+VsX#q{~d+7W}&20}YaNBc13HSHQ^0O)n3{$UYj+=oq2bq&mE z7S+ByHdKq#_GK@9^rwQ3>z+}+!<=v26}#Dm>H6aku8CT|9G#wSO-^=-UNyod1%y65t!T;GMfj4S_j~EAYTA$%aia>$){IA-;Y6?IXSGm0{->31!vbGZ%wv zKG4$%x=7}%gCee1iHg7Rz6(h#XCr%UV_R`5FTVbu81vvNbzasQA7@Qo#=t_L(fL@f zdKzcg0HuB!#M@vkF6SyeS({atL4cvOQwQ1>{lycIuff2XEzx;j(ht`G3v&8x%~f%$ zo}r&SCI40b)f2Z*`|QJBhU5S#r-fYk_=jxY^beo3o%`VVyCd#9Em-TXTJS|`YIFj8 z7&Sa;?>En}L&c!x+$@@Fy_yCQ3^>Rq(yZq!GcFHuB~CL~thzX6m)tfqf-%lmR#ras zJ01=I#kJ0gPTpYF`#(YPH*?WMQwkb^#`fud^RCCS}GKXI#mwq({^l8o_ll(B=r zHG`Rte;(s9>3MpwM9B=t+iM0`h0@ zSu!h3$(4d_EoY;#f$4bb#}K1u8r`N+PuLuCI$ravzaJn(&Jq9IdnHYspG4#!DtWbX zA;nYE9>a-SxXJ6cZq8mr=U?l`_R?2fxjpB=KLE8y`3Y^We$x5d*Z%#-nWOd76w5!- zgYI8-4!=HVTV`&A7?15gSqui2-UClwJGs<=XTZH~EV(*QCB~5BGqT)X&pVKuqyL3t zS|8x%6+gM0XTimgVspm9QIhdvF@tCJimv(-5OQl$NSI?wceGvzx_)NoOCzeL)ZcRB 
zXSWa>EvVr~=Dt67qq~14pvs*3m&bo;TZxB$8Q1%x&t#{5fuSGr_j1AIGthNDerL9$ z_4C})`h#ms*NBd;dp#e8d33y`L_i;^XX{w7Rly=k<<$8fSN{Yq{b_MH#oh6&&oq}W z$;y>&a!8JYxx~-j7~`<(!^Ze!aKG)5o!J(O1-D?0aqFAi;9#&jI5Wi>FJlU?Xk#kQ z4b1^{kv+FM;5+(5YU{wrw&vDOLFKi6!HsdSuxKg`%hb8!7eKhNE*u?##lsw>-w8-F zyp1~LOkdom$^)=992aC?)$zn$F>{DWU#tR<0J^za?-_>8*+=A|}`_HC2 zXUuZ{*cGFDM=yjP;Y&5tkI#Kr4lp)=tQfiH8teOSfV8lt})Eq5?~)VyU4)M zwffWgXI`slx6#QGZH{)hm=zTL1v||Y2~ps-2v4@?m)#;Tg2_{*^*ayboiBb{w460! z4<^qkAh2YhwGgT=`kkx9Bt4kcRDm63BKT?F{_|Jo=$K%Pv;L^q?^oxtt_T?QHxp(g zSQ%V;8=pu*YiR0|SD3Z_$rG^S(GS;(l{+tGLPInrKpUI#gkk*`7&19oYy4FN;J zhh<>zptwnBXJZh`TcTt&0T*EVheSc~7yw3w6qr!#eei}haJDn~tShl5bzVn1em70@ zq(3QPy*bE#+^~JGe%lr}`_x&-tyISy>~Z|q98dm|h|SveG_9P@Cta`n=ErPjp1A}P ze89N(p8K{Byz{p0?Qi_(_LkrL$o9TJxplkqj(ZP?JPP3_vv@i4Q9#wdw8*Zg%r*Vu z{>WLl)GC86y4Jn=*Z$%NBX(9zn2uk^L5CuG!B5^A@8fClIMt$)pjuG$mxf3x?d!M=ZKRo|}%8t8_m>862h zZES`x3LPneQ9#9rW6&}YER9J_9Ll6jOhv22vaFK$LK9zTVrglGpfOG)CQ1a#c|f2P z2PT;(>82ZKdLE$X&ibtNtiAX5-1EQpbfdnB``q*0dq2;5)>_Zr-*fJNxWjr!rvE}0 z4S@ig8Uh5fMB{G%7Fpi7r>cg=XAi*)_RIgombZdD!xSzw+dm%7ImV4z7jO`qjk$dP zc9!vV$G^1m+lc9b+~rro5W^7uoE)l%Q9zh{PO+HIL|G3t3xVBuNFgTbBWzXX zsr2tfumOW?Q-B_Fe3|0%h$nOp|10RI@p$nCEMkRgL`(l#Y|CG3()8{h4s+?e+@Cu_ z08F0^`y|32!TvY@+$(S2`VU`! zd+VFumHzGfX}1sfK>dYMKl^#NFZgvY7Vn2VnD^Nq`pnxeQq5oa6E9DVEcyA<7Je-e3Dy&M;|( z--6KuH)FpoA= zIbPhQjoGa5(RB?N>YaDN!yX8@IlN?@GsRx`-NP)2E>Cy9e}!u2duHy^#?qhn@QlMn zjej&Ppm~}BS4^y?GQSrio*~QE{hqnk`^NIGWNOd^87`lLb~#swz0s^ljeyQK5XI=l z1mj;x#$bJ(gISzTU;Ul;4=11g3B&|8fzh~8E|TV04y{||{${H*%*(;$WTq^ZGm|s- zBw_}aiSx@2ol+5@#w|Fpb+9QBh6^297lDq~dT~9Em7j=U{mnXO|Bv`bOCNl_^;?wR z8@4MAf`mxNh$x64-7pLxp~NWN>4$ElV+aYQMClp?1Zj}Yp*sZzq=xQ>A!eAF&3Er( z|Mnj5|FE88-OshI`#d*XzwRjB5#>{xS{6>B@G8k}ZyGmN2UjY-1bbRBI6zff`}J}O>5p93qxO611E#&v4YM$ZzNm!^ zsV;>ao!lAXGK{F!aM>2O-HQm+MO90?9wb8>=-F}e@hTnZ59uG2NBedKSb0L)E0cNk z3?+Dh24UWJMC!U$$%`Nn3Weysb$}e56h!q$I`2zH+_qg-9Q2EFm`2_n zV*lGq7k*xclt=rzyBhf`*jsIA*$y;wqd|q zj6w<&H;=T?yBfb0%AuB#k;X(kj{ds4l~k3YOI~webOI{Sl$kX)qpmw)ZLqPES;S#7 zk@!CbfZ2&{IAkL2hK^&^!=(d2j4aDEO7z>hKR@U+9BP|8o-`?43*f(^Ob)Jg!mGx! zE!lxDrcrmyv?`j$3!%Tid}FiS+vEDY_zgR_OT=ECOr9Y9=7@o| zeO3Laasj`tb4OgZBsTb&S3LS=&g`-+oJEvm*yu0%AI<}EFqx7FNiVW(Ymxf^>aya< z%-~*&XIf>^xU<_}?KnItHIg-F&?+m#Z2-UghRPm-Qgo{FV#WjTo2u?@6WO5~Lkrip z21n;ac&0O>f@ydKQfI*-%5whZ(%D2S={!AY&(k0kDxQ?K?pL-=2MqnYWp=GXZXM>& zy!1b@xn4UHd_|8I7UZj<6UPHjt3J@X2Td9{W(x5Y)jLxzNh>uq1Yb0Lt@c|4?&vO+ z)<-YlUb@7+DpNhyCYYlk7}&WpIC=c=8l>Enq=m6mzV7^^@(;m!?~0q;?UO z6z>(u;qsD%6GDF&^`#iq3EN?;$?2xsr;~@wI}7udiMGJL!8P1V0V==#KQR!OyhccX z*JEAMSDLc(96leIidUAVLf76#?EjMZZyAb#9;5}YkmtT>J-4_mHV;7;!LFrG9LRy! 
z5%ez9oE}$IPjMETV94I}@(tN8$A(=n_Ucm=?mIuGyY^Gn(c zWcm)5R6`_*#C_mp?y@v1=hI~PoYu%GMUbKGn<3$2Mzzx#utC93kB(Y*n)9N!QqE}eB2aJt=!O7gOdUA~yhtTOeNOQ8H&I1i9$T>Qn zKi+=Ec^GBv>bPS^n&69qEH(HB zXfAqeP1jjAcGwj@k{zO;(OY(NEsWmq#}5d(wCKnb6j6xU8VGGPY7FUmo}Er^8gQKy zI1YC1hdWa+hWJDZoiyG_$L{OAAt@W5FX;5S>S$_(KnJG-sX6M>9Mt%ie!c74;G*_+ zrXW7%K4a9@BI>gG*5&mu{-Lc?tKXMw&8ScTx&3HW^}ulB0^{4FIK*$Rz#D^kL-xMH z`9nRmcAw5k)_*Zl*^&!(?mm^i-Gquu98dp@lY+a?dOE&Gz@H?`xxY`e2o&cSj>gqk zWnV(CyX}_c_HEd&pj93FBjz*=;&dxZWO7uvyi%p5^JZ8T_8ic0R@RU!dV2jG9F$HUmAGh)9jGEi2SfV#GN8j{3tM3dw^oFX?c^9 z(tgh<`k6@eWFE?@Kn)rlKhbR8E=w6RL_OAV!TcY$ze!dGiy_kHm$7rfz&lAUl_4U+ zER_BWzgv{I8Wqi|oole|{@UlJnI#6iJO0^!5nt@mMl%;s%&(#k0iKvsBV8xk>I4=b z73~d8@aqk!+sX|Z+}_^aCww(MVUqdlhUn)7by%kaphG7sw;vyyYvN z36J09X>XfL!>AXY6N6S2fDB{NW#Dk5;*^_B?r+g?jcsQ+_w)hos)^GsKSZnmC_4My;`-<8O;(f?w$u7pG z%*Jx%DtBI#{p!i)UE;^ES?iMzczV1$G9!4_CZH&=A)ExItrax8k;~2ykS8XM0bfsv zLfDg_sE3a)rNgu{`~~3v&Ev$aJu9@=Z1O(Hs-`rfg22f)^L00a>b;KZld9p_I(a2H zFDLq`!n;TU7h|_tiL|f3c{^n-MZ<@sZv~qY+Qh!F5fT^mUullBSCxRp!3Mku^d$CU z-5S){7>N#2!8}0kBS!F5Pv)wW;-sHsblrUG3%lFp`f9(*lW}sDj!WS|#PgePXYGhP zE*$s*WhxfSO9N#Rl3N}fC9EsPw&MO};fHiz^vJCn{S32AaY6YG2ITOl4LKxn?XJj_P&I))Sc z3`TLn{cLmNHfy8)?KBb^zL~a|i`Nm&+|6?GtXDKssXl^T=<`)3MUNKn>@H)`NGE;v z!K^>mx!AIG{gw>ubp}P)`4M7P6}N*Io{&o4UC2!ERo`cOecV^b{Krpt&sJ~lFm8#- z(qW;Pd`F=@^^2ElA{r}Z%9+rJx@dnKJ(|7Y1%Q*Dvyf47iY1GvO>xN!ZZnf37;9E$ zEZf!B%Uxpp9&2)!v&mI&8I`G$?%K@>Xfr>d7fAj@yjUapGsn`Y+@NB-yF9?pY%8+< zFHHX7-oS7b^C_Jo*a?mA@TinA;~?@eB99-!#&N*yxEcAo#khjC_o@u>A4%=j$;5CuZMdP{#U5vo!|kGp!KpU=QXVWLv6&~ zNC(|TP*u+|C^K&Scck;gWUUR~+h;4It^D=74-5MWO`C#RI_NW?!6#Kj*o_-)2|m1C(bdt>_=C;aHHw-~jq)4d-uEMKgqagEEO40A zN(JZ#Cu&H(2;Hj@qaCJJL@410Kkx!qD2izK` zh)c@w`nlxnO>FfUK>w)Y1|lQ7CgeZib55U6YPz2mQ#Kuvh!`gxAc$-@o2(?i%&~lh zeC~k?<8$6$?3unKm+)eKqjj%S^8j9UJ9Ik1JdyzTQvXbcpKyglWQVpI(<6orUdScj{Ohz?}sbr+p2X|fdjH(B2 zbaGE#GU=FSJNq|0aEy4w)DVxv#_)wyu*@Myy1RhJlQ3`(5BsP2%Hkr;6HmX7C6WxA zu{<@V+F~y3hty30YPcf=_qtNsQoPpwnN20xdkBN0d1f<1-$jMOlJtc{$CVXMyb}?z z!Vra80nClCU11!?xokiINdvPHxQdNAO$W{kaTSk>{9n zMKc63)B*D@E({(V-s=I~i494-ePI}z?uih4Cg;U(jez+c9Hg-+Q6$%DFqM(SFG;-5 z8VN3tNjiez&)0p2*dX8q=Gdudr&}s|2ouS8*UKp{f3g9CdpmZ30RRK`K4M|M&HlIk z=|58D5r?vwmAgCwj@l0Uf4b80y=G!uPsI{mtu&m6@ zy>kpJ_VTkRaFJDE--C49!1699_-Rn1!%GBSS1ZWb_mx(7;4*M~11`svWe>Oc*Vxm| zZ%jt~MyQMdvUCQjJDIFAxf%D;x+W;9H|p56fnpec<*ajmQ$2<~a+$CB-h#Sr)^i>L z5S#uao2i>#+LRey%9%BR9VbmK#*-#HQ~TMeG-5>*+Uo} z{K7FNUOYlzxQkvz`W)T((tg#lLRVqGK{s{?U7}+Kes@bf)PVQQqj_Zh2}k& zWDosIjF)E+U;A2qQOFNy~4Wv-xZy%IZ)>SAd5Gab!WJDE66!mjY~@WzWT#dwKf5UZns$ zS9j~9Z%5I)7=DmwZJiiBvm6A!()BO_677HaL$4-t0NkV@PIzj2b2(Xjd^w>{eHiz` z=1$D<@L9Ei;N54-*Lv`QJlP}`1}Cc7$C9Z>P=AlGdY<9>&c!o@IzuNm%nnGDAoMfM zz_t6z@4>%;)6toc zVnz)VWZ|g3DyS8^xx|eFkp%@|8QSd~uI~=19tAfPw;JpL=7 zdp3D-8(;7b@DHzn&Ha(Nxl2z(fR^IhbJXU%u#-?E*dI$wAQG`QrM!8(D7;iCns;oZ zXZLw%fh3i&VQqH%&BX!?Px}$qlJzRdn-}EH_?FWkM!7ksTKZPMvgk@wbl0p-I%n$i z6zXYpaUt}c7#h)OmmJI{JPo>Ve3R8dGPj=##dA;_S^46hDi{}ZoK%}xbrzWw&^76( zj7XftP3py-H%FdTbz1pMOT@Th26TkTf1;fbJD3^W1D6-X*7Mg1vdA9b@t-Vi`Yy-3 z4H(tu3MUPF!rzy|BL40N`E}y`U4549Hw7G`JO;*H=)SGZ(1U=?{$BVxEzA>PWLD(w)vm;LPyD)~|7)`*IMW_-> z1y#_M$mMms>>-s)&{$&I803wtT_4$5wcXVQWRGy|bvmtTDf=9Gen6OpqmRv#S3h;g zxBygRytTwjhZRCQ>cTaZ!ix;eD2u1B4|&)dMAp}YIqr6uSR~#HrnBLRG+DmY{!v}y z9M>P{NtyIfev{c9m0j5L<*?R~qxMr5AM2L>V75r~I9WKDH8RS4)OhI#(9q|1NySh9 z9a+Ozc8wVRs-%%=G_{404po`RD|wu(H+)yeecTxFvVh%vy$$)4T3=^#2(Ggq1#B2> z**w0kvK;cxTf5fi%Jtf@E>+I(GP3u*@ugr`dpovC1>DD6hkbkX^R2|3D0v?bjeOH( zc27Yn*zM6}#Ucv5Bmpx+(09730O{m@d zq?grkNej5MbNzWSW`UO0E5eQUE~?+{lm|p@UZ>Y)VO@p}o{rwT)rl#%Cw9~pl|*;2 zpHe$=eoo|&Abh{v`suARX~pk;AE3ahP8jiW+Y@w!*4rM`lTp1VEzbRH)##e9 
[GIT binary patch data omitted: remainder of a base85/zlib-encoded literal block]

literal 154300

[GIT binary patch data omitted: beginning of a second base85/zlib-encoded literal block, 154300 bytes when decoded]
z9{}!>Ow-t-j>cc~2|3|{sjs$sQEWpTz)i?5{Yk*ux$6YaVS6R55<)1=#dOy&DyGk5 z0I-BuCw>sMe~0~c$dZwjnv!a6CGB5xD^1~ z{`Gb`w_H1z-<<}54@C-Kv^Q$NNG!@ZCk=3Vd!$d=to85qV*?L!1>dcZjAVPvRB)5xxBX>8 z`p!tWv(mM2>OVE|M224c+G+QId}z=hMY1V-&>0#v??juYax{L~*ksk%epH12-F~@I z(~G|!6!IL?Bt%*7U=4Yvt@AmOm)+a^--LRTnb(Mr4uFH@ZjpqTT$%ta%P+~d)rZ?mhK+7~k zF}>o?sxXSFw8fg~dl|*z{0eFn_p(P2DBH(Ki^3vL3a~lDe38Lpk_g#Yb{z>qvYE^N zca7264s{wp=x!9&Jnu!~3N`L9Ik5W-GZ0#XBkR=hxjk#Lm{-BGuPp;W)t{@asqU{5QAW+0f;cT(unk4ZT??7K1)X;^WR91@ z3XzIJ6rTkmp3k zzw1kL|E4IE)qpT%ySJtH@gl?*w=P23xm)_#`TB#du){)<+|JxqYMDj2WZK<~*aF{z z&N%PLMJ`RC$Jnvuh4m%e9Ao|P8I;SprC#JWqNPg-WpyC0bRZJDmsou5q{|w8%0Xlk zHHoc24a0|+1z)=gAe@|xRYgf=hjMSg4yN)oFMWVjgj0M4)rld=KMbDd*xmjM`mInj z2y&jx7?OTJOZi*=~ zM6_d$kC?Q-eu_+l8$Z*&)dIhOSE9w`rWM(uOAxFV0ti~$u=>sBbs~QQ0$BK*e+%pl z963AE?PS5rU0Pu)pIo|BM}17#=!V$?1J&^cwKrXsG39+9Jrl2Ez4qf5_6RvmON2RU zq&jxe;O$qc8mB!{4ck)#4cKn-stW@2cukjo0(x0$J?BNa-PmU;8O^X)=edrDWI|G~O`#|%7S zjE~%yx6Ggo4>gqtDcC7uD({WKBRaJFxZ8rU6v{n_dDTuB61_OdB33h`a`c#iavKsc zx(aQ42>(X~8KAaG@VhDD(Bg!vi;}=;DsPnND+jAGHycn5svnx0i@(naf|jM&)>Ni) zl&9%_dz*A}OmvDwa&aoX@KNC;7KY0qWiuh@Pmm_IkR1xuWiETf3*#5JWzGFeNz1eA ztus~yt7)!j2jbAS@$u><3h(3yCB>=nsRU`wetzxzQwO1)fQH}B&}B)OhFTV>`8*HZ zq~7M6FVZiL&pn%i;yHg-p?_ryu~D7nYJ7CG3jRdLjV1}`NUZ$u*UOMFL&0!Cx;hsf z3#eQK{nQN}^OA2qw8Pq|kUy8J+Mgi^luX7&iZ z4ru1=lUP9`Tgc!WgQ-r-<b?IdDqy3IcBD~jb`F2I#=AenK>tZlI@^*`@Z=iB&bA6CHopVcwsr1#n zu%%D+un;;*UX;Q(=Q(Hqt(d4=)TO{>ky4OSbv}|m1 zqm5JJ>i(AwcOu!^0^EfZn=0M+@2J$NYEww7@_BlVH&X=fBt?mt0GR0T7xQ+=4wj=H zGd2kf4O*4IC~1}iUbSvjafI}qmBw2`LJt;bxfG|2hmJBO1^->gjQ#>xnv&SVQ5OYA z-Nt@KWd84PpEPRkFgvklaf-WK|8n^-(Akrft75`PRv7lg&0^2_ex%eFU*l_)l)y{c zItodOYMq4~<7;D*biq|0#!Yg(O@UW2)B4BX)P&!FT#zg;3Ib5owuy&7s z{z(qDe5MMeeJ6Q~U&sg6{$r?MUo2Ug%!3>B5uGs!j^LQ~rqaF%weauN$i!b4WWHcJ zZm)ILCbo<|=J=7=!Z22Qq-YriAu%S)I(fblx{$8X;fs}y@77p^o$G%M{nzh?gjJ|jl!`g`t$w_k7pfrZ2FoRZtTH=``< z3us+K{J%-uv(C&7bJ<|)7pSL69-W)W49N}cC~NuKgW}%Nlk)6OoX+2~Re5m$vsi7bo4rmX7k_DYr`XhQ+<#`Sfx#lq|Wrq-IY-| zLI2d8c5~AA9y(&~BUo4HhZTlmO;V=C=|n1kJ{eE^wE z=x1fhkKb*QCPbURzwMi=>Tzz7CgFL`ZZgd*dwOM?Dwk)wh4jK4@?upmCv9Ojo)*8) zdq?&aQ8`=MD8Tcuvy@$bi6;;0VXv+UX9$G{i|XIMXy~bAr_(PE8STh>o}T+^)Znyd z^L@8-+D#;#MSM{NRSyr5jKlUTDiOA>cgj6(ePPe{YvF~eQVv`Y@-NzdHwuLK^{=fw zapm6`gb{-rBm#hRhCVw-h4Trc;Rj-8tkAC-z%zYiJ8_Mj4*@t&5)bt&d&euyJSVS5 zMIlvrqp7yu`F(jHsOE{Gu^Srt8)H6%KU+ntL9`Tj6K+0iC%+B4yuno|DB86c;3kf7 zhtih8upl4b+hRM6$na3QCYVk(uDN=A`^lhH%CCVsF|E!QcUV6WJX3_}-}Y0zZB}yX zz{zq`q=fG+{=nZQauX}0q~bqG5Iu|7WE634?qVp`&SBHqe!|z+*K5IcpHa|ZoxHzZ zU>p_xfRI+?_c)QjPuq0XHT~`m?kA{rh>Ab0%R#aE#07|(!P2EadCq3C z{k5`N4K@zBt=p&B>@Y=~yKllN^!?vFZH{^GBi8)ZU|QNP;eAb(Iaqbz)m~iBt2cJ# zENL42U4Q-#Z1H#va7!=*aF_Ox-uXmMa8~%*g7^t6Mn_0)gS0+Z+{>;d$TO8OSluNY zU>y6}Yv`;p7Kk_t0AJCMDBtXmv7N=z;gXRZ&git{b|d_(TEoS&S#8yuR{B@f=1Uz( z3r^KrGdKqL%gIo&`HuJ9i@>E1cSJM$2an&&|M<=l@DZ75>w^l<>ZUCf3Hw9w#A7Cl zfiPE`N9Cbj$TV|hWErLrA&z_!B&I9;BF(NqlREokocia@ELE}5F=Br;XT6NT9IAPc zZudUr2EoMtoupc8vsj@&WLo^s7C;FY`ufU?1KUki|GGy~WXIq)%$sLhVNfP3ztrD(4WR+JB_G$~z9=66Fhh z{>OVZ0%fDBqxRS9fg?BTo!p`&c&$vRMJk72N0v|B$1KUdK6!+r$kXR@1y5~mFvC)5 zyL$es13x*MF@s{D;2_4%VQ1@vWa}HmNW}{F%a&UvrrUcqX*Yr==*+lihC1Z-h)ncz zOG$bf2VbF85it^tbEm_pAzlK(`Q&UTl&5Ec&o-=`6CO|H)}x=y!ME{w&Ewx%IdjqG zgqACkD~!-a3}Wvf*#E4xmJ2evUIe*_C&&`-|E@*=*z+1s^ud$UM{H+g*Sc+AOi<{< zuh3*Z6>3VRZCK)IG#Q#)tA(W8TRr)2t`a;dXzVqFw)ST9aL4*?y|qG9YE_Od+N(@H z44o7kRs$s1lsPwU3K$I76<6MUamvqeeb^N%3*uxNy~t*#Oh5Gb~ZF z_(lYfKMT`kLrY~vp~FC)YS<*N62EGfhonwY?H7}F<3&`*<-9z8wI|;UNM;_Q`zrYF zO9FHt3%6zW7eXd)-dLwiRdNMQQ=IR3)IQZQTck(&e?PVyh>-gz#%Lqyn z`BA#X)|qKmc#^+5QGb02_k|9rZPj8Hd3_Ck3+%Jk%RFpK>IlN!cyL;p|J{ax?~C-6 
zrvy#*yhtajvz%uvDR}KQ(SUgdq8uBAtxC@3a)~-}04cxl)?+L3A{c&&6IZj6ozlwS z7PqW1jhR9kz{Hh%s0jY$4$Pi8$aal)@Ur}sTWVj+xqbJ;XQK8CaQW{c7xOkvfJLcl ziMY>055GO9hEc@-(3Hy-`)V-WBh!!bBc@;RuyB$5&6U|ai_WL9xknXklp1!XDUhgo zWouSbL|-c{7I}TDxn?dpulxT1gFt-0=E%-Wn+6M1PG(P^lZc4aWwn0&uCKq9n82AxmFNhr3Jm4qtByAkD%|`4vTW*FF@IM-M#aXj?#gdK{DnXM2g@gKxEKT$=zCddOj=r!A@@(-I5O+Z z2~vL?H0w7z-D1$W1e?}_l;Sx&@wzTgeINkl?g?Pka(a4% zlQYMfsbN|S>@0aRe-#ZPalt(w#G27H@xP7z>F@k^_+;OGH{E-^zwG*tEw6aX&*Qtr z->N<-`9=fMm$l;Bb)&3_+IcRcX5bPysY;mprzm&{BBmXw(&8bEY zLO)2{&-q=K`Vu0{(^3RJv$A@lJ`fn*Rm^Iml)q6XIbpG;SB9`gvfk!2JbKAv%Y;yIBjPS|pd3*gzr?kmA`iC>&CiyHbBe(JmRD$EH6CohnJI&ICFB`y(Pbj@n663NAVM< z-({Hy_lxS7slZhY?(`@3dyw}9913a?5nRfU^!D%hx8!9AAjab?`E(+bmTJYucoSI%P9T8yAh&1@AN0( z&Er?}xepoc=H>d^b}p}a`!6kjbkXlD{2%;OhZ0a$5`&aW>ct$Hl))hs9IDwSU#oFp zNq;l1p%q_Xm&i@6uuM~rjD$I3(}GV6rES@@V}`YU){n8r zd)}Ze`Pq*YpZw9IdxXXsst;?`QM!E}5zq~w7)MmEy)nf7geAlCYXgTWO<7m3uXhPs8`6nf5>VsYm?e{)>NW<#~8Zw?ie9&pB?rYGjB3M(+>*F9U<^uQp-``$=4__0*% ztgF_Yui~<2=G?}K&79lAdy9t?opfW>`R$}Z%)G|bcS#J2Gigx6 zg%vmCsipc;yo?%H!p8kAM#aq8X|)3JRWR4&wf?CAw3K$#pu ztT~J`zp&zPZI-o_lsM-j4;aEPJN7lJD&|?sw{C@+hxa*$IlW3P4A-dHQ#PF4-^*|M z7{1TrxAcF(pLz5-$gy7q?%lLMek}cYr#@ZZxqjp4c3KN`rN`2tg2)#YrK3_bRuK4j zE(qKxUp7UixQSOiPzKi(&Pt)w$r^0F7EDs_^Hdk{hR-7IrP4F|9@=zIKF@57vJy`3%|n3 zSzLej6%v#eRjRL;sY>~^z(z6!)LLb(3Y+x{?0AZ=G5Tyna*|tAFekqdWgKaICOK}R zOPC264=`WcIHX@IUTjN@?;peiOAm9b7evyCAy{!HCjt^ukO~)gtR0R$;-^H;52tA( z%lZpz{R-L+7dmAAXQWJ$59>C4 z9{GLW1-j}9@=1RiEuj0yt;K5A`5V&xQBlNjL?DuL$&r3=Wvs2M8ijOtE!|(y-nE>L z|El)~F8F8quX^L(+xfF!x@Gz2ulrlewYOfe;+*q~OAQ*Vs2cBs?H+FR0rsA#CJ|u@OzGTcwS#j(7A+l3ZF`zNph0M)MI1Ys0MsB(g*(y z#5D=WB?l7>VZ}~6v*3d`Vo(xIj1Jfdf~$C>MF1%1{M_aP~c>7G7E9$70(neXZ)P20@@kl z7dP$X^~wZkzjorMcMNj*Rt_(a1QWY4X7zM6XTNYU)~H}b5h~7E7lHG~TmVf4L;;e5 zZFxnRi+bkvsTY<_)`|}hb4togm-`o=V-#JWym2Vc#FeGuYr_zS^#Y!}pn#QTVtLV` zW*rh89&`SFK|z4Hogh%zI=nfF&Bpn)GeO5v1Wh_V%)|)SEADati+*g$8$b3xm)r5V zjk8a9wEl@}BHQiqKzv91*~dQ`-zW0gh5u5J;r>^Th5)#&ggEv?GsJ2=Tvew#(BW82 z<$FV0c(gLfuJYTf>J_DZNHwqHnUI#`(1xOOIB?QeQ3ENi0*Rn_3?xI3K2yfv_xL9) zf^|+?qgDm-W<;=`R|SZ?!mF8`&pzQRj`1pif;pToHE_-wY?V});1N-LidG<39vfR3 zgCD-Ar91g|EI;_vA6w3U;L}aCm)C1=y?Xia-~2oH>A1g-%J8`k{2EG&J?UzeoR|;ICHqRb$)tOiy@uQSQtyB zWksLhfgPZt8Rrm}9K1wr-6P7swI}LdVrx$QzNZ{7R~}Ss zI>R>NIv&2&-$F!E%8NkQnU5H9=6+YqUCSlcegK~sfA{jRlOB(svvogN_vUigeUDu3 zbHG8%hxGXnMmT@xtyFUvxqrf+dKBG27m)iX*kJk(Sm?WR`37)LhmF4Js3gzKR;p8+ z`=R`GCjIE-rWml?PjQn%AF%0IoM>EYv-wZE!Am%e3r$*cbAa`81Eti-IXD?|Qmcwmgy>aHT{FOx2nan!Y4JY0z< zz#QiEjls|gO#D|zq7&vpU#V>rt<@G06&OQ@c7^C_zUwXqu%Rl*@7cTF=^6i4{?d6#d`EGn= z7ypMv=!ZVab>EzdHV-eXw=9*Pm9Nf62gusY$H;K6I#BUsQ}-&D&jgzgR6KF>V2tXg zv4xJFOyR>l9RNcTZ~ao*XW}wo<8Qm1p6d*O=-+|AfjnjUB>D;j^t5-fQX&;(!(Lek zoxn*n&5YMUD1_Kll{F8BhplRn?z@&B`?i-Z_doK~ z!1p-+?)!gZ`IoQ#N6YPB{!&QAL=~=#FVe1Ay^-dkousZY@HasfUJYn-L5gWi;3vj7 z#4;Se{6NAw5o}7KUb?p&KnHfN1@vy~%#c##SHz%=n zRZlZUYLPZwHXOj*f&cRM9hbggIqaaL@cEI4C~i*|=0E0sCoX^b$=_!dd{_rl@WXu` zYuTsj+&^n^FR&(PHOxs9J;oJag{9Z86$WbSOT=K_JhNh2iBuj%mNrE7{q8gl zj5zgB_#s`CbYK`i)^mRKqw@Q%$cB+vlM}e=|Oys709_5uOXwGx~;H_7cukY$gqh-G+&v`>YKhx%s_!69cfezRA^wEhqZXs~EI&H(3YaK%sbtfTW& z#2_eEq~{wFT*_}9VuVfl6H@}3K??`P#tiASY$PtH@^Djo=9ux?iw+lJ3a8%0Sw^`e zntF}@kMH=o<+UIF&qVIkc-TqbvK)5^e+Cy-I9d8X$#!pq8aTq|dtM+PAO%%^-#KKK zrF51P6Fhcc<$FB&>53+E6YAGl#jEyu{S+^`fT)j=nJmUBmw0W{68SC5j>J1RB4JjY zDXCQz5%&0cIBW` z>yjotgo=l{l%Ez4?q?fg6E8}|BtMDG{ETzIV-**^?dI2g;ML2^-~3W(QNDo4m5(Z1 zIkB$tU6(fYNGtZ-u@fAg?U!LHU@Ykm77IBXPYvl0F+9T_#9`3SRq{sxeRF=oi6t&; zVg{#k2BY}OPdtaTRBefq4}P`FC|sjoQL<8GxH*dpuy9v|u++`HYyn_D@-us#1-0>G zBG9r3jD#P9YXc=lOKG8XiYK-QLombxE$$z7(uJVtD*|Px+%#3V7`&s?@vPE3%nv(V 
z8>8cqhgjiYOn&J?Om|EIOeIO&6F=vqsq|}B4D&;exNy%d-24u@{`V`G5GZnRTyMxb%mrHq6hDIlpvM zJ3kVOnTlzV{@!TUSeuWTSp8q<;`dr_Y04 z)`+X>z_hCAkwr#x;#?|S(Oe3|Yq|MAZ@V9sBJ(!%Pa zHl5JD)sfDhc*gPE9FL(FMQgN;N<3}R)WAviVIrPOa&I$hS7<_(?w^++fc}-C@W$(DErNFTT!#U;8 z;esatX8xkzGtUY;NvbGL_^6%EE4dgWgJQ78tY?wQJEB%izz`)cmy+?#lQt#_kha>E zjUI>I#W#@L96$-^0HzkF77QRUhvd3Ii7R&aiIDKZ+M=M2cd^Ftmf$|KkP?|l_@I__ z%x?vmS%!;Lvz=z0zjV+w^s@rzR|q!uKk_s!_jGyH+ke?V;S({`%n>fm-1y*2Bk~hP zU+c$)dzD#E$Iw3YLO5IsBmEUi0>+11JqM>6m4?+mn9ff-2OZ^1ekvGT)?=ZhW^#OB za4r4X@j{~5d8QtQJBi|lM|mo}xDu0%_PFHUuzsV&H3`U)-que&lu`ZIl0TXYLMX@R zh0FCh<789}7)9zn&Hby^6svnN^GUyA?aTc`LgmCvj*^Q|?_nWL<>#!OVvMabivXb& zpq2W8m^r^{P}6(@$xnOmBfkk*(L-lTt`UGDtK9m{WC_;RP(!}a(>PfyrD@fgjB90X%8EN9>J6y)&nVp35Z&sd6LUbFhmL`|TrIH`wQ zrtwGw9d=4Hb1MJ1q1HD;;xk(-iSKuxz55>p-*f3(BZ2C*mMqqxnqjk;p4E6Dqq@qK zbV6M!7A%nA_?5HJED&*SfUbs0f%eQF;>xeQ)FdCn9WT+mF!z*S>8Jd3igT4e9G>Gc zk(pDk@>YHpxz0~rm7Y{sC%^kgE*srvDoJj!C7EU*+najHV@~oAFye!Dui*;;X6Xl1 zKF(ES=Pw$WlcVTK!;g7%{G2}=mDnk4> zeXKe8DNWQNN=#?~6nEv1Xi);gFmYD=2=~gl#(kUo62Jo!fweJZ(wLPN0-VKVFvglb z^yzmJu*bnK|Mh)o9`dS1W zXZ8FLico%tj`{IRNUX!f+4azr#EBUM$|1iow^93W=JBOy&3q+@3*_>@FuGS9+@2Pp8bh%mAKIY90Z7t*`hMvT_FcQnli*>`GheY_QA$-^x9((rb zlZ1-Nnu6m)SA7#l?0{-t$(e_S3SI@ulZxUp0vRS{moT%;| z7bxfs{Vk*<>~{Lh=dbtuCqgi3P62gasLG;;EBf6$(pydJ9c{8H{~AYHVg;-G6Ie?} zV41v`;b1HW%C}8Z$xS$3?=( zCWB`#EqFF!?N=0w;TAYnxDhUgg;cz5Mr7AhL3~A+fE1xtEX|E!4i3eZhx)Z=dUDOf zIaQ;V%g$S_ngP39^E=hWt2Ap~_HOQ<^-yO{&<=$F{*37{eU#v|Qa3N1b5(KN@~jucg!r|2&=H1c<0Hsh&*F3lvw#u&#j7%Mt-rB5qd^qh5ILq@zz z0P=T|us4BXy(OHy!eF7n{P;y(^~;1Kzo?6dQDF^(3M2@g{dVkc+C5zTe}zytfzB`H z(q=m{A#d`h$1+a*RROYt0Rh2r?-8X$5`)Vyc~krJCT{1)jyf2`Wr)3%9ZQYG*H)U` zTh=k<4}dd+`2k+(rws<@*$@j{F#82SLyS&FCb2Ye_ANdJ%!y~G)jaZTA6}y93}T9} zRwz1dBDMC7&9$4wxJsf09nMcZ^B~_ATS0c-{OK9G%k>2hdWL+Wwty}{{K&!MQU3^< zC1`I0u_h`Aw~e3sHwe^U#_5o4*A*Ae&Tm@vpGyhFjGn0W+H*bR7ney0E&U?k1w%N=E* ztUb6gR#UUq{U_1^oAYD6AwMKcU-u6~$;{#NPzH-B8~z4jB1X|xK*X^T4Cg%Yz~Ui) z_>C$o(kPH|Br*@9n4(9yxmOM^kcz=R2qzvO$gmoL!ND{Ao*FxoU_-i*tH@o9Gs~izt}ep^_hFFY4g%C zz{;*`nYfm{NEI{U6K+`bxu6NPe$!Zgq6W~kwA;o#KRkl3U33nB9O|DctT0T3CckQJ zPG*|!CD$Uw< z!4I%bX0|N*&HT=!w4#_fjV(%fjSH8x^87h`o%5#`oK?^aQ-o`!Iq}9qJoQVs=aiEy zj1X-c+|a31zW>DqBF~BtXF>??S`=&y{&(o*xexu`3fTR0?QQ;-hvaatnSme5P&0Xe zB48TIFVnC~JsiaPJ{~^xMDeMvssV&3(Ib|>YD25{@&iQ|Ik>C_Xipo47@6NF^8`mt z1XV!dv!{!I=q7f)=fo)jI*8?0VO62BD3O$br;yxcUE~aQk;Nl_3az=gn1_ipX<0Wd zO!AS7es)hb#AEK9j1fO^A|RF43?YXbQO%JzHgD9BZR}-QuDa<{S?+c|{g|_szxwFE zk=|8v5kg%YnYpZ*W$yY|m_feQjl_-m;8{FVJoiMB)4g!76jX8UL-m9CMZ)YumNbpQpi222*F1v;@y=mR#DmOf{gBDtT1Vi>*)4NT(W;sG-52){ z@zsdIBk3SZ}BbOnuA+oV7JLxSYD&Qi*}$pw=zv0)~M@-kkyn%Bu#guIMr3^$rZ zz+6};rDt+!D`>4a&9wmPa5|WGS)OtBcj3np?cIMpz4O*95kX4!&D5FQ3%OmhaK<_Y z8JQfTu_be5i@(6Z8oK@SuM(?Hr4ZQXlOD5JMVD^ov`rJ1Lriihw|Lzv-YevL53 z-=l)%mvR$s+;B98n``xD3Q+r|eU{9xwaz0}5+t*EEg$}ZRQsm$qhnTp9ZaTSpQ*~h zC7lM0DNj3sH^OnHV9w9z5sn?4@=tln?>aYvSJ|0s_%~_Y&*BBHkQ@2Q(JZ|ZRze3q zOXbeyvUda)T95l%_esJ#JL9V_#1ER%r3~USPD?Wvc-5DhqsI!(Ao%bU|?$x-q-arT* zGLaP`_~J_9RbPr#8e2HABoI6oL>6igE814#{akL8uXth7C&pJ$I7#IDm8%+^-}_+K z=hxZif)(KK$tDAM;iK_Qi}o@PdoJ2Im8x{a7wP~gh#}V z8^m>{!uTF#8pex5`mKZVv@JgR%~@FK^IgW;#G#lvn?WF%)JAl$(capmq4Xk_;T(+9 z%nOXsDJ{ZPO$8*sG6`GpwPs6TDNb#K6=!MyPq>AS03n@U84zv;T!9IP$x*4!%6Q^L zlSS1;e)WX$Q+_d9Z1K)KL{39_VA1dBAgk3`7w{Bdsp6#fxI<1_{_!(^3I7S)y(hxm zFR#7zs@5c=IGNWPna|Rh0Y-iJ+~=MoR!7~zTzCSu4V7mFaTuwJt;VPF%6X)p*;B!i1U`e`k!{EWiB{z2cxI02H9kpM45 zD3fgTc$bN2CL?iq6wkyOR8>QkvfJyc5G7bpU>xs4X>)9?nzG`tPk7Q+#Auv|N#qzY zVg1=J-n!iU`5WWi<9x)yM=dY=o?lxYaMT%%RqtAW&FPbTQ)fD8wdiB%IwRBZ01w#} zIUCjupl3@wqbh__8-OKq$R>4!EydBgN7fBY 
zCe;sK=glH&%}e#lFMWk60c)gL09Ih_*Z{>V3*u-`{=TuCN)YCo3B2Rgk)F(6sq895 z!cBrJLft=PEm6te@rmuAu`nk2tv$U~{}>x$tgSckdwBjqZb`WMw*yP3Hq<|{QhVp? z0;(eDSmg(g_QW;kh!3w^xirC!<%Gi?u>8cgzI-|2pkq?d9_O1sf5Y;{J3r5+3~Y1S z9R1Ee;UKb0s%Z5%@eTDN_hk}Q7qYFAM;1ll($bWS2K}Dmgw($C(^Teo`}{jHiy(aO zQNF|11c^8AtUf==1Of+DHHV_>gCQZ&46t3*v1HcPOl$|SY71`|1{4nwbNnhiN*G%o zBY$|<07faHxYTTKwKt*+-|hSd`FvC(~8= z3*v^B=w$pcOKlN?@X*uSQ6GXYU7k>U@U3W!}^zt+9$WKhwteI=~OFb?M zN!8j|#aG1hL5wf<73va@0 zaY{hY$nVy|#^2-*$0Q{A?I*@HnJ)#9!olRsAfMvt61f(>Z>WQHRGj%bG7Gnw;k1wg zzr*R-oaPa#V`nVa#1P@UT9DKqrEikoSOB!{1{Fv=h|{mGkLWR^5nH*-JbENdHOg?2Kh593+Ix2o` z91^n-ubhaR((jCQ6{mn&DNfBIvFub)Y^74o2up=UC#J|x`%k1tCg=V4J7D?2r~Mzxzl2GlH7cO+$def@`@8Bg32@YlvoBbY@(aoEEKqqPGCc=6}n3dLW^6P zD?bVS{5a{zZjDk{aXb3?AVFJYFy#PD?>`DqVKLL*ohOAs%X{roV3K}<(T z5fE2D%7j)pQ8|mJUl@`qfxNMDQyiqI02-N~yGM+F(bZzL!KFmd&LxHEBk{zw5c-IN zA#qqZhF2S{pEp~Zv?jvr#_1{$tjv$~-TfylQBIdfoO0gstAG2o%L^X&ckrgNf6G)F z2x$F`fdJKlrTRq>bVR>tqcsmJNNh3zH{@0S#?2`{5foV5f{yT7xi6$6*U0ZaJ3czq z?4oS-%HO-@uuwz-ulq;AuFBUYvQh#ux_<1-L;`v!Z_*DF%+ybNJnXUSFMdqSPy0N| zD-%83o2%kGx#N|;2}oK&w1t4n>LGiU99c=HjP~e?ScYdv`&6iSsDB0#9CAkEH{}wO z{NbC0inim7aK3?d{6x<>IzL4W0K$1=h_$s9$GIZDxk+9uNiyXy6C=QmeTMY?J(A zhfUuH0@rGYn3f`Rf!>ptV$4^GX7UnDfN&Dz{1QuXu$jQ|Q@9$-JwJ4b6SL#Tyt+YE zQSmhav1tYA=%{$=qShK;%|Br!HZ#8hjHj^3chuK)g%uD)7ZN+YEXouB_9H)4 z*I6*0AC`qhEsMZNnH6I%0y;ktuukzt*Og8T@gQdOq>@Ho5i}S0XsU2A4545n7k;@W zVi}-Rt4qfNub3iDFQf|*KI=+YRFXsje6)V%q)EDSf02Me_C<#DIh+CfEsbX#`|#yo ze#fuj%XEJjUukt87{9i`A77;Ui-!4`d$w7$9Igjq)C!66xvW)~g>K5`5Y)@wun*nW z_^KC5P&V}pYqDz1i#Fw9O!;ya>L2-|fC7pjT6_-HF9z|Ni}~d*FnP+;wMaju0DFK0 z0Njl$k=#2G6;ed0U;qbf`r8xeB^Db(7}H$D(Wa?zv|&gX9RiTcE!Yxou=A6f`Se$7 zYT|G6>YsUtO*Uzfp)N)$NEmBX1b9FFnagMJ*R($KxU-jEdeN^h@4f8p%PZgdZL*m~VUZZywBHB;eHPsdH>L^n2@gUTrIY_P%fCUXVa_n+8|MbF19E=eQ<6lWAjEdO@sQlb!_W zpZMsejJX$#5q{{)-9r}#^XHyZJdGRqm6&#pIX{62QunoV?Sx5+;O?bu@kbA<9r?o* z$()B6GxYN;jp0UB8~KUmXuNjF3!r+eJucK=d8mb(HJ1`ehAXvZhTB8^+(>|)fBJa- z%2nZK;J>N;?#KMl@`#hqr+c4`d=i~%loQOUdG-3mjFJ=_gz5E%cotyIEQ4GEW3IAj z97C9~P2SAA%AYn-M~2!YVX)L$_=M0`>WCCAfT`RuGoD(#U!R5gFE23_m{k)n`YwD9 z1WYU8!8=3yC{nHvxMNBa;l7F3Xryf%MX*L1wPM-g)qyTA-Q9TXX&2zX%DrIu(~tkj^2)dU zo8^<&;Xk@@5vrd`q$M?{F7!$-ZtOXIOv3(AH72cGe)obs;~Q(vqW}F8@eHU;Jfeyr zsg};cT+AW-m{NBG_XKkaF)I(4I94PZ zMDh@65~m|Nyo78Xq_J!q0*NV3mc%XN<-*GB-qAx0b6$xbX>!J~XOUo^j`JV*^yLX> zK5co&C*HXH_MiRQa^dCgfR?C1DhN=`oN2CJaMq23IjXU6A;2}1tBP5@iKsT79~*!V z!Wix)R^BVWJ#d;Ud39qk79Y&lBO7s1ze8Do@&T=xeB`eDj>jIOQgNldZC)pDW`+-( z&x{t?5t95;87z!-msNG0^#HklLIVP;Xw4}s4C!&F6q$gP-0XXKBK$`(qX3js6D0r3u7cu=ONfm`JFkyz1?U2`Q+p-qReFq~e;m z5MiUE2w$+K5uVM+sPN9OXe3C}m0q9(VN(ROC@mJr&v+%NR?>ep{u%3@cmrV#JOI9t z@Z<6uKC%4vg}=W1;YGiVKMCQRnK&q5H$yipJ0={o^EXSx2P;`OdxJ=&5;)?qRON#% z8K&4t6Z7gOs(GV*)x{CZYIwdqDpBjFpBlo4Jt4FukFjP5&NI2qZcG(JD3-d2%=u|z zb&8P=$MY&a&H|MH;o;?$bi9^0<&%*YYG4j9V@eK2&@9fv#^-zr5g_`LpS5opuqChY zj!!)fVVt#WiKhqpiOcM@_r#NHSMl_x0?bfhF;EX2{T5Y3CjWHMaJ)N-vk{J)?=^Uav)*gZGIO z$q_pDpEiCl*73~)3&+5DZ(Q96jOA~HikA>>Hbz^34WDKN>->S2d`cM&*kCE80Oq-5 zu*yUNeGY`p@NZ_~Z-d2z!2bLM49Z1V5n8wyf~9SSr7mu|0Dl^j+&=)s^! zZAX5Y>q6eHqOi{4_~aKK^0Q!ul&p>XwDT*m{FNC@aZip19DU~UBhUOlm+yb_KV1Ic z!@sq>?gOu0uDH=Z5m@TkKMKYu$yj;M*?p1ZwBP=^iAHb8fFTk#fE&zQVy&KdD?48f zIdo=RRMWJ_+B68jP`p;Y2r38DP(L@m;r(yLj-$=AsWEn^6NL_nd&eg~nMYrwMmdzJ zXdF87Q|SD+GUZ1=IQ4mqNU&(vS){_LSb#Lk(ecQ{c9IrnQA8z;9>&l4jli`zD^?ut zF@2*>@`BJ!BdYsCj3rlID^X6Mgw4f#qc5t9`lg;%C5lge#{1mo@?DoOy5}6pUIRhc7twUa)GMGz;Q<4S;{+`DX;(T@NzP|e18(%QGyewB+gnCCNZD{X_IDorb&e}8AarQN+kj~ z-l{E_>`W)O@)HAFiQNbn)mA` zzu&zNS)Tux7wh8_*M4OAqd))MHi&iuviZ1n! 
zb`S13bUs@9h7gCES^gO_=iFF}2Uu#M&dE&%>+e4mF@gExH0)$g55lLLz46wf-cc;`okBE_p~U zkYKg4pQ1!C&}@`gnOA=EFiy}xV7KhVn5&}2@5CcYjY&La(kDN$d;7g0SKfHp@`|_o zBK{Wflgn|3oj5Um^~*Dld&t6%mpt_!FCV!4UCSSTh$Sf6jq+tP@U(zR5!%n>-!Hm~~S6L+kw6%c0Ah z#d(Y&*BHq^XOMKL?6gxz?lLmCtaTn6kqNyWi%Xsz z%Y6?xczOKkPuH8qIVV1LuU_QIy0_zo&t9|qr+2(``Tq0&$#Ueu$L=wVp97KNNN)WI zMS(P@SoT4CnWknCQ^U3Tp7T9P$J`U|*}r+Pm$n7VF2X2#^>+^HH8U2LEB>ZP8Nt9= zSR1P%nBX8cMQ{lLWa6Gk4n>sRU;*XpUWmZ)cmv6ez(%nN-jhkxNkqG7eCeB~@D|4) zC-gmxb>g+4q{`t0wAzN1e3_N3imOh`#Nw1g)Xkr~tsfmFH%IfD02bDWYXN-dd;iUZ zeA9U6C*HKYA20NO>xqAVdH!Qxyc~e@GjW=TC3uFIL)%U&19J4E-HTFWyvp!v)0o~v~NB%W#K zGcWo+AMTq=&h4LnO2?^+OVx;XI)5gS8#9;klN?*Sw82ut?h&suhlf4Es~+9FdgfZP zDq4bR+-_921f3fWJ8fiSd=D7=pOs+tq1fN~g+TnX?}D3J`1}$gPHdz|YGVrVS^r|+N=l;lqOa5TtuF~o; zVCZ$ds2h?D-rS5P<^p?YUwo!@-EW2XG# zz$JxpMm)X>t8ytW52!NOvPv?*jg{6%};}_#pJd<+}plRgSS7?+i ze&qw-h-Z{Jxgsu_*dUq2?F5Q5NV=TfT8O>#D|al{-E#GE;t{7Pc6XN(4}XB(K*)LN zbst^+{;$7C-}CW(_{8{wPk89=LceO?gN{E3|C!Y}%Zo4gd-~hRdoFvc{<-UeSH5St z;j`C?w_A!uwI3CuXWZepZ@TJPW7s&$ay-yHRN-OQx&YO``TV$chk*smx!Te>!$*Ja z17)3ZCBONd8dnkb*#fkg-x%aJ0cOWYS?O0^nO#`0tscI$%{VhrDRmpe`~b5gX`$CR zHGoDURcAQ}5r`6{pV76}g8t}F5O~PM;W#6m9>EylntM9iN~*9V3lEv1rC6~yXH8uj zgXvS<%!!Y~?t9eoQ2dkBxhFk#dDJNv?A1RXk>_g}pS=Df%d6k}@0JgK`kic@2DO}U z*q)zRg7pa^!h8)og2X0w$O?(8?z~A{dTKYqT*f=Me=>9Ctu$MhRN zIf1cussg<=XW`FW7NdIQ0m%tfER&<&*k41VW1KQ4ICGnVgr(mz;EJLZ9Q z+ymF64>@jm_QSqQAEdwXrpuNOT=DMZgZSWl^A~O$^{amXsR6SXHKPnVfWlPo3a|4| zScRy4JO?$76D-y9?tHND2`UbH6^&g1WXCCqf?nE1r+GRo^;@}!g*QyTM-^2u^qVI> zKa;R&kx0XR|Be`kR(vytAF)-p5wK$~Mi%PdFqW4NE|U`LLT^9DvpBcjG)_AF)S2QQU0-?ArORvH_bbbL zFMm_Fu@ja}ebA35#;* zycp8-761RT_vS(0UuSvW*V?RIvKDJIUSzNVFJUuDsaXtDCy<5^5;LtcKp=)RNttPA zGHL#32d2$r(q%d=)1*+s6fjA^lPM(u44A|Yn=!>pYzJc*Z?a@rl4V(&CE1dCeXi@i z&pF@U^S*k21{p&}=Xsv*Ip$>mr{XM_;eU{%t|MdnAqtDeF@AoXWD|Nv14W#o8 zVlr1uKSuD5q`8c5?wc2jevy!^gf+4aDZ1;;vt@1~SKdu4VNKBGWk0*w`E=Dj&*615 z2t9^hL5KOqT+X2CjUR2VdHZ%VKPK!N zbB^7e^nZ5%z8$)Ek_j5%1c2ccP;F7v{r z2*$i8@Y$kEKj5SUDR3>Z;dGewm)Dc=^p70ZMOG3uw=K$G!WD3>M3PWt+JTdpx^MBW zt&Vf>q>nNpmbe=G-WI2P18NkP1h#&X)SP};6~yB~Sm_Oow)_4da1{&KnM{Motg)O6jkPat{~-i&T=c^b&k z002M$Nkl>!9xW6H}gXZk0dOoSMI* zssNXfOO&nrLw^PI<8;K(2mrfe%~ew7s=xa&zVl1LhXpq1D#j0@22nzT7iTrNTNjke!Wr>`!9dp#Oo; zpC032iVW6YdG{~sC#inHU()(ZFZqt`Wnb|1e);##`Ap8w*6*6|@r_^nO4Ho;(7oI5 ze)#R%@9K5Ae&^P=`SrQ(*7tBS`awhgWLa9X_O60+#%$K}uX`ft2LKBQElaPGKQQY| za54@|vDG{ZoF?V7u)6EA{2k7jB=YCydX#=7rnm zUj2fN@AZ8)2Hzb2jrYE8`?}(ceB4uy#BB+vud6Y4ByPz7cvp z%xV62=@a+g|Cj&u_JgngAGbgM#eaGG`WJumcICxq{&B-Yyg$gE-#PhG{d5-}J#Nl>21sN|!M@-r>!uJ54C-6e9znW?YUMVe~Z#L24noU?jQ zMjYt?en5f0wARVP*WI7M-M7_S@aE`C$Mrrl;H9hdqpg?L)ua(Q(l8^mtKv(oto7^Z`<~~p zHYeEMtLs9V2BeiA&DLe1%@x$u>JJEE|8 zpzgA?ak!wHKld3_6{*Qe71;M8gey)fV-8^9En2{x2nHou@$~^9%Oxky@Uynd#0mCN z`FxIuW#+fQD=)gnPZZD8m!A5`;u%l7@iX!xwW;9Kzkc$OPj0{cfj4ab=68Q)d&|wg zzCHTbBWb_uN@Y106{~X@Pnfu9ir)EY#bbW|19cz1x4(qtR6xerrv2d|mcgwcOYxdC z?X`n*$>$ql9O?Avo(F@cc@oK~CD-W5jsS5MsjMs+?m?JI)qC=rKQ~BXlQi#GGk^_N z)3dQHy3*t1hrftR%kKNrZ4+3&gavkk_Y9W5spf?)G;0cnBo93N`_UTbPXd%!)GM!> z;UxgNqmOLA`QF#-;|=+^l0{@{Nsciwy3_}7ZhcNCtp9(}HJ^CwF6fqvy)m^VOT zbAtV^GsoGKb0deIc4zxRw9fV_sBP@~zC#B2oP~^asSQ(%p3H^Ls%dCZ+(=i(s@o3~ zU^JXpL{Jge{40R5%nhwYmAmeFYz8PT!sLGwS6qCJzE|-~{}j@-mp#KXKas@x{Btj~ zhChmz-_uW1y4i%aqy;p1UuWE98kvD2mkU5$C?mYZS@4p9PuiCl9hoL2qP=`n+Uw z*A`Q%6_W>A<+lh+R18D5{12;dj^?MykkmkvmV>{{eqbA(0I-NCU28t^21uN7{9jWU zWoq31$uMrPyY9PF56r8I>Gabxp7!kRJ-7dk>nEyf-szV;^y!o1ut6^!_dfXX?Wgq$ zy+8ftAJ>EabGEP2H{QSc`G02ng6sdpz7HgR76(t3H|mv&`8XU8f9m0lmxs7TPb~Z{ z&7JpsWc#Q-?$8qpYyRi;(5HB!<%DXeet)9OvB*{Tm>T2b<3i&!(kr&f=i2>qfcLp? 
z*89>KNvW{8F?av3gtLImzzWse_k|O2EqW4{dk#NZq8~=5lTZ#Ku$dtnUE%M0&iM%% z0L1emgIDSag8xbIQ+NC;`zr0PzW6%dD?CYjw*L1z3ORDYx!iR7JGM8z?{)fy`0KXY z@42NL_@9 z0sOgm^a5i`@j@}h-DdIu)c-){1JD1>S40<=IVs)nBnpoTfskP*Jm!GN-#$R?3LFmS zn_nJolH5cbLb(xHVa-px&F(M|6|5vbiO4=Q$(MsyRPl!=x{{q3?2c9bE&5mXF;5`Z zKkZq50vR}F1UQV^!%r7PrK+UJwaSMo+v(B-^51h z6EXOo_PcI-i@zcMoA3MA`n@TA8FVz;-4;zABR*e0iO@fKx}F*5gE6YQ&)&&??C*el z#IJ#+Y~u%vli7dbl-=i*8NKjD6cTcl{dom2$2bSwb^gnobfYAD`oV;iqu!`NZAZ$MpomZw}x4;5~lX z`%gUlfd41M%xi@4Quusi{fXLtV*8Z7t_}t-uE#42Jf`^(eQo^_{i5h2`T+qxvGKKb zW_>|Xf7ki!?d)^-JN9SmRS2{`TOWF98vX|NS!cL-ay{UmYuAKFjheYS21{()3$1LYmQ|($j21Dck?bsYGHObo^^#@js9xrM_^5v=7yy*u#O=*1O@f6i6U+rIp{dNR>pJAB~{U%EZ@+&}o= zh+ZUq)|Y(p=3nD{JURYocYq(dc-L)j-QM!SU(=JuuWdJf?DtLx_MNWBR_3~^*({E} z*E`t{MJ_#T;Bi(!dZ74wtlD3D^zVS&eCo$PIvD0yHp?dv)5_KSeD=v>w3H96mEQ^V z7&bl`*fr|2*W8cC|KtyH_60Oc(JQXuQ|J6-BBP5dKIQT*lW{F*6cB*Q@J6xC z>lyXfy-a2)_fAYmDY(Uu*p2a&e7xf#~+zdHml;zd^rDI0je|FKi$UHtU>$Ub%my zgkH8Bib2W<0+0i*aNqw?{mAW|H~C9l=bUw}zWM&b?Zuz-<=cy&@n!yU*F{gg*c?w@ zp6tLA-GK)lzE4jWZ`=O;t-rnTdQ>;v{&xTGZsp0no%nPVgrG9=Fj-FLdaqf5o5f5?6XOTfa3rpir1FQ~4xJeu%h8m7xUF7j}voqTbf%(;h{ucnj4N&>u zOyfS^;o|3sgWVHwS2keowvNjAOCFoL4CR9?CrVNe7?Q}_hs7`UjIef)DS<`+NtyU(A+Kh6dcjraIB}E6&e!^R)!v)*2de(X|M#QTajCvp`+WVD@#pI&yPkjTi~QrlS3LRUp>E@o zb9LbE2R`bblzPwY@6->6yhH1EZudQO_sPe<{yZ?!9bCIPrPe&|^O(jL{><6u^TN!7KemoVv5dA?f7hh3?&_#UxxClX zpFe?fY>dXVNe-})z)}MAC9Fv_KOvFGaH!@q62GlBPbB9l7NW)rkphcoZ6p}U(PH68 zyoGDdlkbvmAe0nG2&~Yy=I_~3t5Jn(lh$ad^r5eL@qJ7$@$t|nA3Wpt7S7Q(RBzC~ z?r*;91ICOFs!M%YRjqKA-TdtwHsVJ4uEND7sruxN%$ymfU{Z@?doE0(8GJMPx8BbS zJ^o4z%HzjD{bRXT>qm3-@tmulw>|TU8#iita{Pzg0shx|%iZtyD-V3|qwn#|%cAom z*W6S6c%lor#d2ewc>KFfCqIA8xcxoPJ=^0{kAK@BRN%3X|4-*NuuLy(KW-=qF?2X)_V;6S+w~hiRs`b}QVxCPSV9hrsoEo&k%k%`Hw)>(GUfuR{ zuQ~Id+~gbSH|u{a=m@e|0xrmve-f~=f?=BUs(t)(fH1`cc=RMm`}~HZbI}qVJAMi- zB7XUgpO|^md;jgwz~4c6=9M@43FTS(X(@isg%@bNK{fIVhEI+^ULD{Y+#l9Ucks)f zJYjtBuJ`EgXWXP$TX>&-0_{`t@x!ge824E%WXk=M$0Po}Pkmc151`$BroeibIlr<^ zvAZnyBl(>(7tKGl)iaA6*q)-l2zuO?XL%J~s_HMxB>kYEqTy3aYwpi4a-~0Unjbt5 zVDvWj7+ss}-7rl0*zprrV}{YY`Pkm|rDBbf2H4`hjg)xNkEWHM;Gxm|CMILYkF`LZ zUrCKghU-<#If)G!@J;A^V^a`~NSZ%sH$E_9jeq8YYs&-*NbOx_skVvuB66w&V(TSj zE`nLHt+l(-KJzNZkT?O*NHdbxb2}J(`scb`-Rm5Tmrmm~jUNbY` z>vhWSyvzwBHuVI#>o>y4Z#>Vw30Z|-MCnIJFh*hbQgM7zd9Pl!gpV*fOh4v&)^^pU z*KVKlw7lZ)4SHhX`#(?D@2XsV$#wd@hD*)!c#jJ~&7@9y>5 zV+TJsW57urHZ3N9Yiz%M2jtk-z|s@Q$;W?0Lhm7>e*E`u!9(7D>t|ODNR4?cHpF<8 zb_3*ae@&IxwGZn$(ABs!Bz>@gw)41c99WCLMUuz+!Q;Q{ik8ZEx`Ju22GL5 zkITkjenk^IzXxL85|dOgJ!UV4%68Q(J3*^xjQfHC9q9=%;P?q71x}B} zaY}k$3n8Vk90Q112&{Y?B8Pm&`m8qqa1>aIZ((pCw33&6$n5iM0L6)z&|~;P1$)Ui z=9*qW$9^My{YfnM1UaKFU?lHbjDy;m?U+=vZ}+Vju7NUvGcPcnLP z(aUo1%XR!#Hm?VE@l!9;2fwAwFWz1DALN(J_>vO8k$<0F`{_RYawPwI$o?MxHh%w4 z;*NWHIhfo03zGD(&(HUI4BQAAZa4Q?&Y1T5Mrgmb{rKZ-FcZS)b>AnA$!+7$(wdFu zj70mEf|D&_2IHQ{zOTN!MvQp-z8RPu2^RXJ2X)zZGI6PG_^88feuQ`VQThpaV#Sy8 z7(+)kE�NPmg7gu@tDbReyTjYNJVD10Y(bVtjH&@Z_)k;p4w?;*NsJZ_x`G`}86) zC4+j>q^EB%g;>VYZ5?|KzY9h<6!r-)YUal8;MgBUXx4K7U9KTfyK z)3Gy|n3ba>HvQrY#}HN!^ZwQA2Iwq#>6c@M{}N#eHV0zY`@h*zh!AavI+i`mFQ@x% zyz*27*3UnV0ci9<33_+`;BQdPqbCd5nI8Y7yXU6pe)l=p3$owXF&gvq`Yo}7HCfM2 zROu(Kz6X-%fV0jG!k#rV0A21}SI;Vj%r$1>xTyk5A5$pNj76FW{i&#o`Lwk1g9Hnn zBsU}59_^d|*DX#27=Xff*Tp9g%+AMr>>rl8=CT_EWYAeXTaEqe`JhycOVga?zU+JA zWTMkB!{=sC{#X;}A1Vlc3#hc^{JLjUEH3v?ISMpZY3g1^zI?)VyuWcs&+zD@0#~fv z4QT**>jq*@^#~s6#tRq5fF|y-jeo7*b>AJ^M>k$ZUe*P`utu6@XBS3vHWLg&a6l!U zW=db>RGrQ;kye^}Nvg40_=1}%=}?^PyGPbfvVmFDvbkU-c@y7vG?N44nBj;&`c>d-OVMG{8uR)xz8<1^PM4H+xh1D{1I=eGSm3 zZyIAw{o0zdf#IK8e5PsO)Sy-O0h(~8H@zmfU$HvKPoH>d0M#(pr)O0Y95*rK~9A|law3GsE-N}X0XNf4n 
z$wKK6)~IPjn-wznWus}lc^nEDIl4It9fr=o{hr(OuhB=6`#9GN&%4O4m`X_WbF*nv z_^iRrHAY{niR9#lnE4LZg^&uviKSgx8@TQ1eeh@%jL71rH|tp0@q{709OqF9t$!i_T1mt-0LwZyL5waQ$PPX1@5i!l*VXIL5qo)<%T4nE%{A9%@1%sc!LG0DB-~YIc zFJF~_f#8Kj4^$ZXtn5Z`C17_iYmdG)-h0F8LQKWEH|;E=UDXU~c8$jC`-cRL1t+=+ zw49U_6|r~!7ulodyL^SO_q>Au>yN^3`p{He_HmI{rn5pltXVh%e+tSWW=*0G9*AlJ zKrPxP@{Q38SPJVfzcIq>YiCiAxrnim3!l(3H}P3Jnw5BX6Z zE{9fFpBpo8a18{o`olV-)^XshSjBIxO2gMknMx(;z`zn7 zrb<@;tIE;rUPwS{B9;Vz_G9Fuv-p8Yp7Q;+eR@BEC5n+ROzM6D%1gGN3bAK&r-m{MNB`CPVM9%c~ z2Ap6m&G%VozwndFN6{zUoAN03EZ|)bi)FaDK_0OaJ2yd#^MMwVZIH}hZIo{r*h6B@ z0@(2vr*GPXrVHRzuFW)|+O+2vfcYy~>quLZvB^3L2oe!)tY+57ei@5zq?Z&B^r3us zv-BRsD99CK*Uh}HD`8C5zw{G?U>5=T#oM#P<(_Ox+HLmyEjBuq2S%$_?wSsNYbKXB zvcgZWJ}fU;*0TUOKLNR7a8BlV{!7xTVBw!wxi8%~6Q+d?-S;Gj*h-*-sKuPjviSa& zvIPdKW4X9uV-!{Z2juL9gN@gBfMFD0>DJP_wwbsw1(Do6e*+LULJJ`A11-HB`h^33 zxyLz1s;*Uk``rg24*ELhhA%AKU?yw?V|D%D#(D7hqlGvimwEo|0bThq=^N8B-%Q`Y+lW?l&rc|SAnSwK*&hXI?M9}FbUypN=hp*qI1*t?nR>4voH`BxpF@S zDg5QY?K+w*v-#UZC?nKa>)Kq*>v{y#0vPbt^ic98EP3<5*{3yN`$Cc!*0Z2qBO*iw zr$sKH8kSAj!ilQJYQCYe;>|Bw`;Es&A5`RE@z>mquGGXSiZSrfrG4~+p`xTpZyvLoLbK@wby) zYd0zg#>Jf+RvEJw)DwQ9M!q4Uze)>dLAR>JM-L>{Uoouv(;?XAG)Ky^)Y_wf(Rl46 zhZY!1i}t4qCwS?89Pp!ude-H#c31AWC$y2vA7&W5cHzzI=rBU}-7xaL0Fo~;GU zWKvuxuxJf$`_jL}rhj;lGJCPpT>M5smU)QnwZY3CCJ?(sr?=UmRakK*d(UtE@$auKpRCk4qPmSC3Z<|$T45|5zx{hhdTU6@ZO=tx4zaf*dpW5?1pY05bFEfZ0y{xpfi{k%QCx z2R$*+sZ>O3fGt`BlcAoo}b@#%v zgNRGMug)<J#NpDVr6GvWg z;3u}AG>wt^@mC_A#&Aa6<7Kp5u#oK;RK}989e%JIml<1+UQYh@XfCK@Uxn8{!n>!p zfb^;y5j!-%PtRJ@OwWq`T7QXgEd6kTVhph44}d-i0>Y9Matf1P!0(yW95Z{yHV$+8 z72fF28B^YVG%m3@2j*To7JiFkR{)LUSUrJsQs5eNPsN>JAlC><`JvTx(EUh|VqkUqL71H|;9BcDL7*UP$Ff%z7I;tx-b z!1bc155yvKa5k5H)fs!Phj!zcXFV5UXHS51UeAk8mj3AaLv!NVXoFu^U6%oJIgw{W z2QK`W7|>ddu9;&W&f3Bon2H(zfCOjB9GJsa^$q=Ua1XBgZb5%i+xy_1_xqq%5}wYr zNG_J?OLWW759nEG;VCTPiAQtxeV171j-MFoQDnzXT)AYl-9p9VUF&yh@)6##T6bM? 
z&T;Vg4>0VFXC}q>JX3qsz3ll^kN?xNLNgdd%>T@RqdZ=kOcPw!n77 z4O=O@b>}-<0POoM<%7y-z}jg$_-smC&V|@!tNg=#V9dud>Wkm0Lh?NMgIGLlv~2ye z&_uO;WDou#uCwhpu!!~dK)RWE##>3WCSPEFP=(9u2Fi`lw3*x2)Sh=Cx$(LQeHhKL z%NKXmKIYj7ff_eRT!|&W%-YM+00^m+iP}aS`hZU##C^K|1IZ^Ikc&_lkQ*-0r{PBz z!F+^2SjYiu9R{^My{}Z|c8&?zS6cYBqb!ne9N-eo8*HgR*^@ncl1I80w)VwyYpx9% z0C8{G$gX#5np@jDW7J#)vPXAd5_KTo@DUdNL~)-%;!GAA(r_Ft)-&Il$dOthJq#EnhXx!S82Gl$f<$Po-uBWoc)u9TDJ;60%qEluExC&+~EfSE7y!T;gA zKLqs|$7L5jZF}lD=SM#WsW%}`ej$XDPb69I^>=`Biog5atcou7odH*Msip)}_U3vSnI$8v#_p#oih-lj{FUDegC8T(O6?13w27Eqb?4SJ^UJ!%DVSM%e0LAJo?Z4 z_y-rn#)Quw;vV-~f+`aCh>oqcOhdoarvKD40g@Yo?R9=F042Ox|JwhCXnk<$%|lcD z4}`4{ok)O}nH^b|1-Oe`-QDU{xKc(TePj7VxKto948*(87GPsJ?boHWx7K)dWr054 z6k{ys$T4zcs1dGpmmHEUOIV+H`2OwAdvEWc$DMfh<1MW0xtp0;@57+Ir!;bV#yg8HX1>;0OuS+ zy5q0d6X_UcV}j#7OU`!=_XqM&*+FRf7MSSotQk0a6&w4KjDShPZsA`Yv8NYpU-=C) zDJ662pZuMv*~YMwhKeQK^-pc`K&B}%pcj58_FNQ)y~w@VBmi;GP2Pia`yJpEVu|R1 z3O{hWVYE ze5_V4hk0wR@DO;@)#<0OMkl}kv1Wr;=D1N7W80dD#Mle~SY{=h~3I z1fByoe{m^ojIJ5OsI}H3s227M2$zfsZo-(oBYX4z4A0!QMEF}3!%VWY-o&sRCvrc7` zKbXPZ>n||)O%6T$NT__L;njXbRWN(Srhkjg%hC^y-Wo6cNo=oIO~-x%-9LfVdL*ND zTO;~KVcwy;^si*KraJP;0n)Pp@MytQGnn2wv{p=w#abyz&n8h=G!FP3XLF()Qx|Je z04dxO%-u6WdcE2^cJ&XzOV057UR$tmNWGpwN)U)Px@G>k!#_i@EA!W60Q{Bc{FMxE(`m79TqDHL>_c z`WSN-MuER*5>T;geq$5helH2uzwCQC&L6x3{!(J4=rDdII#lb)k7HF3xb%%agO=o# z2BkfGPkPhw!=pthSoyKebnWD{f8i;A>O4p;R82Le{}49)EZn8eJ%3;dMVpS-(t#7- z?H=aCjK-PwxBJ}bg-cZWpT4hso&8QF^PI?1d<7?}cynTXels~u_W>j%o7qIyiWb{vQaxOlbOG zmWt{Z3Xm3lbl_m3?`w_D!`%%~M0yt7UUyE{0unv%GXb}4ps_EDSHU6TfIp)2GB}A2 zhJl@L-q!E*9PW`8>sgmElOY?w_>C^qDz1Cpd-tRiw5=aMR+fXYE|%zrtYe3Imu3OhMW`@!xeN zukw|i?}2C$IP(moq^`LBk#WIj|tMUqPdQB+#QypJldBo@v~z zyGP*9J||@O>xcM>A_!CLvld?KH~N}*SS zKub7mb4UestbCKHt1_Y7sSyPRu&tTwlR25aXtfUYy=Q(! zInOZ3*hj|Jhg~}#el*A;jo6!i+*qNqkkPG%vv1LFfH=p!Qn8-)qTC|uhkL{wPxQQ1Ds~Z|3K1L0+gTkL%x~P}Le~kk))Ik_Nb3*(k|TyuQo`M%QPc>{BK;di zWzY3ltRcWO`Kd-$SCt1ZJmGjWiGAE~|%vL)3+ z*;#;D#^A9Hb_I;7?9$8hOo5YN6JZHj($>>fYpk>3Ll4JK^al~+dNJ6`;b?b%r)qg(}0=a3F0 zrW?l2c$s?*+8C4Gv0(f0W)h;JGBay5QDXCg-Dp|#bP~;LByv7SR0oqo}bRDqH6xrXn1|I{aF&Az$y7#vYU2 z9K*W&jxAbl7^hb3v0uWv>XK`=bM(ti0yn3*z}GOpSH(Zn%3oxzNo&hUnq0M55XOUv z+6&(tb?P;lZTj~L(m&9_h=lIRkOo50`$1&H(qAMKUryU{uaM@C%CFQ+A`E}9$|zb_ zUJVGqQr-@t^AXP*NY)RwcSYR%OZi}St{&a6D4CR6 zb`}8*hEYz-pGa~w%pbhaAM&R%iYKomHk~23w2$b!Kk+gBIPU|)ce(|Cm%uNo*Z|-x z1-dW7bN^hwStglmRPO5KN^0u|)C~$I%~C+)8#4_gaE$4vH8VeJstZ1|@d*iZ@wX1l zm9N;@Kk_sXOYgPh1FJE$=ox<;#44a;YIgPxAgWxaVkYiL7oK9^E`Bf**Y@I{juARE zT4Dat;A8K;u^=m~E!T25H<&x-x zWR8IDS@QcV_5iV3Wnr6HYYz)7hxtnjbmFnT3hin356K{}u>gbNtJd)swC(LDuQi)X zJ7t>p94Z}2YzPjP(`;~X#|@_e2yROYw!u*H$(aI!#I^Y%uw(EX!%tjBiqX0RH-GCH z+UU~wqgchEG&jNNh_t~rH(REz?B zHZ}N_+{hu0rsNQxyadIECig`o&TdZYsTc&v?!Re88TuG{Yh}KLpZ?e1!m(zjkO-va zYc9P(FU0pb+ZFn=x{QxKaOZaG-5=OKa?dTs@AGi&N=jc#k24C$Yr(g076l&9rT6T z`fdwX?kAG;!LYe`Od@05SEDZqGY6bXtT2`g_aMy1buEg|-$YkH=UALm9OuL|60r$1 zkrIZ`iXT&vn^bETxQL6;WVZP??Ag9t$#T9d7L*+1X1aAE@5}g<_ zhFV)k0~F*qxw{V|)_6F;%>x5|uN6()CFfth{e>_2*6pRwd-?XPD?iun!wY|I_g8QF z#qFox{A2zb<>M!*j7cR)Hl#A6gAb$VxTU#YMW0tEdVp0fArpGj&A4F>;}yAfoe?I z=jWdUxgXUFFnJrJd9Agqvbknvo8ZUBPT7Tp3%jpbcvnerDkb$N9~$x-ISENNe9#8J z{T9>w$@dcoQI@9s(uO>P2NE=#hGFACx6@0lET;VdPodb_-vIp}54s!6zt95f*&Kt= zOxh`*6q+HS&Q_Aw%@2hfYja(c{ml=C&;k(L!QWaH!%)nzKZ*4W{V4BFOAxHLi=(gH znC=Za+3Sse(PyN9?nOg%qy^Cc4Pz{G*L-*uMyx)O2NUOjkPDlvr{+T4HTo ztJXb}X7`-580L=VU9faJcxd&Wd3hKf}I=|}likE!n_MiXB zzvjQ8-u$PUpLykTw^v^I*SA0a#oxO9!0_zZ`NK=pX(lE zsf8_P&JE}?ISck(v*zxFQ*i;L*`t#DuTU9I^nxJAcS7Si+n4p8bG~zQA*J{yA-08| z042HmUZ!>+D&2bNMQrVFJqP!EVYOstix>qb-+1y}d+f`Y=u21`;uWZSTo&suo^ckt 
zT5yubl}e+T^mt^WoDx0HqPS*yH3c9N$YB}m#vPA;$H+=dqAR5U2fMrukgmc#-Q%CQ z_Ls@7b}|)tNb1OV?$Oistli_OkU?WEyGM=4XR<^du-Tm(v=B53W1J=?D1-aq_{>C7`vE6fF!fRk=jSJ4XaQpsm`aiZ;ebwJS zT=OjFW$cMrQfUEf3#hwH6!;=a9LY_#YahEh8E`#5S8hF@~MotsUJ%XfVJ^n zm%ca1A9q04xumcpXUC7xc*=B7jOxh!`4N~we#kWyKZcQSi0JPE%vsRX#7KPffM@*` z!@BvwHm5m;ToBAJ%MOnxUA_c)%;2}sTieQy9`~gRXnyP+Kl#Jd^{`92bX9N1Y}o36 zf_z@Rb^j~7c=~L6#v5JE6ac*mpY=N+zVr|9?qA}fABCi6Uf0epev!GysoyE$3ubo3 zuXsaEIJr|;o=%cyo}(6x93Jo z(p7p1cb~^fa*FjQA9;BD=zSkaJy0(6&F@}oV@_t?iXUmt+0#!Z1*TU8T%yTmFwwK# zPx`FFMgOyRELl#F%>YaO0O$+?!jcqnqA8C>^rt5D^qAQ*wsDwqUg5PSVN#lV(IB4T z=I?=!#7dW1v8#QSw4#0UhQJ4rJbXAb*UlOu@e;Y#pPDle%$k4O|{h{||9+Yx@2{uun7w4Yn`(4}+`oMt><_DQ|#K+Voq zC?*E^$!vz$|Erh(_uCg<|Haw-0j|%#_C?$G{^`G8LW{rfxCci)vULmkljYur%6Y#} z{n6dIDGXppQW>#C^#giVT6hXec;ZKuY$_xLyeAwcf~MwAD*>9t z&5~tl8u^Jff)i?G&cKqh=kJ_}6*BD924B~&&^30JX5O2mYlBPc6Y1bOCix`$&HW?3GKgcX zcSBEe?9PHjg8)A82)p|Q|EWe|;&B>SOTIN(9(a@g5cjMmKqzk>(a}3Wr{HMm8yRE5 znGs}BGC^{v8y_RSP9)D}U0BQ*tkakE*w>;{ohh3+B-TZaV2~PF3;A(T6V`+!ccGvN+`QJ9#`vU-=&|pVlWH@Bye*)-K=c;^-LM zMUa@eHSk9bJ6i8*pf}Iclm#ny~W`m;N35J0Rml z6azVVs9RV?Uv?@baPoU!xelK04jb_pqEA5pqaVFmGBEAZ+c6?$KKCzgJAU$=Gavup znm3CC5G3w#zqQYigbZSPdINxup8xmrBGi;%9ujZ*@`-=GL$&-( z4=bF+W*t8&_z>!1PMsB@&8PDtyv|B-S!zA7t(C_aql1{k-@Ra-O?R*_(ck+PHgpNUad2C&T{@Tjar;sF*4?H83N91@T2Jzqd6?upe{g>J(HxU zgM32uP4pZj(0!m3F<8kZvWv?SW66$CZz_&>mOUDGHMY$KI=F8rD=~iOHy1e*w}rHR z8kJ92%*HPb&BeB5VS_0*J=UtDW-vT3>+RT|#L8=8DYt4eM~Mb;@M;tO*!qU!FbN<0 zYt-)#xgi-J)^R+UqC?TCUW2mGw0M5PGVVUG;z+S3LLY27l_~K{adbnybW8pnw|q z`AFMxuj1&4KNxkhYaxe&1rK*SmAn7Xws9i{+zV*s z%KglG#`PN0x+XS1h2`YRwghC=Fi!T1wd%l9L`B$SO_ z^N=D5m9jZPl#{*3Ifo=Ggg8bVvfi@ycI>^^vG-m_#yQUZ`FwA;-{0`OyxXS=A>C&dM9!!com)YWb4Y#^e?GPK9kgQA2r?c5S#DA@7a5v4CMQ? zUOc4{+IThXh(H&=y_7Koiq7dP)xUu$3;v+_wdHMjIVm+4Dn}C}M#78?A8iqKgpNtK ze94cyo$GSOopjB8|LX3+gm3$Mi#6qNm9=*t1iAz74U+KRva!KlZ4792AZ>J-(IxmF-S3yEfxXT}cOM=by@1?3FTos2tw)O^M^=0zQ`X z#>$Fi;5FM;JpmQCwv!Q?+MszRqqP~&r*o4X(P;WSmYA!x{ph{uFiZDAR6ud(kuLai z$(Dhp5jm@O?yKf>=Sj&qO5zb6D%3R2&m6{9lsjdFKdSXD@SA*2S;=u=6p}~|a+*(s zZ*1yR1lW`sTk#%j0h&)B30BW3tB}1$r;OsS&gC`v%Tgv9ZV@*|s#ZLgN3O#& zXd4{=URmG~`4pf^js%rxq?zHIy0$x({IBHeoK6;OQdlj}l5U_6va9v6f4`dLrJ&~XrO%m#D zD4Fhf5YNX>Qk2Xu<`ljj@<^%Kjztm$j1F0zje=kl?=B*DV#``e%2$$YueCmX6v2;u zKbT)XO@s2Qud4Ech7~M|9^69_HaRWaxX!*EO0Z)tSqL&Q;f&iqp+K&Wk5-G9+aJ4P z&IIO}7v)`1z&D}AoSnlO*um0=+czz)VB$3TQsnY%E(3Bpe)s;zWLWjm}MW3#`*L;y+v1l|oQxZ9sa%C+)MKntWi#I>0h16TBq&>2g~C zx@Y)F4I%dEecXXEbk)~jq4nLDO`p=ky#cBhR|=A6y2eG^4qX{ud@~}PxBl=OL2iU+ z>Gs3RUP*rbSK@N)c~k%Xt%|olO7%MDsIRu+)k|G&s6@R8n-j9#adN-&&vc!<4V;6a`+$C@;*IDmXKh6$*=+LzrFnetO z@$FZg<4DJ6pKGChDB=r&!V}qgmRU17sZYJETN=eHbd^t)+{fq9Ravku+O*iSsI=l) z|FMg+((0M-Gg|;ZR{FNyhUBy%PgBG7;69~bh6Zd~z^~&J?#YX6*g3q$m|TD?w+tlb zPTSmt|o+-d3oCW({aKRwB390{qIre_`Px+#;<>r@XMuF}n>FaT%_IW!!# zKeThK^hv;J@BwnhIyICmp>HET=C7xHXWG8g`4i0hJl#m zHulrc!ub7OQ^Z>I3Z-KmtZ=_`_8EUrD;48{#i2F|N1ZWRbFh1sZ#t7&rRs&KXKV#5 zs>@`rbcZlF?-h=z7Gzg@;WZA&NHlyuUbrq_`~10xNzGsCnW2YXpyD2ta{AO_qTArd_cCB8>CL9i&ICCoJRZz|%61t%nK+HgwpY~J9N z<4a%zO=ey&K|1UG&;L|H0;CASpl!O+c~(*HXa^=Pf0w~QU-iC)YZ-8p1-|F}W7MTh z6D z6qx@-GE;LKUs_;mZ>gt)V#A0Wb(_6q2a&J&xDkD2MNKAVO_*{|U9kk$@j^P%K~dX23Nkn2z$X*VC%@#Mo_Bh+j|4rbdo7)Z-{+C&OC)ht-N}kRxIG$lWx>_o`wMOAkxD^}wp|Zqty<^bz$3>%X z+fi>mFU4mR-DhAO++lq+&8hUT6~#2vnM3?r0QR_5o%>|3_A#jS)%@*;H0(px`lJ=3 zGNJwQ`&1*yJuhml?1?6sRht%;aC|O$BdS!U` zt=_Rkq1B9e_XUO(8hP&nhy!z~kD!B__gs%V7h9c1n%SF--r*SIORWwhusrGQIOS&v@$(dr)b6qb-I=FMVU|htfSHz+IpBw>lAlV8 z>i$(RZ^EO6@uk1Vu>JX@#-d;7nyUPO(EzsA?v@x1dX|8#F^Wb;U=us_=*w|CZEA0| z?KgSlPhmg)T|A!eWJWUmwOh|bel1Zwxr77w`vCQ+8IuXdb-nhPJ{+v1bPd|=QDS&9 
zRp?(&eqc~AoaG6F9^i34?PN)2{HPpP!$b|Wh5jk+;3c(lI6>8-h4;`};==(ts81zfT|K4uIgMQ9c>R=s!- zPqN=-A42HCP$ zz&Y0VQsU8RnBwRO2cGYi+LGTuS}lq=&Enz`h~Cczsoih9JzWw3+1n?$r5`3XIvq5Q zKSuiBG_d84*GinV*oqZ%5Emv*yo)oFXbflm*0&}&?T|&6B-?dUu4dSKn>c6}q(Wvb z#kRRrwETLAVlv(ZVi-0$&VfH8oW0aScDB{NBWdMbhMt$6?bU@==v$diFu7a6J*O9* zpBub21!}D_LsN~3;8arXdThrsyW+sPp?Sp7Y}ZuuuPY91kKFa%>WY***r@YqZ`hyf zS6)$`Mwmn*g#kyb6ZC3L{gx+PwneR5!;6c&iV6;JT92e4KA79qXjH7iDj#X!o4;bh z8--Nb1ZWlXOT}%VhmB;e!vkps?`|7j$@Gxya<={$4IDGTKqR@>vbOwjol{?*?3r}z z`}U45A&y_Saf{G;huhsN=hgKp=!>zrwv;w76YiT2^fckL%qsM}2YODQx>orFsg7i( zK>QzF0Pq-CeKV16xhdUax_&n-Endl+74erW!tyt(TY%T=3RA#VvsaZ5Y~O}2qAWO<&rgyCq0I1 zlX@BhUjo|$wwnP~b>aFh=?)BjY|0!+ThCcl@-8z8jHAs0DC;hnG>F=m8cWJGM=uEv z*5p#pIh?6B=g>(0yC#y62&0x{#YLz5GGetH`(oXv4*L@z_X4!hPvXisuG~u7@%Fa6 zLi7GgQO>p0XF6g%DVD`@R(FDUo31$rKlwExU&Zt0cMUsYEc8M^$0Ke+o@p;lFz|tK zun6p=uY_%!l`e!QhV3UgMxF5c(rfHI<`hDKvGJqkEo9d}>sjqTale;Zq2dmbY211|hU3uu zvKDVYd_~Ur{A=@OGUvX%&Z-6et}?-W|qLI!kO zz>OD8x<%iuOemvC9^vWILoW!1c6gx55e(byuwYW{mLm>v`ME6YQuJJ1qL14lt6J(u z{;R0+t_%`R*6*Z|>Mb71h5McfEhzn;4v@?hf$&A~;f$}jAhFYjaoPa7;CXGq@Muo+ z3q!N%L*Wj=;N39SWB8+Jw~4nw;H*#0BW6jy)Fl_aFG>CMRsPpq%3iGS0j6b_Z}gnc zVh+^z-Sf(sEph#JbdKo*e*p#;=HQXxzia8mtvefOJCoL}gZE8OpUhi-P#=2p#D{9@ z^Q*0UM?B&yGwWaz&d8r3QYXr<$@M=@UYndBseCofbdx8w1HyRTn#GO2Z2(w9c6uc~ zSbFuz&MO)y!VOc^y8wdT4Z^phCe>_oVQLW@-9K4;^2*(7BpXWCh1{7Vy}`a;UojqY zrw0q_+x&+xvvi@`7P`CjF2rP(4@~7$#F!14-pE{Gxi!F4^6^nOapx}oU}_J}hrS_( zvFke6{bjH_5=|kc%^%`FRsb2-Bcn=|TGn%6V8TB%9yJdl>{0BA8uwogaxo>Qdm%J2 zdv@tMYZIV*D2#HVZmMH`fS@Kq31_=#QL~zLIIzCl^V*l581sHPt)D5>%q3kX=4K*O zK6`k=P4OPH$N4)fYXLcAr$&7kcssc*>f!fV$wIALMKqP*;s$rhNceyH*QXhE)it>T z>QTA;3AXTAopg3uREAYz_On`x3&s4Uk+UM=i-gLN@p8`*hDfQ_cp#NhNKo1RP~I~S zq5hC%=|V+&Z<|xcrbxHsIJUrK`wvv&QesN=)}p3n!+c*W_5< zvWH^yb@;!>A!pi#8WkJFpATmBQ1U4bUmTPQk3Npzi0q9{Nk;=}Z! 
zyh0n>7N;Q`Qo6y=Fi~cO=lK6RW+NZeF5mk@{i~U3bgONfk)3n1)7y0ua8xK#4BXrA zx)uFO-K)B9hG+UJwL#BATGQu>#`pVXyq|f$`{E_&tu9-Mz?t2TPhDpD`*+Okz4d(- zfw#AvmbNOG7$}qXh0#F~www7|k9$tbmtVlHBIBGDs2*s5S$wOK|1*)ArVpHQSTiC= zdiu&JwbxJDkhH|xsi;2q+8a_gvWKQytF;3}cXZ6O70UCmiAon7=FcJqYXbIXWdbbr zIj>Gnf>c5p#@rpjVo5`}PaEWkGQ-#=y!ro;k*@ZaM?Tx8t|c zeU$q#fO0Pqulcq`u{R?-Acd{VmXv!{5bc)H>jT`tUcaw26sHnj-M$JgtMnStUb~ip zlcVb)ehzZ`A(X+s>wcXa`Ah?^9r%&8#|-_e=H(HWN6;#NSwh&-JBfI^_{9fO^WKE(z5j&!SVoF|?Z3?@SZ;V_MDK}9%WKcCZwE)H%QKeZZD6VJ zna-iW7ux?W_g(O@&=t`uBU~Yhcqv-Xg_`D#?$R0OK>R5$!50pL4VcTTakUlhB>k;_ zGe21#Mm)$CSBw>0w)m~<-}EFXgU9ypu%YIuTZ+Mr{hv}qo2h_#UURr6i80>cKiVuz z=*QnLtvYz?kB!3yBVKoZUHos*_7{0UeZCtZN8(VWZrX*o%#_sa?9R)=Es_5ii0?vN zTFLkTs2?E=Nb#vf1k4-k#fC%P!fu~q4w=!=L(F#{Qjt~nM*HEczbUp-KLPf5P|l^B7=j_Fk%!%E>I>;u`7a|{i#5R2M|?M%W5!7IKQ@P; z@=8k~pAllPu=rZ0lUCd+(D7`wK#iGelYM~>z?xBxph_N* z>%#!V!?YKt8Au>*wBg737FOe>^~h&lw_@t53#V%CyAgfS0>Gls>|BrV&lWSl^kpr2 z$VNbL#o9IM>6xmRQ?iYdN^@htuZ^6VE$4?f7XD5CmU4~m3$@xYqpoau5EgXj(4nye z3)Ka4m)J92GZnbBVBQwUk@*wJIMQ00VJf3F+&g?^;K(gI%iwS~nSZ#ah5%@BAxx!N z8Q&;7`#|}#yp+qymq%1aH#-s98jzI~UBMBy5% zYRD-(kbn`n99J-uCR|TVnp*sFi8fGhTL|J6^c(#!C(>Jjw|P3Zj*&SDpN}WJ3dkFY z<&a~Ib`Fp#7r!ffQmh?IafZ=!@>5O!+qS@;>N7;vQi&s1`m-}s_{R|GEtbG`o`bdw z#&;thC>x{6L`%vKrtT`?kU}R>##*3@Cm-%CKd@Oii4LtOkG*%4RqGXyx4YQ3SFL9j zmfh6O8the638T+ty~7EOavgFLf;j%wekaM`aBDjT$WOXu6#eFYfHVJ3+0kh*k2jog z{8`H>ua&pWP%6oWxg?nH^)tOhp0o=N&N{D)(WfhQej^Foa@&<#szRD|C85joW-Bsd zVL2CPA20J?2_~e!O{;)b0SC>-GovYR35Qa|f6^wWVV2I?wp>)r0`fB!^d*CI7SiAE zM2_fnFNvlmZ-LL4-^Xhq)rXxZvZhpjw3+>k@T8~FAA<{ zZ*#>iBN$CLGgL}vo@Qe};{fPcf9G6C7c$z8`a7P0CZ_>nwYp~a-@R`2v9rmWAP9Ue zpmVd0t#vdZ#GgHr-6IAF=#M{tYCllT#`-9IbYF$Zs3_`?zUm(H&t4fps_vgDW`+Sa z9wz$nBX3fp3NGSMens4Ty3oRmn$A%^5AK?b&(+=EM^*D&Bauu;Jbj-|_z3PoC@W|O zt3{$OT9YG2Rvun$vbzF6hQ7oOxHwH-7Eo#GzW{etlh3mEuLqs7*=g@&cb)I9so*D= zscIA=cD=B<)t&M<4M(0&p+$-g9K82#ToekEBp%)ye4rfOJbmx_BGVPw#}GdbYOW5Q zuYRkOJZWiXlu zdbJS(&?TqTk|z$U(Y`~NAo3+F>@Xu!H{0odI!G?U(X>Iw%=4h&2r)BGl~21Ux;}^^ z_aPKENB^?+3GLJFV4^)<#KMD=)b;;kS-}g!zM)Fn&2jsR#<&tHlc%Bv8>aG<1D!S# z2e!U6>|g1}|Gmup6e6*i_UUB1g-LkxW&w3_ejl1YshFfD5nO!(&8JzTg)_mpwAqSW zY$SGfD!GGfZvqI|XO~A5oRuPOdF#)&#D3YouS)A@=7zw9UzL#0S$3J!Sq*H2z7ht7=EdE?hF9G&QfIOZ!V-ZHY`uY{jhT3OzwshcO^~n%GhYm=Kx5$5^ zq1WzS(x|r;?{{(V!UBXcSsMx#y26(8D9R0V(|0cJ@nu|9Obqu@bb+D#4u)A$zg}A{ z!s*n13y7b|o<~Bk5?_{2*-J-bSzz;+JZ#HdS-lG2@!l@}JE5>;&&8#bTmq{k4Z%sZ ze$+irqq`wAKBR=n{|@QOJw{a}d)@fkrdlC(ukjg0eDo&gUjbVdp%WFU`gaw@6l|_* z(|-+?7{271ANiEX_*|pU38@@HsKj3OQ+Z zqF-1fxTZRRfPsU;FL0{32Yb&4lhZ88-8pi93d zhksP0i;R7)<1r!d`>t7ed7Vk~10BKJw>NE(mHB(-F=-pYB_=7 z=_?mB5h3Lf(NcL_G1{%(7&lY*w0XgA42s)*x&B^pJD0_WSpf{=1$HFu89!a*QT8eQhn25vWp~ zh~7(PXfJr9H|XNq1C{_5IXIDKbdl(jOnSA8lXoO7ysGM{$Shzf(Z%K8`(f0sgX9Qw z%|jgRPf_+nG{3C1KPrZ}AN>rN3(FC6h|UJS&x=_SOrN0o7@t1|_2gT>m`e-QCHgHd z@@H;usaf&0Zk~hz*GF~|Hic8Rmc&_L$JyeroeAjSy1x-q>Wde|REL3QbP;s5!S1~D;ZtZYgtTG| z^X=p!=T&2q)e+A>s5g&-t?(bR;lUGngo zO#`h}>S-nB1Z7CHnBx>Foe1f?T#(=M>(G*zp7LvV5;M-M=H29R^bVsFS!3+f-9pPU zj3Dg&f+&5aQrj0qx6+W%C~P)$ME3$m=Ji9DV**yaKjY--&to1tuF_vOd@^aQuF!+-#B3CLXz)2h2^3;iGUuSabdgy$zvq%+PG z;SRuE#e71|J{*Zdi33k75Xpp3>u(*n|31D?KUjU)yIUceYH}!O5pMi~{&mu{L6c!k z(&Y6LR=b)ovD;1!T(5GetUCgPAjt1MQkyHW;?$6pF# zH>l938JJvu`x#K(adC-N ztL<$SAh56FawarClg~s;iYWbXV)XzGno@oPIl%-|CY@&BKf<@wNFwKrvdBH-Px?o1 z(+mB;JMo$eIn>_$$MTRBcYIe8Y-&tHsNTtC?qON@jZLi z$@P{AFCBO$qY2S;UWxhCk5x9H<5hD0M~8eY-C&DjJi>Q+ecU2_#YfEL!dbEPnQQYO zohznMY)$0+F6Y3efYiEKXUZc)GMIGDr=isDvSEyrwcOBj#w^hk2#2zgPvnu4u%Tmi z{(s*TfKQEEn$HIf0x_GhkZDA-%Jw5rk+Cp2GsB`y+$%?S5Mq zmtn4L<$Iiee9r0lRb5^6tzUoE^EdyYaQm8K^yZ@`*cKDHSqnOInhn6$4HyIWc_fIc 
z1WqJ*n>dhllWKB@ZPqPtUR9Xutz*iRC^7Mxlx?h!i)ENioBqoy1>%LCL#pFX8Y5&6 zVmzDFl^onQ;1&->nBEHa?ta9e4(jgEhD!>wi0GQINf{F=)zpGJU2UkF)G`a0t}3yb zKCwSp@Zz0&6nj;1e(q|*&ecjgRcVzUUFg>G)MfQhkj#vRkkKNR`^S-B@ zZ&=rU5NnUk(^fls123H#(KrGJuGB8UaKb=r@hUeijq1Mi_wTt{E_q)(^$Lu>AhW3r z+a2eUQmvGg>vZ}pU5g_2)3S#Es(m+zEjSvfCkZytH5=KjqNuhXSW(!Al9@`*1ps%Q zv1s0-fQ2nt>ELaGmMhziu)SN6GSv}XU8i_;eh_t8pIh_;T^jA}b*t0$o@50>sv6_n z5Ih!n0oa${fE}E?COutC0GxVK-eyawJ%z`@F4)CECxfN+#IX@#B|J=Z=O*;86q3F} zz1VzOPgb8>YQ*+^urWn3+2hlH<-ZUqgWS$ZUl-G4${bb~Xsi#x?6l8XNHc(ZycOk)g{u33#h zMq}#j>JCv4-bsDZ8-4+e#}_82X6G39{&eDTL;x^Ne&`jEShZ{llf9p`nG;_! z<=*BpAoCuL>fK04KsAj!q4Y1b%;bBRKO4ppDjr4qG*$}y7PPJciB;5>PaVBkDwN3_pZw_WE7zobzc_H-PGJeUQ1iqM3aesI z=rjIJ>ETs}0NBj1-Gp91ql%RTGi41$ItZn#e9WOyJ9$5C^zsKl9=!B4K{|Fe2-h$I zo6Bjf0=sH5{8=&}tC5AB^-M?M=^72*9wRBfi?Aid>B67FWGX7B#Z&50S+h4?0@Y1L zT<41v_H!{&aB}Ol)aXCs;olu#Vq82wgPqrh{T5$zKD;n1i_YO`HQ90ky%l48dP)1~ z%mW3kP|ZIy<~()6tr|;Wew1{SNy*o&Cj}8Kmrlt(mgIgoMVr2xV{=Z$WTX|R4LtQZ zS3Sng+H0|Y0z*aDd_JZWqK>+D%g*u#1eZ1HsnKB-;bcsenbfA_l6!P6uWwp z#qU--(1u3L!2fJQ0(AZvuIVf$J6B%BjjP`*W@K6ikPsQCPzAD`+co0HRL((<88Gwr5d+7`-_qz zuiw`FW?%NT+t<+X3J|&zTIkjGw*gGp?@?N9CRdyd*_xelG4wN17BcZtOt#dD3MWQH zMy>m{Y;d8&0e~L!9J7*bwA*@xF9#RH&sWC0$~k5Tlfq8N`1s{%mqsXQdvE><%P`)mo?TXyG^zbm$>o?W70#|?QIr5*;JKmYz) zJH7K3;gz|tMyA%!q0eNRiD+kPcGaiev2a#8)&Z`;+z4s#nmZtF)dDZ?>;DJx9wBKY zS7uzz-?Y!4{>)%VG3OTW&#h4&ZRxo}$_uZZUw8Z&*64U)=gbo0!tpL+Bf3^O=tbaO zyU3UvPzQ+>0eVsv@}Utek8!8)M*j1v0p+tFfBg;QsLKDZ9w15GTgenNM5YK{Y9i!|AS?}z|I>?De~V{ zV$h~kfiWx285jC754!)Xpm4gasbVWaM=O%l-UAE%{jiW+*Gt$_s)3u#*YIu@(fKD| z^51`1)aElZyE43g9CF^!2=c>a!2?m7+93SMkAJ^V z8H^+vl6yCU@fdE2ffF)Ii#+pNHpq^xG+@KdgQjNjd{ECF)9%zTi!6|$oiO-z4`ki4IXGd&e-g1y~z;qN~Gcs7!1@H+KDL(+KHWwQ-0#_x90sTA8y zUexwr+ubbsrs){@)Kj6)-+J#;;@@E6hv>hFZCb?b!$KM%Z+fSuVt|{O05yHW-`goo zwbJ`gb$5oJex5OV7vc0P*Xx%zD-kDja05s?1Zme*+OpaO2L(=zD5(C|*3bOt^?D4{ z>E0W>+YTsTcuvs1=RuU@kJB5HngRLWc3#OlQ}IfbNBU&pSZ}@x@|L*HJXFK>vpn?{ zc?$NxOEq|?%uY_9V=q#AbI0fG!p>*#c3F8Z%6vvxe@Q*ARavl3^JbxCgN)tpMJ1ZJ zs|bZ+bN98cOd%6`?`dte%YWMVTz9+E#n^N3AUp$U2;w6S4p4G4n$NKkKKljrKD*Uf zYU2hpS{;IblTWbsC(Qnjj}6bA_@OK0?~Ncasmr#)$NQGE+n4uO4K4Yfkl8U2ucZA14dZ?#6#2R&I*6Ya5sS zWJIa6TV>-MnSejrB0nEQs<3tNNONDGfBN>H(5EX>W&s27&05=^#1$|_F2X8w`s5rg z-IYgs&%RT8?yMRQW&vEZ1C{06l$ZZLT;ci>VLOw1K5;Iu(Xzq~Vq0&vSoXz&&w2WQ zW%nXKes2>K($GcjeU2^H^XnZjlh)PJvl5Z}*GOG58>c*7UrQ~CWDQuqUmT3HbH!jR zl)W!dG|#$^bz>iJfN57zjTZdVEqBIe_&yX_&h2&OFcogYDq$V2)|}O%f-5`ApHiV^ zm1@h^`x}_4gnI7F8Y^sHjU=Dl-1b8B>5*qqE0A8;{D!d=p>t9~+*6~1 z!WU156i&Y_)4)#A2_T$2&e&E=G3?|+cG4Q}Zfwmm`3RCdb_2tdQmHCZFMW%`haAL$ z0XaF*ChE_&bkcIde7gAfWty?abl~ZEe`~s3e5Ku<^s82O| z4mmo&&#kt-TLFPnoMBGNtyH1dFQEHv@BM78h*QgCZ!0%56C}=-`L>rs7Yh=$_Yvrh zJL24#HOF^N^#D(F$OuBjzc`cANw18yE|39n&Vlj>*0?W*FUI?{ppU%r_P@lQ&b>4s zW%q9X?9Lzl%?`;7K=`3JEn0RG$fJ;cO0|6R&WRurzv$Q(CXOU;aqfHdk_!;NK}*X+ zzW8;u^OIeJi-mzx*g8FN06_=}v7cp9;4q^sIGhES4;vFojPH5=~Aa`GH;hav~+*g&pPcdV+Ktuh73cCaeX60cfcF0 zvpxsAR4o_MNMnN`k}e1Xxc|?pgcv_lOT1J(Qq1K$iV|s5`82CI^3~8|>M{(56tDA7 zX|L8<T#tlc9slbn8!{-oP=1>jIRVj}==VU(?jp2{&>ERF?Tg((bT?wc5$-)q2 zwz#EH@JmZ7Wc1`M=WXVT-Nr4vlNCJT1CV<9W2dWy>dy$!o=lv zcH4@4;XpC>FZc=Ep&TMt4@={&s#@|56(_84C$D#^p38~rZ!0eH)myY&Bv5`rVVg`> zS2Kh&$i&Lhi}DqwAeU;=MJa4K6h%k)(WnL*sq*ht6RJ`BVEh)TQ8Cb9i|UXO8j;Ex*fU34(+=s7u@@{Q{1WJiRy*WV z^%(!i?Q^qVQ^o~*eu1P2L;Hw&XRWbMRtBAanmP~ZHr>Y8K2PB9@2#%f4JxS)mGh3Y z{K^uz{w(s8NuBY-CeqzsTE~euD=uMl-Cxd$g3h9TONIJ*a{wQLx7tNOk3iEyxRt9* z-a=%0t20aTIR2LFdp4eK)|9{06}b|$8?bx(A#j?$jvi0`I&)j|BL%uD>Qu|H%CEa0 z6`T9$bXJ}W?pKEw%NmtwwX3l^Y{X|g|AHDE<`*kso=rW7W~M9Xeq%)+263#vE#N^{ z&TF07D3^jc%Q18QcvnrarO?vb|99lJ6~e6M6}?%7Wd}?8=$RU6TcxZQ*6W2}cc>+$ 
zEtf?CCvU($Us#jZ@!;>o&lY6oUQ7J5lhIg+Nvm4BF-RwBvma!h4OK^l(U0a+{+dD8K;sX`9z;w&Z^)Hd$McVmp%b+yesNy@CsMR za`w3PC4ZqYc{~*YBzN49UC~va_PD%kIC!BhI^2Jj(jS^*)2oLkd zlq$PHA19x^&$MI=!2{|8iGutvfkh}T0ZGn=8#ixm3a(8j@Ac4N*G8UB^IKdj!VUSs zfEst|F@bI`Kh)JAzOU)dRe1oap=E$%w3FUPZXqW~2)Mh_PC|Cx_e42>zS2_gNmx(y zN4{!X#3yVlJzZd3clr#iSF^wSad*v&18TAmvC9%V7GQ#`g^T2Bk?H(dXZ3cVyk6ar`zV{%M)ewH@e# z;dUGtvLYIv77L?mY!Uk0($H6eJCwPd+N;_*;+>1iO)i!vdNU78Vogn~`<-sJGs0&} z{DQ1PgPweo88oNcmQJi?kj#0amvPILE1?B)SD9Bds=oK$KPt)4e+}&^t29r6b2UF2 zgm->S`CH$sPace89Nq?!qAnW|BM^@B&6ZXt4WNdm<0&9eRgIR{=IeFLCl#u~%T`3M z(Qg>YzS#KHPf(lE(~$c9=%I1<{+4leO_KvmJ$P3G+64er5RzQjtpN0gaQ7W0SatFVgN=( z8CbZCNldg%ARWpH)RK=?w|E1A7grlB$S0f>VLWCA0w&fVaCt;gE%jj#Rx1nbgNv4@ zoNR)ciJ%>G7eu~8sj6Iza??pf9Pl1C9!2>oZ4_0_GYeLGY__8|k#RVcQv7o&tdswt z^XR#$+;<+QMPc@Myu5f}7f^mY(hcEpB`S-Kk5vTJa!C>*YCPgKv_2w#^{%--5rXus z)}faD?lpT&e{b-g%<@vi8<58}aCtfX1B)+I*7;u9k&4eS^rISb6W83miy3r>Ta<<` zeo8qHuTmz31M!=R2Bh|H3j>MBY%&o<_~4ekxg+ZzqcW|Hi_xRkcj@yv$p zrEh?A-;x21-5<_X7Ibu4Tp&)>UZ3d4-Us;eyDu_LM!E8c<2wHGzfwAE_hS>8R8H8& zYdsBEn~a0-_+tgiwITQUv*rba)Ys{~ zx^u(U$PuqO>L=96np}S;$a-ksnzSGSYX>DB7Yjir(p{zktFB&tT_bl}jy%XsRug-L z;{RLZ5yD7le5>-?%}-8x5Gs`ejo2n)y#$d3ZFn0yBv0w2)Is&*zMXZ_HQp?5(F7iE z(;py?kA3$Jk4n z7@v<9!TERJKIxY))A3L%zugi_RR{BDqJ5u>4YmO8$Bw8Q#VG%=ZuqcXbk~L(yl2M4 z9j3Y&+PZ7wQeMc-uKXIcc2{aF$fsB__o-K4CQ}~=kb~V2wRTghzn`WWB3EPQP9PKWXPrSvp0X>or= z96^%9PI)T5uT8D=tQ+FhAH3Q0llmizIWvY3{D0!q9b=6_tlS<;hACs7->IPWxy$re z^}hnI``>+@v9$#R){jh+?1$(#&GICUmN|l6o8@ms^Tg;ZPjuebug@{xUO>~g>OI=& za#HL0@IglB#f|Vp5s9AD3VBMT)y)fKap1`~`X5Ux;y=*qja*eVv-L3mwr*fsWiyKS zCkAv}C4p3Vhj_JgQX+rh)mj5B_T(q(_n@HNhz5g4$O0EVNRM3>;o|?Z07fVXaaqd@ z!n>44Sk+aFsbY8}P6zVfL=6JouRIegUNXi$f3}dnzuUHq{o^uW?0Q(L<8JiGn#Y+c z9#Iw0f{G{~3`VE=AwKRiQj52v`g)ya;K4UiP-Uw(7qh zy)-5yohoj0TlPVjPFikxjCJadk9VH1lx06n(&rA!tNfS@+bY`D_!j+&EpqRWv5kR?9^}FIdxvr|WB_@RlH25)hNeVV|Fw;W zJI?YcL!LHv$T}qci3%G59Otz?lO6xJ$&GCDybG%~ke_Wge|l{VArVsV(Lbi?3Vmuh z7jHm5o?W%PFvY9vVSlj1?xKY78z!x^#f(MEpBoflYkw1nCDD4LQ}lEd>}}_fLJSx< zTa${6+tDjH_zO?-BH55E3&Ypq`u?dWX`O+k9w;wm*u+5f{_#2N)lsMNTD7llgK)$F zATE?bIN@=@f26dgBNC52qGP959C(P$@h+n*%YH(fu+sH(bTh#n8?fv+6gYL#g&@je zbtwBT;}hN>yaBtzhou$lVG!$`;0FFHjch!j8wSCmSQG>tBeyx!8liPsMle(u>YMBg^S=M?txI`fN zhl#cxr^@DS(*;Gh4TIR3UUjdb9>3yRmH%1XxwxKOh54?4wR-_vy@~L}l^`*O*kmjE zWgw}7f-xKim6v45tnhFBwc1u+P>SMR-&Nj4KVwq<2@&P^*puPV))U;L zdcZCLIz0fcf;QF@`j91UwNvCN7h)#m+oACuM+h1mxEw`Z-GmY1DBYHyr-o)pvtAjH zZalZorSt(Jl~dls8p*>Mdjqvpltm;#h2#p&s8G><``x1?##4DONobg$*4>^O(D81V zY4;?=!})3Tm*!YDIRMO^MvM*{-vZp=Qjco@5@*JsBu+sD^I>HY?K_75N7!5TMfryR zx&uf_gM`$8bVx`y0uq9t@{ErDH%+>28MZ?jE{^?tx+0{MXuR?T7mn z+|TaMb$;vkn!P3yL=xxBQUF(~I%!M(8kp2ZdhlDXA;pYH09Yg62@*(qIT3}owD7%l#mZp~az z5jf7lyOarP81e`5s^t~Uz#93Sp8e|V+Tj-YkbutTez~d5Ik#{Ry>V-jK>Uwf+| zA*WODAIM@js$~d*Jc4}T-@9#1COPovfbo&ACes4D2~(8Dvrzy!BaGM z&-v>hqqkLVq$1jeH0!2+7?|trv;`d@TB3cIeid3REu7BIXxOTc9K$=tGUIM+D_t*D zJl)nBzb4o)(yy9Z+AI(BNw7)ES@yA)?YoXqfG-2P(*AelX(~oYVm@r_K{K}5DF{Zh zMV*eCcVhs5!dnwXx$h~Q1AYU0qrQLx{S{*MTk60*4TmX-CmFY3HOSZlF1$>DbkUx(wSh zq`J8fD{e{>$iM#D&>^)^%Ubhdo8S0l?yjW!$Fc$j4%XxiOjP{@^sd3muxHirB2f47 zaQjK;{8Q@j4BDW6NCZeJIBdmxSkwkS#CG)m(i0So!DsojY?^16_%7{ToBg}N4A1kV zSPZ?p#N-R7SVaaYz)VaNr>bh)h=kaF=a0{n_Nqcm24&se%tDos0`1d2c`5D;m`%d4 zKdl~GIIfP_|5n`&>w-V0j@wCk4Z+(aV0r2`E|Fc^IC6{s9#ucw(xiO`cPqKRBg2cv zFXP1gftZo`^sxQd4tk1*6@z>ywxO`Sr7Qb_leQLw-^poxlPqmHp>J{8lEG?0vi1|v z6Lfrh(`Eto`s6I;tuV5W-R3OGSv!%Fyl>Oik~Ob=2{sObcRwCe`y!g^$&Hsk4m-F? 
z=ON^FPZOX!Fbk65WH_CDr@nkcVP)*$ZlID z#aAbVHZpq7RZ#I)n?aZiImlSde;h4yCGDQ{Em1MU_uq^zl5|B6jCPwDAO5>9hWZYE z^PCNxTq3Dg=G&miEsmhV(_2cDv1~2xNNd2f*(`feUWtKwx(BVdC6M5 znqovrKvWeYyiHHIt`RNC%yB&xbO(+bUTW9F-T z+psfNAAKqJ2tp3P@^4`Ku)iI~;;1SuWEVK9qz764y@O=kYj|79Ur3ANkP)p$#Vvr&y5Q&L#i4MX$|xwDanvL8Ese z{OAlUO>@Xi7ctg*u{=XV(x<1JHTEbDhCdG%gSz&3W>nf?ki#RF@}%JVm!EG?Sg(BF zuuIgxF_|) z5eHcMdBl87Vly)OS&&>O3f(Sp7p1ax+32h**Y0l+%54|}EJTwT|ZT`R;vx%P8Fp!f6#9V&P`g~=s~s^`_l z+O#80ZeiSZ<~7G4-DEOnHpZ8)w^6+=3fB|a{_R`oBNCS^GI$z4#NiW-yq?&f6ukv+ z7O4KNUH(m(6%rr7mn)D(dmPFV+zezkLffj)Y_Q+`k7s6&L671TkT*9Z(wjUG-SSYW z&$5SMFqX06)AQA{4mi4fTLk3OXxw;v1%lKnTG9P?=+#&^%`aw9^k zQF?da=3)0``!&DVoxmPhQihrp^s#DPi(ewOx4AHUK%Wh=66Cr%z{(?y7{;y|-SR{c ztZ?;MBCgs7xCPz&67uCASupUvb?0Pm-ksr+i1MwGUjV3?2z>6hSSg!>OpWZYG+aIh zlA4+Yf~3*9^KXBfG0mlKFHB!`zgiaT{KqL@!~V+atG?l~_)0y{<4xA(UZK54knw~ah2Y+jh$BYg`;`Q~uYc=Chxbl# zRS~+z<+K9a9C8ml2X3+@88TwTT|VWOM1?mu@%#=E#+6+BRtzJ6{~n)b`=4_4*OC_G zsPfI#eE`BXK0}25Og7_@UPo19D}L|2`Am0xTD_Ozd&xGdrusSaSrpBs7I+B+LTetdQ8zS0Z1#xg>sJctb~=Nz`?C<|%MLqU+-MmY=bbGIc%Pi1PgKc;eftO5 za_TFg@>{aj$07)HOC5@oemFXl1OKl?O77inlOJ$L=~Fb&XFbu)>rpit?z>C(!?O8g zA9)PAtBoN!UV-^hjr31gsyN<{9GaQgR?InBncV2Yt9N;Iz##d+7?i0aixmUFHIT0> zQyL8fFtdEX!xOXUqr7>kcucFE*kTI7AJ)13W?b2e>n&ybjhi8+9zD;OsA#CHR)Olu) zZFFQ>!e%7@XV$6^=bS3SmuHs~8$yBzRZh^d2zySPx=u@I5*+c`aN$zpTUQkEstcM- zyq{$f)%im1Zc<1**pm&g6giO=Mt^8X6@V2bB1}_u#_)#_dk;d9Ab^2L0rAOtfwlFV zN%vV&lMD)XSoDR7pivPHptVYLpc~6~Q7>6uA#!9Povh^}*^mqBzj?defo;Lw(c}rdV z5jPnLv+u>)yaZnJDf0VLi^w8w9>-N_G@oivY=}$d+qTE?oyc|R*)o!n^OzVeQA_4# zBO$%?uHH{zcy2U9ohrHiAG`NXlZ95={%Uw^DW^O}bf<6IOFCRe!%C|OvjGpysldZr z>QQFCtgN1=t&(ZipSd~T3g?ub09l_+(TyCxUu5pFC3Kj+AhNEcddz0$3;Rj6QF?+N z8MXO;!oozR6Ey%N^Pws*|!cT^Lp9p3b(>74h$iv81bdsXZix0NZJp*nBIIrwOH z*D7u)Qb4|ah|7cAn};DCE$>6^i>E2s#sRo&5akT&1WACSHwfB?iJ}?M=HjY?X~BN^kE&{J z1MMwC)E7;Cy@KC^q&dq>X1n*lVeCaGDH)#`Ouf*6*FrOqA?ztr8A7o6XDMBR`1NS4 zZ1wY2M|%PbMgynZm5xn?r5%zDos7%&vs;zE?%&Ikho^&AlCho#^OSyA+ zU9wDaN0pRp|MW5&PY+HNqZnQxt!xa7Rnt%j-WqD)S{)V``H*}}v=evIz?5e*X^Jiq zPUfQ=<0`pA#mptqFxX`D@F&bFLW1CGvUr~VV5mxcV)YotQM=W|wPy~$e<;bD*vyN0 z4b;zg5q~tiNRU~?0YJwTh5RfOw)D-$w2Ty8Bx>DP=*^z4Vo8I$>6$76$Kyqbp&~UNH1jO1umEZfYHR$~}G473`hn(}`OsD+XCnVI6Bv zHoE82t0b;`(ZhJduuC+tbJDn55_UxsUhHGqxJ3`E#V?Uexrq2l+)orJ4{T~o80he* z0rca}@dc>}re0I_Nm6Fq@+Ea{mDByW#V>G)8aZ}wD!5f*<)m$g?Z{!!LMViM%9Q*u zH2#Eem#H>1t?rM#%@WqyZhe2PEKODMrI8>4a#+n_p;9#zVlW35t){M0~UvyOS-Mi^yYqTK~j1~j)%P_HO=G1eCLo`Bo zp$r52SCL=-0bk52wYS%6A=+M@GO94L@qCLI)Mz8e^-#?Cg}|Y)TIPXvFSe{AyKC}i zT1~f?;)a?#+BYu4Se$<9K052UIAGAZ-g)E(SX6@3(apUUVZ#|$id`~lhIl?Nxe|HslSa=W z38Wr7A>m|eZ9#ZTs3%!b@LIQJk){Dt@EqzBvtD%^Qo~YK^-&0rNwwkS%=lj=U%WQ2i*H@!F z{BvNApu%$KAfzO7@^ae?^ zqJ<;+OH%;n+BodCo|t>MS<%%urNZ5vFp10_ctuL#+p?Ro)J_u?=dmB++)fR->v=HH zqAneKpv!fL{SDdHPt5mMBq|FUaUT%fWtLv`g-e|hE+WMyH)+5WOxPKVYqRNn8pVFy z#K{Y->^Zz@W-(!U&9oa5hNH&oW<2myv7K1U;S3HF3?{x0O@^LT11EiFF%Wb*KLio- zhkSTJ6~2D;cr@Zt(W5fvt2-mJ=d!&SXT%;3d1H; zu4N#ae;`1cx~??BIbPRh_E40x9O{gR8JTV>v9F=NHc^E=dgUI&gxN5oTV?FS$Yh zrM+1voA2X8KYN~+F8w*}+3{}hr0YQm^ofbS)ugrNr01$`sAJs#gx-Y1=FcZ+es$d% z-C)w(XP1M5hM**dqvwl&g;gX6>EnrQTq?{@&1)Uxc2w?@?f*NGbz+%XCBz5ICMiYc=@~Dt*JR)?qp#?&xo;mN_vZ`NFeD zFUXSAg>OyiB(O`+B@KHM|^X)f=BQGPM%Fh3l_XoL=PF@${)0QcPK z>ydRQc_@#6Z6KgADD2)|#0`+rsH4EOoAq)$w*-XXl*Kp<8i=*IwCF@u+N`~$cN@6b zO(LtNqju_}v^$>*3d`Vn8D_2KGcM;Jq0Lw$=_p|lai$5Odk`eK_mr91Vg_?re;z5q z90b0-=`nV&F5stS@tlE^E*#Zs;w_|l>iiWKQ!&(Q(Y5wDF8VtY1%Q3DCe`}bk%HByDE6bbU1C2xL??*C4GaF{!-zfm_b4OR_Eb1a+52*7%qPO714&#@g8-cA~;@7 zN0~Xxdkc2aL%lbW5qPvNa=9QFe|z?}-{2tF{5DashjGe(br-*bT2`f&dH~O8Dnik8 zvc-u_ao?L6O6$#NllyAIh~AYq`|yS8@B-wt?=Qab$39!}uECFB?hUcd6Z6y0+=QpH 
[base85-encoded GIT binary patch payload elided — not representable as editable text]

literal 154300

[base85-encoded GIT binary patch payload elided]
z@nw587z1kWi^s7`a&r6srTB8-`#agty(i01!Inl_ABxiHsbz9Md)r6+3xy?CiehS= zLc}Ix{3~5(%?H$NFd9^ipBMTSdRYyZB%%$B<4o;mczGg}aWl<12kKj?9)(`Ks0eNM zehUx*7Z{^{eyMkLYO6F+N_}I{a*>7h4;D|p+l}!* zO=s{&_oR)>5MbonJCobrjNwl=#6^{W4bmX3Q-)>=l5Dy-c7X~BZ1)J{n@8SURac79 z+(EpXr|-^WZH^4324CO_jCA|=g6HI61IBfAo6dobE}2C}VaJQF<7?YSFRK`ddYVYEmT;-uOx$$ z#UtX>eEYTHxzAVv(DETL8kJMcTD3N1b0%fe01c;SJXZG~Al0f-BqrC56^z9P0JXme z{k3M0f497)AYupt0tFaw4LA8gLlNa!!b@I2-N{QBEVHOZLhalmP-)cRUVYBVI#%O!n>;^HK8 zfJxm`pyb(g(Z1-(llZL2S9PUj+qZFRPE_O#f5DbD@!nri?E=8hQ)cvqldR;^N}v5q zOTD&y1AViu(QS_FoZ;I`;dD|963gKkC_rIt(*6uWT5QkGiMiNhChS$-tGo+J;D&Km zO0>;!N8p|I0q~UnI0#(NG2Xmc+Gg`b@xt7iz+`%Tw$3P-&-!C}Y!F(L^9K-B6TYXz z2&9Z0G*UQ#@W!7ryUe%fUpy@{nks{T)2klAEGs1|Tw61sM&FFCDY(17ld`EUGZk!p zSEQ!6Cl`b-4al8}`+dSiDqVq`2GaDbsZs%^T;5KM>m#plQn1C!Z53`Rb3W`4%oL0{ z>HuL+ZwdV+^dOg&-UJhVYs5`J|_MsHVR^XEXiUy*!29Da&u&!MWji1HZnVd# zBz`~B_)RRFqH+HG<G*S{MEE z^oONq6C0;J~aDrY_-7Rsgyjx`QF?$YYBro$+kI z$6)FZ+c$S%gG(RekF8ID8G~lVdKa9tC82E-)Qdt1r~y*-hwjW#$Y ziB$Rifx`0-7;t$%R;84vub9G*#XToJ6<~X6$ z+wJE#YBqCGR?YAd!LnvYxVkr8+Nm+sd^-^@me6s($2ex*=&{?aSWp}J^zy?$9TWVY zE6y-m=A3`mk4?2LdPzhXri$%faFfaml#s`duqK9^1CE!c10H=z8xV@lkM zbvQMc2R~~r2W}nH>AqJ`4+m7KL1|1b{EMxt#YP7P{a40SA?w&LHR{eaAkb2$lps*m zby|rMD1n6h5LbzI&PkZBs!GxofLBbthMk{gDcDRVwje&G^fq|mP|=GvMFE(_@4?i& zm-2K8A`!x~tqV3QFM&J!`sbzqeV;RmE}x30n^NB6!P%R(FS0(b)V?39$Hz~%%-CEF zL_Y^(x0GM+i%Uovw?aJT$%qxNTIC+>3Yf|Mo?Jgw71eVUCJsBJ6dfnSbx$c@&?q|d z8(m4>G$Je6b%vq6@4k#%`+NFH`V!~0{nNP<+YX<+XRb2rgJtA_si`=c#Xmv8qamS0 ztYh4i4~(VOh1$gZAPCGZdg*M{$*0AhAn~!!+E1`BT$Ji?R-yfs#O?&Cl9MKLe~_f^ zT$)$oevKvy;P*AMYJPlgr1>@&>m@QIj zV%g~?LgaW@Kg#i4D)2bVz~|F$+iz|L^R4zOwBXM>TG_jhu=@5`#(&!cxJ8eNYioHK zTIUPhnF~3jSLd1^{zK;QnCzR!<1NKYE$v<*ZOk(|Xt#=T`oWmUeTctQHWYtD;9sok zp7yEXO1LkeHLr+G%j>;ZUf)tanCn)a6x>ic{f?_3;}@8QiuR^UH$i1O>vkpCk5f64bY^UH8qozL&|8C zQNoLvWT3%S#nX97&K=~uea`<0t}m>q)!EA;=)#IvLDS-W9gYH9?AxEEO@duSbQl(g z^%GQ6tNfXIDa+bp#EHENu-dlME%V8og4eJ^W3Lpy47s&{Gx8A5TA&S)xi*NS=h}~Z z>p)?`DDNU3L$5w1&HolpsK}dIj~bp9TvBgV-Gz4zPF<_=1od@V4YGQnNt>`fx>Q zA>Qs7osxhe?DQ>rGmGHgl~bQ`#2%Xfv+;8Q>KQ_j=l`@E^HGZYKY{iC)p8^^Kli|} zc#^?ld+B&l{{ER>FRzJ`$Bbc4vK$9Q26I~`-9ZHn27{G&}(@vZTa zI^CxzKCV0bxIeL8Y3s2rKfRe*dJt^u6F24mwo(>fszo0}qE-M4ZWW zd;FQfj@WXgc%UzC!tow1D*5K7Joa%4$?Tc0S5CFJrupo-4!X12x{_cbt zYe=oU1V!u7<^Fn)w(N>X3@54-OYV|dID^(UYwsTvBv7vvrCMMxlLHf#=b8u(^fp`} zi!DI%+Hvsd11jdc>W0b5LY0E=76UQR?%Vcy6Das6AKOufEXU`gWi0vmN0b3+5Dp!5 zUD@_r+e@xlbNd00^+&TeY#F?MD%La+|AsOMZe3O2qMTeyN7%?UJz2$=FUHvYbOt?> zk|>=|IX7mi+&cIOZt;XCi!F+CIXixPs_~rUZcy8(5`YrgD7EaY|2oi>ZFXJ`$h*61)+i>II58FT)b6PsLGyA~T zY%u9L3HJO7k6z0Nd07W`c1vRNm4V=qX;tV08|OquB^v$b(o8!m;B(TztLhVxsUH=?fL_hTzBasp=%-T2Ik4c8O+eNQ2E0Z$V9q=`md#* zq3OlkdcL)kLxp#tkgIVy=+b31j?p(0sK|1x#sa@*7h&!PoOF96jgfES#*9uKPZZ>h8ya{Z{If#orOH#6PL zn0vH^Sn-qjWExw;dn4}uqvmf~to#nK4Vp|XhzaPOkxaX;d*kdm(K z2f>FO1OMQh{X;q>Sg7%KZ@2oxU4CJ8zAVKXy5}*yy{LG0dOyYC=ldf;t>@iPJN5W= z)~7M7ur#R7xop!r?sO7At!6G>DsjD7?zFE`OEBp8Q@Ss3Ja#bTKbdudZY}SG z9H=Y$ktTMpHYkEYkZk9Y?w+fSBSjtrxwzO^)iuknE=o{~^Hj>#4~(fTzJl{8hf8tO z{({Gp&bpe}$(!gv*~mz8n+V6JxCs^y^fm}kWCxVl z9Rqz<{OE_8<6YX;A^;w5tjLXBSQxcDm|hn|)Rrqux7vp3!#<)p;3-S_d&f}57598h z*ECVvPvYI2+#MMfVyjza2veEDUUzi@JTfJ*-rej>o8G&~mGm#Jj#rq>p`5B8I*y9H zMSXR_adKGN@40uLzpME?EVX}JSn?Kn>(E!vv~O&VrBB#DT=fR%K3B>1MEc&yvGnhv zFD$N98#;;d|El@t)B}%$wlu{su{4#Zyd{Ijla5s1R`T!@cL06gez=wb6NqrC{EuQpaYGzm!9uNJiM(>#?C^G_mw})6>^W4Qy}T zR{kADmdbU1eGsVCex@rIiV0TLLPnDW#jXN$BwlZ*O_yY@(A|}gaho^+^!~om%e3uyYz$;19n^i-(+AjE&tTW)zu3s-dZOG zNMER+xbm|Qg|{4XlLiUMr0Mh2jf=avUu_>Xaksog%rU;d{zc;3QQoI<(zUi#Ta_3l z#~_brOc2aA5tXMGt>r8k1n_|ITOWQA+2w=RIeWMEd764Ox|MjFb(hloKKKh#3;t08 
z=BXHkuTystp}kLh&Q1NB7@q~7*kJN#^o1(rAgb7BZ7lq|Cy#n#>BYC>)}OJKoIazr zWbCT4hBcZbm~r?p16$UiO-mbmobg2C>A)tdp@S;|ZoAtUeOg8YIU}_Rued#i{C%LE zMn2?;yNWM2NO|R5xO%9dBk)Z-Y&-CO1MBlM)XhQa)tBi`Zu&5HF3=daKDhTV%xmmj zGg+~P(9U!f5ynZMYH{XKyybIVcUQCrS0@UZe%m?mg>&O24*!b5HOyRw`Vo9(8%yq$ zhs_775S+go(>)lJIhN5WC1-1Y>pT1!CJ0QRh)^A4)EcMSr{r#f8Uq+DaWb~^esS*( z8sDB7SA$=RdEp&@x+<}%IA@{42~olXU5yJ+;wC_42rXq4fj7M#tPW z@~dcI+3(N4Fuy@jg9R5bps{bo2JJ_%2+Sg zG2`eQv*BG?WWQmm#~g92%CqOuf4IC)Q@h>Wo3I(-GQP+9m{p^YeMnBu%A`}_t{j^$ z4iWWF%ZD!JsJNXV5aR=~g(sD1?`vAte^^HdQb&CZD2Fp)yzuTf)_-Lev7);&lhTaGR(Hu2Cub>KA|K!{s4y3gmP~#vjPS;aZeGfv|uTQPK{6kR!PE1HeyZk#m0(y0ojeBJS{FcAW zyp?{y8&{OPu=N=_3_vD75^jUzhD7Ek_vLzXl%CtZT2Ms`q}61 zioz>wBp5h8nSG_``pMq!=#Jlc480})AEmtcE}z{0`4_^7=b{Tw%>Z)?cql>4l44{|)6Z?w~o{JA;Z<`ZZ|{gcHN(xZ**r`n%A z4417p9IOP#D`-e5{Zd=~WW#rFKIfjvOiol1ynrN_AgV{>79BXLKY$_h* zjOVt;c1ZkROuc6~n{WKMZPct$RjU*&MN4h9SFN^YQG08}-g_iQQMF5Lq7<#Is4Yfq zN$gd7#oi<#vJk{3<1 z!a6rz1V()Fm@x_X?XPP`7sibZ;4J11H*vY07 z#NV7;$hx~7S68K5<{SM!AKl7i2b{cpT(Z{>Aq^u|7z^B0GU-CoO74S|5)rvVaxS(`QhWYkb)ugwUq0KET*Ri$p_>E(W zGcz}kdTN>kB#eUccuT0GDI?55qdU7tJx`YXfa1|uf!M?EZ@=_a1e3@mV-aE$6qFx~ zo({#b7~6B85B(4~1-s@hQ=ebE$}|hy_36>B7lAFWgAqN8yOgh1e9Oz_|1s(P+N0oO zv3$w+B0i`YH(vZy^tv@q+b&^w1vB|)X9$E#;o@iOG(M@cfA6CGWOusyMu%b;{d~bH~x=%xe{;EeP!Ib=fdW zD_VZv2VRJ3%+W+*VY9d^EYl)9PrP4rQyw*?I`FbNAeE4$-f(*X4caD4v+A$R=H<9S zEK>hIS@U5ewZe(^CWZulN1($tV{2RhzegQ72nl60m@0ANR?l0m<=P8FgWc~m-NZ+) zLTm355IPNVv%1lpBxe>|x30$jiOM-Df#irP;=FK)v~}rhsI85)oX1tP70YVw#>vbE zN%U-858;a4RKc(dRWGg(IoAL5^*53%FHIMAzAoclWg|CHs_N01u@(I6h3IO&=G}ck zqe`l7`0Qpov!2@BAEp)d+s?abs)7jK<1y~jKBnd>m4Dpq=VY-kl*qq_u(Ghua73Q= z5dXo@Bd&smz&>MOOj$q#;o6Q#)u8` z6ZA1Uj52`(m5a+i$Nu^eS?qaB+uH)hG{O#jZl%>pW;}|Xy0_T-FJ=oY5j+@}<`TmHiw`S^fhVd$k!%Be-~-vyow zjm>*4#ZDfKO(hROBFXGI%ReH7cFrI>PE#wSs>*=IfaH{xE z@e5Ny8nQqN!mz-y9p?bO^GYD*ogKcll_MA?1N{<3@+Rm7t9{W(fVZOJRJ{jJrLR2E z_@ z`fqm9f2_UcG;}uy%=T%-sz>o%2jrmSf^wdZ%@)(i8sg`=?YagB-+8k|@A#ASk1H5* zp@6L7G48e10H0nHf4;utr8m#>I45{9gDLr9)2J=?bIpt35BwMrYYYrs9^l~Fq;Yf% zhBIov4>aXDT5MN@vqZZOHYpX@W5-v|n3;|$dGUm|yqzphO93+c%X?3Y5lVA@f7211 zQtBiDQu;-m2PgojxdhpJ*Ijt`$V2Rnx(Sv+WMBI|U!(P^Our&Tk9h(EUENtVnLm>j zq;j@lM5j0RD$KQ0#<1D2!34RJQ;y=m&nM(5hh;9I4XAD?S^OX4r%9lV5J-Pyy)yda zeW>3z7HTx52>znm0^dfc|5T~Th2dhfsmenq6sEg+nzs?C5x;d|p{xg)9DRN3tZPIL zMSWCwAU8H*TPO(@E9oQYXU_){@3EL`$l=pzk2`2qVs>41^^G4V!m(trn(WWS=VA!3?`Ei_b)TM-%eQy~J5+F z#CFW#f$D%|%Q-aRADlL^TZrGE1r)VxaLj8dbmA|0|4nBaN_a(XCA%BwJu^lNjPZ44 zw-*p#Q+oFwjn3~AE{X;1Wa3>phHstqI8<2Vyt=n3kmcpqUe2>EDrO%baadAx2-$#j z%B*$K(BoP^Q|WN0Bey?@)bo11s}Jq}1KJ-otj^nW;}dWIRPX1XH+Q-ANx5s`)A&l^ zHAEdXpnhnB44{J}0$eq4lDFH}SJxuo=SXlYWYZ>%E^#*p`?%1ZSa{;g(f1o}qqf+Y z{COUs{&6Gf_K%MU@iQhu#C%_q!2P-W(D}c1$(8`bD?vvA5yH*ZWe(nlf zU~9|vh*9~f+2;9_J_t;RToI%cu?d#ofZ8pzVIzDyOoXE8GFy40$Za)i4s{)%SWE=*UfKBx#0+{km`;w6vQgr*;x`s(5o(O6@k zva&S`bQphH12!Q_Qp543b)+!E^Gpb+9c;&4>^0*@6&BlVj(TJqqz}%<-9Fr0`YK?f zpz+3dd$=lx=|5ec3(&eOL9?(g;>$e=YinE(OOz>0H^S%;gzRAnQk-E@zc2l7L%cVs z-lO1D?bH4Uy>Djl<-t($)&b&@sTx9nmQ;?+dqmM)EBKs2m>$InVntA!K0CRPe>90py{sv4i~^4~uU3@c-&E)AHs_RR>E5+v1br zTSnht?;Q87{|a2#F1u9bQ!tfd$oEvKAn_Pk+GsQ6Z?hI3DR8-4ry>5Qp9em}8SdV3 zq#_1W>`qh=rHYL^VwqBRUHUJ+-E%-3lUXG-)d;HS2^FzaU|fDBO5C4+pZ33 zL7Dl!HTeJBc5ZHbGLyFD4Bd=1k<`fgyKqXj7rXDCFHxG>K2W$xpfMqL4zrB4*4;47 zgZRXy0W5ppO5Z11wbp$LJ*G>*{;%0S_V^?>VW%TB6FK!|+>?f&K#%s6EJ(3o`0JW;reC(+$uTyWIu43%h zojo}2MyB_oV12_;;Y{~~h2+yu*3LFE2c!x1H~dXt=j2%&N=FYyhjvr~4q`8O?j39o zP}bit@w#M#tNI|Oh66M+)i;IbJ=@XiYqZ^gOv_%oGhbFy+X0VrJa_WN)YMTeDof6d zK8$mFl*maC(RT8j2#SF&T((mk^KV&tT`UF(=`4pSw)*A;tU8aYi`5eUafTTf+?Hjg z&1Uz(lwPNE(Len!9>0m?l(R4IZ0w@e(pE$q5Z^_zD}8g%h)wF6p7d3y!E+JxT)jb* 
zVCfZ+A9Nf=kEve9Ot;w}Fv0*`#$*+#Y+hHaagIr4)AdQ-aX<0I9TwO|$v^xC=B#7h zRJP_8(+7$d!bT9U3Dc60=uD1l>eI7q-j5O@50e9`MrL;Oq8dj`r{Mo7?e?qUrYEC4 z|0(U)r`QU75ZTlJRob6qcZo>_&vLwU!&85-b!*pltRpVPiOgPmzU~&x-KIYJto;#p z-(}I^)p6#x*2Y%pr;DYqm}m+z>&V1jacjf+#A23F%vy|(*_GC#c5>NfFlNo?`^{Fe zFdC7HC4Qwpy(c=bZN=n++7AC5hkjRT5B7b?!k2j-HP+{Up+bX#sbsVL7nTV{729^4 zRD6l_bBgU1FfOp({)2|m5-PMVh09y&4v`PGmi}K%KbL=<$nP*?a8dBftQf%ya*1nD zDClGdm0p;?l2Km@7t^0QUK)vF!c5cVV`aM@y%_nv^P4TwvUB@#zJAG(JLk!UvW2VQe&|nAa5cs*zM&V9*+lDFs)?YvN!$DGcrclfVBw{hMizhiyi?O5}p!5Lg zUTiN={w$4|*j!7>Rid?eW>P|xHRtr?v6r@bEy{Q#Z#2M>ojQ`K=(u$aHsFHcwE*4B zaExjItE>sV{}pJBR!yJwuDJVX0~-XkhvtQFz^rY}I&{=9T1N3}&ge?fe0sbz;65~0 zK494u!Isj)M1KP~?8$^YUgkFm<8&Hr5qS!lb)T*O57cwGa{EFuFB>p(c=6FeQ*Hv_ zv#xD|;Tx5YaOO>!R%Q`i>UqitRJPPstME1(Tvn7ukIk z$8)u{TKD>iXz}C+yTqS0`8-dKLt=SuNBtN(1aIuhdb1D=E1U3;6EEOQPwrh;c@BKT z#JNTDlqh$_wxFg9qKxWliBG%w7#aJ2^!Bk)paQx+j{vY)fB!b5qdpn@)lZ=x;Vl0C z_1FD^74bAtbO`B0>^{xY7p-CNwr3Kb?!K+}`sJX2->1o6eAEP@WEQ@lA#=kDrRbFT z|K(-<5|je$D&cNeWQpKX&j&f-njJ{cKGHdgE_Xrn}iZMLVQmJQMRzh zmy-wgtv>Ae+_`tldE|1+W_qR3(}OBC?4oH$y}@*o0gHTNmOj(jU@UYIFVciNIf8fJ;oBSCds%}_A zBisNjbsBl+E-@A7Lr-t<{N0N6hhSOB`8Uf_`EYQoV#ndo|KNF9A-DSI=r#_R$am(( zz&p#BX9KrV`#~4=Nr$@r>=|#^OvKxv`~Q^EGY2}h6DRt@_DW^Jl?f*oQ)(LBtyh*f zkC>VBB|g5}nv8JfOka(8=n-;7N26=gN3i(eIhG)NJB7%n0A)a~k>(7UgG#D%2E!D8 zw~&MQ{6T1(+y9~S{L9%9Imb7*!+UZdMk5?el&nxB0X8O#Z22fM4pLP*Px(l=9?pXpO8WW{YTHZ#%Hw$p^q>)_@7SmDjx{pccOaCS2zH~YFYB1VX zHTSlA!qE;loXk5s4h84@A3i@dA0P5^8%6MiivSh$p$=dJ()!axP*XGGQ0Su7XLy+m zX3}kY(=9S9bi+F*U%(soW2#BhOHU8i{ zwp#SjYTZs zbu9;1h+OsNdE%l7)LEf}QaYaFI+$z7)O zBX&SYi1cyV5U~kRqe@wr=B<)-5KwEHWS~pmr~e|YZ(Lk{;V?EOlh%2L1)9mY4AZ?u z^@+q+Ed*1I-hbC=@%3anPK%{X7DwY*itYK5=O5)1ilVB*B<7zvztaLj0m36ylI#TY zXS;+p-mQ>vJZ~rn#BF!gZ3;tI`t8m3vf8G}v>oERTE#>=XEO|Sfk6n&JX|d{H*z0A z5cQZs5e5N-?#?Tbbfa@R`%wl}kXjc2eBAio9Qh8-Y;V-SO5(G0F;L>I(jFNGr;R-= z20%Z(OMaMQ!z`l~4uj60ntYTBsNpI8;OQ;A>M3Ew{L+s3#GlA2Lu2@6;H=|2eh$M|&nbQW`M-o*?UKX7$= zn6Ug)E8`8WHw?uTze<+>iuUs_zjlX(*dz6`n_AvNnXyf{9J*=uZad)H!+mx25*ilo z(o+!YuVXLT5bO(|7eSF4=xfC?+3})qO-T z+*?*#c(neZJF4_q-t!hdS2l#fev~yi>jlfj>T&??7SE?OW*(e2?G!AkhrhkE+Ct^G zJCkoked8~e-n()XELv2&`bP@mt1RJyj4q1iG$7l_$r0pVXHCx`Z4H3+5M?Uv+!=fH zGDJtm#o?jBjj>kmyp@lska9A}xrG;mc2i1J+z_wX&5pZbh3NmU%dRU#g>wuiUdg=c z{D`Zu}Tq)B2ZjHE7vwQq&!Dd2@-jVt~l#RwF z_~FQ;O{c~-A%Tn-phvakyAb}C&R2Q_dj|z$d*403Aq(|ouO?RKL2rTWM>OB=Ut1Ny z*H1)6suTCYK10%Ft{>^&0j4a{lWM86n0P;I6%tU(z{%UpuzotdUEC z{A5@v5G*Gn(7e7tfGx*8fQplOPM$uLw3}O{+nAHZX0EO@!j+PYA9WQ@s!^RQx*I=w zQBE-(Z6|BBCys=_oqNXUsD-Ex6TY{yt5W2Xc3K$vhM`#z-i=hl(Cj6`(2)nEVtR{F1U}*fel56Y@0qQ|zop8reJbfZX@A zzs>*Pi!(5MOExo$@wUmG>EsWu$X)lyNNwhJMgC-r=Ea!Y`}gGEWwM=>{2i5KHlZ^9 zvZFW;_Xe6IzkW5DQtlLJ9FL=KnR@35g%j|W&VN|!v}v>?ye|J72#cmEr~;Ti8tZzd zVEo?j%>eRfM0nTW27y1Q0b#ckvHRAbOD6pKtTnzKbh=-IljGO}T_W%UHISKWA&D|1 z)9nX=*m(}T78rEB4-TBdfIoCzBAan|R4fF4iNtR}@Yf-U1p%Q55S|u~Ehf8yV2}{> zr6m6H)|kk}Ux7i`ssa4PzJ~-p7hn2re-vLl0HL4B#P3i;rA%PsX$2evw~9dII@`iI zfiyk-^nQX;Q22fT6{$?UMrNOz8RzFIY#2fP$im=8I{(G)@@HS6gn+SKt+NpC1or%c zE|SV4(Yt49yO(64KR)c>H@YdUai8B$VZN49^xa4}5Iszmn%tDa9qs-}6ji_+pj5hg zVnKhEy}unlUEy_JQ~W)ftonx*KgrI1Y3tswZ;d^7J;Or&&0G8EQjr^7s!D4 zHy~t0DN$RQ15Nw)-p}Qwj=ai;u;%%h&D%UUw^JRUoM|w*w766^Xv`$)U9m{x{=k)+ zYYpqJ&kPKe(}e7{Kloqt)^pU!K!S=%FwK)>IIXzW0m6fMrV=-ybjsa z2HbbZ9}^D^y(aDFPPw7lHw}T{k7ZR>@3dH5Nw2gat@cgf!7~fTH+Zm5=b9nrZ=?Ez za%ENYOhJaqQ3%R=-ImrwM`oQcCt3bNwOxd5(t`$L{uj?dwnBw&;sLS+>|g`_5#gv~ zJa|IW)8q?~Q%P(6wxiu#KcAbZsO&2Zk;>~c!*%{p!*cYZ{go1+W zhf6QJ-AAL7&yoHEDN^=UlDtJEYihc}bM5|f>;?GJn!u^8vVMwD20|klKq}dH?c&a{ zRbGBkfQ~#@!cJjbgU5lSm=Bm;y8X5DZ(7d7{AsrH9JnGuB1OR?RQm-YL(W2k1dpRA_r^X!z^2>L?zy6k8zi>2gsE 
zjJ#lx-QrV?sLil+S@`I=vY3^0Mch87Wjs%9&LXRN;;rshK$;Qkag|;1%-bOYyWSVh z9Inlip|8G5`Ut1j&eNmkK8K6&TGkSC`CHm;91!&{o}T{g8hn@D4kae@FqnQ^{`iU$ z<1-!?dqchcw^z42x9`^i_zo-n>ICWq_YVif-%d%5oJL48K#lhjqi=+k(Lsu@(>^H} zGnk3G@}yuJn?JpRv7gJfql2IKa{7DbNNZu$~QPYH(_EiyVRP}0=si*abs6xKfhwNTCB79=06XAet8?%RYq_PsaLAMK!R zhI=oJ16}~we6G9~L<)4}ua!q4$QIMUEOVVn9%e{4GIz+i=EyfN`p&Z6NvU`5(~&Jn zpli!|X|hlg2z|F*Q^xeD4y~|eb_HeqsnkAjwb!S~-7M5Mkatc!c-cJAGy>-x3usKS ze`L9Stp8=DarYtHekq$!E^ERALtO)VezQ>N(pdLF9Xo;=b) z=SNX(wG4{7K{&)L2NN*~CwbL`Yek>w%bUgP>;x~m-YpWvkXwoK1!MhQX_hBfj1>Nj z%Ao#KkK~n)Vme{WG*rF)(R!bnN+_38e5np26NpJBAeQP|V72o%2Sy%Lvou2i(Dk0D zi>X!P-$YCL zpmGheeDg$JQNS_dY2NVpdAUBt7pt236`sXV&rS3w*T0ZDy6wO3V?L_K3oj;w%G9KE z+jl;h{2;N(Re8>!c17%rJE^p)A4*RCa14*AthBQ$;%?{`9gd5mcsUdsP-#SAoOjy% zam~YvUjRkK=ghE^l<>h!A}TU8in&PTS%KFEHv~U3ahT1jmo6IdW!SjN!j{?y6xhoY z{GNdPD+t=R6a)0U!JYIPoEf7L^;sMO)o2EFz`FLjMZ0O9&xHL+|LM;yz0lt`xf{8U zvSdB~F?m=i1XrzSp<;SKq&K3=j^1$k{g7$?m9q_!ux+!dGV@@dR{yz}38Gno@%9Vc zeYtY(v$!4nw#JN9S1t%w>?jAp?#U~Dea+cNH<*QOP2RtgnS7b;g(6+W;M!~!Co`;O8~5Osf*Rpj z->k93W@KG_^#IoDxAjHZ$+V03uB~of1tJgT7$ZXIh!D)fr#k--)2OKb~d3LN1v}zSB*9?>rFKSoga4dMn&B zXI=lU{ zJojvEen$Hfb1TlkXo(-OUdmd;rGs5Nvd(gS0?==)8y^Zh|N1BlvXL=RaP?!$t4uK{ z=yrQ`fVDqt1PR^EeKaTe2sM#!Z6eru-7ykv-0p_%dVk!7thKniep-39jNFiP4F+ys z5TO)GgWAJA4)OrX)^k|D$ql-5h*9ZK>m&buK9GMH_wKzP3eKV67M2E zH&hW!%vW}yF3A{~#_Ag=KC41KB%y`T(r|I=Z<6&WO7eeNx)mDbIW0X-)*bCcM;$Pd z7E5652iA^4$uQ3X3CI{))XII4J3Hu&AT%|Lk@eoaI672%W8h2AP}3R`msT_JYBoKqq(Nk>A9IO))U zCdBOpgx)%@T}o1X0oon0?g6KGSRZxqCVC8n@+!DzLP5?vyylfGlHJBf*T2pBfwv3k zbP^(g`Ube!!1y1`akyZ9#ylbuBwOBJ9_ct~lEFcbtZrr>T3dJ?6CcHU1j;e5P$$!3 zsE-wpGIJrL7+|u>$x|q@4QVUEQ4V^G8Sz!esA6tvFb1LRVQ6NYzc&-EC0<47bKdDV zT<8(=nKo@9I8lxzPa31=D+lIH>T0j3cn7^(Aw6=)l%2GrLC-kO(CD2ZxJ)~q-|{7{ zp9_8R>sLoK14d`fw>w}_sgj&8v!aET_Y?rXVuig+jqO>X-xh(;~odn zWNSjG?8!LCgV#otzK>h43$vjgx&^;>pRe|6gg(u+G|S`+KZ{X0&nxaTOlqM6rWJ-C zo=>(ui9a#4^yz6E9!q@)@!oJO=u1r|Uu~v5e4ubPnOvu)7)5V0*gX6VBf>h$v380Q zD`?FNSTM-Y00(zkWksU!!?9$E6|U)U%6c} zUqPK-`YsNPe*^nuTrJ8l>tA_SMLf1S*zXL!v1K2Ug`c`Ha$3rXT)z03z0|(BKKEVn zo@;waoKUuVQ3AJ}W_;R9JJvKoa)MhX$(dLN@cEQhZz>&Sx;ZoDtgavVq|3#^AmVf` z*U&@Xlx)$kqmY+XK=Ld;#G5SV>u%1*5^K)tJA7r?p(oTujea;feNVBrC>`yIoB5#B7g^Wx zZ>s541C`bmQ9}|tsJ;d1J&M(_NRruX-r9*uw$_olNZ+&*1Jy^D-x{vdlx4Ov$&oo~nhqex?0^X4mOFXEn1{+anq$uV}-thGWf=Y_v~8{Nhj zFSCm94`~v+G8sP4>1^jaCqqRxbcWM!?f74Grsl0!tbTaLdPjUZ|9CYqjAoi__~c^V z^X(EXKC}mJFL!u=5YUx6XF(xSZm6q0!Wlr-lYUKBiIU{)Z$h^X0@Iq=#1sX4R|NcD zeQ%R!igjDpTUs*bycSLzMEhA!t~}AhPl?s7{U%N5bPti+a7{5oe8D9By3>#$;INv7 z%$>8`ruEp&E^nD*b;pFJ8yKA3bTT7rL`G{Q88APy(oVSSlLp8@zM~WGZL2qQG45fCZL8l%S(TDHQYdHy z9$9WM_iT%K(R4VMcipy1=gux-LvPMD>pL0nm0k~0L2FT97u4n@hoxZ05m%N_zwwqu z<*_deLB}VI5S6r9iqA7O9Uv^%KJp6C5*N$NDa??yS|ix$nh=aY*COfh`{sT*BYNUQw&}b52I@!SA%_8lz#pSd z8j8$d`d-OT?0z%e&lOP8IUb+9Mwr~qmi^{AUU)z*keQk_ke^Qz0SqO-Ubg=6S8|Kw z%^D^CmkyjE*r|VH6ES*Jzg&uCMaQWyKH@ZH%og(yvt(8%oe)BNMpCw(j38p)zx8XD zwzN61v>uMv}1M(l#!Noq~@?$psO^EV+#D%uE75%<5G7U^mz1LTx_g2~N=dhOy=+G}Rwfd@pl( z!9G1;QCaL8!b4z%QCJ&GJB>NJhy4*TnY5le5_O8Mhs+%luOqWCYkl}f_|Z!;)T>w_ zqx_V+9n%U`rb^cvUk50**`}(;$$R%?z<0G>1RDpCs$1H?iIy;!hlpRb?}yQ#AlJ=j zVClCIyJ3T#-d7b)7&ZyQ+f!!h-y~{(Aex0+BcmQa8(eF>?$k2M`3;ln%0;s}mtM0C zc9{$71%(HSc{?6IH>heD2YiC^m$5NeYqOHQzL_GwZ|nNZ9hkLhEzjL2eK(7PyI9S< zom*+yxje_dsak(hVy5Rz76?Z#B^%DX|6_zuJgqgzKq#D%jEKR5*cIxoFY!N~!Pmaz z3R()b?uCuSDq=ggif2P@T4=x(KW7{rtSdr~_g}(U_A5V$lpm-5Dl={62xLBSSxooI z{5)!z`SVvMz2QUS9U~{Fv+Sq=?Rko(X=5;ilCQU#!IjN=N&SQNQ5=0~L=C`+mGH>A zp!TmT5mZQ^K04RoY2C`a)2sH-kMD>R63t*5YiXB&N66Z(gAI<7{O+Q4a`!RcwxU`Xvj;QN8ubRe23H>q+E`7&G1eW0?^Q;SKT4r*p(Xb*a; 
z9{}!>Ow-t-j>cc~2|3|{sjs$sQEWpTz)i?5{Yk*ux$6YaVS6R55<)1=#dOy&DyGk5 z0I-BuCw>sMe~0~c$dZwjnv!a6CGB5xD^1~ z{`Gb`w_H1z-<<}54@C-Kv^Q$NNG!@ZCk=3Vd!$d=to85qV*?L!1>dcZjAVPvRB)5xxBX>8 z`p!tWv(mM2>OVE|M224c+G+QId}z=hMY1V-&>0#v??juYax{L~*ksk%epH12-F~@I z(~G|!6!IL?Bt%*7U=4Yvt@AmOm)+a^--LRTnb(Mr4uFH@ZjpqTT$%ta%P+~d)rZ?mhK+7~k zF}>o?sxXSFw8fg~dl|*z{0eFn_p(P2DBH(Ki^3vL3a~lDe38Lpk_g#Yb{z>qvYE^N zca7264s{wp=x!9&Jnu!~3N`L9Ik5W-GZ0#XBkR=hxjk#Lm{-BGuPp;W)t{@asqU{5QAW+0f;cT(unk4ZT??7K1)X;^WR91@ z3XzIJ6rTkmp3k zzw1kL|E4IE)qpT%ySJtH@gl?*w=P23xm)_#`TB#du){)<+|JxqYMDj2WZK<~*aF{z z&N%PLMJ`RC$Jnvuh4m%e9Ao|P8I;SprC#JWqNPg-WpyC0bRZJDmsou5q{|w8%0Xlk zHHoc24a0|+1z)=gAe@|xRYgf=hjMSg4yN)oFMWVjgj0M4)rld=KMbDd*xmjM`mInj z2y&jx7?OTJOZi*=~ zM6_d$kC?Q-eu_+l8$Z*&)dIhOSE9w`rWM(uOAxFV0ti~$u=>sBbs~QQ0$BK*e+%pl z963AE?PS5rU0Pu)pIo|BM}17#=!V$?1J&^cwKrXsG39+9Jrl2Ez4qf5_6RvmON2RU zq&jxe;O$qc8mB!{4ck)#4cKn-stW@2cukjo0(x0$J?BNa-PmU;8O^X)=edrDWI|G~O`#|%7S zjE~%yx6Ggo4>gqtDcC7uD({WKBRaJFxZ8rU6v{n_dDTuB61_OdB33h`a`c#iavKsc zx(aQ42>(X~8KAaG@VhDD(Bg!vi;}=;DsPnND+jAGHycn5svnx0i@(naf|jM&)>Ni) zl&9%_dz*A}OmvDwa&aoX@KNC;7KY0qWiuh@Pmm_IkR1xuWiETf3*#5JWzGFeNz1eA ztus~yt7)!j2jbAS@$u><3h(3yCB>=nsRU`wetzxzQwO1)fQH}B&}B)OhFTV>`8*HZ zq~7M6FVZiL&pn%i;yHg-p?_ryu~D7nYJ7CG3jRdLjV1}`NUZ$u*UOMFL&0!Cx;hsf z3#eQK{nQN}^OA2qw8Pq|kUy8J+Mgi^luX7&iZ z4ru1=lUP9`Tgc!WgQ-r-<b?IdDqy3IcBD~jb`F2I#=AenK>tZlI@^*`@Z=iB&bA6CHopVcwsr1#n zu%%D+un;;*UX;Q(=Q(Hqt(d4=)TO{>ky4OSbv}|m1 zqm5JJ>i(AwcOu!^0^EfZn=0M+@2J$NYEww7@_BlVH&X=fBt?mt0GR0T7xQ+=4wj=H zGd2kf4O*4IC~1}iUbSvjafI}qmBw2`LJt;bxfG|2hmJBO1^->gjQ#>xnv&SVQ5OYA z-Nt@KWd84PpEPRkFgvklaf-WK|8n^-(Akrft75`PRv7lg&0^2_ex%eFU*l_)l)y{c zItodOYMq4~<7;D*biq|0#!Yg(O@UW2)B4BX)P&!FT#zg;3Ib5owuy&7s z{z(qDe5MMeeJ6Q~U&sg6{$r?MUo2Ug%!3>B5uGs!j^LQ~rqaF%weauN$i!b4WWHcJ zZm)ILCbo<|=J=7=!Z22Qq-YriAu%S)I(fblx{$8X;fs}y@77p^o$G%M{nzh?gjJ|jl!`g`t$w_k7pfrZ2FoRZtTH=``< z3us+K{J%-uv(C&7bJ<|)7pSL69-W)W49N}cC~NuKgW}%Nlk)6OoX+2~Re5m$vsi7bo4rmX7k_DYr`XhQ+<#`Sfx#lq|Wrq-IY-| zLI2d8c5~AA9y(&~BUo4HhZTlmO;V=C=|n1kJ{eE^wE z=x1fhkKb*QCPbURzwMi=>Tzz7CgFL`ZZgd*dwOM?Dwk)wh4jK4@?upmCv9Ojo)*8) zdq?&aQ8`=MD8Tcuvy@$bi6;;0VXv+UX9$G{i|XIMXy~bAr_(PE8STh>o}T+^)Znyd z^L@8-+D#;#MSM{NRSyr5jKlUTDiOA>cgj6(ePPe{YvF~eQVv`Y@-NzdHwuLK^{=fw zapm6`gb{-rBm#hRhCVw-h4Trc;Rj-8tkAC-z%zYiJ8_Mj4*@t&5)bt&d&euyJSVS5 zMIlvrqp7yu`F(jHsOE{Gu^Srt8)H6%KU+ntL9`Tj6K+0iC%+B4yuno|DB86c;3kf7 zhtih8upl4b+hRM6$na3QCYVk(uDN=A`^lhH%CCVsF|E!QcUV6WJX3_}-}Y0zZB}yX zz{zq`q=fG+{=nZQauX}0q~bqG5Iu|7WE634?qVp`&SBHqe!|z+*K5IcpHa|ZoxHzZ zU>p_xfRI+?_c)QjPuq0XHT~`m?kA{rh>Ab0%R#aE#07|(!P2EadCq3C z{k5`N4K@zBt=p&B>@Y=~yKllN^!?vFZH{^GBi8)ZU|QNP;eAb(Iaqbz)m~iBt2cJ# zENL42U4Q-#Z1H#va7!=*aF_Ox-uXmMa8~%*g7^t6Mn_0)gS0+Z+{>;d$TO8OSluNY zU>y6}Yv`;p7Kk_t0AJCMDBtXmv7N=z;gXRZ&git{b|d_(TEoS&S#8yuR{B@f=1Uz( z3r^KrGdKqL%gIo&`HuJ9i@>E1cSJM$2an&&|M<=l@DZ75>w^l<>ZUCf3Hw9w#A7Cl zfiPE`N9Cbj$TV|hWErLrA&z_!B&I9;BF(NqlREokocia@ELE}5F=Br;XT6NT9IAPc zZudUr2EoMtoupc8vsj@&WLo^s7C;FY`ufU?1KUki|GGy~WXIq)%$sLhVNfP3ztrD(4WR+JB_G$~z9=66Fhh z{>OVZ0%fDBqxRS9fg?BTo!p`&c&$vRMJk72N0v|B$1KUdK6!+r$kXR@1y5~mFvC)5 zyL$es13x*MF@s{D;2_4%VQ1@vWa}HmNW}{F%a&UvrrUcqX*Yr==*+lihC1Z-h)ncz zOG$bf2VbF85it^tbEm_pAzlK(`Q&UTl&5Ec&o-=`6CO|H)}x=y!ME{w&Ewx%IdjqG zgqACkD~!-a3}Wvf*#E4xmJ2evUIe*_C&&`-|E@*=*z+1s^ud$UM{H+g*Sc+AOi<{< zuh3*Z6>3VRZCK)IG#Q#)tA(W8TRr)2t`a;dXzVqFw)ST9aL4*?y|qG9YE_Od+N(@H z44o7kRs$s1lsPwU3K$I76<6MUamvqeeb^N%3*uxNy~t*#Oh5Gb~ZF z_(lYfKMT`kLrY~vp~FC)YS<*N62EGfhonwY?H7}F<3&`*<-9z8wI|;UNM;_Q`zrYF zO9FHt3%6zW7eXd)-dLwiRdNMQQ=IR3)IQZQTck(&e?PVyh>-gz#%Lqyn z`BA#X)|qKmc#^+5QGb02_k|9rZPj8Hd3_Ck3+%Jk%RFpK>IlN!cyL;p|J{ax?~C-6 
zrvy#*yhtajvz%uvDR}KQ(SUgdq8uBAtxC@3a)~-}04cxl)?+L3A{c&&6IZj6ozlwS z7PqW1jhR9kz{Hh%s0jY$4$Pi8$aal)@Ur}sTWVj+xqbJ;XQK8CaQW{c7xOkvfJLcl ziMY>055GO9hEc@-(3Hy-`)V-WBh!!bBc@;RuyB$5&6U|ai_WL9xknXklp1!XDUhgo zWouSbL|-c{7I}TDxn?dpulxT1gFt-0=E%-Wn+6M1PG(P^lZc4aWwn0&uCKq9n82AxmFNhr3Jm4qtByAkD%|`4vTW*FF@IM-M#aXj?#gdK{DnXM2g@gKxEKT$=zCddOj=r!A@@(-I5O+Z z2~vL?H0w7z-D1$W1e?}_l;Sx&@wzTgeINkl?g?Pka(a4% zlQYMfsbN|S>@0aRe-#ZPalt(w#G27H@xP7z>F@k^_+;OGH{E-^zwG*tEw6aX&*Qtr z->N<-`9=fMm$l;Bb)&3_+IcRcX5bPysY;mprzm&{BBmXw(&8bEY zLO)2{&-q=K`Vu0{(^3RJv$A@lJ`fn*Rm^Iml)q6XIbpG;SB9`gvfk!2JbKAv%Y;yIBjPS|pd3*gzr?kmA`iC>&CiyHbBe(JmRD$EH6CohnJI&ICFB`y(Pbj@n663NAVM< z-({Hy_lxS7slZhY?(`@3dyw}9913a?5nRfU^!D%hx8!9AAjab?`E(+bmTJYucoSI%P9T8yAh&1@AN0( z&Er?}xepoc=H>d^b}p}a`!6kjbkXlD{2%;OhZ0a$5`&aW>ct$Hl))hs9IDwSU#oFp zNq;l1p%q_Xm&i@6uuM~rjD$I3(}GV6rES@@V}`YU){n8r zd)}Ze`Pq*YpZw9IdxXXsst;?`QM!E}5zq~w7)MmEy)nf7geAlCYXgTWO<7m3uXhPs8`6nf5>VsYm?e{)>NW<#~8Zw?ie9&pB?rYGjB3M(+>*F9U<^uQp-``$=4__0*% ztgF_Yui~<2=G?}K&79lAdy9t?opfW>`R$}Z%)G|bcS#J2Gigx6 zg%vmCsipc;yo?%H!p8kAM#aq8X|)3JRWR4&wf?CAw3K$#pu ztT~J`zp&zPZI-o_lsM-j4;aEPJN7lJD&|?sw{C@+hxa*$IlW3P4A-dHQ#PF4-^*|M z7{1TrxAcF(pLz5-$gy7q?%lLMek}cYr#@ZZxqjp4c3KN`rN`2tg2)#YrK3_bRuK4j zE(qKxUp7UixQSOiPzKi(&Pt)w$r^0F7EDs_^Hdk{hR-7IrP4F|9@=zIKF@57vJy`3%|n3 zSzLej6%v#eRjRL;sY>~^z(z6!)LLb(3Y+x{?0AZ=G5Tyna*|tAFekqdWgKaICOK}R zOPC264=`WcIHX@IUTjN@?;peiOAm9b7evyCAy{!HCjt^ukO~)gtR0R$;-^H;52tA( z%lZpz{R-L+7dmAAXQWJ$59>C4 z9{GLW1-j}9@=1RiEuj0yt;K5A`5V&xQBlNjL?DuL$&r3=Wvs2M8ijOtE!|(y-nE>L z|El)~F8F8quX^L(+xfF!x@Gz2ulrlewYOfe;+*q~OAQ*Vs2cBs?H+FR0rsA#CJ|u@OzGTcwS#j(7A+l3ZF`zNph0M)MI1Ys0MsB(g*(y z#5D=WB?l7>VZ}~6v*3d`Vo(xIj1Jfdf~$C>MF1%1{M_aP~c>7G7E9$70(neXZ)P20@@kl z7dP$X^~wZkzjorMcMNj*Rt_(a1QWY4X7zM6XTNYU)~H}b5h~7E7lHG~TmVf4L;;e5 zZFxnRi+bkvsTY<_)`|}hb4togm-`o=V-#JWym2Vc#FeGuYr_zS^#Y!}pn#QTVtLV` zW*rh89&`SFK|z4Hogh%zI=nfF&Bpn)GeO5v1Wh_V%)|)SEADati+*g$8$b3xm)r5V zjk8a9wEl@}BHQiqKzv91*~dQ`-zW0gh5u5J;r>^Th5)#&ggEv?GsJ2=Tvew#(BW82 z<$FV0c(gLfuJYTf>J_DZNHwqHnUI#`(1xOOIB?QeQ3ENi0*Rn_3?xI3K2yfv_xL9) zf^|+?qgDm-W<;=`R|SZ?!mF8`&pzQRj`1pif;pToHE_-wY?V});1N-LidG<39vfR3 zgCD-Ar91g|EI;_vA6w3U;L}aCm)C1=y?Xia-~2oH>A1g-%J8`k{2EG&J?UzeoR|;ICHqRb$)tOiy@uQSQtyB zWksLhfgPZt8Rrm}9K1wr-6P7swI}LdVrx$QzNZ{7R~}Ss zI>R>NIv&2&-$F!E%8NkQnU5H9=6+YqUCSlcegK~sfA{jRlOB(svvogN_vUigeUDu3 zbHG8%hxGXnMmT@xtyFUvxqrf+dKBG27m)iX*kJk(Sm?WR`37)LhmF4Js3gzKR;p8+ z`=R`GCjIE-rWml?PjQn%AF%0IoM>EYv-wZE!Am%e3r$*cbAa`81Eti-IXD?|Qmcwmgy>aHT{FOx2nan!Y4JY0z< zz#QiEjls|gO#D|zq7&vpU#V>rt<@G06&OQ@c7^C_zUwXqu%Rl*@7cTF=^6i4{?d6#d`EGn= z7ypMv=!ZVab>EzdHV-eXw=9*Pm9Nf62gusY$H;K6I#BUsQ}-&D&jgzgR6KF>V2tXg zv4xJFOyR>l9RNcTZ~ao*XW}wo<8Qm1p6d*O=-+|AfjnjUB>D;j^t5-fQX&;(!(Lek zoxn*n&5YMUD1_Kll{F8BhplRn?z@&B`?i-Z_doK~ z!1p-+?)!gZ`IoQ#N6YPB{!&QAL=~=#FVe1Ay^-dkousZY@HasfUJYn-L5gWi;3vj7 z#4;Se{6NAw5o}7KUb?p&KnHfN1@vy~%#c##SHz%=n zRZlZUYLPZwHXOj*f&cRM9hbggIqaaL@cEI4C~i*|=0E0sCoX^b$=_!dd{_rl@WXu` zYuTsj+&^n^FR&(PHOxs9J;oJag{9Z86$WbSOT=K_JhNh2iBuj%mNrE7{q8gl zj5zgB_#s`CbYK`i)^mRKqw@Q%$cB+vlM}e=|Oys709_5uOXwGx~;H_7cukY$gqh-G+&v`>YKhx%s_!69cfezRA^wEhqZXs~EI&H(3YaK%sbtfTW& z#2_eEq~{wFT*_}9VuVfl6H@}3K??`P#tiASY$PtH@^Djo=9ux?iw+lJ3a8%0Sw^`e zntF}@kMH=o<+UIF&qVIkc-TqbvK)5^e+Cy-I9d8X$#!pq8aTq|dtM+PAO%%^-#KKK zrF51P6Fhcc<$FB&>53+E6YAGl#jEyu{S+^`fT)j=nJmUBmw0W{68SC5j>J1RB4JjY zDXCQz5%&0cIBW` z>yjotgo=l{l%Ez4?q?fg6E8}|BtMDG{ETzIV-**^?dI2g;ML2^-~3W(QNDo4m5(Z1 zIkB$tU6(fYNGtZ-u@fAg?U!LHU@Ykm77IBXPYvl0F+9T_#9`3SRq{sxeRF=oi6t&; zVg{#k2BY}OPdtaTRBefq4}P`FC|sjoQL<8GxH*dpuy9v|u++`HYyn_D@-us#1-0>G zBG9r3jD#P9YXc=lOKG8XiYK-QLombxE$$z7(uJVtD*|Px+%#3V7`&s?@vPE3%nv(V 
z8>8cqhgjiYOn&J?Om|EIOeIO&6F=vqsq|}B4D&;exNy%d-24u@{`V`G5GZnRTyMxb%mrHq6hDIlpvM zJ3kVOnTlzV{@!TUSeuWTSp8q<;`dr_Y04 z)`+X>z_hCAkwr#x;#?|S(Oe3|Yq|MAZ@V9sBJ(!%Pa zHl5JD)sfDhc*gPE9FL(FMQgN;N<3}R)WAviVIrPOa&I$hS7<_(?w^++fc}-C@W$(DErNFTT!#U;8 z;esatX8xkzGtUY;NvbGL_^6%EE4dgWgJQ78tY?wQJEB%izz`)cmy+?#lQt#_kha>E zjUI>I#W#@L96$-^0HzkF77QRUhvd3Ii7R&aiIDKZ+M=M2cd^Ftmf$|KkP?|l_@I__ z%x?vmS%!;Lvz=z0zjV+w^s@rzR|q!uKk_s!_jGyH+ke?V;S({`%n>fm-1y*2Bk~hP zU+c$)dzD#E$Iw3YLO5IsBmEUi0>+11JqM>6m4?+mn9ff-2OZ^1ekvGT)?=ZhW^#OB za4r4X@j{~5d8QtQJBi|lM|mo}xDu0%_PFHUuzsV&H3`U)-que&lu`ZIl0TXYLMX@R zh0FCh<789}7)9zn&Hby^6svnN^GUyA?aTc`LgmCvj*^Q|?_nWL<>#!OVvMabivXb& zpq2W8m^r^{P}6(@$xnOmBfkk*(L-lTt`UGDtK9m{WC_;RP(!}a(>PfyrD@fgjB90X%8EN9>J6y)&nVp35Z&sd6LUbFhmL`|TrIH`wQ zrtwGw9d=4Hb1MJ1q1HD;;xk(-iSKuxz55>p-*f3(BZ2C*mMqqxnqjk;p4E6Dqq@qK zbV6M!7A%nA_?5HJED&*SfUbs0f%eQF;>xeQ)FdCn9WT+mF!z*S>8Jd3igT4e9G>Gc zk(pDk@>YHpxz0~rm7Y{sC%^kgE*srvDoJj!C7EU*+najHV@~oAFye!Dui*;;X6Xl1 zKF(ES=Pw$WlcVTK!;g7%{G2}=mDnk4> zeXKe8DNWQNN=#?~6nEv1Xi);gFmYD=2=~gl#(kUo62Jo!fweJZ(wLPN0-VKVFvglb z^yzmJu*bnK|Mh)o9`dS1W zXZ8FLico%tj`{IRNUX!f+4azr#EBUM$|1iow^93W=JBOy&3q+@3*_>@FuGS9+@2Pp8bh%mAKIY90Z7t*`hMvT_FcQnli*>`GheY_QA$-^x9((rb zlZ1-Nnu6m)SA7#l?0{-t$(e_S3SI@ulZxUp0vRS{moT%;| z7bxfs{Vk*<>~{Lh=dbtuCqgi3P62gasLG;;EBf6$(pydJ9c{8H{~AYHVg;-G6Ie?} zV41v`;b1HW%C}8Z$xS$3?=( zCWB`#EqFF!?N=0w;TAYnxDhUgg;cz5Mr7AhL3~A+fE1xtEX|E!4i3eZhx)Z=dUDOf zIaQ;V%g$S_ngP39^E=hWt2Ap~_HOQ<^-yO{&<=$F{*37{eU#v|Qa3N1b5(KN@~jucg!r|2&=H1c<0Hsh&*F3lvw#u&#j7%Mt-rB5qd^qh5ILq@zz z0P=T|us4BXy(OHy!eF7n{P;y(^~;1Kzo?6dQDF^(3M2@g{dVkc+C5zTe}zytfzB`H z(q=m{A#d`h$1+a*RROYt0Rh2r?-8X$5`)Vyc~krJCT{1)jyf2`Wr)3%9ZQYG*H)U` zTh=k<4}dd+`2k+(rws<@*$@j{F#82SLyS&FCb2Ye_ANdJ%!y~G)jaZTA6}y93}T9} zRwz1dBDMC7&9$4wxJsf09nMcZ^B~_ATS0c-{OK9G%k>2hdWL+Wwty}{{K&!MQU3^< zC1`I0u_h`Aw~e3sHwe^U#_5o4*A*Ae&Tm@vpGyhFjGn0W+H*bR7ney0E&U?k1w%N=E* ztUb6gR#UUq{U_1^oAYD6AwMKcU-u6~$;{#NPzH-B8~z4jB1X|xK*X^T4Cg%Yz~Ui) z_>C$o(kPH|Br*@9n4(9yxmOM^kcz=R2qzvO$gmoL!ND{Ao*FxoU_-i*tH@o9Gs~izt}ep^_hFFY4g%C zz{;*`nYfm{NEI{U6K+`bxu6NPe$!Zgq6W~kwA;o#KRkl3U33nB9O|DctT0T3CckQJ zPG*|!CD$Uw< z!4I%bX0|N*&HT=!w4#_fjV(%fjSH8x^87h`o%5#`oK?^aQ-o`!Iq}9qJoQVs=aiEy zj1X-c+|a31zW>DqBF~BtXF>??S`=&y{&(o*xexu`3fTR0?QQ;-hvaatnSme5P&0Xe zB48TIFVnC~JsiaPJ{~^xMDeMvssV&3(Ib|>YD25{@&iQ|Ik>C_Xipo47@6NF^8`mt z1XV!dv!{!I=q7f)=fo)jI*8?0VO62BD3O$br;yxcUE~aQk;Nl_3az=gn1_ipX<0Wd zO!AS7es)hb#AEK9j1fO^A|RF43?YXbQO%JzHgD9BZR}-QuDa<{S?+c|{g|_szxwFE zk=|8v5kg%YnYpZ*W$yY|m_feQjl_-m;8{FVJoiMB)4g!76jX8UL-m9CMZ)YumNbpQpi222*F1v;@y=mR#DmOf{gBDtT1Vi>*)4NT(W;sG-52){ z@zsdIBk3SZ}BbOnuA+oV7JLxSYD&Qi*}$pw=zv0)~M@-kkyn%Bu#guIMr3^$rZ zz+6};rDt+!D`>4a&9wmPa5|WGS)OtBcj3np?cIMpz4O*95kX4!&D5FQ3%OmhaK<_Y z8JQfTu_be5i@(6Z8oK@SuM(?Hr4ZQXlOD5JMVD^ov`rJ1Lriihw|Lzv-YevL53 z-=l)%mvR$s+;B98n``xD3Q+r|eU{9xwaz0}5+t*EEg$}ZRQsm$qhnTp9ZaTSpQ*~h zC7lM0DNj3sH^OnHV9w9z5sn?4@=tln?>aYvSJ|0s_%~_Y&*BBHkQ@2Q(JZ|ZRze3q zOXbeyvUda)T95l%_esJ#JL9V_#1ER%r3~USPD?Wvc-5DhqsI!(Ao%bU|?$x-q-arT* zGLaP`_~J_9RbPr#8e2HABoI6oL>6igE814#{akL8uXth7C&pJ$I7#IDm8%+^-}_+K z=hxZif)(KK$tDAM;iK_Qi}o@PdoJ2Im8x{a7wP~gh#}V z8^m>{!uTF#8pex5`mKZVv@JgR%~@FK^IgW;#G#lvn?WF%)JAl$(capmq4Xk_;T(+9 z%nOXsDJ{ZPO$8*sG6`GpwPs6TDNb#K6=!MyPq>AS03n@U84zv;T!9IP$x*4!%6Q^L zlSS1;e)WX$Q+_d9Z1K)KL{39_VA1dBAgk3`7w{Bdsp6#fxI<1_{_!(^3I7S)y(hxm zFR#7zs@5c=IGNWPna|Rh0Y-iJ+~=MoR!7~zTzCSu4V7mFaTuwJt;VPF%6X)p*;B!i1U`e`k!{EWiB{z2cxI02H9kpM45 zD3fgTc$bN2CL?iq6wkyOR8>QkvfJyc5G7bpU>xs4X>)9?nzG`tPk7Q+#Auv|N#qzY zVg1=J-n!iU`5WWi<9x)yM=dY=o?lxYaMT%%RqtAW&FPbTQ)fD8wdiB%IwRBZ01w#} zIUCjupl3@wqbh__8-OKq$R>4!EydBgN7fBY 
zCe;sK=glH&%}e#lFMWk60c)gL09Ih_*Z{>V3*u-`{=TuCN)YCo3B2Rgk)F(6sq895 z!cBrJLft=PEm6te@rmuAu`nk2tv$U~{}>x$tgSckdwBjqZb`WMw*yP3Hq<|{QhVp? z0;(eDSmg(g_QW;kh!3w^xirC!<%Gi?u>8cgzI-|2pkq?d9_O1sf5Y;{J3r5+3~Y1S z9R1Ee;UKb0s%Z5%@eTDN_hk}Q7qYFAM;1ll($bWS2K}Dmgw($C(^Teo`}{jHiy(aO zQNF|11c^8AtUf==1Of+DHHV_>gCQZ&46t3*v1HcPOl$|SY71`|1{4nwbNnhiN*G%o zBY$|<07faHxYTTKwKt*+-|hSd`FvC(~8= z3*v^B=w$pcOKlN?@X*uSQ6GXYU7k>U@U3W!}^zt+9$WKhwteI=~OFb?M zN!8j|#aG1hL5wf<73va@0 zaY{hY$nVy|#^2-*$0Q{A?I*@HnJ)#9!olRsAfMvt61f(>Z>WQHRGj%bG7Gnw;k1wg zzr*R-oaPa#V`nVa#1P@UT9DKqrEikoSOB!{1{Fv=h|{mGkLWR^5nH*-JbENdHOg?2Kh593+Ix2o` z91^n-ubhaR((jCQ6{mn&DNfBIvFub)Y^74o2up=UC#J|x`%k1tCg=V4J7D?2r~Mzxzl2GlH7cO+$def@`@8Bg32@YlvoBbY@(aoEEKqqPGCc=6}n3dLW^6P zD?bVS{5a{zZjDk{aXb3?AVFJYFy#PD?>`DqVKLL*ohOAs%X{roV3K}<(T z5fE2D%7j)pQ8|mJUl@`qfxNMDQyiqI02-N~yGM+F(bZzL!KFmd&LxHEBk{zw5c-IN zA#qqZhF2S{pEp~Zv?jvr#_1{$tjv$~-TfylQBIdfoO0gstAG2o%L^X&ckrgNf6G)F z2x$F`fdJKlrTRq>bVR>tqcsmJNNh3zH{@0S#?2`{5foV5f{yT7xi6$6*U0ZaJ3czq z?4oS-%HO-@uuwz-ulq;AuFBUYvQh#ux_<1-L;`v!Z_*DF%+ybNJnXUSFMdqSPy0N| zD-%83o2%kGx#N|;2}oK&w1t4n>LGiU99c=HjP~e?ScYdv`&6iSsDB0#9CAkEH{}wO z{NbC0inim7aK3?d{6x<>IzL4W0K$1=h_$s9$GIZDxk+9uNiyXy6C=QmeTMY?J(A zhfUuH0@rGYn3f`Rf!>ptV$4^GX7UnDfN&Dz{1QuXu$jQ|Q@9$-JwJ4b6SL#Tyt+YE zQSmhav1tYA=%{$=qShK;%|Br!HZ#8hjHj^3chuK)g%uD)7ZN+YEXouB_9H)4 z*I6*0AC`qhEsMZNnH6I%0y;ktuukzt*Og8T@gQdOq>@Ho5i}S0XsU2A4545n7k;@W zVi}-Rt4qfNub3iDFQf|*KI=+YRFXsje6)V%q)EDSf02Me_C<#DIh+CfEsbX#`|#yo ze#fuj%XEJjUukt87{9i`A77;Ui-!4`d$w7$9Igjq)C!66xvW)~g>K5`5Y)@wun*nW z_^KC5P&V}pYqDz1i#Fw9O!;ya>L2-|fC7pjT6_-HF9z|Ni}~d*FnP+;wMaju0DFK0 z0Njl$k=#2G6;ed0U;qbf`r8xeB^Db(7}H$D(Wa?zv|&gX9RiTcE!Yxou=A6f`Se$7 zYT|G6>YsUtO*Uzfp)N)$NEmBX1b9FFnagMJ*R($KxU-jEdeN^h@4f8p%PZgdZL*m~VUZZywBHB;eHPsdH>L^n2@gUTrIY_P%fCUXVa_n+8|MbF19E=eQ<6lWAjEdO@sQlb!_W zpZMsejJX$#5q{{)-9r}#^XHyZJdGRqm6&#pIX{62QunoV?Sx5+;O?bu@kbA<9r?o* z$()B6GxYN;jp0UB8~KUmXuNjF3!r+eJucK=d8mb(HJ1`ehAXvZhTB8^+(>|)fBJa- z%2nZK;J>N;?#KMl@`#hqr+c4`d=i~%loQOUdG-3mjFJ=_gz5E%cotyIEQ4GEW3IAj z97C9~P2SAA%AYn-M~2!YVX)L$_=M0`>WCCAfT`RuGoD(#U!R5gFE23_m{k)n`YwD9 z1WYU8!8=3yC{nHvxMNBa;l7F3Xryf%MX*L1wPM-g)qyTA-Q9TXX&2zX%DrIu(~tkj^2)dU zo8^<&;Xk@@5vrd`q$M?{F7!$-ZtOXIOv3(AH72cGe)obs;~Q(vqW}F8@eHU;Jfeyr zsg};cT+AW-m{NBG_XKkaF)I(4I94PZ zMDh@65~m|Nyo78Xq_J!q0*NV3mc%XN<-*GB-qAx0b6$xbX>!J~XOUo^j`JV*^yLX> zK5co&C*HXH_MiRQa^dCgfR?C1DhN=`oN2CJaMq23IjXU6A;2}1tBP5@iKsT79~*!V z!Wix)R^BVWJ#d;Ud39qk79Y&lBO7s1ze8Do@&T=xeB`eDj>jIOQgNldZC)pDW`+-( z&x{t?5t95;87z!-msNG0^#HklLIVP;Xw4}s4C!&F6q$gP-0XXKBK$`(qX3js6D0r3u7cu=ONfm`JFkyz1?U2`Q+p-qReFq~e;m z5MiUE2w$+K5uVM+sPN9OXe3C}m0q9(VN(ROC@mJr&v+%NR?>ep{u%3@cmrV#JOI9t z@Z<6uKC%4vg}=W1;YGiVKMCQRnK&q5H$yipJ0={o^EXSx2P;`OdxJ=&5;)?qRON#% z8K&4t6Z7gOs(GV*)x{CZYIwdqDpBjFpBlo4Jt4FukFjP5&NI2qZcG(JD3-d2%=u|z zb&8P=$MY&a&H|MH;o;?$bi9^0<&%*YYG4j9V@eK2&@9fv#^-zr5g_`LpS5opuqChY zj!!)fVVt#WiKhqpiOcM@_r#NHSMl_x0?bfhF;EX2{T5Y3CjWHMaJ)N-vk{J)?=^Uav)*gZGIO z$q_pDpEiCl*73~)3&+5DZ(Q96jOA~HikA>>Hbz^34WDKN>->S2d`cM&*kCE80Oq-5 zu*yUNeGY`p@NZ_~Z-d2z!2bLM49Z1V5n8wyf~9SSr7mu|0Dl^j+&=)s^! zZAX5Y>q6eHqOi{4_~aKK^0Q!ul&p>XwDT*m{FNC@aZip19DU~UBhUOlm+yb_KV1Ic z!@sq>?gOu0uDH=Z5m@TkKMKYu$yj;M*?p1ZwBP=^iAHb8fFTk#fE&zQVy&KdD?48f zIdo=RRMWJ_+B68jP`p;Y2r38DP(L@m;r(yLj-$=AsWEn^6NL_nd&eg~nMYrwMmdzJ zXdF87Q|SD+GUZ1=IQ4mqNU&(vS){_LSb#Lk(ecQ{c9IrnQA8z;9>&l4jli`zD^?ut zF@2*>@`BJ!BdYsCj3rlID^X6Mgw4f#qc5t9`lg;%C5lge#{1mo@?DoOy5}6pUIRhc7twUa)GMGz;Q<4S;{+`DX;(T@NzP|e18(%QGyewB+gnCCNZD{X_IDorb&e}8AarQN+kj~ z-l{E_>`W)O@)HAFiQNbn)mA` zzu&zNS)Tux7wh8_*M4OAqd))MHi&iuviZ1n! 
zb`S13bUs@9h7gCES^gO_=iFF}2Uu#M&dE&%>+e4mF@gExH0)$g55lLLz46wf-cc;`okBE_p~U zkYKg4pQ1!C&}@`gnOA=EFiy}xV7KhVn5&}2@5CcYjY&La(kDN$d;7g0SKfHp@`|_o zBK{Wflgn|3oj5Um^~*Dld&t6%mpt_!FCV!4UCSSTh$Sf6jq+tP@U(zR5!%n>-!Hm~~S6L+kw6%c0Ah z#d(Y&*BHq^XOMKL?6gxz?lLmCtaTn6kqNyWi%Xsz z%Y6?xczOKkPuH8qIVV1LuU_QIy0_zo&t9|qr+2(``Tq0&$#Ueu$L=wVp97KNNN)WI zMS(P@SoT4CnWknCQ^U3Tp7T9P$J`U|*}r+Pm$n7VF2X2#^>+^HH8U2LEB>ZP8Nt9= zSR1P%nBX8cMQ{lLWa6Gk4n>sRU;*XpUWmZ)cmv6ez(%nN-jhkxNkqG7eCeB~@D|4) zC-gmxb>g+4q{`t0wAzN1e3_N3imOh`#Nw1g)Xkr~tsfmFH%IfD02bDWYXN-dd;iUZ zeA9U6C*HKYA20NO>xqAVdH!Qxyc~e@GjW=TC3uFIL)%U&19J4E-HTFWyvp!v)0o~v~NB%W#K zGcWo+AMTq=&h4LnO2?^+OVx;XI)5gS8#9;klN?*Sw82ut?h&suhlf4Es~+9FdgfZP zDq4bR+-_921f3fWJ8fiSd=D7=pOs+tq1fN~g+TnX?}D3J`1}$gPHdz|YGVrVS^r|+N=l;lqOa5TtuF~o; zVCZ$ds2h?D-rS5P<^p?YUwo!@-EW2XG# zz$JxpMm)X>t8ytW52!NOvPv?*jg{6%};}_#pJd<+}plRgSS7?+i ze&qw-h-Z{Jxgsu_*dUq2?F5Q5NV=TfT8O>#D|al{-E#GE;t{7Pc6XN(4}XB(K*)LN zbst^+{;$7C-}CW(_{8{wPk89=LceO?gN{E3|C!Y}%Zo4gd-~hRdoFvc{<-UeSH5St z;j`C?w_A!uwI3CuXWZepZ@TJPW7s&$ay-yHRN-OQx&YO``TV$chk*smx!Te>!$*Ja z17)3ZCBONd8dnkb*#fkg-x%aJ0cOWYS?O0^nO#`0tscI$%{VhrDRmpe`~b5gX`$CR zHGoDURcAQ}5r`6{pV76}g8t}F5O~PM;W#6m9>EylntM9iN~*9V3lEv1rC6~yXH8uj zgXvS<%!!Y~?t9eoQ2dkBxhFk#dDJNv?A1RXk>_g}pS=Df%d6k}@0JgK`kic@2DO}U z*q)zRg7pa^!h8)og2X0w$O?(8?z~A{dTKYqT*f=Me=>9Ctu$MhRN zIf1cussg<=XW`FW7NdIQ0m%tfER&<&*k41VW1KQ4ICGnVgr(mz;EJLZ9Q z+ymF64>@jm_QSqQAEdwXrpuNOT=DMZgZSWl^A~O$^{amXsR6SXHKPnVfWlPo3a|4| zScRy4JO?$76D-y9?tHND2`UbH6^&g1WXCCqf?nE1r+GRo^;@}!g*QyTM-^2u^qVI> zKa;R&kx0XR|Be`kR(vytAF)-p5wK$~Mi%PdFqW4NE|U`LLT^9DvpBcjG)_AF)S2QQU0-?ArORvH_bbbL zFMm_Fu@ja}ebA35#;* zycp8-761RT_vS(0UuSvW*V?RIvKDJIUSzNVFJUuDsaXtDCy<5^5;LtcKp=)RNttPA zGHL#32d2$r(q%d=)1*+s6fjA^lPM(u44A|Yn=!>pYzJc*Z?a@rl4V(&CE1dCeXi@i z&pF@U^S*k21{p&}=Xsv*Ip$>mr{XM_;eU{%t|MdnAqtDeF@AoXWD|Nv14W#o8 zVlr1uKSuD5q`8c5?wc2jevy!^gf+4aDZ1;;vt@1~SKdu4VNKBGWk0*w`E=Dj&*615 z2t9^hL5KOqT+X2CjUR2VdHZ%VKPK!N zbB^7e^nZ5%z8$)Ek_j5%1c2ccP;F7v{r z2*$i8@Y$kEKj5SUDR3>Z;dGewm)Dc=^p70ZMOG3uw=K$G!WD3>M3PWt+JTdpx^MBW zt&Vf>q>nNpmbe=G-WI2P18NkP1h#&X)SP};6~yB~Sm_Oow)_4da1{&KnM{Motg)O6jkPat{~-i&T=c^b&k z002M$Nkl>!9xW6H}gXZk0dOoSMI* zssNXfOO&nrLw^PI<8;K(2mrfe%~ew7s=xa&zVl1LhXpq1D#j0@22nzT7iTrNTNjke!Wr>`!9dp#Oo; zpC032iVW6YdG{~sC#inHU()(ZFZqt`Wnb|1e);##`Ap8w*6*6|@r_^nO4Ho;(7oI5 ze)#R%@9K5Ae&^P=`SrQ(*7tBS`awhgWLa9X_O60+#%$K}uX`ft2LKBQElaPGKQQY| za54@|vDG{ZoF?V7u)6EA{2k7jB=YCydX#=7rnm zUj2fN@AZ8)2Hzb2jrYE8`?}(ceB4uy#BB+vud6Y4ByPz7cvp z%xV62=@a+g|Cj&u_JgngAGbgM#eaGG`WJumcICxq{&B-Yyg$gE-#PhG{d5-}J#Nl>21sN|!M@-r>!uJ54C-6e9znW?YUMVe~Z#L24noU?jQ zMjYt?en5f0wARVP*WI7M-M7_S@aE`C$Mrrl;H9hdqpg?L)ua(Q(l8^mtKv(oto7^Z`<~~p zHYeEMtLs9V2BeiA&DLe1%@x$u>JJEE|8 zpzgA?ak!wHKld3_6{*Qe71;M8gey)fV-8^9En2{x2nHou@$~^9%Oxky@Uynd#0mCN z`FxIuW#+fQD=)gnPZZD8m!A5`;u%l7@iX!xwW;9Kzkc$OPj0{cfj4ab=68Q)d&|wg zzCHTbBWb_uN@Y106{~X@Pnfu9ir)EY#bbW|19cz1x4(qtR6xerrv2d|mcgwcOYxdC z?X`n*$>$ql9O?Avo(F@cc@oK~CD-W5jsS5MsjMs+?m?JI)qC=rKQ~BXlQi#GGk^_N z)3dQHy3*t1hrftR%kKNrZ4+3&gavkk_Y9W5spf?)G;0cnBo93N`_UTbPXd%!)GM!> z;UxgNqmOLA`QF#-;|=+^l0{@{Nsciwy3_}7ZhcNCtp9(}HJ^CwF6fqvy)m^VOT zbAtV^GsoGKb0deIc4zxRw9fV_sBP@~zC#B2oP~^asSQ(%p3H^Ls%dCZ+(=i(s@o3~ zU^JXpL{Jge{40R5%nhwYmAmeFYz8PT!sLGwS6qCJzE|-~{}j@-mp#KXKas@x{Btj~ zhChmz-_uW1y4i%aqy;p1UuWE98kvD2mkU5$C?mYZS@4p9PuiCl9hoL2qP=`n+Uw z*A`Q%6_W>A<+lh+R18D5{12;dj^?MykkmkvmV>{{eqbA(0I-NCU28t^21uN7{9jWU zWoq31$uMrPyY9PF56r8I>Gabxp7!kRJ-7dk>nEyf-szV;^y!o1ut6^!_dfXX?Wgq$ zy+8ftAJ>EabGEP2H{QSc`G02ng6sdpz7HgR76(t3H|mv&`8XU8f9m0lmxs7TPb~Z{ z&7JpsWc#Q-?$8qpYyRi;(5HB!<%DXeet)9OvB*{Tm>T2b<3i&!(kr&f=i2>qfcLp? 
z*89>KNvW{8F?av3gtLImzzWse_k|O2EqW4{dk#NZq8~=5lTZ#Ku$dtnUE%M0&iM%% z0L1emgIDSag8xbIQ+NC;`zr0PzW6%dD?CYjw*L1z3ORDYx!iR7JGM8z?{)fy`0KXY z@42NL_@9 z0sOgm^a5i`@j@}h-DdIu)c-){1JD1>S40<=IVs)nBnpoTfskP*Jm!GN-#$R?3LFmS zn_nJolH5cbLb(xHVa-px&F(M|6|5vbiO4=Q$(MsyRPl!=x{{q3?2c9bE&5mXF;5`Z zKkZq50vR}F1UQV^!%r7PrK+UJwaSMo+v(B-^51h z6EXOo_PcI-i@zcMoA3MA`n@TA8FVz;-4;zABR*e0iO@fKx}F*5gE6YQ&)&&??C*el z#IJ#+Y~u%vli7dbl-=i*8NKjD6cTcl{dom2$2bSwb^gnobfYAD`oV;iqu!`NZAZ$MpomZw}x4;5~lX z`%gUlfd41M%xi@4Quusi{fXLtV*8Z7t_}t-uE#42Jf`^(eQo^_{i5h2`T+qxvGKKb zW_>|Xf7ki!?d)^-JN9SmRS2{`TOWF98vX|NS!cL-ay{UmYuAKFjheYS21{()3$1LYmQ|($j21Dck?bsYGHObo^^#@js9xrM_^5v=7yy*u#O=*1O@f6i6U+rIp{dNR>pJAB~{U%EZ@+&}o= zh+ZUq)|Y(p=3nD{JURYocYq(dc-L)j-QM!SU(=JuuWdJf?DtLx_MNWBR_3~^*({E} z*E`t{MJ_#T;Bi(!dZ74wtlD3D^zVS&eCo$PIvD0yHp?dv)5_KSeD=v>w3H96mEQ^V z7&bl`*fr|2*W8cC|KtyH_60Oc(JQXuQ|J6-BBP5dKIQT*lW{F*6cB*Q@J6xC z>lyXfy-a2)_fAYmDY(Uu*p2a&e7xf#~+zdHml;zd^rDI0je|FKi$UHtU>$Ub%my zgkH8Bib2W<0+0i*aNqw?{mAW|H~C9l=bUw}zWM&b?Zuz-<=cy&@n!yU*F{gg*c?w@ zp6tLA-GK)lzE4jWZ`=O;t-rnTdQ>;v{&xTGZsp0no%nPVgrG9=Fj-FLdaqf5o5f5?6XOTfa3rpir1FQ~4xJeu%h8m7xUF7j}voqTbf%(;h{ucnj4N&>u zOyfS^;o|3sgWVHwS2keowvNjAOCFoL4CR9?CrVNe7?Q}_hs7`UjIef)DS<`+NtyU(A+Kh6dcjraIB}E6&e!^R)!v)*2de(X|M#QTajCvp`+WVD@#pI&yPkjTi~QrlS3LRUp>E@o zb9LbE2R`bblzPwY@6->6yhH1EZudQO_sPe<{yZ?!9bCIPrPe&|^O(jL{><6u^TN!7KemoVv5dA?f7hh3?&_#UxxClX zpFe?fY>dXVNe-})z)}MAC9Fv_KOvFGaH!@q62GlBPbB9l7NW)rkphcoZ6p}U(PH68 zyoGDdlkbvmAe0nG2&~Yy=I_~3t5Jn(lh$ad^r5eL@qJ7$@$t|nA3Wpt7S7Q(RBzC~ z?r*;91ICOFs!M%YRjqKA-TdtwHsVJ4uEND7sruxN%$ymfU{Z@?doE0(8GJMPx8BbS zJ^o4z%HzjD{bRXT>qm3-@tmulw>|TU8#iita{Pzg0shx|%iZtyD-V3|qwn#|%cAom z*W6S6c%lor#d2ewc>KFfCqIA8xcxoPJ=^0{kAK@BRN%3X|4-*NuuLy(KW-=qF?2X)_V;6S+w~hiRs`b}QVxCPSV9hrsoEo&k%k%`Hw)>(GUfuR{ zuQ~Id+~gbSH|u{a=m@e|0xrmve-f~=f?=BUs(t)(fH1`cc=RMm`}~HZbI}qVJAMi- zB7XUgpO|^md;jgwz~4c6=9M@43FTS(X(@isg%@bNK{fIVhEI+^ULD{Y+#l9Ucks)f zJYjtBuJ`EgXWXP$TX>&-0_{`t@x!ge824E%WXk=M$0Po}Pkmc151`$BroeibIlr<^ zvAZnyBl(>(7tKGl)iaA6*q)-l2zuO?XL%J~s_HMxB>kYEqTy3aYwpi4a-~0Unjbt5 zVDvWj7+ss}-7rl0*zprrV}{YY`Pkm|rDBbf2H4`hjg)xNkEWHM;Gxm|CMILYkF`LZ zUrCKghU-<#If)G!@J;A^V^a`~NSZ%sH$E_9jeq8YYs&-*NbOx_skVvuB66w&V(TSj zE`nLHt+l(-KJzNZkT?O*NHdbxb2}J(`scb`-Rm5Tmrmm~jUNbY` z>vhWSyvzwBHuVI#>o>y4Z#>Vw30Z|-MCnIJFh*hbQgM7zd9Pl!gpV*fOh4v&)^^pU z*KVKlw7lZ)4SHhX`#(?D@2XsV$#wd@hD*)!c#jJ~&7@9y>5 zV+TJsW57urHZ3N9Yiz%M2jtk-z|s@Q$;W?0Lhm7>e*E`u!9(7D>t|ODNR4?cHpF<8 zb_3*ae@&IxwGZn$(ABs!Bz>@gw)41c99WCLMUuz+!Q;Q{ik8ZEx`Ju22GL5 zkITkjenk^IzXxL85|dOgJ!UV4%68Q(J3*^xjQfHC9q9=%;P?q71x}B} zaY}k$3n8Vk90Q112&{Y?B8Pm&`m8qqa1>aIZ((pCw33&6$n5iM0L6)z&|~;P1$)Ui z=9*qW$9^My{YfnM1UaKFU?lHbjDy;m?U+=vZ}+Vju7NUvGcPcnLP z(aUo1%XR!#Hm?VE@l!9;2fwAwFWz1DALN(J_>vO8k$<0F`{_RYawPwI$o?MxHh%w4 z;*NWHIhfo03zGD(&(HUI4BQAAZa4Q?&Y1T5Mrgmb{rKZ-FcZS)b>AnA$!+7$(wdFu zj70mEf|D&_2IHQ{zOTN!MvQp-z8RPu2^RXJ2X)zZGI6PG_^88feuQ`VQThpaV#Sy8 z7(+)kE�NPmg7gu@tDbReyTjYNJVD10Y(bVtjH&@Z_)k;p4w?;*NsJZ_x`G`}86) zC4+j>q^EB%g;>VYZ5?|KzY9h<6!r-)YUal8;MgBUXx4K7U9KTfyK z)3Gy|n3ba>HvQrY#}HN!^ZwQA2Iwq#>6c@M{}N#eHV0zY`@h*zh!AavI+i`mFQ@x% zyz*27*3UnV0ci9<33_+`;BQdPqbCd5nI8Y7yXU6pe)l=p3$owXF&gvq`Yo}7HCfM2 zROu(Kz6X-%fV0jG!k#rV0A21}SI;Vj%r$1>xTyk5A5$pNj76FW{i&#o`Lwk1g9Hnn zBsU}59_^d|*DX#27=Xff*Tp9g%+AMr>>rl8=CT_EWYAeXTaEqe`JhycOVga?zU+JA zWTMkB!{=sC{#X;}A1Vlc3#hc^{JLjUEH3v?ISMpZY3g1^zI?)VyuWcs&+zD@0#~fv z4QT**>jq*@^#~s6#tRq5fF|y-jeo7*b>AJ^M>k$ZUe*P`utu6@XBS3vHWLg&a6l!U zW=db>RGrQ;kye^}Nvg40_=1}%=}?^PyGPbfvVmFDvbkU-c@y7vG?N44nBj;&`c>d-OVMG{8uR)xz8<1^PM4H+xh1D{1I=eGSm3 zZyIAw{o0zdf#IK8e5PsO)Sy-O0h(~8H@zmfU$HvKPoH>d0M#(pr)O0Y95*rK~9A|law3GsE-N}X0XNf4n 
z$wKK6)~IPjn-wznWus}lc^nEDIl4It9fr=o{hr(OuhB=6`#9GN&%4O4m`X_WbF*nv z_^iRrHAY{niR9#lnE4LZg^&uviKSgx8@TQ1eeh@%jL71rH|tp0@q{709OqF9t$!i_T1mt-0LwZyL5waQ$PPX1@5i!l*VXIL5qo)<%T4nE%{A9%@1%sc!LG0DB-~YIc zFJF~_f#8Kj4^$ZXtn5Z`C17_iYmdG)-h0F8LQKWEH|;E=UDXU~c8$jC`-cRL1t+=+ zw49U_6|r~!7ulodyL^SO_q>Au>yN^3`p{He_HmI{rn5pltXVh%e+tSWW=*0G9*AlJ zKrPxP@{Q38SPJVfzcIq>YiCiAxrnim3!l(3H}P3Jnw5BX6Z zE{9fFpBpo8a18{o`olV-)^XshSjBIxO2gMknMx(;z`zn7 zrb<@;tIE;rUPwS{B9;Vz_G9Fuv-p8Yp7Q;+eR@BEC5n+ROzM6D%1gGN3bAK&r-m{MNB`CPVM9%c~ z2Ap6m&G%VozwndFN6{zUoAN03EZ|)bi)FaDK_0OaJ2yd#^MMwVZIH}hZIo{r*h6B@ z0@(2vr*GPXrVHRzuFW)|+O+2vfcYy~>quLZvB^3L2oe!)tY+57ei@5zq?Z&B^r3us zv-BRsD99CK*Uh}HD`8C5zw{G?U>5=T#oM#P<(_Ox+HLmyEjBuq2S%$_?wSsNYbKXB zvcgZWJ}fU;*0TUOKLNR7a8BlV{!7xTVBw!wxi8%~6Q+d?-S;Gj*h-*-sKuPjviSa& zvIPdKW4X9uV-!{Z2juL9gN@gBfMFD0>DJP_wwbsw1(Do6e*+LULJJ`A11-HB`h^33 zxyLz1s;*Uk``rg24*ELhhA%AKU?yw?V|D%D#(D7hqlGvimwEo|0bThq=^N8B-%Q`Y+lW?l&rc|SAnSwK*&hXI?M9}FbUypN=hp*qI1*t?nR>4voH`BxpF@S zDg5QY?K+w*v-#UZC?nKa>)Kq*>v{y#0vPbt^ic98EP3<5*{3yN`$Cc!*0Z2qBO*iw zr$sKH8kSAj!ilQJYQCYe;>|Bw`;Es&A5`RE@z>mquGGXSiZSrfrG4~+p`xTpZyvLoLbK@wby) zYd0zg#>Jf+RvEJw)DwQ9M!q4Uze)>dLAR>JM-L>{Uoouv(;?XAG)Ky^)Y_wf(Rl46 zhZY!1i}t4qCwS?89Pp!ude-H#c31AWC$y2vA7&W5cHzzI=rBU}-7xaL0Fo~;GU zWKvuxuxJf$`_jL}rhj;lGJCPpT>M5smU)QnwZY3CCJ?(sr?=UmRakK*d(UtE@$auKpRCk4qPmSC3Z<|$T45|5zx{hhdTU6@ZO=tx4zaf*dpW5?1pY05bFEfZ0y{xpfi{k%QCx z2R$*+sZ>O3fGt`BlcAoo}b@#%v zgNRGMug)<J#NpDVr6GvWg z;3u}AG>wt^@mC_A#&Aa6<7Kp5u#oK;RK}989e%JIml<1+UQYh@XfCK@Uxn8{!n>!p zfb^;y5j!-%PtRJ@OwWq`T7QXgEd6kTVhph44}d-i0>Y9Matf1P!0(yW95Z{yHV$+8 z72fF28B^YVG%m3@2j*To7JiFkR{)LUSUrJsQs5eNPsN>JAlC><`JvTx(EUh|VqkUqL71H|;9BcDL7*UP$Ff%z7I;tx-b z!1bc155yvKa5k5H)fs!Phj!zcXFV5UXHS51UeAk8mj3AaLv!NVXoFu^U6%oJIgw{W z2QK`W7|>ddu9;&W&f3Bon2H(zfCOjB9GJsa^$q=Ua1XBgZb5%i+xy_1_xqq%5}wYr zNG_J?OLWW759nEG;VCTPiAQtxeV171j-MFoQDnzXT)AYl-9p9VUF&yh@)6##T6bM? 
z&T;Vg4>0VFXC}q>JX3qsz3ll^kN?xNLNgdd%>T@RqdZ=kOcPw!n77 z4O=O@b>}-<0POoM<%7y-z}jg$_-smC&V|@!tNg=#V9dud>Wkm0Lh?NMgIGLlv~2ye z&_uO;WDou#uCwhpu!!~dK)RWE##>3WCSPEFP=(9u2Fi`lw3*x2)Sh=Cx$(LQeHhKL z%NKXmKIYj7ff_eRT!|&W%-YM+00^m+iP}aS`hZU##C^K|1IZ^Ikc&_lkQ*-0r{PBz z!F+^2SjYiu9R{^My{}Z|c8&?zS6cYBqb!ne9N-eo8*HgR*^@ncl1I80w)VwyYpx9% z0C8{G$gX#5np@jDW7J#)vPXAd5_KTo@DUdNL~)-%;!GAA(r_Ft)-&Il$dOthJq#EnhXx!S82Gl$f<$Po-uBWoc)u9TDJ;60%qEluExC&+~EfSE7y!T;gA zKLqs|$7L5jZF}lD=SM#WsW%}`ej$XDPb69I^>=`Biog5atcou7odH*Msip)}_U3vSnI$8v#_p#oih-lj{FUDegC8T(O6?13w27Eqb?4SJ^UJ!%DVSM%e0LAJo?Z4 z_y-rn#)Quw;vV-~f+`aCh>oqcOhdoarvKD40g@Yo?R9=F042Ox|JwhCXnk<$%|lcD z4}`4{ok)O}nH^b|1-Oe`-QDU{xKc(TePj7VxKto948*(87GPsJ?boHWx7K)dWr054 z6k{ys$T4zcs1dGpmmHEUOIV+H`2OwAdvEWc$DMfh<1MW0xtp0;@57+Ir!;bV#yg8HX1>;0OuS+ zy5q0d6X_UcV}j#7OU`!=_XqM&*+FRf7MSSotQk0a6&w4KjDShPZsA`Yv8NYpU-=C) zDJ662pZuMv*~YMwhKeQK^-pc`K&B}%pcj58_FNQ)y~w@VBmi;GP2Pia`yJpEVu|R1 z3O{hWVYE ze5_V4hk0wR@DO;@)#<0OMkl}kv1Wr;=D1N7W80dD#Mle~SY{=h~3I z1fByoe{m^ojIJ5OsI}H3s227M2$zfsZo-(oBYX4z4A0!QMEF}3!%VWY-o&sRCvrc7` zKbXPZ>n||)O%6T$NT__L;njXbRWN(Srhkjg%hC^y-Wo6cNo=oIO~-x%-9LfVdL*ND zTO;~KVcwy;^si*KraJP;0n)Pp@MytQGnn2wv{p=w#abyz&n8h=G!FP3XLF()Qx|Je z04dxO%-u6WdcE2^cJ&XzOV057UR$tmNWGpwN)U)Px@G>k!#_i@EA!W60Q{Bc{FMxE(`m79TqDHL>_c z`WSN-MuER*5>T;geq$5helH2uzwCQC&L6x3{!(J4=rDdII#lb)k7HF3xb%%agO=o# z2BkfGPkPhw!=pthSoyKebnWD{f8i;A>O4p;R82Le{}49)EZn8eJ%3;dMVpS-(t#7- z?H=aCjK-PwxBJ}bg-cZWpT4hso&8QF^PI?1d<7?}cynTXels~u_W>j%o7qIyiWb{vQaxOlbOG zmWt{Z3Xm3lbl_m3?`w_D!`%%~M0yt7UUyE{0unv%GXb}4ps_EDSHU6TfIp)2GB}A2 zhJl@L-q!E*9PW`8>sgmElOY?w_>C^qDz1Cpd-tRiw5=aMR+fXYE|%zrtYe3Imu3OhMW`@!xeN zukw|i?}2C$IP(moq^`LBk#WIj|tMUqPdQB+#QypJldBo@v~z zyGP*9J||@O>xcM>A_!CLvld?KH~N}*SS zKub7mb4UestbCKHt1_Y7sSyPRu&tTwlR25aXtfUYy=Q(! zInOZ3*hj|Jhg~}#el*A;jo6!i+*qNqkkPG%vv1LFfH=p!Qn8-)qTC|uhkL{wPxQQ1Ds~Z|3K1L0+gTkL%x~P}Le~kk))Ik_Nb3*(k|TyuQo`M%QPc>{BK;di zWzY3ltRcWO`Kd-$SCt1ZJmGjWiGAE~|%vL)3+ z*;#;D#^A9Hb_I;7?9$8hOo5YN6JZHj($>>fYpk>3Ll4JK^al~+dNJ6`;b?b%r)qg(}0=a3F0 zrW?l2c$s?*+8C4Gv0(f0W)h;JGBay5QDXCg-Dp|#bP~;LByv7SR0oqo}bRDqH6xrXn1|I{aF&Az$y7#vYU2 z9K*W&jxAbl7^hb3v0uWv>XK`=bM(ti0yn3*z}GOpSH(Zn%3oxzNo&hUnq0M55XOUv z+6&(tb?P;lZTj~L(m&9_h=lIRkOo50`$1&H(qAMKUryU{uaM@C%CFQ+A`E}9$|zb_ zUJVGqQr-@t^AXP*NY)RwcSYR%OZi}St{&a6D4CR6 zb`}8*hEYz-pGa~w%pbhaAM&R%iYKomHk~23w2$b!Kk+gBIPU|)ce(|Cm%uNo*Z|-x z1-dW7bN^hwStglmRPO5KN^0u|)C~$I%~C+)8#4_gaE$4vH8VeJstZ1|@d*iZ@wX1l zm9N;@Kk_sXOYgPh1FJE$=ox<;#44a;YIgPxAgWxaVkYiL7oK9^E`Bf**Y@I{juARE zT4Dat;A8K;u^=m~E!T25H<&x-x zWR8IDS@QcV_5iV3Wnr6HYYz)7hxtnjbmFnT3hin356K{}u>gbNtJd)swC(LDuQi)X zJ7t>p94Z}2YzPjP(`;~X#|@_e2yROYw!u*H$(aI!#I^Y%uw(EX!%tjBiqX0RH-GCH z+UU~wqgchEG&jNNh_t~rH(REz?B zHZ}N_+{hu0rsNQxyadIECig`o&TdZYsTc&v?!Re88TuG{Yh}KLpZ?e1!m(zjkO-va zYc9P(FU0pb+ZFn=x{QxKaOZaG-5=OKa?dTs@AGi&N=jc#k24C$Yr(g076l&9rT6T z`fdwX?kAG;!LYe`Od@05SEDZqGY6bXtT2`g_aMy1buEg|-$YkH=UALm9OuL|60r$1 zkrIZ`iXT&vn^bETxQL6;WVZP??Ag9t$#T9d7L*+1X1aAE@5}g<_ zhFV)k0~F*qxw{V|)_6F;%>x5|uN6()CFfth{e>_2*6pRwd-?XPD?iun!wY|I_g8QF z#qFox{A2zb<>M!*j7cR)Hl#A6gAb$VxTU#YMW0tEdVp0fArpGj&A4F>;}yAfoe?I z=jWdUxgXUFFnJrJd9Agqvbknvo8ZUBPT7Tp3%jpbcvnerDkb$N9~$x-ISENNe9#8J z{T9>w$@dcoQI@9s(uO>P2NE=#hGFACx6@0lET;VdPodb_-vIp}54s!6zt95f*&Kt= zOxh`*6q+HS&Q_Aw%@2hfYja(c{ml=C&;k(L!QWaH!%)nzKZ*4W{V4BFOAxHLi=(gH znC=Za+3Sse(PyN9?nOg%qy^Cc4Pz{G*L-*uMyx)O2NUOjkPDlvr{+T4HTo ztJXb}X7`-580L=VU9faJcxd&Wd3hKf}I=|}likE!n_MiXB zzvjQ8-u$PUpLykTw^v^I*SA0a#oxO9!0_zZ`NK=pX(lE zsf8_P&JE}?ISck(v*zxFQ*i;L*`t#DuTU9I^nxJAcS7Si+n4p8bG~zQA*J{yA-08| z042HmUZ!>+D&2bNMQrVFJqP!EVYOstix>qb-+1y}d+f`Y=u21`;uWZSTo&suo^ckt 
zT5yubl}e+T^mt^WoDx0HqPS*yH3c9N$YB}m#vPA;$H+=dqAR5U2fMrukgmc#-Q%CQ z_Ls@7b}|)tNb1OV?$Oistli_OkU?WEyGM=4XR<^du-Tm(v=B53W1J=?D1-aq_{>C7`vE6fF!fRk=jSJ4XaQpsm`aiZ;ebwJS zT=OjFW$cMrQfUEf3#hwH6!;=a9LY_#YahEh8E`#5S8hF@~MotsUJ%XfVJ^n zm%ca1A9q04xumcpXUC7xc*=B7jOxh!`4N~we#kWyKZcQSi0JPE%vsRX#7KPffM@*` z!@BvwHm5m;ToBAJ%MOnxUA_c)%;2}sTieQy9`~gRXnyP+Kl#Jd^{`92bX9N1Y}o36 zf_z@Rb^j~7c=~L6#v5JE6ac*mpY=N+zVr|9?qA}fABCi6Uf0epev!GysoyE$3ubo3 zuXsaEIJr|;o=%cyo}(6x93Jo z(p7p1cb~^fa*FjQA9;BD=zSkaJy0(6&F@}oV@_t?iXUmt+0#!Z1*TU8T%yTmFwwK# zPx`FFMgOyRELl#F%>YaO0O$+?!jcqnqA8C>^rt5D^qAQ*wsDwqUg5PSVN#lV(IB4T z=I?=!#7dW1v8#QSw4#0UhQJ4rJbXAb*UlOu@e;Y#pPDle%$k4O|{h{||9+Yx@2{uun7w4Yn`(4}+`oMt><_DQ|#K+Voq zC?*E^$!vz$|Erh(_uCg<|Haw-0j|%#_C?$G{^`G8LW{rfxCci)vULmkljYur%6Y#} z{n6dIDGXppQW>#C^#giVT6hXec;ZKuY$_xLyeAwcf~MwAD*>9t z&5~tl8u^Jff)i?G&cKqh=kJ_}6*BD924B~&&^30JX5O2mYlBPc6Y1bOCix`$&HW?3GKgcX zcSBEe?9PHjg8)A82)p|Q|EWe|;&B>SOTIN(9(a@g5cjMmKqzk>(a}3Wr{HMm8yRE5 znGs}BGC^{v8y_RSP9)D}U0BQ*tkakE*w>;{ohh3+B-TZaV2~PF3;A(T6V`+!ccGvN+`QJ9#`vU-=&|pVlWH@Bye*)-K=c;^-LM zMUa@eHSk9bJ6i8*pf}Iclm#ny~W`m;N35J0Rml z6azVVs9RV?Uv?@baPoU!xelK04jb_pqEA5pqaVFmGBEAZ+c6?$KKCzgJAU$=Gavup znm3CC5G3w#zqQYigbZSPdINxup8xmrBGi;%9ujZ*@`-=GL$&-( z4=bF+W*t8&_z>!1PMsB@&8PDtyv|B-S!zA7t(C_aql1{k-@Ra-O?R*_(ck+PHgpNUad2C&T{@Tjar;sF*4?H83N91@T2Jzqd6?upe{g>J(HxU zgM32uP4pZj(0!m3F<8kZvWv?SW66$CZz_&>mOUDGHMY$KI=F8rD=~iOHy1e*w}rHR z8kJ92%*HPb&BeB5VS_0*J=UtDW-vT3>+RT|#L8=8DYt4eM~Mb;@M;tO*!qU!FbN<0 zYt-)#xgi-J)^R+UqC?TCUW2mGw0M5PGVVUG;z+S3LLY27l_~K{adbnybW8pnw|q z`AFMxuj1&4KNxkhYaxe&1rK*SmAn7Xws9i{+zV*s z%KglG#`PN0x+XS1h2`YRwghC=Fi!T1wd%l9L`B$SO_ z^N=D5m9jZPl#{*3Ifo=Ggg8bVvfi@ycI>^^vG-m_#yQUZ`FwA;-{0`OyxXS=A>C&dM9!!com)YWb4Y#^e?GPK9kgQA2r?c5S#DA@7a5v4CMQ? zUOc4{+IThXh(H&=y_7Koiq7dP)xUu$3;v+_wdHMjIVm+4Dn}C}M#78?A8iqKgpNtK ze94cyo$GSOopjB8|LX3+gm3$Mi#6qNm9=*t1iAz74U+KRva!KlZ4792AZ>J-(IxmF-S3yEfxXT}cOM=by@1?3FTos2tw)O^M^=0zQ`X z#>$Fi;5FM;JpmQCwv!Q?+MszRqqP~&r*o4X(P;WSmYA!x{ph{uFiZDAR6ud(kuLai z$(Dhp5jm@O?yKf>=Sj&qO5zb6D%3R2&m6{9lsjdFKdSXD@SA*2S;=u=6p}~|a+*(s zZ*1yR1lW`sTk#%j0h&)B30BW3tB}1$r;OsS&gC`v%Tgv9ZV@*|s#ZLgN3O#& zXd4{=URmG~`4pf^js%rxq?zHIy0$x({IBHeoK6;OQdlj}l5U_6va9v6f4`dLrJ&~XrO%m#D zD4Fhf5YNX>Qk2Xu<`ljj@<^%Kjztm$j1F0zje=kl?=B*DV#``e%2$$YueCmX6v2;u zKbT)XO@s2Qud4Ech7~M|9^69_HaRWaxX!*EO0Z)tSqL&Q;f&iqp+K&Wk5-G9+aJ4P z&IIO}7v)`1z&D}AoSnlO*um0=+czz)VB$3TQsnY%E(3Bpe)s;zWLWjm}MW3#`*L;y+v1l|oQxZ9sa%C+)MKntWi#I>0h16TBq&>2g~C zx@Y)F4I%dEecXXEbk)~jq4nLDO`p=ky#cBhR|=A6y2eG^4qX{ud@~}PxBl=OL2iU+ z>Gs3RUP*rbSK@N)c~k%Xt%|olO7%MDsIRu+)k|G&s6@R8n-j9#adN-&&vc!<4V;6a`+$C@;*IDmXKh6$*=+LzrFnetO z@$FZg<4DJ6pKGChDB=r&!V}qgmRU17sZYJETN=eHbd^t)+{fq9Ravku+O*iSsI=l) z|FMg+((0M-Gg|;ZR{FNyhUBy%PgBG7;69~bh6Zd~z^~&J?#YX6*g3q$m|TD?w+tlb zPTSmt|o+-d3oCW({aKRwB390{qIre_`Px+#;<>r@XMuF}n>FaT%_IW!!# zKeThK^hv;J@BwnhIyICmp>HET=C7xHXWG8g`4i0hJl#m zHulrc!ub7OQ^Z>I3Z-KmtZ=_`_8EUrD;48{#i2F|N1ZWRbFh1sZ#t7&rRs&KXKV#5 zs>@`rbcZlF?-h=z7Gzg@;WZA&NHlyuUbrq_`~10xNzGsCnW2YXpyD2ta{AO_qTArd_cCB8>CL9i&ICCoJRZz|%61t%nK+HgwpY~J9N z<4a%zO=ey&K|1UG&;L|H0;CASpl!O+c~(*HXa^=Pf0w~QU-iC)YZ-8p1-|F}W7MTh z6D z6qx@-GE;LKUs_;mZ>gt)V#A0Wb(_6q2a&J&xDkD2MNKAVO_*{|U9kk$@j^P%K~dX23Nkn2z$X*VC%@#Mo_Bh+j|4rbdo7)Z-{+C&OC)ht-N}kRxIG$lWx>_o`wMOAkxD^}wp|Zqty<^bz$3>%X z+fi>mFU4mR-DhAO++lq+&8hUT6~#2vnM3?r0QR_5o%>|3_A#jS)%@*;H0(px`lJ=3 zGNJwQ`&1*yJuhml?1?6sRht%;aC|O$BdS!U` zt=_Rkq1B9e_XUO(8hP&nhy!z~kD!B__gs%V7h9c1n%SF--r*SIORWwhusrGQIOS&v@$(dr)b6qb-I=FMVU|htfSHz+IpBw>lAlV8 z>i$(RZ^EO6@uk1Vu>JX@#-d;7nyUPO(EzsA?v@x1dX|8#F^Wb;U=us_=*w|CZEA0| z?KgSlPhmg)T|A!eWJWUmwOh|bel1Zwxr77w`vCQ+8IuXdb-nhPJ{+v1bPd|=QDS&9 
zRp?(&eqc~AoaG6F9^i34?PN)2{HPpP!$b|Wh5jk+;3c(lI6>8-h4;`};==(ts81zfT|K4uIgMQ9c>R=s!- zPqN=-A42HCP$ zz&Y0VQsU8RnBwRO2cGYi+LGTuS}lq=&Enz`h~Cczsoih9JzWw3+1n?$r5`3XIvq5Q zKSuiBG_d84*GinV*oqZ%5Emv*yo)oFXbflm*0&}&?T|&6B-?dUu4dSKn>c6}q(Wvb z#kRRrwETLAVlv(ZVi-0$&VfH8oW0aScDB{NBWdMbhMt$6?bU@==v$diFu7a6J*O9* zpBub21!}D_LsN~3;8arXdThrsyW+sPp?Sp7Y}ZuuuPY91kKFa%>WY***r@YqZ`hyf zS6)$`Mwmn*g#kyb6ZC3L{gx+PwneR5!;6c&iV6;JT92e4KA79qXjH7iDj#X!o4;bh z8--Nb1ZWlXOT}%VhmB;e!vkps?`|7j$@Gxya<={$4IDGTKqR@>vbOwjol{?*?3r}z z`}U45A&y_Saf{G;huhsN=hgKp=!>zrwv;w76YiT2^fckL%qsM}2YODQx>orFsg7i( zK>QzF0Pq-CeKV16xhdUax_&n-Endl+74erW!tyt(TY%T=3RA#VvsaZ5Y~O}2qAWO<&rgyCq0I1 zlX@BhUjo|$wwnP~b>aFh=?)BjY|0!+ThCcl@-8z8jHAs0DC;hnG>F=m8cWJGM=uEv z*5p#pIh?6B=g>(0yC#y62&0x{#YLz5GGetH`(oXv4*L@z_X4!hPvXisuG~u7@%Fa6 zLi7GgQO>p0XF6g%DVD`@R(FDUo31$rKlwExU&Zt0cMUsYEc8M^$0Ke+o@p;lFz|tK zun6p=uY_%!l`e!QhV3UgMxF5c(rfHI<`hDKvGJqkEo9d}>sjqTale;Zq2dmbY211|hU3uu zvKDVYd_~Ur{A=@OGUvX%&Z-6et}?-W|qLI!kO zz>OD8x<%iuOemvC9^vWILoW!1c6gx55e(byuwYW{mLm>v`ME6YQuJJ1qL14lt6J(u z{;R0+t_%`R*6*Z|>Mb71h5McfEhzn;4v@?hf$&A~;f$}jAhFYjaoPa7;CXGq@Muo+ z3q!N%L*Wj=;N39SWB8+Jw~4nw;H*#0BW6jy)Fl_aFG>CMRsPpq%3iGS0j6b_Z}gnc zVh+^z-Sf(sEph#JbdKo*e*p#;=HQXxzia8mtvefOJCoL}gZE8OpUhi-P#=2p#D{9@ z^Q*0UM?B&yGwWaz&d8r3QYXr<$@M=@UYndBseCofbdx8w1HyRTn#GO2Z2(w9c6uc~ zSbFuz&MO)y!VOc^y8wdT4Z^phCe>_oVQLW@-9K4;^2*(7BpXWCh1{7Vy}`a;UojqY zrw0q_+x&+xvvi@`7P`CjF2rP(4@~7$#F!14-pE{Gxi!F4^6^nOapx}oU}_J}hrS_( zvFke6{bjH_5=|kc%^%`FRsb2-Bcn=|TGn%6V8TB%9yJdl>{0BA8uwogaxo>Qdm%J2 zdv@tMYZIV*D2#HVZmMH`fS@Kq31_=#QL~zLIIzCl^V*l581sHPt)D5>%q3kX=4K*O zK6`k=P4OPH$N4)fYXLcAr$&7kcssc*>f!fV$wIALMKqP*;s$rhNceyH*QXhE)it>T z>QTA;3AXTAopg3uREAYz_On`x3&s4Uk+UM=i-gLN@p8`*hDfQ_cp#NhNKo1RP~I~S zq5hC%=|V+&Z<|xcrbxHsIJUrK`wvv&QesN=)}p3n!+c*W_5< zvWH^yb@;!>A!pi#8WkJFpATmBQ1U4bUmTPQk3Npzi0q9{Nk;=}Z! 
z3gdm#ukq6vFsHT!zw0h)%pd>ox&&+fAkbTmVW>G3i`C809#Ac)ZNsW^<24Rndk)B& zW=OhWA-OefSRKv1mTF~`?sZ3&pH?&}iCrf9S&XY-A*@EqL*CnuHFzC{-IdnyZEz7) z)%bP&j;gUT?K@Ar{sjDKA19)?XCQ94gCIP1Ct#Wr!H>?$=$dWd1PCd@(-hW zeEw7Bs1NPR=p?OSD(qE<1-PK65|5BOc9q%eM%!i6MXA%|miS?Z$AGvHe!Gsf+P6q{ ze%u>rduLM*kLC9OBXOKIkf@EpCEiZH5tp+%?VwmvqNpNtXiULNI;PP5B4RU(B(l?D zbG{gcU~4$#BD*+c4cyj+`}z>Gm+5>3pjSAfEAhU~nF^R0^VwwvVch^Mh9k%l)J!N^9V7!(Id240e?_+qqS4g0DhgjnYb#WP)g=tut@S#veKuC<`?&P+sAgb6bToUDh47PM9 zGKBN*{9lert?Q$>9@t!aZi-ZmW3cikbD8MUZFwq%7Y){k@Y2Xvf@2067jOrN=~rK4 zbX|XHQ$IvcZ2jS9BGO%f%1)F=;ns5PKwOV_&By_I}oMVb}jiEsH`%Qy-0V_#5=fVe=*R5-(d~+xTDGYSy-m z+6&7WG3UHG?RXs<_c&RR_L)Q1#jw<7nr4}Cffj{IFR0!|(jckW$_WGh3~2ms?rtO& zPK9zEeNdeM6AT2|WeAll6|MmL89feoFS@Gnb0s`ZU9kHAUj1K+kcaP}XLix#dSUH% zyl-gPp*}OHR?P%vh~Dh0=yr?1P@HrgQx5KS#ibY% z*MXOOkC5z~H&Y*e%?t#e16cdi<=d5}YV4gO*;V*i@TrsWaVy0NAdbyaPP?7Ht)`8w zF#}TgdB&t@gtw@e@!(3TrRanX2IG>hUP#`B$s^Pish{4=vzAlGnJj=AVf;WE%ujo z>j#ZKh@43D+5L?dtnhkG|1-wWURQ$3=dABdv}>iak-enEb5*WNc%zD~R zzOcd62{~ozQVCxD$OLE(c|5$qf0_*nO;yU)a0%LUW06lOjta}E_^7=XLW@l~0wg39 zJV%*;RI?+dfT~!M{{f~!9p#su#QQGTQ2!k>`PJ|ZY2^h#f%Nq9{COPUcY#4)Da_*D ze=Qf5?q^WWxIPC+X{%c!p~Y}cd`wQ*9p_S8ALM!Yq_1xXVk)GTV$ymC*wLJTkhChk z#aCag-Qu@gZykxBn=6ru!ue_@9=W%P8X2C!?BgJprf3FO0_twIRjI+Zi=|87WdHUG z6W-s){{9WMj?h6#?mW-F&yTI24o2kO@~rK`m*OTb_9!%Sq|DHpJ4VAsi{)^LiiyA< z54k+3K0b^TohtkTXrZSE6CC5F@ceU>!$8hN*?OJBi2SDg2fDUR$U}UWX7+HxyjeK- zmLMPoe}|_(xUhret=7`StI7rYM%RyTct12iV5bX?gP7p{Zm-hguiiPv<+}k!+=cI| zr=$(f-HRJLiRhiUKjd7Bt~9Cz(k`Y@gk6&JsvGAa_mS~{u*QX8bIm$memu0OFST|m zeG#g!64v`+bKOqnw!SKyMfe8;F(FkJhv#SkcP4n_=bbF(R?b@OQ;8tCathUl42%f5 zO4t~Y$1($*2`A_aQEkI_qSiADux~NwKk=S2dWf2B74T;3AYL6z?Fh5Idv20_B z>h9%Ny;}B}t^F_qL%Rwzz$3{UY#hm|TD9(D+Ymf2OC0VOG6Sq!fTsjQ?yD|T~kZA2dWdDKo` zld4PjQ1`c=41bUjpju)#$&k}6?A-qKoDU{E!sL|UR+Jk{q=`qCsP;0=WB#d*HSXkV zK+qtSrJ(0Kq}n`XPp`nB97O^aMW_pHf}+tZcXF<9<*ex~c?sICCZWVN2Vhe_*}ih!SS z^CwP^NwMA~CC?GFfbGQV8Rg>)S-qusbbY+h{!ZB>D9s*9c+*B@Q2?%pog5y{$1p?W z?{~r?-@~!RdFC;y;gJ0Z?G_pBRPW(pF~dAotU%pH|CZ@!-sGjH`=>@j5#x(>LEE?s6<>be-C$y6;SAJuJ zY>D9szAYc>W|f@nJQQ?W^=zh((%tFi_<{lFw=h`pSDq(C@8Zpjp9`hHZpR(mv28bj zA^keb6T;{weg75&;now?CH9Wop=jcC+HZ35>s&EJmdtJ=2uR5_nk(vtm~KSwyhi4^ zYq#4^#J)Mb8nIN~AfTZAR{7(cuY?2L60B^P&?9cAavP?->p*@b#E>nV7pL(p77qrn z=G1Ay8sy$3MBD)%63SoAl9$>C%8&(Pj7pU9vPLjL`o2SH#1NcBpPR07BJdxm2(%Xd zrHjFQRtJ~XIpo11f!CJYJV#5*AC=-UX1M6fYFWyLaoGV4FLdumRelX3V-P^zyk12U zuU7P#uAWaE8p~5Sk~ro$GNT4t<`Z^zM2?el?Nkoq+7ivtBYZ8aN}sR7Xys*3K<~(y zFby!-o>jy*Q6#^S+B0dvFX9MO{$Oa0JE02xqd5@vqD*;hlk_&z;D>W4hs{m4O834 z(OOtnKdE>QmTQG(A&FCKAp))mEaM3d20)Td_$U z?dVTktK>XKKhmDBj!)DM@LpRagU7FWglYdRaUjZ&t%?Es(H(MJ&u?$Q?``t$d~i_j z?9!EUFtO=BHnsFSopaWOY_uoPXsz*lw9$3tlejj!XR>N)qkVJC)l;V&Abbw5@pifb z<)Z4!F_7?R>*4A|T<9j2p6M;=@xHp67}KYB=H3(Dy*z6d+yVr-^;?)RHc0e(3Z^}4 zXD0`yl#jH0sV7Iu2A37r0B*xG(`0wBV)xxYXQLl;e3i+6J5qvu8jtd8nrmxKLod}- zLNxC`M06HFT)BzTWQS>I+a=P3J;C#($pGKzS)ckN?l7x=gK5riYxN0f?m&*J`g0XVVpGW_^T6?*$XDU8Rxcz+{xrtR=063d~062Om2);$F zN(kv#f3o9++~yF9yx?k}*1wrA)t*IPX33nTTE=7th}qCy&Q+0X2Bw#vkVa4D5Yeqy!r{DyZBrcHTEd+U3s#^R^Mt;`(%yLi5uz za-OaNFcu1JTwPI`2gMx^{-+D;&)&KIkb9c}iXZjCH^tScD7h3IP}%?U-QB=tKPg`C z%YV=}0W57SoiVXoXA=zd!Zlgg9~NDOP(c&NgCG<=V!nc9^CKiiB3$x=A(HFNZzgcr4{T*i zBqX~=>V!lMk8Lg}DIVPoNvq`!&5hlYfDD%~mYm1Q8%qHm$c2Wj2lmuzjlT7PCoh%CxQA$uEd&c3GWv zer*m=3ajDSVQ}}6x^TPG@2t2?(o$Rz2zCBvgm-S^bFfT$iH+@Ye}j8py$(()??5*2 zqLxF-B5fF8Sa{Kc(SW}`&N?hrI9vI)Zu{>pk#aBzli^`*@$UY50Yn8rBK;#|oEG5m zRLKF{XgwjT0+TaZ{P+RFY^ji?Hrx=h@S$ohg-~I@%cx&fU%oS}aU#&2Lan4nO|^rm z1U$tLvtq(cD)$1`*Yo(^qcg_1?&weZ8l#rM6XW6xnc2-L)gd-tKmuuzVFiF z)O)A!?*ZTgMB$AZ9c&L-7}{zpJ-51#)o~AN6YcI^F`QbBsco4DMzO7tlzQ}E;%nnJU@DUF!5hXM%L80 
z?CahN&Xs^Wa600pzxlj6#Yd)g=}94DqBIQQAq7w&QTnB`-BPvdL}^#kQT~hWzd<&q zbLK>LpWKMn0wtWZ=-ho({%{20H-O==%Xy-7xd9yT^dx#>g@y16jM@Dfg5g@|nBERB zd234h%?<1EfWO8xnbg0MOp8viXIvW{GcDMK_0~>nCtZQ*UuL+pY`mDt!|EMijVc^g zw0Gnix~-C;0%06mvT%J3HeXI+9=`c?s9XPX!RZ{#UG~;{{hdHWCc>i_n|=mnEF^Rx z&zn}a`pr5Pzxs7O#&f~g#8E(-KGibozK(%p9oL?l;xImJ#JGkgEabWwU5l|Qh?xf|RuaK~&Y#amEMpPE%xNl(y?txt zTD{XzZ5{GX4c`Lsg!Dg8(KhU2bRNuWhk78$`_^1F ziyx*k3NuN%F^41zr>R@4G_%cj_Wixj>tibVnku zMjkR8t^ZVQ;J`8byXGK1cUiR8jyBBDl|4P|pOmmhR`wgOnj}ac&f)j>RpzG)py8f@ zu-T)I#u-#G?!F%jgPmoN;t(xS)OLUm%;AJbch(Hn)!a;qWB9(JBD`CRm_W7a^^=gq!Q(^ZJMn@ z}7%)hZFSRcCy zazCpKI8|zk{2<0br)J|NuNHB*zxV#l7$xgQl;&kTb6m~J%CyyovebE!w0rQrijCze zC0;XDe1{%zp~W0#l2B1$Gl$PQw9-_%qFcH$W7yGH{32ln8&wSx#xHjdnGTz(w8hLV z%-1@9pQ&x0d6;p+5cwqGzk@{Di+Gq|fm*U2?M^<`Gptr0_VS?@>rJoO($K;7;+^xY z%KQ0?sU#Jmtv-gcxl2@|*6lvno4=QYfVpIQ`s5Rx?Ncl6iqP}q98!On?2%aBGpaxY zo_?78TTLe*sRvTI?(Nm);Me3R6G)L!LFYK_<`|&%mqPXgDXfJL-Z z|2|d)k2G@&3&n#p;Za?UI@!1ayg!wZ{^1FEJFw2_c$RX#qZZ6M*OHG}MmAw&sk_<+ zV#}Y4?R!*Rltyr2;@JxkzjZMwEARinBW@UPo(0*Mq^2Z$j|Gz^`-{Vww0B?+YHlMF zULy4MDG%E-P9mpM;yn~Hu%_dp>ag!T^jb93>cuq6Lq@>?oh=AR(g07AKQ3vNe6O}H zf9qOOFH0T>w7vSRbyr;U51jHtlh>U6UPI9rN9jbd8&75JNl(ww9B%^J;#17 z-QC|_uXxauds_=ty~uTJiixJZ!c65LKCayJ2)WlpsFUIFZ)IkT)@DUb?{@V1dWz43 ztkSOWb?w@f-iU)39^+Ouy8O&!FbN@^eq!U{li$IH_pH62)3gE8hQEW=Neru??`axu zOnB}XrT&E8h9bi1oF(@yZK$N{kB9BroNLNo|2Yg;L7B~3Q^rC)}TKlCuI&D%+tEdS*`6hwXm*#v*AZBPcZ#top z3=OYFszS=RgS^=?MIH79=;d=k&wO|~k^isU9PiK_n*}U_0tY=DxDjBGhs#1o_l7qz zlM4k)27NxXb!3hyEW#mhbADb)9I{=C+YC?KlFkniVj zMhcd){0(RxD}L=v6TI3Sq=3Ds2-m5uyYwls>2-`OEA>ab_tq#b3PBfj9Fg<3bl&~1kOg|D>`|X3Zd;Jqcp((_`C5-|G%9R5D-5vY?6KG;{m=f zARz%qkX$nqfUO;j0lYLbgm(5FNJJ3Al(U<@eO@e&(=oLMc0@r?Qc#z#mbD1~U(3ydqyPW_ literal 313530 zcmY&6)7?|l zRBc^ruV;6pvZ53+0s#UT7#OmQw74o57}V=66c`-L>m`7gA^7zI?<}q31_s6`_x1-~ zEbyiL^-FL!RVh)hnkk~6Z~t#4q96hW28>61GJytz8c~uF7g6^DKkl@vtG3Y~;T<|I zA^ZuG8KqB=V_F%3uJF-}+L{=fpksy{H}v)iDON)!M!S?HMfznq?TXe^cM*>~y_z(a^4A#%J>0 z@619!=g|9*;40e<@S)xIB+kbuV0HJ08ZL=vzrr-uN&o`#>Aa7{et6>t>nmYTcKfm-GHy{kMR{ShiR z&Oj{lIp_Gd38|sfBkm?Y%D7UNkN$94?08e|<|=`asU;_)AI0hH2Lid(`;6W8T@xx| zhg8gI%!jPc{r4MV8{;MMq_bIOr+2QaaV9PkO>3(salL?{4|c^%#dU8b(tolqoUjn#yd<%MjmW|NTB{y7{kx(ar}n+P0fm;{QjFbr z|9G0&2fwLJRxbMiskRT_hTQtTw(2-JG`r(h-)}xSy&OLOG4}1&NW|_XcO<|t3VPr- zam|`%>vfMw`lJ6o>C2o^siW3+Cd8S;&UNpzBPJs8L+FY2XxP1V7qFX?P%Omj_eo4M z@1h1J2i<;l>&plB<(4Yd-^HqJHHLI6@h9sBfV+1%c_nICdE3w-Y7*4C__TRC_i9?9 z-()~q_q`G|$y+$lGc!P|em&DIB#s*`ptYApP?F*+%RGEWx{!=?H=2PBo z69;PZA7mM43~PoH&p$>ykt@1=AU?e29s`&(`dW9O#Cvo^J*ogm=BKEKc*hqAT+@R( zOcoX_lYj8!yVhQ8VNWqN0SnPeqZ5?kQ@VqNS53S6sE3@*%yM=CtN6?#%>-AG{36{; zeXO-EJJG0R?#bPcb=#gghg1soFI*tzUH{kIeG4boYx3&(bAU+2UYJXvP$aJ@{aGA?_o&@6ufh!6P#Pq~9w+{!BQBW~66T$&9dq zzKt1#;5jf~n_i9AzMUHBmX3MZtucQsC^7Q|+mnzPh6s{Ssy%K;Wgw{koum1%Ayx{J zy}jvn;eFFeN7(_W2v^aK-+4lYS=kwK$jhMug!nye5-xnR2L zizc|9er)X_CCtriQ;Aq)hS2ih0=4XZ``7Y+T2%HG=zPyCd^p@RFzd(^Y>vm?bK~(V z-(48tDGztRFv3Xk%=FfcW<><%vMEV{0tJRTceL00J})vIwBb+)u#+RL{Gyal!Lu*Q zyDP^TBBG&1b2LASO4!LXbNmb(IGsrkrA)Fa1Fwd(Ogn+2M3`8(X05GJLI)-swU1|J zqd42T)rNz%m+b?l_SXGJWyUp6x0CU|EW1()HP#|bclpCUVK5N4I^cF)ZPR|BOv48Q)EORI z`NN`E&qt@H<4TmX>+WA$x5`S^nV32XCix*94=n0uipJ*3aFf6bR?_(>77j98p1|Mk6gZ?56BaEBA?T{iZuXQRoF8oYUZEF zhAeWC;4p;gp=5!S&?G9@YNKPdm`xEvpL!nF*y(mPZ(LTm)t4j8uE>@WXnJ`$yJC1K ziU_p$nxUVAy{@#eTt>v!soty4Ndi_QGjWDWB7%o~aXia?CV|M-0}0Ix$!Uvb^XC+S zXyF)@bdtcI^4Pz;lj%(Lvdasq7u~aM)K)*vvN?UciVkl*W%e+CT#s1Z+|ilfdpY zu*cU7hLIl+T^|}^8E%Ce!iv_JQ4{C(&E#q{^X6_& z4=0-)nxzi`pOvLozLV{)eya3er%pYe0} zw(?d*gUm1Co=>TVkh=_EF6vr^E8>kf=1y)Ywhua-P-Oes0b=a1KN|PSHCPe5CzQ=L 
zo;u`#VLQ&GpV0pW|SZl%oYkeBJ+Z{g2tAj{jgCIV`pdYJw|C+aM{u^f_{MjJD#*!{kqCSh@2p)K7q{Ga_*+{yQczZiTK>{qnPzfl z|G^OIj%~a-fe%vje#WC)(WY%qY*;2MU9-6-+|sP49H(IdJE%A8AYjez4;lEwYd6o{ zcmdD!{$(y{Lg&-f(X$d9tHJOIHRuM^5q|r4mbgFu3?{5M-w7`~)niX6svKffdGRo! zN#3w5Hmtj1@zw4MId6fUrTM+7Wz$t_4+-S8W(qUC_h~}0!mKrG>lQGXuWou|!Xzclaoc2YA7KXy~-v zv`ezkXx#iEV*d-lv>CRkuhf8BtH$hP*1|NT=9I)nXq=7aYa({_>m=^-)SHG6xN684ig`Ui1kWOB(Q7mHvdw;Qxx zcli$2I`zbsiUpjW(wo$_-vt$Xt7iZV#1w=f>YW3o9NL+NPQu@?Ab}xHRN0@vn**XH zylWGS@c7<)CA1CXO??=z@i8-C+0GABXfGZT%5>ffbFnCViLPIgjdWwJH1TsvzR($H znUPz3h*)~h%Ff!Jq11vGwCib##U78LeM5nNy1o(~yw4kDQ+qWkzWP4C`K_T?oO-|4 zet`p#COp4{F3#Bm5*xg8xKHYy-c{;8LOa#k{xECK2=T6+vVd5c>DUoLczjPHVoO1{N2UM1G98v0Y)@Al-w+aIEK9voT#_=)5Q^^`rHEMhxFVV> z%HZF_h9VMp?I9ArBReZxLpq{|z}Lv~Bm9nwc$5Z}W=2FAU$(+DFKGKxN;nyiJ9=XC ze}&n7e>TnzB9(YCS~~F{#UoYdRwS6CbJ{fkt#2Cy>}btKYEW5=k!;^ZK(+2c*j{?72h_gvGS{~bxS`?Nzi+*C`Qk;aTOje+(Fv7)Im9i?~_-m ztW&1tIy|QNy8!2Vu-9V2S{_cCFN?b$${t-IcfY*{;mtjePbqb*x^3lUM zqGiM!jG>}))qSoV2Pj@|;ZJe?8K#9eK+NkIpww{?>Wn|AV9uXlOFb9NYqV3y_u_mU z)Q*DMZ8h+*O}j#bA6Kz@KH4xenW|QPuVvIeuNE+E;<;Nk#;R)alfnYD^I4qjzEZ$h z4m!oF1w*Y0{-M84`kPb^SK$7GAqcP%wiPD8-jr^@S(;Wl1!Kf8YN8orfK%2V@aPwz zJi*S9t;gUfy8N!fZ3jT_q47u|lk6^u$`KB5Ov0lp__Rm5;QSyK%ul4D-i|s}!i|50 z;NzDS5zS)JMYY3*m}*dZ?v!h44bz}iolZ;N#qY2Oz4<~B{9e<)N}=dnn6)S&RiDkU z3h9C}orem#$;=v`0Ku=fdlh>D5>8(dBSfZ(fhrTFY_lMRS1m?5PR^1*EmdjBnrc^1xkic=wc#x ziwRUx6$OCO+i^Q6jV;K#L}zGw!^Q@Lbkep+AL}Euj^_hi8)f3%TV&^t*pq=oT?LzS zz7p&%F~hJ7Js|~CcJ95WDz(9rbad(qAxCDIy}#c#2;+s*Aosu+QJ2X7gzp@UH}eaIbU;8PL{!|G4?^ z?d%ROyMD-hY{hjC1phQ+hk8@cu2Y#vXE|<0b{;jOsIHXp&JTu?YPqSHGkdrgH{N3a zi_JzpA>mj8&hqlUoDPa{q%#<>$bkaWEIxYZww&i(jrW5P+>uOp1f8MMYZNQ#;{Ltt zEJ6UjJGlML;|I3!*3bYvzFP&c_`R(Uq!AFp9pDCZ*sHbM!e-i>#AgE*c}{&y z_@6CY{GGXabq!t|HR1n&8C#Nf(ywX*2V#}iJ~vx^92e8C=1kzy+(p}CogO+IDVqVr7gj*47lG5g)BU2CGPj| z+6Q7jV89mF4yHhl8yTgl|su~OoM#Mn5HD(T$Mj6>cidB#rw1=Hko*LPUs9nSnAA8usOtvFs^5@UMN-pYev}A;Xco z2)})yMYE4F*AT@O+}s;+9s7}MG6CGdc3twLUck^E2X1NjVz0-j8B@`(Q$Cos5eLVx zwPC?FLtPSFaISs5IW^zQNp9g=HP-5I`~xvRjsB+B8z5i5k&Rl^7bEVfD?87ZzJQK! zMHw?*=o9MhbF*6BkYm^(Ojscs)wHX$XlI&POoE^yDl)9ur*ie!UG@^Jm$~@J1R8;g zCx@}+wo zeB_vkBa{LbJaxB(D>JoAw~umOSwl1l6s>|omn1I1vCzKF;Ks}~`&}67W7NAfb7Dl? 
z9MCR1FQt0vXLT}VMEL!_636N`rYGBBPGOF)-ups>{J+ESod&E)gOvtxTcm``sFTUW z)qyhVemv?b7PZXMRPp-~FA+0*vbmI+>6BZVz4}zO|Fz#X&c8JB)i0o!bI|#j-NtqG z=KZsKEa4o7qb712?5C{x>Rte38x$EN(%w2zhqn+oI1|MWYdt(G*XhWZn;>`I8?^BB zW3rFr@~61fkwl8f$pcVak(IsD=xT(1+8dRbkpiW%N;BP5{V<8am8Xl0Ari{SYY@$W zWSBL~g5OPZ2S)DL!r?8ZbQ+zL!2q$1A`=1&7Q3IC-g`=VO?9jgDND>heH3K$C(&CW zfDFRA_ssnDAkipm$SJWJ>f%i@oHSI2rqRqJ{DdgjkOZHYxx;A!2<72IM#xfbBk@}G zB4>^K_$T~-ML`qMe;#tFUr5Jno$mIX9<4S-7V7{8{ha>yxZabOUN`~>8Szrb8Y)iM zGjtx#*VOc1L|mbmqmZbGslbr_tavr zV@gRG+C$zr#kcuG97nVn1QeDV_BOIpp*%eaU)p=~Yo(J~xKSa!eGeP5Q*GQ{xEbY{i-EdDhk|E;+>P zQ_x5hiI7>vwAG2#l};W(b<+S>DmEPNi3<=2RVN8kvT6^@F5+WNb=&Q|%za39O#I0M z3Fkw2P&NJ*xlP=G&`{Bz94GzVelK)We9-;=`%~%MWI$J##H*zfs}J{M%wE^Nt6_pAg2d@CoIK`9`I_JX)-&}$ariN3%A`v#RwZEdBm#+^e zPv90x(~i6XM8)k$?7Z7LxNTla4H)$P5XraJ9}l>g`j;z&-GO@Y&k#4Szny&yB4@5G3Iem>6S-C3JzXrawh2eD=Tuke>=e70+qkU zQwSrMZ4`uhO5(Xo-2L?X_X>a*IzP^48!>k4IZ$21Z|*vOI3rCX%ccL<&kNr-5n?i+ z;0=zd-X{0Q-29c{k(;_f*`yyMDJA#(hIBh4i$9 zNAX>bp6XGiPnWg7;s)(%)N$VVcFkvaPa>7FQ;QBr^Wvk}af$}9GEG!+m=+y`-%AUq z1>!p5E@c7Hv0Ws6mLZDJBA4Q6C&kT!H5T|~%wPo{Gw9rZhJJ680FtYod3TNNWT<_L z+L4`1YgmbQ@zl=5oS&h;HU)fxUTQJJwyFIb9=Hu)UXWfe0J!#%Cm)lM&VNU9OOl5# z>^^n;uR&kvUgc(hicvZKhFs`LNWb;@>W;13W4N=nJC-Ea15yBH^5y$xB@vOwvVR!W z{iF=y$(0Oy&m?;<&+zBACTi;BkajZE{tua-++u=i<>{j~w0(4Eu{ej|SGX6leYu)) zcD9v>mQ;P?Wc)sm$8i*U(9O-i(_aXq6eizl0wy92e_t|1n$e2)zQANKtmLsbGN$q= z3Y@d5N2tLWNcZh`6G`)18c8!V$tFCQXiO=24>RkTQcGkQw98gpEx2LIhP2hg(+|@t zu`$tI9$(8h{GEk@L2piOr7P9{JImHctnD>_n&s1*5^Qn*M&3d_7qzTEzj5thd#pmx0s3)qXBlfo;;*>%W7!m@(`nJj~1I zD*Kl6sb<}%4*yhhty{GeJIH2r_W)$kbkKcc`(k!xHx446>jtGbJzIgYNCrKqsoHjK zlZ@_WwN66)joDP19H=%&4hv^dh% z>s}R==?<-sIY?RJ3yKyj#E91rrxypx^82=ZJ)pa64ZS!30elw0|39VUEykGHd@pBr zVT(_fEicJurBdYp0ofDk-!JEb0-NVG8EN|sSVVivRbRI-ktX22GbQbtHXxho7Wyd0 z^rU15MR{u-#;E3+`G`6DGF@9x4;|+snSlLZjK+joK865OYD5f+Wj(7Xc&CPD3`yHe zlHc%{V`rh^o)j{wN9Gxee(?PewP4kPvmo@Nc+NW>)Lz2rv5c?=)*<>j^WiWu^?4Gw z&?%mQnC9=;m`EN zs5b*C(i)qQ`sKbqk>b8iC;@#o<)OF}ytB zgyk_5eQBL3={PNjOfm9ict>OwO&8l9M^X-nPdVzN&dxO)2DJ~*WvH-*b|P{5Hw(l8 zY&`~NofS6$MZBV?Pk7`b4Nm@aL-aI)L%qHBfP^-#eD0wX?j2DzYFiAu^SRw*R|@{* z)N;5TM(ak6^^7Ymtqm{1eYX|%iaP0`1>|UE37~&sdt>#A!a?-ln1nHYsYY*;5`&OM zA6|7csVLEupjnJQ0dYJx>5gYC-vDz5turbm4f`Q|UAk;@jcM(1;G>`czxAuNCQ}DO zcVc$AZy+|Dru<)bo2CSzq6*S*%^+?*p}s6XyQdJ&y{1_U?uLB8O^w(VvvN7VEuu(6 zMfy%g5=#0GW^wRvm8)J9n9#$ph>{}%KPMM+>u}RbP)VBxII(A&AG*kMLJ~p&B(lrZyXE7& z5FIssaZAUvZKDBsaR#+#T(S#^Qbe1A#t!VGurOrHqYlG>5i>cE4C77N#0`D6Ec7UqfL<8<(F;E%7ZuiJt0WxhL2LUb+h1l6=eKZ_8_e>g8>Nc z4UziC>1$_Hpon-(3%ES1asQ}c;^Lm)sL65pM#4nw=zxPG?SxyrID(WQ`CPWH8S}eT z!g7TusTePNdD9;kfEvGpcRxwe^~iFv2njPytZGwEKChJ*BYu&5866;$ zL7!7U)laz@rqbTiX&LhoSGXdGh&cL87s-w%nddA||{@@kgp)a2#Y@xCW5WK+=_v4St)*)5!iRX~-oh|l+OnEq%S zO{t0>TH7%NW^o}#ri-P7LQxZN;(M4Fh>y52H8`a+;F@3D^R(1`@6W#DuMzmgvIKU4 zj&koD)|LmIBmadxDUNR(Mv9uha9^BVkAp9fFFriPHfnXBE#}7=&wxid?83DdKG(ni zWl3z4ejxR;OXdJNqo%E8rLye9*40DjxsN>_#uDw0c^54S9T66m(QeRouec(@*Z+L; z?`d4Z;< zsGoekqtk@w@tED(%7<|06@{SU^wF%@<#2J?#nN<<0shQOOUB0tCV-o}W%d7T6O+on zu%}H_K+?=xzylo)_;T>bq9LI;xIiro7yr}vshlW(_RhOWA>r6e1xa_QwLUD3)%}SiMO> zujJvCHdVeU&1zRd3V-YD?++|hPYYi6aXmYM9`%0GH&(hr7{9uv_s(b+9V&Llv_cSt zRiSVx*f>vjrJGACxG!8+nXpEX;-j(=vzB1YgZi`3n`}+dK2{}Wf=l3q){TvP{UDL0%s*(4NecUz zaX5z`oHEf`t0B~%vF$QMA*npCb@Ah7Ax#A%*)$WZ!Rv!+A52*@!ZnBdU~1#Qi}^yj zw#E*Fq=C!`XW@*IEFY2+4g&l%Of7%FR~|M5k35|Vi7~ooI&|^;`uqNeFLwU}PMc;c z|1>J}>H`qvfzZ0w?$PksnpVC+3PjIk8z|Tfcc2&8Sk*YO)eOZ)DnNeRhq=@e5X-<4 zAKxNQfQFnRqpxp7<2*qg7t(G)G|Z~%g*;Jp3F(g@wIhRx;}4}GhK4UOLIjUicw*@W zM{vzT3j38?CiQ|P1t6!e)Gq}WFTyld&1tj79z@voT?*EAU_L~ZnkU&Z<}$5S2`3H% 
z+-S1jvTrX$R>g|GO#=djPj-G@UBRpjLloWnH{F66A{GA6d8g^}WWtc8;~C`C6RYDw zv|TjjtFisBni7NdvZSiM6F4_&q1ieeV?WQor!QH_dqq=9KALH0aY6Nep``Yd2ag#L zh%It3h-3B9^^i5YpvC-mIJ_Xpd&csomuNoI6x-UOm3aath(?1+H`X3Avc zA2s2xFGe%}lTtE4@t;9YZR>IY`>WkohdS2R1{`MUKULW z0gS03ZZ4i%71j0vEnH~TCF#(Cs1xb(E|E)7TKezDKHuNMvKS=}wna|w10$cpzsc

y29$1X)$u88P`FE4ZG*NbNXyhB{R4Ma-deAI|UWrGF0?^!|B^ z%*V(X2B$T}D)k??d27sH{~hFv9A3_xmV|cVA^7{^!OjI>A(<1XwJ@o~SVfEUeYii5 zl}*$b8TQo++vUe~miAge$(a12YK}ri$oNJb9Hud#PM}2M^I<$$M3Q;z1636o>+VQd zClaek(eV+-0-9kmbtY-0z6@$E^G-$)@_G3PcKRV**j^Z1X&HEeaZOCE3g;qcqHuoo z7a1T+y}5rmnym!$oRaG9DDBuT0Jk@IsiyY~f)e7BDtY81i5@4-Yd3`q9>uYS zE7V+ynOVDq+Ry)wBJ(di@pio?Pw4@uuphK4c!ute75LMn85guj+L(HK9%}$oe zfJl9?4-hA+__y91CalGeL1F>8-vq8cBs>DR| z4L_FK_YZwALKNn|XE>4SLrCX6kIbSU;epy~1LE*;^Ao^RL%8Na3@L;OrL>#vM(iWu zCzx9n6g4>)ld7Si&Vg-BM4vtzOkIVq>RALZ-J%oFS+CtCc&M(d3QP^VEO+i>vs-}4w+V-}Ye4~Z& z&)xNVocChZ->WY6DJLvmT-GD#HBZ!V^9uHpDaD51F^s+#bXbQ$L~D`KnHFr3_-%)2 zpckrQF-0!hkC$}Xq#95+Y*v^zM#*fYcMx!YVqBy;hdD5ltp^-9Ul^rBce&70HG~8@ zQ->yHK^n-b)2CP$AXygF&qw1?X^bJe+(fz`3}&y z>JUvk^&+CJ(>_m}@9Sge9_}iB2UM1;hj8%P>8brv)h3R!8xx2gmxX3){RYWz)6Zkz zL3jGwcGSP!bhEH>93*fRFSPbrA5{=NEo+ZoeNb@xTq4HMkjlx6g%!fadbpn;4=HW- zM8TuUDYf8B12=&~ZLJ=-m2MZ8T!fQPogv)V?mrAjG=#yt3{>R#bxg^dY{;^q;*w%k zvA3Gh>|hl=0bX+joT*xsc#N~Aj@ND+%IA1=70q(@iTU52r0LVslEHfX4%V(RhsuQM z+13!aD~jj(XgI2rJ|(lyU`x^r5^IJsga$e$oxC5@!PSOWX+$~RQ9~=Cy&)S&QZ~t2 z@R;4L!v!$)BC`#Nk3CP;l-r9CyON9WV*#?V7AL`;mgR_KMDyJjVPH$b*3d^%v>7u& zAK&(iJ)m;{^$A1A13TweJ{JwAQZ``mF?|Nm)=P;fky~TLdAIv zSe`iYgqWY`-U-JUZnVo1h7O2x%JgH>LVdfVo*2A^23XI6M29oFvz-m@BV(u%=Hdb7#lkT@?1z>=9U z@@FA~jDdJ{l{@UYu&I6hnphW?VF;WE$gHZdief6d@^8C5R5H3t!Xp(0^{CtKnd!z_ z@UDS8lDWqg9K-BFVmqB5{Is&ZIeFLp?iyvKL`IZ1@14!^X^MOAnbhWyfAL}T@V2A9 zAxxsPZj(>&A}z~0O1-^_)SlqL4=zhA5a5WN8(52`O%_lmmTM!S1*IcZFkDTmlhE&4 z(#cY8@Bl+Ot))9z z-lKNVlfM}FY9a~@YDa|$Hol?H;pjC&%+hY>9+!$XHLNnBP}X6V_(+o}vL1V-fn(}& z!vh7J*f54xH5&dWT2ZHHiBPM3HU<~GnJ>P$LupnTbSZHL>m_dd7S__kpLuExsnL5x zo0xYT)SN%_AhtXcXtwa5;%-ikaZR@`gWz z;&TSqV@I2RheCFqZOzv72lt-uBBr6>MNAt{SAh zOUV91CB(H5FdOMbmZ82(qfc+L*)@+TE?17LB@7Df8v zjfvl-WjA}{&6rYqt#Sg|(Yxq9Qh502L?C1j7D9KQRurQPECCVzek5!F@Sf(0O z-zzFKa6TC8?_y;B!k!XuwW;c@xnDN^poTaO_VKQM#4)Lx8FO-H8tY;3j6-U4Rpxy0#m)LfxT`PpKOR_z2|!>j_Y z1(pDEYd-_=&k@4i$`co9CaBr?e-QX%WS@tn_*pjLb z%&7%qImi?G8DxHn2uY=c_|YJ$g6TvSRz9GY5QuYylamQcr6g%f;ie3{(hJ2;{SR{Aj?3N{+1nOgz{*_!btxEACD{2S zXn)wyISmBe-y8}^1XBcesn->z2CInAX2obsrcjl0w18I{3T30^h0PR%g0E_@nCnR- zLx-;?th5vyAfmGQ*tOxd9MC_{V1cx!N5mm~&4=I!zrcxbX_08o)hb~xA0+&0*TNsk5%6#evqyEj66A!yi;2kUie+Vlf^`n)-?%Sn7^2 zj{imA|3l=za-9+;DD<@TN(eMV?2dm@j{w#_?RSd3hb7rAsei=bxk6%{SM~4%gkLqn z@aZGJD5CJ~WY~7vG&WdyQnB|CJ-R7Hlq_eqM*SKmCV5lZHeNhr~$dx`csO4 z{(T{;IVk{q8x8)0i?a9!j-m78FZbIits*txF=)wP&Pleoa3#K2N8yL$isU7#&sRNO zH%L$-a;@l?@DIURtf46IaM5>4b*n>cNTfRypAkZLbR?7_X%nVn3de%(^uO*e)8M!T zFUH(1xurvu!WBChlk*f{)o?>2-J8i=$XX4U7HeD7kvD79-vp+CQ<$a-gr$OIRcu!b zVdO(6!t?#Z-L77{O2BH)-`xYboVo(3oM36VMWao;?~Z zGFtb+T9TloZ=y`t)snbSQaAfX=!qW4<8?Eg2+EU9BnYC9jRzJI zyu2PoKegi`8DmL7ft z{0^!~cQtpIuDO9~Iwk`MB@Syu^Uk?L`7~-4ME2qg86dQO7x_xxa%SznC$&Bh@mW_m z)9hVQ=6E{@_MMP%Bhq(ltAZ#?;VGt=c@$5mN}DlKTqrKfwJA{o>J;_>T?;c<>fVd| z)>mzb!nN1g=ZW`g?U3y}J$OtJ7w1RTA)-76V<|?`X_u)+SVp+*XmCdi?cQ_gXZ7O{ z%bCyem^Y0Mol|vLHyaUj_E5qFqASa!^e)m<1a;AV8br%j#1-jkh#AmR5#WLY zNvuoKmJW8T+GjyLh{pL^ja+f4Ztr*i>aj^1V zzI7r*wPuNmf2;jd8vl;HHYV3^FXUx{wzRH5D_5YofR~@|x^MVi?uUO!^IbsoSpB|q z028>N&znysLx2FomStwUH=VW^uLSflxE0p-!D}0gloPag&r725%%H;V~rHlf#(VL&Ca~0vgG!p(SO_~Xkt?u z;^BYd5s=bcWYoDp$eSuk8Hu4gwlORw#(q6bwC<3tDsvS0kFvaFNB4bhb_NK)dPM)K zyYT?jv;|UV5qQyX7v%$jIrqmIVSl1zR!podE6RpA{waQlf7+U6?auG@gC@2jPlX|v z!i72TCF1T5?$beVd3tf3r%JFMU8B+oADoxYR$LQtp< zSXiNrQ#15PF0KLfQ7I{1L8zm@tia--Y^@DHRdf~Sxky`x8rZX?vym2>z3;urDiaJ$ zIfn9qEj2F>AVZRcpht&WTUrxg@7EsdIFR_1Y|8#);6c3#u8f{&qoI(Tg%dLYVswWP z)h$W{W6gdjYW?$74`IIf01epHxj8rs>cJN@pTX2RMEjvHDdSt=PK^IKsLQI3ufgF_ z0IJ2?J9TEi*ZT_dK7Z%vgh43mF%dXdq#l*$557u#F0UXPG7UIPmz34}KnLi-BOGcze_F 
zvEemCgw8lYbI1QvSxUGd)RX4qE~N82c(9Uoj#3(ooB?CFq>=a6dF3jA3>|~t!H9gI%}LnL#QLmzkXb85Xu`!J z9U)?p79+p-x;)zfAtmFQ>%vUS3B!uQCl&eDv;qzIf0I<86$eM|zbb~lB31{q2@o-|z%)PK3>aK{2d z2aEMY8WNJ^W&1vK06#V>nX+;&MAs?+<;jg;ga&9R5W9NN&RRakc*IZ5TUCRh^PY@B zX{KAI+$10!_smBnk;CP01Bim~pBdwbE}}bF;FPc39#f@!gz9&Egd^&?Q4pTjaOp&D>0IAL~)b1tV#)fymkkYT}s_A0C@ciS4{4VFa7wV*mpce>f}`h)(VQ zUMZM;dt*!J`3W?U$PBs04}RX3>G)O5Z^914+X5IUO|@Atf-gT9v3`}bWS%UjQHc+G zfO<)vtVoWtg~l<8QIwh2`yE3m!H5}tDXotN>v=*^UKrdjsx<1$a?L2slN`l}^^|SN zJ&crL01qlh$#oDd?;ip&%9li4D=+l1j3E z-hIzRClq0Vg{g<4AyG|eGSSc^L~I#Bo>lA%l9a5W?9%)(uBy*?qT;;PhHeOWe~*Pr z$QI(jQ5*PG#4*N!=~j(m$FO>r>Z_$_2LYXJqWMP9lWiAemLE~+3A)dg-hOS{m&Y8( z2yLzfZ?)aAyZ@<@f2L-k%b6#k^{4K88LsXi&J{~=<6+;HG$R|KVlx=~OXqak&-Mt; zIWjGxDKO_+%mQysQT& z^Z?_(Qu!>;9ALz?t3t8Ke-L+_3SJva7a_ye2YeZo_*UsdGJefVFZ+Q+bgK?Cg0V(b z&WCkyLAB32yL3-H!K>!Zw2wW<#BJpr{#OyzW`yj=vkPoQh=Cq&+Q@g>ggBlq`O6K1 zjORKL#+dm_{7HG{V?Vf-o!@DUO0@ECB5rA3%6;rXlbIdqaq%yi%yr|~{Ivb~U{oNW zZ>su_AI=!w){DBJ{#)?v&Uj6e;-^hEKL&?J4e+h0+>h&_{SeG`wm38C4f3UNl7bL- z2aT{9)nBR5A7nnx^vTDd+l_&1DnNwm%PHBnT1qBCCh(BwT{%1VElS5^g3(2)H)@}E z#g5~f8&xg*KdQbmEUK_ud+3HC1_nW57`nT=L%O>|K&88e?v`#81QZY{LApDn6zNg{ z2_*;S8}xn8Ip42+&A#}vAJ)3pz2dnSu0zpyxXY|H2VFpEaxbM_ay@~{tIssn^8+e4 zJRbH5PjVSO8Rx5kGITNV?g@iyckw=%lo`kVKIuQ;>CZEn&H9e~R!D0v5;(aq{K}hM z<1Ogic3tGlw)m0a1W$0?d80no^PIOGcQA7pWWn+P-ZWPdj>?vx<}rWY2oEj=N14!+ zMCj?$1?3ScJnAL9q9xicocP4bHUWOa(^=ww6F8sYmz4jpjrDyb#6FY^)-_)1jU)Q+ z#E<>&T%T9Gd5a6U9UjGtdtDPH)}zZJzwwIaRbD6{>IbrwBNote6iDwAZ6~pB&JSeF zz1-alErIMa*tsVU9vWCE2RLPu?bF4o;OgI*OKdMslI&W9w+^tDwzgs%9z(!|D&bGd z3Whxj#mi~KK&e0#_&O{CB_=I=W0K;$j@~WTz@m_i>Qqx zT4jK0I)6Os`{V>9Xf}m;C%sPYx=2!gDQf%NW8qK@*CiQqf;-C!kbh&l;js0=@Pa-r zU)b|ZiPR|ddmwHSKdT*@4inYSUR>7^Ql?V#7^1HuA{dLGGbAi!w*H!n)?^qql7w8B zs5&Ea1$ao>?mt zH%!LYB(eV0`~I4$9BQc_An{{sdvy`JYbVfG-$BaMwwWjG06U(vxF;0qzSC!CLq^80 zU8q8+Mr*`2jvrbZ64Cf!$T3azh3DKArapi5O>W@Mv}+*a$VU7okZiMXDN-ASP;QXR z9!3>eVHu}4LfW>GL_pY>Sj&3&@%tWA!=;D|E_(ZPIxSTd#n00!^GG)-FfABV@Gv#~ zf9eJ$)z?}Nfv2iyi3;J@c>@M)XNn((9Jvc0J-_E~D*)m03uZFjh~oDt%3|`Bhwjk3 zP*^YaqC~|dTcM?uMmZ$BDPEE(uv3jcF8qe#t^W#t&tn}fHH3B-ce)s_itxrgNEucO zLbhMRvx$6yAAS5I@jYg&MSy~nL5p#F-ICn6V--?E6`j)JK1kXzHKHKMnZhgBZ zYT^y;;YDKGOo1G6X3$6vHGVe{GKoJ5H^a04I2*am~UERK~1UZ zC6DP(=c_UpD7N|$mv-RVaCD(mkHDvYBPwn2e^a`lH`iK!Um?Uc5l;8z_k!V`s2fTF zt9YLqaX7$OmrE#2wS4*SsBx0mv?kbfnaAw|%#Tb}QArKPR6=X9-%gN8Zz=Q{<9zrX zyBx=4g*N2FXa1xHoT3?&Ai{buL`)}OzLg$EQ!`+ohClI4ChzMcrK5+o-f7esb@tjg zMzF0#*Z`w~;2ujvU7~vEb+m!!sIikJKm{3hMTxER++qCo*(ykCO%qzHv#K<{|FQ;aJwYU|TV=Hvdza6s z63PViQ5yZ;7h-x=OGzw>$*KHQA=Dec=W{5ZI6eJEyC!u#eH`O=UP=U2*#wxL0b{l^ zD^pQ7iD%!%>als6)e;b*| zSEW9WOAB9-bBP71err*1AqOX`GsY;k#n<$7fS0N11QSDobW(7%f?B)P|8=q+W^Nwb zM8|(04WI@)+3<(&11JjC$>JO22_<#(H%X!xtte_N*?t-dGUH1dr&stcH?2CI) zS3TVj>E z(Kkk+iHVk5JoE~h>`m_pQVcTBg`(-~|0_Kr3u%RQ57k%@S@{TlB&OshR7`!;y zBmD2S@57l?S^W<|Mal{xqwb5~TlAjC(tp0-VFApo0k4?7%@g;jQpnQ8IF?-*ld_d? z1Y}9cs9Vk8ZJGdqP3L+029|Cz)LKTV0K@C|G!Db8!Gs|nUg?qzaud{u$R^cg1#D?l z_nScrQ?F$?5~{!3>Z0Rr;cpq!km~8@Q6FwTx37372^aR@@Qy2KN-MRx2l}I(q;W>kI{e$gFA{SnlyyymT@k_}Gcn3X^tiO-;7e6df#unQ# zV@mW`^~_{QB|eJ8;FBs)&rt0*uR4?DKDzTq=eR_)>}!mzx{F_mwFYv9JE@)iHzY~- z%TdhsynErzL;gS&h*w;Im`%n7FjGTFL4z^MuwNG;vg6OClU*3n3&utPyjnHt*1%_t zVABa`>H=7oW!+6gzk<}5IJ2W1MSH1rGrvSwV^)+9lkPR-c)Z(fba&H9UoChnt5!av z%gg7HjJ}Fx^@Isu!!dJ+#g>u&LL;0O=Pv(q@|!s2(iJzXiE?;`*Fdz@VJ=XIa=YMv zXFva)$3KYWA4E*w?^=D^lq?6(ZC`b7^1{#~=|>x-CVTuMIX;ggz%|y!qWox_5pj;N zpdD*6v`{seQ96kPV>>;`(^5S1I!GInLv}hGPKuji>2umQjY-dnI+HG;5fdZL-GrjJ zO%0AV(f$gG! 
zm{hWCbk(>~46C?|ni*)&0Y{r@z7$(YwI-k``We)qgAHTZ%y%K0*(pxWQ|pd_0B z>CoOJz!+{3ZcJElf1hcso1+@8vA)oxZzD-2hKYF`Wbc?}TcRmft#l2gQfTlD1(lE% z8?NaQDTsZ<#8A)5-i}~nJN&Nl;>xQ{D~N*X7m+Ppx`L`WzZGhw`CDZ{Y0*{eW<8XN ztItn{I5#yAP>Niw(R-TtBAaj(?Gz2_IJxRNF&3Q_bHQ&a8E_Q8Q5u%!%4-^9)kSut znaB?zaqn<|H}=wHPwQW~|8!jVewg2H;@FGB3xrKR7J~mhh3?sWM1b^O#or&a{~@T8 zi1}#*#nm4jK->1((wc8ufKFVIFV9S^WqX?#)nHn6T(1kMV_lidJIJV}260ASzT;`G zE+uUbrRqhyVq0~Vv{Su^2UCx`O5b;_V)&^j=A8wUljmu@!^|7Cq)=Avw$c$*qd^oq zo!gHV#G2`J<+QU2jkkrueG}(8pji8iuBg?TG$`HND@?~rR#j(u;t!c`c=Y#^ajS;V zy(CjsjmZKk0?pujvecm*0w7W!9pa2pX0mpmaziG875J7ima7fbGirEFgsph-Yw*-$ zEcP~&s*Kv~g%9VDkVmsBuXAbC7Dw#irekil{QIZQphq99(9$9QM6G&}3M7A{$-m+l za;WqEI;_q2_Hn&5d`im@(01A+Y*`uK{|duJqS|Fd#TJ+XamZ-o3w7X$mB$~fAo1pl zM~U^#L&s=m*Gf=FX^AX%G}<1-SvyFK_`?RS_?6+HCa3G6JD-v4`=Smcdbt*Gh~D>7 zMk}lnucL6#<8eoHjk-Y+_MyG!_3!sNwBkAqDNm^f*dm^Z<_IWCaOJC9E*&@R#?|&{ z6ifrZ6yd33p!P7qWo4Z0hk z3k?RAt7M{*CK#7KzT9=A-NHKU0~>HzHOcBzDZp39jBcC~d@^h%Zr^D}S7XKhltxzM{ zSEd|FHN)?eh8~#m7Mi+jXN>PR&I3tn<92s9JQ}!z|0E>KlJBeXIM9Wp7|&zKk6U31 z$A@1BYNT+VvO(jVVIvb0{$}`)#r$U}!K$ECAk$-3dNMu(!pj#>7T#AIg6nR>bQf(r z#JJthS=_Ft3RUb!F$@Q1LYUtbL_F6Nh>s@*yOE}bXO6q;L0BU=vB(xIaCm22B&(F% z*O#g7PMSUzh@Ob#wCUMCQq0i^_aE*GI&o8PNXuh7P&t8>L-741EsmRrOWv~`Ej#o) zyZH?FGyB{A@0R*^vv)FVMC+f${LDlM&k&dZL{>ini5EUNA0COwqnoJUR3#cK4W63} zKsjtLEkiy>7@|Z0UL9(Wq=UyL^e~{hH|#1yovaEriRfjFsPts^d2X*yNJqHPRe=<5 z$+&vkJAccG`oy6vsTGhWRy-XTG{S0ddCV|UGbu+{M@CIyLUHmL%v_91w!pxf5#`rW z2xc@yN%$S>kJ@eTAUODx!#&A?{ov7}z}8dEFV3IG!5|bNiI>aZV>P?}cq4pvI@Q#f>r5>-kc$Q#$ph70pW-R!s(>_5C{5;{v<2tgdaXc2> zWW;OJPiT#tiYWSDpSk(3HXL|qFRac_@-(b9Z13!I7kb)rtoKok_a%ez+fs}NNW}(K zVu?{UESUP;RuUfgxRMn1;fL#HopdXFMRRNMKyMqrU5BY%^Pw?SU<)sMqiD zX3=0A8?3N--w~8JeYiePL&n(AZ*7Az!QT$l=tmk<-?r*rvKrbEoUJx^KzyOYY6-NH zeLv6#4UW28qzj-uYviXYU%k-iP`f4l=*Sr{M^#ZxP1jJGx;%7uU3n>MU;CE*|M^o~aHEKXiYecC?x4 z#UyV#1`#^Ura-rTLN-fh^g&of&fnJpL*IB|(2sx(D@I|uQ42I5vhn6VsA2Y%L6gpC zUVXOaFvdsNW41 zt$;Jq#n;K?zJVMu2eA-iTFttveEjI0I0`2TnO+iF*LCwGc0+gfwLuz)=Rj4!)M)GH z)MQa-2E#aY&0~DC_iVUqXqO&S-iKGq{86I9 z{D3kBCeE1Js>00g08$)0-0SFxXI4(5puu6L@^~Wyrh++g9lYR06r$pcp`BNcwk0cO z?4y}cfl@z1V1@d#2IWE7gv}`sOk(k*_-Ky3X4k?;ESp$6rB_pyu=feMpGS!MiCHh6 z@_OS&yu@$O@BshhQTdb!9C7#B#eDz!OVw3u&xUQY!@Y+7waR6rV32Bo+(?zwcgWR8 z5&?n4%)_+pxbT%>(WXI)Ou|B2nb10-#$31mz}$bkh|lkpRSFPkky}CMh>yF7?Pu-Z z`;LdGcJnNgRjq&7o?f-1&!?QY2?0Jk1`oo4ez?+!6@asvNo=WJ9TGGL+)S_wU`JCy z*Nixen1!yej9Jm1Ni$78+TPpmtX-1cHeXvdPIpKXeJ@#rv)?!4rxCe-ojh5)hUq|4 zoX^z+@BEn;H!a0krikKJs;E&odjA%rp-%}8jU-9THxwtdw)&FplZ(EEQ%UCYX=ANPmP4Ue;cg#04{>=4ax}lT`00|5pJE;{( zbo*Gc?WsD5`7<$RJkKsnmY~3endZ$Zwd`LM%uG0-5oN#e*v;3RG3dqB z8(9!hG-eiW|1B^V6ffy$Q-1R+dw6Hrw(1++hTbVw33Cq>~i|_k(uKnUebKna3}Or_MlaEvJ-rkL2txp!eEtoBSpf+!Pi6 zCw&V0sPF&@yn9GU=&qlE6J=^~BBYsbnYrUSWX#@xbQlP*#j8T(5*S2Vc(Uo#QKI%f z4`E%Ovn0|s6Tio`LC+~YL&Kp?iCt5{%wkuCmW#U)Qej8_FoYaTuJtfPe=_|xJ53oN z071M&(ekyN_8Gv78%6ESt)7#c7h8QnpzHr* zu8VOo=np0j@ujF^&tv-3Hv?_| zC*=9B!kJ6{NqUf0avize$Kq7wBZLoqrzK8Ixxf?S2|`80W`$`u#*>Vq@ooA7aSb;c zm(&)z!K^*_Ef^;X4O^tLR0qp=Q0OpJ)Li{gifSWo7QNR#!?uzM6V*6=m@Z>wX)M_% z<*KZQ78{$G4*Szij|120Q>9`WES^utX}sSp=ggjlO_m^UkOlQvFaO9tDA7y! 
z#6HNRm%X`Vt^z-ol_IS)n0b9giSDq0yXC#6ElI~ATA!9vZxGdFr9WELjztrUZ~LHt z_fUxnDf*`yAZ7KJ5h8EXv)y@6JSkt3?=6ydXhC9;P( zG#xb-`FA|+!U`xbWuX@y%gMUFo)9@TDIHRX{LvyuqyrSWS8_b-&!#GU&$ zfR?0Oj}(x~qlvsrdy=7sM8ip8xe;jv8C0Z}#bpEXvmf##X~%!PjgQDu!$33gWw+&Z zH!~$hI9?&^F7=0A9eFDg;fR*DP1_8H5t#|{G`hn_cs6uK^8COBoi#ul>9==A^z>-V zUN{@z?Q+1vCrH|2dLGk_7#%f-8boqP4yjtTRsBfGylr_D&EEq(ahWDZ+arVTwPMg& zS!8tl`#FbdOkS@BvCU7D3qnLO<9XS|wwi3$g0lGY=ACEld7`#^*}ZnwzPyd#L5dd~ z?WFjPgN(;!_~z(orSqtY;R7DV1A}sknzfrw?>O=jM~JXNKi@^+6HlupR9Zd$nYcf4 zcHo);G|bEM&wSsGGPrsi>)iQY4)pi=fQ!fuhWjL<3E9cX#uG1*7*!l}1ZR4^#2YFA zRbIkh*u2biu{mU1IrDxMlrf(OErI{E!Wo7Us!))hUdV=nS&sX&MyP1HQV= zfyV1Idi`+EHS|&=Z++)J+C}?>OH*z$*l zQda`VmW}>@Uc|aQu%lJ!LAX^h*ZR@vb-q7N1IM)T?r=&RHnl^14cO4LM;7bZwTl6m z!L<}Lv*X9uvnOYR`yQeTThmc|gWppjA5!=nT_47kvl zchyw+_&Zv32vpCU<#u4)wf?) zl#a~*ha@3M@jrvuc#y`fLdu2g)&oW||9DZkIT9_KOfcS`Py5no1Ih9gEUME=>Tn(T zGZ~i=dWF?Zs@QbkT6xV%12n#*64ZEQb@UbxE8V~j^Q$|@@pCpIwGB@N_5nTHm%IZaYBmTjI=u0A@Bc$a!*2E(Tq#^Kq2#z6tcoZcz*givHvgb8g3@HadXa&t$ zkCO#mPuE+L7J}59r=&!c;%p@?90Ebopz=JwIFBoGLQSx*b!ca&TdRY4>ngl@8aYb( z(HGB%?L-<66iyHY))Y~PgWq6^449UG&JJB2=ayDQbWaG(GCW>=U~rqjZWQrou2J_b z#82rZNGh=AJ5x3{FbNNs0DR*~Q0p?7!Hyai5<=t!mSX#)WAl)@wIKB+AK2l4`gmcf z>^lEBpQ>cavY!PAal0q07u9&M!6nX~KO)Kt8nw0=vwutovOGRw#P2{hAPPc6Xe`ln zpI_2@RSs<;XzsMWYWAoF~A35x81QeVtB?^?Lf~lwAE?gsg~r-`xCI z*QqE-wQL~G&Z{?(hppPp5+moD_>Ma~O|(cx%TV#sKmX_rf+_UH{UH zcKw@0(R%*Vi!DIpHvUXPbiB$8rBmGJlU){=I(x0|#GDe|NWz=zG8Ly$>YzoEdA&iq zcM?iQX5`e);|@t@_9GNC(Y4d$E0=j!2qvroiXjE^Y^UD4!$yUam)(ZN%=MFhbFmR~ zr(#U>wp31Sx3}&LgEem_R53nm8xN>5bCIE@a>mfD;b9M-Bz+wzxV8XrH`c{+D-4v^ z2B6R33>l-o$jgbhmx|CjF2F&fk$Bm%>)f*-%d9exhl(dgf!lDaLN8YQ99Q-c6Ph7T z3tAd&QBH5NN@WNkQqxwl75r#e*R6$X)qLBYL?y-R(qylV_aOCFflGW$@+&sZ>BGnW zkKE#aOa!?YVPy}If{`K%rk|%9+K@jJqiuPS4GF}#9(y6;2Zo4UZh{`xz7lnho z3Ya~?q$SxC4&$KF5jNnCcgJNiBz`nr;clvli4_9>ojf`y0fn&<;#=LAn_ELD+#0e* zWcy47!(k*`YZ1)1l%kOm__3&o8QY9Cz7M)q zvdo7&YY4AighjX?n1fiPTJ!-v`)@t8dJ%hQ2h;@ zH<$Iu1j87V6zXeuzBv69b$O}jn2DaZe7$|1|3>bx6!l#2bhG16)0MNW`S&e*Ws)w; z6uD&)|J;5ZcKU04Ncr8RA^kQPh*@n&^JOS{V987WF9S6u_p7!UG%EVn3c%e+7&(Q- z@IZK4WyQXxb92kvwj{4xlkUk6F!vO?lYyaSZ;19O6d0kwiDNHODjn>2>z`Y>!6aOV zN;paXq)~4t=KFkWRfJJ zeDgdnq+kH-sDS@AY78GEuO|V1q>~f9w5ju8&d)Xu&@WOJ(#Mai#E1MU zw}F8|$e#UJ17a`KI%ra`wi1+nU;AR3g7%@s5KGCjsfzDe;VdHW-M_xEQKl_YR=}0qJGTGCjiGi$ixIYCTWZ9A>Xl$wa z{%Nk1H3Jyqb~0NFYML{NgY&6@*oq*IemgIb(&Zm9<@+XczpANSKofC2WF?Wr4PVn< zEBS-i=*$5)jTp=p=merb*N(AF%+eR(;k9uIUr`*o?biXf)rZ-vZ+|!h8moMlYB=Zh zI`F{Ss>@Etv6gKs!|yXDbeqFp>iHNl?XuIqKlkH=npE4+ ziWB7n{+MRxBjsLi3<%)u5Gqq{bf9-JWRqr} zRq7>vt_v`F-clO&;So0%L~a!4Qo^qA+hbb^Da8x}6N9~7Cw zWbSE^do+fN9w|Dyt%#8MCpo#z5nEl5ZWYz`ocZg@R6n-%w0lu z%i>dA&Ln~S5r-=3Dx9$QOlpU&h(yE?%5CKNCFaEjBiD_&1yX`02txI7Fb6J9`I6N}5l z=J*mS4GJAh@QrZ&6T9+0?I2DCcjc0Ad6pP&ZH(CW(Ob9FEu@Z_5Z7X_e?DSM)+#nB zgGE7b89g=NW11K4zywaBl#%!nfe8(iIh^>*q;W{b+m${P%`Nf{$zMDn7HB!+qZ=)Ue#BK(7E86ij$hkB0doSP9$TaGg@vfo5vM@xZPIu<) zqKC2Pl)6$jvK*yZHy9clqxKJSJQ9NK^xe^p9Nxmnc^{^`nLB845YNNL_^(V_?*s#X zkV=(2`}F;F`iU0J+9-TSzC^UM>00{xU96ul;`a%c6B3of)~>>k=9YFp>s73bl`@mv zB2OP4dM4Q|PkX;pGVih{t@8^xa^P2boeC%7zxjjIG$lJhXalZ@ZnMvMLNG}(bkNc6 z=+XQbmw8@LOUexM48u@SjQ)tp559#}kZdiRllX$T<9HkRu}aT!B1(Qe$EBV;Z73vG zMG>#c;SlGeb>bx%LDNa*C0;HpcKMdhqL?Y0WJo7}63ZQUVc!SCd8hSZ2ryY*C!JR$ zE6)=3jWCKgD8B`c>#0vGbk@>$5`)dR3<{3%If^qT&Tg<*rykpxZzf2g3pKABrs>yy zwTqKm`^n|$fw+6z{D)^-0i2;o@Iipi5Z_xR(k*Z5zJ4F1xR2?s2(S%WF9SAo)?N=u ze?**2?O8pP1d$dKcK4Ebwj2JHdn@)l5wZGGu1Bl$wkT5A#Vc^}_93!`eY{$)NB(V9 zIs*Pg`dl8m-a#E8L9G|FI_vjNr`Sf+dI+rEoS zHi)t%Y=+@ydF(ilN}T7ia$9~p}nJQ&KWWDPRM zfGx^Md49rP_buchAO1?FP9`;s)qb7Hsb@883_AJ|%M}CNH%)&RFzm`M;*OJPfybL& 
z{o`OUh$iEG51sSNw0K*M^u5K=EFGNe2lv{>;3voTYE?A|31p6{Y!m^P|8W#9QID3GTc1-GAb)8W(8bpNEx+H;jk|!(B(i ze%w9O4G^Js$O*sh`zvWFR6qp!@Hd$kBREvsPPTy>hu=wdK20`n*MUVF_iYX#WUcu7 zni~Wt*y*{7Ni_#DrTr9hf}%D0(IV`_nk4<#q6YN45QsK`#jvH0j{6!NWI2yzVl^{V zO2KLQXQ18N@;t_A`mCE>o+gp!#O$AZDyVnpVOd@WYOhd5hNOpPa0ocbq^%mxM;yH6RQrS={o7%BFqb71#1VS9f&P_B_S5KNQxI#!ix2og zizy3&{tQr@a@L;yMoD0eK9!vZtaRWvfUe`iwjl2X-3x~+G7@0Xqm{#wN|Zi3pQ$R_ z3+@U!DbwEg1d{$v_ERsN3Ij}1ji1}XOcD$T#@r|uS>kGsdh1r5!uAwOut(&5+FL7I z-6#~cAR(L6enrB`V$UtmhB5cED($7Xydu;Pi?}pxnt~o8Jk#`C@4BUA`Yy}Q6nO)6 z4Us=4bXyXaIUGK>8jfWq}5n4AxbWym>-9pB9uNp8qV7nT3dmC@|!c0x}tfQ zRYHjEAm($n?^8*nQ<`@LdbDW#N%v;#t&qTi$UnH=T=OSN9M4$C=c$WRZn2Oh=w_u2?^k`(YF1J_10z~c~MTU=N-C-V69MF(WxjD z%79w@);Ka-={&AN16}Av0x3Fk$newMIp3rOiF|e`C57VAA24}G3Yscbm>gqFg0U*> zQQjN<-GtZbjEO5WoigR(?L|-+h;WvN%s(Z*WJMpD;8G1w+rZIb!B^!u#V7 zc-0!xO!P|j!n*H~r(6Qxe-6*1&48MjJ{it1#@VI8oMDkEPG;5>JNd(e#p&?|9STl< zeIhnn!={vWT^&D}9iYiG6s*Ff@h*|1zBWye1|6KMGDeq`=^sO&e=6fIfs?Xh-v?^z zb&MQp`&D^)yVIz3qHLLhQ@$(fX~4D6t!<+ICU-qoc$o~{gWgnteJ1~8+coBeSux!Y zdfRP&y;pP5cJV^O^(5@T4=-HvbcKbkA`8>CGMR2|N$U@EW_v!iYtL4Y(b?=o4M+I+ zrK@s?!cUDWWx&f3;tfnRet2rKG!L1P9lZOF)oP@^OKteB9=VN6C?^%;0e!1-5v|gH zeuPl(rMH|iM*IrcKWhwsIFw1m8bb2(WEhdcdD!YAYB)1ZT#hO;-THuBIcjh|8caV_?54a2v`yTL;y5efO)ps4p3@Ret8v(=vPb4%|4*<-mhr$N$Mm4l{4*8x;~7StmJMnKvA@E}0JhM#bU=eM1> zHT&dcd{s?ho>mtcPlDSDcnm!0tbfD=i2JUwii)BT;n(?k6I8FsJ>Xh1Oom0Kr9pMI z2eE6!<{h9`&~WO2Jg?Dv_Y#lXx6o<}0%TtAjH9ckdJyXKbM8J|xjDLD+7dZ847x4p zEV1-k|Ekh`lJsCXu49Otu-)?^4}_T}4$_6LGJ4i;UiId&?}q(@mj~A=vA9nq1uR^6 zcZSoD7qds180a#nN83W86pCO)ohF?8GwBA5pSqmrw*ap$*f`OSvbjtRvX|YP!pFx>kkS0tvnABEE9J}CSm{6U4+&Pt&2R#OqT~!e^h5VI#8Y8JM$dn} z=fQ_;z8a#)H0frBgUUp>c*%2;+%wwIyZabqIRuBB-`k+d>iw~0!m{lG&V;}KPqC}R zJC489s2Sd!ml?-hqUzdq8Ws@N3zd9^^l5(J#Lo&in=AtwtEaNhvQZ`3FMyKiE(Jd; zdw%-PEKAsijT#HFj;6Ssm7qOw6cjcnd>N21)1RcnI`yY7EWb^D5^+9-*nR%;wj&?8 zOY4_2SNG@6-N^0-cc}WQkopMmy#3TM@XvWy`u!3&oa7_TS0sGU1}uF&I10Oocyj;G zbLQK>ArWOi6>-WKPDx#>Pz_RKNz<;y-TKT+j;}qSXo-Vop4I1UYQ)0YD@!^uX^-uT zp#zgtRWhC}0}V6aWa?!DH`J-e;~Nt`U=uIl=AH|)OPRQjM|CVgpRw94WKsW)qxw_g zZ?vaQbwRCKh#|i9jDcsLMMwdgXS>*_*|SMzv#1fBO7?BmmoJA467S#V0E@5G%+%(b zG2k$dacM=x=Xu`BZ#I~IG_iO{Vj3P|)nyF&)sf0&D&ak=tbhpb;;{Sc5P$XoB8HfH z!6=uLSxKOr)x{_8uC?N+#KPcVr~KWp?w?yrCHY9$__`H$zL`_-5Ta(c5V~Zy%2fy3 zJgJqRb|Ws6w(`7QU;eFFkqCR=Um%(C^$y~S5HMbZxstrj!-ATK!h~K&*@Bvt>B*%0 z)-qZik|ZWG=wDLBl*>e?U*lP40;u%;KCwt&%cf*m3k8`K9`5n?9vQ%9jIiEgw-f0P zyXMFr@?zr1G?hHfKnb?37GM1u%QjU`s!)!LkG9gbJ)FbRxACPchN1BlrjyU(NM@Rs z##XOs$QfRX4!U>Z}N&HQuF5DzV=$ihKy@k zi?y4D>JIcbj5TJx>J45KN_I>ytci~q-|O}hc@7Wz!l^~QZW(HtdTaHJYCSmg>eXI< z2|V$^YwF1nh((HE`jDl;=cP{HE-<`b<` ze0&Tq#$-e;{TShw-;Bb-k##4GZ;T!#6|P&(VPlx3J{%>e$By3=r%r3ItzmM9IpDr` zHD(f5HIU^^ocloDqEYk2r97vcl*{v|o=&CS5RlZYlZM+~jjP4zo18SnF<3D73>Eb( zX4%ZSW}1xmtwhA;DO3aB`~^)&{14+hN;`V;ORtjb#tDVpb!vx#?|~MVy%=JrNqt*g{u73xT)aN34svm}8Y|WxI<(Ib@KeO%uw*Fp zAFC92Np=hT43QZN=_w%k_2MDlwz3UzfOr3SU68}wv-5NF!Dh0#aSoAmAE~=r`!_eN zI}f|Y?y+3*X7GFl{o<(r6H4issEoqlv$U5@-q99G=q1A@hpeVX+Z#Bo#xXFcT1ISq zH&2S-#>pJu)r(UPbSk$0!|u)jiMnI%?cbr;azY5m%`x*mGL<6nRtR+)c2wmBln1@f zW7@(uyD*-u;B3zuC;-SrZKV%aUzqRbM$K%8*ng{2lM`uRE6^#XYuQB0@ZePXKmzQ< z$es)C(!w8}g_c8f&sSMKN-?{=&z#v`r5SEO)rHRT>LuM3|-kFWyuCYqH_l~;n`A91D zhz^^;HXl-Aqevo78Z&8XG#Ot!@92K_kQ8~TbSVEjy!j+v2lF4NXMmQR-Zv0 zeex;Lo`_xa3Wt8(=Y6l*G7eiFy!<^vp{YJlNu)~!i4@-Y(|%+!NV$)gFFt`H0rVZ> z0XKfzq5PUj#dsLL5931+b%L#60hU+1tkSP=!gsmNvAZvwW1epLzIZ{Gb)o-vW}=DXv^TTv@{?x2emDWpkEc0*5)u;Tx~ zMHU8|hQeBZa{NNv-K}!o-HY(;X+r$Ou2a6Ig3mUjTo)zw8l~hFluhB( zK`dNpBn1hf?1$eUEN>F8!{^@4owRoNMk| z?ACGsdy+*>$Z{9-gp&x~^-#Q&u7z{w-2Rc4X|Y0-VBLRJ25?MAt$zY&*0aH~Cn%8% 
zWU*zq7T8k#XO#OK`Q(x$v{|*Ul2fT<1u_VflP0d6`1VTQ$zbD&OGnXv*Sv^d#`u%B z`iTxR!dX9)Qi!}tuad_PgRb)<>1l~tNg5Zme5TBkzTA5;UWtom&qsYGEZO~zOc+)E~2rL_BkdjI`L$B(v0w_|Zr#=&qpwnR#%Gt|H)K+wov@c-ppQZ~^ zvo03;M;iYQz!73xNx6m!AZI1|5xeU5mI0?fzngIdWB81>baq_F_#sjxu12MiRMhTX z`e#ekjNi9=0`FVB>3t8xxcm5}bMX4tuykY{h53QSBYDX~e4#A(S&Z1O2TWM2IzcwF zB8ZDgKjCU7M3$cOYb&Lde}RD{u-_|>4Qrx9hTv*P>dpCjICQR7MQI9V%H?b#3AWZT zrJOmBKWFBqn9)H9wVk@&Jt5v$%bU!+nz~jN?x@NisdMOyGOoA0Ib{tpsa3kk$Sm)J+*M zE-d5I;1{tUPD=(>L5Z4- zShxp#2ya4%&T*OMVfPztRFb67zN2$ZNJ05{1IXe=;XVaOD~+sjq1lL@McFIx z;6|W0aKg{xGbDiDuG^CN@i7GU4XDO)OrdEIc}066coa4GMwgnz&HhmBbaGo>?yE7& z(g$T?TYL#%uWj-bj`S&Zig=reXmL$H(KDsocvvVne&n< z2}I63PkrYVfdx1y*F9KuBjkC-Iy{r;!8d+AM38wQ^6KlbqLZ*fK-^QMc=Gp-zJ27W zYDB~f)A2J29M=4XT63zua%5RIWD9`!6DW(oC{CEVMbP)eVZZlsS=%99`SwXDnwmZq z??zc0iA8PKH8c|f_NPPY4)MdI5)2#0VoJ4G*GRq@a2Rd(()vGZ0n%676ly0(ukoLW z16J{0%N;zb0#-KIxo6^;52Z7sRy{_g(sN^?18=Jtqsfxx3l#|7;~i7qHi$mM)?U^u z5Z#C;3UIaf^I5aW8Of)f->(PGZ|8KItXbJ!@O-_RYg!L-f;WfnNf)u+OC!sx$-jCJ z`j!`5dULcmgE!X@KFATAO9X3Z>f}F9Aj(19JO=3TWRku71cFLF5IqK5IIpGJaArywpZIO6=ok zw@2m`TxejPv4{9GjtmYb!Z#L^wVz*FJqH4!1-=^G82g zu%xd&0{ee`id;MET}K+g_4=)}MZsOSnA+zDR50ob3!R;YZxoYU0d6J-jS^qhe5D4S zosK*4gV_Zz-su5{(_oBrkjV=hjEt1!239~`HICh>zoMZT^JJl0QPgKAq8`l!dF3v= z{KOeGur9Xo5`UwZl7ROHw$h9eA#r3tm4~9hTYZd+vJv(^CuxBAD=`+=0{>=u;#qUb zA}?`^{z*K+Qa2{^!BynqETNU8owYdb>GKZW_v9Z|s`=`7bAvBG#6R8r8c2Njsg__7 z#LNSetG%XpdT)iyMQ%uo2kMXg1nUFMG*RF?Wx$g?WUQD({Js<0Ml4Bgs~>a}-@?5R zOWZU^LdY=D8mdYI-?91|DFortKW~xMuo4>N3PW!A#*RM4TDctu+;Wd1vv}mD z6Q2PDl1aFK(SomuA26@oiGM~iE9rCDPlyh)yBlPwy($%P!x+vVTgFFLqVoc+MMi;K zSa>SKUmvUn6p*X!+6Z3W$03=<=EWtSwo8t(;bJcJJKn(o(M0@yANwax3**Hgks1rX zp;b+|VUMRd>nWy(H`Oc+nFh$XfJ!Fb7e_SC`vl0EngjAx?$l%XAnm^6LXr;4dFvl= z{0Wtfb0Mr)4sNyXY{NZeAj9QBHFmn$y%zhm*M4Cqfk$BWzUGx z=D6Hr(YSPW%1#X?NNsP{}iL^DWxu75pfmHHj}&~bQhl7+i&z^vQh5;A@$KX{22 zqI9FLSb{K=r4iSE)>ttGN;|0X1A7sMtNc9`HEpXjKcWk_P0^sHYEda!8>h^?zGTf#ed)Um zp}8jzVe&|1tIE z@lb#7`}i1S&mJPX6iF!AGL)2E){w@Qec$)7hU{d^S}H~MY+(l3_a(dRjD25bn3?DI z+3WNEJs!V*{4*X8bB_DI&vjk*b?#$fo2jTXG-ZmV@#dQ4ASJ0?b-gA`%d}1!D0Vm* zB>pbWAWbq(%o&neI)IST2g&M^=>Ql6yE7OEqP{LDsH@u%^5y^SmkkG{<-%NKrS9lF z4y5mlM6`E{Y<`>)zy{1bFft#EFYg2!4()icHQ*!UVE+b#L($iQp{rlTR7qy+U$?qh z{gz30C%#oG#2+|1bMG!m1m!mhVL`El7s)&%mJ}_;ZO>;F+UmhC?6$~<_xjtBmRp$l zBBU7QEfQg9|I{aeb}H<8rClsKNa z$4JRya~{3=JW7^;@p1eCrNmP=wh6AubNY1ZucZx<*)4F3XO4)IW;)?0{d5n@kcQBQ zGS8IvS2=Vzi3=NMsE0lPMM;1U4DE-j`YoG{7eGuRMIu8GhZ7GIL=`}#5TStuPE2Sc zD^YljLdfv0=L69XVDD;w^S|KuPeRXsOi0{^+*A4yy%`(E9S@~Odv@LYZq@0rj(9Nru_rru_bw-%Yq`b-h!HC_52uMQ{zjJ9 z3_eTUTr2FDF9xe25Ap34!>Ei+md@5LZ;ORz=vu-bE-r)fV+IdS4*^ ztV9U@ku`d-o_o{tibZaS*W;=gQXZaEwr`r0=(UZZm2ce%f@$B8v&M@S26Cp_4-SQg zsnnlK-xV%*4bUh03nhEO!{0zl#0q`=GuwO2>@R*tNyR41k>9;>eSkJw>jU$j$IEfw_3yqBZ85kz zg=S#Q9+ICW&T7Mnxid_V=4j|S65tYqh0`5$g3I|nc(Ufdg$LGxqwm1!_(gw@l>FB^ z(2219$=wUbaN&J__GY}+{C>v&k7!Ir1GrE5*w$voAw^b|cJ9?v5d}9z{!rTY5hcxh z>-MDs8Uk9gmKKBDTlZPc`Ycr_Rh37(&|=UXheo>(8Qe)F_Iz)N0u{u~)qrKS$CH|`0R3AZD~4J%50{3&hP zYrKvmQv2S+k}rh7NHSJhjsvXFUMn&4n@>XS50(RJa&50oTBK^)J7l-T#M2NO;1Lef zfoWkT>}`5Ak@0m{gcAmFcpI)H3KW-bieeiR*n;UK>{{t zP}#5kD+=ZR7YdbV7-|ECzl;;VIi2_@q|)PH7)S0Y;9%Mai7FnBCbPW;X}Z!m>z;h) z)3*pFqKbxOGxFCyT|qX*`sv-x7he+nMxpyIA3~t_I7PSjUP7+Dq~X`PW&oA2dN0-% zBLc&El~r((h&u1Rlp9znyvdnb7)BplcAx%Ix0FPAuJ@+yu{3cWc_pcS4F|MVm-IN| zDfRp+`;5Y^$?oUe1?T7=^)he0RebyDk8+2V<2VgY7oWd6IS&$$QoP>~a;rdpuY&jL zxaUVc=I7-p0F|M2TIzwL)>KY@dK?^H!KbwN3}GUeCKp z*e0R+6R|C_M2jz}rff?zc_nA#f*W0*8-$14vTWLza=jZ#sv{P&cjSTcB-RmZP5m79 ziRSYml2ZwR>~ARi@%CeZb&fVSBNyYakR933G*w_$hG#r4_+Sz*pV_j_E@K(fxH+$} 
z3jHDxDbM83ij|wKIxIw$uWri^r;`h`_@uv}hXD1VN%xyV>P586SDCeEF+doDbaSJHyvEUE*WmgEBKLIL;Vbh2QC9N3Lz z188%S_Jj8x9@OQe?zg@~a_=wT1`(;3TsX_%nbLewAFwTjX$mBZ4pvzd9gTvS!x^S7 z!KRew*?&d29MB>K;3s3SdeN@|;XDRy`wpH1@R;Cl*d1yX;lV5;DeWb-5cW#b z_S&7>BJOlp)}nQ&VcvsWLHVxgHs$3?z0Zd1PgcXlxYq9*iWPj|`;7ODyM43QJaY37 z4)F&IBX~|PVAmz^MSb#qzJjbvk*kqy!3Z}0}oO`iDuw@ z8xei)s1y#s83`cp=T3(FvA7jhLc(>e+e#z)b?CHV-4MEh%%h|@Vv?Kffn6F0Nv*&w zFA~n==K*ghHJN`)eRv{Jc~4Zb#VBOTf{u|XY#ft;%#^CgkA9cU9!y5DzM?5<%EN49 z@U`&eZ~C2Vhd+TNUF!E=yw{cs>3K?bi%%TFFWwPUd|x%*`{eexy3w@0J5diFJ5xDa6WI5_ezSdJz|v9f*t z`O^ryvcdX@%}1gCA`m_NHwu0-_=Lv{yS9!_haYiXcpk%EmE#b5x_PZs51kQPEF*rl z;ez`({UG#Z1RfzxQxT}NA;+YYo+<&ul`Rx3T%^)!?*q+1{+EBsf$lW$9n=r(7Eu5* zg13vWI{VO=aoiqp*b)i5-t+3!-A{Za??$ec+;VN^PagdWH9M$l zXYG-tpyyB%TM#hPRs3|mw%C~AU(e59qOw!+f_%jhE`1SpOy?9sd{Fv2f9Lm0l3_7# z6OTiZ!S5f)g4?_Xwg;b52>&d>4L+82p{(uH>k*i%D1f!6l27IZ73TO1>c8zBfQBu} zu}VPY9?wZ6-i=byDH+U+?R^n;8mbkM*jhFa++k=>(Au>>1eU>;)nQTJ;6bCGTO5{; z>z5;F#&N#8f0kF2NP4FTs_X#z{rW^}qu}mCxN|dK-8|faI}i@+zu5=08rK4Y?ko;) z`JQ8{L%HOc=mUU#`~vmHBp?a@*C0mUPmBXr_#o>K$0n3Ex~U8NTG>K2tZoQ0su*mS z78}bu7xSh|i!XgQG2y)@=&~Z!BbU$CuQ@R;_F1J&au=m!(z!#X6Ei7x9pL{6Wx5r| z?)H7N|-l>4kL_a^e@(s_K^r*OXuh-1+8sQ!Az_-Sq zjTbJNaDa+C_WRgjQ~B0a8)uXmGC>*ep8cKnDKTkx@LmY6{!qVCW_jP>foC6JVL@(n zI(a;TgI}JJgPws#{1!TBEDK@i;pY?J*Gh_UbS^Md%G2Jo$^_TxFRR{yA&G!o5J$T z0ypS#XRF_Vh||6@E)iQlV& zbi~;vJLUA(saI8QFI`Di^-3$3qaE`RLxkohh8Jy{0l< zq}w!wWmqdC{Z^15L}NT#fAOZ+hmzY@yGtvx+Xc#-1e6B``B8uPZkbcj=aPK3q@dJ! z6IN7sz00z~_*0x($j}Ow!Hs6P9^+2_nq!uTPFjpA+Nq3gOpTrEFh=gBNnT>1uAshd zk(UianbJU|c!Dh3V#{D*n^}W5>Tyhi-sQUrm%%qdwIy@xxZe#GZBmZ(j7mc6gA?l? z?XPxiCpi8A=dFX(A>)H#{^1G%BY-f>2J_}r)(jgEV zZNGxwuLpS-C;SC6SHO|dT`3f7&$S-v*j>`;F{X(7WDv=v2$#3izr40E*2U*HZ7*Sc@1}RYjbe6})}d+}S5Jph@WJ7# zhDGs=(^Bxw6J~u;o7O`b?|I*kpne$#)Xkk>{#Bm~wC4htMPrFg+YhhqcjG$>dx15U z9K(464bBUGBH%eOt3!@qL7jE3NetPf1AQ6t$fJe@*WiRpQ8*5?X7m+tUYr#E0(VOD zWUu^U#^V@tBsA=6Yhzc=b*4raI@RZS3|i{lYtp~{`N8XIsFr@tU#o*= zEaD9gK41Dep)7w5?%bJ?(R_>=!E<3P{iSx7ju2Q<8|l3!CXzw_fIi}VHk8!Gi^J*` zz~&K_3ezYm56p3#qxM^90b$k=&NV#f&J9L3;a&x)EQSj12N&Iu10nnWfKxWmLFiit zbRbi@oV2$XGgKq8q#w&m$FIgJy;ln1q|x8Ka4!R~WCw ztigA5!y3`E~96b+kxpHpvwNjmS zc4HgtBO?xOfDNDtwRRmMTAH*u{A3>oxoHhYYd8wXkITnalL|Nu&u$Cxk91bvAWb9o zm|b5U`uXD4y>A8t$zz+e#-D=1f1_#Qyr%D%Y0`p2Cd>~J*e~mQ9tTF&xT7kE4j_Yz z@{Uh__|PL!)F0gHezWfUNvIEe5(g*jd_mB5`q}n%Sr7)6Sb#d9hiuh&7a01rOSN%{ zBJAhKS{N<$jQWEI8q9LK91xUsi~q;*&$~_19PrB~ z@OJZ2NxqgAjI3)k<12!6<;F?OP~e5$4j}r_NrslI&Ue2{-hZ)2v?m-ic+7+L|H&?} zv6T6rVPODPLc{x9W!_PFFqcw$#7-NuSwH_S*cu&L_Ecm!XUgjIENU`8_twZ8B(sMy4sZ z>XE2x2)f*!gwcc!(nDR6NJzkowbj$>6Ai1^+sU6+n3>Jk@(z0E>R4G^4V!v<she7oek4L0=7)Z5Wc zX=TgnRf^R9wgJ^z@&zJRIh^r%U^nf<9( zHpbZCrB_&++Wh9{)R#A^ORxw#f+)P|v>dsfJ%j^K`^GF6OtVk7AlF6zcQoLBeX&e} zP;@P$Ht=K_O;|LIg1gR3=jBThL~ApU+;tM@r_;#+-}UU2pW7SDmX@?Z3k<9y-;P=Cb&+X)VXnvy;8)X(LHH zUNt&#mFoD*g?<8^AIbS!QfsE;)DDD~)z@_jr*|4xN$4GK0B<4M^l5RWU9gg%`p%eT zj!za;yh}E(zZ#TN$fI{YuUm}b7S@5Iz5&xS0fbG1h-e((E`mV7) zByPC@b2a`8_IuXhn-3sineqpUg7GeXE8veOgS5aYALIa=vn*%O_ycw!ZkQ_>%wQ1v zm|v==saSsGJuBI&G8ah-TUYfBXfu&`rbSvSA6Jb3H_DMNvDKL;|CL4C( zjCUZ)HzpXj)GPsGE`Gc~2|5?gC^nz&a*p^L7PG(qwE*qq27CWl` z6)}X=B?^7}HwYKNSesr(5tt#(pH>qK-kL-1kO=6Rw|BoFv0U-sfE0g-cG|sn(_O)i zXz3%=#z97hN4Dvvz5%6!t0Y7|=7tlAY{`3%NXiuB(wVXg-`ab%^jeT7xkLUUVo8sR zh8~R28?LuYL+^S}TcxTxmq>o(e)_cU-b4LflE33ix^L)&9QVf5d-ZD=I&OHZ6V&w^ z#rko15!Sq<(S1#9N>7cCTf03O4dObADV<-?&?i%f@GpV(d!8cVO50mYcAjR& z*u8tOe$UYj4mQeLKor`IIBjxSt|s&iU{X*%$VjjQ*fm5Az;PY$)^aJqXL*R_1FFG9 z)a8xD+y5@k4FpbByA(AdaSiBYwe|Ew_#M=9!W9${@ZsMRgyEaR*YUfdVSe%SVO<^BXqju zwQA1jG+;UlY4~zov@NsWO_)R!!_Ch#n8u^Q8&H-&-{5FK682or=?6AKtgXIsxcJvB 
zIG+N!T!QrG$avI;n!5u+hyws~`n%B*g}4b`LV*ANi#T=|8h}$_Vh%OY0i0f7=jgr7 za0zPNWMdOuP+#@)Fo+cxav2kpKOfGT1@Z|If=7SJN==Y_i*i37;`Us|*yr(@|3hG) zD+3${cbDUmJcM+XPmcom^g>D=nwE91-%Fu3@rLw=z*1@MFnyKpQ`9y=S3k7dt3o*H zV&q)oK4S^B-BUqa5snM07h8fF`cHE*LLj$npWin+mQcJm;*$}UP`SVq!DRS&(I`)t z=;6T&_w2XK7OirMx0tRas(oKCixik``W{lxV_?&!o;}pbTF>I0*8TQePl+y~4L?Z* z*=|B_M`mhwl8M!{H4Hbok-pYm1*dKuS$Yoy%9rC{qNq8H=!1ORj?F*Yh07PfN3?1C zRn8UP%cyyuTU)p*K^Re3@HiHx*`52ZlreI7S!7Enq|v&R7tkm>HhcAoVzp!Lu&U(>a$Zfmt!K1Zs0C z$15Ws9j@Z=Vw!u%-Z>MV_{?{8tCJ<39$I$xPLl zD`slr-660c3iA1F%MZs6JiQ#6hl&vAi`eW@Wr7WZGiBWni*!q|wL7KL9qG{dj@sK% zOWRGMBQiw_dp&}x)KkB2I5~HlH73H3#RI>#)fKXz7UwkCLl9&Qj412^Ul5k0#{bt542L+3wY@}0+!?SCQz8`33hKx5YeI1#lAPkSiqatXLw ztT%zCm2CNgJvaMK>CsErRM6lVFwkI5_`iJ-2l{*<1>xKFDt9w`!j!bm3*SAhGJ10e zv40ssMSjPPzvP|y^}+3}e)V2s`Xe)4fwgPcqU|We8=I>(|DLbw@026mFqwQ2N2?a6 zIzZ0mLREl2ak-o8G|yi%KjxM2t_x!Wz44-_$6#fJdC`wg>RPPjF{i0Vwd`^PhX>uP zc~33(AX4UiH~Hr87ZsnduWuiI;a@5#hpkOLoJ@n~tjS3)qr14*>D{Byb%AUX8QK0A4^cd#de} z?kG#dbjNU}4y`2imk70yTrHzeV?I#>ny;?Qyn*=W_3`rDcFG>lX)umfG);C*r=u%j zoSm}~@h!0*eYa||nqRC@GB*{NoK5qwNiWuyaZFiGbx(oXVEc~u{)R8JU&*_S<9Hi) z6-Ajn68fa8legQYmtmAmT?>ab3?A}tt5Ype`ZXp?4pa~QyZ11GD-@+9HZ?JMj0DkV zS@1)BxW{_p{X|i0K$l5pUB^R!SsPT?HbBCbkKBtzMu2P%MAxbd=e6*kuqbaoFvSEd znnXCTF$dg0!AM8NBAXf?1@}mY zf(if}xg3`teRxgK61=4{51_Z}<~@(4Zg@?yO{%)As6lCcZiLHBJyMLe$Eq8N;o4ogJc}bV z(edG_f7|XnrIw7~E^&L1V43@7zEo}oe?y}0l~MN247Z?Dr(YYven({h8u*x0Thg+E znH}-UZ5ym@ToywmgFPW5e-H+-n^Z_!9gpNbI97?!Rqsi(LFFw_~Um(wk|RJp`qv^R-aIVU}aFizbgT^@+hE zsbVf;pkuR&)F)mHP<>uLQfoS%#X55-bXcz(EpDeg`hYi*eho^TUSY|{8Js~s_ip#s zm7Q7V-j3h9gxf4-xT;DIuF?)M&ER%#^E&VS8nG#Tm0L>K!q{vtq`!P@&{`{Tn zI*?Hlj|JAy2w{k8kxDzSm1AcR^GK9q>2(G~AQ1EXS?|Z#)GK`1JqG?t%p!{NCG)uj zr`|pG7NW7+lZiadkF5RG3|>6-EOf7blHVXVEANBPDX>Q`ucqYFEWGJ(m_^>8S%Ll$ zqoQmheS zSS-@`bZ;oQH~91k;`6TR_zxNaERdjOC_}qev_BXSa5*wQ2geU{^@Rg_!hC>>9c>ga z6tJwj$~A|(L3WzdvMc7j3K+paCo*UkAs|9)wQw#dXiJ~GWJHI2a{N|TXQlqfWQ9Re z8iwJLV6pVl#t#(5>>kk1yw$sZ@i%fi1tAH4beO4XWR0v>oU6N3y4#owqy$So3sR7< zTwi%ilz-Z!&2>kx*2Xb6zYyQ76DAVKKqdm&yt*E^h+9luujK$b@Ye zqcfX^GaIe`-t3FH$8VBix)8-x#dqMvDz}2mzxf$^z>d}kbIJsiBMyO;K$;F+QVQP>e!kcZE_G-n z0SoNphye@I>vxSZmQle&*}>D=ewpD;dm++h>jhFVpSpzHA3rNhsWTZw<`34hK-QGC z6`Aj3*zeq2@7Va+=WSY0*gNW0Ylr3_()o26`T72m09T_>RpY9lpILw=d4(b}|L+q^ zBn)1?x$}`Nsi8;SWK$F+A5CGQr;L^i_D(!!GZUrdR0h~@s1;(LEDw`s zf=dDn{C%G3SeS+~?NagXefnI^lP+2;J8mKAyy_6Rjlgg3z=B#Aq@$w>a-$>U|B4*}w-10M$~Ww^6mgPAK*y%r?XE;D$`NL>ib(hA2ef(-C{l>2J$kY9@xZIv1lor4@DbDl+dMAbR*{Y5TP3 zs#ybY;)(*6@h-3r5%R?`EOO1g>L%fTMZTi9<~6gQbfCXolQlPokKOW@r=Oo}T(C5;<)>Clcbe zefn0FPeWXRb9yO+N)L+e%oIkMt4l7|no03d=X<;wo`bYswQ)+DOw^eDnp;PgBJ9-# zrI*jX$P=-g=4QZ3f5|vnkTe<{u2yZKqUgSQMX^B>Vg6*-)UoB`!bDj6j=*zu|xX z@cr?e>(k>}Sfk|u!klnfP8t{>d?l_H=5)b(25$mr*AnUltXF|I7{sn+S!D)pAp7dF zp}-Ljh&l3m|AjjYBl@pqCjqY@u7!FvcFz4fw%4T0DiS!V3hiq|AYz6I60fM#ymfod4Fnt!EbfoeOj1( zLWks|&WE$lcW14tQyx;yXdo@+_o3Jva*x@;wcXtt%$*JSHu8JWf-|*`wWM=D-S{({ zaOT*>*+FMn$C|#q2yFcI#980KYZxG_j0dImlL#A#jqNh4o*+Xp;)~25*Ch+aoMuhE7*Et$w zPABA+naoJO9t9-RXnJ11W7as-eyWCw$L=a~d_FI*B7_HeO7R|)%KfD8Vu%k?_a zJAb~e5l~01F#A_*SOg54I1Y(wNqneDOM&aYv|0tgU%Vs(7Y|S$z`=!MzdR7`N#N}} zPxzuJvl12T0bLI$R_-U=ihGh4>&N{)IOvT!**nX)`BNUfwH(rx!TOJv98DhJ1W6ym z5C-GC$~5YCZw$9<+dii{$e$^5?+e%8;R~$T-Aq&eIk-U5cGUC7jW{pBg1zOd(@S27 zR7(@ffE`M5$EZOclO2@Et)W?E?FDPmX>|+7B3AW>uD)~iv#w%eK_>V9wpbYKK5?ex zG|d-l&vD^UILiXY(g<9#7oSd3Jdi(`4DT--p?zjD2!9Q)07DRzf{ivJH$2`MdaYRg8H+JrOPV0!|q&jg%C-ANAwC|3~b&@zn?v9duYmjRs|9#ds z`Tf^h%LFyOsu&f=?5fC^Zg4(d_aPMV^avBc?Qsf?fAEgMg~PMqh6z zCa(_rDNJpcB#?E#-I2XmIefTEZ6xVAx!Y&?%c@;Lko`V8{QeTtRok7}$fwS;fi)6~ z{S?v*BAyK8B8Cgyyu=gAHu;Qz`i9G& 
z1=sm6{81LTavMlLq=JgW@qL2?8r`o_p&w#ux&O`Ii~qBHEzu{5uk4k44)LX(u|7wy zk_Q$g$|Ibb+@77> zj%#PLXvV;##&R9!dw6#goUTjV%W zDX~0H^2=i?1E-=065#mmXMJkO!<*6(D$6dwbnaPaB2tSB)0;sC46y--oouA(YeN5V z;*#+6krC@N(vW0eZSDXel%P@aD-QuFc{B3_3Sdy@%j4ZZ1J}Qm_y4PR(gYu2NbU7t zwWsEW+8eU@dzM)uVmvF3q%&?;o!f=@c4d@neOw(+i>NVF^Xm1pnKx8fMBhjzN)`RD{PcrtD5(G9d+F1f)Y&Vauob6$PSVK|XTL;n($I<&}HLIMJhOr(WvCwlSEMkli`5I&< zC}0l$sCoMsF;aF>Cn_n98M1uC)pZhNfPpPx)`ps%eq9?X%x$V!BOtt^n${S7exCCI z`C0_jMvqp0-mW_x>Z@4k72lPUBnA8bLj2WPGVrD!s05fW3wcB><4LB<4wl2-lB|q= z4quj8hnklw9IV(8k|lWe3$+{W*Y#tQ@gk3 zcJ~(TgIJ-(T7?I+&x0ybZsm-NnWPe*RXbK)&KEe(O3FD>ka)B?AJ3MxAWL0#T-;Mk zH96v3sq-Zkkfj5ciRbJWABA~d_ug^c{l>jV?M-!v_y8gN@jwFH>6cns?+7jcZm<;~ zUIt1LD|Wk+!ktb;EY2_Yz|Pykj4}ach|8A}D^1VJJ9VF*jo5@s3*nHL)%n-42t$yV z{I^uCSip%i#8C_)`)yVbS}yx-BeUT}Lt^O_nM`;sesi2{O`2r>e~aRo#@X z$aKa!OWb`eGwQ8W61d)Q?e2D9z3}sXCpvLQK3rw1Ppr%A=FY+mB1hrD6{)W6)rsk; z>X3?Y!J>Wuh43V`^$)w?ky}kfO;Hoz1)$0Z;0r}%V7R(L*aVm6${K+W=xaSh2$A5H z&ylNUoD5RTVf&YLv>bti@^J9<;=hRduVSYHvQ&_&(p~5msvt+MMvgG+TUPapU8!mD z?w*)mS7+|yeuRyCF}m-&dU6F?=d<57emV$S+UYrvOf|n6S1RtM|H0c8T0neu7P3)w z!$!1E$oWRLIL|!SSp7Z~$*fbztkn5qOYJ^x?^o_0xB}uhP4CV6O^L0-HtUZU@A1i! zeM(KJYkd&+q*t8L_Hv$Nwc zQz@@mbVYTI>M6O3hesVWe~3d@$gHMOD*_;&(&oD+SF;%9Sl<&8BSI} zP*a^Y^ijkt^dPCFw5}=VHx7{^iGhu|{j1X%qb953M?hmDCB&Ova2<&(<`?|F<49~u z_I18O+|V`S6Y^2q-L~7OUX1x&5gBP7`d_Gq1314`U_eYO7tBrwTRgLM%MR38rZ0Y{yhOiF zk8j`Uhc@D!L1J7;%RIY~BUy49}m{P{7M zwUO&(^(>?>FUiweFRE_lSZ9`L@4Ndl9Y=Zo0|}pf`3>rI@IO}+RPCm8 z1*oe7DfG-;3E)%~*oMDyA^v+s5%Tlf*NvPXiWjA5m!BHwqm<9s26p|-hSs`Y_Nh#P z2u(p^Hs>eKyeeU!x7z8Rgxub=9s0*48zdlzN~K!N$F^GIeAs-?_oRhcRem9I?vIfHeQfkblyk$gcbzLrmCV@^Y#)IMg6rIUPII2i zTvClJ!x|rwJyWNdhINHT0cnBtJO1MY9*M@3m9ozs(imXsnOyPTA?Zh}`%QH|h%;ww z8ZF^Pb~_D$mH^`*u$p9Jf z5d1@0-N2Iu5wqn1m*A5i@5>_j;G4&YQ;BsTd(e+@SC5<(`Ns$W~=WbnOq1e@_r=vhSM-NV3+fU6B!9t6M?7#Eck zkx|evG0f1bECCWVLY07PFYVf>Cv!sq#F8oj%|$vEyit zIYF;aYOqqhc6RtPaYd?;rw3QAVbILF3(VTf&AQ8*kPh+wSbL0t1Y%Jsiqr;33h^Pa zKRXY!ghnqD@w4fn)Bbg|(5T0fL%r9rWWLG@)U(bs_intliLrc5vAfr-NY0buOk&r$ zc~c_b9{#znB8}z`)hO_&FMrOq z$dpB@^w+I?)EhSB#OHD8QE*!M_$`%wRNV&BOJ!y!&`<=5zG}42+>9)20Q-a6P`~Nn z(cbZx(Id9^eZ`<%32oL%B!u@<8a43B?*4FYki0msfuaszA8|Cc3L@Ay<}{qF>X*$F%zM(CM@kP3XPmUQnRnvY{p<;zVs62&e|AnRh{dv4BU{ zXY(&)v}YNPl}^`K3$!U~X#T7QvdVwbH+Ul%XQn5lJ54QQx0q#K107W+wXq(?%2SWi zPyL``+6?i7#Dds0u;RvfnnLv_kz%hoLza_K&d;K>JVA@YPBP@!+~VJHvDibqXPg>! 
zh7TIs6)1{{tDft#gx2_tYkn@Dc+qkb>eK3_i43eDTS>Qyz)!p4o=O)e%*~aPQe*1vSX-#sB8BXRvo0bJ>FG$kKef==JWmk~nSR?XWFIFgLwuD( znSZ~QY_$*d9)mXFZQEwt4pN9w~unAlK0M^$`s!tDU_mp_=2Wh4`LaxK+gwCLO4Nmjc1 z>stX?mGH~S730LU|HlSz$d63`HQe1OaS3B2JsFDj2a?;>|g8@ z?Q94&Xf{a=l@Q&;e>Dl?#BF7)?V&CvV^@K78c*;D1A{j92;_}ioB%L`0)E#KP(5Ni zHR=9D+ImW^yy34Mt*KAaRx|Q!nQz?})+J|kyDR-r$Qas{O!T?HD19I$Y9g5a+KVy% zd%mKNh@xrcrj=ylJp@lDNz43B@1Nw?ZYS*KzBc~})nR%65b~8=K~6rr_g2WQ?RXC% z+hl5-WI+Q%?(3BKQ)8#pO7PZiafK|w2jqbc_)&q{ z*+>jXJM+GwPMSsS7?Yk6YD!k=W^>OM__zqMLq}g*fmT3w1sKuy_B*$c7s$UHWtsgg znX$g)`9PfOgr&42<==6*=81(py1%~*^mUyI1DB}O?6PLXL5FNgjv>Tyya+9zb&ePI zJNO0>cPjU#ce(39aOsM}f}!6HP|D`7VS(b@OvL8`+I4hCkJJ6)>$NWzPk&NhRrGz+ zl0A7ZE~xkOJw&X1qDhjF=@|B=4*z5AZzxR_&dJBtid+QyyTx8XS1`RibCe3ZLGR(8 z&p#Q!%broUO0LL%sg=0Z|H5A9i&{5lvvo*r^iHP)1-rl35Vz1d?{cHdK|g|)jDE%AL^ldAQd)s87#Lot z&n0*je^iJ%zi3|UN^=~u4|#}h5uQC(EVCkz4Rzw+R{-(TRar(LH`Tn*8%l^tnUW zT@BF*X?Az!g80?vzgo>xNI%G2i`eQWPx{q(G@k&;_iU}GW}kk+W7)$os0gvD8LL&5 z-wt~Mk?l?o)mo3YdKaQ2Gch$>UG6OqNNLMqtNmg?p)<5fpy0hMMR*8n;rYO0mQ$fJ zGR?lqkS`XicU~klqg&a3MJY79=mdq+#cMGK3b7vha;Q&BWH)_YEUKbE(X$D4&RgV; za6Cfz{{pguH}uKe5Nj&4i!gS}gM2UAP|ZX@t?pkCo+klglv!MeZCDz-MN}te>~77bn-dhAVUDSJ8&8Gc{I zBu+Ef240lvwSp8AO8M;dbQPk%K^{lBeszEOf-HGdUfl7F@3?oS|0?!+s8YB2rnHDHA<~4xE&kZ zcGU6U1}>`qa~(CIj{yg7iU-RestX(>tIL%NVBm8TdAV+Rvp>c6(XS7PkH!`Vc*0n8 zMkWw6`)|L7Ja1ruxqiTKCh=>HTnHqi%Ax2U)7I7O^wMLF zlGTb`g{wV*?M#PH*WAwzL+!1js1gZ{(DzbTsr~N8#Xj&v?cD+yOAWO;n}U$EH}iSc zoDtEu4433^o`Vcpd8WnD#4$PE(lTeWk8m=(f;tHjeN-=q%LjQZ$XkoBqd0i&|F=)) zt)ji6P5Zk?D=wEs=>NykRmD}=JzcuHyFt37ySqE2y95EHIdpf2lyrBBq)4|k%0Wsx zqz?6bhyVNio(pcy)v))>nzh!<4(1wr_&@CkT8VQf0ak42)oWQpt1b$YVfvu0piJH; zg9ic?Q_NOX9LhBIa03*^?eqHBp;_W39ju`|o&z>cWkhmKC6RQP1s{n-m}L*jM+{h9 z9Cl_Bd%@9rogMT1=?kVjQ#R()ztuk&T=j2^Sh-{WF~iwkrR~n(h#xpjkdID7rdw`@G=+;zpX_b@TEk7f%%q9|GhEO zTBzGmGXG4m&0|02Bj!!9$q>Mo7`&Tq8#xzc{8G;TBgoZxkL_Pc&9dR?rS z#mjKupG^ZFa95uAv{fK@Z((-A2wjGmdN+~$^4opYub%__qhCUt;X?nE6n3p+7rH(2 zk&4{r#o{XN74mBK6of+n=V8-)>IX)`wU5`zOA)+EEAnt#^KJz+V&=s^)tTf6H14Y` z+oq2>^kwxo-UX++TS_Bz;tVUX)f669q&H2$2eC-m@GnOd&UdEGbo^NTMKa!**b&rP zzS&2^@K|$C$Ltq6M>1xYt6w%HZ}^}!bs90ID zx&C^a4qW@ZP~-W=_Z}i!FwO!G?tiaYY;>s3EBb;1HS=F!#oBiEn$MCp#9S6NZ?c*8_madD{1&Z?a6Z^$F+7*AI zdv=~pVVuD)tO?tS6ipo_EvP;%j}X~wy^SwlmO^ti_Jmv%q#F!5YMKou<=E|Zf;*AT za>=Wln)apYPh-=Pda7ca)}lv45ou9UgweSOVh}D`PrhW8m&g2SHg+mF)xEbn^Bcm1 zD@Pg>NYZ4Ex0UzQ2*OX}$HzK77dgqorq!x@+Gi(3!zc-x-i>Now(8wc7^k2}&o8== zd%htC6kqUVO3n1hpQxXR6IvO7yW-c(1cJIB>fv$RDgNaP@7iz49$aA#|%ZE6!OLK|V7d zHc^2shcBEV`2LO1xVeT$uV)XFqD?K`v8A-4qaj+{wTCEMKQ%CUkE0bl_#DYjh(TvW z>!HzD`|bz+i*dAi^1H$B^y1$62N=)cBw59r1&Mo{E%FFR-~>nK))Y(fcUfz=!dXz6 zw7G=4-l{O6t*5+ENRrL}n_iYnnd|VV^n~z%z{b4QirKRer2IU{DPYVZz~sV_c+C~c z+<-v${obz}9gy%32yGOzb-zzP>^uMVR62N*+R3IW*n0J^=;UitN~(u3vz zsZ>fnvW--Dra2}oB|nC!o5@F9hF?wvxPzZo=mrq2Har>hOd}HVGP&3)%k%P{H9~5N z2^U5uO*(w>ccZjD%M)f!mL7Qv$gNN<|KaeVvo@GRO!?uV%a&$bE~jSRLtU>54q2WT zsV|>ky7bRYsPS6a5>))rDeV~BDl@&(H<5bNa<7jgoBjJb)11ud< zLlulb;oA2Z5gW>-D9>hdpTZ&?^RJpKv@@rbG*+F#a%(Va?=bS(dZwe<@4_%C!~C!@ zrPV+Omjr}auzoK|+tGoJiM%L+M!;}&=kEEq#mmq5oXnzYCZ9m&Vm50GNOr~K<$e7P}MJF^NGB3^aU zChTKP)n^Muxu+$ybi&j$44j^Qmx-R-MWIM;$cFvw)cW-1M(^_^dn|03=sS4MWj8}1 zrXN)in{8V3RS?+vza+hU1!w1=MeIJ6%E#wX*%!`c{U-|OrL@QZT8Q0tp*if|2mn1` zpzB%p^l)D#F!Yz^FLkCl&~y$(T-&AB=dc(3i-f)rY7QPR)Dm{Roo@|W*7CmoSwXAM zVA@d!s*K#nqv&8?W@chvDPt|Jt)*7tN$pMDMBT2ZQcrlD!u;dUgQV`>#|J%z74 zx3)x!Gp}J3R~*`oJEtE<{_}!FKM0f$$RySJ;v-J{Q5a(n>W>?+C$tS+%{Wt9jKz_^ z71J!`s(qVyJh zk&9oY9ydbVg%33`tbkOXydF+Lkv*s81Wn9MFGjf^BgPVyk$XqQ+NbY^gfP9Wehed> zU`~9F?U8YBxvf#`7B_j}@F3)eC%`s`1O@8E`~4@%uNRYALXzvL|4!ASZKd!0RDcj} 
[GIT binary patch data (base85-encoded literal) omitted]
z>?jDmu!lMizo&frV!5F##vkSxIIIRM=@(C8?^onH7kb<2)Vaf|5=Ngllis6^so`D3 zr(S=t3nLQ+&|1!a?1lQ&TcEEabo|u{%Jl3$w?xi$u3r@fy|U_l#sc z$6Gs``CLg>E+K$EI+vZPo2+HMP}}mnxQnlt^E1M>svz!dg`}X~9Us>IFJ=SxNp*uh zWFBJfF<5DjYk%l(pt(DKcZTRQnQnlINZLLB3h&N~*^8V;DB*B-!X z5IF;ZbE=*jHwv4f5G9}S9V{~gqQWm$PN{$MSfki{H-Y0}faN6pC`g|P0j6(vM;Jd$ z9L&gG0*|yoksq@1AneQaK4aIwnb5@oJDDz7FKVRYsq+@W{v`jIqA0H_O0+VyD*}eG zpv64QvhkN6cD&!Nz}9#i(@nXLCf#RN(CQbiWuK*`p2}U84`EOYQkI&DRAli{I7M-8~=4=_O%JN7@1)1c9 z=X)E) zOLen%%9;}4JBAm>%%uTaK) z0>|Z}G1rJN0K^tWruo(Pt9~B#x`uxQHia5kt?REf8ORimC3lRaE^8ZuUez#+-+|=w ziP9%im=b=uam$)lM!CRjc2pHB%y+GcrqlIGE|{0WS+dDR=3SwPd8oi96@YK^~~^DkMu-UmOmm zF|FaKwFJ+9)O){?_6=^Q%{^MMTOgr$2%YDdL;bj;>;VN={QXdcW$B-4J$2lTOasg$ z)rXm3a^>omrGv}t)C&9xp8~Mc9>(AWc!z!WTmD1+6Rv7b8Jl>96rRR*nGEqtM&8#* zSvJoc_q|L1!Kb)Dcb~<*eQ>qoI;45Cq&=(nJH8TKlD;irzEsz0HP(Awwo>v1#8!}U zjps;RGi0M}58bnIynGetF45W7i_I#=xeReOk+E88HeMRLBvgw=h5pqtn1Afv>3uow zhkN3Ene)(oAc)I&Y^)1wdr3bnjZnV}Kt+huaoa9d``WYQcAwl|fwKDEzjx zK1=aZXMCYx#3oCAo{KY^VDywjDK!H8$p56*NjmmQH*qxsCKb z$yK`+MBStUyM}22>@S%6G=ABG)%RH+I}lm^mi<}vC0@&8ttwL&2zAb%H}h|rj!_lVu$2-XE_rZ}v?2*e2U;QE3Ed<;xXkhwuq~b&ExI#R(}Q z!Ja?8$527~$w2ZYs?l8X(`2Ohr{_g(00esPwvzxv1i^ec)q{$`vo0xjlwIC$19zRP z67b9O2tLC=yXX~gePPnf!Lo|+4g|Ce{8|Cu_K$Y^+YV2m9uB8o$k6XKg;W|~<>of;Kq#W#2Q|J&^Ku(r7MKgMq zo`T1C3^(;lziJMCDW=AMiJaN!BmMyPLb>GA!5tl~KEoNp`$--_9p2$YBO92~QYgPh zz|3hKkW$u1(P(i!hjK*xFI%l%Vnr!+N={XD;S!W0V+9++EK9e-Fgj=ehSIa#>36R+ zrrZX4xY4NNv6PGBFPI?-UXJ``wQx`{MRP#&QLS?E$r*auA9MGj7*fhK^L^S-viSQ1 zc}fiL`_lOV>yL-z;QmkH1;tjyA-qX-OH%Ol08>l@>|TX#2Hw7y0KWqu&fVi+NS(>& zuk&!l>3g&6V$XUXnjryOO@7#qJ;3kpAHTwKA-)D|DD1P^w=_yj1MDyTUT=nz3UyDi z2JL5Ao_0g7>BYItshs~^ZMD@(Dx##$}02 zH#I+r5v(1fK1QeNbY85n>kVt)-4Apx%ZA9oZj6mbG4osxMl|NRX&DlYNmmK_X@yEF zZR~0f%ca0yE+|8r%C-ug7#IjJbOY~d#WEKj`*rlN&-ZDzf+gSA8vc&2(1lELdtBa- zd$P9FOYrI@!+Jf%jPWzVp;ql`NL2jP=NV+%JInrxVh3JAyUbznFZJV=E7{WlAHdg{ z8+F$pU@unDcVq(oRZc1|{>7VO7RzRwe7?6VKB3Fq3Wq+I|?4-8A(~i#9 z;At7gn@`bhoTJUIqJMSq>$WBzb_Rw*fR{1gkJfw)+0}_TGSG_+Upox?onN*2k;}znK%c+82ittCTTO>&RtXscBlhjxkF&Hbb5N5tnf##yM zKfqQLCX6IWBL(;MGU_e9eftJO&eKgJhe-pSt#5Rxy`LV7T_2iJ9KN@O2wm^FYl)UR{KJT$qOAiY1e#YaR@Lo z*mVg;T3U?YDC5t>Sxg}UsY#hiBv*B4Q zc2dUqnj+c@8XhO|V&<>gir*)dn#dqNekV8@mGqiYdBgJ9qY~ z4?MU6zq5t&TJYT;+y1&tMGFl(eZRpOuT&yTRNExe*o@s|w;O#D8;<*6(c5;VkN z6!cYFEk#W}xTgbY=^cOA8{S2n|CC4}sJGP9>dN{sIFBKBWLwzu*0LUN_&%+3)-5Ik zfLVbl_nbPmdrKKP(PYX-VB2gAeMCCSoH+O(gj#d*p>Z1NrLw`2{Vk=I98te9hw)d4 zd4Cs7hTuHS=AF|}6s{Ji4Y4Hbj-Rg`iG%U*)MvC5$o4L$XbDUpD>1;_%7d7`iB36F=>Qy|Um8 z_utg2(alK)y{mP@Dgm8>-trf4PCdp%VqSqa!R%KBsWQ)w{**xFqK3hmGnt<>cK8nj zcoMJn;97s!GNPOAE(ea|prI+(zN&Y;Md|bTA<_~uAK}qSVmac?+#>C@QKB!_-XOlp zs%z1NDPRt#9a>?HwsD2hnK996Sf%Jz1OzbVNY;LWye-Jim%d!swK&iF$d`E6n} z3Il<;;jBH_lBF_pIAae7F80(VMd)_oTrDC*Kn1_WXez3XWQcY^U2*zEW+0$1_)!0D z+;5G@h4uBRx&ww+K2Mi~0Ehk}+kjm6!=&izf?`A4?FJ_m!9mzIu}87LaMS+3!6kIj zVU#nSK$Z+Z(3mlT>g$#i1opK#6G*03_+jIj9AsFb?c8b2)I`M0W;KYZD;*U|Nd?Fi zI$SyRAhL3z@BccMart{^mOCrTfr9};$FX3~F5NN1Dy;P5o_bYJ5=W4s{&~ViHp4HR zIvKhWo063IR_eVQXje9#!~zfRFZWE%N^oAy77sBFwOVU7{;YsX8vj++O7$zBZAyQC zikTU#5&WQJ_%sdP>|MHV%dB_X*z#L>oSkbq_() zLf`LgbZ{L)nneUIKfkJO;;JeIpBL|gDcQs-tk)rl?(Q*2H1Yi$KZ@vc_fM!y-l+1u zEw)Dw5i`R0@{N0xBNt%SdDQ<{0#}k;mK1H07?Y$&xqAmIB(P8}NQ~yB?)`TH`3;CW z=&!c(`YLuV|Y}@{?l?$VQGrsg*y&LxDfSR3ueO zgn5x5i(!v!B>|J}h4(Ur270M^(~T-yn*6*CM*1Y%GukE>+Lx9Rk<`mGwoVE1N$3}R zN;Cj=x@-|?Z0$#D8laZaef5> zcE$k^;P9d9bFSoqEU(csIKC?%E2$4X|42_?tcc%q`1_`Gk_TvGxPYJM-BD;IF}H6)Fl^@9-=q5~xFTQ`%NH)p2`- zf2d}onAeihqeOCQQrc}vAB(Zohwyx-i$6{+TVnV&uUtu+-AW;*uJ)$#H9& zDRbJHjNyXC#4prwI>nfWkAlBI<^H^u_z(Nd{Oy4035;8!bKQGm5_mt>z45XGelhYr 
z0z)jG`W!bFC^5Mo&BF;K>60gKY{tjK%hyOe^gm_Qyc zb%FyO4xoedbvMaToAuk$EVU64ZHqPL48<=|?J8;*+XFoLEwyaJ_o=9`QGh*Q{nAIZ zdJd89raxB&5VC)q{m_qYFn6gpVCpxC^Gk2R7h^M!Fh;o%Ym{ z$C;Ve336TSi!Z@@>raKxEpV>ipsy~uOo)yitiduZS=zI}n?q-=VuB(ED3{mgjmQ9k zLn1%9|CnY9>`d{FFbVHuGp)3Eg=f-Lv{uRXwg2q3A+u~>5lJBeUc1rr|Rb$oNo_zs3OMwsbwd_h;NN>b&Pwn)Dx3?(+{TT zS@#TSKfMLDk~x?Qs*^9<^sY!y-L$Ny?T%deyIo$QCWJTBl+5B`Re9(fehFrplBlCl zNaQfAI8|}|vm4xBWAojI@OSFW!%BF<#y$7#et}So>Y^82r5lT`wW6EX(?fOTMyn3| zNxKsSy@_b6k0Y+;Pff?`a!V*0kDbAaNk^-!H)lt2LOx$5t27vfQMh78UD>nDVOBg{`|PlWVHVe1s&`<-z!*i^4|QpluIc=KlBeC zia{=o@6{NuOWLiDK2u-kEf_*8cwO?b?$KRdIwW@ zD+mFl0H&r7$ajTjU3+@JCxe%ew5aC6+&`j+t<{ps-e=V(Rvx&X*JtrVUjKu7f2}V! z_V{NWyWt$`x4X%u&_!W7PtOxrUCECcs9Htm`a?icB7Pw2j#w`3w;0{o7s_1dv!oIe zv-3VqSJtO^sNJGonC#tX!zY?W3A(13tw~qAAZiy&iyYS$WIcrMx@nA-&9NYOs$)BT zq)BxgKCQ+Yp_wXLx?s&Uq%ldYY%gcX{*)O2_c#L_Y!VO(Y3$a2kb84 zL^TnQJuc;2Lh#GQl|Z!rwJDTFO*ckaj~AO~-Htkt>UK=X1_v}zzJh00iTZ~*PJAQj z%5eJGu*dsyb5T|87rplpO1Es7OQnHTA{$tk4=BRm0bd9YPNxbnelU3X_YK%xDB>Qul?YAhb2?Clj!{Q zxt}kypZQ_itsW=pZ-UQKS+*{opKbOU3m!*c82R$syRNo%q8eD>?RMDn0Eis~umfJo z+h(n79a)@LnXuPo`nCIp#0ZF8g_9ifE`N=-6ujM~>u{7Gj%UedN&fFlH~gX9C68*7 z^@#gJBw{1JKO8sJ4P3&0kh%)L;?BQ*7pSOq@x(^z#d>z60rN}{V+s!S0;Ki$O5mL~h#ux8`DFf80VMpuq7d~289KF%UAxz^Q z^vfXj`zjqJj>NTQ>)Hr8yeK^rMQyOX+QbOQ9yzfI1df3&KF^(^2Yd~zT&zj3ckU)0 zhJo*_m?GLeZHsYW4PQ_R5n_{*fJ5a|_m`{gRX49G8}{UD!{9vtwGbmOF&pwCR{>a4P_WO9<6mFSAV(?|0$=M9dvlWFu}>Tzz|+R%WL}jry>+yd^bra zz=wJvWo#V$(ynKakkym_-hxwKFIJSw%`amcK>vWJ>Cws8Hav%Hdyg#9TI%xV?0 zRV7DBBrIGcK;lNB&f2>yCG~yAV);%@fC?Ad$L7ji^m95RvDU09oef2M*BzrTQsPaW zO0YvscR0+Xpq+6)093u5Wsto5T5DlqcxIbYvW&w+4$hNn6v;!wdoI|*?-#x7bSS_* z!gkJdV7AoLwn!k^Q zYdZzyM#Q#|lOB!jsKk0?zXo=elg8f%}~vaE=f_Jq{fJ)(zBP ztz0*D+cd00<`h8RIBQ2JFsQ6SXHW>Pk#}Q|=Olzr<^A`<+%^_vx>hW+gvFgF`?^*o z9)hXtlR<~UHpl;=8rmAlM5ZLE zK;b(#(7s602&U)TGdTU$?dz&)AR_N3`;FCb*aN)5TPMX6K_630?MBSuCT6yTg$``| zJle9{k-;%CbtYtv7|N3J+|iAvQN8HR1YXpGHujQ_feo-@r9RwR?-6~hR$b|G&lTd~ zN)hsjFXWPOAdITya$KdEu}p3C3v(QIeGB&l(JAnc!{_+!Tof%_ z2Y1vd`>etAb6AVqe=)n|W&cnAO6;Oe@(LJEZMj`m3u?LPX}|lD2?hj>?z=M~*t$#K z^oY-y`XkT2P~h!;sg9oi@4sJwYM2E9JMVz2D@*<}-CUoM0@&t1bhgVzci^mZwyl_N z!{_@@quHj_$L;7jw1e7qTozmzD>Vm|CEbx|K%N4an49@5C>QL z73x%yee+;LGlh#Px-Wl=izaI2uO^W0ob0yr$C7+=wtbmbbi4c0{bcQ1Q1g`j&nBKp zL?EAOkiG_iB>Lw5nou$53(M!u5k3=woBuR%2h03qZalWnml#1`FJ|xa zA1aXnT@h#jplkA<1)*;*Ug%_QtlyjcWyL=I>1714^RF>lJKwH^zU#7y2DBg}W^RW& zSo{BJS0y?DHlLI0ZvMy}pNj?E;xboop4TFr^Z1FznpH8;@Xh;_n%FT!t~4 zb>mfNUd8q4I09EV=^sba3DEI)(Phx2>hnVG^GBTDhiFh4EYvRQWR`m(otx%3Fr>2c z*vtObR+{S3!C4fpPjvDbsM zU1Z!yU$UVWx_Np@>wXxCTuF<^H>jpy)WRKCIEy-*ald7-<|~hCnw@ExRTq?#^8Eoj z4|n+Cy5Gn5NsR~iUZTBb{D@tWA9;wxF7{M%dRSwy-cha0+`0Scfh45*6%KH7Ji)auE>|F>YRo;Y37qzYakL@|tfi2`Iwa zZ%2vUBo?2VxR8En}E<*7PPvla^P%O^uRdeZp#X*ZlJ0 zc2LAb=2iO=J-fIXlrNhVkvvB>sU4v&gd6_3CJA_0t?I&zhMYYW-yWb_!;^S{p)1yAO zqJNX1{l&|8oF_Yh>mn6|MU+{YijNVruplZ0qlg5b9=aa|jWg%rE_}Bt7-SM{{?X6v zsW3GBfS6=H0+T|{xp^+H&e!YjI$~iSReAkEuRHSQS+J166T7S5&qwTT&BB5yAxOA| z$~M?n?{HL2BvRGym=n@aAl%X6l!Dce9;C=p4PuKJo>hcDh-|-Loz~ihCFoSVL?AOg z;8zY z(lt^gSfG7oIJPZ{!N9|jK=_eSWO#N}wB~`+A1;}^y6i*yPh)6z*jBr^3_%f%JFiNCHURp)C$jvA;2>h5b|1|-jm3*5xo)c)E<}38(@=U0YR+N`;mSwEjcm*EEGfN z&731+LqGxlE{?!imcbSs==f(xt!!2);v@AQ@qk# ziYa$LvhDaotONLT_-VPMLJyk|BKl!Px-vF3ntAlQV1C0JUu4v_zKOtZvS^ACB8zm6 zCBIt>&x}|IkGx;^Y>q!cb4?L60^36I_A?-=Yd1m5~RyJAWApdaPz?{3x;26N8FYjphF> zIgS2L^-DuT6v&1Pi%joeZGi6nE$03eu?$KAXUy;Ci8Jl3!^CJjqF`MlN1`cRu|<5u zlp)@<)*{h075Amdh1q%Ub>@TD)zOX*&B%vrIK!@=U$vax=y5`;wIT2NFF$&(GdM$A zQ!^;~pUJ-2Gk&}7ZvD@P4Br04pt}pcZ@v2>cDFiq9DH5<`29cCx2h)&wE+vVaR^gs z+(+1LA(Yp^5d@$rS5gb2;;oBmrN-9@Cczl`tQuyY5b8n1N}z)iXbRs8mwv 
z8boW*!PZhH$0~RVG05MJH{8HP1CILW7r0L@aqlGci>P{*cfrf3W?AlT+-j%GcYR25b2IKp29ZdDu&IEry!eO(|R|BV7HutLKC<; zgv5Y{FMbzg)m5O+<8x!_Tj*GOk(8?>T{fza62f=@6cyaI3XRTc*YLEi|o7P>NL%$&(j63$AdYH34NCBsg|w7fXIyQG}+ZSR75orJz% zb*Fh!57R}iHvF#sU={BVCj5M|5g@_$%N7kOhI9TVlpSK1@FhL4CC+b!a~>t_5=_NQ zN8(4+!Ur7RFLK8|kE%z-kxhLbsGZ>_;e^BrpwOUFSR!Ry4R6`$sdRW8?Dz@{nj}hY z$=ksq3R3+={ySm;#I9J;Rqv6e(C94Fos3Xyh+{+8j_{@je}C2{X|$qLil;u~|54)q zV~c*riJ2o6l}ak-3eF&ci`Qr&?+!(<-KjoAQ+#NH1k(K~c2U}loMwP9<+tyCh4VYz zN$;CMhGAyvaPa|**a8LsH>DMUg05&)pD_)$^?FtP#{@|sUny_o7MWLkhzn{+d2U+SxLIR%D-gF8YcB=6R z$l5Y>8y#V{OA>gx^BkIKR&W{%LiE8lnaMpX@Xs?jI_LDlz>4ml1Lr>_Cb{M5>N3?4 zBRfAW&`a&6&%1Zf_#sSeBb$zu++;{kKKzN&lX1uIj1VfNgyjnG(^Gl%a=X@ zV$kZxAf2BeIu)~W^ut3HN$U~HgKw2VHY;$e#d&kB==a|MN&xEVF-hJ$lUopok>6z8 zj69cG%f9M$g9BvJK`#=3J;i4Mv6j>((JckH_33=A^B3bR8>F_^9dZzO)=)5V5t1pilOrv6+H0G`tT_79%$!*sYY=^G8l zItwIE*cPKrQ6|aq;5l`zRoPyZ*<{cUUses`A<)}8)LyoCt}37&*G2I}@|UoownwD{ zs{$V8^DKc8FWkbdc)Aypuz;7kB^L&~$}rr{^5n;6&?a5Q3E5-KnlE1z5*(n@|9N#O!4w=H4oSm zbzn5W26s5rk~R}WGn(j?ig)_P@XdDf154<4S!;K-Xu+G1Q9nm91+%!O()7r6dr#s3@A#n?Z-DBKyVE3>Ya_tTNdc zg>gr%QLV;bzR#CODGg!NgYf*s z9+;?3rp^iOpo}1MA!Qn3h7y9_gzUOi0g_B2wf_T46UfDwxU{2W{~VHB=rrG}83&S4 zD%*2jA4M{1ML?bNzN5z`xg3Ipbt9E+RvNK=-=}j1U0=hxRMZL2-x|3>ZA&iD1yJRN zD@*6o5{Uf_Zza&tvr0`YLG<#Becrnc`>KHG19UoRkMpV9-i^k%Cg4AAnHEdr@hVla zXAAaGyJ?cbu8;|-Ozrl6;h3n?Ec)Hh{<==73SE=hqc|tMfXXdD9ZF4sbBAZIDPP$% zA!2^!_27qVZrx|XUxTRW=JV~)>-x6$rkjB16{T)}*>n@a73FKC6!ZcaP9d_#dx1Bu z-_BR+jJo_?>ZZ|K!KQcbGG5Dj1r_Z_a0ydzavb}ZdL2@*e;nNtVXO;KK`uRol?2lBOqb!#HJS_E_!g}#KG=xzN>k%$`FH@4^*DOBm5C0ize^I+LjxGiea!o+b+!evK(82t~In4p2Ik` zg7;xg2pu)jQ?W@)HKe?zGK89}a-5DEaZK~7JMouOj{kPI;KO;A*Y>*y8q0(M z19GUacfbedzh%`YD=I7&zwv-~OwfX-iGyq3Y2$c$AaPz1e)Gpdp447Ou>WmSQ3&8a z3W(nPU{>_!61*%R5(6kn%FKW25Gl*eI4HM#BQBCCoZ#WN=>DwTfPz-vWN=yC(-bl< zxJu2Z!ACvFTk!WhF0@u9BXZ8_gNE>^f|3xJJDR<-){;c9f!v%FmpN#6eA}yEm%sBl z5n=OWbquc`j{us>ZfGp_Go}}5fRT+p|L?&6X!^>qsQ%||x;v!1TR^&`q)P-85Req4 zyO%DJ?nb($OIVO@q`O198}>Zs`}<$d`+YUnp3ltObBAhg>^6Eg(-#a%P4V#TzHtPr z>&O~nsmJBPm}W-3%ceAh(pT9rbY1pY(YlO@3+uF5>0GM=>@@T|p<7#9hMCXsuZdSgZkUpMZRh9sEytXw7qdRtod2*?(wY{`1Ry|x zPYZaB19`Kv_t`r37PGRS5`5++BvZrhp^g`j>3d_{xnzzSG51v-G3wvBNBfr!mm;~( zHH5%Z46rY8WbXgO^V^k69rjn5?ozwvNzEIkr7#aFuI>^5)sCh>z*`DpKa&qA2X` z#)y2Q3l8jLHnbvK8j@PZaZQDFmXg|al4MBEDP#Ha z8{;|GY9#XG*)ys?{_jaWb=8bjms7ZwiKW^`t;Bz?52iX!hmcU1RUCV_KKanI_{$F< z)WFoIk+Up({R z4swia&DSOiKb=7?Mjyq&Be2f^m~EDtBMWk6Z65&O2)NpUTsXXZY|?>d!@Z~SC+JNY zKB%{ob?QCR>lRp2`}IX}xOS#^HO4+NqjP!vb**^QzhK&*f2@*iOi@GD5ohAjX1n1c zzEJQ93^<7mI3ncppMlm&^p1#%_BRJtLM=%?cf&otb4AI7uzD#!tAn*bi2&;w> zz9Ju3s#Td58E~#9yLHG|POSQ=cUS&;i%i{W$zR52M^t+>{O0X-cX0)S0!i)?j}a^GXECi;G|)E~f7Oq0we<`MQOU`^ zTE4OKWo@j}xR5Q)TxHD493#(N*>EVPTzN3xp5zR>`2%FfN~*56T?uyX#Vr5xlDg8E zw^Xv{rH-lrIXG)uQvPK3(8mj?_;F3nbF~-8MHA~@*vTOTq+I6!EYwarj6$Lbk2bq> zG3*mvfcQB5KA;cx>%v#2?67w(mPaMfxvE~Am*P!`-Bu5l+HCF5+)qKil})M6y01UxG<7n>J7C08&&&KTgUcR{mR~ z9hzWoW3W{OlT?fO$mi znG8ZeqSWnVc{liq=@;SD*b)yx(dxq_=Qo%HYD2^N%&5dun&{;(maB3S1ZDg@PGsndf)wQzB&Dq9?S4H-3MnBAGt`mulc;`Y2zLd!!MWiwacuM|q3-JD9r9R=v2axb}a+ zh_-;t_x*3?~}_%MwqetmB-;z#mLc|Zb!!piP0Vh zb=dv726%M<|DM3_p&`KQTiY>2<3xhTS0EV-ngQDPfl6_QQ_vvi{Vu?;B#XGy9?)R2 z!b=(M%*Z7&(j{=&_);=#XjRF!eI)&$(OffSNIm_r0P6rRX{zM`!>So_DpNn|&Iu03 zaJJ#~9R$HkPjT)fCI%#ldqn*uI(9{E1_=jOP&d#QCPLDYbur6|YMLnnI7R>3FLOlu zcJ1*gBHQqxlOrzG`i!DFX&(hO+w1SvZ#U`{r#G8xX;k^lwk9rutq{@j5h?+IY70b*<%T*?8Uz%;HPH2H08VD+4Lfr3-`zhs|6 z&Dy*|i^h{n0?0>h=;5T((^MbK6XR9YhY61cXPGg$;@=~OUoxuMskw#MM=N=qQkI)q zqV)U1fj)&27*D(9SXk8FKPxenT_SelX7CtCI-)@2nuq0uDXM^%YL?kUYpfKPh zIPTbg_7(lxrXg0d4m476(ldBy)r8h;%}y+PqwOWrE=*t`XdWh7H)4A~hR{m{NuZR& 
z-=Wn$wB5Xi?CqErWZm=h9K{-4T$>$eTftP=`6j*tW9TIQ+7RF=Qo;>0pLF0lEEfz67gFF}3SjA)N<#!;R;zq);Fq z{A*y(<#^Pm7?%3`v)@3~wMr_0>$-RcWhl+EsEpf>Kb)_xZX1UbGaURQY&m8AsqUOF zsH6tfzEf8I({XYpOcZ7QEv3H4u;+ZI2KAsbB{tzVc9_)$omg+yu>fB{>S7re9xy@x zIB!91Yb4q0UZq2y!`7?LqSsYn9?iS!WTi()3GYMq@U6z*45&YJ67|-enIY>xX=O-W z_zAtdSuunMKh0^6f5#JsEeXxYMA5mfGrSj|6R}Suc(T^xfH`nXG^HD^QZJbaiKBv> z(Hp?aqc6$}99Td#mgE(%*bHaRPz;wHiE^&2kFq^%R$RQ*y`vo=S<=03f0yozf*L&) zkT08{YN?0NymJ&ofiZsA1mQ=pqn`K2!qaN2;T~FYlaT}?kkW9G*+g;7(+yf>bW@&y zTN0iUs&;BL+rKj%doVI#7csilkW*0|@%enYEi^-q3a7RL>CoW8;e!y!amGNfS?LyG z@It-$c_I4+reEt1tJV2e80e@nPV!aXbGe1cjrQ2SA3J3g*Qo9N*MSAfPII+^WWs+I zc1+T2$kEN>Pf6JOP*@7J?~KTo3=y11o;xAt->$pvKFm1kOr$fW+U|+xAJRoG2(pHi zv=Sh*_mGKRX!0_`cp8zAo}B<(PB(8a@Gt;W-igNs0wYwwx*?5C`)^){wW|9(=XRt{ z+~>RCCn;&td*}zyE4@0eVYbb7fSZsU(<6G1Y7B#1Q(4GFRuL44b@Pi12M9{r55`5C zDOb@jF=5_(Ot0pY=FT6#pZm=GDTks-TOqQv1+hzp+FZ--;!GU@M^!6|5zNj^v1oks85@< zs>ItjhzBSY_I)2^xaF&VY(P)*G^E^D$a31Mlr9P1H5yOEbj7DHsQM?hbuWuk=#Q+% ztfWgZ-zUYE5bpr%z`yXDrhq!v&J`&rhLS(q%&cH9V0n5K0ZcxDpJ^meBJm>34umy1 zRIQMEf*9A#g6KJDk6QzNq?h2c_S;3FEI};&P(sgD_2NFJvo3)%LL0VhJ24;(PbsbT z`jA!L@K#RuQ1??>p@>(vXhW__mX8-AyCHtrCSMpbTKz^)Ph|1IzSSMYYoMxOV!>aq? zLOjPU?HEy7SkKvR9v$NejP2uw#L#EMee<8)`nUQ0$>Q#N^f7h_l;;T-XzBz)@XO(b zgfNozI}nPCuKcr%fRmfm0;&K&jAsL|=>xtT0D%HfVv`N%$+Spdwg<&M4U|g>M0~~? zcuil;p)h^W&J)+h`yzsY1*Tgl?hMac&D={`zUiyFSGU#0LU1Se!D)b!#fG~Rfc3er zNd*#@R$5uzfFmL@QD1RfmO>?eDs@nI+V@7ls5!QJEw${k|CPtT9*E12WAA&qQ_+c~ z-V}Z<&Q+N$Q}?VTsiPu&&P`%SXmJ)6WvrMdDRSW-N$fC!O@hCS+sui4EY-IL(SdAm zh@UG}G6#3p6GpwUT;FXuTfNM2NU5qh@45VB!s z!7lk4k#H%gRt!V?;KkabK)*eJs9Fq47f@U%<(vCpurA_MbJ5c@7TY74Zv3%ve)wy+ zXfB1}*k$g~Ag~O(!tuUfqKQjrNzpUl-bRN5A?8}UrXWEUkjrG8eQ#`EM3w@h@#N^c z;c#e;9|>|oq?PBmmo>YlQYr3 zaurN}f|{4lI!acaRL_7po$GA{YIn^`Bh{Y0wXNWiREO4gg#Rk!hsxa#XdG?FrSn{x zzby^ebrZ3Et?eD!?7hXg{SiXvA**Ra>EZ6rU9wpipDw!dCrxA{4bk>>=1%UHV6}aK zBSs`e@}>;}T9?;s4?*l}(ir_nvi-SSdL8gt>CyySXXT1E5PRj%WM$+V`= zIPLcmh0nCF^HxfpOsxAYS5r}8EUO2CTO-VUf9s_>vK%nc8nG`Rr~myAmCvD^#xzTT z%?TPwGKXCdw|+mc#g3!`v6o00wBHe#Qgj(DRMDqY-+aSNbEH?(Q7UHH$+*p95{W;D zeu`w|efJ&(rjGm$yuqTxCT70TvvEy=)bJIqim5Yl_+UIwejChXk?h+k>$QP5DEpHG zqFS>h#>UW>>Q@b_(}HKC(^TYI^(j47ud|G0O`Sq*!oX`2*%!V?~eJ6;xERi{<@ zBMNJCuNJPOoxx_lnT)}S(-d)c32LHP5oqhV6EC^;V{h5`+$*yt2#Q&S$igqDv2)GxH+ZomD;+`j<6utTXN>$99 zj4oGIH=T!-Y`sq~sx+{7`00bl;3!^X_lJ>0e9AxdyGp}xhBtXJf9mSm%y#{+77CMl zKJlp!ISlMdl(1`jCbJ|XaT$JbmF@^QITW+6Vu7Wu`r18zEv%ri5-AslJgo`$eNwM6 z`z9r#=6w`Ixt?uaR1uOHHEheW>|wc_jd(;*{k>jX$ohg4)|?nJhK8iQp#Cy3RmBv# zb2Yl$ZWnwH3Cg}ttsr| zU_~+eeYq|IW(w#Q2D)~0&CGrmK<09E4mA0AOn&Eq!TD`0>kTCkA%M%2_J3{Q`I(0L zxw*Eg5<%GIJrX!-nAmMdjb$w(fbCNJV|iB`2XPd?JWKbH?nzdHLpE_B86`{(~(uu7VPgJ{{Y}HBUHH=JMJSDBw2ag4-v?aWG z5YKq7StfDoSK*Hk$*7{zkW{C46K!I){=}^uSp^d$Jes~jvwb8jn;3UL14myGO_+n& z^)GLWr>)IiNDYN9ViH6i659uql{?y3inSXf;aVbdb+0`*4&NVMkMVX6X}3(>x7q_B zN8b1za-g=p$<)SW5&_Pw|52-2AB<05XsTfjz97Yz(x#cDnhUVP6cTDo`@ zds|&(w29V|e@DAjX|ZOq-YOa#q%Ueb$r2_}k~U?c(r5jOOJf#h*A$dIT%a?3%{9U6 zpZc!sNBzISc_afoG9CHDfWVu>A>q4RH+0=F7d_ z7TFbAm30oiPrz07%6I_zsTb>Ros@ZMxAt!ER<>LsgL~5N_%jq`fu>7~Z8fH!A@U__ zJXWSl($i*^R@6#gr-ib#``SflmOh&$oJI-=A*(rB+-4UL^}$?Gu=Oa z8vx@!45I@f@r|=CgTL#`kjr~1eYV?p7or!53LDS2`xtKL_3NLw{w~Ri{hgk$h@oWHY zt3mjRQruX#P)yd0z~ZTO);pA|R6ZSnQTU?1#iW3IL@KNP4|hL+8sEnY-@bGpYUG<| z7$NY(j!EnJvJg$vb$Kf-+lAlj{d08#WmLHg8Zn%}Kjcr-t|GqaYr zXrY~zwYqD@_WR`FUF9yF*scfhCGhBm9YT83u(34{gV@AC-@A~1D!B>&N{!B48 zbCq(njnkYju8^+_8Jjr6X>8GC?XwiN>Qwa`#LU_;U0J^-f~{O7U4FZ-bp=Djt6>FAg~rf177+ zLsmOpfBR)MLezwREdLW#`s7cZw&X)2>uY$!)hy`DikbkO#=(S>&)OyoEoV3FJ{16d&& z<+!}$OV9eBu+Nk0*Wc~BXI_qCe{28DY^Bl6#B)eQfms2?!EwWRFI7sjx?VE2f^--N zy!@{*4u0pB%w4;{&=%-f 
zXASWC&Y1$|-YML0_(=C!i8tB7mBNp?#hF{=#$o?3XTKH{bDZcvh9!)Kco@A`mwY|o zSe{ba-S@}6ACo|RoHzjYb7tZ%TRGg|laIyXx$&|!_KGSAoRH~v@n~AgQM4K_h(3p% z)|g(GNAQ8|OCz}J-%rwng49U{1ofnory%a5;g$Sev+1(JA9S8_?v2&oj=aB~&hcN7 zp>_P6xwNt1&2<0Re)gf+4&8t86qc;*7-hEuBggrzJu?PNPlPlIBJY7!z5s50Ac`G= z9bd(JAykFo_h4yLqk-?wx>=X1B`OY&<2z^B4Of4|OEkFizYqB~OK!dv{jwZ?{kWae z9@iJ(EVdO%_}SP2x7dgu#(%R2K9`@teG3^`))!}SF|L{@V47ptXcL}SoReRKwD)IJ z{2*trViGe2={wE$>gqbVe<=cecZiR1;IO%6_@l?E3f^Miz1Hb_iFkt=7Y=*k7CJ%PxXW_!y6bXZL@rm=u9aIa_d5Ulg>+eVyesXd2 zOeyGuryE0w8IF5+3LhX0ICBKPVNnPSa3+t1;KMn69@Ji2A z3Jo5(n19Hez7AzITMjE&v3lt?53P=h>HfBBe7&aOJe^Lrl3$>>4QeL{g1T7?0l=cq6vnY^Yuq`$K_ zGKi*0)9Hh;kO{7)BskE2rt9sqmLOQO)ll5=c)Z1m{l44CD$ zZ{Irf;d!)-k6p-0IM{J;)HX=8M?NKP*_6q`$) zN?4CJ)x7NN(BEQAvaF^os0&WRBfRpHZ5p#8MyhH5XSB+z)n_$WzEMCcpZkL?BA+jP zJfAbCl~Mr_T|O?I*}XB4PN>-E`Bm(U_C+?Tk;t<)rzzd7ql+|Ul<;M#$rct_FHHkOnY-hjn%xz znl$9w>+T`zD3RzKJi5H&wkqryOE~v9L`65TcG9`j+pw1(NneiN7V`0%y?5@&$nd(a z6#3LNCoRH8oI94bMg69__`SMX5*(IOG_;BJ-HqDR3Qey%5g#6pZpBr69HF4|`4?-} zvSS3C(04M%WB%oY@}!R&{mtq7+dCh7IxP)2X|Pc-!fRILz-_fKyBz0GGO0!!BAHJFBKcWZ@nqPD%o#S{X5KLj^DtP9&@dh$_*QhP#1@(Q zUmWaTo|^-^-z)^W`mC~4N+h<0U!+IPx`ASjf^6BvY1%L2B67;-Jv1dJh(MVh>~3`d zz`1~|T}W*Y)~OLQQSqNcYVV<}cf+8abTw+meXKj}0{{|?m7zBeBVY0L*<_Ls9`rAv z1K`5*a0%cm;bc+jVA^DfA-zH(=5IM%mip9yYsSmY`;ML0!fG-CQwh5zT@0ga_;vD9 z+Rm_a5t3PJwGMZEF*^nZEl>VzhhcX}t8ghXt!fpE(xY>z!G^`~RtY4fH-Z*Cm|IOM z*P-v+#CY2V0ThvZc@#buGlBjz`HQpx)-KJv(its`3V96lu~=3IF;j-QcN2S|b+Fm^ zH#nxnrIwkrT#Z+Fki>@LzXZ%bXWjoZRPu%I?L1kmnBE*Zd;J`|GOIPw#1|F4PQit-A=*rTHA2YmM0K7k|L&yNpA#7 zXRwp(vCkYc6jC#DFj5G{=wUX8t$}6CuisPba3aBP zZ@5dMZwN1MgYRTF(5Z*8p{oW;_;h6Z|`l4_*GqKqD z<^IPIz|(P1)B0O2@67#LgR0a0nZ-1p8f;4sy^vIUsViU!&4Aq>>RY@ z-V`C~AUs^ADH_W4I{oaOGHX5PbWcRraNJ4=+pJzIu^4<(4oW{+A0>1lnP?fk(0Wbg z_uzT@cBahJBpz+fOW)Cu^3B+z?60NEmf!=f36aoF=Uan=of2xg6vdQU%MS8FCatZl1fgDhG4fM?W1#j)>Z0)>V4%-80I!|p5+wCN- zm5v4gx#qQ{E+BLau)2WEs0wO`Qpi{Y{#@V?B;`avvO+dpq;6e~NWl(@3WYbWjUyr( zNHe9d54$tabL3nU1^WdD|BCIEA*1YEL2Z>k-rzNDvLd3EnQHGr4I^H)oy2Yog^#x z3Jy5^ql>N4ICvQraVWWrgSy#)jE$<__dl-=o*n!mjov&MQ0Fv*iq~vokte&pHGkrt zBJe6hN-yY>k@qHt>?Y$-a`p?RT(z0j;ehKBoZ$L0{gl*D=CHwBAQSpnmg4@O z1O88qIDWf+TKQ2mTo*1!MqY!0*Xvoh@%J;lgS=DYRaCi!CndFdUVszB>W+3FhQ>qw zyU~WKMyrDQe~FpFWYC^(=A#GV60~f$*IdA}|NJjEi&KA$nb1~yUh zVock8r+OsbU_GCKF9VpH{@4!u6&5i_&je?u`?ebGT)UC(*xX0E*#`|j)G)A$sepD) z4;E-sV|KeErFlHwWrx9RBt$T(331{CzJ^4_D0{}VenlfvCjNcucPS>x0hiYwA>;^|K(h}T}l6) z^tQgkSEMM!!w-n-8I}K_N4oDq=tx%^VXfa_W;jt zTRV;C5uJ$3neF<7OQbWltLL)Rblb(V5_!&26MPmwmc%8-q)}nz3jjHAhoy{{S5v6 z>eq_>ydHmoy`b|*M2k3EDn1l_^+3HzC-+KRp6iF!ZZAQyLKC^EKf@|VRg zh^G~9Q~XF`d2;dMS^=T)_NJO^kTOhl(+m6n|D;FLh>@TSy^Vb1C+;M@c-Pl@au5;T z(nMODNpqh4O_+8JHWK*_$V6F=SxDIbjO@CtJZuu2h1d0pN!5Q;f)P5Yz^>o5UlWDD zW67te)zg+ku53ephAR-4f+h*CSc&D`=8orIJI8?Fi1sSVRpaV9O~f24W*A(3v>at9I@gc|tK=~E~KIVn6?%cbJt4jJYmPWxhKa*H{BQW3A1#2Mon9aYy!+jLz8j%n2@Sr2 zKJ+DbaOG8^u=eHOb^N>jEk28vnn-!d2wQLf)20ysA3JLqaQ8HRZa&!=Du%}7OXR1P zCZw5AZ5BJeInNag2MlohpWhVkE!aQ(#qEhz3|{Pl&ui*_fP6xp^fBOv-7TZQv^5?e z5}lj%sY$nkK=9?8CCf&)Ot=YpD0+?*X5(2`VX47_CKh7i&QOSxAAzR~*W7AHJ_D(Z0lpnZ$8QF$VHZmQb-M=?YOgs`m1mS3%pgjyIFe%A0pulIhi(-VhkB^)3`q|Gb=$vKND2Fqgl_mMh1mq<~F<+Zurim8OH*`x}QKQcnAc{OD00)25%LzItc+2hy*4>buoI zqf3H+V;q#;c!)RAnrJ7GCFf(%t?v=UDq7OF70KCAd5vP&O-^CUOvB-K{ghT~G(7+C z#3#Ek|L&C#UK0LxS1YOOZ-XemLqlYV{qhL}flN`chpDI#-dHDrw+HZ6LsaiE@I6Xq zznJjbF}2&fSN#jG;Kt_F`5B5L~Ym6$QQ!J-_7`}FtfY>Sw@hRtGQchNY*{DIJO>7v)u~`zK5F4 zBVLc!_-0fC-PzYE`@c;!dmR6 zAUU4RqQ3KUtb`v3F~^jf9cH3UOAzm>=YirW zFJR52Vn`OR3WkE9S&?YXqRPuJ7-w-Ddi1r1IWPiU@#d%$f$$SX7sm6_iQX2gW3!4H zay#J$D8o6hDmhVS*(}QT`4i^(JIUSXMuf4YTxMt}{E0*FY2;<)$a~9pDH-BH=(p2u 
zT!VY3U4CAC4h$RECtcDOMRt0*=^;`Y+f!^_()LcQIw!&g8Abk3x7d+oc`;yDe7QROx2siY2C}rpbpc$gsg}EU+T>`Ah7i zm^pEJf&;@9L>c*~^A#28aM$`g@cLa}$DO;wXKw z1Q8wSj4(4pvSL@8tx$H{t0NSMUPd9Su71^CDFOO`G+o){L@`}v*4@OL>)3n_ z-=*%haX^k1Vf&emAS5`5Y!y8-*lOapFZ6i9mkh5z?>K8FU0r>+#W2ulKKv5(UTCPlYOdj%VTnAf;w8 zzV*N=K`V)@q;2KIllaUQq26aZZtEidNu7|CBDu=Df3hl04|7qnpl1g9w1kG>NWLX4 z9XZs660R`^kjD?6qz<0WN|?~+f86B$k$T7bS%B8XAdn}wNn%s!x>MV@evSnO(l?B0 zKHRkq4c*b7L-rYfk}QyR^4CJhm?Jgog4w8V7H)Sg!9yG%Il5vR@7xNiBd5Jbgn*a( z>4&piwIZ;$U!FZQXL$V&;#z+_>P{U7BX~YG?6pcXk4Z!`3lgp}lus%)1MXBEHvs<& z$W!p0R<0dE?uS^x<}lo_;IUyK_jaeJtVWlcbSE*VaRahvXbch}aF%B7b7L9$h3x>N zulcK25IHqs#t_>~awTHAH3^vzHk~JR9m+sFo8+($_HcWpx&_~O;iY*#xWA#oiyG%Z z=@%Ta!7D-&S!ALld@!XRG!-X~E;HCBl_rp0G_g0N=s{BInzo_evw5lKhu6em(5lFV zgU;2-v>6%b)IX4+^J^*yGa~G1A$u(MA}xB!C;5{w2)g2rgvhBeF(!DH0RggA=ge82 z^sHZS6-OO~DfdDP<|a`mLcF4OG}H^(<%Pe|U78K0M58Z{r+rNpGr~t8kc~9Jk!;Bc z@Vhx~t~wclcy$?nI)pqHLCRSG%zl7M?5seW#}~q-UTFg+!T9cGHK#VKl8%EZhm`lm z$#}w6nIyX*P4gfF(`XBtYnZ1Pl=BzLAimL0ipb2D@SxgDpc-aNdCOF$qAXzz#{Pod z+MxDiio>$NsGTVca;T6}|#hM|n4$Y&#$c;r4P^<1Ub{0onT~hHkk5PTB+tKhXNbFqFe+kkv z#U^KdbsW!O_SeKkIT9PA)|U1BTJgk|K?QG8k?h5*h8Ksx(A=n;$T2LjIhW`l&=EcFtu-sdj7pmILvI?z%gM;qNq%snl$~n z3+UJeVrjf0R8)^Ra&j*Rlv_Ps+JqmA0X5L z!W5F@mi>@bfj?yvp{@8mwG+HgIDmpF-m7@DV@)g~d3`Q43yjtP$uF{HvfHz5WMSCy|K1HUF}Ze1%9Gi~W@C6YAr@Xi4G#;U%hGWX6L_AC1IN9>1?-W{RQR3`+Boz69CPc{(k2c zhgPGo4h^t3yozB)q~i;|ceCh#uD zvk!+ehcb1(W|pv`ZU^!igf$3XMmqMOcgY-{pz|3Q7XWtD`Q2z0AgLM_ZtzvdfK5~F zd>B094+a9R8_&&>*LPX(4c^{)=GVH?nFf;EMgXAClBwChQ2KSAF=iqb_n+{)_h*Wx zpVhC4BWpfmZm-0S%ZJ^cZM90$Bm&JZ`#z-;w<%V+qg{TXxkmOe+O*#&(gEle*-Hms z^Q(#pKcC-KR%)O{0mc1U#=`Uc*^3MhBOenoTh0Pg#ugCaVkPr&9^n^0mRWEafr#AA zuBq;i2xWemkP1@eS9pC%YH75$Jx;cKMP%t6Ohi^2gX79|rg^GXENHf7MdjaPdnK#A zg3ie)U!1txB1hg?>wO##qqj*QK&%vde}-wcQ)}u1#qP~h^*qpB|MTU&34&+870xS{ zQT|bLCad_;&OKfxGohFfN#=eN74sD;Jg>g-5|mcWtq~kmSJmk3EPjjSJo@92DA6wn z!y;dtw;)429c3_s`T56W)SKTI+9WfOd0?aZak=gaI31eJfxLvO>}DH3xN^!G);8HC zn0)r_&-{mL&E8PzOsncQy^scxehBn(|1hekXcFG4 zRb1DFF@9{t&+x)1n_`34Cq=Xu@Y@0G?oRV0mq8?}ymL)Q!>;#4Z^?28*iGkM_U@#@ z;h0;y`#Jqzw&Pq7&;SSCJbbqxuH~NY^VrmuYMQ+>+<-#Z(wnfatWJ~4!-qWb_UjeZ^)Xz$*YOqhn<2^-zD>73 z1)U~OD%t_Yq_E=ip^vbMHA|ui6^_c7By3w;xoYyIIe08o088 zUfhjHTgNSNd7=Q!=t^A}&!avkvphksIP_H2pMl_{3ZSTp9FX!D`Eeb_!5zl|oW9ea zbzho}E%?qN%sqh$vMW8FHO2_2HFg&gaPPe*5N!;9RkV_Q_Y;tol#_k-NC%U!k-gIM z#R3kz8!A+98J?2)l^$|(L%M|H19h`E->?vqhTY?;ok#!xY|UQ}F6q_*^zXt~7j?#u z_$qq^*cs$BXx0pJ)iiQdz>oWk58XVKUqL58n9t}UcW5v1;i%%@&;4lbOD#S?fk+Hp zR7G%WPZncv%(q2r&Ag;GP41l9y0u5x7oB}-jzx%}-~0pfXgCAj>v zMIOdow9)~z+ugXq`Off10Jfb!XC4ywOQ2t1ktXa2HAE{hPJ-1c2&eK%O@GMWF` zw_GD|3)*ELp~><_aq#(*hyq)_Nt?bW{yQ3cv0l8z$D*h$b3v}!Ncm9r6XA@ywONq@>P%juyKi_Tt z?IY^d>iRP<8H`(o+w-~CfNZ4dn{g%)f^uzJ!|V=JbHZ)|YPUXm&-2Afz4nI$O5qlq z1$3Mo6|DQGB+fM4f61(Z^3L1h#nAS$ugvf**jLpb@HsQjaoYTzJf-1|E2M421#+0N zw&qch;(6!`sIvE0V}?YW&^DQ#)D+)ox8rNE@G1>~)z6SCEzn@_I6GEP;vDPNyCB0A z*evy5eAZuFRl-AN3u@VQAz?JyV0itykusteQo2KlGv4$)Er}f$`RE znjFHcGaW;{Jy-WR*Mwwfr~CNwL$bsg$-EWEkcQV=BQLwP@aiYg6Yu-{#w}pqzC&y6 zzAX0dzOI)*7t(TSmeVQH_5kj)#xu!TFzYr}i%R!LON;2K_cC|t<%dO)SfGKy{e&ou z*ka6x#deyHms2k6$9CXJSA#6IXY9i^8e{(yJ7|0QGyrruAhMoR4kbTa;^__eE#n9S zkO3uV%cR4fMVr1u4kMuE>oOsZffDR)3GA0_uz{#H{0DyAV@Do5CyzDOV@oihi%<}x zstOGL<@)%^M3!GwQ$SNEF^Gdd(xF+yUy$N}h+wf)*C_w^$ENa2?TF#|=rvSr>ywoA)nbM<%<3>H z4ujJaS`Sx8b%|K)8imo1pF{@S%)n@pPXm&%a}UY>MagidmE&4D)cf~xS^fv-%S&p@(tGtQq=z-necuAh+lU_WG4Bpv`hkIif)f5t~P9IVu6(p@vK~SboBY16haVYf*;t&r$xdRgiZBl zR7Gq$CYQ*cSd_l232&_qC9ZIxC?t>f6PM~alCb0HG^4Yi9vSrvDDwZZvn5eHR&jgn z1mIjTSv^1ML;oL1R~Zmx*R)CLZt3psknZm8PH6;*rMnwxM7kRkmPSGv0TJnL5G0np 
zzkS~C&;Gw>=bSS$SIlDb?+q)e%YU=YmNv~YZy&7&fFC%Q>3ZA{IaYQ$!_FLs+|saD zaMdbx{lCa2dcY$|uIYhCb`~$bRw|j5o2JN>Pw)V`2V+aa6q2b<@BOr5SrcL&1?f{m z8B*3A)CiV*BjD_Emp?Aoc*4^EJK%O{TqHcQEZqC;RiFC)LXJv|VzK_4eJN$+_D63d)JlXlxQ&!t=t`wF;6`!!8&VJ_QbjxW zMH1m}@DwUQ+LjzY`xcGOfM@arYyfSsi0nNpk>`~ z%i8r(iTLj#F>`aSW#vR#U|gv`h-L~@6?tP8ni(3ian^;C*0FvS3Kg?^ z-7qVaO`C=nMIm@qez(r)3!GWAX2PojznEW(q_b+|g@{<&Xu&@VObIOD`4B=z13Lk9 zhd5601(x6J1sK9)22~lG?JK_&x%yAU55kUP2AJQ|o57TZFU?N~p?j%jFwX}@V0tPZ zHU$8ySF^H{kj9J0ABU# zP5B=Rw%pi7RiBJXbi7`1E-T5IkRca1lG5FOzHZ(DayU=i`^?ZSl)HB5^yg#PZiA3E z0FAx74G*WCbFPgQ=VEi<4Ekjc?Az5WbWQEiz6X}J%2w6PMDLI7J#7=@`zhBbeU3~o z`kncdW~q4ngX3+gd&SM^o3j$MA%WJh4uL0u#BKM@Cz10l{;(0u{n#VLes%9^Ib0fM z+?2@8=bLb>W74(q5SOvTFaLfa2`*UI)O876>pmbA?eZZu@m9?-&XG-9tqoERR=>w{ z|HGMcxvPq?5ydjYt9dJJH70Ljj?KM|z0eWPxqDebYwPpo5!qBXa+Oh}4?%|{yG^X@ zC;N-`uM}}z;t#tC&zdfAnlvj8%ie8rs~%ZfFh^6b-nrC2|8|%D+hjGkRk@~fb>~@} z0VR6`)*#%>69-|VQlc04_s~%T_k%x6OVITjzlUl`A=VIymRrawtvBj~N~U&KC^I(8 z<>`z&_Qww7kE^mrqwoLxAXtX6p825|i`~lC*M(A;=wjCbi&cC3H&CyTC|R)Za$q*W zmC!5cE=~Oj@O}=yjUPK zE{1O_0`QFohROITJ{knMHU3P*N&%RL_K3(@Jqay0iIvKJz1NcwutsTIT_4j)LY4ff zWMjAY$G3XtlxiS(D4S4#z$iU%j9#)ZCB&rgje9AoFw;;6vbulSgj4bW!(pntirBmiqE~d*c;(eI)sUQl=y@^O8dxv=X+T&HnHp>^c}aaXO!nUfy`78n zErOmlv(zRY(47aNGvgIwWdB|K@E@ADC&e`OtKSSddDMPss&k;Ri(!_ia3Rfp4~%cLtbLdW@kn}~53e#v}m$6cYR7ZTSH zSzm^0?(rsS-DOHuF(~HIeM78bCp~MxzsH(-??9QEO-aO5YeZvgi(Rt0wnTvi=kk;| zDq~qjI0NIri4gyh8KRMxFxdOHbJHc>EBsMygi;{rJGfJ5i+`P|Prk@w;y z&2z1y7bgcqvq?XIfsx_=7-m3K4R5So*MR7-^k!xv?QvSxx6c%)@*%dn!~Weeq%U9( zYgfPIP=WNHgB-X^bcBg?@XjT45yt_O7{AQ9!}DKx7l*tuQR^t5)=K1uaEp@Xg@PI4 znc`g*?`GGlm+&wae^gj5U=<6_vT!tL zXu!CYk{$3Jxp&zu?VK)*aJHL*bsJOr>2MtMBGvBx}F*y*>6JYlZkM z+K--ryjEr3L5`$N3npqwN~C&KUm4lDMKB>-6ws~yy8%;}T;9+hSw*F33bAsElkcE- zrhW=EVmGRjY6RUUb8=V8WWMR2x^miFcFVfK1!~hCOEdS__|HTXGcm1*HC?E0UD1wd z!jhu7&LZ1QvIop zbKqQNa&X|=7;0IY?y{h}dh^?rB(#T*C`b6gyE{bHhLaEVcm=&?^Rc<-e=zW@xST5+ z!Buia@gn02cyAVsN`%n_ljB%(LAE4D{11| z{gIXA47a$t>aBZ=_ygHx`r4gDi^iYEPP4~~+9%aqp3=p1dE=~a*Iq5qN_Pe!*!m(i z?=x9qr@6sr_)iBOj5f7e55OIUGi>Gs11AZ9X|`}-^}oLz zUoh-z^aIiyJeLRlQ_18!G-P6Y4u`gYg|G*?@Uq!yZDZ zzk44O4G0hKCmrSHMRznMl5E9jEF0)$7m?Gg%`fzIJ4ZHHbkb~{k68}h}_T2j8Mmeu<|2 zp_j}IV>1)|U4dbuxIo71+juv{SoUP3n&+9Ngnq^}`Rqi7crZ_tb_ml1cj^a8%>P0Q zR;@=^Z7nuT`U%$jPv`c{7;^Ow0&^HF+La=3FiV=y4amtT8{GCzvtTm+UfqWv{b_bh zu<=uOt9nqZ^-adxQIjw9l}=2;k+Zov@Cn-)_|akwWwtV05=_H}hKru6rASrz@Glg* zu1H^P1h&b2?=Iouvb0^Qr0?(=jpj_h+qZp5#fLZ2MPQJzhC{IFeJH}@&gQ;%8{VDZ zP$sZmSVmPM{+JnC)Lapc~8VjeJq$9O9+09ZKUE%?hVZdPu zC01zD%WTkS>o0bw2oRuj7fnH=nPsXYiUpj(bgm+oo5ml#^jVLb+yRj3b1g0 zNuuqE;AYwjWloXdMR!h6X+^OHtwq6QLIQQOJFnI-iy;liNIhybUJMAaNA1=VBX!WQ7$3%zgjH>wI_+gB0~+V>xmlJ$wpey z4zne->Y@B%+WxlWIqJ+%JlXav{KE?t*jXSGCp+#0+oP8p9UcHCVP_-I_Ni7Fst3S^ zvJ$&evkQ2uB7c8Sc<1usrF|}%esh2Iea61HnnuIoZ*BJONSE2$%69hXjhR8oF~vfP zU2KzU-f2g#Z7P(nM)I6Rgz8jPJ&kSl#ttn76zJ%J6iTnEqAE?Qrz}KELr{F8+#>(& zV0{ne?4C@&s9ydcwu}|eH=m)Rv(jk<wzyJ(v&dNRVf(;_Jc6bE}pHjvIr z==x^{&NHgECn~G6L}NTjqSl!Dqqcl)^KKio{by7Ll{I!`7K)$5hzbM|u2R;Yd`q;; zBT3dew-X#ExuPfg6`6~6a!22sn&1D!@Kdq&2B}_pHkyESb@nb#qLhr4W}V;uXeQw+ zZ=Jj*%4vI$4}&F9Qi^F%rU3Kg2|>o*h!FE`BD|!ry^3)X&byW>u7}`7TUBCDY{DJ3_Z*m82+Dw>`< z(v}VA_5EF0#xNF8|4BX<1!>?wl-rmLYDe9M^u5Vh5BtKu{a@|G+f`H%Ptsp<81Jzm zS6P5iPcf*l3ZYO;DT{4{Ohq7Up@-|4{5V@zrLdNSAodZ=3juCMaBkQJm;#H9k|}aisPU=1OhABb za8b0%j8o}|IK5Fm;@`!QS?7@=qFW8BYKed)TNuemG9waf!)L*D?bM+>09~PY&^=inIy(%Py`8${wOp z#vEvMiaco7@i9Gw0e|jy_iE?xhjGI*M8(`Uzt7u_4h&n8ItYE@95~u^WGIzDZ!OKK zGQ2M?{|(%_m_}D=J-~p3i@@svm?_{Rgv3Zk-Tq}Xfcx9_t1gVIYKCOk*niN=UFP)e zpeAQWN;4cSAu=~c 
z_3~lFiJ2ze%|0F=9|x{9cD?#H-#fD}L7w1_28uPI%yVVEH{hclp6k*>0zd}`D(iL?o=dzX^&&Ehtor9oqUPE1iXy07%cJxaD0li&tN3+0QxsU-a}JNUvq@`pvPZ$o?GBOOEh_918h?trjA6y= z>Yg6^7q#{sWup$+D8*@5?F6Ah9_iXYoV^Q4?Wy43J}CguQ}b@#{U*0^+Ekyg-W1Ye6!J*or~ zX_n)(dq1%E_*mElLn^O!RdA zu9#O89j}ViVH}Li_yZv~KDutPv!Ko6+>tN5GMccA4&61I**e)h8V}{y(q+Ce9hswQ zy81a?&brxROr99G~9yf5v&fGAkyBV9Bh_kwn3(a4eaCjXQ4~HwtXB`_$|bdL$pFoJttN`b zrL@a9G35c04RVsKC*tpro>3QY0pUzQDr}!NY@`(A&S>O?m2#W|oCiMtD*Ph@7G<^f zY@9<`qk#U~g5d!4(4=X6@9#vUsyj?;aDwWh0XN?fN$oTR^3k=b4e+3JS$6l#$(TdfK(#{wx_nJ9f#*aoWu-$lK$$Yy4zTqGt}A$*}nsG&=+ z36IH|8*B4&jxe}K>_`#EKLazgh%Dp9y`#lvdC&VP^3O$)ve+%U*(d4^NANSQ>D@Hu z??Ds%jCH~v^2-cv3+ft0ISi?+I*U4LB9K^BQlGKouVhG?%-2ISl~fbO<_TG&+c=dQ z(Ct@Wvj-zsVX(P+vWx*uhn|mvA5bC%W^G`88rR@<&&~uR@Vi>(zT2z!m6_UMm!MP6 zFO!Ph?Wv$YQ`b@rteN9?MZ?Or%{jN1RS3cV$;@nTOS38~2^_@>;LJlIe$QOkn@Hbj z6#v4ot^d5Bi(%xztLf5;^2Yb<-R%~)YGBJ!kM{p@iT~IC9y3ts*YFAHOce<&e*7sZ z_5{R>agq&9;McIDU?7bto;+#cBbcg#5StqIyna{1i+g3Nc8Y4=-QN){^^nWA@kR6h z$mD^?2@$5T4v0YK^Xlat7drF1qRqybeTHIda){tbOEKW{2OcY|g+>^YR{ue>EId{p zzC6!zZesH)(qd{Xyy$Q5soK7(C^Nnz0M+R6ElpAVE%ejCdjD35cD^Z%JC&fF{AdBf zmst6c`l%X^9+9M8|Y`n`40#{rR9}&8h>?uH45IvV%~Lx$kSO#y2ZoBHjJ)m$S``tJQSl0oZldBDtVRH8L%^Hn zfR@r@>p|W{_^~r9$R^gUTj{fL^IY*-O~8~?==jvNRUXxw_bOu8mI(CwhyG+Q9(0Jn z5^2&5=y)?!VtibGD6=S+UnI*=@;hwgKLn_GLa;*vBdkayI#OH?3gAkgRzMn8g@lM* z?`~RBc0pj+0SMWZ+V5C10+N@wU7SD^d+%W~))=(gV6s}YPsUvEuR-Bu#+W$yLrqnSu2>ez!qWA(9=);G4)DwB$JZ5vI-^!KaT5#XZdKWRb4Y*2{!oN z{{-UM6ghZrdYw`xaxj))mz}>xkfxyvNrl38fQQ}iNWlyRp^=V6C(rAVJoKNKqgZv|9k`6eKJk2N8Ppacdd*-ylz)QUzBSx(&o=zA_dVKKiU1`-DYK8?7vcF z#g=_HQES=yZUA>M3Sebr_1|8V6#v^m3k$@***7Xyl&JUr?~d5&CBEC~$WA%0qdY;& zf-ee!4IoyI5SE1{Ic{9TKbM9)s@`lpXQs#dWM(Psu(mv`N0!2*(%}GCymh}(zF zU%dKRvfTxxn|WZEiUR|f<_xBWty^?kn7V+o5;rQv@j9jjVqW`7Ay@q{>`4jI{BET5 zh#lgs!d)T~hsR+eHcl*#9(N z8Zs(6Bv3Tj7E5E78YgX1k$*tl{&pjue3v_FoN#LX*{uS*=N=uYpnY5U8k4-%SA0}u z*s?z=Rb{cg#@bC&(eC}Ed)OEOwLTUWfy}y>kH*8XwuTDxScs_K_`yDiz69CprCiWMcP|2=#y0u6m6% zYE7E?vmUt|l|8CBTaIv_-&dhedVf!)UY0)IOP$b0`&7GGk6m@I` zEX7NkSn9vgt|EKbhQvdwCa(VL$ba0MuHkYxlF^1KK!9->x{PF%%cb78k6NMKMZVY& zD>3Nmp077k>GFq#F$&iHFDbBW#Q~<K3a9%frHw*`q}U1&8gY?523}R#=!Yl64O7^hFyysZ%W& zqt%kx*mdAo1mr|f)sPq=cc*w=U#GlfvnD#}2z=#Avx)a5Ie$)|5jDwGSqE$zu!|gE!FB&)rcY;0m!I9@b2{;)-YJR^p2#|c9 zmz2UNLz?ixhjnE@amZyZxkl5xPac29-!~8HU_O{h+2fpdk>>c-bR*$0!@XroVaX;; zG%<-G^$*(r8}wZ3_;oczXUnl2VXk>S&k!9R^i=g>p8DHy@^jGD)7F2v z4}$=0nQuF`o&aUU&kqN~>x!>Q(w=6Se`rDyyWAN=BW_(4inI$HH5Uz{5|OXbx{7N0n)ssK@={y< zrq-3gy{f5rcVvdND4+DaR)$*d&Cn=n%U(2WT6Aq5W#0YX-D+@hW|@*+`G*u5^}t;Y zY5c|SLcU>@WECfi6to(ijJi5@5?FsGed# zzJ(HhuZ43CXEcArO1=H&P2wEHShECs?v}5&%A$TT761u7 zYn;*^_`F90TB;7gK}DS#+DAJ8lME5O6*E+M+`*yg!mY`poAaOt=L5EC z#SO8UG&G5I`P#Zf0mow50BI*(;`Z(6&0$x zZoNmEzxIKGT`9i_cG_g-xY~nserTxxa20xd77BB45mR9AqSA6B5jKScopB0JMnR`` zBVd0>*VSV&Xo1=>Sh=3o)NKyMxf#nHIpPl&Y(JE97LYEm;@!FjURRg~_GrU}*m%U* z#Yx$&&MxXv19|o2c*1GRA-@c7-lKB5z)i2Xy}rlT^K#T== zusX8e)LmAI-9`!OA4&TQZ0zXJiZ5zx; zXz}g8BnQLtDvIhM83=LUZvLc-j6$vU8ZBQj{R=G18;Lb+a`Kt8|9sLQxM$yz+1M!^ zcr2mq-=lKs;h4OHMFm*liVHi@e>ZYy+ws8mJHTx4YuK^U0hy76Mgj2keHOqx0wfgx)N#rE_~x_| z>^O%jw&>BQ#**PAsz{8Ex<)TPmHFzQ)Rnho1ST`^z8m<@XW5UfpE~P@)J4JNj)f3X zE?S5591r6=1j^xC>*#d%pjsma;wmr1$cAN#9yataSN%;?&01~=5*?MDz;?CBbYSLm zT;(RmC4RHti~XkG+VL@aQh&z7*bFxS4QlMomUqAXQKM2F{<6m>((cC^{8-hZr@hoA zyKDz(bR9^7b6Nn!H}ahWg-9czVZ(tktVdYo^$r zJs~|pKmj6|EUpsX`N;PKVtd>gaXVJ@TC`=*`-_p1p+UI7HkOKrWPzb!`TUxDbomP0 z|06hp`7Ag)6#%jor;)4lfJVS36X3nPzAwcfyZO3W(!2rNY^miso%07$k(UIPz2Y$~Jew|gd1sNv5_vof zzNm!V@}xiz!2AiIz6Tjax~ibhoa>oZSRdRorZ_xC1-*oN5hF7e};8OvG{JdnP(v_CbHgA)6|}O}Am&H;}}|cG)ViSmPNIZn<(R?ma4e 
zgkv1rNCoYAK}|JiuaOOKa`wMx_E*wOnB0;CzuHk>G6VfM z_XDW6Z)EwokM+vf%eCM}xLfIjis$!1m+k)`Rv^hKXT%lwEi&c1KLYSl3*xRzlSykH z-m1i{hftIa*KnMx6`$$$Sx}`)+INt_x&sF1-rYVxb|B@Rt%UXb=?3Hkseas|%KZ1R z_2dO=bo<+2agh z-oQ~Uvm~StB4q>B1M&J(ScV>suWiWl0a8t<_<8Un@u-1h4i$;g2B{5}%QMbQg6}P5 z&biGii;XVL(*RjJE(*SdT@;?QgCCWcfZLhn~&$UZi`em```HWuv*O8Ni=Q zsm>Ve)u}^_DRWMZxwYS>V5Oq?VEuHkNHJonv62$tp3$CH5l0D_`R-D_TGXi((?_Te z|Ei|QbnYUkNwX^6A|S5uXWcq*9Cht{0_Q9T6 zV)f9b6Xa~7kTZZoH;FJqB{km&i2>(3UHUVNXsH3d2v(j}+?K!!dD-1OwM>R@Hm1I$ zQa)F%vVC}-z7!(cO{FNC$s~UF2^fEZE!mOe^FQxwT!P3X6Go<7fnTAB70-EPEozo3 z$#`QHfYR$M0sK!@t%6n5S*bI&;6?Z)3PI}nx%3=| z@s~m<(-?1_jQ0|dsCf#4C)#_{^IYGXHOrj+sTud7~F&~`7##PY#ni=KJ&mkPiv z1)A#%{O#$|k+lzS{`C8K z@_Iu5lufM_`zLXH3XBc~3-9wj_oz)G9v_~F?Fx@A>bSD+Pl%u{qT!l}ED*epb@Iz> z{aSP~5|clH(m2yRCyf$BV)Kr$w#6_&_?gNNPNephs7tW?RTXdx{`a)sDQ(oE<5Y(IvBcu1Hs=fAS4 zeVyt^+IPL(vOnS^7Se=LQvoA}{LpsZf3QLY0xML2uS-oKQwJaK+e_%V%}5XjUN%bp z0AC3kiKr!?Vx*4i$DpsEZ&p+ipg$IasgD43?)Tvw$|Lk4IlA7yOKpn@(1JM^nxFeP zD2PnX!?*`D<#M?Bl%<(`$2uQNOTN z3M&kvBgz_55y_!Nr{N(~iPB%aMU(xZg3PSB@Tv`&TaHg|XqBWNjDHq;u3zlOVuq;E zU)J?n=YBX=X2g+Y8aqco{*2YB>?+rdZ(<64L;2jqzODR**!UvLBGUO2Yl$IT#cSWS z(jbZvhhCTJi~PuqylM~L_oCQzv_QdwGx_9=UzeRyjj9&b3@VO;NXmJi^wjCYoRs2! zoMi8$FOZAJO#($%&|9l@(ja`@M|JGl*3G24S=6MawA%?dEw7C|ha!f@2?+yJAQZUz z<9b%9s`ZgEJOlh4j4}?`w0(7sHH%;ZaB=`xrC##5)ie&&2iOikn<@Rr4zya2UPV{<|Ur z%!c|J;{yS3ug0JID5*ZK=#c7}fD>4Ia54p?DNOMYZKnz~3H-%2UN@B=4J13~V2Vh{ z(50z8!7bv5vM`jIamDWd{RGG4b3;GBWyOwo%Z3xA+r6nm^Ea1%!G~XzJLbzXStc{? z_>*)5`W@4tQ)NR2L*8Q$=Op@?q9ba-?Hv_Em_bh6OOHBjg{lS7LK&)h2m`Xj;wplC zbUBhQGD^|M)Q1zf<6D=Cx@fl$!)%0@?L0bs{A>g*CQsAPk%KNeT6KKy4<>zSMe52J zjWEsH(~5p&kk8LoeB51<7dd%LnP|UT5Mq%Qh@@0@UfIzi`2+?aB=ziJ@Pr98%s~~A zHxL$bSv%v2K^6m{D&sc=6m$RZCXf2vZma?pn1BzTKfE^-D}hLvL+2jB^iZkEM<68* zTzfQdJK=LV4Kg2KV1oQ*ANX>$oCx2@Gyf|TC&Yrs^pBI}4S0Xc4%)qn*|qwntDCVy z_>IJ#bP0}Eo5wBC0lQyn&n4lR@gp-2E|r3IS2b*i;E*D+qGDqu86!VueiB(@C2LLQ z%BMLEk?Sme3FkOnteVPMM&Bkh4Jk68xeSG)VZ*?!!h(DpZsBN#9?G+%J8fl7U+Pby z{G>I%mcQ>%v@A-z=;{=0h(t}rAmwx)d_IO9E#SSML!iD+ZAsna6-B44WKY%Ydtl|3>MZOMr!tLvXX~UpSRht^qcR zd~F1h34X`M)9Xu8yC=tLtPZt5wzoj`H8nGFl4Kd6Euz~F$Iqxfe47J}7RG@$UdpC; zHrzXzP3W2gvB!O_e?zGSr4|63AbFHGz~UJ20=|(R2f1Gu$@n|qW!3;__>Txi&Xh`8p63R*}e;3_^>91U3xM?Y(5b$ zFn$F^)8xV}Hf=qYKhG@eeqbHJDw}z#SudM^u&0!3qmh&5q`^JEDLCjz*T3^lMP}dx zzIfG|F0olyz}YbWe$IjmPxAVSCPaHA$-t^8(cK}{XWx&FT?v$t>6Wnw`xt*mr6rzF zYE+L4F`utY)!Pa!8rYw)+u@X zbc%DHA3JVvlbn4btEuoKK_*jl`Yy{2b%TLXyx$6oH2M~hBD}*%L@b5+Jb3o@%^cVq z1(op9X=vvvTqW07hT&1k-e}76=H`F@JQ6SO*m7M)7PB|OiIGs<;YPZc_GX7(gMc1{ zfay54IBqSE4KMo)puP|D@hCAJPeE{#Vpp@lhbQTBeG1So~4L9olR0N~q$1*tj@ zxCn^t24=zO584~BJVO2d=zK^1;f3VlPDV3vac?B5KWddmAaq$sgzqE7^trv&{9bQX zt`5CsUZ#)Zhc{XvT&@wf%@8t6N|fzeCEbS{ZwEZtN(WtdIVsnDb)BE9Z%f0akbK9W zwS?3281wIq?Tj=qS{kg5t?YN=rzO3{UD#8O_yt1;3oyD8q`3_YC|XBs)YQlA8?KrS z69`2!EIG}Hn4}*Y2;Ud_Z{DGP^EJ>j&@C&n(xmHkksF*r$Yo&<-oSfPic;gw;J=(%Hm5I_guSX)d+0yd-WZ%QcHq{t-1gn+$ z%|l>r&5y7%#|JxeTX?V^a6UwdcAFA>w*w6ySue3Jgyh+TRfhvgI!*29x2A+SK z*wi)a;*V^m$>)S(R2|lPLTTNNLPh9FX*e}<-a;?CPi2r8FBGh^5$wXIdV{IUb87Mj zYyD`m&$;Ac@V10A1vtmDTGTA|2S&E$GdA3lGBvx%l$VqR!oLZw45ijYdAIQf+Q<>| zTG%!nPqD@)>XVUSQ+=c(}zcFRbg>$`~_m`HyiYQLSR}NfqIR(07 zl*ZBV`Jpui=(Ogyg?;e39PG2pOA-G&QLyKg5xNyJXW{e8-S1?9`PlMX7z19b4Ay`O zL8%qI%4q^324z$mU$~dku!a|)!z&s2+8T505?v{O&1I;W?K?Nsr+5rQ#{+(tN{op} z1q+eV*UL^n)yzLiejx2GqF8Qiq9>~rgfkivnaO>39i}@=$gelc;cl(_a)mla|Hq*H z;U|sAbt=hvcp>Xbg&&W?#U;+SHRU{^C}gkNLz>Iq1)pY>?!8;-*ZR~olXYwvVjZQh>ItRXE6ISX`4e8=*6exNMkGJ}8 zsV*6QB#LHldyeNF>AZiY|0@mO5qq}XiFAjLl%J*=;mGc;)l!>6#8X}=2bqeH8&<)6 zW(`FFwMrdge_j?@(6OCa^v<&=GNu})OJ_w+Bjqm^$0ts}5D~*~*^lsTRfRlyR*Vqe 
zwKVA4r5$$yQq6Vwx-^Mt(Ku!dWkiq$KMHWrIa4U9U!!S)*>_wDmUaDJ^i)w;f4~c5 z)S3i^PIT8`KfJ9_+GQ`Xrh@nhF=sxH*pf_ST;h_VKrq>XFaSc*1Igv*j>@(*j`dT+U} zH^rMy@3oYczO=ShoR$gDNsG#`qbSa{0B+;69upor#tPAD!D0+rH>-=xN?hN2yc-eA zIbYSZdj<@~l85V^PfUGS(TYCpNqrE)uIyhm9>|ywugNWOMZpU2qHFqnv2Mj;@A0CzI+H$Fy4h!q#vI6iyB&-XtwQe6wK9QpKb5XcienRp!e z>c;cPy=^Br|LQ1jc9o8$My`~wwGLvZYA3{+XhqXwk(F~{N7P2iRKdP)7`W)EyP~|) z72z>t70_TtuTf^+dN%$`nNM0}2-z|aXMkG0G#njo{4|1qVQm-J%L)_lGKVq)kOAE9 z0RM5Ie;a_!mtQUpfXB_-Pe)dqo=?!T`xB$E`Wc}v`MM## z8sen3^b5WH=CsXj=!0-tNR0yfs0?Vwk4-y4s(jwL2lM@Y$tmfND;psn>7?Fh1T@2R zPWO5YQ3W3|75#Ugn=ZjOnG&thW*^w<;Y7B%5mxgI=r|_4if!1%!aoTow~ZVIvM8Aj z$xV=Eq^_cpwERfFV>t0>NqW-KKUw^hHu)`kK2lkcXnc&67$7~cLhOp+P?=?F*yB5t zrEQe%?ra|LT_zMA)f{l?fO$V5 z`VHR6m^C)o^VSJ*zOG^a+U9v3G5kHRB!oenOQf4!*J&zL$1t5XWbC40{o))k*MJyD zVVd)&>5kNnUfM+Ti~aB#KNg%B%414X$Dj?1m+tH!&QPedg8uuFB7ZMW$I6EV+Jft~ z-)>$?aZ^IsWTEQNJDZm|;J6AQW}GPH{afs}wtIXB^PVXpG!Ek93{pnhnHuIr&f*&o z@GWcJ8R+hHb^3=egtGV0WG*ArolWRT>WNy9^nhAl?7ySwlnuN-!GZr?K81v*2t9#5 zJ%T!l%0Ae(F~koc)towKG{y60cI?Wlm*KVNTaNP*o9q46FGCk4;Yh=!9Ee-I(R;Hk z_FIh+Gm%DIoV9uMY?#+rELG=eN2Tr8E^5-PjaYZKL{ukgoy-?Rv-1>DHxug#jT5We zX@6!-Pevy(Gp$H-P~S;IO4t+}+!P?Dg(^Z}iT2VB0w%K2^x!e);tD^uT zi37f%=_Jc0Ps>sk%C|s)%Rr+NfrZ0IKGl*ondYQAiAc7;c6cp6Dd&qlgPyGbiXGn4&;XPf@5q*NV)tIMw2>h;UGj8EAx{o`tA5?e2R3pcS z6Mdo!5F`7ns`=7_u0-%gRe|gyrUFLLXPHCq1XS^KpCU<%53agxlRi*32WqvJzq4(T zU(5wFZ^6se--#Pb+=GAIS=CUGsxnJ{rfd6>h0Wg6NEUuRTC6IT8HW%quK$F?lB(hLDfL>Vc*2jz{Q{VKx2J?5#wegT`Py?z#>4f+CAMB50684rsylSE- z)lP-?@?bjo2ppx_Y|3cT52}yA9(PFV?}seUJYf(peUM_*Z(MS->?tY%6*e~${dicZ zJxs-)@Xkx8|d2ra_dgh*yW|Ac|or@>3l zWS#>>uB;mR=3T%4*2lNC?VTZBjx}}@$qdD|G%6&w6BKv zjJnzpf{eu>eeO9rR;>hSgOkM14w#s;bVb>w28nuaY~<818*J>}b6-sT+7?y~UrER*dk@wrSkJ?6z+zeF=oQXPh%KZ5Stp+=!5e!<9Zn^L>}m4naX zpoj1`)Kh2MsVWOMFsnD(Yf5CcG&WM}Wz;tm*X~1|GSDH0&!L)i#f-4A#IqozNelWf zpcnYuNe1{X(cXf_VNXVP*lZ9LF5o%v(i8>*!GIM{<>27Kole;0HxAGrzRj9HI+{tF z=V3|hZDY_wPWrOfQiO>@EfQ5{NRs(%KgKSME5XK#*I@hp-UiOp=x+x5hxwSC-KBZt z?lnV6_*C5SyCCdGatif(3x2KeIJrhjh!v@^eyoE^n^L1ixvNo~DeX3yR;cMjNlf|c z0Q*g_1$xO4iD{LC9pegWwlqanyLyaj9`?g@SX}5ZZGLiSK_!6}2STuS8oA`>PjS#i z6)c&VgO$b|9r>DDbIobinY+snp-9`whOmS*A?BvMY+2F_F3J19#we z9^Dp{{U`TQo%c4*Mp9?uFGR~D+aRO08jX^q<6H*!>2Hlq`Chl##z>kT=WZhe|3}nU zg;mvc;nLmRjkKiF-6h>6E!`p#n=a`Fr5mKXyBnpEE|Kos>#XlT=XuT@S6s}!=9pu= zHBNhZvk9R+kI!cLPK*-{Ie?U)6NkwX>P3(5I4R(e;J*W*a5g^zkF${!z>WiBxqQ7x zzl+^`L%#P590RJ9Rve(zV>WK|ibRC)Q;H=M2`{3Us5z{aiJv3j7`|?!W;r4&{y=8`ZHgHUxjTXjDk#xe92~MQOct=>mC=^eQpcWt$Hdl3U?~Ok#C%9Y=N%UWVSlQ(nCP zC6semB7*1nN#o+pOm9!=F2`lnLzrkVzV9gTIkl%s0Ngt7;`h6<@YMhYxfhttHdZhl z#SbRI{JE#bi$dR*-|aRu9Ist?{MgEQ?wxencnxHoKYVCAb?Gv>hl&C6pG14UUqY_L zM|cxS0cw0;?i_q+z5`B(y|F#7+aX`ow?Le77jSbOtF(}3;*A2?ZP!UL%x!)+};qWI33o`yPF6(dHy<0srTY&z$_di%XJ38|{a=;Pe z-t~Up9ygKNA*D5u&z38Y8cVu81j;TH-w8m1r@7ibx(&M7c|iBi8jAKNNTX4u-DM zh5HkB^;N5+>Em4O``?FXegO(d4D#BT0Y(vu@{ z@G@qEUz9ZVlVXH#OhuM$5+)?M+uZAZ z?Am)8*|!IKn?213ZndpAK?l~NUSh>BDwu9IuETqD1HT@<_?TyBt3r*+x;-^8~2iTOKh5 zDdd%yvKXXm9O~1@Q?v1aRsX4~&MJo}0jI<`Woy|PikiFPakd6c!2?s$;(bDENyy7AY zYYu6plZ;*`|L;ta{(kaT1G!v&@;Pg6bNZcC7*=^fsA`fh>2ohNqC2_{MXxSWClBLx zxDr+sN%CGb_XI8DqjWp$%o~Mub+y#F%qVu;odx~Tl6u^ihT~QvDF6EUO?Yk+`kzBU z-_Vz4i@fIRSmuFv79BD5L7pI1sa}*n!|e{q+n-8^W3y9vq-Nvk{^^|ae_9K*1_A!= zi}rwAz(DAcdMyS(o`Oso4OiOLg14O)nojiwFmfPfS-pnc05XmCe}WIu(LTW?Fk0Fb z7QcfSPphtwSa(;ksgWtU0B^C?Q}#8Thg(td3mUAiL`9T6%486V(2yoFa{!9jc(5|1 zxd^8>%dU4;yoTslT07&aJosSCOg!G6$f|Eo(94gc>U^Cl|IT8c8SSbx%w03G zmm28GM`iVn@`rmZrKg{eVFzPg_PP)4mL1gcNOO!f@;6;|z&YKGDyrP7IO`%|Q~&)N z4I(Lq4;=8?7K4ao{=WM}6T@&JktVh887&w$w_Fk21BEHqjNQ&DC=T)Blv;VZR4PPJ 
zg0Z*RD@^rKbOqR1x{-2|vP||XI(C5go{OP*us!m9<6egUh!|94VE@gmR`jIMyK@Y;po zGGAoE!-mI1^&XW4)8^SNY6f5Avud*?eNyB~Ww=S}P42@%3(wC_k%ctPq>Zb`BsLKs z$ImGw!SY8^EmXT^!FpCcn~uBLjY%Md)y?Xz=waDE2(NGYMT4|;V^J$-80M>Z3`8OwRvk@tua0aImM@3WI!rY9HS_cxWRfL;Zn!OEmnk~MCSeMIhj$$&)=eW8#f!!BO7zhxA3!(lz@TsfldHP z@DB`^x?uFIF^pb1R_Ig7N}30GOglz`q90kqFkw{Cz{G8jyhbfT;uq7Y$a~QX9l@%q zKWMJI*qw$O5B=$*&yaiF$5AkMqMrb!34_dvNunaeOixqUEKnbpmH>}b^$M2cwJqP@ zV|(e8T#RWLBjl4H9{JaFlEQ-E;?2ql-ap~waXpZNI5`T;AAboix86BbCPewi35}Oy zV+kazT4u=&{V~?m3O^}60Djdx=sL|rd}2CEZ*PjI&VHWd%n^jzUP*qfLG&P0psBpL zl&o&uOO`*bjW2SUzBuV#T)=I|Al3Oi>j>WVSMOJApS9JR?Auad7Za1ukowQX|HME1boc%f;d>}TtN4YaGI;QI!kEHN z^j#^hMNgvvvdp83LbcOXFn-Q`xr4A;N~k!lp{`%BN{*`4=HUyEXT78-f=YwAVh<|X z2VY$D61Y4s9+YgO)6sEbNHv%NP(JH>@0%uJ{ zk2E*5SsGF(Zr)3GVSrNilFiG=qVz7Lq;P<+gu@N)Yd*fbD*3+{pnz|3 zB^pQuhz|gv_@$Q4Bla%Z@Eh*`1a%O0#p-S@3CaVR-F(aB3LC3eIYySVTSP3HA8)^s z_8)o?tRLG`h<=D$xigV1UqOmUd2N%PW&Tq;pIqbk1#sIpRDAdKlP7Y9Fj;DUJcTyV zxT(&V5(y4RUWw#$^_k!^XSw!3qCS-GF2pCY9AiN#)#ZnHP44qAU{=lbgRg_f!=Boo z<#f@mbKaJ*SL}dyH*CE%aS5}5uJfsI@QLDy+Ns?fSOWMZjh2Xt)E zdifZ%r1FDl1cV9+erIP0qYTIrMVG!o)XYWaP-g9WGClboK8jk6j3g}BDB}tEls^E& zpLc*Ay^}r3PaL%yelzC=yORdnJ({vcTV_?FqoAq&3!l=NrKeF~ZT=ne+g*+q8;L|X zHMEie!URe_y~(qOO*BK>)24v<4!e?5sM9bIvIiN&&)vd3(Vv6QjV-0Xby(&vTnEPi z7U(l>GN24Kb2IR@ob$RWxrNyNqpho*+XL*j0q$VAO)7@>hV#7>a~2=zKIW0o@NAmd-RyHn&hhfCr> zX;fZ`k{ILAK7<4`a#{+wJ-S*p#}j6A8;s%_1f5>5*ZX9%){#Y~nLTDTZ%`cF3hn5Y zcWM8J<+Dkd8|kAVqTniD?@lnwd%tX2f|+2D*;x_0{T4$eD(JNq*4I^5iICS(`B~;S z#hI3SiURIBo>*ci;YJ`!5WMDB&c;+%$D1b(Y% zD3%e@1LXxI9kHs*Ueq@864-jg?n;PNgeB7m=Gro5C$oW&)intS??OnkxxLVKfIT4W z5ppZUZ{H%7b7?dTcCx5IyiYAN3Q>$S&5OfNhV55(Vb<}t5gK{@=eWP^d&2NFzU4f< zul^!jN7zYUm)xJkr)F*Lh(vh%m$}ySi_`}R>oqsQf0s!4H>HYxNBI@SIZDN*T433YzAJy%Vnb?NDqIK0gU4_NcEM%lKvW9O?~GCE86L*6 zikP9C9VEyh?g$?=8SLhoN|u;8wR+IQ30Wq514l#Iln>0;jv#XpMOtJP(TpB`BtNWm z`?}y&<@W>0o}A)^Sh7t;pyIFPY7O4NZ2WOI`?r8!Ppqd?_v_a8;tx%{0|0E{p8(d7 z>xj-iVxz~#xbgJkguE#6M_paRv1sSx`S*K#D~fBd{~HWm^~@l7_m3{k<2Ad_;A=Mt zh?yr745if1!vxA|Rxk*(5w!=R7| znT=!p$5-S(57uItwb$9)mof7DgT0<6U447RsD?oRCn`9BeRAY&{80T>={C2hfX};* zSg%Hf%ZM`~pI_ChidVp2P?0VE@RYlV-Mw9UI40gSdSo>WQE)ALk zjeDpT7;03h%CDR?k?nE=#|mF^fC=E71lX1YM`JmCoBS>?#l1>$iGP_{g3#dc1Ee#C zgGB_zQ8z`sM1C5_5~Fc@U%HO|P3jQNenqANd-_F^svzxzTZbEk)`l1R>1;-<&AVS8 zjgcwfVd(6FwJWg*KUGESer$a(YFIom`ieN0K$!1ceq5+4%Y4F~g@nD1*sbi`O1G;N zNBz>*;oNe&;4WIJX!l*uw|H^Ndy^4lPnmviCT6KDz&Z(0eUDUYOl)v&?)4WX+xume zw5JoNZ&M^;u8qfAif;CsT1wi-R55Uq2cy~EREQ$6h-f*~T2g6kcYN;Y7h1oxRL&eyXoE+*iaa`K(OQYcL6Y` z?a<)tpUi9L2|vl-4j>%{Cj_1+nvcc`Y#u_V08A2CRsmjS=04O-P8UmM*CGTi)i=_^ebl$-eObdyPk z{hZ_3k@5nWx~@r5tM;D;6~v(+Gq5La@IiSlOYhqdm(h~eAn877=@hOiqY^cXfISV* z=y$ZPhnMqQ+ZWD4GaBf(u{{70#DbrK2S9!%chF@)H0E&V$MY32C>f>&;$L793Vg8R z5QCNFr?e;u5b_iPp432qjAo*5WeZh3bjPYDFPawCb{S%X* z`gtGhK=sMCD)RK}4Nf$Bo-bGntkFY!?0bJVPo(amQ^Y;+kkbFkFyju?^=ur-t9`$C z2t5g~tC*~kYq$)hJPH0Uq|B-ex&Uf3lyykU@db9Ybf(X zGcCOS^029EyMALSzL4H{dPNI~FKyC+acMsuvW*6U1%Id5lS}C#{tWz_`EDdC&}w)$ zNRcnI9Ys6%q`-s@YW6yLc!X>QXstR+3MtR%wK0HQ#u$W}oiy!wkcH49I~B8mO+(dd zZYxsLa?!pRLuv-pTSWogPFE0eU`;mh`^Hw=9PJ;T#f`m%oCdx}&eAwwWIj7^20t)d zH6=JQ8Tnd2EWgmRjNnU(4O^MV_vaWAbRB%o9rH)&GDC(@1F^A?Z-nYn1c+ZP+N2Td zaNeYlhkM%pyOqLhg2*+JN>7f3mgjYYwpIP*ITR8Ggk8&n8EePI66%2T;xByHuW7Pq#QXSKrZxmsaNj2F8S2mvLT}^FZB7X zd%}y}kt80HwVGx%dH;x8SJ@k$pNkgAskDZe7R7O7*E<&=Ns$SN8~jILGehfW-R(!K`+5;aZH*1K32Gtc7CrWZ=Eff>UfY6=LH&cBAwsv)$=icIOB@di7qg(LksG zsAcsagbRlBAFT?jmHP%e=7>=SU-M8BqAjl;Jk#)|<>888(6xyf%8F?Eg)5TYAFzV$FWs5>s&BQ!vD`OF!Bl*! 
z3JZq4nQuU5wM;_)5^o9PKNYCk#|WQoMX2c>5)1B>Y)zEL9Y+ByuhIbeGIeHB>cgf^w%Fd;8o?4`k~(% zi&w@FcZ{`3K_`{bTV;!!G3$pi#~@>v{}q@O76sG+eGX;U{rSC6oit^p-~=m}+y<9&5~hrd7>-ZG3$|{* z4Bh<73Ti2e@w`ck`IJW$4}}rD5)wVgSN| z2icx>7weZ`tTU56W!!;@!FXpho&++$Qqq2eq8|N_iou+(9 z2YZv~xDY=B!?agls0?&q33TvB_NFu+1i-`^TF87*j6T_?!R@u85x$@pp>>Y!BzPF# zp&|3L-}zAyIMlc7-3QnjKQ{&S12C{A6-k*`aIdYKr`hz1)8+So>+aLPulcFAk6tQF^K?ox57c-R%s>OYUr<25 zseMy?@oG4J6}zqik#0ik(_pCknEg&ro3kd*xbVZ#Ot5fOsyuT*MR<3IfYJ;*K07UI)M!;N@K8;xotge3}rk zA$DJSKVjqy4O|CimzaQk&ud`W_Jp^NEBGL2!FZn$#ydN>(n8bl{Wj_xtPbjuCmWe$ za)lIP8AG9AeswFge~HFVjC3?*FGCeAUK(_rP_!Q`OcjYm-?;qCI)~|*MpCQc$Qfr6 zgx0IR<)<>&^0EpFCZc2;>a_lVsgeQ>a!>DkHrq)qpi7isqE60Y%|)|!P4^?c19H-M_Lsw?{cjvAN*TCRKcr8)SsW}^SrP=g-umqzgAaG`vi z0mWYX0QU@MFftn6pw8zzM)Wi~b%~H545E1Ydc65N6CFYLHjNx${3I_`6 zbOI7Wu_O(3!HOg`Sdeghy?K>p)Bh@&N<$h9!N_00rg+0D0h?fc@am$|Tajd7Ojaqz z5TXe4KFvXrZNue1<9<4j`>%3Wk9zVN*08RyYho=I`vR2};fr)C`}(&v*_7Z9;S7V) zY-&ky%FH5yC37|2)j=Ga{GI7Tfc<)R_OI&DBsx-pNaivjAtDU&C)}VTtK`KQZV}Gg zZ{{^V2j0hPHIQ3Lg2>q?2iTf!XUa+EbkHHm@FjD(n6pu2yDU_pmSIK{>4?TcK|)#Q z^REPwn4mY+Sc>6tMr-;mFF|@^*VCidYqv#k?o z&tCySsFDR8oA)F|2Pd17?F=*epkeMynM~0;W)j)aM5%9zI$6pWHlh$HW(B+9!YN+( zA05o}t$6~I3TnOY(dsbGsi?+CEO~I=fw-iSkJ8ANLezl8-mV|gg$R5W-)OQNgT?I9 z+n-_C~rG5i$6Y1`Lw{;EfCKT$7KQuFhR&1ozOU1v)_iIhoE?!TE4=0I3}3q zu4bp^4`F3*7IVTV4`GAr2^`F}SQN#p7|^{AoNOEd3RN7I|Fu|as@?%lG>_EzI-yns z=8lY>{XSVGIB2<@9fU9nSPASBJ-xTa6+VB#A^c~|z<$pVpu}V+nXAHB&bOUWUTkWd zr*F7c>cQZ*lW=vbM!Y4C%}$42zC-?j3rhgH)cHcS|6nN1gzF`p>~~|Z2l3UN{BHIV z9U#1y-;%~uRr658Nw!gLH8uwD4g3^A$g!9dFGcrDdC_r{ZMi@-boI5 zUTP$S7kdcL6^mSha~*o-;QZRQ@nF>R=D#;=8>4;2JoiJHu8*mK+|U)llw28s@Oh{N z+hjh*r=TSa{aR)HHetT_Pcg4n)_H6vS@$JjDp_L&>@`_?b zt_BVQgD)w4BC}0`hA%d?s^4?3<}bKNZ~wjr;|H=;TH`KV>mrYYLi-3*vEM?OaMTs> z23kW-BGLfy+xm&G&V1iS+;Rbv+_ScM=|gJJX14nxPE}?ROcr93NK-1ght2p41sBG$ z2lm|seem?iY*R3obcmQvJ((44F4nywO6=k8VfWF}{1+(46msA0F#i(btpKU9j*XB3F@?o%;5C%4yV`4%v53w-0YA-v(Efo4hs6kj*McW9)mA9jQduHXU*vDY9#yVW+SL zdF3+VjW58et*HFQ>p)HJ(5_d;qrKtL%!);6p> zW}|hEX{xVCLeujcZ!XJDeom$rDASQQtck29Ak5*??OFxh=h}wRw5>g|nz#P~j<6)Y zN!QM3=>D>})5P+$E}SkzL(bmtyU%u4_I28NVqMK=#~itabvD!!68~~|`kMO2jj*hx zb;n{Tej~Ur5R0OK(b%$OlqKxMBxg>2L5o%ZO=tIy`i8>=vcCh8CAebGKHl2>)CI!U zk4=5%4?Ts=;D_Ete1)X}{bNMXWQL!GCu>qr3=#H4#g^M;J=1{9^#}MGu?(Y(yXq?T zEif>{{Ce|rX<;Alxt9%0h-6#T8m!yrS7#rytE_oWn-rI>IrysDsqwUi-SC?#>4mn9 zNDOs0a4PS+z{}4eh`~|V2ZR@w(a({O5MPCp&Y8_M+oi|3XYuPOubi8a)5;PDP^5kR z5>E(2GKl%68>8QST1Rd=Fg{QB_g6+^JZs?5`vWmW$1*!t< zKp-JcD@tXMfR@LRNrZ~hldGZO#|22m8WLuA)A`C-A}}iIeY*e?k!0NR+*J)!pWEq0 z7H^)*jDHsUbWvzVG>@yUXxc+uilUq9#pZ{%IF9<}O)>`+g1@|~sgemxQZ0i5C+X38 zj$SjP6LLT<#MeZX?aXiYWdORn2NYgyS|+&46rioA@Q`}gt6=2)wd#Amcdo&U{PX_E*SXn!?8e((S9o0(7TeSK3h^ltG#Fn4qrTXVM zfzpW{mKLW>=^RT8#m|6W3R5UgbnkUUmUZuVc?>YFw+BB_OYgt&J99L1`afaLRlvOX zw-L{vy?f^t7m=5XJ&2#U43t_+;x9laQ*Lb{AaI~r8i*r!z}|Pl)430iuCxKIbXJk! 
z|BdscdWd%w76M!M8WFISINK#j`n#dRAVfyhOl5}ypDjwtCddS7C8qM`FNwTFM@G9H zgI35B2U3+bePC`NVMV{3cuD1*7{R-Wl>Bww)LNJTigG1ZB}!Cy>_7?%8${UTFJggR z2Z(wTw4&5`9GN{(&VcyNf6be-&s0x_UqnWDeti5QP>e=#Atc0yCK06<5}(?qAa&wa zi;z-_#rd32OT4YNChU+~=+XmSxcaks=P5XNz$BZS;9+Jr@%pY!_z@UXp^p#P7@&s?Ub;NneBJJV#OX?kJGLRw|F-D?%2|5p1MkXI75$cap6G&*R zDA9*s{@9YDm;O1>xdS@?5#7TFgEEdX6iqZc?ZNC#=*#kO)Kj8IK99d(=*xcs(CM6| zrkAr>qi>X8y{cIO?bIOTdSw>J9f`ng&{sZMt$e`)u105ghg(jvw)EiwFDWC|5HXkT zMLle6^n^_NO$R}n2|G`7qI4ar8EqOXYp7GHj$nGPvyjw#n!PbrCc#PBkenh<`trhz z!eFy#X&DbP(jOe!M_*j!jg6&K*^o4kLed(T9o4@^JxkAH6}u#9J@DW7Pn>VE0LSrX zf@O^RoX^#V_keTv^IQK-N@|xV@11Ot10>L;6%vc00&Xi9iybRdC**@8;%JNR#`|SA z;7VKt*oxg6-Tr~x-XigR%`epz6u+CSt#@dYz;>X_;L|KMU!->$}YP{hExTk{k!D)};;^k`eaK z2ipn!G#rKTl02dV^EA@ejbR8!7PCO)=F0;;48>Rv!?Q#pZx-lH_<1-TF(~&uAl_xR z@mLcuaP~4!+&{?!`Z{VxsF?Yv1qd&Q=g*=D~|J+4NUhSC`z5u!H1=`PW^JC z#V=+gM36-koV3gP1oMCwhNtu6hL1a*tPsW#HsL~2kdR}x!26*Ngn`g>vdS#2oACPt zlaq=9Lrv!QgD_u7lwk~s`~_gg2Ia;`kdStl=+)qc5#&Shs3*CHvDY7eg-{6rwT*g6 zpnSk&(X{Y>mDrdgVs_O7$MCkbhkJ{+MXCli1x*4@pC_T}2AipLpPc!Jt}DSl72<2< z!W{(=4Td00ydL^PbJPCA#*+b@?VW4tKf{fl8CQT_OokNo3je7Z6I!0+kCabSuV7FY2me>lHd1~q2AN4-mHqkstX&km zQsR%;Q0hZXN-TF~=c8rr;XgDUQ*E8W=jt=PxJ$uXl4#tR+V`~x2(pEm{aZ2a&4Q%3 zLlg|~dkK_wqMhzCcC0G%6PR(agx_J;Ssyj4*;kI%X=LBxk6~3&6ckVtuapN_YG7V5 zhI+XuCcVK&EBApnk=hOKlWLO;oz0|D)ZSDmEi=x4AY9nKZEkiva_V`dH4?`62mS@@ z(3}BI6T_>DpFyf!rW1spf^!jIqFFLE-%^a@>=oeZ@?7J)2dWsV+8)YxULF%q7%lNo z9(Osu79HKiPe=ogo&OX-;wc#DZ_hbRdcQtDpEHZ$6aqMlCTBMpUyb&F^Yy2f8K-yH zBt-$}nTc};b&kk>Gx*{^ZiYq_1XOX2>KOD`H{(Cl=f347Ek@sqPw~dp)3wDnhn$y| z%mZJ|PM0IkgyX?~IOb-6tBV@&&XTscb{Sh3tW>G#CL4McAlUuyW)@syR%O!YK&<$L zm^20wJAOopP-@!?IOdAq8aBV!4JvUm7rYWzsSBM|na7v^l-+!U*Dh+>JVg2^F*jTG z9n3H*D9}J-hEJjUByyAE?TUi&VIqYnZi=q|-ZES%b+=KNi~cYrEjWKLbU4Hq?kH?v zqEiBvi)`~KM-tE&tG%teA#FC!)>?34(!bw_fD@RJw)D8-$LzFm z$aybz0g80TfD|`!ARaM{7M1BUHn!4?$_3 zF~NRgTcjD@oUdMu8H2@O2(@;p`iTE=*hs`T@X-@Vb~yAz8G!9|L4}u_vi)Ot)!qyhyP8+-@9EtN#MVx zL)XBKp0K;C2@Pb^3Ta_EFityp z2Qiy$UyXj`%Y@hbo@4w5tbt`2`f+O*P42><5KU7RpD}zFL1@c0>l#SIukujKzJL7Y zccylp?mgrWMQr)+$kDF>fmjJN$y-GC-TQod9uy)uY|$G%#=$k4cNKd?i#q{Yrq~}f zVHXh4#{4^C-oK=IVE7Yp*_+X~4aph7@a+Fo&mT1I4>!WYxV{G%eboVp!C22eB}FYs3Zc*xh!3^%6c3VgH1x23H{7xD(M0n{Lu2((i86Ul+&%6y zw*$~$=YE#pc&~Q%yQ*_?d$&{eT890teNO*N&*mX0UmNe7p~j9ny?-Q@U;?LoYIq=? zEgX(n7am10Og-G{HZ&$%HYc*q2IGC%M5%IeFwHi53YG8#|)MI|(hgnb2btc#Ej~RViUfxmW+qmRs&)aFME9Y-0E2P}~QYTaTWPS8= zFVuOhOC+(iNQ(Jy5O3tU2=dU2-^(&5DsH<1Y2{0)ZllcJxmtWF0{fP!*e!BPHc z*Lt_39)s7^2YP&7?AkH@-Psl3bow?{&&+%F{;}obB9-a&EdNYtL(YFI0B7#@3_JV! zLB9ujs!k;45TRh>!gjw?4K0XEf#n%f z^Yan})sbAu&@5{8q)k8P(Ly&be5Rb)r82v)SGE%560H)UF&b-;zYl8fSH}Sgg$Jbq zytGFvq_rwf+-61G4~o$EZ*(q(hw|5AfLD$uINX{7cL~d5U?5d|92FZ7Z)q2VwPS0{ zOU|lbw-u_Zt*YG^P^*uyz1E1qY@j-JB<%Q2U}AJ-At$?=PKYG(X9S+7?(A(Ezd=;tX*om&;Rs3&0T<0H(eXoHK- z<-|V7%ayI)$wOu#rAkHsQ|llGRcxc~-*=mr#{MI>HEHM2=F$n;=FjgMbsA>=Jj)(} z-0L|TfGlb2I~l9jEOIlI)=CISw}d1ys$mh%YX8YUru@$t8Uv7D_S_)nkE97i)%Z-n zeK1YDwlug%O{9b~P*sD@-9J{K5?LXCGt zIt^V-8LdvDkutl4&u*_aAP@i33JDsHQ~p5JwM36>XETV4gLFiHNEBxBJz)P0X%4r zxN=qQb{b;~K15@fhnSsN?)*-}kWIH?0DTZLP?r>j&d_5{d_+zyMMa55r+IM5K!GZh zUmE?coHMZ=)oV%E2o}Sb7p+jUrkn#$4vnR_`EqY~k+{AvjX-Xb2*?!xEY~#lj)PL? 
z&dduf;QqCUo!dbn`&w_)Ta5X^ZVwYIa(Xl&J^y{tYH7d-PC=R^id0pxk}MG%&2(1L z_B&V_QWW#0Nu5atg4a*V^bHzg>Zn4aOd8v+Lvjh$*1aeu$Leq?rdSHF3`g?)dTth# zU?v;(-&Q5Xa^qAtit_Kd1zGPYYz4(Dq*&(OB&OrjBUoH)v&yk@#8zj0bEpR5bn0oi#KW;wS`JCH|nFD`c9yy{w2gJrTq*>nSHn2pkVO}aev{&bA zMC2c&6XiGjuP*Fulf~y9_CmWo%7VK<;UMrJsU9re76hVi$bo9OTEsV4adIEso~Ou%Rv1E8B0zrj8gU4Ye%5C)l|O zv4rT8NYU@YWpTAuhGr?n=Tx8tA4nsgdVcA;eSSHA_!7?u#-xF_3JvV`kYMM6$TW6{`?+p}Fkfl}Sc{Z?Vvy$KASASrPo)0Ay36hqG2u#XCFYfKKf*Cn? z=B^kOMjDZVcA~Ht!P;Yy@n3- zU1t!P z^JiNx>-ueuL>-epRp4)S9`9Ug;$Jas#>{8`X9r3jAp#hkRN9IlIzPzB@eRitNh+A>My>qWEi|N_7?Eb?3L%X{eb}+nL(vHX=q5*T31VYtEBAA3GS_h zSo=j1HpWrV%4;04`jN;ebe?KKH`5+_d^Zj^jXGuklNNt%v3uM5M{McAY!Dy^m_Qu6 zuT^P+Py+bvs~s`OGoF<1;EIA7X78u3NUqk)w+9*v{I`#g zdTN=EFR{~xE31nz-=O_#BgH51{h*SAYD<&MO#7dTJrLAPGpws2Y&R|pE}FncQbtDm=K3b- z%k;TJR}BGb69SdW=M9^eO-Ej+xcv+DS2;Hb(iVU#F=^=;mK|t_jjS+_2=J&DdkW(@@Dj_d z!C$Hh{w6G%J@09iw({h;8sj9C5a-dujD)TSOKW=vVhSmJjA97H!w`z$WuVi&&Gv!jw2u z`(<=QZy3v5A(V(-^4)3ogZH1$%4QokX~bQ|X7^nn^w%gY@<@Gl?NU>j`67=0LcB72 zA=#4F+O!Yl2l}!->*d>twnlaUtOwdCWeQ;6lR0GK1}=)VJm__qQ1{4>d79Op(O39^zlAbtlVT>t{QgexI=Mv))iPX?=rI=F^l&kC&z?n>QZpj4mB*I2nZQC{h3L zBI^we)aWTL#1bDEB+UE$eoT|FX$m1Fa6(*WLFI2BzN_x*00=PZY~HOwwSR%Nm!t)e zLbKy<^_r8vX3=KZ66guem4#Bf{)<{`Q@cgxyIp$XsLtW|j%JIWhOMAvQ}l$GZXknP z^f%6{hWEd(Cgq`~IdSqj%GyplNWhYm(LvBl#H=`_5AIL2K$!Xw7UkF6RBd9LrQE9k z^JEcek^=@!CMh<9V`VwJw@840|3Ul7m$c6QC|gT~0xpfp{j9HNJ%ukIn#Gw+Uj6lN z=Zv^v2hQDwuvl{(^`R=`49b6_2`38>FWnN&q=c4e94;eA5pDus#IR&y$NhaFo3N_s-lj8Rk9oT!$!%Z=A*RkKzA%D_TcH|<4tDG0*=Zg&^ zgPWy#Sfi?}AKr4sZ2zsIgaSQ^xc3?ezw~u+{)`zt)Wco9okKLqJ96AS%TOvyM<+Gd zc*7=!z~o@*^)nDv-iF-+k$6A!??K0T%J=g&Ez{?h%IA>F1JRGA9pJmMlzO7O41TWP zUrKA;YpXc!U0Ryf@4;AV_>pIUIuFn9t{+`;^?ECr-r%-!Tn@gbS`DwECiAnqy4N0pIT5;!I5+oYdK+aw^3iLWNylB-9y%v!5# ze99JsM|5?~xF`NWjE4Aw!f_GRR}E6ntvqIJ}y1NmOx^yEVB`w`jN|%6yl!y|SF6l;EKuWqpF5S}I9RiY) zmwUd$^L~FB!!hu%_TF>NHP@UK766IAEoRR%re6DI)?*`DyP&S`cc^c?X!yiCeVIq6 zk85znrTd3-9pJ`U3Y*=ehC>berlmOS%4(N|5iR()zA?v$ggBE#`hHb zeku4DN4a}(R`=8QpZ`h3mSc*cm>jm3AW$ zn)p>jcrW!X=D`gqx-3J0P2<~EsnLF#Ix^Rd@UCF&vpTV}s$wc|JM=l}gLb2;vW%=@vx{!PqpIowPDh`Q^prYYN46*JYb?pV?U&lB z7yR)Cuklhb<8qaZtND$z8(Zz3fg(6)$tEmmd6kaj{r6P%JupJ*3!&oTZgXI^gIOQ$ zsUGOdm0y7-5v?QAm^A7+1kJRYQ;s3Sbk$OK@36`q?>9^p17Mpq!-XrWpVDs4I%~WUM$hOZVlBVP3Xow5lA;;!H;r0dBTd41%1`#zagi}`l3Rq`5&C)*OE7ShkBEIpp({-`{8w^@2=I=`0Yg^;MQ;p-2skj5LYoY z6`uz?S6UZLH*QBYS1m#G`#HLd>@+AjZ6aq?g55Sjz*rkgV_i+Sib`s?nOE?rtz{C% zL(-cFZni#t-z}Kl4>9(oSL_-bbVd&wg6Vh-^t}^lH01e~rX30Cm5yk znSvkKvGR=}MD470#iv{ba@GgIr9iP{^paeRZuo4l+)Ie3L7eo4Gk;XNir`x&*`<8c z65a3d9mjqfI}Ag&;mV7VpM*7NQT8p9jXTEUUcU$TULij%nMwlk=k+76O?5tqn4~ANEU4b*fttE6{}MEJL^S$eM*-9C;Hjz)i`~OA1gu?je;A)@U*{CP)IZz$ zsJ3`-ps9KY=JJ_-alZ_YNuf- z8bd=U-Fx)H-@N!pacwb7__9{(ZS5%MNF6ntXA+H(4qVxXsc>7iK6A>D%r!_pFF{7x zLbLHeD^r*yFjOZgLd7gXQ`Uws8DAmry+C{(ilmdy5P@BpiX(G`25`Dmz5$yk9K2{mucS z93K89Q@l}Tp>^G9+T%(mB4p6y5v=1r-OG6v;r1e(>f_h}DvR?5WovXQ7rACt^38uG z>UKsCGN!VI3kp9<{3VQ%4keW`>WWg^=fCT_{Mii-uyeq&Dc^7bdYE43u8)2kgpwa$7MsiH9zK zw%pcus_Mp;>su1ZkK8HFb1SRMl50;gQJWZTKVv(>e6|fCMt&3A2stR|BdE8`mQZr# z_()BgxG}%Z=ixNbh@2PT&Vjk_A6_UNY`5PMsHssUKJ&=pUKtGk+Pa$?_y;|^6uvi;4|G3pt@6_T1r z?m=e>tw+!*lUJbSE)i(&b$*lM6bJhO(1!Hu%^EEJA)3TGvltI;J25Z13(v|>sr;-K zmzHp2kq^;5|3oc`yrpwhgab)baRmGKt>qq2Zs;?>1~iQUKfvE6M_lI-NZ9~4CyhuV zt*;k~>pGU1>`hSQP?WlS;|c_%b#tQb>UvUlN{Ln~IMkYdU|TKO*`ob>vT|0gUlS-N zs&BUNy(^bJF81PepyG&3)-+l+13A4E>(hEZgZExI=;xekc>Y+iaRkh?s#jc&$jK9& zXR4GAkXUq+UVF+^ITCNtG-Tx9APb6&7?y`oO9GZT23X|a`jH#ppPn%c=e*2-KBUeC z?7mM2w~H^2e~3%*JPJF`qc8T@078G@g|Tniwe!`S;L4r$?Q+h_TswyF{YS&|j`p9<^9TP#Tl@l?zzBc6nIZ>`{ 
zPPJs{X0Q(h6ON&Zm2-4AX3(kxB#v6r8Q2cJD2!xdt(N;cto;wmAuG#>h}4(lPw zsQ;B*4Y@GCXg|R{eq-Jz2>}GjaX%$r2%m%ly9Ds4=1C&)a=2b_uPG9_-|={_1#V96 zjLRmb>%CxC^07}>X7DAv8!5|)a*E9zHg3bWjN-Gpi}S3+0xD-U9`q|WLY1oWWj6=T zkt{6GdLAXjvHCQog#R)IE*Bb4Y_L^X=TI`i?i;XU7}uV&)SIJ_NDq!FOQ~0C;mBW8 zE(n6i{&GPnn+R18$cKjE%UaHZ-R&vMtSEtEv}=_@_%Q%uAnHioUa%T~eJeQWYkUCl zTT}653R09M87@NlPOd0@8vD}x92e@$a4`QudB8=9g_tafoS6UBR8ueD+76;_{Z|Vv zbnmY|>MQyn^kLBN?)+feR|gN24npT+f3#C!&I9_y7*lZBxYX`|fuZQ)4w-Y}u=lq) zSrCP(WB3I0O~%b?jeke@w*>Wi%d>qkar7gGD9u*zBY}@yDJ7c z7R+;8UdIPZFx}RW>SdF!`$35#3ise-F%IuyKEBktalhnHTtO)-j;W$ZUV}j|T6CruDeF zGu=TrnwWv}P81CR3atRzsXa|`WXr85XB?=LzRrE|CD`9_7X=Uh_w0y!VRw1mwwDpftRF9WGnoqepxaA|x=f&k=+v?`QTs<*oUyoGRJYLF(($n2=qE|c|yeq%yY-~k8@CTxBfN1Ui6*m5y z$n`VBs{z)hE3_eJb^)n@+IV3E_c57E3yVM5bK;Px%4#r@?XPNsY@WN@Y{bucHgLg% zM$|QO=N%?u#S!&D{~~@qEza@LItV2B-0y-fA&uP!&pXKTgme|(132*}dSu9S4JJdn zm-0oXtb7ms)MyDE5>N2ah?{C(ZI7EMFNrrXj~UWTCx5uPYLH_#OxeYg5g}`$Z^iNG z=eMYE6~wYFC`8u7t#jG)Laz_cF^Y765UK~7sHKB{tAXW-!0}6rY8`m#CDSGS8E#PG zu2CWBW=Jo*2n-HjM_hW&=8G@y<7|(vYtCj%8SivUCV)xbE*Z99{F* zgJ$f(j19`-=NgUiB3p+MebxsAXT*vB>^mr?!EzqL&fX{0C2$$;ifVi525}=H2AY)`h z3$0Q&JwF3EOq-_DC0sdO`}ieiAu8diB-s>r&-H~dRv^2&LSK3B8^7tgvY?Zys_01$ z(g+Q|^GW2FCb&G zU0auwT&@80shV5evrN_3pmt$nC{?{=vqbtWuDicI2uCP{b2f$6 z1#nc&U>lu0)W$BHHv$8ge|BWU&5h`37}@Qq=YLnH#8{SYo9ySPsXk%es<0bQS(l$E zuk=L!aS|o$nII?s;V_xtzfipnjVYI&=5Etn0yeg3E_F>bxY(Yg3HBs%rSn$f>kBfK zAqZQZQ}ZU~TD;(*E}TR3o(O>P*K8{B8t=;j1jFl(fV#8pgMS9uBq5sIJ$iQMq_kqq_bXtY|qEUq8qF$IK)qUAGa zKEBdG2^h}!21ysM%j}UzjO12>RdcTZhKy+K==AtBUa;k-5ZtG8=dh`3OBCwBRb+Q} zG^?0pW<5}ydNGI;5Du??75b*p)90?w+7QE!=I7)$DQ{SXGhv(ay z`MJ2zpM6k4j|;kX#)c%ztLSJ=m zxZthzUz?zIoPlhe<~e%`TQa!eD|q=K&6+*cjI?o9T9k^jwQ}{EC1z8GL(;tMRu!!Q z_wFydQwT*?ru9v;z6QN8?LUlgJ*0|&+C~n<%R1U2-iyac85DS8^#gwuL0j7YQY`CU z2{wdahz^RQe{^T?Vw@k3;f2NC?_Rs&+zfTWM@G?`1TaFw2G;PT7&n=N0D2Z6!E$)d zdtVQmH2i(L1q-db#=AYne#@|JEeR-r6d`aY9t}tP4_Q5vxL2N|g|h;o8L{w;px;!D z$SDyZbq$syvR)sv;GAJC=b-PAGO!j&sOo0k+#zHV-bj`WmQ=IXvmH`tkb)J&U*z0` zt_~oH1YZKe;AjnD!Y1(9&GMJBTHjVx$sjs?z2IB)vz^7ur1>ekm<3{|3?|jpO^ti+x>-Bou|Wl47748bpL0L~8K@w7`+oPM@t_q}F2PEsj{o!# z{sq&&6mB*KW4~6;S~)bw;tzE2v@b7T{3W%dgf&44HDm?vQM9>mj^h47YpX1!+h?tc_^H< z4jgP{`(awapb&C{`s)Tsb97kZDa&jrWmNiJm1X(W}B7GoTYHKgq%T&Y?o%A{E=u?Huu@sd1g2 zJ9e86#mjoy(msa}RHGqs5fu@4;%H&MN*4`_8<)p-9J}cRG*YKxXU{8U>;8!tahN|? zdn2lo1`hwdrN^KKZtaQEivB2gV#Qw%A7SUP6*(r&zW{wK$d>X{8@fEPAr#6aNsl<$ zQNq4%c=4y|@gFz(OK$6WL*HPGSEZ-dJg!ef@+sRgGgZyQWvFLkq`(U)vI;iV)Ri2C zl}A*4KF%FbdewpO>@6beDE=45Gpe>-bCP<5xJwVbMwUxH@B5DtUN}G3Scpp16QiS? 
zoYda-hxDZM5|2DW^S8Xs*Ky{I#$^OEHFtbh?-2@vHuR}X-cuRiN+v&ha&>Ijo+@K$ ztv&6jQ_^sYN2_TnPCx5HyI7;;Kmt8O8F>kMLloO5O5p1I8CO@GN`@Z&rpUxWKRS7u zn^2bGmWk#3gEW{^ua=Ug?{oPZM8(@$)%6B9ft7CLj>-q-pObi1S|ZbsURSXPe@x+T z*%;e)nhO78C!x7#B3-X84=#RU*EiYjjkI?ij%Xfkvwna2G@f)*c`C{ss@oVRle$Q` z#vz_Gvu3N;wtH?%V^x?NE@+{yQWC>lM5j`~K21F!2DZ zVf)EqzfotUsJBb!ZA1}3HX0|kQx@N6a&irxz5h(nRmvjqWZ2j)` z{VjqaIF?zY+}`mBJzp$7oxXp5h1s5doN$MtPr2&>ZU#PmJ z7OFlkWT;&}z>PW6qh02CN_4v>S!}DR&i&8@lzW$R7cJ7pc;)(fRLhQ59Fly^5AKlG zxeave07jFiI^!X@eF{W3K1*i73sLZO`KpCY3EdPy0f<}-3&0Y*?C2QbABeYNlTcjP z6z#R9q&fH5mlzf%Ji~2I9p1QNqDHW3fw0nHs=`9L8&GBQ22__$xNBYk$t<4$nO^K* z&*R&@+G0-Z{lhzAK+9Q6+*qghH8H7P1%{!%O6@Sr5vD8^3re=H%DmG$+X@bD)rguXrzI|^O;ao+ z`9)xDB&NW?``A&EiVJ`{N^V&i&BMpF`eQF&`E!c07PxW40oQ31b;5VHno>_Jjjz(v zFh!f3h=P;wTF8=TQ>D3J;+#aQTs_iJ#E@|gO8ArD>L1WiPAFFJfIXp zStQ*k(|8g~d&nAY3=rkK@o5@Ish4MF6Qx!P&Zb|jpYi+1cWfTUW*{`wE<*>duWC_H z<2&Awwq-BNPl&+WKg~eA(Mh`d462gPm49CZB>EDm=x{?P#L3FRr{-g->~ehvCFlo) zd@Cs8(yD#nf@}}+@G+;SNn;g{$)t`1MG*QY3{*8X(h`hDj3QI0p}P{zqOx<5NRi|JI?S3OWO08V@5ogqAvUyJR334)RILy}UmxPu+uO1gd zYP8;KgHybGXopfI`P5@M90(nwBiFDyk`XU2`JH3&(`N7|olExIVYH0%5Uchek0!}D zb!A%A!S`whzEy0eeCQ3nT``P^#(G#b*=*3IdpKHll?)?p$|ABh($|0R;6{)9aNMii zBQs$MOJAr;;B{y9xZ})lJhZ*#tk@n~3h&+w^}LRkarcifPmV&1FEQ^_yZ2w%SG}DS zw!wm|-zkuTd*gkYVOFpiO7hJZ^22+d2%2s_0G&ji|RY`PRJqdAAvw3|@l zKCHogb$;!HIk&WKK2CF+vE+~Dk>doV;l+$}-$P3J}lk>3KPPJo_71gVvW>$Au zbYtlzr%$i{FT=Ht`LD-pSYd7D2dWfnGfM_zXQ4y1Urugd?!VOAYx;x}wrBjPFZny^ zYx^7SjHeAsG6e6U)m5mY>r2f{9G!6bM?uDE99#qM&K?J4=Mj?YQ8?w8uEgP(r0xdd zu=HALa_O^#AdJEjI!No&)aN%JC@}NH@ybJ=f6L~m>nD)*v~^4fzaoE>p8VaWrcaHU z*=gzCh8Ud5pW^Yw*9OOd1xaAKb|jeBCroCj1RqM*bmZ2;_ul6q!gjk21)sOp=koJp z6}kD9TIqJ$YYkqxpYN%La5m#gv~HWs?)fde4OtH5BgxV|65;MU=x2Kw=qWgQvhZsy zbuc~71bj;hdQbBzfGj6g?Brk0EEe{;;+4D1JBVN zm8F%!st&=nIAn3!1nn`Xcf}ONE8=80gOs-i=7`RH*gzNedU;k?J#|JtYIwk0KHkpv zPEq%A@Q^UC=hXe_%{|c3fOno}71z@*%^_RQB&tFP?Fd#AUX^pNze7?uOqvAnVxKb3uF`tfJX!U6(VA8U-~Cd@n<0DfFJd*Qs|jJH#O?C$iR3fNjpyzN}1mGMs184g)<_{YuG*!~^QwaF~HjAIh8X@n)@n9%-r z@5V3hm&nAgM6HbX&f+Y;3x&E{giTqz7yK3ZLCq@b8#kR8{~DV~Yc{o*EbBwq`Aves z>0-`tHYiz8nae|4cRDc8HNMOG8HFuqzOQ#h70i-1Q5rFU;`0FIAnG;ZG}8 zu&-ZkkWQCJejP&pM9q+Ec%9sMJwA>#@NZrj4PCh0roD&Bv@d`ADd&a5tAi}o(CIJH z(jPh{Pg5hIp9bKM9nhr?k0If^DIeG(${-Q>jnqB*PMgaH<#)GO)$4NOh?F0jYIBE~ z6QPg|$VSTAVMMUO0?jWLYNIVej~6tt+Kq)ncOW=9O)1>Qx^Et}_?3xQwMBq=XD2%@ zq8-gP>p95txXnq(g?!V5R^w+|84L-I(gur!_%fVcwkANZGMu9nA6jK}Z;bmQz9O?U z`Et6?j-`cqQ6l{htm^2`bz)(XNB8TyS3?Tpp)%NqDxE9Wu%Vb3yF9D=taLGSLq7nI zVVe5hEao+9_fyu&S>Xmd`}#cmk=B@}7P!ed-hI<_ctg;ycc%KP^?`DZ@(~f)dsdi+ zOtw|=5J!AVR-cz70+2UxKSGQ(gvM9Pf?(aAGUG_8NeIMm>Fx`By}6 z%^+Ea$~G}{e~FVcW(abB5wn4zc?nDWD$v4s=QkD)VuD=RufF{*WfRtSb7=LCH$taY z>9g4xRY;5-3c3%WVMng}jc3ftd>S|vffV&&vpem|q@c2WHh3Lh4$$Zs$ML`>i&H2X za4(mTcG>O&du>~lw`~elmj=wKJ%NY#eQ9vAd`OAO=2i@K!5a0}Q@EcB$Y#1keb>t< z8ISl`&bCg*kz<7Ry#q0ujl)&cmBMP*PeV34Pvey8kn+=eicESmyDzs%6(-h_t=h1| zw>sv=uAAVjIlnsAu(zu;Sb4@8eyvx=P6itf{V8XF-QM!y5JO4+de8DrGS+B!$$k|2D;lYTQw`5)jXWgot76VBLJ@%AQ?mBJx@_;v*K5y+oWQi zdLp^oNMI1f8`5DHW$q5_*k;nZ6FwWlKid(pU6${(QKlEqb~(b}iA;1IjGCBXT+8c0 zn{+t_t(4`{7Uc}3-f%)O5i4TnITu_P=ucl_{rI{jy>!`#6%%1UV9g&pSO0hD_oV~r z3y9rHpgDFXYC7@5C&#L~>ds8JQFvi@wA3cZZtdQUdYV!hQ9*?Jtb%{!;t_{CbM$_F z!!37wYa%^Ds6kzvW@gGaaP(Ou6;QW_z`sciszTypUXXZNzk9PC-i2o`T<}pk@mXyK zQB0a0wjg37#%X@V$Zpj{Bd|l~%+Qy|N7f&oZ?@{u-F9qM_yDk{rkza;zXGyJ3qsbYPG$lKCOPqLV+Tj4$H<3I3F) zEpxs@j0#>An6NVo&IxXNn{Ws*ZOPm(k5ZiDRZOc7`!tZ_<@mP_jXjCbThsrbfTJaL z68};0mB+7EO_AS}7h{88uD9B5y>a6iT(~2+Xk%rIO ztwE<5^la>ybD37me8HA(-&oET-M~I@j_wFS?DjHZ3l*5HPtz}LbKt%1S4{VLEti0E ziz0Rg@6GNK{MEcw$)otL-3Ko=p@BwR(|y}}Cb{U@n{!ouGp)&<#S4i0B>cbjj(;>E!3 
zN%p6a!Q5v3KU>2Nga=W}bJ|tbf=FSLdo?8;X$csa?4kO;tSI6=bH8$n(qbFqDy}}f z9bvvOIvW>fu^R~@sXH_we7@a+PSLDIQgV`pI_$?uLAVp;njO|wx3{tm8}qQY{I|^d zw>~7JaozVVCcpY2e{t~fVk5KzPP#BhxZk?;$q@4ze~eU?i98yS8x$4g4_FYjf*xwA za=6?`k?J-~)iRCwKRsP#Sd$GGCI(1Ijgancl$34}hDb}NASoq1V3GqSAR#RhA`MEz z=x$WHOS+_C`v&j(e*3X&KX;yUpZlEWttT$+*Fk<-AN z!n*JUxlP1c@QeZivBQ2a7Cr$!@wz&>xwuS@ z*_9PzS>iiD%h=y5`M&b@p3>RzKDQ2zMjaeO9A^*KC+~$D2d!jQB^tCdmADn;mB&60 z?SG3lkru_)K9I<267#`5u7$XO%0ix#WLp&bkgfTd)rZaJ%mq62g7t>+#AHb`#={(cgwj_*dFaYJNVIHj+6mL5|3}m4{KNZI4W*b61XhX2sPAm z3i&$h)@bMffG&%MS27l2VNrm%74P)A^v{6&e+5Pqj`TxC;K-3sQAJZpOW{IpJEco807HKa)>>Qz_C43mjf;gGFNd+dn?(r;C$XePrA=)D!#5WVlCTmtb+K%eK_6qH8tfTDNig zI#vC4z3KwEmWk`^7K=MqrC?@HPed%Jt^fc%3;`Hm8Ol+8@fCV5qrKwLe<_~(lXv;e z|LY`T$V9P^xBsrQ0ZW$NyuRv;LvE(ix7^MnlgqRlH?V^9rH=)Wr>=WYwC8F;FNod} z)1b4h{q!6x+|J>Ul`BS@bkyjy_=rY;Cr`_ApcBR6*i6BV3U`co2Hv9+3jzPnWv&9;9p4mv{S+3s!y`vMK6ZD@J0c zr#0UBm_bqPz5ADkyUETR1CzMlUyRIn2o^voBXA*rKVDv{%3|X8@SE)GET`{#uQZH;CfL^Nwlx!8*T!0*bB3BZ zjVPvgu=hX95OR@h$c9ZbvcK2U|1RRJoz@k}OzCS@!8c8huciS_GE3qun*C{!g-c1g zAzGRG_Qht%+xB-p5l1R9BFK)i4O!9HLFB}&aSvPh8NDj_(Wd+C8nMyd+sk}%%VJ?` zoFYdRhkaf;k(*OKR%g|oWTm$Y2^Fa?z|&QJbL@XYoj&c-;HIXljmBXe@*>!WKpwEn zf7X`aV_PF3ko@sVsj2Ekk`3!JKC&h+^Gcq^fWsG!Am<`C_B1VOs0a7vXE+3dd~)UY z*Hj(=&0Z;fU@!>i5IA(^=UgSM;>kdYLu4s@9l9!lJh>4vN3oV{?OiX$e;GU!bpcJ8 z5C@CWgR3dX+A&-QRnPXBj#~J2&@xBBB4WL9cj_xa(e4tMTW&#fF^$-Fp-ZB$V+;D@ z`2&*gUc4$x-8Trbk+%0W!VC8>GwuOT$&7aasV_0-Vhwj`!_Op?9OECDGJi?>APluFA}JqvRCsr zaO|5!q9AyH^SL6&Rthp-D09jCmQNPJP7!2HuHlHOYZB-tKI4mgHtWos_W|>0UjLAQ z-t6>yzZLVElT`G!);A2T+X@+`7@YLeMLBn@Kr+e1Y^4|E%8T|Z0+$p*a0=(xmIh_} z%;bq&1)SXltdrWnCud&t&}Y#@?-Q1?2c3eYIx&hcJxocpms(^Do(SJhegnNHFe2Iuiu=HTfjN;c<8D2jh7sklIk^9n~=#$ zF_gbB)ivX8d*lFM2uyd^Ipd0OCHSX*WC&T@i)7g#lI};Iz|3~8Z(r}NSb4xr5Or4$ke%@qi_k5wtl^dN*{mCO1QJvuI9W~=1r@G~SwKIq2e(HtBo8a0O1+^HEjN`#N%ew zacwS;1V{w0TfSDH>uPkAl_WbeD|ze7?vr!i3nG6`Csx8~+fy&*x)S-Ko*~ubuf3&1 z$uvN6^v6c&RDepGI*QH1?EQ()BJ#%~g1v5LB5a*+rdZzA|E5*+jCkE`$_xV5jVLUbseW+%RJewy8l3KPXq#Fv;n!q1;j!we_By%i4wI|<2Ybi%me zsKlySB4CY%oZ76h9QFOYG&EAQwsWf?mX+gtLiLh`vavTBv=O#CzCZVu7}8svfPTH* zBFSfeN1-OdSMFvXh`MU8&K{-3`r7Xf(FSF1J$no1P2_G`i(CE)m`$_D5{fI!7EXVU z9hoy8d*WpB*Ed5f4nte3X3i+Py-^>^<@O}ZWXZt_l+C*5RTBI=_D#V|XvR4akUr%ZdBjLC%5x>5DC~y0R zrrk>Z&)74o(TCXwi+gK&sM}OXip&J>zBO5g`+DnBpYu(4z1H7%jDOGtlkK={D!vuy zU6pj!3oz5ghuEu>orW6|5O(0g zm7q)Lh*~O@4QPW@s8QH{3*BVKsbL@uICT8#6**Iv@|AP}&zF9(=mR_YlkjRq!1w`v zZdTQcN=6hpzl8_>c2;TG&v`QR$afdTp9-P(RNFAiZm(Y)xaa7U93$cWR-sVGx zzs&IK_DUhTxG68XlcXq5nm@3IVNn*niI{F@^R^8T!&LH>ZZUmuEH|xEWJPa-HA~3H zoce_96SwG@o%Icq`QF+$@;=O+L<(G*Tv`^WQD?eaNSh4uxxYaIk1C0h`I%}-AixDp ztX1`pnrV=fNi?I?4U?C(GeKTqS5WK_9s;uf$vsacuS?wiMr%f3q_-Gs^G>yQynL7| zJmx%Cva#Xe4tR)tIk7&3m|s`nr(ZOK8Sjr-NPv7)%~|2*RFyb)RDW1=?sGBHgu1ZI z5b&1%lI-D#H{&70#UzCIg6IYZ@dFY9rh>L2dnY~9&9h`=gH9$MH-#j)p}Qkqp&bu> zQ3Np`-#QN13^+@oV(UUQyQ@6*N}C zye}7CIW@c^O(Q3Pibg-~DqthZmW$gxRGao{bb6aCM%(Vn-Lj%9!}gXMYWH2SU@u;A ziv{i40ixLL4)X$)bk@!;o`|=#sYPL9Ao+qqkgZ^2o;7Kp<1f`eV$>#D+o_%M9-jUmrD&1 zMC#OutFx?c;S!;Poha+h!NSiAjjs{><-K6=E5&iGsa^F*1ox&vD{p0JZ@!fl56_{iiBfz{z+ks0CMtue)CYE}-OQ%* z60CWni2pEIbD6zn^baXG9hAbxhS8vbNXS!7ln2_ABEkX8`r<*<=Ga)c_zbwD6o7v3 z*J+C%xu1R?VOE>3TO)>znc`~3JkLQEgsEUQ*hGnVF?|FOrB2LILl-zAL4tg`g!*uY z*zFHv4^3I|K6xzKYo(xTj^unDsIN-Jm7cdDSF{C|;_xGeV^jFv7(Ze(me@pzfPCY+ zlpXkwit+^xVP%piy>Wag$47F}6^bFuO=oD3+_(meC-QPFzVA<50mPp(+=m^KGH5Ju znJulx8&Y|j^Kj5JX1Pb1NDXA-6r#%eI7N=Lx9m!KnBvxE^j@ikS^oj@EugvJX3pC? 
z^Su9)Mac8?N&KgF-5xH5jYRlDx50FJVyf3Gg2}TvlY!7bv5qjFty}?U+MtdtG_s2^ zBEk<_$fN_3^17_Ixmt{j$M9Hd8dpGF9FBo}@E^AB)Pb_Dr9~+tVfc|Ocm+%XyMnPU zN84P!XLZ_X7rN#bq_Vl>PT3wy!r{TOnbV!558^^k!-geiPll)Uc zcdpKvs*8T?Gn+gu;1*Kn3H#W*z%9#*Hp5r$>E{L1`gvASkXLT>@&!;1Z+o6$Die5;o{z>>a7-PH_lUN&5*)VDDm)Hk;0Bii}riK{PVvmOe(D%K7GPsZQd zNO?EB>_Qf#eY5=0HAo`pc9pM8t>(efO1=!aV17>r7sLMK8LJjlseL3?bb7{I`DJ*F zh#^r#$DzL#^_(9ASDO@7SB~5- zVfEpvXFSTPT@~xf{=2(`RS{&xGKv1%Ps2G+1^F=c7S*&qiA!FRV~Gx=(_1u8t-q4{ zS%@vAvZQ?g1rk9dMhbsa-irX=;R2!<3M?XmGVD7ehuh9NMj*G2xAM;4$n2f3fs{PA z##@+!I|3dpg~YYy_KgzDU+#oz4;?9;Kb)kAPpYP0`ao;};=NXDpNP~Y<09PKf4I+L zrI=gRkhHgQT3$^E;MoL6T?9POFYfZlLUTcKQ zdeAS{p6r2TpUcnRvD;_EtF(-dB}%02L?qE&)+!WK6ot>OPOn&|b#_G`xVxq|?r(XH zcM&|@r7h16m$i93LbE%>?=QA-xT7iUzXI}jvBB%>vj8I%GfSDAn*!HPy-7b?V3nBk zt23;gqP&Or`Ih`A;6-f^0FjCKmrfTG_M|j=9P&9PA{uH1ToFE`u2=bUNl(i3_XDrm z{KaV(vYEYQ$3oEX*}gnGn@Q2rhpw4|;zJ%*>@rFIe#|ZP#Wx8;_rtHWKmoWKX>qUw zWxqU{qZe~+sSMkl(-ZeP`VHt4!|>Y=?iEjVF0bt?0oHMII|fCzw+NSIyB`R28Vk=U zn!Q^SyR0ABmrh9VKW>SJ8gQ!IL1PF1AA$k@u({`cB+`1(L+M4IuS{Sk>K u>3PXpIN_?Z^Z3Tg&+q1JbMwJ89}=yAd+!LcnFdEgeV!?Ul*$w=LjMP>h*x<4 From cf200a32ae8c7a3cc692f1a11c5e09eea6cd4943 Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Thu, 18 Jan 2024 12:33:19 -0800 Subject: [PATCH 234/263] fix(docs-website): update copy around the video on index (#9661) --- docs-website/src/pages/index.js | 7 ++++--- metadata-ingestion/docs/sources/mssql/mssql_recipe.yml | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs-website/src/pages/index.js b/docs-website/src/pages/index.js index 085680df343a4..07345ac7100fa 100644 --- a/docs-website/src/pages/index.js +++ b/docs-website/src/pages/index.js @@ -66,12 +66,13 @@ function Home() { width: "18rem", }} > - What Is DataHub? + The Origins of DataHub {/*
*/}

- Explore DataHub's journey from start to present,

-                and learn how we empower leaders in today's data-driven world.
+                Explore DataHub's journey from search and discovery tool at
+                LinkedIn to the #1 open source metadata platform, through the
+                lens of its founder and some amazing community members.

diff --git a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml
index 93be7a86d72cc..5cfc086756090 100644
--- a/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml
+++ b/metadata-ingestion/docs/sources/mssql/mssql_recipe.yml
@@ -27,7 +27,7 @@ sink:
 # ------------------------------------------------------------------------
 source:
-  type: mssql
+  type: mssql-odbc
   config:
     # Coordinates
     host_port: localhost:1433

From 6cb3dc839cf2c845a8d2f182c20fa68dcc1a66b6 Mon Sep 17 00:00:00 2001
From: juhyun seo
Date: Fri, 19 Jan 2024 06:45:00 +0900
Subject: [PATCH 235/263] fix(protobuf): fix reserved field error in fields in nested messages (#9318)

---
 .../datahub/protobuf/model/ProtobufField.java | 17 ++--
 .../protobuf/model/ProtobufFieldTest.java     | 32 +++++++
 .../extended_protobuf/messageD.proto          | 89 ++++++++++++++++++
 .../extended_protobuf/messageD.protoc         | Bin 0 -> 8749 bytes
 4 files changed, 132 insertions(+), 6 deletions(-)
 create mode 100644 metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto
 create mode 100644 metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.protoc

diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java
index 5bb41017488f3..c3ede2e62c314 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java
@@ -277,13 +277,18 @@ private FieldDescriptorProto getNestedTypeFields(
       messageType = messageType.getNestedType(value);
     }

-    if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER
-        && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER
-        && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) {
-      return messageType.getField(pathList.get(pathSize - 1));
-    } else {
-      return null;
+    int fieldIndex = pathList.get(pathList.size() - 1);
+    if (isFieldPath(pathList)
+        && pathSize % 2 == 0
+        && fieldIndex < messageType.getFieldList().size()) {
+      return messageType.getField(fieldIndex);
     }
+
+    return null;
+  }
+
+  private boolean isFieldPath(List<Integer> pathList) {
+    return pathList.get(pathList.size() - 2) == DescriptorProto.FIELD_FIELD_NUMBER;
   }

   private boolean isEnumType(List<Integer> pathList) {

diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java
index 9508f4778e5c8..40d54a8651012 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java
@@ -323,4 +323,36 @@ public void nestedTypeFieldTest() throws IOException {

     assertEquals("Zip code, alphanumeric", addressField.getDescription());
   }
+
+  @Test
+  public void nestedTypeReservedFieldsTest() throws IOException {
+    ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageD");
+    SchemaMetadata testMetadata = test.getSchemaMetadata();
+
+    SchemaField msg3Field13 =
+        testMetadata.getFields().stream()
+            .filter(
+                v ->
+                    v.getFieldPath()
+                        .equals(
+                            "[version=2.0].[type=extended_protobuf_MyMsg]."
+ + "[type=extended_protobuf_MyMsg_Msg3].field3.[type=google_protobuf_StringValue].msg3_13")) + .findFirst() + .orElseThrow(); + + assertEquals("test comment 13", msg3Field13.getDescription()); + + SchemaField msg3Field14 = + testMetadata.getFields().stream() + .filter( + v -> + v.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_MyMsg]." + + "[type=extended_protobuf_MyMsg_Msg3].field3.[type=google_protobuf_StringValue].msg3_14")) + .findFirst() + .orElseThrow(); + + assertEquals("test comment 14", msg3Field14.getDescription()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto b/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto new file mode 100644 index 0000000000000..4aaf80cf788dd --- /dev/null +++ b/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.proto @@ -0,0 +1,89 @@ +syntax = "proto3"; +package extended_protobuf; + +import "google/protobuf/wrappers.proto"; + +/* + MyMsg Message + */ +message MyMsg { + /* + Message 1 + */ + message Msg1 { + int32 msg1_id = 1; + } + Msg1 msg1_field = 1; + + /* + Message 2 + */ + message Msg2 { + int32 msg2_id = 1; + } + Msg2 msg2_field = 2; + + /* + Message 3 + */ + message Msg3 { + // test comment 1 + google.protobuf.Int64Value msg3_1 = 1; + // test comment 2 + google.protobuf.Int64Value msg3_2 = 2; + // test comment 3 + google.protobuf.Int64Value msg3_3 = 3; + // test comment 4 + google.protobuf.StringValue msg3_4 = 4; + // test comment 5 + reserved 5; + // test comment 6 + reserved 6; + + message Msg4 { + // msg4_1 comment + google.protobuf.Int32Value msg4_1 = 1; + // msg4_2 reserved + reserved 2; + // msg4_3 comment + google.protobuf.Int32Value msg4_3 = 3; + + message Msg5 { + // msg5_1 comment + google.protobuf.Int32Value msg5_1 = 1; + // msg5_2 comment + google.protobuf.Int32Value msg5_2 = 2; + // msg5_3 comment + google.protobuf.Int32Value msg5_3 = 3; + // msg5_4 comment + google.protobuf.Int32Value msg5_4 = 4; + // reserved comment + reserved 5; + // msg5_6 comment + google.protobuf.Int32Value msg5_6 = 6; + } + // msg5 comment + Msg5 msg5 = 4; + } + // test comment 7 + Msg4 msg4 = 7; + // test comment 8 + google.protobuf.StringValue msg3_8 = 8; + // test comment 9 + google.protobuf.StringValue msg3_9 = 9; + // test comment 10 + google.protobuf.StringValue msg3_10 = 10; + // test comment 11 + reserved 11; + // test comment 12 + google.protobuf.StringValue msg3_12 = 12; + // test comment 13 + google.protobuf.StringValue msg3_13 = 13; + // test comment 14 + google.protobuf.StringValue msg3_14 = 14; + // test comment 15 + google.protobuf.StringValue msg3_15 = 15; + } + // field 3 + Msg3 field3 = 3; +} \ No newline at end of file diff --git a/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.protoc b/metadata-integration/java/datahub-protobuf/src/test/resources/extended_protobuf/messageD.protoc new file mode 100644 index 0000000000000000000000000000000000000000..03cb56b35314a849cb594321a119f3e05965ed3c GIT binary patch literal 8749 zcmbtZ%TgoB71g5xI9;-uW*a;qca5M|f-hgO@%x>nR}`BFq9Rv+h0j+(+KbEZ9Guv$-?NI%}Cr zmmSNtPI{+HR~_T>((H6IVn&{H-a8|_pMTEAidOHWWxh9BJyU+fMn2NROq53#qPjDf zpJ!~dWf}GmnOI0hYul^&Au@4K2FCVp6Oue37=uhMI|OLNlW~ED3Qc%IU$E4=WwnOj zBs~$}RP9c)eKtfWYN3+vac`A?$Xa~>#FVN?egTm(deII@vX}GUt)7xBD)y@hU{ey%vS4HBwr)d zoxeY030Yc*{4UD=HN~Ria(p2oXa73Rg+kqG@%7&6DXM|L<_^@$E^ipN!JBQ{?9|Ue z$>gV2=fbcV->@#PJI%9mn`f7o*IWgzwd)zql~#+3cU|5wyJqL3*~l<{ZrhjLw@XV6 
z^P}0aMA^CSA~uenAn_W^iNq3PTsIrdE;4Y^vzu0%8|?<~bxqLWU8~orn_}jq*)}@Y zf>$@qubTEbw>sjl)dSB9tI<4d)(t{PbE9K&SiWf5w%OpQ`j5>99_(|&#xL@&)v~UT za$dLEjV38|NeMWn{T7tyXK{d7ms_VEqq@~F`9-g5!*kmJQ$jUPtdAxT-H0*Xw(Mrz zOygy<%Uke~5W*JWX+IqVqTVu^7iI^M98LnpLb32jz{f@pq4IgKfXM&FgHav`wPDqJ z7iQZQA*PfpA^sNLbT}%->@Lviv`ykNjZHoDfS zeMMzLL{UM!dui6Gf}qu;!s<}%w4EyIc2OW1ltpb@tMY1jvv#29D#vq0FTdA{YLTxW z;-$(r%9TT1+uE-2?ecC>)vH`76){_?>Du~!t*loWpI53-ofmJE(jixWtmtaB%F8;} z_A0v?&;YM1rJANz(_AZU?Cuw}(pH)SftSiP#&@+ntp>STIW5fI)Z^t%zNhLN+kjEl zwOy@tC=70DwGtWIEbELbyrSqeZDW5|(RpQGuav7QC(nvnbz@i2_S9kq_FZ-jT{P|HV`}z+)XoHt*9Fso)8Ze zVNmqLv?Qgrd5?Uae`h{hG?R%H^U6c2(8i zYa43yTfSSS@Ne?{stO`SrKSkez(Bb09uBPUS2YnhtyEKWeZNxE%B5Gx@&O_P*A?g% zMTE;G@|J2vE$fE_OK}hxNb`ej74t}R$%&Ri0jZ)mHfmup7(}fg(Se`5q;BnMTWV=T zB|c?>Inb)=D5sz~rcis(61x(G7z$+bcDD;GUvYfNDDCTGfg?5oVYy}0O*Din^bjmz^bphO0@Q4P z+G}>t(H!h66HA!-h;h<2+h~tyGe%$%y#QM#`a;ueH3T@?47$as(QDcKBb|Z7suzxU zhbVq-z~i=st-@%-7JL%h6*UsH%8q$yq8+-R*j2hj;F^Q+&(wQ}OS9Fquc>J@Oxvv6 zAcRN>dTj)u(Zp`mGg{ninbdH*=$cL}?@7TxZ@i>NXmG1-(x%k}I5qMBni{DoI;$@& zqTo(BdUXT4ajRn*jq5?Gh)a@1V{&{kGDA;EIi8Mu%Tg?s{L*=JCSOOMiOFYjoF1LY zOk`V3j>&O)bS9S~X)zg<1>F5F#?YVA05G(P6oI)q@mhYH3f zji0y_0q1X$4;g2Y6p2OT_?y_n*@u)g?3#40_G@XJjgZ6$i6ogjg7%x#18)?^Xj*0_ z9Ei%&50UlgEEctMg6wzTl#b(f^AAq#x6qG*zz<28Jp=tH(N|*ftSsmY6MgI<0QO33 zRPe6KL!F@F8dg<*L?QE zL?3$yfW79j#|X-r%N`@J*T9~NCoW6nh<|I&pZ~J)$u8g+$Nz= z6H0)}S#p}qVHGFHk!Eaqc3P}rGS-&|yoVdjSVAmYjG)^b9rs3Xqd7SxmcghjS&`1I zd2m+Z-HwDV*num3%XkzNEH@9MME`@I2S={Y0Z71s4{jbXg7ybD4;aCL5AHHZ%F-3? z!ngI%6S~yj%|E#N-$FkL0$1)bND}>Pe;Ei9ee59s_Sfz*zzE9M?lQm#?60S$PZHt) zlw<$o?18&+72Pj5)8=n+Il-0o$U7v>-tp0A{@;v^>|O75&*bkIyXc;+9Ets2eB&9* z{xWj|F(afi&{)w^q_9b+Bv(7@YRB+}=nn0y(9SyA1(pzIb2IU~HFLTvJd2%)c*sxL zsG#0#2;L)lf`(ed709{**`q#zECl|O6c9N%%Sb18wBmaH+2H4zOt9nVBP6=w>ttsf zfA7}G`Z}JcckATjoXaggBZ=sIeuG`SJ|AhnC3C%5tEVof*HkS~gJ2%}m@|Ef=WeXCB;JEgz^A zW+v~gR+#-;T$E$pEtCAG%hBDzM8@-8Ip&W(Sz3KMIr&>rB?Y&zUfx|LR4N5gDuvlU z#su4f%eHWTwgr!E;pmf#C(O3sEuO;tJuG+&yKv+!o}mk_&yE-U!qMtXLL~2Qoz)#F zIWm>N@7Pq_e@;w|;(2UpeCN-P>FaAG@?(_lCHVLniOBSXn^H6~OE;7_Hivj*fqmxO zP?8Xxon)ff@;&E%;%w-&DbTGz#Gx6v#8}n{2ld&fbax9XNt!7>1927biH`w(-Rkr$38vhI`P=2=UgZr`}8n|oyR^sU_JHeMM3YWPmk<8 z_31^S_SC0`yU6FU{V=_pN006gpMxGgn1=y1YmVe znhVANn0xkRFoxy1g;zcxUh|uP2#*IyZtxHwIl@DLU8=Fb2T+5FoiY z9|DX4a6SZx;S1^OL6`FZU>txiq-VYXy0g6CUj<`Ac(sVLC$6{gSW1q6B~1?E6_{J0 zu@oxwtBD7mdWs6GZwwD<9KbEygj)^ZpuP}-qxkiW0q2ZGhG{7+O6wz@d9i*L$EVmA zEJ^I06_ie-;Q;NdM=M?xFgC?fo(MvcPVn#y%%;D18J;Kz?u}RN1#U_Kpw{J;AXJ7^n(M&)Xrk4Z4xnYE<%9i^GQ=FFj2vemkgYcBchGLaX z{4g5O5z)#%klAy_9H*Rk<&zmFUf3N1^D-L?gg_vloM(M=ppbw5s(%h7^6A%o1|YT? z7|0GcKpw6R7$6^42Mmyxs{;lQ(Y3%pD1c$Mw64|$4A9zI8!$lYYi+;)rrrhy^20r# zb@q0^0IjvR0|w%R1_Lj=aulWYUGgD5+j#$q3oJ|rnWF0Y4mYEaSE9ad%po2$7f!My z$G;CeSPk$)V07<=RDY7`6T$Zb9^m^*{CjwSPlE1(f<-0qX!ZfEjY0tKuY(vxUrB`5 zAjMu8n+%FGMv+(Khkow`=b|JJcA{^B&PIHTQqt=M^vx|^Oicu102c90!rL9Nmu}u{ zm1_au2mm)jt&+YkY=&Cp2mm+HDm_4mZ{Gxb696WJM9_Qbp1&OeOaO2jfZW}`MrE8! 
zUZcnK?_bLS;wTVzLKNxhy~AJnK92%%XE6LEl1L_LIvkKMH)h}adqZF>xewGiSM5NjdCF(B4}_(UAk k5Yz*FV@SFGz3|R6VwoKHha$?w!Km|jHX@eCLGWqee-Q?U)Bpeg literal 0 HcmV?d00001 From f2e78db92e050483c851fb9edd4beb251905dfd4 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Fri, 19 Jan 2024 03:56:33 +0530 Subject: [PATCH 236/263] feat(ingest/databricks): ingest hive metastore by default, more docs (#9601) Co-authored-by: Harshal Sheth --- docs/how/updating-datahub.md | 20 + .../sources/databricks/unity-catalog_post.md | 28 +- .../sources/databricks/unity-catalog_pre.md | 5 + .../datahub/ingestion/source/unity/config.py | 10 +- .../unity/unity_catalog_mces_golden.json | 2506 ++++++++--------- .../tests/unit/test_unity_catalog_config.py | 4 + 6 files changed, 1178 insertions(+), 1395 deletions(-) diff --git a/docs/how/updating-datahub.md b/docs/how/updating-datahub.md index fb082bea7d151..b671e2fc5d123 100644 --- a/docs/how/updating-datahub.md +++ b/docs/how/updating-datahub.md @@ -10,6 +10,26 @@ This file documents any backwards-incompatible changes in DataHub and assists pe - Neo4j 5.x, may require migration from 4.x - Build requires JDK17 (Runtime Java 11) - Build requires Docker Compose > 2.20 +- #9601 - The Unity Catalog(UC) ingestion source config `include_metastore` is now disabled by default. This change will affect the urns of all entities in the workspace.
  Entity Hierarchy with `include_metastore: true` (Old)
  ```
  - UC Metastore
  - Catalog
  - Schema
  - Table
  ```

  Entity Hierarchy with `include_metastore: false` (New)
  ```
  - Catalog
  - Schema
  - Table
  ```
  We recommend using `platform_instance` for differentiating across metastores.

  If stateful ingestion is enabled, running ingestion with the latest CLI version will perform all required cleanup. Otherwise, we recommend soft deleting all databricks data via the DataHub CLI:
  `datahub delete --platform databricks --soft` and then reingesting with the latest CLI version.
+- #9601 - The Unity Catalog (UC) ingestion source config `include_hive_metastore` is now enabled by default. This requires the config `warehouse_id` to be set. You can disable `include_hive_metastore` by setting it to `False` to avoid ingesting the legacy hive metastore catalog in Databricks.

 ### Potential Downtime

diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md b/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md
index 472b0a541911a..df244f7d9ae9c 100644
--- a/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md
+++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_post.md
@@ -1,11 +1,33 @@
-#### Troubleshooting
-##### No data lineage captured or missing lineage
+
+### Advanced
+
+#### Multiple Databricks Workspaces
+
+If you have multiple databricks workspaces **that point to the same Unity Catalog metastore**, our suggestion is to use separate recipes for ingesting the workspace-specific Hive Metastore catalog and the Unity Catalog metastore's information schema.
+
+To ingest the Hive metastore information schema
+- Set up one ingestion recipe per workspace
+- Use a platform instance equivalent to the workspace name
+- Ingest only the hive_metastore catalog in the recipe using config `catalogs: ["hive_metastore"]`
+
+To ingest the Unity Catalog information schema
+- Disable hive metastore catalog ingestion in the recipe using config `include_hive_metastore: False`
+- Ideally, just ingest from one workspace
+- To ingest from both workspaces (e.g. if each workspace has different permissions and therefore a restricted view of the UC metastore):
+  - Use the same platform instance for all workspaces using the same UC metastore
+  - Ingest usage from only one workspace (you lose usage from the other workspace)
+  - Use filters to only ingest each catalog once, though this shouldn't be necessary
+
+
+### Troubleshooting
+
+#### No data lineage captured or missing lineage

 Check that you meet the [Unity Catalog lineage requirements](https://docs.databricks.com/data-governance/unity-catalog/data-lineage.html#requirements).

 Also check the [Unity Catalog limitations](https://docs.databricks.com/data-governance/unity-catalog/data-lineage.html#limitations) to make sure that lineage would be expected to exist in this case.

-##### Lineage extraction is too slow
+#### Lineage extraction is too slow

 Currently, there is no way to get table or column lineage in bulk from the Databricks Unity Catalog REST API. Table lineage calls require one API call per table, and column lineage calls require one API call per column. If you find metadata extraction taking too long, you can turn off column-level lineage extraction via the `include_column_lineage` config flag.
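As a rough sketch of the two-recipe layout the new "Multiple Databricks Workspaces" section above describes: every URL, token, warehouse id, and platform instance name below is an illustrative placeholder, and the `workspace_url`/`token` connection fields are the usual `unity-catalog` source settings rather than anything introduced by this patch.

```yaml
# Sketch A (one per workspace): the legacy Hive metastore catalog only.
source:
  type: unity-catalog
  config:
    workspace_url: https://workspace-a.cloud.databricks.com # placeholder
    token: ${DATABRICKS_TOKEN_A} # placeholder
    warehouse_id: abc123 # hive_metastore is read via a SQL warehouse
    platform_instance: workspace-a # one instance per workspace
    catalogs: ["hive_metastore"]
---
# Sketch B (a single recipe): the shared Unity Catalog metastore.
source:
  type: unity-catalog
  config:
    workspace_url: https://workspace-a.cloud.databricks.com # placeholder
    token: ${DATABRICKS_TOKEN_A} # placeholder
    include_hive_metastore: false # hive_metastore is covered by the per-workspace recipes
    platform_instance: uc-metastore # same instance for every workspace
```

This mirrors the bullets above: hive_metastore entities get workspace-scoped urns via `platform_instance`, while the shared UC metastore is ingested only once.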
diff --git a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md
index 12540e1977f64..22f3f9cb1d276 100644
--- a/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md
+++ b/metadata-ingestion/docs/sources/databricks/unity-catalog_pre.md
@@ -13,6 +13,11 @@
     * Ownership of or `SELECT` privilege on any tables and views you want to ingest
     * [Ownership documentation](https://docs.databricks.com/data-governance/unity-catalog/manage-privileges/ownership.html)
     * [Privileges documentation](https://docs.databricks.com/data-governance/unity-catalog/manage-privileges/privileges.html)
+  + To ingest the legacy hive_metastore catalog (`include_hive_metastore` - enabled by default), your service principal must have all of the following:
+    * `READ_METADATA` and `USAGE` privilege on the `hive_metastore` catalog
+    * `READ_METADATA` and `USAGE` privilege on schemas you want to ingest
+    * `READ_METADATA` and `USAGE` privilege on tables and views you want to ingest
+    * [Hive Metastore Privileges documentation](https://docs.databricks.com/en/sql/language-manual/sql-ref-privileges-hms.html)
 + To ingest your workspace's notebooks and respective lineage, your service principal must have `CAN_READ` privileges on the folders containing the notebooks you want to ingest: [guide](https://docs.databricks.com/en/security/auth-authz/access-control/workspace-acl.html#folder-permissions).
 + To `include_usage_statistics` (enabled by default), your service principal must have `CAN_MANAGE` permissions on any SQL Warehouses you want to ingest: [guide](https://docs.databricks.com/security/auth-authz/access-control/sql-endpoint-acl.html).
 + To ingest `profiling` information with `method: ge`, you need `SELECT` privileges on all profiled tables.

diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py
index df36153af9d83..d933e5a5ff38e 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/unity/config.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/unity/config.py
@@ -126,7 +126,7 @@ class UnityCatalogSourceConfig(
         description="SQL Warehouse id, for running queries. If not set, will use the default warehouse.",
     )
     include_hive_metastore: bool = pydantic.Field(
-        default=False,
+        default=True,
         description="Whether to ingest legacy `hive_metastore` catalog. This requires executing queries on SQL warehouse.",
     )
     workspace_name: Optional[str] = pydantic.Field(
@@ -135,12 +135,12 @@
     )

     include_metastore: bool = pydantic.Field(
-        default=True,
+        default=False,
         description=(
             "Whether to ingest the workspace's metastore as a container and include it in all urns."
             " Changing this will affect the urns of all entities in the workspace."
-            " This will be disabled by default in the future,"
-            " so it is recommended to set this to `False` for new ingestions."
+            " This config is deprecated and will be removed in the future,"
+            " so it is recommended to not set this to `True` for new ingestions."
             " If you have an existing unity catalog ingestion, you'll want to avoid duplicates by soft deleting existing data."
             " If stateful ingestion is enabled, running with `include_metastore: false` should be sufficient."
             " Otherwise, we recommend deleting via the cli: `datahub delete --platform databricks` and re-ingesting with `include_metastore: false`."
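To make the flipped defaults above concrete, a minimal recipe sketch that pins both settings explicitly might look like the following; the connection values are placeholders, and only `include_hive_metastore`, `warehouse_id`, and `include_metastore` are taken from this patch.

```yaml
source:
  type: unity-catalog
  config:
    workspace_url: https://my-workspace.cloud.databricks.com # placeholder
    token: ${DATABRICKS_TOKEN} # placeholder
    # New default: the legacy catalog is ingested, which requires a SQL warehouse.
    include_hive_metastore: true
    warehouse_id: abc123 # placeholder
    # New default: no metastore container in urns; `true` is deprecated and
    # only triggers the warning in the validator hunk below.
    include_metastore: false
```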
@@ -299,7 +299,7 @@ def include_metastore_warning(cls, v: bool) -> bool: if v: msg = ( "`include_metastore` is enabled." - " This is not recommended and will be disabled by default in the future, which is a breaking change." + " This is not recommended and this option will be removed in the future, which is a breaking change." " All databricks urns will change if you re-ingest with this disabled." " We recommend soft deleting all databricks data and re-ingesting with `include_metastore` set to `False`." ) diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json index 383f94144ffdc..649212c1041ed 100644 --- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json +++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json @@ -1,7 +1,7 @@ [ { "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -9,42 +9,10 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore" + "catalog": "hive_metastore" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data", - "name": "acryl metastore" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore", + "name": "hive_metastore" } }, "systemMetadata": { @@ -55,13 +23,13 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Metastore" + "Catalog" ] } }, @@ -73,37 +41,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [] + "removed": false } }, "systemMetadata": { @@ -114,19 +57,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "entityUrn": 
"urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "hive_metastore" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore", - "name": "hive_metastore" + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -137,12 +73,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "entityUrn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [] } }, "systemMetadata": { @@ -153,7 +89,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -161,30 +97,11 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main" + "catalog": "hive_metastore", + "unity_schema": "bronze_kambi" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", - "name": "main", - "description": "Main catalog (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Catalog" - ] + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi", + "name": "bronze_kambi" } }, "systemMetadata": { @@ -195,28 +112,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" + "container": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" } }, "systemMetadata": { @@ -227,28 +128,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -264,7 +144,7 @@ }, { "entityType": "container", - "entityUrn": 
"urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -280,13 +160,13 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Catalog" + "Schema" ] } }, @@ -298,96 +178,15 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "hive_metastore", - "unity_schema": "bronze_kambi" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi", - "name": "bronze_kambi" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", + "entityUrn": "urn:li:container:21058fb6993a790a4a43727021e52956", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "id": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", + "urn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" } ] } @@ -398,64 +197,14 @@ "lastRunId": "no-run-id-provided" } }, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Schema" - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - } - }, - 
"systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", - "aspect": { - "json": { - "platform": "urn:li:dataPlatform:databricks" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + "container": "urn:li:container:21058fb6993a790a4a43727021e52956" } }, "systemMetadata": { @@ -465,22 +214,21 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "datasetProfile", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - } - ] + "timestampMillis": 1705308660413, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -491,7 +239,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -504,7 +252,7 @@ "Last Access": "UNKNOWN", "Created By": "Spark 3.2.1", "Owner": "root", - "table_id": "acryl_metastore.hive_metastore.bronze_kambi.view1", + "table_id": "hive_metastore.bronze_kambi.view1", "created_at": "2022-06-22 05:14:56" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/view1", @@ -527,50 +275,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", - "changeType": "UPSERT", - "aspectName": "viewProperties", - "aspect": { - "json": { - "materialized": false, - "viewLogic": "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", - "viewLanguage": "SQL" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "default" - }, - "externalUrl": 
"https://dummy.cloud.databricks.com/explore/data/main/default", - "name": "default", - "description": "Default schema (auto-created)" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -588,57 +293,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", - "changeType": "UPSERT", - "aspectName": "browsePathsV2", - "aspect": { - "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - }, - { - "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" - } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.view1", + "schemaName": "hive_metastore.bronze_kambi.view1", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1044,19 +704,166 @@ "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" }, { - "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "double", - "recursive": false, - "isPartOfKey": false, - "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + "fieldPath": "[version=2.0].[type=struct].[type=struct].combination.[type=double].stake", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "double", + "recursive": false, + "isPartOfKey": false, + "jsonProps": "{\"native_data_type\": \"double\", \"_nullable\": true}" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", + "urn": 
"urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" + }, + { + "id": "urn:li:container:21058fb6993a790a4a43727021e52956", + "urn": "urn:li:container:21058fb6993a790a4a43727021e52956" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "main.quickstart_schema.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "lastModified": { + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", + "changeType": "UPSERT", + "aspectName": "viewProperties", + "aspect": { + "json": { + "materialized": false, + "viewLogic": "CREATE VIEW `hive_metastore`.`bronze_kambi`.`view1` AS SELECT * FROM `hive_metastore`.`bronze_kambi`.`bet`", + "viewLanguage": "SQL" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:c45a3b960d7503abfb5549f583eb0517" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + 
"time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -1067,12 +874,35 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "containerProperties", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "main" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main", + "name": "main", + "description": "Main catalog (auto-created)" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "status", + "aspect": { + "json": { + "removed": false } }, "systemMetadata": { @@ -1083,14 +913,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "removed": false } }, "systemMetadata": { @@ -1101,21 +929,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -1126,12 +945,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + "container": "urn:li:container:21058fb6993a790a4a43727021e52956" } }, "systemMetadata": { @@ -1142,12 +961,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "browsePathsV2", "aspect": { "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "path": [] } }, "systemMetadata": { @@ -1158,37 +977,54 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "schemaMetadata", "aspect": { "json": { - "customProperties": { - "storage_location": "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", - "data_source_format": "DELTA", - "table_type": "HIVE_MANAGED_TABLE", - "Catalog": "hive_metastore", - "Database": "bronze_kambi", - "Table": "bet", 
- "Last Access": "UNKNOWN", - "Created By": "Spark 3.2.1", - "Statistics": "1024 bytes, 3 rows", - "Owner": "root", - "Is_managed_location": "true", - "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", - "table_id": "acryl_metastore.hive_metastore.bronze_kambi.bet", - "created_at": "2022-06-22 05:14:56" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", - "name": "bet", - "qualifiedName": "hive_metastore.bronze_kambi.bet", + "schemaName": "main.quickstart_schema.quickstart_table", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, "created": { - "time": 1655874896000 + "time": 0, + "actor": "urn:li:corpuser:unknown" }, "lastModified": { - "time": 1655874896000 + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "tags": [] + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "columnA", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "columnB", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + } + ] } }, "systemMetadata": { @@ -1198,14 +1034,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Table" + "Catalog" ] } }, @@ -1216,21 +1052,14 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:5ada0a9773235325e506410c512feabb", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "subTypes", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - } + "typeNames": [ + "Table" ] } }, @@ -1241,26 +1070,22 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:045573d60442121f01b8d66a3eb95622", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202", - "urn": "urn:li:container:6d6f608f9f945f2862d99b855bdd3202" - }, + "owners": [ { - "id": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e", - "urn": "urn:li:container:1b3927f927ada651ce5fe3fb84227f8e" + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -1271,12 +1096,12 @@ }, { "entityType": "dataset", - 
"entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.hive_metastore.bronze_kambi.bet", + "schemaName": "hive_metastore.bronze_kambi.bet", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -1705,12 +1530,111 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586", + "urn": "urn:li:container:d91b261e5da1bf1434c6318b8c2ac586" + }, + { + "id": "urn:li:container:21058fb6993a790a4a43727021e52956", + "urn": "urn:li:container:21058fb6993a790a4a43727021e52956" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": { + "storage_location": "dbfs:/user/hive/warehouse/bronze_kambi.db/bet", + "data_source_format": "DELTA", + "table_type": "HIVE_MANAGED_TABLE", + "Catalog": "hive_metastore", + "Database": "bronze_kambi", + "Table": "bet", + "Last Access": "UNKNOWN", + "Created By": "Spark 3.2.1", + "Statistics": "1024 bytes, 3 rows", + "Owner": "root", + "Is_managed_location": "true", + "Table Properties": "[delta.autoOptimize.autoCompact=true,delta.autoOptimize.optimizeWrite=true,delta.minReaderVersion=1,delta.minWriterVersion=2]", + "table_id": "hive_metastore.bronze_kambi.bet", + "created_at": "2022-06-22 05:14:56" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/hive_metastore/bronze_kambi/bet", + "name": "bet", + "qualifiedName": "hive_metastore.bronze_kambi.bet", + "created": { + "time": 1655874896000 + }, + "lastModified": { + "time": 1655874896000 + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "browsePathsV2", + "aspect": { + "json": { + "path": [ + { + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" + }, + { + "id": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", + "urn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517" + } + ] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProfile", "aspect": { "json": { - "container": "urn:li:container:5ada0a9773235325e506410c512feabb" + "timestampMillis": 
1705308660403, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -1721,7 +1645,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -1744,9 +1668,9 @@ "updated_at": "2022-10-19 13:27:29.633000+00:00", "created_at": "2022-10-19 13:21:38.688000+00:00" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", "name": "quickstart_table", - "qualifiedName": "main.default.quickstart_table", + "qualifiedName": "quickstart_catalog.default.quickstart_table", "created": { "time": 1666185698688, "actor": "urn:li:corpuser:abc@acryl.io" @@ -1766,14 +1690,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "container", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "container": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc" } }, "systemMetadata": { @@ -1784,53 +1706,13 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "subTypes", "aspect": { "json": { - "schemaName": "acryl_metastore.main.default.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - } + "typeNames": [ + "Table" ] } }, @@ -1841,22 +1723,21 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "containerProperties", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "main", + "unity_schema": "default" + }, + 
"externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -1867,50 +1748,21 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - }, + "owners": [ { - "id": "urn:li:container:5ada0a9773235325e506410c512feabb", - "urn": "urn:li:container:5ada0a9773235325e506410c512feabb" + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "containerProperties", - "aspect": { - "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "main", - "unity_schema": "quickstart_schema" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -1921,7 +1773,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -1937,7 +1789,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -1952,15 +1804,13 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "removed": false } }, "systemMetadata": { @@ -1970,38 +1820,55 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "schemaMetadata", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], + "schemaName": "quickstart_catalog.default.quickstart_table", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + }, "lastModified": { "time": 0, "actor": "urn:li:corpuser:unknown" - } - } 
- }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + }, + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "columnA", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "columnB", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + } + ] } }, "systemMetadata": { @@ -2012,19 +1879,15 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:481380c5a355638fc626eca8380cdda9", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" } ] } @@ -2037,12 +1900,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "container": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e" } }, "systemMetadata": { @@ -2052,42 +1915,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "main.quickstart_schema.quickstart_table", - "created": { - "time": 1666185698688, - "actor": 
"urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -2098,7 +1934,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -2116,12 +1952,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.main.quickstart_schema.quickstart_table", + "schemaName": "main.default.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -2174,21 +2010,66 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "datasetProperties", + "aspect": { + "json": { + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/default/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "main.default.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "lastModified": { + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "browsePathsV2", "aspect": { "json": { - "owners": [ + "path": [ { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" + }, + { + "id": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", + "urn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e" } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + ] } }, "systemMetadata": { @@ -2199,25 +2080,21 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:83d98e62e36bddc3596c2b738e23b596", - "urn": "urn:li:container:83d98e62e36bddc3596c2b738e23b596" - }, + "owners": [ { - "id": "urn:li:container:481380c5a355638fc626eca8380cdda9", - "urn": "urn:li:container:481380c5a355638fc626eca8380cdda9" + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -2228,7 +2105,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2236,7 +2113,6 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore", "catalog": "quickstart_catalog" }, "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog", @@ -2251,13 +2127,22 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" + }, + { + "id": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", + "urn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc" + } + ] } }, "systemMetadata": { @@ -2268,12 +2153,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "status", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "removed": false } }, "systemMetadata": { @@ -2284,14 +2169,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "typeNames": [ - "Catalog" - ] + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2302,14 +2185,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:abc@acryl.io", "type": "DATAOWNER" } ], @@ -2327,33 +2210,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": 
"urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "container", - "entityUrn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - } - ] + "path": [] } }, "systemMetadata": { @@ -2364,21 +2226,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "container", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog", - "unity_schema": "default" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", - "name": "default", - "description": "Default schema (auto-created)" + "container": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } }, "systemMetadata": { @@ -2389,12 +2242,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "subTypes", "aspect": { "json": { - "removed": false + "typeNames": [ + "Catalog" + ] } }, "systemMetadata": { @@ -2405,12 +2260,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:d6f502d4d8165f68d5b594ab4cb2171e", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "container", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "container": "urn:li:container:045573d60442121f01b8d66a3eb95622" } }, "systemMetadata": { @@ -2421,14 +2276,21 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:730e95cd0271453376b3c1d9623838d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "ownership", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -2439,14 +2301,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:abc@acryl.io", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -2464,12 +2326,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "subTypes", "aspect": { "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -2480,21 +2344,12 @@ }, { "entityType": 
"container", - "entityUrn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - } - ] + "removed": false } }, "systemMetadata": { @@ -2504,13 +2359,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "container": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2520,60 +2375,21 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "containerProperties", "aspect": { "json": { "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.default.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" + "platform": "databricks", + "env": "PROD", + "catalog": "system", + "unity_schema": "quickstart_schema" }, - "tags": [] - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "subTypes", - "aspect": { - "json": { - "typeNames": [ - "Table" - ] + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -2583,53 +2399,16 @@ } }, { - "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:a1123d3ed81951784140565f5085b96d", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "browsePathsV2", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.default.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, + "path": [ { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } ] } @@ -2641,15 +2420,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:account users", + "owner": "urn:li:corpuser:Service Principal 2", "type": "DATAOWNER" } ], @@ -2666,25 +2445,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "subTypes", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90", - "urn": "urn:li:container:ce568b660cba2e1a4e811b010ac27f90" - } + "typeNames": [ + "Catalog" ] } }, @@ -2696,7 +2464,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "containerProperties", "aspect": { @@ -2704,11 +2472,10 @@ "customProperties": { "platform": "databricks", "env": "PROD", - "metastore": "acryl metastore", - "catalog": "quickstart_catalog", + "catalog": "main", "unity_schema": "quickstart_schema" }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/main/quickstart_schema", "name": "quickstart_schema", "description": "A new Unity Catalog schema called quickstart_schema" } @@ -2721,7 +2488,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": 
"urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -2737,12 +2504,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "status", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "removed": false } }, "systemMetadata": { @@ -2753,14 +2520,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2771,21 +2536,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2796,12 +2552,19 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "containerProperties", "aspect": { "json": { - "container": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "system" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", + "name": "system", + "description": "System catalog (auto-created)" } }, "systemMetadata": { @@ -2812,19 +2575,15 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:47a033e31b92a120f08f297c05d286f1", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" + "id": "urn:li:container:045573d60442121f01b8d66a3eb95622", + "urn": "urn:li:container:045573d60442121f01b8d66a3eb95622" } ] } @@ -2836,58 +2595,21 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "container", - "aspect": { - "json": { - "container": "urn:li:container:47a033e31b92a120f08f297c05d286f1" - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": 
"UPSERT", - "aspectName": "datasetProperties", + "aspectName": "containerProperties", "aspect": { "json": { "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", - "name": "quickstart_table", - "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" + "platform": "databricks", + "env": "PROD", + "catalog": "quickstart_catalog", + "unity_schema": "quickstart_schema" }, - "tags": [] + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema", + "name": "quickstart_schema", + "description": "A new Unity Catalog schema called quickstart_schema" } }, "systemMetadata": { @@ -2897,14 +2619,14 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { "json": { "typeNames": [ - "Table" + "Schema" ] } }, @@ -2915,55 +2637,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "schemaMetadata", + "aspectName": "status", "aspect": { "json": { - "schemaName": "acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table", - "platform": "urn:li:dataPlatform:databricks", - "version": 0, - "created": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - }, - "hash": "", - "platformSchema": { - "com.linkedin.schema.MySqlDDL": { - "tableSchema": "" - } - }, - "fields": [ - { - "fieldPath": "columnA", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.NumberType": {} - } - }, - "nativeDataType": "int", - "recursive": false, - "isPartOfKey": false - }, - { - "fieldPath": "columnB", - "nullable": true, - "type": { - "type": { - "com.linkedin.schema.StringType": {} - } - }, - "nativeDataType": "string", - "recursive": false, - "isPartOfKey": false - } - ] + "removed": false } }, "systemMetadata": { @@ -2973,22 +2653,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": 
"container", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "dataPlatformInstance", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "platform": "urn:li:dataPlatform:databricks" } }, "systemMetadata": { @@ -2998,26 +2669,13 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965", - "urn": "urn:li:container:079ede9d4f0640985a8ccf8eb180e965" - }, - { - "id": "urn:li:container:47a033e31b92a120f08f297c05d286f1", - "urn": "urn:li:container:47a033e31b92a120f08f297c05d286f1" - } - ] + "path": [] } }, "systemMetadata": { @@ -3028,20 +2686,21 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "ownership", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "system" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system", - "name": "system", - "description": "System catalog (auto-created)" + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3052,12 +2711,17 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" + } + ] } }, "systemMetadata": { @@ -3068,12 +2732,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "dataPlatformInstance", + "aspectName": "subTypes", "aspect": { "json": { - "platform": "urn:li:dataPlatform:databricks" + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -3084,14 +2750,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "container", "aspect": { "json": { - "typeNames": [ - "Catalog" - ] + "container": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } }, "systemMetadata": { @@ -3102,14 +2766,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": 
"UPSERT", "aspectName": "ownership", "aspect": { "json": { "owners": [ { - "owner": "urn:li:corpuser:Service Principal 2", + "owner": "urn:li:corpuser:account users", "type": "DATAOWNER" } ], @@ -3127,12 +2791,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:c45a3b960d7503abfb5549f583eb0517", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "container": "urn:li:container:045573d60442121f01b8d66a3eb95622" } }, "systemMetadata": { @@ -3143,17 +2807,37 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", + "entityUrn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "container", "aspect": { "json": { - "path": [ + "container": "urn:li:container:730e95cd0271453376b3c1d9623838d6" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "container", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "owner": "urn:li:corpuser:abc@acryl.io", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3164,21 +2848,14 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "system", - "unity_schema": "default" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default", - "name": "default", - "description": "Default schema (auto-created)" + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -3189,7 +2866,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3205,7 +2882,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -3221,14 +2898,20 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "containerProperties", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "system", + "unity_schema": "default" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -3239,21 +2922,17 @@ }, { "entityType": "container", - "entityUrn": 
"urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "browsePathsV2", "aspect": { "json": { - "owners": [ + "path": [ { - "owner": "urn:li:corpuser:abc@acryl.io", - "type": "DATAOWNER" + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + ] } }, "systemMetadata": { @@ -3264,12 +2943,12 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "container": "urn:li:container:730e95cd0271453376b3c1d9623838d6" } }, "systemMetadata": { @@ -3280,21 +2959,21 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:b330768923270ff5450695bee1c94247", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "ownership", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, + "owners": [ { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "owner": "urn:li:corpuser:abc@acryl.io", + "type": "DATAOWNER" } - ] + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3305,12 +2984,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "container", "aspect": { "json": { - "container": "urn:li:container:b330768923270ff5450695bee1c94247" + "container": "urn:li:container:934b6043df189ef6dc63ac3519be34ac" } }, "systemMetadata": { @@ -3320,42 +2999,15 @@ } }, { - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityType": "container", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "datasetProperties", + "aspectName": "subTypes", "aspect": { "json": { - "customProperties": { - "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", - "data_source_format": "DELTA", - "generation": "2", - "table_type": "MANAGED", - "created_by": "abc@acryl.io", - "delta.lastCommitTimestamp": "1666185711000", - "delta.lastUpdateVersion": "1", - "delta.minReaderVersion": "1", - "delta.minWriterVersion": "2", - "spark.sql.statistics.numRows": "10", - "spark.sql.statistics.totalSize": "512", - "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", - "owner": "account users", - "updated_by": "abc@acryl.io", - "updated_at": "2022-10-19 13:27:29.633000+00:00", - "created_at": "2022-10-19 13:21:38.688000+00:00" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", - "name": "quickstart_table", - 
"qualifiedName": "system.default.quickstart_table", - "created": { - "time": 1666185698688, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "typeNames": [ + "Schema" + ] } }, "systemMetadata": { @@ -3366,7 +3018,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "subTypes", "aspect": { @@ -3384,12 +3036,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.system.default.quickstart_table", + "schemaName": "quickstart_catalog.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -3442,21 +3094,41 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "datasetProperties", "aspect": { "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/quickstart_schema/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "quickstart_catalog.quickstart_schema.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] } }, "systemMetadata": { @@ -3467,23 +3139,19 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "id": 
"urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" - }, - { - "id": "urn:li:container:b330768923270ff5450695bee1c94247", - "urn": "urn:li:container:b330768923270ff5450695bee1c94247" + "id": "urn:li:container:934b6043df189ef6dc63ac3519be34ac", + "urn": "urn:li:container:934b6043df189ef6dc63ac3519be34ac" } ] } @@ -3495,22 +3163,22 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "containerProperties", + "aspectName": "ownership", "aspect": { "json": { - "customProperties": { - "platform": "databricks", - "env": "PROD", - "metastore": "acryl metastore", - "catalog": "system", - "unity_schema": "quickstart_schema" - }, - "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/quickstart_schema", - "name": "quickstart_schema", - "description": "A new Unity Catalog schema called quickstart_schema" + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3521,7 +3189,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -3537,7 +3205,7 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", "aspectName": "dataPlatformInstance", "aspect": { @@ -3553,14 +3221,20 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "containerProperties", "aspect": { "json": { - "typeNames": [ - "Schema" - ] + "customProperties": { + "platform": "databricks", + "env": "PROD", + "catalog": "quickstart_catalog", + "unity_schema": "default" + }, + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/quickstart_catalog/default", + "name": "default", + "description": "Default schema (auto-created)" } }, "systemMetadata": { @@ -3571,21 +3245,17 @@ }, { "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityUrn": "urn:li:container:39e66cf95f60943f217d96142ffd9ddc", "changeType": "UPSERT", - "aspectName": "ownership", + "aspectName": "browsePathsV2", "aspect": { "json": { - "owners": [ + "path": [ { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" + "id": "urn:li:container:730e95cd0271453376b3c1d9623838d6", + "urn": "urn:li:container:730e95cd0271453376b3c1d9623838d6" } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } + ] } }, "systemMetadata": { @@ -3595,13 +3265,36 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": 
"container", + "aspectName": "datasetProfile", "aspect": { "json": { - "container": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" + "timestampMillis": 1705308660402, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 3, + "columnCount": 3, + "fieldProfiles": [ + { + "fieldPath": "betStatusId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + }, + { + "fieldPath": "channelId", + "uniqueCount": 1, + "uniqueProportion": 0.3333333333333333, + "nullCount": 0, + "nullProportion": 0.0 + } + ], + "sizeInBytes": 1024 } }, "systemMetadata": { @@ -3611,22 +3304,13 @@ } }, { - "entityType": "container", - "entityUrn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)", "changeType": "UPSERT", - "aspectName": "browsePathsV2", + "aspectName": "status", "aspect": { "json": { - "path": [ - { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" - }, - { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" - } - ] + "removed": false } }, "systemMetadata": { @@ -3637,12 +3321,20 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "container", + "aspectName": "datasetProfile", "aspect": { "json": { - "container": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1" + "timestampMillis": 1705308660401, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -3653,7 +3345,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProperties", "aspect": { @@ -3684,10 +3376,69 @@ "actor": "urn:li:corpuser:abc@acryl.io" }, "lastModified": { - "time": 1666186049633, - "actor": "urn:li:corpuser:abc@acryl.io" - }, - "tags": [] + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "container", + "aspect": { + "json": { + "container": "urn:li:container:a1123d3ed81951784140565f5085b96d" + } + }, + "systemMetadata": { + "lastObserved": 1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "subTypes", + "aspect": { + "json": { + "typeNames": [ + "Table" + ] + } + }, + "systemMetadata": { + "lastObserved": 
1638860400000, + "runId": "unity-catalog-test", + "lastRunId": "no-run-id-provided" + } +}, +{ + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", + "changeType": "UPSERT", + "aspectName": "ownership", + "aspect": { + "json": { + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3698,14 +3449,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "subTypes", + "aspectName": "status", "aspect": { "json": { - "typeNames": [ - "Table" - ] + "removed": false } }, "systemMetadata": { @@ -3716,12 +3465,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "schemaMetadata", "aspect": { "json": { - "schemaName": "acryl_metastore.system.quickstart_schema.quickstart_table", + "schemaName": "system.quickstart_schema.quickstart_table", "platform": "urn:li:dataPlatform:databricks", "version": 0, "created": { @@ -3774,48 +3523,19 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "ownership", - "aspect": { - "json": { - "owners": [ - { - "owner": "urn:li:corpuser:account users", - "type": "DATAOWNER" - } - ], - "lastModified": { - "time": 0, - "actor": "urn:li:corpuser:unknown" - } - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "browsePathsV2", "aspect": { "json": { "path": [ { - "id": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb", - "urn": "urn:li:container:22ec33be0e53ba3e61bb6c4ad58f6ffb" + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" }, { - "id": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9", - "urn": "urn:li:container:5f7e6ee26826ba56e6d1d0b94f291fa9" - }, - { - "id": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1", - "urn": "urn:li:container:bd1cfbc8ea4c94a6f8c9b1ddba52a8c1" + "id": "urn:li:container:a1123d3ed81951784140565f5085b96d", + "urn": "urn:li:container:a1123d3ed81951784140565f5085b96d" } ] } @@ -3828,12 +3548,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProfile", "aspect": { "json": { - "timestampMillis": 1703580920011, + 
"timestampMillis": 1705308660401, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -3852,35 +3572,41 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "datasetProperties", "aspect": { "json": { - "timestampMillis": 1703581191932, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "customProperties": { + "storage_location": "s3://db-02eec1f70bfe4115445be9fdb1aac6ac-s3-root-bucket/metastore/2c983545-d403-4f87-9063-5b7e3b6d3736/tables/cff27aa1-1c6a-4d78-b713-562c660c2896", + "data_source_format": "DELTA", + "generation": "2", + "table_type": "MANAGED", + "created_by": "abc@acryl.io", + "delta.lastCommitTimestamp": "1666185711000", + "delta.lastUpdateVersion": "1", + "delta.minReaderVersion": "1", + "delta.minWriterVersion": "2", + "spark.sql.statistics.numRows": "10", + "spark.sql.statistics.totalSize": "512", + "table_id": "cff27aa1-1c6a-4d78-b713-562c660c2896", + "owner": "account users", + "updated_by": "abc@acryl.io", + "updated_at": "2022-10-19 13:27:29.633000+00:00", + "created_at": "2022-10-19 13:21:38.688000+00:00" }, - "rowCount": 3, - "columnCount": 3, - "fieldProfiles": [ - { - "fieldPath": "betStatusId", - "uniqueCount": 1, - "uniqueProportion": 0.3333333333333333, - "nullCount": 0, - "nullProportion": 0.0 - }, - { - "fieldPath": "channelId", - "uniqueCount": 1, - "uniqueProportion": 0.3333333333333333, - "nullCount": 0, - "nullProportion": 0.0 - } - ], - "sizeInBytes": 1024 + "externalUrl": "https://dummy.cloud.databricks.com/explore/data/system/default/quickstart_table", + "name": "quickstart_table", + "qualifiedName": "system.default.quickstart_table", + "created": { + "time": 1666185698688, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "lastModified": { + "time": 1666186049633, + "actor": "urn:li:corpuser:abc@acryl.io" + }, + "tags": [] } }, "systemMetadata": { @@ -3891,12 +3617,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "container", "aspect": { "json": { - "removed": false + "container": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6" } }, "systemMetadata": { @@ -3907,20 +3633,14 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "subTypes", "aspect": { "json": { - "timestampMillis": 1703580406273, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 + "typeNames": [ + "Table" + ] } }, "systemMetadata": { @@ -3931,12 +3651,21 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "ownership", "aspect": { "json": { - "removed": false + "owners": [ + { + "owner": "urn:li:corpuser:account users", + "type": "DATAOWNER" + } + ], + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" + } } }, "systemMetadata": { @@ -3947,20 +3676,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "status", "aspect": { "json": { - "timestampMillis": 1703580920008, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" - }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 + "removed": false } }, "systemMetadata": { @@ -3971,60 +3692,54 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "datasetProfile", + "aspectName": "schemaMetadata", "aspect": { "json": { - "timestampMillis": 1703580920011, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "schemaName": "system.default.quickstart_table", + "platform": "urn:li:dataPlatform:databricks", + "version": 0, + "created": { + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.main.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "datasetProfile", - "aspect": { - "json": { - "timestampMillis": 1703580920012, - "partitionSpec": { - "type": "FULL_TABLE", - "partition": "FULL_TABLE_SNAPSHOT" + "lastModified": { + "time": 0, + "actor": "urn:li:corpuser:unknown" }, - "rowCount": 10, - "columnCount": 2, - "fieldProfiles": [], - "sizeInBytes": 512 - } - }, - "systemMetadata": { - "lastObserved": 1638860400000, - "runId": "unity-catalog-test", - "lastRunId": "no-run-id-provided" - } -}, -{ - "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.default.quickstart_table,PROD)", - "changeType": "UPSERT", - "aspectName": "status", - "aspect": { - "json": { - "removed": false + "hash": "", + "platformSchema": { + "com.linkedin.schema.MySqlDDL": { + "tableSchema": "" + } + }, + "fields": [ + { + "fieldPath": "columnA", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.NumberType": {} + } + }, + "nativeDataType": "int", + "recursive": false, + "isPartOfKey": false + }, + { + "fieldPath": "columnB", + "nullable": true, + "type": { + "type": { + "com.linkedin.schema.StringType": {} + } + }, + "nativeDataType": "string", + "recursive": false, + "isPartOfKey": false + } + ] } }, "systemMetadata": { @@ -4035,12 +3750,21 @@ }, { "entityType": "dataset", - "entityUrn": 
"urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.quickstart_catalog.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.default.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "browsePathsV2", "aspect": { "json": { - "removed": false + "path": [ + { + "id": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4", + "urn": "urn:li:container:94af9873ac56d46a4bcc995836dc15d4" + }, + { + "id": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6", + "urn": "urn:li:container:b84ba0cc36e3b2636942fc34ec5724d6" + } + ] } }, "systemMetadata": { @@ -4051,12 +3775,12 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "datasetProfile", "aspect": { "json": { - "timestampMillis": 1703580920010, + "timestampMillis": 1705308660402, "partitionSpec": { "type": "FULL_TABLE", "partition": "FULL_TABLE_SNAPSHOT" @@ -4075,7 +3799,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.default.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,main.default.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4091,7 +3815,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.system.quickstart_schema.quickstart_table,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { @@ -4107,12 +3831,20 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.bet,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", - "aspectName": "status", + "aspectName": "datasetProfile", "aspect": { "json": { - "removed": false + "timestampMillis": 1705308660402, + "partitionSpec": { + "type": "FULL_TABLE", + "partition": "FULL_TABLE_SNAPSHOT" + }, + "rowCount": 10, + "columnCount": 2, + "fieldProfiles": [], + "sizeInBytes": 512 } }, "systemMetadata": { @@ -4123,7 +3855,7 @@ }, { "entityType": "dataset", - "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,dummy.acryl_metastore.hive_metastore.bronze_kambi.view1,PROD)", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,quickstart_catalog.quickstart_schema.quickstart_table,PROD)", "changeType": "UPSERT", "aspectName": "status", "aspect": { diff --git a/metadata-ingestion/tests/unit/test_unity_catalog_config.py b/metadata-ingestion/tests/unit/test_unity_catalog_config.py index 3c0994cde7889..6b97d06b7ff93 100644 --- a/metadata-ingestion/tests/unit/test_unity_catalog_config.py +++ b/metadata-ingestion/tests/unit/test_unity_catalog_config.py @@ -15,6 +15,7 @@ def test_within_thirty_days(): "token": "token", "workspace_url": "https://workspace_url", "include_usage_statistics": True, + "include_hive_metastore": False, "start_time": FROZEN_TIME - timedelta(days=30), } ) @@ -38,6 +39,7 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", + 
"include_hive_metastore": False, "profiling": { "enabled": True, "method": "ge", @@ -51,6 +53,7 @@ def test_profiling_requires_warehouses_id(): { "token": "token", "workspace_url": "https://workspace_url", + "include_hive_metastore": False, "profiling": {"enabled": False, "method": "ge"}, } ) @@ -60,6 +63,7 @@ def test_profiling_requires_warehouses_id(): UnityCatalogSourceConfig.parse_obj( { "token": "token", + "include_hive_metastore": False, "workspace_url": "workspace_url", } ) From 2b744fac7f9856dc84806f7716397edb263542a2 Mon Sep 17 00:00:00 2001 From: Yang Jiandan Date: Fri, 19 Jan 2024 11:40:56 +0800 Subject: [PATCH 237/263] fix(docker):The datahub-frontend service failed to start when executing dev.sh #7616 (#7618) Co-authored-by: yangjd33 Co-authored-by: RyanHolstien Co-authored-by: david-leifker <114954101+david-leifker@users.noreply.github.com> --- docker/datahub-frontend/Dockerfile | 9 ++++----- docker/docker-compose.dev.yml | 1 + 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 17d691177aa34..5563fd6350e20 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -15,10 +15,13 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # Upgrade Alpine and base packages # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 +ENV JMX_VERSION=0.18.0 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ - && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ + && wget ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar ENV LD_LIBRARY_PATH="/lib:/lib64" @@ -32,10 +35,6 @@ RUN unzip datahub-frontend.zip -d /datahub-frontend \ COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend -ENV JMX_VERSION=0.18.0 -RUN wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ - && wget ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar - FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
# See this excellent thread https://github.com/docker/cli/issues/1134
diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml
index 7067b68fba3f9..23ac821670e44 100644
--- a/docker/docker-compose.dev.yml
+++ b/docker/docker-compose.dev.yml
@@ -25,6 +25,7 @@ services:
       - DATAHUB_ANALYTICS_ENABLED=${DATAHUB_ANALYTICS_ENABLED:-true}
     volumes:
       - ../datahub-frontend/build/stage/main:/datahub-frontend
+      - ./monitoring/client-prometheus-config.yaml:/datahub-frontend/client-prometheus-config.yaml
   datahub-gms:
     image: linkedin/datahub-gms:debug
     ports:

From 3682c5f1d03c673f72215bd335b17ecacbc33afb Mon Sep 17 00:00:00 2001
From: Indy Prentice
Date: Thu, 18 Jan 2024 21:41:08 -0600
Subject: [PATCH 238/263] feat(openapi): Implement getIndexSizes function from
 rest.li in openapi (#8730)

Co-authored-by: Indy Prentice
Co-authored-by: David Leifker
---
 .../elastic/OperationsController.java | 39 +++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java
index f7c848f91a64c..777d65d517b81 100644
--- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java
+++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java
@@ -9,9 +9,12 @@
 import com.google.common.collect.ImmutableList;
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.systemmetadata.SystemMetadataService;
+import com.linkedin.metadata.timeseries.TimeseriesAspectService;
+import com.linkedin.timeseries.TimeseriesIndexSizeResult;
 import io.datahubproject.openapi.util.ElasticsearchUtils;
 import io.swagger.v3.oas.annotations.tags.Tag;
 import java.util.List;
+import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.json.JSONObject;
 import org.opensearch.client.tasks.GetTaskResponse;
@@ -44,6 +47,10 @@ public class OperationsController {
   @Qualifier("elasticSearchSystemMetadataService")
   private SystemMetadataService _systemMetadataService;

+  @Autowired
+  @Qualifier("timeseriesAspectService")
+  private TimeseriesAspectService _timeseriesAspectService;
+
   public OperationsController(AuthorizerChain authorizerChain) {
     _authorizerChain = authorizerChain;
   }
@@ -91,4 +98,36 @@ public ResponseEntity<String> getTaskStatus(String task) {
     j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos());
     return ResponseEntity.ok(j.toString());
   }
+
+  @GetMapping(path = "/getIndexSizes", produces = MediaType.APPLICATION_JSON_VALUE)
+  public ResponseEntity<String> getIndexSizes() {
+    Authentication authentication = AuthenticationContext.getAuthentication();
+    String actorUrnStr = authentication.getActor().toUrnStr();
+    DisjunctivePrivilegeGroup orGroup =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(
+                        PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE.getType()))));
+    if (restApiAuthorizationEnabled
+        && !AuthUtil.isAuthorizedForResources(
+            _authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) {
+      return ResponseEntity.status(HttpStatus.FORBIDDEN)
+          .body(String.format(actorUrnStr + " is not authorized to get timeseries index sizes"));
+    }
+    List<TimeseriesIndexSizeResult> indexSizeResults = _timeseriesAspectService.getIndexSizes();
+    JSONObject j = new JSONObject();
+    j.put(
+
"sizes", + indexSizeResults.stream() + .map( + timeseriesIndexSizeResult -> + new JSONObject() + .put("aspectName", timeseriesIndexSizeResult.getAspectName()) + .put("entityName", timeseriesIndexSizeResult.getEntityName()) + .put("indexName", timeseriesIndexSizeResult.getIndexName()) + .put("sizeMb", timeseriesIndexSizeResult.getSizeInMb())) + .collect(Collectors.toList())); + return ResponseEntity.ok(j.toString()); + } } From f993f50a0493111e4cfe85409098d844779292c5 Mon Sep 17 00:00:00 2001 From: Andrew Sikowitz Date: Thu, 18 Jan 2024 23:12:20 -0500 Subject: [PATCH 239/263] feat(ingest/sql-parsing): Support file backed dict in SqlParsingBuilder for lineage (#9654) --- .../datahub/emitter/sql_parsing_builder.py | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py index 046b615bd4e9f..a8fe4f0df83cf 100644 --- a/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py +++ b/metadata-ingestion/src/datahub/emitter/sql_parsing_builder.py @@ -20,6 +20,7 @@ UpstreamClass, UpstreamLineageClass, ) +from datahub.utilities.file_backed_collections import FileBackedDict from datahub.utilities.sqlglot_lineage import ColumnLineageInfo, SqlParsingResult logger = logging.getLogger(__name__) @@ -80,10 +81,10 @@ class SqlParsingBuilder: generate_operations: bool = True usage_config: Optional[BaseUsageConfig] = None - # TODO: Make inner dict a FileBackedDict and make LineageEdge frozen + # Maps downstream urn -> upstream urn -> LineageEdge # Builds up a single LineageEdge for each upstream -> downstream pair - _lineage_map: Dict[DatasetUrn, Dict[DatasetUrn, LineageEdge]] = field( - default_factory=lambda: defaultdict(dict), init=False + _lineage_map: FileBackedDict[Dict[DatasetUrn, LineageEdge]] = field( + default_factory=FileBackedDict, init=False ) # TODO: Replace with FileBackedDict approach like in BigQuery usage @@ -128,13 +129,14 @@ def process_sql_parsing_result( if self.generate_lineage: for downstream_urn in downstreams_to_ingest: - _merge_lineage_data( + # Set explicitly so that FileBackedDict registers any mutations + self._lineage_map[downstream_urn] = _merge_lineage_data( downstream_urn=downstream_urn, upstream_urns=result.in_tables, column_lineage=result.column_lineage if include_column_lineage else None, - upstream_edges=self._lineage_map[downstream_urn], + upstream_edges=self._lineage_map.get(downstream_urn, {}), query_timestamp=query_timestamp, is_view_ddl=is_view_ddl, user=user, @@ -170,11 +172,12 @@ def add_lineage( user: Optional[UserUrn] = None, ) -> None: """Manually add a single upstream -> downstream lineage edge, e.g. 
if sql parsing fails.""" - _merge_lineage_data( + # Set explicitly so that FileBackedDict registers any mutations + self._lineage_map[downstream_urn] = _merge_lineage_data( downstream_urn=downstream_urn, upstream_urns=upstream_urns, column_lineage=None, - upstream_edges=self._lineage_map[downstream_urn], + upstream_edges=self._lineage_map.get(downstream_urn, {}), query_timestamp=timestamp, is_view_ddl=is_view_ddl, user=user, @@ -225,7 +228,7 @@ def _merge_lineage_data( query_timestamp: Optional[datetime], is_view_ddl: bool, user: Optional[UserUrn], -) -> None: +) -> Dict[str, LineageEdge]: for upstream_urn in upstream_urns: edge = upstream_edges.setdefault( upstream_urn, @@ -255,6 +258,8 @@ def _merge_lineage_data( column_map = upstream_edges[upstream_column_info.table].column_map column_map[cl.downstream.column].add(upstream_column_info.column) + return upstream_edges + def _compute_upstream_fields( result: SqlParsingResult, From 45236a89aa1fcafcc5fef61994c5c4a498ebfe69 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Fri, 19 Jan 2024 15:38:50 +0530 Subject: [PATCH 240/263] feat(posts): add edit support for posts (#9666) --- .../src/app/settings/posts/CreatePostForm.tsx | 11 ++- .../app/settings/posts/CreatePostModal.tsx | 90 ++++++++++++++++--- .../src/app/settings/posts/PostItemMenu.tsx | 8 +- .../src/app/settings/posts/PostsList.tsx | 19 +++- .../app/settings/posts/PostsListColumns.tsx | 11 ++- .../src/graphql/mutations.graphql | 4 + 6 files changed, 125 insertions(+), 18 deletions(-) diff --git a/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx b/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx index a8d6cfa64c9c1..ee7f50a058957 100644 --- a/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx +++ b/datahub-web-react/src/app/settings/posts/CreatePostForm.tsx @@ -1,4 +1,4 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { Form, Input, Typography, FormInstance, Radio } from 'antd'; import styled from 'styled-components'; import { @@ -21,11 +21,18 @@ const SubFormItem = styled(Form.Item)` type Props = { setCreateButtonEnabled: (isEnabled: boolean) => void; form: FormInstance; + contentType: PostContentType; }; -export default function CreatePostForm({ setCreateButtonEnabled, form }: Props) { +export default function CreatePostForm({ setCreateButtonEnabled, form, contentType }: Props) { const [postType, setPostType] = useState(PostContentType.Text); + useEffect(() => { + if (contentType) { + setPostType(contentType); + } + }, [contentType]); + return (
void; onCreate: ( contentType: string, @@ -22,12 +24,27 @@ type Props = { link: string | undefined, location: string | undefined, ) => void; + onEdit: () => void; }; -export default function CreatePostModal({ onClose, onCreate }: Props) { +export default function CreatePostModal({ onClose, onCreate, editData, onEdit }: Props) { const [createPostMutation] = useCreatePostMutation(); + const [updatePostMutation] = useUpdatePostMutation(); const [createButtonEnabled, setCreateButtonEnabled] = useState(false); const [form] = Form.useForm(); + + useEffect(() => { + if (editData) { + form.setFieldsValue({ + description: editData.description, + title: editData.title, + link: editData.link, + location: editData.imageUrl, + type: editData.contentType, + }); + } + }, [editData, form]); + const onCreatePost = () => { const contentTypeValue = form.getFieldValue(TYPE_FIELD_NAME) ?? PostContentType.Text; const mediaValue = @@ -75,33 +92,86 @@ export default function CreatePostModal({ onClose, onCreate }: Props) { onClose(); }; + const onUpdatePost = () => { + const contentTypeValue = form.getFieldValue(TYPE_FIELD_NAME) ?? PostContentType.Text; + const mediaValue = + form.getFieldValue(TYPE_FIELD_NAME) && form.getFieldValue(LOCATION_FIELD_NAME) + ? { + type: MediaType.Image, + location: form.getFieldValue(LOCATION_FIELD_NAME) ?? null, + } + : null; + updatePostMutation({ + variables: { + input: { + urn: editData?.urn, + postType: PostType.HomePageAnnouncement, + content: { + contentType: contentTypeValue, + title: form.getFieldValue(TITLE_FIELD_NAME), + description: form.getFieldValue(DESCRIPTION_FIELD_NAME) ?? null, + link: form.getFieldValue(LINK_FIELD_NAME) ?? null, + media: mediaValue, + }, + }, + }, + }) + .then(({ errors }) => { + if (!errors) { + message.success({ + content: `Updated Post!`, + duration: 3, + }); + onEdit(); + form.resetFields(); + } + }) + .catch((e) => { + message.destroy(); + message.error({ content: 'Failed to update Post! An unknown error occured.', duration: 3 }); + console.error('Failed to update Post:', e.message); + }); + onClose(); + }; + // Handle the Enter press useEnterKeyListener({ querySelectorToExecuteClick: '#createPostButton', }); + const onCloseModal = () => { + form.resetFields(); + onClose(); + }; + + const titleText = editData ? 
'Edit Post' : 'Create new Post'; + return ( - } > - + ); } diff --git a/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx b/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx index e3fc424a47ef2..3708c04ab1ad3 100644 --- a/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx +++ b/datahub-web-react/src/app/settings/posts/PostItemMenu.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import { DeleteOutlined } from '@ant-design/icons'; +import { DeleteOutlined, EditOutlined } from '@ant-design/icons'; import { Dropdown, Menu, message, Modal } from 'antd'; import { MenuIcon } from '../../entity/shared/EntityDropdown/EntityDropdown'; import { useDeletePostMutation } from '../../../graphql/post.generated'; @@ -8,9 +8,10 @@ type Props = { urn: string; title: string; onDelete?: () => void; + onEdit?: () => void; }; -export default function PostItemMenu({ title, urn, onDelete }: Props) { +export default function PostItemMenu({ title, urn, onDelete, onEdit }: Props) { const [deletePostMutation] = useDeletePostMutation(); const deletePost = () => { @@ -53,6 +54,9 @@ export default function PostItemMenu({ title, urn, onDelete }: Props) {  Delete + +  Edit + } > diff --git a/datahub-web-react/src/app/settings/posts/PostsList.tsx b/datahub-web-react/src/app/settings/posts/PostsList.tsx index 849a3765a94b0..b71f06c83c17f 100644 --- a/datahub-web-react/src/app/settings/posts/PostsList.tsx +++ b/datahub-web-react/src/app/settings/posts/PostsList.tsx @@ -51,6 +51,7 @@ export const PostList = () => { const [page, setPage] = useState(1); const [isCreatingPost, setIsCreatingPost] = useState(false); + const [editData, setEditData] = useState(undefined); const pageSize = DEFAULT_PAGE_SIZE; const start = (page - 1) * pageSize; @@ -82,6 +83,16 @@ export const PostList = () => { }, 2000); }; + const handleEdit = (post: PostEntry) => { + setEditData(post); + setIsCreatingPost(true); + }; + + const handleClose = () => { + setEditData(undefined); + setIsCreatingPost(false); + }; + const allColumns = [ { title: 'Title', @@ -113,7 +124,7 @@ export const PostList = () => { width: '5%', align: 'right' as AlignType, key: 'menu', - render: PostListMenuColumn(handleDelete), + render: PostListMenuColumn(handleDelete, handleEdit), }, ]; @@ -123,6 +134,8 @@ export const PostList = () => { title: post.content.title, description: post.content.description, contentType: post.content.contentType, + link: post.content.link, + imageUrl: post.content.media?.location, }; }); @@ -181,7 +194,9 @@ export const PostList = () => { )} {isCreatingPost && ( setIsCreatingPost(false)} + editData={editData as PostEntry} + onClose={handleClose} + onEdit={() => setTimeout(() => refetch(), 2000)} onCreate={(urn, title, description) => { addToListPostCache( client, diff --git a/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx b/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx index 38f910baf8f41..ee680cbec7396 100644 --- a/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx +++ b/datahub-web-react/src/app/settings/posts/PostsListColumns.tsx @@ -9,15 +9,22 @@ export interface PostEntry { contentType: string; description: Maybe; urn: string; + link: string; + imageUrl: string; } const PostText = styled.div<{ minWidth?: number }>` ${(props) => props.minWidth !== undefined && `min-width: ${props.minWidth}px;`} `; -export function PostListMenuColumn(handleDelete: (urn: string) => void) { +export function PostListMenuColumn(handleDelete: (urn: string) => void, handleEdit: (urn: PostEntry) => void) { 
return (record: PostEntry) => ( - handleDelete(record.urn)} /> + handleDelete(record.urn)} + onEdit={() => handleEdit(record)} + /> ); } diff --git a/datahub-web-react/src/graphql/mutations.graphql b/datahub-web-react/src/graphql/mutations.graphql index 439d20810ef7c..077922cee45fb 100644 --- a/datahub-web-react/src/graphql/mutations.graphql +++ b/datahub-web-react/src/graphql/mutations.graphql @@ -120,6 +120,10 @@ mutation createPost($input: CreatePostInput!) { createPost(input: $input) } +mutation updatePost($input: UpdatePostInput!) { + updatePost(input: $input) +} + mutation updateLineage($input: UpdateLineageInput!) { updateLineage(input: $input) } From 4138b2f72442a72d84d9b12fac04abf8144ba1cf Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Fri, 19 Jan 2024 11:48:12 +0000 Subject: [PATCH 241/263] feat(roles): Add support for roles in groups in GMS (#9659) Co-authored-by: Aseem Bansal --- .../resolvers/group/CreateGroupResolver.java | 3 +- .../com/linkedin/identity/RoleMembership.pdl | 2 +- .../src/main/resources/entity-registry.yml | 1 + .../datahub/authorization/PolicyEngine.java | 75 +++++++++++++++++-- .../authorization/DataHubAuthorizerTest.java | 73 +++++++++++++++--- .../authorization/PolicyEngineTest.java | 12 ++- .../tests/privileges/test_privileges.py | 62 ++++++++++++++- smoke-test/tests/privileges/utils.py | 61 +++++++++++++++ 8 files changed, 266 insertions(+), 23 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index e487ee00608d4..cde7d4958a25b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -43,9 +43,10 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Create the Group key. final CorpGroupKey key = new CorpGroupKey(); final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + final String description = input.getDescription() != null ? input.getDescription() : ""; key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". return _groupService.createNativeGroup( - key, input.getName(), input.getDescription(), authentication); + key, input.getName(), description, authentication); } catch (Exception e) { throw new RuntimeException("Failed to create group", e); } diff --git a/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl b/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl index ec1e472545c1c..2f0ebbb58d05f 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/identity/RoleMembership.pdl @@ -3,7 +3,7 @@ namespace com.linkedin.identity import com.linkedin.common.Urn /** - * Carries information about which roles a user is assigned to. + * Carries information about which roles a user or group is assigned to. 
*/
@Aspect = {
  "name": "roleMembership"
diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml
index f275d41e786c2..9d8c4bfdab0da 100644
--- a/metadata-models/src/main/resources/entity-registry.yml
+++ b/metadata-models/src/main/resources/entity-registry.yml
@@ -184,6 +184,7 @@ entities:
       - ownership
       - status
       - origin
+      - roleMembership
   - name: domain
     doc: A data domain within an organization.
     category: core
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java
index 123e5f3c55932..f078d2d316cae 100644
--- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java
+++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java
@@ -3,8 +3,10 @@
 import static com.linkedin.metadata.Constants.*;

 import com.datahub.authentication.Authentication;
+import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.Owner;
 import com.linkedin.common.Ownership;
+import com.linkedin.common.UrnArray;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.StringArray;
@@ -12,6 +14,8 @@
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.entity.client.EntityClient;
+import com.linkedin.identity.GroupMembership;
+import com.linkedin.identity.NativeGroupMembership;
 import com.linkedin.identity.RoleMembership;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.authorization.PoliciesConfig;
@@ -26,6 +30,7 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
@@ -393,7 +398,6 @@ private Set<Urn> resolveRoles(
     Set<Urn> roles = new HashSet<>();

     final EnvelopedAspectMap aspectMap;
-
     try {
       Urn actorUrn = Urn.createFromString(actor);
       final EntityResponse corpUser =
@@ -401,7 +405,10 @@
               .batchGetV2(
                   CORP_USER_ENTITY_NAME,
                   Collections.singleton(actorUrn),
-                  Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME),
+                  ImmutableSet.of(
+                      ROLE_MEMBERSHIP_ASPECT_NAME,
+                      GROUP_MEMBERSHIP_ASPECT_NAME,
+                      NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME),
                   _systemAuthentication)
               .get(actorUrn);
       if (corpUser == null || !corpUser.hasAspects()) {
@@ -414,19 +421,71 @@
       return roles;
     }

-    if (!aspectMap.containsKey(ROLE_MEMBERSHIP_ASPECT_NAME)) {
-      return roles;
+    if (aspectMap.containsKey(ROLE_MEMBERSHIP_ASPECT_NAME)) {
+      RoleMembership roleMembership =
+          new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data());
+      if (roleMembership.hasRoles()) {
+        roles.addAll(roleMembership.getRoles());
+      }
     }

-    RoleMembership roleMembership =
-        new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data());
-    if (roleMembership.hasRoles()) {
-      roles.addAll(roleMembership.getRoles());
+    List<Urn> groups = new ArrayList<>();
+    if (aspectMap.containsKey(GROUP_MEMBERSHIP_ASPECT_NAME)) {
+      GroupMembership groupMembership =
+          new GroupMembership(aspectMap.get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data());
+      groups.addAll(groupMembership.getGroups());
+    }
+    if (aspectMap.containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) {
+      NativeGroupMembership nativeGroupMembership =
+          new NativeGroupMembership(
+              aspectMap.get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data());
+
groups.addAll(nativeGroupMembership.getNativeGroups());
+    }
+    if (!groups.isEmpty()) {
+      GroupMembership memberships = new GroupMembership();
+      memberships.setGroups(new UrnArray(groups));
+      roles.addAll(getRolesFromGroups(memberships));
+    }
+
+    if (!roles.isEmpty()) {
       context.setRoles(roles);
     }
+
     return roles;
   }

+  private Set<Urn> getRolesFromGroups(final GroupMembership groupMembership) {
+
+    HashSet<Urn> groups = new HashSet<>(groupMembership.getGroups());
+    try {
+      Map<Urn, EntityResponse> responseMap =
+          _entityClient.batchGetV2(
+              CORP_GROUP_ENTITY_NAME,
+              groups,
+              ImmutableSet.of(ROLE_MEMBERSHIP_ASPECT_NAME),
+              _systemAuthentication);
+
+      return responseMap.keySet().stream()
+          .filter(Objects::nonNull)
+          .filter(key -> responseMap.get(key) != null)
+          .filter(key -> responseMap.get(key).hasAspects())
+          .map(key -> responseMap.get(key).getAspects())
+          .filter(aspectMap -> aspectMap.containsKey(ROLE_MEMBERSHIP_ASPECT_NAME))
+          .map(
+              aspectMap ->
+                  new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()))
+          .filter(RoleMembership::hasRoles)
+          .map(RoleMembership::getRoles)
+          .flatMap(List::stream)
+          .collect(Collectors.toSet());
+
+    } catch (Exception e) {
+      log.error(
+          String.format("Failed to fetch %s for urns %s", ROLE_MEMBERSHIP_ASPECT_NAME, groups), e);
+      return new HashSet<>();
+    }
+  }
+
   private Set<String> resolveGroups(
       ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) {
     if (context.groups != null) {
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java
index ffee378a363c7..588cdf57269ef 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java
@@ -28,6 +28,7 @@
 import com.linkedin.common.UrnArray;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
+import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.data.template.StringArray;
 import com.linkedin.domain.DomainProperties;
 import com.linkedin.domain.Domains;
@@ -36,6 +37,7 @@
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.entity.client.EntityClient;
+import com.linkedin.identity.GroupMembership;
 import com.linkedin.identity.RoleMembership;
 import com.linkedin.metadata.query.SearchFlags;
 import com.linkedin.metadata.search.ScrollResult;
@@ -254,10 +256,14 @@
     when(_entityClient.batchGetV2(
             any(),
             eq(Collections.singleton(USER_WITH_ADMIN_ROLE)),
-            eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)),
+            eq(
+                ImmutableSet.of(
+                    ROLE_MEMBERSHIP_ASPECT_NAME,
+                    GROUP_MEMBERSHIP_ASPECT_NAME,
+                    NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)),
             any()))
         .thenReturn(
-            createUserRoleMembershipBatchResponse(
+            createRoleMembershipBatchResponse(
                 USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin")));

     final Authentication systemAuthentication =
@@ -460,6 +466,49 @@
     assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY);
   }

+  @Test
+  public void testAuthorizationGrantedBasedOnGroupRole() throws Exception {
+    final EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:custom");
+
+    final Urn userUrnWithoutPermissions = UrnUtils.getUrn("urn:li:corpuser:userWithoutRole");
+    final
Urn groupWithAdminPermission = UrnUtils.getUrn("urn:li:corpGroup:groupWithRole");
+    final UrnArray groups = new UrnArray(List.of(groupWithAdminPermission));
+    final GroupMembership groupMembership = new GroupMembership();
+    groupMembership.setGroups(groups);
+
+    // User has no role associated but is part of 1 group
+    when(_entityClient.batchGetV2(
+            any(),
+            eq(Collections.singleton(userUrnWithoutPermissions)),
+            eq(
+                ImmutableSet.of(
+                    ROLE_MEMBERSHIP_ASPECT_NAME,
+                    GROUP_MEMBERSHIP_ASPECT_NAME,
+                    NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)),
+            any()))
+        .thenReturn(
+            createEntityBatchResponse(
+                userUrnWithoutPermissions, GROUP_MEMBERSHIP_ASPECT_NAME, groupMembership));
+
+    // Group has a role
+    when(_entityClient.batchGetV2(
+            any(),
+            eq(Collections.singleton(groupWithAdminPermission)),
+            eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)),
+            any()))
+        .thenReturn(
+            createRoleMembershipBatchResponse(
+                groupWithAdminPermission, UrnUtils.getUrn("urn:li:dataHubRole:Admin")));
+
+    // This request should only be valid for actor with the admin role.
+    // Which the urn:li:corpuser:userWithoutRole does not have
+    AuthorizationRequest request =
+        new AuthorizationRequest(
+            userUrnWithoutPermissions.toString(), "EDIT_USER_PROFILE", Optional.of(resourceSpec));
+
+    assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW);
+  }
+
   private DataHubPolicyInfo createDataHubPolicyInfo(
       boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception {
@@ -575,20 +624,24 @@ private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(
     return batchResponse;
   }

-  private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(
-      final Urn userUrn, @Nullable final Urn roleUrn) {
-    final Map<Urn, EntityResponse> batchResponse = new HashMap<>();
-    final EntityResponse response = new EntityResponse();
-    EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
+  private Map<Urn, EntityResponse> createRoleMembershipBatchResponse(
+      final Urn actorUrn, @Nullable final Urn roleUrn) {
     final RoleMembership membership = new RoleMembership();
     if (roleUrn != null) {
       membership.setRoles(new UrnArray(roleUrn));
     }
+    return createEntityBatchResponse(actorUrn, ROLE_MEMBERSHIP_ASPECT_NAME, membership);
+  }
+
+  private Map<Urn, EntityResponse> createEntityBatchResponse(
+      final Urn actorUrn, final String aspectName, final RecordTemplate aspect) {
+    final Map<Urn, EntityResponse> batchResponse = new HashMap<>();
+    final EntityResponse response = new EntityResponse();
+    EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
     aspectMap.put(
-        ROLE_MEMBERSHIP_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(membership.data())));
+        aspectName, new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(aspect.data())));
     response.setAspects(aspectMap);
-    batchResponse.put(userUrn, response);
+    batchResponse.put(actorUrn, response);
     return batchResponse;
   }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java
index 08ec91d5287dc..c7f06eeba6e85 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java
@@ -83,7 +83,11 @@ public void setupTest() throws Exception {
     when(_entityClient.batchGetV2(
             eq(CORP_USER_ENTITY_NAME),
             eq(Collections.singleton(authorizedUserUrn)),
-            eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)),
+            eq(
+                ImmutableSet.of(
+                    ROLE_MEMBERSHIP_ASPECT_NAME,
+
GROUP_MEMBERSHIP_ASPECT_NAME, + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), any())) .thenReturn(authorizedEntityResponseMap); @@ -94,7 +98,11 @@ public void setupTest() throws Exception { when(_entityClient.batchGetV2( eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(unauthorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + eq( + ImmutableSet.of( + ROLE_MEMBERSHIP_ASPECT_NAME, + GROUP_MEMBERSHIP_ASPECT_NAME, + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), any())) .thenReturn(unauthorizedEntityResponseMap); diff --git a/smoke-test/tests/privileges/test_privileges.py b/smoke-test/tests/privileges/test_privileges.py index 75e2265f1f555..e1cb848cccf8e 100644 --- a/smoke-test/tests/privileges/test_privileges.py +++ b/smoke-test/tests/privileges/test_privileges.py @@ -450,4 +450,64 @@ def test_privilege_to_create_and_manage_policies(): # Ensure that user can't create a policy after privilege is removed by admin - _ensure_cant_perform_action(user_session, create_policy,"createPolicy") \ No newline at end of file + _ensure_cant_perform_action(user_session, create_policy,"createPolicy") + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_privilege_from_group_role_can_create_and_manage_secret(): + + (admin_user, admin_pass) = get_admin_credentials() + admin_session = login_as(admin_user, admin_pass) + user_session = login_as("user", "user") + secret_urn = "urn:li:dataHubSecret:TestSecretName" + + # Verify new user can't create secrets + create_secret = { + "query": """mutation createSecret($input: CreateSecretInput!) {\n + createSecret(input: $input)\n}""", + "variables": { + "input":{ + "name":"TestSecretName", + "value":"Test Secret Value", + "description":"Test Secret Description" + } + }, + } + _ensure_cant_perform_action(user_session, create_secret,"createSecret") + + # Create group and grant it the admin role. + group_urn = create_group(admin_session, "Test Group") + + # Assign admin role to group + assign_role(admin_session,"urn:li:dataHubRole:Admin", [group_urn]) + + # Assign user to group + assign_user_to_group(admin_session, group_urn, ["urn:li:corpuser:user"]) + + # Verify new user with admin group can create and manage secrets + # Create a secret + _ensure_can_create_secret(user_session, create_secret, secret_urn) + + # Remove a secret + remove_secret = { + "query": """mutation deleteSecret($urn: String!) {\n + deleteSecret(urn: $urn)\n}""", + "variables": { + "urn": secret_urn + }, + } + + remove_secret_response = user_session.post(f"{get_frontend_url()}/api/v2/graphql", json=remove_secret) + remove_secret_response.raise_for_status() + secret_data = remove_secret_response.json() + + assert secret_data + assert secret_data["data"] + assert secret_data["data"]["deleteSecret"] + assert secret_data["data"]["deleteSecret"] == secret_urn + + # Delete group which removes the user's admin capabilities + remove_group(admin_session, group_urn) + + # Ensure user can't create secret after policy is removed + _ensure_cant_perform_action(user_session, create_secret,"createSecret") diff --git a/smoke-test/tests/privileges/utils.py b/smoke-test/tests/privileges/utils.py index ea1f565f6f5ac..eeb385a243a90 100644 --- a/smoke-test/tests/privileges/utils.py +++ b/smoke-test/tests/privileges/utils.py @@ -170,6 +170,67 @@ def remove_user(session, urn): response.raise_for_status() return response.json() +def create_group(session, name): + json = { + "query": """mutation createGroup($input: CreateGroupInput!) 
{\n + createGroup(input: $input) + }""", + "variables": {"input": {"name": name}}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["createGroup"] + return res_data["data"]["createGroup"] + +def remove_group(session, urn): + json = { + "query": """mutation removeGroup($urn: String!) {\n + removeGroup(urn: $urn) + }""", + "variables": {"urn": urn}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["removeGroup"] + return res_data["data"]["removeGroup"] + +def assign_user_to_group(session, group_urn, user_urns): + json = { + "query": """mutation addGroupMembers($groupUrn: String!, $userUrns: [String!]!) {\n + addGroupMembers(input: { groupUrn: $groupUrn, userUrns: $userUrns }) + }""", + "variables": {"groupUrn": group_urn, "userUrns": user_urns}, + } + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["addGroupMembers"] + return res_data["data"]["addGroupMembers"] + +def assign_role(session, role_urn, actor_urns): + json = { + "query": """mutation batchAssignRole($input: BatchAssignRoleInput!) {\n + batchAssignRole(input: $input) + }""", + "variables": {"input": {"roleUrn": role_urn, "actors": actor_urns}}, + } + + response = session.post(f"{get_frontend_url()}/api/v2/graphql", json=json) + response.raise_for_status() + res_data = response.json() + assert res_data + assert res_data["data"] + assert res_data["data"]["batchAssignRole"] + return res_data["data"]["batchAssignRole"] + def create_user_policy(user_urn, privileges, session): policy = { "query": """mutation createPolicy($input: PolicyUpdateInput!) {\n From 0b66e5e452140b158fd350b4b769d4b7792db073 Mon Sep 17 00:00:00 2001 From: Ingthor Birkir Arnason Date: Fri, 19 Jan 2024 15:03:56 +0000 Subject: [PATCH 242/263] fix(frontend): Add fallback for image load error on Avatar (#9501) Co-authored-by: Ingthor Birkir Arnason Co-authored-by: Harshal Sheth --- .../src/app/shared/avatar/CustomAvatar.tsx | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx b/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx index 320b244125315..2ec5a1e77fe22 100644 --- a/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx +++ b/datahub-web-react/src/app/shared/avatar/CustomAvatar.tsx @@ -1,6 +1,6 @@ import { Avatar, Tooltip } from 'antd'; import { TooltipPlacement } from 'antd/lib/tooltip'; -import React from 'react'; +import React, { useState } from 'react'; import { Link } from 'react-router-dom'; import styled from 'styled-components'; @@ -50,6 +50,8 @@ export default function CustomAvatar({ isRole = false, hideTooltip = false, }: Props) { + const [imageError, setImageError] = useState(false); + const avatarWithInitial = name ? ( {name.charAt(0).toUpperCase()} @@ -62,8 +64,15 @@ export default function CustomAvatar({ ) : ( avatarWithInitial ); + + const handleImageError = () => { + setImageError(true); + // To prevent fallback error handling from Ant Design + return false; + }; + const avatar = - photoUrl && photoUrl !== '' ? : avatarWithDefault; + photoUrl && photoUrl !== '' && !imageError ? 
: avatarWithDefault; if (!name) { return url ? {avatar} : avatar; } From a60df52cd2d1abd0ab5e9b6d05f6094a7d3e58f0 Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Fri, 19 Jan 2024 22:06:50 +0530 Subject: [PATCH 243/263] fix(user-removal): resolve user removal list update issue (#9671) --- datahub-web-react/src/app/identity/user/UserList.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/identity/user/UserList.tsx b/datahub-web-react/src/app/identity/user/UserList.tsx index 22b44e5f2d625..178f54325ecde 100644 --- a/datahub-web-react/src/app/identity/user/UserList.tsx +++ b/datahub-web-react/src/app/identity/user/UserList.tsx @@ -82,7 +82,7 @@ export const UserList = () => { }); const totalUsers = usersData?.listUsers?.total || 0; - useEffect(()=> { + useEffect(() => { setUsersList(usersData?.listUsers?.users || []); }, [usersData]); const onChangePage = (newPage: number) => { @@ -92,6 +92,7 @@ export const UserList = () => { const handleDelete = (urn: string) => { removeUserFromListUsersCache(urn, client, page, pageSize); + usersRefetch(); }; const { From 0c940c7b2b9ba2e4e79ead7973045775882460a3 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: Fri, 19 Jan 2024 13:38:48 -0500 Subject: [PATCH 244/263] feat(ui) Add standardized GQL error handling function to FE (#9470) --- datahub-web-react/src/App.tsx | 3 +- .../Dataset/Queries/QueryCardDetailsMenu.tsx | 10 +++-- datahub-web-react/src/app/shared/constants.ts | 8 ++++ .../src/app/shared/handleGraphQLError.ts | 40 +++++++++++++++++++ 4 files changed, 57 insertions(+), 4 deletions(-) create mode 100644 datahub-web-react/src/app/shared/handleGraphQLError.ts diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index 895c2a4781e42..79c9ee91ceaa1 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -14,6 +14,7 @@ import { PageRoutes } from './conf/Global'; import { isLoggedInVar } from './app/auth/checkAuthStatus'; import { GlobalCfg } from './conf'; import possibleTypesResult from './possibleTypes.generated'; +import { ErrorCodes } from './app/shared/constants'; /* Construct Apollo Client @@ -24,7 +25,7 @@ const errorLink = onError((error) => { const { networkError, graphQLErrors } = error; if (networkError) { const serverError = networkError as ServerError; - if (serverError.statusCode === 401) { + if (serverError.statusCode === ErrorCodes.Unauthorized) { isLoggedInVar(false); Cookies.remove(GlobalCfg.CLIENT_AUTH_COOKIE); const currentPath = window.location.pathname + window.location.search; diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx index d17f0d12b3cf3..a663dfffaaabf 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Queries/QueryCardDetailsMenu.tsx @@ -3,6 +3,7 @@ import styled from 'styled-components'; import { DeleteOutlined, MoreOutlined } from '@ant-design/icons'; import { Dropdown, Menu, message, Modal } from 'antd'; import { useDeleteQueryMutation } from '../../../../../../graphql/query.generated'; +import handleGraphQLError from '../../../../../shared/handleGraphQLError'; const StyledMoreOutlined = styled(MoreOutlined)` font-size: 14px; @@ -28,9 +29,12 @@ export default function QueryCardDetailsMenu({ urn, onDeleted, index }: Props) { 
onDeleted?.(urn); } }) - .catch(() => { - message.destroy(); - message.error({ content: 'Failed to delete Query! An unexpected error occurred' }); + .catch((error) => { + handleGraphQLError({ + error, + defaultMessage: 'Failed to delete Query! An unexpected error occurred', + permissionMessage: 'Unauthorized to delete Query. Please contact your DataHub administrator.', + }); }); }; diff --git a/datahub-web-react/src/app/shared/constants.ts b/datahub-web-react/src/app/shared/constants.ts index dc04372b43607..1cd9077ab8cdf 100644 --- a/datahub-web-react/src/app/shared/constants.ts +++ b/datahub-web-react/src/app/shared/constants.ts @@ -1 +1,9 @@ export const ENTER_KEY_CODE = 13; + +export enum ErrorCodes { + BadRequest = 400, + Unauthorized = 401, + Forbidden = 403, + NotFound = 404, + ServerError = 500, +} diff --git a/datahub-web-react/src/app/shared/handleGraphQLError.ts b/datahub-web-react/src/app/shared/handleGraphQLError.ts new file mode 100644 index 0000000000000..f129fef34c7ca --- /dev/null +++ b/datahub-web-react/src/app/shared/handleGraphQLError.ts @@ -0,0 +1,40 @@ +import { ErrorResponse } from '@apollo/client/link/error'; +import { message } from 'antd'; +import { ErrorCodes } from './constants'; + +interface Props { + error: ErrorResponse; + defaultMessage: string; + permissionMessage?: string; + badRequestMessage?: string; + serverErrorMessage?: string; +} + +export default function handleGraphQLError({ + error, + defaultMessage, + permissionMessage = 'Unauthorized. Please contact your DataHub administrator.', + badRequestMessage, + serverErrorMessage, +}: Props) { + // destroy the default error message from errorLink in App.tsx + message.destroy(); + const { graphQLErrors } = error; + if (graphQLErrors && graphQLErrors.length) { + const { extensions } = graphQLErrors[0]; + const errorCode = extensions && (extensions.code as number); + if (errorCode === ErrorCodes.Forbidden) { + message.error(permissionMessage); + return; + } + if (errorCode === ErrorCodes.BadRequest && badRequestMessage) { + message.error(badRequestMessage); + return; + } + if (errorCode === ErrorCodes.ServerError && serverErrorMessage) { + message.error(serverErrorMessage); + return; + } + } + message.error(defaultMessage); +} From 131c8f878c9f0fe872c9cb4faa4de22c57922c31 Mon Sep 17 00:00:00 2001 From: RyanHolstien Date: Fri, 19 Jan 2024 13:44:48 -0600 Subject: [PATCH 245/263] feat(kafka): add health indicator for kafka (#9662) --- ...docker-compose.consumers-without-neo4j.yml | 2 + docker/docker-compose.consumers.yml | 2 + ...ose.consumers-without-neo4j.quickstart.yml | 2 + .../docker-compose.consumers.quickstart.yml | 2 + metadata-jobs/common/build.gradle | 16 ++++ .../health/kafka/KafkaHealthIndicator.java | 75 +++++++++++++++++++ metadata-jobs/mae-consumer-job/build.gradle | 1 + .../kafka/MaeConsumerApplication.java | 3 +- .../kafka/MaeConsumerApplicationTest.java | 4 + metadata-jobs/mce-consumer-job/build.gradle | 1 + .../kafka/MceConsumerApplication.java | 3 +- .../kafka/MceConsumerApplicationTest.java | 10 ++- .../config/kafka/ConsumerConfiguration.java | 1 + .../src/main/resources/application.yml | 1 + .../boot/OnBootApplicationListener.java | 10 +-- .../kafka/DataHubUpgradeKafkaListener.java | 2 +- settings.gradle | 1 + 17 files changed, 126 insertions(+), 10 deletions(-) create mode 100644 metadata-jobs/common/build.gradle create mode 100644 metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java diff --git 
a/docker/docker-compose.consumers-without-neo4j.yml b/docker/docker-compose.consumers-without-neo4j.yml index f1be585232a1a..792c212e9be9a 100644 --- a/docker/docker-compose.consumers-without-neo4j.yml +++ b/docker/docker-compose.consumers-without-neo4j.yml @@ -17,6 +17,7 @@ services: env_file: datahub-mae-consumer/env/docker-without-neo4j.env environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} datahub-mce-consumer: container_name: datahub-mce-consumer hostname: datahub-mce-consumer @@ -31,3 +32,4 @@ services: - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} diff --git a/docker/docker-compose.consumers.yml b/docker/docker-compose.consumers.yml index 8d331cea2f0b9..0a7cbe4dbe3d7 100644 --- a/docker/docker-compose.consumers.yml +++ b/docker/docker-compose.consumers.yml @@ -17,6 +17,7 @@ services: env_file: datahub-mae-consumer/env/docker.env environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} depends_on: neo4j: condition: service_healthy @@ -39,6 +40,7 @@ services: - NEO4J_PASSWORD=datahub - GRAPH_SERVICE_IMPL=neo4j - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} depends_on: neo4j: condition: service_healthy diff --git a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml index 4ed57dca1f080..c66931cabd7a4 100644 --- a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml @@ -7,6 +7,7 @@ services: container_name: datahub-mae-consumer environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -45,6 +46,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false diff --git a/docker/quickstart/docker-compose.consumers.quickstart.yml b/docker/quickstart/docker-compose.consumers.quickstart.yml index ba8432d8a89af..b8106ef096952 100644 --- a/docker/quickstart/docker-compose.consumers.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers.quickstart.yml @@ -10,6 +10,7 @@ services: condition: service_healthy environment: - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - 
DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -55,6 +56,7 @@ services: - GRAPH_SERVICE_IMPL=neo4j - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false diff --git a/metadata-jobs/common/build.gradle b/metadata-jobs/common/build.gradle new file mode 100644 index 0000000000000..bdc3b7a44a98a --- /dev/null +++ b/metadata-jobs/common/build.gradle @@ -0,0 +1,16 @@ +plugins { + id 'java' +} + +dependencies { + implementation(project(':metadata-service:factories')) { + exclude group: 'org.neo4j.test' + } + implementation externalDependency.springActuator + implementation externalDependency.springKafka + implementation externalDependency.slf4jApi + + compileOnly externalDependency.lombok + + annotationProcessor externalDependency.lombok +} \ No newline at end of file diff --git a/metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java b/metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java new file mode 100644 index 0000000000000..305d33d2a09d1 --- /dev/null +++ b/metadata-jobs/common/src/main/java/io/datahubproject/metadata/jobs/common/health/kafka/KafkaHealthIndicator.java @@ -0,0 +1,75 @@ +package io.datahubproject.metadata.jobs.common.health.kafka; + +import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; +import java.util.Collection; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Value; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.common.TopicPartition; +import org.springframework.boot.actuate.health.AbstractHealthIndicator; +import org.springframework.boot.actuate.health.Health; +import org.springframework.boot.actuate.health.Status; +import org.springframework.kafka.config.KafkaListenerEndpointRegistry; +import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; +import org.springframework.kafka.listener.MessageListenerContainer; +import org.springframework.stereotype.Component; + +@Component +@Slf4j +public class KafkaHealthIndicator extends AbstractHealthIndicator { + + private final KafkaListenerEndpointRegistry listenerRegistry; + private final ConfigurationProvider configurationProvider; + + public KafkaHealthIndicator( + KafkaListenerEndpointRegistry listenerRegistry, ConfigurationProvider configurationProvider) { + this.listenerRegistry = listenerRegistry; + this.configurationProvider = configurationProvider; + } + + @Override + protected void doHealthCheck(Health.Builder builder) throws Exception { + Status kafkaStatus = Status.UP; + boolean isContainerDown = + listenerRegistry.getAllListenerContainers().stream() + .filter( + container -> + !DataHubUpgradeKafkaListener.CONSUMER_GROUP.equals(container.getGroupId())) + .anyMatch(container -> !container.isRunning()); + Map details = + listenerRegistry.getAllListenerContainers().stream() + .collect( + Collectors.toMap( + MessageListenerContainer::getListenerId, this::buildConsumerDetails)); + if (isContainerDown && configurationProvider.getKafka().getConsumer().isHealthCheckEnabled()) { + kafkaStatus = Status.DOWN; + 
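+    // Note on the check above: the isContainerDown stream deliberately filters
+    // out the DataHubUpgradeKafkaListener consumer group, so a stopped DUHE
+    // (upgrade-history) container never reaches this branch, presumably because
+    // that listener is not part of steady-state message processing. Only a
+    // stopped regular listener (e.g. one halted by
+    // stopOnDeserializationError=true) marks the service DOWN.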
} + builder.status(kafkaStatus).withDetails(details).build(); + } + + private ConsumerDetails buildConsumerDetails(MessageListenerContainer container) { + Collection partitionDetails = container.getAssignedPartitions(); + int concurrency = 1; + if (container + instanceof ConcurrentMessageListenerContainer concurrentMessageListenerContainer) { + concurrency = concurrentMessageListenerContainer.getConcurrency(); + } + return new ConsumerDetails( + partitionDetails, + container.getListenerId(), + container.getGroupId(), + concurrency, + container.isRunning()); + } + + @Value + private static class ConsumerDetails { + Collection partitionDetails; + String listenerId; + String groupId; + int concurrency; + boolean isRunning; + } +} diff --git a/metadata-jobs/mae-consumer-job/build.gradle b/metadata-jobs/mae-consumer-job/build.gradle index a8920d50b068e..f3d1ca9885044 100644 --- a/metadata-jobs/mae-consumer-job/build.gradle +++ b/metadata-jobs/mae-consumer-job/build.gradle @@ -15,6 +15,7 @@ dependencies { implementation project(':metadata-jobs:mae-consumer') // TODO: Extract PE consumer into separate pod. implementation project(':metadata-jobs:pe-consumer') + implementation project(':metadata-jobs:common') implementation(externalDependency.springBootStarterWeb) { exclude module: "spring-boot-starter-tomcat" diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java index eef513f8b91e0..e695788e09726 100644 --- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java +++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java @@ -22,7 +22,8 @@ "com.linkedin.metadata.dao.producer", "com.linkedin.gms.factory.config", "com.linkedin.gms.factory.entity.update.indices", - "com.linkedin.gms.factory.timeline.eventgenerator" + "com.linkedin.gms.factory.timeline.eventgenerator", + "io.datahubproject.metadata.jobs.common.health.kafka" }, excludeFilters = { @ComponentScan.Filter( diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java index 69288cec8220a..22fbe7fc6b6ca 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java @@ -3,6 +3,7 @@ import static org.testng.AssertJUnit.*; import com.linkedin.metadata.entity.EntityService; +import io.datahubproject.metadata.jobs.common.health.kafka.KafkaHealthIndicator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; @@ -16,8 +17,11 @@ public class MaeConsumerApplicationTest extends AbstractTestNGSpringContextTests @Autowired private EntityService _mockEntityService; + @Autowired private KafkaHealthIndicator kafkaHealthIndicator; + @Test public void testMaeConsumerAutoWiring() { assertNotNull(_mockEntityService); + assertNotNull(kafkaHealthIndicator); } } diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index 2f60d1ae985fb..3370838974bf7 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ 
b/metadata-jobs/mce-consumer-job/build.gradle @@ -15,6 +15,7 @@ dependencies { implementation project(':metadata-service:auth-filter') implementation project(':metadata-jobs:mce-consumer') implementation project(':entity-registry') + implementation project(':metadata-jobs:common') implementation(externalDependency.springBootStarterWeb) { exclude module: "spring-boot-starter-tomcat" diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index 05bcd556debe9..181a723e1cd25 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -29,7 +29,8 @@ "com.linkedin.restli.server", "com.linkedin.metadata.restli", "com.linkedin.metadata.kafka", - "com.linkedin.metadata.dao.producer" + "com.linkedin.metadata.dao.producer", + "io.datahubproject.metadata.jobs.common.health.kafka" }, excludeFilters = { @ComponentScan.Filter( diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java index 714c7b899ff49..6d19db97fb39f 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java @@ -2,10 +2,11 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.when; -import static org.testng.AssertJUnit.assertTrue; +import static org.testng.AssertJUnit.*; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; +import io.datahubproject.metadata.jobs.common.health.kafka.KafkaHealthIndicator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.web.client.TestRestTemplate; @@ -23,6 +24,8 @@ public class MceConsumerApplicationTest extends AbstractTestNGSpringContextTests @Autowired private EntityService _mockEntityService; + @Autowired private KafkaHealthIndicator kafkaHealthIndicator; + @Test public void testRestliServletConfig() { RestoreIndicesResult mockResult = new RestoreIndicesResult(); @@ -34,4 +37,9 @@ public void testRestliServletConfig() { "/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); assertTrue(response.contains(mockResult.toString())); } + + @Test + public void testHealthIndicator() { + assertNotNull(kafkaHealthIndicator); + } } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java index 61b9d5c816790..60f3e1b4fef76 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java @@ -7,4 +7,5 @@ public class ConsumerConfiguration { private int maxPartitionFetchBytes; private boolean stopOnDeserializationError; + private boolean healthCheckEnabled; } diff --git 
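The two application tests above only assert that the indicator wires up. A minimal behavioral sketch of the contract it implements follows; this is not part of the patch, it relies on Mockito deep stubs for DataHub's ConfigurationProvider, and the listener id and group names are illustrative.

import static org.mockito.Mockito.*;
import static org.testng.Assert.assertEquals;

import com.linkedin.gms.factory.config.ConfigurationProvider;
import io.datahubproject.metadata.jobs.common.health.kafka.KafkaHealthIndicator;
import java.util.List;
import org.springframework.boot.actuate.health.Status;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.listener.MessageListenerContainer;
import org.testng.annotations.Test;

public class KafkaHealthIndicatorSketchTest {
  @Test
  public void reportsDownWhenRegularConsumerStops() {
    // A non-DUHE listener container that has stopped, e.g. after a
    // deserialization error with stopOnDeserializationError=true.
    MessageListenerContainer container = mock(MessageListenerContainer.class);
    when(container.getGroupId()).thenReturn("generic-mae-consumer-job-client");
    when(container.getListenerId()).thenReturn("maeListener");
    when(container.isRunning()).thenReturn(false);

    KafkaListenerEndpointRegistry registry = mock(KafkaListenerEndpointRegistry.class);
    when(registry.getAllListenerContainers()).thenReturn(List.of(container));

    // Deep stubs let us stub the getKafka().getConsumer() chain in one line.
    ConfigurationProvider config = mock(ConfigurationProvider.class, RETURNS_DEEP_STUBS);
    when(config.getKafka().getConsumer().isHealthCheckEnabled()).thenReturn(true);

    Status status = new KafkaHealthIndicator(registry, config).health().getStatus();
    assertEquals(status, Status.DOWN); // flips back to UP once the container restarts
  }
}

With KAFKA_CONSUMER_HEALTH_CHECK_ENABLED left at its default of true, a halted container surfaces as DOWN on the Spring Boot health endpoint, which is what lets the docker-compose and Kubernetes health checks above restart the consumer.
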
a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 36498f7c45fea..cfc84491ab0ae 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -237,6 +237,7 @@ kafka: consumer: maxPartitionFetchBytes: ${KAFKA_CONSUMER_MAX_PARTITION_FETCH_BYTES:5242880} # the max bytes consumed per partition stopOnDeserializationError: ${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:true} # Stops kafka listener container on deserialization error, allows user to fix problems before moving past problematic offset. If false will log and move forward past the offset + healthCheckEnabled: ${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:true} # Sets the health indicator to down when a message listener container has stopped due to a deserialization failure, will force consumer apps to restart through k8s and docker-compose health mechanisms schemaRegistry: type: ${SCHEMA_REGISTRY_TYPE:KAFKA} # INTERNAL or KAFKA or AWS_GLUE url: ${KAFKA_SCHEMAREGISTRY_URL:http://localhost:8081} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java index 801a902b7f835..0750dfca865c2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java @@ -16,14 +16,12 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.EventListener; import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; /** Responsible for coordinating starting steps that happen before the application starts up. */ -@Configuration @Slf4j @Component public class OnBootApplicationListener { @@ -73,22 +71,22 @@ public Runnable isSchemaRegistryAPIServletReady() { return () -> { final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); int timeouts = _servletsWaitTimeout; - boolean openAPIServeletReady = false; - while (!openAPIServeletReady && timeouts > 0) { + boolean openAPIServletReady = false; + while (!openAPIServletReady && timeouts > 0) { try { log.info("Sleeping for 1 second"); Thread.sleep(1000); StatusLine statusLine = httpClient.execute(request).getStatusLine(); if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { log.info("Connected! 
Authentication not tested."); - openAPIServeletReady = true; + openAPIServletReady = true; } } catch (IOException | InterruptedException e) { log.info("Failed to connect to open servlet: {}", e.getMessage()); } timeouts--; } - if (!openAPIServeletReady) { + if (!openAPIServletReady) { log.error( "Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", timeouts); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java index b2b6fb5e5cb7e..e69ab342740e4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java @@ -36,7 +36,7 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap private final KafkaListenerEndpointRegistry registry; - private static final String CONSUMER_GROUP = + public static final String CONSUMER_GROUP = "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}"; private static final String SUFFIX = "temp"; public static final String TOPIC_NAME = diff --git a/settings.gradle b/settings.gradle index d2844fe00cdbc..4614b6ed4ccaf 100644 --- a/settings.gradle +++ b/settings.gradle @@ -63,3 +63,4 @@ include 'metadata-integration:java:examples' include 'mock-entity-registry' include 'metadata-service:services' include 'metadata-service:configuration' +include ':metadata-jobs:common' From bd9b0c49fbf26a344a0f2f626b8b33a04cefacd5 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 19 Jan 2024 16:32:22 -0600 Subject: [PATCH 246/263] perf(entity-service): batch exists calls (#9663) --- .../assertion/DeleteAssertionResolver.java | 6 +- .../UpdateDeprecationResolver.java | 7 +- .../resolvers/domain/SetDomainResolver.java | 8 +- .../resolvers/domain/UnsetDomainResolver.java | 6 +- .../resolvers/embed/UpdateEmbedResolver.java | 2 +- .../entity/EntityExistsResolver.java | 6 +- .../glossary/AddRelatedTermsResolver.java | 6 +- .../DeleteGlossaryEntityResolver.java | 6 +- .../glossary/RemoveRelatedTermsResolver.java | 4 +- .../lineage/UpdateLineageResolver.java | 10 +- .../BatchUpdateSoftDeletedResolver.java | 4 +- .../resolvers/mutate/DescriptionUtils.java | 52 ++++----- .../resolvers/mutate/MoveDomainResolver.java | 4 +- .../resolvers/mutate/UpdateNameResolver.java | 4 +- .../mutate/UpdateParentNodeResolver.java | 6 +- .../resolvers/mutate/util/DomainUtils.java | 11 +- .../resolvers/mutate/util/LabelUtils.java | 61 ++++++---- .../resolvers/mutate/util/LinkUtils.java | 12 +- .../resolvers/mutate/util/OwnerUtils.java | 23 ++-- .../resolvers/tag/SetTagColorResolver.java | 4 +- .../HyperParameterValueTypeMapper.java | 2 +- .../resolvers/UpdateLineageResolverTest.java | 42 +++---- .../DeleteAssertionResolverTest.java | 59 +++++----- .../BatchUpdateSoftDeletedResolverTest.java | 19 ++- .../BatchUpdateDeprecationResolverTest.java | 19 ++- .../UpdateDeprecationResolverTest.java | 46 ++++---- .../domain/BatchSetDomainResolverTest.java | 56 ++++++--- .../domain/MoveDomainResolverTest.java | 13 ++- .../domain/SetDomainResolverTest.java | 33 ++++-- .../domain/UnsetDomainResolverTest.java | 14 ++- .../embed/UpdateEmbedResolverTest.java | 14 ++- .../entity/EntityExistsResolverTest.java | 3 +- 
.../glossary/AddRelatedTermsResolverTest.java | 86 ++++++++------ .../DeleteGlossaryEntityResolverTest.java | 7 +- .../RemoveRelatedTermsResolverTest.java | 19 +-- .../glossary/UpdateNameResolverTest.java | 11 +- .../UpdateParentNodeResolverTest.java | 27 +++-- .../load/BatchGetEntitiesResolverTest.java | 7 +- .../mutate/UpdateUserSettingResolverTest.java | 4 +- .../owner/AddOwnersResolverTest.java | 110 +++++++++++------- .../owner/BatchAddOwnersResolverTest.java | 78 ++++++++----- .../owner/BatchRemoveOwnersResolverTest.java | 80 +++++++------ .../resolvers/tag/AddTagsResolverTest.java | 39 ++++--- .../tag/BatchAddTagsResolverTest.java | 48 +++++--- .../tag/BatchRemoveTagsResolverTest.java | 34 ++++-- .../tag/SetTagColorResolverTest.java | 12 +- .../resolvers/term/AddTermsResolverTest.java | 79 +++++++------ .../term/BatchAddTermsResolverTest.java | 96 ++++++++------- .../term/BatchRemoveTermsResolverTest.java | 80 +++++++------ .../metadata/client/JavaEntityClient.java | 2 +- .../metadata/entity/EntityServiceImpl.java | 70 +++++++---- .../linkedin/metadata/entity/EntityUtils.java | 23 ---- .../candidatesource/MostPopularSource.java | 40 +++---- .../candidatesource/RecentlyEditedSource.java | 40 +++---- .../candidatesource/RecentlyViewedSource.java | 40 +++---- .../sibling/SiblingGraphServiceTest.java | 6 +- .../SampleDataFixtureConfiguration.java | 17 ++- .../authentication/group/GroupService.java | 8 +- .../token/StatefulTokenService.java | 2 +- .../user/NativeUserService.java | 4 +- .../datahub/telemetry/TrackingService.java | 2 +- .../DataHubTokenAuthenticatorTest.java | 3 +- .../group/GroupServiceTest.java | 8 +- .../token/StatefulTokenServiceTest.java | 3 +- .../user/NativeUserServiceTest.java | 4 +- .../telemetry/TrackingServiceTest.java | 6 +- .../steps/IngestRetentionPoliciesStep.java | 6 +- .../boot/steps/RemoveClientIdAspectStep.java | 4 +- .../delegates/EntityApiDelegateImpl.java | 2 +- .../src/test/java/mock/MockEntityService.java | 5 +- .../resources/entity/EntityResource.java | 2 +- .../metadata/entity/EntityService.java | 23 +++- .../EntityRecommendationSource.java | 37 ++++++ .../metadata/shared/ValidationUtils.java | 79 +++++++++---- 74 files changed, 1064 insertions(+), 751 deletions(-) create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 89912b2814e40..cbf685e9f45bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -24,10 +24,10 @@ public class DeleteAssertionResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; public DeleteAssertionResolver( - final EntityClient entityClient, final EntityService entityService) { + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @@ -41,7 +41,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) () -> { // 1. check the entity exists. If not, return false. 
- if (!_entityService.exists(assertionUrn)) { + if (!_entityService.exists(assertionUrn, true)) { return true; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index 62c88c506ba61..be887d845f385 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -37,7 +37,7 @@ public class UpdateDeprecationResolver implements DataFetcher _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -101,9 +101,10 @@ private boolean isAuthorizedToUpdateDeprecationForEntity( orPrivilegeGroups); } - public static Boolean validateUpdateDeprecationInput(Urn entityUrn, EntityService entityService) { + public static Boolean validateUpdateDeprecationInput( + Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 1c52f707c61a4..4d2e93be42fcb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -28,7 +28,7 @@ public class SetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -74,16 +74,16 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw } public static Boolean validateSetDomainInput( - Urn entityUrn, Urn domainUrn, EntityService entityService) { + Urn entityUrn, Urn domainUrn, EntityService entityService) { - if (!entityService.exists(domainUrn)) { + if (!entityService.exists(domainUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to add Entity %s to Domain %s. Domain does not exist.", entityUrn, domainUrn)); } - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to add Entity %s to Domain %s. 
Entity does not exist.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index b2a82ac7608d8..c415d933e4a3a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -29,7 +29,7 @@ public class UnsetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -71,9 +71,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw }); } - public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { + public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format("Failed to add Entity %s to Domain %s. Entity does not exist.", entityUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index e1b264606074c..caaf76b0d1dc8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -82,7 +82,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw */ private static void validateUpdateEmbedInput( @Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { - if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) { + if (!entityService.exists(UrnUtils.getUrn(input.getUrn()), true)) { throw new IllegalArgumentException( String.format( "Failed to update embed for entity with urn %s. Entity does not exist!", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index d2bd2f3fb8a17..257f0a4efd260 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -12,9 +12,9 @@ /** Resolver responsible for returning whether an entity exists. 
*/ public class EntityExistsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; - public EntityExistsResolver(final EntityService entityService) { + public EntityExistsResolver(final EntityService entityService) { _entityService = entityService; } @@ -32,7 +32,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) return CompletableFuture.supplyAsync( () -> { try { - return _entityService.exists(entityUrn); + return _entityService.exists(entityUrn, true); } catch (Exception e) { throw new RuntimeException( String.format("Failed to check whether entity %s exists", entityUrn.toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 535dbbf70a4cb..31aa8b2ab9ddf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -29,7 +29,7 @@ @RequiredArgsConstructor public class AddRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -91,7 +91,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || !_entityService.exists(urn)) { + || !_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format( "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); @@ -104,7 +104,7 @@ public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); - } else if (!_entityService.exists(termUrn)) { + } else if (!_entityService.exists(termUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", urn, termUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index f623f0e34b366..3dc3e93260665 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -15,10 +15,10 @@ public class DeleteGlossaryEntityResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; public DeleteGlossaryEntityResolver( - final EntityClient entityClient, EntityService entityService) { + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @@ -33,7 +33,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) return CompletableFuture.supplyAsync( () -> { if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { + if (!_entityService.exists(entityUrn, true)) { throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index 8c9b792b74e0d..b1dd404e12465 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -27,7 +27,7 @@ @RequiredArgsConstructor public class RemoveRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -46,7 +46,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || !_entityService.exists(urn)) { + || !_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format( "Failed to update %s. 
%s either does not exist or is not a glossaryTerm.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index a0caef20a4755..804bd6ca05431 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -35,7 +35,7 @@ @RequiredArgsConstructor public class UpdateLineageResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final LineageService _lineageService; @Override @@ -60,9 +60,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { + final Set existingDownstreamUrns = _entityService.exists(downstreamUrns, true); + // build MCP for every downstreamUrn for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { + if (!existingDownstreamUrns.contains(downstreamUrn)) { throw new IllegalArgumentException( String.format( "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); @@ -128,9 +130,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + final Set existingUpstreamUrns = _entityService.exists(upstreamUrns, true); + // build MCP for upstreamUrn if necessary for (Urn upstreamUrn : upstreamUrns) { - if (!_entityService.exists(upstreamUrn)) { + if (!existingUpstreamUrns.contains(upstreamUrn)) { throw new IllegalArgumentException( String.format( "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java index 5a25e6d83e648..aa7c1b152790f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java @@ -20,7 +20,7 @@ @RequiredArgsConstructor public class BatchUpdateSoftDeletedResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -65,7 +65,7 @@ private void validateInputUrn(String urnStr, QueryContext context) { throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - if (!_entityService.exists(urn)) { + if (!_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format("Failed to soft delete entity with urn %s. 
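
The UpdateLineageResolver hunk above is the heart of this perf change: the old code issued one existence read per URN, while the new code fetches the existing subset in a single call and tests membership locally. A self-contained sketch of that contract and call pattern follows; the interface name and the parameter name includeSoftDeleted are assumptions made for illustration (the diff only shows the boolean argument), and the interface mirrors, rather than reproduces, the new EntityService API.

import com.linkedin.common.urn.Urn;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;

/**
 * Sketch only: the batched existence contract these resolvers now lean on.
 * `includeSoftDeleted` is an assumed parameter name.
 */
interface ExistenceChecker {
  /** One backend round trip; returns the subset of {@code urns} that exist. */
  Set<Urn> exists(Collection<Urn> urns, boolean includeSoftDeleted);

  /** Single-urn convenience, matching the exists(urn, true) call sites in this patch. */
  default boolean exists(Urn urn, boolean includeSoftDeleted) {
    return exists(Collections.singleton(urn), includeSoftDeleted).contains(urn);
  }

  /**
   * The UpdateLineageResolver pattern: batch once, then test membership locally,
   * turning N existence reads into a single call.
   */
  default void validateAllExist(Collection<Urn> urns) {
    Set<Urn> existing = exists(urns, true);
    for (Urn urn : urns) {
      if (!existing.contains(urn)) {
        throw new IllegalArgumentException(
            String.format("Cannot upsert lineage as downstream urn %s doesn't exist", urn));
      }
    }
  }
}

Returning the existing subset as a Set keeps each per-URN check O(1), so the resolver's error messages and control flow stay unchanged while the backend round trips collapse to one.
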
Entity does not exist.", urn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index d0796389d2280..ab151d6244f48 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -44,7 +44,7 @@ public static void updateFieldDescription( Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService) { + EntityService entityService) { EditableSchemaMetadata editableSchemaMetadata = (EditableSchemaMetadata) EntityUtils.getAspectFromEntity( @@ -66,7 +66,7 @@ public static void updateFieldDescription( } public static void updateContainerDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableContainerProperties containerProperties = (EditableContainerProperties) EntityUtils.getAspectFromEntity( @@ -84,7 +84,7 @@ public static void updateContainerDescription( } public static void updateDomainDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( @@ -107,7 +107,7 @@ public static void updateDomainDescription( } public static void updateTagDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity( @@ -123,7 +123,7 @@ public static void updateTagDescription( } public static void updateCorpGroupDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( @@ -143,7 +143,7 @@ public static void updateCorpGroupDescription( } public static void updateGlossaryTermDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( @@ -168,7 +168,7 @@ public static void updateGlossaryTermDescription( } public static void updateGlossaryNodeDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( @@ -189,7 +189,7 @@ public static void updateGlossaryNodeDescription( } public static void updateNotebookDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableNotebookProperties notebookProperties = (EditableNotebookProperties) EntityUtils.getAspectFromEntity( @@ -212,8 +212,8 @@ public static Boolean validateFieldDescriptionInput( Urn resourceUrn, String subResource, 
SubResourceType subResourceType, - EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -223,8 +223,8 @@ public static Boolean validateFieldDescriptionInput( return true; } - public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -232,8 +232,8 @@ public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityS return true; } - public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -241,24 +241,24 @@ public static Boolean validateContainerInput(Urn resourceUrn, EntityService enti return true; } - public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { - if (!entityService.exists(corpUserUrn)) { + public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { + if (!entityService.exists(corpUserUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); } return true; } - public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { - if (!entityService.exists(notebookUrn)) { + public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { + if (!entityService.exists(notebookUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", notebookUrn, notebookUrn)); } @@ -335,7 +335,7 @@ public static boolean isAuthorizedToUpdateDescription( } public static void updateMlModelDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( @@ -355,7 +355,7 @@ public static void updateMlModelDescription( } public static void updateMlModelGroupDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( @@ -375,7 +375,7 @@ public static void updateMlModelGroupDescription( } public static void updateMlFeatureDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( @@ -395,7 +395,7 @@ public static void updateMlFeatureDescription( } public static void updateMlFeatureTableDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLFeatureTableProperties editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( @@ -415,7 +415,7 @@ public static void updateMlFeatureTableDescription( } public static void updateMlPrimaryKeyDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( @@ -435,7 +435,7 @@ public static void updateMlPrimaryKeyDescription( } public static void updateDataProductDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DataProductProperties properties = (DataProductProperties) EntityUtils.getAspectFromEntity( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e4c5c132be4f7..dab8cfffd54e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java @@ -27,7 +27,7 @@ @RequiredArgsConstructor public class MoveDomainResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -67,7 +67,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { throw new IllegalArgumentException("Parent entity is not a domain."); } - if (!_entityService.exists(newParentDomainUrn)) { + if (!_entityService.exists(newParentDomainUrn, true)) { throw new 
IllegalArgumentException("Parent entity does not exist."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index dd44c2718b3a4..8e4a96637e04d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -35,7 +35,7 @@ @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -47,7 +47,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { - if (!_entityService.exists(targetUrn)) { + if (!_entityService.exists(targetUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 848118e6cc0f6..2fcec54978b4f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -26,7 +26,7 @@ @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -37,7 +37,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating parent node. input: {}", input.toString()); - if (!_entityService.exists(targetUrn)) { + if (!_entityService.exists(targetUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", targetUrn, targetUrn)); } @@ -45,7 +45,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) + if (!_entityService.exists(parentNodeUrn, true) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { throw new IllegalArgumentException( String.format( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index fb88d6c29f662..5dbd282580c87 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -77,7 +77,7 @@ public static void setDomainForResources( @Nullable Urn domainUrn, List resources, Urn actor, - EntityService entityService) + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -87,7 +87,10 @@ public static void setDomainForResources( } private static MetadataChangeProposal buildSetDomainProposal( - @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, EntityService entityService) { + @Nullable Urn domainUrn, + ResourceRefInput resource, + Urn actor, + EntityService entityService) { Domains domains = (Domains) EntityUtils.getAspectFromEntity( @@ -104,8 +107,8 @@ private static MetadataChangeProposal buildSetDomainProposal( UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } - public static void validateDomain(Urn domainUrn, EntityService entityService) { - if (!entityService.exists(domainUrn)) { + public static void validateDomain(Urn domainUrn, EntityService entityService) { + if (!entityService.exists(domainUrn, true)) { throw new IllegalArgumentException( String.format("Failed to validate Domain with urn %s. 
Urn does not exist.", domainUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index 8765b91f65d9d..09323fdfc8377 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -42,7 +42,11 @@ public class LabelUtils { private LabelUtils() {} public static void removeTermFromResource( - Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, EntityService entityService) { + Urn labelUrn, + Urn resourceUrn, + String subResource, + Urn actor, + EntityService entityService) { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = (com.linkedin.common.GlossaryTerms) @@ -80,7 +84,7 @@ public static void removeTermFromResource( } public static void removeTagsFromResources( - List tags, List resources, Urn actor, EntityService entityService) + List tags, List resources, Urn actor, EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -90,7 +94,10 @@ public static void removeTagsFromResources( } public static void addTagsToResources( - List tagUrns, List resources, Urn actor, EntityService entityService) + List tagUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -100,7 +107,10 @@ public static void addTagsToResources( } public static void removeTermsFromResources( - List termUrns, List resources, Urn actor, EntityService entityService) + List termUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -110,7 +120,10 @@ public static void removeTermsFromResources( } public static void addTermsToResources( - List termUrns, List resources, Urn actor, EntityService entityService) + List termUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -124,7 +137,7 @@ public static void addTermsToResource( Urn resourceUrn, String subResource, Urn actor, - EntityService entityService) + EntityService entityService) throws URISyntaxException { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = @@ -248,7 +261,7 @@ public static void validateResourceAndLabel( String subResource, SubResourceType subResourceType, String labelEntityType, - EntityService entityService, + EntityService entityService, Boolean isRemoving) { for (Urn urn : labelUrns) { validateResourceAndLabel( @@ -263,14 +276,14 @@ public static void validateResourceAndLabel( } public static void validateLabel( - Urn labelUrn, String labelEntityType, EntityService entityService) { + Urn labelUrn, String labelEntityType, EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { throw new IllegalArgumentException( String.format( "Failed to validate label with urn %s. 
               labelUrn, labelEntityType));
     }
-    if (!entityService.exists(labelUrn)) {
+    if (!entityService.exists(labelUrn, true)) {
       throw new IllegalArgumentException(
           String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn));
     }
@@ -281,8 +294,8 @@ public static void validateResource(
       Urn resourceUrn,
       String subResource,
       SubResourceType subResourceType,
-      EntityService entityService) {
-    if (!entityService.exists(resourceUrn)) {
+      EntityService<?> entityService) {
+    if (!entityService.exists(resourceUrn, true)) {
       throw new IllegalArgumentException(
           String.format(
               "Failed to update resource with urn %s. Entity does not exist.", resourceUrn));
@@ -310,7 +323,7 @@ public static void validateResourceAndLabel(
       String subResource,
       SubResourceType subResourceType,
       String labelEntityType,
-      EntityService entityService,
+      EntityService<?> entityService,
       Boolean isRemoving) {
     if (!isRemoving) {
       validateLabel(labelUrn, labelEntityType, entityService);
@@ -319,7 +332,7 @@ public static void validateResourceAndLabel(
   }
 
   private static MetadataChangeProposal buildAddTagsProposal(
-      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     if (resource.getSubResource() == null || resource.getSubResource().equals("")) {
       // Case 1: Adding tags to a top-level entity
@@ -331,7 +344,7 @@ private static MetadataChangeProposal buildAddTagsProposal(
   }
 
   private static MetadataChangeProposal buildRemoveTagsProposal(
-      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     if (resource.getSubResource() == null || resource.getSubResource().equals("")) {
       // Case 1: Adding tags to a top-level entity
@@ -343,7 +356,7 @@ private static MetadataChangeProposal buildRemoveTagsProposal(
   }
 
   private static MetadataChangeProposal buildRemoveTagsToEntityProposal(
-      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) {
+      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) {
     com.linkedin.common.GlobalTags tags =
         (com.linkedin.common.GlobalTags)
             EntityUtils.getAspectFromEntity(
@@ -361,7 +374,7 @@ private static MetadataChangeProposal buildRemoveTagsToEntityProposal(
   }
 
   private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal(
-      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) {
+      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) {
     com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata =
         (com.linkedin.schema.EditableSchemaMetadata)
             EntityUtils.getAspectFromEntity(
@@ -383,7 +396,7 @@ private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal(
   }
 
   private static MetadataChangeProposal buildAddTagsToEntityProposal(
-      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     com.linkedin.common.GlobalTags tags =
         (com.linkedin.common.GlobalTags)
@@ -402,7 +415,7 @@ private static MetadataChangeProposal buildAddTagsToEntityProposal(
   }
 
   private static MetadataChangeProposal buildAddTagsToSubResourceProposal(
-      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> tagUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata =
         (com.linkedin.schema.EditableSchemaMetadata)
@@ -455,7 +468,7 @@ private static void addTagsIfNotExists(GlobalTags tags, List<Urn> tagUrns)
   }
 
   private static MetadataChangeProposal buildAddTermsProposal(
-      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     if (resource.getSubResource() == null || resource.getSubResource().equals("")) {
       // Case 1: Adding terms to a top-level entity
@@ -467,7 +480,7 @@ private static MetadataChangeProposal buildAddTermsProposal(
   }
 
   private static MetadataChangeProposal buildRemoveTermsProposal(
-      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     if (resource.getSubResource() == null || resource.getSubResource().equals("")) {
       // Case 1: Removing terms from a top-level entity
@@ -479,7 +492,7 @@ private static MetadataChangeProposal buildRemoveTermsProposal(
   }
 
   private static MetadataChangeProposal buildAddTermsToEntityProposal(
-      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     com.linkedin.common.GlossaryTerms terms =
         (com.linkedin.common.GlossaryTerms)
@@ -500,7 +513,7 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal(
   }
 
   private static MetadataChangeProposal buildAddTermsToSubResourceProposal(
-      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService)
+      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService)
       throws URISyntaxException {
     com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata =
         (com.linkedin.schema.EditableSchemaMetadata)
@@ -526,7 +539,7 @@ private static MetadataChangeProposal buildAddTermsToSubResourceProposal(
   }
 
   private static MetadataChangeProposal buildRemoveTermsToEntityProposal(
-      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) {
+      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) {
     com.linkedin.common.GlossaryTerms terms =
         (com.linkedin.common.GlossaryTerms)
             EntityUtils.getAspectFromEntity(
@@ -542,7 +555,7 @@ private static MetadataChangeProposal buildRemoveTermsToEntityProposal(
   }
 
   private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal(
-      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) {
+      List<Urn> termUrns, ResourceRefInput resource, Urn actor, EntityService<?> entityService) {
     com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata =
         (com.linkedin.schema.EditableSchemaMetadata)
             EntityUtils.getAspectFromEntity(
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java
index b93c72edbcfc5..d82b8c17ff1b7 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java
@@ -28,7 +28,11 @@ public class LinkUtils {
   private LinkUtils() {}
 
   public static void addLink(
-      String linkUrl, String linkLabel, Urn resourceUrn, Urn actor, EntityService entityService) {
+      String linkUrl,
+      String linkLabel,
+      Urn resourceUrn,
+      Urn actor,
+      EntityService<?> entityService) {
     InstitutionalMemory institutionalMemoryAspect =
         (InstitutionalMemory)
             EntityUtils.getAspectFromEntity(
@@ -46,7 +50,7 @@ public static void addLink(
   }
 
   public static void removeLink(
-      String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) {
+      String linkUrl, Urn resourceUrn, Urn actor, EntityService<?> entityService) {
     InstitutionalMemory institutionalMemoryAspect =
         (InstitutionalMemory)
             EntityUtils.getAspectFromEntity(
@@ -109,7 +113,7 @@ public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U
   }
 
   public static Boolean validateAddRemoveInput(
-      String linkUrl, Urn resourceUrn, EntityService entityService) {
+      String linkUrl, Urn resourceUrn, EntityService<?> entityService) {
 
     try {
       new Url(linkUrl);
@@ -120,7 +124,7 @@ public static Boolean validateAddRemoveInput(
               resourceUrn));
     }
 
-    if (!entityService.exists(resourceUrn)) {
+    if (!entityService.exists(resourceUrn, true)) {
       throw new IllegalArgumentException(
           String.format(
               "Failed to change institutional memory for resource %s. Resource does not exist.",
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java
index 55d408d3f7aab..1dd9da97cb2f7 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java
@@ -202,16 +202,16 @@ public static void validateAuthorizedToUpdateOwners(
   }
 
   public static void validateAddOwnerInput(
-      List<OwnerInput> owners, Urn resourceUrn, EntityService entityService) {
+      List<OwnerInput> owners, Urn resourceUrn, EntityService<?> entityService) {
     for (OwnerInput owner : owners) {
       validateAddOwnerInput(owner, resourceUrn, entityService);
     }
   }
 
   public static void validateAddOwnerInput(
-      OwnerInput owner, Urn resourceUrn, EntityService entityService) {
+      OwnerInput owner, Urn resourceUrn, EntityService<?> entityService) {
 
-    if (!entityService.exists(resourceUrn)) {
+    if (!entityService.exists(resourceUrn, true)) {
       throw new IllegalArgumentException(
           String.format(
               "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn));
@@ -220,7 +220,7 @@ public static void validateAddOwnerInput(
     validateOwner(owner, entityService);
   }
 
-  public static void validateOwner(OwnerInput owner, EntityService entityService) {
+  public static void validateOwner(OwnerInput owner, EntityService<?> entityService) {
 
     OwnerEntityType ownerEntityType = owner.getOwnerEntityType();
     Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn());
@@ -241,7 +241,7 @@ public static void validateOwner(OwnerInput owner, EntityService entityService)
               ownerUrn));
     }
 
-    if (!entityService.exists(ownerUrn)) {
+    if (!entityService.exists(ownerUrn, true)) {
       throw new IllegalArgumentException(
           String.format(
               "Failed to change ownership for resource(s). Owner with urn %s does not exist.",
@@ -249,7 +249,7 @@ public static void validateOwner(OwnerInput owner, EntityService entityService)
     }
 
     if (owner.getOwnershipTypeUrn() != null
-        && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) {
+        && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()), true)) {
       throw new IllegalArgumentException(
           String.format(
               "Failed to change ownership for resource(s). Custom Ownership type with "
@@ -264,8 +264,8 @@ public static void validateOwner(OwnerInput owner, EntityService entityService)
     }
   }
 
-  public static void validateRemoveInput(Urn resourceUrn, EntityService entityService) {
-    if (!entityService.exists(resourceUrn)) {
+  public static void validateRemoveInput(Urn resourceUrn, EntityService<?> entityService) {
+    if (!entityService.exists(resourceUrn, true)) {
       throw new IllegalArgumentException(
           String.format(
               "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn));
@@ -276,17 +276,18 @@ public static void addCreatorAsOwner(
       QueryContext context,
       String urn,
       OwnerEntityType ownerEntityType,
-      EntityService entityService) {
+      EntityService<?> entityService) {
     try {
       Urn actorUrn = CorpuserUrn.createFromString(context.getActorUrn());
       OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER;
-      if (!entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) {
+      if (!entityService.exists(
+          UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())), true)) {
         log.warn("Technical owner does not exist, defaulting to None ownership.");
         ownershipType = OwnershipType.NONE;
       }
       String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name());
-      if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) {
+      if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn), true)) {
         throw new RuntimeException(
             String.format("Unknown ownership type urn %s", ownershipTypeUrn));
       }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java
index 7b9290b4532b5..e548c6f3eae07 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java
@@ -33,7 +33,7 @@
 public class SetTagColorResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
   private final EntityClient _entityClient;
-  private final EntityService
+  private final EntityService<?>
       _entityService; // TODO: Remove this when 'exists' added to EntityClient
 
   @Override
@@ -53,7 +53,7 @@ public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throw
     }
 
     // If tag does not exist, then throw exception.
-    if (!_entityService.exists(tagUrn)) {
+    if (!_entityService.exists(tagUrn, true)) {
       throw new IllegalArgumentException(
           String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn));
     }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
index f60f34dd7a085..81849df320e57 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
@@ -33,7 +33,7 @@ public HyperParameterValueType apply(
     } else if (input.isDouble()) {
       result = new FloatBox(input.getDouble());
     } else if (input.isFloat()) {
-      result = new FloatBox(new Double(input.getFloat()));
+      result = new FloatBox(Double.valueOf(input.getFloat()));
     } else {
       throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString());
     }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
index 0d87ce4b2e2ad..3f228efafac42 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
@@ -2,10 +2,11 @@
 
 import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
 import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.assertThrows;
 import static org.testng.Assert.assertTrue;
 
-import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.LineageEdge;
@@ -16,8 +17,10 @@
 import graphql.schema.DataFetchingEnvironment;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.CompletionException;
 import org.joda.time.DateTimeUtils;
 import org.mockito.Mockito;
@@ -64,10 +67,8 @@ public void testUpdateDatasetLineage() throws Exception {
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true);
+    Mockito.when(_mockService.exists(any(Collection.class), eq(true)))
+        .thenAnswer(args -> args.getArgument(0));
 
     assertTrue(resolver.get(_mockEnv).get());
   }
@@ -79,8 +80,7 @@ public void testFailUpdateWithMissingDownstream() throws Exception {
     mockInputAndContext(edgesToAdd, new ArrayList<>());
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(false);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(false);
+    Mockito.when(_mockService.exists(any(Collection.class), eq(true))).thenAnswer(args -> Set.of());
 
     assertThrows(CompletionException.class, () -> resolver.get(_mockEnv).join());
   }
@@ -93,9 +93,8 @@ public void testUpdateChartLineage() throws Exception {
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
-    Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true);
+    Mockito.when(_mockService.exists(any(Collection.class), eq(true)))
+        .thenAnswer(args -> args.getArgument(0));
 
     assertTrue(resolver.get(_mockEnv).get());
   }
@@ -112,10 +111,8 @@ public void testUpdateDashboardLineage() throws Exception {
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
-    Mockito.when(_mockService.exists(Urn.createFromString(DASHBOARD_URN))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true);
+    Mockito.when(_mockService.exists(any(Collection.class), eq(true)))
+        .thenAnswer(args -> args.getArgument(0));
 
     assertTrue(resolver.get(_mockEnv).get());
   }
@@ -133,11 +130,8 @@ public void testUpdateDataJobLineage() throws Exception {
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
-    Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_1))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_2))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true);
+    Mockito.when(_mockService.exists(any(Collection.class), eq(true)))
+        .thenAnswer(args -> args.getArgument(0));
 
     assertTrue(resolver.get(_mockEnv).get());
   }
@@ -153,15 +147,13 @@ public void testFailUpdateLineageNoPermissions() throws Exception {
 
     QueryContext mockContext = getMockDenyContext();
     UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove);
-    Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(_mockEnv.getContext()).thenReturn(mockContext);
 
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true);
-    Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true);
+    Mockito.when(_mockService.exists(any(Collection.class), eq(true)))
+        .thenAnswer(args -> args.getArgument(0));
 
     assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join());
   }
@@ -169,7 +161,7 @@ public void testFailUpdateLineageNoPermissions() throws Exception {
 
   private void mockInputAndContext(List<LineageEdge> edgesToAdd, List<LineageEdge> edgesToRemove) {
     QueryContext mockContext = getMockAllowContext();
     UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove);
-    Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
+    Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input);
     Mockito.when(_mockEnv.getContext()).thenReturn(mockContext);
   }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
index 019d254ffdaac..f09ead41e5c46 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
@@ -1,6 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.assertion;
 
 import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;
 
 import com.datahub.authentication.Authentication;
@@ -31,7 +32,8 @@ public void testGetSuccess() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)))
+        .thenReturn(true);
     Mockito.when(
             mockService.getAspect(
                 Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
@@ -49,24 +51,23 @@ public void testGetSuccess() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN);
+    Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
     Mockito.verify(mockClient, Mockito.times(1))
         .deleteEntity(
-            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-            Mockito.any(Authentication.class));
+            eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class));
 
     Mockito.verify(mockService, Mockito.times(1))
         .getAspect(
-            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-            Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
-            Mockito.eq(0L));
+            eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            eq(Constants.ASSERTION_INFO_ASPECT_NAME),
+            eq(0L));
 
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
+        .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true));
   }
 
   @Test
@@ -74,7 +75,8 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)))
+        .thenReturn(true);
     Mockito.when(
             mockService.getAspect(
                 Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
@@ -85,24 +87,23 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN);
+    Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
     Mockito.verify(mockClient, Mockito.times(1))
         .deleteEntity(
-            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-            Mockito.any(Authentication.class));
+            eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class));
 
     Mockito.verify(mockService, Mockito.times(1))
         .getAspect(
-            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-            Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
-            Mockito.eq(0L));
+            eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            eq(Constants.ASSERTION_INFO_ASPECT_NAME),
+            eq(0L));
 
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
+        .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true));
   }
 
   @Test
@@ -111,32 +112,32 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)))
+        .thenReturn(false);
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN);
+    Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
     Mockito.verify(mockClient, Mockito.times(0))
         .deleteEntity(
-            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-            Mockito.any(Authentication.class));
+            eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class));
 
     Mockito.verify(mockClient, Mockito.times(0))
         .batchGetV2(
-            Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-            Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))),
-            Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)),
+            eq(Constants.ASSERTION_ENTITY_NAME),
+            eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))),
+            eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)),
             Mockito.any(Authentication.class));
 
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
+        .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true));
   }
 
   @Test
@@ -144,7 +145,8 @@ public void testGetUnauthorized() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)))
+        .thenReturn(true);
     Mockito.when(
             mockService.getAspect(
                 Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
@@ -161,7 +163,7 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN);
+    Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -178,14 +180,15 @@ public void testGetEntityClientException() throws Exception {
         .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
 
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)))
+        .thenReturn(true);
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN);
+    Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java
index 56b01be29e163..f83adf33d521a 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java
@@ -2,6 +2,7 @@
 
 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;
 
 import com.google.common.collect.ImmutableList;
@@ -47,8 +48,10 @@ public void testGetSuccessNoExistingStatus() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(null);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
     BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService);
 
@@ -94,8 +97,10 @@ public void testGetSuccessExistingStatus() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(originalStatus);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
     BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService);
 
@@ -138,8 +143,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(null);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
     BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService);
 
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java
index be7f200a6b9d7..f25d5a4cbbf04 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java
@@ -2,6 +2,7 @@
 
 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;
 
 import com.google.common.collect.ImmutableList;
@@ -48,8 +49,10 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(null);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
     BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService);
 
@@ -109,8 +112,10 @@ public void testGetSuccessExistingDeprecation() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(originalDeprecation);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
     BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService);
 
@@ -163,8 +168,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(null);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
     BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService);
 
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java
index e4be330f5ba2a..f4b45b3dc8f29 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java
@@ -2,6 +2,7 @@
 
 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;
 
 import com.datahub.authentication.Authentication;
@@ -45,9 +46,9 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
 
     Mockito.when(
             mockClient.batchGetV2(
-                Mockito.eq(Constants.DATASET_ENTITY_NAME),
-                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-                Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
+                eq(Constants.DATASET_ENTITY_NAME),
+                eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
                 Mockito.any(Authentication.class)))
         .thenReturn(
             ImmutableMap.of(
@@ -58,7 +59,8 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
 
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
 
     UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService);
 
@@ -66,7 +68,7 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     resolver.get(mockEnv).get();
@@ -81,10 +83,10 @@ public void testGetSuccessNoExistingDeprecation() throws Exception {
             UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation);
 
     Mockito.verify(mockClient, Mockito.times(1))
-        .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));
+        .ingestProposal(eq(proposal), Mockito.any(Authentication.class), eq(false));
 
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }
 
   @Test
@@ -101,9 +103,9 @@ public void testGetSuccessExistingDeprecation() throws Exception {
 
     Mockito.when(
             mockClient.batchGetV2(
-                Mockito.eq(Constants.DATASET_ENTITY_NAME),
-                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-                Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
+                eq(Constants.DATASET_ENTITY_NAME),
+                eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
                 Mockito.any(Authentication.class)))
         .thenReturn(
             ImmutableMap.of(
@@ -119,7 +121,8 @@ public void testGetSuccessExistingDeprecation() throws Exception {
                                     .setValue(new Aspect(originalDeprecation.data())))))));
 
     EntityService mockService = Mockito.mock(EntityService.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(true);
 
     UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService);
@@ -127,7 +130,7 @@ public void testGetSuccessExistingDeprecation() throws Exception {
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     resolver.get(mockEnv).get();
@@ -142,10 +145,10 @@ public void testGetSuccessExistingDeprecation() throws Exception {
             UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation);
 
     Mockito.verify(mockClient, Mockito.times(1))
-        .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));
+        .ingestProposal(eq(proposal), Mockito.any(Authentication.class), eq(false));
 
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
+        .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true));
   }
 
   @Test
@@ -155,9 +158,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
 
     Mockito.when(
             mockClient.batchGetV2(
-                Mockito.eq(Constants.DATASET_ENTITY_NAME),
-                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-                Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
+                eq(Constants.DATASET_ENTITY_NAME),
+                eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)),
                 Mockito.any(Authentication.class)))
        .thenReturn(
             ImmutableMap.of(
@@ -168,7 +171,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
 
     EntityService mockService = Mockito.mock(EntityService.class);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
+        .thenReturn(false);
 
     UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService);
 
@@ -176,7 +180,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     QueryContext mockContext = getMockAllowContext();
     Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString());
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
@@ -193,7 +197,7 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -214,7 +218,7 @@ public void testGetEntityClientException() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
+    Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
index 32f0d30e7751a..81343b75f7d7e 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java
@@ -2,6 +2,7 @@
 
 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;
 
 import com.google.common.collect.ImmutableList;
@@ -53,11 +54,15 @@ public void testGetSuccessNoExistingDomains() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(null);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)))
+        .thenReturn(true);
 
     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
 
@@ -88,7 +93,7 @@ public void testGetSuccessNoExistingDomains() throws Exception {
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
 
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true));
   }
 
   @Test
@@ -113,11 +118,15 @@ public void testGetSuccessExistingDomains() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(originalDomain);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)))
+        .thenReturn(true);
 
     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
 
@@ -153,7 +162,7 @@ public void testGetSuccessExistingDomains() throws Exception {
     verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2));
 
     Mockito.verify(mockService, Mockito.times(1))
-        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)));
+        .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true));
   }
 
   @Test
@@ -178,11 +187,15 @@ public void testGetSuccessUnsetDomains() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(originalDomain);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)))
+        .thenReturn(true);
 
     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
 
@@ -222,8 +235,10 @@ public void testGetFailureDomainDoesNotExist() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(null);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(false);
 
     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
 
@@ -260,9 +275,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
             Mockito.eq(0L)))
         .thenReturn(null);
 
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true)))
+        .thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true)))
+        .thenReturn(true);
 
     BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService);
 
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
index a0eff5d0574db..1aa7f5aef467c 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java
@@ -2,6 +2,7 @@
 
 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.assertThrows;
 import static org.testng.Assert.assertTrue;
 
@@ -73,7 +74,8 @@ private MetadataChangeProposal setupTests(
 public void testGetSuccess() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);
 
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
@@ -92,7 +94,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);
 
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
@@ -115,7 +118,8 @@ public void testGetFailureParentDoesNotExist() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(false);
 
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT);
@@ -130,7 +134,8 @@ public void testGetFailureParentIsNotDomain() throws Exception {
     EntityService mockService = Mockito.mock(EntityService.class);
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true)))
+        .thenReturn(true);
 
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java
index ad5ad2315ce43..7b8d11802792b 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java
@@ -2,6 +2,7 @@
 
 import static com.linkedin.datahub.graphql.TestUtils.*;
 import static com.linkedin.metadata.Constants.*;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.testng.Assert.*;
 
 import com.datahub.authentication.Authentication;
@@ -58,8 +59,10 @@ public void testGetSuccessNoExistingDomains() throws Exception {
                     .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
 
     EntityService mockService = getMockEntityService();
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -82,10 +85,10 @@ public void testGetSuccessNoExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)); } @Test @@ -119,8 +122,10 @@ public void testGetSuccessExistingDomains() throws Exception { .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -143,10 +148,10 @@ public void testGetSuccessExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)); } @Test @@ -170,8 +175,10 @@ public void testGetFailureDomainDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(false); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -208,8 +215,10 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index 7e6e258168898..7ac45fe98b131 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -57,7 +58,8 @@ public void testGetSuccessNoExistingDomains() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -77,7 +79,7 @@ public void testGetSuccessNoExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -111,7 +113,8 @@ public void testGetSuccessExistingDomains() throws Exception { .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -131,7 +134,7 @@ public void testGetSuccessExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -155,7 +158,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index 241951319c75e..ed04a14ed7c3a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static 
org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -47,7 +48,8 @@ public void testGetSuccessNoExistingEmbed() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -68,7 +70,7 @@ public void testGetSuccessNoExistingEmbed() throws Exception { ; Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -85,7 +87,8 @@ public void testGetSuccessExistingEmbed() throws Exception { Mockito.eq(0L))) .thenReturn(originalEmbed); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -105,7 +108,7 @@ public void testGetSuccessExistingEmbed() throws Exception { verifySingleIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -128,7 +131,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java index fa8b1d6a747ca..c3c9ccea6d270 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java @@ -3,6 +3,7 @@ import static org.mockito.Mockito.*; import static org.testng.Assert.*; +import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import org.testng.annotations.BeforeMethod; @@ -33,7 +34,7 @@ public void testFailsNullEntity() { @Test public void testPasses() throws Exception { when(_dataFetchingEnvironment.getArgument("urn")).thenReturn(ENTITY_URN_STRING); - when(_entityService.exists(any())).thenReturn(true); + when(_entityService.exists(any(Urn.class), eq(true))).thenReturn(true); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 287d270ab569c..8c5b1d7607027 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -28,9 +29,9 @@ private EntityService setUpService() { EntityService mockService = getMockEntityService(); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + eq(0L))) .thenReturn(null); return mockService; } @@ -39,9 +40,12 @@ private EntityService setUpService() { public void testGetSuccessIsRelatedNonExistent() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); @@ -52,26 +56,29 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TermRelationshipType.isA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test public void testGetSuccessHasRelatedNonExistent() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); @@ -82,24 +89,25 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { TEST_ENTITY_URN, 
ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test public void testGetFailAddSelfAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); @@ -108,7 +116,7 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { RelatedTermsInput input = new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -119,7 +127,8 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { public void testGetFailAddNonTermAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); @@ -128,7 +137,7 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { RelatedTermsInput input = new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -139,8 +148,10 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(false); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); @@ -149,7 +160,7 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { RelatedTermsInput input = 
new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -160,8 +171,10 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { public void testGetFailAddToNonExistentUrn() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); @@ -170,7 +183,7 @@ public void testGetFailAddToNonExistentUrn() throws Exception { RelatedTermsInput input = new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -181,8 +194,10 @@ public void testGetFailAddToNonExistentUrn() throws Exception { public void testGetFailAddToNonTerm() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(DATASET_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(DATASET_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); @@ -191,7 +206,7 @@ public void testGetFailAddToNonTerm() throws Exception { RelatedTermsInput input = new RelatedTermsInput( DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -202,9 +217,12 @@ public void testGetFailAddToNonTerm() throws Exception { public void testFailNoPermissions() throws Exception { EntityService mockService = setUpService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); 
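// The hunks above and below apply one mechanical rewrite: EntityService.exists()
// now takes a second boolean argument (assumed here to control whether
// soft-deleted entities count as existing), so every stub and verification in
// these tests passes eq(true) for it. A minimal before/after sketch, reusing
// this file's TEST_ENTITY_URN constant and the statically imported
// org.mockito.ArgumentMatchers.eq:
//
//   // before: the single-argument call could be stubbed with a raw Urn value
//   Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN)))
//       .thenReturn(true);
//
//   // after: the Urn is wrapped in eq(...) as well, because once any argument
//   // uses a Mockito matcher, every argument in that call must use a matcher
//   Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)))
//       .thenReturn(true);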
@@ -215,7 +233,7 @@ public void testFailNoPermissions() throws Exception { TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TermRelationshipType.isA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java index 7229d2acf763d..f4d4c528dc0c6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -26,7 +27,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_URN)), eq(true))) + .thenReturn(true); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -50,7 +52,8 @@ public void testGetEntityClientException() throws Exception { .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_URN)), eq(true))) + .thenReturn(true); DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 47de668b2c9dc..e46d8b1503d9e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -41,7 +42,8 @@ public void testGetSuccessIsA() throws Exception { Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); @@ -56,7 +58,7 @@ public void 
testGetSuccessIsA() throws Exception { assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -73,7 +75,8 @@ public void testGetSuccessHasA() throws Exception { Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); @@ -88,7 +91,7 @@ public void testGetSuccessHasA() throws Exception { assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -101,7 +104,8 @@ public void testFailAspectDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); @@ -131,7 +135,8 @@ public void testFailNoPermissions() throws Exception { Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); @@ -146,6 +151,6 @@ public void testFailNoPermissions() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); Mockito.verify(mockService, Mockito.times(0)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index 3972715fcefb1..062c1da5e038d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -61,7 +62,7 @@ private MetadataChangeProposal setupTests( public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -76,7 
+77,7 @@ public void testGetSuccess() throws Exception { public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(NODE_URN)), eq(true))).thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_NODE); @@ -106,7 +107,8 @@ public void testGetSuccessForNode() throws Exception { public void testGetSuccessForDomain() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_DOMAIN); @@ -148,7 +150,8 @@ public void testGetSuccessForDomain() throws Exception { public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index 74a59b10a40b0..cdab78023b846 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -63,8 +64,9 @@ private MetadataChangeProposal setupTests( public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -80,8 +82,9 @@ public void testGetSuccess() throws Exception { public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - 
Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(NODE_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE); @@ -114,8 +117,10 @@ public void testGetSuccessForNode() throws Exception { public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(false); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -131,8 +136,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception { public void testGetFailureNodeDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -148,8 +154,9 @@ public void testGetFailureNodeDoesNotExist() throws Exception { public void testGetFailureParentIsNotNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java index 6bd5b4f8c3f38..21d1e0caa1bb2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java @@ -13,6 +13,7 @@ import com.linkedin.metadata.entity.EntityService; import 
graphql.schema.DataFetchingEnvironment; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; @@ -79,7 +80,8 @@ public void testReordering() throws Exception { CompletableFuture.completedFuture( ImmutableList.of(mockResponseEntity2, mockResponseEntity1)); when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); - when(_entityService.exists(any())).thenReturn(true); + when(_entityService.exists(any(List.class), eq(true))) + .thenAnswer(args -> Set.of(args.getArgument(0))); List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); assertEquals(batchGetResponse.size(), 2); assertEquals(batchGetResponse.get(0), mockResponseEntity1); @@ -108,7 +110,8 @@ public void testDuplicateUrns() throws Exception { CompletableFuture mockFuture = CompletableFuture.completedFuture(ImmutableList.of(mockResponseEntity)); when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); - when(_entityService.exists(any())).thenReturn(true); + when(_entityService.exists(any(List.class), eq(true))) + .thenAnswer(args -> Set.of(args.getArgument(0))); List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); assertEquals(batchGetResponse.size(), 2); assertEquals(batchGetResponse.get(0), mockResponseEntity); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index 3fee28bc31725..f8fe38187b30d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -22,7 +23,8 @@ public class UpdateUserSettingResolverTest { @Test public void testWriteCorpUserSettings() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_USER_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_USER_URN)), eq(true))) + .thenReturn(true); UpdateUserSettingResolver resolver = new UpdateUserSettingResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 5e199f2c6b2c7..4bd16d5311818 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -45,16 +47,21 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.eq(0L))) .thenReturn(null); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -84,10 +91,10 @@ public void testGetSuccessNoExistingOwners() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)); } @Test @@ -112,15 +119,19 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { Mockito.eq(0L))) .thenReturn(oldOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -147,7 +158,7 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); } @Test @@ -172,15 +183,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { Mockito.eq(0L))) .thenReturn(oldOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(Urn.class), eq(true))).thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + 
.name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -207,7 +219,7 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); } @Test @@ -232,24 +244,32 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { Mockito.eq(0L))) .thenReturn(oldOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -288,13 +308,13 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true)); } @Test @@ -308,8 +328,10 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(false); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -343,8 +365,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) 
.thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -398,7 +422,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); + .ingestProposal(any(AspectsBatchImpl.class), Mockito.anyBoolean()); AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index 92960f45232b5..cb607adf45c0a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.owner; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -52,18 +53,24 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -99,10 +106,10 @@ public void testGetSuccessNoExistingOwners() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)); } @Test @@ -131,26 +138,34 @@ public void 
testGetSuccessExistingOwners() throws Exception { Mockito.eq(0L))) .thenReturn(originalOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -186,10 +201,10 @@ public void testGetSuccessExistingOwners() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)); } @Test @@ -203,8 +218,10 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(false); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -256,9 +273,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + 
.thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index 10c95c1bac648..84e0f6f282a7b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.owner; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -37,22 +38,26 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -66,7 +71,7 @@ public void testGetSuccessNoExistingOwners() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -88,9 +93,9 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(oldOwners1); final Ownership oldOwners2 = @@ -104,16 +109,20 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) 
.thenReturn(oldOwners2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -127,7 +136,7 @@ public void testGetSuccessExistingOwners() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -140,20 +149,23 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -167,7 +179,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -189,7 +201,7 @@ public void testGetUnauthorized() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = 
getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -217,7 +229,7 @@ public void testGetEntityClientException() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 2468cef0e1216..1898753e5ae76 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -42,9 +43,12 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -73,10 +77,10 @@ public void testGetSuccessNoExistingTags() throws Exception { verifyIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -97,9 +101,12 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -128,10 +135,10 @@ public void testGetSuccessExistingTags() throws Exception { verifyIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)) - 
.exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -145,8 +152,10 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -173,8 +182,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index c174d917748eb..82dd13ee29e8a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -53,11 +54,15 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -92,10 +97,10 @@ public void testGetSuccessNoExistingTags() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), 
eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -123,11 +128,15 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -162,10 +171,10 @@ public void testGetSuccessExistingTags() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -179,8 +188,10 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -216,9 +227,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index ba75b41388587..83de3acfb4c94 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -55,11 +56,15 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -127,11 +132,15 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(oldTags2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -178,9 +187,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index 6ae72fcbb7268..f7929012ccb68 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -47,7 +48,8 @@ public void testGetSuccessExistingProperties() throws Exception { Mockito.eq(0L))) .thenReturn(oldTagProperties); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -69,7 +71,7 @@ public void testGetSuccessExistingProperties() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -86,7 +88,8 @@ public void testGetFailureNoExistingProperties() throws Exception { Mockito.eq(0))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -131,7 +134,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index 397bb533ff871..d0697df3f2f6c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -34,14 +35,17 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + 
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -51,19 +55,19 @@ public void testGetSuccessNoExistingTerms() throws Exception { AddTermsInput input = new AddTermsInput( ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)) - .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test @@ -80,14 +84,17 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -97,19 +104,19 @@ public void testGetSuccessExistingTerms() throws Exception { AddTermsInput input = new AddTermsInput( ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)) - .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test @@ -118,13 
+125,15 @@ public void testGetFailureTermDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(false); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -133,7 +142,7 @@ public void testGetFailureTermDoesNotExist() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -147,13 +156,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -162,7 +173,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -180,7 +191,7 @@ public void testGetUnauthorized() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -204,7 +215,7 @@ public void testGetEntityClientException() throws Exception { QueryContext mockContext = getMockAllowContext(); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 2c85e870dd6ac..b3700632f56cd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -37,24 +38,26 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -68,17 +71,17 @@ public void testGetSuccessNoExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test @@ -95,24 +98,26 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( 
- Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -126,17 +131,17 @@ public void testGetSuccessExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test @@ -145,13 +150,14 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(false); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -163,7 +169,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { new BatchAddTermsInput( ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -176,20 +182,22 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -203,7 +211,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -224,7 +232,7 @@ public void testGetUnauthorized() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -249,7 +257,7 @@ public void testGetEntityClientException() throws Exception { new BatchAddTermsInput( ImmutableList.of(TEST_GLOSSARY_TERM_1_URN), ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index c2520f4dfb712..a76a813802b94 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; 
+import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -37,22 +38,26 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -65,7 +70,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -88,9 +93,9 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms1); final GlossaryTerms oldTerms2 = @@ -103,16 +108,20 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + 
.thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -125,7 +134,7 @@ public void testGetSuccessExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -138,20 +147,23 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -164,7 +176,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -185,7 +197,7 @@ public void testGetUnauthorized() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -212,7 +224,7 @@ public void testGetEntityClientException() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java 
b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
index 34921e4182b10..9b3f42a37b45d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
@@ -625,7 +625,7 @@ public SearchResult filter(
   @Override
   public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication)
       throws RemoteInvocationException {
-    return _entityService.exists(urn);
+    return _entityService.exists(urn, true);
   }
 
   @SneakyThrows
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
index 2e19916ee3c8f..ed69e919a7b24 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
@@ -93,6 +93,7 @@
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -1782,7 +1783,8 @@ private EntityResponse toEntityResponse(
     return response;
   }
 
-  private Map<String, Set<String>> buildEntityToValidAspects(final EntityRegistry entityRegistry) {
+  private static Map<String, Set<String>> buildEntityToValidAspects(
+      final EntityRegistry entityRegistry) {
     return entityRegistry.getEntitySpecs().values().stream()
         .collect(
             Collectors.toMap(
@@ -1950,36 +1952,54 @@ public RollbackRunResult deleteUrn(Urn urn) {
   }
 
   /**
-   * Returns true if the entity exists (has materialized aspects)
+   * Returns a set of urns of entities that exist (has materialized aspects).
    *
-   * @param urn the urn of the entity to check
-   * @return true if the entity exists, false otherwise
+   * @param urns the list of urns of the entities to check
+   * @param includeSoftDeleted whether to consider soft delete
+   * @return a set of urns of entities that exist.
    */
   @Override
-  public Boolean exists(Urn urn) {
-    final Set<String> aspectsToFetch = getEntityAspectNames(urn);
-    final List<EntityAspectIdentifier> dbKeys =
-        aspectsToFetch.stream()
+  public Set<Urn> exists(@Nonnull final Collection<Urn> urns, boolean includeSoftDeleted) {
+    final Set<EntityAspectIdentifier> dbKeys =
+        urns.stream()
             .map(
-                aspectName ->
-                    new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION))
-            .collect(Collectors.toList());
+                urn ->
+                    new EntityAspectIdentifier(
+                        urn.toString(),
+                        _entityRegistry
+                            .getEntitySpec(urn.getEntityType())
+                            .getKeyAspectSpec()
+                            .getName(),
+                        ASPECT_LATEST_VERSION))
+            .collect(Collectors.toSet());
 
-    Map<EntityAspectIdentifier, EntityAspect> aspects = _aspectDao.batchGet(new HashSet<>(dbKeys));
-    return aspects.values().stream().anyMatch(aspect -> aspect != null);
-  }
+    final Map<EntityAspectIdentifier, EntityAspect> aspects = _aspectDao.batchGet(dbKeys);
+    final Set<String> existingUrnStrings =
+        aspects.values().stream()
+            .filter(aspect -> aspect != null)
+            .map(aspect -> aspect.getUrn())
+            .collect(Collectors.toSet());
 
-  /**
-   * Returns true if an entity is soft-deleted.
-   *
-   * @param urn the urn to check
-   * @return true is the entity is soft deleted, false otherwise.
-   */
-  @Override
-  public Boolean isSoftDeleted(@Nonnull final Urn urn) {
-    Objects.requireNonNull(urn, "urn is required");
-    final RecordTemplate statusAspect = getLatestAspect(urn, STATUS_ASPECT_NAME);
-    return statusAspect != null && ((Status) statusAspect).isRemoved();
+    Set<Urn> existing =
+        urns.stream()
+            .filter(urn -> existingUrnStrings.contains(urn.toString()))
+            .collect(Collectors.toSet());
+
+    if (includeSoftDeleted) {
+      return existing;
+    } else {
+      // Additionally exclude status.removed == true
+      Map<Urn, List<RecordTemplate>> statusResult =
+          getLatestAspects(existing, Set.of(STATUS_ASPECT_NAME));
+      return existing.stream()
+          .filter(
+              urn ->
+                  // key aspect is always returned, make sure to only consider the status aspect
+                  statusResult.getOrDefault(urn, List.of()).stream()
+                      .filter(aspect -> STATUS_ASPECT_NAME.equals(aspect.schema().getName()))
+                      .noneMatch(aspect -> ((Status) aspect).isRemoved()))
+          .collect(Collectors.toSet());
+    }
   }
 
   @Override
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
index 459b2d183d7ac..4d3ac9a550553 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
@@ -6,11 +6,9 @@
 import com.datahub.util.RecordUtils;
 import com.google.common.base.Preconditions;
 import com.linkedin.common.AuditStamp;
-import com.linkedin.common.Status;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.schema.RecordDataSchema;
 import com.linkedin.data.template.RecordTemplate;
-import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl;
 import com.linkedin.metadata.entity.validation.EntityRegistryUrnValidator;
 import com.linkedin.metadata.entity.validation.RecordTemplateValidator;
@@ -157,27 +155,6 @@ public static SystemMetadata parseSystemMetadata(String jsonSystemMetadata) {
     return RecordUtils.toRecordTemplate(SystemMetadata.class, jsonSystemMetadata);
   }
 
-  /** Check if entity is removed (removed=true in Status aspect) and exists */
-  public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) {
-    try {
-
-      if (!entityService.exists(entityUrn)) {
-        return false;
-      }
-
-      EnvelopedAspect statusAspect =
-          entityService.getLatestEnvelopedAspect(entityUrn.getEntityType(), entityUrn, "status");
-      if (statusAspect == null) {
-        return false;
-      }
-      Status status = new Status(statusAspect.getValue().data());
-      return status.isRemoved();
-    } catch (Exception e) {
-      log.error("Error while checking if {} is removed", entityUrn, e);
-      return false;
-    }
-  }
-
   public static RecordTemplate buildKeyAspect(
       @Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) {
     final EntitySpec spec = entityRegistry.getEntitySpec(urnToEntityName(urn));
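The hunks above replace the per-urn, all-aspect existence probe with a single batched key-aspect lookup, and fold the old isSoftDeleted check into the same call. A minimal sketch of the resulting API from a caller's perspective (the entityService variable and urn values here are illustrative, not part of the patch):

    // Batched form: one key-aspect fetch covers every urn in the collection.
    Set<Urn> candidates =
        Set.of(
            UrnUtils.getUrn("urn:li:corpuser:datahub"),
            UrnUtils.getUrn("urn:li:corpuser:unknown"));
    // includeSoftDeleted == false additionally drops urns whose Status aspect has
    // removed == true, which previously required EntityUtils.checkIfRemoved().
    Set<Urn> live = entityService.exists(candidates, false);

    // Single-urn overload, as used by JavaEntityClient above; passing true keeps
    // the old semantics where a soft-deleted entity still counts as existing.
    boolean existsAtAll = entityService.exists(UrnUtils.getUrn("urn:li:corpuser:datahub"), true);
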
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java
index 35d75de482007..f5c783014caa1 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java
@@ -4,15 +4,11 @@
 import com.datahub.util.exception.ESQueryException;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
-import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants;
 import com.linkedin.metadata.datahubusage.DataHubUsageEventType;
 import com.linkedin.metadata.entity.EntityService;
-import com.linkedin.metadata.entity.EntityUtils;
-import com.linkedin.metadata.recommendation.EntityProfileParams;
 import com.linkedin.metadata.recommendation.RecommendationContent;
-import com.linkedin.metadata.recommendation.RecommendationParams;
 import com.linkedin.metadata.recommendation.RecommendationRenderType;
 import com.linkedin.metadata.recommendation.RecommendationRequestContext;
 import com.linkedin.metadata.recommendation.ScenarioType;
@@ -22,7 +18,6 @@
 import io.opentelemetry.extension.annotations.WithSpan;
 import java.io.IOException;
 import java.util.List;
-import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
@@ -37,12 +32,13 @@
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.aggregations.AggregationBuilder;
 import org.opensearch.search.aggregations.AggregationBuilders;
+import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation;
 import org.opensearch.search.aggregations.bucket.terms.ParsedTerms;
 import org.opensearch.search.builder.SearchSourceBuilder;
 
 @Slf4j
 @RequiredArgsConstructor
-public class MostPopularSource implements RecommendationSource {
+public class MostPopularSource implements EntityRecommendationSource {
   /** Entity Types that should be in scope for this type of recommendation. */
   private static final Set<String> SUPPORTED_ENTITY_TYPES =
       ImmutableSet.of(
@@ -59,7 +55,7 @@ public class MostPopularSource implements RecommendationSource {
 
   private final RestHighLevelClient _searchClient;
   private final IndexConvention _indexConvention;
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
 
   private static final String DATAHUB_USAGE_INDEX = "datahub_usage_event";
   private static final String ENTITY_AGG_NAME = "entity";
@@ -107,10 +103,11 @@ public List<RecommendationContent> getRecommendations(
           _searchClient.search(searchRequest, RequestOptions.DEFAULT);
       // extract results
       ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME);
-      return parsedTerms.getBuckets().stream()
-          .map(bucket -> buildContent(bucket.getKeyAsString()))
-          .filter(Optional::isPresent)
-          .map(Optional::get)
+      List<String> bucketUrns =
+          parsedTerms.getBuckets().stream()
+              .map(MultiBucketsAggregation.Bucket::getKeyAsString)
+              .collect(Collectors.toList());
+      return buildContent(bucketUrns, _entityService)
           .limit(MAX_CONTENT)
          .collect(Collectors.toList());
     } catch (Exception e) {
@@ -119,6 +116,11 @@
     }
   }
 
+  @Override
+  public Set<String> getSupportedEntityTypes() {
+    return SUPPORTED_ENTITY_TYPES;
+  }
+
   private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
     // TODO: Proactively filter for entity types in the supported set.
     SearchRequest request = new SearchRequest();
@@ -142,20 +144,4 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
     request.indices(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX));
     return request;
   }
-
-  private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) {
-    Urn entity = UrnUtils.getUrn(entityUrn);
-    if (EntityUtils.checkIfRemoved(_entityService, entity)
-        || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
-      return Optional.empty();
-    }
-
-    return Optional.of(
-        new RecommendationContent()
-            .setEntity(entity)
-            .setValue(entityUrn)
-            .setParams(
-                new RecommendationParams()
-                    .setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
-  }
 }
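With the private buildContent helper deleted, MostPopularSource now leans on a shared buildContent(urns, entityService) default method on the new EntityRecommendationSource interface. The interface file itself is outside this excerpt, so the following is only a plausible reconstruction, inferred from the call sites (they pass the bucket urn strings plus the entity service and then chain .limit(...).collect(...), so a Stream return type is assumed):

    // Hypothetical sketch of the shared default method, not the literal source.
    default Stream<RecommendationContent> buildContent(
        @Nonnull Collection<String> urnStrings, EntityService<?> entityService) {
      Set<Urn> urns =
          urnStrings.stream()
              .map(UrnUtils::getUrn)
              .filter(urn -> getSupportedEntityTypes().contains(urn.getEntityType()))
              .collect(Collectors.toSet());
      // One batched existence check, excluding soft-deleted entities, replaces the
      // per-urn EntityUtils.checkIfRemoved() lookups removed by this patch.
      Set<Urn> existing = entityService.exists(urns, false);
      return urns.stream()
          .filter(existing::contains)
          .map(
              urn ->
                  new RecommendationContent()
                      .setEntity(urn)
                      .setValue(urn.toString())
                      .setParams(
                          new RecommendationParams()
                              .setEntityProfileParams(new EntityProfileParams().setUrn(urn))));
    }
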
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java
index 0815ffadd05c1..127b0f5c342c7 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java
@@ -4,15 +4,11 @@
 import com.datahub.util.exception.ESQueryException;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
-import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants;
 import com.linkedin.metadata.datahubusage.DataHubUsageEventType;
 import com.linkedin.metadata.entity.EntityService;
-import com.linkedin.metadata.entity.EntityUtils;
-import com.linkedin.metadata.recommendation.EntityProfileParams;
 import com.linkedin.metadata.recommendation.RecommendationContent;
-import com.linkedin.metadata.recommendation.RecommendationParams;
 import com.linkedin.metadata.recommendation.RecommendationRenderType;
 import com.linkedin.metadata.recommendation.RecommendationRequestContext;
 import com.linkedin.metadata.recommendation.ScenarioType;
@@ -22,7 +18,6 @@
 import io.opentelemetry.extension.annotations.WithSpan;
 import java.io.IOException;
 import java.util.List;
-import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
@@ -38,12 +33,13 @@
 import org.opensearch.search.aggregations.AggregationBuilder;
 import org.opensearch.search.aggregations.AggregationBuilders;
 import org.opensearch.search.aggregations.BucketOrder;
+import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation;
 import org.opensearch.search.aggregations.bucket.terms.ParsedTerms;
 import org.opensearch.search.builder.SearchSourceBuilder;
 
 @Slf4j
 @RequiredArgsConstructor
-public class RecentlyEditedSource implements RecommendationSource {
+public class RecentlyEditedSource implements EntityRecommendationSource {
   /** Entity Types that should be in scope for this type of recommendation. */
   private static final Set<String> SUPPORTED_ENTITY_TYPES =
       ImmutableSet.of(
@@ -60,7 +56,7 @@ public class RecentlyEditedSource implements RecommendationSource {
 
   private final RestHighLevelClient _searchClient;
   private final IndexConvention _indexConvention;
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
 
   private static final String DATAHUB_USAGE_INDEX = "datahub_usage_event";
   private static final String ENTITY_AGG_NAME = "entity";
@@ -108,10 +104,11 @@ public List<RecommendationContent> getRecommendations(
           _searchClient.search(searchRequest, RequestOptions.DEFAULT);
       // extract results
       ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME);
-      return parsedTerms.getBuckets().stream()
-          .map(bucket -> buildContent(bucket.getKeyAsString()))
-          .filter(Optional::isPresent)
-          .map(Optional::get)
+      List<String> bucketUrns =
+          parsedTerms.getBuckets().stream()
+              .map(MultiBucketsAggregation.Bucket::getKeyAsString)
+              .collect(Collectors.toList());
+      return buildContent(bucketUrns, _entityService)
           .limit(MAX_CONTENT)
           .collect(Collectors.toList());
     } catch (Exception e) {
@@ -120,6 +117,11 @@
     }
   }
 
+  @Override
+  public Set<String> getSupportedEntityTypes() {
+    return SUPPORTED_ENTITY_TYPES;
+  }
+
   private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
     // TODO: Proactively filter for entity types in the supported set.
     SearchRequest request = new SearchRequest();
@@ -147,20 +149,4 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
     request.indices(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX));
     return request;
   }
-
-  private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) {
-    Urn entity = UrnUtils.getUrn(entityUrn);
-    if (EntityUtils.checkIfRemoved(_entityService, entity)
-        || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
-      return Optional.empty();
-    }
-
-    return Optional.of(
-        new RecommendationContent()
-            .setEntity(entity)
-            .setValue(entityUrn)
-            .setParams(
-                new RecommendationParams()
-                    .setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
-  }
 }
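RecentlyEditedSource receives the same mechanical rewrite as MostPopularSource above, and RecentlyViewedSource below follows the identical pattern. Once the shared filtering lives in the interface, a candidate source mostly reduces to producing urn strings and declaring its scope; a skeletal illustration (class name invented, and the remaining RecommendationSource methods such as title and render type are omitted for brevity):

    @RequiredArgsConstructor
    public class MyCandidateSource implements EntityRecommendationSource {
      private final EntityService<?> _entityService;

      @Override
      public Set<String> getSupportedEntityTypes() {
        return Set.of(Constants.DATASET_ENTITY_NAME);
      }

      public List<RecommendationContent> getRecommendations(Urn userUrn) {
        // Candidate urns would normally come from a search aggregation, as in
        // the three sources touched by this patch.
        List<String> candidateUrns = List.of();
        return buildContent(candidateUrns, _entityService)
            .limit(10)
            .collect(Collectors.toList());
      }
    }
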
java.io.IOException; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -38,12 +33,13 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.BucketOrder; +import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; @Slf4j @RequiredArgsConstructor -public class RecentlyViewedSource implements RecommendationSource { +public class RecentlyViewedSource implements EntityRecommendationSource { /** Entity Types that should be in scope for this type of recommendation. */ private static final Set SUPPORTED_ENTITY_TYPES = ImmutableSet.of( @@ -60,7 +56,7 @@ public class RecentlyViewedSource implements RecommendationSource { private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; - private final EntityService _entityService; + private final EntityService _entityService; private static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; private static final String ENTITY_AGG_NAME = "entity"; @@ -108,10 +104,11 @@ public List getRecommendations( _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets().stream() - .map(bucket -> buildContent(bucket.getKeyAsString())) - .filter(Optional::isPresent) - .map(Optional::get) + List bucketUrns = + parsedTerms.getBuckets().stream() + .map(MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(Collectors.toList()); + return buildContent(bucketUrns, _entityService) .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { @@ -120,6 +117,11 @@ public List getRecommendations( } } + @Override + public Set getSupportedEntityTypes() { + return SUPPORTED_ENTITY_TYPES; + } + private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { // TODO: Proactively filter for entity types in the supported set. 
SearchRequest request = new SearchRequest(); @@ -151,20 +153,4 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { request.indices(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)); return request; } - - private Optional buildContent(@Nonnull String entityUrn) { - Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) - || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { - return Optional.empty(); - } - - return Optional.of( - new RecommendationContent() - .setEntity(entity) - .setValue(entityUrn) - .setParams( - new RecommendationParams() - .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); - } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java index df332cacaa751..a2cb9b7412a8e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java @@ -19,6 +19,7 @@ import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.graph.SiblingGraphService; import java.net.URISyntaxException; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -60,12 +61,13 @@ public class SiblingGraphServiceTest { private GraphService _graphService; private SiblingGraphService _client; - EntityService _mockEntityService; + EntityService _mockEntityService; @BeforeClass public void setup() { _mockEntityService = Mockito.mock(EntityService.class); - when(_mockEntityService.exists(any())).thenReturn(true); + when(_mockEntityService.exists(any(Collection.class), any(Boolean.class))) + .thenAnswer(args -> new HashSet<>(args.getArgument(0))); _graphService = Mockito.mock(GraphService.class); _client = new SiblingGraphService(_mockEntityService, _graphService); } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 14e5259f90097..84433a2b439f4 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -40,6 +40,8 @@ import java.io.IOException; import java.util.Map; import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; @@ -276,7 +278,20 @@ private EntityClient entityClientHelper( AspectDao mockAspectDao = mock(AspectDao.class); when(mockAspectDao.batchGet(anySet())) - .thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); + .thenAnswer( + args -> { + Set ids = args.getArgument(0); + return ids.stream() + .map( + id -> { + EntityAspect mockEntityAspect = mock(EntityAspect.class); + when(mockEntityAspect.getUrn()).thenReturn(id.getUrn()); + when(mockEntityAspect.getAspect()).thenReturn(id.getAspect()); + when(mockEntityAspect.getVersion()).thenReturn(id.getVersion()); + return Map.entry(id, mockEntityAspect); + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + }); PreProcessHooks preProcessHooks = new 
PreProcessHooks(); preProcessHooks.setUiEnabled(true);
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java index f33ae5de130da..8ce7675edf580 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java @@ -39,12 +39,12 @@ public class GroupService { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService<?> _entityService; private final GraphClient _graphClient; public GroupService( @Nonnull EntityClient entityClient, - @Nonnull EntityService entityService, + @Nonnull EntityService<?> entityService, @Nonnull GraphClient graphClient) { Objects.requireNonNull(entityClient, "entityClient must not be null!"); Objects.requireNonNull(entityService, "entityService must not be null!"); @@ -57,7 +57,7 @@ public GroupService( public boolean groupExists(@Nonnull Urn groupUrn) { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); - return _entityService.exists(groupUrn); + return _entityService.exists(groupUrn, true); } public Origin getGroupOrigin(@Nonnull final Urn groupUrn) { @@ -73,7 +73,7 @@ public void addUserToNativeGroup( Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Verify the user exists - if (!_entityService.exists(userUrn)) { + if (!_entityService.exists(userUrn, true)) { throw new RuntimeException("Failed to add member to group. User does not exist."); }
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java index c631bede45364..40555107f4c79 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java @@ -63,7 +63,7 @@ public StatefulTokenService( public Boolean load(final String key) { final Urn accessUrn = Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); - return !_entityService.exists(accessUrn); + return !_entityService.exists(accessUrn, true); } }); this.salt = salt;
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java index 741d176f98c1b..ff46642827b30 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java @@ -30,7 +30,7 @@ public class NativeUserService { private static final long ONE_DAY_MILLIS = TimeUnit.DAYS.toMillis(1); - private final EntityService _entityService; + private final EntityService<?> _entityService; private final EntityClient _entityClient; private final SecretService _secretService; private final AuthenticationConfiguration _authConfig; @@ -51,7 +51,7 @@ public void createNativeUser( Objects.requireNonNull(authentication, "authentication must not be null!"); final Urn userUrn = Urn.createFromString(userUrnString); - if (_entityService.exists(userUrn) + if (_entityService.exists(userUrn, true) // Should never fail these due to Controller level check, but just in case more usages get // put in || userUrn.toString().equals(SYSTEM_ACTOR)
diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java index dc63b5e4a2897..de2c18782d3d8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java @@ -146,7 +146,7 @@ public String getClientId() { Urn clientIdUrn = UrnUtils.getUrn(CLIENT_ID_URN); // Create a new client id if it doesn't exist - if (!_entityService.exists(clientIdUrn)) { + if (!_entityService.exists(clientIdUrn, true)) { return createClientIdIfNotPresent(_entityService); }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java index 5bd273f3bacf8..f9a762b2dd02a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java @@ -8,6 +8,7 @@ import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME; import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME; import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertThrows; @@ -151,7 +152,7 @@ public void testAuthenticateSuccess() throws Exception { configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) .thenReturn(keyAspectSpec); - Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); + Mockito.when(mockService.exists(Mockito.any(Urn.class), eq(true))).thenReturn(true); Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator();
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java index 6d0678d4f3558..2c1ab6f12efa1 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java @@ -55,7 +55,7 @@ public class GroupServiceTest { private static EntityRelationships _entityRelationships; private EntityClient _entityClient; - private EntityService _entityService; + private EntityService<?> _entityService; private GraphClient _graphClient; private GroupService _groupService; @@ -121,7 +121,7 @@ public void testGroupExistsNullArguments() { @Test public void testGroupExistsPasses() { - when(_entityService.exists(_groupUrn)).thenReturn(true); + when(_entityService.exists(eq(_groupUrn), eq(true))).thenReturn(true); assertTrue(_groupService.groupExists(_groupUrn)); } @@ -147,7 +147,7 @@ public void testAddUserToNativeGroupNullArguments() { @Test public void testAddUserToNativeGroupPasses() throws Exception { - when(_entityService.exists(USER_URN)).thenReturn(true); + when(_entityService.exists(eq(USER_URN), eq(true))).thenReturn(true); when(_entityClient.batchGetV2( eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) .thenReturn(_entityResponseMap); @@ -232,7 +232,7 @@ public void testMigrateGroupMembershipToNativeGroupMembershipPasses() throws Exc when(_entityClient.batchGetV2( eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) .thenReturn(_entityResponseMap); - when(_entityService.exists(USER_URN)).thenReturn(true); + when(_entityService.exists(eq(USER_URN), eq(true))).thenReturn(true); _groupService.migrateGroupMembershipToNativeGroupMembership( Urn.createFromString(EXTERNAL_GROUP_URN_STRING),
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java index ed10022632a56..63ac0f048ad0a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java @@ -1,6 +1,7 @@ package com.datahub.authentication.token; import static com.datahub.authentication.token.TokenClaims.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Actor; @@ -180,7 +181,7 @@ public void generateRevokeToken() throws TokenException { Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) .thenReturn(keyAspectSpec); - Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); + Mockito.when(mockService.exists(Mockito.any(Urn.class), eq(true))).thenReturn(true); final RollbackRunResult result = new RollbackRunResult(ImmutableList.of(), 0); Mockito.when(mockService.deleteUrn(Mockito.any(Urn.class))).thenReturn(result);
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java index 9cb5d5cb697cc..2b584c3461452 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -85,7 +85,7 @@ public void testCreateNativeUserNullArguments() { expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") public void testCreateNativeUserUserAlreadyExists() throws Exception { // The user already exists - when(_entityService.exists(any())).thenReturn(true); + when(_entityService.exists(any(Urn.class), eq(true))).thenReturn(true); _nativeUserService.createNativeUser( USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); @@ -109,7 +109,7 @@ public void testCreateNativeUserUserSystemUser() throws Exception { @Test public void testCreateNativeUserPasses() throws Exception { - when(_entityService.exists(any())).thenReturn(false); + when(_entityService.exists(any(), any())).thenReturn(false); when(_secretService.generateSalt(anyInt())).thenReturn(SALT); when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); when(_secretService.getHashedPassword(any(), any())).thenReturn(HASHED_PASSWORD);
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java index a0bbe69691db4..8baeb7d3f8443 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java @@ -76,7 +76,7 @@ public void setupTest() { @Test public void testEmitAnalyticsEvent() throws IOException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); - when(_entityService.exists(_clientIdUrn)).thenReturn(true); + when(_entityService.exists(eq(_clientIdUrn), eq(true))).thenReturn(true); when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) .thenReturn(TELEMETRY_CLIENT_ID); when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())) @@ -99,7 +99,7 @@ public void testEmitAnalyticsEvent() throws IOException { @Test public void testGetClientIdAlreadyExists() { - when(_entityService.exists(_clientIdUrn)).thenReturn(true); + when(_entityService.exists(eq(_clientIdUrn), eq(true))).thenReturn(true); when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) .thenReturn(TELEMETRY_CLIENT_ID); @@ -108,7 +108,7 @@ public void testGetClientIdAlreadyExists() { @Test public void testGetClientIdDoesNotExist() { - when(_entityService.exists(_clientIdUrn)).thenReturn(false); + when(_entityService.exists(eq(_clientIdUrn), eq(true))).thenReturn(false); assertNotNull(_trackingService.getClientId()); verify(_entityService, times(1))
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java index ab86749eb431d..e3a3c452b85f1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java @@ -28,8 +28,8 @@ @RequiredArgsConstructor public class IngestRetentionPoliciesStep implements BootstrapStep { - private final RetentionService _retentionService; - private final EntityService _entityService; + private final RetentionService<?> _retentionService; + private final EntityService<?> _entityService; private final boolean _enableRetention; private final boolean _applyOnBootstrap; private final String pluginPath; @@ -63,7 +63,7 @@ public String name() { @Override public void execute() throws IOException, URISyntaxException { // 0. Execute preflight check to see whether we need to ingest policies - if (_entityService.exists(UPGRADE_ID_URN)) { + if (_entityService.exists(UPGRADE_ID_URN, true)) { log.info("Retention was applied. Skipping."); return; }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java index 3c62f695ddd5f..7e232f939dc08 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java @@ -13,7 +13,7 @@ @RequiredArgsConstructor public class RemoveClientIdAspectStep implements BootstrapStep { - private final EntityService _entityService; + private final EntityService<?> _entityService; private static final String UPGRADE_ID = "remove-unknown-aspects"; private static final String INVALID_TELEMETRY_ASPECT_NAME = "clientId"; @@ -27,7 +27,7 @@ public String name() { @Override public void execute() throws Exception { try { - if (_entityService.exists(REMOVE_UNKNOWN_ASPECTS_URN)) { + if (_entityService.exists(REMOVE_UNKNOWN_ASPECTS_URN, true)) { log.info("Unknown aspects have been removed. Skipping..."); return; }
diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java index fc935514f4138..d7c8268903508 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java @@ -136,7 +136,7 @@ public ResponseEntity delete(String urn) { public ResponseEntity head(String urn) { try { Urn entityUrn = Urn.createFromString(urn); - if (_entityService.exists(entityUrn)) { + if (_entityService.exists(entityUrn, true)) { return new ResponseEntity<>(HttpStatus.NO_CONTENT); } else { return new ResponseEntity<>(HttpStatus.NOT_FOUND);
diff --git a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java index fdf99cdc303c1..b082a735e8084 100644 --- a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java +++ b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java @@ -42,6 +42,7 @@ import com.linkedin.schema.StringType; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -212,7 +213,7 @@ public RollbackRunResult deleteUrn(Urn urn) { } @Override - public Boolean exists(Urn urn) { - return null; + public Set<Urn> exists(@NotNull Collection<Urn> urns) { + return Set.of(); } }
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java index dfd986c2ebea0..afdaf06802a11 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java @@ -1057,6
+1057,6 @@ public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr) } log.info("EXISTS for {}", urnStr); return RestliUtil.toTask( - () -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists")); + () -> _entityService.exists(urn, true), MetricRegistry.name(this.getClass(), "exists")); } }
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index 89b0e5ba9a558..71573aa2b10e0 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -25,6 +25,7 @@ import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; import java.net.URISyntaxException; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; @@ -312,9 +313,27 @@ RollbackRunResult rollbackWithConditions( IngestResult ingestProposal( MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async); - Boolean exists(Urn urn); + /** + * Returns a set of urns of entities that exist (has materialized aspects). + * + * @param urns the list of urns of the entities to check + * @return a set of urns of entities that exist. + */ + Set<Urn> exists(@Nonnull final Collection<Urn> urns, boolean includeSoftDelete); + + /** + * Returns a set of urns of entities that exist (has materialized aspects). + * + * @param urns the list of urns of the entities to check + * @return a set of urns of entities that exist. + */ + default Set<Urn> exists(@Nonnull final Collection<Urn> urns) { + return exists(urns, true); + } - Boolean isSoftDeleted(@Nonnull final Urn urn); + default boolean exists(@Nonnull Urn urn, boolean includeSoftDelete) { + return exists(List.of(urn), includeSoftDelete).contains(urn); + } void setWritable(boolean canWrite);
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java new file mode 100644 index 0000000000000..546c2856c28ac --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntityRecommendationSource.java @@ -0,0 +1,37 @@ +package com.linkedin.metadata.recommendation.candidatesource; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.recommendation.EntityProfileParams; +import com.linkedin.metadata.recommendation.RecommendationContent; +import com.linkedin.metadata.recommendation.RecommendationParams; +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; +import javax.annotation.Nonnull; + +public interface EntityRecommendationSource extends RecommendationSource { + Set<String> getSupportedEntityTypes(); + + default RecommendationContent buildContent(@Nonnull Urn urn) { + return new RecommendationContent() + .setEntity(urn) + .setValue(urn.toString()) + .setParams( + new RecommendationParams() + .setEntityProfileParams(new EntityProfileParams().setUrn(urn))); + } + + default Stream<RecommendationContent> buildContent( + @Nonnull List<String> entityUrns, EntityService<?> entityService) { + List<Urn> entities = + entityUrns.stream() + .map(UrnUtils::getUrn) + .filter(urn -> getSupportedEntityTypes().contains(urn.getEntityType())) + .toList(); + Set<Urn> existingNonRemoved = entityService.exists(entities, false); + + return entities.stream().filter(existingNonRemoved::contains).map(this::buildContent); + } +}
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java index c618db801d9d6..71c4d357ad1eb 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java @@ -1,20 +1,29 @@ package com.linkedin.metadata.shared; import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.AbstractArrayTemplate; import com.linkedin.metadata.browse.BrowseResult; +import com.linkedin.metadata.browse.BrowseResultEntity; import com.linkedin.metadata.browse.BrowseResultEntityArray; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.EntityLineageResult; +import com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.search.LineageScrollResult; +import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchEntityArray; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import java.util.Objects; +import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -23,7 +32,7 @@ public class ValidationUtils { public static SearchResult validateSearchResult( - final SearchResult searchResult, @Nonnull final EntityService entityService) { + final SearchResult searchResult, @Nonnull final EntityService<?> entityService) { if (searchResult == null) { return null; } @@ -37,16 +46,16 @@ public static SearchResult validateSearchResult( .setNumEntities(searchResult.getNumEntities()); SearchEntityArray validatedEntities = - searchResult.getEntities().stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + validatedUrns(searchResult.getEntities(), SearchEntity::getEntity, entityService, true) .collect(Collectors.toCollection(SearchEntityArray::new)); + validatedSearchResult.setEntities(validatedEntities); return validatedSearchResult; } public static ScrollResult validateScrollResult( - final ScrollResult scrollResult, @Nonnull final EntityService entityService) { + final ScrollResult scrollResult, @Nonnull final EntityService<?> entityService) { if (scrollResult == null) { return null; } @@ -62,16 +71,16 @@ public static ScrollResult validateScrollResult( } SearchEntityArray validatedEntities = - scrollResult.getEntities().stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + validatedUrns(scrollResult.getEntities(), SearchEntity::getEntity, entityService, true) .collect(Collectors.toCollection(SearchEntityArray::new)); + validatedScrollResult.setEntities(validatedEntities); return validatedScrollResult; } public static BrowseResult validateBrowseResult( - final BrowseResult browseResult, @Nonnull final EntityService entityService) { + final BrowseResult browseResult, @Nonnull final EntityService<?> entityService) { if (browseResult == null) { return null; } @@ -88,16 +97,16 @@ public static BrowseResult validateBrowseResult( .setNumElements(browseResult.getNumElements()); BrowseResultEntityArray validatedEntities = - browseResult.getEntities().stream() - .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) + validatedUrns(browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true) .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + validatedBrowseResult.setEntities(validatedEntities); return validatedBrowseResult; } public static ListResult validateListResult( - final ListResult listResult, @Nonnull final EntityService entityService) { + final ListResult listResult, @Nonnull final EntityService<?> entityService) { if (listResult == null) { return null; } @@ -110,16 +119,17 @@ public static ListResult validateListResult( .setTotal(listResult.getTotal()); UrnArray validatedEntities = - listResult.getEntities().stream() - .filter(entityService::exists) + validatedUrns(listResult.getEntities(), Function.identity(), entityService, true) .collect(Collectors.toCollection(UrnArray::new)); + validatedListResult.setEntities(validatedEntities); return validatedListResult; } public static LineageSearchResult validateLineageSearchResult( - final LineageSearchResult lineageSearchResult, @Nonnull final EntityService entityService) { + final LineageSearchResult lineageSearchResult, + @Nonnull final EntityService<?> entityService) { if (lineageSearchResult == null) { return null; } @@ -133,9 +143,13 @@ public static LineageSearchResult validateLineageSearchResult( .setNumEntities(lineageSearchResult.getNumEntities()); LineageSearchEntityArray validatedEntities = - lineageSearchResult.getEntities().stream() - .filter(entity -> entityService.exists(entity.getEntity())) + validatedUrns( + lineageSearchResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true) .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + validatedLineageSearchResult.setEntities(validatedEntities); return validatedLineageSearchResult; @@ -143,7 +157,7 @@ public static LineageSearchResult validateLineageSearchResult( public static EntityLineageResult validateEntityLineageResult( @Nullable final EntityLineageResult entityLineageResult, - @Nonnull final EntityService entityService) { + @Nonnull final EntityService<?> entityService) { if (entityLineageResult == null) { return null; } @@ -155,10 +169,12 @@ public static EntityLineageResult validateEntityLineageResult( .setCount(entityLineageResult.getCount()) .setTotal(entityLineageResult.getTotal()); - final LineageRelationshipArray validatedRelationships = - entityLineageResult.getRelationships().stream() - .filter(relationship -> entityService.exists(relationship.getEntity())) - .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) + LineageRelationshipArray validatedRelationships = + validatedUrns( + entityLineageResult.getRelationships(), + LineageRelationship::getEntity, + entityService, + false) .collect(Collectors.toCollection(LineageRelationshipArray::new)); validatedEntityLineageResult.setFiltered( @@ -173,7 +189,8 @@ } public static LineageScrollResult validateLineageScrollResult( - final LineageScrollResult lineageScrollResult, @Nonnull final EntityService entityService) { + final LineageScrollResult lineageScrollResult, + @Nonnull final EntityService<?> entityService) { if (lineageScrollResult == null) { return null; } @@ -189,13 +206,29 @@ public static LineageScrollResult validateLineageScrollResult( } LineageSearchEntityArray validatedEntities = - lineageScrollResult.getEntities().stream() - .filter(entity -> entityService.exists(entity.getEntity())) + validatedUrns( + lineageScrollResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true) .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + validatedLineageScrollResult.setEntities(validatedEntities); return validatedLineageScrollResult; } + private static <T> Stream<T> validatedUrns( + final AbstractArrayTemplate<T> array, + Function<T, Urn> urnFunction, + @Nonnull final EntityService<?> entityService, + boolean includeSoftDeleted) { + + Set<Urn> existingUrns = + entityService.exists( + array.stream().map(urnFunction).collect(Collectors.toList()), includeSoftDeleted); + return array.stream().filter(item -> existingUrns.contains(urnFunction.apply(item))); + } + private ValidationUtils() {} }
From 14dee5723de8aac6ae8e566988f4bbcf3fac98ea Mon Sep 17 00:00:00 2001 From: Harshal Sheth Date: Fri, 19 Jan 2024 16:18:15 -0800 Subject: [PATCH 247/263] feat(ingest/airflow): support airflow 2.8 dataset listeners (#9664) --- .github/workflows/airflow-plugin.yml | 9 +++++--- .../datahub_airflow_plugin/_airflow_shims.py | 3 +++ .../datahub_listener.py | 22 +++++++++++++++++++ .../airflow-plugin/tox.ini | 8 +++++-- 4 files changed, 37 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 158d3416bc2a9..7ae7b87b0f5ce 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -40,13 +40,16 @@ jobs: extra_pip_requirements: "apache-airflow~=2.2.4" extra_pip_extras: plugin-v1 - python-version: "3.10" - extra_pip_requirements: 'apache-airflow~=2.4.0 pluggy==1.0.0 "pendulum<3.0"' + extra_pip_requirements: 'apache-airflow~=2.4.0 pluggy==1.0.0 "pendulum<3.0" "Flask-Session<0.6.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: 'apache-airflow~=2.6.0 "pendulum<3.0"' + extra_pip_requirements: 'apache-airflow~=2.6.0 "pendulum<3.0" "Flask-Session<0.6.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow>=2.7.0 pydantic==2.4.2" + extra_pip_requirements: 'apache-airflow~=2.7.0 pydantic==2.4.2 "Flask-Session<0.6.0"' + extra_pip_extras: plugin-v2 + - python-version: "3.10" + extra_pip_requirements: 'apache-airflow>=2.8.0 pydantic>=2.4.2 "Flask-Session<0.6.0"' extra_pip_extras: plugin-v2 fail-fast: false steps:
diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py index d384958cf3ddb..c1e2dd4cc422d 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/_airflow_shims.py @@ -32,6 +32,9 @@ HAS_AIRFLOW_STANDALONE_CMD = AIRFLOW_VERSION >= packaging.version.parse("2.2.0.dev0") HAS_AIRFLOW_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.3.0.dev0") HAS_AIRFLOW_DAG_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse("2.5.0.dev0") +HAS_AIRFLOW_DATASET_LISTENER_API = AIRFLOW_VERSION >= packaging.version.parse( + "2.8.0.dev0" +) NEEDS_AIRFLOW_LISTENER_MODULE = AIRFLOW_VERSION < packaging.version.parse( "2.5.0.dev0" )
or PLUGGY_VERSION <= packaging.version.parse("1.0.0") diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py index debc91700d3db..a7f588a166dde 100644 --- a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_listener.py @@ -24,6 +24,7 @@ from datahub_airflow_plugin._airflow_shims import ( HAS_AIRFLOW_DAG_LISTENER_API, + HAS_AIRFLOW_DATASET_LISTENER_API, Operator, get_task_inlets, get_task_outlets, @@ -40,6 +41,7 @@ _F = TypeVar("_F", bound=Callable[..., None]) if TYPE_CHECKING: + from airflow.datasets import Dataset from airflow.models import DAG, DagRun, TaskInstance from sqlalchemy.orm import Session @@ -502,3 +504,23 @@ def on_dag_run_running(self, dag_run: "DagRun", msg: str) -> None: self.emitter.flush() # TODO: Add hooks for on_dag_run_success, on_dag_run_failed -> call AirflowGenerator.complete_dataflow + + if HAS_AIRFLOW_DATASET_LISTENER_API: + + @hookimpl + @run_in_thread + def on_dataset_created(self, dataset: "Dataset") -> None: + self._set_log_level() + + logger.debug( + f"DataHub listener got notification about dataset create for {dataset}" + ) + + @hookimpl + @run_in_thread + def on_dataset_changed(self, dataset: "Dataset") -> None: + self._set_log_level() + + logger.debug( + f"DataHub listener got notification about dataset change for {dataset}" + ) diff --git a/metadata-ingestion-modules/airflow-plugin/tox.ini b/metadata-ingestion-modules/airflow-plugin/tox.ini index 27ae2ce65ba65..154ced6b8deb9 100644 --- a/metadata-ingestion-modules/airflow-plugin/tox.ini +++ b/metadata-ingestion-modules/airflow-plugin/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py310-airflow27 +envlist = py38-airflow21, py38-airflow22, py310-airflow24, py310-airflow26, py310-airflow27, py310-airflow28 [testenv] use_develop = true @@ -30,10 +30,14 @@ deps = # constraints file is overly restrictive. airflow27: apache-airflow~=2.7.0 airflow27: pydantic==2.4.2 + airflow28: apache-airflow~=2.8.0 + # Apparently Flask-Session 0.6.0 was released by accident. + # See https://github.com/pallets-eco/flask-session/issues/209 + airflow24,airflow26,airflow27,airflow28: Flask-Session<0.6.0 commands = pytest --cov-append {posargs} # For Airflow 2.4+, add the plugin-v2 extra. 
-[testenv:py310-airflow{24,26,27}] +[testenv:py310-airflow{24,26,27,28}] extras = dev,integration-tests,plugin-v2 From 0906ce832d2bdc3f9b63e3f52cff56772fb8ecf9 Mon Sep 17 00:00:00 2001 From: Pedro Silva Date: Sat, 20 Jan 2024 00:45:22 +0000 Subject: [PATCH 248/263] fix(docs): Correct Kafka Connect sink documentation (#9672) --- metadata-ingestion/docs/sources/kafka-connect/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/docs/sources/kafka-connect/README.md b/metadata-ingestion/docs/sources/kafka-connect/README.md index e4f64c62914c5..5535f89259082 100644 --- a/metadata-ingestion/docs/sources/kafka-connect/README.md +++ b/metadata-ingestion/docs/sources/kafka-connect/README.md @@ -21,4 +21,4 @@ This ingestion source maps the following Source System Concepts to DataHub Conce Works only for - Source connectors: JDBC, Debezium, Mongo and Generic connectors with user-defined lineage graph -- Sink connectors: BigQuery, Confluent S3, Snowflake +- Sink connectors: BigQuery, Confluent, S3, Snowflake From 9168c4550a1553c1efd205662181df6ae3ca3e86 Mon Sep 17 00:00:00 2001 From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com> Date: Sat, 20 Jan 2024 06:36:36 +0530 Subject: [PATCH 249/263] docs(observability): add databricks as supported source (#9619) --- docs/managed-datahub/observe/column-assertions.md | 1 + docs/managed-datahub/observe/custom-sql-assertions.md | 1 + docs/managed-datahub/observe/freshness-assertions.md | 1 + docs/managed-datahub/observe/volume-assertions.md | 1 + 4 files changed, 4 insertions(+) diff --git a/docs/managed-datahub/observe/column-assertions.md b/docs/managed-datahub/observe/column-assertions.md index 99a764f771676..8ef32e73b4b72 100644 --- a/docs/managed-datahub/observe/column-assertions.md +++ b/docs/managed-datahub/observe/column-assertions.md @@ -35,6 +35,7 @@ Column Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. diff --git a/docs/managed-datahub/observe/custom-sql-assertions.md b/docs/managed-datahub/observe/custom-sql-assertions.md index d4a09b434ca79..11e9aa807b616 100644 --- a/docs/managed-datahub/observe/custom-sql-assertions.md +++ b/docs/managed-datahub/observe/custom-sql-assertions.md @@ -43,6 +43,7 @@ Custom SQL Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. diff --git a/docs/managed-datahub/observe/freshness-assertions.md b/docs/managed-datahub/observe/freshness-assertions.md index 82de423f6f2de..416db6a65343e 100644 --- a/docs/managed-datahub/observe/freshness-assertions.md +++ b/docs/managed-datahub/observe/freshness-assertions.md @@ -43,6 +43,7 @@ Freshness Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. diff --git a/docs/managed-datahub/observe/volume-assertions.md b/docs/managed-datahub/observe/volume-assertions.md index 5f5aff33a5ce2..67971d0c20037 100644 --- a/docs/managed-datahub/observe/volume-assertions.md +++ b/docs/managed-datahub/observe/volume-assertions.md @@ -44,6 +44,7 @@ Volume Assertions are currently supported for: 1. Snowflake 2. Redshift 3. BigQuery +4. 
Databricks Note that an Ingestion Source _must_ be configured with the data platform of your choice in Acryl DataHub's **Ingestion** tab. From 087d3fdb0dba1fccd802161e44a74a22edc211ac Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 19 Jan 2024 23:27:44 -0600 Subject: [PATCH 250/263] feat(build): conditional ci (#9673) --- .github/actions/ci-optimization/action.yml | 79 ++++++++++++ .github/workflows/build-and-test.yml | 34 +++++- .github/workflows/docker-unified.yml | 132 +++++++++++++++++---- 3 files changed, 218 insertions(+), 27 deletions(-) create mode 100644 .github/actions/ci-optimization/action.yml diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml new file mode 100644 index 0000000000000..404e0bab814e8 --- /dev/null +++ b/.github/actions/ci-optimization/action.yml @@ -0,0 +1,79 @@ +name: 'Identify CI Optimizations' +description: 'Determine if code changes are specific to certain modules.' + +outputs: + frontend-only: + description: "Frontend only change" + value: ${{ steps.filter.outputs.frontend == 'true' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'false' }} + ingestion-only: + description: "Ingestion only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'true' && steps.filter.outputs.backend == 'false' }} + backend-only: + description: "Backend only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'true' }} + backend-change: + description: "Backend code has changed" + value: ${{ steps.filter.outputs.backend == 'true' }} + ingestion-change: + description: "Ingestion code has changed" + value: ${{ steps.filter.outputs.ingestion == 'true' }} + frontend-change: + description: "Frontend code has changed" + value: ${{ steps.filter.outputs.frontend == 'true' }} + docker-change: + description: "Docker code has changed" + value: ${{ steps.filter.outputs.docker == 'true' }} + kafka-setup-change: + description: "Kafka setup docker change" + value: ${{ steps.filter.outputs.kafka-setup == 'true' }} + mysql-setup-change: + description: "Mysql setup docker change" + value: ${{ steps.filter.outputs.mysql-setup == 'true' }} + postgres-setup-change: + description: "Postgres setup docker change" + value: ${{ steps.filter.outputs.postgres-setup == 'true' }} + elasticsearch-setup-change: + description: "Elasticsearch setup docker change" + value: ${{ steps.filter.outputs.elasticsearch-setup == 'true' }} +runs: + using: "composite" + steps: + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: | + frontend: + - "datahub-frontend/**" + - "datahub-web-react/**" + - "smoke-test/tests/cypress/**" + - "docker/datahub-frontend/**" + ingestion: + - "metadata-ingestion-modules/airflow-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + - "smoke-test/**" + - "docker/datahub-ingestion-**" + docker: + - "docker/**" + backend: + - "metadata-models/**" + - "datahub-upgrade/**" + - "entity-registry/**" + - "li-utils/**" + - "metadata-auth/**" + - "metadata-dao-impl/**" + - "metadata-events/**" + - "metadata-io/**" + - "metadata-jobs/**" + - "metadata-service/**" + - "metadata-utils/**" + - "smoke-test/**" + - "docker/**" + kafka-setup: + - "docker/kafka-setup/**" + mysql-setup: + - "docker/mysql-setup/**" + postgres-setup: + - "docker/postgres-setup/**" + elasticsearch-setup: + - 
"docker/elasticsearch-setup/**" \ No newline at end of file diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 64493e99211b4..180e0472a8d99 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -20,6 +20,25 @@ concurrency: cancel-in-progress: true jobs: + setup: + runs-on: ubuntu-latest + outputs: + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + docker_change: ${{ steps.ci-optimize.outputs.docker-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + steps: + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 + - uses: ./.github/actions/ci-optimization + id: ci-optimize + build: strategy: fail-fast: false @@ -36,11 +55,13 @@ jobs: timezone: "America/New_York" runs-on: ubuntu-latest timeout-minutes: 60 + needs: setup steps: - uses: szenius/set-timezone@v1.2 with: timezoneLinux: ${{ matrix.timezone }} - - uses: hsheth2/sane-checkout-action@v1 + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 - name: Set up JDK 17 uses: actions/setup-java@v3 with: @@ -51,12 +72,12 @@ jobs: with: python-version: "3.10" cache: pip - - name: Gradle build (and test) for metadata ingestion - if: ${{ matrix.command == 'except_metadata_ingestion' }} + - name: Gradle build (and test) for NOT metadata ingestion + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} run: | ./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel - name: Gradle build (and test) for frontend - if: ${{ matrix.command == 'frontend' }} + if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }} run: | ./gradlew :datahub-frontend:build :datahub-web-react:build --parallel env: @@ -75,8 +96,11 @@ jobs: quickstart-compose-validation: runs-on: ubuntu-latest + needs: setup + if: ${{ needs.setup.outputs.docker_change == 'true' }} steps: - - uses: actions/checkout@v3 + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index d246bf329bcb0..5e9112726b010 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -51,9 +51,19 @@ jobs: short_sha: ${{ steps.tag.outputs.short_sha }} branch_name: ${{ steps.tag.outputs.branch_name }} repository_name: ${{ steps.tag.outputs.repository_name }} + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change 
== 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + backend_only: ${{ steps.ci-optimize.outputs.backend-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} steps: - - name: Checkout - uses: actions/checkout@v3 + - name: Check out the repo + uses: hsheth2/sane-checkout-action@v1 - name: Compute Tag id: tag run: | @@ -75,11 +85,14 @@ jobs: run: | echo "Enable publish: ${{ env.ENABLE_PUBLISH }}" echo "publish=${{ env.ENABLE_PUBLISH }}" >> $GITHUB_OUTPUT + - uses: ./.github/actions/ci-optimization + id: ci-optimize gms_build: name: Build and Push DataHub GMS Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -113,6 +126,7 @@ jobs: name: "[Monitoring] Scan GMS images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, gms_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy uses: actions/checkout@v3 @@ -142,6 +156,7 @@ jobs: name: Build and Push DataHub MAE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -171,6 +186,7 @@ jobs: name: "[Monitoring] Scan MAE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mae_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -204,6 +220,7 @@ jobs: name: Build and Push DataHub MCE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -233,6 +250,7 @@ jobs: name: "[Monitoring] Scan MCE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mce_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -266,6 +284,7 @@ jobs: name: Build and Push DataHub Upgrade Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -295,6 +314,7 @@ jobs: name: "[Monitoring] Scan DataHub Upgrade images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_upgrade_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code 
security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -328,6 +348,7 @@ jobs: name: Build and Push DataHub Frontend Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -359,6 +380,7 @@ jobs: name: "[Monitoring] Scan Frontend images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, frontend_build] + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -392,6 +414,7 @@ jobs: name: Build and Push DataHub Kafka Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.kafka_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -412,6 +435,7 @@ jobs: name: Build and Push DataHub MySQL Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.mysql_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -432,6 +456,7 @@ jobs: name: Build and Push DataHub Elasticsearch Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -454,6 +479,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: setup + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -486,6 +512,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -526,6 +553,7 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 @@ -568,6 +596,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Set up JDK 17 uses: actions/setup-java@v3 @@ -623,6 +652,7 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion Slim images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_ingestion_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy uses: actions/checkout@v3 @@ -655,6 +685,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, 
+    if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
       - name: Set up JDK 17
         uses: actions/setup-java@v3
@@ -709,6 +740,7 @@ jobs:
     name: "[Monitoring] Scan Datahub Ingestion images for vulnerabilities"
     runs-on: ubuntu-latest
     needs: [setup, datahub_ingestion_full_build]
+    if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
       - name: Checkout # adding checkout step just to make trivy upload happy
         uses: actions/checkout@v3
@@ -734,22 +766,31 @@ jobs:
         with:
           sarif_file: "trivy-results.sarif"

+  smoke_test_matrix:
+    runs-on: ubuntu-latest
+    needs: setup
+    outputs:
+      matrix: ${{ steps.set-matrix.outputs.matrix }}
+    steps:
+      - id: set-matrix
+        run: |
+          if [ '${{ needs.setup.outputs.frontend_only }}' == 'true' ]; then
+            echo 'matrix=["cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT
+          elif [ '${{ needs.setup.outputs.ingestion_only }}' == 'true' ]; then
+            echo 'matrix=["no_cypress_suite0","no_cypress_suite1"]' >> $GITHUB_OUTPUT
+          elif [ '${{ needs.setup.outputs.backend_change }}' == 'true' ]; then
+            echo 'matrix=["no_cypress_suite0","no_cypress_suite1","cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT
+          else
+            echo 'matrix=[]' >> $GITHUB_OUTPUT
+          fi
+
   smoke_test:
     name: Run Smoke Tests
     runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        test_strategy:
-          [
-            "no_cypress_suite0",
-            "no_cypress_suite1",
-            "cypress_suite1",
-            "cypress_rest",
-          ]
     needs:
       [
         setup,
+        smoke_test_matrix,
         gms_build,
         frontend_build,
         kafka_setup_build,
         mysql_setup_build,
         elasticsearch_setup_build,
         mce_consumer_build,
         mae_consumer_build,
         datahub_upgrade_build,
         datahub_ingestion_slim_build,
       ]
+    strategy:
+      fail-fast: false
+      matrix:
+        test_strategy: ${{ fromJson(needs.smoke_test_matrix.outputs.matrix) }}
+    if: ${{ always() && !failure() && !cancelled() && needs.smoke_test_matrix.outputs.matrix != '[]' }}
     steps:
       - name: Disk Check
         run: df -h . && docker images
@@ -788,57 +834,99 @@ jobs:
         run: df -h . && docker images
       - name: Download GMS image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.gms_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download Frontend image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.frontend_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download Kafka Setup image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.kafka_setup_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download Mysql Setup image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.mysql_setup_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download Elastic Setup image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.elasticsearch_setup_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download MCE Consumer image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.mce_consumer_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download MAE Consumer image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.mae_consumer_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download upgrade image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+        if: ${{ needs.setup.outputs.publish != 'true' && needs.datahub_upgrade_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
       - name: Download datahub-ingestion-slim image
         uses: ishworkh/docker-image-artifact-download@v1
-        if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }}
+        if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' && needs.datahub_ingestion_slim_build.result == 'success' }}
         with:
           image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}
       - name: Disk Check
         run: df -h . && docker images
+      - name: CI Optimization Head Images
+        # When publishing all tests/images are built (no optimizations)
+        if: ${{ needs.setup.outputs.publish != 'true' }}
+        run: |
+          if [ '${{ needs.setup.outputs.backend_change }}' == 'false' ]; then
+            echo 'GMS/Upgrade/MCE/MAE head images'
+            docker pull '${{ env.DATAHUB_GMS_IMAGE }}:head'
+            docker pull '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head'
+            docker pull '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head'
+            docker pull '${{ env.DATAHUB_UPGRADE_IMAGE }}:head'
+            docker tag '${{ env.DATAHUB_GMS_IMAGE }}:head' '${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+            docker tag '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+            docker tag '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+            docker tag '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' '${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+          fi
+          if [ '${{ needs.setup.outputs.frontend_change }}' == 'false' ]; then
+            echo 'Frontend head images'
+            docker pull '${{ env.DATAHUB_FRONTEND_IMAGE }}:head'
+            docker tag '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' '${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+          fi
+          if [ '${{ needs.setup.outputs.kafka_setup_change }}' == 'false' ]; then
+            echo 'kafka-setup head images'
+            docker pull '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head'
+            docker tag '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+          fi
+          if [ '${{ needs.setup.outputs.mysql_setup_change }}' == 'false' ]; then
+            echo 'mysql-setup head images'
+            docker pull '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head'
+            docker tag '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+          fi
+          if [ '${{ needs.setup.outputs.elasticsearch_setup_change }}' == 'false' ]; then
+            echo 'elasticsearch-setup head images'
+            docker pull '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head'
+            docker tag '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
+          fi
+          if [ '${{ needs.setup.outputs.ingestion_change }}' == 'false' ]; then
+            echo 'datahub-ingestion head-slim images'
+            docker pull '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim'
+            if [ '${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }}' != 'head-slim' ]; then
+              docker tag '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' '${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}'
+            fi
+          fi
       - name: run quickstart
         env:
           DATAHUB_TELEMETRY_ENABLED: false
           DATAHUB_VERSION: ${{ needs.setup.outputs.unique_tag }}
           DATAHUB_ACTIONS_IMAGE: ${{ env.DATAHUB_INGESTION_IMAGE }}
-          ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag }}
+          ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }}
           ACTIONS_EXTRA_PACKAGES: "acryl-datahub-actions[executor]==0.0.13 acryl-datahub-actions==0.0.13 acryl-datahub==0.10.5"
           ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml"
         run: |

From d78db0abee07e33b44342ce1920889324303b137 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Sat, 20 Jan 2024 14:53:50 +0530
Subject: [PATCH 251/263] build(deps-dev): bump vite from 4.5.0 to 4.5.2 in
 /datahub-web-react (#9676)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 datahub-web-react/package.json | 2 +-
 datahub-web-react/yarn.lock    | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json
index cf63d5c313bdb..97830cec4e164 100644
--- a/datahub-web-react/package.json
+++ b/datahub-web-react/package.json
@@ -126,7 +126,7 @@
     "less": "^4.2.0",
     "prettier": "^2.8.8",
     "source-map-explorer": "^2.5.2",
-    "vite": "^4.5.0",
+    "vite": "^4.5.2",
     "vite-plugin-babel-macros": "^1.0.6",
     "vite-plugin-static-copy": "^0.17.0",
     "vite-plugin-svgr": "^4.1.0",
diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock
index 9ea6c58eadc6b..37801e42b3eab 100644
--- a/datahub-web-react/yarn.lock
+++ b/datahub-web-react/yarn.lock
@@ -10835,10 +10835,10 @@ vite-plugin-svgr@^4.1.0:
     "@svgr/core" "^8.1.0"
     "@svgr/plugin-jsx" "^8.1.0"

-"vite@^3.0.0 || ^4.0.0 || ^5.0.0-0", "vite@^3.1.0 || ^4.0.0 || ^5.0.0-0", vite@^4.5.0:
-  version "4.5.0"
-  resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.0.tgz#ec406295b4167ac3bc23e26f9c8ff559287cff26"
-  integrity sha512-ulr8rNLA6rkyFAlVWw2q5YJ91v098AFQ2R0PRFwPzREXOUJQPtFUG0t+/ZikhaOCDqFoDhN6/v8Sq0o4araFAw==
+"vite@^3.0.0 || ^4.0.0 || ^5.0.0-0", "vite@^3.1.0 || ^4.0.0 || ^5.0.0-0", vite@^4.5.2:
+  version "4.5.2"
+  resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.2.tgz#d6ea8610e099851dad8c7371599969e0f8b97e82"
+  integrity sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==
   dependencies:
     esbuild "^0.18.10"
     postcss "^8.4.27"

From 20ad83d0b1be4fbb54e10aa183afba64674deecf Mon Sep 17 00:00:00 2001
From: Quentin FLEURENT NAMBOT
Date: Sat, 20 Jan 2024 10:24:10 +0100
Subject: [PATCH 252/263] feat(superset): add some custom properties for
 dashboards (#9670)

---
 .../src/datahub/ingestion/source/superset.py   | 24 ++++++++++++-
 .../superset/golden_test_ingest.json           | 16 +++++++--
 .../superset/golden_test_stateful_ingest.json  |  9 ++++-
 .../integration/superset/test_superset.py      | 35 +++++++++++++++++--
 4 files changed, 78 insertions(+), 6 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py
index 7f607666db313..931069a921058 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/superset.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py
@@ -267,13 +267,35 @@ def construct_dashboard_from_api_data(self, dashboard_data):
                 f"urn:li:chart:({self.platform},{value.get('meta', {}).get('chartId', 'unknown')})"
             )

+        # Build properties
+        custom_properties = {
+            "Status": str(dashboard_data.get("status")),
+            "IsPublished": str(dashboard_data.get("published", False)).lower(),
+            "Owners": ", ".join(
+                map(
+                    lambda owner: owner.get("username", "unknown"),
+                    dashboard_data.get("owners", []),
+                )
+            ),
+            "IsCertified": str(
+                True if dashboard_data.get("certified_by") else False
+            ).lower(),
+        }
+
+        if dashboard_data.get("certified_by"):
+            custom_properties["CertifiedBy"] = dashboard_data.get("certified_by")
+            custom_properties["CertificationDetails"] = str(
+                dashboard_data.get("certification_details")
+            )
+
+        # Create DashboardInfo object
         dashboard_info = DashboardInfoClass(
             description="",
             title=title,
             charts=chart_urns,
             lastModified=last_modified,
             dashboardUrl=dashboard_url,
-            customProperties={},
+            customProperties=custom_properties,
         )
         dashboard_snapshot.aspects.append(dashboard_info)
         return dashboard_snapshot
diff --git a/metadata-ingestion/tests/integration/superset/golden_test_ingest.json b/metadata-ingestion/tests/integration/superset/golden_test_ingest.json
index 6a522281f1c9d..74312940f06e7 100644
--- a/metadata-ingestion/tests/integration/superset/golden_test_ingest.json
+++ b/metadata-ingestion/tests/integration/superset/golden_test_ingest.json
@@ -11,7 +11,14 @@
         },
         {
           "com.linkedin.pegasus2avro.dashboard.DashboardInfo": {
-            "customProperties": {},
+            "customProperties": {
+              "Status": "published",
+              "IsPublished": "true",
+              "Owners": "test_username_1, test_username_2",
+              "IsCertified": "true",
+              "CertifiedBy": "Certification team",
+              "CertificationDetails": "Approved"
+            },
             "title": "test_dashboard_title_1",
             "description": "",
             "charts": [
@@ -52,7 +59,12 @@
         },
         {
           "com.linkedin.pegasus2avro.dashboard.DashboardInfo": {
-            "customProperties": {},
+            "customProperties": {
+              "Status": "draft",
+              "IsPublished": "false",
+              "Owners": "unknown",
+              "IsCertified": "false"
+            },
             "title": "test_dashboard_title_2",
             "description": "",
             "charts": [
diff --git a/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json b/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json
index 268fa37396245..cf38341085c1b 100644
--- a/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json
+++ b/metadata-ingestion/tests/integration/superset/golden_test_stateful_ingest.json
@@ -11,7 +11,14 @@
         },
         {
           "com.linkedin.pegasus2avro.dashboard.DashboardInfo": {
-            "customProperties": {},
+            "customProperties": {
+              "Status": "published",
+              "IsPublished": "true",
+              "Owners": "test_username_1, test_username_2",
+              "IsCertified": "true",
+              "CertifiedBy": "Certification team",
+              "CertificationDetails": "Approved"
+            },
             "title": "test_dashboard_title_1",
             "description": "",
             "charts": [
diff --git a/metadata-ingestion/tests/integration/superset/test_superset.py b/metadata-ingestion/tests/integration/superset/test_superset.py
index bc299e36515e1..b3b5982016146 100644
--- a/metadata-ingestion/tests/integration/superset/test_superset.py
+++ b/metadata-ingestion/tests/integration/superset/test_superset.py
@@ -41,6 +41,18 @@ def register_mock_api(request_mock: Any, override_data: dict = {}) -> None:
                     "dashboard_title": "test_dashboard_title_1",
                     "url": "/dashboard/test_dashboard_url_1",
                     "position_json": '{"CHART-test-1": {"meta": { "chartId": "10" }}, "CHART-test-2": {"meta": { "chartId": "11" }}}',
+                    "status": "published",
+                    "published": True,
+                    "owners": [
+                        {
+                            "username": "test_username_1",
+                        },
+                        {
+                            "username": "test_username_2",
+                        },
+                    ],
+                    "certified_by": "Certification team",
+                    "certification_details": "Approved",
                 },
                 {
                     "id": "2",
@@ -51,6 +63,15 @@ def register_mock_api(request_mock: Any, override_data: dict = {}) -> None:
                     "dashboard_title": "test_dashboard_title_2",
                     "url": "/dashboard/test_dashboard_url_2",
                     "position_json": '{"CHART-test-3": {"meta": { "chartId": "12" }}, "CHART-test-4": {"meta": { "chartId": "13" }}}',
+                    "status": "draft",
+                    "published": False,
+                    "owners": [
+                        {
+                            "first_name": "name",
+                        },
+                    ],
+                    "certified_by": "",
+                    "certification_details": "",
                 },
             ],
         },
@@ -151,7 +172,6 @@ def register_mock_api(request_mock: Any, override_data: dict = {}) -> None:
 @freeze_time(FROZEN_TIME)
 @pytest.mark.integration
 def test_superset_ingest(pytestconfig, tmp_path, mock_time, requests_mock):
-
     test_resources_dir = pytestconfig.rootpath / "tests/integration/superset"

     register_mock_api(request_mock=requests_mock)
@@ -193,7 +213,6 @@ def test_superset_ingest(pytestconfig, tmp_path, mock_time, requests_mock):
 def test_superset_stateful_ingest(
     pytestconfig, tmp_path, mock_time, requests_mock, mock_datahub_graph
 ):
-
     test_resources_dir = pytestconfig.rootpath / "tests/integration/superset"

     register_mock_api(request_mock=requests_mock)
@@ -241,6 +260,18 @@ def test_superset_stateful_ingest(
                     "dashboard_title": "test_dashboard_title_1",
                     "url": "/dashboard/test_dashboard_url_1",
                     "position_json": '{"CHART-test-1": {"meta": { "chartId": "10" }}, "CHART-test-2": {"meta": { "chartId": "11" }}}',
+                    "status": "published",
+                    "published": True,
+                    "owners": [
+                        {
+                            "username": "test_username_1",
+                        },
+                        {
+                            "username": "test_username_2",
+                        },
+                    ],
+                    "certified_by": "Certification team",
+                    "certification_details": "Approved",
                 },
             ],
         },

From 61165994587b1bd9f430d693aa9a73948903a68f Mon Sep 17 00:00:00 2001
From: Quentin FLEURENT NAMBOT
Date: Sat, 20 Jan 2024 10:24:23 +0100
Subject: [PATCH 253/263] fix(superset): handle comma in dataset table name
 (#9656)

---
 .../src/datahub/ingestion/source/superset.py | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py
index 931069a921058..827c630cfa148 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/superset.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py
@@ -9,7 +9,7 @@
 from pydantic.fields import Field

 from datahub.configuration import ConfigModel
-from datahub.emitter.mce_builder import DEFAULT_ENV
+from datahub.emitter.mce_builder import DEFAULT_ENV, make_dataset_urn
 from datahub.ingestion.api.common import PipelineContext
 from datahub.ingestion.api.decorators import (
     SourceCapability,
@@ -223,15 +223,13 @@ def get_datasource_urn_from_id(self, datasource_id):
         database_name = self.config.database_alias.get(database_name, database_name)

         if database_id and table_name:
-            platform = self.get_platform_from_database_id(database_id)
-            platform_urn = f"urn:li:dataPlatform:{platform}"
-            dataset_urn = (
-                f"urn:li:dataset:("
-                f"{platform_urn},{database_name + '.' if database_name else ''}"
-                f"{schema_name + '.' if schema_name else ''}"
-                f"{table_name},{self.config.env})"
+            return make_dataset_urn(
+                platform=self.get_platform_from_database_id(database_id),
+                name=".".join(
+                    name for name in [database_name, schema_name, table_name] if name
+                ),
+                env=self.config.env,
             )
-            return dataset_urn
         return None

     def construct_dashboard_from_api_data(self, dashboard_data):

From ce65b9cb3fe344da20dfd1b222a6d2a80f2fc2d8 Mon Sep 17 00:00:00 2001
From: Dimitri <36767102+dim-ops@users.noreply.github.com>
Date: Sat, 20 Jan 2024 10:25:40 +0100
Subject: [PATCH 254/263] feat(openapi): allow swagger 3.x (#9646)

Co-authored-by: Dimitri GRISARD
---
 .../src/datahub/ingestion/source/openapi_parser.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py
index 84bb3ad452611..c1caca18fefe3 100755
--- a/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/openapi_parser.py
@@ -111,8 +111,8 @@ def check_sw_version(sw_dict: dict) -> None:
     version = [int(v) for v in v_split]

     if version[0] == 3 and version[1] > 0:
-        raise NotImplementedError(
-            "This plugin is not compatible with Swagger version >3.0"
+        logger.warning(
+            "This plugin has not been fully tested with Swagger version >3.0"
         )

From 344eeaebc7ddff275cce5c62e2fd31f2bff6b8d6 Mon Sep 17 00:00:00 2001
From: Quentin FLEURENT NAMBOT
Date: Sun, 21 Jan 2024 21:38:44 +0100
Subject: [PATCH 255/263] feat(ingest/superset): add domains and
 platform_instance support (#9647)

---
 .../src/datahub/ingestion/source/superset.py | 75 ++++++++++++++++---
 1 file changed, 65 insertions(+), 10 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/superset.py b/metadata-ingestion/src/datahub/ingestion/source/superset.py
index 827c630cfa148..18f8e3709a648 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/superset.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/superset.py
@@ -8,8 +8,18 @@
 from pydantic.class_validators import root_validator, validator
 from pydantic.fields import Field

-from datahub.configuration import ConfigModel
-from datahub.emitter.mce_builder import DEFAULT_ENV, make_dataset_urn
+from datahub.configuration.common import AllowDenyPattern
+from datahub.configuration.source_common import (
+    EnvConfigMixin,
+    PlatformInstanceConfigMixin,
+)
+from datahub.emitter.mce_builder import (
+    make_chart_urn,
+    make_dashboard_urn,
+    make_dataset_urn,
+    make_domain_urn,
+)
+from datahub.emitter.mcp_builder import add_domain_to_entity_wu
 from datahub.ingestion.api.common import PipelineContext
 from datahub.ingestion.api.decorators import (
     SourceCapability,
@@ -49,6 +59,7 @@
     DashboardInfoClass,
 )
 from datahub.utilities import config_clean
+from datahub.utilities.registries.domain_registry import DomainRegistry

 logger = logging.getLogger(__name__)

@@ -72,7 +83,9 @@
 }

-class SupersetConfig(StatefulIngestionConfigBase, ConfigModel):
+class SupersetConfig(
+    StatefulIngestionConfigBase, EnvConfigMixin, PlatformInstanceConfigMixin
+):
     # See the Superset /security/login endpoint for details
     # https://superset.apache.org/docs/rest-api
     connect_uri: str = Field(
@@ -82,6 +95,10 @@ class SupersetConfig(
         default=None,
         description="optional URL to use in links (if `connect_uri` is only for ingestion)",
     )
+    domain: Dict[str, AllowDenyPattern] = Field(
+        default=dict(),
+        description="regex patterns for tables to filter to assign domain_key. ",
+    )
     username: Optional[str] = Field(default=None, description="Superset username.")
     password: Optional[str] = Field(default=None, description="Superset password.")
@@ -92,10 +109,7 @@ class SupersetConfig(StatefulIngestionConfigBase, ConfigModel):
     provider: str = Field(default="db", description="Superset provider.")
     options: Dict = Field(default={}, description="")

-    env: str = Field(
-        default=DEFAULT_ENV,
-        description="Environment to use in namespace when constructing URNs",
-    )
+
     # TODO: Check and remove this if no longer needed.
     # Config database_alias is removed from sql sources.
     database_alias: Dict[str, str] = Field(
@@ -188,6 +202,12 @@ def __init__(self, ctx: PipelineContext, config: SupersetConfig):
             }
         )

+        if self.config.domain:
+            self.domain_registry = DomainRegistry(
+                cached_domains=[domain_id for domain_id in self.config.domain],
+                graph=self.ctx.graph,
+            )
+
         # Test the connection
         test_response = self.session.get(f"{self.config.connect_uri}/api/v1/dashboard/")
         if test_response.status_code == 200:
@@ -233,7 +253,11 @@ def get_datasource_urn_from_id(self, datasource_id):
         return None

     def construct_dashboard_from_api_data(self, dashboard_data):
-        dashboard_urn = f"urn:li:dashboard:({self.platform},{dashboard_data['id']})"
+        dashboard_urn = make_dashboard_urn(
+            platform=self.platform,
+            name=dashboard_data["id"],
+            platform_instance=self.config.platform_instance,
+        )
         dashboard_snapshot = DashboardSnapshot(
             urn=dashboard_urn,
             aspects=[Status(removed=False)],
@@ -262,7 +286,11 @@ def construct_dashboard_from_api_data(self, dashboard_data):
             if not key.startswith("CHART-"):
                 continue
             chart_urns.append(
-                f"urn:li:chart:({self.platform},{value.get('meta', {}).get('chartId', 'unknown')})"
+                make_chart_urn(
+                    platform=self.platform,
+                    name=value.get("meta", {}).get("chartId", "unknown"),
+                    platform_instance=self.config.platform_instance,
+                )
             )

         # Build properties
@@ -325,9 +353,17 @@ def emit_dashboard_mces(self) -> Iterable[MetadataWorkUnit]:
                 )
             mce = MetadataChangeEvent(proposedSnapshot=dashboard_snapshot)
             yield MetadataWorkUnit(id=dashboard_snapshot.urn, mce=mce)
+            yield from self._get_domain_wu(
+                title=dashboard_data.get("dashboard_title", ""),
+                entity_urn=dashboard_snapshot.urn,
+            )

     def construct_chart_from_chart_data(self, chart_data):
-        chart_urn = f"urn:li:chart:({self.platform},{chart_data['id']})"
+        chart_urn = make_chart_urn(
+            platform=self.platform,
+            name=chart_data["id"],
+            platform_instance=self.config.platform_instance,
+        )
         chart_snapshot = ChartSnapshot(
             urn=chart_urn,
             aspects=[Status(removed=False)],
@@ -424,6 +460,10 @@ def emit_chart_mces(self) -> Iterable[MetadataWorkUnit]:
             mce = MetadataChangeEvent(proposedSnapshot=chart_snapshot)
             yield MetadataWorkUnit(id=chart_snapshot.urn, mce=mce)
+            yield from self._get_domain_wu(
+                title=chart_data.get("slice_name", ""),
+                entity_urn=chart_snapshot.urn,
+            )

     def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
         yield from self.emit_dashboard_mces()
@@ -439,3 +479,18 @@ def get_workunit_processors(self) -> List[Optional[MetadataWorkUnitProcessor]]:

     def get_report(self) -> StaleEntityRemovalSourceReport:
         return self.report
+
+    def _get_domain_wu(self, title: str, entity_urn: str) -> Iterable[MetadataWorkUnit]:
+        domain_urn = None
+        for domain, pattern in self.config.domain.items():
+            if pattern.allowed(title):
+                domain_urn = make_domain_urn(
+                    self.domain_registry.get_domain_urn(domain)
+                )
+                break
+
+        if domain_urn:
+            yield from add_domain_to_entity_wu(
+                entity_urn=entity_urn,
+                domain_urn=domain_urn,
+            )

From ad65c36ddcb253dd3f8b22dc01465de134b006b1 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Sun, 21 Jan 2024 14:39:31 -0600
Subject: [PATCH 256/263] fix(workflow): workflow tweaks (#9678)

---
 .github/actions/ci-optimization/action.yml |  2 +-
 .github/workflows/build-and-test.yml       |  1 +
 .github/workflows/metadata-io.yml          | 20 ++++++++++++++++++++
 3 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml
index 404e0bab814e8..f6160fdbcff67 100644
--- a/.github/actions/ci-optimization/action.yml
+++ b/.github/actions/ci-optimization/action.yml
@@ -48,7 +48,7 @@ runs:
           - "smoke-test/tests/cypress/**"
           - "docker/datahub-frontend/**"
         ingestion:
-          - "metadata-ingestion-modules/airflow-plugin/**"
+          - "metadata-ingestion-modules/**"
          - "metadata-ingestion/**"
          - "metadata-models/**"
          - "smoke-test/**"
diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
index 180e0472a8d99..060d345a6b7d9 100644
--- a/.github/workflows/build-and-test.yml
+++ b/.github/workflows/build-and-test.yml
@@ -69,6 +69,7 @@ jobs:
           java-version: 17
       - uses: gradle/gradle-build-action@v2
       - uses: actions/setup-python@v4
+        if: ${{ needs.setup.outputs.ingestion_change == 'true' }}
         with:
           python-version: "3.10"
           cache: pip
diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml
index eb5822b5b480d..243bd90cd6003 100644
--- a/.github/workflows/metadata-io.yml
+++ b/.github/workflows/metadata-io.yml
@@ -24,9 +24,28 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  setup:
+    runs-on: ubuntu-latest
+    outputs:
+      frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }}
+      ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }}
+      backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }}
+      docker_change: ${{ steps.ci-optimize.outputs.docker-change == 'true' }}
+      frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }}
+      ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }}
+      kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }}
+      mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }}
+      postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }}
+      elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }}
+    steps:
+      - name: Check out the repo
+        uses: hsheth2/sane-checkout-action@v1
+      - uses: ./.github/actions/ci-optimization
+        id: ci-optimize
   build:
     runs-on: ubuntu-latest
     timeout-minutes: 60
+    needs: setup
     steps:
       - uses: actions/checkout@v3
       - name: Set up JDK 17
@@ -36,6 +55,7 @@ jobs:
           java-version: 17
       - uses: gradle/gradle-build-action@v2
       - uses: actions/setup-python@v4
+        if: ${{ needs.setup.outputs.ingestion_change == 'true' }}
         with:
           python-version: "3.10"
           cache: "pip"

From 77df9ec9262047e0e314c5a7a80f0eca3854ef35 Mon Sep 17 00:00:00 2001
From: Mayuri Nehate <33225191+mayurinehate@users.noreply.github.com>
Date: Mon, 22 Jan 2024 18:07:43 +0530
Subject: [PATCH 257/263] feat(ingest/databricks): view upstream lineage for
 hive metastore (#9657)

---
 .../source/unity/hive_metastore_proxy.py      |   1 -
 .../datahub/ingestion/source/unity/source.py  | 101 +++++++++++++++++-
 .../unity/unity_catalog_mces_golden.json      |  60 +++++++++++
 3 files changed, 159 insertions(+), 3 deletions(-)

diff --git
 a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py
index 814d86a2f3234..2a98dda1c79c5 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/unity/hive_metastore_proxy.py
@@ -55,7 +55,6 @@


 class HiveMetastoreProxy(Closeable):
-    # TODO: Support for view lineage using SQL parsing
     # Why not use hive ingestion source directly here ?
     # 1. hive ingestion source assumes 2-level namespace heirarchy and currently
     #    there is no other intermediate interface except sqlalchemy inspector
diff --git a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py
index 1bc47c6307849..7a47b1181ae36 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/unity/source.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/unity/source.py
@@ -1,7 +1,7 @@
 import logging
 import re
 from concurrent.futures import ThreadPoolExecutor
-from typing import Dict, Iterable, List, Optional, Set, Union
+from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
 from urllib.parse import urljoin

 from datahub.emitter.mce_builder import (
@@ -24,6 +24,7 @@
     add_dataset_to_container,
     gen_containers,
 )
+from datahub.emitter.sql_parsing_builder import SqlParsingBuilder
 from datahub.ingestion.api.common import PipelineContext
 from datahub.ingestion.api.decorators import (
     SupportStatus,
@@ -67,6 +68,7 @@
     DATA_TYPE_REGISTRY,
     Catalog,
     Column,
+    CustomCatalogType,
     Metastore,
     Notebook,
     NotebookId,
@@ -104,6 +106,12 @@
 from datahub.utilities.file_backed_collections import FileBackedDict
 from datahub.utilities.hive_schema_to_avro import get_schema_fields_for_hive_column
 from datahub.utilities.registries.domain_registry import DomainRegistry
+from datahub.utilities.sqlglot_lineage import (
+    SchemaResolver,
+    SqlParsingResult,
+    sqlglot_lineage,
+    view_definition_lineage_helper,
+)

 logger: logging.Logger = logging.getLogger(__name__)

@@ -137,6 +145,7 @@ class UnityCatalogSource(StatefulIngestionSourceBase, TestableSource):
     unity_catalog_api_proxy: UnityCatalogApiProxy
     platform: str = "databricks"
     platform_instance_name: Optional[str]
+    sql_parser_schema_resolver: Optional[SchemaResolver] = None

     def get_report(self) -> UnityCatalogReport:
         return self.report
@@ -179,6 +188,9 @@ def __init__(self, ctx: PipelineContext, config: UnityCatalogSourceConfig):
         self.table_refs: Set[TableReference] = set()
         self.view_refs: Set[TableReference] = set()
         self.notebooks: FileBackedDict[Notebook] = FileBackedDict()
+        self.view_definitions: FileBackedDict[
+            Tuple[TableReference, str]
+        ] = FileBackedDict()

         # Global map of tables, for profiling
         self.tables: FileBackedDict[Table] = FileBackedDict()
@@ -191,6 +203,13 @@ def init_hive_metastore_proxy(self):
                     self.config.get_sql_alchemy_url(HIVE_METASTORE), self.config.options
                 )
                 self.report.hive_metastore_catalog_found = True
+
+                if self.config.include_table_lineage:
+                    self.sql_parser_schema_resolver = SchemaResolver(
+                        platform=self.platform,
+                        platform_instance=self.config.platform_instance,
+                        env=self.config.env,
+                    )
             except Exception as e:
                 logger.debug("Exception", exc_info=True)
                 self.warn(
@@ -243,6 +262,8 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:

         yield from self.process_metastores()

+        yield from self.get_view_lineage()
+
         if self.config.include_notebooks:
             self.report.report_ingestion_stage_start("Notebook lineage")
             for notebook in self.notebooks.values():
@@ -304,7 +325,6 @@ def process_notebooks(self) -> Iterable[MetadataWorkUnit]:
             yield from self._gen_notebook_workunits(notebook)

     def _gen_notebook_workunits(self, notebook: Notebook) -> Iterable[MetadataWorkUnit]:
-
         properties = {"path": notebook.path}
         if notebook.language:
             properties["language"] = notebook.language.value
@@ -449,6 +469,17 @@ def process_table(self, table: Table, schema: Schema) -> Iterable[MetadataWorkUn
                     table.ref, self.notebooks[str(notebook_id)]
                 )

+        # Sql parsing is required only for hive metastore view lineage
+        if (
+            self.sql_parser_schema_resolver
+            and table.schema.catalog.type == CustomCatalogType.HIVE_METASTORE_CATALOG
+        ):
+            self.sql_parser_schema_resolver.add_schema_metadata(
+                dataset_urn, schema_metadata
+            )
+            if table.view_definition:
+                self.view_definitions[dataset_urn] = (table.ref, table.view_definition)
+
         yield from [
             mcp.as_workunit()
             for mcp in MetadataChangeProposalWrapper.construct_many(
@@ -828,8 +859,74 @@ def _create_schema_field(column: Column) -> List[SchemaFieldClass]:
                 )
             ]

+    def _run_sql_parser(
+        self, view_ref: TableReference, query: str, schema_resolver: SchemaResolver
+    ) -> Optional[SqlParsingResult]:
+        raw_lineage = sqlglot_lineage(
+            query,
+            schema_resolver=schema_resolver,
+            default_db=view_ref.catalog,
+            default_schema=view_ref.schema,
+        )
+        view_urn = self.gen_dataset_urn(view_ref)
+
+        if raw_lineage.debug_info.table_error:
+            logger.debug(
+                f"Failed to parse lineage for view {view_ref}: "
+                f"{raw_lineage.debug_info.table_error}"
+            )
+            self.report.num_view_definitions_failed_parsing += 1
+            self.report.view_definitions_parsing_failures.append(
+                f"Table-level sql parsing error for view {view_ref}: {raw_lineage.debug_info.table_error}"
+            )
+            return None
+
+        elif raw_lineage.debug_info.column_error:
+            self.report.num_view_definitions_failed_column_parsing += 1
+            self.report.view_definitions_parsing_failures.append(
+                f"Column-level sql parsing error for view {view_ref}: {raw_lineage.debug_info.column_error}"
+            )
+        else:
+            self.report.num_view_definitions_parsed += 1
+        return view_definition_lineage_helper(raw_lineage, view_urn)
+
+    def get_view_lineage(self) -> Iterable[MetadataWorkUnit]:
+        if not (
+            self.config.include_hive_metastore
+            and self.config.include_table_lineage
+            and self.sql_parser_schema_resolver
+        ):
+            return
+        # This is only used for parsing view lineage. Usage, Operations are emitted elsewhere
+        builder = SqlParsingBuilder(
+            generate_lineage=True,
+            generate_usage_statistics=False,
+            generate_operations=False,
+        )
+        for dataset_name in self.view_definitions.keys():
+            view_ref, view_definition = self.view_definitions[dataset_name]
+            result = self._run_sql_parser(
+                view_ref,
+                view_definition,
+                self.sql_parser_schema_resolver,
+            )
+            if result and result.out_tables:
+                # This does not yield any workunits but we use
+                # yield here to execute this method
+                yield from builder.process_sql_parsing_result(
+                    result=result,
+                    query=view_definition,
+                    is_view_ddl=True,
+                    include_column_lineage=self.config.include_view_column_lineage,
+                )
+        yield from builder.gen_workunits()
+
     def close(self):
         if self.hive_metastore_proxy:
             self.hive_metastore_proxy.close()
+        if self.view_definitions:
+            self.view_definitions.close()
+        if self.sql_parser_schema_resolver:
+            self.sql_parser_schema_resolver.close()
         super().close()
diff --git a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json
index 649212c1041ed..7cc0f84ee5177 100644
--- a/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json
+++ b/metadata-ingestion/tests/integration/unity/unity_catalog_mces_golden.json
@@ -3463,6 +3463,66 @@
     "lastRunId": "no-run-id-provided"
   }
 },
+{
+  "entityType": "dataset",
+  "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD)",
+  "changeType": "UPSERT",
+  "aspectName": "upstreamLineage",
+  "aspect": {
+    "json": {
+      "upstreams": [
+        {
+          "auditStamp": {
+            "time": 0,
+            "actor": "urn:li:corpuser:unknown"
+          },
+          "dataset": "urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD)",
+          "type": "VIEW"
+        }
+      ],
+      "fineGrainedLineages": [
+        {
+          "upstreamType": "FIELD_SET",
+          "upstreams": [
+            "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD),betStatusId)"
+          ],
+          "downstreamType": "FIELD",
+          "downstreams": [
+            "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD),betStatusId)"
+          ],
+          "confidenceScore": 1.0
+        },
+        {
+          "upstreamType": "FIELD_SET",
+          "upstreams": [
+            "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD),channelId)"
+          ],
+          "downstreamType": "FIELD",
+          "downstreams": [
+            "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD),channelId)"
+          ],
+          "confidenceScore": 1.0
+        },
+        {
+          "upstreamType": "FIELD_SET",
+          "upstreams": [
+            "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.bet,PROD),combination)"
+          ],
+          "downstreamType": "FIELD",
+          "downstreams": [
+            "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:databricks,hive_metastore.bronze_kambi.view1,PROD),combination)"
+          ],
+          "confidenceScore": 1.0
+        }
+      ]
+    }
+  },
+  "systemMetadata": {
+    "lastObserved": 1638860400000,
+    "runId": "unity-catalog-test",
+    "lastRunId": "no-run-id-provided"
+  }
+},
 {
   "entityType": "dataset",
   "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:databricks,system.quickstart_schema.quickstart_table,PROD)",

From 1d16e4296497d5e9525cfebaf89344dd18fd247d Mon Sep 17 00:00:00 2001
From: Dimitri <36767102+dim-ops@users.noreply.github.com>
Date: Mon, 22 Jan 2024 14:46:57 +0100
Subject: [PATCH 258/263] feat(ingest/dynamodb): add domain arg (#9658)

Co-authored-by: Dimitri GRISARD
---
 .../ingestion/source/dynamodb/dynamodb.py | 36 +++++++++++++++++++
 1 file changed, 36 insertions(+)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py b/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py
index d7f3dfb9279fb..972eb60ff5b05 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/dynamodb/dynamodb.py
@@ -13,8 +13,10 @@
     make_data_platform_urn,
     make_dataplatform_instance_urn,
     make_dataset_urn_with_platform_instance,
+    make_domain_urn,
 )
 from datahub.emitter.mcp import MetadataChangeProposalWrapper
+from datahub.emitter.mcp_builder import add_domain_to_entity_wu
 from datahub.ingestion.api.common import PipelineContext
 from datahub.ingestion.api.decorators import (
     SupportStatus,
@@ -53,6 +55,7 @@
     DataPlatformInstanceClass,
     DatasetPropertiesClass,
 )
+from datahub.utilities.registries.domain_registry import DomainRegistry

 MAX_ITEMS_TO_RETRIEVE = 100
 PAGE_SIZE = 100
@@ -68,6 +71,11 @@ class DynamoDBConfig(DatasetSourceConfigMixin, StatefulIngestionConfigBase):
     aws_access_key_id: str = Field(description="AWS Access Key ID.")
     aws_secret_access_key: pydantic.SecretStr = Field(description="AWS Secret Key.")

+    domain: Dict[str, AllowDenyPattern] = Field(
+        default=dict(),
+        description="regex patterns for tables to filter to assign domain_key. ",
+    )
+
     # This config option allows user to include a list of items from a table when we scan and construct the schema,
     # the key of this dict is table name and the value is the list of item primary keys in dynamodb format,
     # if the table use composite key then the value should have partition key and sort key present
@@ -155,6 +163,12 @@ def __init__(self, ctx: PipelineContext, config: DynamoDBConfig, platform: str):
         self.report = DynamoDBSourceReport()
         self.platform = platform

+        if self.config.domain:
+            self.domain_registry = DomainRegistry(
+                cached_domains=[domain_id for domain_id in self.config.domain],
+                graph=self.ctx.graph,
+            )
+
     @classmethod
     def create(cls, config_dict: dict, ctx: PipelineContext) -> "DynamoDBSource":
         config = DynamoDBConfig.parse_obj(config_dict)
@@ -234,6 +248,11 @@ def get_workunits_internal(self) -> Iterable[MetadataWorkUnit]:
                     aspect=dataset_properties,
                 ).as_workunit()

+                yield from self._get_domain_wu(
+                    dataset_name=table_name,
+                    entity_urn=dataset_urn,
+                )
+
                 platform_instance_aspect = DataPlatformInstanceClass(
                     platform=make_data_platform_urn(self.platform),
                     instance=make_dataplatform_instance_urn(
@@ -480,3 +499,20 @@ def get_field_type(

     def get_report(self) -> DynamoDBSourceReport:
         return self.report
+
+    def _get_domain_wu(
+        self, dataset_name: str, entity_urn: str
+    ) -> Iterable[MetadataWorkUnit]:
+        domain_urn = None
+        for domain, pattern in self.config.domain.items():
+            if pattern.allowed(dataset_name):
+                domain_urn = make_domain_urn(
+                    self.domain_registry.get_domain_urn(domain)
+                )
+                break
+
+        if domain_urn:
+            yield from add_domain_to_entity_wu(
+                entity_urn=entity_urn,
+                domain_urn=domain_urn,
+            )

From 943bb57cbcf22db12c092a7f9a30c762aa2bf6e5 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Mon, 22 Jan 2024 11:46:04 -0600
Subject: [PATCH 259/263] feat(backend): structured properties and forms
 (#9626)

Co-authored-by: Chris Collins
Co-authored-by: RyanHolstien
---
 .../GenerateJsonSchemaTask.java | 15 +-
 .../io/datahubproject/OpenApiEntities.java | 30 +-
 .../app/config/ConfigurationProvider.java | 4 +
 .../app/controllers/Application.java | 7 +
 .../app/controllers/RedirectController.java | 25 +
 datahub-frontend/conf/routes | 4 +
 datahub-frontend/public | 1 +
 .../resources/public/logos/datahub-logo.png | Bin 0 -> 53563 bytes
 datahub-graphql-core/build.gradle | 3 +
 .../linkedin/datahub/graphql/Constants.java | 3 +-
 .../datahub/graphql/GmsGraphQLEngine.java | 235 +++-
 .../datahub/graphql/GmsGraphQLEngineArgs.java | 2 +
 .../datahub/graphql/GmsGraphQLPlugin.java | 4 +
 .../datahub/graphql/SubTypesResolver.java | 55 +
 .../graphql/WeaklyTypedAspectsResolver.java | 2 +-
 .../GetMetadataAnalyticsResolver.java | 2 +-
 .../analytics/service/AnalyticsService.java | 2 +-
 .../graphql/featureflags/FeatureFlags.java | 1 +
 .../resolvers/chart/BrowseV2Resolver.java | 10 +-
 .../resolvers/config/AppConfigResolver.java | 12 +-
 .../ListDataProductAssetsResolver.java | 2 +-
 .../domain/DomainEntitiesResolver.java | 2 +-
 .../form/BatchAssignFormResolver.java | 52 +
 .../CreateDynamicFormAssignmentResolver.java | 50 +
 .../form/IsFormAssignedToMeResolver.java | 80 ++
 .../form/SubmitFormPromptResolver.java | 89 ++
 .../resolvers/form/VerifyFormResolver.java | 63 +
 .../glossary/CreateGlossaryNodeResolver.java | 1 -
 .../resolvers/group/EntityCountsResolver.java | 2 +-
 .../execution/RollbackIngestionResolver.java | 3 +-
 .../resolvers/mutate/util/FormUtils.java | 105 ++
 .../policy/GetGrantedPrivilegesResolver.java | 2 +-
 .../ListRecommendationsResolver.java | 2 +-
 .../AggregateAcrossEntitiesResolver.java | 8 +-
 .../AutoCompleteForMultipleResolver.java | 8 +-
 .../search/GetQuickFiltersResolver.java | 2 +-
 .../search/ScrollAcrossEntitiesResolver.java | 2 +-
 .../search/ScrollAcrossLineageResolver.java | 2 +-
 .../search/SearchAcrossLineageResolver.java | 2 +-
 .../resolvers/search/SearchResolver.java | 2 +-
 .../graphql/resolvers/search/SearchUtils.java | 2 +-
 .../resolvers/type/PropertyValueResolver.java | 25 +
 .../graphql/resolvers/view/ViewUtils.java | 2 +-
 .../common/mappers/UrnToEntityMapper.java | 6 +
 .../graphql/types/dataset/DatasetType.java | 2 +
 .../dataset/mappers/DatasetFilterMapper.java | 24 +
 .../types/dataset/mappers/DatasetMapper.java | 13 +
 .../dataset/mappers/SchemaFieldMapper.java | 14 +
 .../dataset/mappers/SchemaMetadataMapper.java | 7 +-
 .../types/datatype/DataTypeEntityMapper.java | 51 +
 .../graphql/types/datatype/DataTypeType.java | 78 ++
 .../types/datatype/DataTypeUrnMapper.java | 40 +
 .../entitytype/EntityTypeEntityMapper.java | 54 +
 .../entitytype}/EntityTypeMapper.java | 8 +-
 .../types/entitytype/EntityTypeType.java | 78 ++
 .../types/entitytype/EntityTypeUrnMapper.java | 85 ++
 .../graphql/types/form/FormMapper.java | 129 ++
 .../datahub/graphql/types/form/FormType.java | 76 ++
 .../graphql/types/form/FormsMapper.java | 133 ++
 .../glossary/mappers/GlossaryNodeMapper.java | 11 +-
 .../glossary/mappers/GlossaryTermsMapper.java | 11 +-
 .../graphql/types/mappers/MapperUtils.java | 2 +-
 .../types/schemafield/SchemaFieldMapper.java | 54 +
 .../types/schemafield/SchemaFieldType.java | 70 +-
 .../StructuredPropertiesMapper.java | 80 ++
 .../StructuredPropertyMapper.java | 124 ++
 .../StructuredPropertyType.java | 79 ++
 .../graphql/types/view/DataHubViewMapper.java | 2 +-
 .../src/main/resources/app.graphql | 10 +
 .../src/main/resources/entity.graphql | 336 ++++-
 .../src/main/resources/forms.graphql | 407 ++++++
 .../src/main/resources/properties.graphql | 243 ++++
 .../src/main/resources/search.graphql | 6 +-
 .../src/main/resources/tests.graphql | 2 +
 .../linkedin/datahub/graphql/TestUtils.java | 8 +-
 browse/BrowseV2ResolverTest.java | 13 +-
 .../domain/DomainEntitiesResolverTest.java | 2 +-
 .../form/IsFormAssignedToMeResolverTest.java | 167 +++
 .../form/VerifyFormResolverTest.java | 122 ++
 .../RollbackIngestionResolverTest.java | 7 +-
 .../mutate/MutableTypeBatchResolverTest.java | 5 +-
 .../AggregateAcrossEntitiesResolverTest.java | 21 +-
 .../search/GetQuickFiltersResolverTest.java | 2 +-
 .../SearchAcrossEntitiesResolverTest.java | 2 +-
 .../upgrade/UpgradeCliApplication.java | 8 +-
 .../common/steps/GMSDisableWriteModeStep.java | 10 +-
 .../common/steps/GMSEnableWriteModeStep.java | 10 +-
 .../config/BackfillBrowsePathsV2Config.java | 2 +-
 .../upgrade/config/BuildIndicesConfig.java | 10 +-
 .../upgrade/config/NoCodeUpgradeConfig.java | 9 +-
 .../config/RemoveUnknownAspectsConfig.java | 2 +-
 .../upgrade/config/RestoreBackupConfig.java | 9 +-
 .../upgrade/config/RestoreIndicesConfig.java | 11 +-
 .../upgrade/impl/DefaultUpgradeContext.java | 46 +-
 .../upgrade/impl/DefaultUpgradeManager.java | 3 +
 .../datahub/upgrade/nocode/NoCodeUpgrade.java | 10 +-
 .../RemoveClientIdAspectStep.java | 2 +-
 .../RemoveUnknownAspects.java | 4 +-
 .../upgrade/restorebackup/RestoreBackup.java | 10 +-
 .../restorebackup/RestoreStorageStep.java | 2 +-
 .../backupreader/BackupReader.java | 1 +
 .../restoreindices/RestoreIndices.java | 11 +-
 .../upgrade/restoreindices/SendMAEStep.java | 26 +-
 .../system/elasticsearch/BuildIndices.java | 24 +-
 .../steps/BuildIndicesPreStep.java | 29 +-
 .../system/elasticsearch/util/IndexUtils.java | 19 +
 .../entity/steps/BackfillBrowsePathsV2.java | 2 +-
 .../steps/BackfillBrowsePathsV2Step.java | 4 +-
 .../src/main/resources/application.properties | 5 +
 ...pgradeCliApplicationTestConfiguration.java | 2 +-
 datahub-web-react/build.gradle | 1 -
 datahub-web-react/index.html | 3 +-
 .../public/assets/{ => icons}/favicon.ico | Bin
 datahub-web-react/public/assets/logo.png | Bin 53563 -> 22 bytes
 .../public/assets/logos/datahub-logo.png | Bin 0 -> 53563 bytes
 datahub-web-react/public/browserconfig.xml | 9 +
 datahub-web-react/public/manifest.json | 2 +-
 docker/build.gradle | 5 +
 .../datahub-ingestion-base/smoke.Dockerfile | 4 +-
 docker/datahub-ingestion/Dockerfile | 4 +-
 docker/datahub-ingestion/Dockerfile-slim-only | 2 +-
 docker/elasticsearch-setup/Dockerfile | 2 +-
 docker/profiles/docker-compose.gms.yml | 4 +
 docs-website/graphql/generateGraphQLSchema.sh | 2 +
 docs-website/sidebars.js | 16 +-
 .../TownhallButton/townhallbutton.module.scss | 2 +-
 .../openapi/openapi-structured-properties.md | 284 +++++
 .../metadata/aspect/batch/AspectsBatch.java | 11 +-
 .../metadata/aspect/batch/MCPBatchItem.java | 2 +-
 .../metadata/aspect/batch/PatchItem.java | 6 +-
 .../metadata/aspect/batch/UpsertItem.java | 5 +-
 .../aspect/patch/GenericJsonPatch.java | 34 +
 .../aspect}/patch/PatchOperationType.java | 2 +-
 .../AbstractMultiFieldPatchBuilder.java | 7 +-
 .../patch/builder}/ChartInfoPatchBuilder.java | 10 +-
 .../CustomPropertiesPatchBuilder.java | 9 +-
 .../builder}/DashboardInfoPatchBuilder.java | 12 +-
 .../builder}/DataFlowInfoPatchBuilder.java | 13 +-
 .../builder}/DataJobInfoPatchBuilder.java | 13 +-
 .../DataJobInputOutputPatchBuilder.java | 13 +-
 .../DatasetPropertiesPatchBuilder.java | 13 +-
 .../EditableSchemaMetadataPatchBuilder.java | 10 +-
 .../builder}/GlobalTagsPatchBuilder.java | 9 +-
 .../builder}/GlossaryTermsPatchBuilder.java | 9 +-
 .../patch/builder}/OwnershipPatchBuilder.java | 9 +-
 .../aspect/patch/builder}/PatchUtil.java | 6 +-
 .../StructuredPropertiesPatchBuilder.java | 110 ++
 .../builder}/UpstreamLineagePatchBuilder.java | 11 +-
 .../CustomPropertiesPatchBuilderSupport.java | 4 +-
 .../IntermediatePatchBuilder.java | 4 +-
 .../patch}/template/ArrayMergingTemplate.java | 4 +-
 .../patch}/template/AspectTemplateEngine.java | 19 +-
 .../patch/template/CompoundKeyTemplate.java | 23 +
 .../patch}/template/Template.java | 30 +-
 .../aspect/patch/template/TemplateUtil.java | 97 ++
 .../template/chart/ChartInfoTemplate.java | 6 +-
 .../template/common/GenericPatchTemplate.java | 59 +
 .../template/common/GlobalTagsTemplate.java | 4 +-
 .../common/GlossaryTermsTemplate.java | 8 +-
 .../template/common/OwnershipTemplate.java | 6 +-
 .../common/StructuredPropertiesTemplate.java | 56 +
 .../dashboard/DashboardInfoTemplate.java | 6 +-
 .../dataflow/DataFlowInfoTemplate.java | 4 +-
 .../template/datajob/DataJobInfoTemplate.java | 4 +-
 .../datajob/DataJobInputOutputTemplate.java | 4 +-
 .../DataProductPropertiesTemplate.java | 4 +-
 .../dataset/DatasetPropertiesTemplate.java | 4 +-
 .../EditableSchemaMetadataTemplate.java | 10 +-
 .../dataset/UpstreamLineageTemplate.java | 11 +-
 .../aspect/plugins/PluginFactory.java | 29 +-
 .../metadata/aspect/plugins/PluginSpec.java | 8 +-
 .../aspect/plugins/hooks/MCLSideEffect.java | 11 +-
 .../aspect/plugins/hooks/MCPSideEffect.java | 9 +-
 .../plugins/validation/AspectRetriever.java | 25 +-
 .../PropertyDefinitionValidator.java | 91 ++
 .../StructuredPropertiesValidator.java | 264 ++++
 .../metadata/models/LogicalValueType.java | 10 +
 .../models/StructuredPropertyUtils.java | 45 +
 .../models/registry/ConfigEntityRegistry.java | 2 +-
 .../models/registry/EntityRegistry.java | 7 +-
 .../models/registry/MergedEntityRegistry.java | 2 +-
 .../models/registry/PatchEntityRegistry.java | 2 +-
 .../registry/SnapshotEntityRegistry.java | 31 +-
 .../template/CompoundKeyTemplate.java | 52 -
 .../registry/template/util/TemplateUtil.java | 39 -
 .../template}/ChartInfoTemplateTest.java | 4 +-
 .../template}/DashboardInfoTemplateTest.java | 4 +-
 .../UpstreamLineageTemplateTest.java | 4 +-
 .../metadata/aspect/plugins/PluginsTest.java | 28 +-
 .../plugins/hooks/MCLSideEffectTest.java | 5 +-
 .../plugins/hooks/MCPSideEffectTest.java | 3 +-
 .../validation/ValidatorPluginTest.java | 2 +-
 .../PropertyDefinitionValidatorTest.java | 212 ++++
 .../StructuredPropertiesValidatorTest.java | 246 ++++
 .../models/EntitySpecBuilderTest.java | 2 +-
 .../PluginEntityRegistryLoaderTest.java | 2 +-
 .../ingestion/IngestionSchedulerTest.java | 4 +-
 .../java/com/linkedin/metadata/Constants.java | 23 +
 .../airflow-plugin/scripts/release.sh | 2 +-
 .../bootstrap_data/business_glossary.yml | 2 +
 metadata-ingestion/examples/forms/forms.yaml | 54 +
 .../mce_files/test_structured_properties.json | 218 ++++
 .../examples/structured_properties/README.md | 51 +
 .../structured_properties/click_event.avsc | 14 +
 .../structured_properties/dataset.yaml | 45 +
 .../structured_properties.yaml | 68 +
 metadata-ingestion/scripts/docgen.sh | 2 +-
 metadata-ingestion/scripts/modeldocgen.py | 22 +
 metadata-ingestion/scripts/release.sh | 2 +-
 .../datahub/api/entities/dataset/__init__.py | 0
 .../datahub/api/entities/dataset/dataset.py | 466 +++++++
 .../datahub/api/entities/forms/__init__.py | 0
 .../src/datahub/api/entities/forms/forms.py | 353 ++++++
 .../entities/forms/forms_graphql_constants.py | 27 +
 .../entities/structuredproperties/__init__.py | 0
 .../structuredproperties.py | 185 +++
 .../src/datahub/cli/docker_check.py | 37 +
 .../datahub/cli/specific/dataproduct_cli.py | 3 +-
 .../src/datahub/cli/specific/dataset_cli.py | 67 +
 .../src/datahub/cli/specific/forms_cli.py | 53 +
 .../cli/specific/structuredproperties_cli.py | 62 +
 metadata-ingestion/src/datahub/entrypoints.py | 6 +
 .../source/metadata/business_glossary.py | 2 +
 .../src/datahub/specific/dataset.py | 32 +
 .../datahub/specific/structured_properties.py | 53 +
 .../src/datahub/utilities/urn_encoder.py | 1 +
 .../src/datahub/utilities/urns/_urn_base.py | 40 +
 .../urns/structured_properties_urn.py | 5 +
 .../business-glossary/business_glossary.yml | 2 +
 .../glossary_events_auto_id_golden.json | 93 +-
 .../glossary_events_golden.json | 93 +-
 .../remote/content/business_glossary.yml | 2 +
 .../remote/golden/remote_glossary_golden.json | 93 +-
 .../java/datahub-client/build.gradle | 12 +-
 .../java/datahub-client/scripts/check_jar.sh | 3 +-
 .../java/datahub/client/patch/PatchTest.java | 18 +-
 .../datahub-protobuf/scripts/check_jar.sh | 3 +-
 .../java/examples/build.gradle | 3 -
 .../examples/DataJobLineageAdd.java | 2 +-
 .../examples/DatasetCustomPropertiesAdd.java | 2 +-
 .../DatasetCustomPropertiesAddRemove.java | 2 +-
 .../DatasetCustomPropertiesReplace.java | 2 +-
 .../java/spark-lineage/build.gradle | 5 +
 .../setup_spark_smoke_test.sh | 2 +-
 .../client/EntityClientAspectRetriever.java | 35 +
 .../metadata/client/JavaEntityClient.java | 19 +-
 .../client/SystemJavaEntityClient.java | 14 +-
 .../metadata/entity/EntityServiceImpl.java | 52 +-
 .../linkedin/metadata/entity/EntityUtils.java | 12 +-
 .../cassandra/CassandraRetentionService.java | 16 +-
 .../entity/ebean/EbeanRetentionService.java | 16 +-
 .../entity/ebean/batch/AspectsBatchImpl.java | 31 +-
 .../entity/ebean/batch/MCLBatchItemImpl.java | 23 +-
 .../entity/ebean/batch/MCPPatchBatchItem.java | 11 +-
 .../ebean/batch/MCPUpsertBatchItem.java | 63 +-
 .../entity/validation/ValidationUtils.java | 3 +-
 .../graph/dgraph/DgraphGraphService.java | 19 +
 .../graph/elastic/ESGraphQueryDAO.java | 92 +-
 .../elastic/ElasticSearchGraphService.java | 123 +-
 .../graph/neo4j/Neo4jGraphService.java | 19 +
 .../elasticsearch/ElasticSearchService.java | 9 +
 .../indexbuilder/ESIndexBuilder.java | 46 +-
 .../indexbuilder/EntityIndexBuilders.java | 48 +
 .../indexbuilder/MappingsBuilder.java | 79 ++
 .../indexbuilder/ReindexConfig.java | 33 +
 .../elasticsearch/query/ESSearchDAO.java | 4 +-
 .../request/AggregationQueryBuilder.java | 17 +-
 .../query/request/SearchAfterWrapper.java | 2 +-
 .../query/request/SearchRequestHandler.java | 1 +
 .../metadata/search/features/Features.java | 3 +-
 .../SearchDocumentTransformer.java | 114 +-
 .../metadata/search/utils/ESUtils.java | 102 +-
 .../service/UpdateIndicesService.java | 82 +-
 .../metadata/shared/ElasticSearchIndexed.java | 11 +
 .../ElasticSearchSystemMetadataService.java | 8 +
 .../ElasticSearchTimeseriesAspectService.java | 159 ++-
 .../TimeseriesAspectIndexBuilders.java | 8 +
 .../metadata/AspectIngestionUtils.java | 6 +-
 .../metadata/client/JavaEntityClientTest.java | 10 +-
 .../entity/EbeanEntityServiceTest.java | 20 +-
 .../metadata/entity/EntityServiceTest.java | 228 +++-
 .../metadata/entity/TestEntityRegistry.java | 2 +-
 .../search/SearchGraphServiceTestBase.java | 28 +-
 .../search/fixtures/GoldenTestBase.java | 2 +-
 .../indexbuilder/IndexBuilderTestBase.java | 118 ++
 .../indexbuilder/MappingsBuilderTest.java | 137 +-
 .../request/AggregationQueryBuilderTest.java | 97 +-
 .../request/CustomizedQueryHandlerTest.java | 10 +
 .../query/request/SearchQueryBuilderTest.java | 19 +-
 .../metadata/search/utils/ESUtilsTest.java | 71 ++
 .../test/search/SearchTestUtils.java | 2 +-
 rm_assignment_test_definition_complex.json | 145 +++
 ...orm_assignment_test_definition_simple.json | 67 +
 .../forms/form_prompt_test_definition.json | 39 +
 .../kafka/MaeConsumerApplication.java | 4 +-
 .../src/main/resources/application.properties | 2 +-
 ...eConsumerApplicationTestConfiguration.java | 3 -
 .../kafka/MetadataChangeLogProcessor.java | 7 +-
 .../kafka/config/EntityHydratorConfig.java | 12 +-
 .../event/EntityChangeEventGeneratorHook.java | 9 +-
 .../kafka/hook/form/FormAssignmentHook.java | 130 ++
 .../hook/siblings/SiblingAssociationHook.java | 8 +-
 .../kafka/hydrator/EntityHydrator.java | 6 +-
 .../kafka/hook/UpdateIndicesHookTest.java | 7 +-
 .../EntityChangeEventGeneratorHookTest.java | 10 +-
 .../siblings/SiblingAssociationHookTest.java | 6 +-
 .../spring/MCLSpringTestConfiguration.java | 19 +-
 .../kafka/MceConsumerApplication.java | 6 +-
 .../metadata/restli/RestliServletConfig.java | 13 -
 .../kafka/MceConsumerApplicationTest.java | 2 +-
 ...eConsumerApplicationTestConfiguration.java | 21 +-
 metadata-jobs/mce-consumer/build.gradle | 2 +-
 .../kafka/MetadataChangeEventsProcessor.java | 6 +-
 .../MetadataChangeProposalsProcessor.java | 6 +-
 .../datahub/event/PlatformEventProcessor.java | 2 +-
 metadata-models-custom/README.md | 20 +
 .../CustomDataQualityRulesMCLSideEffect.java | 9 +-
 .../CustomDataQualityRulesMCPSideEffect.java | 5 +-
 metadata-models/build.gradle | 3 +-
 .../com/linkedin/common/CustomProperties.pdl | 1 +
 .../common/FieldFormPromptAssociation.pdl | 17 +
 .../com/linkedin/common/FormAssociation.pdl | 21 +
 .../linkedin/common/FormPromptAssociation.pdl | 23 +
 .../common/FormPromptFieldAssociations.pdl | 16 +
 .../common/FormVerificationAssociation.pdl | 17 +
 .../pegasus/com/linkedin/common/Forms.pdl | 66 +
 .../common/GlossaryTermAssociation.pdl | 6 +
 .../com/linkedin/common/PropertyValue.pdl | 13 +
 .../linkedin/datahub/DataHubSearchConfig.pdl | 87 ++
 .../com/linkedin/datatype/DataTypeInfo.pdl | 21 +
 .../com/linkedin/datatype/DataTypeKey.pdl | 11 +
 .../linkedin/entitytype/EntityTypeInfo.pdl | 22 +
 .../com/linkedin/entitytype/EntityTypeKey.pdl | 11 +
 .../linkedin/form/DynamicFormAssignment.pdl | 19 +
 .../com/linkedin/form/FormActorAssignment.pdl | 21 +
 .../pegasus/com/linkedin/form/FormInfo.pdl | 51 +
 .../pegasus/com/linkedin/form/FormPrompt.pdl | 53 +
 .../linkedin/glossary/GlossaryNodeInfo.pdl | 3 +-
 .../linkedin/glossary/GlossaryTermInfo.pdl | 2 +
 .../com/linkedin/metadata/key/FormKey.pdl | 14 +
 .../structured/PrimitivePropertyValue.pdl | 9 +
 .../com/linkedin/structured/PropertyValue.pdl | 10 +
 .../structured/StructuredProperties.pdl | 14 +
 .../StructuredPropertyDefinition.pdl | 74 ++
 .../structured/StructuredPropertyKey.pdl | 11 +
 .../StructuredPropertyValueAssignment.pdl | 29 +
 .../src/main/resources/entity-registry.yml | 55 +-
 .../authentication/group/GroupService.java | 42 +-
 .../token/StatefulTokenService.java | 11 +-
 .../metadata/config/VisualConfiguration.java | 9 +
 .../src/main/resources/application.yml | 20 +-
 .../factory/auth/AuthorizerChainFactory.java | 31 +-
 .../auth/DataHubAuthorizerFactory.java | 22 +-
 .../auth/DataHubTokenServiceFactory.java | 6 +-
 .../gms/factory/auth/GroupServiceFactory.java | 13 +-
 .../auth/InviteTokenServiceFactory.java | 10 +-
 .../auth/NativeUserServiceFactory.java | 15 +-
 .../gms/factory/auth/PostServiceFactory.java | 11 +-
 .../gms/factory/auth/RoleServiceFactory.java | 12 +-
 .../auth/SystemAuthenticationFactory.java | 4 +-
 .../common/SiblingGraphServiceFactory.java | 2 +-
 .../factory/config/ConfigurationProvider.java | 6 +
 .../StructuredPropertiesConfiguration.java | 10 +
 .../DataProductServiceFactory.java | 10 +-
 .../entity/CassandraSessionFactory.java | 4 +-
 .../entity/DeleteEntityServiceFactory.java | 4 +-
 .../factory/entity/EntityServiceFactory.java | 18 +-
 .../entity/JavaEntityClientFactory.java | 100 --
 .../entity/RetentionServiceFactory.java | 12 +-
 .../entity/RollbackServiceFactory.java | 27 +
 .../indices/UpdateIndicesServiceFactory.java | 18 +-
 .../EntityClientConfigFactory.java | 20 +
 .../entityclient/JavaEntityClientFactory.java | 85 ++
 .../RestliEntityClientFactory.java | 60 +-
 .../gms/factory/form/FormServiceFactory.java | 21 +
 .../factory/graphql/GraphQLEngineFactory.java | 30 +-
 .../ingestion/IngestionSchedulerFactory.java | 20 +-
 .../kafka/KafkaEventConsumerFactory.java | 1 -
 .../AwsGlueSchemaRegistryFactory.java | 2 +-
 .../lineage/LineageServiceFactory.java | 11 +-
 .../OwnershipTypeServiceFactory.java | 17 +-
 .../factory/query/QueryServiceFactory.java | 16 +-
 .../MostPopularCandidateSourceFactory.java | 2 +-
 .../RecentlyEditedCandidateSourceFactory.java | 2 +-
 .../RecentlyViewedCandidateSourceFactory.java | 2 +-
 .../TopPlatformsCandidateSourceFactory.java | 2 +-
 .../search/ElasticSearchServiceFactory.java | 13 +
 .../search/LineageSearchServiceFactory.java | 4 +-
 .../search/views/ViewServiceFactory.java | 16 +-
 .../settings/SettingsServiceFactory.java | 17 +-
 .../gms/factory/telemetry/DailyReport.java | 4 +-
 .../telemetry/ScheduledAnalyticsFactory.java | 2 +-
 .../gms/factory/telemetry/TelemetryUtils.java | 4 +-
 .../telemetry/TrackingServiceFactory.java | 2 +-
 ...tyChangeEventGeneratorRegistryFactory.java | 9 +-
 .../linkedin/metadata/boot/BootstrapStep.java | 2 +-
 .../linkedin/metadata/boot/UpgradeStep.java | 5 +-
 .../factories/BootstrapManagerFactory.java | 10 +-
 .../IngestRetentionPoliciesStepFactory.java | 2 +-
 .../boot/steps/BackfillBrowsePathsV2Step.java | 2 +-
 .../boot/steps/IndexDataPlatformsStep.java | 2 +-
 .../IngestDataPlatformInstancesStep.java | 5 +-
 .../boot/steps/IngestDataPlatformsStep.java | 6 +-
 .../boot/steps/IngestDataTypesStep.java | 103 ++
 .../IngestDefaultGlobalSettingsStep.java | 6 +-
 .../boot/steps/IngestEntityTypesStep.java | 88 ++
 .../boot/steps/IngestOwnershipTypesStep.java | 8 +-
 .../boot/steps/IngestPoliciesStep.java | 5 +-
 .../metadata/boot/steps/IngestRolesStep.java | 5 +-
 .../boot/steps/IngestRootUserStep.java | 2 +-
 .../steps/RestoreColumnLineageIndices.java | 4 +-
 .../boot/steps/RestoreDbtSiblingsIndices.java | 3 +-
 .../boot/steps/RestoreGlossaryIndices.java | 2 +-
 .../steps/UpgradeDefaultBrowsePathsStep.java | 2 +-
 ...SearchIndexBuilderFactoryDefaultsTest.java | 27 +
 .../steps/BackfillBrowsePathsV2StepTest.java | 8 +-
 .../IngestDataPlatformInstancesStepTest.java | 12 +-
 .../boot/steps/IngestDataTypesStepTest.java | 81 ++
 .../IngestDefaultGlobalSettingsStepTest.java | 10 +-
 .../boot/steps/IngestEntityTypesStepTest.java | 91 ++
 .../RestoreColumnLineageIndicesTest.java | 13 +-
 .../steps/RestoreGlossaryIndicesTest.java | 10 +-
 .../UpgradeDefaultBrowsePathsStepTest.java | 12 +-
 .../telemetry/TelemetryUtilsTest.java | 2 +-
 .../boot/test_data_types_invalid.json | 9 +
 .../resources/boot/test_data_types_valid.json | 10 +
 .../test/resources/test-entity-registry.yaml | 18 +-
 .../openapi-entity-servlet/build.gradle | 2 +-
 .../delegates/EntityApiDelegateImpl.java | 119 +-
 .../JavaSpring/apiController.mustache | 2 +-
 .../delegates/EntityApiDelegateImplTest.java | 8 +-
 .../GlobalControllerExceptionHandler.java | 15 +-
 .../openapi/config/SpringWebConfig.java | 31 +-
 .../openapi/util/MappingUtil.java | 6 +-
 .../openapi/util/ReflectionCache.java | 27 +-
 .../v2/controller/EntityController.java | 507 ++++++++
 .../v2/controller/RelationshipController.java | 228 ++++
 .../v2/controller/TimeseriesController.java | 115 ++
 .../openapi/v2/models/GenericEntity.java | 57 +
 .../v2/models/GenericRelationship.java | 36 +
 .../v2/models/GenericScrollResult.java | 12 +
 .../v2/models/GenericTimeseriesAspect.java | 18 +
 .../openapi/v2/models/PatchOperation.java | 26 +
 .../openapi/v2/utils/ControllerUtil.java | 67 +
 .../com.linkedin.entity.aspects.snapshot.json | 7 +
 ...com.linkedin.entity.entities.snapshot.json | 7 +
 .../com.linkedin.entity.runs.snapshot.json | 7 +
 ...nkedin.operations.operations.snapshot.json | 7 +
 ...m.linkedin.platform.platform.snapshot.json | 7 +
 .../linkedin/entity/client/EntityClient.java | 30 +-
 .../entity/client/RestliEntityClient.java | 10 +-
 .../entity/client/SystemEntityClient.java | 55 +-
 .../client/SystemRestliEntityClient.java | 2 +-
 .../resources/entity/AspectResource.java | 4 +-
 .../entity/BatchIngestionRunResource.java | 309 +----
 .../resources/entity/AspectResourceTest.java | 2 +-
 .../mock/MockTimeseriesAspectService.java | 15 +
 metadata-service/services/build.gradle | 6 +-
 .../linkedin/metadata/entity/AspectUtils.java | 2 +-
 .../metadata/entity/EntityService.java | 16 +-
 .../linkedin/metadata/graph/GraphService.java | 15 +
 .../metadata/graph/RelatedEntities.java | 31 +
 .../graph/RelatedEntitiesScrollResult.java | 16 +
 .../metadata/search/EntitySearchService.java | 2 +-
 .../metadata/service/FormService.java | 1107 +++++++++++++++++
 .../metadata/service/RollbackService.java | 328 +++++
 .../SearchBasedFormAssignmentManager.java | 94 ++
 .../util/SearchBasedFormAssignmentRunner.java | 45 +
 .../metadata/shared/ValidationUtils.java | 167 +--
 .../timeseries/GenericTimeseriesDocument.java | 26 +
 .../timeseries/TimeseriesAspectService.java | 11 +
 .../timeseries/TimeseriesScrollResult.java | 18 +
 .../gms/servlet/ConfigSearchExport.java | 2 +-
 .../src/main/resources/boot/data_types.json | 42 +
 .../authorization/OwnershipUtils.java | 20 +
 .../metadata/utils/AuditStampUtils.java | 10 +
 .../linkedin/metadata/utils/FormUtils.java | 49 +
 .../metadata/utils/GenericRecordUtils.java | 21 +
 .../metadata/utils/SchemaFieldUtils.java | 22 +
 .../linkedin/metadata/utils/SearchUtil.java | 26 +
 .../main/java/mock/MockEntityRegistry.java | 2 +-
 smoke-test/cypress-dev.sh | 2 +-
 smoke-test/requests_wrapper/__init__.py | 1 +
 .../cypress/cypress/e2e/siblings/siblings.js | 2 +-
 .../tests/structured_properties/__init__.py | 0
 .../structured_properties/click_event.avsc | 14 +
 .../structured_properties/test_dataset.yaml | 19 +
 .../test_structured_properties.py | 577 +++++++++
 .../test_structured_properties.yaml | 33 +
 smoke-test/tests/telemetry/telemetry_test.py | 14 +-
 smoke-test/tests/utilities/__init__.py | 0
 smoke-test/tests/utilities/file_emitter.py | 21 +
 492 files changed, 15378 insertions(+), 1931 deletions(-)
 create mode 100644 datahub-frontend/app/controllers/RedirectController.java
 create mode 120000 datahub-frontend/public
 create mode 100644 datahub-frontend/test/resources/public/logos/datahub-logo.png
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java
 rename datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/{resolvers => types/entitytype}/EntityTypeMapper.java (91%)
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java
 create mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java
 create mode 100644 datahub-graphql-core/src/main/resources/forms.graphql
 create mode 100644 datahub-graphql-core/src/main/resources/properties.graphql
 create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java
 create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java
 create mode 100644 datahub-upgrade/src/main/resources/application.properties
 rename datahub-web-react/public/assets/{ => icons}/favicon.ico (100%)
 mode change 100644 => 120000 datahub-web-react/public/assets/logo.png
 create mode 100644 datahub-web-react/public/assets/logos/datahub-logo.png
 create mode 100644 datahub-web-react/public/browserconfig.xml
 create mode 100644 docs/api/openapi/openapi-structured-properties.md
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client => entity-registry/src/main/java/com/linkedin/metadata/aspect}/patch/PatchOperationType.java (81%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/AbstractMultiFieldPatchBuilder.java (95%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/ChartInfoPatchBuilder.java (75%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/CustomPropertiesPatchBuilder.java (90%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DashboardInfoPatchBuilder.java (86%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DataFlowInfoPatchBuilder.java (92%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DataJobInfoPatchBuilder.java (93%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DataJobInputOutputPatchBuilder.java (93%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/DatasetPropertiesPatchBuilder.java (91%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/EditableSchemaMetadataPatchBuilder.java (90%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/GlobalTagsPatchBuilder.java (88%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/GlossaryTermsPatchBuilder.java (89%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/OwnershipPatchBuilder.java (91%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/PatchUtil.java (96%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/UpstreamLineagePatchBuilder.java (96%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/subtypesupport/CustomPropertiesPatchBuilderSupport.java (81%)
 rename {metadata-integration/java/datahub-client/src/main/java/datahub/client/patch => entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder}/subtypesupport/IntermediatePatchBuilder.java (83%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/ArrayMergingTemplate.java (98%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/AspectTemplateEngine.java (71%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/Template.java (69%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/chart/ChartInfoTemplate.java (92%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/common/GlobalTagsTemplate.java (90%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/common/GlossaryTermsTemplate.java (92%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/common/OwnershipTemplate.java (89%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dashboard/DashboardInfoTemplate.java (94%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataflow/DataFlowInfoTemplate.java (89%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/datajob/DataJobInfoTemplate.java (89%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/datajob/DataJobInputOutputTemplate.java (96%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataproduct/DataProductPropertiesTemplate.java (91%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataset/DatasetPropertiesTemplate.java (91%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataset/EditableSchemaMetadataTemplate.java (92%)
 rename entity-registry/src/main/java/com/linkedin/metadata/{models/registry => aspect/patch}/template/dataset/UpstreamLineageTemplate.java (96%)
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java
 create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java
 delete mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
 delete mode 100644 entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java
 rename entity-registry/src/test/java/com/linkedin/metadata/{models/registry/patch => aspect/patch/template}/ChartInfoTemplateTest.java (92%)
 rename entity-registry/src/test/java/com/linkedin/metadata/{models/registry/patch => aspect/patch/template}/DashboardInfoTemplateTest.java (91%)
 rename entity-registry/src/test/java/com/linkedin/metadata/{models/registry/patch => aspect/patch/template}/UpstreamLineageTemplateTest.java (99%)
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java
 create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java
 create mode 100644 metadata-ingestion/examples/forms/forms.yaml
 create mode 100644 metadata-ingestion/examples/mce_files/test_structured_properties.json
 create mode 100644 metadata-ingestion/examples/structured_properties/README.md
 create mode 100644 metadata-ingestion/examples/structured_properties/click_event.avsc
 create mode 100644 metadata-ingestion/examples/structured_properties/dataset.yaml
 create mode 100644 metadata-ingestion/examples/structured_properties/structured_properties.yaml
 create mode 100644 metadata-ingestion/src/datahub/api/entities/dataset/__init__.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/dataset/dataset.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/forms/__init__.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/forms/forms.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/structuredproperties/__init__.py
 create mode 100644 metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py
 create mode 100644 metadata-ingestion/src/datahub/cli/specific/dataset_cli.py
 create mode 100644 metadata-ingestion/src/datahub/cli/specific/forms_cli.py
 create mode 100644 metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py
 create mode 100644 metadata-ingestion/src/datahub/specific/structured_properties.py
 create mode 100644 metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py
 create mode 100644 metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java
 create mode 100644 metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json
 create mode 100644 metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json
 create mode 100644 metadata-io/src/test/resources/forms/form_prompt_test_definition.json
 create mode 100644 metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl
 create mode 100644 metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java
 delete mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java
 rename metadata-service/factories/src/main/java/com/linkedin/gms/factory/{entity => entityclient}/RestliEntityClientFactory.java (53%)
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java
 create mode 100644 metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java
 create mode 100644 metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java
 create mode 100644 metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java
 create mode 100644 metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java
 create mode 100644 metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json
 create mode 100644 metadata-service/factories/src/test/resources/boot/test_data_types_valid.json
 rename metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/{ => v2}/delegates/EntityApiDelegateImpl.java (86%)
 rename metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/{ => v2}/delegates/EntityApiDelegateImplTest.java (97%)
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java
 create mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java
 create mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java
 create mode 100644 metadata-service/war/src/main/resources/boot/data_types.json
 create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java
 create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java
 create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java
 create mode 100644 smoke-test/tests/structured_properties/__init__.py
 create mode 100644 smoke-test/tests/structured_properties/click_event.avsc
 create mode 100644 smoke-test/tests/structured_properties/test_dataset.yaml
 create mode 100644 smoke-test/tests/structured_properties/test_structured_properties.py
 create mode 100644 smoke-test/tests/structured_properties/test_structured_properties.yaml
 create mode 100644 smoke-test/tests/utilities/__init__.py
 create mode 100644 smoke-test/tests/utilities/file_emitter.py

diff --git a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java
index 25bf239ab835b..1c9dfd4686610 100644
--- a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java
+++ b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java
@@ -183,6 +183,7 @@ private void generateSchema(final File file) {
     final String fileBaseName;
     try {
       final JsonNode schema = JsonLoader.fromFile(file);
+
       final JsonNode result = buildResult(schema.toString());
       String prettySchema = JacksonUtils.prettyPrint(result);
       Path absolutePath = file.getAbsoluteFile().toPath();
@@ -195,11 +196,21 @@ private void generateSchema(final File file) {
     } else {
       fileBaseName = getBaseName(file.getName());
     }
-    Files.write(Paths.get(jsonDirectory + sep + fileBaseName + ".json"),
+
+    final String targetName;
+    if (schema.has("Aspect") && schema.get("Aspect").has("name") &&
+        !schema.get("Aspect").get("name").asText().equalsIgnoreCase(fileBaseName)) {
+      targetName = OpenApiEntities.toUpperFirst(schema.get("Aspect").get("name").asText());
+      prettySchema = prettySchema.replaceAll(fileBaseName, targetName);
+    } else {
+      targetName = fileBaseName;
+    }
+
+    Files.write(Paths.get(jsonDirectory + sep + targetName + ".json"),
         prettySchema.getBytes(StandardCharsets.UTF_8), StandardOpenOption.WRITE,
         StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
     if (schema.has("Aspect")) {
-      aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + getBaseName(file.getName())));
+      aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + targetName));
     }
   } catch (IOException | ProcessingException e) {
     throw new RuntimeException(e);
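For reference, the renaming rule the two hunks above introduce can be exercised in isolation. The following is a minimal sketch, not part of the patch: AspectRenameSketch, targetName, and upperFirst are illustrative names, the casing rule mirrors OpenApiEntities.toUpperFirst (using Character.isUpperCase in place of the patch's substring comparison), and Jackson is assumed on the classpath.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class AspectRenameSketch {
  // When the schema declares an Aspect name that differs from the pdl file's base name,
  // the generated JSON schema is written under the aspect's class-cased name instead.
  static String targetName(JsonNode schema, String fileBaseName) {
    if (schema.has("Aspect") && schema.get("Aspect").has("name")
        && !schema.get("Aspect").get("name").asText().equalsIgnoreCase(fileBaseName)) {
      return upperFirst(schema.get("Aspect").get("name").asText());
    }
    return fileBaseName;
  }

  // Upper-case the first letter, unless the 3rd character is already upper case,
  // in which case the first two characters are upper-cased (mlModel -> MLModel).
  static String upperFirst(String s) {
    if (s.length() > 2 && Character.isUpperCase(s.charAt(2))) {
      return s.substring(0, 2).toUpperCase() + s.substring(2);
    }
    return s.substring(0, 1).toUpperCase() + s.substring(1);
  }

  public static void main(String[] args) throws Exception {
    JsonNode schema = new ObjectMapper().readTree("{\"Aspect\":{\"name\":\"mlModel\"}}");
    System.out.println(targetName(schema, "someOtherBaseName")); // prints MLModel
    System.out.println(upperFirst("dataset"));                   // prints Dataset
    System.out.println(upperFirst("dataProduct"));               // prints DataProduct
  }
}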
diff --git a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java
index 888c4a0e99931..04cbadcdc6b7b 100644
--- a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java
+++ b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java
@@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
 import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.metadata.models.registry.config.Entities;
 import com.linkedin.metadata.models.registry.config.Entity;
@@ -58,8 +59,12 @@ public class OpenApiEntities {
       .add("notebookInfo").add("editableNotebookProperties")
       .add("dataProductProperties")
       .add("institutionalMemory")
+      .add("forms").add("formInfo").add("dynamicFormAssignment")
       .build();
 
+  private final static ImmutableSet ENTITY_EXCLUSIONS = ImmutableSet.builder()
+      .add("structuredProperty")
+      .build();
 
   public OpenApiEntities(JsonNodeFactory NODE_FACTORY) {
     this.NODE_FACTORY = NODE_FACTORY;
@@ -117,14 +122,27 @@ public ObjectNode entityExtension(List nodesList, ObjectNode schemas
     return componentsNode;
   }
 
-  private static String toUpperFirst(String s) {
-    return s.substring(0, 1).toUpperCase() + s.substring(1);
+  /**
+   * Convert the pdl model names to desired class names. Upper case first letter unless the 3rd character is upper case.
+   * i.e. mlModel -> MLModel
+   *      dataset -> Dataset
+   *      dataProduct -> DataProduct
+   * @param s input string
+   * @return class name
+   */
+  public static String toUpperFirst(String s) {
+    if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) {
+      return s.substring(0, 2).toUpperCase() + s.substring(2);
+    } else {
+      return s.substring(0, 1).toUpperCase() + s.substring(1);
+    }
   }
 
   private Set withEntitySchema(ObjectNode schemasNode, Set definitions) {
     return entityMap.values().stream()
         // Make sure the primary key is defined
         .filter(entity -> definitions.contains(toUpperFirst(entity.getKeyAspect())))
+        .filter(entity -> !ENTITY_EXCLUSIONS.contains(entity.getName()))
         .map(entity -> {
           final String upperName = toUpperFirst(entity.getName());
@@ -547,7 +565,7 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) {
     ObjectNode getMethod = NODE_FACTORY.objectNode()
         .put("summary", String.format("Get %s for %s.", aspect, entity.getName()))
-        .put("operationId", String.format("get%s", upperFirstAspect, upperFirstEntity));
+        .put("operationId", String.format("get%s", upperFirstAspect));
     getMethod.set("tags", tagsNode);
     ArrayNode singlePathParametersNode = NODE_FACTORY.arrayNode();
     getMethod.set("parameters", singlePathParametersNode);
@@ -575,13 +593,13 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) {
             .set("application/json", NODE_FACTORY.objectNode())));
     ObjectNode headMethod = NODE_FACTORY.objectNode()
         .put("summary", String.format("%s on %s existence.", aspect, upperFirstEntity))
-        .put("operationId", String.format("head%s", upperFirstAspect, upperFirstEntity))
+        .put("operationId", String.format("head%s", upperFirstAspect))
         .set("responses", headResponses);
     headMethod.set("tags", tagsNode);
     ObjectNode deleteMethod = NODE_FACTORY.objectNode()
         .put("summary", String.format("Delete %s on entity %s", aspect, upperFirstEntity))
-        .put("operationId", String.format("delete%s", upperFirstAspect, upperFirstEntity))
+        .put("operationId", String.format("delete%s", upperFirstAspect))
         .set("responses", NODE_FACTORY.objectNode()
             .set("200", NODE_FACTORY.objectNode()
                 .put("description", String.format("Delete %s on %s entity.", aspect, upperFirstEntity))
@@ -591,7 +609,7 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) {
     ObjectNode postMethod = NODE_FACTORY.objectNode()
         .put("summary", String.format("Create aspect %s on %s ", aspect, upperFirstEntity))
-        .put("operationId", String.format("create%s", upperFirstAspect, upperFirstEntity));
+        .put("operationId", String.format("create%s", upperFirstAspect));
     postMethod.set("requestBody", NODE_FACTORY.objectNode()
         .put("description", String.format("Create aspect %s on %s entity.", aspect, upperFirstEntity))
         .put("required", true).set("content", NODE_FACTORY.objectNode()
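One detail worth noting in the operationId changes above: the removed second argument was never used, because Java's String.format silently ignores arguments beyond what the format string consumes. A minimal sketch (not from the patch; the variable values are made up) showing that the old and new calls produce identical operationIds:

public class FormatArgsSketch {
  public static void main(String[] args) {
    String upperFirstAspect = "DatasetProperties";
    String upperFirstEntity = "Dataset";
    // Old form: the trailing upperFirstEntity argument is silently dropped.
    System.out.println(String.format("get%s", upperFirstAspect, upperFirstEntity)); // getDatasetProperties
    // New form: same output, without the misleading extra argument.
    System.out.println(String.format("get%s", upperFirstAspect)); // getDatasetProperties
  }
}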
diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java
index 3d87267f8ebe3..0f2945d5d2393 100644
--- a/datahub-frontend/app/config/ConfigurationProvider.java
+++ b/datahub-frontend/app/config/ConfigurationProvider.java
@@ -1,5 +1,6 @@
 package config;
 
+import com.linkedin.metadata.config.VisualConfiguration;
 import com.linkedin.metadata.config.cache.CacheConfiguration;
 import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
@@ -22,4 +23,7 @@ public class ConfigurationProvider {
 
   /** Configuration for caching */
   private CacheConfiguration cache;
+
+  /** Configuration for the view layer */
+  private VisualConfiguration visualConfig;
 }
diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java
index 60971bf06e27b..df0cd4f4ff82f 100644
--- a/datahub-frontend/app/controllers/Application.java
+++ b/datahub-frontend/app/controllers/Application.java
@@ -13,6 +13,7 @@ import com.linkedin.util.Pair;
 import com.typesafe.config.Config;
 import java.io.InputStream;
+import java.net.URI;
 import java.time.Duration;
 import java.util.List;
 import java.util.Map;
@@ -125,6 +126,12 @@ public CompletableFuture proxy(String path, Http.Request request)
       headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST));
     }
 
+    if (!headers.containsKey(Http.HeaderNames.X_FORWARDED_PROTO)) {
+      final String schema =
+          Optional.ofNullable(URI.create(request.uri()).getScheme()).orElse("http");
+      headers.put(Http.HeaderNames.X_FORWARDED_PROTO, List.of(schema));
+    }
+
     return _ws.url(
             String.format(
                 "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri))
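The X-Forwarded-Proto logic added to the proxy above reduces to one expression. A minimal sketch, not part of the patch: forwardedProto is an illustrative name, and requestUri stands in for Play's request.uri(), which is typically a scheme-less relative URI.

import java.net.URI;
import java.util.Optional;

public class ForwardedProtoSketch {
  // Derive the forwarded protocol from the request URI's scheme, defaulting to
  // "http" when the URI is relative and carries no scheme.
  static String forwardedProto(String requestUri) {
    return Optional.ofNullable(URI.create(requestUri).getScheme()).orElse("http");
  }

  public static void main(String[] args) {
    System.out.println(forwardedProto("/api/v2/graphql"));               // http
    System.out.println(forwardedProto("https://datahub.example/track")); // https
  }
}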
"/public" : "/public/"; + return ok(Application.class.getResourceAsStream( + prefix + config.getVisualConfig().getAssets().getFaviconUrl())) + .as("image/x-icon"); + } + } +} diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index 6b53a2789e7cc..9eac7aa34c3e3 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -36,9 +36,13 @@ PUT /openapi/*path c HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request) PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request) + # Analytics route POST /track controllers.TrackingController.track(request: Request) +# Map static resources from the /public folder to the /assets URL path +GET /assets/icons/favicon.ico controllers.RedirectController.favicon(request: Request) + # Known React asset routes GET /assets/*file controllers.Assets.at(path="/public/assets", file) GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file) diff --git a/datahub-frontend/public b/datahub-frontend/public new file mode 120000 index 0000000000000..60c68c7b4b1bc --- /dev/null +++ b/datahub-frontend/public @@ -0,0 +1 @@ +../datahub-web-react/public \ No newline at end of file diff --git a/datahub-frontend/test/resources/public/logos/datahub-logo.png b/datahub-frontend/test/resources/public/logos/datahub-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..5e34e6425d23fa1a19ca3c89dae7acdfb2902e86 GIT binary patch literal 53563 zcmYhiWmJ^g`!Iad9R?vapeRTibc2YX0xCnNA~lqB4oIp8RPLAqb+=cyRA21d%p_f0ro9!C!)B;4I)j zR4xw;-5|&yk?@ab)KPK>g18}#d&;`r>1&fcuIx*xifa^^-@In6rY?9jurE_yg}u(p zE8=Bl>O9ZO`%7dXL#(0BeFPbNl6-aJi>km`bKRu?QtRmEbWYD1-N&~RVvY`tC+#0_ zQxRkD2OfIBYpjf+W6KcI0mX^KQ&NBege{M=VS%zIKTZGCSEa<12O=-1{D?nw3vilk z=LsEhJ3q}Dt!o%sYZ?m7!eK#XMJjWl>XRRN?(!d-rdLQc775`#V%B7v zBpnlTx7z#-B#!P$kwd!L5INKzRJ5}CQev)9!O{EB>lK~4A^$;sjm7V^TTu30!UG=x z*3O}f^!DG5jXp1X1+BsY(NAx(l0XtyArk28lf9RC?)QUTsKttMYN439wXE}wg7#>> ztoK^0XBVNM4FVp2c#m%**;Kp-L!g2vL<-&Hxg~RKswg@1o3a^#>=^^DDE4{!`m0jv zwX<2b9fUoH#9Rn(zsqxsu~Z(CeaR-IT&uM2GOc z^UPuI%Tou}>FfXq%T8dn+9%PuLY`2V{>A}+kbOJb6lY+g$BwOQp!*+eEff?*1s)c- z7;vSDo@c^fb%0FAQISG@N#&Mf0@Tk#c$@m3`LE9AlcgFiLufcpoWmw3MTr)YQ35>E zDZ_S4+4y2r8O6T%G0h~VhC{waOi_=?g|RP@+`{vIG1JhdPHzN9@1eTP@+NI zcUJozvgcp2P@t79!P`P&N(2Ps0fHI}nC_>2`H;OW;z2J|sIzi1y4NvAO;@xfrm4!=Ag<9siNY&WHDnRoD{_-mW zLUfUwx}G9rI%t;zT)VSi+eRuoIaEjxnI=JG3_NlLdnVM`W8cFMGD1ctN<~90q0*Fp zpa+t-R~cKQy87~<0#1bOrZi(XQ8O_C?TY~ISB^_hpaqViYH~|f$m$<^zKTOM#Qcbk z2Ew002)rBtB2KSRXe`;6qs}hM!Lh|dYmp8?jg!1YtfYW3&EafR&B0I?o2H@a#qfa4 zFw6Wr6cuFjS6-VGQc@otcdtniudt!}X~Mu$Ps{Tt_Y}uH7PINT9SH@&bpg*EzJ8}J zy#hB_H&6}iZLyOOU31%Qg|IaLgUtY77cjMNF0pBqF?Y%f$3yO^%nzU-1*{W|nNNj^ z-uG~zUrR>3L|7RnAYk@)NOnHA4@h02mvkhCs!yq^k}DsKsa*`xB3Qr&?qf#|8nYtH zAcPb(kN>S2$+1HG1E`b;Jf=kTY5JF9-um-k7KH5(+lp*sff!`eLkpPYT3>^ejObz; z``qZ}ay@j5KXmI^1*|FhV2;96N*36_u^asv~q8z$db&>GLl>wLG28X^}>Ap%D-vDMpm>#%|C z90E(tY>P2j71W9}JRDlcJ2H;m;t=f{YJk!XhnY98V^Be9k>wdqLw3HqLSStb=d)LN z@Bmu*r#gLwdYKPrW5g@$=$xa^oovrA+sglbF32Oh2xZ?JaGuLjp=2pEs+ciUO+)wh zy18(u;QV*e2F$Ll5@lol=>y=VVuw+^$i;dCRpV%w(P=~MZ>p24ps_3%I0 z8<(;O=Ck_XOs{ofhBSxii_LfOnuwj#@f zIX*<^8)3C?PnRAA6Jp`F3xs z<~OmBT0%nC0S#Yuss!g(_4-K4!#z0^y}2|ER4;}GTu|}8b<~5c_#;l#Z}-m+zT9n& zXI$&bQ@`k6@q(^bSr%P-w4JcE>ZZ&J-65P{qim6J#n1TI_ONwHH-fi65H#b4A>~2Y z@Ote36st=X{9rLdKuWyJI+)W$hSeNvNr%>1*e0`tNC^1YD(#Kq>?mhDTdx6S&3TN# zE=fOVMYVfIk+RYgs8t2jN|q&c6td`Xdw&nF;!xZ>_Tw29-$~`B%|0i9rt2H3$K}ED?SYj_o1$bLXMF+z)DMxqYcEyk?761G7(y0WJQ3bcR zp6yTcLEJ!fvR$!DpcQaRo}g8>-+*!s-z8-aivdBGrI)%ea;u$ftRnn4Ixqzun(@c; 
zklw!5yDw}ZedoXp>e4}?p>o@8aWF9PlPcJKIn13jCcOzxf|<83#i71d&g30gptttA z$Z`_|6+Ggsl9H<3cu`Rvo~rT;d@rRJHjDCCE2l1x5?EN%m)7f}nVll`&|CE~2lZh? zZ7H_PeNkCe$fVq``GJ%{oxZ~vsH_fGitSOT1&wpgw#6Yjp=z{tQSmD!5>O1SlbfcTInRvKI~K;~o(<jM;@e#rwb0>E?P5lrQWb3A!8M8C4{;c$r=#Zzbqv;W+i25n z@}tpa!E#SPi~6#4oQJ-}W(3=~T*@E(^=Zx}Sb&>1#g=Y>?_45@Wg?6yS5`U6NKzZq z;{qj5#*nQ*2X=$&vfI~^yH96Y4APEb*Xn~KtRqQ2ppw^g=gHxxYPgeN({Gm5(oLeu=w z+(Grhke(!|4PtP3##^9g1m13w`%L^TZ{G7Gh!k@6guY2O(3~a6<~D9qQ{!LjbMM4+ z&KiI2Shz^cK1b=JZ7%SJ&oZKe7w5)q(+3KETkZiZjdhCgI!k)%VpI5TSDZtU`kjoU z(dUCYUCr;`!OnvA&FZ>oB^d0rkTc;Zq{QfP05i38M(rddoEZc)My)Xt6JDl9bMOX=;)OA&y(QpA4W_e1&cGE{V+xTkc0kf-sZNhRJhrXE*SlJF}L9T7Y9+j zViS0Kj2c$DT%$bI^aqok;&OcbKxX}paJL1xQ=pa(IE)GTZZrAtTQ*-^d@b25u#if; z8BUFVn+%~!Xcptd^0B2eo2IzJ>6v$Y^kDu+Xs6z%N25lZu(Kje!rx?W?D(hotMJ^7 z&F+J#Ue57=;eBaeb{HGpN_s7 z$>XZH-s!wWOKH=Tbz_d`IOg+j_S~{DDySv>@qOpv=Bum8W37#FT)(y!XU6aJ&do%t z&2!+llEW#TZQ6{PoTcHJJ-Kd%eY7uwjm7P9aPcMMSyeT;TcziK&$-76DxX3vQK(NI z05!x+sI?0doCP~CP;8ArNZuqX;J4(HM>s26b1v$fZ{E#h{ZYQ4viYfV_vw;n-d1Z< zLXk+J_O?zr|Nry|UNgI2Cls7aY1a!xa8 zXmHA~_LpqgVquzlwHx_d_-S0#2xz_s6jy=;2kn^JBY}~%fsrwzM0WXO>TQpnwLgt| zPUna>x*Mf$zm?ORG>E_25A18PNymcH79`sNpW)dX|iIn|GKC#N|gs z6&p|v``ou{tMANM>dTvi=%Y-ov2|ByoG6QJE1yD5zJsY+wuhl`Y>MX~v7(Z;+@9Zr ztBcZ%sXR-m$qNR`_sNFRAhh^;zwTq35H1L}GXS051)5qb>Gye5j=V!%f_Fx0BOst{ zj~RAWQ!%DpIyNkuciDA{lqu=K)E%|43wMdhvj=)!)A`dBLRn9a!sJ%8hju7^OeCKV znK3xa^k(2`=MoxKdkVX^n(V>H>DkzMg~@`zep9D2l&SsVcp%&9O}4p23{{PHKdY0s z_PQA5EbDnTVb2T0a`@m}_LYvs;C%5aZ^ZcivCVUadFUc7_OXt+cgYo%3#<-JlR}+0 zxo@pG$J4sFVBdFDfF!oUrY zdsw*IuttLXWk_KjSW|r+FN>W|STQ>zWEg^o=egzF$lTl+7y;FthkLGXCG`%(Nn=d5 z1|C3#NSMUFA$kAV6eoeN9V|I9$na4sQfX|ImDSKU{Y^F|5$esYwZ;wFKo-M#bXU$E z)$ZVgw0YZusRt(J_8RT!kMNdBJrH$GnyJqV4BzJ{jF0#u2M&Ui6cC?=yx)@exdRqz z<&)z(A^r2mXu)i?jd_>34Ya>@6Cd;|9K)Zo3_Lg0Y8;5uz4s8d9RkAQb^P-WcOqCF zek^U7ZRL-rJDC+i%U6GQy%ybAA{Sn0O?L5BqYYbE{B)7_mjfn>ZOcbJu#aTTJ0ZVc z?}~EsYXOW{5dXPT}M7US)DuJI>ycZd!}lEng216vFhrU)Y%uj zrw{noC~>p{nP?XsM}I3jGWv;(X$C(g2Qa@*v1;Jh{3V_95Y)1ISPmo}_tk0Qd2?*J6c=LJ+U_Nu>&JL#X!cm&F)hZfk46B(Q>WT2$@ zbYSmxsNR#{!x!61FdF$@cUxF2CF$-%!6{C_q^yh=WL2g?)o<3fYdX;M42uerAY9M6 z+FYLqtK@ai3tFv6KFqAyUwbi2Kx^kV>yE)mxPwx;@3o!b{PF9m_5r@SJf*HcE? 
zGZb20wB)MuWALp!ReC#gv8Z6#wkHmh8;^Y;O4AzjyQg0jAM-C8Oj%{#T3-yl8FH{) zjA0u3wSQ>DrT%mp3PrZs^p`v_em9x$++sjs3ur{9igUuC5V@4N@U1J{ED-Q;PxI zSMS2|+w&Xr*QN3kE4+k)BPM%c*Y3W$OUB5FeQvj}ml+Ruo%Ry)C~uS}6mA>tk+4@% z8i7!sf3gS`XV`=b_eTgJL5~TR@ zaD+o<3y}ml+g+FHbwznBNnD{&AoQBD<_~u6h05Rn!=8`5EVup3C5~Rv6FbC{fobX& zYU>xVxf%cu3{Q0Amt%bSUKP}H%3YYSRW*IZ-~Sc}JUOy_*Iz48H?fP|xGYeQ zS{tZ#YA)L3pX7}q`)3Bn?98yq|FW&GZ=Mc|H7f~`1+60T0$;v{x{*oH$(!qk{g3#) z-or;SQb41B?Ughkyx!jsWuW93Q>>-)1uQTv_NCVtYS)6FHKj)B5l9ua#67nZ<;wCb zO~Wz6Wn|$GXtWz_*yJB{rNl&aLY&B_us8|nD;j3l8^7Al;RF6i@`i6Q7VP)}{T2H{ zx1FQ%=I70$AL`el_n%cL;4xufzWXq{Pu2-}X*SCf%FFK`6-cCZ5zgUv4GZ0d<%+^` z_3O&=B%z|?usrthdgOvkB#PyH6Ry&GMFeA()TRs3vkD- zerx9BMfu%pI^01!J9DLYz{}DxDs>f341J`{O1GJ#vc|mSaO3#>i&p***dBrX>K|{; zWq9MK-_e=K0YjN%SGN)-X)6yxQh)MPM+v}_{!Hn{D^|?i%=M!pkI=gTk}0|_#=U!E z6&SvLUFcihi|M?1_zajB-NF3|+2_NGNl5k1pVAxf-VR*e6o}pGyRvY>Dr*uM2Tjdi zVf*c=46+;)%$*>4t zaOG>9^nGscob@xjmsU)HW!n$?tq})WC5T<~Ek@sUMSVM(f3jM;hcU#B@5}wb&0;lq zi$y+~h_!O>x#M<_Mh7HayvP6EzrcDOImfFr1jd&u2vKfYS~=XIO3*F*Rk zgw{qoH)8gY`SBo-ddkFkUQFd=p~{KmBQ-$BGD^oe4BIkkZ@FaH2Ml#byD*aKw5IP+ zTCoAHrn9dkqIFkDhf{2r!mG`#z8K1hMS%$4rycX7V~+{8dJ|w$4qTz0WGh~YKOHuo zE}2qPeA9i8G*qmROJ{pw1&Vc?RhdeHuwY=FbmgCyk=8P`8wHw&^n5r87AK+N`k6&G z>ftR<|N5do-@Shv@4}eC_rNP+(O>D;HU&QU_Fw5N@yA@SvbuVmY4T0fQ`z%Z*4U`4 z*=r=N*?^f02>ue^QNx_s{s73XzzL_FvMV?uPpn=x2PxAC)LlBrt#wc`m|pbf+}SMn z&f}?o&a&P;BgQF>hCNT4`eV$n#~ocPC-*4Ec>bT7zB(Z4r+N73qeJNi5x#_=q;v>^ zN-23L2dD^0N$1g^QUa1n2?$bmv~-DdOSg149LE9g;`4j?ll$!M?CjLePMAtB$+N3t zJyv%4!D&`S4W`zCmeqta&h@1X_9y@26Y8T;`;+i66xU0>sdt{tdVae9fsvC-9L%zy zkKjim$>>2(=x876S;n2+@v@sPNmVi=jsI4S7A{g`?~-$nQUPryka43`AzHdPYP`#! zqh0UwnfJDFa=uff+7^kV08BSV< z=v}DVl`GOzbSfEay8^s?t^9L~vRMKTxW^4n^=i2tN30xbjDNmpXjIuR!m8Wp`BUfA zZ3y^!J2{(i=n<#CbQ!caC0+?2=FmF#97&At_J4fPe7c<9h>^SrKc=ss@uyDhRHF0h zWoJo->VKPO=;g-8d#{aYI{EWec%D$(-UpGK3%@2o8y*NZ)3rm4oQN)goYbIYl|hIL zeA}wQ=5rs8=e!)W%Pw3Wh5^~em*3oQMZSsLSC`j{U1|++(8L#5aE|y$6NnSMDMr~) z$;zGKKSDGYMiV&l5WsN5pSrwLsWN|br)8ufm>+zaz=L{%wg zKUzv(OB*Pc|H>o-g2SMc#PVuEy(A|f1im06qjv7OX?ij*;+fnl|I;&pIUL%|VGF18 z`k>MQK-D*3_wkFf4>kjunX+df8~7<<@jwO}SczoM3;4WgU%N%wP@Hg2K|OXU@8I6v zrG^TsV&QzED^Jm=-0_z{y_+vT8aH2L@(W_RBYX~R7+QY79eEEweH5r^A~bkIZU9{I zrk(Ev#pOECy!xKBL8t596>RFUazUU^JljIG~ z#T;BScXeWpE`O8N^b!ZDk{fBi1`wAHDZTX?1r4S%RqCoFQ>rZ_ho~MN($}*AXY*zZ4OeWy(&_PetVdEJYkk-5C=J>e8iMb$QvNmK-4grzB8_*)YcH$iwNxKAp?8_zVwp|NyAazL}9xuGF0Ej5Wd%9X~G_dqr0;E_3|7nPPxQQT>=?JT zkc~a$qCPh5U$&(fuRQkLI80QvkEJ2q0`Wsvb#Yf^<6rY_>be}J8@hr@3|mkC+L-|L zb#7m!R~xn`;1XtByLf?7xv8>m=yuu`veT`lj(@ zI=`*lrTv!Soa4-IruNOSi_Jr#!I?FUALU|*OxHCwX0Yj*UhHunzX#N8hMmvw?3wI7c6DSQ*60i`5VOg$8pu_{!FlKop^F zJC0aKN8$HEnPbxI{s8f2D?zduAGVvN$y-gcC4GTUe*V$yY<<6~LKA8v5mGu8k2j?m z{Z-k#dJp;2od>-b68bf!gnUJXu_O~3vpeHn#-;I`^hTG?<>`h&p6`Q00sRUU)69Pz zAcf9G7DXE}eryEu>~SS86f#qFHqp7CP~0EalTR5^n?VnQ_@4M&xy zpfj|!=|xUyqS*s_f8qwZd_idUn;+W=4I1=5AzLEsPVB0T0K}8~3LAD64=&Pka$6}| zD3mk950cGZP@>^T+oCSgwLtK^ycxgy`1}UAnJBP$Ogz&0M%;8DvXVSsfXWF%`J_#j zLI&jpf$ z$ydvAzd!Ct`R!g@*3f zUJi#;b^x=`6ZGzGLF&l$@?UWkV^Urp+%vWxNgW6|zW*p}(SZoXI&ke1eS3ENv-fb? zn$68O`Ref6-8qt}=n(0adtCHnwwFqf)n~Ux=~Ey{6Hu-*uVA_k;$>e~%9<;Ai6%by zxc{iO2}J6!u53r8zA!Iv<}^x5=W%Y%Q1w(b@=7~LtxV*GdAg2Co;5ZObwT%lbO0@- zLvU4H0Z|O^w+0X-Kn+B2jDTZ@4)FtH^Sg6Ma-;=Gl}hmCv=&}1>Y42F^0<6Y;UV&X zs<6zQ|JU`qvX-gmUUMuhl$RdH;OEQc^Ax4c3|IzqHq#KCQ~K+ zkc@uN6Em!%i2I6IaK=dV#M0pw;g09M$hz1pN_;q{bZ^-7>49<_c>I4TDIn6|JenZ8lKCAi0r? 
z8l)br=+Ew05o4y8UekT^qjupjatDlf_FQCn8=|-x;gl^Q%J&n_kTxpQyH`9i%u(4H3xOw+&nYmEvJFYR7%BqlNfhmK&~l#zvX(tF3eOL9 z+BLG3DGm8)12&=l^2|wM3y~rVcOu|UxrHX_HJ9J){`f*NsjlbIhEw@r zC?2UPq|f%I8}>t+KiUR0pFYSviT=DNKm#=C7C5|jWFD021)N5H3J=dc#giJ55Jf#O zq7Ph;pp{n4vup}1YdW0#6@8E!2vub?ZR`=36L`ihBv zm;VI$WS;WQJBe?yC5v~%b6bQh^h_YPu^;`FPBS%#)R~;%0l;9>!LBHaq-W{X*bLDV zF5y8+I#vP8h;|)`03+AE3(`eWXu90pI&iGQsiW_&rYJl9$^$>1W$8?GsNvTrK_z*MY zrPtG)L`w=$4#OZB&sgf;lEeEeODqG&0?gqp`a+~6iR-U#c2BdS`oISGi;WgokLwzwmgih2z`*j zp44(ZFtzX|F+|Mr=x|;4by@zoK0!jIyjNQ0jeKZRgHe81ikF8OWt+L_an6W<4ylSr zdd_!Q&8eU;b}Mp1$i`jFSvkgd^PGU7w$wJBnk387CZDYrP)+5als zTax#2CR${6nJec#`{XkQ^7);z(zPYu8s`wQn*cIn^e?fy@>4T-G zJOH?iZ*jRs{&m#05`Je)qs`O6*}X7aQkD_K|c;RfzoKRd&> zzH5xL$Ce$=P1Mzs3$;UnNbzUN3JZkTjSA%fo_ zlml;@9mme|h*7#~&Im$CUxeHDs#qnC{sIuNatw6o`;Odi!>6_7)3$2w;3&?$UNDqCLNImsD)IF@wpA_u@)h>g;k%K> za_g<uqlkb9u3|sf5U3OC#SSMh_)-}HKWb|cG&(kH#fRHT_pFc@&%`j^7LJuMwyuAK z!st0)%Jc}dOoQj`_DHwylKr;inYaR5{HXHH*S>Z4_r4{X^3nKjpLj7&e1|qnx&V{e zI`gj@P#i_F__Z&!^$Ri1^X4E2H-j9)DmIx+=Y9V_eD6h!o0V_2p*@x_|I+6DUV-NM zS)s6l9Nz<|L6BE}fH&xcM9Ynn!L24Ue02XfPv>=mr&(hCL{$0lewK&?NlFPHC>EkG zxnO9bb>Q}}K;|ioEl|!G@m#lTRY8dX;?lH9%idiVaWn9Rgt}>@B++JoExDNG4iy&M z(EO_wbpHIxIF|rAZ?i=>K^)6*pB3>*h_?O>@0}lKhj1o???DIdyywQQq7bUwnW1=e z=wG-*@6O9icY7Ft(=ci^1?=>IpRz%I=s=zJfU1Z5f$?@49)UT8>c?Q8i~BkkIS`V$ zix2TcpRVUrt0u(-*`6eZ1?3YrPJRk~kxsVD%YM3Rm`e;!JMW<9aS%a^?3HySzCTp( z`F=?rqDqA=ik*C?;vkL_BC6mP7qnzQYpAIY&gVWbdcA+<7{t+Ql*GTs@ME1bILIKl z`rs9@I9)*HyDH8mSoKu;Xta*5Bkb~#o;rFYbXXQ$!;Ae!+)CL8vpqC0>1z|+-K!Hn zID4hw4Z5k~o0W^!#~Dt#KR!Ht7#$2hnyHw+&yG-7KactmJYOK>eMSr{k6BLgFq8`N4f+8^_<2ar=qGvR(9XMB-Y%4)#X4L~}A24>HjN z08=bj1J_NUuyQPAJJQ}%m8{15^05P(LT{fabDt0JYx!%S4S~(7L_^at4(75SFe7?5LiGI2>@Bn z_`FN8bai;$(Yl}2T>q$%`@zlz8H$o+fCt)`k|1weOxpOqZ7-b_;%@6 zBfe)}QSEUTY_Z7qSNp#7|XH@AQOJ%0Gs=@S7QKBYw9x%YO` zZT7$lEjIY2u4A%y(_|#izRyoJ8)ux?dL%vb4B}WexT_@@r^G=@1afHXqS?vi^Ic3b zxbF~}U6>TvGO}S)PQYWK^wtrRvTIDR8F$mq@VgckEAT{ z7NlMhGh>b6+6#ZIzpsqy{^;-!Gi2wBwVG$cTlvZ0&+6(GQmPMj@)X9iZ{;zE$8|x4 zeJ--vD4nfuE*#1{(Idj46X*A7hPmI?kR1Wn=Pk6bM7wH1-Hc1NMNUcSz#N{xlG{wJ zhpcCJ)&f4wCf7-s=!*QTHtP#;1>Fk+I$Dd%xlU8DIgW!NnG}aH^Q_$MEJ-lSj=A(a%`?s*=@Z5B*SukrZ_4~rXU`zj3epL60Ls&IjXTTG3BBc@g z#%+=SAloUH2+q&WA?X>D8qHo+sqpnmnS(4S<;kU#cc&wrmnNy9r_iEU`gv6mKUrS{ z!X38rI-P2(baTplhyJr8n}5YQik1dooH-v8xCzWIo8Xz>CS0j_D!0gjXkU=j_n9z% zk(ncIF-zXt{0atk|NntGyx(f;=iaelC-*ANPerDdR8Kyg)WsyIuQp0O5e_z|3^sR+ zOU^VL+Bn-0EAYBP{56lCHI~Lo^WxWT*~;`>1jhE^p^%TsFk5Vt2)mlx3u=1<@5kBB zW@tJQxunmv2YKls)Hz(MMJadTlyGQj+MFTu0ors99FGn5D(+B3Zo_Dr`$O@eY4Ud)% zAHhabul}O2@4Q?yrJuwLrN&lCq#1e%A@V3reR6qXwF-b_!8fZu-q$8_1O$~YIK38QC>DNcK!&xx;{dEwDAbHt2ubZVnEq$B3N#YRBF3R5hwB6BO{47$nMwuBQ7l+b( z5XQJ(y^vN7N2Q2COp4-f6S83gCF+XC!_i;}NGgG$uHev)c4?E|$wft#!HIBib3rJ58d=KRB)gnpEW(y%kn3gC z;v^9PQjB|r1{F5)?|H_^_Vuj~)2Mv2emFh$|ns z3}wRk#p93kUc>uOX}9v{=u~zym1oGBt{=7?BS3;iQ76LSMTz$hrU;bFq4Y67k3L@c; z*lY$;As_-NjIkXgSso{Ey^+kREow||bKNKnZyrY;ek^&0cRCe^{N=%BTeN&wV^83@ zv)2D+eRl7BGMspLb8i#NGKi#Qq&KmXA*&Bdap8s>>QFZ@M}cI~4atfAzGL>j7yre2 zRcpn+i@jQvW$|`pOZvfLqG{Uc%4RduRZwlOI(%oQu|I}iB+y0ZA2ZvYZHoZNX#5~6 znOr`k*HfjKOBj(p&)RTg`;@wJ_&b)A(^$Bmq0uW_x2pUvV%f0C@< z1#;Ccp)(#O=*jx+#q&?gel`lPlCb+ZbKBExdmB@RY|`h{mZ#@sMU@b_?O`aBxvOaVL|ey9{MAD*5O7ee=kYR&#u+AX-Bm0c4PV5 zqcZFLxSxo;A+69>9yGTCxBX6ln05CF;dSw@TQlT}dv zkm|Z8Odnd**@_e^rjJmMsip6RVUPcscj+d*NiH4*L6JR^Hba1K$?ZO4Dz3 zPO%m9&r8!bEEhi$>%5z?JhLb9p@EMTkxIJk%yQ}Wbp^y|c$TX$Lh(1pjj7_leSFVafV3or1*^U9=iqdo$d7OC!pFErtV#>;t## zczBI@hSD5U1O1xQf_*RuTu^CCoSD<5N?cH@l>>_rXve8Z8 z+!N^NzRH(VAVjfF#Drr88q$9^Q0sBn5~XBmLI-FqBWmXZtAlziOm{I8NAGeL z=*zYu{qCVV(gIdek+_}=zn-G6SG>6U)SM=H@Z`uI(SSY)VLmXSzr6T+_u`&I4)A*x 
z{*qB!|Ag&CxIGzW{>$q-$6ox5+A-qqyj1+L!SgSVBh(bWd~Y7J6#8QSy2IJ}G=7g1 z<^*)0!e$#djg9+iD`W)r<#OQZX-{4dO^Dek^kcVs;}>vpsAKDZ9k-DzQ4d?<)i~IH zrC{>kfw2S8y;?1>8z^7Q3=r+4mOZ`#h zx`!}=DpARj!S2l=yL@5EHWzuTye=bMdMAR;cIRXzpvRS;BI+5MB}<=$qA|T9Mi4? zcqlzS%A}R>(d(-NbakofYDrL?{YyNMm_)1I{g0-udnhF9s40v#)K`1el zLTo_<_ye&+x;c|3DKY1WhL3&^+Z@W}>cuD1lneV!qMPrF1n$e1IPwB5k^v$ z4ofh6&PpKh2fJ_O?FAAx@3clP0n55Kx0>G7y9_WfOV%DSAE!}PIxthM-FC+i=#h)V zU$Y)>2YUHE-~VZYY<0u@2qcvk-l7@E=ji<_{!6cQy}8D5D0Aim;zT7FN_^zUyb>B&E9=_F%qlZf(&=-B?x~e&%U(?ARFfJBOd#)cVBJc zhK@vYxJtr3%=*zNmgQve@xA)Ox*9C=Z3mo1s=m8k)J+mc zy@wnLwZ*@M1Rl7xhHz||Zc}>vf8wfW9*2@JUxr+12L()r_m(ADz!Kz33a@4DxL3TH^5j;wX9U zHw_IDj(X|hfNqdX*6f(V;hnkCT%BZ7aC08gkGl}u^7-0dCF8xhEln%`Pf4hQuq$t1 zjX#&fSQ{){)eRCvt;pY%D7@GnXTItWdRXK@fGYQ-HT$7mdbMfE=ktyw>&Aj%7RHp| z*rQzTqUAZt9x!DtIMu~Et}WerFGQ+hPpT84=`U|GQBFLM>pz#a@o{x1`t)Isw8q>X zBH<%EajHWd;9+lcb#<~M)cIixhUaL5ALi9vbtf7ki@OJ*2wlU0Q19$43baA3)cjFX z_LvVF#6R<2;91?pO?+r}rs)v-FyltDt}hZ{q_0jHbUWDvD?{ z)($-ioB69`^+AljFjtw%4RIiaZ~ozvob=?=+UB=6zZmkuCxZ^FCs%!EEXP4+6{(lS z8_snszr)=@6SgdSJA*P$xkizz0>(*pNiPW*(LxvK3ccsv*#p{F_`&@9hfHm2n5FKe zL={kiH1-lz7Z(w0J3E%ecc*)Ak`wu+k?~BDSgxdi;NHZ2+oTg0hgOUldL&OuLj9d* zV$`8|$wa3Q-+R8ifU)!`58Oz@A!bvvH zI4;bSBQrd!BzC(+FyEmdtm|YWEe*b`JqX7<9*Es(=uEm833RDD+P73dTE5A0s72}I zi#MukwML+94U#@K@&DZX)&KTpnIM*?FOC2{;XTw=L@6338TPiyW(>qZvrS1ty3^#b zwH41EhUTAt88PIgyn&+={W><)rRD+&W+l+$A(x!3=oWx1DLK?})H!ee6ZB7gY$`umgQW?;@E_FTwIjYZ6 z7wHP_@80@qNx-5T9(r5;G^m_Ra^hbSCt3HD% zG52*l(7Eq7to-Tl5Ow({evn4vj$X)xbh(C8oK6mNoz~r2vUs?>73K7-Q1sNVr@rwn z4Kz>C1bDgQCZd>ma@)NFK;2l1)jLR=F(fo2b(IFJ1AospmC!GMOE9hJYn1+_9t1GLN3dxeN0GAl4P zZ!HX_`?rsRE#GBL2`1$c`gO{)y3sAilCrKlp`K>xQ+}*1!VF2EIS$@_!Jo$7{N__3 z8h?EoSsCVJ-;NaJ@TDxJI(icQn&!=$+|Bgq8h%Ezr26m?Lt4^idG~YaV7Q1P(ag6M zL(j(@GJfV!V2TYfJwH!w>kp7Zr?E_?9y6EGV7g8VhEWCOc(iAv#pepCOP7NjaCxdvkhXqJxOBl0Vp7yJh{` z4DUb2x$U_Y@2kJ9s%z!xGuE+s`><)W2ziYlw(o467HtR{3CMq+yHQvx4-eCi>p+a-5&2)%80V%-h0uQCK zo#DHmeqnhe7&`(Hx{pZ^YkV|WTkWplCs`OSa@rk{t)9KO++T0q{Bf*cVnV%*JLiF% z=##u5a?{*OX#KcJQP57e2xZ}#&k3-w^VY7%UruyNMBhI0E5o3d%p30BqFMjh#v9e@ z#G2n+9ERZJyY?7;lhSS2LT>!o(0yU3T3?zpf{Ae)-W2^Xp?Xh2tNT<3DNl~Ceq z^3kg*?Ar9TvU9mw;aHz~ZQy$4R79l+tt_5!jd%l(1lVT{Skw1nPJ#1iw+=@yF#Wa^ zS5R7B%1rP#%2`}i@xZLciq=vNJOG=A`A$s^2?$dukv%A@FR zNK3$&242%=z8LxG(y4hu;`zP!aoBOi2P)ea^>3*WI3^=b=gIJ8_PTV6l~T$#Wi`-v zxaG?ScgmGgy2sc?%)~4ZF^0jkcqP48DK&K)+l5 zwuDp?)f2R8_E$ni0x-_zCCBY+dS27P6=r&tzp;6nbG*i8EXGeO*?0Msy;&8^ApkUL z3x+=sWL?$%?`m7zCe=m+25NG}2*v%;M(c+rs4=^Zs*^?mKQ@J-yOrAy^T zUM|q1kZSh6^(0_Tm-ohIgSzBuE-QZht*EW`nJ%rASCR{3x5pM9vUym)$<_pC>BjE! 
zqFV)5Wq*vdGGSBajE8~fWOTd;dB{1wCsELXOmpOV^|@F@JjoCK!20quT4|hqq2N(T zKD04bXChwT9xD+1A`Uu=UwyP)dU*rnvI9Y?r&G;}J50kNP0%l$}5bqLcMHU`(6CJP-7k#=qG8ca|tV8}CH?WUb zv~6c5kM2gq%%7|JU;JxbCsbF)6{0Bw?yYW&fgmdVb)g-Muq2B!{*nj6$iAllK#8k~|*YJGjmxN2%aYgxqqGu9EQ zf&ZLL72`X*Si(T_9O5T$jR}dK*q4RCxwZs<6{o{?9y;`n}sGkPuCO?>cnj>_$yWOd;7fQ`+<`VKFmfYeoT!7)1E#FruG>58^Fzuj7h z=^~sd;93w}_h8%&xM}Oe7AV530vR`ZS-W^AFt;}a-tBR6yr#N1N6Qewhlh$k(>Kk> z79VVUUI>9N4jioEi86;!2~P3JyM{%gOTqYqMjE>_R z)2~W6oEXb`Ga^K0rjSbB^bzRT`5oT$dg>DeEG&3>l0>BcZ+43yyQ7ANoQ?Q10-_k%xdnS1- zX50lB!>l!zmcOP66rueFanWLM^c(Hsl!I|f%Ge4`w4TG0PYKF@7~&WWV%a^RC!0Fe zL&x{kCYalzjnO%i;g!J)Xk;(JreM$k&f4cLqG@(zXr*y=#q%&v_z8z%ffutsZO**a zGf-VP=ejOH?^R5{2&_Wg%{*<8%g5;AzhKi)6`ByRlW*;`2`Y}jQ);V>^ZqHOY8jBY zp-)@M995uQ5htbYENU?dF9X_;w@FFY7>W2!#;uT#eaSO!iF{X-@SLT|fWdKVjJUo0DkOysY zgN|4faBR)u91<0`GB+vf)BE0B39LW->z2kr=Kp(}tl=X*;MV%vE50~`H2ydbiJhAf z5-g4M_~wv5_Pg_MRUwteK5y^tlD-FBOQc{)q;AxmPvoL_p?DciIYCIS=3&eI!sWIt z=EDa6pTtf6UBDXJyrt3id~lX37{{?ZEAZbp+r>On>{y$V-j1L29A3C08Bf8n^7Rl5 z(@d}jia;I}2%`CEWbH=#V7JfekPchq{`R2Gay+SX^X!OA3ja1Wn5zKxzqxvgU(23b z9(^0!g*t-z>pI~g^n!0c_-v4zMu9zvR}jHffd=g@Up;cQHj5_OP1U6n)O(&x;7Cue^LlV&mIP70c2E=3dKzP7n57; z=8#I-0tw~8hYOo{`_cQ`8T7--WpPtM*<~hb7#C=G8vT6V^h+>@?Y$#XoPmey@I?1m zUButRI6gL8;%12gdm$KGz%1BN@&SaUyT!CQbu;mP_ zyI?{x%YoULRPiysnc_|5$FJNzW0DPDIhH@fE$1k|FaP5eO90pldh8y#K0LoPVD^ST zj;==gnI$$gp`*EmEcaoG9Ij4K&?L(#kH@6oY{m1?y~nf?BA_HLhjb>>mc{D)3(OsF zu8Wsaa!>Ucnqu72R*ZN6rM)ZSA+0m*X<$r!h+^{p4N7QR0h7|0;Zr~A2ML~(V%`ZX zb!EKgtES3+RrpJxxL2->`3UR3e41^if=rxAoCL3=kJ_MW5denY)W7oZ?Mc* zRXKxW0u(4-wI(}awq*@tNOLe{peky4j$G7wcT?JKtnsZgBaBTSOvnkr^HLIL;301H z&&!?@+6_7Rof|JdKn?H|{k9C+oXG#i-voeYs!RSGC)iw3fFyGaBXXV(O|9I#gq7Y; z6b4{z0g^n5vJnAnh-WjRzUCT7Wm{`s+(sM?Ap*2tpTiWv4HkF=7GS?==_P){YvvaI zRI$n1cP-pa0pJvnKM1q9wBqm|Xypp}Z} z$d3V6XzQJdM=hvqzs^-pIjCs;j@?Bosn_)==H*>sPQ&j_vKO3l!9uyDUI5N%7DhEFX zT{HEErs>u)l;g+vkEA0$y^+LOcyvMqWM$RlCQd%6#!e{2Dy*#{KEA+iW2P{1<3b~k z;)_*cuvCs;=Nx_MY4CYGC5m2pTVjjKmMKt}s0i{0lz7_Xgm%j>)k)k+C)9Mnw@5ot zAtkYkJZ&BGnpzCeswEf2_m02S>2YFWcL;F>LJS|6m-hk~#4`4q-rmR@wr?eFzMZ-8 zSXD$34-^HK4CXvTR;Uh@S|@RSe@zqh>C)4am$8Dy!;Z^kviNGu62#CPc3pKtY7g7wpL{G(KdIj{=2J3f`}MI~Obg(fvcslcP9Fmb-Q2BU47{t2Ki?@5MGEQN6|buC3^ zlCi~+8U}QK4cWIx!x~a`JIcNs+q#d#%4KjLC_@N!l&!*j*2~qUv&W^AeN*#qW|5MZ zkI~2X@6a4Q zt&ExinIAJN$4^xDmK%tvKvkBen4x!<0Xwc@4>whgKQ@nl8+i1iCO~}sOEm3^0>@3n zJq&|an<~^z1`u@mXc?4OCW_mH69Vd&`+p2RGeepbSo*s?X$foeBW@4PqH-zT6!0-)2NJDG;Zuo{*$q z5Pj5JIv(|tq^beh=>EK^g7SaEPew&M1sV{mm-qvq3O49uzPMaJKv!Ka1pTRbm(Qfa zr1+*nFhyypApTB);^s5|WAj9JzwOhP8QYEu%DFQsFZ#vs;j57dLkK@;AZMk>@cHUa z7w5nLoV}ovWY*zqc;{K~JDnyvSk@_VdS&W+kE`FvQ+7V~LoMz3au5w+e~f%X9Vm>q zgCh-UISpihGMYIYWL6PWg)DDtW<)}d-ZYPWxxPNmF*;HX5CIVBfF!R1JtHFTx)-o; zj&EaF15?BDuV?TA+%rG}Y_3RM0)wncR?!##q5mtar=cp-TWZG~)b;tOc6m7DmD z#`Qvc?w-%Ac|~6dLqNSPy!Q{JI)zAE(!4bNVbJET8`9gO+s99o+s5R>%plkOVf{Ox zE&ed#=B|?OQ?3qjfx`Ycfc+jkq9XjG1~rHhpDAWm*(ZQQK#w-O_g^{4@B*)pSY zO9(1BBf0z>3H0mNL#7 zXcpZBxqqMLXZ-(88?pKB8#kl^P%&9jz*&=)GRmoN-EC^@B}xtgvS)EV0lQ{g{U6jj z!(Kh7xEUaM6C!_OC0K5djo9bajsd{z)RS1*RCfL=+bPEanue)|Q77VbR{aTL^F@te--w5OZcA6#~qW3bRm)idXv=kXPkv5$r zr8SS%3u%Uc4KUk7-I9AxQ!dP1CdS?I{aFDrKGSJM_bC66%jLD-*V_Y5{QiI+uC9O3 zN}@mEE7u@t&3FuGBzMBqH)WIO2-nOEK=S}-yS#s`G;5+Q)nw&$ndRRvLGX{ zss9>pXVOEqz^N)C88Rqir~~7j&r`kw=PAwDUmOlofHW@wu<}j*RyNO^OdhbG(Xr2| z>JYF4o99O6j>2gFPtWhpd4{YmgvAtN1$hhFM08cmR+Q_Lx7DXx^eohz>9Kaat#j zqLzJ?Zt`{+$5t`_UBZIY7wZ6u7yW6`-!E}-6&1iqbKJ{(v}MU> znZR&ixZ8ImjYTJJ+<`EFuJRBUf1QZtJWF>NS#4?8ow8bp8K|cX834tJON7g{hnSb$ z^5R5lACD9K6I=diz*A>HYjN6YIAY<_zQVduZZr~q0b}NdpoGC}WDlfG<@tim1Mgah 
z_xeQ~G#?F9xq##_NEEQk{R2pw$MaJU?9pdInQ(fW#1L4xvg(+H+wC=6Ak#sadI|m> z1{3{{GeY>U-JgrwmlzS4a4w}3PcpoDlNAz9^iLW@r2lvoG%pk+e#VGn(VfY zVaNEZA^zil+5KTav*bH%X)U;P>8o2_8ciFFm}sCebIJsUqxn{7S;)YD0K*yjA&*M4 zbu>>DLIwoy#ud^OKm>^4SI^LA8>bb#xDe3W01qO~!z%1hX*5gLzGH9Iyrp8YVv{!7 zJ9qA|FxDpWSbz(n2=oEy&pGhDI{Ib)z(=7F8PE&_!VfIoD?CuPc6;S{dng3_X{c&F JC{{KL{C{3veEk3b literal 0 HcmV?d00001 diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index 3d5a961d6f7c7..f273a4dd0eea5 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -12,6 +12,7 @@ dependencies { implementation project(':metadata-service:services') implementation project(':metadata-io') implementation project(':metadata-utils') + implementation project(':metadata-models') implementation externalDependency.graphqlJava implementation externalDependency.graphqlJavaScalars @@ -40,8 +41,10 @@ graphqlCodegen { "$projectDir/src/main/resources/auth.graphql".toString(), "$projectDir/src/main/resources/timeline.graphql".toString(), "$projectDir/src/main/resources/tests.graphql".toString(), + "$projectDir/src/main/resources/properties.graphql".toString(), "$projectDir/src/main/resources/step.graphql".toString(), "$projectDir/src/main/resources/lineage.graphql".toString(), + "$projectDir/src/main/resources/forms.graphql".toString() ] outputDir = new File("$projectDir/src/mainGeneratedGraphQL/java") packageName = "com.linkedin.datahub.graphql.generated" diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index e45bed33eb023..2bde7cb61047b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -19,9 +19,10 @@ private Constants() {} public static final String TESTS_SCHEMA_FILE = "tests.graphql"; public static final String STEPS_SCHEMA_FILE = "step.graphql"; public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; + public static final String PROPERTIES_SCHEMA_FILE = "properties.graphql"; + public static final String FORMS_SCHEMA_FILE = "forms.graphql"; public static final String BROWSE_PATH_DELIMITER = "/"; public static final String BROWSE_PATH_V2_DELIMITER = "␟"; public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; - public static final String ENTITY_FILTER_NAME = "_entityType"; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index f61d76e72e8bd..4819510d34018 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -57,6 +57,7 @@ import com.linkedin.datahub.graphql.generated.EntityRelationship; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; import com.linkedin.datahub.graphql.generated.GetRootGlossaryNodesResult; import com.linkedin.datahub.graphql.generated.GetRootGlossaryTermsResult; import com.linkedin.datahub.graphql.generated.GlossaryNode; @@ -91,12 +92,17 @@ import com.linkedin.datahub.graphql.generated.QuerySubject; import com.linkedin.datahub.graphql.generated.QuickFilter; import 
com.linkedin.datahub.graphql.generated.RecommendationContent; +import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SiblingProperties; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import com.linkedin.datahub.graphql.generated.StructuredPropertyParams; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.TestResult; +import com.linkedin.datahub.graphql.generated.TypeQualifier; import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.resolvers.MeResolver; import com.linkedin.datahub.graphql.resolvers.assertion.AssertionRunEventResolver; @@ -135,6 +141,11 @@ import com.linkedin.datahub.graphql.resolvers.embed.UpdateEmbedResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityExistsResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityPrivilegesResolver; +import com.linkedin.datahub.graphql.resolvers.form.BatchAssignFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.CreateDynamicFormAssignmentResolver; +import com.linkedin.datahub.graphql.resolvers.form.IsFormAssignedToMeResolver; +import com.linkedin.datahub.graphql.resolvers.form.SubmitFormPromptResolver; +import com.linkedin.datahub.graphql.resolvers.form.VerifyFormResolver; import com.linkedin.datahub.graphql.resolvers.glossary.AddRelatedTermsResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryNodeResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryTermResolver; @@ -254,6 +265,7 @@ import com.linkedin.datahub.graphql.resolvers.type.EntityInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver; +import com.linkedin.datahub.graphql.resolvers.type.PropertyValueResolver; import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver; @@ -288,7 +300,10 @@ import com.linkedin.datahub.graphql.types.dataset.DatasetType; import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType; import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper; +import com.linkedin.datahub.graphql.types.datatype.DataTypeType; import com.linkedin.datahub.graphql.types.domain.DomainType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeType; +import com.linkedin.datahub.graphql.types.form.FormType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; @@ -303,6 +318,7 @@ import com.linkedin.datahub.graphql.types.role.DataHubRoleType; import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyType; import com.linkedin.datahub.graphql.types.tag.TagType; import 
com.linkedin.datahub.graphql.types.test.TestType; import com.linkedin.datahub.graphql.types.view.DataHubViewType; @@ -323,6 +339,7 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -392,6 +409,7 @@ public class GmsGraphQLEngine { private final LineageService lineageService; private final QueryService queryService; private final DataProductService dataProductService; + private final FormService formService; private final FeatureFlags featureFlags; @@ -439,6 +457,10 @@ public class GmsGraphQLEngine { private final QueryType queryType; private final DataProductType dataProductType; private final OwnershipType ownershipType; + private final StructuredPropertyType structuredPropertyType; + private final DataTypeType dataTypeType; + private final EntityTypeType entityTypeType; + private final FormType formType; /** A list of GraphQL Plugins that extend the core engine */ private final List graphQLPlugins; @@ -494,6 +516,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.lineageService = args.lineageService; this.queryService = args.queryService; this.dataProductService = args.dataProductService; + this.formService = args.formService; this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); @@ -533,11 +556,15 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.testType = new TestType(entityClient); this.dataHubPolicyType = new DataHubPolicyType(entityClient); this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(); + this.schemaFieldType = new SchemaFieldType(entityClient, featureFlags); this.dataHubViewType = new DataHubViewType(entityClient); this.queryType = new QueryType(entityClient); this.dataProductType = new DataProductType(entityClient); this.ownershipType = new OwnershipType(entityClient); + this.structuredPropertyType = new StructuredPropertyType(entityClient); + this.dataTypeType = new DataTypeType(entityClient); + this.entityTypeType = new EntityTypeType(entityClient); + this.formType = new FormType(entityClient); // Init Lists this.entityTypes = @@ -573,11 +600,16 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { dataHubViewType, queryType, dataProductType, - ownershipType); + ownershipType, + structuredPropertyType, + dataTypeType, + entityTypeType, + formType); this.loadableTypes = new ArrayList<>(entityTypes); // Extend loadable types with types from the plugins // This allows us to offer search and browse capabilities out of the box for those types for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + this.entityTypes.addAll(plugin.getEntityTypes()); Collection> pluginLoadableTypes = plugin.getLoadableTypes(); if (pluginLoadableTypes != null) { this.loadableTypes.addAll(pluginLoadableTypes); @@ -654,18 +686,23 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { configureRoleResolvers(builder); configureSchemaFieldResolvers(builder); configureEntityPathResolvers(builder); + configureResolvedAuditStampResolvers(builder); configureViewResolvers(builder); configureQueryEntityResolvers(builder); 
configureOwnershipTypeResolver(builder); configurePluginResolvers(builder); + configureStructuredPropertyResolvers(builder); + configureFormResolvers(builder); } private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { builder.type( "Role", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "RoleAssociation", typeWiring -> @@ -703,7 +740,9 @@ public GraphQLEngine.Builder builder() { .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(PROPERTIES_SCHEMA_FILE)) + .addSchema(fileBasedSchema(FORMS_SCHEMA_FILE)); for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { List pluginSchemaFiles = plugin.getSchemaFiles(); @@ -767,6 +806,8 @@ private void configureContainerResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "platform", @@ -841,7 +882,8 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) .dataFetcher( "aggregateAcrossEntities", - new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) + new AggregateAcrossEntitiesResolver( + this.entityClient, this.viewService, this.formService)) .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) .dataFetcher( "autoCompleteForMultiple", @@ -928,7 +970,8 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) .dataFetcher( - "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))); + "browseV2", + new BrowseV2Resolver(this.entityClient, this.viewService, this.formService))); } private DataFetcher getEntitiesResolver() { @@ -1139,7 +1182,14 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { new UpdateOwnershipTypeResolver(this.ownershipTypeService)) .dataFetcher( "deleteOwnershipType", - new DeleteOwnershipTypeResolver(this.ownershipTypeService))); + new DeleteOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher("submitFormPrompt", new SubmitFormPromptResolver(this.formService)) + .dataFetcher("batchAssignForm", new BatchAssignFormResolver(this.formService)) + .dataFetcher( + "createDynamicFormAssignment", + new CreateDynamicFormAssignmentResolver(this.formService)) + .dataFetcher( + "verifyForm", new VerifyFormResolver(this.formService, this.groupService))); } private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { @@ -1342,7 +1392,25 @@ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder typeWiring.dataFetcher( "ownershipType", new EntityTypeResolver( - entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))); + entityTypes, (env) -> ((Owner) 
env.getSource()).getOwnershipType()))) + .type( + "StructuredPropertiesEntry", + typeWiring -> + typeWiring + .dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertiesEntry) env.getSource()) + .getStructuredProperty() + .getUrn())) + .dataFetcher( + "valueEntities", + new BatchGetEntitiesResolver( + entityTypes, + (env) -> + ((StructuredPropertiesEntry) env.getSource()).getValueEntities()))); } /** @@ -1422,6 +1490,14 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { "owner", new OwnerTypeResolver<>( ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "SchemaField", + typeWiring -> + typeWiring.dataFetcher( + "schemaFieldEntity", + new LoadableTypeResolver<>( + schemaFieldType, + (env) -> ((SchemaField) env.getSource()).getSchemaFieldEntity().getUrn()))) .type( "UserUsageCounts", typeWiring -> @@ -1518,6 +1594,8 @@ private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) .dataFetcher("schemaMetadata", new AspectResolver()) .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService))); } @@ -1528,7 +1606,9 @@ private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) typeWiring .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); } private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { @@ -1551,6 +1631,16 @@ private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); } + private void configureResolvedAuditStampResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "ResolvedAuditStamp", + typeWiring -> + typeWiring.dataFetcher( + "actor", + new LoadableTypeResolver<>( + corpUserType, (env) -> ((CorpUser) env.getSource()).getUrn()))); + } + /** * Configures resolvers responsible for resolving the {@link * com.linkedin.datahub.graphql.generated.CorpUser} type. 
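Editorial note (not part of the patch): nearly every dataFetcher wired in the hunks above follows the same shape -- pull a URN off the GraphQL parent object and hand it to a loadable type for the actual fetch. The sketch below distills that pattern as a minimal, self-contained example; `UrnLoadable` and `UrnKeyedResolver` are hypothetical stand-ins for illustration, not DataHub classes, and the real `LoadableTypeResolver` additionally handles DataLoader batching that this sketch omits.

import java.util.concurrent.CompletableFuture;
import java.util.function.Function;

// Hypothetical stand-in for a DataHub loadable type: something that can
// asynchronously load a full GraphQL object given its URN.
interface UrnLoadable<T> {
  CompletableFuture<T> load(String urn);
}

// Minimal sketch of the resolver pattern wired above: extract a key from the
// parent (e.g. StructuredPropertiesEntry -> structuredProperty URN), then
// delegate the load to the loadable type.
final class UrnKeyedResolver<P, T> {
  private final UrnLoadable<T> type;
  private final Function<P, String> keyProvider;

  UrnKeyedResolver(UrnLoadable<T> type, Function<P, String> keyProvider) {
    this.type = type;
    this.keyProvider = keyProvider;
  }

  CompletableFuture<T> get(P parent) {
    // One key per parent object; the loadable type owns the actual fetch.
    return type.load(keyProvider.apply(parent));
  }
}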
@@ -1559,8 +1649,10 @@ private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { builder.type( "CorpUser", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "CorpUserInfo", typeWiring -> @@ -1581,6 +1673,8 @@ private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService))); builder .type( @@ -1623,8 +1717,10 @@ private void configureTagAssociationResolver(final RuntimeWiring.Builder builder builder.type( "Tag", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "TagAssociation", typeWiring -> @@ -1659,6 +1755,8 @@ private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) .dataFetcher( "platform", @@ -1690,6 +1788,8 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1758,6 +1858,42 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { }))); } + private void configureStructuredPropertyResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "StructuredPropertyDefinition", + typeWiring -> + typeWiring + .dataFetcher( + "valueType", + new LoadableTypeResolver<>( + dataTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getValueType() + .getUrn())) + .dataFetcher( + "entityTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getEntityTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + builder.type( + "TypeQualifier", + typeWiring -> + typeWiring.dataFetcher( + "allowedTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((TypeQualifier) env.getSource()) + .getAllowedTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + } + /** * Configures resolvers responsible for resolving the {@link * com.linkedin.datahub.graphql.generated.Chart} type. 
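Editorial note (not part of the patch): the `configureStructuredPropertyResolvers` hunk above resolves `entityTypes` and `allowedTypes` by first mapping each referenced entity to its key and then issuing one batched load. The snippet below isolates that stream shape under stated assumptions: `Ref`, `Entity`, and `batchLoad` are hypothetical placeholders, whereas the patch itself uses `LoadableTypeBatchResolver` with `entityTypeType.getKeyProvider()`.

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

final class BatchKeyExample {
  // Hypothetical stand-ins: a reference carrying just a URN, and the entity it
  // resolves to. In the patch these roles are played by the generated
  // EntityType objects and the entityTypeType loadable type.
  record Ref(String urn) {}
  record Entity(String urn) {}

  // Hypothetical batch loader standing in for the resolver's underlying fetch;
  // a single call resolves every key at once.
  static CompletableFuture<List<Entity>> batchLoad(List<String> keys) {
    return CompletableFuture.completedFuture(
        keys.stream().map(Entity::new).collect(Collectors.toList()));
  }

  static CompletableFuture<List<Entity>> resolveAll(List<Ref> refs) {
    // Same stream shape as the wiring above: map each referenced entity to its
    // key, preserving order, then issue one batched load instead of N point
    // fetches.
    List<String> keys = refs.stream().map(Ref::urn).collect(Collectors.toList());
    return batchLoad(keys);
  }
}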
@@ -1769,6 +1905,8 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher( "platform", @@ -1858,6 +1996,7 @@ private void configureTypeResolvers(final RuntimeWiring.Builder builder) { .type( "HyperParameterValueType", typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("PropertyValue", typeWiring -> typeWiring.typeResolver(new PropertyValueResolver())) .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) .type( "TimeSeriesAspect", @@ -1884,6 +2023,8 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "dataFlow", new LoadableTypeResolver<>( @@ -1947,6 +2088,8 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1979,6 +2122,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher( "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( @@ -2064,6 +2209,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -2103,6 +2250,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher( "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) .dataFetcher( "platform", @@ -2127,6 +2276,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new 
WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "dataPlatformInstance", @@ -2145,6 +2296,8 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "dataPlatformInstance", @@ -2179,6 +2332,8 @@ private void configureDomainResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); builder.type( "DomainAssociation", @@ -2193,12 +2348,64 @@ private void configureDomainResolvers(final RuntimeWiring.Builder builder) { .getUrn()))); } + private void configureFormResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "FormAssociation", + typeWiring -> + typeWiring.dataFetcher( + "form", + new LoadableTypeResolver<>( + formType, + (env) -> + ((com.linkedin.datahub.graphql.generated.FormAssociation) env.getSource()) + .getForm() + .getUrn()))); + builder.type( + "StructuredPropertyParams", + typeWiring -> + typeWiring.dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertyParams) env.getSource()) + .getStructuredProperty() + .getUrn()))); + builder.type( + "FormActorAssignment", + typeWiring -> + typeWiring + .dataFetcher( + "users", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + final FormActorAssignment actors = env.getSource(); + return actors.getUsers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> { + final FormActorAssignment actors = env.getSource(); + return actors.getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher("isAssignedToMe", new IsFormAssignedToMeResolver(groupService))); + } + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { builder.type( "DataProduct", typeWiring -> typeWiring .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index 4829194a8ce4d..5b780cc8cb40b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -25,6 +25,7 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.FormService; import 
com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -73,6 +74,7 @@ public class GmsGraphQLEngineArgs { QueryService queryService; FeatureFlags featureFlags; DataProductService dataProductService; + FormService formService; // any fork specific args should go below this line } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java index 472d9465aeee1..a544bd46527c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql; +import com.linkedin.datahub.graphql.types.EntityType; import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.idl.RuntimeWiring; import java.util.Collection; @@ -34,6 +35,9 @@ public interface GmsGraphQLPlugin { */ Collection> getLoadableTypes(); + /** Return a list of Entity Types that the plugin services */ + Collection> getEntityTypes(); + /** * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific * resolvers. diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java new file mode 100644 index 0000000000000..ae8ac4330e7fb --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql; + +import com.linkedin.common.SubTypes; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.Collections; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@AllArgsConstructor +public class SubTypesResolver implements DataFetcher> { + + EntityClient _entityClient; + String _entityType; + String _aspectName; + + @Override + @Nullable + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + SubTypes subType = null; + final String urnStr = ((Entity) environment.getSource()).getUrn(); + try { + final Urn urn = Urn.createFromString(urnStr); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(_aspectName), + context.getAuthentication()) + .get(urn); + if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) { + subType = + new SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data()); + } + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e); + } + return subType; + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index 22ee4d4d4845c..d8665ae784bd1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.RawAspect; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.AspectSpec; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index 31a8359f8f0e3..de389a358d936 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -12,8 +12,8 @@ import com.linkedin.datahub.graphql.generated.BarSegment; import com.linkedin.datahub.graphql.generated.MetadataAnalyticsInput; import com.linkedin.datahub.graphql.generated.NamedBar; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Filter; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index 03333bda05f61..baea3ea4e6201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.generated.NamedLine; import com.linkedin.datahub.graphql.generated.NumericDataPoint; import com.linkedin.datahub.graphql.generated.Row; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.util.List; import java.util.Map; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index e74ed09849763..667ccd368a729 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -17,4 +17,5 @@ public class FeatureFlags { private boolean showAcrylInfo = false; private boolean showAccessManagement = false; private boolean nestedDomainsEnabled = false; + private boolean schemaFieldEntityFetchEnabled = false; } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index da4a3a76dd7e0..d9ce2472c8634 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -12,13 +12,14 @@ import com.linkedin.datahub.graphql.generated.BrowseResultsV2; import com.linkedin.datahub.graphql.generated.BrowseV2Input; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.search.SearchUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResultV2; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -37,6 +38,7 @@ public class BrowseV2Resolver implements DataFetcher get(DataFetchingEnvironment environmen ? BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); BrowseResultV2 browseResults = _entityClient.browseV2( @@ -76,8 +78,8 @@ public CompletableFuture get(DataFetchingEnvironment environmen pathStr, maybeResolvedView != null ? 
SearchUtils.combineFilters( - filter, maybeResolvedView.getDefinition().getFilter()) - : filter, + inputFilter, maybeResolvedView.getDefinition().getFilter()) + : inputFilter, sanitizedQuery, start, count, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index 81b52991cde90..f127e6a49abff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -126,9 +126,15 @@ public CompletableFuture get(final DataFetchingEnvironment environmen appConfig.setAuthConfig(authConfig); final VisualConfig visualConfig = new VisualConfig(); - if (_visualConfiguration != null && _visualConfiguration.getAssets() != null) { - visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); - visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + if (_visualConfiguration != null) { + if (_visualConfiguration.getAssets() != null) { + visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); + visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + } + if (_visualConfiguration.getAppTitle() != null) { + visualConfig.setAppTitle(_visualConfiguration.getAppTitle()); + } + visualConfig.setHideGlossary(_visualConfiguration.isHideGlossary()); } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index a0f1698bf99e8..72912087190c0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -12,9 +12,9 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.dataproduct.DataProductAssociation; import com.linkedin.dataproduct.DataProductProperties; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 8f6d109e71b2c..6229e38954163 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -7,7 +7,7 @@ import com.linkedin.datahub.graphql.generated.Domain; import 
com.linkedin.datahub.graphql.generated.DomainEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java new file mode 100644 index 0000000000000..39c9210c289e1 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java @@ -0,0 +1,52 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class BatchAssignFormResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + + public BatchAssignFormResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final BatchAssignFormInput input = + bindArgument(environment.getArgument("input"), BatchAssignFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final List<String> entityUrns = input.getEntityUrns(); + final Authentication authentication = context.getAuthentication(); + + return CompletableFuture.supplyAsync( + () -> { + try { + _formService.batchAssignFormToEntities( + entityUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + formUrn, + authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java new file mode 100644 index 0000000000000..5b5f058dbdeac --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java @@ -0,0 +1,50 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import 
com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class CreateDynamicFormAssignmentResolver + implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + + public CreateDynamicFormAssignmentResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final CreateDynamicFormAssignmentInput input = + bindArgument(environment.getArgument("input"), CreateDynamicFormAssignmentInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final DynamicFormAssignment formAssignment = FormUtils.mapDynamicFormAssignment(input); + final Authentication authentication = context.getAuthentication(); + + return CompletableFuture.supplyAsync( + () -> { + try { + _formService.createDynamicFormAssignment(formAssignment, formUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java new file mode 100644 index 0000000000000..e7bf87ae7644e --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java @@ -0,0 +1,80 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class IsFormAssignedToMeResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final GroupService _groupService; + + public IsFormAssignedToMeResolver(@Nonnull final GroupService groupService) { + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final FormActorAssignment parent = environment.getSource(); + + return CompletableFuture.supplyAsync( + () -> { + try { + + // Assign urn and group urns + final Set<String> assignedUserUrns = + parent.getUsers() != null + ? parent.getUsers().stream().map(CorpUser::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Set<String> assignedGroupUrns = + parent.getGroups() != null + ? 
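/* Collect the assigned group urns as strings so the user's group memberships can be matched against them below. */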
parent.getGroups().stream().map(CorpGroup::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + + // First check whether user is directly assigned. + if (assignedUserUrns.size() > 0) { + boolean isUserAssigned = assignedUserUrns.contains(userUrn.toString()); + if (isUserAssigned) { + return true; + } + } + + // Next check whether the user is assigned indirectly, by group. + if (assignedGroupUrns.size() > 0) { + final List<Urn> groupUrns = + _groupService.getGroupsForUser(userUrn, context.getAuthentication()); + boolean isUserGroupAssigned = + groupUrns.stream() + .anyMatch(groupUrn -> assignedGroupUrns.contains(groupUrn.toString())); + if (isUserGroupAssigned) { + return true; + } + } + } catch (Exception e) { + log.error( + "Failed to determine whether the form is assigned to the currently authenticated user! Returning false.", + e); + } + + // Else the user is not directly assigned. + return false; + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java new file mode 100644 index 0000000000000..5b40c353b3809 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java @@ -0,0 +1,89 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.metadata.service.FormService; +import com.linkedin.structured.PrimitivePropertyValueArray; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class SubmitFormPromptResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + + public SubmitFormPromptResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final Urn entityUrn = UrnUtils.getUrn(environment.getArgument("urn")); + final SubmitFormPromptInput input = + bindArgument(environment.getArgument("input"), SubmitFormPromptInput.class); + final String promptId = input.getPromptId(); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final String fieldPath = input.getFieldPath(); + + return CompletableFuture.supplyAsync( + () -> { + try { + if (input.getType().equals(FormPromptType.STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return 
_formService.submitStructuredPropertyPromptResponse( + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId, + context.getAuthentication()); + } else if (input.getType().equals(FormPromptType.FIELDS_STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + if (fieldPath == null) { + throw new IllegalArgumentException( + "Failed to provide fieldPath for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return _formService.submitFieldStructuredPropertyPromptResponse( + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId, + fieldPath, + context.getAuthentication()); + } + return false; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java new file mode 100644 index 0000000000000..54e3562c97add --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java @@ -0,0 +1,63 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class VerifyFormResolver implements DataFetcher<CompletableFuture<Boolean>> { + + private final FormService _formService; + private final GroupService _groupService; + + public VerifyFormResolver( + @Nonnull final FormService formService, @Nonnull final GroupService groupService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final VerifyFormInput input = + bindArgument(environment.getArgument("input"), VerifyFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final Urn entityUrn = UrnUtils.getUrn(input.getEntityUrn()); + final Authentication authentication = context.getAuthentication(); + final Urn actorUrn = UrnUtils.getUrn(authentication.getActor().toUrnStr()); + + return CompletableFuture.supplyAsync( + () -> { + try { + final List<Urn> groupsForUser = + _groupService.getGroupsForUser(actorUrn, authentication); + if (!_formService.isFormAssignedToUser( + formUrn, entityUrn, actorUrn, 
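/* Group memberships are resolved once up front so the service can check both direct and group-based assignment in a single call. */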
groupsForUser, authentication)) { + throw new AuthorizationException( + String.format( + "Failed to authorize form on entity as form with urn %s is not assigned to user", + formUrn)); + } + _formService.verifyFormForEntity(formUrn, entityUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index 6a204286ba44e..b52153d70fa7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -37,7 +37,6 @@ public class CreateGlossaryNodeResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index 93582fb956bd8..1f8c17ee72884 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -6,7 +6,7 @@ import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; import com.linkedin.datahub.graphql.generated.EntityCountResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 0b909dee51374..3e9583824a568 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -44,7 +44,8 @@ public CompletableFuture rollbackIngestion( return CompletableFuture.supplyAsync( () -> { try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); + _entityClient.rollbackIngestion( + runId, context.getAuthorizer(), context.getAuthentication()); return true; } catch (Exception e) { throw new RuntimeException("Failed to rollback ingestion execution", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java new file mode 100644 index 0000000000000..25768da819555 --- /dev/null +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java @@ -0,0 +1,105 @@ +package com.linkedin.datahub.graphql.resolvers.mutate.util; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.form.FormInfo; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import java.util.Objects; +import javax.annotation.Nonnull; + +public class FormUtils { + + private static final String COMPLETED_FORMS = "completedForms"; + private static final String INCOMPLETE_FORMS = "incompleteForms"; + private static final String VERIFIED_FORMS = "verifiedForms"; + private static final String OWNERS = "owners"; + private static final String COMPLETED_FORMS_COMPLETED_PROMPT_IDS = + "completedFormsCompletedPromptIds"; + private static final String INCOMPLETE_FORMS_COMPLETED_PROMPT_IDS = + "incompleteFormsCompletedPromptIds"; + + private FormUtils() {} + + public static PrimitivePropertyValueArray getStructuredPropertyValuesFromInput( + @Nonnull final SubmitFormPromptInput input) { + final PrimitivePropertyValueArray values = new PrimitivePropertyValueArray(); + + input + .getStructuredPropertyParams() + .getValues() + .forEach( + value -> { + if (value.getStringValue() != null) { + values.add(PrimitivePropertyValue.create(value.getStringValue())); + } else if (value.getNumberValue() != null) { + values.add(PrimitivePropertyValue.create(value.getNumberValue().doubleValue())); + } + }); + + return values; + } + + /** Map a GraphQL CreateDynamicFormAssignmentInput to the GMS DynamicFormAssignment aspect */ + @Nonnull + public static DynamicFormAssignment mapDynamicFormAssignment( + @Nonnull final CreateDynamicFormAssignmentInput input) { + Objects.requireNonNull(input, "input must not be null"); + + final DynamicFormAssignment result = new DynamicFormAssignment(); + final Filter filter = + new Filter() + .setOr(ResolverUtils.buildConjunctiveCriterionArrayWithOr(input.getOrFilters())); + result.setFilter(filter); + return result; + } + + /** + * Creates a Filter where the provided formUrn is either in completedForms or incompleteForms for + * an entity + */ + private static Filter generateCompleteOrIncompleteFilter(@Nonnull final String formUrn) + throws Exception { + final CriterionArray completedFormsAndArray = new CriterionArray(); + final CriterionArray incompleteFormsAndArray = new CriterionArray(); + completedFormsAndArray.add(buildFormCriterion(formUrn, COMPLETED_FORMS)); + incompleteFormsAndArray.add(buildFormCriterion(formUrn, INCOMPLETE_FORMS)); + // need this to be an OR not two ANDs + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(completedFormsAndArray), + new ConjunctiveCriterion().setAnd(incompleteFormsAndArray))); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field) { + 
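/* Convenience overload: delegates to the three-argument variant below with negated=false. */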
return buildFormCriterion(formUrn, field, false); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field, final boolean negated) { + return new Criterion() + .setField(field) + .setValue(formUrn) + .setCondition(Condition.EQUAL) + .setNegated(negated); + } + + private static boolean isActorExplicitlyAssigned( + @Nonnull final Urn actorUrn, @Nonnull final FormInfo formInfo) { + return (formInfo.getActors().getUsers() != null + && formInfo.getActors().getUsers().stream().anyMatch(user -> user.equals(actorUrn))) + || (formInfo.getActors().getGroups() != null + && formInfo.getActors().getGroups().stream().anyMatch(group -> group.equals(actorUrn))); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java index 3328eff2bdf45..7bfd166b18a20 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java @@ -9,7 +9,7 @@ import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.GetGrantedPrivilegesInput; import com.linkedin.datahub.graphql.generated.Privileges; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index ca1e01b45989d..e65666117b4fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -15,8 +15,8 @@ import com.linkedin.datahub.graphql.generated.RecommendationRenderType; import com.linkedin.datahub.graphql.generated.RecommendationRequestContext; import com.linkedin.datahub.graphql.generated.SearchParams; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.recommendation.EntityRequestContext; import com.linkedin.metadata.recommendation.RecommendationsService; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index 6d23456b76b4f..b54987dc0e9b0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -15,6 +15,7 @@ import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; 
import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -36,6 +37,7 @@ public class AggregateAcrossEntitiesResolver private final EntityClient _entityClient; private final ViewService _viewService; + private final FormService _formService; @Override public CompletableFuture get(DataFetchingEnvironment environment) { @@ -58,7 +60,7 @@ public CompletableFuture get(DataFetchingEnvironment environme context.getAuthentication()) : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); @@ -75,8 +77,8 @@ public CompletableFuture get(DataFetchingEnvironment environme sanitizedQuery, maybeResolvedView != null ? SearchUtils.combineFilters( - baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, + inputFilter, maybeResolvedView.getDefinition().getFilter()) + : inputFilter, 0, 0, // 0 entity count because we don't want resolved entities searchFlags, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index 6a01fa19867ad..f300331ab4bc8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -10,9 +10,9 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleInput; import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleResults; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -66,6 +66,12 @@ public CompletableFuture get(DataFetchingEnvironmen : null; List types = getEntityTypes(input.getTypes(), maybeResolvedView); + types = + types != null + ? 
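/* Drop any requested entity types that autocomplete does not support before batching the calls. */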
types.stream() + .filter(AUTO_COMPLETE_ENTITY_TYPES::contains) + .collect(Collectors.toList()) + : null; if (types != null && types.size() > 0) { return AutocompleteUtils.batchGetAutocompleteResults( types.stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index e54955e1857f0..1a380781385c3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -11,9 +11,9 @@ import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.search.AggregationMetadata; import com.linkedin.metadata.search.AggregationMetadataArray; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index 742d1d170de64..658138ae6e3dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -8,9 +8,9 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ScrollAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.ScrollResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnScrollResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index adab62c22bb72..0af0a3827b1bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -11,8 +11,8 @@ import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageInput; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import 
com.linkedin.datahub.graphql.types.mappers.UrnScrollAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 0f5d2d90ba0c2..2dc5032f2a4eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -9,9 +9,9 @@ import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageInput; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 6821423887923..bc177c600beee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -5,9 +5,9 @@ import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index 6746c30a2edbc..8c45df1b30b26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -21,8 +21,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java new file mode 100644 index 0000000000000..cb0d24839056d --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java @@ -0,0 +1,25 @@ +package com.linkedin.datahub.graphql.resolvers.type; + +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import graphql.TypeResolutionEnvironment; +import graphql.schema.GraphQLObjectType; +import graphql.schema.TypeResolver; + +public class PropertyValueResolver implements TypeResolver { + + public static final String STRING_VALUE = "StringValue"; + public static final String NUMBER_VALUE = "NumberValue"; + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringValue) { + return env.getSchema().getObjectType(STRING_VALUE); + } else if (env.getObject() instanceof NumberValue) { + return env.getSchema().getObjectType(NUMBER_VALUE); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index 9da5f915ff31d..3a676f118c1ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -11,8 +11,8 @@ import com.linkedin.datahub.graphql.generated.DataHubViewFilterInput; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.CriterionArray; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 4c452af126201..18a082fee95f1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -32,6 +32,7 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -192,6 +193,11 @@ public Entity apply(Urn input) { ((OwnershipTypeEntity) partialEntity).setUrn(input.toString()); ((OwnershipTypeEntity) partialEntity).setType(EntityType.CUSTOM_OWNERSHIP_TYPE); } + if 
(input.getEntityType().equals(STRUCTURED_PROPERTY_ENTITY_NAME)) { + partialEntity = new StructuredPropertyEntity(); + ((StructuredPropertyEntity) partialEntity).setUrn(input.toString()); + ((StructuredPropertyEntity) partialEntity).setType(EntityType.STRUCTURED_PROPERTY); + } return partialEntity; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index badb24810c82b..fd31e1d394a92 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -88,6 +88,8 @@ public class DatasetType DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, ACCESS_DATASET_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME, SUB_TYPES_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java new file mode 100644 index 0000000000000..7e5372268170b --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java @@ -0,0 +1,24 @@ +package com.linkedin.datahub.graphql.types.dataset.mappers; + +import com.linkedin.datahub.graphql.generated.DatasetFilter; +import com.linkedin.datahub.graphql.generated.DatasetFilterType; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; + +public class DatasetFilterMapper + implements ModelMapper { + + public static final DatasetFilterMapper INSTANCE = new DatasetFilterMapper(); + + public static DatasetFilter map(@Nonnull final com.linkedin.dataset.DatasetFilter metadata) { + return INSTANCE.apply(metadata); + } + + @Override + public DatasetFilter apply(@Nonnull final com.linkedin.dataset.DatasetFilter input) { + final DatasetFilter result = new DatasetFilter(); + result.setType(DatasetFilterType.valueOf(input.getType().name())); + result.setSql(input.getSql()); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 7fa1decdf7f55..163e8b9288d87 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -38,9 +39,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -53,6 +56,7 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaMetadata; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -151,6 +155,15 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setStructuredProperties( + StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index edc9baf4ba9c5..e0a74d351125f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.metadata.utils.SchemaFieldUtils; import javax.annotation.Nonnull; public class SchemaFieldMapper { @@ -37,6 +40,7 @@ public SchemaField apply( result.setIsPartOfKey(input.isIsPartOfKey()); result.setIsPartitioningKey(input.isIsPartitioningKey()); result.setJsonProps(input.getJsonProps()); + result.setSchemaFieldEntity(this.createSchemaFieldEntity(input, entityUrn)); return result; } @@ -75,4 +79,14 @@ private SchemaFieldDataType mapSchemaFieldDataType( "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } } + + private SchemaFieldEntity createSchemaFieldEntity( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + SchemaFieldEntity schemaFieldEntity = new SchemaFieldEntity(); + schemaFieldEntity.setUrn( + SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), input.getFieldPath()) + .toString()); + schemaFieldEntity.setType(EntityType.SCHEMA_FIELD); + return schemaFieldEntity; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 31381073a16dd..e550280a6c2db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -18,6 +18,11 @@ public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + return apply(input, entityUrn, aspect.getVersion()); + } + + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nonnull final SchemaMetadata input, final Urn entityUrn, final long version) { final com.linkedin.datahub.graphql.generated.SchemaMetadata result = new com.linkedin.datahub.graphql.generated.SchemaMetadata(); @@ -35,7 +40,7 @@ public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( .map(field -> SchemaFieldMapper.map(field, entityUrn)) .collect(Collectors.toList())); result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); + result.setAspectVersion(version); if (input.hasForeignKeys()) { result.setForeignKeys( input.getForeignKeys().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java new file mode 100644 index 0000000000000..612644ae2dbb2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java @@ -0,0 +1,51 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.DataTypeInfo; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; + +public class DataTypeEntityMapper implements ModelMapper<EntityResponse, DataTypeEntity> { + + public static final DataTypeEntityMapper INSTANCE = new DataTypeEntityMapper(); + + public static DataTypeEntity map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataTypeEntity apply(@Nonnull final EntityResponse entityResponse) { + final DataTypeEntity result = new DataTypeEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_TYPE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<DataTypeEntity> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_TYPE_INFO_ASPECT_NAME, this::mapDataTypeInfo); + + // Set the standard Type ENUM for the data type. 
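/* Illustration, with values taken from DataTypeUrnMapper later in this patch: an urn like "urn:li:dataType:datahub.string" resolves to StdDataType.STRING, and unrecognized urns fall back to StdDataType.OTHER. */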
+ if (result.getInfo() != null) { + result.getInfo().setType(DataTypeUrnMapper.getType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapDataTypeInfo(@Nonnull DataTypeEntity dataType, @Nonnull DataMap dataMap) { + com.linkedin.datatype.DataTypeInfo gmsInfo = new com.linkedin.datatype.DataTypeInfo(dataMap); + DataTypeInfo info = new DataTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + dataType.setInfo(info); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java new file mode 100644 index 0000000000000..5ea1680546ce6 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class DataTypeType + implements com.linkedin.datahub.graphql.types.EntityType<DataTypeEntity> { + + public static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(DATA_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.DATA_TYPE; + } + + @Override + public Function<Entity, String> getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class<DataTypeEntity> objectClass() { + return DataTypeEntity.class; + } + + @Override + public List<DataFetcherResult<DataTypeEntity>> batchLoad( + @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception { + final List<Urn> dataTypeUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map<Urn, EntityResponse> entities = + _entityClient.batchGetV2( + DATA_TYPE_ENTITY_NAME, + new HashSet<>(dataTypeUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List<EntityResponse> gmsResults = new ArrayList<>(); + for (Urn urn : dataTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
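/* Null entries are kept so the results stay positionally aligned with the requested urns. */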
null + : DataFetcherResult.<DataTypeEntity>newResult() + .data(DataTypeEntityMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load data type entities", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java new file mode 100644 index 0000000000000..ec71cd63a70d5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java @@ -0,0 +1,40 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.StdDataType; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class DataTypeUrnMapper { + + static final Map<StdDataType, String> DATA_TYPE_ENUM_TO_URN = + ImmutableMap.<StdDataType, String>builder() + .put(StdDataType.STRING, "urn:li:dataType:datahub.string") + .put(StdDataType.NUMBER, "urn:li:dataType:datahub.number") + .put(StdDataType.URN, "urn:li:dataType:datahub.urn") + .put(StdDataType.RICH_TEXT, "urn:li:dataType:datahub.rich_text") + .put(StdDataType.DATE, "urn:li:dataType:datahub.date") + .build(); + + private static final Map<String, StdDataType> URN_TO_DATA_TYPE_ENUM = + DATA_TYPE_ENUM_TO_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private DataTypeUrnMapper() {} + + public static StdDataType getType(String dataTypeUrn) { + if (!URN_TO_DATA_TYPE_ENUM.containsKey(dataTypeUrn)) { + return StdDataType.OTHER; + } + return URN_TO_DATA_TYPE_ENUM.get(dataTypeUrn); + } + + @Nonnull + public static String getUrn(StdDataType dataType) { + if (!DATA_TYPE_ENUM_TO_URN.containsKey(dataType)) { + throw new IllegalArgumentException("Unknown data type: " + dataType); + } + return DATA_TYPE_ENUM_TO_URN.get(dataType); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java new file mode 100644 index 0000000000000..b942ff2325bf7 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java @@ -0,0 +1,54 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.datahub.graphql.generated.EntityTypeInfo; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; + +public class EntityTypeEntityMapper implements ModelMapper<EntityResponse, EntityTypeEntity> { + + public static final EntityTypeEntityMapper INSTANCE = new EntityTypeEntityMapper(); + + public static EntityTypeEntity map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public EntityTypeEntity apply(@Nonnull final EntityResponse entityResponse) { + final EntityTypeEntity result = new EntityTypeEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.ENTITY_TYPE); 
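/* The remaining fields are populated from the entity's aspects via MappingHelper, mirroring DataTypeEntityMapper above. */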
EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper<EntityTypeEntity> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ENTITY_TYPE_INFO_ASPECT_NAME, this::mapEntityTypeInfo); + + // Set the standard Type ENUM for the entity type. + if (result.getInfo() != null) { + result + .getInfo() + .setType(EntityTypeUrnMapper.getEntityType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapEntityTypeInfo(@Nonnull EntityTypeEntity entityType, @Nonnull DataMap dataMap) { + com.linkedin.entitytype.EntityTypeInfo gmsInfo = + new com.linkedin.entitytype.EntityTypeInfo(dataMap); + EntityTypeInfo info = new EntityTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + entityType.setInfo(info); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java similarity index 91% rename from datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java rename to datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java index aba781f9e1dc7..23e793782e8dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.graphql.resolvers; +package com.linkedin.datahub.graphql.types.entitytype; import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.graphql.generated.EntityType; @@ -17,7 +17,6 @@ public class EntityTypeMapper { ImmutableMap.<EntityType, String>builder() .put(EntityType.DATASET, "dataset") .put(EntityType.ROLE, "role") - .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) .put(EntityType.CORP_USER, "corpuser") .put(EntityType.CORP_GROUP, "corpGroup") .put(EntityType.DATA_PLATFORM, "dataPlatform") @@ -41,6 +40,9 @@ public class EntityTypeMapper { .put(EntityType.TEST, "test") .put(EntityType.DATAHUB_VIEW, Constants.DATAHUB_VIEW_ENTITY_NAME) .put(EntityType.DATA_PRODUCT, Constants.DATA_PRODUCT_ENTITY_NAME) + .put(EntityType.SCHEMA_FIELD, "schemaField") + .put(EntityType.STRUCTURED_PROPERTY, Constants.STRUCTURED_PROPERTY_ENTITY_NAME) + .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) .build(); private static final Map<String, EntityType> ENTITY_NAME_TO_TYPE = @@ -52,7 +54,7 @@ private EntityTypeMapper() {} public static EntityType getType(String name) { String lowercaseName = name.toLowerCase(); if (!ENTITY_NAME_TO_TYPE.containsKey(lowercaseName)) { - throw new IllegalArgumentException("Unknown entity name: " + name); + return EntityType.OTHER; } return ENTITY_NAME_TO_TYPE.get(lowercaseName); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java new file mode 100644 index 0000000000000..aa5dfc13ea757 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static 
com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class EntityTypeType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(ENTITY_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.ENTITY_TYPE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return EntityTypeEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List entityTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + ENTITY_TYPE_ENTITY_NAME, + new HashSet<>(entityTypeUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : entityTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(EntityTypeEntityMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load entity type entities", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java new file mode 100644 index 0000000000000..9e9bf86e5fe7f --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java @@ -0,0 +1,85 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.metadata.Constants; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +/** + * In this class we statically map "well-supported" entity types into a more usable Enum class + * served by our GraphQL API. + * + *

When we add new entity types / entity urns, we MAY NEED to update this. + * + *

<p>Note that we currently do not support mapping entities that fall outside of this set. If you + * try to map an entity type without a corresponding enum symbol, the mapping WILL FAIL. + */ +public class EntityTypeUrnMapper { + + static final Map<String, String> ENTITY_NAME_TO_ENTITY_TYPE_URN = + ImmutableMap.<String, String>builder() + .put(Constants.DATASET_ENTITY_NAME, "urn:li:entityType:datahub.dataset") + .put(Constants.ROLE_ENTITY_NAME, "urn:li:entityType:datahub.role") + .put(Constants.CORP_USER_ENTITY_NAME, "urn:li:entityType:datahub.corpuser") + .put(Constants.CORP_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.corpGroup") + .put(Constants.DATA_PLATFORM_ENTITY_NAME, "urn:li:entityType:datahub.dataPlatform") + .put(Constants.DASHBOARD_ENTITY_NAME, "urn:li:entityType:datahub.dashboard") + .put(Constants.CHART_ENTITY_NAME, "urn:li:entityType:datahub.chart") + .put(Constants.TAG_ENTITY_NAME, "urn:li:entityType:datahub.tag") + .put(Constants.DATA_FLOW_ENTITY_NAME, "urn:li:entityType:datahub.dataFlow") + .put(Constants.DATA_JOB_ENTITY_NAME, "urn:li:entityType:datahub.dataJob") + .put(Constants.GLOSSARY_TERM_ENTITY_NAME, "urn:li:entityType:datahub.glossaryTerm") + .put(Constants.GLOSSARY_NODE_ENTITY_NAME, "urn:li:entityType:datahub.glossaryNode") + .put(Constants.ML_MODEL_ENTITY_NAME, "urn:li:entityType:datahub.mlModel") + .put(Constants.ML_MODEL_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.mlModelGroup") + .put(Constants.ML_FEATURE_TABLE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeatureTable") + .put(Constants.ML_FEATURE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeature") + .put(Constants.ML_PRIMARY_KEY_ENTITY_NAME, "urn:li:entityType:datahub.mlPrimaryKey") + .put(Constants.CONTAINER_ENTITY_NAME, "urn:li:entityType:datahub.container") + .put(Constants.DOMAIN_ENTITY_NAME, "urn:li:entityType:datahub.domain") + .put(Constants.NOTEBOOK_ENTITY_NAME, "urn:li:entityType:datahub.notebook") + .put( + Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + "urn:li:entityType:datahub.dataPlatformInstance") + .put(Constants.TEST_ENTITY_NAME, "urn:li:entityType:datahub.test") + .put(Constants.DATAHUB_VIEW_ENTITY_NAME, "urn:li:entityType:datahub.dataHubView") + .put(Constants.DATA_PRODUCT_ENTITY_NAME, "urn:li:entityType:datahub.dataProduct") + .put(Constants.ASSERTION_ENTITY_NAME, "urn:li:entityType:datahub.assertion") + .put(Constants.SCHEMA_FIELD_ENTITY_NAME, "urn:li:entityType:datahub.schemaField") + .build(); + + private static final Map<String, String> ENTITY_TYPE_URN_TO_NAME = + ENTITY_NAME_TO_ENTITY_TYPE_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private EntityTypeUrnMapper() {} + + public static String getName(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + return ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + } + + /* + * Takes in an entityTypeUrn and returns a GraphQL EntityType by first mapping + * the urn to the entity name, and then mapping the entity name to EntityType.
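+ * + * For example, "urn:li:entityType:datahub.dataset" resolves to the entity name + * "dataset", which EntityTypeMapper.getType then maps to EntityType.DATASET.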
+ */ + public static EntityType getEntityType(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + final String entityName = ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + return EntityTypeMapper.getType(entityName); + } + + @Nonnull + public static String getEntityTypeUrn(String name) { + if (!ENTITY_NAME_TO_ENTITY_TYPE_URN.containsKey(name)) { + throw new IllegalArgumentException("Unknown entity name: " + name); + } + return ENTITY_NAME_TO_ENTITY_TYPE_URN.get(name); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java new file mode 100644 index 0000000000000..a0ddd4a5883d2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java @@ -0,0 +1,129 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import com.linkedin.datahub.graphql.generated.FormInfo; +import com.linkedin.datahub.graphql.generated.FormPrompt; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.FormType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertyParams; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class FormMapper implements ModelMapper { + + public static final FormMapper INSTANCE = new FormMapper(); + + public static Form map(@Nonnull final EntityResponse form) { + return INSTANCE.apply(form); + } + + public Form apply(@Nonnull final EntityResponse entityResponse) { + Form result = new Form(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityUrn.toString()); + result.setType(EntityType.FORM); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(FORM_INFO_ASPECT_NAME, this::mapFormInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (form, dataMap) -> + form.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + + return mappingHelper.getResult(); + } + + private void mapFormInfo(@Nonnull Form form, @Nonnull DataMap dataMap) { + com.linkedin.form.FormInfo gmsFormInfo = new com.linkedin.form.FormInfo(dataMap); + FormInfo formInfo = new FormInfo(); + formInfo.setName(gmsFormInfo.getName()); + formInfo.setType(FormType.valueOf(gmsFormInfo.getType().toString())); + if 
(gmsFormInfo.hasDescription()) { + formInfo.setDescription(gmsFormInfo.getDescription()); + } + formInfo.setPrompts(this.mapFormPrompts(gmsFormInfo, form.getUrn())); + formInfo.setActors(mapFormActors(gmsFormInfo.getActors())); + form.setInfo(formInfo); + } + + private List mapFormPrompts( + @Nonnull com.linkedin.form.FormInfo gmsFormInfo, @Nonnull String formUrn) { + List formPrompts = new ArrayList<>(); + if (gmsFormInfo.hasPrompts()) { + gmsFormInfo + .getPrompts() + .forEach(FormPrompt -> formPrompts.add(mapFormPrompt(FormPrompt, formUrn))); + } + return formPrompts; + } + + private FormPrompt mapFormPrompt( + @Nonnull com.linkedin.form.FormPrompt gmsFormPrompt, @Nonnull String formUrn) { + final FormPrompt formPrompt = new FormPrompt(); + formPrompt.setId(gmsFormPrompt.getId()); + formPrompt.setTitle(gmsFormPrompt.getTitle()); + formPrompt.setType(FormPromptType.valueOf(gmsFormPrompt.getType().toString())); + formPrompt.setRequired(gmsFormPrompt.isRequired()); + formPrompt.setFormUrn(formUrn); + if (gmsFormPrompt.hasDescription()) { + formPrompt.setDescription(gmsFormPrompt.getDescription()); + } + + if (gmsFormPrompt.hasStructuredPropertyParams()) { + final StructuredPropertyParams params = new StructuredPropertyParams(); + final Urn structuredPropUrn = gmsFormPrompt.getStructuredPropertyParams().getUrn(); + final StructuredPropertyEntity structuredProp = new StructuredPropertyEntity(); + structuredProp.setUrn(structuredPropUrn.toString()); + structuredProp.setType(EntityType.STRUCTURED_PROPERTY); + params.setStructuredProperty(structuredProp); + formPrompt.setStructuredPropertyParams(params); + } + + return formPrompt; + } + + private FormActorAssignment mapFormActors(com.linkedin.form.FormActorAssignment gmsFormActors) { + FormActorAssignment result = new FormActorAssignment(); + result.setOwners(gmsFormActors.isOwners()); + if (gmsFormActors.hasUsers()) { + result.setUsers( + gmsFormActors.getUsers().stream().map(this::mapUser).collect(Collectors.toList())); + } + if (gmsFormActors.hasGroups()) { + result.setGroups( + gmsFormActors.getGroups().stream().map(this::mapGroup).collect(Collectors.toList())); + } + return result; + } + + private CorpUser mapUser(Urn userUrn) { + CorpUser user = new CorpUser(); + user.setUrn(userUrn.toString()); + return user; + } + + private CorpGroup mapGroup(Urn groupUrn) { + CorpGroup group = new CorpGroup(); + group.setUrn(groupUrn.toString()); + return group; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java new file mode 100644 index 0000000000000..8a09cee353cc9 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java @@ -0,0 +1,76 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import 
graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class FormType implements com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(FORM_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.FORM; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Form.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List formUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + FORM_ENTITY_NAME, + new HashSet<>(formUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : formUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult().data(FormMapper.map(gmsResult)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Forms", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java new file mode 100644 index 0000000000000..43665b37b9ee8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java @@ -0,0 +1,133 @@ +package com.linkedin.datahub.graphql.types.form; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.FieldFormPromptAssociationArray; +import com.linkedin.common.FormPromptAssociationArray; +import com.linkedin.common.Forms; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FieldFormPromptAssociation; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormAssociation; +import com.linkedin.datahub.graphql.generated.FormPromptAssociation; +import com.linkedin.datahub.graphql.generated.FormPromptFieldAssociations; +import com.linkedin.datahub.graphql.generated.FormVerificationAssociation; +import com.linkedin.datahub.graphql.generated.ResolvedAuditStamp; +import java.util.ArrayList; +import java.util.List; +import javax.annotation.Nonnull; + +public class FormsMapper { + + public static final FormsMapper INSTANCE = new FormsMapper(); + + public static com.linkedin.datahub.graphql.generated.Forms map( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + return INSTANCE.apply(forms, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Forms apply( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + final List incompleteForms = new ArrayList<>(); + forms + .getIncompleteForms() + .forEach( + formAssociation -> + incompleteForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List completeForms = new ArrayList<>(); + forms + 
.getCompletedForms() + .forEach( + formAssociation -> + completeForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List verifications = new ArrayList<>(); + forms + .getVerifications() + .forEach( + verificationAssociation -> + verifications.add(this.mapVerificationAssociation(verificationAssociation))); + + return new com.linkedin.datahub.graphql.generated.Forms( + incompleteForms, completeForms, verifications); + } + + private FormAssociation mapFormAssociation( + @Nonnull final com.linkedin.common.FormAssociation association, + @Nonnull final String entityUrn) { + FormAssociation result = new FormAssociation(); + result.setForm( + Form.builder().setType(EntityType.FORM).setUrn(association.getUrn().toString()).build()); + result.setAssociatedUrn(entityUrn); + result.setCompletedPrompts(this.mapPrompts(association.getCompletedPrompts())); + result.setIncompletePrompts(this.mapPrompts(association.getIncompletePrompts())); + return result; + } + + private FormVerificationAssociation mapVerificationAssociation( + @Nonnull final com.linkedin.common.FormVerificationAssociation verificationAssociation) { + FormVerificationAssociation result = new FormVerificationAssociation(); + result.setForm( + Form.builder() + .setType(EntityType.FORM) + .setUrn(verificationAssociation.getForm().toString()) + .build()); + if (verificationAssociation.hasLastModified()) { + result.setLastModified(createAuditStamp(verificationAssociation.getLastModified())); + } + return result; + } + + private List mapPrompts( + @Nonnull final FormPromptAssociationArray promptAssociations) { + List result = new ArrayList<>(); + promptAssociations.forEach( + promptAssociation -> { + FormPromptAssociation association = new FormPromptAssociation(); + association.setId(promptAssociation.getId()); + association.setLastModified(createAuditStamp(promptAssociation.getLastModified())); + if (promptAssociation.hasFieldAssociations()) { + association.setFieldAssociations( + mapFieldAssociations(promptAssociation.getFieldAssociations())); + } + result.add(association); + }); + return result; + } + + private List mapFieldPrompts( + @Nonnull final FieldFormPromptAssociationArray fieldPromptAssociations) { + List result = new ArrayList<>(); + fieldPromptAssociations.forEach( + fieldFormPromptAssociation -> { + FieldFormPromptAssociation association = new FieldFormPromptAssociation(); + association.setFieldPath(fieldFormPromptAssociation.getFieldPath()); + association.setLastModified( + createAuditStamp(fieldFormPromptAssociation.getLastModified())); + result.add(association); + }); + return result; + } + + private FormPromptFieldAssociations mapFieldAssociations( + com.linkedin.common.FormPromptFieldAssociations associationsObj) { + final FormPromptFieldAssociations fieldAssociations = new FormPromptFieldAssociations(); + if (associationsObj.hasCompletedFieldPrompts()) { + fieldAssociations.setCompletedFieldPrompts( + this.mapFieldPrompts(associationsObj.getCompletedFieldPrompts())); + } + if (associationsObj.hasIncompleteFieldPrompts()) { + fieldAssociations.setIncompleteFieldPrompts( + this.mapFieldPrompts(associationsObj.getIncompleteFieldPrompts())); + } + return fieldAssociations; + } + + private ResolvedAuditStamp createAuditStamp(AuditStamp auditStamp) { + final ResolvedAuditStamp resolvedAuditStamp = new ResolvedAuditStamp(); + final CorpUser emptyCreatedUser = new CorpUser(); + emptyCreatedUser.setUrn(auditStamp.getActor().toString()); + resolvedAuditStamp.setActor(emptyCreatedUser); + 
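+ // Only the urn is populated on this CorpUser; the remaining fields are assumed + // to be hydrated downstream by the GraphQL entity resolvers.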
resolvedAuditStamp.setTime(auditStamp.getTime()); + return resolvedAuditStamp; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 901361eb0b2be..31c8cec8cb5fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -8,6 +8,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryNodeProperties; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -36,7 +37,8 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult( GLOSSARY_NODE_INFO_ASPECT_NAME, - (glossaryNode, dataMap) -> glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + (glossaryNode, dataMap) -> + glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap, entityUrn))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, this::mapGlossaryNodeKey); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, @@ -46,13 +48,18 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { return mappingHelper.getResult(); } - private GlossaryNodeProperties mapGlossaryNodeProperties(@Nonnull DataMap dataMap) { + private GlossaryNodeProperties mapGlossaryNodeProperties( + @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { GlossaryNodeInfo glossaryNodeInfo = new GlossaryNodeInfo(dataMap); GlossaryNodeProperties result = new GlossaryNodeProperties(); result.setDescription(glossaryNodeInfo.getDefinition()); if (glossaryNodeInfo.hasName()) { result.setName(glossaryNodeInfo.getName()); } + if (glossaryNodeInfo.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryNodeInfo.getCustomProperties(), entityUrn)); + } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index 8494eace22244..68475a2599158 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.generated.GlossaryTerms; @@ -46,7 +47,15 @@ private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossa resultGlossaryTerm.setName( GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); result.setTerm(resultGlossaryTerm); - 
result.setAssociatedUrn(entityUrn.toString()); + if (input.hasActor()) { + CorpUser actor = new CorpUser(); + actor.setUrn(input.getActor().toString()); + actor.setType(EntityType.CORP_USER); + result.setActor(actor); + } + if (entityUrn != null) { + result.setAssociatedUrn(entityUrn.toString()); + } return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 7c7dab2e02472..b5733626468d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -9,8 +9,8 @@ import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SearchSuggestion; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; import java.net.URISyntaxException; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java new file mode 100644 index 0000000000000..254a1ed1767f1 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java @@ -0,0 +1,54 @@ +package com.linkedin.datahub.graphql.types.schemafield; + +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.StructuredProperties; +import javax.annotation.Nonnull; + +public class SchemaFieldMapper implements ModelMapper { + + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + + public static SchemaFieldEntity map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public SchemaFieldEntity apply(@Nonnull final EntityResponse entityResponse) { + Urn entityUrn = entityResponse.getUrn(); + final SchemaFieldEntity result = this.mapSchemaFieldUrn(entityUrn); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((schemaField, dataMap) -> + schemaField.setStructuredProperties( + StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + + return result; + } + + private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { + try { + SchemaFieldEntity result = new SchemaFieldEntity(); + result.setUrn(urn.toString()); + 
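+ // A schemaField urn has the shape urn:li:schemaField:(<parent entity urn>,<fieldPath>), + // so entity key part 0 is read as the parent urn and part 1 as the field path below.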
result.setType(EntityType.SCHEMA_FIELD); + result.setFieldPath(urn.getEntityKey().get(1)); + Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); + result.setParent(UrnToEntityMapper.map(parentUrn)); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to load schemaField entity", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index b543a40cbac41..9f14bf52733ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ -1,22 +1,40 @@ package com.linkedin.datahub.graphql.types.schemafield; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; -import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +@RequiredArgsConstructor public class SchemaFieldType implements com.linkedin.datahub.graphql.types.EntityType { - public SchemaFieldType() {} + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME); + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; @Override public EntityType type() { @@ -40,29 +58,41 @@ public List> batchLoad( urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - return schemaFieldUrns.stream() - .map(this::mapSchemaFieldUrn) + Map entities = new HashMap<>(); + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + entities = + _entityClient.batchGetV2( + SCHEMA_FIELD_ENTITY_NAME, + new HashSet<>(schemaFieldUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + } + + final List gmsResults = new ArrayList<>(); + for (Urn urn : schemaFieldUrns) { + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + gmsResults.add(entities.getOrDefault(urn, null)); + } else { + gmsResults.add( + new EntityResponse() + .setUrn(urn) + .setAspects(new EnvelopedAspectMap()) + .setEntityName(urn.getEntityType())); + } + } + + return gmsResults.stream() .map( - schemaFieldEntity -> - DataFetcherResult.newResult().data(schemaFieldEntity).build()) + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(SchemaFieldMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to load schemaField entity", e); } } - - private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { - try { - SchemaFieldEntity result = new SchemaFieldEntity(); - result.setUrn(urn.toString()); - result.setType(EntityType.SCHEMA_FIELD); - result.setFieldPath(urn.getEntityKey().get(1)); - Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(parentUrn)); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to load schemaField entity", e); - } - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java new file mode 100644 index 0000000000000..ad48067599328 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java @@ -0,0 +1,80 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class StructuredPropertiesMapper { + + public static final StructuredPropertiesMapper INSTANCE = new StructuredPropertiesMapper(); + + public static com.linkedin.datahub.graphql.generated.StructuredProperties map( + @Nonnull final StructuredProperties structuredProperties) { + return INSTANCE.apply(structuredProperties); + } + + public com.linkedin.datahub.graphql.generated.StructuredProperties apply( + @Nonnull final StructuredProperties structuredProperties) { + com.linkedin.datahub.graphql.generated.StructuredProperties result = + new com.linkedin.datahub.graphql.generated.StructuredProperties(); + result.setProperties( + structuredProperties.getProperties().stream() + .map(this::mapStructuredProperty) + .collect(Collectors.toList())); + return result; + } + + private StructuredPropertiesEntry mapStructuredProperty( + StructuredPropertyValueAssignment valueAssignment) { + StructuredPropertiesEntry entry = new StructuredPropertiesEntry(); + entry.setStructuredProperty(createStructuredPropertyEntity(valueAssignment)); + final List values = new ArrayList<>(); + final List entities = new ArrayList<>(); + valueAssignment + .getValues() + .forEach( + value -> { + if (value.isString()) { + this.mapStringValue(value.getString(), values, entities); + } else if (value.isDouble()) { + values.add(new NumberValue(value.getDouble())); + } + }); + entry.setValues(values); + entry.setValueEntities(entities); + return entry; + } + + private 
StructuredPropertyEntity createStructuredPropertyEntity( + StructuredPropertyValueAssignment assignment) { + StructuredPropertyEntity entity = new StructuredPropertyEntity(); + entity.setUrn(assignment.getPropertyUrn().toString()); + entity.setType(EntityType.STRUCTURED_PROPERTY); + return entity; + } + + private void mapStringValue( + String stringValue, List values, List entities) { + try { + final Urn urnValue = Urn.createFromString(stringValue); + entities.add(UrnToEntityMapper.map(urnValue)); + } catch (Exception e) { + log.debug("String value is not an urn for this structured property entry"); + } + values.add(new StringValue(stringValue)); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java new file mode 100644 index 0000000000000..259020b83bee1 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java @@ -0,0 +1,124 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.StringArrayMap; +import com.linkedin.datahub.graphql.generated.AllowedValue; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyCardinality; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.TypeQualifier; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.PropertyValueArray; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class StructuredPropertyMapper + implements ModelMapper { + + private static final String ALLOWED_TYPES = "allowedTypes"; + + public static final StructuredPropertyMapper INSTANCE = new StructuredPropertyMapper(); + + public static StructuredPropertyEntity map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public StructuredPropertyEntity apply(@Nonnull final EntityResponse entityResponse) { + final StructuredPropertyEntity result = new StructuredPropertyEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.STRUCTURED_PROPERTY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, (this::mapStructuredPropertyDefinition)); + return mappingHelper.getResult(); + } + + private void mapStructuredPropertyDefinition( + @Nonnull StructuredPropertyEntity extendedProperty, @Nonnull DataMap dataMap) { + com.linkedin.structured.StructuredPropertyDefinition 
gmsDefinition = + new com.linkedin.structured.StructuredPropertyDefinition(dataMap); + StructuredPropertyDefinition definition = new StructuredPropertyDefinition(); + definition.setQualifiedName(gmsDefinition.getQualifiedName()); + definition.setCardinality( + PropertyCardinality.valueOf(gmsDefinition.getCardinality().toString())); + definition.setValueType(createDataTypeEntity(gmsDefinition.getValueType())); + if (gmsDefinition.hasDisplayName()) { + definition.setDisplayName(gmsDefinition.getDisplayName()); + } + if (gmsDefinition.getDescription() != null) { + definition.setDescription(gmsDefinition.getDescription()); + } + if (gmsDefinition.hasAllowedValues()) { + definition.setAllowedValues(mapAllowedValues(gmsDefinition.getAllowedValues())); + } + if (gmsDefinition.hasTypeQualifier()) { + definition.setTypeQualifier(mapTypeQualifier(gmsDefinition.getTypeQualifier())); + } + definition.setEntityTypes( + gmsDefinition.getEntityTypes().stream() + .map(this::createEntityTypeEntity) + .collect(Collectors.toList())); + extendedProperty.setDefinition(definition); + } + + private List mapAllowedValues(@Nonnull PropertyValueArray gmsValues) { + List allowedValues = new ArrayList<>(); + gmsValues.forEach( + value -> { + final AllowedValue allowedValue = new AllowedValue(); + if (value.getValue().isString()) { + allowedValue.setValue(new StringValue(value.getValue().getString())); + } else if (value.getValue().isDouble()) { + allowedValue.setValue(new NumberValue(value.getValue().getDouble())); + } + if (value.getDescription() != null) { + allowedValue.setDescription(value.getDescription()); + } + allowedValues.add(allowedValue); + }); + return allowedValues; + } + + private DataTypeEntity createDataTypeEntity(final Urn dataTypeUrn) { + final DataTypeEntity dataType = new DataTypeEntity(); + dataType.setUrn(dataTypeUrn.toString()); + dataType.setType(EntityType.DATA_TYPE); + return dataType; + } + + private TypeQualifier mapTypeQualifier(final StringArrayMap gmsTypeQualifier) { + final TypeQualifier typeQualifier = new TypeQualifier(); + List allowedTypes = gmsTypeQualifier.get(ALLOWED_TYPES); + if (allowedTypes != null) { + typeQualifier.setAllowedTypes( + allowedTypes.stream().map(this::createEntityTypeEntity).collect(Collectors.toList())); + } + return typeQualifier; + } + + private EntityTypeEntity createEntityTypeEntity(final Urn entityTypeUrn) { + return createEntityTypeEntity(entityTypeUrn.toString()); + } + + private EntityTypeEntity createEntityTypeEntity(final String entityTypeUrnStr) { + final EntityTypeEntity entityType = new EntityTypeEntity(); + entityType.setUrn(entityTypeUrnStr); + entityType.setType(EntityType.ENTITY_TYPE); + return entityType; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java new file mode 100644 index 0000000000000..b028563b5253c --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java @@ -0,0 +1,79 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import 
com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class StructuredPropertyType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.STRUCTURED_PROPERTY; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return StructuredPropertyEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List extendedPropertyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + STRUCTURED_PROPERTY_ENTITY_NAME, + new HashSet<>(extendedPropertyUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : extendedPropertyUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(StructuredPropertyMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Queries", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index 8ea06f46d5133..a4bbd685fd4a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -11,8 +11,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilter; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 52451e195ee84..7964f7e4fab23 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -212,6 +212,16 @@ type VisualConfig { """ faviconUrl: String + """ + Custom app title to show in the browser tab + """ + appTitle: String + + """ + Boolean flag disabling viewing the Business Glossary page for users without the 'Manage Glossaries' privilege + """ + hideGlossary: Boolean + """ Configuration for 
the queries tab """ } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index ebb13d32643ed..2ad4982579380 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -700,6 +700,31 @@ type Mutation { deleteOwnershipType( "Urn of the Custom Ownership Type to remove." urn: String!, deleteReferences: Boolean): Boolean + + """ + Submit a response to a prompt from a form collecting metadata on different entities. + Provide the urn of the entity you're submitting a form response for, as well as the required input. + """ + submitFormPrompt(urn: String!, input: SubmitFormPromptInput!): Boolean + + """ + Assign a form to different entities. This is applied as a patch, adding this form to the list + of forms on an entity. + """ + batchAssignForm(input: BatchAssignFormInput!): Boolean + + """ + Creates a filter for a form to apply it to certain entities. Entities that match this filter will have + a given form applied to them. + This feature is ONLY supported in Acryl DataHub. + """ + createDynamicFormAssignment(input: CreateDynamicFormAssignmentInput!): Boolean + + """ + Verifies a form on an entity when all of the required questions on the form are complete and the form + is of type VERIFICATION. + """ + verifyForm(input: VerifyFormInput!): Boolean } """ @@ -910,6 +935,31 @@ enum EntityType { A Role from an organisation """ ROLE + + """ + A structured property on entities + """ + STRUCTURED_PROPERTY + + """ + A form entity on entities + """ + FORM + + """ + A data type registered to DataHub + """ + DATA_TYPE + + """ + A type of entity registered to DataHub + """ + ENTITY_TYPE + + """ + Another entity type - refer to a provided entity type urn. + """ + OTHER } """ @@ -1284,6 +1334,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { """ domain: DomainAssociation + """ + The forms associated with the Dataset + """ + forms: Forms + """ The Roles and the properties to access the dataset """ @@ -1426,6 +1481,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Structured properties about this Dataset + """ + structuredProperties: StructuredProperties } type RoleAssociation { @@ -1529,6 +1589,7 @@ type SiblingProperties { If this entity is the primary sibling among the sibling set """ isPrimary: Boolean + """ The sibling entities """ @@ -1910,6 +1971,12 @@ type GlossaryTerm implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -2047,6 +2114,12 @@ type GlossaryNode implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -2076,6 +2149,11 @@ type GlossaryNodeProperties { Description of the glossary term """ description: String + + """ + Custom properties of the Glossary Node + """ + customProperties: [CustomPropertiesEntry!] } """ @@ -2447,6 +2525,12 @@ type Container implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API.
+ For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -2822,12 +2906,27 @@ type SchemaFieldEntity implements Entity { """ parent: Entity! + """ + Structured properties on this schema field + """ + structuredProperties: StructuredProperties + """ Granular API for querying edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult } +""" +Object containing structured properties for an entity +""" +type StructuredProperties { + """ + Structured properties on this entity + """ + properties: [StructuredPropertiesEntry!] +} + """ Information about an individual field in a Dataset schema """ @@ -2902,6 +3001,11 @@ type SchemaField { For schema fields that have other properties that are not modeled explicitly, represented as a JSON string. """ jsonProps: String + + """ + Schema field entity that exist in the database for this schema field + """ + schemaFieldEntity: SchemaFieldEntity } """ @@ -3444,6 +3548,12 @@ type CorpUser implements Entity { Settings that a user can customize through the datahub ui """ settings: CorpUserSettings + + """ + Experimental API. + For fetching extra aspects that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -3804,6 +3914,12 @@ type CorpGroup implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4005,6 +4121,12 @@ type Tag implements Entity { Deprecated, use properties.description field instead """ description: String @deprecated + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4099,6 +4221,11 @@ type GlossaryTermAssociation { """ term: GlossaryTerm! + """ + The actor who is responsible for the term being added" + """ + actor: CorpUser + """ Reference back to the associated urn for tracking purposes e.g. when sibling nodes are merged together """ @@ -4635,6 +4762,12 @@ type Notebook implements Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4955,6 +5088,12 @@ type Dashboard implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -5265,6 +5404,12 @@ type Chart implements EntityWithRelationships & Entity & BrowsableEntity { Sub Types that this entity implements """ subTypes: SubTypes + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -5622,6 +5767,12 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
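+ # Hedged usage sketch from the query side (the urn is a placeholder; aspectName and + # payload are assumed from the RawAspect type defined elsewhere in this schema): + # { dataFlow(urn: "urn:li:dataFlow:(airflow,example_dag,PROD)") { aspects { aspectName payload } } }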
} """ @@ -5822,6 +5973,12 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -6558,10 +6715,10 @@ type PartitionSpec { """ The partition identifier """ - partition: String! + partition: String """ - The optional time window partition information + The optional time window partition information - required if type is TIMESTAMP_FIELD. """ timePartition: TimeWindow } @@ -6587,7 +6744,6 @@ type TimeWindow { durationMillis: Long! } - """ An assertion represents a programmatic validation, check, or test performed periodically against another Entity. """ @@ -7048,10 +7204,29 @@ type AssertionStdParameter { The type of an AssertionStdParameter """ enum AssertionStdParameterType { + """ + A string value + """ STRING + + """ + A numeric value + """ NUMBER + + """ + A list of values. When used, the value should be formatted as a serialized JSON array. + """ LIST + + """ + A set of values. When used, the value should be formatted as a serialized JSON array. + """ SET + + """ + A value of unknown type + """ UNKNOWN } @@ -8738,6 +8913,12 @@ type MLModel implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -8849,6 +9030,12 @@ type MLModelGroup implements EntityWithRelationships & Entity & BrowsableEntity Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLModelGroupProperties { @@ -8973,6 +9160,12 @@ type MLFeature implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLHyperParam { @@ -9142,6 +9335,12 @@ type MLPrimaryKey implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLPrimaryKeyProperties { @@ -9269,6 +9468,12 @@ type MLFeatureTable implements EntityWithRelationships & Entity & BrowsableEntit Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } type MLFeatureTableEditableProperties { @@ -9577,6 +9782,22 @@ enum CostType { ORG_COST_TYPE } + +""" +Audit stamp containing a resolved actor +""" +type ResolvedAuditStamp { + """ + When the audited action took place + """ + time: Long! + + """ + Who performed the audited action + """ + actor: CorpUser +} + type SubTypes { """ The sub-types that this entity implements. e.g. Datasets that are views will implement the "view" subtype @@ -9644,6 +9865,12 @@ type Domain implements Entity { Edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Experimental API. 
+ For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -10139,6 +10366,12 @@ type DataHubRole implements Entity { The description of the Role """ description: String! + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -11015,6 +11248,12 @@ type DataProduct implements Entity { Tags used for searching Data Product """ tags: GlobalTags + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -11270,3 +11509,94 @@ input UpdateOwnershipTypeInput { """ description: String } + +""" +Describes a generic filter on a dataset +""" +type DatasetFilter { + """ + The type of filter + """ + type: DatasetFilterType! + + """ + The raw query if using a SQL FilterType + """ + sql: String +} + +""" +The type of a dataset filter +""" +enum DatasetFilterType { + """ + Use a SQL string to apply the filter + """ + SQL +} + + +""" +Input required to create or update a DatasetFilter +""" +input DatasetFilterInput { + """ + The type of filter + """ + type: DatasetFilterType! + + """ + The raw query if using a SQL FilterType + """ + sql: String +} + +""" +An entity type registered in DataHub +""" +type EntityTypeEntity implements Entity { + """ + A primary key associated with the entity type + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Info about this type including its name + """ + info: EntityTypeInfo! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +Properties about an individual entity type +""" +type EntityTypeInfo { + """ + The standard entity type + """ + type: EntityType! + + """ + The fully qualified name of the entity type. This includes its namespace + """ + qualifiedName: String! + + """ + The display name of this type + """ + displayName: String + + """ + The description of this type + """ + description: String } diff --git a/datahub-graphql-core/src/main/resources/forms.graphql b/datahub-graphql-core/src/main/resources/forms.graphql new file mode 100644 index 0000000000000..0ff55cfa9f173 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/forms.graphql @@ -0,0 +1,407 @@ +""" +Requirements forms that are assigned to an entity. +""" +type Forms { + """ + Forms that are still incomplete. + """ + incompleteForms: [FormAssociation!]! + + """ + Forms that have been completed. + """ + completedForms: [FormAssociation!]! + + """ + Verifications that have been applied to the entity via completed forms. + """ + verifications: [FormVerificationAssociation!]! +} + +type FormAssociation { + """ + The form related to the associated urn + """ + form: Form! + + """ + Reference back to the urn with the form on it for tracking purposes e.g. when sibling nodes are merged together + """ + associatedUrn: String! + + """ + The prompts that still need to be completed for this form + """ + incompletePrompts: [FormPromptAssociation!] + + """ + The prompts that are already completed for this form + """ + completedPrompts: [FormPromptAssociation!] +} + +""" +Verification object that has been applied to the entity via a completed form. +""" +type FormVerificationAssociation { + """ + The form related to the associated urn + """ + form: Form!
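+# Hedged example of how these associations surface in a response, using only fields +# defined in this file: +# forms { verifications { form { urn } lastModified { time actor { urn } } } }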
+ + """ + When this verification was applied to this entity + """ + lastModified: ResolvedAuditStamp +} + +""" +A form that helps with filling out metadata on an entity +""" +type FormPromptAssociation { + """ + The unique id of the form prompt + """ + id: String! + + """ + When and by whom this form prompt has last been modified + """ + lastModified: ResolvedAuditStamp! + + """ + Optional information about the field-level prompt associations. + """ + fieldAssociations: FormPromptFieldAssociations +} + +""" +Information about the field-level prompt associations. +""" +type FormPromptFieldAssociations { + """ + If this form prompt is for fields, this will contain a list of completed associations per field + """ + completedFieldPrompts: [FieldFormPromptAssociation!] + + """ + If this form prompt is for fields, this will contain a list of incomlete associations per field + """ + incompleteFieldPrompts: [FieldFormPromptAssociation!] +} + +""" +An association for field-level form prompts +""" +type FieldFormPromptAssociation { + """ + The schema field path + """ + fieldPath: String! + + """ + When and by whom this form field-level prompt has last been modified + """ + lastModified: ResolvedAuditStamp! +} + +""" +A form that helps with filling out metadata on an entity +""" +type Form implements Entity { + """ + A primary key associated with the Form + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Information about this form + """ + info: FormInfo! + + """ + Ownership metadata of the form + """ + ownership: Ownership + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +The type of a form. This is optional on a form entity +""" +enum FormType { + """ + This form is used for "verifying" entities as a state for governance and compliance + """ + VERIFICATION + + """ + This form is used to help with filling out metadata on entities + """ + COMPLETION +} + +""" +Properties about an individual Form +""" +type FormInfo { + """ + The name of this form + """ + name: String! + + """ + The description of this form + """ + description: String + + """ + The type of this form + """ + type: FormType! + + """ + The prompt for this form + """ + prompts: [FormPrompt!]! + + """ + The actors that are assigned to complete the forms for the associated entities. + """ + actors: FormActorAssignment! +} + +""" +A prompt shown to the user to collect metadata about an entity +""" +type FormPrompt { + """ + The ID of this prompt. This will be globally unique. + """ + id: String! + + """ + The title of this prompt + """ + title: String! + + """ + The urn of the parent form that this prompt is part of + """ + formUrn: String! + + """ + The description of this prompt + """ + description: String + + """ + The description of this prompt + """ + type: FormPromptType! + + """ + Whether the prompt is required for the form to be considered completed. + """ + required: Boolean! + + """ + The params for this prompt if type is STRUCTURED_PROPERTY + """ + structuredPropertyParams: StructuredPropertyParams +} + +""" +Enum of all form prompt types +""" +enum FormPromptType { + """ + A structured property form prompt type. + """ + STRUCTURED_PROPERTY + """ + A schema field-level structured property form prompt type. 
+ """ + FIELDS_STRUCTURED_PROPERTY +} + +""" +A prompt shown to the user to collect metadata about an entity +""" +type StructuredPropertyParams { + """ + The structured property required for the prompt on this entity + """ + structuredProperty: StructuredPropertyEntity! +} + +""" +Input for responding to a singular prompt in a form +""" +input SubmitFormPromptInput { + """ + The unique ID of the prompt this input is responding to + """ + promptId: String! + + """ + The urn of the form that this prompt is a part of + """ + formUrn: String! + + """ + The type of prompt that this input is responding to + """ + type: FormPromptType! + + """ + The fieldPath on a schema field that this prompt submission is association with. + This should be provided when the prompt is type FIELDS_STRUCTURED_PROPERTY + """ + fieldPath: String + + """ + The structured property required for the prompt on this entity + """ + structuredPropertyParams: StructuredPropertyInputParams +} + +""" +Input for responding to a singular prompt in a form for a batch of entities +""" +input BatchSubmitFormPromptInput { + """ + The urns of the entities this prompt submission is for + """ + assetUrns: [String!]! + + """ + Input for responding to a specific prompt on a form + """ + input: SubmitFormPromptInput +} + +""" +Input for collecting structured property values to apply to entities +""" +input PropertyValueInput { + """ + The string value for this structured property + """ + stringValue: String + + """ + The number value for this structured property + """ + numberValue: Float +} + +""" +A prompt shown to the user to collect metadata about an entity +""" +input StructuredPropertyInputParams { + """ + The urn of the structured property being applied to an entity + """ + structuredPropertyUrn: String! + + """ + The list of values you want to apply on this structured property to an entity + """ + values: [PropertyValueInput!]! +} + +""" +Input for batch assigning a form to different entities +""" +input BatchAssignFormInput { + """ + The urn of the form being assigned to entities + """ + formUrn: String! + + """ + The entities that this form is being assigned to + """ + entityUrns: [String!]! +} + +""" +Input for batch assigning a form to different entities +""" +input CreateDynamicFormAssignmentInput { + """ + The urn of the form being assigned to entities that match some criteria + """ + formUrn: String! + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters). + Entities that match this filter will have this form applied to them. + Currently, we only support a set of fields to filter on and they are: + (1) platform (2) subType (3) container (4) _entityType (5) domain + """ + orFilters: [AndFilterInput!]! +} + +type FormActorAssignment { + """ + Whether the form should be completed by owners of the assets which the form is applied to. + """ + owners: Boolean! + + """ + Urns of the users that the form is assigned to. If null, then no users are specifically targeted. + """ + users: [CorpUser!] + + """ + Groups that the form is assigned to. If null, then no groups are specifically targeted. + """ + groups: [CorpGroup!] + + """ + Whether or not the current actor is universally assigned to this form, either by user or by group. + Note that this does not take into account entity ownership based assignment. + """ + isAssignedToMe: Boolean! +} + +""" +Input for verifying forms on entities +""" +input VerifyFormInput { + """ + The urn of the form being verified on an entity + """ + formUrn: String! 
+ + """ + The urn of the entity that is having a form verified on it + """ + entityUrn: String! +} + +""" +Input for verifying a batch of entities for a give form +""" +input BatchVerifyFormInput { + """ + The urns of the entities getting verified for this form + """ + assetUrns: [String!]! + + """ + The urn of the form being verified on the given entities + """ + formUrn: String! +} diff --git a/datahub-graphql-core/src/main/resources/properties.graphql b/datahub-graphql-core/src/main/resources/properties.graphql new file mode 100644 index 0000000000000..2bed0f1155ff1 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/properties.graphql @@ -0,0 +1,243 @@ +""" +A structured property that can be shared between different entities +""" +type StructuredPropertyEntity implements Entity { + """ + A primary key associated with the structured property + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Definition of this structured property including its name + """ + definition: StructuredPropertyDefinition! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +Properties about an individual Query +""" +type StructuredPropertyDefinition { + """ + The fully qualified name of the property. This includes its namespace + """ + qualifiedName: String! + + """ + The display name of this structured property + """ + displayName: String + + """ + The description of this property + """ + description: String + + """ + The cardinality of a Structured Property determining whether one or multiple values + can be applied to the entity from this property. + """ + cardinality: PropertyCardinality + + """ + A list of allowed values that the property is allowed to take. + """ + allowedValues: [AllowedValue!] + + """ + The type of this structured property + """ + valueType: DataTypeEntity! + + """ + Allows for type specialization of the valueType to be more specific about which + entity types are allowed, for example. + """ + typeQualifier: TypeQualifier + + """ + Entity types that this structured property can be applied to + """ + entityTypes: [EntityTypeEntity!]! +} + +""" +An entry for an allowed value for a structured property +""" +type AllowedValue { + """ + The allowed value + """ + value: PropertyValue! + + """ + The description of this allowed value + """ + description: String +} + +""" +The cardinality of a Structured Property determining whether one or multiple values +can be applied to the entity from this property. +""" +enum PropertyCardinality { + """ + Only one value of this property can applied to an entity + """ + SINGLE + + """ + Multiple values of this property can applied to an entity + """ + MULTIPLE +} + +""" +Allows for type specialization of the valueType to be more specific about which +entity types are allowed, for example. +""" +type TypeQualifier { + """ + The list of allowed entity types + """ + allowedTypes: [EntityTypeEntity!] +} + +""" +String property value +""" +type StringValue { + """ + The value of a string type property + """ + stringValue: String! +} + +""" +Numeric property value +""" +type NumberValue { + """ + The value of a number type property + """ + numberValue: Float! 
+} + +""" +The value of a property +""" +union PropertyValue = StringValue | NumberValue + +""" +An entry in an structured properties list represented as a tuple +""" +type StructuredPropertiesEntry { + """ + The key of the map entry + """ + structuredProperty: StructuredPropertyEntity! + + """ + The values of the structured property for this entity + """ + values: [PropertyValue]! + + """ + The optional entities associated with the values if the values are entity urns + """ + valueEntities: [Entity] +} + +""" +A data type registered in DataHub +""" +type DataTypeEntity implements Entity { + """ + A primary key associated with the Query + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Info about this type including its name + """ + info: DataTypeInfo! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +A well-supported, standard DataHub Data Type. +""" +enum StdDataType { + """ + String data type + """ + STRING + + """ + Number data type + """ + NUMBER + + """ + Urn data type + """ + URN + + """ + Rich text data type. Right now this is markdown only. + """ + RICH_TEXT + + """ + Date data type in format YYYY-MM-DD + """ + DATE + + """ + Any other data type - refer to a provided data type urn. + """ + OTHER +} + +""" +Properties about an individual data type +""" +type DataTypeInfo { + """ + The standard data type + """ + type: StdDataType! + + """ + The fully qualified name of the type. This includes its namespace + """ + qualifiedName: String! + + """ + The display name of this type + """ + displayName: String + + """ + The description of this type + """ + description: String +} diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index 8f2377edb546e..8896dd02b5ad3 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -1139,7 +1139,7 @@ type QuickFilter { } """ -Freshness stats for a query result. +Freshness stats for a query result. Captures whether the query was served out of a cache, what the staleness was, etc. """ type FreshnessStats { @@ -1154,7 +1154,7 @@ type FreshnessStats { In case an index was consulted, this reflects the freshness of the index """ systemFreshness: [SystemFreshness] - + } type SystemFreshness { @@ -1303,4 +1303,4 @@ input SortCriterion { The order in which we will be sorting """ sortOrder: SortOrder! -} +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/tests.graphql b/datahub-graphql-core/src/main/resources/tests.graphql index 9dce48ac60d83..579f4919bdc78 100644 --- a/datahub-graphql-core/src/main/resources/tests.graphql +++ b/datahub-graphql-core/src/main/resources/tests.graphql @@ -44,6 +44,7 @@ Definition of the test type TestDefinition { """ JSON-based def for the test + Deprecated! JSON representation is no longer supported. """ json: String } @@ -209,6 +210,7 @@ input UpdateTestInput { input TestDefinitionInput { """ The string representation of the Test + Deprecated! JSON representation is no longer supported. 
""" json: String } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index de507eda8cdef..b75530773c352 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -122,13 +122,7 @@ public static void verifyIngestProposal( int numberOfInvocations, List proposals) { AspectsBatchImpl batch = - AspectsBatchImpl.builder() - .mcps( - proposals, - mock(AuditStamp.class), - mockService.getEntityRegistry(), - mockService.getSystemEntityClient()) - .build(); + AspectsBatchImpl.builder().mcps(proposals, mock(AuditStamp.class), mockService).build(); Mockito.verify(mockService, Mockito.times(numberOfInvocations)) .ingestProposal(Mockito.eq(batch), Mockito.eq(false)); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index 433772d7e2cfe..c565e771a0475 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -26,6 +26,7 @@ import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; @@ -44,6 +45,7 @@ public class BrowseV2ResolverTest { @Test public static void testBrowseV2Success() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = initMockEntityClient( @@ -70,7 +72,8 @@ public static void testBrowseV2Success() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -87,6 +90,7 @@ public static void testBrowseV2Success() throws Exception { @Test public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); List orFilters = new ArrayList<>(); @@ -123,7 +127,8 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -143,6 +148,7 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { @Test public static void testBrowseV2SuccessWithView() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); + FormService mockFormService = Mockito.mock(FormService.class); 
ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); EntityClient mockClient = @@ -170,7 +176,8 @@ public static void testBrowseV2SuccessWithView() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, viewService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index 9596abf55d04f..c6e6cdc7f018e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java new file mode 100644 index 0000000000000..0fe57d0a28fff --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java @@ -0,0 +1,167 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IsFormAssignedToMeResolverTest { + + private static final Urn TEST_USER_1 = UrnUtils.getUrn("urn:li:corpuser:test-1"); + private static final Urn TEST_USER_2 = UrnUtils.getUrn("urn:li:corpuser:test-2"); + private static final Urn TEST_GROUP_1 = UrnUtils.getUrn("urn:li:corpGroup:test-1"); + private static final Urn TEST_GROUP_2 = UrnUtils.getUrn("urn:li:corpGroup:test-2"); + + @Test + public void testGetSuccessUserMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, Collections.emptyList()); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); + + 
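+    // A direct user match should short-circuit the assignment check, so no group
+    // membership lookup is expected (asserted below via verifyNoMoreInteractions).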
FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. + } + + @Test + public void testGetSuccessGroupMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessBothMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); // is matching user + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. 
+ } + + @Test + public void testGetSuccessNoMatchNullAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchEmptyAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + actors.setUsers(Collections.emptyList()); + actors.setGroups(Collections.emptyList()); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchNoAssignmentMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_2.toString()); // Does not match. + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + private GroupService mockGroupService(final Urn userUrn, final List groupUrns) + throws Exception { + GroupService mockService = Mockito.mock(GroupService.class); + Mockito.when( + mockService.getGroupsForUser(Mockito.eq(userUrn), Mockito.any(Authentication.class))) + .thenReturn(groupUrns); + return mockService; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java new file mode 100644 index 0000000000000..192f4ff9aa7c7 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java @@ -0,0 +1,122 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.UrnUtils; 
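+// The cases below cover: authorized verification succeeding, unauthorized callers
+// being rejected before the service call, and service-level failures surfacing.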
+import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class VerifyFormResolverTest { + + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final VerifyFormInput TEST_INPUT = + new VerifyFormInput(TEST_FORM_URN, TEST_DATASET_URN); + + @Test + public void testGetSuccess() throws Exception { + FormService mockFormService = initMockFormService(true, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + boolean success = resolver.get(mockEnv).get(); + + assertTrue(success); + + // Validate that we called verify on the service + Mockito.verify(mockFormService, Mockito.times(1)) + .verifyFormForEntity( + Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN)), + Mockito.eq(UrnUtils.getUrn(TEST_DATASET_URN)), + Mockito.any(Authentication.class)); + } + + @Test + public void testGetUnauthorized() throws Exception { + FormService mockFormService = initMockFormService(false, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do not call verify on the service + Mockito.verify(mockFormService, Mockito.times(0)) + .verifyFormForEntity(Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } + + @Test + public void testThrowErrorOnVerification() throws Exception { + FormService mockFormService = initMockFormService(true, false); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do call verifyFormForEntity but an error is thrown + Mockito.verify(mockFormService, Mockito.times(1)) + .verifyFormForEntity(Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } + + private FormService initMockFormService( + final boolean isFormAssignedToUser, final boolean shouldVerify) throws Exception { + FormService service = Mockito.mock(FormService.class); + Mockito.when( + 
service.isFormAssignedToUser( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class))) + .thenReturn(isFormAssignedToUser); + + if (shouldVerify) { + Mockito.when( + service.verifyFormForEntity( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class))) + .thenReturn(true); + } else { + Mockito.when( + service.verifyFormForEntity( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class))) + .thenThrow(new RuntimeException()); + } + + return service; + } + + private GroupService initMockGroupService() throws Exception { + GroupService service = Mockito.mock(GroupService.class); + Mockito.when(service.getGroupsForUser(Mockito.any(), Mockito.any(Authentication.class))) + .thenReturn(new ArrayList<>()); + + return service; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java index bec141bddf260..6ae2fa7dcbf64 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -46,7 +47,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) - .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); + .rollbackIngestion(Mockito.eq(RUN_ID), any(), any(Authentication.class)); } @Test @@ -58,7 +59,7 @@ public void testRollbackIngestionMethod() throws Exception { resolver.rollbackIngestion(RUN_ID, mockContext).get(); Mockito.verify(mockClient, Mockito.times(1)) - .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); + .rollbackIngestion(Mockito.eq(RUN_ID), any(), any(Authentication.class)); } @Test @@ -66,7 +67,7 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RuntimeException.class) .when(mockClient) - .rollbackIngestion(Mockito.any(), Mockito.any(Authentication.class)); + .rollbackIngestion(any(), any(), any(Authentication.class)); RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 8fc5ab6ebb828..05387123f9c96 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -22,7 +22,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.Constants; 
import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; @@ -74,7 +73,7 @@ public class MutableTypeBatchResolverTest { @Test public void testGetSuccess() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); BatchMutableType batchMutableType = new DatasetType(mockClient); @@ -167,7 +166,7 @@ public void testGetSuccess() throws Exception { @Test public void testGetFailureUnauthorized() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); BatchMutableType batchMutableType = new DatasetType(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c7d397c5a4a73..4d56cc3d52af8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -15,7 +15,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; @@ -27,6 +27,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; @@ -52,6 +53,7 @@ public static void testApplyViewNullBaseFilter() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + FormService mockFormService = Mockito.mock(FormService.class); List facets = ImmutableList.of("platform", "domains"); @@ -71,7 +73,7 @@ public static void testApplyViewNullBaseFilter() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -102,6 +104,7 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); @@ -122,7 +125,7 @@ public static void testApplyViewBaseFilter() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, 
mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -166,6 +169,7 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of("platform"); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); EntityClient mockClient = @@ -184,7 +188,7 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); @@ -217,6 +221,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of(); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); EntityClient mockClient = @@ -235,7 +240,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -267,6 +272,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
+ FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, null); List searchEntityTypes = @@ -290,7 +296,7 @@ public static void testApplyViewViewDoesNotExist() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); @@ -306,6 +312,7 @@ public static void testApplyViewViewDoesNotExist() throws Exception { @Test public static void testErrorFetchingResults() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -322,7 +329,7 @@ public static void testErrorFetchingResults() throws Exception { .thenThrow(new RemoteInvocationException()); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index 29a2b3081aefe..f5accdfb02043 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -9,8 +9,8 @@ import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.AggregationMetadata; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index d0bbfd126b9b9..0b8c1f1aeb83f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -15,7 +15,7 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java index 909ceeb8f3bab..ff8bd542fbdff 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade; +import com.linkedin.gms.factory.auth.AuthorizerChainFactory; +import com.linkedin.gms.factory.auth.DataHubAuthorizerFactory; import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -19,7 +21,11 @@ excludeFilters = { @ComponentScan.Filter( type = FilterType.ASSIGNABLE_TYPE, - classes = ScheduledAnalyticsFactory.class) + classes = { + ScheduledAnalyticsFactory.class, + AuthorizerChainFactory.class, + DataHubAuthorizerFactory.class + }) }) public class UpgradeCliApplication { public static void main(String[] args) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java index dd6c3fd1e44aa..4be39ac3c4bfc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java @@ -4,14 +4,16 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import java.util.function.Function; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class GMSDisableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient entityClient; @Override public String id() { @@ -27,9 +29,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entityClient.setWritable(false); + entityClient.setWritable(false); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode off in GMS", e); context.report().addLine("Failed to turn write mode off in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java index 8a0d374d6ee3e..09713dc78ee27 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java @@ -4,13 +4,15 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import 
java.util.function.Function; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class GMSEnableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient entityClient; @Override public String id() { @@ -26,9 +28,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entityClient.setWritable(true); + entityClient.setWritable(true); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode back on in GMS", e); context.report().addLine("Failed to turn write mode back on in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index abd144bf453ed..406963c58fd71 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -11,7 +11,7 @@ public class BackfillBrowsePathsV2Config { @Bean public BackfillBrowsePathsV2 backfillBrowsePathsV2( - EntityService entityService, SearchService searchService) { + EntityService entityService, SearchService searchService) { return new BackfillBrowsePathsV2(entityService, searchService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index 1e9298bc60612..caa45988733df 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -3,7 +3,9 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.graph.GraphService; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; @@ -20,7 +22,9 @@ public BuildIndices buildIndices( final GraphService graphService, final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao, + final EntityRegistry entityRegistry) { return new BuildIndices( systemMetadataService, @@ -28,6 +32,8 @@ public BuildIndices buildIndices( entitySearchService, graphService, baseElasticSearchComponents, - configurationProvider); + configurationProvider, + aspectDao, + entityRegistry); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index d968e8521867e..741aeece1cf62 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -1,7 +1,7 @@ package com.linkedin.datahub.upgrade.config; import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; @@ -21,14 +21,13 @@ public class NoCodeUpgradeConfig { @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeUpgrade") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) + @DependsOn({"ebeanServer", "entityService", "systemEntityClient", "entityRegistry"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = - applicationContext.getBean(SystemRestliEntityClient.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); + final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java index 0b46133209382..5bf1241e21305 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java @@ -8,7 +8,7 @@ @Configuration public class RemoveUnknownAspectsConfig { @Bean(name = "removeUnknownAspects") - public RemoveUnknownAspects removeUnknownAspects(EntityService entityService) { + public RemoveUnknownAspects removeUnknownAspects(EntityService entityService) { return new RemoveUnknownAspects(entityService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 116d62878f5c6..ec6e5a4a8f04d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -1,7 +1,7 @@ package com.linkedin.datahub.upgrade.config; import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -25,7 +25,7 @@ public class RestoreBackupConfig { @DependsOn({ "ebeanServer", "entityService", - "systemRestliEntityClient", + "systemEntityClient", "graphService", "searchService", "entityRegistry" @@ -34,9 +34,8 @@ public class RestoreBackupConfig { @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - 
final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = - applicationContext.getBean(SystemRestliEntityClient.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); + final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class); final GraphService graphClient = applicationContext.getBean(GraphService.class); final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index 9d229f315d709..008bdf5cfac38 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -3,7 +3,6 @@ import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import javax.annotation.Nonnull; @@ -21,19 +20,17 @@ public class RestoreIndicesConfig { @Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") - @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) + @DependsOn({"ebeanServer", "entityService", "searchService", "graphService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); - final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreIndices( - ebeanServer, entityService, entityRegistry, entitySearchService, graphService); + return new RestoreIndices(ebeanServer, entityService, entitySearchService, graphService); } @Bean(name = "restoreIndices") @@ -41,6 +38,6 @@ public RestoreIndices createInstance() { @Nonnull public RestoreIndices createNotImplInstance() { log.warn("restoreIndices is not supported for cassandra!"); - return new RestoreIndices(null, null, null, null, null); + return new RestoreIndices(null, null, null, null); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java index 6cc94fbed5bf3..57e16eb72d025 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java @@ -8,49 +8,33 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import lombok.Getter; +import lombok.experimental.Accessors; +@Getter 
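+// With fluent accessors, Lombok generates upgrade(), report(), args() and
+// parsedArgs() for the fields below, replacing the hand-written getters removed
+// in this change.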
+@Accessors(fluent = true) public class DefaultUpgradeContext implements UpgradeContext { - private final Upgrade _upgrade; - private final UpgradeReport _report; - private final List _previousStepResults; - private final List _args; - private final Map> _parsedArgs; + private final Upgrade upgrade; + private final UpgradeReport report; + private final List previousStepResults; + private final List args; + private final Map> parsedArgs; DefaultUpgradeContext( Upgrade upgrade, UpgradeReport report, List previousStepResults, List args) { - _upgrade = upgrade; - _report = report; - _previousStepResults = previousStepResults; - _args = args; - _parsedArgs = UpgradeUtils.parseArgs(args); - } - - @Override - public Upgrade upgrade() { - return _upgrade; + this.upgrade = upgrade; + this.report = report; + this.previousStepResults = previousStepResults; + this.args = args; + this.parsedArgs = UpgradeUtils.parseArgs(args); } @Override public List stepResults() { - return _previousStepResults; - } - - @Override - public UpgradeReport report() { - return _report; - } - - @Override - public List args() { - return _args; - } - - @Override - public Map> parsedArgs() { - return _parsedArgs; + return previousStepResults; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java index 623c8a71e861d..bddf53a274905 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java @@ -16,7 +16,9 @@ import java.util.List; import java.util.Map; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class DefaultUpgradeManager implements UpgradeManager { private final Map _upgrades = new HashMap<>(); @@ -137,6 +139,7 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte break; } } catch (Exception e) { + log.error("Caught exception during attempt {} of Step with id {}", i, step.id(), e); context .report() .addLine( diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index 674efb2b8ba78..1524a015e414e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -6,7 +6,7 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep; import com.linkedin.datahub.upgrade.common.steps.GMSQualificationStep; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; @@ -28,9 +28,9 @@ public class NoCodeUpgrade implements Upgrade { // Upgrade requires the Database. 
public NoCodeUpgrade( @Nullable final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient) { + final SystemEntityClient entityClient) { if (server != null) { _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); _cleanupSteps = buildCleanupSteps(); @@ -61,9 +61,9 @@ private List buildCleanupSteps() { private List buildUpgradeSteps( final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient) { + final SystemEntityClient entityClient) { final List steps = new ArrayList<>(); steps.add(new RemoveAspectV2TableStep(server)); steps.add(new GMSQualificationStep(ImmutableMap.of("noCode", "true"))); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java index 7e55dcddc639f..74d97767d1c39 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java @@ -17,7 +17,7 @@ public class RemoveClientIdAspectStep implements UpgradeStep { private static final String INVALID_CLIENT_ID_ASPECT = "clientId"; - private final EntityService _entityService; + private final EntityService _entityService; @Override public String id() { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java index dc95b7605ef88..3ea449051b355 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java @@ -12,7 +12,7 @@ public class RemoveUnknownAspects implements Upgrade { private final List _steps; - public RemoveUnknownAspects(final EntityService entityService) { + public RemoveUnknownAspects(final EntityService entityService) { _steps = buildSteps(entityService); } @@ -26,7 +26,7 @@ public List steps() { return _steps; } - private List buildSteps(final EntityService entityService) { + private List buildSteps(final EntityService entityService) { final List steps = new ArrayList<>(); steps.add(new RemoveClientIdAspectStep(entityService)); return steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index 4ac295b4fdfb7..bcaeaa34e8936 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -8,7 +8,7 @@ import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep; import com.linkedin.datahub.upgrade.common.steps.GMSDisableWriteModeStep; import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import 
com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -24,9 +24,9 @@ public class RestoreBackup implements Upgrade { public RestoreBackup( @Nullable final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient, + final SystemEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { if (server != null) { @@ -50,9 +50,9 @@ public List steps() { private List buildSteps( final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient, + final SystemEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { final List steps = new ArrayList<>(); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 5c4e8cdc47e34..c756407832a36 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -47,7 +47,7 @@ public class RestoreStorageStep implements UpgradeStep { private final ExecutorService _gmsThreadPool; public RestoreStorageStep( - final EntityService entityService, final EntityRegistry entityRegistry) { + final EntityService entityService, final EntityRegistry entityRegistry) { _entityService = entityService; _entityRegistry = entityRegistry; _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java index 212f0da9f592d..c6839c0e63f05 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java @@ -9,6 +9,7 @@ * Strings */ public interface BackupReader { + String getName(); @Nonnull diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index f46bb9b05624d..9bc42e23a9974 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -8,7 +8,6 @@ import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import java.util.ArrayList; @@ -32,12 +31,11 @@ public class RestoreIndices implements Upgrade { public RestoreIndices( @Nullable final Database server, - final EntityService entityService, - final EntityRegistry entityRegistry, + final EntityService entityService, final EntitySearchService entitySearchService, final GraphService graphService) { if (server != 
-      _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService);
+      _steps = buildSteps(server, entityService, entitySearchService, graphService);
     } else {
       _steps = List.of();
     }
@@ -55,14 +53,13 @@ public List<UpgradeStep> steps() {
 
   private List<UpgradeStep> buildSteps(
       final Database server,
-      final EntityService entityService,
-      final EntityRegistry entityRegistry,
+      final EntityService<?> entityService,
       final EntitySearchService entitySearchService,
       final GraphService graphService) {
     final List<UpgradeStep> steps = new ArrayList<>();
     steps.add(new ClearSearchServiceStep(entitySearchService, false));
     steps.add(new ClearGraphServiceStep(graphService, false));
-    steps.add(new SendMAEStep(server, entityService, entityRegistry));
+    steps.add(new SendMAEStep(server, entityService));
     return steps;
   }
 
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
index bedf200a1c055..aca27892d2e3a 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java
@@ -10,7 +10,6 @@
 import com.linkedin.metadata.entity.ebean.EbeanAspectV2;
 import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs;
 import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult;
-import com.linkedin.metadata.models.registry.EntityRegistry;
 import io.ebean.Database;
 import io.ebean.ExpressionList;
 import java.util.ArrayList;
@@ -23,7 +22,9 @@
 import java.util.concurrent.Future;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.function.Function;
+import lombok.extern.slf4j.Slf4j;
 
+@Slf4j
 public class SendMAEStep implements UpgradeStep {
 
   private static final int DEFAULT_BATCH_SIZE = 1000;
@@ -51,10 +52,7 @@ public RestoreIndicesResult call() {
     }
   }
 
-  public SendMAEStep(
-      final Database server,
-      final EntityService entityService,
-      final EntityRegistry entityRegistry) {
+  public SendMAEStep(final Database server, final EntityService<?> entityService) {
     _server = server;
     _entityService = entityService;
   }
@@ -77,7 +75,7 @@ private List iterateFutures(List
[… diff text lost in extraction: angle-bracketed generics were stripped from this region, taking with them the remainder of this SendMAEStep hunk plus the diff header and import changes of the next file, datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java …]
         indexedServices =
             Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService)
@@ -36,7 +40,13 @@ public BuildIndices(
             .map(service -> (ElasticSearchIndexed) service)
             .collect(Collectors.toList());
 
-    _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider);
+    _steps =
+        buildSteps(
+            indexedServices,
+            baseElasticSearchComponents,
+            configurationProvider,
+            aspectDao,
+            entityRegistry);
   }
 
   @Override
@@ -53,13 +63,19 @@ private List<UpgradeStep> buildSteps(
       final List<ElasticSearchIndexed> indexedServices,
       final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents
           baseElasticSearchComponents,
-      final ConfigurationProvider configurationProvider) {
+      final ConfigurationProvider configurationProvider,
+      final AspectDao aspectDao,
+      final EntityRegistry entityRegistry) {
     final List<UpgradeStep> steps = new ArrayList<>();
     // Disable ES write mode/change refresh rate and clone indices
     steps.add(
         new BuildIndicesPreStep(
-            baseElasticSearchComponents, indexedServices, configurationProvider));
+            baseElasticSearchComponents,
+            indexedServices,
+            configurationProvider,
+            aspectDao,
+            entityRegistry));
     // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService
     steps.add(new BuildIndicesStep(indexedServices));
     // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java
index c25888be07f89..894075417a349 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java
@@ -2,6 +2,8 @@
 
 import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING;
 import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs;
+import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME;
+import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_ENTITY_NAME;
 
 import com.google.common.collect.ImmutableMap;
 import com.linkedin.datahub.upgrade.UpgradeContext;
@@ -11,8 +13,12 @@
 import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory;
+import com.linkedin.metadata.entity.AspectDao;
+import com.linkedin.metadata.entity.EntityUtils;
+import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig;
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -31,6 +37,8 @@ public class BuildIndicesPreStep implements UpgradeStep {
   private final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents _esComponents;
   private final List<ElasticSearchIndexed> _services;
   private final ConfigurationProvider _configurationProvider;
+  private final AspectDao _aspectDao;
+  private final EntityRegistry _entityRegistry;
 
   @Override
   public String id() {
@@ -46,9 +54,28 @@ public int retryCount() {
   public Function<UpgradeContext, UpgradeStepResult> executable() {
     return (context) -> {
       try {
+        List<ReindexConfig> reindexConfigs =
+            _configurationProvider.getStructuredProperties().isSystemUpdateEnabled()
+                ? getAllReindexConfigs(
+                    _services,
+                    _aspectDao
+                        .streamAspects(
+                            STRUCTURED_PROPERTY_ENTITY_NAME,
+                            STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME)
+                        .map(
+                            entityAspect ->
+                                EntityUtils.toAspectRecord(
+                                    STRUCTURED_PROPERTY_ENTITY_NAME,
+                                    STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME,
+                                    entityAspect.getMetadata(),
+                                    _entityRegistry))
+                        .map(recordTemplate -> (StructuredPropertyDefinition) recordTemplate)
+                        .collect(Collectors.toSet()))
+                : getAllReindexConfigs(_services);
+
         // Get indices to update
         List<ReindexConfig> indexConfigs =
-            getAllReindexConfigs(_services).stream()
+            reindexConfigs.stream()
                 .filter(ReindexConfig::requiresReindex)
                 .collect(Collectors.toList());
 
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java
index b3de7c503fb3e..52b34200991c3 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java
@@ -2,8 +2,10 @@
 
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig;
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Set;
 import lombok.extern.slf4j.Slf4j;
@@ -39,6 +41,23 @@ public static List<ReindexConfig> getAllReindexConfigs(
     return reindexConfigs;
   }
 
+  public static List<ReindexConfig> getAllReindexConfigs(
+      List<ElasticSearchIndexed> elasticSearchIndexedList,
+      Collection<StructuredPropertyDefinition> structuredProperties)
+      throws IOException {
+    // Avoid locking & reprocessing
+    List<ReindexConfig> reindexConfigs = new ArrayList<>(_reindexConfigs);
+    if (reindexConfigs.isEmpty()) {
+      for (ElasticSearchIndexed elasticSearchIndexed : elasticSearchIndexedList) {
+        reindexConfigs.addAll(
+            elasticSearchIndexed.buildReindexConfigsWithAllStructProps(structuredProperties));
+      }
+      _reindexConfigs = new ArrayList<>(reindexConfigs);
+    }
+
+    return reindexConfigs;
+  }
+
   public static boolean validateWriteBlock(
       RestHighLevelClient esClient, String indexName, boolean expectedState)
       throws IOException, InterruptedException {
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java
index 03f0b0b7f2ec2..4b9fc5bba0204 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java
@@ -11,7 +11,7 @@ public class BackfillBrowsePathsV2 implements Upgrade {
 
   private final List<UpgradeStep> _steps;
 
-  public BackfillBrowsePathsV2(EntityService entityService, SearchService searchService) {
+  public BackfillBrowsePathsV2(EntityService<?> entityService, SearchService searchService) {
     _steps = ImmutableList.of(new BackfillBrowsePathsV2Step(entityService, searchService));
   }
 
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java
index 610d9069337a5..9a426369cfb02 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java
@@ -54,10 +54,10 @@ public class BackfillBrowsePathsV2Step implements UpgradeStep {
           Constants.ML_FEATURE_ENTITY_NAME);
   private static final Integer BATCH_SIZE = 5000;
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final SearchService _searchService;
 
-  public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) {
+  public BackfillBrowsePathsV2Step(EntityService<?> entityService, SearchService searchService) {
     _searchService = searchService;
     _entityService = entityService;
   }
diff --git a/datahub-upgrade/src/main/resources/application.properties b/datahub-upgrade/src/main/resources/application.properties
new file mode 100644
index 0000000000000..b884c92f74bd4
--- /dev/null
+++ b/datahub-upgrade/src/main/resources/application.properties
@@ -0,0 +1,5 @@
+management.health.elasticsearch.enabled=false
+management.health.neo4j.enabled=false
+ingestion.enabled=false
+spring.main.allow-bean-definition-overriding=true
+entityClient.impl=restli
diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java
index 0e7bf5ddd5250..be28b7f739cf5 100644
--- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java
+++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java
@@ -20,7 +20,7 @@ public class UpgradeCliApplicationTestConfiguration {
 
   @MockBean private Database ebeanServer;
 
-  @MockBean private EntityService _entityService;
+  @MockBean private EntityService<?> _entityService;
 
   @MockBean private SearchService searchService;
 
diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle
index c0355b935137a..05af6871715ce 100644
--- a/datahub-web-react/build.gradle
+++ b/datahub-web-react/build.gradle
@@ -117,7 +117,6 @@ task cleanExtraDirs {
     delete 'dist'
     delete 'tmp'
     delete 'just'
-    delete fileTree('../datahub-frontend/public')
    delete fileTree(dir: 'src', include: '*.generated.ts')
 }
 clean.finalizedBy(cleanExtraDirs)
diff --git a/datahub-web-react/index.html b/datahub-web-react/index.html
index 9490881246e12..bb86e2f350e1a 100644
--- a/datahub-web-react/index.html
+++ b/datahub-web-react/index.html
@@ -2,7 +2,8 @@
[… the markup changed by this hunk (one removed line, two added lines) was stripped during extraction and is not recoverable …]
diff --git a/datahub-web-react/public/assets/favicon.ico b/datahub-web-react/public/assets/icons/favicon.ico
similarity index 100%
rename from datahub-web-react/public/assets/favicon.ico
rename to datahub-web-react/public/assets/icons/favicon.ico
diff --git a/datahub-web-react/public/assets/logo.png b/datahub-web-react/public/assets/logo.png
deleted file mode 100644
index 5e34e6425d23fa1a19ca3c89dae7acdfb2902e86..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 53563
[… 53,563 bytes of base85-encoded binary patch data elided …]

diff --git a/datahub-web-react/public/assets/logo.png b/datahub-web-react/public/assets/logo.png
new file mode 120000
index 0000000000000..c570fd37bed97
--- /dev/null
+++ b/datahub-web-react/public/assets/logo.png
@@ -0,0 +1 @@
+logos/datahub-logo.png
\ No newline at end of file
diff --git a/datahub-web-react/public/assets/logos/datahub-logo.png b/datahub-web-react/public/assets/logos/datahub-logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..5e34e6425d23fa1a19ca3c89dae7acdfb2902e86
GIT binary patch
literal 53563
[… 53,563 bytes of base85-encoded binary patch data elided …]
z(nw$hwpUt=7bd_Mkfs+PuK_a@c$%jN5Ai7=g0ihf*Gjd^b=Dj_UjI9ZmLBOIy9y6! zA^+q&)HNA$2i5&FPNqI(f=k}X_d=#U1byu{DV)!D7Of@K=~X`aXIc@7B^15_#}Zm* z5#}OA{fY1Y@T4Ms$k)ir*x3_hT7x?pXaOI-_fu7x66(8_RgS`Nxr_3Adw84U z+-{O$e~%baYGVWHsbi`@5i#me`pf8DcJZJo*Yxj`+K-60PvdLrQymTFMB*}yAjmfE zeybRdL2_S<9dqz{Vj~y!D|Q@rpBX7U73amIpuwspC;fPNtxK1GIRI>V%ZRA+$uth{k^iFFrIoa{$qx9 zqD4EL3;hg~Nt~BX6*3%m4fQznihkN$AksBQXS2O;xhanS7?<+r+YaOJAu4c+`Bb3+ zE9h*NLXg7<9#_!9`d+gYb>*8zh5%iQ94MvwNkHPGF#bdBDKwn#wgrOhBXQOMxE)UX zn)J74FL_e(FF=s*mk-PuJO)|4EgE`sp1NJR$`AisMD{$;UinaO9iVft5u2$3%z{!2a`z(%By+V4Y{Wu#Z^WrAy1fGJWnRnc|i(|mINn`~#X zS!wY`VZ`0$f%X*$+S1!Qao4ZprL223Tl53Fwks{houwDqC|y05i^}$*H~kPZ2-rl()z_ZacG=@3o@C8 z?Px$y_T>S68uCSq+JZVKqW_|XaG5DAP{>^fo}(I%(0`y!13@|kzk-2H{rh)qJO&vEsV<|d+6Za35XOK@=elF7p^Ogv1WnNE&DnE@ zAgJibrkF~D{;ie0r?U6h=;raW<@G@`n()8dAf*2+L+v-@Wbpv{kqIZc4V|ZZmrmLZ znV!@<*S7Cjnt8bwfqPl3{@Ixeep|4V4n<}bv$8c>@S=l<2nxCnmk}~Qc{T~`WHC77 zkFTx_>d&ZozPU0#>1rL}kS{m{ zUkong%PZvqO=x=^`zEbRrNl56butR2(i~L8sQ5MmsG zBV*!tv+G8%j3cFcgxI@xUxHuJF4Dgq?GIFa$hvJiqUO?!QiEe)Iv?ap-*vc+zeWvm zmY}KB30OnNV#so6L@P zu{KYPo@FxJs8;Vh5zW60VZSU((dAjozUetX5nVi!pJ#hBN!RY?Xxf<-_|bTOy!4R^ zIg6Tk9@G84zJ~kh3$1l@2r1g!EtfJ!DI^T=&Feh91V0#lR?c61v%9==-M!rcfVE9t zmTC~oATuiODm47+k@uf{QI1q9JLQ?`!P44!hSQ`bi8$+fY~kMDlg`x1r9~=2hv7+; z>Sx9xuTP3!cBkmk$F<>HDjE)5E_Z7(oMQjF6GQtSX?~PgpX0|Tn#}jz@BVc!DO=Ot zJs0NAMU_=`B-5yR5K{I%=wDnn(0OP6>AN@g-&#Fb>ho@!vX#6+_mR;2jkB)n^ox{Lq!)KQKByiE(K;R z**tMAuW_eO8KQ(POR%_`_6gLKWTmyMopajEj$~P%m)O5A=^VRiZZ?wItLB`Bj{Qw) z+>pyx%yN3Gx2Z2DOO0p5noHX$#!M;UohiJ{N|@> zPK37?zM=B~YnhA4i8UeF61}s>_~_lU2e&DwzmWtHrA7}`WzK)vXj{`(UAD_jIMQ)g z)f-cxit?7_4g3?FP=3BPJH^+5@|=M|HaVucz~$mm6t@7H$!`}&OFOp3|bD`!+mSf+M?xAT0n=i7gw#w)1)y_f-IQWogJU=wOQ ze=(FxriA|)(r;fP22DF%5pXGQ-MFW*s&o?^yOfY%S6~3CM~AkesiE22MLa;BRr8JK&fUkfY37kzC{v<8io3Vn$)|>7 zc++M*)WcYc+~sKgt8&UK56;#dK+28ZEK9{$vP>;uTt32ssalK#j$fqLJNuX(uY_8t zly994qIK&RXT1^HUWMk)KWDd+r+1-5@o-9DKb_wE*vsij8^ov<0a% zT0pP^FTD#vL`v+B+U<;U1w_Ag?R>jazEa9dd*@M>3R^MxrC?zh(u~2vUVrx>C6Y4qFGK80Z+YFD(;W*5MKWu{XmvX=@*1mQi#~)RLZm*-LRjAi zVh*(^mO@5Ki$gjIHADwl;jvZH#PD&4rtd>FVvl5*5RnIStA^qR0jPZuyr^*qW6^bc zLVvD69AV3oPX-C_?gfW`s2e~shCjJi0pm-wh_RRT5vl)ol+2!{Ihe5s9K8YpyJk|D zS=LK8i%>J2>YX;!ZoJsquGK@2J8cHn$F#i~{S3$b)mf&JInPIdB3B$#Zs`>h888M{ zTGI*tbqDPQjebT1 zNslyKF#0;Wg_wFpX=!Xsc;nU1Y^Sm?Ed2-2zse2^l2VBx<(&jy6d3{()M?Re8Ou_R z=11Sq63jEMuL@gHv1DwDT1^ZLMy1R~_O8zEw`(MixkK3m$kWb_Y}@*l{))@Jq5qp) zi3+o>SUtRVCq0DF(-DcQkkV?OJ#tSEH|xybXRx<3CelM}rX9JXhxSe^MkF3S-iYFg zV05w;t~k9;noAej62j0z7%TK@8gb;$ZCS|Bqsr}3>XqvgWJUSx&VREWH8F^k7!pvp zOi1FS3f3OgY?)P&gG7(Ag&Y`a-^m$iSw904$D9`Y924^Ni5fgNsOIV-M5{B2Jxrt~|QS_$oYD zShi8KwDxQ8)3<0w&~v4|e|uaobnDx}ee9dF2IX^$)m)c}V2^&H0c5(v=dI82iS!M} zFOS%CgWA7ErAF3XgW)8gJOXBP*p6v9~+4%!{wB*?-dKb((v3 z0O`+Hs4H@Km`rmHM=<`oo)zE$^M&SZzH9YWC=vHR9D4QR7G@E54@bs15R=s2ZPMqN z6ju3YBZ@wut3^moI7Y7nj_9X1%t*KsBe$ogq!{TT^Mr_831#IN9X_VYLbvo7S6Lj> zYf9QmJ%D8E_#pdGYrnH#PBG0PCKFwiMD)h&CyFKd6RYp_q^zGZ6T0B;()BuNApB)_ zhp+PP=u+~X0wJF2&W4KXsEI1MVZOrXb6%M@juuZ!eh$A^Mg3QHONlcYXYb zh$QYda|s)E6}+Ya}&2BPOQ2r5dy5;7c?1dQ>+^yYHd?<{J*{ z`=l{b1%1ST>bVu%dY@i{9`RcFA1ntLcJ5kvct{89yjTe-50-cILNaOvm$zY z%ZLYK*tWmc&@;oUx{<5Dz3V^{Za+Ov*KqN7-}4LVyN4i#>D1KBta^OAzO5un*(i}0 z@UDxuZ98G*b5!_T-IiH<29}#kmy)^R-KN!gG>9J9epPVA=05&A5$v*B)vj{(l0KpQ~F#d0Hcx{cTvIe*2d za3CG_PBS)D+s*rCkBotz1WD%YYvN^0@P`hP~o%s#RK5=FPIGG`tG zcUJQl`Z-v67fO&gg?WpkXlDgKjd)F(eHpaq`g+PBxZq$!9 zY_qGnW3^33X~Zq_9ypP5ZR+%p6=~$|SgJtA=;^EbpyFWfB%e4>Sx55XgBu5TC>!F} z8GVZT>Ej63Rd+J69`1}@p2--jn3jad@%G?WAzio`*@DgXZF8aHb71P!8srn&?X%n` zdn3$e{Wig(*t{Cft6>(yMLbk3^q)e0JcBXbfSV6k%K61q*Q<>mzMiX|F2iL zJZ9sVkv3mR5q*cvOUb8UK;sinf7XEx(f@V+B(6 
zB*Xj>;pIP1+X8tHO?`lcAH5v127a$W1r&Oc0T?h)h3tM@X?Al%3&7+WLVt*-2b(I-u^y ziqTDHq9!I^J!@p(XgYJ-(t*ecH3VnYS6ndB1%L5%m(|=-mwW+R8Xkj}?TS=9{yLc6 z*v{A!AzT6K_&1Ou#*020xO-SpZ5(i5AP3TtSbB&fQ{}eWe-6=2VuOp3=-D45fAE>iE{v~4Y+s?u=Z$P7&=9dv|19c%C0`w0PGGNXnZf$`j z?pL4+w0pXqpM2-=Ak^j&e_H-_HR`;cs%h-rJ_{)DC1rqqrPYw`r(!)(}3LcIe~iP5;p>WV5YNkpe-{p?WR>v8FFe3D*SLb;y&%R;>PJV9lI%+{90Z zRa{*{|Kic-8C|@z)b@Q>F&EZ*>USpg7&m;Q^KcpjYS)Sn$`9|I*!?I&QhqtMo)ni$ zcMyfmG(=;$8{-=KWM${4c4u&>crI^=ng*n|5JbWnyDY`q#I5jbU#-2T;CuYmjP_`< zYx?QoGP;Fp<0 z$SUxA<5!LpT=1@aEoo7_PN=51P>9w7DqD$9u37X{a*$)7%YUmwRw}2TFKq3Qj*J~=Un-7@1%NmB&x=x=qUB&sk_^1ZtN9r&VopuM0%&UE;$`0=t4m;HD&!`ocFZ*L9S{(5mFClsbhnz*Yp8EX2JOH5&svX`M^an5|_H<`ANKM|Kp{DSFv#;G&Eo9t*U zWpL~Jwj43!u#tS^N!vQcld|Tmfx`{0K+exfXV#-#e^2Ol9+VG(+#fi(!QWTt2A(6jNjm{aNiZOSN z4J$Wp-GJ|J4QKXMnY}Zl(*9ge#`g|nRB08(PKPyC{ zC?}&mPzvF1v_qJ&jYb`Ca)4Iq_C+Q>y+!_@2|G1Rp~% z!47x-s`sal>H?2h#)HObeom6C?^_#ODn$1Y9CC|t5y6a?V;885DMcVc!Teuru zvkk=!k=Vk7W2~$b&CDqqvQ0*Nsarxt*A3eG zV_@ka@nAJX4t+d}0-RAEdgXmt#d|}V^}S)_+>&wiAgDFTwSo2#JWGo-|12(lwDe?b znuewsVvDVmyU~InSqfx~F0(u^;vlkkZL6p?b*aI`*C`a%tIQq^Wbb~6OWfT1@g6|8 z24&XAo|CRRCdJ8k{PXGVf`+xL6p$cg3rC>i|M-A!M|n(P2n4b{ZeNZ)A=z#8eO$=` zUB0~DUf8Zm3FDZSwryua%OZDjwhjP|xm}oYkR@l|SPa-7bEv}H88(ATQ*9NIHLj0B zx7ws_+t@%lU%JD=VcoW2{Cze7oFyN6WpH{wT12z8q;Gwm5?V025=>}+f|D@lTs?C8T; zjjTG=Zfn0q-5_k8tzu(bm_;&BEUYhi)h4XWx>d2q3I|erhnbjmgsWmoHu$co?r2(LmJ_pYbR#4lYtsooi)y-)2hZLe3~8o?`&AjG;*cv z8iF5*Z!22q_LHLqToSs)nV*AEUJp2)_eiOf71$;ZdJU3e*gEa#&RoRA(hM(6G@d`)Y=u01@KG+sl(| z5Cze!(T+yW!EE5>Ars$XSL&B0h|@vP-@O)UzMkmM=KUB)2Bk~ZB!}OKNzP39L6%>< zEMBpTE9W_cQ8s-IrWOm5HpJ}@O55eRcO%>y!MY&`P@_yc1A}RF|8iW=SI-5soX81W zgGf3kn~YWgzQ2!%AvE)PyN23%)I(>{ATXfOYU8JCq-QrJ3&|$CsDvBV)3|~!FE+NT z%iW@ZIuNoa+>W=3V;`et)o!CzaB8N2h!8%!{q^-|sQDM|Pv?d0p_MZ*?x0Bj16Ffr zLGGZ}7m=;SH@Xq6Kn41&7l4OUT)P}R?18sQSRWWNiY8s6Yz}GxkzP`T!JneyS7gz` zwmXOy5WhZO_}^+vx?#?9{=(y@liDvxEtKF1dnRm?)1<6d%^584nxeSK>f}Zk1@vPWCc!FOc5^^=% zFkAQp2~-J8mVHzQ-)$E*;Z7GmF{HWaV-%Fbh^%b2LvMIL0|NNt=qgJ0GE-RIVInv` zj}F0q!_EV*gi~Yx2H+^;hN%kx; zSHR5sR7w}tXnjW6qRR?IH+QjMAGP23{|kgyoZfSC5e4Z1685J=saqDogn-OEBC3SK z70e|Y?sDdX+DN`G3N%y#Qi+J5icBAi8qkq4$J`jt9bzsbcl}Po+b`RSx_{lR(TTbV z3ZftFE%+#6W79FpR#HSulamMJ+;A0d|JzP)KkqUZgiQxAR3!r=h$+4(B#peiyGjDL zaE}6rO9I3-$g|BZA&DSUanK2x>XS3B@lUkq1Vz@Jth@Ep9goRaK|c1%z?lWsbpYl} zV_>KY2Dixc`mFlKomCww=0Sndx@#{)v9wGUN=q+iF)Q&NT_2JfX#i%KhVl$T6`wB_ z`FI-VKO?nJ%I^s?*OyDEl>Xf+nuQ~R8bFPvsW9(1@)q^&h;idS7;!7l6$Xjhy(X1G z$Y?Dw;Q8?diAm~E_93v5RKYr#i2>?V-EPyg330}(gJ(qK?t_F@H$^LI%+`k#3ZH*< zaAqT|x}9+NH-^oWs7M2#Pp~%MVxqhcJa#_*KRggoK4ADN50rb=x$N^rp>M>-oUAG# zXQQ1IGT6MWLWw@$e)36%B2!uy|eg{OM88uV{LN4#2O=odccL-uxw=2D@Ht+@f~p z?);{g*G4F#0@(VzlHq*VyG`%3w_&bT=9$TR`6OnoZ!U0b8a_!?3dSJkKMo>LkIgM5 z{qXBxInOdzFz7m9qYaqlTpwJ~o*DV`iWzzHMa0L`(~Vw-q^^9P&+Gx|uV$l+zjLqE z8;TvYneZqmY(QM9^b$TJRSB}J~w#PuYQ8dz^}nC~bBWv^}VvQ&T@*nTa1x)`8La#oc=|07WP z{Md6jt?ZMsf%I+*T5&P${DIjsvplBf=HD1ej`_x7c0F#;N|K&fxO%xybi~bGBo@m~ zEZbV$ACukCxmWhEPZgkK22iST|1vu$X>Cf`Dl)YxmUb&pT|R};Q5a?>lm zQnOd!ztBx3KDCoqXCh&i8?-g@HL@}UvVs6vU_LZyxfEyf<+jE!#!S0O!nU}27E>=m zYWZ_1q_}o-)`ay79Xs}NaV$bQb%N8h@bs$-6%eujNRcbV-$Y&UWS~1*&B##C?XK!G zpPMvT-fej|QC-U|WL4hQ_(0{BC2Kzu=)&&&Tb2RngFHW59JFltDlnOGn(0=v$yelb z%A2M}rJr3d)kqmmP(CW8YzxxDS_xX6A|5CNHAvLR4jyRD0RBJo(UKSmdkI>xT2X$Xey;~I;;8bu7kO)Ylyt|d?Xrqr zP>v6$UL|scdid#_-EPl)f|K(xWj~#Q0t|*t*K68e@Lykh&DUsb_Yt_sC%|>Duspw>={QNP*^|R#w>no= zwefK3b)~(6LEMR+EdTl1>C}Wm&CDau%G2k2C#HAkU;hFlzp{{{zdc$>cE|7Pr{<>9 zCpbsnzx@St{nyC$_uCrXa|AJBL(h-AmlpUxqh57;@K7R_vhiM$fnF!P(9l(?yY~g> zHPsX9a?>g~>3Tt?6qN#|>e-m^sbqng89yO>u=QzQ+6pSmI?`aZN3T#aiFe2|8=irP-4R&7+?Hx*80 
z(6j2r3^tW`D;P*A!i3t>)94mM!laP9+6Q&lzcrrRIG&4cCh#0j{mpi`*^Twl=h zeNmvoNd5e$JQ+2l{O}BJ7(r)2( z5^mFc)8tB+m}%|DPmXbF8wj1VpC=-X+A`%fKp)nnl25a*iDso390<>^bqA0pHa-!qo)tP4!d!9hoF@rO#wT{$2G5y zl&*mHJ$8sRaw1j&uN&;J`8fhcS0)(|z<}(vC3&lm1F)xD$|URjzxw!s5++G8e#Q1v zX8n+`A|vTk{Z!phd0;0`8fYCpjVR&8-e{Hz?!q{fhe5iLm>R8G){RCK$K)|0mAe5d zp1o!_(*2U*K;i=@CEM9)dZFSnu%eCtcqkFm+k+v!roZgU`*jx|F|k&Hxu@=P87);5 zD9fHwHf2Zl2H2bZ?_$jhtIQY0^@hEgWh!vfC7Z~FQYSToMs>0W4IL5_{9mSJRSSr7 zC4lB>rkudpW?9^u1JCW3?(1#W8cf-jUY8Uw(x}=4l^0BCX}gs?(aO?CEyeWO>j}UO zt*V>4w+fmqUVz(!ssC>8{jAaaqFGRj{-Hbo^lUMN=^N$v*8)Tv=QYt=O^Epn{}^i8 zVK8|R3w6=Ob#ETZiNP)Ys6gZ&>_H2OHdD5c9f%^LDB!WqT}tCq9mI5VHlE|Ku3q>y zSjbqwHE+4Og6aJCIZ<$G#w)?L%54RDIdL+9nHosZS%=0P!fym3Z?nLO#)Q`6}q4V z_yDA!NObtE#~NS(-~P#My>4RNLidoN9PPdJNqOXkaALB;{kWtq5J=-R%NCSJa)l^& zWiR}&a24g;JM*5qGt|cjB$`-8!&~|t*vQNM1ax~85NX@lPIt!VvvG ze-EbwR{bsRq>)=DEB@(vu^?lurfl&4*WZv*jwDjc&Yvc?xzF2y6xolsn6Te^DE>y6 zg_}v&)p)p&7xUzEUYRYU-skwW9c~9S zvt9KA)*qb>Pj*^5m-5oX&cXm3{tSSel?=W1!z&ZgCJbTb{j#eJGJVE?S9Ank&5^4W z)=D@=ns^UgO|&8#yDCZ5DiWtF$vbQT!c%Dje;&2ao(O5(^WW4j%V~IHjnT)r+&lUP z7m?PNv@zHU%%pc?v3RmBA=g(HZx4sc`YIvvg0Od z-TAmqlHXjMmG)W9Cd&y3k#;v@T4$)5gFNg4gl{G>rrVGyfSxNNMnH3w5Qgh*RWUc3N_MS}rUwueTjrRdV9slTcHbPHw|86>PW)#nhkw@-uEh7PU8Xt( zU5QSLUd6;SHP0qa@DVLYe=91*9uSLJd@I`DRv$JG^XJ9~&mQltXUtdb9_R68lYuO} z);Th`G%s*6jN4)N$fAAJL9EF*yB+vtAIjM$%jT9t`sZn|^RfAjuZndqpUMLC1dL#j zKK%IL7URb4pI<=wy2fhwInQ;7^BN|wJaVh-0bIZ3B(soQ_IE)`#XS`U(CIsFQry8J1wRXjQG)_hXm?_?(@`5o_8Y%e zH#twdXj^>gxz5>GyOGbdC2dz0U56wq+wrm*$W$j`@6NL;JjA2`XL6RM;GAmu>c0AB0&etK*;&w%NA~-KAGt$yeVVS?`40^ zz<512VtGOV-HPfHJbG8l863cT%G3CC#fX&_aEol{)mQ7oRHSTA_>^ELxd;vw;~CFz z*(~Nh&vcq6&@4r45XA!GZD=Owve`@Kt~klgw}B$lY#EVr2x5Aq&{S>KOVD8m>c-HbRzes+1!0u`-q2P zBVPi@f?AOp=j;EMY8rwlb>j3d>nPtWOg=w6i?(;d#*}`J%=^Dx0HpqFCgV1nJXFZk z9g_cnhXvTzH_m}URG5&G)A{~@R@u--?&#@#pXtOX<}<{`i3qOGb#wgFkww~+*KE&9 zIz!TVquubVpLr?`xha@ZDZ(Xagxo#3Mk+fe<1X+Pffh1&ti_X%JpspsUXiz zH|7iUuCWY_c&JRpjjqrZE9MmVpV)Bg+zeEEG?tPZoof z8Fcl@m}QKqzPN>%0116tmeM`>_VnD}VuM8|{QTHeZ8l{ zmA~jTr~m13?}?MnFuWpu4WPPiKN>+_SWhkFUUty|;p+9dE3p#T1Q_jq+Y(F$9ywb` z@f>-$w=}(Z1J#Bp)5Z-+?tpFeK$4IhO#E*$zs$`CO5M7~03AcB1KX%^B~3C`+6i~O zb^F(&0+J-)iWS}81r=B^S$dVdE3NPl_7XQ3$-Y_yEMKC=xe#PlHO{Y>WR?OL%I(FN zF7+!{>b?$R<|SK&om&A1Qao7M&rU4H`eRv&!-~bID2e{kR4tRqU8s_v#wFIM0a!Sr z)BF3o*E)%-GnIq-jUSo%$SzPi5mL7{+#$k2EN& zPN($i)Q`A0ZJ#>AEpndm5Y%BnV*!yfL~0?XGdkmEOo3{j5X~+>^Y>f&1-@~-M)yUXO~*4L!l2rERyO55+AUe%4a+B!A* zeM;=R0G#zy#iQs!v9%tD@Ac-{%12=ve#<>$7eeLzHVfS)pJtQT7ZcNDP_}553y5uw z)_`qMa$6a#KtqMC36#|aix_3#*+TFB6K|F`#ivAXm5<0+{){Of`E3SN7XG~=1HO%4 zJy)t*-m}=>hG}g?>z2FT1X#RpIjoTbk}-3)ZGiG{-J(N2&%G9jzc0{siT=peqwit~ zR#pdO`ddWY}iXxQ( zFyaS)A^pi+nzj=2AFlxk%!KOq3Ezs@}5*f}9; z+)7fkm2AFY(qnX8v4JuC$_I07WAao{D)%Ewa$#W8`sD(pzLk=scWAR70u|u?GN*H% zuR6I0R@zHxn3c4W!!5}dSWI*f|L#;ePv2(j~BlGWB5DeqkO~bDK#?vDpBy)Vd!v+@Y!+#u3#;< zPn9x84)ZX>x;SQPg5(2Mv}vLy4j$Id& zM?y(jrRsb-hM~4&RbcrFg>Zo`pm|U$YC&u09VukLdsbc7ZU;BuC<&92qv!cdf_A&C zx`3h-1b*#rW16mqqtOMBsrv|ax@K%uA2$3yuHHJX={EWs|B$1HK}%V16M|9#O4g$|yo%25N z*@=p?Zoh(2EzVBLJD`?5e9l%ey)d3jc@m5f)s26QeUAMu6`0-0(vBx=J5F5c&R<^N zh}S3QNW4rn3#G=MQ+9}R(4|eyjqkhYV1A&dV|4yZ0?LiEDlH>_I**t_`oLG>8ZjrB zhqiX7E#tC}-^&dJYsxhG>b*`!)EIYl+HD3*O{J7~m=?*ip6`<~61~Bllg_qt3)hPY zoXc3-krpvXx7*mHy8TQi6Ad_&UJ#wamMyg*(dV!~6%O?cPMxW>ziDtnJ%e{HWb%Haq>EYw-9bY%jn|`jY~NqVury zGP});q&$S>(KZHK0#GokE&Tsie2ecV0bdkyNr5%+Gi$27c=_FZ_DKLf*{`RJOICT>H5gAOV|L z%2YG1_Pgbdey|VfpX7xB7N}1@wv=>e4R}kLV^`WcE|d*7bJBk*?f>5h9ozWo$|B+L zq|NekjCZ)T&)YoP;bU-EySgy)2XdY%U8_8WbSn#m2gUFNsml`7!j|*s3l+%;GdZl8 
zj5IdcaQB~Y<+2YR7$zovH`fQ;vRx^Q5Z}4Q(XP^q-|o$Q4!QRwpv$pH3m4WDc6IcynW@W9Hfo3qNih?0OP2!Mo@%h0XO!6`%(c zP;z|R&(pQ)1*#$vbNxsiz1Xu^#;&wZ+WgYiY4*mQK>~!73fdU;YVHM(db;&;;&bBL z`w*Wmp0VZhX_R;44=_rIC?to!DE@KlltpFnnXqN*ySz~J7xqvO8sERyFG6{>|4aNUoKoU=l z1oe-GHo9Az%QzCs@SNpHDA$!3Y>nxYLILzLFcuqK$5V-tN&vP#o|4DTZQ6>mTpnT( zdr4oGB=mMfvaXc|3e)fgc=-bZ!s^g7%Ey#YcR) zcCwF|ACXk>!`RB|{IXwC-hMqCZVlTy1GCw}Y^Py1d0mwHJ`>|+TM;m4JC zOHM!Mt<|;i$N$#>s&0q_G*hPIANJ+dv=VYI!8B}u=dN#jS(Xzm64cMqy!1`u)G^Sf z4oF+9nS}5{5K!sKHR83x*FsT_o?6+zpN@_R7iT2An$Uzf9^|U@|iQBxqjj2A1&&tN{>(_NVh4ECNt3h-CTve*SMm#BFSyE#KyRE`A zc6*m&I1G9=gE7t1h=v}DW((|^dA!mVYX9}Q=BmZl5I@v-PwB{}M2OidNI72*69G0p zG&$Gex=qdXvo?-vpkBUG;h!`!Owrs;cU<)ftax$yqbbm5aH`+aLr42c|Nm&0Z=)1g8rhVu_x)wPKLZVqa@2ViopuIr+Egd_gSvqvJshb7)1W6t1 z{ll9pJdJH%UzPCDTqT4}J|ylt*@&+8{LKUW+Bf%nVj}eY){N{#5R#_Ozy)9N)?@aB zio8tqI6QSquPs4W%lI3}dsU+HHaWHbV_aF}46x9}djCrZ``pLf8+*GjNTHSb9(v!0 z6)%`5fP}&l%506*O#YarpPs{TjH`Ww5;g5tK!;+U{r5qE36`l{q``eVd*|%3bSL}R z#Mg!MIWfi$M+&?s0--_I`}TF5gS{x#E+}20vgu_sM_4ya%nH9Gv_hEgjCI$}`+@oD zKSm0w`uIcR{+wF%QnJ$%a8~Dw?={C*1A05H1ZO?}ROFL! z8TPDmZr7&6^-eLR}gv@Zw6Yb^6(;VwI z|2#7Q+a)bo!q=+R7}lh8OS;Eja$5K$wimNs@tl&mBKMD+O$B^$&oLm2Au&Bb}3Kb!Z+>P=Z%+~mxr{~f1v#;xn5`d|7x z#{z@Itde9{xuMJA=tBQB>2!q;h5C_J;nunJ{vh=fZ*EczvIxlJht%8B7V|TzVaxV~ z1_tEDJ7Kfytug;KnuA7H8I6kevvObMh@bKANZWq>Uf^f5glyY59LIq8Bkb`lEwdYa zhOlfQx1mURX&?@1K>_vKCbIf>7#(=4P42IOUVRM!?^-1Ybnti! zn@r#A^?~tkWD7V(@0m~AmcPre1#y|NL z!s`p#Wx%)RJc)^kSVCGlslGzoiSIz+^ zrjTn?^7^GCJ=E%c+o!GDpDWpXEGUOhou_-`P_l!X_ik8jC&o&8~ zKLp=Ml?@P!AT0&S~qL2X|uWxy)M~Y*nHEmm)eWcb7bejg$&#t*2 zgX#R)KfR3QVKyT-vNp)_nzNRIEXYMluTL4MKgD0P{5z#)M+|d(2zvqcV`YHiq~8-` zSqGTpO*`3p1!pQ89K&XJzyM&<>RwjP7)fsH&*w)RqT{43lx4B^ao93jDIMp`zOUs2 z!jJMS(oX}SD%38JH2X@6(&|`{GY0rkpV9S36&qt(c^l2!zJ6Om7U}`M^t3bSPwt@c z&w^mfwoaE}^h=d3UpZ+h$c(tG7A$zj`W-`?A8JY47GYVk@#tD;9i$~ow=m6s_g(eY zU-~*%c{ksyX)yq?7_%qM&F56ufN}c}g)J+vEpd^}P0_TTlX&FDXJG`VrYWht;ReJN zqDBQgG3u+gz1U{jwuON3ZuPIaTGzC&_zPf8nuM%@L>Gl7S$m6_l2&?pI&xOtQET-uU{OON`6sO|@@(@F}2Ms}$;17L<75J{rX#X)?jIpi-65 z+M|3KcL%c`+VC}EzjkZgKN*}kmtE*1TghL6g8lN$JzH?VvW77KZGB5?w=QS(JvN(K zv6tAjJRi_h%PtH(_Lr~f6A69I(#^uGocVgaUgc?t>)0zBW+Z88Wx*$wyVJgaZm!B9 z2HJ_ti~{mo2;mN0;cYZ$V`uBqMLB;Ihkzta>zWn5T_O4vJXP3QAcf4<2tVs+oEk+==wPM0ESY8_+rJM{3umPl*4DlI4*wbnt~ zj2zxTjKfl~ghZZix~(4X@BdvTncDus?AyodZd>Owj=}=0wJu>~^R$S=o-<}govXDF zYgq|FvKC5J79$`$x-wo72qGf#1m&|F{we6d$IH)DN^diVh%&bR=O|y>y%G&OcICf~ zmC3g}-<0GR9%+ei&z;i(nZLIA%kNSzbqC_OuJmUhqk+vST@zy{{8AFf9F{cL%p~S2 zKdL0EDJKSp1#$JOu_r4=q?4Qn92+uDBFe|yETuI*vCoX9ZNKMIP{#?}+Z>*|7$p+= z37L^)5;kcQ%hF(6L*#aouLhZax_5T+B(9okphPTeId$}gC{IssvQ(5%=#7W#wkyfn zprUqB+BGlD6)e7!tHC&qrePh9FNzjiz-iH#`|$@ML6VFe`)u zJUslKZ3YQRxBMbM`cdDaQoGRGm}c*nz{29~IecEyjB!6GxF3VgyIPbcW7D~3NITbr zrF0vEn`h`NGvcP=_!LRO!7Na=jyLRd1 zmnQd^I^jpu2mv-V zaDFPEZL_uq9~@;}8pN2p>oU_vSnRbqSgg^XFFjGF$0c9V_Y=3QLk>a}*zT;oa|^98 z%=o7}LiE`nT8hU=P0y6F5Am%2a}8JeXs`rOEps}>TwA9(ysNKFY_$B5!50VJb*&C? 
zG-O5!88A+klfFPt;QMBzbkiwkTF#%e#Xg38voUNFLMvQvv+B&c8YYM5g9+4hJgu~s z@9-o1`S&;Jx&a&AH%|MHM^zNZ=birTJuphp=dAK{*^Yus~oeupCeLiB#Hut_3WEo4{ z?GY<;T+k|a>sly@{}`~JS=Qt(aTwFKQvUi>ZMHnda-Q2!@+E;xAJE{D6qF5aBen3) z<}?!a6~Q`>n}ockvmqUuPq1HB1*pwI9SuVedKWM-Z4y|k1VSgwZlZfVn3_3afQ!p=jchK zA3XvCn zLP@3Y5Mq;9@#07oS1==ny{gqL{QBGLS?|J@H4^%dOh)9H(GL9KtKOpR5rzOtlUvKU z(fD1dpsi2FP|p`sonyBV-@{nQIL?kgxLn?FovNt}b(k}sk}>00CO!N2rq-SN$)kNM z4p2`J7wB>kyeYKV3Pto+<=S&$i-rZi$J+U{BNC4{%}fff8p|6 zT=-=k7tc6u{Hj!|*_pP`9q^ocZRsF%_s!&+=RdL_cQ@C274SrPgJO|zwj;7w+pmft zR++h63YGj95J4y=Ym%{`7{ibPiN#7S%s6EZ%TrBZ%SHNd9_Si^Y%c$7e4JD*%+7To z_`nho=wsle`TdsblXl~Q_<3|Ak)5-^lpq>-UHX-MO%;eY(IDO4WIh7KsfX|dXf2p0 zzV?HNy-M{nuWYsx!~9cFBb;zr7la{gz~tS=3W2$Y9qNvfXs5MhR6Zw?(Q;M5;-NAQt`@fMyjCfU)@I!m!UiTGZidqGjB%$A8KVxWp1p*j7qiu7KjmY* z=)^7(vxqx~`qsNU3(@-w0I}+-=v)bMpa~(XO@sNSH^c$4slG?zEp>tuu(Jz^5DOh} z4Aho|7Z?ZrVo3DqX8(H5)ciF27>R5|>i@aSN>p13XJUUZYD1Rr zWUM7N-{amAlTVy74qR_V+SOhIH#nEGW+q}v`Udxp?+U5rI!Krfs&NAaWwkTI96Mtg zhEF8)hUU4sG4A2ZeB$@@v%WboSq2Prt!wo-@-LMQIqo@Q67s>V`gQGi>c?Y8%2?(IzrAPLoZ-g zD{`+7=1u2LKLXEP)QvB>$wFf`;SjsRd;keMa4|S8xo0czo(i2CIf`R;l4|Di1q_b9 zIiQFxcfT5-)Yy-%Zf4>JCssUhV^_a9lQRp_o=YX_R@~3Z1-G`F!iIQy!Pa*6;v7aH-DeypNvCCDDO z4$p9*NtZx-TA)YO(AzsmL1;fWL;*1Hx+Vx7*S!eI<7Y3i^g!9-Ac{~)+LNqY1W+n! z)Pi%4;)ZxyESF-;fEH}5jw=vEN0X4)B2eQ)dV9~;_}n-oJu zrV#5uokM>P^oxFaISr3jh}JN%JV{6(#60ViX!Xap+`Cd1>h_9)6%ilF>TFGtOESGUmp-WDv`daNX_s5rGsWBzS9u-kT5*@V~JDw`3}ejwUBc zvVpteGXF>3or7PoQ);lmnM&axL_qBss|(s|IdXBF4Ww* z=}4t#Xv>^^k<`rnC=2la<+N=qT~{<5{dHs|jJGbsjxlKNer!>EJ&tiOC(HdNlRYGV zF6ooybge}|2N=wn)xHb{>PsObHj6c(m{?FekoJ=ptF2^Lj#pY*vLva%qyN&_{C%?U zJk@AG4$i&WmU|IvQDlnA<0Z&LsaLMo1M7uS9P$)34ccdQOSue*Kxs8vwN$I=Xpw>U z3xubUlWzC1GPYJVv3oT)`b@j%)S!bRbxd)kjz)WXH~=6zjy*42OI&Ve&WS7zci!pWlYPX z&&SItXTC5yV+ON^9wazwZabR$4E7<&MXlH5f_UDV5wVv_NLbeCPaM@2aN>`gG9C{! z2p|uBVKy;#=o)MeM9(!`#1LBLh%_!a8m_Ah!9hmdt*CW>L2-QTff~HufW?qv!&5cYx&E9$!4{b1F5Y_4vP2Fw%PfDUJ zIK=AIc7;Uv?MyvRU_q=uyB(rEyatgtwDb{DJ_HhE=?-1+vFG9*X6R(b;T4VVAO+Z> zRrR$dJp-m>#->H+lkGzE2Wng+%KIn1-S;sG@7^$Tp~!8Quy^JDW=2*UaHnvkT-)~# zqQ+8bXv?gsG*`38_XxBz58}Sn^Oq*IDJ_af)n0n9kak8M=A+uxuA(8d>QROjx}W!$ z5uA`7!tQeS9xJl-XSvob*7OExfK<2$(L^I{Hp)p?Q=jHB(pHXRz}Q}Fwa(8(ipqx+Ma%P5SB=5ZbXf-v^Y`> zC-?Kimk3nr&hjIZd9@J)KOfW>p{urlg!(vEsc@23@%Dz+?VqO(}k0 z_m6?d!8sJ-YwJ(Oa1Y3N*&5TPJ#O6}E23W?-@^}H1Cy4~f02-@e0hWWOa;OfSBhNt z8lxl}E7_L!rZx?phY3HsrdSA}bJ!4B$5UraD1msf;4{0zmSyPF?t6b!)AoJW6)x0e zg_?8$FrVFBQt>W1r0Qa2`6}TC=R=0pg|Me2I6#W+qt~R8oL}OIwI7b%#)CHV14@`* zT)aA~2I|6FNtt$U${HX%N53v@Fmw-FhDd>mv=d{=At!Niv{}w!isLb^zpM-;toF#98eC zw)La0U7gfh@bnw!aN$b#ioPTf1I}7pKu#J$KI286Dio5n3aWjC>5b2PjVR_}l|F7z z(~z`(h~}Z$_$>KJeX`zEo9Vm25N*ciQ;@>Ew^_MEKoTpu$6Qv1=TsS$MEN6OY-T{u zGA2Lj<$0;FTH>~~^LPLKD}Ek|hq6lA73sD8tYB#EaCJ~B^ue7Mfmf#|JROX6Vgmp5 z720e!26BZz9#6ec$=5m}EBW9-+g-@cOcVAHLy#=x8;3c{mg(P*%1X*^_BtiDxo5zC zuPR$zWQiZ4eD1G&eGBkzvorrlF2a>BUEkYXk%I?eb!D_!6PGtf=5JcSdnK6ze8$~)Lt2d^b9%nd2gNXU4$vBYioVR411 zF1cw9)taEHE=a{3;{Em)4aV89=+T^0-uL+*YyMs3zum3a*3~}3%UwWWlb?&e2YaPz z+Cd@{g5+2fi7Gs_H?H){Z!au1#B1o%@m+I6K>l!HS?XbUaAj_IoxD4#5$B)}Nj(Un zpDD*7zrm)+(};RwBOxpVc`J#(Kl>mdTmy>BIsPiLl9{RK{ZR3LcPBA_&GN6dD{w&a zL;&NT1hnnK0od^@&T3vxhzm(GXfpBgx@BA`z1gnbrd|OB*K$`HcOscRbPCDk_B(<2 zoask_@V+z!YkIb-hRpA5J6Zw7+YfnMMv&>Htzj@WH6lhM($^a^?hAr0#Xs znY1v2GtUwSB7<9F`4~JlH9vGEPl+i+FMCgT9gyx;E>>EY35?mp1M-%pWoSfoCg-hg z4tOTg+rq6?!Eg#PnLpviE{xzy@8oxzn3{)#EtmCQMA#nFXrA8Oh5V1%fw$cnQ-@lB zG_vsVLC@H0vyIuu=yggyJk^nYFbd-#OFBVF{WZpdD85NSS)VO1y&(+;{wT-_U`Yw! 
zv)#(uuA@vnjzTZp%!|blJte9xea=iU_(oH1A4kobRJ0x2nFsno(n*4odcaLHX1dQ4 z!Jx}_oEssU3qXu{!U%@w%g>&z_?zXL^e2#nrRSRVA^cBYqX|S;Q_<{cM|b1k?RToy zUePT06C9^1fF7?O`+)08Ddeq2A!ceD!#;MjUpe|@(o1@7yRCt;<6d}YY9y^JjJgt_ z^pdq&=VnO|H#a9Qylm9 zM;heo8vo7<>o;%VErmi2$a%64y`_8YF4uuD{B45z)x*tMdpRgdG@`hGHC|z*Dif~u z=GX}y~{_U?eS61?Tnho7n%V9Yv{w8pLG|#5nGJZJt|s^3-5jV3(w^z+6l=sQ#ZFHf~u4 zcL^dPDn$E`ftQkAkaA@{Rw?h-fB=9_E+n9}JH4w?xUn_fdoaPum#?Q|r9KL_H zgagE0T6Q+uz22cJDvKAlE~|$G2gu|`a(*#YAeb`;#Ix)yiI$vWDYPJI_G$VN zSn(*#(SH{nE%!5rC&b&T%Z=V1)^ zY5k(rbvLd-E^9F)#P@f_o%80rHH|@?Y}jmo4&|zbE>M{)IP8*8paC33GgQJclh~p9 zf7iU8Gqm6@ca~kB>oVTj>a;*~rMsP-n!{M3K|VO=8?RDN9UP3ksUma;Nt2(?Red)z zLgy!?ZFka_v80YI){ETotR8$E4EsVj*rzVm;

?*5y(jBpY@{v`pxiG=JUUwS>Js z?$i$u&=&WL{UTKA)RUa{A`ThGfk4kjdF+*~NWtVBlqz)7o>5D?Qd`yFYTcF|X75vcrcy5O zTaXFSCu^8bV8gMY1nI$K%l79VQ-U$u2X+2#JKE5rum`W8L4rpvJ^P z&~DixXN+WlQGYI$sl3>hNH&&9#WAxWb`%Y4z)bgt*Xb_JlX9B{ZreL>yp4e8xg{Of zUR7cJZ`k;ik6hVUw_Wg_RUH#kp;OfDJae`RVn1MuqpZ%nxV4JvY--~@&RzYTp!RA& z^Mn(_I5$6j$1mxm*nUL0Q}yg%#oe~T!|!lI8<~^#sWj+l%Xa3d2er-*YoI)A9s(U6 z-^oiTE`Gs{D7SsDDbIY;{mpEVm8?l6zBI$xpIsL@-yI;yf%h&5UDzewx53j z!s{rcQTT=aoCG(7x}k*Pq#7 z&aCv|VS@zvCQ4R41u{yJ7){2sA5Ui0x^cWLRR*G=r;cN**~YX__3>59ZIl0yL|6Uo zJTu`)%XKogN)Dzvg|Q+>u2O7=?dx4ZFa=lNzN;f|vMclxjq5KiHX-=qgn%#-^Sbg5 zR^hpdmy2;F8ROo2Z{NSAhtU_-%+p|~{qp*N@T4tVqK$VCWdHngPWIOse!=dvKmlv_ zt4q?G=5>;xYgEm{7Jnf;BUkoe79_zOdtHqqaRr0Yk)^NZy`r<3=d9-+o9CY;#<`;6 zc7#+rZtV$y6feXskM;$`!bDo&u+Vkot*|?4ZJbMy)~%fP0grO5oLZutM-6jVBWW3o z`NwP2Gp3%<7g{hvZE1lkgrWRt_Ms?4i>B&q3=dr;-nKR!@d$Ma+#&En8EEh41$GLb-O~> zD)ja`z){@DLk%10U~C@RB2t$+M~8n(cPA-<{G~;o)ZOWynIib(DEPF~8RJe@LTNr# z6c6G*Iz#wev{YJS8|RoCjZvl3E66i_;xDmV=5+EU(nNL5fw&w*(`XaQ@d%ye26)?y zxN*eX1)xmcnYB94%om8E!O zTc6B6RO?0c)71GBc$k)>Pm10b3S&F2Zk^B|Ux&|Z-q^^%+Rk*VVyK^EK#AQ#jV;i- zu%F-kWXZ)5*p{#Ru2k_>UV0ryb#!Z#p5ey!1EPOLFBGf|1a?@w8^t-P}{-kl3 zKc-px<2n!p&O`qLx_3=d@U=dF-2+yVDNzQd5f(epjSG|t$>yJ}(~A%vh$#j@hvBcC z49F{Cfc|)0>WpP#f}A$k3#Er;>0KC#kha|Mp;B*V{Qg(`&3@AE;CiWA4Cr-~4C1SH zea*s-B(J-VD`^C+Jo}#YHEUA>gd)B0o0&msVZXr7U?{WB%~<|BE*>bF!WiAht-RGp z31#!)o2D#j`kAt20AQ)Y^mRQa&moYy$(0*{8}<#h$Yg>{L~y_9L|ENnSu8z|dVcO1 zh{q5lyFyNRB7y@R6fMA>LoSg!36)+Mt7?_~Vaqu*4wc8jSby&n4ky#^2RW&YXyGGCF~Fi2-g=ci08|OYLgo&t4eNy9^3n(N^yUb)5Mf z?@mN(#5c&vMeCw^22SUUx4kiqW=X%7K~T~jwo5<_Z74BtScV5^24^+bHskZWPl_o` z+p3Lr&JZ6Lig)E``Uh-$d@r=J-yk;|c-dvR-JdLtVm zM*+;raHBRQSLEyR`S1TNASK38osEc~U#pIyjBQufDgx19tfSB2H0!j~P|jrT zaP>3DgU;W*n3G=akX5vyTtB&yF%>FO!nKg<8fndpKLhtEu@q&EyCJMD*UyKf?SMhW zOrIQ?gy2nT5#4E`aQ0c0#KVG>ccuKZp4^Kej{N%c zYc_AM7Kr=Kd<^sP0$&<&t<5wcllBPovgsZ6Y!*dA`%JA7{)d0oa_XnJvrCKBf9jKE zoam1BgkS0gbOm#!?qstcu7^UpFT-B-Jw2+jK5dmw+l-NRAeq0Hwz_lV?63km050mn z;CqrkY+TkbP@a3X1E9yC5no#iQp7~-XxcZu3rL!x(K+7FxqADRp^|hCl*$)8d9V@Oo-2i~IOMSvb!B z*DJloS;Cv25?_B+$6+)B0GX`7#h|PA`51&2U18a)^GJm?8aqZ$fu&po;oE3aJHzR= zK9Zjf)Li3PURiYYD%)u`X3>{)$2rWLFFv9!R1;fxV$GZZ#OL8%Q~LZ0-a=dESahXZ z#g5}RFAk%(tWB6d_FZsus)-6THS{co2IyZ+X`PUqyj|EXM z3$v!1PI~5D|563aIX+%dJp%hQr^xFUV|R40b0SQ4JYda=%_#7h)s_F-AN*(^tF}0O zU8?-9216F9i3Tfay5wk-4`k)x7zcCI+g4kUm))jcp0;~3+J6EYAsPCbE&!=NzZOpZ zp&U&bnn?ok;;4d~sXzPT_ro=dU&myz3v<{CkJ1sJs8%s5ZS~SV#a}j2);5nz(*YFv zmDj8OW3eXh1n69a3&7-xXJ4Sp++`i%U_8fo!-6Nxed=dDo^atgzzDXhUAQe%zeTC7 zb^K3FD|FIWt=_n8+9UYd4EuV5rLMfcCzxx4{2ke5v=>=Rk*ejZY(36ntAeCq;a(+( zIt3XA1d2`ui_Y;6Rz6De~_j3nC`>0GOvIw5uj4RdFJk!nG!-q`9m9H(G zC1NK8Lh-fqY5L&h7A7{3Y<-tvB+Ni(+5w4Ts@9toqT?YEq~)X0uzv@uiEEN>`6u}8 z)`fNxtq$BdD&1qAY zj3@ma(|%3;$5CA6p=*hIzLl+NA>T%QgJv#CUXv8M$`4vlXy_?I(#N>>L4*Wdae8YN zSIl!*zK0nOiPqjM{h=I)WcD%`qkd{=&#P{Cq&7wSr4Tm3#0S!|>Wt!*1 zF`rr(FjDFK!*fq&a#?K{EeqD`A;JYyydHZM%=|Z|t`FxnEhX%~s%a|oNFvks$}8in z>?QlFS1x8`5v)1g#yQtDSUmXC5tBEHW5||w%hZAI7U#7AhcpR*Gu}#q_ zq4I5%bkC*_J2UqxII==<>incd?kvo%4gO9rv{9Ll6Cp%{I5oCh=CjqpAd&fP;j>uZ@y%+JX6SlJ$Y^) zi0v0CxiJ;}5HqmDS%&rYYSSH2e1c^~MWXa8GKSG%y&1!sk74lNS#mqYe%(@r}tG=CbMdxc!p{+Msp}xs+g`fq&D2gJL z8NAD(=&;_M2#1BvYA~^#QuC9NGk2y~AoK`q+~tEv*uW=dc!qbtX%l7EvE%cjX`{}7 zGiE69R49#onpU#tyEpet$ZH?m368PlLoP9HnanI9Vz8Y#F2Xs@@}R;3p^kL}2c67Y z0ejIPGgTIwyBPy^ELJ$n;&x5HZfUsh&4;`igX6T7*NI`3+0QoZS8zf($qx0G6TyI%W;h$=O_!580AJe;$)_s!NV6-cWRVK`i zHF+>7-2LMmGF;m*6aD>xT`&9JmKl78T#gL=W@^3=93z7ViuP0 zbkXS_c2e#1^j8@)Jldp&&a8P_&5ArHO*2fkT!x^qp^#%rxa_xp~} z*ImC4!?00ucnyijd}*ZpJ-mIYtNw?!uQe7|Zx}zcJM|(Q9KflbVEfnD@b!o##)jQe zeEEbaANlh;)9T_T5m4bpr~zex8q07GecG6K@>3s)u->46Ir|63cW 
zklw!5yDw}ZedoXp>e4}?p>o@8aWF9PlPcJKIn13jCcOzxf|<83#i71d&g30gptttA z$Z`_|6+Ggsl9H<3cu`Rvo~rT;d@rRJHjDCCE2l1x5?EN%m)7f}nVll`&|CE~2lZh? zZ7H_PeNkCe$fVq``GJ%{oxZ~vsH_fGitSOT1&wpgw#6Yjp=z{tQSmD!5>O1SlbfcTInRvKI~K;~o(<jM;@e#rwb0>E?P5lrQWb3A!8M8C4{;c$r=#Zzbqv;W+i25n z@}tpa!E#SPi~6#4oQJ-}W(3=~T*@E(^=Zx}Sb&>1#g=Y>?_45@Wg?6yS5`U6NKzZq z;{qj5#*nQ*2X=$&vfI~^yH96Y4APEb*Xn~KtRqQ2ppw^g=gHxxYPgeN({Gm5(oLeu=w z+(Grhke(!|4PtP3##^9g1m13w`%L^TZ{G7Gh!k@6guY2O(3~a6<~D9qQ{!LjbMM4+ z&KiI2Shz^cK1b=JZ7%SJ&oZKe7w5)q(+3KETkZiZjdhCgI!k)%VpI5TSDZtU`kjoU z(dUCYUCr;`!OnvA&FZ>oB^d0rkTc;Zq{QfP05i38M(rddoEZc)My)Xt6JDl9bMOX=;)OA&y(QpA4W_e1&cGE{V+xTkc0kf-sZNhRJhrXE*SlJF}L9T7Y9+j zViS0Kj2c$DT%$bI^aqok;&OcbKxX}paJL1xQ=pa(IE)GTZZrAtTQ*-^d@b25u#if; z8BUFVn+%~!Xcptd^0B2eo2IzJ>6v$Y^kDu+Xs6z%N25lZu(Kje!rx?W?D(hotMJ^7 z&F+J#Ue57=;eBaeb{HGpN_s7 z$>XZH-s!wWOKH=Tbz_d`IOg+j_S~{DDySv>@qOpv=Bum8W37#FT)(y!XU6aJ&do%t z&2!+llEW#TZQ6{PoTcHJJ-Kd%eY7uwjm7P9aPcMMSyeT;TcziK&$-76DxX3vQK(NI z05!x+sI?0doCP~CP;8ArNZuqX;J4(HM>s26b1v$fZ{E#h{ZYQ4viYfV_vw;n-d1Z< zLXk+J_O?zr|Nry|UNgI2Cls7aY1a!xa8 zXmHA~_LpqgVquzlwHx_d_-S0#2xz_s6jy=;2kn^JBY}~%fsrwzM0WXO>TQpnwLgt| zPUna>x*Mf$zm?ORG>E_25A18PNymcH79`sNpW)dX|iIn|GKC#N|gs z6&p|v``ou{tMANM>dTvi=%Y-ov2|ByoG6QJE1yD5zJsY+wuhl`Y>MX~v7(Z;+@9Zr ztBcZ%sXR-m$qNR`_sNFRAhh^;zwTq35H1L}GXS051)5qb>Gye5j=V!%f_Fx0BOst{ zj~RAWQ!%DpIyNkuciDA{lqu=K)E%|43wMdhvj=)!)A`dBLRn9a!sJ%8hju7^OeCKV znK3xa^k(2`=MoxKdkVX^n(V>H>DkzMg~@`zep9D2l&SsVcp%&9O}4p23{{PHKdY0s z_PQA5EbDnTVb2T0a`@m}_LYvs;C%5aZ^ZcivCVUadFUc7_OXt+cgYo%3#<-JlR}+0 zxo@pG$J4sFVBdFDfF!oUrY zdsw*IuttLXWk_KjSW|r+FN>W|STQ>zWEg^o=egzF$lTl+7y;FthkLGXCG`%(Nn=d5 z1|C3#NSMUFA$kAV6eoeN9V|I9$na4sQfX|ImDSKU{Y^F|5$esYwZ;wFKo-M#bXU$E z)$ZVgw0YZusRt(J_8RT!kMNdBJrH$GnyJqV4BzJ{jF0#u2M&Ui6cC?=yx)@exdRqz z<&)z(A^r2mXu)i?jd_>34Ya>@6Cd;|9K)Zo3_Lg0Y8;5uz4s8d9RkAQb^P-WcOqCF zek^U7ZRL-rJDC+i%U6GQy%ybAA{Sn0O?L5BqYYbE{B)7_mjfn>ZOcbJu#aTTJ0ZVc z?}~EsYXOW{5dXPT}M7US)DuJI>ycZd!}lEng216vFhrU)Y%uj zrw{noC~>p{nP?XsM}I3jGWv;(X$C(g2Qa@*v1;Jh{3V_95Y)1ISPmo}_tk0Qd2?*J6c=LJ+U_Nu>&JL#X!cm&F)hZfk46B(Q>WT2$@ zbYSmxsNR#{!x!61FdF$@cUxF2CF$-%!6{C_q^yh=WL2g?)o<3fYdX;M42uerAY9M6 z+FYLqtK@ai3tFv6KFqAyUwbi2Kx^kV>yE)mxPwx;@3o!b{PF9m_5r@SJf*HcE? 
zGZb20wB)MuWALp!ReC#gv8Z6#wkHmh8;^Y;O4AzjyQg0jAM-C8Oj%{#T3-yl8FH{) zjA0u3wSQ>DrT%mp3PrZs^p`v_em9x$++sjs3ur{9igUuC5V@4N@U1J{ED-Q;PxI zSMS2|+w&Xr*QN3kE4+k)BPM%c*Y3W$OUB5FeQvj}ml+Ruo%Ry)C~uS}6mA>tk+4@% z8i7!sf3gS`XV`=b_eTgJL5~TR@ zaD+o<3y}ml+g+FHbwznBNnD{&AoQBD<_~u6h05Rn!=8`5EVup3C5~Rv6FbC{fobX& zYU>xVxf%cu3{Q0Amt%bSUKP}H%3YYSRW*IZ-~Sc}JUOy_*Iz48H?fP|xGYeQ zS{tZ#YA)L3pX7}q`)3Bn?98yq|FW&GZ=Mc|H7f~`1+60T0$;v{x{*oH$(!qk{g3#) z-or;SQb41B?Ughkyx!jsWuW93Q>>-)1uQTv_NCVtYS)6FHKj)B5l9ua#67nZ<;wCb zO~Wz6Wn|$GXtWz_*yJB{rNl&aLY&B_us8|nD;j3l8^7Al;RF6i@`i6Q7VP)}{T2H{ zx1FQ%=I70$AL`el_n%cL;4xufzWXq{Pu2-}X*SCf%FFK`6-cCZ5zgUv4GZ0d<%+^` z_3O&=B%z|?usrthdgOvkB#PyH6Ry&GMFeA()TRs3vkD- zerx9BMfu%pI^01!J9DLYz{}DxDs>f341J`{O1GJ#vc|mSaO3#>i&p***dBrX>K|{; zWq9MK-_e=K0YjN%SGN)-X)6yxQh)MPM+v}_{!Hn{D^|?i%=M!pkI=gTk}0|_#=U!E z6&SvLUFcihi|M?1_zajB-NF3|+2_NGNl5k1pVAxf-VR*e6o}pGyRvY>Dr*uM2Tjdi zVf*c=46+;)%$*>4t zaOG>9^nGscob@xjmsU)HW!n$?tq})WC5T<~Ek@sUMSVM(f3jM;hcU#B@5}wb&0;lq zi$y+~h_!O>x#M<_Mh7HayvP6EzrcDOImfFr1jd&u2vKfYS~=XIO3*F*Rk zgw{qoH)8gY`SBo-ddkFkUQFd=p~{KmBQ-$BGD^oe4BIkkZ@FaH2Ml#byD*aKw5IP+ zTCoAHrn9dkqIFkDhf{2r!mG`#z8K1hMS%$4rycX7V~+{8dJ|w$4qTz0WGh~YKOHuo zE}2qPeA9i8G*qmROJ{pw1&Vc?RhdeHuwY=FbmgCyk=8P`8wHw&^n5r87AK+N`k6&G z>ftR<|N5do-@Shv@4}eC_rNP+(O>D;HU&QU_Fw5N@yA@SvbuVmY4T0fQ`z%Z*4U`4 z*=r=N*?^f02>ue^QNx_s{s73XzzL_FvMV?uPpn=x2PxAC)LlBrt#wc`m|pbf+}SMn z&f}?o&a&P;BgQF>hCNT4`eV$n#~ocPC-*4Ec>bT7zB(Z4r+N73qeJNi5x#_=q;v>^ zN-23L2dD^0N$1g^QUa1n2?$bmv~-DdOSg149LE9g;`4j?ll$!M?CjLePMAtB$+N3t zJyv%4!D&`S4W`zCmeqta&h@1X_9y@26Y8T;`;+i66xU0>sdt{tdVae9fsvC-9L%zy zkKjim$>>2(=x876S;n2+@v@sPNmVi=jsI4S7A{g`?~-$nQUPryka43`AzHdPYP`#! zqh0UwnfJDFa=uff+7^kV08BSV< z=v}DVl`GOzbSfEay8^s?t^9L~vRMKTxW^4n^=i2tN30xbjDNmpXjIuR!m8Wp`BUfA zZ3y^!J2{(i=n<#CbQ!caC0+?2=FmF#97&At_J4fPe7c<9h>^SrKc=ss@uyDhRHF0h zWoJo->VKPO=;g-8d#{aYI{EWec%D$(-UpGK3%@2o8y*NZ)3rm4oQN)goYbIYl|hIL zeA}wQ=5rs8=e!)W%Pw3Wh5^~em*3oQMZSsLSC`j{U1|++(8L#5aE|y$6NnSMDMr~) z$;zGKKSDGYMiV&l5WsN5pSrwLsWN|br)8ufm>+zaz=L{%wg zKUzv(OB*Pc|H>o-g2SMc#PVuEy(A|f1im06qjv7OX?ij*;+fnl|I;&pIUL%|VGF18 z`k>MQK-D*3_wkFf4>kjunX+df8~7<<@jwO}SczoM3;4WgU%N%wP@Hg2K|OXU@8I6v zrG^TsV&QzED^Jm=-0_z{y_+vT8aH2L@(W_RBYX~R7+QY79eEEweH5r^A~bkIZU9{I zrk(Ev#pOECy!xKBL8t596>RFUazUU^JljIG~ z#T;BScXeWpE`O8N^b!ZDk{fBi1`wAHDZTX?1r4S%RqCoFQ>rZ_ho~MN($}*AXY*zZ4OeWy(&_PetVdEJYkk-5C=J>e8iMb$QvNmK-4grzB8_*)YcH$iwNxKAp?8_zVwp|NyAazL}9xuGF0Ej5Wd%9X~G_dqr0;E_3|7nPPxQQT>=?JT zkc~a$qCPh5U$&(fuRQkLI80QvkEJ2q0`Wsvb#Yf^<6rY_>be}J8@hr@3|mkC+L-|L zb#7m!R~xn`;1XtByLf?7xv8>m=yuu`veT`lj(@ zI=`*lrTv!Soa4-IruNOSi_Jr#!I?FUALU|*OxHCwX0Yj*UhHunzX#N8hMmvw?3wI7c6DSQ*60i`5VOg$8pu_{!FlKop^F zJC0aKN8$HEnPbxI{s8f2D?zduAGVvN$y-gcC4GTUe*V$yY<<6~LKA8v5mGu8k2j?m z{Z-k#dJp;2od>-b68bf!gnUJXu_O~3vpeHn#-;I`^hTG?<>`h&p6`Q00sRUU)69Pz zAcf9G7DXE}eryEu>~SS86f#qFHqp7CP~0EalTR5^n?VnQ_@4M&xy zpfj|!=|xUyqS*s_f8qwZd_idUn;+W=4I1=5AzLEsPVB0T0K}8~3LAD64=&Pka$6}| zD3mk950cGZP@>^T+oCSgwLtK^ycxgy`1}UAnJBP$Ogz&0M%;8DvXVSsfXWF%`J_#j zLI&jpf$ z$ydvAzd!Ct`R!g@*3f zUJi#;b^x=`6ZGzGLF&l$@?UWkV^Urp+%vWxNgW6|zW*p}(SZoXI&ke1eS3ENv-fb? zn$68O`Ref6-8qt}=n(0adtCHnwwFqf)n~Ux=~Ey{6Hu-*uVA_k;$>e~%9<;Ai6%by zxc{iO2}J6!u53r8zA!Iv<}^x5=W%Y%Q1w(b@=7~LtxV*GdAg2Co;5ZObwT%lbO0@- zLvU4H0Z|O^w+0X-Kn+B2jDTZ@4)FtH^Sg6Ma-;=Gl}hmCv=&}1>Y42F^0<6Y;UV&X zs<6zQ|JU`qvX-gmUUMuhl$RdH;OEQc^Ax4c3|IzqHq#KCQ~K+ zkc@uN6Em!%i2I6IaK=dV#M0pw;g09M$hz1pN_;q{bZ^-7>49<_c>I4TDIn6|JenZ8lKCAi0r? 
z8l)br=+Ew05o4y8UekT^qjupjatDlf_FQCn8=|-x;gl^Q%J&n_kTxpQyH`9i%u(4H3xOw+&nYmEvJFYR7%BqlNfhmK&~l#zvX(tF3eOL9 z+BLG3DGm8)12&=l^2|wM3y~rVcOu|UxrHX_HJ9J){`f*NsjlbIhEw@r zC?2UPq|f%I8}>t+KiUR0pFYSviT=DNKm#=C7C5|jWFD021)N5H3J=dc#giJ55Jf#O zq7Ph;pp{n4vup}1YdW0#6@8E!2vub?ZR`=36L`ihBv zm;VI$WS;WQJBe?yC5v~%b6bQh^h_YPu^;`FPBS%#)R~;%0l;9>!LBHaq-W{X*bLDV zF5y8+I#vP8h;|)`03+AE3(`eWXu90pI&iGQsiW_&rYJl9$^$>1W$8?GsNvTrK_z*MY zrPtG)L`w=$4#OZB&sgf;lEeEeODqG&0?gqp`a+~6iR-U#c2BdS`oISGi;WgokLwzwmgih2z`*j zp44(ZFtzX|F+|Mr=x|;4by@zoK0!jIyjNQ0jeKZRgHe81ikF8OWt+L_an6W<4ylSr zdd_!Q&8eU;b}Mp1$i`jFSvkgd^PGU7w$wJBnk387CZDYrP)+5als zTax#2CR${6nJec#`{XkQ^7);z(zPYu8s`wQn*cIn^e?fy@>4T-G zJOH?iZ*jRs{&m#05`Je)qs`O6*}X7aQkD_K|c;RfzoKRd&> zzH5xL$Ce$=P1Mzs3$;UnNbzUN3JZkTjSA%fo_ zlml;@9mme|h*7#~&Im$CUxeHDs#qnC{sIuNatw6o`;Odi!>6_7)3$2w;3&?$UNDqCLNImsD)IF@wpA_u@)h>g;k%K> za_g<uqlkb9u3|sf5U3OC#SSMh_)-}HKWb|cG&(kH#fRHT_pFc@&%`j^7LJuMwyuAK z!st0)%Jc}dOoQj`_DHwylKr;inYaR5{HXHH*S>Z4_r4{X^3nKjpLj7&e1|qnx&V{e zI`gj@P#i_F__Z&!^$Ri1^X4E2H-j9)DmIx+=Y9V_eD6h!o0V_2p*@x_|I+6DUV-NM zS)s6l9Nz<|L6BE}fH&xcM9Ynn!L24Ue02XfPv>=mr&(hCL{$0lewK&?NlFPHC>EkG zxnO9bb>Q}}K;|ioEl|!G@m#lTRY8dX;?lH9%idiVaWn9Rgt}>@B++JoExDNG4iy&M z(EO_wbpHIxIF|rAZ?i=>K^)6*pB3>*h_?O>@0}lKhj1o???DIdyywQQq7bUwnW1=e z=wG-*@6O9icY7Ft(=ci^1?=>IpRz%I=s=zJfU1Z5f$?@49)UT8>c?Q8i~BkkIS`V$ zix2TcpRVUrt0u(-*`6eZ1?3YrPJRk~kxsVD%YM3Rm`e;!JMW<9aS%a^?3HySzCTp( z`F=?rqDqA=ik*C?;vkL_BC6mP7qnzQYpAIY&gVWbdcA+<7{t+Ql*GTs@ME1bILIKl z`rs9@I9)*HyDH8mSoKu;Xta*5Bkb~#o;rFYbXXQ$!;Ae!+)CL8vpqC0>1z|+-K!Hn zID4hw4Z5k~o0W^!#~Dt#KR!Ht7#$2hnyHw+&yG-7KactmJYOK>eMSr{k6BLgFq8`N4f+8^_<2ar=qGvR(9XMB-Y%4)#X4L~}A24>HjN z08=bj1J_NUuyQPAJJQ}%m8{15^05P(LT{fabDt0JYx!%S4S~(7L_^at4(75SFe7?5LiGI2>@Bn z_`FN8bai;$(Yl}2T>q$%`@zlz8H$o+fCt)`k|1weOxpOqZ7-b_;%@6 zBfe)}QSEUTY_Z7qSNp#7|XH@AQOJ%0Gs=@S7QKBYw9x%YO` zZT7$lEjIY2u4A%y(_|#izRyoJ8)ux?dL%vb4B}WexT_@@r^G=@1afHXqS?vi^Ic3b zxbF~}U6>TvGO}S)PQYWK^wtrRvTIDR8F$mq@VgckEAT{ z7NlMhGh>b6+6#ZIzpsqy{^;-!Gi2wBwVG$cTlvZ0&+6(GQmPMj@)X9iZ{;zE$8|x4 zeJ--vD4nfuE*#1{(Idj46X*A7hPmI?kR1Wn=Pk6bM7wH1-Hc1NMNUcSz#N{xlG{wJ zhpcCJ)&f4wCf7-s=!*QTHtP#;1>Fk+I$Dd%xlU8DIgW!NnG}aH^Q_$MEJ-lSj=A(a%`?s*=@Z5B*SukrZ_4~rXU`zj3epL60Ls&IjXTTG3BBc@g z#%+=SAloUH2+q&WA?X>D8qHo+sqpnmnS(4S<;kU#cc&wrmnNy9r_iEU`gv6mKUrS{ z!X38rI-P2(baTplhyJr8n}5YQik1dooH-v8xCzWIo8Xz>CS0j_D!0gjXkU=j_n9z% zk(ncIF-zXt{0atk|NntGyx(f;=iaelC-*ANPerDdR8Kyg)WsyIuQp0O5e_z|3^sR+ zOU^VL+Bn-0EAYBP{56lCHI~Lo^WxWT*~;`>1jhE^p^%TsFk5Vt2)mlx3u=1<@5kBB zW@tJQxunmv2YKls)Hz(MMJadTlyGQj+MFTu0ors99FGn5D(+B3Zo_Dr`$O@eY4Ud)% zAHhabul}O2@4Q?yrJuwLrN&lCq#1e%A@V3reR6qXwF-b_!8fZu-q$8_1O$~YIK38QC>DNcK!&xx;{dEwDAbHt2ubZVnEq$B3N#YRBF3R5hwB6BO{47$nMwuBQ7l+b( z5XQJ(y^vN7N2Q2COp4-f6S83gCF+XC!_i;}NGgG$uHev)c4?E|$wft#!HIBib3rJ58d=KRB)gnpEW(y%kn3gC z;v^9PQjB|r1{F5)?|H_^_Vuj~)2Mv2emFh$|ns z3}wRk#p93kUc>uOX}9v{=u~zym1oGBt{=7?BS3;iQ76LSMTz$hrU;bFq4Y67k3L@c; z*lY$;As_-NjIkXgSso{Ey^+kREow||bKNKnZyrY;ek^&0cRCe^{N=%BTeN&wV^83@ zv)2D+eRl7BGMspLb8i#NGKi#Qq&KmXA*&Bdap8s>>QFZ@M}cI~4atfAzGL>j7yre2 zRcpn+i@jQvW$|`pOZvfLqG{Uc%4RduRZwlOI(%oQu|I}iB+y0ZA2ZvYZHoZNX#5~6 znOr`k*HfjKOBj(p&)RTg`;@wJ_&b)A(^$Bmq0uW_x2pUvV%f0C@< z1#;Ccp)(#O=*jx+#q&?gel`lPlCb+ZbKBExdmB@RY|`h{mZ#@sMU@b_?O`aBxvOaVL|ey9{MAD*5O7ee=kYR&#u+AX-Bm0c4PV5 zqcZFLxSxo;A+69>9yGTCxBX6ln05CF;dSw@TQlT}dv zkm|Z8Odnd**@_e^rjJmMsip6RVUPcscj+d*NiH4*L6JR^Hba1K$?ZO4Dz3 zPO%m9&r8!bEEhi$>%5z?JhLb9p@EMTkxIJk%yQ}Wbp^y|c$TX$Lh(1pjj7_leSFVafV3or1*^U9=iqdo$d7OC!pFErtV#>;t## zczBI@hSD5U1O1xQf_*RuTu^CCoSD<5N?cH@l>>_rXve8Z8 z+!N^NzRH(VAVjfF#Drr88q$9^Q0sBn5~XBmLI-FqBWmXZtAlziOm{I8NAGeL z=*zYu{qCVV(gIdek+_}=zn-G6SG>6U)SM=H@Z`uI(SSY)VLmXSzr6T+_u`&I4)A*x 
z{*qB!|Ag&CxIGzW{>$q-$6ox5+A-qqyj1+L!SgSVBh(bWd~Y7J6#8QSy2IJ}G=7g1 z<^*)0!e$#djg9+iD`W)r<#OQZX-{4dO^Dek^kcVs;}>vpsAKDZ9k-DzQ4d?<)i~IH zrC{>kfw2S8y;?1>8z^7Q3=r+4mOZ`#h zx`!}=DpARj!S2l=yL@5EHWzuTye=bMdMAR;cIRXzpvRS;BI+5MB}<=$qA|T9Mi4? zcqlzS%A}R>(d(-NbakofYDrL?{YyNMm_)1I{g0-udnhF9s40v#)K`1el zLTo_<_ye&+x;c|3DKY1WhL3&^+Z@W}>cuD1lneV!qMPrF1n$e1IPwB5k^v$ z4ofh6&PpKh2fJ_O?FAAx@3clP0n55Kx0>G7y9_WfOV%DSAE!}PIxthM-FC+i=#h)V zU$Y)>2YUHE-~VZYY<0u@2qcvk-l7@E=ji<_{!6cQy}8D5D0Aim;zT7FN_^zUyb>B&E9=_F%qlZf(&=-B?x~e&%U(?ARFfJBOd#)cVBJc zhK@vYxJtr3%=*zNmgQve@xA)Ox*9C=Z3mo1s=m8k)J+mc zy@wnLwZ*@M1Rl7xhHz||Zc}>vf8wfW9*2@JUxr+12L()r_m(ADz!Kz33a@4DxL3TH^5j;wX9U zHw_IDj(X|hfNqdX*6f(V;hnkCT%BZ7aC08gkGl}u^7-0dCF8xhEln%`Pf4hQuq$t1 zjX#&fSQ{){)eRCvt;pY%D7@GnXTItWdRXK@fGYQ-HT$7mdbMfE=ktyw>&Aj%7RHp| z*rQzTqUAZt9x!DtIMu~Et}WerFGQ+hPpT84=`U|GQBFLM>pz#a@o{x1`t)Isw8q>X zBH<%EajHWd;9+lcb#<~M)cIixhUaL5ALi9vbtf7ki@OJ*2wlU0Q19$43baA3)cjFX z_LvVF#6R<2;91?pO?+r}rs)v-FyltDt}hZ{q_0jHbUWDvD?{ z)($-ioB69`^+AljFjtw%4RIiaZ~ozvob=?=+UB=6zZmkuCxZ^FCs%!EEXP4+6{(lS z8_snszr)=@6SgdSJA*P$xkizz0>(*pNiPW*(LxvK3ccsv*#p{F_`&@9hfHm2n5FKe zL={kiH1-lz7Z(w0J3E%ecc*)Ak`wu+k?~BDSgxdi;NHZ2+oTg0hgOUldL&OuLj9d* zV$`8|$wa3Q-+R8ifU)!`58Oz@A!bvvH zI4;bSBQrd!BzC(+FyEmdtm|YWEe*b`JqX7<9*Es(=uEm833RDD+P73dTE5A0s72}I zi#MukwML+94U#@K@&DZX)&KTpnIM*?FOC2{;XTw=L@6338TPiyW(>qZvrS1ty3^#b zwH41EhUTAt88PIgyn&+={W><)rRD+&W+l+$A(x!3=oWx1DLK?})H!ee6ZB7gY$`umgQW?;@E_FTwIjYZ6 z7wHP_@80@qNx-5T9(r5;G^m_Ra^hbSCt3HD% zG52*l(7Eq7to-Tl5Ow({evn4vj$X)xbh(C8oK6mNoz~r2vUs?>73K7-Q1sNVr@rwn z4Kz>C1bDgQCZd>ma@)NFK;2l1)jLR=F(fo2b(IFJ1AospmC!GMOE9hJYn1+_9t1GLN3dxeN0GAl4P zZ!HX_`?rsRE#GBL2`1$c`gO{)y3sAilCrKlp`K>xQ+}*1!VF2EIS$@_!Jo$7{N__3 z8h?EoSsCVJ-;NaJ@TDxJI(icQn&!=$+|Bgq8h%Ezr26m?Lt4^idG~YaV7Q1P(ag6M zL(j(@GJfV!V2TYfJwH!w>kp7Zr?E_?9y6EGV7g8VhEWCOc(iAv#pepCOP7NjaCxdvkhXqJxOBl0Vp7yJh{` z4DUb2x$U_Y@2kJ9s%z!xGuE+s`><)W2ziYlw(o467HtR{3CMq+yHQvx4-eCi>p+a-5&2)%80V%-h0uQCK zo#DHmeqnhe7&`(Hx{pZ^YkV|WTkWplCs`OSa@rk{t)9KO++T0q{Bf*cVnV%*JLiF% z=##u5a?{*OX#KcJQP57e2xZ}#&k3-w^VY7%UruyNMBhI0E5o3d%p30BqFMjh#v9e@ z#G2n+9ERZJyY?7;lhSS2LT>!o(0yU3T3?zpf{Ae)-W2^Xp?Xh2tNT<3DNl~Ceq z^3kg*?Ar9TvU9mw;aHz~ZQy$4R79l+tt_5!jd%l(1lVT{Skw1nPJ#1iw+=@yF#Wa^ zS5R7B%1rP#%2`}i@xZLciq=vNJOG=A`A$s^2?$dukv%A@FR zNK3$&242%=z8LxG(y4hu;`zP!aoBOi2P)ea^>3*WI3^=b=gIJ8_PTV6l~T$#Wi`-v zxaG?ScgmGgy2sc?%)~4ZF^0jkcqP48DK&K)+l5 zwuDp?)f2R8_E$ni0x-_zCCBY+dS27P6=r&tzp;6nbG*i8EXGeO*?0Msy;&8^ApkUL z3x+=sWL?$%?`m7zCe=m+25NG}2*v%;M(c+rs4=^Zs*^?mKQ@J-yOrAy^T zUM|q1kZSh6^(0_Tm-ohIgSzBuE-QZht*EW`nJ%rASCR{3x5pM9vUym)$<_pC>BjE! 
zqFV)5Wq*vdGGSBajE8~fWOTd;dB{1wCsELXOmpOV^|@F@JjoCK!20quT4|hqq2N(T zKD04bXChwT9xD+1A`Uu=UwyP)dU*rnvI9Y?r&G;}J50kNP0%l$}5bqLcMHU`(6CJP-7k#=qG8ca|tV8}CH?WUb zv~6c5kM2gq%%7|JU;JxbCsbF)6{0Bw?yYW&fgmdVb)g-Muq2B!{*nj6$iAllK#8k~|*YJGjmxN2%aYgxqqGu9EQ zf&ZLL72`X*Si(T_9O5T$jR}dK*q4RCxwZs<6{o{?9y;`n}sGkPuCO?>cnj>_$yWOd;7fQ`+<`VKFmfYeoT!7)1E#FruG>58^Fzuj7h z=^~sd;93w}_h8%&xM}Oe7AV530vR`ZS-W^AFt;}a-tBR6yr#N1N6Qewhlh$k(>Kk> z79VVUUI>9N4jioEi86;!2~P3JyM{%gOTqYqMjE>_R z)2~W6oEXb`Ga^K0rjSbB^bzRT`5oT$dg>DeEG&3>l0>BcZ+43yyQ7ANoQ?Q10-_k%xdnS1- zX50lB!>l!zmcOP66rueFanWLM^c(Hsl!I|f%Ge4`w4TG0PYKF@7~&WWV%a^RC!0Fe zL&x{kCYalzjnO%i;g!J)Xk;(JreM$k&f4cLqG@(zXr*y=#q%&v_z8z%ffutsZO**a zGf-VP=ejOH?^R5{2&_Wg%{*<8%g5;AzhKi)6`ByRlW*;`2`Y}jQ);V>^ZqHOY8jBY zp-)@M995uQ5htbYENU?dF9X_;w@FFY7>W2!#;uT#eaSO!iF{X-@SLT|fWdKVjJUo0DkOysY zgN|4faBR)u91<0`GB+vf)BE0B39LW->z2kr=Kp(}tl=X*;MV%vE50~`H2ydbiJhAf z5-g4M_~wv5_Pg_MRUwteK5y^tlD-FBOQc{)q;AxmPvoL_p?DciIYCIS=3&eI!sWIt z=EDa6pTtf6UBDXJyrt3id~lX37{{?ZEAZbp+r>On>{y$V-j1L29A3C08Bf8n^7Rl5 z(@d}jia;I}2%`CEWbH=#V7JfekPchq{`R2Gay+SX^X!OA3ja1Wn5zKxzqxvgU(23b z9(^0!g*t-z>pI~g^n!0c_-v4zMu9zvR}jHffd=g@Up;cQHj5_OP1U6n)O(&x;7Cue^LlV&mIP70c2E=3dKzP7n57; z=8#I-0tw~8hYOo{`_cQ`8T7--WpPtM*<~hb7#C=G8vT6V^h+>@?Y$#XoPmey@I?1m zUButRI6gL8;%12gdm$KGz%1BN@&SaUyT!CQbu;mP_ zyI?{x%YoULRPiysnc_|5$FJNzW0DPDIhH@fE$1k|FaP5eO90pldh8y#K0LoPVD^ST zj;==gnI$$gp`*EmEcaoG9Ij4K&?L(#kH@6oY{m1?y~nf?BA_HLhjb>>mc{D)3(OsF zu8Wsaa!>Ucnqu72R*ZN6rM)ZSA+0m*X<$r!h+^{p4N7QR0h7|0;Zr~A2ML~(V%`ZX zb!EKgtES3+RrpJxxL2->`3UR3e41^if=rxAoCL3=kJ_MW5denY)W7oZ?Mc* zRXKxW0u(4-wI(}awq*@tNOLe{peky4j$G7wcT?JKtnsZgBaBTSOvnkr^HLIL;301H z&&!?@+6_7Rof|JdKn?H|{k9C+oXG#i-voeYs!RSGC)iw3fFyGaBXXV(O|9I#gq7Y; z6b4{z0g^n5vJnAnh-WjRzUCT7Wm{`s+(sM?Ap*2tpTiWv4HkF=7GS?==_P){YvvaI zRI$n1cP-pa0pJvnKM1q9wBqm|Xypp}Z} z$d3V6XzQJdM=hvqzs^-pIjCs;j@?Bosn_)==H*>sPQ&j_vKO3l!9uyDUI5N%7DhEFX zT{HEErs>u)l;g+vkEA0$y^+LOcyvMqWM$RlCQd%6#!e{2Dy*#{KEA+iW2P{1<3b~k z;)_*cuvCs;=Nx_MY4CYGC5m2pTVjjKmMKt}s0i{0lz7_Xgm%j>)k)k+C)9Mnw@5ot zAtkYkJZ&BGnpzCeswEf2_m02S>2YFWcL;F>LJS|6m-hk~#4`4q-rmR@wr?eFzMZ-8 zSXD$34-^HK4CXvTR;Uh@S|@RSe@zqh>C)4am$8Dy!;Z^kviNGu62#CPc3pKtY7g7wpL{G(KdIj{=2J3f`}MI~Obg(fvcslcP9Fmb-Q2BU47{t2Ki?@5MGEQN6|buC3^ zlCi~+8U}QK4cWIx!x~a`JIcNs+q#d#%4KjLC_@N!l&!*j*2~qUv&W^AeN*#qW|5MZ zkI~2X@6a4Q zt&ExinIAJN$4^xDmK%tvKvkBen4x!<0Xwc@4>whgKQ@nl8+i1iCO~}sOEm3^0>@3n zJq&|an<~^z1`u@mXc?4OCW_mH69Vd&`+p2RGeepbSo*s?X$foeBW@4PqH-zT6!0-)2NJDG;Zuo{*$q z5Pj5JIv(|tq^beh=>EK^g7SaEPew&M1sV{mm-qvq3O49uzPMaJKv!Ka1pTRbm(Qfa zr1+*nFhyypApTB);^s5|WAj9JzwOhP8QYEu%DFQsFZ#vs;j57dLkK@;AZMk>@cHUa z7w5nLoV}ovWY*zqc;{K~JDnyvSk@_VdS&W+kE`FvQ+7V~LoMz3au5w+e~f%X9Vm>q zgCh-UISpihGMYIYWL6PWg)DDtW<)}d-ZYPWxxPNmF*;HX5CIVBfF!R1JtHFTx)-o; zj&EaF15?BDuV?TA+%rG}Y_3RM0)wncR?!##q5mtar=cp-TWZG~)b;tOc6m7DmD z#`Qvc?w-%Ac|~6dLqNSPy!Q{JI)zAE(!4bNVbJET8`9gO+s99o+s5R>%plkOVf{Ox zE&ed#=B|?OQ?3qjfx`Ycfc+jkq9XjG1~rHhpDAWm*(ZQQK#w-O_g^{4@B*)pSY zO9(1BBf0z>3H0mNL#7 zXcpZBxqqMLXZ-(88?pKB8#kl^P%&9jz*&=)GRmoN-EC^@B}xtgvS)EV0lQ{g{U6jj z!(Kh7xEUaM6C!_OC0K5djo9bajsd{z)RS1*RCfL=+bPEanue)|Q77VbR{aTL^F@te--w5OZcA6#~qW3bRm)idXv=kXPkv5$r zr8SS%3u%Uc4KUk7-I9AxQ!dP1CdS?I{aFDrKGSJM_bC66%jLD-*V_Y5{QiI+uC9O3 zN}@mEE7u@t&3FuGBzMBqH)WIO2-nOEK=S}-yS#s`G;5+Q)nw&$ndRRvLGX{ zss9>pXVOEqz^N)C88Rqir~~7j&r`kw=PAwDUmOlofHW@wu<}j*RyNO^OdhbG(Xr2| z>JYF4o99O6j>2gFPtWhpd4{YmgvAtN1$hhFM08cmR+Q_Lx7DXx^eohz>9Kaat#j zqLzJ?Zt`{+$5t`_UBZIY7wZ6u7yW6`-!E}-6&1iqbKJ{(v}MU> znZR&ixZ8ImjYTJJ+<`EFuJRBUf1QZtJWF>NS#4?8ow8bp8K|cX834tJON7g{hnSb$ z^5R5lACD9K6I=diz*A>HYjN6YIAY<_zQVduZZr~q0b}NdpoGC}WDlfG<@tim1Mgah 
z_xeQ~G#?F9xq##_NEEQk{R2pw$MaJU?9pdInQ(fW#1L4xvg(+H+wC=6Ak#sadI|m> z1{3{{GeY>U-JgrwmlzS4a4w}3PcpoDlNAz9^iLW@r2lvoG%pk+e#VGn(VfY zVaNEZA^zil+5KTav*bH%X)U;P>8o2_8ciFFm}sCebIJsUqxn{7S;)YD0K*yjA&*M4 zbu>>DLIwoy#ud^OKm>^4SI^LA8>bb#xDe3W01qO~!z%1hX*5gLzGH9Iyrp8YVv{!7 zJ9qA|FxDpWSbz(n2=oEy&pGhDI{Ib)z(=7F8PE&_!VfIoD?CuPc6;S{dng3_X{c&F JC{{KL{C{3veEk3b literal 0 HcmV?d00001 diff --git a/datahub-web-react/public/browserconfig.xml b/datahub-web-react/public/browserconfig.xml new file mode 100644 index 0000000000000..0f5fd50ca7ce4 --- /dev/null +++ b/datahub-web-react/public/browserconfig.xml @@ -0,0 +1,9 @@ + + + + + + #020d10 + + + diff --git a/datahub-web-react/public/manifest.json b/datahub-web-react/public/manifest.json index 35dad30b4bb57..1ff1cb2a1f269 100644 --- a/datahub-web-react/public/manifest.json +++ b/datahub-web-react/public/manifest.json @@ -3,7 +3,7 @@ "name": "DataHub", "icons": [ { - "src": "/assets/favicon.ico", + "src": "/assets/icons/favicon.ico", "sizes": "64x64 32x32 24x24 16x16", "type": "image/x-icon" } diff --git a/docker/build.gradle b/docker/build.gradle index 8b71ff1f6f06b..cc95e12f26f76 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -61,6 +61,7 @@ dockerCompose { composeAdditionalArgs = ['--profile', 'quickstart-consumers'] environment.put 'DATAHUB_VERSION', "v${version}" + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally useComposeFiles = ['profiles/docker-compose.yml'] projectName = 'datahub' @@ -78,6 +79,7 @@ dockerCompose { composeAdditionalArgs = ['--profile', 'quickstart-postgres'] environment.put 'DATAHUB_VERSION', "v${version}" + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally useComposeFiles = ['profiles/docker-compose.yml'] projectName = 'datahub' @@ -97,6 +99,7 @@ dockerCompose { environment.put "ACTIONS_VERSION", "v${version}-slim" environment.put "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions' environment.put "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml' + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally useComposeFiles = ['profiles/docker-compose.yml'] projectName = 'datahub' @@ -113,6 +116,8 @@ dockerCompose { isRequiredBy(tasks.named('quickstartDebug')) composeAdditionalArgs = ['--profile', 'debug'] + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally + useComposeFiles = ['profiles/docker-compose.yml'] projectName = 'datahub' projectNamePrefix = '' diff --git a/docker/datahub-ingestion-base/smoke.Dockerfile b/docker/datahub-ingestion-base/smoke.Dockerfile index 15dc46ae5b882..5c6738720e05e 100644 --- a/docker/datahub-ingestion-base/smoke.Dockerfile +++ b/docker/datahub-ingestion-base/smoke.Dockerfile @@ -15,12 +15,12 @@ RUN apt-get update && apt-get install -y \ xauth \ xvfb -RUN DEBIAN_FRONTEND=noninteractive apt-get install -y openjdk-11-jdk +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y openjdk-17-jdk COPY . 
/datahub-src ARG RELEASE_VERSION RUN cd /datahub-src/metadata-ingestion && \ - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ cat src/datahub/__init__.py && \ cd ../ && \ ./gradlew :metadata-ingestion:installAll diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 2898a363a0a18..4f0e66251b154 100644 --- a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -13,8 +13,8 @@ COPY ./metadata-ingestion-modules/airflow-plugin /datahub-ingestion/airflow-plug ARG RELEASE_VERSION WORKDIR /datahub-ingestion -RUN sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ +RUN sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ cat src/datahub/__init__.py && \ chown -R datahub /datahub-ingestion diff --git a/docker/datahub-ingestion/Dockerfile-slim-only b/docker/datahub-ingestion/Dockerfile-slim-only index 4112f470c25be..24412958a2d08 100644 --- a/docker/datahub-ingestion/Dockerfile-slim-only +++ b/docker/datahub-ingestion/Dockerfile-slim-only @@ -10,7 +10,7 @@ COPY ./metadata-ingestion /datahub-ingestion ARG RELEASE_VERSION WORKDIR /datahub-ingestion -RUN sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ +RUN sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ cat src/datahub/__init__.py && \ chown -R datahub /datahub-ingestion diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index ea64f94f88727..fdaf9ddbaf813 100644 --- a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -44,9 +44,9 @@ FROM base AS dev-install # See this excellent thread https://github.com/docker/cli/issues/1134 FROM ${APP_ENV}-install AS final + CMD if [ "$ELASTICSEARCH_USE_SSL" == "true" ]; then ELASTICSEARCH_PROTOCOL=https; else ELASTICSEARCH_PROTOCOL=http; fi \ && if [[ -n "$ELASTICSEARCH_USERNAME" ]]; then ELASTICSEARCH_HTTP_HEADERS="Authorization: Basic $(echo -ne "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" | base64)"; else ELASTICSEARCH_HTTP_HEADERS="Accept: */*"; fi \ && if [[ "$SKIP_ELASTICSEARCH_CHECK" != "true" ]]; then \ dockerize -wait $ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT -wait-http-header "${ELASTICSEARCH_HTTP_HEADERS}" -timeout 120s /create-indices.sh; \ else /create-indices.sh; fi - diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml index f863dff7a59c5..769bce3105a7f 100644 --- a/docker/profiles/docker-compose.gms.yml +++ b/docker/profiles/docker-compose.gms.yml @@ -64,6 +64,8 @@ x-datahub-system-update-service: &datahub-system-update-service SCHEMA_REGISTRY_SYSTEM_UPDATE: ${SCHEMA_REGISTRY_SYSTEM_UPDATE:-true} SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS: ${SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS:-true} 
    SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION: ${SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION:-true}
+  volumes:
+    - ${HOME}/.datahub/plugins:/etc/datahub/plugins
 
 x-datahub-system-update-service-dev: &datahub-system-update-service-dev
   <<: *datahub-system-update-service
@@ -99,6 +101,8 @@ x-datahub-gms-service: &datahub-gms-service
       timeout: 5s
   volumes:
     - ${HOME}/.datahub/plugins:/etc/datahub/plugins
+  labels:
+    io.datahubproject.datahub.component: "gms"
 
 x-datahub-gms-service-dev: &datahub-gms-service-dev
   <<: *datahub-gms-service
diff --git a/docs-website/graphql/generateGraphQLSchema.sh b/docs-website/graphql/generateGraphQLSchema.sh
index 4e41c5dfbfacd..c6d7ec528b613 100755
--- a/docs-website/graphql/generateGraphQLSchema.sh
+++ b/docs-website/graphql/generateGraphQLSchema.sh
@@ -16,3 +16,5 @@ cat ../../datahub-graphql-core/src/main/resources/tests.graphql >> combined.grap
 cat ../../datahub-graphql-core/src/main/resources/timeline.graphql >> combined.graphql
 cat ../../datahub-graphql-core/src/main/resources/step.graphql >> combined.graphql
 cat ../../datahub-graphql-core/src/main/resources/lineage.graphql >> combined.graphql
+cat ../../datahub-graphql-core/src/main/resources/properties.graphql >> combined.graphql
+cat ../../datahub-graphql-core/src/main/resources/forms.graphql >> combined.graphql
\ No newline at end of file
diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js
index 2b8873c678778..1e6d8bec01813 100644
--- a/docs-website/sidebars.js
+++ b/docs-website/sidebars.js
@@ -561,9 +561,18 @@ module.exports = {
       ],
     },
     {
-      type: "doc",
-      label: "OpenAPI",
-      id: "docs/api/openapi/openapi-usage-guide",
+      OpenAPI: [
+        {
+          type: "doc",
+          label: "OpenAPI",
+          id: "docs/api/openapi/openapi-usage-guide",
+        },
+        {
+          type: "doc",
+          label: "Structured Properties",
+          id: "docs/api/openapi/openapi-structured-properties",
+        },
+      ],
     },
     "docs/dev-guides/timeline",
     {
@@ -768,6 +777,7 @@ module.exports = {
 //        "docs/how/add-user-data",
 //        "docs/_feature-guide-template"
 //      - "metadata-service/services/README"
+//      - "metadata-ingestion/examples/structured_properties/README"
 //     ],
 ],
};
diff --git a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss
index 3d30c65f89539..862fb04c8370b 100644
--- a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss
+++ b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss
@@ -26,4 +26,4 @@
     background-image: linear-gradient(to right, #1890ff 0%, #48DBFB 100%);
     background-origin: border-box;
   }
- }
\ No newline at end of file
+ }
diff --git a/docs/api/openapi/openapi-structured-properties.md b/docs/api/openapi/openapi-structured-properties.md
new file mode 100644
index 0000000000000..521ce8789db0d
--- /dev/null
+++ b/docs/api/openapi/openapi-structured-properties.md
@@ -0,0 +1,284 @@
+# Structured Properties - DataHub OpenAPI v2 Guide
+
+This guide walks through the process of creating and using a Structured Property with the `v2` version
+of the DataHub OpenAPI implementation. Note that this refers to DataHub's OpenAPI version and not the version of OpenAPI itself.
+
+Requirements:
+* curl
+* jq
+
+## Structured Property Definition
+
+Before a structured property can be added to an entity, it must first be defined. Here is an example
+structured property being created against a local quickstart instance.
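+
+Note that the property URN appears percent-encoded whenever it is used as a path segment in the URLs below
+(`:` becomes `%3A`, and so on). If Python 3 is available locally, one convenient way to produce the encoded
+form is the following one-liner (an illustrative helper, not part of DataHub itself):
+
+```shell
+# Percent-encode a structured property urn for use in an OpenAPI request path
+python3 -c "import urllib.parse; print(urllib.parse.quote('urn:li:structuredProperty:my.test.MyProperty01', safe=''))"
+# urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty01
+```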
+
+### Create Property Definition
+
+Example Request:
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty01/propertyDefinition' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+    "qualifiedName": "my.test.MyProperty01",
+    "displayName": "MyProperty01",
+    "valueType": "urn:li:dataType:datahub.string",
+    "allowedValues": [
+      {
+        "value": {"string": "foo"},
+        "description": "test foo value"
+      },
+      {
+        "value": {"string": "bar"},
+        "description": "test bar value"
+      }
+    ],
+    "cardinality": "SINGLE",
+    "entityTypes": [
+      "urn:li:entityType:datahub.dataset"
+    ],
+    "description": "test description"
+}' | jq
+```
+
+### Read Property Definition
+
+Example Request:
+
+```shell
+curl -X 'GET' -v \
+  'http://localhost:8080/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty01/propertyDefinition' \
+  -H 'accept: application/json' | jq
+```
+
+Example Response:
+
+```json
+{
+  "value": {
+    "allowedValues": [
+      {
+        "value": {
+          "string": "foo"
+        },
+        "description": "test foo value"
+      },
+      {
+        "value": {
+          "string": "bar"
+        },
+        "description": "test bar value"
+      }
+    ],
+    "qualifiedName": "my.test.MyProperty01",
+    "displayName": "MyProperty01",
+    "valueType": "urn:li:dataType:datahub.string",
+    "description": "test description",
+    "entityTypes": [
+      "urn:li:entityType:datahub.dataset"
+    ],
+    "cardinality": "SINGLE"
+  }
+}
+```
+
+### Delete Property Definition
+
+⚠ **Not Implemented** ⚠
+
+## Applying Structured Properties
+
+Structured Properties can now be added to entities that have the `structuredProperties` aspect. In the following
+examples we'll attach and remove properties on an example dataset entity with urn `urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)`.
+
+### Set Structured Property Values
+
+This will set/replace all structured properties on the entity. See the `PATCH` operations below to add/remove a single property.
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "properties": [
+    {
+      "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01",
+      "values": [
+        {"string": "foo"}
+      ]
+    }
+  ]
+}' | jq
+```
+
+### Patch Structured Property Value
+
+For this example, we'll create a second structured property and apply both properties to the same
+dataset used previously. After this, your system should include both `my.test.MyProperty01` and `my.test.MyProperty02`.
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty02/propertyDefinition' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+    "qualifiedName": "my.test.MyProperty02",
+    "displayName": "MyProperty02",
+    "valueType": "urn:li:dataType:datahub.string",
+    "allowedValues": [
+      {
+        "value": {"string": "foo2"},
+        "description": "test foo2 value"
+      },
+      {
+        "value": {"string": "bar2"},
+        "description": "test bar2 value"
+      }
+    ],
+    "cardinality": "SINGLE",
+    "entityTypes": [
+      "urn:li:entityType:datahub.dataset"
+    ]
+}' | jq
+```
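+
+As a quick sanity check, the new definition can be read back with the same `GET` endpoint shown above for
+`my.test.MyProperty01`, just pointed at the second property's URN (the `jq` filter here is only illustrative):
+
+```shell
+# Fetch the definition we just created and pull out its qualified name
+curl -s 'http://localhost:8080/openapi/v2/entity/structuredProperty/urn%3Ali%3AstructuredProperty%3Amy.test.MyProperty02/propertyDefinition' \
+  -H 'accept: application/json' | jq '.value.qualifiedName'
+```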
+
+This command will attach one of each of the two properties to our test dataset `urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)`:
+
+```shell
+curl -X 'POST' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "properties": [
+    {
+      "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01",
+      "values": [
+        {"string": "foo"}
+      ]
+    },
+    {
+      "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty02",
+      "values": [
+        {"string": "bar2"}
+      ]
+    }
+  ]
+}' | jq
+```
+
+#### Remove Structured Property Value
+
+The expected state of our test dataset now includes 2 structured properties. We'd like to remove the first one and preserve
+the second property.
+
+```shell
+curl -X 'PATCH' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json-patch+json' \
+  -d '{
+    "patch": [
+      {
+        "op": "remove",
+        "path": "/properties/urn:li:structuredProperty:my.test.MyProperty01"
+      }
+    ],
+    "arrayPrimaryKeys": {
+      "properties": [
+        "propertyUrn"
+      ]
+    }
+  }' | jq
+```
+
+The response will show that the expected property has been removed.
+
+```json
+{
+  "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)",
+  "aspects": {
+    "structuredProperties": {
+      "value": {
+        "properties": [
+          {
+            "values": [
+              {
+                "string": "bar2"
+              }
+            ],
+            "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty02"
+          }
+        ]
+      }
+    }
+  }
+}
+```
+
+#### Add Structured Property Value
+
+In this example, we'll add the first property back with a different value, preserving the existing property.
+
+```shell
+curl -X 'PATCH' -v \
+  'http://localhost:8080/openapi/v2/entity/dataset/urn%3Ali%3Adataset%3A%28urn%3Ali%3AdataPlatform%3Ahive%2CSampleHiveDataset%2CPROD%29/structuredProperties' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json-patch+json' \
+  -d '{
+    "patch": [
+      {
+        "op": "add",
+        "path": "/properties/urn:li:structuredProperty:my.test.MyProperty01",
+        "value": {
+          "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01",
+          "values": [
+            {
+              "string": "bar"
+            }
+          ]
+        }
+      }
+    ],
+    "arrayPrimaryKeys": {
+      "properties": [
+        "propertyUrn"
+      ]
+    }
+  }' | jq
+```
+
+The response shows that the property was re-added with the new value `bar` instead of the previous value `foo`.
+ +```json +{ + "urn": "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)", + "aspects": { + "structuredProperties": { + "value": { + "properties": [ + { + "values": [ + { + "string": "bar2" + } + ], + "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty02" + }, + { + "values": [ + { + "string": "bar" + } + ], + "propertyUrn": "urn:li:structuredProperty:my.test.MyProperty01" + } + ] + } + } + } +} +``` diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java index 83e40b22a5e44..453eddd3ae56c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java @@ -1,7 +1,6 @@ package com.linkedin.metadata.aspect.batch; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; import java.util.HashSet; @@ -33,14 +32,12 @@ default List getMCPItems() { } Pair>, List> toUpsertBatchItems( - Map> latestAspects, - EntityRegistry entityRegistry, - AspectRetriever aspectRetriever); + Map> latestAspects, AspectRetriever aspectRetriever); default Stream applyMCPSideEffects( - List items, EntityRegistry entityRegistry, AspectRetriever aspectRetriever) { - return entityRegistry.getAllMCPSideEffects().stream() - .flatMap(mcpSideEffect -> mcpSideEffect.apply(items, entityRegistry, aspectRetriever)); + List items, AspectRetriever aspectRetriever) { + return aspectRetriever.getEntityRegistry().getAllMCPSideEffects().stream() + .flatMap(mcpSideEffect -> mcpSideEffect.apply(items, aspectRetriever)); } default boolean containsDuplicateAspects() { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java index bb5e0ac53934a..dd0d0ec68dac6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPBatchItem.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.aspect.batch; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.mxe.MetadataChangeProposal; import javax.annotation.Nullable; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java index f790c12ee5335..e9e30f7f2bd96 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/PatchItem.java @@ -3,7 +3,6 @@ import com.github.fge.jsonpatch.Patch; import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; /** * A change proposal represented as a patch to an exiting stored object in the primary data store. 
@@ -13,14 +12,11 @@ public abstract class PatchItem extends MCPBatchItem { /** * Convert a Patch to an Upsert * - * @param entityRegistry the entity registry * @param recordTemplate the current value record template * @return the upsert */ public abstract UpsertItem applyPatch( - EntityRegistry entityRegistry, - RecordTemplate recordTemplate, - AspectRetriever aspectRetriever); + RecordTemplate recordTemplate, AspectRetriever aspectRetriever); public abstract Patch getPatch(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java index 4e4d2a38799dc..c337e4f848e5c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/UpsertItem.java @@ -3,7 +3,6 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; -import com.linkedin.metadata.models.registry.EntityRegistry; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -17,8 +16,6 @@ public abstract class UpsertItem extends MCPBatchItem { public abstract SystemAspect toLatestEntityAspect(); public abstract void validatePreCommit( - @Nullable RecordTemplate previous, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) + @Nullable RecordTemplate previous, @Nonnull AspectRetriever aspectRetriever) throws AspectValidationException; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java new file mode 100644 index 0000000000000..c73ccbb2d93e3 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/GenericJsonPatch.java @@ -0,0 +1,34 @@ +package com.linkedin.metadata.aspect.patch; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.JsonNode; +import com.github.fge.jsonpatch.JsonPatch; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class GenericJsonPatch { + @Nullable private Map> arrayPrimaryKeys; + + @Nonnull private JsonNode patch; + + @Nonnull + public Map> getArrayPrimaryKeys() { + return arrayPrimaryKeys == null ? 
Map.of() : arrayPrimaryKeys; + } + + @JsonIgnore + public JsonPatch getJsonPatch() throws IOException { + return JsonPatch.fromJson(patch); + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/PatchOperationType.java similarity index 81% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/PatchOperationType.java index ac93fd24fee02..6eaa6069267ba 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/PatchOperationType.java @@ -1,4 +1,4 @@ -package datahub.client.patch; +package com.linkedin.metadata.aspect.patch; import lombok.Getter; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/AbstractMultiFieldPatchBuilder.java similarity index 95% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/AbstractMultiFieldPatchBuilder.java index 943aaefec469b..165a4d26c339c 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/AbstractMultiFieldPatchBuilder.java @@ -1,6 +1,6 @@ -package datahub.client.patch; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -13,7 +13,6 @@ import java.util.ArrayList; import java.util.List; import org.apache.commons.lang3.tuple.ImmutableTriple; -import org.apache.http.entity.ContentType; public abstract class AbstractMultiFieldPatchBuilder> { @@ -87,7 +86,7 @@ protected GenericAspect buildPatch() { .set(VALUE_KEY, triple.right))); GenericAspect genericAspect = new GenericAspect(); - genericAspect.setContentType(ContentType.APPLICATION_JSON.getMimeType()); + genericAspect.setContentType("application/json"); genericAspect.setValue(ByteString.copyString(patches.toString(), StandardCharsets.UTF_8)); return genericAspect; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/ChartInfoPatchBuilder.java similarity index 75% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/ChartInfoPatchBuilder.java index 0655d2b3eb8eb..09f9dad134a0b 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/chart/ChartInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/ChartInfoPatchBuilder.java @@ -1,12 +1,12 @@ -package datahub.client.patch.chart; +package com.linkedin.metadata.aspect.patch.builder; -import static 
com.linkedin.metadata.Constants.*; -import static datahub.client.patch.common.PatchUtil.*; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CHART_INFO_ASPECT_NAME; +import static com.linkedin.metadata.aspect.patch.builder.PatchUtil.createEdgeValue; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.Urn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/CustomPropertiesPatchBuilder.java similarity index 90% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/CustomPropertiesPatchBuilder.java index e621aaf57ff97..e4143851afbe5 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/CustomPropertiesPatchBuilder.java @@ -1,12 +1,11 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.subtypesupport.IntermediatePatchBuilder; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.IntermediatePatchBuilder; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DashboardInfoPatchBuilder.java similarity index 86% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DashboardInfoPatchBuilder.java index cadde582f1c64..9156b304a394e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dashboard/DashboardInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DashboardInfoPatchBuilder.java @@ -1,15 +1,17 @@ -package datahub.client.patch.dashboard; +package com.linkedin.metadata.aspect.patch.builder; -import static com.linkedin.metadata.Constants.*; -import static datahub.client.patch.common.PatchUtil.*; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static 
com.linkedin.metadata.aspect.patch.builder.PatchUtil.createEdgeValue; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.Edge; import com.linkedin.common.urn.ChartUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataFlowInfoPatchBuilder.java similarity index 92% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataFlowInfoPatchBuilder.java index 9e55ab4fc6db4..6a114d90875fe 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataFlowInfoPatchBuilder.java @@ -1,15 +1,14 @@ -package datahub.client.patch.dataflow; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_INFO_ASPECT_NAME; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.common.CustomPropertiesPatchBuilder; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInfoPatchBuilder.java similarity index 93% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInfoPatchBuilder.java index 581616f54e9b9..99c0ac6c15eb1 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInfoPatchBuilder.java @@ -1,16 +1,15 @@ -package datahub.client.patch.datajob; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static 
com.linkedin.metadata.Constants.DATA_JOB_INFO_ASPECT_NAME; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.DataFlowUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.common.CustomPropertiesPatchBuilder; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInputOutputPatchBuilder.java similarity index 93% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInputOutputPatchBuilder.java index fc250daffe916..8e2168e5b6a33 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DataJobInputOutputPatchBuilder.java @@ -1,8 +1,10 @@ -package datahub.client.patch.datajob; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; -import static datahub.client.patch.common.PatchUtil.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME; +import static com.linkedin.metadata.aspect.patch.builder.PatchUtil.createEdgeValue; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; @@ -10,9 +12,8 @@ import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import com.linkedin.metadata.graph.LineageDirection; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DatasetPropertiesPatchBuilder.java similarity index 91% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DatasetPropertiesPatchBuilder.java index f4329c84f33ff..31e181fc244fb 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/DatasetPropertiesPatchBuilder.java @@ -1,13 +1,12 @@ -package datahub.client.patch.dataset; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_PROPERTIES_ASPECT_NAME; import com.fasterxml.jackson.databind.JsonNode; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; -import datahub.client.patch.common.CustomPropertiesPatchBuilder; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.builder.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/EditableSchemaMetadataPatchBuilder.java similarity index 90% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/EditableSchemaMetadataPatchBuilder.java index 6478b31d27ef0..5e9e1911925fa 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/EditableSchemaMetadataPatchBuilder.java @@ -1,15 +1,15 @@ -package datahub.client.patch.dataset; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java similarity index 88% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java index 84db0ba307cf2..ff34b187f6151 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java +++ 
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlobalTagsPatchBuilder.java @@ -1,12 +1,11 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TagUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlossaryTermsPatchBuilder.java similarity index 89% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlossaryTermsPatchBuilder.java index 6f31025406b1b..16d9beded3066 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlossaryTermsPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/GlossaryTermsPatchBuilder.java @@ -1,12 +1,11 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.GlossaryTermUrn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/OwnershipPatchBuilder.java similarity index 91% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/OwnershipPatchBuilder.java index 20e0c930a8c95..35a647424a88a 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/OwnershipPatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/OwnershipPatchBuilder.java @@ -1,13 +1,12 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; import 
com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/PatchUtil.java similarity index 96% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/PatchUtil.java index 69db36c6e038c..7556a8b1d9418 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/PatchUtil.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/PatchUtil.java @@ -1,7 +1,7 @@ -package datahub.client.patch.common; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.UNKNOWN_ACTOR; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.Edge; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java new file mode 100644 index 0000000000000..fab81e0af5bf5 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/StructuredPropertiesPatchBuilder.java @@ -0,0 +1,110 @@ +package com.linkedin.metadata.aspect.patch.builder; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.databind.node.ValueNode; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.apache.commons.lang3.tuple.ImmutableTriple; + +public class StructuredPropertiesPatchBuilder + extends AbstractMultiFieldPatchBuilder { + + private static final String BASE_PATH = "/properties"; + private static final String URN_KEY = "urn"; + private static final String CONTEXT_KEY = "context"; + + /** + * Remove a property from a structured properties aspect. If the property doesn't exist, this is a + * no-op. + * + * @param propertyUrn + * @return + */ + public StructuredPropertiesPatchBuilder removeProperty(Urn propertyUrn) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + "/" + propertyUrn, null)); + return this; + } + + public StructuredPropertiesPatchBuilder setProperty( + @Nonnull Urn propertyUrn, @Nullable List propertyValues) { + propertyValues.stream() + .map( + propertyValue -> + propertyValue instanceof Integer + ? 
this.setProperty(propertyUrn, (Integer) propertyValue) + : this.setProperty(propertyUrn, String.valueOf(propertyValue))) + .collect(Collectors.toList()); + return this; + } + + public StructuredPropertiesPatchBuilder setProperty( + @Nonnull Urn propertyUrn, @Nullable Integer propertyValue) { + ValueNode propertyValueNode = instance.numberNode((Integer) propertyValue); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + "/" + propertyUrn, propertyValueNode)); + return this; + } + + public StructuredPropertiesPatchBuilder setProperty( + @Nonnull Urn propertyUrn, @Nullable String propertyValue) { + ValueNode propertyValueNode = instance.textNode(String.valueOf(propertyValue)); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + "/" + propertyUrn, propertyValueNode)); + return this; + } + + public StructuredPropertiesPatchBuilder addProperty( + @Nonnull Urn propertyUrn, @Nullable Integer propertyValue) { + ValueNode propertyValueNode = instance.numberNode((Integer) propertyValue); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + "/" + propertyUrn + "/" + String.valueOf(propertyValue), + propertyValueNode)); + return this; + } + + public StructuredPropertiesPatchBuilder addProperty( + @Nonnull Urn propertyUrn, @Nullable String propertyValue) { + ValueNode propertyValueNode = instance.textNode(String.valueOf(propertyValue)); + ObjectNode value = instance.objectNode(); + value.put(URN_KEY, propertyUrn.toString()); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + "/" + propertyUrn + "/" + String.valueOf(propertyValue), + propertyValueNode)); + return this; + } + + @Override + protected String getAspectName() { + return STRUCTURED_PROPERTIES_ASPECT_NAME; + } + + @Override + protected String getEntityType() { + if (this.targetEntityUrn == null) { + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); + } + return this.targetEntityUrn.getEntityType(); + } +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/UpstreamLineagePatchBuilder.java similarity index 96% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/UpstreamLineagePatchBuilder.java index 9db2ebc522e09..bfb46d8fc5773 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/UpstreamLineagePatchBuilder.java @@ -1,7 +1,9 @@ -package datahub.client.patch.dataset; +package com.linkedin.metadata.aspect.patch.builder; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static 
com.linkedin.metadata.Constants.UNKNOWN_ACTOR; +import static com.linkedin.metadata.Constants.UPSTREAM_LINEAGE_ASPECT_NAME; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.DatasetUrn; @@ -9,8 +11,7 @@ import com.linkedin.dataset.DatasetLineageType; import com.linkedin.dataset.FineGrainedLineageDownstreamType; import com.linkedin.dataset.FineGrainedLineageUpstreamType; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; -import datahub.client.patch.PatchOperationType; +import com.linkedin.metadata.aspect.patch.PatchOperationType; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.ToString; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/CustomPropertiesPatchBuilderSupport.java similarity index 81% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/CustomPropertiesPatchBuilderSupport.java index 9f221bac15be4..5e1cd094b204e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/CustomPropertiesPatchBuilderSupport.java @@ -1,6 +1,6 @@ -package datahub.client.patch.subtypesupport; +package com.linkedin.metadata.aspect.patch.builder.subtypesupport; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.AbstractMultiFieldPatchBuilder; import java.util.Map; import javax.annotation.Nonnull; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/IntermediatePatchBuilder.java similarity index 83% rename from metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/IntermediatePatchBuilder.java index e3b14c0838ad6..d891a6b9673da 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/IntermediatePatchBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/builder/subtypesupport/IntermediatePatchBuilder.java @@ -1,7 +1,7 @@ -package datahub.client.patch.subtypesupport; +package com.linkedin.metadata.aspect.patch.builder.subtypesupport; import com.fasterxml.jackson.databind.JsonNode; -import datahub.client.patch.AbstractMultiFieldPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.AbstractMultiFieldPatchBuilder; import java.util.List; import org.apache.commons.lang3.tuple.ImmutableTriple; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/ArrayMergingTemplate.java similarity index 98% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java rename to 
entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/ArrayMergingTemplate.java index 9cd8e74d952d6..ff721e97c0e1d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/ArrayMergingTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template; +package com.linkedin.metadata.aspect.patch.template; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/AspectTemplateEngine.java similarity index 71% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/AspectTemplateEngine.java index 029eb688c5291..e9d09085e7eb5 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/AspectTemplateEngine.java @@ -1,6 +1,18 @@ -package com.linkedin.metadata.models.registry.template; +package com.linkedin.metadata.aspect.patch.template; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.CHART_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATASET_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.UPSTREAM_LINEAGE_ASPECT_NAME; import com.fasterxml.jackson.core.JsonProcessingException; import com.github.fge.jsonpatch.JsonPatchException; @@ -34,7 +46,8 @@ public class AspectTemplateEngine { DATA_PRODUCT_PROPERTIES_ASPECT_NAME, DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, CHART_INFO_ASPECT_NAME, - DASHBOARD_INFO_ASPECT_NAME) + DASHBOARD_INFO_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME) .collect(Collectors.toSet()); private final Map> _aspectTemplateMap; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java new file mode 100644 index 0000000000000..78cf14c47a0bf --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/CompoundKeyTemplate.java @@ -0,0 +1,23 @@ +package com.linkedin.metadata.aspect.patch.template; + +import static 
com.linkedin.metadata.aspect.patch.template.TemplateUtil.populateTopLevelKeys; + +import com.datahub.util.RecordUtils; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.github.fge.jsonpatch.JsonPatchException; +import com.github.fge.jsonpatch.Patch; +import com.linkedin.data.template.RecordTemplate; + +public abstract class CompoundKeyTemplate + implements ArrayMergingTemplate { + + @Override + public T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch) + throws JsonProcessingException, JsonPatchException { + JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch); + JsonNode patched = jsonPatch.apply(transformed); + JsonNode postProcessed = rebaseFields(patched); + return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/Template.java similarity index 69% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/Template.java index 0793cacce780f..bd8cd544fb59b 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/Template.java @@ -1,6 +1,7 @@ -package com.linkedin.metadata.models.registry.template; +package com.linkedin.metadata.aspect.patch.template; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; +import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.OBJECT_MAPPER; +import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.populateTopLevelKeys; import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; @@ -19,7 +20,12 @@ public interface Template { * @return specific type for this template * @throws {@link ClassCastException} when recordTemplate is not the correct type for the template */ - T getSubtype(RecordTemplate recordTemplate) throws ClassCastException; + default T getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + if (getTemplateType().isInstance(recordTemplate)) { + return getTemplateType().cast(recordTemplate); + } + throw new ClassCastException("Unable to cast RecordTemplate to " + getTemplateType().getName()); + } /** Get the template clas type */ Class getTemplateType(); @@ -43,10 +49,20 @@ public interface Template { */ default T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch) throws JsonProcessingException, JsonPatchException { - JsonNode transformed = preprocessTemplate(recordTemplate); - JsonNode patched = jsonPatch.apply(transformed); - JsonNode postProcessed = rebaseFields(patched); - return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); + + TemplateUtil.validatePatch(jsonPatch); + JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch); + try { + JsonNode patched = jsonPatch.apply(transformed); + JsonNode postProcessed = rebaseFields(patched); + return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); + } catch (JsonPatchException e) { + throw new RuntimeException( + String.format( + "Error performing JSON PATCH on aspect %s. 
Patch: %s Target: %s", + recordTemplate.schema().getName(), jsonPatch, transformed.toString()), + e); + } } /** diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java new file mode 100644 index 0000000000000..d998692f2c388 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/TemplateUtil.java @@ -0,0 +1,97 @@ +package com.linkedin.metadata.aspect.patch.template; + +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; +import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; + +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.fge.jsonpatch.Patch; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import com.linkedin.util.Pair; +import java.util.ArrayList; +import java.util.List; + +public class TemplateUtil { + + private TemplateUtil() {} + + public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + public static List> getPaths(Patch jsonPatch) { + JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); + List> paths = new ArrayList<>(); + patchNode + .elements() + .forEachRemaining( + node -> + paths.add( + Pair.of( + PatchOperationType.valueOf(node.get("op").asText().toUpperCase()), + node.get("path").asText()))); + return paths; + } + + public static void validatePatch(Patch jsonPatch) { + // ensure supported patch operations + JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); + patchNode + .elements() + .forEachRemaining( + node -> { + try { + PatchOperationType.valueOf(node.get("op").asText().toUpperCase()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Unsupported PATCH operation: `%s` Operation `%s`", + node.get("op").asText(), node), + e); + } + }); + } + + /** + * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent + * paths to be specified + * + * @param transformedNode transformed node to have keys populated + * @return transformed node that has top level keys populated + */ + public static JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch) { + JsonNode transformedNodeClone = transformedNode.deepCopy(); + List> paths = getPaths(jsonPatch); + for (Pair operationPath : paths) { + String[] keys = operationPath.getSecond().split("/"); + JsonNode parent = transformedNodeClone; + + // if not remove, skip last key as we only need to populate top level + int endIdx = + PatchOperationType.REMOVE.equals(operationPath.getFirst()) + ? 
keys.length + : keys.length - 1; + + // Skip first as it will always be blank due to path starting with / + for (int i = 1; i < endIdx; i++) { + if (parent.get(keys[i]) == null) { + ((ObjectNode) parent).set(keys[i], instance.objectNode()); + } + parent = parent.get(keys[i]); + } + } + + return transformedNodeClone; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/chart/ChartInfoTemplate.java similarity index 92% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/chart/ChartInfoTemplate.java index 654f923e7322d..aabc5b54cfa5c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/chart/ChartInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/chart/ChartInfoTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template.chart; +package com.linkedin.metadata.aspect.patch.template.chart; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.chart.ChartDataSourceTypeArray; @@ -10,7 +10,7 @@ import com.linkedin.common.EdgeArray; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java new file mode 100644 index 0000000000000..3a3e3c99f25a3 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GenericPatchTemplate.java @@ -0,0 +1,59 @@ +package com.linkedin.metadata.aspect.patch.template.common; + +import com.fasterxml.jackson.databind.JsonNode; +import com.github.fge.jsonpatch.JsonPatchException; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.aspect.patch.GenericJsonPatch; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import javax.annotation.Nonnull; +import lombok.Builder; + +@Builder +public class GenericPatchTemplate extends CompoundKeyTemplate { + + @Nonnull private final GenericJsonPatch genericJsonPatch; + @Nonnull private final Class templateType; + @Nonnull private final T templateDefault; + + @Nonnull + @Override + public Class getTemplateType() { + return templateType; + } + + @Nonnull + @Override + public T getDefault() { + return templateDefault; + } + + @Nonnull + @Override + public JsonNode transformFields(final JsonNode baseNode) { + JsonNode transformedNode = baseNode; + for (Map.Entry> composite : + genericJsonPatch.getArrayPrimaryKeys().entrySet()) { + transformedNode = arrayFieldToMap(transformedNode, composite.getKey(), composite.getValue()); + } + return transformedNode; + } + + @Nonnull + @Override + public JsonNode rebaseFields(JsonNode patched) { + JsonNode transformedNode = patched; + for 
(Map.Entry> composite : + genericJsonPatch.getArrayPrimaryKeys().entrySet()) { + transformedNode = + transformedMapToArray(transformedNode, composite.getKey(), composite.getValue()); + } + return transformedNode; + } + + public T applyPatch(RecordTemplate recordTemplate) throws IOException, JsonPatchException { + return super.applyPatch(recordTemplate, genericJsonPatch.getJsonPatch()); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlobalTagsTemplate.java similarity index 90% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlobalTagsTemplate.java index a98e60c739749..dac5e89edc88e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlobalTagsTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.common; +package com.linkedin.metadata.aspect.patch.template.common; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlossaryTermsTemplate.java similarity index 92% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlossaryTermsTemplate.java index 7ce59916f2073..e6dd1fd523006 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/GlossaryTermsTemplate.java @@ -1,7 +1,7 @@ -package com.linkedin.metadata.models.registry.template.common; +package com.linkedin.metadata.aspect.patch.template.common; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -10,7 +10,7 @@ import com.linkedin.common.GlossaryTerms; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/OwnershipTemplate.java 
similarity index 89% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/OwnershipTemplate.java index b850ae830b98c..0eaed27ec4cb7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/OwnershipTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template.common; +package com.linkedin.metadata.aspect.patch.template.common; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.AuditStamp; @@ -8,7 +8,7 @@ import com.linkedin.common.Ownership; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; import java.util.Arrays; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java new file mode 100644 index 0000000000000..df3d682632bca --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/common/StructuredPropertiesTemplate.java @@ -0,0 +1,56 @@ +package com.linkedin.metadata.aspect.patch.template.common; + +import com.fasterxml.jackson.databind.JsonNode; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import java.util.Collections; +import javax.annotation.Nonnull; + +public class StructuredPropertiesTemplate implements ArrayMergingTemplate { + + private static final String PROPERTIES_FIELD_NAME = "properties"; + private static final String URN_FIELD_NAME = "propertyUrn"; + + // private static final String AUDIT_STAMP_FIELD = "auditStamp"; + // private static final String TIME_FIELD = "time"; + // private static final String ACTOR_FIELD = "actor"; + + @Override + public StructuredProperties getSubtype(RecordTemplate recordTemplate) throws ClassCastException { + if (recordTemplate instanceof StructuredProperties) { + return (StructuredProperties) recordTemplate; + } + throw new ClassCastException("Unable to cast RecordTemplate to StructuredProperties"); + } + + @Override + public Class getTemplateType() { + return StructuredProperties.class; + } + + @Nonnull + @Override + public StructuredProperties getDefault() { + StructuredProperties structuredProperties = new StructuredProperties(); + structuredProperties.setProperties(new StructuredPropertyValueAssignmentArray()); + // .setAuditStamp(new + // AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + return structuredProperties; + } + + @Nonnull + @Override + public JsonNode transformFields(JsonNode baseNode) { + return arrayFieldToMap( + baseNode, PROPERTIES_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); + } + + @Nonnull + @Override + public JsonNode rebaseFields(JsonNode patched) { + return 
transformedMapToArray( + patched, PROPERTIES_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dashboard/DashboardInfoTemplate.java similarity index 94% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dashboard/DashboardInfoTemplate.java index eae04b5285adf..85ce06b01c1d7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dashboard/DashboardInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dashboard/DashboardInfoTemplate.java @@ -1,6 +1,6 @@ -package com.linkedin.metadata.models.registry.template.dashboard; +package com.linkedin.metadata.aspect.patch.template.dashboard; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.AuditStamp; @@ -11,7 +11,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.dashboard.DashboardInfo; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataflow/DataFlowInfoTemplate.java similarity index 89% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataflow/DataFlowInfoTemplate.java index 73e837f368f0b..28ee769521995 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataflow/DataFlowInfoTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.dataflow; +package com.linkedin.metadata.aspect.patch.template.dataflow; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataFlowInfo; -import com.linkedin.metadata.models.registry.template.Template; +import com.linkedin.metadata.aspect.patch.template.Template; import javax.annotation.Nonnull; public class DataFlowInfoTemplate implements Template { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInfoTemplate.java similarity index 89% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInfoTemplate.java index bdb306c2d32e4..7cb986da0cba6 100644 --- 
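// [editor's sketch, not part of the patch] Usage outline for the new
// StructuredPropertiesTemplate above; the property urn is hypothetical. The
// template keys the "properties" array by "propertyUrn", so a patch operation
// can upsert one assignment at "/properties/urn:li:structuredProperty:retention":
//
//   StructuredPropertiesTemplate template = new StructuredPropertiesTemplate();
//   StructuredProperties base = template.getDefault();     // empty assignment array
//   JsonNode keyed = template.transformFields(baseNode);   // properties: array -> map
//   JsonNode back  = template.rebaseFields(patchedNode);   // properties: map -> array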
a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInfoTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.datajob; +package com.linkedin.metadata.aspect.patch.template.datajob; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataJobInfo; -import com.linkedin.metadata.models.registry.template.Template; +import com.linkedin.metadata.aspect.patch.template.Template; import javax.annotation.Nonnull; public class DataJobInfoTemplate implements Template { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInputOutputTemplate.java similarity index 96% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInputOutputTemplate.java index 6761892b1b31b..3d398d97b50c3 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/datajob/DataJobInputOutputTemplate.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry.template.datajob; +package com.linkedin.metadata.aspect.patch.template.datajob; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.DataJobUrnArray; @@ -8,7 +8,7 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.dataset.FineGrainedLineageArray; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataproduct/DataProductPropertiesTemplate.java similarity index 91% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataproduct/DataProductPropertiesTemplate.java index 899c51a7c3d7e..9b117114395b1 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataproduct/DataProductPropertiesTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataproduct/DataProductPropertiesTemplate.java @@ -1,10 +1,10 @@ -package com.linkedin.metadata.models.registry.template.dataproduct; +package com.linkedin.metadata.aspect.patch.template.dataproduct; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.dataproduct.DataProductAssociationArray; import com.linkedin.dataproduct.DataProductProperties; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; 
import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/DatasetPropertiesTemplate.java similarity index 91% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/DatasetPropertiesTemplate.java index 991f7f3d4053a..cf76bed2fd3f7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/DatasetPropertiesTemplate.java @@ -1,11 +1,11 @@ -package com.linkedin.metadata.models.registry.template.dataset; +package com.linkedin.metadata.aspect.patch.template.dataset; import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; -import com.linkedin.metadata.models.registry.template.ArrayMergingTemplate; +import com.linkedin.metadata.aspect.patch.template.ArrayMergingTemplate; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/EditableSchemaMetadataTemplate.java similarity index 92% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/EditableSchemaMetadataTemplate.java index 9712a9081d33a..0b3605708e610 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/EditableSchemaMetadataTemplate.java @@ -1,15 +1,15 @@ -package com.linkedin.metadata.models.registry.template.dataset; +package com.linkedin.metadata.aspect.patch.template.dataset; -import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; -import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate; -import com.linkedin.metadata.models.registry.template.common.GlossaryTermsTemplate; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; +import com.linkedin.metadata.aspect.patch.template.common.GlobalTagsTemplate; +import com.linkedin.metadata.aspect.patch.template.common.GlossaryTermsTemplate; import com.linkedin.schema.EditableSchemaFieldInfoArray; import com.linkedin.schema.EditableSchemaMetadata; import java.util.Collections; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java 
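// [editor's sketch, not part of the patch] EditableSchemaMetadataTemplate above
// extends CompoundKeyTemplate because its entries are addressed by compound
// paths (keyed on fieldPath). As the CompoundKeyTemplate source removed further
// down in this patch shows, JSON Patch refuses paths whose parent nodes do not
// exist, so the template pre-creates the intermediate objects named by each
// patch path and only then applies the patch:
//
//   JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch);
//   JsonNode patched     = jsonPatch.apply(transformed);
//   T result = RecordUtils.toRecordTemplate(getTemplateType(), rebaseFields(patched).toString());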
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/UpstreamLineageTemplate.java similarity index 96% rename from entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java rename to entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/UpstreamLineageTemplate.java index 81a4065dedb1a..6907181b3f7ff 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/patch/template/dataset/UpstreamLineageTemplate.java @@ -1,7 +1,10 @@ -package com.linkedin.metadata.models.registry.template.dataset; +package com.linkedin.metadata.aspect.patch.template.dataset; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.instance; +import static com.linkedin.metadata.Constants.FINE_GRAINED_LINEAGE_DATASET_TYPE; +import static com.linkedin.metadata.Constants.FINE_GRAINED_LINEAGE_FIELD_SET_TYPE; +import static com.linkedin.metadata.Constants.FINE_GRAINED_LINEAGE_FIELD_TYPE; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -13,7 +16,7 @@ import com.linkedin.dataset.FineGrainedLineageArray; import com.linkedin.dataset.UpstreamArray; import com.linkedin.dataset.UpstreamLineage; -import com.linkedin.metadata.models.registry.template.CompoundKeyTemplate; +import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate; import java.util.Collections; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java index dd9bbcda8f4af..aec0a4cfa0706 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginFactory.java @@ -27,6 +27,13 @@ @Slf4j public class PluginFactory { + private static final String[] VALIDATOR_PACKAGES = { + "com.linkedin.metadata.aspect.plugins.validation", "com.linkedin.metadata.aspect.validation" + }; + private static final String[] HOOK_PACKAGES = { + "com.linkedin.metadata.aspect.plugins.hooks", "com.linkedin.metadata.aspect.hooks" + }; + public static PluginFactory withCustomClasspath( @Nullable PluginConfiguration pluginConfiguration, @Nonnull List classLoaders) { return new PluginFactory(pluginConfiguration, classLoaders); @@ -178,17 +185,14 @@ private List buildAspectPayloadValidators( build( AspectPayloadValidator.class, pluginConfiguration.getAspectPayloadValidators(), - "com.linkedin.metadata.aspect.plugins.validation")); + VALIDATOR_PACKAGES)); } private List buildMutationHooks(@Nullable PluginConfiguration pluginConfiguration) { return pluginConfiguration == null ? List.of() : applyDisable( - build( - MutationHook.class, - pluginConfiguration.getMutationHooks(), - "com.linkedin.metadata.aspect.plugins.hooks")); + build(MutationHook.class, pluginConfiguration.getMutationHooks(), HOOK_PACKAGES)); } private List buildMCLSideEffects( @@ -196,10 +200,7 @@ private List buildMCLSideEffects( return pluginConfiguration == null ? 
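// [editor's note, not part of the patch] PluginFactory now scans two packages
// per plugin type, so the built-in validators introduced by this patch under
// com.linkedin.metadata.aspect.validation resolve by class name the same way as
// user-supplied ones under com.linkedin.metadata.aspect.plugins.validation.
// A hypothetical registry entry; the YAML shape is inferred from the
// AspectPluginConfig fields, not taken from this patch:
//
//   aspectPayloadValidators:
//     - className: com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator
//       enabled: true
//       supportedOperations: [UPSERT]
//       supportedEntityAspectNames:
//         - entityName: "*"
//           aspectName: structuredProperties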
List.of() : applyDisable( - build( - MCLSideEffect.class, - pluginConfiguration.getMclSideEffects(), - "com.linkedin.metadata.aspect.plugins.hooks")); + build(MCLSideEffect.class, pluginConfiguration.getMclSideEffects(), HOOK_PACKAGES)); } private List buildMCPSideEffects( @@ -207,10 +208,7 @@ private List buildMCPSideEffects( return pluginConfiguration == null ? List.of() : applyDisable( - build( - MCPSideEffect.class, - pluginConfiguration.getMcpSideEffects(), - "com.linkedin.metadata.aspect.plugins.hooks")); + build(MCPSideEffect.class, pluginConfiguration.getMcpSideEffects(), HOOK_PACKAGES)); } private List build( @@ -226,6 +224,11 @@ private List build( config -> { try { ClassInfo classInfo = classMap.get(config.getClassName()); + if (classInfo == null) { + throw new IllegalStateException( + String.format( + "The following class cannot be loaded: %s", config.getClassName())); + } MethodInfo constructorMethod = classInfo.getConstructorInfo().get(0); return Stream.of( (T) constructorMethod.loadClassAndGetConstructor().newInstance(config)); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java index 03a0473677fb8..d88b05ede8454 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java @@ -38,9 +38,11 @@ && isChangeTypeSupported(changeType) protected boolean isEntityAspectSupported( @Nonnull String entityName, @Nonnull String aspectName) { - return (ENTITY_WILDCARD.equals(entityName) - || getConfig().getSupportedEntityAspectNames().stream() - .anyMatch(supported -> supported.getEntityName().equals(entityName))) + return (getConfig().getSupportedEntityAspectNames().stream() + .anyMatch( + supported -> + ENTITY_WILDCARD.equals(supported.getEntityName()) + || supported.getEntityName().equals(entityName))) && isAspectSupported(aspectName); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java index ef9786f8d711e..a21f3cd2436de 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffect.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.aspect.plugins.PluginSpec; import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -23,16 +22,12 @@ public MCLSideEffect(AspectPluginConfig aspectPluginConfig) { * @return additional upserts */ public final Stream apply( - @Nonnull List input, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + @Nonnull List input, @Nonnull AspectRetriever aspectRetriever) { return input.stream() .filter(item -> shouldApply(item.getChangeType(), item.getUrn(), item.getAspectSpec())) - .flatMap(i -> applyMCLSideEffect(i, entityRegistry, aspectRetriever)); + .flatMap(i -> applyMCLSideEffect(i, aspectRetriever)); } protected abstract Stream applyMCLSideEffect( - @Nonnull MCLBatchItem input, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever); + 
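// [editor's note, not part of the patch] The PluginSpec change above moves the
// wildcard from the caller's entityName argument onto the configured
// supportedEntityAspectNames entries. The new semantics, illustrated:
//
//   config [{entityName: "*", aspectName: "status"}]
//     isEntityAspectSupported("chart", "status")   -> true
//     isEntityAspectSupported("dataset", "status") -> true
//   config [{entityName: "chart", aspectName: "status"}]
//     isEntityAspectSupported("dataset", "status") -> false
//
// This is also why the PluginsTest assertions further down now query a concrete
// entity name ("chart") instead of the literal "*".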
@Nonnull MCLBatchItem input, @Nonnull AspectRetriever aspectRetriever); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java index fc1d1587d10fb..80cb405201c87 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffect.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.aspect.plugins.PluginSpec; import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -23,14 +22,12 @@ public MCPSideEffect(AspectPluginConfig aspectPluginConfig) { * @return additional upserts */ public final Stream apply( - List input, - EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + List input, @Nonnull AspectRetriever aspectRetriever) { return input.stream() .filter(item -> shouldApply(item.getChangeType(), item.getUrn(), item.getAspectSpec())) - .flatMap(i -> applyMCPSideEffect(i, entityRegistry, aspectRetriever)); + .flatMap(i -> applyMCPSideEffect(i, aspectRetriever)); } protected abstract Stream applyMCPSideEffect( - UpsertItem input, EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever); + UpsertItem input, @Nonnull AspectRetriever aspectRetriever); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java index 78aa4689472f5..00a20b3131c2a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/validation/AspectRetriever.java @@ -2,12 +2,35 @@ import com.linkedin.common.urn.Urn; import com.linkedin.entity.Aspect; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; +import java.util.Map; +import java.util.Set; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public interface AspectRetriever { - Aspect getLatestAspectObject(@Nonnull final Urn urn, @Nonnull final String aspectName) + @Nullable + default Aspect getLatestAspectObject(@Nonnull final Urn urn, @Nonnull final String aspectName) + throws RemoteInvocationException, URISyntaxException { + return getLatestAspectObjects(Set.of(urn), Set.of(aspectName)) + .getOrDefault(urn, Map.of()) + .get(aspectName); + } + + /** + * Returns for each URN, the map of aspectName to Aspect + * + * @param urns urns to fetch + * @param aspectNames aspect names + * @return urn to aspect name and values + */ + @Nonnull + Map> getLatestAspectObjects(Set urns, Set aspectNames) throws RemoteInvocationException, URISyntaxException; + + @Nonnull + EntityRegistry getEntityRegistry(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java new file mode 100644 index 0000000000000..5a4635da433ae --- /dev/null +++ 
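// [editor's sketch, not part of the patch] The AspectRetriever contract above is
// widened to a batch lookup, with the single-aspect lookup becoming a default
// method over it, so implementations only provide the batch form. Usage outline
// (urn hypothetical, checked exceptions omitted):
//
//   Map<Urn, Map<String, Aspect>> latest =
//       retriever.getLatestAspectObjects(Set.of(propertyUrn), Set.of("propertyDefinition"));
//   Aspect definition = retriever.getLatestAspectObject(propertyUrn, "propertyDefinition");
//   // the single-urn call is equivalent to:
//   //   latest.getOrDefault(propertyUrn, Map.of()).get("propertyDefinition")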
b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/PropertyDefinitionValidator.java @@ -0,0 +1,91 @@ +package com.linkedin.metadata.aspect.validation; + +import static com.linkedin.structured.PropertyCardinality.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.aspect.plugins.validation.AspectPayloadValidator; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class PropertyDefinitionValidator extends AspectPayloadValidator { + + public PropertyDefinitionValidator(AspectPluginConfig aspectPluginConfig) { + super(aspectPluginConfig); + } + + @Override + protected void validateProposedAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nonnull RecordTemplate aspectPayload, + @Nonnull AspectRetriever aspectRetriever) + throws AspectValidationException { + // No-op + } + + @Override + protected void validatePreCommitAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nullable RecordTemplate previousAspect, + @Nonnull RecordTemplate proposedAspect, + AspectRetriever aspectRetriever) + throws AspectValidationException { + validate(previousAspect, proposedAspect); + } + + public static boolean validate( + @Nullable RecordTemplate previousAspect, @Nonnull RecordTemplate proposedAspect) + throws AspectValidationException { + if (previousAspect != null) { + StructuredPropertyDefinition previousDefinition = + (StructuredPropertyDefinition) previousAspect; + StructuredPropertyDefinition newDefinition = (StructuredPropertyDefinition) proposedAspect; + if (!newDefinition.getValueType().equals(previousDefinition.getValueType())) { + throw new AspectValidationException( + "Value type cannot be changed as this is a backwards incompatible change"); + } + if (newDefinition.getCardinality().equals(SINGLE) + && previousDefinition.getCardinality().equals(MULTIPLE)) { + throw new AspectValidationException( + "Property definition cardinality cannot be changed from MULTI to SINGLE"); + } + if (!newDefinition.getQualifiedName().equals(previousDefinition.getQualifiedName())) { + throw new AspectValidationException( + "Cannot change the fully qualified name of a Structured Property"); + } + // Assure new definition has only added allowed values, not removed them + if (newDefinition.getAllowedValues() != null) { + if (!previousDefinition.hasAllowedValues() + || previousDefinition.getAllowedValues() == null) { + throw new AspectValidationException( + "Cannot restrict values that were previously allowed"); + } + Set newAllowedValues = + newDefinition.getAllowedValues().stream() + .map(PropertyValue::getValue) + .collect(Collectors.toSet()); + for (PropertyValue value : previousDefinition.getAllowedValues()) { + if (!newAllowedValues.contains(value.getValue())) { + throw new AspectValidationException( + "Cannot restrict values that were previously 
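// [editor's note, not part of the patch] Summary of the evolution rules the
// PropertyDefinitionValidator enforces between the previous and proposed
// StructuredPropertyDefinition (mirroring the tests added at the end of this
// patch):
//
//   valueType changed                             -> AspectValidationException
//   cardinality MULTIPLE -> SINGLE                -> AspectValidationException
//   qualifiedName changed                         -> AspectValidationException
//   allowedValues added where none existed,
//   or a previously allowed value removed         -> AspectValidationException
//   SINGLE -> MULTIPLE, displayName changes,
//   extending an existing allowedValues list      -> accepted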
allowed"); + } + } + } + } + return true; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java new file mode 100644 index 0000000000000..efd95e0c2e3f1 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java @@ -0,0 +1,264 @@ +package com.linkedin.metadata.aspect.validation; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.StringArray; +import com.linkedin.data.template.StringArrayMap; +import com.linkedin.entity.Aspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.aspect.plugins.validation.AspectPayloadValidator; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.LogicalValueType; +import com.linkedin.metadata.models.StructuredPropertyUtils; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.PropertyCardinality; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +/** A Validator for StructuredProperties Aspect that is attached to entities like Datasets, etc. 
*/ +@Slf4j +public class StructuredPropertiesValidator extends AspectPayloadValidator { + + private static final Set VALID_VALUE_STORED_AS_STRING = + new HashSet<>( + Arrays.asList( + LogicalValueType.STRING, + LogicalValueType.RICH_TEXT, + LogicalValueType.DATE, + LogicalValueType.URN)); + + public StructuredPropertiesValidator(AspectPluginConfig aspectPluginConfig) { + super(aspectPluginConfig); + } + + public static LogicalValueType getLogicalValueType(Urn valueType) { + String valueTypeId = getValueTypeId(valueType); + if (valueTypeId.equals("string")) { + return LogicalValueType.STRING; + } else if (valueTypeId.equals("date")) { + return LogicalValueType.DATE; + } else if (valueTypeId.equals("number")) { + return LogicalValueType.NUMBER; + } else if (valueTypeId.equals("urn")) { + return LogicalValueType.URN; + } else if (valueTypeId.equals("rich_text")) { + return LogicalValueType.RICH_TEXT; + } + + return LogicalValueType.UNKNOWN; + } + + @Override + protected void validateProposedAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nonnull RecordTemplate aspectPayload, + @Nonnull AspectRetriever aspectRetriever) + throws AspectValidationException { + validate(aspectPayload, aspectRetriever); + } + + public static boolean validate( + @Nonnull RecordTemplate aspectPayload, @Nonnull AspectRetriever aspectRetriever) + throws AspectValidationException { + StructuredProperties structuredProperties = (StructuredProperties) aspectPayload; + log.warn("Validator called with {}", structuredProperties); + Map> structuredPropertiesMap = + structuredProperties.getProperties().stream() + .collect( + Collectors.groupingBy( + x -> x.getPropertyUrn(), + HashMap::new, + Collectors.toCollection(ArrayList::new))); + for (Map.Entry> entry : + structuredPropertiesMap.entrySet()) { + // There should only be one entry per structured property + List values = entry.getValue(); + if (values.size() > 1) { + throw new AspectValidationException( + "Property: " + entry.getKey() + " has multiple entries: " + values); + } + } + + for (StructuredPropertyValueAssignment structuredPropertyValueAssignment : + structuredProperties.getProperties()) { + Urn propertyUrn = structuredPropertyValueAssignment.getPropertyUrn(); + String property = propertyUrn.toString(); + if (!propertyUrn.getEntityType().equals("structuredProperty")) { + throw new IllegalStateException( + "Unexpected entity type. Expected: structuredProperty Found: " + + propertyUrn.getEntityType()); + } + Aspect structuredPropertyDefinitionAspect = null; + try { + structuredPropertyDefinitionAspect = + aspectRetriever.getLatestAspectObject(propertyUrn, "propertyDefinition"); + + if (structuredPropertyDefinitionAspect == null) { + throw new AspectValidationException("Unexpected null value found."); + } + } catch (Exception e) { + log.error("Could not fetch latest aspect. PropertyUrn: {}", propertyUrn, e); + throw new AspectValidationException("Could not fetch latest aspect: " + e.getMessage(), e); + } + + StructuredPropertyDefinition structuredPropertyDefinition = + new StructuredPropertyDefinition(structuredPropertyDefinitionAspect.data()); + log.warn( + "Retrieved property definition for {}. 
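// [editor's note, not part of the patch] The groupingBy step above rejects
// duplicate assignments for one property before any type checking happens.
// Data-shaped illustration with a hypothetical urn:
//
//   properties:
//     - {propertyUrn: "urn:li:structuredProperty:tier", values: ["gold"]}
//     - {propertyUrn: "urn:li:structuredProperty:tier", values: ["silver"]}
//   -> AspectValidationException: "Property: ... has multiple entries: ..."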
{}", propertyUrn, structuredPropertyDefinition); + if (structuredPropertyDefinition != null) { + PrimitivePropertyValueArray values = structuredPropertyValueAssignment.getValues(); + // Check cardinality + if (structuredPropertyDefinition.getCardinality() == PropertyCardinality.SINGLE) { + if (values.size() > 1) { + throw new AspectValidationException( + "Property: " + + property + + " has cardinality 1, but multiple values were assigned: " + + values); + } + } + // Check values + for (PrimitivePropertyValue value : values) { + validateType(propertyUrn, structuredPropertyDefinition, value); + validateAllowedValues(propertyUrn, structuredPropertyDefinition, value); + } + } + } + + return true; + } + + private static void validateAllowedValues( + Urn propertyUrn, StructuredPropertyDefinition definition, PrimitivePropertyValue value) + throws AspectValidationException { + if (definition.getAllowedValues() != null) { + Set definedValues = + definition.getAllowedValues().stream() + .map(PropertyValue::getValue) + .collect(Collectors.toSet()); + if (definedValues.stream().noneMatch(definedPrimitive -> definedPrimitive.equals(value))) { + throw new AspectValidationException( + String.format( + "Property: %s, value: %s should be one of %s", propertyUrn, value, definedValues)); + } + } + } + + private static void validateType( + Urn propertyUrn, StructuredPropertyDefinition definition, PrimitivePropertyValue value) + throws AspectValidationException { + Urn valueType = definition.getValueType(); + LogicalValueType typeDefinition = getLogicalValueType(valueType); + + // Primitive Type Validation + if (VALID_VALUE_STORED_AS_STRING.contains(typeDefinition)) { + log.debug( + "Property definition demands a string value. {}, {}", value.isString(), value.isDouble()); + if (value.getString() == null) { + throw new AspectValidationException( + "Property: " + propertyUrn.toString() + ", value: " + value + " should be a string"); + } else if (typeDefinition.equals(LogicalValueType.DATE)) { + if (!StructuredPropertyUtils.isValidDate(value)) { + throw new AspectValidationException( + "Property: " + + propertyUrn.toString() + + ", value: " + + value + + " should be a date with format YYYY-MM-DD"); + } + } else if (typeDefinition.equals(LogicalValueType.URN)) { + StringArrayMap valueTypeQualifier = definition.getTypeQualifier(); + Urn typeValue; + try { + typeValue = Urn.createFromString(value.getString()); + } catch (URISyntaxException e) { + throw new AspectValidationException( + "Property: " + propertyUrn.toString() + ", value: " + value + " should be an urn", e); + } + if (valueTypeQualifier != null) { + if (valueTypeQualifier.containsKey("allowedTypes")) { + // Let's get the allowed types and validate that the value is one of those types + StringArray allowedTypes = valueTypeQualifier.get("allowedTypes"); + boolean matchedAny = false; + for (String type : allowedTypes) { + Urn typeUrn = null; + try { + typeUrn = Urn.createFromString(type); + } catch (URISyntaxException e) { + + // we don't expect to have types that we allowed to be written that aren't + // urns + throw new RuntimeException(e); + } + String allowedEntityName = getValueTypeId(typeUrn); + if (typeValue.getEntityType().equals(allowedEntityName)) { + matchedAny = true; + } + } + if (!matchedAny) { + throw new AspectValidationException( + "Property: " + + propertyUrn.toString() + + ", value: " + + value + + " is not of any supported urn types:" + + allowedTypes); + } + } + } + } + } else if (typeDefinition.equals(LogicalValueType.NUMBER)) { + 
log.debug("Property definition demands a numeric value. {}, {}", value.isString(), value); + try { + Double doubleValue = + value.getDouble() != null ? value.getDouble() : Double.parseDouble(value.getString()); + } catch (NumberFormatException | NullPointerException e) { + throw new AspectValidationException( + "Property: " + propertyUrn.toString() + ", value: " + value + " should be a number"); + } + } else { + throw new AspectValidationException( + "Validation support for type " + definition.getValueType() + " is not yet implemented."); + } + } + + private static String getValueTypeId(@Nonnull final Urn valueType) { + String valueTypeId = valueType.getId(); + if (valueTypeId.startsWith("datahub.")) { + valueTypeId = valueTypeId.split("\\.")[1]; + } + return valueTypeId; + } + + @Override + protected void validatePreCommitAspect( + @Nonnull ChangeType changeType, + @Nonnull Urn entityUrn, + @Nonnull AspectSpec aspectSpec, + @Nullable RecordTemplate previousAspect, + @Nonnull RecordTemplate proposedAspect, + AspectRetriever aspectRetriever) + throws AspectValidationException { + // No-op + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java b/entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java new file mode 100644 index 0000000000000..1643ce900f748 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/LogicalValueType.java @@ -0,0 +1,10 @@ +package com.linkedin.metadata.models; + +public enum LogicalValueType { + STRING, + RICH_TEXT, + NUMBER, + DATE, + URN, + UNKNOWN +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java new file mode 100644 index 0000000000000..a8711429421f3 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/StructuredPropertyUtils.java @@ -0,0 +1,45 @@ +package com.linkedin.metadata.models; + +import com.linkedin.structured.PrimitivePropertyValue; +import java.sql.Date; +import java.time.format.DateTimeParseException; + +public class StructuredPropertyUtils { + + private StructuredPropertyUtils() {} + + static final Date MIN_DATE = Date.valueOf("1000-01-01"); + static final Date MAX_DATE = Date.valueOf("9999-12-31"); + + /** + * Sanitizes fully qualified name for use in an ElasticSearch field name Replaces . 
and " " + * characters + * + * @param fullyQualifiedName The original fully qualified name of the property + * @return The sanitized version that can be used as a field name + */ + public static String sanitizeStructuredPropertyFQN(String fullyQualifiedName) { + String sanitizedName = fullyQualifiedName.replace('.', '_').replace(' ', '_'); + return sanitizedName; + } + + public static Date toDate(PrimitivePropertyValue value) throws DateTimeParseException { + return Date.valueOf(value.getString()); + } + + public static boolean isValidDate(PrimitivePropertyValue value) { + if (value.getString() == null) { + return false; + } + if (value.getString().length() != 10) { + return false; + } + Date date; + try { + date = toDate(value); + } catch (DateTimeParseException e) { + return false; + } + return date.compareTo(MIN_DATE) >= 0 && date.compareTo(MAX_DATE) <= 0; + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index ce8718c536fbe..bd9a6b6c9e589 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.linkedin.data.schema.DataSchema; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DataSchemaFactory; @@ -18,7 +19,6 @@ import com.linkedin.metadata.models.registry.config.Entities; import com.linkedin.metadata.models.registry.config.Entity; import com.linkedin.metadata.models.registry.config.Event; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.util.Pair; import java.io.FileInputStream; import java.io.FileNotFoundException; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java index fbc3285579cc0..c2aa1fab6c2c0 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.models.registry; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.aspect.plugins.hooks.MCLSideEffect; import com.linkedin.metadata.aspect.plugins.hooks.MCPSideEffect; @@ -10,7 +11,6 @@ import com.linkedin.metadata.models.DefaultEntitySpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -39,11 +39,10 @@ default String getIdentifier() { EntitySpec getEntitySpec(@Nonnull final String entityName); /** - * Given an event name, returns an instance of {@link DefaultEventSpec}. + * Given an event name, returns an instance of {@link EventSpec}. 
* * @param eventName the name of the event to be retrieved - * @return an {@link DefaultEventSpec} corresponding to the entity name provided, null if none - * exists. + * @return an {@link EventSpec} corresponding to the entity name provided, null if none exists. */ @Nullable EventSpec getEventSpec(@Nonnull final String eventName); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java index 285b96b93d1d6..650a1cd41066e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java @@ -3,13 +3,13 @@ import com.linkedin.data.schema.compatibility.CompatibilityChecker; import com.linkedin.data.schema.compatibility.CompatibilityOptions; import com.linkedin.data.schema.compatibility.CompatibilityResult; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.ConfigEntitySpec; import com.linkedin.metadata.models.DefaultEntitySpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import java.util.ArrayList; import java.util.HashMap; import java.util.List; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java index c605cfa188fc8..35bfe935423f0 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.linkedin.data.schema.DataSchema; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.aspect.plugins.PluginFactory; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DataSchemaFactory; @@ -17,7 +18,6 @@ import com.linkedin.metadata.models.registry.config.Entities; import com.linkedin.metadata.models.registry.config.Entity; import com.linkedin.metadata.models.registry.config.Event; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.util.Pair; import java.io.FileInputStream; import java.io.FileNotFoundException; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java index bb0113abc9ed6..8fefa2fe00ae8 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java @@ -5,25 +5,26 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.UnionTemplate; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; +import com.linkedin.metadata.aspect.patch.template.Template; +import com.linkedin.metadata.aspect.patch.template.chart.ChartInfoTemplate; +import 
com.linkedin.metadata.aspect.patch.template.common.GlobalTagsTemplate; +import com.linkedin.metadata.aspect.patch.template.common.GlossaryTermsTemplate; +import com.linkedin.metadata.aspect.patch.template.common.OwnershipTemplate; +import com.linkedin.metadata.aspect.patch.template.common.StructuredPropertiesTemplate; +import com.linkedin.metadata.aspect.patch.template.dashboard.DashboardInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.dataflow.DataFlowInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.datajob.DataJobInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.datajob.DataJobInputOutputTemplate; +import com.linkedin.metadata.aspect.patch.template.dataproduct.DataProductPropertiesTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.DatasetPropertiesTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.EditableSchemaMetadataTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.UpstreamLineageTemplate; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DefaultEntitySpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EntitySpecBuilder; import com.linkedin.metadata.models.EventSpec; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; -import com.linkedin.metadata.models.registry.template.Template; -import com.linkedin.metadata.models.registry.template.chart.ChartInfoTemplate; -import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate; -import com.linkedin.metadata.models.registry.template.common.GlossaryTermsTemplate; -import com.linkedin.metadata.models.registry.template.common.OwnershipTemplate; -import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate; -import com.linkedin.metadata.models.registry.template.dataflow.DataFlowInfoTemplate; -import com.linkedin.metadata.models.registry.template.datajob.DataJobInfoTemplate; -import com.linkedin.metadata.models.registry.template.datajob.DataJobInputOutputTemplate; -import com.linkedin.metadata.models.registry.template.dataproduct.DataProductPropertiesTemplate; -import com.linkedin.metadata.models.registry.template.dataset.DatasetPropertiesTemplate; -import com.linkedin.metadata.models.registry.template.dataset.EditableSchemaMetadataTemplate; -import com.linkedin.metadata.models.registry.template.dataset.UpstreamLineageTemplate; import com.linkedin.metadata.snapshot.Snapshot; import java.util.ArrayList; import java.util.HashMap; @@ -84,6 +85,8 @@ private AspectTemplateEngine populateTemplateEngine(Map aspe aspectSpecTemplateMap.put(CHART_INFO_ASPECT_NAME, new ChartInfoTemplate()); aspectSpecTemplateMap.put(DASHBOARD_INFO_ASPECT_NAME, new DashboardInfoTemplate()); aspectSpecTemplateMap.put(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, new DataJobInputOutputTemplate()); + aspectSpecTemplateMap.put( + STRUCTURED_PROPERTIES_ASPECT_NAME, new StructuredPropertiesTemplate()); return new AspectTemplateEngine(aspectSpecTemplateMap); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java deleted file mode 100644 index 44090b3a6d05b..0000000000000 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.linkedin.metadata.models.registry.template; - -import static 
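// [editor's note, not part of the patch] With the registration above,
// structuredProperties joins the set of patch-capable aspects in the
// AspectTemplateEngine. A minimal sketch using the Template API shown earlier
// in this patch (the JsonPatch construction is assumed):
//
//   StructuredPropertiesTemplate template = new StructuredPropertiesTemplate();
//   StructuredProperties patched = template.applyPatch(template.getDefault(), jsonPatch);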
com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; - -import com.datahub.util.RecordUtils; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.github.fge.jsonpatch.JsonPatchException; -import com.github.fge.jsonpatch.Patch; -import com.linkedin.data.template.RecordTemplate; -import java.util.List; - -public abstract class CompoundKeyTemplate - implements ArrayMergingTemplate { - - /** - * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent - * paths to be specified - * - * @param transformedNode transformed node to have keys populated - * @return transformed node that has top level keys populated - */ - public JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch) { - JsonNode transformedNodeClone = transformedNode.deepCopy(); - List paths = getPaths(jsonPatch); - for (String path : paths) { - String[] keys = path.split("/"); - // Skip first as it will always be blank due to path starting with /, skip last key as we only - // need to populate top level - JsonNode parent = transformedNodeClone; - for (int i = 1; i < keys.length - 1; i++) { - if (parent.get(keys[i]) == null) { - ((ObjectNode) parent).set(keys[i], instance.objectNode()); - } - parent = parent.get(keys[i]); - } - } - - return transformedNodeClone; - } - - @Override - public T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch) - throws JsonProcessingException, JsonPatchException { - JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch); - JsonNode patched = jsonPatch.apply(transformed); - JsonNode postProcessed = rebaseFields(patched); - return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString()); - } -} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java deleted file mode 100644 index 18d070ec3da45..0000000000000 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.linkedin.metadata.models.registry.template.util; - -import static com.linkedin.metadata.Constants.*; - -import com.fasterxml.jackson.core.StreamReadConstraints; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.fge.jsonpatch.Patch; -import java.util.ArrayList; -import java.util.List; - -public class TemplateUtil { - - private TemplateUtil() {} - - public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - static { - int maxSize = - Integer.parseInt( - System.getenv() - .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER - .getFactory() - .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - - public static List getPaths(Patch jsonPatch) { - JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); - List paths = new ArrayList<>(); - patchNode - .elements() - .forEachRemaining( - node -> { - paths.add(node.get("path").asText()); - }); - return paths; - } -} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java 
b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/ChartInfoTemplateTest.java similarity index 92% rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java rename to entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/ChartInfoTemplateTest.java index 108936bde2ed5..b2911100519fc 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/ChartInfoTemplateTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/ChartInfoTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry; +package com.linkedin.metadata.aspect.patch.template; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; @@ -9,7 +9,7 @@ import com.github.fge.jsonpatch.JsonPatchOperation; import com.linkedin.chart.ChartInfo; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.models.registry.template.chart.ChartInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.chart.ChartInfoTemplate; import java.util.ArrayList; import java.util.List; import org.testng.Assert; diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/DashboardInfoTemplateTest.java similarity index 91% rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java rename to entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/DashboardInfoTemplateTest.java index 962ff1d40d873..be15d6976aee6 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/DashboardInfoTemplateTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/DashboardInfoTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry.patch; +package com.linkedin.metadata.aspect.patch.template; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; @@ -9,7 +9,7 @@ import com.github.fge.jsonpatch.JsonPatchOperation; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dashboard.DashboardInfo; -import com.linkedin.metadata.models.registry.template.dashboard.DashboardInfoTemplate; +import com.linkedin.metadata.aspect.patch.template.dashboard.DashboardInfoTemplate; import java.util.ArrayList; import java.util.List; import org.testng.Assert; diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/UpstreamLineageTemplateTest.java similarity index 99% rename from entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java rename to entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/UpstreamLineageTemplateTest.java index 8f410ae8da085..7d59664513d57 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/patch/UpstreamLineageTemplateTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/patch/template/UpstreamLineageTemplateTest.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.models.registry.patch; +package com.linkedin.metadata.aspect.patch.template; import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; @@ -16,7 +16,7 @@ import com.linkedin.dataset.FineGrainedLineageDownstreamType; import 
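// [editor's sketch, not part of the patch] The renamed template tests all follow
// one pattern: build fge JsonPatchOperations, apply them through the template,
// and assert on the typed result. Outline (urn hypothetical, checked exceptions
// omitted; "instance" is the statically imported JsonNodeFactory):
//
//   List<JsonPatchOperation> ops = new ArrayList<>();
//   ops.add(new AddOperation(
//       new JsonPointer("/inputEdges/urn:li:dataset:(urn:li:dataPlatform:hive,fct,PROD)"),
//       instance.objectNode().put("destinationUrn",
//           "urn:li:dataset:(urn:li:dataPlatform:hive,fct,PROD)")));
//   ChartInfoTemplate template = new ChartInfoTemplate();
//   ChartInfo patched = template.applyPatch(template.getDefault(), new JsonPatch(ops));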
com.linkedin.dataset.FineGrainedLineageUpstreamType; import com.linkedin.dataset.UpstreamLineage; -import com.linkedin.metadata.models.registry.template.dataset.UpstreamLineageTemplate; +import com.linkedin.metadata.aspect.patch.template.dataset.UpstreamLineageTemplate; import java.util.ArrayList; import java.util.List; import org.testng.Assert; diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java index 8c3f71fcc8019..f801ce7bf1ffe 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/PluginsTest.java @@ -61,17 +61,16 @@ public void testConfigEntityRegistry() throws FileNotFoundException { assertNotNull(eventSpec.getPegasusSchema()); assertEquals( - configEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "*", "status").size(), + configEntityRegistry + .getAspectPayloadValidators(ChangeType.UPSERT, "chart", "status") + .size(), 2); assertEquals( - configEntityRegistry.getAspectPayloadValidators(ChangeType.DELETE, "*", "status").size(), + configEntityRegistry + .getAspectPayloadValidators(ChangeType.DELETE, "chart", "status") + .size(), 0); - assertEquals( - configEntityRegistry.getMCLSideEffects(ChangeType.UPSERT, "chart", "chartInfo").size(), 1); - assertEquals( - configEntityRegistry.getMCLSideEffects(ChangeType.DELETE, "chart", "chartInfo").size(), 0); - assertEquals( configEntityRegistry.getMCPSideEffects(ChangeType.UPSERT, "dataset", "datasetKey").size(), 1); @@ -124,17 +123,16 @@ public void testMergedEntityRegistry() throws EntityRegistryException { assertNotNull(eventSpec.getPegasusSchema()); assertEquals( - mergedEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "*", "status").size(), - 3); + mergedEntityRegistry + .getAspectPayloadValidators(ChangeType.UPSERT, "chart", "status") + .size(), + 2); assertEquals( - mergedEntityRegistry.getAspectPayloadValidators(ChangeType.DELETE, "*", "status").size(), + mergedEntityRegistry + .getAspectPayloadValidators(ChangeType.DELETE, "chart", "status") + .size(), 1); - assertEquals( - mergedEntityRegistry.getMCLSideEffects(ChangeType.UPSERT, "chart", "chartInfo").size(), 2); - assertEquals( - mergedEntityRegistry.getMCLSideEffects(ChangeType.DELETE, "chart", "chartInfo").size(), 1); - assertEquals( mergedEntityRegistry.getMCPSideEffects(ChangeType.UPSERT, "dataset", "datasetKey").size(), 2); diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java index ce904142fecfe..8ee5ff4f99820 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCLSideEffectTest.java @@ -9,7 +9,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -60,9 +59,7 @@ public TestMCLSideEffect(AspectPluginConfig aspectPluginConfig) { @Override protected Stream applyMCLSideEffect( - @Nonnull MCLBatchItem input, - @Nonnull 
EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + @Nonnull MCLBatchItem input, @Nonnull AspectRetriever aspectRetriever) { return Stream.of(input); } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java index ee8f947e0e994..8522e8facf3e0 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/hooks/MCPSideEffectTest.java @@ -9,7 +9,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.List; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -60,7 +59,7 @@ public TestMCPSideEffect(AspectPluginConfig aspectPluginConfig) { @Override protected Stream<UpsertItem> applyMCPSideEffect( - UpsertItem input, EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever) { + UpsertItem input, @Nonnull AspectRetriever aspectRetriever) { return Stream.of(input); } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java index 07c99ee8546be..eb132836be465 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/plugins/validation/ValidatorPluginTest.java @@ -33,7 +33,7 @@ public void testCustomValidator() { TestEntityProfile.class.getClassLoader().getResourceAsStream(REGISTRY_FILE)); List<AspectPayloadValidator> validators = - configEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "*", "status"); + configEntityRegistry.getAspectPayloadValidators(ChangeType.UPSERT, "chart", "status"); assertEquals( validators, List.of( diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java new file mode 100644 index 0000000000000..96e9fceb4a05d --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/PropertyDefinitionValidatorTest.java @@ -0,0 +1,212 @@ +package com.linkedin.metadata.aspect.validators; + +import static org.testng.Assert.*; + +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.aspect.validation.PropertyDefinitionValidator; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PropertyCardinality; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.PropertyValueArray; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.net.URISyntaxException; +import org.testng.annotations.Test; + +public class PropertyDefinitionValidatorTest { + @Test + public void testValidatePreCommitNoPrevious() + throws URISyntaxException, AspectValidationException { + StructuredPropertyDefinition newProperty = new StructuredPropertyDefinition(); + newProperty.setEntityTypes( + new
UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + newProperty.setDisplayName("newProp"); + newProperty.setQualifiedName("prop3"); + newProperty.setCardinality(PropertyCardinality.MULTIPLE); + newProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + assertTrue(PropertyDefinitionValidator.validate(null, newProperty)); + } + + @Test + public void testCanChangeSingleToMultiple() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.SINGLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setCardinality(PropertyCardinality.MULTIPLE); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeMultipleToSingle() + throws URISyntaxException, CloneNotSupportedException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setCardinality(PropertyCardinality.SINGLE); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeValueType() throws URISyntaxException, CloneNotSupportedException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setValueType(Urn.createFromString("urn:li:logicalType:NUMBER")); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCanChangeDisplayName() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + 
Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setDisplayName("newProp"); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeFullyQualifiedName() + throws URISyntaxException, CloneNotSupportedException { + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + newProperty.setQualifiedName("newProp"); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCannotChangeRestrictAllowedValues() + throws URISyntaxException, CloneNotSupportedException { + // No constraint -> constraint case + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + PropertyValue allowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(1.0)).setDescription("hello"); + newProperty.setAllowedValues(new PropertyValueArray(allowedValue)); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + + // Remove allowed values from constraint case + PropertyValue oldAllowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(3.0)).setDescription("hello"); + oldProperty.setAllowedValues((new PropertyValueArray(allowedValue, oldAllowedValue))); + assertThrows( + AspectValidationException.class, + () -> PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCanExpandAllowedValues() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + // Constraint -> no constraint case + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = 
oldProperty.copy(); + PropertyValue allowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(1.0)).setDescription("hello"); + oldProperty.setAllowedValues(new PropertyValueArray(allowedValue)); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + + // Add allowed values to constraint case + PropertyValue newAllowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(3.0)).setDescription("hello"); + newProperty.setAllowedValues((new PropertyValueArray(allowedValue, newAllowedValue))); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } + + @Test + public void testCanChangeAllowedValueDescriptions() + throws URISyntaxException, CloneNotSupportedException, AspectValidationException { + // Constraint -> no constraint case + StructuredPropertyDefinition oldProperty = new StructuredPropertyDefinition(); + oldProperty.setEntityTypes( + new UrnArray( + Urn.createFromString("urn:li:logicalEntity:dataset"), + Urn.createFromString("urn:li:logicalEntity:chart"), + Urn.createFromString("urn:li:logicalEntity:glossaryTerm"))); + oldProperty.setDisplayName("oldProp"); + oldProperty.setQualifiedName("prop3"); + oldProperty.setCardinality(PropertyCardinality.MULTIPLE); + oldProperty.setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + StructuredPropertyDefinition newProperty = oldProperty.copy(); + PropertyValue allowedValue = + new PropertyValue().setValue(PrimitivePropertyValue.create(1.0)).setDescription("hello"); + oldProperty.setAllowedValues(new PropertyValueArray(allowedValue)); + PropertyValue newAllowedValue = + new PropertyValue() + .setValue(PrimitivePropertyValue.create(1.0)) + .setDescription("hello there"); + newProperty.setAllowedValues(new PropertyValueArray(newAllowedValue)); + assertTrue(PropertyDefinitionValidator.validate(oldProperty, newProperty)); + } +} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java new file mode 100644 index 0000000000000..450b299b48b34 --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/StructuredPropertiesValidatorTest.java @@ -0,0 +1,246 @@ +package com.linkedin.metadata.aspect.validators; + +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.Aspect; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.PropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import javax.annotation.Nonnull; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class StructuredPropertiesValidatorTest { + + static class 
MockAspectRetriever implements AspectRetriever { + StructuredPropertyDefinition _propertyDefinition; + + MockAspectRetriever(StructuredPropertyDefinition defToReturn) { + this._propertyDefinition = defToReturn; + } + + @Nonnull + @Override + public Map<Urn, Map<String, Aspect>> getLatestAspectObjects( + Set<Urn> urns, Set<String> aspectNames) + throws RemoteInvocationException, URISyntaxException { + return Map.of( + urns.stream().findFirst().get(), + Map.of(aspectNames.stream().findFirst().get(), new Aspect(_propertyDefinition.data()))); + } + + @Nonnull + @Override + public EntityRegistry getEntityRegistry() { + return null; + } + } + + @Test + public void testValidateAspectNumberUpsert() throws URISyntaxException { + StructuredPropertyDefinition numberPropertyDef = + new StructuredPropertyDefinition() + .setValueType(Urn.createFromString("urn:li:type:datahub.number")) + .setAllowedValues( + new PropertyValueArray( + List.of( + new PropertyValue().setValue(PrimitivePropertyValue.create(30.0)), + new PropertyValue().setValue(PrimitivePropertyValue.create(60.0)), + new PropertyValue().setValue(PrimitivePropertyValue.create(90.0))))); + + try { + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create(30.0))); + StructuredProperties numberPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + boolean isValid = + StructuredPropertiesValidator.validate( + numberPayload, new MockAspectRetriever(numberPropertyDef)); + Assert.assertTrue(isValid); + } catch (AspectValidationException e) { + throw new RuntimeException(e); + } + + try { + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create(0.0))); + StructuredProperties numberPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + StructuredPropertiesValidator.validate( + numberPayload, new MockAspectRetriever(numberPropertyDef)); + Assert.fail("Should have raised exception for disallowed value 0.0"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("{double=0.0} should be one of [{")); + } + + // Assign string value to number property + StructuredPropertyValueAssignment stringAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create("hello"))); + StructuredProperties stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(stringAssignment)); + try { + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(numberPropertyDef)); + Assert.fail("Should have raised exception for mis-matched types"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("should be a number")); + } + } + + @Test + public void testValidateAspectDateUpsert() throws URISyntaxException { + // Assign string value + StructuredPropertyValueAssignment stringAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( +
Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create("hello"))); + StructuredProperties stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(stringAssignment)); + + // Assign invalid date + StructuredPropertyDefinition datePropertyDef = + new StructuredPropertyDefinition() + .setValueType(Urn.createFromString("urn:li:type:datahub.date")); + try { + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(datePropertyDef)); + Assert.fail("Should have raised exception for mis-matched types"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("should be a date with format")); + } + + // Assign valid date + StructuredPropertyValueAssignment dateAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues( + new PrimitivePropertyValueArray(PrimitivePropertyValue.create("2023-10-24"))); + StructuredProperties datePayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(dateAssignment)); + try { + boolean isValid = + StructuredPropertiesValidator.validate( + datePayload, new MockAspectRetriever(datePropertyDef)); + Assert.assertTrue(isValid); + } catch (AspectValidationException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testValidateAspectStringUpsert() throws URISyntaxException { + // Assign string value + StructuredPropertyValueAssignment stringAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create("hello"))); + StructuredProperties stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(stringAssignment)); + + // Assign date + StructuredPropertyValueAssignment dateAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues( + new PrimitivePropertyValueArray(PrimitivePropertyValue.create("2023-10-24"))); + StructuredProperties datePayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(dateAssignment)); + + // Assign number + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues(new PrimitivePropertyValueArray(PrimitivePropertyValue.create(30.0))); + StructuredProperties numberPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + StructuredPropertyDefinition stringPropertyDef = + new StructuredPropertyDefinition() + .setValueType(Urn.createFromString("urn:li:type:datahub.string")) + .setAllowedValues( + new PropertyValueArray( + List.of( + new PropertyValue().setValue(PrimitivePropertyValue.create("hello")), + new PropertyValue() + .setValue(PrimitivePropertyValue.create("2023-10-24"))))); + + // Valid strings (both the date value and "hello" are valid) + try { + boolean isValid = + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(stringPropertyDef)); + Assert.assertTrue(isValid); + isValid = 
+ StructuredPropertiesValidator.validate( + datePayload, new MockAspectRetriever(stringPropertyDef)); + Assert.assertTrue(isValid); + } catch (AspectValidationException e) { + throw new RuntimeException(e); + } + + // Invalid: assign a number to the string property + try { + StructuredPropertiesValidator.validate( + numberPayload, new MockAspectRetriever(stringPropertyDef)); + Assert.fail("Should have raised exception for mis-matched types"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("should be a string")); + } + + // Invalid allowedValue + try { + assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn( + Urn.createFromString("urn:li:structuredProperty:io.acryl.privacy.retentionTime")) + .setValues( + new PrimitivePropertyValueArray(PrimitivePropertyValue.create("not hello"))); + stringPayload = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + + StructuredPropertiesValidator.validate( + stringPayload, new MockAspectRetriever(stringPropertyDef)); + Assert.fail("Should have raised exception for disallowed value `not hello`"); + } catch (AspectValidationException e) { + Assert.assertTrue(e.getMessage().contains("{string=not hello} should be one of [{")); + } + } +} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java index 2cb48c1b20da9..d9cf8fd2603a8 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java @@ -198,7 +198,7 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) { .getSearchableAnnotation() .getFieldName()); assertEquals( - SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.TEXT, testEntityInfo .getSearchableFieldSpecMap() .get(new PathSpec("customProperties").toString()) diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java index b3eb2af72708c..1a64359008dd8 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java @@ -6,6 +6,7 @@ import com.linkedin.data.schema.ArrayDataSchema; import com.linkedin.data.schema.DataSchema; import com.linkedin.data.schema.RecordDataSchema; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.DataSchemaFactory; import com.linkedin.metadata.models.DefaultEntitySpec; @@ -17,7 +18,6 @@ import com.linkedin.metadata.models.annotation.EventAnnotation; import com.linkedin.metadata.models.registry.config.EntityRegistryLoadResult; import com.linkedin.metadata.models.registry.config.LoadStatus; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.util.Pair; import java.io.FileNotFoundException; import java.util.ArrayList; diff --git a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java index f9d22b142cbb9..8174afc20765f 100644 
--- a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java +++ b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java @@ -11,11 +11,11 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.ingestion.DataHubIngestionSourceConfig; import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.query.ListResult; import java.util.Collections; @@ -88,7 +88,7 @@ public void setupTest() throws Exception { .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse2.getAspects()).thenReturn(map2); - JavaEntityClient mockClient = Mockito.mock(JavaEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); // Set up mocks for ingestion source batch fetching Mockito.when( diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index 3d9b533dc8f72..39a17612aa4b3 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -13,6 +13,9 @@ public class Constants { public static final String UNKNOWN_ACTOR = "urn:li:corpuser:UNKNOWN"; // Unknown principal. public static final Long ASPECT_LATEST_VERSION = 0L; public static final String UNKNOWN_DATA_PLATFORM = "urn:li:dataPlatform:unknown"; + public static final String ENTITY_TYPE_URN_PREFIX = "urn:li:entityType:"; + public static final String DATA_TYPE_URN_PREFIX = "urn:li:dataType:"; + public static final String STRUCTURED_PROPERTY_MAPPING_FIELD = "structuredProperties"; // !!!!!!! IMPORTANT !!!!!!! // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. 
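The URN prefix constants added in the hunk above (`ENTITY_TYPE_URN_PREFIX`, `DATA_TYPE_URN_PREFIX`, plus `STRUCTURED_PROPERTY_MAPPING_FIELD`) encode the naming convention that the structured-property examples later in this patch rely on. A minimal sketch of the same convention from the Python client side; the dataset URN, property name, value, and GMS address are illustrative assumptions, not part of this change:

```python
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.rest_emitter import DatahubRestEmitter
from datahub.metadata.schema_classes import (
    StructuredPropertiesClass,
    StructuredPropertyValueAssignmentClass,
)

# The prefixes mirror the Java constants above; the suffix is a qualified name.
entity_type_urn = "urn:li:entityType:" + "datahub.dataset"  # shown for the convention only
data_type_urn = "urn:li:dataType:" + "datahub.string"       # shown for the convention only
property_urn = "urn:li:structuredProperty:" + "io.acryl.privacy.retentionTime"

# Assign the (assumed) property to a hypothetical dataset via a local GMS.
mcp = MetadataChangeProposalWrapper(
    entityUrn="urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD)",
    aspect=StructuredPropertiesClass(
        properties=[
            StructuredPropertyValueAssignmentClass(
                propertyUrn=property_urn, values=[90]
            )
        ]
    ),
)
DatahubRestEmitter(gms_server="http://localhost:8080").emit(mcp)
```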
@@ -73,6 +76,10 @@ public class Constants { public static final String QUERY_ENTITY_NAME = "query"; public static final String DATA_PRODUCT_ENTITY_NAME = "dataProduct"; public static final String OWNERSHIP_TYPE_ENTITY_NAME = "ownershipType"; + public static final String STRUCTURED_PROPERTY_ENTITY_NAME = "structuredProperty"; + public static final String DATA_TYPE_ENTITY_NAME = "dataType"; + public static final String ENTITY_TYPE_ENTITY_NAME = "entityType"; + public static final String FORM_ENTITY_NAME = "form"; /** Aspects */ // Common @@ -125,6 +132,8 @@ public class Constants { public static final String VIEW_PROPERTIES_ASPECT_NAME = "viewProperties"; public static final String DATASET_PROFILE_ASPECT_NAME = "datasetProfile"; + public static final String STRUCTURED_PROPERTIES_ASPECT_NAME = "structuredProperties"; + public static final String FORMS_ASPECT_NAME = "forms"; // Aspect support public static final String FINE_GRAINED_LINEAGE_DATASET_TYPE = "DATASET"; public static final String FINE_GRAINED_LINEAGE_FIELD_SET_TYPE = "FIELD_SET"; @@ -306,6 +315,20 @@ public class Constants { public static final String OWNERSHIP_TYPE_KEY_ASPECT_NAME = "ownershipTypeKey"; public static final String OWNERSHIP_TYPE_INFO_ASPECT_NAME = "ownershipTypeInfo"; + // Structured Property + public static final String STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME = "propertyDefinition"; + + // Form + public static final String FORM_INFO_ASPECT_NAME = "formInfo"; + public static final String FORM_KEY_ASPECT_NAME = "formKey"; + public static final String DYNAMIC_FORM_ASSIGNMENT_ASPECT_NAME = "dynamicFormAssignment"; + + // Data Type + public static final String DATA_TYPE_INFO_ASPECT_NAME = "dataTypeInfo"; + + // Entity Type + public static final String ENTITY_TYPE_INFO_ASPECT_NAME = "entityTypeInfo"; + // Settings public static final String GLOBAL_SETTINGS_ENTITY_NAME = "globalSettings"; public static final String GLOBAL_SETTINGS_INFO_ASPECT_NAME = "globalSettingsInfo"; diff --git a/metadata-ingestion-modules/airflow-plugin/scripts/release.sh b/metadata-ingestion-modules/airflow-plugin/scripts/release.sh index 87157479f37d6..5667e761ea558 100755 --- a/metadata-ingestion-modules/airflow-plugin/scripts/release.sh +++ b/metadata-ingestion-modules/airflow-plugin/scripts/release.sh @@ -13,7 +13,7 @@ MODULE=datahub_airflow_plugin python -c 'import setuptools; where="./src"; assert setuptools.find_packages(where) == setuptools.find_namespace_packages(where), "you seem to be missing or have extra __init__.py files"' if [[ ${RELEASE_VERSION:-} ]]; then # Replace version with RELEASE_VERSION env variable - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/${MODULE}/__init__.py + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/${MODULE}/__init__.py else vim src/${MODULE}/__init__.py fi diff --git a/metadata-ingestion/examples/bootstrap_data/business_glossary.yml b/metadata-ingestion/examples/bootstrap_data/business_glossary.yml index de6ba8731c878..327246863b0ab 100644 --- a/metadata-ingestion/examples/bootstrap_data/business_glossary.yml +++ b/metadata-ingestion/examples/bootstrap_data/business_glossary.yml @@ -10,6 +10,8 @@ nodes: knowledge_links: - label: Wiki link for classification url: "https://en.wikipedia.org/wiki/Classification" + custom_properties: + is_confidential: true terms: - name: Sensitive description: Sensitive Data diff --git a/metadata-ingestion/examples/forms/forms.yaml b/metadata-ingestion/examples/forms/forms.yaml 
new file mode 100644 index 0000000000000..80bb7cee08ec3 --- /dev/null +++ b/metadata-ingestion/examples/forms/forms.yaml @@ -0,0 +1,54 @@ +- id: 123456 + # urn: "urn:li:form:123456" # optional if id is provided + type: VERIFICATION + name: "Metadata Initiative 2023" + description: "How we want to ensure the most important data assets in our organization have all of the most important and expected pieces of metadata filled out" + prompts: + - id: "123" + title: "Retention Time" + description: "Apply Retention Time structured property to form" + type: STRUCTURED_PROPERTY + structured_property_id: io.acryl.privacy.retentionTime + required: True # optional, will default to True + - id: "92847" + title: "Replication SLA" + description: "Apply Replication SLA structured property to form" + type: STRUCTURED_PROPERTY + structured_property_urn: urn:li:structuredProperty:io.acryl.dataManagement.replicationSLA + required: True + - id: "76543" + title: "Replication SLA" + description: "Apply Replication SLA structured property to form" + type: FIELDS_STRUCTURED_PROPERTY + structured_property_urn: urn:li:structuredProperty:io.acryl.dataManagement.replicationSLA + required: False + entities: # Either pass a list of urns or a group of filters + # urns: + # - urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) + # - urn:li:dataset:(urn:li:dataPlatform:snowflake,user.clicks,PROD) + filters: + types: + - dataset + platforms: + - snowflake + - dbt + domains: + - urn:li:domain:b41fbb69-1549-4f30-a463-d75d1bed31c1 + containers: + - urn:li:container:21d4204e13d5b984c58acad468ecdbdd +- urn: "urn:li:form:917364" + # id: 917364 # optional if urn is provided + type: VERIFICATION + name: "Governance Initiative" + prompts: + - id: "123" + title: "Retention Time" + description: "Apply Retention Time structured property to form" + type: STRUCTURED_PROPERTY + structured_property_id: io.acryl.privacy.retentionTime + required: False + - id: "certifier" + title: "Certifier" + type: STRUCTURED_PROPERTY + structured_property_id: io.acryl.dataManagement.certifier + required: True diff --git a/metadata-ingestion/examples/mce_files/test_structured_properties.json b/metadata-ingestion/examples/mce_files/test_structured_properties.json new file mode 100644 index 0000000000000..7771883152d38 --- /dev/null +++ b/metadata-ingestion/examples/mce_files/test_structured_properties.json @@ -0,0 +1,218 @@ +[ + { + "auditHeader": null, + "entityType": "entityType", + "entityUrn": "urn:li:entityType:datahub.dataset", + "changeType": "UPSERT", + "aspectName": "entityTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.dataset\", \"displayName\": \"Dataset\", \"description\": \"An entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "entityType", + "entityUrn": "urn:li:entityType:datahub.corpuser", + "changeType": "UPSERT", + "aspectName": "entityTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.corpuser\", \"displayName\": \"User\", \"description\": \"An entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "entityType", + "entityUrn": "urn:li:entityType:datahub.corpGroup", + "changeType": "UPSERT", + "aspectName": "entityTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.corpGroup\", \"displayName\": \"Group\", \"description\": \"An entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + 
"auditHeader": null, + "entityType": "dataType", + "entityUrn": "urn:li:dataType:datahub.string", + "changeType": "UPSERT", + "aspectName": "dataTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.string\", \"displayName\": \"String\", \"description\": \"A string type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "dataType", + "entityUrn": "urn:li:dataType:datahub.float", + "changeType": "UPSERT", + "aspectName": "dataTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.float\", \"displayName\": \"Number\", \"description\": \"A number type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "dataType", + "entityUrn": "urn:li:dataType:datahub.urn", + "changeType": "UPSERT", + "aspectName": "dataTypeInfo", + "aspect": { + "value": "{\"qualifiedName\": \"datahub.urn\", \"displayName\": \"Urn\", \"description\": \"A entity type.\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property1", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property1\", \"displayName\": \"String Property\", \"valueType\": \"urn:li:dataType:datahub.string\", \"cardinality\": \"SINGLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property2", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property2\", \"displayName\": \"String Property With Allowed Values\", \"valueType\": \"urn:li:dataType:datahub.string\", \"cardinality\": \"MULTIPLE\", \"allowedValues\": [ { \"value\": { \"string\": \"Test 1\" } }, { \"value\": { \"string\": \"Test 2\" } } ], \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property3", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property3\", \"displayName\": \"Numeric Property\", \"valueType\": \"urn:li:dataType:datahub.float\", \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property4", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property4\", \"displayName\": \"Numeric Property with Allowed Values\", \"valueType\": \"urn:li:dataType:datahub.float\", \"cardinality\": \"MULTIPLE\", \"allowedValues\": [ { \"value\": { \"double\": 0.12 } }, { \"value\": { \"double\": 1 } } ], \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": 
"structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property5", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property5\", \"displayName\": \"Urn property no type qualifier\", \"valueType\": \"urn:li:dataType:datahub.urn\", \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property6", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property6\", \"displayName\": \"Urn property with 1 type qualifier (user)\", \"valueType\": \"urn:li:dataType:datahub.urn\", \"typeQualifier\": { \"allowedTypes\": [\"urn:li:entityType:datahub.corpuser\"] }, \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "structuredProperty", + "entityUrn": "urn:li:structuredProperty:test.Property7", + "changeType": "UPSERT", + "aspectName": "propertyDefinition", + "aspect": { + "value": "{\"qualifiedName\": \"test.Property7\", \"displayName\": \"Urn property with 2 type qualifier (user)\", \"valueType\": \"urn:li:dataType:datahub.urn\", \"typeQualifier\": { \"allowedTypes\": [\"urn:li:entityType:datahub.corpuser\", \"urn:li:entityType:datahub.corpGroup\"] }, \"cardinality\": \"MULTIPLE\", \"entityTypes\": [\"urn:li:entityType:datahub.dataset\"], \"description\": \"My description\"}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "form", + "entityUrn": "urn:li:form:my-test-form-verification-default-3", + "changeType": "UPSERT", + "aspectName": "formInfo", + "aspect": { + "value": "{\"name\": \"My test form\", \"description\": \"My test description\", \"type\": \"VERIFICATION\", \"prompts\": [{\"id\": \"prompt-1\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}, {\"id\": \"prompt-2\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}]}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "form", + "entityUrn": "urn:li:form:my-test-no-verification-3", + "changeType": "UPSERT", + "aspectName": "formInfo", + "aspect": { + "value": "{\"name\": \"My test form without verification\", \"description\": \"My test description\", \"prompts\": [{\"id\": \"prompt-1\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}, {\"id\": \"prompt-2\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}]}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": 
"form", + "entityUrn": "urn:li:form:my-test-no-verification-custom-5", + "changeType": "UPSERT", + "aspectName": "formInfo", + "aspect": { + "value": "{\"name\": \"My test form with custom verification\", \"description\": \"My test description\", \"type\": \"VERIFICATION\", \"verification\": { \"type\": \"urn:li:verificationType:my-test\"}, \"prompts\": [{\"id\": \"prompt-1\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"required\": true, \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" } }, {\"id\": \"prompt-2\", \"title\": \"Select your thing\", \"description\": \"Which will you select?\", \"type\": \"STRUCTURED_PROPERTY\", \"required\": true, \"structuredPropertyParams\": { \"urn\": \"urn:li:structuredProperty:test.Property7\" }}]}", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "form", + "entityUrn": "urn:li:form:my-test-no-verification-custom-5", + "changeType": "UPSERT", + "aspectName": "dynamicFormAssignment", + "aspect": { + "value": "{\"filter\": { \"or\": [ { \"and\": [ { \"field\": \"platform\", \"condition\": \"EQUAL\", \"values\": [\"urn:li:dataPlatform:snowflake\"], \"value\": \"\" } ] } ] } }", + "contentType": "application/json" + }, + "systemMetadata": null + }, + { + "auditHeader": null, + "entityType": "dataset", + "entityUrn": "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)", + "changeType": "UPSERT", + "aspectName": "forms", + "aspect": { + "value": "{ \"incompleteForms\":[\n {\n \"incompletePrompts\":[\n \n ],\n \"urn\":\"urn:li:form:my-test-no-verification-custom-4\",\n \"completedPrompts\":[\n {\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697585983115\n },\n \"id\":\"prompt-2\"\n },\n {\n \"id\":\"prompt-1\",\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697585983252\n }\n }\n ]\n },\n {\n \"incompletePrompts\":[\n \n ],\n \"urn\":\"urn:li:form:my-test-no-verification-custom-5\",\n \"completedPrompts\":[\n {\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697645753521\n },\n \"id\":\"prompt-2\"\n },\n {\n \"id\":\"prompt-1\",\n \"lastModified\":{\n \"actor\":\"urn:li:corpuser:__datahub_system\",\n \"time\":1697645754180\n }\n }\n ]\n }\n ],\n \"completedForms\":[\n \n ]}", + "contentType": "application/json" + }, + "systemMetadata": null + } +] \ No newline at end of file diff --git a/metadata-ingestion/examples/structured_properties/README.md b/metadata-ingestion/examples/structured_properties/README.md new file mode 100644 index 0000000000000..0429310be7424 --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/README.md @@ -0,0 +1,51 @@ +# Extended Properties + +## Expected Capabilities + +### structured_properties command + +```yaml +- id: io.acryl.privacy.retentionTime + # urn: urn:li:structuredProperty:<> + # fullyQualifiedName: io.acryl.privacy.retentionTime + type: STRING + cardinality: MULTIPLE + entityTypes: + - dataset # or urn:li:logicalEntity:metamodel.datahub.dataset + - dataflow + description: "Retention Time is used to figure out how long to retain records in a dataset" + allowedValues: + - value: 30 days + description: 30 days, usually reserved for datasets that are ephemeral and contain pii + - value: 3 months + description: Use this for datasets that drive monthly reporting but contain pii + - value: 2 yrs + description: Use this for 
non-sensitive data that can be retained for longer +- id: io.acryl.dataManagement.replicationSLA + type: NUMBER + description: "SLA for how long data can be delayed before replicating to the destination cluster" + entityTypes: + - dataset +- id: io.acryl.dataManagement.deprecationDate + type: DATE + entityTypes: + - dataset + - dataFlow + - dataJob +``` + +``` +datahub properties create -f structured_properties.yaml +``` + +``` +datahub properties create --name io.acryl.privacy.retentionTime --type STRING --cardinality MULTIPLE --entity_type DATASET --entity_type DATAFLOW +``` + +### dataset command + +``` +datahub dataset create -f dataset.yaml +``` + +See example in `dataproduct`. diff --git a/metadata-ingestion/examples/structured_properties/click_event.avsc b/metadata-ingestion/examples/structured_properties/click_event.avsc new file mode 100644 index 0000000000000..b277674f8b62f --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/click_event.avsc @@ -0,0 +1,14 @@ +{ + "namespace": "org.acryl", + "type": "record", + "name": "ClickEvent", + "fields": [ + { "name": "ip", "type": "string" }, + { "name": "url", "type": "string" }, + { "name": "time", "type": "long" }, + { "name": "referer", "type": ["string", "null"] }, + { "name": "user_agent", "type": ["string", "null"] }, + { "name": "user_id", "type": ["string", "null"] }, + { "name": "session_id", "type": ["string", "null"] } + ] +} diff --git a/metadata-ingestion/examples/structured_properties/dataset.yaml b/metadata-ingestion/examples/structured_properties/dataset.yaml new file mode 100644 index 0000000000000..557bf0167a51b --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/dataset.yaml @@ -0,0 +1,45 @@ +## This file is used to define a dataset and provide metadata for it +- id: user.clicks + platform: hive + # - urn: urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) # use urn instead of id and platform + subtype: Table + schema: + file: examples/structured_properties/click_event.avsc + fields: + - id: ip + - urn: urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD),ip) + structured_properties: # structured properties for schema fields/columns go here + io.acryl.dataManagement.deprecationDate: "2023-01-01" + io.acryl.dataManagement.certifier: urn:li:corpuser:john.doe@example.com + io.acryl.dataManagement.replicationSLA: 90 + structured_properties: # dataset level structured properties go here + io.acryl.privacy.retentionTime: 365 + projectNames: + - Tracking + - DataHub +- id: ClickEvent + platform: events + subtype: Topic + description: | + This is a sample event that is generated when a user clicks on a link. + Do not use this event for any purpose other than testing. 
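Entries like the ones in this dataset.yaml are what the `datahub dataset create -f dataset.yaml` command from the README above consumes. A rough programmatic equivalent, sketched with the `Dataset` class that this patch adds later (`Dataset.from_yaml` / `generate_mcp`); the file path and GMS endpoint are assumptions:

```python
from datahub.api.entities.dataset.dataset import Dataset
from datahub.emitter.rest_emitter import DatahubRestEmitter

emitter = DatahubRestEmitter(gms_server="http://localhost:8080")  # assumed endpoint

# Each YAML document becomes a Dataset model; generate_mcp() yields the
# datasetProperties / schemaMetadata / structuredProperties proposals for it.
for dataset in Dataset.from_yaml("examples/structured_properties/dataset.yaml"):
    for mcp in dataset.generate_mcp():
        emitter.emit(mcp)
```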
+ properties: + project_name: Tracking + namespace: org.acryl.tracking + version: 1.0.0 + retention: 30 + structured_properties: + io.acryl.dataManagement.certifier: urn:li:corpuser:john.doe@example.com + schema: + file: examples/structured_properties/click_event.avsc + downstreams: + - urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) +- id: user.clicks + platform: snowflake + schema: + fields: + - id: user_id + structured_properties: + io.acryl.dataManagement.deprecationDate: "2023-01-01" + structured_properties: + io.acryl.dataManagement.replicationSLA: 90 diff --git a/metadata-ingestion/examples/structured_properties/structured_properties.yaml b/metadata-ingestion/examples/structured_properties/structured_properties.yaml new file mode 100644 index 0000000000000..5c7ce47ba3b8a --- /dev/null +++ b/metadata-ingestion/examples/structured_properties/structured_properties.yaml @@ -0,0 +1,68 @@ +- id: io.acryl.privacy.retentionTime + # - urn: urn:li:structuredProperty:io.acryl.privacy.retentionTime # optional if id is provided + qualified_name: io.acryl.privacy.retentionTime # required if urn is provided + type: number + cardinality: MULTIPLE + display_name: Retention Time + entity_types: + - dataset # or urn:li:entityType:datahub.dataset + - dataFlow + description: "Retention Time is used to figure out how long to retain records in a dataset" + allowed_values: + - value: 30 + description: 30 days, usually reserved for datasets that are ephemeral and contain pii + - value: 90 + description: Use this for datasets that drive monthly reporting but contain pii + - value: 365 + description: Use this for non-sensitive data that can be retained for longer +- id: io.acryl.dataManagement.replicationSLA + type: number + display_name: Replication SLA + description: "SLA for how long data can be delayed before replicating to the destination cluster" + entity_types: + - dataset +- id: io.acryl.dataManagement.deprecationDate + type: date + display_name: Deprecation Date + entity_types: + - dataset + - dataFlow + - dataJob +- id: io.acryl.dataManagement.steward + type: urn + type_qualifier: + allowed_types: # only user and group urns are allowed + - corpuser + - corpGroup + display_name: Steward + entity_types: + - dataset + - dataFlow + - dataJob +- id: io.acryl.dataManagement.certifier + type: urn + display_name: Person Certifying the asset + entity_types: + - dataset + - schemaField +- id: io.acryl.dataManagement.team + type: string + display_name: Management team + entity_types: + - dataset +- id: projectNames + type: string + cardinality: MULTIPLE + display_name: Project names + entity_types: + - dataset + allowed_values: + - value: Tracking + description: test value 1 for project + - value: DataHub + description: test value 2 for project +- id: namespace + type: string + display_name: Namespace + entity_types: + - dataset diff --git a/metadata-ingestion/scripts/docgen.sh b/metadata-ingestion/scripts/docgen.sh index affb87f2e70a9..09fa2be912f61 100755 --- a/metadata-ingestion/scripts/docgen.sh +++ b/metadata-ingestion/scripts/docgen.sh @@ -7,4 +7,4 @@ DOCS_OUT_DIR=$DATAHUB_ROOT/docs/generated/ingestion EXTRA_DOCS_DIR=$DATAHUB_ROOT/metadata-ingestion/docs/sources rm -r $DOCS_OUT_DIR || true -python scripts/docgen.py --out-dir ${DOCS_OUT_DIR} --extra-docs ${EXTRA_DOCS_DIR} $@ +SPARK_VERSION=3.3 python scripts/docgen.py --out-dir ${DOCS_OUT_DIR} --extra-docs ${EXTRA_DOCS_DIR} $@ diff --git a/metadata-ingestion/scripts/modeldocgen.py b/metadata-ingestion/scripts/modeldocgen.py index 
81b26145e620c..610c6d3107916 100644 --- a/metadata-ingestion/scripts/modeldocgen.py +++ b/metadata-ingestion/scripts/modeldocgen.py @@ -493,10 +493,32 @@ def strip_types(field_path: str) -> str: ], ) +@dataclass +class EntityAspectName: + entityName: str + aspectName: str + + +@dataclass +class AspectPluginConfig: + className: str + enabled: bool + supportedOperations: List[str] + supportedEntityAspectNames: List[EntityAspectName] + + +@dataclass +class PluginConfiguration: + aspectPayloadValidators: Optional[List[AspectPluginConfig]] = None + mutationHooks: Optional[List[AspectPluginConfig]] = None + mclSideEffects: Optional[List[AspectPluginConfig]] = None + mcpSideEffects: Optional[List[AspectPluginConfig]] = None + class EntityRegistry(ConfigModel): entities: List[EntityDefinition] events: Optional[List[EventDefinition]] + plugins: Optional[PluginConfiguration] = None def load_registry_file(registry_file: str) -> Dict[str, EntityDefinition]: diff --git a/metadata-ingestion/scripts/release.sh b/metadata-ingestion/scripts/release.sh index eacaf1d920a8d..955eb562089f7 100755 --- a/metadata-ingestion/scripts/release.sh +++ b/metadata-ingestion/scripts/release.sh @@ -11,7 +11,7 @@ fi python -c 'import setuptools; where="./src"; assert setuptools.find_packages(where) == setuptools.find_namespace_packages(where), "you seem to be missing or have extra __init__.py files"' if [[ ${RELEASE_VERSION:-} ]]; then # Replace version with RELEASE_VERSION env variable - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py else vim src/datahub/__init__.py fi diff --git a/metadata-ingestion/src/datahub/api/entities/dataset/__init__.py b/metadata-ingestion/src/datahub/api/entities/dataset/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py b/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py new file mode 100644 index 0000000000000..3b4a5fbfbb061 --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/dataset/dataset.py @@ -0,0 +1,466 @@ +import json +import logging +from pathlib import Path +from typing import Dict, Iterable, List, Optional, Tuple, Union + +from pydantic import BaseModel, Field, validator +from ruamel.yaml import YAML + +from datahub.api.entities.structuredproperties.structuredproperties import ( + AllowedTypes, + StructuredProperties, +) +from datahub.configuration.common import ConfigModel +from datahub.emitter.mce_builder import ( + make_data_platform_urn, + make_dataset_urn, + make_schema_field_urn, +) +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.extractor.schema_util import avro_schema_to_mce_fields +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph +from datahub.metadata.schema_classes import ( + DatasetPropertiesClass, + MetadataChangeProposalClass, + OtherSchemaClass, + SchemaFieldClass, + SchemaMetadataClass, + StructuredPropertiesClass, + StructuredPropertyValueAssignmentClass, + SubTypesClass, + UpstreamClass, +) +from datahub.specific.dataset import DatasetPatchBuilder +from datahub.utilities.urns.dataset_urn import DatasetUrn +from datahub.utilities.urns.urn import Urn + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class SchemaFieldSpecification(BaseModel): + id: Optional[str] + urn: 
Optional[str] + structured_properties: Optional[ + Dict[str, Union[str, float, List[Union[str, float]]]] + ] = None + type: Optional[str] + nativeDataType: Optional[str] = None + jsonPath: Union[None, str] = None + nullable: Optional[bool] = None + description: Union[None, str] = None + label: Optional[str] = None + created: Optional[dict] = None + lastModified: Optional[dict] = None + recursive: Optional[bool] = None + globalTags: Optional[dict] = None + glossaryTerms: Optional[dict] = None + isPartOfKey: Optional[bool] = None + isPartitioningKey: Optional[bool] = None + jsonProps: Optional[dict] = None + + def with_structured_properties( + self, + structured_properties: Optional[Dict[str, List[Union[str, float]]]], + ) -> "SchemaFieldSpecification": + self.structured_properties = ( + {k: v for k, v in structured_properties.items()} + if structured_properties + else None + ) + return self + + @classmethod + def from_schema_field( + cls, schema_field: SchemaFieldClass, parent_urn: str + ) -> "SchemaFieldSpecification": + return SchemaFieldSpecification( + id=Dataset._simplify_field_path(schema_field.fieldPath), + urn=make_schema_field_urn( + parent_urn, Dataset._simplify_field_path(schema_field.fieldPath) + ), + type=str(schema_field.type), + nativeDataType=schema_field.nativeDataType, + nullable=schema_field.nullable, + description=schema_field.description, + label=schema_field.label, + created=schema_field.created.__dict__ if schema_field.created else None, + lastModified=schema_field.lastModified.__dict__ + if schema_field.lastModified + else None, + recursive=schema_field.recursive, + globalTags=schema_field.globalTags.__dict__ + if schema_field.globalTags + else None, + glossaryTerms=schema_field.glossaryTerms.__dict__ + if schema_field.glossaryTerms + else None, + isPartitioningKey=schema_field.isPartitioningKey, + jsonProps=json.loads(schema_field.jsonProps) + if schema_field.jsonProps + else None, + ) + + @validator("urn", pre=True, always=True) + def either_id_or_urn_must_be_filled_out(cls, v, values): + if not v and not values.get("id"): + raise ValueError("Either id or urn must be present") + return v + + +class SchemaSpecification(BaseModel): + file: Optional[str] + fields: Optional[List[SchemaFieldSpecification]] + + @validator("file") + def file_must_be_avsc(cls, v): + if v and not v.endswith(".avsc"): + raise ValueError("file must be a .avsc file") + return v + + +class StructuredPropertyValue(ConfigModel): + value: Union[str, float, List[str], List[float]] + created: Optional[str] + lastModified: Optional[str] + + +class Dataset(BaseModel): + id: Optional[str] + platform: Optional[str] + env: str = "PROD" + urn: Optional[str] + description: Optional[str] + name: Optional[str] + schema_metadata: Optional[SchemaSpecification] = Field(alias="schema") + downstreams: Optional[List[str]] + properties: Optional[Dict[str, str]] + subtype: Optional[str] + subtypes: Optional[List[str]] + structured_properties: Optional[ + Dict[str, Union[str, float, List[Union[str, float]]]] + ] = None + + @property + def platform_urn(self) -> str: + if self.platform: + return make_data_platform_urn(self.platform) + else: + assert self.urn is not None # validator should have filled this in + dataset_urn = DatasetUrn.create_from_string(self.urn) + return str(dataset_urn.get_data_platform_urn()) + + @validator("urn", pre=True, always=True) + def urn_must_be_present(cls, v, values): + if not v: + assert "id" in values, "id must be present if urn is not" + assert "platform" in values, "platform must be 
present if urn is not" + assert "env" in values, "env must be present if urn is not" + return make_dataset_urn(values["platform"], values["id"], values["env"]) + return v + + @validator("name", pre=True, always=True) + def name_filled_with_id_if_not_present(cls, v, values): + if not v: + assert "id" in values, "id must be present if name is not" + return values["id"] + return v + + @validator("platform") + def platform_must_not_be_urn(cls, v): + if v.startswith("urn:li:dataPlatform:"): + return v[len("urn:li:dataPlatform:") :] + return v + + @classmethod + def from_yaml(cls, file: str) -> Iterable["Dataset"]: + with open(file) as fp: + yaml = YAML(typ="rt") # default, if not specfied, is 'rt' (round-trip) + datasets: Union[dict, List[dict]] = yaml.load(fp) + if isinstance(datasets, dict): + datasets = [datasets] + for dataset_raw in datasets: + dataset = Dataset.parse_obj(dataset_raw) + yield dataset + + def generate_mcp( + self, + ) -> Iterable[Union[MetadataChangeProposalClass, MetadataChangeProposalWrapper]]: + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, + aspect=DatasetPropertiesClass( + description=self.description, + name=self.name, + customProperties=self.properties, + ), + ) + yield mcp + + if self.schema_metadata: + if self.schema_metadata.file: + with open(self.schema_metadata.file, "r") as schema_fp: + schema_string = schema_fp.read() + schema_metadata = SchemaMetadataClass( + schemaName=self.name or self.id or self.urn or "", + platform=self.platform_urn, + version=0, + hash="", + platformSchema=OtherSchemaClass(rawSchema=schema_string), + fields=avro_schema_to_mce_fields(schema_string), + ) + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, aspect=schema_metadata + ) + yield mcp + + if self.schema_metadata.fields: + for field in self.schema_metadata.fields: + field_urn = field.urn or make_schema_field_urn( + self.urn, field.id # type: ignore[arg-type] + ) + assert field_urn.startswith("urn:li:schemaField:") + if field.structured_properties: + # field_properties_flattened = ( + # Dataset.extract_structured_properties( + # field.structured_properties + # ) + # ) + mcp = MetadataChangeProposalWrapper( + entityUrn=field_urn, + aspect=StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:{prop_key}", + values=prop_value + if isinstance(prop_value, list) + else [prop_value], + ) + for prop_key, prop_value in field.structured_properties.items() + ] + ), + ) + yield mcp + + if self.subtype or self.subtypes: + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, + aspect=SubTypesClass( + typeNames=[ + s + for s in [self.subtype] + (self.subtypes or []) + if s + ] + ), + ) + yield mcp + + if self.structured_properties: + # structured_properties_flattened = ( + # Dataset.extract_structured_properties( + # self.structured_properties + # ) + # ) + mcp = MetadataChangeProposalWrapper( + entityUrn=self.urn, + aspect=StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:{prop_key}", + values=prop_value + if isinstance(prop_value, list) + else [prop_value], + ) + for prop_key, prop_value in self.structured_properties.items() + ] + ), + ) + yield mcp + + if self.downstreams: + for downstream in self.downstreams: + patch_builder = DatasetPatchBuilder(downstream) + assert ( + self.urn is not None + ) # validator should have filled this in + patch_builder.add_upstream_lineage( + UpstreamClass( + dataset=self.urn, + 
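+                        # each urn listed under "downstreams" is patched to reference this dataset as an upstream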
type="COPY", + ) + ) + for patch_event in patch_builder.build(): + yield patch_event + + logger.info(f"Created dataset {self.urn}") + + @staticmethod + def extract_structured_properties( + structured_properties: Dict[str, Union[str, float, List[str], List[float]]] + ) -> List[Tuple[str, Union[str, float]]]: + structured_properties_flattened: List[Tuple[str, Union[str, float]]] = [] + for key, value in structured_properties.items(): + validated_structured_property = Dataset.validate_structured_property( + key, value + ) + if validated_structured_property: + structured_properties_flattened.append(validated_structured_property) + structured_properties_flattened = sorted( + structured_properties_flattened, key=lambda x: x[0] + ) + return structured_properties_flattened + + @staticmethod + def validate_structured_property( + sp_name: str, sp_value: Union[str, float, List[str], List[float]] + ) -> Union[Tuple[str, Union[str, float]], None]: + """ + Validate based on: + 1. Structured property exists/has been created + 2. Structured property value is of the expected type + """ + urn = Urn.make_structured_property_urn(sp_name) + with get_default_graph() as graph: + if graph.exists(urn): + validated_structured_property = StructuredProperties.from_datahub( + graph, urn + ) + allowed_type = Urn.get_data_type_from_urn( + validated_structured_property.type + ) + try: + if not isinstance(sp_value, list): + return Dataset.validate_type(sp_name, sp_value, allowed_type) + else: + for v in sp_value: + return Dataset.validate_type(sp_name, v, allowed_type) + except ValueError: + logger.warning( + f"Property: {sp_name}, value: {sp_value} should be a {allowed_type}." + ) + else: + logger.error( + f"Property {sp_name} does not exist and therefore will not be added to dataset. Please create property before trying again." 
+ ) + return None + + @staticmethod + def validate_type( + sp_name: str, sp_value: Union[str, float], allowed_type: str + ) -> Tuple[str, Union[str, float]]: + if allowed_type == AllowedTypes.NUMBER.value: + return (sp_name, float(sp_value)) + else: + return (sp_name, sp_value) + + @staticmethod + def _simplify_field_path(field_path: str) -> str: + if field_path.startswith("[version=2.0]"): + # v2 field path + field_components = [] + current_field = "" + for c in field_path: + if c == "[": + if current_field: + field_components.append(current_field) + current_field = "" + omit_next = True + elif c == "]": + omit_next = False + elif c == ".": + pass + elif not omit_next: + current_field += c + if current_field: + field_components.append(current_field) + return ".".join(field_components) + else: + return field_path + + @staticmethod + def _schema_from_schema_metadata( + graph: DataHubGraph, urn: str + ) -> Optional[SchemaSpecification]: + schema_metadata: Optional[SchemaMetadataClass] = graph.get_aspect( + urn, SchemaMetadataClass + ) + + if schema_metadata: + schema_specification = SchemaSpecification( + fields=[ + SchemaFieldSpecification.from_schema_field( + field, urn + ).with_structured_properties( + { + sp.propertyUrn: sp.values + for sp in structured_props.properties + } + if structured_props + else None + ) + for field, structured_props in [ + ( + field, + graph.get_aspect( + make_schema_field_urn(urn, field.fieldPath), + StructuredPropertiesClass, + ) + or graph.get_aspect( + make_schema_field_urn( + urn, Dataset._simplify_field_path(field.fieldPath) + ), + StructuredPropertiesClass, + ), + ) + for field in schema_metadata.fields + ] + ] + ) + return schema_specification + else: + return None + + @classmethod + def from_datahub(cls, graph: DataHubGraph, urn: str) -> "Dataset": + dataset_properties: Optional[DatasetPropertiesClass] = graph.get_aspect( + urn, DatasetPropertiesClass + ) + subtypes: Optional[SubTypesClass] = graph.get_aspect(urn, SubTypesClass) + structured_properties: Optional[StructuredPropertiesClass] = graph.get_aspect( + urn, StructuredPropertiesClass + ) + if structured_properties: + structured_properties_map: Dict[str, List[Union[str, float]]] = {} + for sp in structured_properties.properties: + if sp.propertyUrn in structured_properties_map: + assert isinstance(structured_properties_map[sp.propertyUrn], list) + structured_properties_map[sp.propertyUrn].extend(sp.values) # type: ignore[arg-type,union-attr] + else: + structured_properties_map[sp.propertyUrn] = sp.values + + return Dataset( # type: ignore[call-arg] + urn=urn, + description=dataset_properties.description + if dataset_properties and dataset_properties.description + else None, + name=dataset_properties.name + if dataset_properties and dataset_properties.name + else None, + schema=Dataset._schema_from_schema_metadata(graph, urn), + properties=dataset_properties.customProperties + if dataset_properties + else None, + subtypes=[subtype for subtype in subtypes.typeNames] if subtypes else None, + structured_properties=structured_properties_map + if structured_properties + else None, + ) + + def to_yaml( + self, + file: Path, + ) -> None: + with open(file, "w") as fp: + yaml = YAML(typ="rt") # default, if not specfied, is 'rt' (round-trip) + yaml.indent(mapping=2, sequence=4, offset=2) + yaml.default_flow_style = False + yaml.dump(self.dict(exclude_none=True, exclude_unset=True), fp) diff --git a/metadata-ingestion/src/datahub/api/entities/forms/__init__.py 
b/metadata-ingestion/src/datahub/api/entities/forms/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/api/entities/forms/forms.py b/metadata-ingestion/src/datahub/api/entities/forms/forms.py new file mode 100644 index 0000000000000..cc43779bda409 --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/forms/forms.py @@ -0,0 +1,353 @@ +import logging +import uuid +from enum import Enum +from pathlib import Path +from typing import List, Optional, Union + +import yaml +from pydantic import validator +from ruamel.yaml import YAML +from typing_extensions import Literal + +from datahub.api.entities.forms.forms_graphql_constants import ( + CREATE_DYNAMIC_FORM_ASSIGNMENT, + FIELD_FILTER_TEMPLATE, + UPLOAD_ENTITIES_FOR_FORMS, +) +from datahub.configuration.common import ConfigModel +from datahub.emitter.mce_builder import ( + make_data_platform_urn, + make_group_urn, + make_user_urn, +) +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph +from datahub.metadata.schema_classes import ( + FormInfoClass, + FormPromptClass, + OwnerClass, + OwnershipClass, + OwnershipTypeClass, + StructuredPropertyParamsClass, +) +from datahub.utilities.urns.urn import Urn + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class PromptType(Enum): + STRUCTURED_PROPERTY = "STRUCTURED_PROPERTY" + FIELDS_STRUCTURED_PROPERTY = "FIELDS_STRUCTURED_PROPERTY" + + @classmethod + def has_value(cls, value): + return value in cls._value2member_map_ + + +class Prompt(ConfigModel): + id: Optional[str] + title: str + description: Optional[str] + type: str + structured_property_id: Optional[str] + structured_property_urn: Optional[str] + required: Optional[bool] + + @validator("structured_property_urn", pre=True, always=True) + def structured_property_urn_must_be_present(cls, v, values): + if not v and values.get("structured_property_id"): + return Urn.make_structured_property_urn(values["structured_property_id"]) + return v + + +class FormType(Enum): + VERIFICATION = "VERIFICATION" + DOCUMENTATION = "COMPLETION" + + @classmethod + def has_value(cls, value): + return value in cls._value2member_map_ + + +class Filters(ConfigModel): + types: Optional[List[str]] + platforms: Optional[List[str]] + domains: Optional[List[str]] + containers: Optional[List[str]] + + +class Entities(ConfigModel): + urns: Optional[List[str]] + filters: Optional[Filters] + + +class Forms(ConfigModel): + id: Optional[str] + urn: Optional[str] + name: str + description: Optional[str] + prompts: List[Prompt] = [] + type: Optional[str] + version: Optional[Literal[1]] + entities: Optional[Entities] + owners: Optional[List[str]] # can be user IDs or urns + group_owners: Optional[List[str]] # can be group IDs or urns + + @validator("urn", pre=True, always=True) + def urn_must_be_present(cls, v, values): + if not v: + assert values.get("id") is not None, "Form id must be present if urn is not" + return f"urn:li:form:{values['id']}" + return v + + @staticmethod + def create(file: str) -> None: + emitter: DataHubGraph + + with get_default_graph() as emitter: + with open(file, "r") as fp: + forms: List[dict] = yaml.safe_load(fp) + for form_raw in forms: + form = Forms.parse_obj(form_raw) + + try: + if not FormType.has_value(form.type): + logger.error( + f"Form type {form.type} does not exist. Please try again with a valid type." 
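+                        # note: this only logs the invalid type; execution falls through and still attempts to create the form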
+ ) + + mcp = MetadataChangeProposalWrapper( + entityUrn=form.urn, + aspect=FormInfoClass( + name=form.name, + description=form.description, + prompts=form.validate_prompts(emitter), + type=form.type, + ), + ) + emitter.emit_mcp(mcp) + + logger.info(f"Created form {form.urn}") + + if form.owners or form.group_owners: + form.add_owners(emitter) + + if form.entities: + if form.entities.urns: + # Associate specific entities with a form + form.upload_entities_for_form(emitter) + + if form.entities.filters: + # Associate groups of entities with a form based on filters + form.create_form_filters(emitter) + + except Exception as e: + logger.error(e) + return + + def validate_prompts(self, emitter: DataHubGraph) -> List[FormPromptClass]: + prompts = [] + if self.prompts: + for prompt in self.prompts: + if not prompt.id: + prompt.id = str(uuid.uuid4()) + logger.warning( + f"Prompt id not provided. Setting prompt id to {prompt.id}" + ) + if prompt.structured_property_urn: + structured_property_urn = prompt.structured_property_urn + if emitter.exists(structured_property_urn): + prompt.structured_property_urn = structured_property_urn + else: + raise Exception( + f"Structured property {structured_property_urn} does not exist. Unable to create form." + ) + elif ( + prompt.type + in ( + PromptType.STRUCTURED_PROPERTY.value, + PromptType.FIELDS_STRUCTURED_PROPERTY.value, + ) + and not prompt.structured_property_urn + ): + raise Exception( + f"Prompt type is {prompt.type} but no structured properties exist. Unable to create form." + ) + + prompts.append( + FormPromptClass( + id=prompt.id, + title=prompt.title, + description=prompt.description, + type=prompt.type, + structuredPropertyParams=StructuredPropertyParamsClass( + urn=prompt.structured_property_urn + ) + if prompt.structured_property_urn + else None, + required=prompt.required, + ) + ) + else: + logger.warning(f"No prompts exist on form {self.urn}. 
Is that intended?") + + return prompts + + def upload_entities_for_form(self, emitter: DataHubGraph) -> Union[None, Exception]: + if self.entities and self.entities.urns: + formatted_entity_urns = ", ".join( + ['"{}"'.format(value) for value in self.entities.urns] + ) + query = UPLOAD_ENTITIES_FOR_FORMS.format( + form_urn=self.urn, entity_urns=formatted_entity_urns + ) + result = emitter.execute_graphql(query=query) + if not result: + return Exception(f"Could not bulk upload entities for form {self.urn}.") + + return None + + def create_form_filters(self, emitter: DataHubGraph) -> Union[None, Exception]: + filters_raw = [] + # Loop through each entity and assign a filter for it + if self.entities and self.entities.filters: + filters = self.entities.filters + if filters.types: + filters_raw.append( + Forms.format_form_filter("_entityType", filters.types) + ) + if filters.platforms: + urns = [ + make_data_platform_urn(platform) for platform in filters.platforms + ] + filters_raw.append(Forms.format_form_filter("platform", urns)) + if filters.domains: + urns = [] + for domain in filters.domains: + domain_urn = Forms.validate_domain_urn(domain) + if domain_urn: + urns.append(domain_urn) + filters_raw.append(Forms.format_form_filter("domains", urns)) + if filters.containers: + urns = [] + for container in filters.containers: + container_urn = Forms.validate_container_urn(container) + if container_urn: + urns.append(container_urn) + filters_raw.append(Forms.format_form_filter("container", urns)) + + filters_str = ", ".join(item for item in filters_raw) + result = emitter.execute_graphql( + query=CREATE_DYNAMIC_FORM_ASSIGNMENT.format( + form_urn=self.urn, filters=filters_str + ) + ) + if not result: + return Exception( + f"Could not bulk upload urns or filters for form {self.urn}." + ) + + return None + + def add_owners(self, emitter: DataHubGraph) -> Union[None, Exception]: + owner_urns: List[str] = [] + if self.owners: + owner_urns += Forms.format_owners(self.owners) + if self.group_owners: + owner_urns += Forms.format_group_owners(self.group_owners) + + ownership = OwnershipClass( + owners=[ + OwnerClass(owner=urn, type=OwnershipTypeClass.TECHNICAL_OWNER) + for urn in (owner_urns or []) + ], + ) + + try: + mcp = MetadataChangeProposalWrapper(entityUrn=self.urn, aspect=ownership) + emitter.emit_mcp(mcp) + except Exception as e: + logger.error(e) + + return None + + @staticmethod + def format_form_filter(field: str, urns: List[str]) -> str: + formatted_urns = ", ".join(['"{}"'.format(urn) for urn in urns]) + return FIELD_FILTER_TEMPLATE.format(field=field, values=formatted_urns) + + @staticmethod + def validate_domain_urn(domain: str) -> Union[str, None]: + if domain.startswith("urn:li:domain:"): + return domain + + logger.warning(f"{domain} is not an urn. Unable to create domain filter.") + return None + + @staticmethod + def validate_container_urn(container: str) -> Union[str, None]: + if container.startswith("urn:li:container:"): + return container + + logger.warning(f"{container} is not an urn. 
Unable to create container filter.") + return None + + @staticmethod + def from_datahub(graph: DataHubGraph, urn: str) -> "Forms": + form: Optional[FormInfoClass] = graph.get_aspect(urn, FormInfoClass) + assert form is not None + prompts = [] + for prompt_raw in form.prompts: + prompts.append( + Prompt( + id=prompt_raw.id, + title=prompt_raw.title, + description=prompt_raw.description, + type=prompt_raw.type, + structured_property_urn=prompt_raw.structuredPropertyParams.urn + if prompt_raw.structuredPropertyParams + else None, + ) + ) + return Forms( + urn=urn, + name=form.name, + description=form.description, + prompts=prompts, + type=form.type, + ) + + @staticmethod + def format_owners(owners: List[str]) -> List[str]: + formatted_owners: List[str] = [] + + for owner in owners: + if owner.startswith("urn:li:"): + formatted_owners.append(owner) + else: + formatted_owners.append(make_user_urn(owner)) + + return formatted_owners + + @staticmethod + def format_group_owners(owners: List[str]) -> List[str]: + formatted_owners: List[str] = [] + + for owner in owners: + if owner.startswith("urn:li:"): + formatted_owners.append(owner) + else: + formatted_owners.append(make_group_urn(owner)) + + return formatted_owners + + def to_yaml( + self, + file: Path, + ) -> None: + with open(file, "w") as fp: + yaml = YAML(typ="rt") # default, if not specfied, is 'rt' (round-trip) + yaml.indent(mapping=2, sequence=4, offset=2) + yaml.default_flow_style = False + yaml.dump(self.dict(), fp) diff --git a/metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py b/metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py new file mode 100644 index 0000000000000..c227d8fc05366 --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/forms/forms_graphql_constants.py @@ -0,0 +1,27 @@ +UPLOAD_ENTITIES_FOR_FORMS = """ +mutation batchAssignForm {{ + batchAssignForm( + input: {{ + formUrn: "{form_urn}", + entityUrns: [{entity_urns}] + }} + ) +}} +""" + +FIELD_FILTER_TEMPLATE = ( + """{{ field: "{field}", values: [{values}], condition: EQUAL, negated: false }}""" +) + +CREATE_DYNAMIC_FORM_ASSIGNMENT = """ +mutation createDynamicFormAssignment {{ + createDynamicFormAssignment( + input: {{ + formUrn: "{form_urn}" + orFilters: [{{ + and: [{filters}] + }}] + }} + ) +}} +""" diff --git a/metadata-ingestion/src/datahub/api/entities/structuredproperties/__init__.py b/metadata-ingestion/src/datahub/api/entities/structuredproperties/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py b/metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py new file mode 100644 index 0000000000000..af9bf3dccac5c --- /dev/null +++ b/metadata-ingestion/src/datahub/api/entities/structuredproperties/structuredproperties.py @@ -0,0 +1,185 @@ +import logging +from enum import Enum +from pathlib import Path +from typing import List, Optional + +import yaml +from pydantic import validator +from ruamel.yaml import YAML + +from datahub.configuration.common import ConfigModel +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DataHubGraph, get_default_graph +from datahub.metadata.schema_classes import ( + PropertyValueClass, + StructuredPropertyDefinitionClass, +) +from datahub.utilities.urns.urn import Urn + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +class 
AllowedTypes(Enum): + STRING = "string" + RICH_TEXT = "rich_text" + NUMBER = "number" + DATE = "date" + URN = "urn" + + @staticmethod + def check_allowed_type(value: str) -> bool: + return value in [allowed_type.value for allowed_type in AllowedTypes] + + @staticmethod + def values(): + return ", ".join([allowed_type.value for allowed_type in AllowedTypes]) + + +class AllowedValue(ConfigModel): + value: str + description: Optional[str] + + +class TypeQualifierAllowedTypes(ConfigModel): + allowed_types: List[str] + + @validator("allowed_types") + def validate_allowed_types(cls, v): + validated_entity_type_urns = [] + if v: + with get_default_graph() as graph: + for et in v: + validated_urn = Urn.make_entity_type_urn(et) + if graph.exists(validated_urn): + validated_entity_type_urns.append(validated_urn) + else: + logger.warn( + f"Input {et} is not a valid entity type urn. Skipping." + ) + v = validated_entity_type_urns + if not v: + logger.warn("No allowed_types given within type_qualifier.") + return v + + +class StructuredProperties(ConfigModel): + id: Optional[str] + urn: Optional[str] + qualified_name: Optional[str] + type: str + value_entity_types: Optional[List[str]] + description: Optional[str] + display_name: Optional[str] + entity_types: Optional[List[str]] + cardinality: Optional[str] + allowed_values: Optional[List[AllowedValue]] + type_qualifier: Optional[TypeQualifierAllowedTypes] + + @property + def fqn(self) -> str: + assert self.urn is not None + return ( + self.qualified_name + or self.id + or Urn.create_from_string(self.urn).get_entity_id()[0] + ) + + @validator("urn", pre=True, always=True) + def urn_must_be_present(cls, v, values): + if not v: + assert "id" in values, "id must be present if urn is not" + return f"urn:li:structuredProperty:{values['id']}" + return v + + @staticmethod + def create(file: str) -> None: + emitter: DataHubGraph + + with get_default_graph() as emitter: + with open(file, "r") as fp: + structuredproperties: List[dict] = yaml.safe_load(fp) + for structuredproperty_raw in structuredproperties: + structuredproperty = StructuredProperties.parse_obj( + structuredproperty_raw + ) + if not structuredproperty.type.islower(): + structuredproperty.type = structuredproperty.type.lower() + logger.warn( + f"Structured property type should be lowercase. Updated to {structuredproperty.type}" + ) + if not AllowedTypes.check_allowed_type(structuredproperty.type): + raise ValueError( + f"Type {structuredproperty.type} is not allowed. 
Allowed types are {AllowedTypes.values()}" + ) + mcp = MetadataChangeProposalWrapper( + entityUrn=structuredproperty.urn, + aspect=StructuredPropertyDefinitionClass( + qualifiedName=structuredproperty.fqn, + valueType=Urn.make_data_type_urn(structuredproperty.type), + displayName=structuredproperty.display_name, + description=structuredproperty.description, + entityTypes=[ + Urn.make_entity_type_urn(entity_type) + for entity_type in structuredproperty.entity_types or [] + ], + cardinality=structuredproperty.cardinality, + allowedValues=[ + PropertyValueClass( + value=v.value, description=v.description + ) + for v in structuredproperty.allowed_values + ] + if structuredproperty.allowed_values + else None, + typeQualifier={ + "allowedTypes": structuredproperty.type_qualifier.allowed_types + } + if structuredproperty.type_qualifier + else None, + ), + ) + emitter.emit_mcp(mcp) + + logger.info(f"Created structured property {structuredproperty.urn}") + + @classmethod + def from_datahub(cls, graph: DataHubGraph, urn: str) -> "StructuredProperties": + + structured_property: Optional[ + StructuredPropertyDefinitionClass + ] = graph.get_aspect(urn, StructuredPropertyDefinitionClass) + assert structured_property is not None + return StructuredProperties( + urn=urn, + qualified_name=structured_property.qualifiedName, + display_name=structured_property.displayName, + type=structured_property.valueType, + description=structured_property.description, + entity_types=structured_property.entityTypes, + cardinality=structured_property.cardinality, + allowed_values=[ + AllowedValue( + value=av.value, + description=av.description, + ) + for av in structured_property.allowedValues or [] + ] + if structured_property.allowedValues is not None + else None, + type_qualifier={ + "allowed_types": structured_property.typeQualifier.get("allowedTypes") + } + if structured_property.typeQualifier + else None, + ) + + def to_yaml( + self, + file: Path, + ) -> None: + with open(file, "w") as fp: + yaml = YAML(typ="rt") # default, if not specfied, is 'rt' (round-trip) + yaml.indent(mapping=2, sequence=4, offset=2) + yaml.default_flow_style = False + yaml.dump(self.dict(), fp) diff --git a/metadata-ingestion/src/datahub/cli/docker_check.py b/metadata-ingestion/src/datahub/cli/docker_check.py index 97b88cbc8b8eb..47b89af6dfd04 100644 --- a/metadata-ingestion/src/datahub/cli/docker_check.py +++ b/metadata-ingestion/src/datahub/cli/docker_check.py @@ -193,6 +193,11 @@ def check_docker_quickstart() -> QuickstartStatus: .labels.get("com.docker.compose.project.config_files") .split(",") ) + + # If using profiles, alternative check + if config_files and "/profiles/" in config_files[0]: + return check_docker_quickstart_profiles(client) + all_containers = set() for config_file in config_files: with open(config_file, "r") as config_file: @@ -234,3 +239,35 @@ def check_docker_quickstart() -> QuickstartStatus: ) return QuickstartStatus(container_statuses) + + +def check_docker_quickstart_profiles(client: docker.DockerClient) -> QuickstartStatus: + container_statuses: List[DockerContainerStatus] = [] + containers = client.containers.list( + all=True, + filters={"label": "io.datahubproject.datahub.component=gms"}, + # We can get race conditions between docker running up / recreating + # containers and our status checks. + ignore_removed=True, + ) + if len(containers) == 0: + return QuickstartStatus([]) + + existing_containers = set() + # Check that the containers are running and healthy. 
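+    # Docker exposes health via container.attrs["State"]["Health"]["Status"]: "starting" maps to STARTING and any other non-"healthy" value to UNHEALTHY, while a container that is not running at all is marked DIED.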
+ container: docker.models.containers.Container + for container in containers: + name = container.labels.get("com.docker.compose.service", container.name) + existing_containers.add(name) + status = ContainerStatus.OK + if container.status != "running": + status = ContainerStatus.DIED + elif "Health" in container.attrs["State"]: + if container.attrs["State"]["Health"]["Status"] == "starting": + status = ContainerStatus.STARTING + elif container.attrs["State"]["Health"]["Status"] != "healthy": + status = ContainerStatus.UNHEALTHY + + container_statuses.append(DockerContainerStatus(name, status)) + + return QuickstartStatus(container_statuses) diff --git a/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py b/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py index 5d6c65512354a..a52a9dddff127 100644 --- a/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py +++ b/metadata-ingestion/src/datahub/cli/specific/dataproduct_cli.py @@ -56,7 +56,6 @@ def _abort_if_non_existent_urn(graph: DataHubGraph, urn: str, operation: str) -> def _print_diff(orig_file, new_file): - with open(orig_file) as fp: orig_lines = fp.readlines() with open(new_file) as fp: @@ -388,7 +387,7 @@ def add_asset(urn: str, asset: str, validate_assets: bool) -> None: graph.emit(mcp) -@dataproduct.command(name="remove_asset", help="Add an asset to a Data Product") +@dataproduct.command(name="remove_asset", help="Remove an asset from a Data Product") @click.option("--urn", required=True, type=str) @click.option("--asset", required=True, type=str) @click.option( diff --git a/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py b/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py new file mode 100644 index 0000000000000..c702d0ec28961 --- /dev/null +++ b/metadata-ingestion/src/datahub/cli/specific/dataset_cli.py @@ -0,0 +1,67 @@ +import json +import logging +from pathlib import Path + +import click +from click_default_group import DefaultGroup + +from datahub.api.entities.dataset.dataset import Dataset +from datahub.ingestion.graph.client import get_default_graph +from datahub.telemetry import telemetry +from datahub.upgrade import upgrade + +logger = logging.getLogger(__name__) + + +@click.group(cls=DefaultGroup, default="upsert") +def dataset() -> None: + """A group of commands to interact with the Dataset entity in DataHub.""" + pass + + +@dataset.command( + name="upsert", +) +@click.option("-f", "--file", required=True, type=click.Path(exists=True)) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def upsert(file: Path) -> None: + """Upsert attributes to a Dataset in DataHub.""" + + with get_default_graph() as graph: + for dataset in Dataset.from_yaml(str(file)): + try: + for mcp in dataset.generate_mcp(): + graph.emit(mcp) + click.secho(f"Update succeeded for urn {dataset.urn}.", fg="green") + except Exception as e: + click.secho( + f"Update failed for urn {dataset.urn} 
due to {e}", + fg="red", + ) + + +@dataset.command( + name="get", +) +@click.option("--urn", required=True, type=str) +@click.option("--to-file", required=False, type=str) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def get(urn: str, to_file: str) -> None: + """Get a Dataset from DataHub""" + + if not urn.startswith("urn:li:dataset:"): + urn = f"urn:li:dataset:{urn}" + + with get_default_graph() as graph: + if graph.exists(urn): + dataset: Dataset = Dataset.from_datahub(graph=graph, urn=urn) + click.secho( + f"{json.dumps(dataset.dict(exclude_unset=True, exclude_none=True), indent=2)}" + ) + if to_file: + dataset.to_yaml(Path(to_file)) + click.secho(f"Dataset yaml written to {to_file}", fg="green") + else: + click.secho(f"Dataset {urn} does not exist") diff --git a/metadata-ingestion/src/datahub/cli/specific/forms_cli.py b/metadata-ingestion/src/datahub/cli/specific/forms_cli.py new file mode 100644 index 0000000000000..a494396909b32 --- /dev/null +++ b/metadata-ingestion/src/datahub/cli/specific/forms_cli.py @@ -0,0 +1,53 @@ +import json +import logging +from pathlib import Path + +import click +from click_default_group import DefaultGroup + +from datahub.api.entities.forms.forms import Forms +from datahub.ingestion.graph.client import get_default_graph +from datahub.telemetry import telemetry +from datahub.upgrade import upgrade + +logger = logging.getLogger(__name__) + + +@click.group(cls=DefaultGroup, default="upsert") +def forms() -> None: + """A group of commands to interact with forms in DataHub.""" + pass + + +@forms.command( + name="upsert", +) +@click.option("-f", "--file", required=True, type=click.Path(exists=True)) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def upsert(file: Path) -> None: + """Upsert forms in DataHub.""" + + Forms.create(str(file)) + + +@forms.command( + name="get", +) +@click.option("--urn", required=True, type=str) +@click.option("--to-file", required=False, type=str) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def get(urn: str, to_file: str) -> None: + """Get form from DataHub""" + with get_default_graph() as graph: + if graph.exists(urn): + form: Forms = Forms.from_datahub(graph=graph, urn=urn) + click.secho( + f"{json.dumps(form.dict(exclude_unset=True, exclude_none=True), indent=2)}" + ) + if to_file: + form.to_yaml(Path(to_file)) + click.secho(f"Form yaml written to {to_file}", fg="green") + else: + click.secho(f"Form {urn} does not exist") diff --git a/metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py b/metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py new file mode 100644 index 0000000000000..4162d44b9b0ea --- /dev/null +++ b/metadata-ingestion/src/datahub/cli/specific/structuredproperties_cli.py @@ -0,0 +1,62 @@ +import json +import logging +from pathlib import Path + +import click +from click_default_group import DefaultGroup + +from datahub.api.entities.structuredproperties.structuredproperties import ( + StructuredProperties, +) +from datahub.ingestion.graph.client import get_default_graph +from datahub.telemetry import telemetry +from datahub.upgrade import upgrade +from datahub.utilities.urns.urn import Urn + +logger = logging.getLogger(__name__) + + +@click.group(cls=DefaultGroup, default="upsert") +def properties() -> None: + """A group of commands to interact with structured properties in DataHub.""" + pass + + +@properties.command( + name="upsert", +) +@click.option("-f", "--file", required=True, type=click.Path(exists=True)) +@upgrade.check_upgrade 
+@telemetry.with_telemetry() +def upsert(file: Path) -> None: + """Upsert structured properties in DataHub.""" + + StructuredProperties.create(str(file)) + + +@properties.command( + name="get", +) +@click.option("--urn", required=True, type=str) +@click.option("--to-file", required=False, type=str) +@upgrade.check_upgrade +@telemetry.with_telemetry() +def get(urn: str, to_file: str) -> None: + """Get structured properties from DataHub""" + urn = Urn.make_structured_property_urn(urn) + + with get_default_graph() as graph: + if graph.exists(urn): + structuredproperties: StructuredProperties = ( + StructuredProperties.from_datahub(graph=graph, urn=urn) + ) + click.secho( + f"{json.dumps(structuredproperties.dict(exclude_unset=True, exclude_none=True), indent=2)}" + ) + if to_file: + structuredproperties.to_yaml(Path(to_file)) + click.secho( + f"Structured property yaml written to {to_file}", fg="green" + ) + else: + click.secho(f"Structured property {urn} does not exist") diff --git a/metadata-ingestion/src/datahub/entrypoints.py b/metadata-ingestion/src/datahub/entrypoints.py index 0cd37cc939854..4989f984badcc 100644 --- a/metadata-ingestion/src/datahub/entrypoints.py +++ b/metadata-ingestion/src/datahub/entrypoints.py @@ -23,7 +23,10 @@ from datahub.cli.put_cli import put from datahub.cli.specific.datacontract_cli import datacontract from datahub.cli.specific.dataproduct_cli import dataproduct +from datahub.cli.specific.dataset_cli import dataset +from datahub.cli.specific.forms_cli import forms from datahub.cli.specific.group_cli import group +from datahub.cli.specific.structuredproperties_cli import properties from datahub.cli.specific.user_cli import user from datahub.cli.state_cli import state from datahub.cli.telemetry import telemetry as telemetry_cli @@ -144,6 +147,9 @@ def init() -> None: datahub.add_command(user) datahub.add_command(group) datahub.add_command(dataproduct) +datahub.add_command(dataset) +datahub.add_command(properties) +datahub.add_command(forms) datahub.add_command(datacontract) try: diff --git a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py index 6baa70aa581d6..675c87b13313d 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py +++ b/metadata-ingestion/src/datahub/ingestion/source/metadata/business_glossary.py @@ -71,6 +71,7 @@ class GlossaryNodeConfig(ConfigModel): terms: Optional[List["GlossaryTermConfig"]] nodes: Optional[List["GlossaryNodeConfig"]] knowledge_links: Optional[List[KnowledgeCard]] + custom_properties: Optional[Dict[str, str]] # Private fields. 
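+    # _urn is populated programmatically during ingestion rather than read from the glossary YAML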
_urn: str @@ -252,6 +253,7 @@ def get_mces_from_node( definition=glossaryNode.description, parentNode=parentNode, name=glossaryNode.name, + customProperties=glossaryNode.custom_properties, ) node_owners = parentOwners if glossaryNode.owners is not None: diff --git a/metadata-ingestion/src/datahub/specific/dataset.py b/metadata-ingestion/src/datahub/specific/dataset.py index 62ee4fc57b61b..d3c3de36198e3 100644 --- a/metadata-ingestion/src/datahub/specific/dataset.py +++ b/metadata-ingestion/src/datahub/specific/dataset.py @@ -23,6 +23,7 @@ ) from datahub.specific.custom_properties import CustomPropertiesPatchHelper from datahub.specific.ownership import OwnershipPatchHelper +from datahub.specific.structured_properties import StructuredPropertiesPatchHelper from datahub.utilities.urns.tag_urn import TagUrn from datahub.utilities.urns.urn import Urn @@ -103,6 +104,7 @@ def __init__( self, DatasetProperties.ASPECT_NAME ) self.ownership_patch_helper = OwnershipPatchHelper(self) + self.structured_properties_patch_helper = StructuredPropertiesPatchHelper(self) def add_owner(self, owner: Owner) -> "DatasetPatchBuilder": self.ownership_patch_helper.add_owner(owner) @@ -331,3 +333,33 @@ def set_display_name(self, display_name: str) -> "DatasetPatchBuilder": value=display_name, ) return self + + def set_structured_property( + self, property_name: str, value: Union[str, float, List[Union[str, float]]] + ) -> "DatasetPatchBuilder": + """ + This is a helper method to set a structured property. + @param property_name: the name of the property (either bare or urn form) + @param value: the value of the property (for multi-valued properties, this can be a list) + """ + self.structured_properties_patch_helper.set_property(property_name, value) + return self + + def add_structured_property( + self, property_name: str, value: Union[str, float] + ) -> "DatasetPatchBuilder": + """ + This is a helper method to add a structured property. + @param property_name: the name of the property (either bare or urn form) + @param value: the value of the property (for multi-valued properties, this value will be appended to the list) + """ + self.structured_properties_patch_helper.add_property(property_name, value) + return self + + def remove_structured_property(self, property_name: str) -> "DatasetPatchBuilder": + """ + This is a helper method to remove a structured property. 
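+        (implemented as a JSON-patch "remove" operation on the structuredProperties aspect; see StructuredPropertiesPatchHelper below)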
+ @param property_name: the name of the property (either bare or urn form) + """ + self.structured_properties_patch_helper.remove_property(property_name) + return self diff --git a/metadata-ingestion/src/datahub/specific/structured_properties.py b/metadata-ingestion/src/datahub/specific/structured_properties.py new file mode 100644 index 0000000000000..6b2592bf1cbba --- /dev/null +++ b/metadata-ingestion/src/datahub/specific/structured_properties.py @@ -0,0 +1,53 @@ +from typing import Generic, List, TypeVar, Union + +from datahub.emitter.mcp_patch_builder import MetadataPatchProposal +from datahub.metadata.schema_classes import StructuredPropertyValueAssignmentClass +from datahub.utilities.urns.structured_properties_urn import ( + make_structured_property_urn, +) + +T = TypeVar("T", bound=MetadataPatchProposal) + + +class StructuredPropertiesPatchHelper(Generic[T]): + def __init__( + self, + parent: T, + aspect_name: str = "structuredProperties", + ) -> None: + self.aspect_name = aspect_name + self._parent = parent + self.aspect_field = "properties" + + def parent(self) -> T: + return self._parent + + def set_property( + self, key: str, value: Union[str, float, List[Union[str, float]]] + ) -> "StructuredPropertiesPatchHelper": + self.remove_property(key) + self.add_property(key, value) + return self + + def remove_property(self, key: str) -> "StructuredPropertiesPatchHelper": + self._parent._add_patch( + self.aspect_name, + "remove", + path=f"/{self.aspect_field}/{make_structured_property_urn(key)}", + value={}, + ) + return self + + def add_property( + self, key: str, value: Union[str, float, List[Union[str, float]]] + ) -> "StructuredPropertiesPatchHelper": + self._parent._add_patch( + self.aspect_name, + "add", + path=f"/{self.aspect_field}/{make_structured_property_urn(key)}", + value=StructuredPropertyValueAssignmentClass( + propertyUrn=make_structured_property_urn(key), + values=value if isinstance(value, list) else [value], + ), + ) + return self diff --git a/metadata-ingestion/src/datahub/utilities/urn_encoder.py b/metadata-ingestion/src/datahub/utilities/urn_encoder.py index 093c9ade8c152..b39dd04370682 100644 --- a/metadata-ingestion/src/datahub/utilities/urn_encoder.py +++ b/metadata-ingestion/src/datahub/utilities/urn_encoder.py @@ -3,6 +3,7 @@ # NOTE: Frontend relies on encoding these three characters. Specifically, we decode and encode schema fields for column level lineage. # If this changes, make appropriate changes to datahub-web-react/src/app/lineage/utils/columnLineageUtils.ts +# We also rely on encoding these exact three characters when generating schemaField urns in our graphQL layer. Update SchemaFieldUtils if this changes. 
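+# Illustrative example: a column literally named "a,b" is therefore encoded as "a%2Cb" within its schemaField urn.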
RESERVED_CHARS = {",", "(", ")"} RESERVED_CHARS_EXTENDED = RESERVED_CHARS.union({"%"}) diff --git a/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py index fbde0d6e6d69a..1b50d4b2fe810 100644 --- a/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py +++ b/metadata-ingestion/src/datahub/utilities/urns/_urn_base.py @@ -207,6 +207,46 @@ def url_encode(urn: str) -> str: # safe='' encodes '/' as '%2F' return urllib.parse.quote(urn, safe="") + @staticmethod + def make_data_type_urn(type: str) -> str: + if type.startswith("urn:li:dataType:"): + return type + else: + if not type.startswith("datahub."): + # we want all data types to be fully qualified within the datahub namespace + type = f"datahub.{type}" + return f"urn:li:dataType:{type}" + + @staticmethod + def get_data_type_from_urn(urn: str) -> str: + if urn.startswith("urn:li:dataType:"): + # urn is formatted like urn:li:dataType:datahub:{dataType}, so extract dataType by + # parsing by . and getting the last element + return urn.split(".")[-1] + return urn + + @staticmethod + def make_entity_type_urn(entity_type: str) -> str: + if entity_type.startswith("urn:li:entityType:"): + return entity_type + else: + if not entity_type.startswith("datahub."): + # we want all entity types to be fully qualified within the datahub namespace + entity_type = f"datahub.{entity_type}" + return f"urn:li:entityType:{entity_type}" + + @staticmethod + def make_structured_property_urn(structured_property: str) -> str: + if not structured_property.startswith("urn:li:structuredProperty:"): + return f"urn:li:structuredProperty:{structured_property}" + return structured_property + + @staticmethod + def make_form_urn(form: str) -> str: + if not form.startswith("urn:li:form:"): + return f"urn:li:form:{form}" + return form + class _SpecificUrn(Urn): ENTITY_TYPE: str = "" diff --git a/metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py b/metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py new file mode 100644 index 0000000000000..5bd36a0656d99 --- /dev/null +++ b/metadata-ingestion/src/datahub/utilities/urns/structured_properties_urn.py @@ -0,0 +1,5 @@ +from datahub.metadata.urns import StructuredPropertyUrn # noqa: F401 + + +def make_structured_property_urn(structured_property_id: str) -> str: + return str(StructuredPropertyUrn.create_from_string(structured_property_id)) diff --git a/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml b/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml index da238701e718d..c919dde18b187 100644 --- a/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml +++ b/metadata-ingestion/tests/integration/business-glossary/business_glossary.yml @@ -10,6 +10,8 @@ nodes: knowledge_links: - label: Wiki link for classification url: "https://en.wikipedia.org/wiki/Classification" + custom_properties: + is_confidential: true terms: - name: Sensitive description: Sensitive Data diff --git a/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json b/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json index b8cc922f0c1c3..1dce940b44390 100644 --- a/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json +++ b/metadata-ingestion/tests/integration/business-glossary/glossary_events_auto_id_golden.json @@ -6,6 +6,9 @@ "aspects": [ { 
"com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": { + "is_confidential": "True" + }, "definition": "A set of terms related to Data Classification", "name": "Classification" } @@ -29,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -54,7 +58,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +99,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -119,7 +125,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -159,7 +166,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -176,7 +184,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +225,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -226,6 +236,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "All terms related to personal information", "name": "Personal Information" } @@ -249,7 +260,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -294,7 +306,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -332,7 +345,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -377,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -387,6 +402,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Provides basic concepts such as account, account holder, account provider, relationship manager that are commonly used by financial services providers to describe customers and to determine counterparty identities", "name": "Clients And Accounts" } @@ -410,7 +426,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": 
"datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +475,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -496,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -506,6 +525,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Common Business KPIs", "name": "KPIs" } @@ -529,7 +549,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -567,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -582,7 +604,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -597,7 +620,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -612,7 +636,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -627,7 +652,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -642,7 +668,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +684,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -672,7 +700,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -687,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -702,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -717,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": 
"no-run-id-provided" } }, { @@ -732,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -747,7 +780,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -762,7 +796,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json b/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json index e2b525658e36e..af85f6e2a3518 100644 --- a/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json +++ b/metadata-ingestion/tests/integration/business-glossary/glossary_events_golden.json @@ -6,6 +6,9 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": { + "is_confidential": "True" + }, "definition": "A set of terms related to Data Classification", "name": "Classification" } @@ -29,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -54,7 +58,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +99,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -119,7 +125,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -159,7 +166,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -176,7 +184,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +225,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -226,6 +236,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "All terms related to personal information", "name": "Personal Information" } @@ -249,7 +260,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -294,7 +306,8 @@ }, "systemMetadata": { "lastObserved": 
1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -332,7 +345,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -377,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -387,6 +402,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Provides basic concepts such as account, account holder, account provider, relationship manager that are commonly used by financial services providers to describe customers and to determine counterparty identities", "name": "Clients And Accounts" } @@ -410,7 +426,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +475,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -496,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -506,6 +525,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Common Business KPIs", "name": "KPIs" } @@ -529,7 +549,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -567,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -582,7 +604,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -597,7 +620,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -612,7 +636,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -627,7 +652,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -642,7 +668,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": 
"no-run-id-provided" } }, { @@ -657,7 +684,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -672,7 +700,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -687,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -702,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -717,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -732,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -747,7 +780,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } }, { @@ -762,7 +796,8 @@ }, "systemMetadata": { "lastObserved": 1586847600000, - "runId": "datahub-business-glossary-2020_04_14-07_00_00" + "runId": "datahub-business-glossary-2020_04_14-07_00_00", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-ingestion/tests/integration/remote/content/business_glossary.yml b/metadata-ingestion/tests/integration/remote/content/business_glossary.yml index 59bea251a24e1..e0bee3eb4468f 100644 --- a/metadata-ingestion/tests/integration/remote/content/business_glossary.yml +++ b/metadata-ingestion/tests/integration/remote/content/business_glossary.yml @@ -10,6 +10,8 @@ nodes: knowledge_links: - label: Wiki link for classification url: "https://en.wikipedia.org/wiki/Classification" + custom_properties: + is_confidential: true terms: - name: Sensitive description: Sensitive Data diff --git a/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json b/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json index 1e1932822aee8..a3adcb7639712 100644 --- a/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json +++ b/metadata-ingestion/tests/integration/remote/golden/remote_glossary_golden.json @@ -6,6 +6,9 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": { + "is_confidential": "True" + }, "definition": "A set of terms related to Data Classification", "name": "Classification" } @@ -29,7 +32,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -54,7 +58,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -94,7 +99,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - 
"runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -119,7 +125,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -159,7 +166,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -176,7 +184,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -216,7 +225,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -226,6 +236,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "All terms related to personal information", "name": "Personal Information" } @@ -249,7 +260,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -294,7 +306,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -332,7 +345,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -377,7 +391,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -387,6 +402,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Provides basic concepts such as account, account holder, account provider, relationship manager that are commonly used by financial services providers to describe customers and to determine counterparty identities", "name": "Clients And Accounts" } @@ -410,7 +426,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -458,7 +475,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -496,7 +514,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -506,6 +525,7 @@ "aspects": [ { "com.linkedin.pegasus2avro.glossary.GlossaryNodeInfo": { + "customProperties": {}, "definition": "Common Business KPIs", "name": "KPIs" } @@ -529,7 +549,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -567,7 +588,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -582,7 +604,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -597,7 +620,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -612,7 +636,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -627,7 +652,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": 
"remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -642,7 +668,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -657,7 +684,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -672,7 +700,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -687,7 +716,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -702,7 +732,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -717,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -732,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -747,7 +780,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } }, { @@ -762,7 +796,8 @@ }, "systemMetadata": { "lastObserved": 1629795600000, - "runId": "remote-4" + "runId": "remote-4", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index b14953d7ce021..8e05b7ef8f5d6 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -15,8 +15,7 @@ import org.apache.tools.ant.filters.ReplaceTokens jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation dependencies { - implementation project(':metadata-models') - implementation project(path: ':metadata-models', configuration: "dataTemplate") + implementation project(':entity-registry') implementation(externalDependency.kafkaAvroSerializer) { exclude group: "org.apache.avro" } @@ -81,13 +80,13 @@ shadowJar { // preventing java multi-release JAR leakage // https://github.com/johnrengelman/shadow/issues/729 exclude('module-info.class', 'META-INF/versions/**', - '**/LICENSE', '**/LICENSE.txt', '**/NOTICE', '**/NOTICE.txt') + '**/LICENSE', '**/LICENSE*.txt', '**/NOTICE', '**/NOTICE.txt', 'licenses/**', 'log4j2.xml', 'log4j.xml') mergeServiceFiles() // we relocate namespaces manually, because we want to know exactly which libs we are exposing and why // we can move to automatic relocation using ConfigureShadowRelocation after we get to a good place on these first relocate 'org.springframework', 'datahub.shaded.org.springframework' relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson' - relocate 'org.yaml', 'io.acryl.shaded.org.yaml' // Required for shading snakeyaml + relocate 'org.yaml', 'datahub.shaded.org.yaml' // Required for shading snakeyaml relocate 'net.jcip.annotations', 'datahub.shaded.annotations' relocate 'javassist', 'datahub.shaded.javassist' relocate 'edu.umd.cs.findbugs', 'datahub.shaded.findbugs' @@ -95,6 +94,7 @@ shadowJar { relocate 'antlr', 'datahub.shaded.antlr' relocate 'com.google.common', 'datahub.shaded.com.google.common' relocate 'org.apache.commons', 'datahub.shaded.org.apache.commons' + relocate 
'org.apache.maven', 'datahub.shaded.org.apache.maven' relocate 'org.reflections', 'datahub.shaded.org.reflections' relocate 'st4hidden', 'datahub.shaded.st4hidden' relocate 'org.stringtemplate', 'datahub.shaded.org.stringtemplate' @@ -104,7 +104,6 @@ shadowJar { relocate 'com.github.benmanes.caffeine', 'datahub.shaded.com.github.benmanes.caffeine' relocate 'org.checkerframework', 'datahub.shaded.org.checkerframework' relocate 'com.google.errorprone', 'datahub.shaded.com.google.errorprone' - relocate 'com.sun.jna', 'datahub.shaded.com.sun.jna' // Below jars added for kafka emitter only relocate 'org.apache.avro', 'datahub.shaded.org.apache.avro' relocate 'com.thoughtworks.paranamer', 'datahub.shaded.com.thoughtworks.paranamer' @@ -121,6 +120,9 @@ shadowJar { relocate 'common.message', 'datahub.shaded.common.message' relocate 'org.glassfish', 'datahub.shaded.org.glassfish' relocate 'ch.randelshofer', 'datahub.shaded.ch.randelshofer' + relocate 'io.github.classgraph', 'datahub.shaded.io.github.classgraph' + relocate 'nonapi.io.github.classgraph', 'datahub.shaded.nonapi.io.github.classgraph' + relocate 'com.github.fge', 'datahub.shaded.com.github.fge' finalizedBy checkShadowJar } diff --git a/metadata-integration/java/datahub-client/scripts/check_jar.sh b/metadata-integration/java/datahub-client/scripts/check_jar.sh index 02a1d06b73acf..e2c9ec16d49f8 100755 --- a/metadata-integration/java/datahub-client/scripts/check_jar.sh +++ b/metadata-integration/java/datahub-client/scripts/check_jar.sh @@ -35,7 +35,8 @@ jar -tvf $jarFile |\ grep -v "linux/" |\ grep -v "darwin" |\ grep -v "MetadataChangeProposal.avsc" |\ - grep -v "aix" + grep -v "aix" |\ + grep -v "com/sun/" if [ $? -ne 0 ]; then echo "✅ No unexpected class paths found in ${jarFile}" diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index 5bd10245899e4..1107f552012db 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -18,20 +18,20 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dataset.DatasetLineageType; +import com.linkedin.metadata.aspect.patch.builder.ChartInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DashboardInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DataFlowInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DataJobInfoPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DataJobInputOutputPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.EditableSchemaMetadataPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.OwnershipPatchBuilder; +import com.linkedin.metadata.aspect.patch.builder.UpstreamLineagePatchBuilder; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; import datahub.client.file.FileEmitter; import datahub.client.file.FileEmitterConfig; -import datahub.client.patch.chart.ChartInfoPatchBuilder; -import datahub.client.patch.common.OwnershipPatchBuilder; -import datahub.client.patch.dashboard.DashboardInfoPatchBuilder; -import datahub.client.patch.dataflow.DataFlowInfoPatchBuilder; -import 
datahub.client.patch.datajob.DataJobInfoPatchBuilder; -import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; -import datahub.client.patch.dataset.EditableSchemaMetadataPatchBuilder; -import datahub.client.patch.dataset.UpstreamLineagePatchBuilder; import datahub.client.rest.RestEmitter; import datahub.client.rest.RestEmitterConfig; import java.io.IOException; diff --git a/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh b/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh index 930e3ab7be9e1..e3aa181c58801 100755 --- a/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh +++ b/metadata-integration/java/datahub-protobuf/scripts/check_jar.sh @@ -38,7 +38,8 @@ jar -tvf $jarFile |\ grep -v "linux/" |\ grep -v "darwin" |\ grep -v "MetadataChangeProposal.avsc" |\ - grep -v "aix" + grep -v "aix" |\ + grep -v "com/sun/" if [ $? -ne 0 ]; then echo "✅ No unexpected class paths found in ${jarFile}" diff --git a/metadata-integration/java/examples/build.gradle b/metadata-integration/java/examples/build.gradle index ddf574e8c8905..62c80562c7c3b 100644 --- a/metadata-integration/java/examples/build.gradle +++ b/metadata-integration/java/examples/build.gradle @@ -4,7 +4,6 @@ plugins { } dependencies { - implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok @@ -12,8 +11,6 @@ dependencies { implementation externalDependency.typesafeConfig implementation externalDependency.opentracingJdbc - implementation project(path: ':li-utils') - implementation project(path: ':metadata-models') implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') implementation externalDependency.httpAsyncClient diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java index 4cff55afc92de..e84511083b6d9 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java @@ -3,9 +3,9 @@ import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DataJobInputOutputPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; -import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.concurrent.ExecutionException; diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java index b30cb5166df70..03f0673cd85a4 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java @@ -1,9 +1,9 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import 
datahub.client.MetadataWriteResponse; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.concurrent.ExecutionException; diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java index 0a89e87060698..eb8f700c4b068 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java @@ -1,9 +1,9 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.concurrent.ExecutionException; diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java index 053c1f068e048..1586d9b069b24 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java @@ -1,9 +1,9 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.aspect.patch.builder.DatasetPropertiesPatchBuilder; import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; -import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; import java.util.HashMap; diff --git a/metadata-integration/java/spark-lineage/build.gradle b/metadata-integration/java/spark-lineage/build.gradle index c5dd9b5012c29..8d6160631bf45 100644 --- a/metadata-integration/java/spark-lineage/build.gradle +++ b/metadata-integration/java/spark-lineage/build.gradle @@ -102,6 +102,7 @@ shadowJar { // prevent jni conflict with spark exclude '**/libzstd-jni.*' exclude '**/com_github_luben_zstd_*' + exclude '**/log4j*.xml' relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson' relocate 'org.slf4j','datahub.shaded.org.slf4j' @@ -113,6 +114,10 @@ shadowJar { relocate 'io.opentracing','datahub.spark2.shaded.io.opentracing' relocate 'io.netty','datahub.spark2.shaded.io.netty' relocate 'ch.randelshofer', 'datahub.shaded.ch.randelshofer' + relocate 'com.sun', 'datahub.shaded.com.sun' + relocate 'avroutil1', 'datahub.shaded.avroutil1' + relocate 'com.github', 'datahub.shaded.com.github' + relocate 'org.apache.maven', 'datahub.shaded.org.apache.maven' finalizedBy checkShadowJar } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh b/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh index 33cac9d562cd8..90a90be768a51 100755 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/setup_spark_smoke_test.sh @@ 
-30,7 +30,7 @@ echo "--------------------------------------------------------------------" cd "${SMOKE_TEST_ROOT_DIR}"/docker #bring up spark cluster -docker-compose -f spark-docker-compose.yml up -d +docker compose -f spark-docker-compose.yml up -d echo "--------------------------------------------------------------------" echo "Executing spark-submit jobs" diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java b/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java new file mode 100644 index 0000000000000..974406c0be0df --- /dev/null +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/EntityClientAspectRetriever.java @@ -0,0 +1,35 @@ +package com.linkedin.metadata.client; + +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.r2.RemoteInvocationException; +import java.net.URISyntaxException; +import java.util.Map; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; + +@Builder +public class EntityClientAspectRetriever implements AspectRetriever { + @Getter private final EntityRegistry entityRegistry; + private final SystemEntityClient entityClient; + + @Nullable + @Override + public Aspect getLatestAspectObject(@Nonnull Urn urn, @Nonnull String aspectName) + throws RemoteInvocationException, URISyntaxException { + return entityClient.getLatestAspectObject(urn, aspectName); + } + + @Nonnull + @Override + public Map> getLatestAspectObjects( + Set urns, Set aspectNames) throws RemoteInvocationException, URISyntaxException { + return entityClient.getLatestAspects(urns, aspectNames); + } +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index 9b3f42a37b45d..0ebe9ed1d1b66 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -4,6 +4,7 @@ import static com.linkedin.metadata.search.utils.SearchUtils.*; import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -18,7 +19,6 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.aspect.EnvelopedAspectArray; @@ -31,7 +31,6 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.query.AutoCompleteResult; @@ -48,6 +47,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; import 
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
index 9b3f42a37b45d..0ebe9ed1d1b66 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java
@@ -4,6 +4,7 @@ import static com.linkedin.metadata.search.utils.SearchUtils.*;
 
 import com.datahub.authentication.Authentication;
+import com.datahub.plugins.auth.authorization.Authorizer;
 import com.datahub.util.RecordUtils;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
@@ -18,7 +19,6 @@
 import com.linkedin.entity.Entity;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.client.EntityClient;
-import com.linkedin.entity.client.RestliEntityClient;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.aspect.EnvelopedAspect;
 import com.linkedin.metadata.aspect.EnvelopedAspectArray;
@@ -31,7 +31,6 @@
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.entity.IngestResult;
 import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl;
-import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem;
 import com.linkedin.metadata.event.EventProducer;
 import com.linkedin.metadata.graph.LineageDirection;
 import com.linkedin.metadata.query.AutoCompleteResult;
@@ -48,6 +47,7 @@
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.metadata.search.SearchService;
 import com.linkedin.metadata.search.client.CachingEntitySearchService;
+import com.linkedin.metadata.service.RollbackService;
 import com.linkedin.metadata.shared.ValidationUtils;
 import com.linkedin.metadata.timeseries.TimeseriesAspectService;
 import com.linkedin.metadata.utils.metrics.MetricUtils;
@@ -85,15 +85,15 @@
 public class JavaEntityClient implements EntityClient {
   private final Clock _clock = Clock.systemUTC();
 
-  private final EntityService _entityService;
+  private final EntityService<?> _entityService;
   private final DeleteEntityService _deleteEntityService;
   private final EntitySearchService _entitySearchService;
   private final CachingEntitySearchService _cachingEntitySearchService;
   private final SearchService _searchService;
   private final LineageSearchService _lineageSearchService;
   private final TimeseriesAspectService _timeseriesAspectService;
+  private final RollbackService rollbackService;
   private final EventProducer _eventProducer;
-  private final RestliEntityClient _restliEntityClient;
 
   @Nullable
   public EntityResponse getV2(
@@ -713,11 +713,7 @@ public String ingestProposal(
         Stream.concat(Stream.of(metadataChangeProposal), additionalChanges.stream());
     AspectsBatch batch =
         AspectsBatchImpl.builder()
-            .mcps(
-                proposalStream.collect(Collectors.toList()),
-                auditStamp,
-                _entityService.getEntityRegistry(),
-                this)
+            .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService)
             .build();
 
     IngestResult one = _entityService.ingestProposal(batch, async).stream().findFirst().get();
@@ -780,9 +776,10 @@ public void producePlatformEvent(
   }
 
   @Override
-  public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication)
+  public void rollbackIngestion(
+      @Nonnull String runId, @Nonnull Authorizer authorizer, @Nonnull Authentication authentication)
       throws Exception {
-    _restliEntityClient.rollbackIngestion(runId, authentication);
+    rollbackService.rollbackIngestion(runId, false, true, authorizer, authentication);
   }
 
   private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java
index 31c2846a9c9f3..fa020903c34f0 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java
@@ -2,18 +2,18 @@
 import com.datahub.authentication.Authentication;
 import com.linkedin.entity.client.EntityClientCache;
-import com.linkedin.entity.client.RestliEntityClient;
 import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig;
 import com.linkedin.metadata.entity.DeleteEntityService;
 import com.linkedin.metadata.entity.EntityService;
-import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem;
 import com.linkedin.metadata.event.EventProducer;
 import com.linkedin.metadata.search.EntitySearchService;
 import com.linkedin.metadata.search.LineageSearchService;
 import com.linkedin.metadata.search.SearchService;
 import com.linkedin.metadata.search.client.CachingEntitySearchService;
+import com.linkedin.metadata.service.RollbackService;
 import com.linkedin.metadata.timeseries.TimeseriesAspectService;
+import javax.annotation.Nonnull;
 import lombok.Getter;
 
 /** Java backed SystemEntityClient */
@@ -24,16 +24,16 @@ public class SystemJavaEntityClient extends JavaEntityClient implements SystemEn
   private final Authentication systemAuthentication;
 
   public SystemJavaEntityClient(
-      EntityService entityService,
+      EntityService<?> entityService,
       DeleteEntityService deleteEntityService,
       EntitySearchService entitySearchService,
       CachingEntitySearchService cachingEntitySearchService,
       SearchService searchService,
       LineageSearchService lineageSearchService,
       TimeseriesAspectService timeseriesAspectService,
+      RollbackService rollbackService,
       EventProducer eventProducer,
-      RestliEntityClient restliEntityClient,
-      Authentication systemAuthentication,
+      @Nonnull Authentication systemAuthentication,
       EntityClientCacheConfig cacheConfig) {
     super(
         entityService,
@@ -43,8 +43,8 @@ public SystemJavaEntityClient(
         searchService,
         lineageSearchService,
         timeseriesAspectService,
-        eventProducer,
-        restliEntityClient);
+        rollbackService,
+        eventProducer);
     this.systemAuthentication = systemAuthentication;
     this.entityClientCache =
         buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig);
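The hunk above swaps the client's `RestliEntityClient` dependency for a `RollbackService` and reorders the constructor. A hypothetical wiring fragment mirroring that parameter order; every `*Service` variable here is an assumption standing in for whatever beans the deployment provides:

```java
// Sketch only: parameter order taken from the diff above, names assumed.
SystemJavaEntityClient systemClient =
    new SystemJavaEntityClient(
        entityService,
        deleteEntityService,
        entitySearchService,
        cachingEntitySearchService,
        searchService,
        lineageSearchService,
        timeseriesAspectService,
        rollbackService, // replaces the removed RestliEntityClient dependency
        eventProducer,
        systemAuthentication,
        cacheConfig);
```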
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
index ed69e919a7b24..b3b11d200ec0d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
@@ -12,6 +12,7 @@
 import static com.linkedin.metadata.Constants.UI_SOURCE;
 import static com.linkedin.metadata.search.utils.BrowsePathUtils.buildDataPlatformUrn;
 import static com.linkedin.metadata.search.utils.BrowsePathUtils.getDefaultBrowsePath;
+import static com.linkedin.metadata.utils.GenericRecordUtils.entityResponseToAspectMap;
 import static com.linkedin.metadata.utils.PegasusUtils.constructMCL;
 import static com.linkedin.metadata.utils.PegasusUtils.getDataTemplateClassFromSchema;
 import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
@@ -46,7 +47,6 @@
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
-import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.aspect.Aspect;
@@ -84,6 +84,7 @@
 import com.linkedin.mxe.MetadataChangeLog;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.mxe.SystemMetadata;
+import com.linkedin.r2.RemoteInvocationException;
 import com.linkedin.util.Pair;
 import io.ebean.PagedList;
 import io.ebean.Transaction;
@@ -166,14 +167,12 @@ public class EntityServiceImpl implements EntityService {
 
   private final Integer ebeanMaxTransactionRetry;
 
-  private SystemEntityClient systemEntityClient;
-
   public EntityServiceImpl(
       @Nonnull final AspectDao aspectDao,
       @Nonnull final EventProducer producer,
       @Nonnull final EntityRegistry entityRegistry,
       final boolean alwaysEmitChangeLog,
-      final UpdateIndicesService updateIndicesService,
+      @Nullable final UpdateIndicesService updateIndicesService,
       final PreProcessHooks preProcessHooks) {
     this(
         aspectDao,
@@ -190,9 +189,9 @@ public EntityServiceImpl(
       @Nonnull final EventProducer producer,
       @Nonnull final EntityRegistry entityRegistry,
       final boolean alwaysEmitChangeLog,
-      final UpdateIndicesService updateIndicesService,
+      @Nullable final UpdateIndicesService updateIndicesService,
       final PreProcessHooks preProcessHooks,
-      final Integer retry) {
+      @Nullable final Integer retry) {
 
     _aspectDao = aspectDao;
     _producer = producer;
@@ -200,21 +199,13 @@ public EntityServiceImpl(
     _entityToValidAspects = buildEntityToValidAspects(entityRegistry);
     _alwaysEmitChangeLog = alwaysEmitChangeLog;
     _updateIndicesService = updateIndicesService;
+    if (_updateIndicesService != null) {
+      _updateIndicesService.initializeAspectRetriever(this);
+    }
     _preProcessHooks = preProcessHooks;
     ebeanMaxTransactionRetry = retry != null ? retry : DEFAULT_MAX_TRANSACTION_RETRY;
   }
 
-  @Override
-  public void setSystemEntityClient(SystemEntityClient systemEntityClient) {
-    this.systemEntityClient = systemEntityClient;
-    this._updateIndicesService.setSystemEntityClient(systemEntityClient);
-  }
-
-  @Override
-  public SystemEntityClient getSystemEntityClient() {
-    return this.systemEntityClient;
-  }
-
   @Override
   public RecordTemplate getLatestAspect(@Nonnull Urn urn, @Nonnull String aspectName) {
     log.debug("Invoked getLatestAspect with urn {}, aspect {}", urn, aspectName);
@@ -634,7 +625,7 @@ public List<UpdateAspectResult> ingestAspects(
                     .aspect(pair.getValue())
                     .systemMetadata(systemMetadata)
                     .auditStamp(auditStamp)
-                    .build(_entityRegistry, systemEntityClient))
+                    .build(this))
             .collect(Collectors.toList());
     return ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true);
   }
@@ -693,7 +684,7 @@ private List<UpdateAspectResult> ingestAspectsToLocalDB(
     // 1. Convert patches to full upserts
     // 2. Run any entity/aspect level hooks
     Pair<Map<String, Set<String>>, List<UpsertItem>> updatedItems =
-        aspectsBatch.toUpsertBatchItems(latestAspects, _entityRegistry, systemEntityClient);
+        aspectsBatch.toUpsertBatchItems(latestAspects, this);
 
     // Fetch additional information if needed
     final Map<String, Map<String, SystemAspect>> updatedLatestAspects;
@@ -725,8 +716,7 @@ private List<UpdateAspectResult> ingestAspectsToLocalDB(
                         previousAspect == null
                             ? null
                             : previousAspect.getRecordTemplate(_entityRegistry),
-                        _entityRegistry,
-                        systemEntityClient);
+                        this);
                   } catch (AspectValidationException e) {
                     throw new RuntimeException(e);
                   }
@@ -934,7 +924,7 @@ public RecordTemplate ingestAspectIfNotPresent(
                     .aspect(newValue)
                     .systemMetadata(systemMetadata)
                     .auditStamp(auditStamp)
-                    .build(_entityRegistry, systemEntityClient))
+                    .build(this))
             .build();
     List<UpdateAspectResult> ingested = ingestAspects(aspectsBatch, true, false);
@@ -954,10 +944,7 @@ public RecordTemplate ingestAspectIfNotPresent(
   public IngestResult ingestProposal(
       MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) {
     return ingestProposal(
-            AspectsBatchImpl.builder()
-                .mcps(List.of(proposal), auditStamp, getEntityRegistry(), systemEntityClient)
-                .build(),
-            async)
+            AspectsBatchImpl.builder().mcps(List.of(proposal), auditStamp, this).build(), async)
         .stream()
         .findFirst()
         .get();
@@ -1545,7 +1532,7 @@ protected Map<Urn, Snapshot> getSnapshotRecords(
   @Nonnull
   protected Map<Urn, List<UnionTemplate>> getLatestAspectUnions(
       @Nonnull final Set<Urn> urns, @Nonnull final Set<String> aspectNames) {
-    return getLatestAspects(urns, aspectNames).entrySet().stream()
+    return this.getLatestAspects(urns, aspectNames).entrySet().stream()
         .collect(
             Collectors.toMap(
                 Map.Entry::getKey,
@@ -1694,7 +1681,7 @@ private void ingestSnapshotUnion(
                     .aspect(pair.getValue())
                     .auditStamp(auditStamp)
                     .systemMetadata(systemMetadata)
-                    .build(_entityRegistry, systemEntityClient))
+                    .build(this))
                 .collect(Collectors.toList()))
             .build();
 
@@ -1796,6 +1783,7 @@ private static Map<String, Set<String>> buildEntityToValidAspects(
   }
 
   @Override
+  @Nonnull
   public EntityRegistry getEntityRegistry() {
     return _entityRegistry;
   }
@@ -2487,4 +2475,12 @@ private static boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspec
     final List<RelationshipFieldSpec> relationshipFieldSpecs =
         aspectSpec.getRelationshipFieldSpecs();
     return relationshipFieldSpecs.stream().anyMatch(RelationshipFieldSpec::isLineageRelationship);
   }
 
+  @Nonnull
+  @Override
+  public Map<Urn, Map<String, Aspect>> getLatestAspectObjects(
+      Set<Urn> urns, Set<String> aspectNames) throws RemoteInvocationException, URISyntaxException {
+    String entityName = urns.stream().findFirst().map(Urn::getEntityType).get();
+    return entityResponseToAspectMap(getEntitiesV2(entityName, urns, aspectNames));
+  }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
index 4d3ac9a550553..f353e5142755d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
@@ -58,17 +58,11 @@ public static AuditStamp getAuditStamp(Urn actor) {
 
   public static void ingestChangeProposals(
       @Nonnull List<MetadataChangeProposal> changes,
-      @Nonnull EntityService entityService,
+      @Nonnull EntityService<?> entityService,
       @Nonnull Urn actor,
      @Nonnull Boolean async) {
     entityService.ingestProposal(
-        AspectsBatchImpl.builder()
-            .mcps(
-                changes,
-                getAuditStamp(actor),
-                entityService.getEntityRegistry(),
-                entityService.getSystemEntityClient())
-            .build(),
+        AspectsBatchImpl.builder().mcps(changes, getAuditStamp(actor), entityService).build(),
         async);
   }
 
@@ -85,7 +79,7 @@ public static void ingestChangeProposals(
   public static RecordTemplate getAspectFromEntity(
       String entityUrn,
       String aspectName,
-      EntityService entityService,
+      EntityService<?> entityService,
       RecordTemplate defaultValue) {
     Urn urn = getUrnFromString(entityUrn);
     if (urn == null) {
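The EntityUtils change above is representative of the whole refactor: the `EntityService` now stands in for the old registry-plus-client pair because it is itself an `AspectRetriever`. A sketch of the resulting call shape; `changes`, `actor`, and `entityService` are assumed to be in scope:

```java
// Sketch under the assumptions above; mirrors the "+" lines in the hunk.
AspectsBatch batch =
    AspectsBatchImpl.builder()
        .mcps(changes, EntityUtils.getAuditStamp(actor), entityService)
        .build();
entityService.ingestProposal(batch, /* async */ true);
```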
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java
index f1b7d761087b4..4d9d2b3c416b7 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java
@@ -17,12 +17,12 @@
 import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.aspect.batch.AspectsBatch;
+import com.linkedin.metadata.aspect.batch.UpsertItem;
 import com.linkedin.metadata.entity.EntityAspect;
 import com.linkedin.metadata.entity.EntityAspectIdentifier;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.entity.RetentionService;
 import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl;
-import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult;
 import com.linkedin.mxe.MetadataChangeProposal;
@@ -45,28 +45,22 @@
 @Slf4j
 @RequiredArgsConstructor
-public class CassandraRetentionService extends RetentionService {
-  private final EntityService _entityService;
+public class CassandraRetentionService<U extends UpsertItem> extends RetentionService<U> {
+  private final EntityService<U> _entityService;
   private final CqlSession _cqlSession;
   private final int _batchSize;
 
   private final Clock _clock = Clock.systemUTC();
 
   @Override
-  public EntityService getEntityService() {
+  public EntityService<U> getEntityService() {
     return _entityService;
   }
 
   @Override
   protected AspectsBatch buildAspectsBatch(
       List<MetadataChangeProposal> mcps, @Nonnull AuditStamp auditStamp) {
-    return AspectsBatchImpl.builder()
-        .mcps(
-            mcps,
-            auditStamp,
-            _entityService.getEntityRegistry(),
-            _entityService.getSystemEntityClient())
-        .build();
+    return AspectsBatchImpl.builder().mcps(mcps, auditStamp, _entityService).build();
   }
 
   @Override
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java
index d1f54f8a7e6e5..eba550714766b 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java
@@ -5,10 +5,10 @@
 import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.aspect.batch.AspectsBatch;
+import com.linkedin.metadata.aspect.batch.UpsertItem;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.entity.RetentionService;
 import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl;
-import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs;
 import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult;
 import com.linkedin.mxe.MetadataChangeProposal;
@@ -40,28 +40,22 @@
 @Slf4j
 @RequiredArgsConstructor
-public class EbeanRetentionService extends RetentionService {
-  private final EntityService _entityService;
+public class EbeanRetentionService<U extends UpsertItem> extends RetentionService<U> {
+  private final EntityService<U> _entityService;
   private final Database _server;
   private final int _batchSize;
 
   private final Clock _clock = Clock.systemUTC();
 
   @Override
-  public EntityService getEntityService() {
+  public EntityService<U> getEntityService() {
     return _entityService;
   }
 
   @Override
   protected AspectsBatch buildAspectsBatch(
       List<MetadataChangeProposal> mcps, @Nonnull AuditStamp auditStamp) {
-    return AspectsBatchImpl.builder()
-        .mcps(
-            mcps,
-            auditStamp,
-            _entityService.getEntityRegistry(),
-            _entityService.getSystemEntityClient())
-        .build();
+    return AspectsBatchImpl.builder().mcps(mcps, auditStamp, _entityService).build();
   }
 
   @Override
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java
index 4b75fe73a12e5..80fb4e3e1b940 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java
@@ -8,7 +8,6 @@
 import com.linkedin.metadata.aspect.batch.SystemAspect;
 import com.linkedin.metadata.aspect.batch.UpsertItem;
 import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever;
-import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.mxe.SystemMetadata;
 import com.linkedin.util.Pair;
@@ -33,15 +32,12 @@ public class AspectsBatchImpl implements AspectsBatch {
   * Convert patches to upserts, apply hooks at the aspect and batch level.
   *
   * @param latestAspects latest version in the database
-  * @param entityRegistry entity registry
   * @return The new urn/aspectnames and the uniform upserts, possibly expanded/mutated by the
   *     various hooks
   */
  @Override
  public Pair<Map<String, Set<String>>, List<UpsertItem>> toUpsertBatchItems(
-      final Map<String, Map<String, SystemAspect>> latestAspects,
-      EntityRegistry entityRegistry,
-      AspectRetriever aspectRetriever) {
+      final Map<String, Map<String, SystemAspect>> latestAspects, AspectRetriever aspectRetriever) {
 
    LinkedList<UpsertItem> upsertBatchItems =
        items.stream()
@@ -59,25 +55,27 @@ public Pair<Map<String, Set<String>>, List<UpsertItem>> toUpsertBatchItems(
                    // patch to upsert
                    MCPPatchBatchItem patchBatchItem = (MCPPatchBatchItem) item;
                    final RecordTemplate currentValue =
-                        latest != null ? latest.getRecordTemplate(entityRegistry) : null;
-                    upsertItem =
-                        patchBatchItem.applyPatch(entityRegistry, currentValue, aspectRetriever);
+                        latest != null
+                            ? latest.getRecordTemplate(aspectRetriever.getEntityRegistry())
+                            : null;
+                    upsertItem = patchBatchItem.applyPatch(currentValue, aspectRetriever);
                  }
                  // Apply hooks
                  final SystemMetadata oldSystemMetadata =
                      latest != null ? latest.getSystemMetadata() : null;
                  final RecordTemplate oldAspectValue =
-                      latest != null ? latest.getRecordTemplate(entityRegistry) : null;
-                  upsertItem.applyMutationHooks(
-                      oldAspectValue, oldSystemMetadata, entityRegistry, aspectRetriever);
+                      latest != null
+                          ? latest.getRecordTemplate(aspectRetriever.getEntityRegistry())
+                          : null;
+                  upsertItem.applyMutationHooks(oldAspectValue, oldSystemMetadata, aspectRetriever);
                  return upsertItem;
                })
            .collect(Collectors.toCollection(LinkedList::new));
 
    LinkedList<UpsertItem> newItems =
-        applyMCPSideEffects(upsertBatchItems, entityRegistry, aspectRetriever)
+        applyMCPSideEffects(upsertBatchItems, aspectRetriever)
            .collect(Collectors.toCollection(LinkedList::new));
    Map<String, Set<String>> newUrnAspectNames =
        getNewUrnAspectsMap(getUrnAspectsMap(), newItems);
    upsertBatchItems.addAll(newItems);
@@ -98,20 +96,17 @@ public AspectsBatchImplBuilder one(BatchItem data) {
    }
 
    public AspectsBatchImplBuilder mcps(
-        List<MetadataChangeProposal> mcps,
-        AuditStamp auditStamp,
-        EntityRegistry entityRegistry,
-        AspectRetriever aspectRetriever) {
+        List<MetadataChangeProposal> mcps, AuditStamp auditStamp, AspectRetriever aspectRetriever) {
      this.items =
          mcps.stream()
              .map(
                  mcp -> {
                    if (mcp.getChangeType().equals(ChangeType.PATCH)) {
                      return MCPPatchBatchItem.MCPPatchBatchItemBuilder.build(
-                          mcp, auditStamp, entityRegistry);
+                          mcp, auditStamp, aspectRetriever.getEntityRegistry());
                    } else {
                      return MCPUpsertBatchItem.MCPUpsertBatchItemBuilder.build(
-                          mcp, auditStamp, entityRegistry, aspectRetriever);
+                          mcp, auditStamp, aspectRetriever);
                    }
                  })
              .collect(Collectors.toList());
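The `mcps(...)` builder above is where PATCH proposals fork off from upserts. A minimal sketch of that per-proposal dispatch, assuming `mcp`, `auditStamp`, and `aspectRetriever` are in scope and that `BatchItem` is the common supertype (as the `one(BatchItem data)` builder method suggests):

```java
// Sketch of the dispatch performed inside mcps(...) after this change:
// PATCH proposals become patch items (registry pulled off the retriever),
// all other change types go through the upsert builder.
BatchItem item =
    mcp.getChangeType().equals(ChangeType.PATCH)
        ? MCPPatchBatchItem.MCPPatchBatchItemBuilder.build(
            mcp, auditStamp, aspectRetriever.getEntityRegistry())
        : MCPUpsertBatchItem.MCPUpsertBatchItemBuilder.build(mcp, auditStamp, aspectRetriever);
```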
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java
index f61280bac4b22..6563765657d6d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCLBatchItemImpl.java
@@ -40,18 +40,24 @@ public class MCLBatchItemImpl implements MCLBatchItem {
 
   public static class MCLBatchItemImplBuilder {
 
+    // Ensure use of other builders
+    private MCLBatchItemImpl build() {
+      return null;
+    }
+
     public MCLBatchItemImpl build(
-        MetadataChangeLog metadataChangeLog,
-        EntityRegistry entityRegistry,
-        AspectRetriever aspectRetriever) {
-      return MCLBatchItemImpl.builder()
-          .metadataChangeLog(metadataChangeLog)
-          .build(entityRegistry, aspectRetriever);
+        MetadataChangeLog metadataChangeLog, AspectRetriever aspectRetriever) {
+      return MCLBatchItemImpl.builder().metadataChangeLog(metadataChangeLog).build(aspectRetriever);
     }
 
-    public MCLBatchItemImpl build(EntityRegistry entityRegistry, AspectRetriever aspectRetriever) {
+    public MCLBatchItemImpl build(AspectRetriever aspectRetriever) {
+      EntityRegistry entityRegistry = aspectRetriever.getEntityRegistry();
+
       log.debug("entity type = {}", this.metadataChangeLog.getEntityType());
-      entitySpec(entityRegistry.getEntitySpec(this.metadataChangeLog.getEntityType()));
+      entitySpec(
+          aspectRetriever
+              .getEntityRegistry()
+              .getEntitySpec(this.metadataChangeLog.getEntityType()));
       aspectSpec(validateAspect(this.metadataChangeLog, this.entitySpec));
 
       Urn urn = this.metadataChangeLog.getEntityUrn();
@@ -75,7 +81,6 @@ public MCLBatchItemImpl build(EntityRegistry entityRegistry, AspectRetriever asp
       // validate new
       ValidationUtils.validateRecordTemplate(
           this.metadataChangeLog.getChangeType(),
-          entityRegistry,
           this.entitySpec,
           this.aspectSpec,
           urn,
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java
index 3adf384f3b0ed..be333af2f7539 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPPatchBatchItem.java
@@ -16,13 +16,13 @@
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.aspect.batch.PatchItem;
+import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine;
 import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever;
 import com.linkedin.metadata.entity.EntityUtils;
 import com.linkedin.metadata.entity.validation.ValidationUtils;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
-import com.linkedin.metadata.models.registry.template.AspectTemplateEngine;
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import com.linkedin.metadata.utils.SystemMetadataUtils;
 import com.linkedin.mxe.MetadataChangeProposal;
@@ -73,9 +73,7 @@ public ChangeType getChangeType() {
   }
 
   public MCPUpsertBatchItem applyPatch(
-      EntityRegistry entityRegistry,
-      RecordTemplate recordTemplate,
-      AspectRetriever aspectRetriever) {
+      RecordTemplate recordTemplate, AspectRetriever aspectRetriever) {
     MCPUpsertBatchItem.MCPUpsertBatchItemBuilder builder =
         MCPUpsertBatchItem.builder()
             .urn(getUrn())
@@ -84,7 +82,8 @@ public MCPUpsertBatchItem applyPatch(
             .auditStamp(auditStamp)
             .systemMetadata(getSystemMetadata());
 
-    AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine();
+    AspectTemplateEngine aspectTemplateEngine =
+        aspectRetriever.getEntityRegistry().getAspectTemplateEngine();
 
     RecordTemplate currentValue =
         recordTemplate != null
@@ -106,7 +105,7 @@ public MCPUpsertBatchItem applyPatch(
       throw new RuntimeException(e);
     }
 
-    return builder.build(entityRegistry, aspectRetriever);
+    return builder.build(aspectRetriever);
   }
 
   public static class MCPPatchBatchItemBuilder {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java
index 9d41b141dcd60..89209c44f10c7 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/MCPUpsertBatchItem.java
@@ -4,12 +4,14 @@
 import static com.linkedin.metadata.entity.AspectUtils.validateAspect;
 
 import com.datahub.util.exception.ModelConversionException;
+import com.github.fge.jsonpatch.JsonPatchException;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.aspect.batch.SystemAspect;
 import com.linkedin.metadata.aspect.batch.UpsertItem;
+import com.linkedin.metadata.aspect.patch.template.common.GenericPatchTemplate;
 import com.linkedin.metadata.aspect.plugins.hooks.MutationHook;
 import com.linkedin.metadata.aspect.plugins.validation.AspectPayloadValidator;
 import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever;
@@ -19,12 +21,12 @@
 import com.linkedin.metadata.entity.validation.ValidationUtils;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.EntitySpec;
-import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import com.linkedin.metadata.utils.GenericRecordUtils;
 import com.linkedin.metadata.utils.SystemMetadataUtils;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.mxe.SystemMetadata;
+import java.io.IOException;
 import java.sql.Timestamp;
 import java.util.Objects;
 import javax.annotation.Nonnull;
@@ -39,6 +41,31 @@
 @Builder(toBuilder = true)
 public class MCPUpsertBatchItem extends UpsertItem {
 
+  public static MCPUpsertBatchItem fromPatch(
+      @Nonnull Urn urn,
+      @Nonnull AspectSpec aspectSpec,
+      @Nullable RecordTemplate recordTemplate,
+      GenericPatchTemplate<? extends RecordTemplate> genericPatchTemplate,
+      @Nonnull AuditStamp auditStamp,
+      AspectRetriever aspectRetriever) {
+    MCPUpsertBatchItem.MCPUpsertBatchItemBuilder builder =
+        MCPUpsertBatchItem.builder()
+            .urn(urn)
+            .auditStamp(auditStamp)
+            .aspectName(aspectSpec.getName());
+
+    RecordTemplate currentValue =
+        recordTemplate != null ?
recordTemplate : genericPatchTemplate.getDefault(); + + try { + builder.aspect(genericPatchTemplate.applyPatch(currentValue)); + } catch (JsonPatchException | IOException e) { + throw new RuntimeException(e); + } + + return builder.build(aspectRetriever); + } + // urn an urn associated with the new aspect @Nonnull private final Urn urn; @@ -66,12 +93,12 @@ public ChangeType getChangeType() { public void applyMutationHooks( @Nullable RecordTemplate oldAspectValue, @Nullable SystemMetadata oldSystemMetadata, - @Nonnull EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever) { // add audit stamp/system meta if needed for (MutationHook mutationHook : - entityRegistry.getMutationHooks( - getChangeType(), entitySpec.getName(), aspectSpec.getName())) { + aspectRetriever + .getEntityRegistry() + .getMutationHooks(getChangeType(), entitySpec.getName(), aspectSpec.getName())) { mutationHook.applyMutation( getChangeType(), entitySpec, @@ -99,14 +126,14 @@ public SystemAspect toLatestEntityAspect() { @Override public void validatePreCommit( - @Nullable RecordTemplate previous, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) + @Nullable RecordTemplate previous, @Nonnull AspectRetriever aspectRetriever) throws AspectValidationException { for (AspectPayloadValidator validator : - entityRegistry.getAspectPayloadValidators( - getChangeType(), entitySpec.getName(), aspectSpec.getName())) { + aspectRetriever + .getEntityRegistry() + .getAspectPayloadValidators( + getChangeType(), entitySpec.getName(), aspectSpec.getName())) { validator.validatePreCommit( getChangeType(), urn, getAspectSpec(), previous, this.aspect, aspectRetriever); } @@ -125,12 +152,11 @@ public MCPUpsertBatchItemBuilder systemMetadata(SystemMetadata systemMetadata) { } @SneakyThrows - public MCPUpsertBatchItem build( - EntityRegistry entityRegistry, AspectRetriever aspectRetriever) { - EntityUtils.validateUrn(entityRegistry, this.urn); + public MCPUpsertBatchItem build(AspectRetriever aspectRetriever) { + EntityUtils.validateUrn(aspectRetriever.getEntityRegistry(), this.urn); log.debug("entity type = {}", this.urn.getEntityType()); - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + entitySpec(aspectRetriever.getEntityRegistry().getEntitySpec(this.urn.getEntityType())); log.debug("entity spec = {}", this.entitySpec); aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); @@ -138,7 +164,6 @@ public MCPUpsertBatchItem build( ValidationUtils.validateRecordTemplate( ChangeType.UPSERT, - entityRegistry, this.entitySpec, this.aspectSpec, this.urn, @@ -157,17 +182,15 @@ public MCPUpsertBatchItem build( } public static MCPUpsertBatchItem build( - MetadataChangeProposal mcp, - AuditStamp auditStamp, - EntityRegistry entityRegistry, - AspectRetriever aspectRetriever) { + MetadataChangeProposal mcp, AuditStamp auditStamp, AspectRetriever aspectRetriever) { if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { throw new IllegalArgumentException( "Invalid MCP, this class only supports change type of UPSERT."); } log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + EntitySpec entitySpec = + aspectRetriever.getEntityRegistry().getEntitySpec(mcp.getEntityType()); AspectSpec aspectSpec = validateAspect(mcp, entitySpec); if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { @@ -191,7 +214,7 @@ public static MCPUpsertBatchItem build( .metadataChangeProposal(mcp) .auditStamp(auditStamp) 
         .aspect(convertToRecordTemplate(mcp, aspectSpec))
-        .build(entityRegistry, aspectRetriever);
+        .build(aspectRetriever);
   }
 
   private MCPUpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java
index 97f7aa06340d2..947f0116b587c 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java
@@ -67,12 +67,12 @@ public static AspectSpec validate(EntitySpec entitySpec, String aspectName) {
 
   public static void validateRecordTemplate(
       ChangeType changeType,
-      EntityRegistry entityRegistry,
       EntitySpec entitySpec,
       AspectSpec aspectSpec,
       Urn urn,
       @Nullable RecordTemplate aspect,
       @Nonnull AspectRetriever aspectRetriever) {
+    EntityRegistry entityRegistry = aspectRetriever.getEntityRegistry();
     EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry);
     validator.setCurrentEntitySpec(entitySpec);
     Consumer<ValidationResult> resultFunction =
         validationResult -> {
@@ -83,6 +83,7 @@ public static void validateRecordTemplate(
                 + "\n Cause: "
                 + validationResult.getMessages());
         };
+
     RecordTemplateValidator.validate(
         EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator);
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java
index 0d8b7655fddeb..24e272dee7a25 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java
@@ -10,6 +10,7 @@
 import com.linkedin.metadata.graph.Edge;
 import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.graph.RelatedEntitiesResult;
+import com.linkedin.metadata.graph.RelatedEntitiesScrollResult;
 import com.linkedin.metadata.graph.RelatedEntity;
 import com.linkedin.metadata.models.registry.LineageRegistry;
 import com.linkedin.metadata.query.filter.Criterion;
@@ -17,6 +18,7 @@
 import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.filter.RelationshipDirection;
 import com.linkedin.metadata.query.filter.RelationshipFilter;
+import com.linkedin.metadata.query.filter.SortCriterion;
 import io.dgraph.DgraphClient;
 import io.dgraph.DgraphProto.Mutation;
 import io.dgraph.DgraphProto.NQuad;
@@ -779,4 +781,21 @@ public void clear() {
     // setup urn, type and key relationships
     getSchema();
   }
+
+  @Nonnull
+  @Override
+  public RelatedEntitiesScrollResult scrollRelatedEntities(
+      @Nullable List<String> sourceTypes,
+      @Nonnull Filter sourceEntityFilter,
+      @Nullable List<String> destinationTypes,
+      @Nonnull Filter destinationEntityFilter,
+      @Nonnull List<String> relationshipTypes,
+      @Nonnull RelationshipFilter relationshipFilter,
+      @Nonnull List<SortCriterion> sortCriterion,
+      @Nullable String scrollId,
+      int count,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis) {
+    throw new IllegalArgumentException("Not implemented");
+  }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
index 97cb186ce948c..3051319aa54cf 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
index 97cb186ce948c..3051319aa54cf 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java
@@ -23,6 +23,8 @@ import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.filter.RelationshipDirection;
 import com.linkedin.metadata.query.filter.RelationshipFilter;
+import com.linkedin.metadata.query.filter.SortCriterion;
+import com.linkedin.metadata.search.elasticsearch.query.request.SearchAfterWrapper;
 import com.linkedin.metadata.search.utils.ESUtils;
 import com.linkedin.metadata.utils.ConcurrencyUtils;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
@@ -81,7 +83,7 @@ public class ESGraphQueryDAO {
   @Nonnull
   public static void addFilterToQueryBuilder(
-      @Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) {
+      @Nonnull Filter filter, @Nullable String node, BoolQueryBuilder rootQuery) {
     BoolQueryBuilder orQuery = new BoolQueryBuilder();
     for (ConjunctiveCriterion conjunction : filter.getOr()) {
       final BoolQueryBuilder andQuery = new BoolQueryBuilder();
@@ -93,12 +95,13 @@ public static void addFilterToQueryBuilder(
       }
       criterionArray.forEach(
           criterion ->
-              andQuery.must(
+              andQuery.filter(
                   QueryBuilders.termQuery(
-                      node + "." + criterion.getField(), criterion.getValue())));
+                      (node == null ? "" : node + ".") + criterion.getField(),
+                      criterion.getValue())));
       orQuery.should(andQuery);
     }
-    rootQuery.must(orQuery);
+    rootQuery.filter(orQuery);
   }

   private SearchResponse executeSearchQuery(
@@ -174,9 +177,9 @@ public SearchResponse getSearchResponse(

   public static BoolQueryBuilder buildQuery(
       @Nullable final List<String> sourceTypes,
-      @Nonnull final Filter sourceEntityFilter,
+      @Nullable final Filter sourceEntityFilter,
       @Nullable final List<String> destinationTypes,
-      @Nonnull final Filter destinationEntityFilter,
+      @Nullable final Filter destinationEntityFilter,
       @Nonnull final List<String> relationshipTypes,
       @Nonnull final RelationshipFilter relationshipFilter) {
     BoolQueryBuilder finalQuery = QueryBuilders.boolQuery();
@@ -187,17 +190,22 @@ public static BoolQueryBuilder buildQuery(
     String sourceNode =
         relationshipDirection == RelationshipDirection.OUTGOING ? SOURCE : DESTINATION;
     if (sourceTypes != null && sourceTypes.size() > 0) {
-      finalQuery.must(QueryBuilders.termsQuery(sourceNode + ".entityType", sourceTypes));
+      finalQuery.filter(QueryBuilders.termsQuery(sourceNode + ".entityType", sourceTypes));
+    }
+    if (sourceEntityFilter != null) {
+      addFilterToQueryBuilder(sourceEntityFilter, sourceNode, finalQuery);
     }
-    addFilterToQueryBuilder(sourceEntityFilter, sourceNode, finalQuery);

     // set destination filter
     String destinationNode =
         relationshipDirection == RelationshipDirection.OUTGOING ? DESTINATION : SOURCE;
     if (destinationTypes != null && destinationTypes.size() > 0) {
-      finalQuery.must(QueryBuilders.termsQuery(destinationNode + ".entityType", destinationTypes));
+      finalQuery.filter(
+          QueryBuilders.termsQuery(destinationNode + ".entityType", destinationTypes));
+    }
+    if (destinationEntityFilter != null) {
+      addFilterToQueryBuilder(destinationEntityFilter, destinationNode, finalQuery);
     }
-    addFilterToQueryBuilder(destinationEntityFilter, destinationNode, finalQuery);

     // set relationship filter
     if (relationshipTypes.size() > 0) {
@@ -206,8 +214,14 @@
           relationshipType ->
               relationshipQuery.should(
                   QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType)));
-      finalQuery.must(relationshipQuery);
+      finalQuery.filter(relationshipQuery);
+    }
+
+    // general filter
+    if (relationshipFilter.getOr() != null) {
+      addFilterToQueryBuilder(new Filter().setOr(relationshipFilter.getOr()), null, finalQuery);
     }
+
     return finalQuery;
   }
@@ -659,4 +673,60 @@ public static class LineageResponse {
     int total;
     List<LineageRelationship> lineageRelationships;
   }
+
+  public SearchResponse getSearchResponse(
+      @Nullable final List<String> sourceTypes,
+      @Nullable final Filter sourceEntityFilter,
+      @Nullable final List<String> destinationTypes,
+      @Nullable final Filter destinationEntityFilter,
+      @Nonnull final List<String> relationshipTypes,
+      @Nonnull final RelationshipFilter relationshipFilter,
+      @Nonnull List<SortCriterion> sortCriterion,
+      @Nullable String scrollId,
+      int count) {
+
+    BoolQueryBuilder finalQuery =
+        buildQuery(
+            sourceTypes,
+            sourceEntityFilter,
+            destinationTypes,
+            destinationEntityFilter,
+            relationshipTypes,
+            relationshipFilter);
+
+    return executeScrollSearchQuery(finalQuery, sortCriterion, scrollId, count);
+  }
+
+  private SearchResponse executeScrollSearchQuery(
+      @Nonnull final QueryBuilder query,
+      @Nonnull List<SortCriterion> sortCriterion,
+      @Nullable String scrollId,
+      final int count) {
+
+    Object[] sort = null;
+    if (scrollId != null) {
+      SearchAfterWrapper searchAfterWrapper = SearchAfterWrapper.fromScrollId(scrollId);
+      sort = searchAfterWrapper.getSort();
+    }
+
+    SearchRequest searchRequest = new SearchRequest();
+
+    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+
+    searchSourceBuilder.size(count);
+    searchSourceBuilder.query(query);
+    ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, List.of(), false);
+    searchRequest.source(searchSourceBuilder);
+    ESUtils.setSearchAfter(searchSourceBuilder, sort, null, null);
+
+    searchRequest.indices(indexConvention.getIndexName(INDEX_NAME));
+
+    try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esQuery").time()) {
+      MetricUtils.counter(this.getClass(), SEARCH_EXECUTIONS_METRIC).inc();
+      return client.search(searchRequest, RequestOptions.DEFAULT);
+    } catch (Exception e) {
+      log.error("Search query failed", e);
+      throw new ESQueryException("Search query failed:", e);
+    }
+  }
 }
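Editor's note: the recurring must-to-filter change above is worth a standalone illustration. In Elasticsearch/OpenSearch bool queries, filter clauses skip relevance scoring and are cacheable, which suits graph-edge lookups that never rank by score. A minimal sketch using the same opensearch query builders:

    import org.opensearch.index.query.BoolQueryBuilder;
    import org.opensearch.index.query.QueryBuilders;

    BoolQueryBuilder q = QueryBuilders.boolQuery();
    // filter context: match required, no score contribution, cacheable
    q.filter(QueryBuilders.termQuery("relationshipType", "DownstreamOf"));
    // must context (the old behavior): same matching, but scored
    // q.must(QueryBuilders.termQuery("relationshipType", "DownstreamOf"));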
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java
index 6c828c0e7c6ae..67590ffd6e7c1 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java
@@ -11,7 +11,9 @@ import com.linkedin.metadata.graph.GraphService;
 import com.linkedin.metadata.graph.LineageDirection;
 import com.linkedin.metadata.graph.LineageRelationshipArray;
+import com.linkedin.metadata.graph.RelatedEntities;
 import com.linkedin.metadata.graph.RelatedEntitiesResult;
+import com.linkedin.metadata.graph.RelatedEntitiesScrollResult;
 import com.linkedin.metadata.graph.RelatedEntity;
 import com.linkedin.metadata.models.registry.LineageRegistry;
 import com.linkedin.metadata.query.filter.Condition;
@@ -22,11 +24,14 @@ import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.filter.RelationshipDirection;
 import com.linkedin.metadata.query.filter.RelationshipFilter;
+import com.linkedin.metadata.query.filter.SortCriterion;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig;
+import com.linkedin.metadata.search.elasticsearch.query.request.SearchAfterWrapper;
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import io.opentelemetry.extension.annotations.WithSpan;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -35,6 +40,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Base64;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -47,6 +53,7 @@
 import lombok.extern.slf4j.Slf4j;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.search.SearchHit;

 @Slf4j
 @RequiredArgsConstructor
@@ -165,8 +172,6 @@ public RelatedEntitiesResult findRelatedEntities(
     }

     final RelationshipDirection relationshipDirection = relationshipFilter.getDirection();
-    String destinationNode =
-        relationshipDirection == RelationshipDirection.OUTGOING ? "destination" : "source";

     SearchResponse response =
         _graphReadDAO.getSearchResponse(
@@ -185,28 +190,8 @@ public RelatedEntitiesResult findRelatedEntities(
     int totalCount = (int) response.getHits().getTotalHits().value;
     final List<RelatedEntity> relationships =
-        Arrays.stream(response.getHits().getHits())
-            .map(
-                hit -> {
-                  final String urnStr =
-                      ((HashMap<String, String>)
-                              hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH))
-                          .getOrDefault("urn", null);
-                  final String relationshipType =
-                      (String) hit.getSourceAsMap().get("relationshipType");
-
-                  if (urnStr == null || relationshipType == null) {
-                    log.error(
-                        String.format(
-                            "Found null urn string, relationship type, aspect name or path spec in Elastic index. "
" - + "urnStr: %s, relationshipType: %s", - urnStr, relationshipType)); - return null; - } - - return new RelatedEntity(relationshipType, urnStr); - }) - .filter(Objects::nonNull) + searchHitsToRelatedEntities(response.getHits().getHits(), relationshipDirection).stream() + .map(RelatedEntities::asRelatedEntity) .collect(Collectors.toList()); return new RelatedEntitiesResult(offset, relationships.size(), totalCount, relationships); @@ -328,6 +313,12 @@ public List buildReindexConfigs() throws IOException { Collections.emptyMap())); } + @Override + public List buildReindexConfigsWithAllStructProps( + Collection properties) throws IOException { + return buildReindexConfigs(); + } + @Override public void reindexAll() { configure(); @@ -344,4 +335,88 @@ public void clear() { public boolean supportsMultiHop() { return true; } + + @Nonnull + @Override + public RelatedEntitiesScrollResult scrollRelatedEntities( + @Nullable List sourceTypes, + @Nullable Filter sourceEntityFilter, + @Nullable List destinationTypes, + @Nullable Filter destinationEntityFilter, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + + final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); + + SearchResponse response = + _graphReadDAO.getSearchResponse( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + sortCriterion, + scrollId, + count); + + if (response == null) { + return new RelatedEntitiesScrollResult(0, 0, null, ImmutableList.of()); + } + + int totalCount = (int) response.getHits().getTotalHits().value; + final List relationships = + searchHitsToRelatedEntities(response.getHits().getHits(), relationshipDirection); + + SearchHit[] searchHits = response.getHits().getHits(); + // Only return next scroll ID if there are more results, indicated by full size results + String nextScrollId = null; + if (searchHits.length == count) { + Object[] sort = searchHits[searchHits.length - 1].getSortValues(); + nextScrollId = new SearchAfterWrapper(sort, null, 0L).toScrollId(); + } + + return RelatedEntitiesScrollResult.builder() + .entities(relationships) + .pageSize(relationships.size()) + .numResults(totalCount) + .scrollId(nextScrollId) + .build(); + } + + private static List searchHitsToRelatedEntities( + SearchHit[] searchHits, RelationshipDirection relationshipDirection) { + return Arrays.stream(searchHits) + .map( + hit -> { + final String destinationUrnStr = + ((HashMap) + hit.getSourceAsMap().getOrDefault("destination", EMPTY_HASH)) + .getOrDefault("urn", null); + final String sourceUrnStr = + ((HashMap) + hit.getSourceAsMap().getOrDefault("source", EMPTY_HASH)) + .getOrDefault("urn", null); + final String relationshipType = (String) hit.getSourceAsMap().get("relationshipType"); + + if (destinationUrnStr == null || sourceUrnStr == null || relationshipType == null) { + log.error( + String.format( + "Found null urn string, relationship type, aspect name or path spec in Elastic index. 
" + + "destinationUrnStr: %s, sourceUrnStr: %s, relationshipType: %s", + destinationUrnStr, sourceUrnStr, relationshipType)); + return null; + } + + return new RelatedEntities( + relationshipType, sourceUrnStr, destinationUrnStr, relationshipDirection); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index c8d3147711eba..a1f73a134ec8e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -17,6 +17,7 @@ import com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.graph.RelatedEntitiesResult; +import com.linkedin.metadata.graph.RelatedEntitiesScrollResult; import com.linkedin.metadata.graph.RelatedEntity; import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.query.filter.Condition; @@ -25,6 +26,7 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.util.Pair; import io.opentelemetry.extension.annotations.WithSpan; @@ -882,4 +884,21 @@ private boolean isSourceDestReversed( return null; } } + + @Nonnull + @Override + public RelatedEntitiesScrollResult scrollRelatedEntities( + @Nullable List sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + throw new IllegalArgumentException("Not implemented"); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index fd7491fe32ea3..7cba2e0ecc8cb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -18,6 +18,9 @@ import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.search.utils.SearchUtils; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.io.IOException; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; @@ -47,6 +50,12 @@ public List buildReindexConfigs() { return indexBuilders.buildReindexConfigs(); } + @Override + public List buildReindexConfigsWithAllStructProps( + Collection properties) throws IOException { + return indexBuilders.buildReindexConfigsWithAllStructProps(properties); + } + @Override public void reindexAll() { configure(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java index 
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java
index 388dcea784cbb..cc6a0f3e3d6f9 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.search.elasticsearch.indexbuilder;

+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder.PROPERTIES;
+
 import com.google.common.collect.ImmutableMap;
 import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
 import com.linkedin.metadata.search.utils.ESUtils;
@@ -22,6 +25,7 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
@@ -125,12 +129,20 @@ public ESIndexBuilder(
   public ReindexConfig buildReindexState(
       String indexName, Map<String, Object> mappings, Map<String, Object> settings)
       throws IOException {
+    return buildReindexState(indexName, mappings, settings, false);
+  }
+
+  public ReindexConfig buildReindexState(
+      String indexName,
+      Map<String, Object> mappings,
+      Map<String, Object> settings,
+      boolean copyStructuredPropertyMappings)
+      throws IOException {
     ReindexConfig.ReindexConfigBuilder builder =
         ReindexConfig.builder()
             .name(indexName)
             .enableIndexSettingsReindex(enableIndexSettingsReindex)
             .enableIndexMappingsReindex(enableIndexMappingsReindex)
-            .targetMappings(mappings)
             .version(gitVersion.getVersion());

     Map<String, Object> baseSettings = new HashMap<>(settings);
@@ -148,6 +160,7 @@ public ReindexConfig buildReindexState(

     // If index doesn't exist, no reindex
     if (!exists) {
+      builder.targetMappings(mappings);
       return builder.build();
     }
@@ -173,6 +186,35 @@
             .getSourceAsMap();
     builder.currentMappings(currentMappings);

+    if (copyStructuredPropertyMappings) {
+      Map<String, Object> currentStructuredProperties =
+          (Map<String, Object>)
+              ((Map<String, Object>)
+                      ((Map<String, Object>)
+                              currentMappings.getOrDefault(PROPERTIES, new TreeMap<>()))
+                          .getOrDefault(STRUCTURED_PROPERTY_MAPPING_FIELD, new TreeMap<>()))
+                  .getOrDefault(PROPERTIES, new TreeMap<>());
+
+      if (!currentStructuredProperties.isEmpty()) {
+        HashMap<String, Map<String, Object>> props =
+            (HashMap<String, Map<String, Object>>)
+                ((Map<String, Object>) mappings.get(PROPERTIES))
+                    .computeIfAbsent(
+                        STRUCTURED_PROPERTY_MAPPING_FIELD,
+                        (key) -> new HashMap<>(Map.of(PROPERTIES, new HashMap<>())));
+
+        props.merge(
+            PROPERTIES,
+            currentStructuredProperties,
+            (targetValue, currentValue) -> {
+              HashMap<String, Object> merged = new HashMap<>(currentValue);
+              merged.putAll(targetValue);
+              return merged.isEmpty() ? null : merged;
+            });
+      }
+    }
+
+    builder.targetMappings(mappings);
     return builder.build();
   }
@@ -251,7 +293,7 @@ public void buildIndex(ReindexConfig indexState) throws IOException {
    * @throws IOException communication issues with ES
    */
   public void applyMappings(ReindexConfig indexState, boolean suppressError) throws IOException {
-    if (indexState.isPureMappingsAddition()) {
+    if (indexState.isPureMappingsAddition() || indexState.isPureStructuredProperty()) {
       log.info("Updating index {} mappings in place.", indexState.name());
       PutMappingRequest request =
           new PutMappingRequest(indexState.name()).source(indexState.targetMappings());
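Editor's note: the merge above preserves structured-property fields already present in the live index while letting the freshly generated target mappings win on key conflicts. A self-contained sketch of the same Map.merge semantics with stand-in values:

    import java.util.HashMap;
    import java.util.Map;

    Map<String, Object> current = new HashMap<>(Map.of("prop_a", "keyword")); // from live index
    Map<String, Object> target = new HashMap<>(Map.of("prop_b", "double"));   // newly generated
    Map<String, Map<String, Object>> props = new HashMap<>(Map.of("properties", target));
    props.merge(
        "properties",
        current,
        (targetValue, currentValue) -> {
          Map<String, Object> merged = new HashMap<>(currentValue);
          merged.putAll(targetValue); // target overrides on conflicts
          return merged.isEmpty() ? null : merged;
        });
    // props.get("properties") now contains both prop_a and prop_b.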
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java
index 4489c661bb2ed..4322ea90edf1f 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java
@@ -3,9 +3,12 @@
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import java.io.IOException;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.stream.Collectors;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
@@ -41,6 +44,24 @@ public List<ReindexConfig> buildReindexConfigs() {
             entitySpec -> {
               try {
                 Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec);
+                return indexBuilder.buildReindexState(
+                    indexConvention.getIndexName(entitySpec), mappings, settings, true);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            })
+        .collect(Collectors.toList());
+  }
+
+  @Override
+  public List<ReindexConfig> buildReindexConfigsWithAllStructProps(
+      Collection<StructuredPropertyDefinition> properties) {
+    Map<String, Object> settings = settingsBuilder.getSettings();
+    return entityRegistry.getEntitySpecs().values().stream()
+        .map(
+            entitySpec -> {
+              try {
+                Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec, properties);
                 return indexBuilder.buildReindexState(
                     indexConvention.getIndexName(entitySpec), mappings, settings);
               } catch (IOException e) {
@@ -49,4 +70,31 @@ public List<ReindexConfig> buildReindexConfigs() {
             })
         .collect(Collectors.toList());
   }
+
+  /**
+   * Given a structured property generate all entity index configurations impacted by it, preserving
+   * existing properties
+   *
+   * @param property the new property
+   * @return index configurations impacted by the new property
+   */
+  public List<ReindexConfig> buildReindexConfigsWithNewStructProp(
+      StructuredPropertyDefinition property) {
+    Map<String, Object> settings = settingsBuilder.getSettings();
+    return entityRegistry.getEntitySpecs().values().stream()
+        .map(
+            entitySpec -> {
+              try {
+                Map<String, Object> mappings =
+                    MappingsBuilder.getMappings(entitySpec, List.of(property));
+                return indexBuilder.buildReindexState(
+                    indexConvention.getIndexName(entitySpec), mappings, settings, true);
+              } catch (IOException e) {
+                throw new RuntimeException(e);
+              }
+            })
+        .filter(Objects::nonNull)
+        .filter(ReindexConfig::hasNewStructuredProperty)
+        .collect(Collectors.toList());
+  }
 }
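Editor's note: the intended use of buildReindexConfigsWithNewStructProp is to apply only the mapping delta for a brand-new property, with no full reindex. A sketch of a hypothetical hook body, mirroring the pattern this patch later adds to UpdateIndicesService ("entityIndexBuilders" and "indexBuilder" are assumed handles):

    for (ReindexConfig state : entityIndexBuilders.buildReindexConfigsWithNewStructProp(property)) {
      try {
        // pure mapping addition: put-mapping in place on the affected index
        indexBuilder.applyMappings(state, false);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }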
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java
index f85a0dcb06a07..79f530f18a345 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java
@@ -1,13 +1,21 @@
 package com.linkedin.metadata.search.elasticsearch.indexbuilder;

+import static com.linkedin.metadata.Constants.ENTITY_TYPE_URN_PREFIX;
+import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_MAPPING_FIELD;
+import static com.linkedin.metadata.models.StructuredPropertyUtils.sanitizeStructuredPropertyFQN;
 import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*;

 import com.google.common.collect.ImmutableMap;
+import com.linkedin.common.urn.Urn;
 import com.linkedin.metadata.models.EntitySpec;
+import com.linkedin.metadata.models.LogicalValueType;
 import com.linkedin.metadata.models.SearchScoreFieldSpec;
 import com.linkedin.metadata.models.SearchableFieldSpec;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType;
 import com.linkedin.metadata.search.utils.ESUtils;
+import com.linkedin.structured.StructuredPropertyDefinition;
+import java.net.URISyntaxException;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -48,6 +56,53 @@ public static Map getPartialNgramConfigWithOverrides(

   private MappingsBuilder() {}

+  /**
+   * Builds mappings from entity spec and a collection of structured properties for the entity.
+   *
+   * @param entitySpec entity's spec
+   * @param structuredProperties structured properties for the entity
+   * @return mappings
+   */
+  public static Map<String, Object> getMappings(
+      @Nonnull final EntitySpec entitySpec,
+      Collection<StructuredPropertyDefinition> structuredProperties) {
+    Map<String, Object> mappings = getMappings(entitySpec);
+
+    String entityName = entitySpec.getEntityAnnotation().getName();
+    Map<String, Object> structuredPropertiesForEntity =
+        getMappingsForStructuredProperty(
+            structuredProperties.stream()
+                .filter(
+                    prop -> {
+                      try {
+                        return prop.getEntityTypes()
+                            .contains(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + entityName));
+                      } catch (URISyntaxException e) {
+                        return false;
+                      }
+                    })
+                .collect(Collectors.toSet()));
+
+    if (!structuredPropertiesForEntity.isEmpty()) {
+      HashMap<String, Map<String, Object>> props =
+          (HashMap<String, Map<String, Object>>)
+              ((Map<String, Object>) mappings.get(PROPERTIES))
+                  .computeIfAbsent(
+                      STRUCTURED_PROPERTY_MAPPING_FIELD,
+                      (key) -> new HashMap<>(Map.of(PROPERTIES, new HashMap<>())));
+
+      props.merge(
+          PROPERTIES,
+          structuredPropertiesForEntity,
+          (oldValue, newValue) -> {
+            HashMap<String, Object> merged = new HashMap<>(oldValue);
+            merged.putAll(newValue);
+            return merged.isEmpty() ? null : merged;
+          });
+    }
+    return mappings;
+  }
+
   public static Map<String, Object> getMappings(@Nonnull final EntitySpec entitySpec) {
     Map<String, Object> mappings = new HashMap<>();
@@ -89,6 +144,30 @@ private static Map<String, Object> getMappingsForRunId() {
     return ImmutableMap.<String, Object>builder().put(TYPE, ESUtils.KEYWORD_FIELD_TYPE).build();
   }

+  public static Map<String, Object> getMappingsForStructuredProperty(
+      Collection<StructuredPropertyDefinition> properties) {
+    return properties.stream()
+        .map(
+            property -> {
+              Map<String, Object> mappingForField = new HashMap<>();
+              String valueType = property.getValueType().getId();
+              if (valueType.equalsIgnoreCase(LogicalValueType.STRING.name())) {
+                mappingForField = getMappingsForKeyword();
+              } else if (valueType.equalsIgnoreCase(LogicalValueType.RICH_TEXT.name())) {
+                mappingForField = getMappingsForSearchText(FieldType.TEXT_PARTIAL);
+              } else if (valueType.equalsIgnoreCase(LogicalValueType.DATE.name())) {
+                mappingForField.put(TYPE, ESUtils.DATE_FIELD_TYPE);
+              } else if (valueType.equalsIgnoreCase(LogicalValueType.URN.name())) {
+                mappingForField = getMappingsForUrn();
+              } else if (valueType.equalsIgnoreCase(LogicalValueType.NUMBER.name())) {
+                mappingForField.put(TYPE, ESUtils.DOUBLE_FIELD_TYPE);
+              }
+              return Map.entry(
+                  sanitizeStructuredPropertyFQN(property.getQualifiedName()), mappingForField);
+            })
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+  }
+
   private static Map<String, Object> getMappingsForField(
       @Nonnull final SearchableFieldSpec searchableFieldSpec) {
     FieldType fieldType = searchableFieldSpec.getSearchableAnnotation().getFieldType();
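Editor's note: getMappingsForStructuredProperty translates each logical value type to an index field type: STRING to keyword, RICH_TEXT to partial text, DATE to date, URN to the urn mapping, NUMBER to double. A sketch of the resulting mapping fragment for one NUMBER-typed property, expressed as a Java map literal; the property name and its sanitized form are illustrative assumptions:

    import java.util.Map;

    Map<String, Object> expected =
        Map.of(
            "structuredProperties",
            Map.of(
                "properties",
                Map.of("io_acryl_retentionDays", Map.of("type", "double"))));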
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java
index e3155c9f943cc..bb6905139f49d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java
@@ -11,6 +11,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 import java.util.TreeMap;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -65,6 +66,8 @@ public class ReindexConfig {
   private final boolean requiresApplyMappings;
   private final boolean isPureMappingsAddition;
   private final boolean isSettingsReindex;
+  private final boolean hasNewStructuredProperty;
+  private final boolean isPureStructuredProperty;

   public static ReindexConfigBuilder builder() {
     return new CalculatedBuilder();
@@ -92,6 +95,14 @@ private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) {
       return this;
     }

+    private ReindexConfigBuilder hasNewStructuredProperty(boolean ignored) {
+      return this;
+    }
+
+    private ReindexConfigBuilder isPureStructuredProperty(boolean ignored) {
+      return this;
+    }
+
     // ensure sorted
     public ReindexConfigBuilder currentMappings(Map<String, Object> currentMappings) {
       this.currentMappings = sortMap(currentMappings);
@@ -141,6 +152,15 @@ public ReindexConfig build() {
         super.requiresApplyMappings =
             !mappingsDiff.entriesDiffering().isEmpty()
                 || !mappingsDiff.entriesOnlyOnRight().isEmpty();
+        super.isPureStructuredProperty =
+            mappingsDiff
+                    .entriesDiffering()
+                    .keySet()
+                    .equals(Set.of(STRUCTURED_PROPERTY_MAPPING_FIELD))
+                || mappingsDiff
+                    .entriesOnlyOnRight()
+                    .keySet()
+                    .equals(Set.of(STRUCTURED_PROPERTY_MAPPING_FIELD));
         super.isPureMappingsAddition =
             super.requiresApplyMappings
                 && mappingsDiff.entriesDiffering().isEmpty()
@@ -157,6 +177,19 @@ public ReindexConfig build() {
               super.name, mappingsDiff.entriesDiffering());
         }
+        super.hasNewStructuredProperty =
+            (mappingsDiff.entriesDiffering().containsKey(STRUCTURED_PROPERTY_MAPPING_FIELD)
+                    || mappingsDiff
+                        .entriesOnlyOnRight()
+                        .containsKey(STRUCTURED_PROPERTY_MAPPING_FIELD))
+                && getOrDefault(
+                            super.currentMappings,
+                            List.of("properties", STRUCTURED_PROPERTY_MAPPING_FIELD, "properties"))
+                        .size()
+                    < getOrDefault(
+                            super.targetMappings,
+                            List.of("properties", STRUCTURED_PROPERTY_MAPPING_FIELD, "properties"))
+                        .size();

         /* Consider analysis and settings changes */
         super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual();
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java
index b35c0258d09f0..0eb44edfb11de 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java
@@ -1,7 +1,7 @@
 package com.linkedin.metadata.search.elasticsearch.query;

 import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
+import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.*;
 import static com.linkedin.metadata.utils.SearchUtil.*;

 import com.codahale.metrics.Timer;
@@ -303,7 +303,7 @@ public AutoCompleteResult autoComplete(
   /**
    * Returns number of documents per field value given the field and filters
    *
-   * @param entityName name of the entity, if null, aggregates over all entities
+   * @param entityNames names of the entities, if null, aggregates over all entities
    * @param field the field name for aggregate
    * @param requestParams filters to apply before aggregating
    * @param limit the number of aggregations to return
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java
index 522c8e510dcf8..0f22b75b69f10 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java
@@ -1,8 +1,10 @@
 package com.linkedin.metadata.search.elasticsearch.query.request;

+import static com.linkedin.metadata.Constants.*;
 import static com.linkedin.metadata.utils.SearchUtil.*;

 import com.linkedin.metadata.config.search.SearchConfiguration;
+import com.linkedin.metadata.models.StructuredPropertyUtils;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation;
 import com.linkedin.metadata.search.utils.ESUtils;
 import java.util.ArrayList;
@@ -72,8 +74,12 @@ private Set<String> getAllFacetFields(final List<SearchableAnnotation> annotatio
   }

   private boolean isValidAggregate(final String inputFacet) {
-    Set<String> facets = Set.of(inputFacet.split(AGGREGATION_SEPARATOR_CHAR));
-    boolean isValid = !facets.isEmpty() && _allFacetFields.containsAll(facets);
+    List<String> facets = List.of(inputFacet.split(AGGREGATION_SEPARATOR_CHAR));
+    boolean isValid =
+        !facets.isEmpty()
+            && ((facets.size() == 1
+                    && facets.get(0).startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD + "."))
+                || _allFacetFields.containsAll(facets));
     if (!isValid) {
       log.warn(
           String.format(
@@ -89,6 +95,13 @@ private AggregationBuilder facetToAggregationBuilder(final String inputFacet) {
     AggregationBuilder lastAggBuilder = null;
     for (int i = facets.size() - 1; i >= 0; i--) {
       String facet = facets.get(i);
+      if (facet.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD + ".")) {
+        String structPropFqn = facet.substring(STRUCTURED_PROPERTY_MAPPING_FIELD.length() + 1);
+        facet =
+            STRUCTURED_PROPERTY_MAPPING_FIELD
+                + "."
+                + StructuredPropertyUtils.sanitizeStructuredPropertyFQN(structPropFqn);
+      }
       AggregationBuilder aggBuilder;
       if (facet.contains(AGGREGATION_SPECIAL_TYPE_DELIMITER)) {
         List<String> specialTypeFields = List.of(facet.split(AGGREGATION_SPECIAL_TYPE_DELIMITER));
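Editor's note: the facet rewrite above normalizes the property's fully qualified name before it is used as an aggregation field. A sketch of the rewrite in isolation; the input facet is illustrative and the exact sanitization rules live in StructuredPropertyUtils.sanitizeStructuredPropertyFQN, whose behavior is not shown in this patch:

    String inputFacet = "structuredProperties.io.acryl.retentionTime"; // hypothetical facet
    String fqn = inputFacet.substring("structuredProperties.".length());
    String rewritten =
        "structuredProperties." + StructuredPropertyUtils.sanitizeStructuredPropertyFQN(fqn);
    // "rewritten" is what the terms aggregation is actually built against.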
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java
index 1fe4a74968e42..452e50a6e8d62 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java
@@ -1,6 +1,6 @@
 package com.linkedin.metadata.search.elasticsearch.query.request;

-import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
+import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.*;

 import java.io.IOException;
 import java.io.Serializable;
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java
index 4d51de39c88e3..05fa6f45fcb30 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java
@@ -93,6 +93,7 @@ public class SearchRequestHandler {
   private final Set<String> _defaultQueryFieldNames;
   private final HighlightBuilder _highlights;
   private final Map<String, String> _filtersToDisplayName;
+  private final SearchConfiguration _configs;
   private final SearchQueryBuilder _searchQueryBuilder;
   private final AggregationQueryBuilder _aggregationQueryBuilder;
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java
index 2a9571b18b726..6cadb39d5970d 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java
@@ -17,7 +17,8 @@ public class Features {
   public enum Name {
     SEARCH_BACKEND_SCORE, // Score returned by search backend
     NUM_ENTITIES_PER_TYPE, // Number of entities per entity type
-    RANK_WITHIN_TYPE; // Rank within the entity type
+    RANK_WITHIN_TYPE,
+    ONLY_MATCH_CUSTOM_PROPERTIES; // Rank within the entity type
   }

   public Double getNumericFeature(Name featureName, double defaultValue) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java
index bfeb993390571..d52a80d685fd5 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.search.transformer;

+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.models.StructuredPropertyUtils.sanitizeStructuredPropertyFQN;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.JsonNodeFactory;
@@ -7,17 +10,26 @@
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.schema.DataSchema;
 import com.linkedin.data.template.RecordTemplate;
-import com.linkedin.entity.client.SystemEntityClient;
+import com.linkedin.entity.Aspect;
+import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever;
+import com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.EntitySpec;
+import com.linkedin.metadata.models.LogicalValueType;
 import com.linkedin.metadata.models.SearchScoreFieldSpec;
 import com.linkedin.metadata.models.SearchableFieldSpec;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType;
 import com.linkedin.metadata.models.extractor.FieldExtractor;
+import com.linkedin.r2.RemoteInvocationException;
+import com.linkedin.structured.StructuredProperties;
+import com.linkedin.structured.StructuredPropertyDefinition;
+import com.linkedin.structured.StructuredPropertyValueAssignment;
+import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 import lombok.RequiredArgsConstructor;
@@ -41,7 +53,7 @@ public class SearchDocumentTransformer {
   // Maximum customProperties value length
   private final int maxValueLength;

-  private SystemEntityClient entityClient;
+  private AspectRetriever aspectRetriever;

   private static final String BROWSE_PATH_V2_DELIMITER = "␟";
@@ -77,7 +89,8 @@ public Optional transformAspect(
       final Urn urn,
       final RecordTemplate aspect,
       final AspectSpec aspectSpec,
-      final Boolean forDelete) {
+      final Boolean forDelete)
+      throws RemoteInvocationException, URISyntaxException {
     final Map<SearchableFieldSpec, List<Object>> extractedSearchableFields =
         FieldExtractor.extractFields(aspect, aspectSpec.getSearchableFieldSpecs(), maxValueLength);
     final Map<SearchScoreFieldSpec, List<Object>> extractedSearchScoreFields =
@@ -93,6 +106,12 @@ public Optional transformAspect(
       extractedSearchScoreFields.forEach(
           (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete));
       result = Optional.of(searchDocument.toString());
+    } else if (STRUCTURED_PROPERTIES_ASPECT_NAME.equals(aspectSpec.getName())) {
+      final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode();
+      searchDocument.put("urn", urn.toString());
+      setStructuredPropertiesSearchValue(
+          new StructuredProperties(aspect.data()), searchDocument, forDelete);
+      result = Optional.of(searchDocument.toString());
     }

     return result;
@@ -277,4 +296,93 @@ private String getBrowsePathV2Value(@Nonnull final List<String> fieldValues) {
     }
     return aggregatedValue;
   }
+
+  private void setStructuredPropertiesSearchValue(
+      final StructuredProperties values, final ObjectNode searchDocument, final Boolean forDelete)
+      throws RemoteInvocationException, URISyntaxException {
+    Map<Urn, Set<StructuredPropertyValueAssignment>> propertyMap =
+        values.getProperties().stream()
+            .collect(
+                Collectors.groupingBy(
+                    StructuredPropertyValueAssignment::getPropertyUrn, Collectors.toSet()));
+
+    Map<Urn, Map<String, Aspect>> definitions =
+        aspectRetriever.getLatestAspectObjects(
+            propertyMap.keySet(), Set.of(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME));
+
+    if (definitions.size() < propertyMap.size()) {
+      String message =
+          String.format(
+              "Missing property definitions. %s",
+              propertyMap.keySet().stream()
+                  .filter(k -> !definitions.containsKey(k))
+                  .collect(Collectors.toSet()));
+      log.error(message);
+    }
+
+    propertyMap
+        .entrySet()
+        .forEach(
+            propertyEntry -> {
+              StructuredPropertyDefinition definition =
+                  new StructuredPropertyDefinition(
+                      definitions
+                          .get(propertyEntry.getKey())
+                          .get(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME)
+                          .data());
+              String fieldName =
+                  String.join(
+                      ".",
+                      List.of(
+                          STRUCTURED_PROPERTY_MAPPING_FIELD,
+                          sanitizeStructuredPropertyFQN(definition.getQualifiedName())));
+
+              if (forDelete) {
+                searchDocument.set(fieldName, JsonNodeFactory.instance.nullNode());
+              } else {
+                LogicalValueType logicalValueType =
+                    StructuredPropertiesValidator.getLogicalValueType(definition.getValueType());
+
+                ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();
+
+                propertyEntry
+                    .getValue()
+                    .forEach(
+                        property ->
+                            property
+                                .getValues()
+                                .forEach(
+                                    propertyValue -> {
+                                      final Optional<JsonNode> searchValue;
+                                      switch (logicalValueType) {
+                                        case UNKNOWN:
+                                          log.warn(
+                                              "Unable to transform UNKNOWN logical value type.");
+                                          searchValue = Optional.empty();
+                                          break;
+                                        case NUMBER:
+                                          Double doubleValue =
+                                              propertyValue.getDouble() != null
+                                                  ? propertyValue.getDouble()
+                                                  : Double.valueOf(propertyValue.getString());
+                                          searchValue =
+                                              Optional.of(
+                                                  JsonNodeFactory.instance.numberNode(doubleValue));
+                                          break;
+                                        default:
+                                          searchValue =
+                                              propertyValue.getString().isEmpty()
+                                                  ? Optional.empty()
+                                                  : Optional.of(
+                                                      JsonNodeFactory.instance.textNode(
+                                                          propertyValue.getString()));
+                                          break;
+                                      }
+                                      searchValue.ifPresent(arrayNode::add);
+                                    }));
+
+                searchDocument.set(fieldName, arrayNode);
+              }
+            });
+  }
 }
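Editor's note: the transformer above emits one flat, dotted field per structured property on the search document. A sketch of the resulting JSON shape built with the same Jackson calls; the urn, the sanitized field name, and the numeric value are all illustrative:

    import com.fasterxml.jackson.databind.node.ArrayNode;
    import com.fasterxml.jackson.databind.node.JsonNodeFactory;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    ObjectNode doc = JsonNodeFactory.instance.objectNode();
    doc.put("urn", "urn:li:dataset:(urn:li:dataPlatform:hive,db.tbl,PROD)");
    ArrayNode values = JsonNodeFactory.instance.arrayNode().add(90.0);
    // dotted key; the index mapping interprets it as a nested field
    doc.set("structuredProperties.io_acryl_retentionDays", values);
    // doc.toString() is what gets written to the entity search index.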
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java
index 982b5c8d5f367..aa854149de43a 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.search.utils;

+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*;
 import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS;
 import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS;
 import static com.linkedin.metadata.search.utils.SearchUtils.isUrn;
@@ -8,6 +10,7 @@
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.SearchableFieldSpec;
+import com.linkedin.metadata.models.StructuredPropertyUtils;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation;
 import com.linkedin.metadata.query.filter.Condition;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
@@ -97,6 +100,7 @@ public class ESUtils {
         }
       };

+  // TODO - This has been expanded for has* in another branch
   public static final Set<String> BOOLEAN_FIELDS = ImmutableSet.of("removed");

   /*
@@ -203,6 +207,9 @@ public static BoolQueryBuilder buildConjunctiveFilterQuery(
   public static QueryBuilder getQueryBuilderFromCriterion(
       @Nonnull final Criterion criterion, boolean isTimeseries) {
     final String fieldName = toFacetField(criterion.getField());
+    if (fieldName.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD)) {
+      criterion.setField(fieldName);
+    }

     /*
      * Check the field-name for a "sibling" field, or one which should ALWAYS
@@ -260,46 +267,69 @@ public static void buildSortOrder(
       @Nonnull SearchSourceBuilder searchSourceBuilder,
       @Nullable SortCriterion sortCriterion,
       List<EntitySpec> entitySpecs) {
-    if (sortCriterion == null) {
+    buildSortOrder(
+        searchSourceBuilder,
+        sortCriterion == null ? List.of() : List.of(sortCriterion),
+        entitySpecs,
+        true);
+  }
+
+  /**
+   * Allow disabling default sort, used when you know uniqueness is present without urn field. For
+   * example, edge indices where the unique constraint is determined by multiple fields (src urn,
+   * dst urn, relation type).
+   *
+   * @param enableDefaultSort enable/disable default sorting logic
+   */
+  public static void buildSortOrder(
+      @Nonnull SearchSourceBuilder searchSourceBuilder,
+      @Nonnull List<SortCriterion> sortCriterion,
+      List<EntitySpec> entitySpecs,
+      boolean enableDefaultSort) {
+    if (sortCriterion.isEmpty() && enableDefaultSort) {
       searchSourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC));
     } else {
-      Optional<FieldType> fieldTypeForDefault = Optional.empty();
-      for (EntitySpec entitySpec : entitySpecs) {
-        List<SearchableFieldSpec> fieldSpecs = entitySpec.getSearchableFieldSpecs();
-        for (SearchableFieldSpec fieldSpec : fieldSpecs) {
-          SearchableAnnotation annotation = fieldSpec.getSearchableAnnotation();
-          if (annotation.getFieldName().equals(sortCriterion.getField())
-              || annotation.getFieldNameAliases().contains(sortCriterion.getField())) {
-            fieldTypeForDefault = Optional.of(fieldSpec.getSearchableAnnotation().getFieldType());
+      for (SortCriterion sortCriteria : sortCriterion) {
+        Optional<FieldType> fieldTypeForDefault = Optional.empty();
+        for (EntitySpec entitySpec : entitySpecs) {
+          List<SearchableFieldSpec> fieldSpecs = entitySpec.getSearchableFieldSpecs();
+          for (SearchableFieldSpec fieldSpec : fieldSpecs) {
+            SearchableAnnotation annotation = fieldSpec.getSearchableAnnotation();
+            if (annotation.getFieldName().equals(sortCriteria.getField())
+                || annotation.getFieldNameAliases().contains(sortCriteria.getField())) {
+              fieldTypeForDefault = Optional.of(fieldSpec.getSearchableAnnotation().getFieldType());
+              break;
+            }
+          }
+          if (fieldTypeForDefault.isPresent()) {
             break;
           }
         }
-        if (fieldTypeForDefault.isPresent()) {
-          break;
-        }
-      }
-      if (fieldTypeForDefault.isEmpty()) {
-        log.warn(
-            "Sort criterion field "
-                + sortCriterion.getField()
-                + " was not found in any entity spec to be searched");
-      }
-      final SortOrder esSortOrder =
-          (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING)
-              ? SortOrder.ASC
-              : SortOrder.DESC;
-      FieldSortBuilder sortBuilder =
-          new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder);
-      if (fieldTypeForDefault.isPresent()) {
-        String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get());
-        if (esFieldtype != null) {
-          sortBuilder.unmappedType(esFieldtype);
+        if (fieldTypeForDefault.isEmpty()) {
+          log.warn(
+              "Sort criterion field "
+                  + sortCriteria.getField()
+                  + " was not found in any entity spec to be searched");
         }
+        final SortOrder esSortOrder =
+            (sortCriteria.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING)
+                ? SortOrder.ASC
+                : SortOrder.DESC;
+        FieldSortBuilder sortBuilder =
+            new FieldSortBuilder(sortCriteria.getField()).order(esSortOrder);
+        if (fieldTypeForDefault.isPresent()) {
+          String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get());
+          if (esFieldtype != null) {
+            sortBuilder.unmappedType(esFieldtype);
+          }
+        }
+        searchSourceBuilder.sort(sortBuilder);
       }
-      searchSourceBuilder.sort(sortBuilder);
     }
-    if (sortCriterion == null
-        || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) {
+    if (enableDefaultSort
+        && (sortCriterion.isEmpty()
+            || sortCriterion.stream()
+                .noneMatch(c -> c.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)))) {
       searchSourceBuilder.sort(
           new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC));
     }
@@ -335,7 +365,15 @@ public static String escapeReservedCharacters(@Nonnull String input) {

   @Nonnull
   public static String toFacetField(@Nonnull final String filterField) {
-    return filterField.replace(ESUtils.KEYWORD_SUFFIX, "");
+    String fieldName = filterField;
+    if (fieldName.startsWith(STRUCTURED_PROPERTY_MAPPING_FIELD + ".")) {
+      fieldName =
+          STRUCTURED_PROPERTY_MAPPING_FIELD
+              + "."
+              + StructuredPropertyUtils.sanitizeStructuredPropertyFQN(
+                  fieldName.substring(STRUCTURED_PROPERTY_MAPPING_FIELD.length() + 1));
+    }
+    return fieldName.replace(ESUtils.KEYWORD_SUFFIX, "");
   }

   @Nonnull
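Editor's note: the new buildSortOrder overload accepts several sort criteria and can suppress the default score/urn tiebreaker for edge indices whose uniqueness spans multiple fields. A usage sketch; the field name is illustrative and the opensearch builder comes from the surrounding code:

    import com.linkedin.metadata.query.filter.SortCriterion;
    import org.opensearch.search.builder.SearchSourceBuilder;

    SearchSourceBuilder source = new SearchSourceBuilder();
    ESUtils.buildSortOrder(
        source,
        List.of(
            new SortCriterion()
                .setField("source.urn")
                .setOrder(com.linkedin.metadata.query.filter.SortOrder.ASCENDING)),
        List.of(),
        false); // no implicit _score or urn sort is appended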
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java
index 247d542604da7..1f39a3947c47a 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java
@@ -10,15 +10,16 @@
 import com.linkedin.common.InputField;
 import com.linkedin.common.InputFields;
 import com.linkedin.common.Status;
+import com.linkedin.common.UrnArray;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.dataset.FineGrainedLineage;
 import com.linkedin.dataset.UpstreamLineage;
-import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.aspect.batch.MCLBatchItem;
+import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever;
 import com.linkedin.metadata.entity.ebean.batch.MCLBatchItemImpl;
 import com.linkedin.metadata.graph.Edge;
 import com.linkedin.metadata.graph.GraphIndexUtils;
@@ -43,6 +44,7 @@
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import com.linkedin.mxe.MetadataChangeLog;
 import com.linkedin.mxe.SystemMetadata;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import com.linkedin.util.Pair;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
@@ -70,11 +72,11 @@ public class UpdateIndicesService {
   private final EntitySearchService _entitySearchService;
   private final TimeseriesAspectService _timeseriesAspectService;
   private final SystemMetadataService _systemMetadataService;
-  private final EntityRegistry _entityRegistry;
   private final SearchDocumentTransformer _searchDocumentTransformer;
   private final EntityIndexBuilders _entityIndexBuilders;

-  private SystemEntityClient systemEntityClient;
+  private AspectRetriever aspectRetriever;
+  private EntityRegistry _entityRegistry;

   @Value("${featureFlags.graphServiceDiffModeEnabled:true}")
   private boolean _graphDiffMode;
@@ -82,6 +84,12 @@ public class UpdateIndicesService {
   @Value("${featureFlags.searchServiceDiffModeEnabled:true}")
   private boolean _searchDiffMode;

+  @Value("${structuredProperties.enabled}")
+  private boolean _structuredPropertiesHookEnabled;
+
+  @Value("${structuredProperties.writeEnabled}")
+  private boolean _structuredPropertiesWriteEnabled;
+
   private static final Set<ChangeType> UPDATE_CHANGE_TYPES =
       ImmutableSet.of(ChangeType.UPSERT, ChangeType.RESTATE, ChangeType.PATCH);
@@ -100,31 +108,26 @@ public UpdateIndicesService(
       EntitySearchService entitySearchService,
       TimeseriesAspectService timeseriesAspectService,
       SystemMetadataService systemMetadataService,
-      EntityRegistry entityRegistry,
       SearchDocumentTransformer searchDocumentTransformer,
       EntityIndexBuilders entityIndexBuilders) {
     _graphService = graphService;
     _entitySearchService = entitySearchService;
     _timeseriesAspectService = timeseriesAspectService;
     _systemMetadataService = systemMetadataService;
-    _entityRegistry = entityRegistry;
     _searchDocumentTransformer = searchDocumentTransformer;
     _entityIndexBuilders = entityIndexBuilders;
   }

   public void handleChangeEvent(@Nonnull final MetadataChangeLog event) {
     try {
-      MCLBatchItemImpl batch =
-          MCLBatchItemImpl.builder().build(event, _entityRegistry, systemEntityClient);
+      MCLBatchItemImpl batch = MCLBatchItemImpl.builder().build(event, aspectRetriever);

       Stream<MCLBatchItem> sideEffects =
           _entityRegistry
               .getMCLSideEffects(
                   event.getChangeType(), event.getEntityType(), event.getAspectName())
               .stream()
-              .flatMap(
-                  mclSideEffect ->
-                      mclSideEffect.apply(List.of(batch), _entityRegistry, systemEntityClient));
+              .flatMap(mclSideEffect -> mclSideEffect.apply(List.of(batch), aspectRetriever));

       for (MCLBatchItem mclBatchItem : Stream.concat(Stream.of(batch), sideEffects).toList()) {
         MetadataChangeLog hookEvent = mclBatchItem.getMetadataChangeLog();
@@ -173,11 +176,14 @@ private void handleUpdateChangeEvent(@Nonnull final MCLBatchItem event) throws I
       updateSystemMetadata(event.getSystemMetadata(), urn, aspectSpec, aspect);
     }

-    // Step 1. For all aspects, attempt to update Search
+    // Step 1. Handle StructuredProperties Index Mapping changes
+    updateIndexMappings(entitySpec, aspectSpec, aspect, previousAspect);
+
+    // Step 2. For all aspects, attempt to update Search
     updateSearchService(
         entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect);
-    // Step 2. For all aspects, attempt to update Graph
+    // Step 3. For all aspects, attempt to update Graph
     SystemMetadata systemMetadata = event.getSystemMetadata();
     if (_graphDiffMode
         && !(_graphService instanceof DgraphGraphService)
@@ -190,6 +196,46 @@ private void handleUpdateChangeEvent(@Nonnull final MCLBatchItem event) throws I
     }
   }

+  public void updateIndexMappings(
+      EntitySpec entitySpec,
+      AspectSpec aspectSpec,
+      RecordTemplate newValue,
+      RecordTemplate oldValue)
+      throws IOException {
+    if (_structuredPropertiesHookEnabled
+        && STRUCTURED_PROPERTY_ENTITY_NAME.equals(entitySpec.getName())
+        && STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME.equals(aspectSpec.getName())) {
+
+      UrnArray oldEntityTypes =
+          Optional.ofNullable(oldValue)
+              .map(
+                  recordTemplate ->
+                      new StructuredPropertyDefinition(recordTemplate.data()).getEntityTypes())
+              .orElse(new UrnArray());
+
+      StructuredPropertyDefinition newDefinition =
+          new StructuredPropertyDefinition(newValue.data());
+      newDefinition.getEntityTypes().removeAll(oldEntityTypes);
+
+      if (newDefinition.getEntityTypes().size() > 0) {
+        _entityIndexBuilders
+            .buildReindexConfigsWithNewStructProp(newDefinition)
+            .forEach(
+                reindexState -> {
+                  try {
+                    log.info(
+                        "Applying new structured property {} to index {}",
+                        newDefinition,
+                        reindexState.name());
+                    _entityIndexBuilders.getIndexBuilder().applyMappings(reindexState, false);
+                  } catch (IOException e) {
+                    throw new RuntimeException(e);
+                  }
+                });
+      }
+    }
+  }
+
   /**
    * This very important method processes {@link MetadataChangeLog} deletion events to cleanup the
    * Metadata Graph when an aspect or entity is removed.
@@ -617,13 +663,13 @@ private EntitySpec getEventEntitySpec(@Nonnull final MetadataChangeLog event) {
   }

   /**
-   * Allow internal use of the system entity client. Solves recursive dependencies between the
-   * UpdateIndicesService and the SystemJavaEntityClient
+   * Solves recursive dependencies between the UpdateIndicesService and EntityService
    *
-   * @param systemEntityClient system entity client
+   * @param aspectRetriever aspect Retriever
    */
-  public void setSystemEntityClient(SystemEntityClient systemEntityClient) {
-    this.systemEntityClient = systemEntityClient;
-    _searchDocumentTransformer.setEntityClient(systemEntityClient);
+  public void initializeAspectRetriever(AspectRetriever aspectRetriever) {
+    this.aspectRetriever = aspectRetriever;
+    this._entityRegistry = aspectRetriever.getEntityRegistry();
+    this._searchDocumentTransformer.setAspectRetriever(aspectRetriever);
   }
 }
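Editor's note: updateIndexMappings only reindexes the delta when a property definition gains entity types. Restating that delta computation in isolation, using only types the diff itself imports (newValue and oldValue stand for the new and previous aspect records):

    import com.linkedin.common.UrnArray;
    import com.linkedin.structured.StructuredPropertyDefinition;

    UrnArray oldTypes =
        oldValue == null
            ? new UrnArray()
            : new StructuredPropertyDefinition(oldValue.data()).getEntityTypes();
    StructuredPropertyDefinition def = new StructuredPropertyDefinition(newValue.data());
    def.getEntityTypes().removeAll(oldTypes); // leaves only the newly added entity types
    // Only if any remain does the hook compute and apply new index mappings.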
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java
index 9aa0cdca99f68..e894558e3d1af 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java
@@ -1,7 +1,9 @@
 package com.linkedin.metadata.shared;

 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import java.io.IOException;
+import java.util.Collection;
 import java.util.List;

 public interface ElasticSearchIndexed {
@@ -12,6 +14,15 @@ public interface ElasticSearchIndexed {
    */
   List<ReindexConfig> buildReindexConfigs() throws IOException;

+  /**
+   * The index configurations for the given service with StructuredProperties applied.
+   *
+   * @param properties The structured properties to apply to the index mappings
+   * @return List of reindex configurations
+   */
+  List<ReindexConfig> buildReindexConfigsWithAllStructProps(
+      Collection<StructuredPropertyDefinition> properties) throws IOException;
+
   /**
    * Mirrors the service's functions which are expected to build/reindex as needed based on the
    * reindex configurations above
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java
index 6fbe7cfe882ce..36eab7b69e6a1 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java
@@ -13,12 +13,14 @@
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
 import com.linkedin.mxe.SystemMetadata;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 import java.util.Base64;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -245,6 +247,12 @@ public List<ReindexConfig> buildReindexConfigs() throws IOException {
             Collections.emptyMap()));
   }

+  @Override
+  public List<ReindexConfig> buildReindexConfigsWithAllStructProps(
+      Collection<StructuredPropertyDefinition> properties) throws IOException {
+    return buildReindexConfigs();
+  }
+
   @Override
   public void reindexAll() {
     configure();
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java
index f9ab86d41335d..71ffd603c999f 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java
@@ -20,12 +20,15 @@
 import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.filter.SortCriterion;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig;
+import com.linkedin.metadata.search.elasticsearch.query.request.SearchAfterWrapper;
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import com.linkedin.metadata.search.utils.ESUtils;
 import com.linkedin.metadata.search.utils.QueryUtils;
 import com.linkedin.metadata.shared.ElasticSearchIndexed;
 import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions;
+import com.linkedin.metadata.timeseries.GenericTimeseriesDocument;
 import com.linkedin.metadata.timeseries.TimeseriesAspectService;
+import com.linkedin.metadata.timeseries.TimeseriesScrollResult;
 import com.linkedin.metadata.timeseries.elastic.indexbuilder.MappingsBuilder;
 import com.linkedin.metadata.timeseries.elastic.indexbuilder.TimeseriesAspectIndexBuilders;
 import com.linkedin.metadata.timeseries.elastic.query.ESAggregatedStatsDAO;
@@ -33,6 +36,7 @@
 import com.linkedin.metadata.utils.metrics.MetricUtils;
 import com.linkedin.mxe.GenericAspect;
 import com.linkedin.mxe.SystemMetadata;
+import com.linkedin.structured.StructuredPropertyDefinition;
 import com.linkedin.timeseries.AggregationSpec;
com.linkedin.timeseries.DeleteAspectValuesResult; import com.linkedin.timeseries.GenericTable; @@ -43,9 +47,11 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -86,8 +92,6 @@ public class ElasticSearchTimeseriesAspectService .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } - private static final String TIMESTAMP_FIELD = "timestampMillis"; - private static final String EVENT_FIELD = "event"; private static final Integer DEFAULT_LIMIT = 10000; private final IndexConvention _indexConvention; @@ -118,7 +122,7 @@ public ElasticSearchTimeseriesAspectService( private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { Map<String, Object> docFields = doc.getSourceAsMap(); EnvelopedAspect envelopedAspect = new EnvelopedAspect(); - Object event = docFields.get(EVENT_FIELD); + Object event = docFields.get(MappingsBuilder.EVENT_FIELD); GenericAspect genericAspect; try { genericAspect = @@ -147,6 +151,61 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { return envelopedAspect; } + private static Set<String> commonFields = + Set.of( + MappingsBuilder.URN_FIELD, + MappingsBuilder.RUN_ID_FIELD, + MappingsBuilder.EVENT_GRANULARITY, + MappingsBuilder.IS_EXPLODED_FIELD, + MappingsBuilder.MESSAGE_ID_FIELD, + MappingsBuilder.PARTITION_SPEC_PARTITION, + MappingsBuilder.PARTITION_SPEC, + MappingsBuilder.SYSTEM_METADATA_FIELD, + MappingsBuilder.TIMESTAMP_MILLIS_FIELD, + MappingsBuilder.TIMESTAMP_FIELD, + MappingsBuilder.EVENT_FIELD); + + private static Pair<EnvelopedAspect, GenericTimeseriesDocument> toEnvAspectGenericDocument( + @Nonnull SearchHit doc) { + EnvelopedAspect envelopedAspect = null; + + Map<String, Object> documentFieldMap = doc.getSourceAsMap(); + + GenericTimeseriesDocument.GenericTimeseriesDocumentBuilder builder = + GenericTimeseriesDocument.builder() + .urn((String) documentFieldMap.get(MappingsBuilder.URN_FIELD)) + .timestampMillis((Long) documentFieldMap.get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)) + .timestamp((Long) documentFieldMap.get(MappingsBuilder.TIMESTAMP_FIELD)); + + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.RUN_ID_FIELD)) + .ifPresent(d -> builder.runId((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.EVENT_GRANULARITY)) + .ifPresent(d -> builder.eventGranularity((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.IS_EXPLODED_FIELD)) + .ifPresent(d -> builder.isExploded((Boolean) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.MESSAGE_ID_FIELD)) + .ifPresent(d -> builder.messageId((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.PARTITION_SPEC_PARTITION)) + .ifPresent(d -> builder.partition((String) d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.PARTITION_SPEC)) + .ifPresent(d -> builder.partitionSpec(d)); + Optional.ofNullable(documentFieldMap.get(MappingsBuilder.SYSTEM_METADATA_FIELD)) + .ifPresent(d -> builder.systemMetadata(d)); + + if (documentFieldMap.get(MappingsBuilder.EVENT_FIELD) != null) { + envelopedAspect = parseDocument(doc); + builder.event(documentFieldMap.get(MappingsBuilder.EVENT_FIELD)); + } else { + // If no event, the event is any non-common field + builder.event( + documentFieldMap.entrySet().stream() + .filter(entry -> !commonFields.contains(entry.getKey())) +
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + } + + return Pair.of(envelopedAspect, builder.build()); + } + @Override public void configure() { _indexBuilders.reindexAll(); @@ -157,6 +216,12 @@ public List<ReindexConfig> buildReindexConfigs() { return _indexBuilders.buildReindexConfigs(); } + @Override + public List<ReindexConfig> buildReindexConfigsWithAllStructProps( + Collection<StructuredPropertyDefinition> properties) throws IOException { + return _indexBuilders.buildReindexConfigsWithAllStructProps(properties); + } + public String reindexAsync( String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) throws Exception { @@ -256,7 +321,7 @@ public List<EnvelopedAspect> getAspectValues( if (startTimeMillis != null) { Criterion startTimeCriterion = new Criterion() - .setField(TIMESTAMP_FIELD) + .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) .setValue(startTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); @@ -264,7 +329,7 @@ public List<EnvelopedAspect> getAspectValues( if (endTimeMillis != null) { Criterion endTimeCriterion = new Criterion() - .setField(TIMESTAMP_FIELD) + .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) .setValue(endTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); @@ -421,4 +486,88 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId) return rollbackResult; } + + @Nonnull + @Override + public TimeseriesScrollResult scrollAspects( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nullable Filter filter, + @Nonnull List<SortCriterion> sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + final BoolQueryBuilder filterQueryBuilder = + QueryBuilders.boolQuery().filter(ESUtils.buildFilterQuery(filter, true)); + + if (startTimeMillis != null) { + Criterion startTimeCriterion = + new Criterion() + .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(startTimeMillis.toString()); + filterQueryBuilder.filter(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); + } + if (endTimeMillis != null) { + Criterion endTimeCriterion = + new Criterion() + .setField(MappingsBuilder.TIMESTAMP_MILLIS_FIELD) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(endTimeMillis.toString()); + filterQueryBuilder.filter(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); + } + + SearchResponse response = + executeScrollSearchQuery( + entityName, aspectName, filterQueryBuilder, sortCriterion, scrollId, count); + int totalCount = (int) response.getHits().getTotalHits().value; + + List<Pair<EnvelopedAspect, GenericTimeseriesDocument>> resultPairs = + Arrays.stream(response.getHits().getHits()) + .map(ElasticSearchTimeseriesAspectService::toEnvAspectGenericDocument) + .toList(); + + return TimeseriesScrollResult.builder() + .numResults(totalCount) + .pageSize(response.getHits().getHits().length) + .events(resultPairs.stream().map(Pair::getFirst).collect(Collectors.toList())) + .documents(resultPairs.stream().map(Pair::getSecond).collect(Collectors.toList())) + .build(); + } + + private SearchResponse executeScrollSearchQuery( + @Nonnull final String entityName, + @Nonnull final String aspectName, + @Nonnull final QueryBuilder query, + @Nonnull List<SortCriterion> sortCriterion, + @Nullable String scrollId, + final int count) { + + Object[] sort = null; + if (scrollId != null) { +
SearchAfterWrapper searchAfterWrapper = SearchAfterWrapper.fromScrollId(scrollId); + sort = searchAfterWrapper.getSort(); + } + + SearchRequest searchRequest = new SearchRequest(); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + + searchSourceBuilder.size(count); + searchSourceBuilder.query(query); + ESUtils.buildSortOrder(searchSourceBuilder, sortCriterion, List.of(), false); + searchRequest.source(searchSourceBuilder); + ESUtils.setSearchAfter(searchSourceBuilder, sort, null, null); + + searchRequest.indices(_indexConvention.getTimeseriesAspectIndexName(entityName, aspectName)); + + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "scrollAspects_search").time()) { + return _searchClient.search(searchRequest, RequestOptions.DEFAULT); + } catch (Exception e) { + log.error("Search query failed", e); + throw new ESQueryException("Search query failed:", e); + } + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java index 564bcb2a242cb..6437bbc390d82 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java @@ -7,8 +7,10 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.structured.StructuredPropertyDefinition; import com.linkedin.util.Pair; import java.io.IOException; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; @@ -91,4 +93,10 @@ public List<ReindexConfig> buildReindexConfigs() { }) .collect(Collectors.toList()); } + + @Override + public List<ReindexConfig> buildReindexConfigsWithAllStructProps( + Collection<StructuredPropertyDefinition> properties) throws IOException { + return buildReindexConfigs(); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java index 252ac2d633b98..451b732722498 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java @@ -41,7 +41,7 @@ public static Map<Urn, CorpUserKey> ingestCorpUserKeyAspects( .aspect(aspect) .auditStamp(AspectGenerationUtils.createAuditStamp()) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) - .build(entityService.getEntityRegistry(), entityService.getSystemEntityClient())); + .build(entityService)); } entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); return aspects; @@ -71,7 +71,7 @@ public static Map<Urn, CorpUserInfo> ingestCorpUserInfoAspects( .aspect(aspect) .auditStamp(AspectGenerationUtils.createAuditStamp()) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) - .build(entityService.getEntityRegistry(), entityService.getSystemEntityClient())); + .build(entityService)); } entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); return aspects; @@ -102,7 +102,7 @@ public static Map<Urn, ChartInfo> ingestChartInfoAspects( .aspect(aspect) .auditStamp(AspectGenerationUtils.createAuditStamp()) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) -
.build(entityService.getEntityRegistry(), entityService.getSystemEntityClient())); + .build(entityService)); } entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); return aspects; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java index fba11f24f4c44..5a4443904e260 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java @@ -6,7 +6,6 @@ import com.codahale.metrics.Counter; import com.linkedin.data.template.RequiredFieldNotPresentException; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.event.EventProducer; @@ -14,6 +13,7 @@ import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.client.CachingEntitySearchService; +import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; import java.util.function.Supplier; @@ -32,8 +32,8 @@ public class JavaEntityClientTest { private LineageSearchService _lineageSearchService; private TimeseriesAspectService _timeseriesAspectService; private EventProducer _eventProducer; - private RestliEntityClient _restliEntityClient; private MockedStatic _metricUtils; + private RollbackService rollbackService; private Counter _counter; @BeforeMethod @@ -45,8 +45,8 @@ public void setupTest() { _searchService = mock(SearchService.class); _lineageSearchService = mock(LineageSearchService.class); _timeseriesAspectService = mock(TimeseriesAspectService.class); + rollbackService = mock(RollbackService.class); _eventProducer = mock(EventProducer.class); - _restliEntityClient = mock(RestliEntityClient.class); _metricUtils = mockStatic(MetricUtils.class); _counter = mock(Counter.class); when(MetricUtils.counter(any(), any())).thenReturn(_counter); @@ -66,8 +66,8 @@ private JavaEntityClient getJavaEntityClient() { _searchService, _lineageSearchService, _timeseriesAspectService, - _eventProducer, - _restliEntityClient); + rollbackService, + _eventProducer); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java index 45e992576676d..c45306e5f022b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java @@ -124,21 +124,21 @@ public void testIngestListLatestAspects() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) .aspect(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) .aspect(writeAspect3) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, 
_entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // List aspects @@ -193,21 +193,21 @@ public void testIngestListUrns() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) .aspect(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) .aspect(writeAspect3) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // List aspects urns @@ -451,13 +451,7 @@ public void run() { auditStamp.setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)); auditStamp.setTime(System.currentTimeMillis()); AspectsBatchImpl batch = - AspectsBatchImpl.builder() - .mcps( - mcps, - auditStamp, - entityService.getEntityRegistry(), - entityService.getSystemEntityClient()) - .build(); + AspectsBatchImpl.builder().mcps(mcps, auditStamp, entityService).build(); entityService.ingestProposal(batch, false); } } catch (InterruptedException | URISyntaxException ie) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java index ee21b56cea7c0..db749f3575a06 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -12,6 +12,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; +import com.linkedin.common.UrnArray; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.TupleKey; @@ -29,6 +30,7 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.AspectGenerationUtils; @@ -58,6 +60,12 @@ import com.linkedin.retention.DataHubRetentionConfig; import com.linkedin.retention.Retention; import com.linkedin.retention.VersionBasedRetention; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; import com.linkedin.util.Pair; import jakarta.annotation.Nonnull; import java.util.ArrayList; @@ -67,6 +75,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; @@ -847,28 +857,28 @@ public void testRollbackAspect() throws 
AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) .aspect(writeAspect2) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) .aspect(writeAspect3) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn1) .aspectName(aspectName) .aspect(writeAspect1Overwrite) .systemMetadata(metadata2) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // this should no-op since this run has been overwritten @@ -926,21 +936,21 @@ public void testRollbackKey() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn1) .aspectName(keyAspectName) .aspect(writeKey1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn1) .aspectName(aspectName) .aspect(writeAspect1Overwrite) .systemMetadata(metadata2) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // this should no-op since the key should have been written in the first run @@ -1006,35 +1016,35 @@ public void testRollbackUrn() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn1) .aspectName(keyAspectName) .aspect(writeKey1) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn2) .aspectName(aspectName) .aspect(writeAspect2) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn3) .aspectName(aspectName) .aspect(writeAspect3) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn1) .aspectName(aspectName) .aspect(writeAspect1Overwrite) .systemMetadata(metadata2) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // this should no-op since the key should have been written in the first run @@ -1073,7
+1083,7 @@ public void testIngestGetLatestAspect() throws AssertionError { .aspect(writeAspect1) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #1 @@ -1104,7 +1114,7 @@ public void testIngestGetLatestAspect() throws AssertionError { .aspect(writeAspect2) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata2) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #2 @@ -1150,7 +1160,7 @@ public void testIngestGetLatestEnvelopedAspect() throws Exception { .aspect(writeAspect1) .auditStamp(TEST_AUDIT_STAMP) .systemMetadata(metadata1) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #1 @@ -1170,7 +1180,7 @@ public void testIngestGetLatestEnvelopedAspect() throws Exception { .aspect(writeAspect2) .systemMetadata(metadata2) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #2 @@ -1215,7 +1225,7 @@ public void testIngestSameAspect() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #1 @@ -1246,7 +1256,7 @@ public void testIngestSameAspect() throws AssertionError { .aspect(writeAspect2) .systemMetadata(metadata2) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); // Validate retrieval of CorpUserInfo Aspect #2 @@ -1299,42 +1309,42 @@ public void testRetention() throws AssertionError { .aspect(writeAspect1) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName) .aspect(writeAspect1a) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName) .aspect(writeAspect1b) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName2) .aspect(writeAspect2) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) 
.aspectName(aspectName2) .aspect(writeAspect2a) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName2) .aspect(writeAspect2b) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); @@ -1366,14 +1376,14 @@ public void testRetention() throws AssertionError { .aspect(writeAspect1c) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient()), + .build(_entityServiceImpl), MCPUpsertBatchItem.builder() .urn(entityUrn) .aspectName(aspectName2) .aspect(writeAspect2c) .systemMetadata(metadata1) .auditStamp(TEST_AUDIT_STAMP) - .build(_testEntityRegistry, _entityServiceImpl.getSystemEntityClient())); + .build(_entityServiceImpl)); _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), true, true); assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); @@ -1637,6 +1647,172 @@ public void testUIPreProcessedProposal() throws Exception { assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); } + @Test + public void testStructuredPropertyIngestProposal() throws Exception { + String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)"; + Urn entityUrn = UrnUtils.getUrn(urnStr); + + // Ingest one structured property definition + String definitionAspectName = "propertyDefinition"; + Urn firstPropertyUrn = UrnUtils.getUrn("urn:li:structuredProperty:firstStructuredProperty"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(firstPropertyUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("structuredProperty"); + gmce.setAspectName(definitionAspectName); + StructuredPropertyDefinition structuredPropertyDefinition = + new StructuredPropertyDefinition() + .setQualifiedName("firstStructuredProperty") + .setValueType(Urn.createFromString(DATA_TYPE_URN_PREFIX + "string")) + .setEntityTypes(new UrnArray(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"))); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] definitionSerialized = + dataTemplateCodec.dataTemplateToBytes(structuredPropertyDefinition); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(definitionSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + ArgumentCaptor captor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(firstPropertyUrn), Mockito.any(), captor.capture()); + assertEquals( + _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), + structuredPropertyDefinition); + Urn secondPropertyUrn = UrnUtils.getUrn("urn:li:structuredProperty:secondStructuredProperty"); + assertNull(_entityServiceImpl.getAspect(secondPropertyUrn, definitionAspectName, 0)); + assertEquals( + _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), + structuredPropertyDefinition); 
+ Set defs = + _aspectDao + .streamAspects( + STRUCTURED_PROPERTY_ENTITY_NAME, STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME) + .map( + entityAspect -> + EntityUtils.toAspectRecord( + STRUCTURED_PROPERTY_ENTITY_NAME, + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, + entityAspect.getMetadata(), + _testEntityRegistry)) + .map(recordTemplate -> (StructuredPropertyDefinition) recordTemplate) + .collect(Collectors.toSet()); + assertEquals(defs.size(), 1); + assertEquals(defs, Set.of(structuredPropertyDefinition)); + + SystemEntityClient mockSystemEntityClient = Mockito.mock(SystemEntityClient.class); + Mockito.when( + mockSystemEntityClient.getLatestAspectObject(firstPropertyUrn, "propertyDefinition")) + .thenReturn(new com.linkedin.entity.Aspect(structuredPropertyDefinition.data())); + + // Add a value for that property + PrimitivePropertyValueArray propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create("hello")); + StructuredPropertyValueAssignment assignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn(firstPropertyUrn) + .setValues(propertyValues); + StructuredProperties structuredProperties = + new StructuredProperties() + .setProperties(new StructuredPropertyValueAssignmentArray(assignment)); + MetadataChangeProposal asgnMce = new MetadataChangeProposal(); + asgnMce.setEntityUrn(entityUrn); + asgnMce.setChangeType(ChangeType.UPSERT); + asgnMce.setEntityType("dataset"); + asgnMce.setAspectName("structuredProperties"); + JacksonDataTemplateCodec asgnTemplateCodec = new JacksonDataTemplateCodec(); + byte[] asgnSerialized = asgnTemplateCodec.dataTemplateToBytes(structuredProperties); + GenericAspect asgnGenericAspect = new GenericAspect(); + asgnGenericAspect.setValue(ByteString.unsafeWrap(asgnSerialized)); + asgnGenericAspect.setContentType("application/json"); + asgnMce.setAspect(asgnGenericAspect); + _entityServiceImpl.ingestProposal(asgnMce, TEST_AUDIT_STAMP, false); + assertEquals( + _entityServiceImpl.getAspect(entityUrn, "structuredProperties", 0), structuredProperties); + + // Ingest second structured property definition + MetadataChangeProposal gmce2 = new MetadataChangeProposal(); + gmce2.setEntityUrn(secondPropertyUrn); + gmce2.setChangeType(ChangeType.UPSERT); + gmce2.setEntityType("structuredProperty"); + gmce2.setAspectName(definitionAspectName); + StructuredPropertyDefinition secondDefinition = + new StructuredPropertyDefinition() + .setQualifiedName("secondStructuredProperty") + .setValueType(Urn.createFromString(DATA_TYPE_URN_PREFIX + "number")) + .setEntityTypes(new UrnArray(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"))); + JacksonDataTemplateCodec secondDataTemplate = new JacksonDataTemplateCodec(); + byte[] secondDefinitionSerialized = secondDataTemplate.dataTemplateToBytes(secondDefinition); + GenericAspect secondGenericAspect = new GenericAspect(); + secondGenericAspect.setValue(ByteString.unsafeWrap(secondDefinitionSerialized)); + secondGenericAspect.setContentType("application/json"); + gmce2.setAspect(secondGenericAspect); + _entityServiceImpl.ingestProposal(gmce2, TEST_AUDIT_STAMP, false); + ArgumentCaptor secondCaptor = + ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(secondPropertyUrn), Mockito.any(), secondCaptor.capture()); + assertEquals( + _entityServiceImpl.getAspect(firstPropertyUrn, definitionAspectName, 0), + structuredPropertyDefinition); + assertEquals( + _entityServiceImpl.getAspect(secondPropertyUrn, 
definitionAspectName, 0), secondDefinition); + defs = + _aspectDao + .streamAspects( + STRUCTURED_PROPERTY_ENTITY_NAME, STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME) + .map( + entityAspect -> + EntityUtils.toAspectRecord( + STRUCTURED_PROPERTY_ENTITY_NAME, + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, + entityAspect.getMetadata(), + _testEntityRegistry)) + .map(recordTemplate -> (StructuredPropertyDefinition) recordTemplate) + .collect(Collectors.toSet()); + assertEquals(defs.size(), 2); + assertEquals(defs, Set.of(secondDefinition, structuredPropertyDefinition)); + + Mockito.when( + mockSystemEntityClient.getLatestAspectObject(secondPropertyUrn, "propertyDefinition")) + .thenReturn(new com.linkedin.entity.Aspect(secondDefinition.data())); + + // Get existing value for first structured property + assertEquals( + _entityServiceImpl.getAspect(entityUrn, "structuredProperties", 0), structuredProperties); + + // Add a value for second property + propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create(15.0)); + StructuredPropertyValueAssignment secondAssignment = + new StructuredPropertyValueAssignment() + .setPropertyUrn(secondPropertyUrn) + .setValues(propertyValues); + StructuredProperties secondPropertyArr = + new StructuredProperties() + .setProperties( + new StructuredPropertyValueAssignmentArray(assignment, secondAssignment)); + MetadataChangeProposal asgn2Mce = new MetadataChangeProposal(); + asgn2Mce.setEntityUrn(entityUrn); + asgn2Mce.setChangeType(ChangeType.UPSERT); + asgn2Mce.setEntityType("dataset"); + asgn2Mce.setAspectName("structuredProperties"); + JacksonDataTemplateCodec asgnTemplateCodec2 = new JacksonDataTemplateCodec(); + byte[] asgnSerialized2 = asgnTemplateCodec2.dataTemplateToBytes(secondPropertyArr); + GenericAspect asgnGenericAspect2 = new GenericAspect(); + asgnGenericAspect2.setValue(ByteString.unsafeWrap(asgnSerialized2)); + asgnGenericAspect2.setContentType("application/json"); + asgn2Mce.setAspect(asgnGenericAspect2); + _entityServiceImpl.ingestProposal(asgn2Mce, TEST_AUDIT_STAMP, false); + StructuredProperties expectedProperties = + new StructuredProperties() + .setProperties( + new StructuredPropertyValueAssignmentArray(assignment, secondAssignment)); + assertEquals( + _entityServiceImpl.getAspect(entityUrn, "structuredProperties", 0), expectedProperties); + } + @Nonnull protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) throws Exception { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java index 680d4079851eb..15852e0cbe35b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.entity; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EntitySpecBuilder; import com.linkedin.metadata.models.EventSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.metadata.snapshot.Snapshot; import java.util.Collections; import java.util.HashMap; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java 
b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index 2f8fba0083aa7..bd500cd469100 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -337,26 +337,26 @@ public void testTimestampLineage() throws Exception { // Without timestamps EntityLineageResult upstreamResult = getUpstreamLineage(datasetTwoUrn, null, null); EntityLineageResult downstreamResult = getDownstreamLineage(datasetTwoUrn, null, null); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Timestamp before upstreamResult = getUpstreamLineage(datasetTwoUrn, 0L, initialTime - 10); downstreamResult = getDownstreamLineage(datasetTwoUrn, 0L, initialTime - 10); - Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(0), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), downstreamResult.getTotal()); // Timestamp after upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); - Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(0), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), downstreamResult.getTotal()); // Timestamp included upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Update only one of the downstream edges Long updatedTime = 2000L; @@ -387,20 +387,20 @@ public void testTimestampLineage() throws Exception { // Without timestamps upstreamResult = getUpstreamLineage(datasetTwoUrn, null, null); downstreamResult = getDownstreamLineage(datasetTwoUrn, null, null); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Window includes initial time and updated time upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(3), downstreamResult.getTotal()); // Window includes updated time but not initial time upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); downstreamResult = 
getDownstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); - Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); - Assert.assertEquals(new Integer(2), downstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(1), upstreamResult.getTotal()); + Assert.assertEquals(Integer.valueOf(2), downstreamResult.getTotal()); } /** diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java index fba9d5359d29f..d2aef982750bd 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java @@ -7,7 +7,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.MatchedFieldArray; import com.linkedin.metadata.search.SearchEntityArray; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java index 2c395875a1d6b..a54e8aa1c9191 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java @@ -1,14 +1,19 @@ package com.linkedin.metadata.search.indexbuilder; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_MAPPING_FIELD; import static org.testng.Assert.*; import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder; +import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.systemmetadata.SystemMetadataMappingsBuilder; import com.linkedin.metadata.version.GitVersion; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -295,4 +300,117 @@ public void testSettingsNoReindex() throws Exception { wipe(); } } + + @Test + public void testCopyStructuredPropertyMappings() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder enabledMappingReindex = + new ESIndexBuilder( + getSearchClient(), + 1, + 0, + 0, + 0, + Map.of(), + false, + true, + new ElasticSearchConfiguration(), + gitVersion); + + ReindexConfig reindexConfigNoIndexBefore = + enabledMappingReindex.buildReindexState( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + assertNull(reindexConfigNoIndexBefore.currentMappings()); + assertEquals( + reindexConfigNoIndexBefore.targetMappings(), SystemMetadataMappingsBuilder.getMappings()); + assertFalse(reindexConfigNoIndexBefore.requiresApplyMappings()); + assertFalse(reindexConfigNoIndexBefore.isPureMappingsAddition()); + + // Create index + enabledMappingReindex.buildIndex( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + + // Test build 
reindex config with no structured properties added + ReindexConfig reindexConfigNoChange = + enabledMappingReindex.buildReindexState( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + assertEquals( + reindexConfigNoChange.currentMappings(), SystemMetadataMappingsBuilder.getMappings()); + assertEquals( + reindexConfigNoChange.targetMappings(), SystemMetadataMappingsBuilder.getMappings()); + assertFalse(reindexConfigNoIndexBefore.requiresApplyMappings()); + assertFalse(reindexConfigNoIndexBefore.isPureMappingsAddition()); + + // Test add new field to the mappings + Map targetMappingsNewField = + new HashMap<>(SystemMetadataMappingsBuilder.getMappings()); + ((Map) targetMappingsNewField.get("properties")) + .put("myNewField", Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD)); + + // Test build reindex config for new fields with no structured properties added + ReindexConfig reindexConfigNewField = + enabledMappingReindex.buildReindexState(TEST_INDEX_NAME, targetMappingsNewField, Map.of()); + assertEquals( + reindexConfigNewField.currentMappings(), SystemMetadataMappingsBuilder.getMappings()); + assertEquals(reindexConfigNewField.targetMappings(), targetMappingsNewField); + assertTrue(reindexConfigNewField.requiresApplyMappings()); + assertTrue(reindexConfigNewField.isPureMappingsAddition()); + + // Add structured properties to index + Map mappingsWithStructuredProperties = + new HashMap<>(SystemMetadataMappingsBuilder.getMappings()); + ((Map) mappingsWithStructuredProperties.get("properties")) + .put( + STRUCTURED_PROPERTY_MAPPING_FIELD + ".myStringProp", + Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD)); + ((Map) mappingsWithStructuredProperties.get("properties")) + .put( + STRUCTURED_PROPERTY_MAPPING_FIELD + ".myNumberProp", + Map.of(SettingsBuilder.TYPE, ESUtils.DOUBLE_FIELD_TYPE)); + + enabledMappingReindex.buildIndex(TEST_INDEX_NAME, mappingsWithStructuredProperties, Map.of()); + + // Test build reindex config with structured properties not copied + ReindexConfig reindexConfigNoCopy = + enabledMappingReindex.buildReindexState( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + Map expectedMappingsStructPropsNested = + new HashMap<>(SystemMetadataMappingsBuilder.getMappings()); + ((Map) expectedMappingsStructPropsNested.get("properties")) + .put( + "structuredProperties", + Map.of( + "properties", + Map.of( + "myNumberProp", + Map.of(SettingsBuilder.TYPE, ESUtils.DOUBLE_FIELD_TYPE), + "myStringProp", + Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD)))); + assertEquals(reindexConfigNoCopy.currentMappings(), expectedMappingsStructPropsNested); + assertEquals(reindexConfigNoCopy.targetMappings(), SystemMetadataMappingsBuilder.getMappings()); + assertFalse(reindexConfigNoCopy.isPureMappingsAddition()); + + // Test build reindex config with structured properties copied + ReindexConfig reindexConfigCopy = + enabledMappingReindex.buildReindexState( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of(), true); + assertEquals(reindexConfigCopy.currentMappings(), expectedMappingsStructPropsNested); + assertEquals(reindexConfigCopy.targetMappings(), expectedMappingsStructPropsNested); + assertFalse(reindexConfigCopy.requiresApplyMappings()); + assertFalse(reindexConfigCopy.isPureMappingsAddition()); + + // Test build reindex config with new field added and structured properties copied + ReindexConfig reindexConfigCopyAndNewField = + enabledMappingReindex.buildReindexState( + TEST_INDEX_NAME, 
targetMappingsNewField, Map.of(), true); + assertEquals(reindexConfigCopyAndNewField.currentMappings(), expectedMappingsStructPropsNested); + Map targetMappingsNewFieldAndStructProps = + new HashMap<>(expectedMappingsStructPropsNested); + ((Map) targetMappingsNewFieldAndStructProps.get("properties")) + .put("myNewField", Map.of(SettingsBuilder.TYPE, SettingsBuilder.KEYWORD)); + assertEquals( + reindexConfigCopyAndNewField.targetMappings(), targetMappingsNewFieldAndStructProps); + assertTrue(reindexConfigCopyAndNewField.requiresApplyMappings()); + assertTrue(reindexConfigCopyAndNewField.isPureMappingsAddition()); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index 02bd186ccc183..6df31b35fecde 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -1,11 +1,16 @@ package com.linkedin.metadata.search.indexbuilder; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; import com.google.common.collect.ImmutableMap; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntitySpecBuilder; import com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder; +import com.linkedin.structured.StructuredPropertyDefinition; +import java.net.URISyntaxException; +import java.util.List; import java.util.Map; import org.testng.annotations.Test; @@ -54,14 +59,6 @@ public void testMappingsBuilder() { Map keyPart3FieldSubfields = (Map) keyPart3Field.get("fields"); assertEquals(keyPart3FieldSubfields.size(), 1); assertTrue(keyPart3FieldSubfields.containsKey("keyword")); - Map customPropertiesField = - (Map) properties.get("customProperties"); - assertEquals(customPropertiesField.get("type"), "keyword"); - assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer"); - Map customPropertiesFieldSubfields = - (Map) customPropertiesField.get("fields"); - assertEquals(customPropertiesFieldSubfields.size(), 1); - assertTrue(customPropertiesFieldSubfields.containsKey("keyword")); // TEXT Map nestedArrayStringField = (Map) properties.get("nestedArrayStringField"); @@ -81,6 +78,15 @@ public void testMappingsBuilder() { assertEquals(nestedArrayArrayFieldSubfields.size(), 2); assertTrue(nestedArrayArrayFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayArrayFieldSubfields.containsKey("keyword")); + Map customPropertiesField = + (Map) properties.get("customProperties"); + assertEquals(customPropertiesField.get("type"), "keyword"); + assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer"); + Map customPropertiesFieldSubfields = + (Map) customPropertiesField.get("fields"); + assertEquals(customPropertiesFieldSubfields.size(), 2); + assertTrue(customPropertiesFieldSubfields.containsKey("delimited")); + assertTrue(customPropertiesFieldSubfields.containsKey("keyword")); // TEXT with addToFilters Map textField = (Map) properties.get("textFieldOverride"); @@ -153,4 +159,115 @@ public void testMappingsBuilder() { Map doubleField = (Map) properties.get("doubleField"); assertEquals(doubleField.get("type"), "double"); } + + @Test + public void testGetMappingsWithStructuredProperty() throws 
URISyntaxException { + // Baseline comparison: Mappings with no structured props + Map resultWithoutStructuredProps = + MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec()); + + // Test that a structured property that does not apply to the entity does not alter the mappings + StructuredPropertyDefinition structPropNotForThisEntity = + new StructuredPropertyDefinition() + .setQualifiedName("propNotForThis") + .setDisplayName("propNotForThis") + .setEntityTypes(new UrnArray(Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"))) + .setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + Map resultWithOnlyUnrelatedStructuredProp = + MappingsBuilder.getMappings( + TestEntitySpecBuilder.getSpec(), List.of(structPropNotForThisEntity)); + assertEquals(resultWithOnlyUnrelatedStructuredProp, resultWithoutStructuredProps); + + // Test that a structured property that does apply to this entity is included in the mappings + String fqnOfRelatedProp = "propForThis"; + StructuredPropertyDefinition structPropForThisEntity = + new StructuredPropertyDefinition() + .setQualifiedName(fqnOfRelatedProp) + .setDisplayName("propForThis") + .setEntityTypes( + new UrnArray( + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"), + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "testEntity"))) + .setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + Map resultWithOnlyRelatedStructuredProp = + MappingsBuilder.getMappings( + TestEntitySpecBuilder.getSpec(), List.of(structPropForThisEntity)); + assertNotEquals(resultWithOnlyRelatedStructuredProp, resultWithoutStructuredProps); + Map fieldsBefore = + (Map) resultWithoutStructuredProps.get("properties"); + Map fieldsAfter = + (Map) resultWithOnlyRelatedStructuredProp.get("properties"); + assertEquals(fieldsAfter.size(), fieldsBefore.size() + 1); + + Map structProps = (Map) fieldsAfter.get("structuredProperties"); + fieldsAfter = (Map) structProps.get("properties"); + + String newField = + fieldsAfter.keySet().stream() + .filter(field -> !fieldsBefore.containsKey(field)) + .findFirst() + .get(); + assertEquals(newField, fqnOfRelatedProp); + assertEquals( + fieldsAfter.get(newField), + Map.of( + "normalizer", + "keyword_normalizer", + "type", + "keyword", + "fields", + Map.of("keyword", Map.of("type", "keyword")))); + + // Test that only structured properties that apply are included + Map resultWithBothStructuredProps = + MappingsBuilder.getMappings( + TestEntitySpecBuilder.getSpec(), + List.of(structPropForThisEntity, structPropNotForThisEntity)); + assertEquals(resultWithBothStructuredProps, resultWithOnlyRelatedStructuredProp); + } + + @Test + public void testGetMappingsForStructuredProperty() throws URISyntaxException { + StructuredPropertyDefinition testStructProp = + new StructuredPropertyDefinition() + .setQualifiedName("testProp") + .setDisplayName("exampleProp") + .setEntityTypes( + new UrnArray( + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"), + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "testEntity"))) + .setValueType(Urn.createFromString("urn:li:logicalType:STRING")); + Map structuredPropertyFieldMappings = + MappingsBuilder.getMappingsForStructuredProperty(List.of(testStructProp)); + assertEquals(structuredPropertyFieldMappings.size(), 1); + String keyInMap = structuredPropertyFieldMappings.keySet().stream().findFirst().get(); + assertEquals(keyInMap, "testProp"); + Object mappings = structuredPropertyFieldMappings.get(keyInMap); + assertEquals( + mappings, + Map.of( + "type", + "keyword", + "normalizer", + 
"keyword_normalizer", + "fields", + Map.of("keyword", Map.of("type", "keyword")))); + + StructuredPropertyDefinition propWithNumericType = + new StructuredPropertyDefinition() + .setQualifiedName("testPropNumber") + .setDisplayName("examplePropNumber") + .setEntityTypes( + new UrnArray( + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "dataset"), + Urn.createFromString(ENTITY_TYPE_URN_PREFIX + "testEntity"))) + .setValueType(Urn.createFromString("urn:li:logicalType:NUMBER")); + Map structuredPropertyFieldMappingsNumber = + MappingsBuilder.getMappingsForStructuredProperty(List.of(propWithNumericType)); + assertEquals(structuredPropertyFieldMappingsNumber.size(), 1); + keyInMap = structuredPropertyFieldMappingsNumber.keySet().stream().findFirst().get(); + assertEquals("testPropNumber", keyInMap); + mappings = structuredPropertyFieldMappingsNumber.get(keyInMap); + assertEquals(Map.of("type", "double"), mappings); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java index 6269827104faf..9e8855622ced4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java @@ -13,6 +13,7 @@ import java.util.Set; import java.util.stream.Collectors; import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.testng.Assert; import org.testng.annotations.Test; @@ -20,7 +21,6 @@ public class AggregationQueryBuilderTest { @Test public void testGetDefaultAggregationsHasFields() { - SearchableAnnotation annotation = new SearchableAnnotation( "test", @@ -82,7 +82,6 @@ public void testGetDefaultAggregationsFields() { @Test public void testGetSpecificAggregationsHasFields() { - SearchableAnnotation annotation1 = new SearchableAnnotation( "test1", @@ -135,6 +134,100 @@ public void testGetSpecificAggregationsHasFields() { Assert.assertEquals(aggs.size(), 0); } + @Test + public void testAggregateOverStructuredProperty() { + SearchConfiguration config = new SearchConfiguration(); + config.setMaxTermBucketSize(25); + + AggregationQueryBuilder builder = new AggregationQueryBuilder(config, List.of()); + + List aggs = + builder.getAggregations(List.of("structuredProperties.ab.fgh.ten")); + Assert.assertEquals(aggs.size(), 1); + AggregationBuilder aggBuilder = aggs.get(0); + Assert.assertTrue(aggBuilder instanceof TermsAggregationBuilder); + TermsAggregationBuilder agg = (TermsAggregationBuilder) aggBuilder; + // Check that field name is sanitized to correct field name + Assert.assertEquals(agg.field(), "structuredProperties.ab_fgh_ten"); + + // Two structured properties + aggs = + builder.getAggregations( + List.of("structuredProperties.ab.fgh.ten", "structuredProperties.hello")); + Assert.assertEquals(aggs.size(), 2); + Assert.assertEquals( + aggs.stream() + .map(aggr -> ((TermsAggregationBuilder) aggr).field()) + .collect(Collectors.toSet()), + Set.of("structuredProperties.ab_fgh_ten", "structuredProperties.hello")); + } + + @Test + public void testAggregateOverFieldsAndStructProp() { + SearchableAnnotation annotation1 = + new SearchableAnnotation( + "test1", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.empty(), + Optional.of("Has Test"), + 1.0, + 
Optional.of("hasTest1"), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList(), + false); + + SearchableAnnotation annotation2 = + new SearchableAnnotation( + "test2", + SearchableAnnotation.FieldType.KEYWORD, + true, + true, + false, + false, + Optional.of("Test Filter"), + Optional.empty(), + 1.0, + Optional.empty(), + Optional.empty(), + Collections.emptyMap(), + Collections.emptyList(), + false); + + SearchConfiguration config = new SearchConfiguration(); + config.setMaxTermBucketSize(25); + + AggregationQueryBuilder builder = + new AggregationQueryBuilder(config, ImmutableList.of(annotation1, annotation2)); + + // Aggregate over fields and structured properties + List aggs = + builder.getAggregations( + ImmutableList.of( + "test1", + "test2", + "hasTest1", + "structuredProperties.ab.fgh.ten", + "structuredProperties.hello")); + Assert.assertEquals(aggs.size(), 5); + Set facets = + aggs.stream() + .map(aggB -> ((TermsAggregationBuilder) aggB).field()) + .collect(Collectors.toSet()); + Assert.assertEquals( + facets, + ImmutableSet.of( + "test1.keyword", + "test2.keyword", + "hasTest1", + "structuredProperties.ab_fgh_ten", + "structuredProperties.hello")); + } + @Test public void testMissingAggregation() { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java index 105ee2652dc30..47d18fe0d299c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.search.query.request; +import static com.linkedin.metadata.Constants.*; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; +import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.config.search.CustomConfiguration; @@ -30,6 +32,14 @@ public class CustomizedQueryHandlerTest { static { try { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + TEST_MAPPER + .getFactory() + .setStreamReadConstraints( + StreamReadConstraints.builder().maxStringLength(maxSize).build()); CustomConfiguration customConfiguration = new CustomConfiguration(); customConfiguration.setEnabled(true); customConfiguration.setFile("search_config_test.yml"); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java index 8cb28d3658ee7..38d630bc302f4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java @@ -140,7 +140,8 @@ public void testQueryBuilderFulltext() { "urn.delimited", 7.0f, "textArrayField.delimited", 0.4f, "nestedArrayStringField.delimited", 0.4f, - "wordGramField.delimited", 0.4f)); + "wordGramField.delimited", 0.4f, + "customProperties.delimited", 0.4f)); BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(1); assertTrue(boolPrefixQuery.should().size() > 0); @@ -165,7 +166,7 @@ public void 
testQueryBuilderFulltext() { }) .collect(Collectors.toList()); - assertEquals(prefixFieldWeights.size(), 28); + assertEquals(prefixFieldWeights.size(), 29); List.of( Pair.of("urn", 100.0f), @@ -200,7 +201,7 @@ public void testQueryBuilderStructured() { assertEquals(keywordQuery.queryString(), "testQuery"); assertNull(keywordQuery.analyzer()); Map keywordFields = keywordQuery.fields(); - assertEquals(keywordFields.size(), 21); + assertEquals(keywordFields.size(), 22); assertEquals(keywordFields.get("keyPart1").floatValue(), 10.0f); assertFalse(keywordFields.containsKey("keyPart3")); assertEquals(keywordFields.get("textFieldOverride").floatValue(), 1.0f); @@ -360,7 +361,7 @@ public void testGetStandardFieldsEntitySpec() { public void testGetStandardFields() { Set fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec())); - assertEquals(fieldConfigs.size(), 21); + assertEquals(fieldConfigs.size(), 22); assertEquals( fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( @@ -384,7 +385,8 @@ public void testGetStandardFields() { "wordGramField.wordGrams3", "textFieldOverride.delimited", "urn", - "wordGramField.wordGrams2")); + "wordGramField.wordGrams2", + "customProperties.delimited")); // customProperties.delimited Saas only assertEquals( fieldConfigs.stream() @@ -467,9 +469,9 @@ public void testGetStandardFields() { fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields( ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec)); - // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the + // Same 22 from the original entity + newFieldNotInOriginal + 3 word gram fields from the // textFieldOverride - assertEquals(fieldConfigs.size(), 26); + assertEquals(fieldConfigs.size(), 27); assertEquals( fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of( @@ -498,7 +500,8 @@ public void testGetStandardFields() { "fieldDoesntExistInOriginal.delimited", "textFieldOverride.wordGrams2", "textFieldOverride.wordGrams3", - "textFieldOverride.wordGrams4")); + "textFieldOverride.wordGrams4", + "customProperties.delimited")); // Field which only exists in first one: Should be the same assertEquals( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java index 03abd9ffe29d7..980b82194536e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java @@ -252,4 +252,75 @@ public void testGetQueryBuilderFromCriterionFieldToExpand() { + "}"; Assert.assertEquals(result.toString(), expected); } + + @Test + public void testGetQueryBuilderFromStructPropEqualsValue() { + + final Criterion singleValueCriterion = + new Criterion() + .setField("structuredProperties.ab.fgh.ten") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1"))); + + QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + String expected = + "{\n" + + " \"terms\" : {\n" + + " \"structuredProperties.ab_fgh_ten\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"structuredProperties.ab_fgh_ten\"\n" + + " }\n" + + "}"; + Assert.assertEquals(result.toString(), expected); + } + + @Test + public void testGetQueryBuilderFromStructPropExists() { + final Criterion 
singleValueCriterion = + new Criterion().setField("structuredProperties.ab.fgh.ten").setCondition(Condition.EXISTS); + + QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); + String expected = + "{\n" + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"structuredProperties.ab_fgh_ten\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"structuredProperties.ab_fgh_ten\"\n" + + " }\n" + + "}"; + Assert.assertEquals(result.toString(), expected); + + // No diff in the timeseries field case for this condition. + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); + + result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; + Assert.assertEquals(result.toString(), expected); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java index 58ea020e42565..a22a774065852 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java @@ -10,9 +10,9 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.LineageSearchResult; diff --git a/metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json b/metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json new file mode 100644 index 0000000000000..e68cbbd9aeff0 --- /dev/null +++ b/metadata-io/src/test/resources/forms/form_assignment_test_definition_complex.json @@ -0,0 +1,145 @@ +{ + "on": { + "types": ["dataset", "container", "dataJob", "dataFlow", "chart", "dashboard"], + "conditions": { + "or": [ + { + "or": [ + { + "property": "forms.incompleteForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + }, + { + "property": "forms.completedForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + } + ] + }, + { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dataset"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test"] + } + ] + }, + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": 
["urn:li:dataPlatform:snowflake"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test-2"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dashboard"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test-2"] + } + ] + } + ] + } + ] + } + }, + "rules": { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dataset"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test"] + } + ] + }, + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:snowflake"] + }, + { + "property": "container.container", + "operator": "equals", + "values": ["urn:li:container:test-2"] + }, + { + "property": "entityType", + "operator": "equals", + "values": ["dashboard"] + }, + { + "property": "domains.domains", + "operator": "equals", + "values": ["urn:li:domain:test-2"] + } + ] + } + ] + }, + "actions": { + "passing": [ + { + "type": "ASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ], + "failing": [ + { + "type": "UNASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ] + } +} \ No newline at end of file diff --git a/metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json b/metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json new file mode 100644 index 0000000000000..a09fbc801414c --- /dev/null +++ b/metadata-io/src/test/resources/forms/form_assignment_test_definition_simple.json @@ -0,0 +1,67 @@ +{ + "on": { + "types": ["dataset", "container", "dataJob", "dataFlow", "chart", "dashboard"], + "conditions": { + "or": [ + { + "or": [ + { + "property": "forms.incompleteForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + }, + { + "property": "forms.completedForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + } + ] + }, + { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + } + ] + } + ] + } + ] + } + }, + "rules": { + "or": [ + { + "and": [ + { + "property": "dataPlatformInstance.platform", + "operator": "equals", + "values": ["urn:li:dataPlatform:hive"] + } + ] + } + ] + }, + "actions": { + "passing": [ + { + "type": "ASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ], + "failing": [ + { + "type": "UNASSIGN_FORM", + "params": { + "formUrn": "urn:li:form:test" + } + } + ] + } +} \ No newline at end of file diff --git a/metadata-io/src/test/resources/forms/form_prompt_test_definition.json b/metadata-io/src/test/resources/forms/form_prompt_test_definition.json new file mode 100644 index 0000000000000..d797db7e25180 --- /dev/null +++ b/metadata-io/src/test/resources/forms/form_prompt_test_definition.json @@ -0,0 +1,39 @@ +{ + "on": { + "types": ["dataset", "container", "dataJob", "dataFlow", "chart", "dashboard"], + "conditions": { + "or": [ + { + "property": "forms.incompleteForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + }, + { + "property": "forms.completedForms.urn", + "operator": "equals", + "values": ["urn:li:form:test"] + } + ] + } + }, + "rules": { + 
"and": [ + { + "property": "structuredProperties.urn:li:structuredProperty:test.id", + "operator": "exists" + } + ] + }, + "actions": { + "passing": [], + "failing": [ + { + "type": "SET_FORM_PROMPT_INCOMPLETE", + "params": { + "formUrn": "urn:li:form:test", + "formPromptId": "test-id" + } + } + ] + } +} \ No newline at end of file diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java index e695788e09726..ae208c053d69f 100644 --- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java +++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java @@ -14,14 +14,14 @@ exclude = {ElasticsearchRestClientAutoConfiguration.class, CassandraAutoConfiguration.class}) @ComponentScan( basePackages = { - // "com.linkedin.gms.factory.config", - // "com.linkedin.gms.factory.common", "com.linkedin.gms.factory.kafka", "com.linkedin.metadata.boot.kafka", "com.linkedin.metadata.kafka", "com.linkedin.metadata.dao.producer", "com.linkedin.gms.factory.config", "com.linkedin.gms.factory.entity.update.indices", + "com.linkedin.gms.factory.entityclient", + "com.linkedin.gms.factory.form", "com.linkedin.gms.factory.timeline.eventgenerator", "io.datahubproject.metadata.jobs.common.health.kafka" }, diff --git a/metadata-jobs/mae-consumer-job/src/main/resources/application.properties b/metadata-jobs/mae-consumer-job/src/main/resources/application.properties index 7df61c93ab66d..f8b979e6fbac0 100644 --- a/metadata-jobs/mae-consumer-job/src/main/resources/application.properties +++ b/metadata-jobs/mae-consumer-job/src/main/resources/application.properties @@ -3,4 +3,4 @@ management.endpoints.web.exposure.include=metrics, health, info spring.mvc.servlet.path=/ management.health.elasticsearch.enabled=false management.health.neo4j.enabled=false -entityClient.preferredImpl=restli +entityClient.impl=restli diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java index 7135e4e44d459..b409a41600bd7 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java @@ -1,6 +1,5 @@ package com.linkedin.metadata.kafka; -import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; import com.linkedin.metadata.entity.EntityServiceImpl; @@ -22,8 +21,6 @@ public class MaeConsumerApplicationTestConfiguration { @MockBean private EntityServiceImpl _entityServiceImpl; - @MockBean private SystemRestliEntityClient restliEntityClient; - @MockBean private Database ebeanServer; @MockBean private EntityRegistry entityRegistry; diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java index f2eeef6e2c8e6..278c52030b5fc 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java +++ 
b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java @@ -9,6 +9,7 @@ import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; import com.linkedin.metadata.kafka.hook.event.EntityChangeEventGeneratorHook; +import com.linkedin.metadata.kafka.hook.form.FormAssignmentHook; import com.linkedin.metadata.kafka.hook.ingestion.IngestionSchedulerHook; import com.linkedin.metadata.kafka.hook.siblings.SiblingAssociationHook; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -36,7 +37,8 @@ IngestionSchedulerHook.class, EntityChangeEventGeneratorHook.class, KafkaEventConsumerFactory.class, - SiblingAssociationHook.class + SiblingAssociationHook.class, + FormAssignmentHook.class }) @EnableKafka public class MetadataChangeLogProcessor { @@ -95,6 +97,7 @@ public void consume(final ConsumerRecord consumerRecord) // Here - plug in additional "custom processor hooks" for (MetadataChangeLogHook hook : this.hooks) { if (!hook.isEnabled()) { + log.debug(String.format("Skipping disabled hook %s", hook.getClass())); continue; } try (Timer.Context ignored = @@ -102,7 +105,7 @@ public void consume(final ConsumerRecord consumerRecord) .time()) { hook.invoke(event); } catch (Exception e) { - // Just skip this hook and continue. - Note that this represents "at most once" + // Just skip this hook and continue. - Note that this represents "at most once"// // processing. MetricUtils.counter(this.getClass(), hook.getClass().getSimpleName() + "_failure").inc(); log.error( diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java index 036968f9f6759..d8a959c0be624 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java @@ -1,23 +1,17 @@ package com.linkedin.metadata.kafka.config; import com.google.common.collect.ImmutableSet; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.kafka.hydrator.EntityHydrator; import com.linkedin.metadata.models.registry.EntityRegistry; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; @Configuration -@Import({RestliEntityClientFactory.class}) public class EntityHydratorConfig { - @Autowired - @Qualifier("systemRestliEntityClient") - private SystemRestliEntityClient _entityClient; + @Autowired private SystemEntityClient entityClient; @Autowired private EntityRegistry _entityRegistry; @@ -34,6 +28,6 @@ public class EntityHydratorConfig { @Bean public EntityHydrator getEntityHydrator() { - return new EntityHydrator(_entityRegistry, _entityClient); + return new EntityHydrator(_entityRegistry, entityClient); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java 
index f3b5a09708cee..375d1580dab51 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java @@ -6,8 +6,7 @@ import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.metadata.Constants; import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; @@ -43,7 +42,7 @@ */ @Slf4j @Component -@Import({EntityRegistryFactory.class, RestliEntityClientFactory.class}) +@Import({EntityRegistryFactory.class}) public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { /** The list of aspects that are supported for generating semantic change events. */ @@ -78,7 +77,7 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { ImmutableSet.of("CREATE", "UPSERT", "DELETE"); private final EntityChangeEventGeneratorRegistry _entityChangeEventGeneratorRegistry; - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient _entityClient; private final EntityRegistry _entityRegistry; private final Boolean _isEnabled; @@ -86,7 +85,7 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { public EntityChangeEventGeneratorHook( @Nonnull @Qualifier("entityChangeEventGeneratorRegistry") final EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry, - @Nonnull final SystemRestliEntityClient entityClient, + @Nonnull final SystemEntityClient entityClient, @Nonnull final EntityRegistry entityRegistry, @Nonnull @Value("${entityChangeEvents.enabled:true}") Boolean isEnabled) { _entityChangeEventGeneratorRegistry = diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java new file mode 100644 index 0000000000000..91e8e186b07f7 --- /dev/null +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java @@ -0,0 +1,130 @@ +package com.linkedin.metadata.kafka.hook.form; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; +import com.linkedin.gms.factory.form.FormServiceFactory; +import com.linkedin.metadata.kafka.hook.MetadataChangeLogHook; +import com.linkedin.metadata.service.FormService; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeLog; +import java.util.Objects; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.inject.Singleton; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Import; +import org.springframework.stereotype.Component; + +/** + * This hook is used for assigning / un-assigning forms for specific entities. + * + *
<p>Specifically, this hook performs the following operations:
+ *
+ * <p>1. When a new dynamic form assignment is created, an automation (metadata test) with the form
+ * urn embedded is automatically generated, which is responsible for assigning the form to any
+ * entities in the target set. It will also attempt a removal of the form for any failing entities.
+ *
+ * <p>2. When a new form is created, or an existing one updated, automations (metadata tests) will
+ * be generated for each prompt in the form, which verify that the entities the form is applied to
+ * are complying with the prompt. When they are NOT, the test will mark the prompts as incomplete.
+ *
+ * <p>3. When a form is hard deleted, any automations used for assigning the form, or validating
+ * prompts, are automatically deleted.
+ *
+ * <p>Note that currently Datasets, Dashboards, Charts, Data Jobs, Data Flows, and Containers are
+ * the only asset types supported for this hook.
+ *
+ * <p>TODO: In the future, let's decide whether we want to support automations to auto-mark form
+ * prompts as "completed" when they do in fact have the correct metadata. (Without the user needing
+ * to explicitly fill out a form prompt response)
+ *
+ * <p>TODO: Write a unit test for this class.
+ */
+@Slf4j
+@Component
+@Singleton
+@Import({FormServiceFactory.class, SystemAuthenticationFactory.class})
+public class FormAssignmentHook implements MetadataChangeLogHook {
+
+  private static final Set<ChangeType> SUPPORTED_UPDATE_TYPES =
+      ImmutableSet.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.RESTATE);
+
+  private final FormService _formService;
+  private final boolean _isEnabled;
+
+  @Autowired
+  public FormAssignmentHook(
+      @Nonnull final FormService formService,
+      @Nonnull @Value("${forms.hook.enabled:true}") Boolean isEnabled) {
+    _formService = Objects.requireNonNull(formService, "formService is required");
+    _isEnabled = isEnabled;
+  }
+
+  @Override
+  public void init() {}
+
+  @Override
+  public boolean isEnabled() {
+    return _isEnabled;
+  }
+
+  @Override
+  public void invoke(@Nonnull final MetadataChangeLog event) {
+    if (_isEnabled && isEligibleForProcessing(event)) {
+      if (isFormDynamicFilterUpdated(event)) {
+        handleFormFilterUpdated(event);
+      }
+    }
+  }
+
+  /** Handle a form filter update by updating the targeting automation for it. */
+  private void handleFormFilterUpdated(@Nonnull final MetadataChangeLog event) {
+    // 1. Get the new form assignment
+    DynamicFormAssignment formFilters =
+        GenericRecordUtils.deserializeAspect(
+            event.getAspect().getValue(),
+            event.getAspect().getContentType(),
+            DynamicFormAssignment.class);
+
+    // 2. Register an automation to assign it.
+    _formService.upsertFormAssignmentRunner(event.getEntityUrn(), formFilters);
+  }
+
+  /**
+   * Returns true if the event should be processed, which is only true if the change is on a
+   * form-related aspect that this hook handles.
+   */
+  private boolean isEligibleForProcessing(@Nonnull final MetadataChangeLog event) {
+    return isFormPromptSetUpdated(event)
+        || isFormDynamicFilterUpdated(event)
+        || isFormDeleted(event);
+  }
+
+  /** Returns true if a form is being hard-deleted. */
+  private boolean isFormDeleted(@Nonnull final MetadataChangeLog event) {
+    return FORM_ENTITY_NAME.equals(event.getEntityType())
+        && ChangeType.DELETE.equals(event.getChangeType())
+        && FORM_KEY_ASPECT_NAME.equals(event.getAspectName());
+  }
+
+  /** Returns true if the event represents an update to the prompt set of a form. */
+  private boolean isFormPromptSetUpdated(@Nonnull final MetadataChangeLog event) {
+    return FORM_ENTITY_NAME.equals(event.getEntityType())
+        && SUPPORTED_UPDATE_TYPES.contains(event.getChangeType())
+        && FORM_INFO_ASPECT_NAME.equals(event.getAspectName());
+  }
+
+  /** Returns true if the event represents an update to the dynamic filter for a form.
+   */
+  private boolean isFormDynamicFilterUpdated(@Nonnull final MetadataChangeLog event) {
+    return FORM_ENTITY_NAME.equals(event.getEntityType())
+        && SUPPORTED_UPDATE_TYPES.contains(event.getChangeType())
+        && DYNAMIC_FORM_ASSIGNMENT_ASPECT_NAME.equals(event.getAspectName());
+  }
+}
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java
index 67198d13772a3..7a1aaa7f6a056 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java
@@ -14,9 +14,9 @@
 import com.linkedin.dataset.UpstreamArray;
 import com.linkedin.dataset.UpstreamLineage;
 import com.linkedin.entity.EntityResponse;
-import com.linkedin.entity.client.SystemRestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.events.metadata.ChangeType;
-import com.linkedin.gms.factory.entity.RestliEntityClientFactory;
+import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory;
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
 import com.linkedin.gms.factory.search.EntitySearchServiceFactory;
 import com.linkedin.metadata.Constants;
@@ -72,14 +72,14 @@ public class SiblingAssociationHook implements MetadataChangeLogHook {
   public static final String SOURCE_SUBTYPE_V2 = "Source";
 
   private final EntityRegistry _entityRegistry;
-  private final SystemRestliEntityClient _entityClient;
+  private final SystemEntityClient _entityClient;
   private final EntitySearchService _searchService;
   private final boolean _isEnabled;
 
   @Autowired
   public SiblingAssociationHook(
       @Nonnull final EntityRegistry entityRegistry,
-      @Nonnull final SystemRestliEntityClient entityClient,
+      @Nonnull final SystemEntityClient entityClient,
       @Nonnull final EntitySearchService searchService,
       @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled) {
     _entityRegistry = entityRegistry;
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java
index 7a8fdd11fac43..6ad7cdbcad3e6 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java
@@ -7,7 +7,7 @@
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.entity.EntityResponse;
-import com.linkedin.entity.client.SystemRestliEntityClient;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.r2.RemoteInvocationException;
@@ -24,7 +24,7 @@ public class EntityHydrator {
 
   private final EntityRegistry _entityRegistry;
-  private final SystemRestliEntityClient _entityClient;
+  private final SystemEntityClient entityClient;
   private final ChartHydrator _chartHydrator = new ChartHydrator();
   private final CorpUserHydrator _corpUserHydrator = new CorpUserHydrator();
   private final DashboardHydrator _dashboardHydrator = new DashboardHydrator();
@@ -55,7 +55,7 @@ public Optional<ObjectNode> getHydratedEntity(String entityTypeName, String urn)
.collect(Collectors.toSet())) .orElse(Set.of()); entityResponse = - _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); + entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); } catch (RemoteInvocationException | URISyntaxException e) { log.error("Error while calling GMS to hydrate entity for urn {}", urn); return Optional.empty(); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java index a227668e22e9b..89ad6105be9cb 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java @@ -28,6 +28,7 @@ import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.Constants; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; +import com.linkedin.metadata.client.EntityClientAspectRetriever; import com.linkedin.metadata.config.SystemUpdateConfiguration; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.graph.Edge; @@ -121,9 +122,10 @@ public void setupTest() { _mockEntitySearchService, _mockTimeseriesAspectService, _mockSystemMetadataService, - ENTITY_REGISTRY, _searchDocumentTransformer, _mockEntityIndexBuilders); + _updateIndicesService.initializeAspectRetriever( + EntityClientAspectRetriever.builder().entityRegistry(ENTITY_REGISTRY).build()); _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); } @@ -198,9 +200,10 @@ public void testInputFieldsEdgesAreAdded() throws Exception { _mockEntitySearchService, _mockTimeseriesAspectService, _mockSystemMetadataService, - mockEntityRegistry, _searchDocumentTransformer, _mockEntityIndexBuilders); + _updateIndicesService.initializeAspectRetriever( + EntityClientAspectRetriever.builder().entityRegistry(mockEntityRegistry).build()); _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); _updateIndicesHook.invoke(event); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java index 8400e19ce49a3..021186404b2cb 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java @@ -41,7 +41,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.DatasetKey; @@ -93,14 +93,14 @@ public class EntityChangeEventGeneratorHookTest { private static final String TEST_DATA_JOB_URN = "urn:li:dataJob:job"; private Urn actorUrn; - private SystemRestliEntityClient _mockClient; + private SystemEntityClient _mockClient; private EntityService _mockEntityService; private EntityChangeEventGeneratorHook _entityChangeEventHook; @BeforeMethod public void setupTest() 
throws URISyntaxException { actorUrn = Urn.createFromString(TEST_ACTOR_URN); - _mockClient = Mockito.mock(SystemRestliEntityClient.class); + _mockClient = Mockito.mock(SystemEntityClient.class); _mockEntityService = Mockito.mock(EntityService.class); EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry = createEntityChangeEventGeneratorRegistry(); @@ -776,12 +776,12 @@ private EntityRegistry createMockEntityRegistry() { } private void verifyProducePlatformEvent( - SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception { + SystemEntityClient mockClient, PlatformEvent platformEvent) throws Exception { verifyProducePlatformEvent(mockClient, platformEvent, true); } private void verifyProducePlatformEvent( - SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) + SystemEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) throws Exception { // Verify event has been emitted. verify(mockClient, Mockito.times(1)) diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index d4c6d122a6689..3823668adeace 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -21,7 +21,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.key.DatasetKey; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; @@ -41,7 +41,7 @@ public class SiblingAssociationHookTest { private SiblingAssociationHook _siblingAssociationHook; - SystemRestliEntityClient _mockEntityClient; + SystemEntityClient _mockEntityClient; EntitySearchService _mockSearchService; @BeforeMethod @@ -51,7 +51,7 @@ public void setupTest() { SiblingAssociationHookTest.class .getClassLoader() .getResourceAsStream("test-entity-registry-siblings.yml")); - _mockEntityClient = Mockito.mock(SystemRestliEntityClient.class); + _mockEntityClient = Mockito.mock(SystemEntityClient.class); _mockSearchService = Mockito.mock(EntitySearchService.class); _siblingAssociationHook = new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java index 44b2ce54e19c8..fc47679bebd39 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java @@ -1,8 +1,11 @@ package com.linkedin.metadata.kafka.hook.spring; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Authentication; import com.datahub.metadata.ingestion.IngestionScheduler; -import com.linkedin.entity.client.SystemRestliEntityClient; +import 
com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; @@ -14,7 +17,9 @@ import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.apache.avro.generic.GenericRecord; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; @@ -40,12 +45,18 @@ public class MCLSpringTestConfiguration { @MockBean public IngestionScheduler ingestionScheduler; - @MockBean(name = "systemRestliEntityClient") - public SystemRestliEntityClient entityClient; + @Bean + public SystemEntityClient systemEntityClient( + @Qualifier("systemAuthentication") Authentication systemAuthentication) { + SystemEntityClient systemEntityClient = mock(SystemEntityClient.class); + when(systemEntityClient.getSystemAuthentication()).thenReturn(systemAuthentication); + return systemEntityClient; + } @MockBean public ElasticSearchService searchService; - @MockBean public Authentication systemAuthentication; + @MockBean(name = "systemAuthentication") + public Authentication systemAuthentication; @MockBean(name = "dataHubUpgradeKafkaListener") public DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener; diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index 181a723e1cd25..1210bf37059b4 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -1,6 +1,5 @@ package com.linkedin.metadata.kafka; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import org.springframework.boot.SpringApplication; @@ -22,6 +21,7 @@ "com.linkedin.gms.factory.config", "com.linkedin.gms.factory.entity", "com.linkedin.gms.factory.entityregistry", + "com.linkedin.gms.factory.entityclient", "com.linkedin.gms.factory.kafka", "com.linkedin.gms.factory.search", "com.linkedin.gms.factory.secret", @@ -30,12 +30,14 @@ "com.linkedin.metadata.restli", "com.linkedin.metadata.kafka", "com.linkedin.metadata.dao.producer", + "com.linkedin.gms.factory.form", + "com.linkedin.metadata.dao.producer", "io.datahubproject.metadata.jobs.common.health.kafka" }, excludeFilters = { @ComponentScan.Filter( type = FilterType.ASSIGNABLE_TYPE, - classes = {ScheduledAnalyticsFactory.class, RestliEntityClientFactory.class}) + classes = {ScheduledAnalyticsFactory.class}) }) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class MceConsumerApplication { diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java index 
a4747c72c20fa..b41e6bc75af19 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java @@ -1,12 +1,8 @@ package com.linkedin.metadata.restli; import com.datahub.auth.authentication.filter.AuthenticationFilter; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; -import com.linkedin.parseq.retry.backoff.ExponentialBackoff; -import com.linkedin.restli.client.Client; import com.linkedin.restli.server.RestliHandlerServlet; -import java.net.URI; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.web.servlet.FilterRegistrationBean; @@ -14,7 +10,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; -import org.springframework.context.annotation.Primary; @Configuration @Import({SystemAuthenticationFactory.class}) @@ -29,14 +24,6 @@ public class RestliServletConfig { @Value("${entityClient.numRetries:3}") private int numRetries; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = String.format("http://localhost:%s/gms/", configuredPort); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); - } - @Bean("restliServletRegistration") public ServletRegistrationBean restliServletRegistration( RestliHandlerServlet servlet) { diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java index 6d19db97fb39f..bce8664689e2c 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java @@ -22,7 +22,7 @@ public class MceConsumerApplicationTest extends AbstractTestNGSpringContextTests @Autowired private TestRestTemplate restTemplate; - @Autowired private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; @Autowired private KafkaHealthIndicator kafkaHealthIndicator; diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java index 1a44265c7a92a..93a6ae8fb4797 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java @@ -1,7 +1,10 @@ package com.linkedin.metadata.kafka; -import com.linkedin.entity.client.RestliEntityClient; +import com.datahub.authentication.Authentication; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; +import com.linkedin.gms.factory.config.ConfigurationProvider; import 
com.linkedin.metadata.dao.producer.KafkaHealthChecker; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.SiblingGraphService; @@ -15,6 +18,7 @@ import io.ebean.Database; import java.net.URI; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.boot.test.web.client.TestRestTemplate; @@ -30,14 +34,21 @@ public class MceConsumerApplicationTestConfiguration { @MockBean public KafkaHealthChecker kafkaHealthChecker; - @MockBean public EntityService _entityService; + @MockBean public EntityService _entityService; - @Bean("restliEntityClient") + @Bean @Primary - public RestliEntityClient restliEntityClient() { + public SystemEntityClient systemEntityClient( + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, + @Qualifier("systemAuthentication") final Authentication systemAuthentication) { String selfUri = restTemplate.getRootUri(); final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); + return new SystemRestliEntityClient( + restClient, + new ExponentialBackoff(1), + 1, + systemAuthentication, + configurationProvider.getCache().getClient().getEntityClient()); } @MockBean public Database ebeanServer; diff --git a/metadata-jobs/mce-consumer/build.gradle b/metadata-jobs/mce-consumer/build.gradle index 5fa65c06de714..49604924acb68 100644 --- a/metadata-jobs/mce-consumer/build.gradle +++ b/metadata-jobs/mce-consumer/build.gradle @@ -53,4 +53,4 @@ processResources.dependsOn avroSchemaSources clean { project.delete("src/main/resources/avro") -} \ No newline at end of file +} diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java index e22a8ba813704..352fa93f56a04 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java @@ -5,8 +5,8 @@ import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; import com.linkedin.entity.Entity; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; @@ -48,7 +48,7 @@ public class MetadataChangeEventsProcessor { @NonNull private final Authentication systemAuthentication; - private final SystemRestliEntityClient entityClient; + private final SystemEntityClient entityClient; private final Producer kafkaProducer; private final Histogram kafkaLagStats = diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java index 26d5f66f4929a..a4f5a287bc8fd 100644 --- 
a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java @@ -3,8 +3,8 @@ import com.codahale.metrics.Histogram; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; -import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; @@ -42,7 +42,7 @@ @RequiredArgsConstructor public class MetadataChangeProposalsProcessor { - private final SystemRestliEntityClient entityClient; + private final SystemEntityClient entityClient; private final Producer kafkaProducer; private final Histogram kafkaLagStats = diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java index b61858aef22cd..955d5c67c09a7 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java @@ -46,7 +46,7 @@ public PlatformEventProcessor() { public void consume(final ConsumerRecord consumerRecord) { try (Timer.Context i = MetricUtils.timer(this.getClass(), "consume").time()) { - log.info("Consuming a Platform Event"); + log.debug("Consuming a Platform Event"); kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); diff --git a/metadata-models-custom/README.md b/metadata-models-custom/README.md index 94399a67806a6..10801c3d8ed23 100644 --- a/metadata-models-custom/README.md +++ b/metadata-models-custom/README.md @@ -396,6 +396,26 @@ public class CustomDataQualityRulesMCLSideEffect extends MCLSideEffect { return timeseriesOptional.stream(); } + + private Optional buildEvent(MetadataChangeLog originMCP) { + if (originMCP.getAspect() != null) { + DataQualityRuleEvent event = new DataQualityRuleEvent(); + if (event.getActor() != null) { + event.setActor(event.getActor()); + } + event.setEventTimestamp(originMCP.getSystemMetadata().getLastObserved()); + event.setTimestampMillis(originMCP.getSystemMetadata().getLastObserved()); + if (originMCP.getPreviousAspectValue() == null) { + event.setEventType("RuleCreated"); + } else { + event.setEventType("RuleUpdated"); + } + event.setAffectedDataset(originMCP.getEntityUrn()); + + return Optional.of(event); + } + return Optional.empty(); + } } ``` diff --git a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java index a8735bae1521a..ba72a97908846 100644 --- a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java +++ b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCLSideEffect.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import 
com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.ebean.batch.MCLBatchItemImpl; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeLog; import com.mycompany.dq.DataQualityRuleEvent; @@ -20,9 +19,7 @@ public CustomDataQualityRulesMCLSideEffect(AspectPluginConfig config) { @Override protected Stream applyMCLSideEffect( - @Nonnull MCLBatchItem input, - @Nonnull EntityRegistry entityRegistry, - @Nonnull AspectRetriever aspectRetriever) { + @Nonnull MCLBatchItem input, @Nonnull AspectRetriever aspectRetriever) { // Generate Timeseries event aspect based on non-Timeseries aspect MetadataChangeLog originMCP = input.getMetadataChangeLog(); @@ -42,9 +39,7 @@ protected Stream applyMCLSideEffect( }) .map( eventMCP -> - MCLBatchItemImpl.builder() - .metadataChangeLog(eventMCP) - .build(entityRegistry, aspectRetriever)); + MCLBatchItemImpl.builder().metadataChangeLog(eventMCP).build(aspectRetriever)); return timeseriesOptional.stream(); } diff --git a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java index 2c989725f4f9d..d2041c443503e 100644 --- a/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java +++ b/metadata-models-custom/src/main/java/com/linkedin/metadata/aspect/plugins/hooks/CustomDataQualityRulesMCPSideEffect.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; -import com.linkedin.metadata.models.registry.EntityRegistry; import java.util.stream.Stream; import javax.annotation.Nonnull; @@ -18,7 +17,7 @@ public CustomDataQualityRulesMCPSideEffect(AspectPluginConfig aspectPluginConfig @Override protected Stream applyMCPSideEffect( - UpsertItem input, EntityRegistry entityRegistry, @Nonnull AspectRetriever aspectRetriever) { + UpsertItem input, @Nonnull AspectRetriever aspectRetriever) { // Mirror aspects to another URN in SQL & Search Urn mirror = UrnUtils.getUrn(input.getUrn().toString().replace(",PROD)", ",DEV)")); return Stream.of( @@ -28,6 +27,6 @@ protected Stream applyMCPSideEffect( .aspect(input.getAspect()) .auditStamp(input.getAuditStamp()) .systemMetadata(input.getSystemMetadata()) - .build(entityRegistry, aspectRetriever)); + .build(aspectRetriever)); } } diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index 04c90fa444f0c..86f404adb7fef 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -43,11 +43,10 @@ mainAvroSchemaJar.dependsOn generateAvroSchema pegasus.main.generationModes = [PegasusGenerationMode.PEGASUS, PegasusGenerationMode.AVRO] -tasks.register('generateJsonSchema', GenerateJsonSchemaTask) { +task generateJsonSchema(type: GenerateJsonSchemaTask, dependsOn: 'generateAvroSchema') { it.setInputDirectory("$projectDir/src/mainGeneratedAvroSchema") it.setOutputDirectory("$projectDir/src/generatedJsonSchema") it.setEntityRegistryYaml("${project(':metadata-models').projectDir}/src/main/resources/entity-registry.yml") - dependsOn generateAvroSchema } // https://github.com/int128/gradle-swagger-generator-plugin#task-type-generateswaggercode diff 
--git a/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl index 8390a05846c83..cc70bb5c60fc6 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/common/CustomProperties.pdl @@ -9,6 +9,7 @@ record CustomProperties { */ @Searchable = { "/*": { + "fieldType": "TEXT", "queryByDefault": true } } diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl new file mode 100644 index 0000000000000..d05f2308d82a5 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FieldFormPromptAssociation.pdl @@ -0,0 +1,17 @@ +namespace com.linkedin.common + +/** + * Information about the status of a particular prompt for a specific schema field + * on an entity. + */ +record FieldFormPromptAssociation { + /** + * The field path on a schema field. + */ + fieldPath: string + + /** + * The last time this prompt was touched for the field on the entity (set, unset) + */ + lastModified: AuditStamp +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl new file mode 100644 index 0000000000000..558672478c19b --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormAssociation.pdl @@ -0,0 +1,21 @@ +namespace com.linkedin.common + +/** + * Properties of an applied form. + */ +record FormAssociation { + /** + * Urn of the applied form + */ + urn: Urn + + /** + * A list of prompts that are not yet complete for this form. + */ + incompletePrompts: array[FormPromptAssociation] = [] + + /** + * A list of prompts that have been completed for this form. + */ + completedPrompts: array[FormPromptAssociation] = [] +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl new file mode 100644 index 0000000000000..ee0f1041e23c4 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptAssociation.pdl @@ -0,0 +1,23 @@ +namespace com.linkedin.common + +/** + * Information about the status of a particular prompt. + * Note that this is where we can add additional information about individual responses: + * actor, timestamp, and the response itself. + */ +record FormPromptAssociation { + /** + * The id for the prompt. This must be GLOBALLY UNIQUE. + */ + id: string + + /** + * The last time this prompt was touched for the entity (set, unset) + */ + lastModified: AuditStamp + + /** + * Optional information about the field-level prompt associations. + */ + fieldAssociations: optional FormPromptFieldAssociations +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl new file mode 100644 index 0000000000000..419aa8aa3921d --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormPromptFieldAssociations.pdl @@ -0,0 +1,16 @@ +namespace com.linkedin.common + +/** + * Information about the field-level prompt associations on a top-level prompt association. 
+ */ +record FormPromptFieldAssociations { + /** + * A list of field-level prompt associations that are not yet complete for this form. + */ + completedFieldPrompts: optional array[FieldFormPromptAssociation] + + /** + * A list of field-level prompt associations that are complete for this form. + */ + incompleteFieldPrompts: optional array[FieldFormPromptAssociation] +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl new file mode 100644 index 0000000000000..066e72f2f2a20 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/FormVerificationAssociation.pdl @@ -0,0 +1,17 @@ +namespace com.linkedin.common + +/** + * An association between a verification and an entity that has been granted + * via completion of one or more forms of type 'VERIFICATION'. + */ +record FormVerificationAssociation { + /** + * The urn of the form that granted this verification. + */ + form: Urn + + /** + * An audit stamp capturing who and when verification was applied for this form. + */ + lastModified: optional AuditStamp +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl new file mode 100644 index 0000000000000..0a97c7d5099ed --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/Forms.pdl @@ -0,0 +1,66 @@ +namespace com.linkedin.common + +/** + * Forms that are assigned to this entity to be filled out + */ +@Aspect = { + "name": "forms" +} +record Forms { + /** + * All incomplete forms assigned to the entity. + */ + @Searchable = { + "/*/urn": { + "fieldType": "URN", + "fieldName": "incompleteForms" + }, + "/*/completedPrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "incompleteFormsCompletedPromptIds", + }, + "/*/incompletePrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "incompleteFormsIncompletePromptIds", + }, + "/*/completedPrompts/*/lastModified/time" : { + "fieldType": "DATETIME", + "fieldName": "incompleteFormsCompletedPromptResponseTimes", + } + } + incompleteForms: array[FormAssociation] + + /** + * All complete forms assigned to the entity. + */ + @Searchable = { + "/*/urn": { + "fieldType": "URN", + "fieldName": "completedForms" + }, + "/*/completedPrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "completedFormsCompletedPromptIds", + }, + "/*/incompletePrompts/*/id" : { + "fieldType": "KEYWORD", + "fieldName": "completedFormsIncompletePromptIds", + }, + "/*/completedPrompts/*/lastModified/time" : { + "fieldType": "DATETIME", + "fieldName": "completedFormsCompletedPromptResponseTimes", + } + } + completedForms: array[FormAssociation] + + /** + * Verifications that have been applied to the entity via completed forms. 
+ */ + @Searchable = { + "/*/form": { + "fieldType": "URN", + "fieldName": "verifiedForms" + } + } + verifications: array[FormVerificationAssociation] = [] +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl index 9f0f0ff6f24a2..80dc07981816a 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/common/GlossaryTermAssociation.pdl @@ -20,8 +20,14 @@ record GlossaryTermAssociation { } urn: GlossaryTermUrn + /** + * The user URN which will be credited for adding associating this term to the entity + */ + actor: optional Urn + /** * Additional context about the association */ context: optional string + } diff --git a/metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl b/metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl new file mode 100644 index 0000000000000..c8f1e4d5009dc --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/common/PropertyValue.pdl @@ -0,0 +1,13 @@ +namespace com.linkedin.common + +record PropertyValue { + value: union [ + string, + double + ] + + /** + * Optional description of the property value + */ + description: optional string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl b/metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl new file mode 100644 index 0000000000000..2d09d828d10bd --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/datahub/DataHubSearchConfig.pdl @@ -0,0 +1,87 @@ +namespace com.linkedin.datahub + +/** +* Configuration for how any given field should be indexed and matched in the DataHub search index. +**/ +record DataHubSearchConfig { + + /** + * Name of the field in the search index. Defaults to the field name otherwise + **/ + fieldName: optional string + + /** + * Type of the field. Defines how the field is indexed and matched + **/ + fieldType: optional enum SearchFieldType { + KEYWORD, + TEXT, + TEXT_PARTIAL, + BROWSE_PATH, + URN, + URN_PARTIAL, + BOOLEAN, + COUNT, + DATETIME, + OBJECT, + BROWSE_PATH_V2, + WORD_GRAM + } + + /** + * Whether we should match the field for the default search query + **/ + queryByDefault: boolean = false + + /** + * Whether we should use the field for default autocomplete + **/ + enableAutocomplete: boolean = false + + /** + * Whether or not to add field to filters. + **/ + addToFilters: boolean = false + + /** + * Whether or not to add the "has values" to filters. + * check if this is conditional on addToFilters being true + **/ + addHasValuesToFilters: boolean = true + + /** + * Display name of the filter + **/ + filterNameOverride: optional string + + /** + * Display name of the has values filter + **/ + hasValuesFilterNameOverride: optional string + + /** + * Boost multiplier to the match score. Matches on fields with higher boost score ranks higher + **/ + boostScore: double = 1.0 + + /** + * If set, add a index field of the given name that checks whether the field exists + **/ + hasValuesFieldName: optional string + + /** + * If set, add a index field of the given name that checks the number of elements + **/ + numValuesFieldName: optional string + + /** + * (Optional) Weights to apply to score for a given value + **/ + weightsPerFieldValue: optional map[string, double] + + /** + * (Optional) Aliases for this given field that can be used for sorting etc. 
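+   * e.g. ["_entityName"] would additionally expose this field under that alias.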
+ **/ + fieldNameAliases: optional array[string] + +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl new file mode 100644 index 0000000000000..4e3ea9d01e92d --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeInfo.pdl @@ -0,0 +1,21 @@ +namespace com.linkedin.datatype + +@Aspect = { + "name": "dataTypeInfo" +} +record DataTypeInfo { + /** + * The qualified name for the data type. Usually a unique namespace + name, e.g. datahub.string + */ + qualifiedName: string + + /** + * An optional display name for the data type. + */ + displayName: optional string + + /** + * An optional description for the data type. + */ + description: optional string +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl new file mode 100644 index 0000000000000..e0ea2b6974381 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/datatype/DataTypeKey.pdl @@ -0,0 +1,11 @@ +namespace com.linkedin.datatype + +@Aspect = { + "name": "dataTypeKey" +} +record DataTypeKey { + /** + * A unique id for a data type. Usually this will be a unique namespace + data type name. + */ + id: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl new file mode 100644 index 0000000000000..3a741a4d8f0b8 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeInfo.pdl @@ -0,0 +1,22 @@ +namespace com.linkedin.entitytype + +@Aspect = { + "name": "entityTypeInfo" +} +record EntityTypeInfo { + /** + * The fully qualified name for the entity type, which usually consists of a namespace + * plus an identifier or name, e.g. datahub.dataset + */ + qualifiedName: string + + /** + * The display name for the Entity Type. + */ + displayName: optional string + + /** + * A description for the Entity Type: what is it for? + */ + description: optional string +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl new file mode 100644 index 0000000000000..d857c7ff611e3 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/entitytype/EntityTypeKey.pdl @@ -0,0 +1,11 @@ +namespace com.linkedin.entitytype + +@Aspect = { + "name": "entityTypeKey" +} +record EntityTypeKey { + /** + * A unique id for an entity type. Usually this will be a unique namespace + entity name. + */ + id: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl new file mode 100644 index 0000000000000..93ecf017efb3a --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/DynamicFormAssignment.pdl @@ -0,0 +1,19 @@ +namespace com.linkedin.form + +import com.linkedin.metadata.query.filter.Filter + +/** + * Information about how a form is assigned to entities dynamically. Provide a filter to + * match a set of entities instead of explicitly applying a form to specific entities. + */ +@Aspect = { + "name": "dynamicFormAssignment" +} +record DynamicFormAssignment { + /** + * The filter applied when assigning this form to entities. 
Entities that match this filter + * will have this form applied to them. Right now this filter only supports filtering by + * platform, entity type, container, and domain through the UI. + */ + filter: Filter +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl new file mode 100644 index 0000000000000..e58eb4c7c56a8 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/FormActorAssignment.pdl @@ -0,0 +1,21 @@ +namespace com.linkedin.form + +import com.linkedin.common.Urn + +record FormActorAssignment { + /** + * Whether the form should be assigned to the owners of assets that it is applied to. + * This is the default. + */ + owners: boolean = true + + /** + * Optional: Specific set of groups that are targeted by this form assignment. + */ + groups: optional array[Urn] + + /** + * Optional: Specific set of users that are targeted by this form assignment. + */ + users: optional array[Urn] +} \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl new file mode 100644 index 0000000000000..b17bd1537a17c --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/FormInfo.pdl @@ -0,0 +1,51 @@ +namespace com.linkedin.form + +import com.linkedin.common.Urn + +/** + * Information about a form to help with filling out metadata on entities. + */ +@Aspect = { + "name": "formInfo" +} +record FormInfo { + /** + * Display name of the form + */ + @Searchable = { + "fieldType": "TEXT_PARTIAL" + } + name: string + + /** + * Description of the form + */ + description: optional string + + /** + * The type of this form + */ + @Searchable = { + "fieldType": "KEYWORD" + } + type: enum FormType { + /** + * A form simply used for collecting metadata fields for an entity. + */ + COMPLETION + /** + * This form is used for "verifying" that entities comply with a policy via presence of a specific set of metadata fields. + */ + VERIFICATION + } = "COMPLETION" + + /** + * List of prompts to present to the user to encourage filling out metadata + */ + prompts: array[FormPrompt] = [] + + /** + * Who the form is assigned to, e.g. who should see the form when visiting the entity page or governance center + */ + actors: FormActorAssignment = { "owners": true } +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl b/metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl new file mode 100644 index 0000000000000..73f06552d46ab --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/form/FormPrompt.pdl @@ -0,0 +1,53 @@ +namespace com.linkedin.form + +import com.linkedin.common.Urn + +/** + * A prompt to present to the user to encourage filling out metadata + */ +record FormPrompt { + /** + * The unique id for this prompt. This must be GLOBALLY unique. + */ + id: string + + /** + * The title of this prompt + */ + title: string + + /** + * The description of this prompt + */ + description: optional string + + /** + * The type of prompt + */ + type: enum FormPromptType { + /** + * This prompt is meant to apply a structured property to an entity + */ + STRUCTURED_PROPERTY + /** + * This prompt is meant to apply a structured property to a schema fields entity + */ + FIELDS_STRUCTURED_PROPERTY + } + + /** + * An optional set of information specific to structured properties prompts. 
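+   * (i.e. which structured property the prompt asks the user to populate).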
+ * This should be filled out if the prompt is type STRUCTURED_PROPERTY or FIELDS_STRUCTURED_PROPERTY. + */ + structuredPropertyParams: optional record StructuredPropertyParams { + /** + * The structured property that is required on this entity + */ + urn: Urn + } + + /** + * Whether the prompt is required to be completed, in order for the form to be marked as complete. + */ + required: boolean = true +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl index c3388d4f462d4..b4a6f4b47b221 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryNodeInfo.pdl @@ -1,5 +1,6 @@ namespace com.linkedin.glossary +import com.linkedin.common.CustomProperties import com.linkedin.common.GlossaryNodeUrn /** @@ -8,7 +9,7 @@ import com.linkedin.common.GlossaryNodeUrn @Aspect = { "name": "glossaryNodeInfo" } -record GlossaryNodeInfo { +record GlossaryNodeInfo includes CustomProperties { /** * Definition of business node diff --git a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl index e987a71be7131..1de826f1b2aa6 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/glossary/GlossaryTermInfo.pdl @@ -3,6 +3,7 @@ namespace com.linkedin.glossary import com.linkedin.common.Url import com.linkedin.common.GlossaryNodeUrn import com.linkedin.common.CustomProperties +import com.linkedin.schema.PrimitiveValueDataType /** * Properties associated with a GlossaryTerm @@ -76,4 +77,5 @@ record GlossaryTermInfo includes CustomProperties { */ @deprecated rawSchema: optional string + } diff --git a/metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl new file mode 100644 index 0000000000000..124d65d0e7452 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/metadata/key/FormKey.pdl @@ -0,0 +1,14 @@ +namespace com.linkedin.metadata.key + +/** + * Key for a Form + */ +@Aspect = { + "name": "formKey", +} +record FormKey { + /** + * Unique id for the form. 
+ */ + id: string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl new file mode 100644 index 0000000000000..93dbb14c7f969 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/PrimitivePropertyValue.pdl @@ -0,0 +1,9 @@ +namespace com.linkedin.structured + +/** +* Represents a stored primitive property value +**/ +typeref PrimitivePropertyValue = union [ + string, + double + ] \ No newline at end of file diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl new file mode 100644 index 0000000000000..012ce5416364f --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/PropertyValue.pdl @@ -0,0 +1,10 @@ +namespace com.linkedin.structured + +record PropertyValue { + value: PrimitivePropertyValue + + /** + * Optional description of the property value + */ + description: optional string +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl new file mode 100644 index 0000000000000..f79e8fd86e825 --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredProperties.pdl @@ -0,0 +1,14 @@ +namespace com.linkedin.structured + +/** + * Properties about an entity governed by StructuredPropertyDefinition + */ +@Aspect = { + "name": "structuredProperties" +} +record StructuredProperties { + /** + * Custom property bag. + */ + properties: array[StructuredPropertyValueAssignment] +} diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl new file mode 100644 index 0000000000000..1b263b679531a --- /dev/null +++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyDefinition.pdl @@ -0,0 +1,74 @@ +namespace com.linkedin.structured + +import com.linkedin.common.Urn +import com.linkedin.datahub.DataHubSearchConfig + +@Aspect = { + "name": "propertyDefinition" +} +record StructuredPropertyDefinition { + /** + * The fully qualified name of the property. e.g. io.acryl.datahub.myProperty + */ + @Searchable = {} + qualifiedName: string + + /** + * The display name of the property. This is the name that will be shown in the UI and can be used to look up the property id. + */ + @Searchable = {} + displayName: optional string + + /** + * The value type of the property. Must be a dataType. + * e.g. To indicate that the property is of type DATE, use urn:li:dataType:datahub.date + */ + valueType: Urn + + /** + * A map that allows for type specialization of the valueType. + * e.g. a valueType of urn:li:dataType:datahub.urn + * can be specialized to be a USER or GROUP URN by adding a typeQualifier like + * { "allowedTypes": ["urn:li:entityType:datahub.corpuser", "urn:li:entityType:datahub.corpGroup"] } + */ + typeQualifier: optional map[string, array[string]] + + /** + * A list of allowed values that the property is allowed to take. + * If this is not specified, then the property can take any value of given type. + */ + allowedValues: optional array[PropertyValue] + + /** + * The cardinality of the property. If not specified, then the property is assumed to be single valued.. 
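+   * With MULTIPLE, a single StructuredPropertyValueAssignment may carry several
+   * entries in its "values" array.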
+   */
+  cardinality: optional enum PropertyCardinality {
+    SINGLE
+    MULTIPLE
+  } = "SINGLE"
+
+  @Relationship = {
+    "/*": {
+      "name": "StructuredPropertyOf",
+      "entityTypes": [ "entityType" ]
+    }
+  }
+  @Searchable = {
+    "/*": {
+      "fieldName": "entityTypes"
+    }
+  }
+  entityTypes: array[Urn]
+
+  /**
+   * The description of the property. This is the description that will be shown in the UI.
+   */
+  description: optional string
+
+  /**
+   * Search configuration for this property. If not specified, then the property is indexed using the default mapping
+   * from the logical type.
+   */
+  searchConfiguration: optional DataHubSearchConfig
+}
+
diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl
new file mode 100644
index 0000000000000..16fec7b2a5ab6
--- /dev/null
+++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyKey.pdl
@@ -0,0 +1,11 @@
+namespace com.linkedin.structured
+
+@Aspect = {
+  "name": "structuredPropertyKey"
+}
+record StructuredPropertyKey {
+  /**
+   * The id for a structured property.
+   */
+  id: string
+}
diff --git a/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl
new file mode 100644
index 0000000000000..d8b8a93a3edb6
--- /dev/null
+++ b/metadata-models/src/main/pegasus/com/linkedin/structured/StructuredPropertyValueAssignment.pdl
@@ -0,0 +1,29 @@
+namespace com.linkedin.structured
+import com.linkedin.common.Urn
+import com.linkedin.common.AuditStamp
+
+record StructuredPropertyValueAssignment {
+
+  /**
+   * The property that is being assigned a value.
+   */
+  propertyUrn: Urn
+
+  /**
+   * The value assigned to the property.
+   */
+  values: array[PrimitivePropertyValue]
+
+  /**
+   * Audit stamp containing who created this relationship edge and when
+   */
+  created: optional AuditStamp
+
+  /**
+   * Audit stamp containing who last modified this relationship edge and when
+   */
+  lastModified: optional AuditStamp
+
+}
+
+
diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml
index 9d8c4bfdab0da..65382c747a16a 100644
--- a/metadata-models/src/main/resources/entity-registry.yml
+++ b/metadata-models/src/main/resources/entity-registry.yml
@@ -42,6 +42,8 @@ entities:
       - dataPlatformInstance
       - browsePathsV2
       - access
+      - structuredProperties
+      - forms
  - name: dataHubPolicy
    doc: DataHub Policies represent access policies granted to users or groups on metadata operations like edit, view etc.
    category: internal
@@ -67,6 +69,7 @@ entities:
      - institutionalMemory
      - dataPlatformInstance
      - browsePathsV2
+      - structuredProperties
      - subTypes
  - name: dataFlow
    category: core
@@ -85,6 +88,7 @@ entities:
      - institutionalMemory
      - dataPlatformInstance
      - browsePathsV2
+      - structuredProperties
  - name: dataProcess
    keyAspect: dataProcessKey
    aspects:
@@ -409,7 +413,8 @@ entities:
  - name: schemaField
    category: core
    keyAspect: schemaFieldKey
-    aspects: []
+    aspects:
+      - structuredProperties
  - name: globalSettings
    doc: Global settings for the platform
    category: internal
@@ -468,5 +473,51 @@ entities:
      - dataContractProperties
      - dataContractStatus
      - status
-
+  - name: entityType
+    doc: A type of entity in the DataHub Metadata Model.
+ category: core + keyAspect: entityTypeKey + aspects: + - entityTypeInfo + - institutionalMemory + - status + - name: dataType + doc: A type of data element stored within DataHub. + category: core + keyAspect: dataTypeKey + aspects: + - dataTypeInfo + - institutionalMemory + - status + - name: structuredProperty + doc: Structured Property represents a property meant for extending the core model of a logical entity + category: core + keyAspect: structuredPropertyKey + aspects: + - propertyDefinition + - institutionalMemory + - status + - name: form + category: core + keyAspect: formKey + aspects: + - formInfo + - dynamicFormAssignment + - ownership events: +plugins: + aspectPayloadValidators: + - className: 'com.linkedin.metadata.aspect.validation.PropertyDefinitionValidator' + enabled: true + supportedOperations: + - UPSERT + supportedEntityAspectNames: + - entityName: structuredProperty + aspectName: propertyDefinition + - className: 'com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator' + enabled: true + supportedOperations: + - UPSERT + supportedEntityAspectNames: + - entityName: '*' + aspectName: structuredProperties \ No newline at end of file diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java index 8ce7675edf580..c4b01fea8c09d 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java @@ -28,6 +28,9 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import java.net.URISyntaxException; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -175,6 +178,17 @@ public void migrateGroupMembershipToNativeGroupMembership( userUrnList.forEach(userUrn -> addUserToNativeGroup(userUrn, groupUrn, authentication)); } + public List getGroupsForUser( + @Nonnull final Urn userUrn, @Nonnull final Authentication authentication) throws Exception { + final NativeGroupMembership nativeGroupMembership = + getExistingNativeGroupMembership(userUrn, authentication); + final GroupMembership groupMembership = getExistingGroupMembership(userUrn, authentication); + final List allGroups = new ArrayList<>(); + allGroups.addAll(nativeGroupMembership.getNativeGroups()); + allGroups.addAll(groupMembership.getGroups()); + return allGroups; + } + NativeGroupMembership getExistingNativeGroupMembership( @Nonnull final Urn userUrn, final Authentication authentication) throws Exception { final EntityResponse entityResponse = @@ -186,7 +200,7 @@ NativeGroupMembership getExistingNativeGroupMembership( authentication) .get(userUrn); - NativeGroupMembership nativeGroupMembership; + final NativeGroupMembership nativeGroupMembership; if (entityResponse == null || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { // If the user doesn't have the NativeGroupMembership aspect, create one. 
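
Illustrative aside (not part of the patch): a minimal caller-side sketch of the
new getGroupsForUser helper above. The GroupService wiring, the system
Authentication, and the logger are hypothetical here; per the implementation,
the returned list contains the user's native (DataHub-managed) group URNs
followed by the externally-ingested ones.

    // Resolve every group a user belongs to, across both membership aspects.
    // (Exception handling elided; getGroupsForUser declares `throws Exception`.)
    final Urn userUrn = Urn.createFromString("urn:li:corpuser:jdoe");
    final List<Urn> groups = groupService.getGroupsForUser(userUrn, systemAuthentication);
    groups.forEach(groupUrn -> log.info("{} is a member of {}", userUrn, groupUrn));
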
@@ -204,6 +218,32 @@ NativeGroupMembership getExistingNativeGroupMembership( return nativeGroupMembership; } + GroupMembership getExistingGroupMembership( + @Nonnull final Urn userUrn, @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final EntityResponse entityResponse = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), + authentication) + .get(userUrn); + + final GroupMembership groupMembership; + if (entityResponse == null + || !entityResponse.getAspects().containsKey(GROUP_MEMBERSHIP_ASPECT_NAME)) { + // If the user doesn't have the GroupMembership aspect, create one. + groupMembership = new GroupMembership(); + groupMembership.setGroups(new UrnArray()); + } else { + groupMembership = + new GroupMembership( + entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + } + return groupMembership; + } + String createGroupInfo( @Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java index 40555107f4c79..e072a59ae77ff 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.key.DataHubAccessTokenKey; import com.linkedin.metadata.utils.AuditStampUtils; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -41,7 +40,7 @@ @Slf4j public class StatefulTokenService extends StatelessTokenService { - private final EntityService _entityService; + private final EntityService _entityService; private final LoadingCache _revokedTokenCache; private final String salt; @@ -49,7 +48,7 @@ public StatefulTokenService( @Nonnull final String signingKey, @Nonnull final String signingAlgorithm, @Nullable final String iss, - @Nonnull final EntityService entityService, + @Nonnull final EntityService entityService, @Nonnull final String salt) { super(signingKey, signingAlgorithm, iss); this._entityService = entityService; @@ -154,11 +153,7 @@ public String generateAccessToken( _entityService.ingestProposal( AspectsBatchImpl.builder() - .mcps( - proposalStream.collect(Collectors.toList()), - auditStamp, - _entityService.getEntityRegistry(), - _entityService.getSystemEntityClient()) + .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) .build(), false); diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java index bc749a373c5b0..eb5243c0e5e4a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java @@ -8,9 +8,18 @@ public class VisualConfiguration { /** Asset related configurations */ public AssetsConfiguration assets; + /** Custom app 
title to show in the browse tab */ + public String appTitle; + /** Queries tab related configurations */ public QueriesTabConfig queriesTab; + /** + * Boolean flag disabling viewing the Business Glossary page for users without the 'Manage + * Glossaries' privilege + */ + public boolean hideGlossary; + /** Queries tab related configurations */ public EntityProfileConfig entityProfile; diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index cfc84491ab0ae..2b202d513c9bf 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -1,3 +1,6 @@ +# The base URL where DataHub is accessible to users. +baseUrl: ${DATAHUB_BASE_URL:http://localhost:9002} + # App Layer authentication: # Enable if you want all requests to the Metadata Service to be authenticated. Disabled by default. @@ -113,7 +116,9 @@ visualConfig: queriesTabResultSize: ${REACT_APP_QUERIES_TAB_RESULT_SIZE:5} assets: logoUrl: ${REACT_APP_LOGO_URL:/assets/platforms/datahublogo.png} - faviconUrl: ${REACT_APP_FAVICON_URL:/assets/favicon.ico} + faviconUrl: ${REACT_APP_FAVICON_URL:/assets/icons/favicon.ico} + appTitle: ${REACT_APP_TITLE:} + hideGlossary: ${REACT_APP_HIDE_GLOSSARY:false} entityProfile: # we only support default tab for domains right now. In order to implement for other entities, update React code domainDefaultTab: ${DOMAIN_DEFAULT_TAB:} # set to DOCUMENTATION_TAB to show documentation tab first @@ -305,6 +310,11 @@ systemUpdate: backOffFactor: ${BOOTSTRAP_SYSTEM_UPDATE_BACK_OFF_FACTOR:2} # Multiplicative factor for back off, default values will result in waiting 5min 15s waitForSystemUpdate: ${BOOTSTRAP_SYSTEM_UPDATE_WAIT_FOR_SYSTEM_UPDATE:true} +structuredProperties: + enabled: ${ENABLE_STRUCTURED_PROPERTIES_HOOK:true} # applies structured properties mappings + writeEnabled: ${ENABLE_STRUCTURED_PROPERTIES_WRITE:true} # write structured property values + systemUpdateEnabled: ${ENABLE_STRUCTURED_PROPERTIES_SYSTEM_UPDATE:false} # applies structured property mappings in system update job + healthCheck: cacheDurationSeconds: ${HEALTH_CHECK_CACHE_DURATION_SECONDS:5} @@ -324,6 +334,7 @@ featureFlags: uiEnabled: ${PRE_PROCESS_HOOKS_UI_ENABLED:true} # Circumvents Kafka for processing index updates for UI changes sourced from GraphQL to avoid processing delays showAcrylInfo: ${SHOW_ACRYL_INFO:false} # Show different CTAs within DataHub around moving to Managed DataHub. Set to true for the demo site. nestedDomainsEnabled: ${NESTED_DOMAINS_ENABLED:true} # Enables the nested Domains feature that allows users to have sub-Domains. 
If this is off, Domains appear "flat" again
+  schemaFieldEntityFetchEnabled: ${SCHEMA_FIELD_ENTITY_FETCH_ENABLED:true} # Enables fetching for schema field entities from the database when we hydrate them on schema fields
 
 entityChangeEvents:
   enabled: ${ENABLE_ENTITY_CHANGE_EVENTS_HOOK:true}
@@ -375,5 +386,12 @@ cache:
       status: 20
       corpUserCredentials: 20
       corpUserSettings: 20
+  structuredProperty:
+    propertyDefinition: 86400 # 1 day
+    structuredPropertyKey: 86400 # 1 day
 
 springdoc.api-docs.groups.enabled: true
+
+forms:
+  hook:
+    enabled: ${FORMS_HOOK_ENABLED:true}
\ No newline at end of file
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java
index ec398388ae77b..7b823e552da97 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java
@@ -1,6 +1,5 @@
 package com.linkedin.gms.factory.auth;
 
-import com.datahub.authentication.Authentication;
 import com.datahub.authorization.AuthorizerChain;
 import com.datahub.authorization.AuthorizerContext;
 import com.datahub.authorization.DataHubAuthorizer;
@@ -18,8 +17,8 @@
 import com.datahub.plugins.loader.IsolatedClassLoader;
 import com.datahub.plugins.loader.PluginPermissionManagerImpl;
 import com.google.common.collect.ImmutableMap;
+import com.linkedin.entity.client.SystemEntityClient;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
-import com.linkedin.metadata.client.JavaEntityClient;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import jakarta.annotation.Nonnull;
 import java.nio.file.Path;
@@ -47,39 +46,29 @@ public class AuthorizerChainFactory {
   @Qualifier("configurationProvider")
   private ConfigurationProvider configurationProvider;
 
-  @Autowired
-  @Qualifier("dataHubAuthorizer")
-  private DataHubAuthorizer dataHubAuthorizer;
-
-  @Autowired
-  @Qualifier("systemAuthentication")
-  private Authentication systemAuthentication;
-
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient entityClient;
-
   @Bean(name = "authorizerChain")
   @Scope("singleton")
   @Nonnull
-  protected AuthorizerChain getInstance() {
-    final EntitySpecResolver resolver = initResolver();
+  protected AuthorizerChain getInstance(
+      final DataHubAuthorizer dataHubAuthorizer, final SystemEntityClient systemEntityClient) {
+    final EntitySpecResolver resolver = initResolver(systemEntityClient);
 
     // Extract + initialize customer authorizers from application configs.
     final List<Authorizer> authorizers = new ArrayList<>(initCustomAuthorizers(resolver));
 
     if (configurationProvider.getAuthorization().getDefaultAuthorizer().isEnabled()) {
       AuthorizerContext ctx = new AuthorizerContext(Collections.emptyMap(), resolver);
-      this.dataHubAuthorizer.init(Collections.emptyMap(), ctx);
+      dataHubAuthorizer.init(Collections.emptyMap(), ctx);
       log.info("Default DataHubAuthorizer is enabled.
Appending it to the authorization chain."); - authorizers.add(this.dataHubAuthorizer); + authorizers.add(dataHubAuthorizer); } return new AuthorizerChain(authorizers, dataHubAuthorizer); } - private EntitySpecResolver initResolver() { - return new DefaultEntitySpecResolver(systemAuthentication, entityClient); + private EntitySpecResolver initResolver(SystemEntityClient systemEntityClient) { + return new DefaultEntitySpecResolver( + systemEntityClient.getSystemAuthentication(), systemEntityClient); } private List initCustomAuthorizers(EntitySpecResolver resolver) { @@ -121,7 +110,7 @@ private void registerAuthorizer( // Get security mode set by user SecurityMode securityMode = SecurityMode.valueOf( - this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); // Create permission manager with security mode PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java index 3b23243f76742..0935e8ad0e7d4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java @@ -1,33 +1,19 @@ package com.linkedin.gms.factory.auth; -import com.datahub.authentication.Authentication; import com.datahub.authorization.DataHubAuthorizer; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) -@Import({RestliEntityClientFactory.class}) public class DataHubAuthorizerFactory { - @Autowired - @Qualifier("systemAuthentication") - private Authentication systemAuthentication; - - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient entityClient; - @Value("${authorization.defaultAuthorizer.cacheRefreshIntervalSecs}") private Integer policyCacheRefreshIntervalSeconds; @@ -40,7 +26,7 @@ public class DataHubAuthorizerFactory { @Bean(name = "dataHubAuthorizer") @Scope("singleton") @Nonnull - protected DataHubAuthorizer getInstance() { + protected DataHubAuthorizer dataHubAuthorizer(final SystemEntityClient systemEntityClient) { final DataHubAuthorizer.AuthorizationMode mode = policiesEnabled @@ -48,8 +34,8 @@ protected DataHubAuthorizer getInstance() { : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; return new DataHubAuthorizer( - systemAuthentication, - entityClient, + systemEntityClient.getSystemAuthentication(), + systemEntityClient, 10, policyCacheRefreshIntervalSeconds, mode, diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java
index 83544e4165ae3..beb467d614930 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java
@@ -28,16 +28,16 @@ public class DataHubTokenServiceFactory {
   @Value("${authentication.tokenService.issuer:datahub-metadata-service}")
   private String issuer;
 
-  /**
-  @Inject
-  @Named("entityService")
-  private EntityService _entityService;
-  */
+  /**
+  @Inject
+  @Named("entityService")
+  private EntityService<?> _entityService;
+  */
   @Autowired
   @Qualifier("entityService")
-  private EntityService _entityService;
+  private EntityService<?> _entityService;
 
   @Bean(name = "dataHubTokenService")
   @Scope("singleton")
   @Nonnull
   protected StatefulTokenService getInstance() {
     return new StatefulTokenService(
-        this.signingKey, this.signingAlgorithm, this.issuer, this._entityService, this.saltingKey);
+        signingKey, signingAlgorithm, issuer, _entityService, saltingKey);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java
index 7c6c4384d7343..47af58a8d8626 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java
@@ -1,7 +1,7 @@
 package com.linkedin.gms.factory.auth;
 
 import com.datahub.authentication.group.GroupService;
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.graph.GraphClient;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
@@ -18,11 +18,7 @@ public class GroupServiceFactory {
   @Autowired
   @Qualifier("entityService")
-  private EntityService _entityService;
-
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _javaEntityClient;
+  private EntityService<?> _entityService;
 
   @Autowired
   @Qualifier("graphClient")
@@ -31,7 +27,8 @@ public class GroupServiceFactory {
   @Bean(name = "groupService")
   @Scope("singleton")
   @Nonnull
-  protected GroupService getInstance() throws Exception {
-    return new GroupService(this._javaEntityClient, this._entityService, this._graphClient);
+  protected GroupService getInstance(@Qualifier("entityClient") final EntityClient entityClient)
+      throws Exception {
+    return new GroupService(entityClient, _entityService, _graphClient);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java
index c44eada46794d..7a2b14fdb0f28 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java
@@ -1,7 +1,7 @@
 package com.linkedin.gms.factory.auth;
 
 import com.datahub.authentication.invite.InviteTokenService;
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.EntityClient;
 import
com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; @@ -15,9 +15,6 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class InviteTokenServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; @Autowired @Qualifier("dataHubSecretService") @@ -26,7 +23,8 @@ public class InviteTokenServiceFactory { @Bean(name = "inviteTokenService") @Scope("singleton") @Nonnull - protected InviteTokenService getInstance() throws Exception { - return new InviteTokenService(this._javaEntityClient, this._secretService); + protected InviteTokenService getInstance( + @Qualifier("entityClient") final EntityClient entityClient) throws Exception { + return new InviteTokenService(entityClient, _secretService); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java index 844f3a094b6b7..0ed8f1a4b7af4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.user.NativeUserService; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; @@ -19,11 +19,7 @@ public class NativeUserServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; - - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; + private EntityService _entityService; @Autowired @Qualifier("dataHubSecretService") @@ -34,11 +30,8 @@ public class NativeUserServiceFactory { @Bean(name = "nativeUserService") @Scope("singleton") @Nonnull - protected NativeUserService getInstance() throws Exception { + protected NativeUserService getInstance(final SystemEntityClient entityClient) throws Exception { return new NativeUserService( - _entityService, - _javaEntityClient, - _secretService, - _configurationProvider.getAuthentication()); + _entityService, entityClient, _secretService, _configurationProvider.getAuthentication()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java index a6ae703576a3e..317d8583ef1c3 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java @@ -1,10 +1,9 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.post.PostService; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -14,14 +13,12 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PostServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; @Bean(name = "postService") @Scope("singleton") @Nonnull - protected PostService getInstance() throws Exception { - return new PostService(this._javaEntityClient); + protected PostService getInstance(@Qualifier("entityClient") final EntityClient entityClient) + throws Exception { + return new PostService(entityClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java index 7696d5201493a..9321e2544a493 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java @@ -1,10 +1,9 @@ package com.linkedin.gms.factory.auth; import com.datahub.authorization.role.RoleService; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -15,14 +14,11 @@ @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RoleServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; - @Bean(name = "roleService") @Scope("singleton") @Nonnull - protected RoleService getInstance() throws Exception { - return new RoleService(this._javaEntityClient); + protected RoleService getInstance(@Qualifier("entityClient") final EntityClient entityClient) + throws Exception { + return new RoleService(entityClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java index 52d13b05a654d..efe688ceee3ff 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java @@ -34,8 +34,8 @@ public class SystemAuthenticationFactory { @Nonnull protected Authentication getInstance() { // TODO: Change to service - final Actor systemActor = new Actor(ActorType.USER, this.systemClientId); + final Actor systemActor = new Actor(ActorType.USER, systemClientId); return new Authentication( - systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret)); + systemActor, String.format("Basic %s:%s", systemClientId, systemSecret)); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java index 5663162186b83..465d28542f371 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java @@ -18,7 +18,7 @@ public class SiblingGraphServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Autowired @Qualifier("graphService") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java index 5c7c2370ab337..e969793fac1ef 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java @@ -60,9 +60,15 @@ public class ConfigurationProvider { /** System Update configurations */ private SystemUpdateConfiguration systemUpdate; + /** The base URL where DataHub is hosted. */ + private String baseUrl; + /** Configuration for caching */ private CacheConfiguration cache; /** Configuration for the health check server */ private HealthCheckConfiguration healthCheck; + + /** Structured properties related configurations */ + private StructuredPropertiesConfiguration structuredProperties; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java new file mode 100644 index 0000000000000..6d4d4ea30c863 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/StructuredPropertiesConfiguration.java @@ -0,0 +1,10 @@ +package com.linkedin.gms.factory.config; + +import lombok.Data; + +@Data +public class StructuredPropertiesConfiguration { + private boolean enabled; + private boolean writeEnabled; + private boolean systemUpdateEnabled; +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java index 739211855cacd..39d42b6fb7568 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java @@ -1,6 +1,6 @@ package com.linkedin.gms.factory.dataproduct; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.service.DataProductService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; @@ -15,9 +15,6 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class DataProductServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; @Autowired @Qualifier("graphClient") @@ -26,7 +23,8 @@ public class DataProductServiceFactory { @Bean(name = "dataProductService") @Scope("singleton") @Nonnull - protected DataProductService getInstance() throws Exception { - return new DataProductService(_javaEntityClient, _graphClient); + protected DataProductService getInstance( + @Qualifier("entityClient") final 
EntityClient entityClient) throws Exception { + return new DataProductService(entityClient, _graphClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java index 326537ee07cbd..788dc3777e539 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java @@ -9,6 +9,7 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.net.ssl.SSLContext; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -16,6 +17,7 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class CassandraSessionFactory { @@ -50,7 +52,7 @@ protected CqlSession createSession() { try { csb = csb.withSslContext(SSLContext.getDefault()); } catch (Exception e) { - e.printStackTrace(); + log.error("Error creating cassandra ssl session", e); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java index 8644327747281..6bc2d3c7be63f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/DeleteEntityServiceFactory.java @@ -8,7 +8,6 @@ import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.Import; @Configuration @@ -16,14 +15,13 @@ public class DeleteEntityServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Autowired @Qualifier("graphService") private GraphService _graphService; @Bean(name = "deleteEntityService") - @DependsOn({"entityService"}) @Nonnull protected DeleteEntityService createDeleteEntityService() { return new DeleteEntityService(_entityService, _graphService); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java index 88a3f5749343b..5fd64b02d08a8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java @@ -48,16 +48,14 @@ protected EntityService createInstance( final KafkaEventProducer eventProducer = new KafkaEventProducer(producer, convention, kafkaHealthChecker); FeatureFlags featureFlags = configurationProvider.getFeatureFlags(); - EntityService entityService = - new EntityServiceImpl( - aspectDao, - eventProducer, - entityRegistry, - featureFlags.isAlwaysEmitChangeLog(), - updateIndicesService, - featureFlags.getPreProcessHooks(), - 
_ebeanMaxTransactionRetry); - return entityService; + return new EntityServiceImpl( + aspectDao, + eventProducer, + entityRegistry, + featureFlags.isAlwaysEmitChangeLog(), + updateIndicesService, + featureFlags.getPreProcessHooks(), + _ebeanMaxTransactionRetry); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java deleted file mode 100644 index c550fc161b606..0000000000000 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java +++ /dev/null @@ -1,100 +0,0 @@ -package com.linkedin.gms.factory.entity; - -import com.datahub.authentication.Authentication; -import com.linkedin.entity.client.RestliEntityClient; -import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; -import com.linkedin.metadata.client.JavaEntityClient; -import com.linkedin.metadata.client.SystemJavaEntityClient; -import com.linkedin.metadata.entity.DeleteEntityService; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; -import com.linkedin.metadata.event.EventProducer; -import com.linkedin.metadata.search.EntitySearchService; -import com.linkedin.metadata.search.LineageSearchService; -import com.linkedin.metadata.search.SearchService; -import com.linkedin.metadata.search.client.CachingEntitySearchService; -import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; -import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; - -@Configuration -@ConditionalOnExpression("'${entityClient.preferredImpl:java}'.equals('java')") -@Import({DataHubKafkaProducerFactory.class}) -public class JavaEntityClientFactory { - - @Autowired - @Qualifier("entityService") - private EntityService _entityService; - - @Autowired - @Qualifier("deleteEntityService") - private DeleteEntityService _deleteEntityService; - - @Autowired - @Qualifier("searchService") - private SearchService _searchService; - - @Autowired - @Qualifier("entitySearchService") - private EntitySearchService _entitySearchService; - - @Autowired - @Qualifier("cachingEntitySearchService") - private CachingEntitySearchService _cachingEntitySearchService; - - @Autowired - @Qualifier("timeseriesAspectService") - private TimeseriesAspectService _timeseriesAspectService; - - @Autowired - @Qualifier("relationshipSearchService") - private LineageSearchService _lineageSearchService; - - @Autowired - @Qualifier("kafkaEventProducer") - private EventProducer _eventProducer; - - @Bean("javaEntityClient") - public JavaEntityClient getJavaEntityClient( - @Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { - return new JavaEntityClient( - _entityService, - _deleteEntityService, - _entitySearchService, - _cachingEntitySearchService, - _searchService, - _lineageSearchService, - _timeseriesAspectService, - _eventProducer, - restliEntityClient); - } - - @Bean("systemJavaEntityClient") - public SystemJavaEntityClient systemJavaEntityClient( - @Qualifier("configurationProvider") final 
ConfigurationProvider configurationProvider, - @Qualifier("systemAuthentication") final Authentication systemAuthentication, - @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) { - SystemJavaEntityClient systemJavaEntityClient = - new SystemJavaEntityClient( - _entityService, - _deleteEntityService, - _entitySearchService, - _cachingEntitySearchService, - _searchService, - _lineageSearchService, - _timeseriesAspectService, - _eventProducer, - restliEntityClient, - systemAuthentication, - configurationProvider.getCache().getClient().getEntityClient()); - - _entityService.setSystemEntityClient(systemJavaEntityClient); - - return systemJavaEntityClient; - } -} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java index dae5f903d7d80..31ad933b9579d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java @@ -33,9 +33,9 @@ public class RetentionServiceFactory { @DependsOn({"cassandraSession", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") @Nonnull - protected RetentionService createCassandraInstance(CqlSession session) { - RetentionService retentionService = - new CassandraRetentionService(_entityService, session, _batchSize); + protected RetentionService createCassandraInstance(CqlSession session) { + RetentionService retentionService = + new CassandraRetentionService<>(_entityService, session, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } @@ -44,9 +44,9 @@ protected RetentionService createCassandraInstance(CqlSession session) { @DependsOn({"ebeanServer", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull - protected RetentionService createEbeanInstance(Database server) { - RetentionService retentionService = - new EbeanRetentionService(_entityService, server, _batchSize); + protected RetentionService createEbeanInstance(Database server) { + RetentionService retentionService = + new EbeanRetentionService<>(_entityService, server, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java new file mode 100644 index 0000000000000..e1055835616ea --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java @@ -0,0 +1,27 @@ +package com.linkedin.gms.factory.entity; + +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.service.RollbackService; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import javax.annotation.Nonnull; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class RollbackServiceFactory { + + @Value("${authorization.restApiAuthorization:false}") + boolean restApiAuthorizationEnabled; + + 
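+  // Note (assumption): the flag defaults to false; RollbackService presumably enforces REST API authorization on rollback requests only when it is enabled.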
@Bean + @Nonnull + protected RollbackService rollbackService( + final EntityService entityService, + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService) { + return new RollbackService( + entityService, systemMetadataService, timeseriesAspectService, restApiAuthorizationEnabled); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java index d8c1422f988c2..34c1887d67c56 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java @@ -1,7 +1,8 @@ package com.linkedin.gms.factory.entity.update.indices; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.search.EntityIndexBuildersFactory; +import com.linkedin.metadata.client.EntityClientAspectRetriever; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; @@ -22,7 +23,7 @@ public class UpdateIndicesServiceFactory { @Autowired private ApplicationContext context; - @Value("${entityClient.preferredImpl:java}") + @Value("${entityClient.impl:java}") private String entityClientImpl; @Bean @@ -34,18 +35,27 @@ public UpdateIndicesService updateIndicesService( EntityRegistry entityRegistry, SearchDocumentTransformer searchDocumentTransformer, EntityIndexBuilders entityIndexBuilders) { + UpdateIndicesService updateIndicesService = new UpdateIndicesService( graphService, entitySearchService, timeseriesAspectService, systemMetadataService, - entityRegistry, searchDocumentTransformer, entityIndexBuilders); if ("restli".equals(entityClientImpl)) { - updateIndicesService.setSystemEntityClient(context.getBean(SystemRestliEntityClient.class)); + /* + When in restli mode, the EntityService is not available. 
Wire in an AspectRetriever here instead + based on the entity client + */ + SystemEntityClient systemEntityClient = context.getBean(SystemEntityClient.class); + updateIndicesService.initializeAspectRetriever( + EntityClientAspectRetriever.builder() + .entityRegistry(entityRegistry) + .entityClient(systemEntityClient) + .build()); } return updateIndicesService; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java new file mode 100644 index 0000000000000..c6fe0d6e95f48 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/EntityClientConfigFactory.java @@ -0,0 +1,20 @@ +package com.linkedin.gms.factory.entityclient; + +import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; + +@Configuration +@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) +public class EntityClientConfigFactory { + + @Bean + public EntityClientCacheConfig entityClientCacheConfig( + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider) { + return configurationProvider.getCache().getClient().getEntityClient(); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java new file mode 100644 index 0000000000000..530136e32662f --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/JavaEntityClientFactory.java @@ -0,0 +1,85 @@ +package com.linkedin.gms.factory.entityclient; + +import com.datahub.authentication.Authentication; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.client.SystemJavaEntityClient; +import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; +import com.linkedin.metadata.entity.DeleteEntityService; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.event.EventProducer; +import com.linkedin.metadata.search.EntitySearchService; +import com.linkedin.metadata.search.LineageSearchService; +import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.search.client.CachingEntitySearchService; +import com.linkedin.metadata.service.RollbackService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import javax.inject.Singleton; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; + +/** The *Java* Entity Client should be preferred if executing within the GMS service. 
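Selection is driven by the entityClient.impl property; since the condition below uses matchIfMissing = true, the Java client is also used when the property is unset.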
*/ +@Configuration +@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) +@ConditionalOnProperty(name = "entityClient.impl", havingValue = "java", matchIfMissing = true) +public class JavaEntityClientFactory { + + @Bean("entityClient") + @Singleton + public EntityClient entityClient( + final @Qualifier("entityService") EntityService _entityService, + final @Qualifier("deleteEntityService") DeleteEntityService _deleteEntityService, + final @Qualifier("searchService") SearchService _searchService, + final @Qualifier("entitySearchService") EntitySearchService _entitySearchService, + final @Qualifier("cachingEntitySearchService") CachingEntitySearchService + _cachingEntitySearchService, + final @Qualifier("timeseriesAspectService") TimeseriesAspectService _timeseriesAspectService, + final @Qualifier("relationshipSearchService") LineageSearchService _lineageSearchService, + final @Qualifier("kafkaEventProducer") EventProducer _eventProducer, + final RollbackService rollbackService) { + return new JavaEntityClient( + _entityService, + _deleteEntityService, + _entitySearchService, + _cachingEntitySearchService, + _searchService, + _lineageSearchService, + _timeseriesAspectService, + rollbackService, + _eventProducer); + } + + @Bean("systemEntityClient") + @Singleton + public SystemEntityClient systemEntityClient( + final @Qualifier("entityService") EntityService _entityService, + final @Qualifier("deleteEntityService") DeleteEntityService _deleteEntityService, + final @Qualifier("searchService") SearchService _searchService, + final @Qualifier("entitySearchService") EntitySearchService _entitySearchService, + final @Qualifier("cachingEntitySearchService") CachingEntitySearchService + _cachingEntitySearchService, + final @Qualifier("timeseriesAspectService") TimeseriesAspectService _timeseriesAspectService, + final @Qualifier("relationshipSearchService") LineageSearchService _lineageSearchService, + final @Qualifier("kafkaEventProducer") EventProducer _eventProducer, + final RollbackService rollbackService, + final EntityClientCacheConfig entityClientCacheConfig, + @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + return new SystemJavaEntityClient( + _entityService, + _deleteEntityService, + _entitySearchService, + _cachingEntitySearchService, + _searchService, + _lineageSearchService, + _timeseriesAspectService, + rollbackService, + _eventProducer, + systemAuthentication, + entityClientCacheConfig); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java similarity index 53% rename from metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java rename to metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java index 1dee8c4aa4d27..88989b1833e78 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityclient/RestliEntityClientFactory.java @@ -1,47 +1,40 @@ -package com.linkedin.gms.factory.entity; +package com.linkedin.gms.factory.entityclient; import com.datahub.authentication.Authentication; +import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.client.RestliEntityClient; +import 
com.linkedin.entity.client.SystemEntityClient; import com.linkedin.entity.client.SystemRestliEntityClient; -import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.metadata.restli.DefaultRestliClientFactory; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; import java.net.URI; +import javax.inject.Singleton; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; +/** The Java Entity Client should be preferred if executing within the GMS service. */ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) +@ConditionalOnProperty(name = "entityClient.impl", havingValue = "restli") public class RestliEntityClientFactory { - @Value("${datahub.gms.host}") - private String gmsHost; - - @Value("${datahub.gms.port}") - private int gmsPort; - - @Value("${datahub.gms.useSSL}") - private boolean gmsUseSSL; - - @Value("${datahub.gms.uri}") - private String gmsUri; - - @Value("${datahub.gms.sslContext.protocol}") - private String gmsSslProtocol; - - @Value("${entityClient.retryInterval:2}") - private int retryInterval; - - @Value("${entityClient.numRetries:3}") - private int numRetries; - - @Bean("restliEntityClient") - public RestliEntityClient getRestliEntityClient() { + @Bean("entityClient") + @Singleton + public EntityClient entityClient( + @Value("${datahub.gms.host}") String gmsHost, + @Value("${datahub.gms.port}") int gmsPort, + @Value("${datahub.gms.useSSL}") boolean gmsUseSSL, + @Value("${datahub.gms.uri}") String gmsUri, + @Value("${datahub.gms.sslContext.protocol}") String gmsSslProtocol, + @Value("${entityClient.retryInterval:2}") int retryInterval, + @Value("${entityClient.numRetries:3}") int numRetries) { final Client restClient; if (gmsUri != null) { restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol); @@ -52,10 +45,19 @@ public RestliEntityClient getRestliEntityClient() { return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); } - @Bean("systemRestliEntityClient") - public SystemRestliEntityClient systemRestliEntityClient( - @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, + @Bean("systemEntityClient") + @Singleton + public SystemEntityClient systemEntityClient( + @Value("${datahub.gms.host}") String gmsHost, + @Value("${datahub.gms.port}") int gmsPort, + @Value("${datahub.gms.useSSL}") boolean gmsUseSSL, + @Value("${datahub.gms.uri}") String gmsUri, + @Value("${datahub.gms.sslContext.protocol}") String gmsSslProtocol, + @Value("${entityClient.retryInterval:2}") int retryInterval, + @Value("${entityClient.numRetries:3}") int numRetries, + final EntityClientCacheConfig entityClientCacheConfig, @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + final Client restClient; if (gmsUri != null) { restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol); @@ -68,6 +70,6 @@ public SystemRestliEntityClient systemRestliEntityClient( new 
ExponentialBackoff(retryInterval), numRetries, systemAuthentication, - configurationProvider.getCache().getClient().getEntityClient()); + entityClientCacheConfig); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java new file mode 100644 index 0000000000000..73be819028f57 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/form/FormServiceFactory.java @@ -0,0 +1,21 @@ +package com.linkedin.gms.factory.form; + +import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.service.FormService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.PropertySource; +import org.springframework.context.annotation.Scope; + +@Configuration +@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) +public class FormServiceFactory { + @Bean(name = "formService") + @Scope("singleton") + @Nonnull + protected FormService getInstance(final SystemEntityClient entityClient) throws Exception { + return new FormService(entityClient, entityClient.getSystemAuthentication()); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java index 723715a13b1c1..60697e57a9afb 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java @@ -10,17 +10,16 @@ import com.linkedin.datahub.graphql.GmsGraphQLEngineArgs; import com.linkedin.datahub.graphql.GraphQLEngine; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.auth.DataHubTokenServiceFactory; import com.linkedin.gms.factory.common.GitVersionFactory; import com.linkedin.gms.factory.common.IndexConventionFactory; import com.linkedin.gms.factory.common.RestHighLevelClientFactory; import com.linkedin.gms.factory.common.SiblingGraphServiceFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.recommendation.RecommendationServiceFactory; -import com.linkedin.metadata.client.JavaEntityClient; -import com.linkedin.metadata.client.SystemJavaEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.graph.GraphService; @@ -29,6 +28,7 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -52,7 +52,6 @@ @Import({ RestHighLevelClientFactory.class, 
IndexConventionFactory.class, - RestliEntityClientFactory.class, RecommendationServiceFactory.class, EntityRegistryFactory.class, DataHubTokenServiceFactory.class, @@ -68,14 +67,6 @@ public class GraphQLEngineFactory { @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) private IndexConvention indexConvention; - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _entityClient; - - @Autowired - @Qualifier("systemJavaEntityClient") - private SystemJavaEntityClient _systemEntityClient; - @Autowired @Qualifier("graphClient") private GraphClient _graphClient; @@ -86,7 +77,7 @@ public class GraphQLEngineFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Autowired @Qualifier("graphService") @@ -172,15 +163,21 @@ public class GraphQLEngineFactory { @Qualifier("dataProductService") private DataProductService _dataProductService; + @Autowired + @Qualifier("formService") + private FormService _formService; + @Value("${platformAnalytics.enabled}") // TODO: Migrate to DATAHUB_ANALYTICS_ENABLED private Boolean isAnalyticsEnabled; @Bean(name = "graphQLEngine") @Nonnull - protected GraphQLEngine getInstance() { + protected GraphQLEngine getInstance( + @Qualifier("entityClient") final EntityClient entityClient, + @Qualifier("systemEntityClient") final SystemEntityClient systemEntityClient) { GmsGraphQLEngineArgs args = new GmsGraphQLEngineArgs(); - args.setEntityClient(_entityClient); - args.setSystemEntityClient(_systemEntityClient); + args.setEntityClient(entityClient); + args.setSystemEntityClient(systemEntityClient); args.setGraphClient(_graphClient); args.setUsageClient(_usageClient); if (isAnalyticsEnabled) { @@ -215,6 +212,7 @@ protected GraphQLEngine getInstance() { args.setLineageService(_lineageService); args.setQueryService(_queryService); args.setFeatureFlags(_configProvider.getFeatureFlags()); + args.setFormService(_formService); args.setDataProductService(_dataProductService); return new GmsGraphQLEngine(args).builder().build(); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java index 78b9c5d52efdd..0ba953d66730c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java @@ -1,11 +1,9 @@ package com.linkedin.gms.factory.ingestion; -import com.datahub.authentication.Authentication; import com.datahub.metadata.ingestion.IngestionScheduler; -import com.linkedin.entity.client.RestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.gms.factory.entity.RestliEntityClientFactory; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; @@ -16,18 +14,10 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; -@Import({SystemAuthenticationFactory.class, RestliEntityClientFactory.class}) +@Import({SystemAuthenticationFactory.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) 
public class IngestionSchedulerFactory { - @Autowired - @Qualifier("systemAuthentication") - private Authentication _systemAuthentication; - - @Autowired - @Qualifier("restliEntityClient") - private RestliEntityClient _entityClient; - @Autowired @Qualifier("configurationProvider") private ConfigurationProvider _configProvider; @@ -43,10 +33,10 @@ public class IngestionSchedulerFactory { @Bean(name = "ingestionScheduler") @Scope("singleton") @Nonnull - protected IngestionScheduler getInstance() { + protected IngestionScheduler getInstance(final SystemEntityClient entityClient) { return new IngestionScheduler( - _systemAuthentication, - _entityClient, + entityClient.getSystemAuthentication(), + entityClient, _configProvider.getIngestion(), _delayIntervalSeconds, _refreshIntervalSeconds); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java index d82a789c9c086..0d00218d1990e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java @@ -127,7 +127,6 @@ use DefaultErrorHandler (does back-off retry and then logs) rather than stopping DeserializationException.class, new CommonContainerStoppingErrorHandler()); factory.setCommonErrorHandler(delegatingErrorHandler); } - log.info( String.format( "Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s", diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java index a88e1d971973b..c06ebae27f3af 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java @@ -35,7 +35,7 @@ public class AwsGlueSchemaRegistryFactory { @Bean("schemaRegistryConfig") @Nonnull - protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) { + protected SchemaRegistryConfig getInstance(final ConfigurationProvider configurationProvider) { Map props = new HashMap<>(); // FIXME: Properties for this factory should come from ConfigurationProvider object, // specifically under the diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java index 1589b33862bfe..d81df694c420d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java @@ -1,10 +1,9 @@ package com.linkedin.gms.factory.lineage; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -14,14 +13,12 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class LineageServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; @Bean(name = "lineageService") @Scope("singleton") @Nonnull - protected LineageService getInstance() throws Exception { - return new LineageService(this._javaEntityClient); + protected LineageService getInstance(@Qualifier("entityClient") final EntityClient entityClient) + throws Exception { + return new LineageService(entityClient); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java index ff48a922adf22..5403ca80fa5a8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java @@ -1,12 +1,9 @@ package com.linkedin.gms.factory.ownership; -import com.datahub.authentication.Authentication; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; @@ -15,18 +12,12 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class OwnershipTypeServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; - - @Autowired - @Qualifier("systemAuthentication") - private Authentication _authentication; @Bean(name = "ownerShipTypeService") @Scope("singleton") @Nonnull - protected OwnershipTypeService getInstance() throws Exception { - return new OwnershipTypeService(_javaEntityClient, _authentication); + protected OwnershipTypeService getInstance(final SystemEntityClient entityClient) + throws Exception { + return new OwnershipTypeService(entityClient, entityClient.getSystemAuthentication()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java index cf81cbf70d5eb..64af400708e6c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java @@ -1,12 +1,9 @@ package com.linkedin.gms.factory.query; -import com.datahub.authentication.Authentication; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.service.QueryService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; -import 
org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; @@ -15,18 +12,11 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class QueryServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; - - @Autowired - @Qualifier("systemAuthentication") - private Authentication _authentication; @Bean(name = "queryService") @Scope("singleton") @Nonnull - protected QueryService getInstance() throws Exception { - return new QueryService(_javaEntityClient, _authentication); + protected QueryService getInstance(final SystemEntityClient entityClient) throws Exception { + return new QueryService(entityClient, entityClient.getSystemAuthentication()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java index f3be4db147399..9b8707b746b29 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java @@ -31,7 +31,7 @@ public class MostPopularCandidateSourceFactory { @Autowired @Qualifier("entityService") - private EntityService entityService; + private EntityService entityService; @Bean(name = "mostPopularCandidateSource") @Nonnull diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java index ac227faf06c4c..cfdb705dc3f6d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java @@ -31,7 +31,7 @@ public class RecentlyEditedCandidateSourceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Bean(name = "recentlyEditedCandidateSource") @Nonnull diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java index 6f17846efc1cd..742ed685fd6e1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java @@ -31,7 +31,7 @@ public class RecentlyViewedCandidateSourceFactory { @Autowired @Qualifier("entityService") - private EntityService entityService; + private EntityService entityService; @Bean(name = "recentlyViewedCandidateSource") @Nonnull diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java index ad241e7717545..8b1ef069423ee 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java @@ -18,7 +18,7 @@ public class TopPlatformsCandidateSourceFactory { @Autowired @Qualifier("entityService") - private EntityService entityService; + private EntityService entityService; @Autowired @Qualifier("entitySearchService") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java index 2b6d495e4fe33..7b5f4e18d4d53 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.search; +import static com.linkedin.metadata.Constants.*; + +import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.gms.factory.config.ConfigurationProvider; @@ -32,6 +35,16 @@ public class ElasticSearchServiceFactory { private static final ObjectMapper YAML_MAPPER = new YAMLMapper(); + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + YAML_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + @Autowired @Qualifier("baseElasticSearchComponents") private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java index 17103240c938b..0d7d2e9c1855f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java @@ -19,6 +19,8 @@ @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class LineageSearchServiceFactory { + public static final String LINEAGE_SEARCH_SERVICE_CACHE_NAME = "relationshipSearchService"; + @Bean(name = "relationshipSearchService") @Primary @Nonnull @@ -31,7 +33,7 @@ protected LineageSearchService getInstance( return new LineageSearchService( searchService, graphService, - cacheEnabled ? cacheManager.getCache("relationshipSearchService") : null, + cacheEnabled ? 
cacheManager.getCache(LINEAGE_SEARCH_SERVICE_CACHE_NAME) : null, cacheEnabled, configurationProvider.getCache().getSearch().getLineage()); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java index 32ad2175c9052..1fddb51065a1d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java @@ -1,12 +1,9 @@ package com.linkedin.gms.factory.search.views; -import com.datahub.authentication.Authentication; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; @@ -15,18 +12,11 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ViewServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; - - @Autowired - @Qualifier("systemAuthentication") - private Authentication _authentication; @Bean(name = "viewService") @Scope("singleton") @Nonnull - protected ViewService getInstance() throws Exception { - return new ViewService(_javaEntityClient, _authentication); + protected ViewService getInstance(final SystemEntityClient entityClient) throws Exception { + return new ViewService(entityClient, entityClient.getSystemAuthentication()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java index f0d09a815628d..a3f533a22f7ee 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java @@ -1,12 +1,9 @@ package com.linkedin.gms.factory.settings; -import com.datahub.authentication.Authentication; -import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; @@ -15,18 +12,10 @@ @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class SettingsServiceFactory { - @Autowired - @Qualifier("javaEntityClient") - private JavaEntityClient _javaEntityClient; - - @Autowired - @Qualifier("systemAuthentication") - private Authentication _authentication; - @Bean(name = "settingsService") 
@Scope("singleton") @Nonnull - protected SettingsService getInstance() throws Exception { - return new SettingsService(_javaEntityClient, _authentication); + protected SettingsService getInstance(final SystemEntityClient entityClient) throws Exception { + return new SettingsService(entityClient, entityClient.getSystemAuthentication()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java index b735e490f583e..393bbdf155485 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java @@ -25,7 +25,7 @@ public class DailyReport { private final IndexConvention _indexConvention; private final RestHighLevelClient _elasticClient; private final ConfigurationProvider _configurationProvider; - private final EntityService _entityService; + private final EntityService _entityService; private final GitVersion _gitVersion; private static final String MIXPANEL_TOKEN = "5ee83d940754d63cacbf7d34daa6f44a"; @@ -36,7 +36,7 @@ public DailyReport( IndexConvention indexConvention, RestHighLevelClient elasticClient, ConfigurationProvider configurationProvider, - EntityService entityService, + EntityService entityService, GitVersion gitVersion) { this._indexConvention = indexConvention; this._elasticClient = elasticClient; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java index 4986e705fd7b4..7d3638d44769b 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java @@ -24,7 +24,7 @@ public DailyReport dailyReport( @Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient, @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention, ConfigurationProvider configurationProvider, - EntityService entityService, + EntityService entityService, GitVersion gitVersion) { return new DailyReport( indexConvention, elasticClient, configurationProvider, entityService, gitVersion); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java index 748acb4a9499e..2e8317df6b14b 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java @@ -17,7 +17,7 @@ public final class TelemetryUtils { private static String _clientId; - public static String getClientId(EntityService entityService) { + public static String getClientId(EntityService entityService) { if (_clientId == null) { createClientIdIfNotPresent(entityService); RecordTemplate clientIdTemplate = @@ -28,7 +28,7 @@ public static String getClientId(EntityService entityService) { return _clientId; } - private static void createClientIdIfNotPresent(EntityService entityService) { + private static void createClientIdIfNotPresent(EntityService entityService) { String uuid = UUID.randomUUID().toString(); 
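// A one-time random UUID becomes the deployment's stable telemetry client id, wrapped in a TelemetryClientId record so later getClientId calls can reuse it.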
TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid); final AuditStamp clientIdStamp = new AuditStamp(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java index 4e858fb5cdefd..cb0ef29b50a89 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java @@ -32,7 +32,7 @@ public class TrackingServiceFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Autowired @Qualifier("gitVersion") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java index 53a98977413e4..50d4125257fb2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/eventgenerator/EntityChangeEventGeneratorRegistryFactory.java @@ -2,8 +2,7 @@ import static com.linkedin.metadata.Constants.*; -import com.datahub.authentication.Authentication; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.timeline.eventgenerator.AssertionRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DataProcessInstanceRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DatasetPropertiesChangeEventGenerator; @@ -32,12 +31,10 @@ public class EntityChangeEventGeneratorRegistryFactory { @Autowired ApplicationContext applicationContext; @Bean(name = "entityChangeEventGeneratorRegistry") - @DependsOn({"restliEntityClient", "systemAuthentication"}) + @DependsOn({"systemEntityClient"}) @Nonnull protected EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry() { - final SystemRestliEntityClient entityClient = - applicationContext.getBean(SystemRestliEntityClient.class); - final Authentication systemAuthentication = applicationContext.getBean(Authentication.class); + final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class); final EntityChangeEventGeneratorRegistry registry = new EntityChangeEventGeneratorRegistry(); registry.register(SCHEMA_METADATA_ASPECT_NAME, new SchemaMetadataChangeEventGenerator()); registry.register( diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java index dc82fc4907edc..7ff91affdf765 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java @@ -40,7 +40,7 @@ static Urn getUpgradeUrn(String upgradeId) { new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } - static void setUpgradeResult(Urn urn, EntityService entityService) throws URISyntaxException { + static void setUpgradeResult(Urn urn, EntityService 
entityService) throws URISyntaxException { final AuditStamp auditStamp = new AuditStamp() .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java index ff5d3f215d86b..ed8a53aa594c8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java @@ -7,7 +7,6 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.key.DataHubUpgradeKey; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -21,12 +20,12 @@ @Slf4j public abstract class UpgradeStep implements BootstrapStep { - protected final EntityService _entityService; + protected final EntityService _entityService; private final String _version; private final String _upgradeId; private final Urn _upgradeUrn; - public UpgradeStep(EntityService entityService, String version, String upgradeId) { + public UpgradeStep(EntityService entityService, String version, String upgradeId) { this._entityService = entityService; this._version = version; this._upgradeId = upgradeId; diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java index 70fa91ae61861..b808c3da5d8d0 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java @@ -13,7 +13,9 @@ import com.linkedin.metadata.boot.steps.IndexDataPlatformsStep; import com.linkedin.metadata.boot.steps.IngestDataPlatformInstancesStep; import com.linkedin.metadata.boot.steps.IngestDataPlatformsStep; +import com.linkedin.metadata.boot.steps.IngestDataTypesStep; import com.linkedin.metadata.boot.steps.IngestDefaultGlobalSettingsStep; +import com.linkedin.metadata.boot.steps.IngestEntityTypesStep; import com.linkedin.metadata.boot.steps.IngestOwnershipTypesStep; import com.linkedin.metadata.boot.steps.IngestPoliciesStep; import com.linkedin.metadata.boot.steps.IngestRetentionPoliciesStep; @@ -54,7 +56,7 @@ public class BootstrapManagerFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Autowired @Qualifier("entityRegistry") @@ -131,6 +133,8 @@ protected BootstrapManager createInstance() { new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); final IngestOwnershipTypesStep ingestOwnershipTypesStep = new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); + final IngestDataTypesStep ingestDataTypesStep = new IngestDataTypesStep(_entityService); + final IngestEntityTypesStep ingestEntityTypesStep = new IngestEntityTypesStep(_entityService); final List finalSteps = new ArrayList<>( @@ -148,7 +152,9 @@ protected BootstrapManager createInstance() { removeClientIdAspectStep, restoreDbtSiblingsIndices, indexDataPlatformsStep, - restoreColumnLineageIndices)); + restoreColumnLineageIndices, + ingestDataTypesStep, + 
ingestEntityTypesStep)); if (_upgradeDefaultBrowsePathsEnabled) { finalSteps.add(new UpgradeDefaultBrowsePathsStep(_entityService)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java index 2436938c6c026..f13037c1e21c7 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java @@ -26,7 +26,7 @@ public class IngestRetentionPoliciesStepFactory { @Autowired @Qualifier("entityService") - private EntityService _entityService; + private EntityService _entityService; @Value("${entityService.retention.enabled}") private Boolean _enableRetention; diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java index 770c0d2840fe8..80e139dcd5c65 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java @@ -47,7 +47,7 @@ public class BackfillBrowsePathsV2Step extends UpgradeStep { private final SearchService _searchService; - public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { + public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { super(entityService, VERSION, UPGRADE_ID); _searchService = searchService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java index c46cfdd61158d..591082235ff30 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java @@ -34,7 +34,7 @@ public class IndexDataPlatformsStep extends UpgradeStep { private final EntityRegistry _entityRegistry; public IndexDataPlatformsStep( - EntityService entityService, + EntityService entityService, EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java index e2f0b70526af5..716ae292338ed 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java @@ -28,7 +28,7 @@ public class IngestDataPlatformInstancesStep implements BootstrapStep { private static final int BATCH_SIZE = 1000; - private final EntityService _entityService; + private final EntityService _entityService; private final AspectMigrationsDao _migrationsDao; @Override @@ -81,8 +81,7 @@ public void execute() throws Exception { .aspectName(DATA_PLATFORM_INSTANCE_ASPECT_NAME) 
.aspect(dataPlatformInstance.get()) .auditStamp(aspectAuditStamp) - .build( - _entityService.getEntityRegistry(), _entityService.getSystemEntityClient())); + .build(_entityService)); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java index 37eac6d5ec470..89ed493e162cc 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java @@ -31,7 +31,7 @@ public class IngestDataPlatformsStep implements BootstrapStep { private static final String PLATFORM_ASPECT_NAME = "dataPlatformInfo"; - private final EntityService _entityService; + private final EntityService _entityService; @Override public String name() { @@ -91,9 +91,7 @@ public void execute() throws IOException, URISyntaxException { new AuditStamp() .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) .setTime(System.currentTimeMillis())) - .build( - _entityService.getEntityRegistry(), - _entityService.getSystemEntityClient()); + .build(_entityService); } catch (URISyntaxException e) { throw new RuntimeException(e); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java new file mode 100644 index 0000000000000..6f3a415b521e4 --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataTypesStep.java @@ -0,0 +1,103 @@ +package com.linkedin.metadata.boot.steps; + +import static com.linkedin.metadata.Constants.*; + +import com.datahub.util.RecordUtils; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.datatype.DataTypeInfo; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.GenericAspect; +import com.linkedin.mxe.MetadataChangeProposal; +import java.util.Objects; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.springframework.core.io.ClassPathResource; + +/** This bootstrap step is responsible for ingesting default data types. 
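Definitions are read from the boot/data_types.json classpath resource (DEFAULT_FILE_PATH below) and upserted on each startup.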
 */ +@Slf4j +public class IngestDataTypesStep implements BootstrapStep { + + private static final String DEFAULT_FILE_PATH = "./boot/data_types.json"; + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); + private final EntityService<?> _entityService; + private final String _resourcePath; + + public IngestDataTypesStep(@Nonnull final EntityService<?> entityService) { + this(entityService, DEFAULT_FILE_PATH); + } + + public IngestDataTypesStep( + @Nonnull final EntityService<?> entityService, @Nonnull final String filePath) { + _entityService = Objects.requireNonNull(entityService, "entityService must not be null"); + _resourcePath = filePath; + } + + @Override + public String name() { + return "IngestDataTypesStep"; + } + + @Override + public void execute() throws Exception { + log.info("Ingesting default data types..."); + + // 1. Read from the file into JSON. + final JsonNode dataTypesObj = + JSON_MAPPER.readTree(new ClassPathResource(_resourcePath).getFile()); + + if (!dataTypesObj.isArray()) { + throw new RuntimeException( + String.format( + "Found malformed data types file, expected an Array but found %s", + dataTypesObj.getNodeType())); + } + + log.info("Ingesting {} data types", dataTypesObj.size()); + int numIngested = 0; + for (final JsonNode roleObj : dataTypesObj) { + final Urn urn = Urn.createFromString(roleObj.get("urn").asText()); + final DataTypeInfo info = + RecordUtils.toRecordTemplate(DataTypeInfo.class, roleObj.get("info").toString()); + log.info(String.format("Ingesting default data type with urn %s", urn)); + ingestDataType(urn, info); + numIngested++; + } + log.info("Ingested {} new data types", numIngested); + } + + private void ingestDataType(final Urn dataTypeUrn, final DataTypeInfo info) throws Exception { + // Write key + final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); + final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(dataTypeUrn.getEntityType()); + GenericAspect keyAspect = + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(dataTypeUrn, keyAspectSpec)); + keyAspectProposal.setAspect(keyAspect); + keyAspectProposal.setAspectName(keyAspectSpec.getName()); + keyAspectProposal.setEntityType(DATA_TYPE_ENTITY_NAME); + keyAspectProposal.setChangeType(ChangeType.UPSERT); + keyAspectProposal.setEntityUrn(dataTypeUrn); + + final MetadataChangeProposal proposal = new MetadataChangeProposal(); + proposal.setEntityUrn(dataTypeUrn); + proposal.setEntityType(DATA_TYPE_ENTITY_NAME); + proposal.setAspectName(DATA_TYPE_INFO_ASPECT_NAME); + proposal.setAspect(GenericRecordUtils.serializeAspect(info)); + proposal.setChangeType(ChangeType.UPSERT); + + _entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); + } +}
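For orientation, the aspect-upsert shape that ingestDataType builds above can be exercised on its own. The following is a minimal sketch, assuming the DataHub Java classes from this patch are on the classpath; the urn is illustrative, and the literal entity/aspect names ("dataType", "dataTypeInfo") stand in for the Constants referenced above (the test-entity-registry.yaml later in this patch uses the same names):

    import com.linkedin.common.urn.Urn;
    import com.linkedin.common.urn.UrnUtils;
    import com.linkedin.datatype.DataTypeInfo;
    import com.linkedin.events.metadata.ChangeType;
    import com.linkedin.metadata.utils.GenericRecordUtils;
    import com.linkedin.mxe.MetadataChangeProposal;

    public class DataTypeProposalSketch {
      public static MetadataChangeProposal buildInfoProposal() {
        // Hypothetical custom data type; the id after the namespace must match qualifiedName.
        Urn urn = UrnUtils.getUrn("urn:li:dataType:datahub.exampleType");

        DataTypeInfo info = new DataTypeInfo();
        info.setQualifiedName("datahub.exampleType");
        info.setDisplayName("Example Type");

        // Same UPSERT shape that IngestDataTypesStep.ingestDataType assembles above.
        MetadataChangeProposal proposal = new MetadataChangeProposal();
        proposal.setEntityUrn(urn);
        proposal.setEntityType("dataType");
        proposal.setAspectName("dataTypeInfo");
        proposal.setAspect(GenericRecordUtils.serializeAspect(info));
        proposal.setChangeType(ChangeType.UPSERT);
        return proposal;
      }
    }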
"./boot/global_settings.json"; - private final EntityService _entityService; + private final EntityService _entityService; private final String _resourcePath; - public IngestDefaultGlobalSettingsStep(@Nonnull final EntityService entityService) { + public IngestDefaultGlobalSettingsStep(@Nonnull final EntityService entityService) { this(entityService, DEFAULT_SETTINGS_RESOURCE_PATH); } public IngestDefaultGlobalSettingsStep( - @Nonnull final EntityService entityService, @Nonnull final String resourcePath) { + @Nonnull final EntityService entityService, @Nonnull final String resourcePath) { _entityService = Objects.requireNonNull(entityService); _resourcePath = Objects.requireNonNull(resourcePath); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java new file mode 100644 index 0000000000000..b2213eda71cae --- /dev/null +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStep.java @@ -0,0 +1,88 @@ +package com.linkedin.metadata.boot.steps; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entitytype.EntityTypeInfo; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.GenericAspect; +import com.linkedin.mxe.MetadataChangeProposal; +import java.util.Objects; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +/** This bootstrap step is responsible for ingesting default data types. */ +@Slf4j +public class IngestEntityTypesStep implements BootstrapStep { + + private static final String DATAHUB_NAMESPACE = "datahub"; + private final EntityService _entityService; + + public IngestEntityTypesStep(@Nonnull final EntityService entityService) { + _entityService = Objects.requireNonNull(entityService, "entityService must not be null"); + } + + @Override + public String name() { + return "IngestEntityTypesStep"; + } + + @Override + public void execute() throws Exception { + log.info("Ingesting entity types from base entity registry..."); + + log.info( + "Ingesting {} entity types", _entityService.getEntityRegistry().getEntitySpecs().size()); + int numIngested = 0; + for (final EntitySpec spec : _entityService.getEntityRegistry().getEntitySpecs().values()) { + final Urn entityTypeUrn = + UrnUtils.getUrn( + String.format("urn:li:entityType:%s.%s", DATAHUB_NAMESPACE, spec.getName())); + final EntityTypeInfo info = + new EntityTypeInfo() + .setDisplayName(spec.getName()) // TODO: Support display name in the entity registry. 
+ .setQualifiedName(entityTypeUrn.getId()); + log.info(String.format("Ingesting entity type with urn %s", entityTypeUrn)); + ingestEntityType(entityTypeUrn, info); + numIngested++; + } + log.info("Ingested {} new entity types", numIngested); + } + + private void ingestEntityType(final Urn entityTypeUrn, final EntityTypeInfo info) + throws Exception { + // Write key + final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); + final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(entityTypeUrn.getEntityType()); + GenericAspect keyAspect = + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(entityTypeUrn, keyAspectSpec)); + keyAspectProposal.setAspect(keyAspect); + keyAspectProposal.setAspectName(keyAspectSpec.getName()); + keyAspectProposal.setEntityType(ENTITY_TYPE_ENTITY_NAME); + keyAspectProposal.setChangeType(ChangeType.UPSERT); + keyAspectProposal.setEntityUrn(entityTypeUrn); + + final MetadataChangeProposal proposal = new MetadataChangeProposal(); + proposal.setEntityUrn(entityTypeUrn); + proposal.setEntityType(ENTITY_TYPE_ENTITY_NAME); + proposal.setAspectName(ENTITY_TYPE_INFO_ASPECT_NAME); + proposal.setAspect(GenericRecordUtils.serializeAspect(info)); + proposal.setChangeType(ChangeType.UPSERT); + + _entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); + } +}
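As a quick illustration of the urn scheme the step derives, here is a small sketch using only the format string shown above; the entity names in the loop are examples (the tests later in this patch confirm urns such as urn:li:entityType:datahub.corpuser):

    import com.linkedin.common.urn.Urn;
    import com.linkedin.common.urn.UrnUtils;

    public class EntityTypeUrnSketch {
      public static void main(String[] args) {
        // Mirrors the String.format call in IngestEntityTypesStep above.
        for (String entityName : new String[] {"dataset", "chart", "corpuser"}) {
          Urn urn =
              UrnUtils.getUrn(String.format("urn:li:entityType:%s.%s", "datahub", entityName));
          System.out.println(urn); // e.g. urn:li:entityType:datahub.dataset
        }
      }
    }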
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java index fc1c82fc6d631..02d965b44fc88 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java @@ -34,7 +34,7 @@ public class IngestOwnershipTypesStep implements BootstrapStep { private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); - private final EntityService _entityService; + private final EntityService<?> _entityService; private final Resource _ownershipTypesResource; @Override @@ -100,11 +100,7 @@ private void ingestOwnershipType( _entityService.ingestProposal( AspectsBatchImpl.builder() - .mcps( - List.of(keyAspectProposal, proposal), - auditStamp, - _entityService.getEntityRegistry(), - _entityService.getSystemEntityClient()) + .mcps(List.of(keyAspectProposal, proposal), auditStamp, _entityService) .build(), false); }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java index 9b9feb8e14638..f925c96e333fd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java @@ -46,7 +46,7 @@ public class IngestPoliciesStep implements BootstrapStep { private static final String POLICY_INFO_ASPECT_NAME = "dataHubPolicyInfo"; private final EntityRegistry _entityRegistry; - private final EntityService _entityService; + private final EntityService<?> _entityService; private final EntitySearchService _entitySearchService; private final SearchDocumentTransformer _searchDocumentTransformer; @@ -210,8 +210,7 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR new AuditStamp() .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) .setTime(System.currentTimeMillis()), - _entityRegistry, - _entityService.getSystemEntityClient()) + _entityService) .build(), false); }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java index 9ce4d9ce644a8..28b556e78de12 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java @@ -31,7 +31,7 @@ @RequiredArgsConstructor public class IngestRolesStep implements BootstrapStep { private static final int SLEEP_SECONDS = 60; - private final EntityService _entityService; + private final EntityService<?> _entityService; private final EntityRegistry _entityRegistry; @Override @@ -130,8 +130,7 @@ private void ingestRole( new AuditStamp() .setActor(Urn.createFromString(SYSTEM_ACTOR)) .setTime(System.currentTimeMillis()), - _entityRegistry, - _entityService.getSystemEntityClient()) + _entityService) .build(), false);
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java index 9e00b960482c5..1f8127d8be108 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java @@ -29,7 +29,7 @@ public class IngestRootUserStep implements BootstrapStep { private static final String USER_INFO_ASPECT_NAME = "corpUserInfo"; - private final EntityService _entityService; + private final EntityService<?> _entityService; @Override public String name() {
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java index 919ba93c9213e..2e60df54452cc 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java @@ -10,7 +10,6 @@ import com.linkedin.metadata.boot.UpgradeStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ListResult; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ExtraInfo; @@ -31,8 +30,7 @@ public class RestoreColumnLineageIndices extends UpgradeStep { private final EntityRegistry _entityRegistry; public RestoreColumnLineageIndices( - @Nonnull final EntityService entityService, - @Nonnull final EntityRegistry entityRegistry) { + @Nonnull final EntityService<?> entityService, @Nonnull final EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entityRegistry = Objects.requireNonNull(entityRegistry, "entityRegistry must not be null"); }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java index e2d367a034491..789a4cbd11878 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; import com.linkedin.metadata.key.DataHubUpgradeKey; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -47,7 +46,7 @@ public class RestoreDbtSiblingsIndices implements BootstrapStep { private static final Integer BATCH_SIZE = 1000; private static final Integer SLEEP_SECONDS = 120; - private final EntityService _entityService; + private final EntityService<?> _entityService; private final EntityRegistry _entityRegistry; @Override
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java index 319bbd084e05c..5c2b2c28e6dcf 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java @@ -38,7 +38,7 @@ public class RestoreGlossaryIndices extends UpgradeStep { private final EntityRegistry _entityRegistry; public RestoreGlossaryIndices( - EntityService entityService, + EntityService<?> entityService, EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID);
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java index e2d59b505a568..3eedbb48aaeca 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java @@ -39,7 +39,7 @@ public class UpgradeDefaultBrowsePathsStep extends UpgradeStep { private static final String UPGRADE_ID = "upgrade-default-browse-paths-step"; private static final Integer BATCH_SIZE = 5000; - public UpgradeDefaultBrowsePathsStep(EntityService entityService) { + public UpgradeDefaultBrowsePathsStep(EntityService<?> entityService) { super(entityService, VERSION, UPGRADE_ID); }
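The recurring one-line hunks in the files above are all the same mechanical change: EntityService gained a type parameter in this refactor, and call sites that do not depend on the concrete batch item type now accept the unbounded wildcard. A minimal sketch of the idiom, with hypothetical names rather than the DataHub classes themselves:

    // Hypothetical stand-in for a service that became generic in this refactor.
    class Service<T> {
      void ingest(T item) { /* ... */ }
    }

    class Consumer {
      // Wildcard form: the consumer can hold any Service<T> without naming T,
      // which is all the boot steps above need.
      private final Service<?> service;

      Consumer(Service<?> service) {
        this.service = service;
      }
    }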
diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java new file mode 100644 index 0000000000000..87f1546bd9557 --- /dev/null +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryDefaultsTest.java @@ -0,0 +1,27 @@ +package com.linkedin.gms.factory.search; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.linkedin.gms.factory.config.ConfigurationProvider; +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import java.util.Map; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; + +@TestPropertySource(locations = "classpath:/application.yml") +@SpringBootTest(classes = {ElasticSearchIndexBuilderFactory.class}) +@EnableConfigurationProperties(ConfigurationProvider.class) +public class ElasticSearchIndexBuilderFactoryDefaultsTest extends AbstractTestNGSpringContextTests { + @Autowired ESIndexBuilder test; + + @Test + void testInjection() { + assertNotNull(test); + assertEquals(Map.of(), test.getIndexSettingOverrides()); + } +}
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java index 8268eeff48c5e..0657141562089 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java @@ -76,7 +76,7 @@ public class BackfillBrowsePathsV2StepTest { @Test public void testExecuteNoExistingBrowsePaths() throws Exception { - final EntityService mockService = initMockService(); + final EntityService<?> mockService = initMockService(); final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); @@ -110,7 +110,7 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { @Test public void testDoesNotRunWhenAlreadyExecuted() throws Exception { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); @@ -140,8 +140,8 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { Mockito.anyBoolean()); } - private EntityService initMockService() throws URISyntaxException { - final EntityService mockService = Mockito.mock(EntityService.class); + private EntityService<?> initMockService() throws URISyntaxException { + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new UpgradeDefaultBrowsePathsStepTest.TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry);
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java index 41672a07a2389..1ac0f2f4f914a 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java @@ -39,7 +39,7 @@ public class IngestDataPlatformInstancesStepTest { @Test public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() throws Exception { - final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class);
mockDBWithDataPlatformInstanceAspects(migrationsDao); @@ -55,7 +55,7 @@ public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() @Test public void testExecuteCopesWithEmptyDB() throws Exception { - final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class); mockEmptyDB(migrationsDao); @@ -73,7 +73,7 @@ public void testExecuteCopesWithEmptyDB() throws Exception { @Test public void testExecuteChecksKeySpecForAllUrns() throws Exception { final EntityRegistry entityRegistry = getTestEntityRegistry(); - final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class); final int countOfCorpUserEntities = 2; final int countOfChartEntities = 4; @@ -96,7 +96,7 @@ public void testExecuteChecksKeySpecForAllUrns() throws Exception { @Test public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throws Exception { final EntityRegistry entityRegistry = getTestEntityRegistry(); - final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class); final int countOfCorpUserEntities = 5; final int countOfChartEntities = 7; @@ -161,7 +161,7 @@ private void mockEmptyDB(AspectMigrationsDao migrationsDao) { private void mockDBWithWorkToDo( EntityRegistry entityRegistry, - EntityService entityService, + EntityService<?> entityService, AspectMigrationsDao migrationsDao, int countOfCorpUserEntities, int countOfChartEntities) { @@ -194,7 +194,7 @@ private List<Urn> insertMockEntities( String entity, String urnTemplate, EntityRegistry entityRegistry, - EntityService entityService) { + EntityService<?> entityService) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entity); AspectSpec keySpec = entitySpec.getKeyAspectSpec(); List<Urn> urns = new ArrayList<>();
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java new file mode 100644 index 0000000000000..2bbd06c8a61a4 --- /dev/null +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataTypesStepTest.java @@ -0,0 +1,81 @@ +package com.linkedin.metadata.boot.steps; + +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datatype.DataTypeInfo; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import org.jetbrains.annotations.NotNull; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class IngestDataTypesStepTest { + + private static final Urn TEST_DATA_TYPE_URN = UrnUtils.getUrn("urn:li:dataType:datahub.test"); + + @Test + public void testExecuteValidDataTypesNoExistingDataTypes() throws Exception { + EntityRegistry
testEntityRegistry = getTestEntityRegistry(); + final EntityService<?> entityService = mock(EntityService.class); + when(entityService.getEntityRegistry()).thenReturn(testEntityRegistry); + when(entityService.getKeyAspectSpec(anyString())) + .thenAnswer( + args -> testEntityRegistry.getEntitySpec(args.getArgument(0)).getKeyAspectSpec()); + + final IngestDataTypesStep step = + new IngestDataTypesStep(entityService, "./boot/test_data_types_valid.json"); + + step.execute(); + + DataTypeInfo expectedResult = new DataTypeInfo(); + expectedResult.setDescription("Test Description"); + expectedResult.setDisplayName("Test Name"); + expectedResult.setQualifiedName("datahub.test"); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateDataTypeProposal(expectedResult)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + } + + @Test + public void testExecuteInvalidJson() throws Exception { + final EntityService<?> entityService = mock(EntityService.class); + + final IngestDataTypesStep step = + new IngestDataTypesStep(entityService, "./boot/test_data_types_invalid.json"); + + Assert.assertThrows(RuntimeException.class, step::execute); + + // Verify no interactions + verifyNoInteractions(entityService); + } + + private static MetadataChangeProposal buildUpdateDataTypeProposal(final DataTypeInfo info) { + final MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityUrn(TEST_DATA_TYPE_URN); + mcp.setEntityType(DATA_TYPE_ENTITY_NAME); + mcp.setAspectName(DATA_TYPE_INFO_ASPECT_NAME); + mcp.setChangeType(ChangeType.UPSERT); + mcp.setAspect(GenericRecordUtils.serializeAspect(info)); + return mcp; + } + + @NotNull + private ConfigEntityRegistry getTestEntityRegistry() { + return new ConfigEntityRegistry( + IngestDataPlatformInstancesStepTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); + } +}
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java index b28a6e9f5cc5b..783c82934599c 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java @@ -25,7 +25,7 @@ public class IngestDefaultGlobalSettingsStepTest { @Test public void testExecuteValidSettingsNoExistingSettings() throws Exception { - final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); final IngestDefaultGlobalSettingsStep step = @@ -49,7 +49,7 @@ public void testExecuteValidSettingsNoExistingSettings() throws Exception { public void testExecuteValidSettingsExistingSettings() throws Exception { // Verify that the user provided settings overrides are NOT overwritten.
- final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); final GlobalSettingsInfo existingSettings = new GlobalSettingsInfo() .setViews( @@ -77,7 +77,7 @@ public void testExecuteValidSettingsExistingSettings() throws Exception { @Test public void testExecuteInvalidJsonSettings() throws Exception { - final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); final IngestDefaultGlobalSettingsStep step = @@ -92,7 +92,7 @@ public void testExecuteInvalidJsonSettings() throws Exception { @Test public void testExecuteInvalidModelSettings() throws Exception { - final EntityService entityService = mock(EntityService.class); + final EntityService<?> entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); final IngestDefaultGlobalSettingsStep step = @@ -106,7 +106,7 @@ public void testExecuteInvalidModelSettings() throws Exception { } private static void configureEntityServiceMock( - final EntityService mockService, final GlobalSettingsInfo settingsInfo) { + final EntityService<?> mockService, final GlobalSettingsInfo settingsInfo) { Mockito.when( mockService.getAspect( Mockito.eq(GLOBAL_SETTINGS_URN),
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java new file mode 100644 index 0000000000000..0b87283fbe2f7 --- /dev/null +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestEntityTypesStepTest.java @@ -0,0 +1,91 @@ +package com.linkedin.metadata.boot.steps; + +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.entitytype.EntityTypeInfo; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import org.jetbrains.annotations.NotNull; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IngestEntityTypesStepTest { + + @Test + public void testExecuteTestEntityRegistry() throws Exception { + EntityRegistry testEntityRegistry = getTestEntityRegistry(); + final EntityService<?> entityService = mock(EntityService.class); + when(entityService.getEntityRegistry()).thenReturn(testEntityRegistry); + when(entityService.getKeyAspectSpec(anyString())) + .thenAnswer( + args -> testEntityRegistry.getEntitySpec(args.getArgument(0)).getKeyAspectSpec()); + + final IngestEntityTypesStep step = new IngestEntityTypesStep(entityService); + + step.execute(); + + Urn userUrn = + Urn.createFromString(String.format("urn:li:entityType:datahub.%s", CORP_USER_ENTITY_NAME)); + EntityTypeInfo userInfo = new EntityTypeInfo(); + userInfo.setDisplayName("corpuser"); + userInfo.setQualifiedName("datahub.corpuser"); + + Urn chartUrn = + Urn.createFromString(String.format("urn:li:entityType:datahub.%s", CHART_ENTITY_NAME)); + EntityTypeInfo chartInfo = new EntityTypeInfo(); + chartInfo.setDisplayName("chart"); + chartInfo.setQualifiedName("datahub.chart"); + + Urn dataPlatformUrn =
Urn.createFromString( + String.format("urn:li:entityType:datahub.%s", DATA_PLATFORM_ENTITY_NAME)); + EntityTypeInfo dataPlatformInfo = new EntityTypeInfo(); + dataPlatformInfo.setDisplayName("dataPlatform"); + dataPlatformInfo.setQualifiedName("datahub.dataPlatform"); + + // Verify all entities were ingested. + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateEntityTypeProposal(userUrn, userInfo)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateEntityTypeProposal(chartUrn, chartInfo)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateEntityTypeProposal(dataPlatformUrn, dataPlatformInfo)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + } + + private static MetadataChangeProposal buildUpdateEntityTypeProposal( + final Urn entityTypeUrn, final EntityTypeInfo info) { + final MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityUrn(entityTypeUrn); + mcp.setEntityType(ENTITY_TYPE_ENTITY_NAME); + mcp.setAspectName(ENTITY_TYPE_INFO_ASPECT_NAME); + mcp.setChangeType(ChangeType.UPSERT); + mcp.setAspect(GenericRecordUtils.serializeAspect(info)); + return mcp; + } + + @NotNull + private ConfigEntityRegistry getTestEntityRegistry() { + return new ConfigEntityRegistry( + IngestDataPlatformInstancesStepTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); + } +}
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java index 3b23368d8e99f..9e647da9ef2e9 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java @@ -46,7 +46,7 @@ public class RestoreColumnLineageIndicesTest { @Test public void testExecuteFirstTime() throws Exception { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); mockGetUpgradeStep(false, VERSION_1, mockService); @@ -109,7 +109,7 @@ public void testExecuteFirstTime() throws Exception { @Test public void testExecuteWithNewVersion() throws Exception { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); mockGetUpgradeStep(true, VERSION_2, mockService); @@ -172,7 +172,7 @@ public void testExecuteWithNewVersion() throws Exception { @Test public void testDoesNotExecuteWithSameVersion() throws Exception { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); mockGetUpgradeStep(true, VERSION_1, mockService); @@ -233,7 +233,8 @@ public void testDoesNotExecuteWithSameVersion() throws Exception { Mockito.eq(ChangeType.RESTATE)); } - private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityService mockService) { + private void mockGetUpstreamLineage( + @Nonnull Urn
datasetUrn, @Nonnull EntityService<?> mockService) { final List<ExtraInfo> extraInfos = ImmutableList.of( new ExtraInfo() @@ -276,7 +277,7 @@ private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityServ } private void mockGetInputFields( - @Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { + @Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService<?> mockService) { final List<ExtraInfo> extraInfos = ImmutableList.of( new ExtraInfo() @@ -325,7 +326,7 @@ private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { } private void mockGetUpgradeStep( - boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) + boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService<?> mockService) throws Exception { final Urn upgradeEntityUrn = UrnUtils.getUrn(COLUMN_LINEAGE_UPGRADE_URN);
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java index a4f0c5e0aaba0..4a4532763f02b 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java @@ -40,7 +40,7 @@ public class RestoreGlossaryIndicesTest { "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); private void mockGetTermInfo( - Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) + Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService<?> mockService) throws Exception { Map<String, EnvelopedAspect> termInfoAspects = new HashMap<>(); termInfoAspects.put( @@ -79,7 +79,7 @@ private void mockGetTermInfo( } private void mockGetNodeInfo( - Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) + Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService<?> mockService) throws Exception { Map<String, EnvelopedAspect> nodeInfoAspects = new HashMap<>(); nodeInfoAspects.put( @@ -140,7 +140,7 @@ public void testExecuteFirstTime() throws Exception { Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); @@ -215,7 +215,7 @@ public void testExecutesWithNewVersion() throws Exception { Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); @@ -298,7 +298,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451");
- final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class);
diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java index 17159ba1baf53..024ad7b16a844 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java @@ -12,6 +12,7 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ListResult; import com.linkedin.metadata.models.AspectSpec; @@ -19,7 +20,6 @@ import com.linkedin.metadata.models.EntitySpecBuilder; import com.linkedin.metadata.models.EventSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.metadata.query.ExtraInfo; import com.linkedin.metadata.query.ExtraInfoArray; import com.linkedin.metadata.query.ListResultMetadata; @@ -48,7 +48,7 @@ public class UpgradeDefaultBrowsePathsStepTest { @Test public void testExecuteNoExistingBrowsePaths() throws Exception { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); @@ -104,7 +104,7 @@ public void testExecuteFirstTime() throws Exception { Urn testUrn2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)"); - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1))) @@ -193,7 +193,7 @@ public void testDoesNotRunWhenBrowsePathIsNotQualified() throws Exception { "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not // migrate - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); @@ -269,7 +269,7 @@ public void testDoesNotRunWhenBrowsePathIsNotQualified() throws Exception { @Test public void testDoesNotRunWhenAlreadyExecuted() throws Exception { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService<?> mockService = Mockito.mock(EntityService.class); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = @@ -297,7 +297,7 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { Mockito.anyBoolean()); } - private void
initMockServiceOtherEntities(EntityService mockService) { + private void initMockServiceOtherEntities(EntityService<?> mockService) { List<String> skippedEntityTypes = ImmutableList.of( Constants.DASHBOARD_ENTITY_NAME,
diff --git a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java index 9931f044931b6..17bf7810f71e4 100644 --- a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java +++ b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java @@ -12,7 +12,7 @@ public class TelemetryUtilsTest { - EntityService _entityService; + EntityService<?> _entityService; @BeforeMethod public void init() {
diff --git a/metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json b/metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json new file mode 100644 index 0000000000000..ed1d8a7b45abe --- /dev/null +++ b/metadata-service/factories/src/test/resources/boot/test_data_types_invalid.json @@ -0,0 +1,9 @@ +[ + { + "urn": "urn:li:dataType:datahub.test", + "badField": { + "qualifiedName":"datahub.test", + "description": "Test Description" + } + } +] \ No newline at end of file
diff --git a/metadata-service/factories/src/test/resources/boot/test_data_types_valid.json b/metadata-service/factories/src/test/resources/boot/test_data_types_valid.json new file mode 100644 index 0000000000000..3694c92947aa1 --- /dev/null +++ b/metadata-service/factories/src/test/resources/boot/test_data_types_valid.json @@ -0,0 +1,10 @@ +[ + { + "urn": "urn:li:dataType:datahub.test", + "info": { + "qualifiedName":"datahub.test", + "displayName": "Test Name", + "description": "Test Description" + } + } +] \ No newline at end of file
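The valid fixture above round-trips through the same parsing path as the production bootstrap file. A sketch of that mapping, assuming the classes from this patch are available; the JSON literal below is the fixture's "info" object:

    import com.datahub.util.RecordUtils;
    import com.linkedin.datatype.DataTypeInfo;

    public class FixtureParseSketch {
      public static void main(String[] args) {
        // The "info" payload from test_data_types_valid.json above.
        String json =
            "{\"qualifiedName\":\"datahub.test\","
                + "\"displayName\":\"Test Name\","
                + "\"description\":\"Test Description\"}";

        // Same conversion IngestDataTypesStep applies to each array element.
        DataTypeInfo info = RecordUtils.toRecordTemplate(DataTypeInfo.class, json);
        System.out.println(info.getDisplayName()); // Test Name
      }
    }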
diff --git a/metadata-service/factories/src/test/resources/test-entity-registry.yaml b/metadata-service/factories/src/test/resources/test-entity-registry.yaml index fe32b413751e6..400b22446c186 100644 --- a/metadata-service/factories/src/test/resources/test-entity-registry.yaml +++ b/metadata-service/factories/src/test/resources/test-entity-registry.yaml @@ -13,4 +13,20 @@ entities: category: core keyAspect: dataPlatformKey aspects: - - dataPlatformInfo \ No newline at end of file + - dataPlatformInfo + - name: entityType + doc: A type of entity in the DataHub Metadata Model. + category: core + keyAspect: entityTypeKey + aspects: + - entityTypeInfo + - institutionalMemory + - status + - name: dataType + doc: A type of data element stored within DataHub. + category: core + keyAspect: dataTypeKey + aspects: + - dataTypeInfo + - institutionalMemory + - status \ No newline at end of file
diff --git a/metadata-service/openapi-entity-servlet/build.gradle b/metadata-service/openapi-entity-servlet/build.gradle index fb49727fa70d1..016ac6693f55b 100644 --- a/metadata-service/openapi-entity-servlet/build.gradle +++ b/metadata-service/openapi-entity-servlet/build.gradle @@ -75,7 +75,7 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: [mergeApiComponents, 'java11' : "true", 'modelPropertyNaming': "original", 'modelPackage' : "io.datahubproject.openapi.generated", - 'apiPackage' : "io.datahubproject.openapi.generated.controller", + 'apiPackage' : "io.datahubproject.openapi.v2.generated.controller", 'delegatePattern' : "false" ] }
diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java similarity index 86% rename from metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java rename to metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java index d7c8268903508..39a7e4722988e 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java @@ -1,4 +1,4 @@ -package io.datahubproject.openapi.delegates; +package io.datahubproject.openapi.v2.delegates; import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; @@ -35,10 +35,16 @@ import io.datahubproject.openapi.generated.DeprecationAspectResponseV2; import io.datahubproject.openapi.generated.DomainsAspectRequestV2; import io.datahubproject.openapi.generated.DomainsAspectResponseV2; +import io.datahubproject.openapi.generated.DynamicFormAssignmentAspectRequestV2; +import io.datahubproject.openapi.generated.DynamicFormAssignmentAspectResponseV2; import io.datahubproject.openapi.generated.EditableChartPropertiesAspectRequestV2; import io.datahubproject.openapi.generated.EditableChartPropertiesAspectResponseV2; import io.datahubproject.openapi.generated.EditableDatasetPropertiesAspectRequestV2; import io.datahubproject.openapi.generated.EditableDatasetPropertiesAspectResponseV2; +import io.datahubproject.openapi.generated.FormInfoAspectRequestV2; +import io.datahubproject.openapi.generated.FormInfoAspectResponseV2; +import io.datahubproject.openapi.generated.FormsAspectRequestV2; +import io.datahubproject.openapi.generated.FormsAspectResponseV2; import io.datahubproject.openapi.generated.GlobalTagsAspectRequestV2; import io.datahubproject.openapi.generated.GlobalTagsAspectResponseV2; import io.datahubproject.openapi.generated.GlossaryTermsAspectRequestV2; @@ -66,7 +72,7 @@ public class EntityApiDelegateImpl { private final EntityRegistry _entityRegistry; - private final EntityService _entityService; + private final EntityService<?> _entityService; private final SearchService _searchService; private final EntitiesController _v1Controller; private final AuthorizerChain _authorizationChain; @@ -79,7 +85,7 @@ public class EntityApiDelegateImpl { private final StackWalker walker = StackWalker.getInstance(); public EntityApiDelegateImpl( - EntityService entityService, + EntityService<?>
entityService, SearchService searchService, EntitiesController entitiesController, boolean restApiAuthorizationEnabled, @@ -732,4 +738,111 @@ public ResponseEntity<Void> deleteDataProductProperties(String urn) { walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return deleteAspect(urn, methodNameToAspectName(methodName)); } + + public ResponseEntity<FormsAspectResponseV2> createForms(FormsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + FormsAspectRequestV2.class, + FormsAspectResponseV2.class); + } + + public ResponseEntity<Void> deleteForms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<FormsAspectResponseV2> getForms( + String urn, @jakarta.validation.Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + FormsAspectResponseV2.class); + } + + public ResponseEntity<Void> headForms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<DynamicFormAssignmentAspectResponseV2> createDynamicFormAssignment( + DynamicFormAssignmentAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DynamicFormAssignmentAspectRequestV2.class, + DynamicFormAssignmentAspectResponseV2.class); + } + + public ResponseEntity<FormInfoAspectResponseV2> createFormInfo( + FormInfoAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + FormInfoAspectRequestV2.class, + FormInfoAspectResponseV2.class); + } + + public ResponseEntity<Void> deleteDynamicFormAssignment(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headDynamicFormAssignment(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<Void> headFormInfo(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity<FormInfoAspectResponseV2> getFormInfo( + String urn, @jakarta.validation.Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + FormInfoAspectResponseV2.class); + } + + public ResponseEntity<DynamicFormAssignmentAspectResponseV2> getDynamicFormAssignment( + String urn, @jakarta.validation.Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames ->
frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DynamicFormAssignmentAspectResponseV2.class); + } + + public ResponseEntity<Void> deleteFormInfo(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } +}
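Each endpoint above recovers its own method name through StackWalker and maps it to an aspect name. The lookup itself is plain JDK and can be sketched standalone (the class name here is hypothetical):

    public class MethodNameSketch {
      private static final StackWalker WALKER = StackWalker.getInstance();

      public static void main(String[] args) {
        // The first frame is the method that invokes walk(), here main itself;
        // in the delegate above it is the endpoint method, e.g. "createForms".
        String name =
            WALKER
                .walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName))
                .get();
        System.out.println(name); // prints "main"
      }
    }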
diff --git a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache index 4a29b95eabc5d..7ac087f220561 100644 --- a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache +++ b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache @@ -1,6 +1,6 @@ package {{package}}; -import io.datahubproject.openapi.delegates.EntityApiDelegateImpl; +import io.datahubproject.openapi.v2.delegates.EntityApiDelegateImpl; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; import io.datahubproject.openapi.entities.EntitiesController;
diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java similarity index 97% rename from metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java rename to metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java index 1f8f0a5023513..d4217c9fd1b66 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java @@ -1,4 +1,4 @@ -package io.datahubproject.openapi.delegates; +package io.datahubproject.openapi.v2.delegates; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.testng.Assert.*; @@ -32,8 +32,8 @@ import io.datahubproject.openapi.generated.Status; import io.datahubproject.openapi.generated.StatusAspectRequestV2; import io.datahubproject.openapi.generated.TagAssociation; -import io.datahubproject.openapi.generated.controller.ChartApiController; -import io.datahubproject.openapi.generated.controller.DatasetApiController; +import io.datahubproject.openapi.v2.generated.controller.ChartApiController; +import io.datahubproject.openapi.v2.generated.controller.DatasetApiController; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; @@ -51,7 +51,7 @@ import org.testng.annotations.Test; @SpringBootTest(classes = {SpringWebConfig.class}) -@ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"}) +@ComponentScan(basePackages = {"io.datahubproject.openapi.v2.generated.controller"}) @Import({OpenAPIEntityTestConfiguration.class}) @AutoConfigureMockMvc public class EntityApiDelegateImplTest extends AbstractTestNGSpringContextTests {
diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java index cc040d29657b2..f4689a9862825 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java @@ -1,14 +1,25 @@ package io.datahubproject.openapi; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.ConversionNotSupportedException; +import org.springframework.core.Ordered; import org.springframework.core.convert.ConversionFailedException; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.servlet.mvc.support.DefaultHandlerExceptionResolver; +@Slf4j @ControllerAdvice -public class GlobalControllerExceptionHandler { - @ExceptionHandler(ConversionFailedException.class) +public class GlobalControllerExceptionHandler extends DefaultHandlerExceptionResolver { + + public GlobalControllerExceptionHandler() { + setOrder(Ordered.HIGHEST_PRECEDENCE); + setWarnLogCategory(getClass().getName()); + } + + @ExceptionHandler({ConversionFailedException.class, ConversionNotSupportedException.class}) public ResponseEntity<String> handleConflict(RuntimeException ex) { return new ResponseEntity<>(ex.getMessage(), HttpStatus.BAD_REQUEST); }
diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java index a8721b23d1fa2..2336bea565e59 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java @@ -4,7 +4,9 @@ import io.swagger.v3.oas.annotations.OpenAPIDefinition; import io.swagger.v3.oas.annotations.info.Info; import io.swagger.v3.oas.annotations.servers.Server; +import java.util.HashSet; import java.util.List; +import java.util.Set; import org.springdoc.core.models.GroupedOpenApi; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -23,6 +25,20 @@ servers = {@Server(url = "/openapi/", description = "Default Server URL")}) @Configuration public class SpringWebConfig implements WebMvcConfigurer { + private static final Set<String> OPERATIONS_PACKAGES = + Set.of("io.datahubproject.openapi.operations", "io.datahubproject.openapi.health"); + private static final Set<String> V2_PACKAGES = Set.of("io.datahubproject.openapi.v2"); + private static final Set<String> SCHEMA_REGISTRY_PACKAGES = + Set.of("io.datahubproject.openapi.schema.registry"); + + public static final Set<String> NONDEFAULT_OPENAPI_PACKAGES; + + static { + NONDEFAULT_OPENAPI_PACKAGES = new HashSet<>(); + NONDEFAULT_OPENAPI_PACKAGES.addAll(OPERATIONS_PACKAGES); + NONDEFAULT_OPENAPI_PACKAGES.addAll(V2_PACKAGES); + NONDEFAULT_OPENAPI_PACKAGES.addAll(SCHEMA_REGISTRY_PACKAGES); + } @Override public void configureMessageConverters(List<HttpMessageConverter<?>> messageConverters) { @@ -41,16 +57,23 @@ public void addFormatters(FormatterRegistry registry) { public GroupedOpenApi defaultOpenApiGroup() { return GroupedOpenApi.builder() .group("default") - .packagesToExclude( "io.datahubproject.openapi.operations",
"io.datahubproject.openapi.health") + .packagesToExclude(NONDEFAULT_OPENAPI_PACKAGES.toArray(String[]::new)) .build(); } @Bean public GroupedOpenApi operationsOpenApiGroup() { return GroupedOpenApi.builder() - .group("operations") - .packagesToScan("io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .group("Operations") + .packagesToScan(OPERATIONS_PACKAGES.toArray(String[]::new)) + .build(); + } + + @Bean + public GroupedOpenApi openApiGroupV3() { + return GroupedOpenApi.builder() + .group("OpenAPI v2") + .packagesToScan(V2_PACKAGES.toArray(String[]::new)) .build(); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java index c87820465dc88..a7e88966e4f87 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java @@ -465,11 +465,7 @@ public static Pair ingestProposal( AspectsBatch batch = AspectsBatchImpl.builder() - .mcps( - proposalStream.collect(Collectors.toList()), - auditStamp, - entityService.getEntityRegistry(), - entityService.getSystemEntityClient()) + .mcps(proposalStream.collect(Collectors.toList()), auditStamp, entityService) .build(); Set proposalResult = entityService.ingestProposal(batch, async); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java index 31577429df72d..6c0474dc6cfb6 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java @@ -134,11 +134,34 @@ public Method lookupMethod( return lookupMethod(builderPair.getFirst(), method, parameters); } + /** + * Convert class name to the pdl model names. Upper case first letter unless the 3rd character is + * upper case. Reverse of {link ReflectionCache.toUpperFirst} i.e. MLModel -> mlModel Dataset -> + * dataset DataProduct -> dataProduct + * + * @param s input string + * @return class name + */ public static String toLowerFirst(String s) { - return s.substring(0, 1).toLowerCase() + s.substring(1); + if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) { + return s.substring(0, 2).toLowerCase() + s.substring(2); + } else { + return s.substring(0, 1).toLowerCase() + s.substring(1); + } } + /** + * Convert the pdl model names to desired class names. Upper case first letter unless the 3rd + * character is upper case. i.e. 
mlModel -> MLModel dataset -> Dataset dataProduct -> DataProduct + * + * @param s input string + * @return class name + */ public static String toUpperFirst(String s) { - return s.substring(0, 1).toUpperCase() + s.substring(1); + if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) { + return s.substring(0, 2).toUpperCase() + s.substring(2); + } else { + return s.substring(0, 1).toUpperCase() + s.substring(1); + } } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java new file mode 100644 index 0000000000000..503330fdc8a2e --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -0,0 +1,507 @@ +package io.datahubproject.openapi.v2.controller; + +import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthorizerChain; +import com.datahub.util.RecordUtils; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.ByteString; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.metadata.aspect.batch.UpsertItem; +import com.linkedin.metadata.aspect.patch.GenericJsonPatch; +import com.linkedin.metadata.aspect.patch.template.common.GenericPatchTemplate; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.UpdateAspectResult; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.MCPUpsertBatchItem; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.utils.AuditStampUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.metadata.utils.SearchUtil; +import com.linkedin.mxe.SystemMetadata; +import com.linkedin.util.Pair; +import io.datahubproject.openapi.v2.models.GenericEntity; +import io.datahubproject.openapi.v2.models.GenericScrollResult; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.lang.reflect.InvocationTargetException; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v2/entity") +@Slf4j +public class EntityController { + private static final SearchFlags DEFAULT_SEARCH_FLAGS = + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); + @Autowired private EntityRegistry entityRegistry; + @Autowired private SearchService searchService; + @Autowired private EntityService entityService; + @Autowired private AuthorizerChain authorizationChain; + @Autowired private boolean restApiAuthorizationEnabled; + @Autowired private ObjectMapper objectMapper; + + @Tag(name = "Generic Entities", description = "API for interacting with generic entities.") + @GetMapping(value = "/{entityName}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Scroll entities") + public ResponseEntity> getEntities( + @PathVariable("entityName") String entityName, + @RequestParam(value = "aspectNames", defaultValue = "") Set aspectNames, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "query", defaultValue = "*") String query, + @RequestParam(value = "scrollId", required = false) String scrollId, + @RequestParam(value = "sort", required = false, defaultValue = "urn") String sortField, + @RequestParam(value = "sortOrder", required = false, defaultValue = "ASCENDING") + String sortOrder, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata) + throws URISyntaxException { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + // TODO: support additional and multiple sort params + SortCriterion sortCriterion = SearchUtil.sortBy(sortField, SortOrder.valueOf(sortOrder)); + + ScrollResult result = + searchService.scrollAcrossEntities( + List.of(entitySpec.getName()), + query, + null, + sortCriterion, + scrollId, + null, + count, + DEFAULT_SEARCH_FLAGS); + + return ResponseEntity.ok( + GenericScrollResult.builder() + .results(toRecordTemplates(result.getEntities(), aspectNames, withSystemMetadata)) + .scrollId(result.getScrollId()) + .build()); + } + + @Tag(name = "Generic Entities") + @GetMapping(value = "/{entityName}/{entityUrn}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Get an entity") + public ResponseEntity getEntity( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @RequestParam(value = "aspectNames", defaultValue = "") Set aspectNames, + @RequestParam(value = 
"systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata) + throws URISyntaxException { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.of( + toRecordTemplates(List.of(UrnUtils.getUrn(entityUrn)), aspectNames, withSystemMetadata) + .stream() + .findFirst()); + } + + @Tag(name = "Generic Entities") + @RequestMapping( + value = "/{entityName}/{entityUrn}", + method = {RequestMethod.HEAD}) + @Operation(summary = "Entity exists") + public ResponseEntity headEntity( + @PathVariable("entityName") String entityName, @PathVariable("entityUrn") String entityUrn) { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return exists(UrnUtils.getUrn(entityUrn), null) + ? ResponseEntity.noContent().build() + : ResponseEntity.notFound().build(); + } + + @Tag(name = "Generic Aspects", description = "API for generic aspects.") + @GetMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Get an entity's generic aspect.") + public ResponseEntity getAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName) + throws URISyntaxException { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.of( + toRecordTemplates(List.of(UrnUtils.getUrn(entityUrn)), Set.of(aspectName), true).stream() + .findFirst() + .flatMap(e -> e.getAspects().values().stream().findFirst())); + } + + @Tag(name = "Generic Aspects") + @RequestMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + method = {RequestMethod.HEAD}) + @Operation(summary = "Whether an entity aspect exists.") + public ResponseEntity headAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName) { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return exists(UrnUtils.getUrn(entityUrn), aspectName) + ? 
ResponseEntity.noContent().build() + : ResponseEntity.notFound().build(); + } + + @Tag(name = "Generic Entities") + @DeleteMapping(value = "/{entityName}/{entityUrn}") + @Operation(summary = "Delete an entity") + public void deleteEntity( + @PathVariable("entityName") String entityName, @PathVariable("entityUrn") String entityUrn) { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())); + } + + entityService.deleteAspect(entityUrn, entitySpec.getKeyAspectName(), Map.of(), true); + } + + @Tag(name = "Generic Aspects") + @DeleteMapping(value = "/{entityName}/{entityUrn}/{aspectName}") + @Operation(summary = "Delete an entity aspect.") + public void deleteAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName) { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())); + } + + entityService.deleteAspect(entityUrn, aspectName, Map.of(), true); + } + + @Tag(name = "Generic Aspects") + @PostMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Create an entity aspect.") + public ResponseEntity createAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata, + @RequestBody @Nonnull String jsonAspect) + throws URISyntaxException { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + Authentication authentication = AuthenticationContext.getAuthentication(); + + if (restApiAuthorizationEnabled) { + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + } + + AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); + UpsertItem upsert = + toUpsertItem(UrnUtils.getUrn(entityUrn), aspectSpec, jsonAspect, authentication.getActor()); + + List results = + entityService.ingestAspects( + AspectsBatchImpl.builder().items(List.of(upsert)).build(), true, true); + + return ResponseEntity.of( + results.stream() + .findFirst() + .map( + result -> + GenericEntity.builder() + .urn(result.getUrn().toString()) + .build( + objectMapper, + Map.of( + aspectName, + Pair.of( + result.getNewValue(), + withSystemMetadata ? result.getNewSystemMetadata() : null))))); + } + + @Tag(name = "Generic Aspects") + @PatchMapping( + value = "/{entityName}/{entityUrn}/{aspectName}", + consumes = "application/json-patch+json", + produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Patch an entity aspect. 
(Experimental)") + public ResponseEntity patchAspect( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @PathVariable("aspectName") String aspectName, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata, + @RequestBody @Nonnull GenericJsonPatch patch) + throws URISyntaxException, + NoSuchMethodException, + InvocationTargetException, + InstantiationException, + IllegalAccessException { + + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + Authentication authentication = AuthenticationContext.getAuthentication(); + + if (restApiAuthorizationEnabled) { + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpec, + entityUrn, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + } + + RecordTemplate currentValue = + entityService.getAspect(UrnUtils.getUrn(entityUrn), aspectName, 0); + + AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); + GenericPatchTemplate genericPatchTemplate = + GenericPatchTemplate.builder() + .genericJsonPatch(patch) + .templateType(aspectSpec.getDataTemplateClass()) + .templateDefault( + aspectSpec.getDataTemplateClass().getDeclaredConstructor().newInstance()) + .build(); + UpsertItem upsert = + toUpsertItem( + UrnUtils.getUrn(entityUrn), + aspectSpec, + currentValue, + genericPatchTemplate, + authentication.getActor()); + + List results = + entityService.ingestAspects( + AspectsBatchImpl.builder().items(List.of(upsert)).build(), true, true); + + return ResponseEntity.of( + results.stream() + .findFirst() + .map( + result -> + GenericEntity.builder() + .urn(result.getUrn().toString()) + .build( + objectMapper, + Map.of( + aspectName, + Pair.of( + result.getNewValue(), + withSystemMetadata ? result.getNewSystemMetadata() : null))))); + } + + private List toRecordTemplates( + SearchEntityArray searchEntities, Set aspectNames, boolean withSystemMetadata) + throws URISyntaxException { + return toRecordTemplates( + searchEntities.stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + aspectNames, + withSystemMetadata); + } + + private Boolean exists(Urn urn, @Nullable String aspect) { + return aspect == null ? 
entityService.exists(urn, true) : entityService.exists(urn, aspect); + } + + private List toRecordTemplates( + List urns, Set aspectNames, boolean withSystemMetadata) + throws URISyntaxException { + if (urns.isEmpty()) { + return List.of(); + } else { + Set urnsSet = new HashSet<>(urns); + + Map> aspects = + entityService.getLatestEnvelopedAspects( + urnsSet, resolveAspectNames(urnsSet, aspectNames)); + + return urns.stream() + .map( + u -> + GenericEntity.builder() + .urn(u.toString()) + .build( + objectMapper, + toAspectMap(u, aspects.getOrDefault(u, List.of()), withSystemMetadata))) + .collect(Collectors.toList()); + } + } + + private Set resolveAspectNames(Set urns, Set requestedNames) { + if (requestedNames.isEmpty()) { + return urns.stream() + .flatMap(u -> entityRegistry.getEntitySpec(u.getEntityType()).getAspectSpecs().stream()) + .map(AspectSpec::getName) + .collect(Collectors.toSet()); + } else { + // ensure key is always present + return Stream.concat( + requestedNames.stream(), + urns.stream() + .map(u -> entityRegistry.getEntitySpec(u.getEntityType()).getKeyAspectName())) + .collect(Collectors.toSet()); + } + } + + private Map> toAspectMap( + Urn urn, List aspects, boolean withSystemMetadata) { + return aspects.stream() + .map( + a -> + Map.entry( + a.getName(), + Pair.of( + toRecordTemplate(lookupAspectSpec(urn, a.getName()), a), + withSystemMetadata ? a.getSystemMetadata() : null))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private AspectSpec lookupAspectSpec(Urn urn, String aspectName) { + return entityRegistry.getEntitySpec(urn.getEntityType()).getAspectSpec(aspectName); + } + + private RecordTemplate toRecordTemplate(AspectSpec aspectSpec, EnvelopedAspect envelopedAspect) { + return RecordUtils.toRecordTemplate( + aspectSpec.getDataTemplateClass(), envelopedAspect.getValue().data()); + } + + private UpsertItem toUpsertItem( + Urn entityUrn, AspectSpec aspectSpec, String jsonAspect, Actor actor) + throws URISyntaxException { + return MCPUpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectSpec.getName()) + .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) + .aspect( + GenericRecordUtils.deserializeAspect( + ByteString.copyString(jsonAspect, StandardCharsets.UTF_8), + GenericRecordUtils.JSON, + aspectSpec)) + .build(entityService); + } + + private UpsertItem toUpsertItem( + @Nonnull Urn urn, + @Nonnull AspectSpec aspectSpec, + @Nullable RecordTemplate currentValue, + @Nonnull GenericPatchTemplate genericPatchTemplate, + @Nonnull Actor actor) + throws URISyntaxException { + return MCPUpsertBatchItem.fromPatch( + urn, + aspectSpec, + currentValue, + genericPatchTemplate, + AuditStampUtils.createAuditStamp(actor.toUrnStr()), + entityService); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java new file mode 100644 index 0000000000000..3550a86163f51 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java @@ -0,0 +1,228 @@ +package io.datahubproject.openapi.v2.controller; + +import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthorizerChain; +import 
com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.graph.RelatedEntities; +import com.linkedin.metadata.graph.RelatedEntitiesScrollResult; +import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.metadata.utils.SearchUtil; +import io.datahubproject.openapi.v2.models.GenericRelationship; +import io.datahubproject.openapi.v2.models.GenericScrollResult; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v2/relationship") +@Slf4j +@Tag( + name = "Generic Relationships", + description = "APIs for ingesting and accessing entity relationships.") +public class RelationshipController { + + private static final String[] SORT_FIELDS = {"source.urn", "destination.urn", "relationshipType"}; + private static final String[] SORT_ORDERS = {"ASCENDING", "ASCENDING", "ASCENDING"}; + private static final List EDGE_SORT_CRITERION; + + static { + EDGE_SORT_CRITERION = + IntStream.range(0, SORT_FIELDS.length) + .mapToObj( + idx -> SearchUtil.sortBy(SORT_FIELDS[idx], SortOrder.valueOf(SORT_ORDERS[idx]))) + .collect(Collectors.toList()); + } + + @Autowired private EntityRegistry entityRegistry; + @Autowired private ElasticSearchGraphService graphService; + @Autowired private AuthorizerChain authorizationChain; + + @Autowired private boolean restApiAuthorizationEnabled; + + /** + * Returns relationship edges by type + * + * @param relationshipType the relationship type + * @param count number of results + * @param scrollId scrolling id + * @return list of relation edges + */ + @GetMapping(value = "/{relationshipType}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Scroll relationships of the given type.") + public ResponseEntity> getRelationshipsByType( + @PathVariable("relationshipType") String relationshipType, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "scrollId", required = false) String scrollId) { + + RelatedEntitiesScrollResult result = + graphService.scrollRelatedEntities( + null, + null, + null, + null, + List.of(relationshipType), + new RelationshipFilter().setDirection(RelationshipDirection.UNDIRECTED), + EDGE_SORT_CRITERION, 
+ scrollId, + count, + null, + null); + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + Set entitySpecs = + result.getEntities().stream() + .flatMap( + relatedEntity -> + Stream.of( + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getUrn()).getEntityType()), + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getSourceUrn()).getEntityType()))) + .collect(Collectors.toSet()); + + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpecs, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.ok( + GenericScrollResult.builder() + .results(toGenericRelationships(result.getEntities())) + .scrollId(result.getScrollId()) + .build()); + } + + /** + * Returns edges for a given urn + * + * @param relationshipTypes types of edges + * @param direction direction of the edges + * @param count number of results + * @param scrollId scroll id + * @return urn edges + */ + @GetMapping(value = "/{entityName}/{entityUrn}", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Scroll relationships from a given entity.") + public ResponseEntity> getRelationshipsByEntity( + @PathVariable("entityName") String entityName, + @PathVariable("entityUrn") String entityUrn, + @RequestParam(value = "relationshipType[]", required = false, defaultValue = "*") + String[] relationshipTypes, + @RequestParam(value = "direction", defaultValue = "OUTGOING") String direction, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "scrollId", required = false) String scrollId) { + + final RelatedEntitiesScrollResult result; + + switch (RelationshipDirection.valueOf(direction.toUpperCase())) { + case INCOMING -> result = + graphService.scrollRelatedEntities( + null, + null, + null, + null, + relationshipTypes.length > 0 && !relationshipTypes[0].equals("*") + ? Arrays.stream(relationshipTypes).toList() + : List.of(), + new RelationshipFilter() + .setDirection(RelationshipDirection.UNDIRECTED) + .setOr(QueryUtils.newFilter("destination.urn", entityUrn).getOr()), + EDGE_SORT_CRITERION, + scrollId, + count, + null, + null); + case OUTGOING -> result = + graphService.scrollRelatedEntities( + null, + null, + null, + null, + relationshipTypes.length > 0 && !relationshipTypes[0].equals("*") + ? 
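// Hypothetical request sketches for this controller's two endpoints; the
// DownstreamOf relationship type and the URN placeholder are illustrative
// assumptions:
//
//   GET /v2/relationship/DownstreamOf?count=10
//       -> getRelationshipsByType: scrolls every DownstreamOf edge, ordered by
//          the fixed EDGE_SORT_CRITERION (source.urn, destination.urn, type)
//   GET /v2/relationship/dataset/{urn}?direction=INCOMING
//       -> getRelationshipsByEntity: INCOMING matches edges whose destination.urn
//          is {urn}, while OUTGOING (the default) matches on source.urn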
Arrays.stream(relationshipTypes).toList() + : List.of(), + new RelationshipFilter() + .setDirection(RelationshipDirection.UNDIRECTED) + .setOr(QueryUtils.newFilter("source.urn", entityUrn).getOr()), + EDGE_SORT_CRITERION, + scrollId, + count, + null, + null); + default -> throw new IllegalArgumentException("Direction must be INCOMING or OUTGOING"); + } + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + Set entitySpecs = + result.getEntities().stream() + .flatMap( + relatedEntity -> + Stream.of( + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getDestinationUrn()).getEntityType()), + entityRegistry.getEntitySpec( + UrnUtils.getUrn(relatedEntity.getSourceUrn()).getEntityType()))) + .collect(Collectors.toSet()); + + checkAuthorized( + authorizationChain, + authentication.getActor(), + entitySpecs, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + return ResponseEntity.ok( + GenericScrollResult.builder() + .results(toGenericRelationships(result.getEntities())) + .scrollId(result.getScrollId()) + .build()); + } + + private List toGenericRelationships(List relatedEntities) { + return relatedEntities.stream() + .map( + result -> { + Urn source = UrnUtils.getUrn(result.getSourceUrn()); + Urn dest = UrnUtils.getUrn(result.getDestinationUrn()); + return GenericRelationship.builder() + .relationshipType(result.getRelationshipType()) + .source(GenericRelationship.GenericNode.fromUrn(source)) + .destination(GenericRelationship.GenericNode.fromUrn(dest)) + .build(); + }) + .collect(Collectors.toList()); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java new file mode 100644 index 0000000000000..ab12b68339011 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java @@ -0,0 +1,115 @@ +package io.datahubproject.openapi.v2.controller; + +import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthorizerChain; +import com.google.common.collect.ImmutableList; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.timeseries.GenericTimeseriesDocument; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.timeseries.TimeseriesScrollResult; +import com.linkedin.metadata.utils.SearchUtil; +import io.datahubproject.openapi.v2.models.GenericScrollResult; +import io.datahubproject.openapi.v2.models.GenericTimeseriesAspect; +import io.swagger.v3.oas.annotations.tags.Tag; +import java.net.URISyntaxException; +import java.util.List; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequiredArgsConstructor +@RequestMapping("/v2/timeseries") +@Slf4j +@Tag( + name = "Generic Timeseries Aspects", + description = "APIs for ingesting and accessing timeseries aspects") +public class TimeseriesController { + + @Autowired private EntityRegistry entityRegistry; + + @Autowired private TimeseriesAspectService timeseriesAspectService; + + @Autowired private AuthorizerChain authorizationChain; + + @Autowired private boolean restApiAuthorizationEnabled; + + @GetMapping(value = "/{entityName}/{aspectName}", produces = MediaType.APPLICATION_JSON_VALUE) + public ResponseEntity> getAspects( + @PathVariable("entityName") String entityName, + @PathVariable("aspectName") String aspectName, + @RequestParam(value = "count", defaultValue = "10") Integer count, + @RequestParam(value = "scrollId", required = false) String scrollId, + @RequestParam(value = "startTimeMillis", required = false) Long startTimeMillis, + @RequestParam(value = "endTimeMillis", required = false) Long endTimeMillis, + @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") + Boolean withSystemMetadata) + throws URISyntaxException { + + if (restApiAuthorizationEnabled) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAuthorized( + authorizationChain, + authentication.getActor(), + entityRegistry.getEntitySpec(entityName), + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + } + + AspectSpec aspectSpec = entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName); + if (!aspectSpec.isTimeseries()) { + throw new IllegalArgumentException("Only timeseries aspects are supported."); + } + + List sortCriterion = + List.of( + SearchUtil.sortBy("timestampMillis", SortOrder.DESCENDING), + SearchUtil.sortBy("messageId", SortOrder.DESCENDING)); + + TimeseriesScrollResult result = + timeseriesAspectService.scrollAspects( + entityName, + aspectName, + null, + sortCriterion, + scrollId, + count, + startTimeMillis, + endTimeMillis); + + return ResponseEntity.ok( + GenericScrollResult.builder() + .scrollId(result.getScrollId()) + .results(toGenericTimeseriesAspect(result.getDocuments(), withSystemMetadata)) + .build()); + } + + private static List toGenericTimeseriesAspect( + List docs, boolean withSystemMetadata) { + return docs.stream() + .map( + doc -> + GenericTimeseriesAspect.builder() + .urn(doc.getUrn()) + .messageId(doc.getMessageId()) + .timestampMillis(doc.getTimestampMillis()) + .systemMetadata(withSystemMetadata ? 
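// A hypothetical request sketch; "datasetProfile" is an illustrative timeseries
// aspect name, not one named in this patch:
//
//   GET /v2/timeseries/dataset/datasetProfile?count=10&startTimeMillis=0
//       -> scrolls matching documents newest-first (timestampMillis DESC, then
//          messageId DESC) and maps each through this helper into a
//          GenericTimeseriesAspect of {urn, timestampMillis, messageId, event};
//          non-timeseries aspect names are rejected with IllegalArgumentException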
doc.getSystemMetadata() : null) + .event(doc.getEvent()) + .build()) + .collect(Collectors.toList()); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java new file mode 100644 index 0000000000000..f1e965ca05464 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericEntity.java @@ -0,0 +1,57 @@ +package io.datahubproject.openapi.v2.models; + +import com.datahub.util.RecordUtils; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.mxe.SystemMetadata; +import com.linkedin.util.Pair; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +@JsonInclude(JsonInclude.Include.NON_NULL) +public class GenericEntity { + private String urn; + private Map aspects; + + public static class GenericEntityBuilder { + + public GenericEntity build( + ObjectMapper objectMapper, Map> aspects) { + Map jsonObjectMap = + aspects.entrySet().stream() + .map( + e -> { + try { + Map valueMap = + Map.of( + "value", + objectMapper.readTree( + RecordUtils.toJsonString(e.getValue().getFirst()) + .getBytes(StandardCharsets.UTF_8))); + + if (e.getValue().getSecond() != null) { + return Map.entry( + e.getKey(), + Map.of( + "systemMetadata", e.getValue().getSecond(), + "value", valueMap.get("value"))); + } else { + return Map.entry(e.getKey(), Map.of("value", valueMap.get("value"))); + } + } catch (IOException ex) { + throw new RuntimeException(ex); + } + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + return new GenericEntity(urn, jsonObjectMap); + } + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java new file mode 100644 index 0000000000000..a4fb429c1eb18 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericRelationship.java @@ -0,0 +1,36 @@ +package io.datahubproject.openapi.v2.models; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.linkedin.common.urn.Urn; +import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +@JsonInclude(JsonInclude.Include.NON_NULL) +public class GenericRelationship { + @Nonnull private String relationshipType; + @Nonnull private GenericNode destination; + @Nonnull private GenericNode source; + @Nullable private NodeProperties properties; + + @Data + @Builder + public static class GenericNode { + @Nonnull private String entityType; + @Nonnull private String urn; + + public static GenericNode fromUrn(@Nonnull Urn urn) { + return GenericNode.builder().entityType(urn.getEntityType()).urn(urn.toString()).build(); + } + } + + @Data + @Builder + public static class NodeProperties { + private List source; + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java 
b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java new file mode 100644 index 0000000000000..2befc83c00363 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericScrollResult.java @@ -0,0 +1,12 @@ +package io.datahubproject.openapi.v2.models; + +import java.util.List; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class GenericScrollResult { + private String scrollId; + private List results; +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java new file mode 100644 index 0000000000000..9d52ed28b2066 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/GenericTimeseriesAspect.java @@ -0,0 +1,18 @@ +package io.datahubproject.openapi.v2.models; + +import com.fasterxml.jackson.annotation.JsonInclude; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +@JsonInclude(JsonInclude.Include.NON_NULL) +public class GenericTimeseriesAspect { + private long timestampMillis; + @Nonnull private String urn; + @Nonnull private Object event; + @Nullable private String messageId; + @Nullable private Object systemMetadata; +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java new file mode 100644 index 0000000000000..c5323dfe68369 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/models/PatchOperation.java @@ -0,0 +1,26 @@ +package io.datahubproject.openapi.v2.models; + +import com.fasterxml.jackson.databind.JsonNode; +import com.linkedin.metadata.aspect.patch.PatchOperationType; +import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; +import lombok.NoArgsConstructor; + +@Data +@Builder +@NoArgsConstructor +@AllArgsConstructor +public class PatchOperation { + @Nonnull private String op; + @Nonnull private String path; + @Nullable private JsonNode value; + @Nullable private List arrayMapKey; + + public PatchOperationType getOp() { + return PatchOperationType.valueOf(op.toUpperCase()); + } +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java new file mode 100644 index 0000000000000..70d588721d3b3 --- /dev/null +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java @@ -0,0 +1,67 @@ +package io.datahubproject.openapi.v2.utils; + +import com.datahub.authentication.Actor; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; +import com.linkedin.metadata.models.EntitySpec; +import io.datahubproject.openapi.exception.UnauthorizedException; +import java.util.List; +import java.util.Optional; +import 
java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class ControllerUtil { + private ControllerUtil() {} + + public static void checkAuthorized( + @Nonnull Authorizer authorizationChain, + @Nonnull Actor actor, + @Nonnull EntitySpec entitySpec, + @Nonnull List privileges) { + checkAuthorized(authorizationChain, actor, entitySpec, null, privileges); + } + + public static void checkAuthorized( + @Nonnull Authorizer authorizationChain, + @Nonnull Actor actor, + @Nonnull Set entitySpecs, + @Nonnull List privileges) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(privileges))); + List> resourceSpecs = + entitySpecs.stream() + .map( + entitySpec -> + Optional.of(new com.datahub.authorization.EntitySpec(entitySpec.getName(), ""))) + .collect(Collectors.toList()); + if (!AuthUtil.isAuthorizedForResources( + authorizationChain, actor.toUrnStr(), resourceSpecs, orGroup)) { + throw new UnauthorizedException(actor.toUrnStr() + " is unauthorized to get entities."); + } + } + + public static void checkAuthorized( + @Nonnull Authorizer authorizationChain, + @Nonnull Actor actor, + @Nonnull EntitySpec entitySpec, + @Nullable String entityUrn, + @Nonnull List privileges) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(privileges))); + + List> resourceSpecs = + List.of( + Optional.of( + new com.datahub.authorization.EntitySpec( + entitySpec.getName(), entityUrn != null ? entityUrn : ""))); + if (!AuthUtil.isAuthorizedForResources( + authorizationChain, actor.toUrnStr(), resourceSpecs, orGroup)) { + throw new UnauthorizedException(actor.toUrnStr() + " is unauthorized to get entities."); + } + } +} diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index bca3e7161c8b8..ee45b8921143a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -319,6 +319,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -993,6 +994,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -2049,6 +2055,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index 69184856e4f9e..505f44c52d583 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -993,6 +994,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : 
"hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -5084,6 +5090,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index 09c0185f74f3a..e8c15d1b4ca04 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -735,6 +736,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -1783,6 +1789,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index eae0eed2dd50b..67f70d40e010c 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -735,6 +736,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : "string", @@ -1777,6 +1783,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index cb253c458e6c4..4c8cd1f20d476 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -61,6 +61,7 @@ "default" : { }, "Searchable" : { "/*" : { + "fieldType" : "TEXT", "queryByDefault" : true } } @@ -993,6 +994,11 @@ "filterNameOverride" : "Glossary Term", "hasValuesFieldName" : "hasGlossaryTerms" } + }, { + "name" : "actor", + "type" : "Urn", + "doc" : "The user URN which will be credited for adding associating this term to the entity", + "optional" : true }, { "name" : "context", "type" : 
"string", @@ -5078,6 +5084,7 @@ "name" : "GlossaryNodeInfo", "namespace" : "com.linkedin.glossary", "doc" : "Properties associated with a GlossaryNode", + "include" : [ "com.linkedin.common.CustomProperties" ], "fields" : [ { "name" : "definition", "type" : "string", diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 64ae3632c353a..2f470dca01f2a 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -1,6 +1,9 @@ package com.linkedin.entity.client; +import static com.linkedin.metadata.utils.GenericRecordUtils.entityResponseToAspectMap; + import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -11,7 +14,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.aspect.VersionedAspect; -import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.browse.BrowseResultV2; import com.linkedin.metadata.graph.LineageDirection; @@ -40,7 +42,7 @@ import javax.annotation.Nullable; // Consider renaming this to datahub client. -public interface EntityClient extends AspectRetriever { +public interface EntityClient { @Nullable public EntityResponse getV2( @@ -623,14 +625,26 @@ public void producePlatformEvent( @Nonnull Authentication authentication) throws Exception; - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + public void rollbackIngestion( + @Nonnull String runId, @Nonnull Authorizer authorizer, @Nonnull Authentication authentication) throws Exception; - default Aspect getLatestAspectObject(@Nonnull Urn urn, @Nonnull String aspectName) + @Nullable + default Aspect getLatestAspectObject( + @Nonnull Urn urn, @Nonnull String aspectName, @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + return getLatestAspects(Set.of(urn), Set.of(aspectName), authentication) + .getOrDefault(urn, Map.of()) + .get(aspectName); + } + + @Nonnull + default Map> getLatestAspects( + @Nonnull Set urns, + @Nonnull Set aspectNames, + @Nonnull Authentication authentication) throws RemoteInvocationException, URISyntaxException { - return getV2(urn.getEntityType(), urn, Set.of(aspectName), null) - .getAspects() - .get(aspectName) - .getValue(); + String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); + return entityResponseToAspectMap(batchGetV2(entityName, urns, aspectNames, authentication)); } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index d68c472ea9170..3108345bd3937 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -1,6 +1,7 @@ package com.linkedin.entity.client; import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; 
import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.linkedin.common.VersionedUrn; @@ -539,7 +540,9 @@ public SearchResult search( if (searchFlags != null) { requestBuilder.searchFlagsParam(searchFlags); - requestBuilder.fulltextParam(searchFlags.isFulltext()); + if (searchFlags.isFulltext() != null) { + requestBuilder.fulltextParam(searchFlags.isFulltext()); + } } return sendClientRequest(requestBuilder, authentication).getEntity(); @@ -1057,7 +1060,10 @@ public void producePlatformEvent( } @Override - public void rollbackIngestion(@Nonnull String runId, @Nonnull final Authentication authentication) + public void rollbackIngestion( + @Nonnull String runId, + @Nonnull Authorizer authorizer, + @Nonnull final Authentication authentication) throws Exception { final RunsDoRollbackRequestBuilder requestBuilder = RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false); diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java index dfad20b5f52b2..243e8a40bf4b7 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java @@ -4,24 +4,60 @@ import com.linkedin.common.urn.Urn; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; -import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; +import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.ScrollResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; +import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.Nonnull; import javax.annotation.Nullable; -/** Adds entity/aspect cache and assumes system authentication */ -public interface SystemEntityClient extends EntityClient, AspectRetriever { +/** Adds entity/aspect cache and assumes **system** authentication */ +public interface SystemEntityClient extends EntityClient { EntityClientCache getEntityClientCache(); Authentication getSystemAuthentication(); + /** + * Searches for entities matching to a given query and filters across multiple entity types + * + * @param entities entity types to search (if empty, searches all entities) + * @param input search query + * @param filter search filters + * @param scrollId opaque scroll ID indicating offset + * @param keepAlive string representation of time to keep point in time alive, ex: 5m + * @param count max number of search results requested + * @return Snapshot key + * @throws RemoteInvocationException + */ + @Nonnull + default ScrollResult scrollAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + return scrollAcrossEntities( + entities, + input, + filter, + scrollId, + keepAlive, + count, + searchFlags, + getSystemAuthentication()); + } + /** * Builds the cache * @@ -101,11 +137,16 @@ default void setWritable(boolean canWrite) throws RemoteInvocationException { 
setWritable(canWrite, getSystemAuthentication()); } + @Nullable default Aspect getLatestAspectObject(@Nonnull Urn urn, @Nonnull String aspectName) throws RemoteInvocationException, URISyntaxException { - return getV2(urn.getEntityType(), urn, Set.of(aspectName), getSystemAuthentication()) - .getAspects() - .get(aspectName) - .getValue(); + return getLatestAspectObject(urn, aspectName, getSystemAuthentication()); + } + + @Nonnull + default Map> getLatestAspects( + @Nonnull Set urns, @Nonnull Set aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getLatestAspects(urns, aspectNames, getSystemAuthentication()); } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java index a2f5596af9f4e..0f179c4da7b74 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java @@ -17,7 +17,7 @@ public SystemRestliEntityClient( @Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, + @Nonnull Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { super(restliClient, backoffPolicy, retryCount); this.systemAuthentication = systemAuthentication; diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java index c5b019e85e0c9..ffa3abe6806f9 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java @@ -252,14 +252,14 @@ public Task ingestProposal( if (asyncBool) { // if async we'll expand the getAdditionalChanges later, no need to do this early batch = AspectsBatchImpl.builder() - .mcps(List.of(metadataChangeProposal), auditStamp, _entityService.getEntityRegistry(), _entityService.getSystemEntityClient()) + .mcps(List.of(metadataChangeProposal), auditStamp, _entityService) .build(); } else { Stream proposalStream = Stream.concat(Stream.of(metadataChangeProposal), AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService).stream()); batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService.getEntityRegistry(), _entityService.getSystemEntityClient()) + .mcps(proposalStream.collect(Collectors.toList()), auditStamp, _entityService) .build(); } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java index 294ded8a1e255..869cfc7afdee8 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java @@ -1,40 +1,25 @@ package com.linkedin.metadata.resources.entity; -import static com.linkedin.metadata.Constants.*; -import static 
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java index 294ded8a1e255..869cfc7afdee8 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java @@ -1,40 +1,25 @@ package com.linkedin.metadata.resources.entity; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.service.RollbackService.ROLLBACK_FAILED_STATUS; import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.authorization.EntitySpec; +import com.datahub.authentication.AuthenticationException; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.events.metadata.ChangeType; -import com.linkedin.execution.ExecutionRequestResult; -import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.VersionedAspect; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.RollbackRunResult; -import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.restli.RestliUtil; import com.linkedin.metadata.run.AspectRowSummary; import com.linkedin.metadata.run.AspectRowSummaryArray; import com.linkedin.metadata.run.IngestionRunSummary; import com.linkedin.metadata.run.IngestionRunSummaryArray; import com.linkedin.metadata.run.RollbackResponse; -import com.linkedin.metadata.run.UnsafeEntityInfo; -import com.linkedin.metadata.run.UnsafeEntityInfoArray; -import com.linkedin.metadata.search.utils.ESUtils; +import com.linkedin.metadata.service.RollbackService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; -import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import com.linkedin.metadata.utils.EntityKeyUtils; -import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.parseq.Task; import com.linkedin.restli.common.HttpStatus; import com.linkedin.restli.server.RestLiServiceException; @@ -43,13 +28,8 @@ import com.linkedin.restli.server.annotations.Optional; import com.linkedin.restli.server.annotations.RestLiCollection; import com.linkedin.restli.server.resources.CollectionResourceTaskTemplate; -import com.linkedin.timeseries.DeleteAspectValuesResult; import io.opentelemetry.extension.annotations.WithSpan; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.inject.Inject; @@ -64,14 +44,8 @@ public class BatchIngestionRunResource private static final Integer DEFAULT_OFFSET = 0; private static final Integer DEFAULT_PAGE_SIZE = 100; - private static final Integer DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE = 1000000; private static final boolean DEFAULT_INCLUDE_SOFT_DELETED = false; private static final boolean DEFAULT_HARD_DELETE = false; - private static final Integer ELASTIC_MAX_PAGE_SIZE = 10000; - private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; - private static final String ROLLING_BACK_STATUS = "ROLLING_BACK"; - private static final String ROLLED_BACK_STATUS = "ROLLED_BACK"; - private static final String ROLLBACK_FAILED_STATUS = "ROLLBACK_FAILED"; @Inject @Named("systemMetadataService") @@ -79,15 +53,15 @@ public class BatchIngestionRunResource @Inject @Named("entityService") - private EntityService _entityService; + private EntityService<?> _entityService; @Inject -
@Named("timeseriesAspectService") - private TimeseriesAspectService _timeseriesAspectService; + @Named("rollbackService") + private RollbackService rollbackService; - @Inject - @Named("authorizerChain") - private Authorizer _authorizer; + @Inject + @Named("authorizerChain") + private Authorizer _authorizer; /** Rolls back an ingestion run */ @Action(name = "rollback") @@ -111,274 +85,23 @@ public Task rollback( try { return RestliUtil.toTask( () -> { - if (runId.equals(DEFAULT_RUN_ID)) { - throw new IllegalArgumentException( - String.format( - "%s is a default run-id provided for non labeled ingestion runs. You cannot delete using this reserved run-id", - runId)); - } - if (!dryRun) { - updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS); - } - - RollbackResponse response = new RollbackResponse(); - List aspectRowsToDelete; - aspectRowsToDelete = - _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - Set urns = - aspectRowsToDelete.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet(); - List> resourceSpecs = - urns.stream() - .map(UrnUtils::getUrn) - .map( - urn -> - java.util.Optional.of( - new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - resourceSpecs)) { - throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities."); - } - log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - if (dryRun) { - - final Map> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream() - .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List keyAspects = aspectsSplitByIsKeyAspects.get(true); - - long entitiesDeleted = keyAspects.size(); - long aspectsReverted = aspectRowsToDelete.size(); - final long affectedEntities = - aspectRowsToDelete.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray( - aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - // If we are soft deleting, remove key aspects from count of aspects being deleted - if (!doHardDelete) { - aspectsReverted -= keyAspects.size(); - rowSummaries.removeIf(AspectRowSummary::isKeyAspect); + Authentication auth = AuthenticationContext.getAuthentication(); + try { + return rollbackService.rollbackIngestion(runId, dryRun, doHardDelete, _authorizer, auth); + } catch (AuthenticationException authException) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, authException.getMessage()); } - // Compute the aspects that exist referencing the key aspects we are deleting - final List affectedAspectsList = - keyAspects.stream() - .map( - (AspectRowSummary urn) -> - _systemMetadataService.findByUrn( - urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter( - row -> - !row.getRunId().equals(runId) - && !row.isKeyAspect() - && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final List 
unsafeEntityInfos = - affectedAspectsList.stream() - .map(AspectRowSummary::getUrn) - .distinct() - .map( - urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - // Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) - .collect(Collectors.toList()); - - return response - .setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - .setEntitiesAffected(affectedEntities) - .setEntitiesDeleted(entitiesDeleted) - .setUnsafeEntitiesCount(unsafeEntitiesCount) - .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) - .setAspectRowSummaries(rowSummaries); - } - - RollbackRunResult rollbackRunResult = - _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - final List deletedRows = rollbackRunResult.getRowsRolledBack(); - int rowsDeletedFromEntityDeletion = - rollbackRunResult.getRowsDeletedFromEntityDeletion(); - - // since elastic limits how many rows we can access at once, we need to iteratively - // delete - while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { - sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); - aspectRowsToDelete = - _systemMetadataService.findByRunId( - runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - log.info( - "{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - log.info("deleting..."); - rollbackRunResult = - _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); - rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); - } - - // Rollback timeseries aspects - DeleteAspectValuesResult timeseriesRollbackResult = - _timeseriesAspectService.rollbackTimeseriesAspects(runId); - rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); - - log.info("finished deleting {} rows", deletedRows.size()); - int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; - - final Map> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream() - .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List keyAspects = aspectsSplitByIsKeyAspects.get(true); - - final long entitiesDeleted = keyAspects.size(); - final long affectedEntities = - deletedRows.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray( - aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - log.info("computing aspects affected by this rollback..."); - // Compute the aspects that exist referencing the key aspects we are deleting - final List affectedAspectsList = - keyAspects.stream() - .map( - (AspectRowSummary urn) -> - _systemMetadataService.findByUrn( - urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter( - row -> - !row.getRunId().equals(runId) - && !row.isKeyAspect() - && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream() - .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) - .keySet() - .size(); - - final List unsafeEntityInfos = - affectedAspectsList.stream() - .map(AspectRowSummary::getUrn) - .distinct() - .map( - urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - 
// Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) - .collect(Collectors.toList()); - - log.info("calculation done."); - - updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); - - return response - .setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - .setEntitiesAffected(affectedEntities) - .setEntitiesDeleted(entitiesDeleted) - .setUnsafeEntitiesCount(unsafeEntitiesCount) - .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) - .setAspectRowSummaries(rowSummaries); }, MetricRegistry.name(this.getClass(), "rollback")); } catch (Exception e) { - updateExecutionRequestStatus(runId, ROLLBACK_FAILED_STATUS); + rollbackService.updateExecutionRequestStatus(runId, ROLLBACK_FAILED_STATUS); throw new RuntimeException( String.format("There was an issue rolling back ingestion run with runId %s", runId), e); } } - private String stringifyRowCount(int size) { - if (size < ELASTIC_MAX_PAGE_SIZE) { - return String.valueOf(size); - } else { - return "at least " + size; - } - } - - private void sleep(Integer seconds) { - try { - TimeUnit.SECONDS.sleep(seconds); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - private void updateExecutionRequestStatus(String runId, String status) { - try { - final Urn executionRequestUrn = - EntityKeyUtils.convertEntityKeyToUrn( - new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); - EnvelopedAspect aspect = - _entityService.getLatestEnvelopedAspect( - executionRequestUrn.getEntityType(), - executionRequestUrn, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); - if (aspect == null) { - log.warn("Aspect for execution request with runId {} not found", runId); - } else { - final MetadataChangeProposal proposal = new MetadataChangeProposal(); - ExecutionRequestResult requestResult = new ExecutionRequestResult(aspect.getValue().data()); - requestResult.setStatus(status); - proposal.setEntityUrn(executionRequestUrn); - proposal.setEntityType(Constants.EXECUTION_REQUEST_ENTITY_NAME); - proposal.setAspectName(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); - proposal.setAspect(GenericRecordUtils.serializeAspect(requestResult)); - proposal.setChangeType(ChangeType.UPSERT); - - _entityService.ingestProposal( - proposal, - new AuditStamp() - .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()), - false); - } - } catch (Exception e) { - log.error( - String.format( - "Not able to update execution result aspect with runId %s and new status %s.", - runId, status), - e); - } - } - /** Retrieves the value for an entity that is made up of latest versions of specified aspects. 
*/ @Action(name = "list") @Nonnull diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java index e3534875c6cd2..d6130e05b77bd 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java +++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java @@ -87,7 +87,7 @@ public void testAsyncDefaultAspects() throws URISyntaxException { .aspect(mcp.getAspect()) .auditStamp(new AuditStamp()) .metadataChangeProposal(mcp) - .build(_entityRegistry, _entityService.getSystemEntityClient()); + .build(_entityService); when(_aspectDao.runInTransactionWithRetry(any(), any(), anyInt())) .thenReturn( List.of( diff --git a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java index 2a12ecf6866bb..5187cba0b9151 100644 --- a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java +++ b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java @@ -7,6 +7,7 @@ import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.timeseries.TimeseriesScrollResult; import com.linkedin.timeseries.AggregationSpec; import com.linkedin.timeseries.DeleteAspectValuesResult; import com.linkedin.timeseries.GenericTable; @@ -118,4 +119,18 @@ public void upsertDocument( public List getIndexSizes() { return List.of(); } + + @Nonnull + @Override + public TimeseriesScrollResult scrollAspects( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nullable Filter filter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { + return TimeseriesScrollResult.builder().build(); + } } diff --git a/metadata-service/services/build.gradle b/metadata-service/services/build.gradle index c683b0c75f40a..78d651c05e4d9 100644 --- a/metadata-service/services/build.gradle +++ b/metadata-service/services/build.gradle @@ -1,6 +1,6 @@ plugins { id 'org.hidetake.swagger.generator' - id 'java' + id 'java-library' } configurations { @@ -14,7 +14,9 @@ dependencies { implementation project(':metadata-events:mxe-avro') implementation project(':metadata-events:mxe-registration') implementation project(':metadata-events:mxe-utils-avro') - implementation project(':metadata-models') + api project(path: ':metadata-models', configuration: 'dataTemplate') + api project(':metadata-models') + implementation project(':metadata-service:restli-client') implementation project(':metadata-service:configuration') diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java index c4216962c134c..2c1596474fb21 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java @@ -88,7 +88,7 @@ public static List getAdditionalChanges( public static List getAdditionalChanges( @Nonnull 
MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService) { + @Nonnull EntityService entityService) { return getAdditionalChanges(metadataChangeProposal, entityService, false); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index 71573aa2b10e0..94ab69e895920 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -9,11 +9,11 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.VersionedAspect; import com.linkedin.metadata.aspect.batch.AspectsBatch; import com.linkedin.metadata.aspect.batch.UpsertItem; +import com.linkedin.metadata.aspect.plugins.validation.AspectRetriever; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.models.AspectSpec; @@ -35,7 +35,7 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -public interface EntityService { +public interface EntityService extends AspectRetriever { /** * Just whether the entity/aspect exists @@ -287,6 +287,8 @@ Pair>> generateDefaultAspectsOnFirstW Set getEntityAspectNames(final String entityName); + @Override + @Nonnull EntityRegistry getEntityRegistry(); RollbackResult deleteAspect( @@ -349,15 +351,5 @@ default boolean exists(@Nonnull Urn urn, boolean includeSoftDelete) { BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException; - /** - * Allow internal use of the system entity client. 
Solves recursive dependencies between the - * EntityService and the SystemJavaEntityClient - * - * @param systemEntityClient system entity client - */ - void setSystemEntityClient(SystemEntityClient systemEntityClient); - - SystemEntityClient getSystemEntityClient(); - RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java index b3e713a906d01..625353eeb6820 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java @@ -5,6 +5,7 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; +import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.utils.QueryUtils; import java.net.URISyntaxException; import java.util.ArrayList; @@ -322,4 +323,18 @@ void removeEdgesFromNode( default boolean supportsMultiHop() { return false; } + + @Nonnull + RelatedEntitiesScrollResult scrollRelatedEntities( + @Nullable List<String> sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List<String> destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List<String> relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + @Nonnull List<SortCriterion> sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java new file mode 100644 index 0000000000000..0c6f8a0d65d5c --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntities.java @@ -0,0 +1,31 @@ +package com.linkedin.metadata.graph; + +import com.linkedin.metadata.query.filter.RelationshipDirection; +import javax.annotation.Nonnull; +import lombok.Getter; + +/** Preserves directionality as well as the generic `related` urn concept */ +@Getter +public class RelatedEntities extends RelatedEntity { + /** source Urn * */ + @Nonnull String sourceUrn; + + /** Destination Urn associated with the related entity. */ + @Nonnull String destinationUrn; + + public RelatedEntities( + @Nonnull String relationshipType, + @Nonnull String sourceUrn, + @Nonnull String destinationUrn, + @Nonnull RelationshipDirection relationshipDirection) { + super( + relationshipType, + relationshipDirection == RelationshipDirection.OUTGOING ? destinationUrn : sourceUrn); + this.sourceUrn = sourceUrn; + this.destinationUrn = destinationUrn; + } + + public RelatedEntity asRelatedEntity() { + return new RelatedEntity(relationshipType, urn); + } +}
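The constructor above chooses the generic related urn by direction: for an OUTGOING relationship the related urn is the destination, for INCOMING it is the source. An illustrative instantiation (the urns are made up):

    // A DownstreamOf edge viewed from its source entity.
    RelatedEntities edge =
        new RelatedEntities(
            "DownstreamOf",
            "urn:li:dataset:(urn:li:dataPlatform:hive,fct_orders,PROD)", // source
            "urn:li:dataset:(urn:li:dataPlatform:hive,agg_orders,PROD)", // destination
            RelationshipDirection.OUTGOING);
    // Collapses to the legacy single-urn view; urn is the destination here.
    RelatedEntity legacy = edge.asRelatedEntity();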
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java new file mode 100644 index 0000000000000..b0b5394ca5808 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntitiesScrollResult.java @@ -0,0 +1,16 @@ +package com.linkedin.metadata.graph; + +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +@AllArgsConstructor +@Data +@Builder +public class RelatedEntitiesScrollResult { + int numResults; + int pageSize; + String scrollId; + List<RelatedEntities> entities; +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 189ae09e1b938..2fec88ad221fd 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -161,7 +161,7 @@ AutoCompleteResult autoComplete( * @param field the field name for aggregate * @param requestParams filters to apply before aggregating * @param limit the number of aggregations to return - * @return + * @return a map of the value to the count of documents having the value */ @Nonnull Map<String, Long> aggregateByValue( diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java new file mode 100644 index 0000000000000..59d40b29e7383 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/FormService.java @@ -0,0 +1,1107 @@ +package com.linkedin.metadata.service; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.entity.AspectUtils.buildMetadataChangeProposal; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.FieldFormPromptAssociation; +import com.linkedin.common.FieldFormPromptAssociationArray; +import com.linkedin.common.FormAssociation; +import com.linkedin.common.FormAssociationArray; +import com.linkedin.common.FormPromptAssociation; +import com.linkedin.common.FormPromptAssociationArray; +import com.linkedin.common.FormPromptFieldAssociations; +import com.linkedin.common.FormVerificationAssociation; +import com.linkedin.common.FormVerificationAssociationArray; +import com.linkedin.common.Forms; +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.form.FormActorAssignment; +import com.linkedin.form.FormInfo; +import
com.linkedin.form.FormPrompt; +import com.linkedin.form.FormType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.authorization.OwnershipUtils; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.service.util.SearchBasedFormAssignmentRunner; +import com.linkedin.metadata.utils.FormUtils; +import com.linkedin.metadata.utils.SchemaFieldUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.schema.SchemaField; +import com.linkedin.schema.SchemaMetadata; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +/** + * This class is used to execute CRUD operations around forms and submitting responses to forms and + * their prompts. + * + *

Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. + */ +@Slf4j +public class FormService extends BaseService { + private static final int BATCH_FORM_ENTITY_COUNT = 500; + + public FormService( + @Nonnull final EntityClient entityClient, + @Nonnull final Authentication systemAuthentication) { + super(entityClient, systemAuthentication); + } + + /** Batch associated a form to a given set of entities by urn. */ + public void batchAssignFormToEntities( + @Nonnull final List entityUrns, @Nonnull final Urn formUrn) throws Exception { + batchAssignFormToEntities(entityUrns, formUrn, this.systemAuthentication); + } + + /** Batch associated a form to a given set of entities by urn. */ + public void batchAssignFormToEntities( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + verifyEntityExists(formUrn, authentication); + verifyEntitiesExist(entityUrns, authentication); + final List changes = + buildAssignFormChanges(entityUrns, formUrn, authentication); + ingestChangeProposals(changes, authentication); + } + + /** Batch remove a form from a given entity by urn. */ + public void batchUnassignFormForEntities( + @Nonnull final List entityUrns, @Nonnull final Urn formUrn) throws Exception { + batchUnassignFormForEntities(entityUrns, formUrn, this.systemAuthentication); + } + + /** Batch remove a form from a given entity by urn. */ + public void batchUnassignFormForEntities( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + verifyEntityExists(formUrn, authentication); + verifyEntitiesExist(entityUrns, authentication); + final List changes = + buildUnassignFormChanges(entityUrns, formUrn, authentication); + ingestChangeProposals(changes, authentication); + } + + /** Mark a specific form prompt as incomplete */ + public void batchSetFormPromptIncomplete( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId) + throws Exception { + batchSetFormPromptIncomplete(entityUrns, formUrn, formPromptId, this.systemAuthentication); + } + + /** Mark a specific form prompt as incomplete */ + public void batchSetFormPromptIncomplete( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + verifyEntityExists(formUrn, authentication); + verifyEntitiesExist(entityUrns, authentication); + final FormInfo formInfo = getFormInfo(formUrn, authentication); + final List changes = + buildUnsetFormPromptChanges(entityUrns, formUrn, formPromptId, formInfo, authentication); + ingestChangeProposals(changes, authentication); + } + + /** Create a dynamic form assignment for a particular form. */ + public void createDynamicFormAssignment( + @Nonnull final DynamicFormAssignment dynamicFormAssignment, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + if (!entityClient.exists(formUrn, authentication)) { + throw new RuntimeException( + String.format("Form %s does not exist. 
Skipping dynamic form assignment", formUrn)); + } + + try { + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + formUrn, Constants.DYNAMIC_FORM_ASSIGNMENT_ASPECT_NAME, dynamicFormAssignment), + authentication, + false); + } catch (Exception e) { + throw new RuntimeException("Failed to create form", e); + } + } + + /** Assigns the form to an entity for completion. */ + public void upsertFormAssignmentRunner( + @Nonnull final Urn formUrn, @Nonnull final DynamicFormAssignment formFilters) { + try { + SearchBasedFormAssignmentRunner.assign( + formFilters, formUrn, BATCH_FORM_ENTITY_COUNT, entityClient, systemAuthentication); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to dynamically assign form with urn: %s", formUrn), e); + } + } + + /** Submit a response for a structured property type prompt. */ + public Boolean batchSubmitStructuredPropertyPromptResponse( + @Nonnull final List entityUrns, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + entityUrns.forEach( + urnStr -> { + Urn urn = UrnUtils.getUrn(urnStr); + try { + submitStructuredPropertyPromptResponse( + urn, structuredPropertyUrn, values, formUrn, formPromptId, authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to batch submit structured property prompt", e); + } + }); + + return true; + } + + /** Submit a response for a structured property type prompt. */ + public Boolean submitStructuredPropertyPromptResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + + // First, let's apply the action and add the structured property. + ingestStructuredProperties(entityUrn, structuredPropertyUrn, values, authentication); + + // Then, let's apply the change to the entity's form status. + ingestCompletedFormResponse(entityUrn, formUrn, formPromptId, authentication); + + return true; + } + + /** Submit a response for a field-level structured property type prompt. */ + public Boolean batchSubmitFieldStructuredPropertyPromptResponse( + @Nonnull final List entityUrns, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + entityUrns.forEach( + urnStr -> { + Urn urn = UrnUtils.getUrn(urnStr); + try { + submitFieldStructuredPropertyPromptResponse( + urn, + structuredPropertyUrn, + values, + formUrn, + formPromptId, + fieldPath, + authentication); + } catch (Exception e) { + throw new RuntimeException( + "Failed to batch submit field structured property prompt", e); + } + }); + + return true; + } + + /** Submit a response for a field-level structured property type prompt. 
*/ + public Boolean submitFieldStructuredPropertyPromptResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + + // First, let's apply the action and add the structured property. + ingestSchemaFieldStructuredProperties( + entityUrn, structuredPropertyUrn, values, fieldPath, authentication); + + // Then, let's apply the change to the entity's form status. + ingestCompletedFieldFormResponse(entityUrn, formUrn, formPromptId, fieldPath, authentication); + + return true; + } + + private void ingestCompletedFieldFormResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + final Forms forms = getEntityForms(entityUrn, authentication); + final FormAssociation formAssociation = getFormWithUrn(forms, formUrn); + if (formAssociation == null) { + throw new RuntimeException( + String.format("Form %s has not been assigned to entity %s", formUrn, entityUrn)); + } + final FormPromptAssociation formPromptAssociation = + getOrDefaultFormPromptAssociation(formAssociation, formPromptId, authentication); + + // update the prompt association to have this fieldFormPromptAssociation marked as complete + updateFieldPromptToComplete( + formPromptAssociation, fieldPath, UrnUtils.getUrn(authentication.getActor().toUrnStr())); + + // field prompt is complete if all fields in entity's schema metadata are marked complete + if (isFieldPromptComplete(entityUrn, formPromptAssociation, authentication)) { + // if this is complete, the prompt as a whole should be marked as complete + ingestCompletedFormResponse(entityUrn, formUrn, formPromptId, forms, authentication); + } else { + // regardless, ingest forms to save state of this aspect + ingestForms(entityUrn, forms, authentication); + } + } + + private void ingestCompletedFormResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) + throws Exception { + final Forms forms = getEntityForms(entityUrn, authentication); + ingestCompletedFormResponse(entityUrn, formUrn, formPromptId, forms, authentication); + } + + private void ingestCompletedFormResponse( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Forms forms, + @Nonnull final Authentication authentication) + throws Exception { + // Next, get all the information we need to update the forms for the entity. + final FormInfo formInfo = getFormInfo(formUrn, authentication); + final FormAssociation formAssociation = getFormWithUrn(forms, formUrn); + + if (formAssociation == null) { + throw new RuntimeException( + String.format("Form %s has not been assigned to entity %s", formUrn, entityUrn)); + } + + // First, mark the prompt as completed in forms aspect. + updatePromptToComplete(formAssociation, entityUrn, formUrn, formPromptId, authentication); + + // Then, update the completed forms fields based on which prompts remain incomplete. + updateFormCompletion(forms, formAssociation, formInfo); + + // Finally, ingest the newly updated forms aspect. 
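+    // At this point `forms` reflects the newly completed prompt and, via
+    // updateFormCompletion above, any resulting change to the form's overall
+    // completed/incomplete status.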
+ ingestForms(entityUrn, forms, authentication); + } + + private void ingestSchemaFieldStructuredProperties( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final String fieldPath, + @Nonnull final Authentication authentication) + throws Exception { + Urn schemaFieldUrn = SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), fieldPath); + ingestStructuredProperties(schemaFieldUrn, structuredPropertyUrn, values, authentication); + } + + private void ingestStructuredProperties( + @Nonnull final Urn entityUrn, + @Nonnull final Urn structuredPropertyUrn, + @Nonnull final PrimitivePropertyValueArray values, + @Nonnull final Authentication authentication) + throws Exception { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME), + authentication); + + StructuredProperties structuredProperties = new StructuredProperties(); + structuredProperties.setProperties(new StructuredPropertyValueAssignmentArray()); + if (response != null && response.getAspects().containsKey(STRUCTURED_PROPERTIES_ASPECT_NAME)) { + structuredProperties = + new StructuredProperties( + response.getAspects().get(STRUCTURED_PROPERTIES_ASPECT_NAME).getValue().data()); + } + + // Since we upsert assignments for this structuredProperty, + // remove anything from this structured property and add to this list + List filteredAssignments = + structuredProperties.getProperties().stream() + .filter(assignment -> !assignment.getPropertyUrn().equals(structuredPropertyUrn)) + .collect(Collectors.toList()); + + StructuredPropertyValueAssignment assignment = new StructuredPropertyValueAssignment(); + assignment.setValues(values); + assignment.setPropertyUrn(structuredPropertyUrn); + assignment.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(System.currentTimeMillis())); + assignment.setLastModified( + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(System.currentTimeMillis())); + filteredAssignments.add(assignment); + + StructuredPropertyValueAssignmentArray assignments = + new StructuredPropertyValueAssignmentArray(filteredAssignments); + structuredProperties.setProperties(assignments); + + final MetadataChangeProposal structuredPropertiesProposal = + AspectUtils.buildMetadataChangeProposal( + entityUrn, STRUCTURED_PROPERTIES_ASPECT_NAME, structuredProperties); + try { + this.entityClient.ingestProposal(structuredPropertiesProposal, authentication, false); + } catch (Exception e) { + throw new RuntimeException("Failed to submit form response", e); + } + } + + private void ingestForms( + @Nonnull final Urn entityUrn, + @Nonnull final Forms forms, + @Nonnull final Authentication authentication) { + try { + ingestChangeProposals( + ImmutableList.of( + AspectUtils.buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, forms)), + authentication); + } catch (Exception e) { + log.warn(String.format("Failed to ingest forms for entity with urn %s", entityUrn), e); + } + } + + private Forms getEntityForms( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) throws Exception { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(FORMS_ASPECT_NAME), + authentication); + if (response != null && response.getAspects().containsKey(FORMS_ASPECT_NAME)) { + return new 
Forms(response.getAspects().get(FORMS_ASPECT_NAME).getValue().data()); + } + // No entity forms found. + throw new RuntimeException( + String.format( + "Entity is missing forms aspect, form is not assigned to entity with urn %s", + entityUrn)); + } + + /** + * Checks schema metadata for an entity and ensures there's a completed field prompt for every + * field. If there is no schema metadata, raise an error. + */ + private boolean isFieldPromptComplete( + @Nonnull final Urn entityUrn, + @Nonnull final FormPromptAssociation formPromptAssociation, + @Nonnull final Authentication authentication) + throws Exception { + final Set completedFieldPaths = + Objects.requireNonNull(formPromptAssociation.getFieldAssociations()) + .getCompletedFieldPrompts() + .stream() + .map(FieldFormPromptAssociation::getFieldPath) + .collect(Collectors.toSet()); + final SchemaMetadata schemaMetadata = getSchemaMetadata(entityUrn, authentication); + final List fieldPaths = + schemaMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .collect(Collectors.toList()); + + return completedFieldPaths.containsAll(fieldPaths); + } + + /** + * Performs the operation of changing the status of a form field prompt from incomplete to + * complete. + */ + private void updateFieldPromptToComplete( + @Nonnull final FormPromptAssociation formPromptAssociation, + @Nonnull final String fieldPath, + @Nonnull final Urn actor) { + final FieldFormPromptAssociation completedFieldPromptAssociation = + new FieldFormPromptAssociation(); + completedFieldPromptAssociation.setFieldPath(fieldPath); + completedFieldPromptAssociation.setLastModified(createAuditStamp(actor)); + + FormPromptFieldAssociations fieldAssociations = + formPromptAssociation.getFieldAssociations() != null + ? formPromptAssociation.getFieldAssociations() + : new FormPromptFieldAssociations(); + + if (fieldAssociations.getCompletedFieldPrompts() == null) { + fieldAssociations.setCompletedFieldPrompts(new FieldFormPromptAssociationArray()); + } + if (fieldAssociations.getIncompleteFieldPrompts() == null) { + fieldAssociations.setIncompleteFieldPrompts(new FieldFormPromptAssociationArray()); + } + + // add this prompt association to list of completed prompts, removing its previous association + // if it was already in there + FieldFormPromptAssociationArray completedFieldPrompts = + new FieldFormPromptAssociationArray( + fieldAssociations.getCompletedFieldPrompts().stream() + .filter(fieldPrompt -> !fieldPrompt.getFieldPath().equals(fieldPath)) + .collect(Collectors.toList())); + completedFieldPrompts.add(completedFieldPromptAssociation); + fieldAssociations.setCompletedFieldPrompts(completedFieldPrompts); + + // remove this prompt association from list of incomplete prompts + FieldFormPromptAssociationArray incompleteFieldPrompts = new FieldFormPromptAssociationArray(); + fieldAssociations + .getIncompleteFieldPrompts() + .forEach( + incompleteFieldPrompt -> { + if (!incompleteFieldPrompt.getFieldPath().equals(fieldPath)) { + incompleteFieldPrompts.add(incompleteFieldPrompt); + } + }); + fieldAssociations.setIncompleteFieldPrompts(incompleteFieldPrompts); + + formPromptAssociation.setFieldAssociations(fieldAssociations); + } + + /** Performs the operation of changing the status of a form prompt from incomplete to complete. 
*/ + private void updatePromptToComplete( + @Nonnull final FormAssociation formAssociation, + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) { + final FormPromptAssociation formPromptAssociation = + getOrDefaultFormPromptAssociation(formAssociation, formPromptId, authentication); + + // add this prompt association to list of completed prompts, removing its previous association + // if it was already in there + FormPromptAssociationArray completedPrompts = + new FormPromptAssociationArray( + formAssociation.getCompletedPrompts().stream() + .filter(prompt -> !prompt.getId().equals(formPromptId)) + .collect(Collectors.toList())); + completedPrompts.add(formPromptAssociation); + formAssociation.setCompletedPrompts(completedPrompts); + + // remove this prompt association from list of incomplete prompts + FormPromptAssociationArray incompletePrompts = new FormPromptAssociationArray(); + formAssociation + .getIncompletePrompts() + .forEach( + incompletePrompt -> { + if (!incompletePrompt.getId().equals(formPromptId)) { + incompletePrompts.add(incompletePrompt); + } + }); + formAssociation.setIncompletePrompts(incompletePrompts); + } + + /** Performs the operation of changing the status of a form prompt from complete to incomplete. */ + private void updatePromptToIncomplete( + @Nonnull final FormAssociation form, + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId) { + // Remove the prompt from completed. + final List newCompletedPrompts = + form.getCompletedPrompts().stream() + .filter(prompt -> !prompt.getId().equals(formPromptId)) + .collect(Collectors.toList()); + form.setCompletedPrompts(new FormPromptAssociationArray(newCompletedPrompts)); + + // Add the prompt to in-completed. + if (form.getIncompletePrompts().stream() + .anyMatch(prompt -> prompt.getId().equals(formPromptId))) { + log.warn( + String.format( + "Attempting to unset a prompt that is already incomplete. Skipping... Form: %s, Prompt: %s, Entity: %s", + formUrn, formPromptId, entityUrn)); + return; + } + final List newIncompletePrompts = + new ArrayList<>(form.getIncompletePrompts()); + newIncompletePrompts.add( + new FormPromptAssociation().setId(formPromptId).setLastModified(createSystemAuditStamp())); + form.setIncompletePrompts(new FormPromptAssociationArray(newIncompletePrompts)); + } + + private List buildAssignFormChanges( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) { + final List results = new ArrayList<>(); + entityUrns.forEach( + entityUrn -> { + try { + MetadataChangeProposal maybeChange = + buildAssignFormChange(entityUrn, formUrn, authentication); + if (maybeChange != null) { + results.add(maybeChange); + } + } catch (Exception e) { + log.warn( + String.format( + "Failed to retrieve form %s for entity %s. 
Skipping form assignment", + formUrn, entityUrn), + e); + } + }); + return results; + } + + @Nullable + private MetadataChangeProposal buildAssignFormChange( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(FORMS_ASPECT_NAME), + authentication); + + Forms formsAspect = new Forms(); + formsAspect.setIncompleteForms(new FormAssociationArray()); + formsAspect.setCompletedForms(new FormAssociationArray()); + if (response != null && response.getAspects().containsKey(FORMS_ASPECT_NAME)) { + formsAspect = new Forms(response.getAspects().get(FORMS_ASPECT_NAME).getValue().data()); + } + + // if this form is already assigned to this entity, leave it and move on + Optional formAssociation = + Stream.concat( + formsAspect.getCompletedForms().stream(), formsAspect.getIncompleteForms().stream()) + .filter(form -> form.getUrn().equals(formUrn)) + .findAny(); + + if (formAssociation.isPresent()) { + return null; + } + + // add this form to the entity's incomplete form associations. + FormAssociationArray incompleteForms = formsAspect.getIncompleteForms(); + FormAssociation newAssociation = new FormAssociation(); + newAssociation.setUrn(formUrn); + + // set all prompts as incomplete when assigning this form + FormInfo formInfo = getFormInfo(formUrn, authentication); + FormPromptAssociationArray formPromptAssociations = new FormPromptAssociationArray(); + formInfo + .getPrompts() + .forEach( + prompt -> { + FormPromptAssociation association = new FormPromptAssociation(); + association.setId(prompt.getId()); + association.setLastModified(createAuditStamp(authentication)); + formPromptAssociations.add(association); + }); + newAssociation.setIncompletePrompts(formPromptAssociations); + newAssociation.setCompletedPrompts(new FormPromptAssociationArray()); + incompleteForms.add(newAssociation); + formsAspect.setIncompleteForms(incompleteForms); + return buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, formsAspect); + } + + private List buildUnassignFormChanges( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) { + final List results = new ArrayList<>(); + entityUrns.forEach( + entityUrn -> { + try { + MetadataChangeProposal maybeChange = + buildUnassignFormChange(entityUrn, formUrn, authentication); + if (maybeChange != null) { + results.add(maybeChange); + } + } catch (Exception e) { + log.warn( + String.format( + "Failed to retrieve form %s for entity %s. 
Skipping form unassignment.", + formUrn, entityUrn), + e); + } + }); + return results; + } + + @Nullable + private MetadataChangeProposal buildUnassignFormChange( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final Authentication authentication) + throws Exception { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(FORMS_ASPECT_NAME), + authentication); + Forms formsAspect = new Forms(); + formsAspect.setCompletedForms(new FormAssociationArray()); + formsAspect.setIncompleteForms(new FormAssociationArray()); + if (response != null && response.getAspects().containsKey(FORMS_ASPECT_NAME)) { + formsAspect = new Forms(response.getAspects().get(FORMS_ASPECT_NAME).getValue().data()); + } + + List newCompleted = + new ArrayList<>( + new FormAssociationArray( + formsAspect.getCompletedForms().stream() + .filter(form -> !form.getUrn().equals(formUrn)) + .collect(Collectors.toList()))); + List newIncomplete = + new ArrayList<>( + new FormAssociationArray( + formsAspect.getIncompleteForms().stream() + .filter(form -> !form.getUrn().equals(formUrn)) + .collect(Collectors.toList()))); + + if (newCompleted.size() == formsAspect.getCompletedForms().size() + && newIncomplete.size() == formsAspect.getIncompleteForms().size()) { + // No metadata to change. Skip ingestion. + return null; + } + + formsAspect.setCompletedForms(new FormAssociationArray(newCompleted)); + formsAspect.setIncompleteForms(new FormAssociationArray(newIncomplete)); + + return buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, formsAspect); + } + + private List buildUnsetFormPromptChanges( + @Nonnull final List entityUrns, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final FormInfo formDefinition, + @Nonnull final Authentication authentication) { + final List results = new ArrayList<>(); + entityUrns.forEach( + entityUrn -> { + try { + MetadataChangeProposal maybeChange = + buildUnsetFormPromptChange( + entityUrn, formUrn, formPromptId, formDefinition, authentication); + if (maybeChange != null) { + results.add(maybeChange); + } + } catch (Exception e) { + log.warn( + String.format( + "Failed to retrieve form %s for entity %s. Skipping form unassignment.", + formUrn, entityUrn), + e); + } + }); + return results; + } + + @Nullable + private MetadataChangeProposal buildUnsetFormPromptChange( + @Nonnull final Urn entityUrn, + @Nonnull final Urn formUrn, + @Nonnull final String formPromptId, + @Nonnull final FormInfo formDefinition, + @Nonnull final Authentication authentication) + throws Exception { + + // Retrieve entity forms state + final Forms forms = getEntityForms(entityUrn, authentication); + + // First, find the form with the provided urn. + final FormAssociation formAssociation = getFormWithUrn(forms, formUrn); + + if (formAssociation != null) { + // 1. Find and mark the provided form prompt as incomplete. + updatePromptToIncomplete(formAssociation, entityUrn, formUrn, formPromptId); + + // 2. Update the form's completion status given the incomplete prompt. + updateFormCompletion(forms, formAssociation, formDefinition); + + // 3. Update the form status aspect for the entity. + return buildMetadataChangeProposal(entityUrn, FORMS_ASPECT_NAME, forms); + } else { + // Form not assigned to the entity! Let's warn and do nothing. + log.warn( + String.format( + "Failed to find form with urn %s associated with entity urn %s while attempting to unset form prompt %s. 
Skipping...", + formUrn, entityUrn, formPromptId)); + } + + return null; + } + + private void updateFormCompletion( + @Nonnull final Forms forms, + @Nonnull final FormAssociation form, + @Nonnull final FormInfo formDefinition) { + + final boolean isFormCompleted = isFormCompleted(form, formDefinition); + + if (isFormCompleted) { + // If the form is complete, we want to add it to completed forms. + + // 1. Remove from incomplete. + forms.setIncompleteForms( + new FormAssociationArray( + forms.getIncompleteForms().stream() + .filter(incompleteForm -> !incompleteForm.getUrn().equals(form.getUrn())) + .collect(Collectors.toList()))); + + // 2. Add to complete (if not already present) + if (forms.getCompletedForms().stream() + .noneMatch(completedForm -> completedForm.getUrn().equals(form.getUrn()))) { + // Not found in completed, let's update it. + List newCompleted = new ArrayList<>(forms.getCompletedForms()); + newCompleted.add(form); + forms.setCompletedForms(new FormAssociationArray(newCompleted)); + } + } else { + // If the form is incomplete, we want to remove it from the completed forms. + // If the form implies verification, we also ensure that the verification status is + // un-applied. + + // 1. Remove from complete. + forms.setCompletedForms( + new FormAssociationArray( + forms.getCompletedForms().stream() + .filter(completedForm -> !completedForm.getUrn().equals(form.getUrn())) + .collect(Collectors.toList()))); + + // 2. Add to incomplete (if not already present) + if (forms.getIncompleteForms().stream() + .noneMatch(incompleteForm -> incompleteForm.getUrn().equals(form.getUrn()))) { + // Not found in incompleted. Let's updated + List newIncomplete = new ArrayList<>(forms.getIncompleteForms()); + newIncomplete.add(form); + forms.setIncompleteForms(new FormAssociationArray(newIncomplete)); + } + + // 3. Remove verification as required. + if (FormType.VERIFICATION.equals(formDefinition.getType())) { + removeFormVerification(form.getUrn(), forms); + } + } + } + + /** + * Returns true if a form is considered completed, false otherwise. This is a function of whether + * all required prompts are marked as completed. + * + *

If none or some required prompts are marked as completed, then the form will be considered + * NOT completed. + * + * @param form the form status, as completed for a specific entity. + * @param formDefinition the form definition, which contains information about which prompts are + * required. + */ + private boolean isFormCompleted( + @Nonnull final FormAssociation form, @Nonnull final FormInfo formDefinition) { + final List requiredPromptsIds = + formDefinition.getPrompts().stream() + .filter(FormPrompt::isRequired) + .map(FormPrompt::getId) + .collect(Collectors.toList()); + + final List completedPromptIds = + form.getCompletedPrompts().stream() + .map(FormPromptAssociation::getId) + .collect(Collectors.toList()); + + // If all required prompts are completed, then the form is completed. + return completedPromptIds.containsAll(requiredPromptsIds); + } + + @Nullable + private FormAssociation getFormWithUrn( + @Nonnull final Forms existingForms, @Nonnull final Urn formUrn) { + // First check in the completed set. + Optional maybeForm = + existingForms.getCompletedForms().stream() + .filter(form -> form.getUrn().equals(formUrn)) + .findFirst(); + if (maybeForm.isPresent()) { + return maybeForm.get(); + } + + // Then check the incomplete set. + maybeForm = + existingForms.getIncompleteForms().stream() + .filter(form -> form.getUrn().equals(formUrn)) + .findFirst(); + if (maybeForm.isPresent()) { + return maybeForm.get(); + } + + // No form found, return null. + return null; + } + + @Nullable + private FormPromptAssociation getFormPromptAssociation( + @Nonnull final FormAssociation formAssociation, @Nonnull final String formPromptId) { + // First check in the completed set. + Optional maybePromptAssociation = + formAssociation.getCompletedPrompts().stream() + .filter(prompt -> prompt.getId().equals(formPromptId)) + .findFirst(); + if (maybePromptAssociation.isPresent()) { + return maybePromptAssociation.get(); + } + + // Then check the incomplete set. + maybePromptAssociation = + formAssociation.getIncompletePrompts().stream() + .filter(prompt -> prompt.getId().equals(formPromptId)) + .findFirst(); + if (maybePromptAssociation.isPresent()) { + return maybePromptAssociation.get(); + } + + // No prompt association found, return null. + return null; + } + + /** + * Gets a form prompt association by the prompt ID. If none exists (could happen as a form was + * changed after assigned or some other reason), then create the association and add it to the + * formAssociation's list of incomplete prompts. + */ + private FormPromptAssociation getOrDefaultFormPromptAssociation( + @Nonnull final FormAssociation formAssociation, + @Nonnull final String formPromptId, + @Nonnull final Authentication authentication) { + final FormPromptAssociation existingPromptAssociation = + getFormPromptAssociation(formAssociation, formPromptId); + final FormPromptAssociation formPromptAssociation = + existingPromptAssociation != null ? 
existingPromptAssociation : new FormPromptAssociation(); + formPromptAssociation.setId(formPromptId); + formPromptAssociation.setLastModified( + createAuditStamp(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + if (existingPromptAssociation == null) { + FormPromptAssociationArray incompletePrompts = + new FormPromptAssociationArray(formAssociation.getIncompletePrompts()); + incompletePrompts.add(formPromptAssociation); + formAssociation.setIncompletePrompts(incompletePrompts); + } + return formPromptAssociation; + } + + private void removeFormVerification(@Nonnull final Urn formUrn, @Nonnull final Forms forms) { + if (!forms.hasVerifications()) { + // Nothing to do. + return; + } + + // Remove verification of given urn. + final List newVerifications = + forms.getVerifications().stream() + .filter(verification -> !formUrn.equals(verification.getForm())) + .collect(Collectors.toList()); + + // Update verifications for forms aspect. + forms.setVerifications(new FormVerificationAssociationArray(newVerifications)); + } + + /** + * A form is assigned to a user if either the user or a group the user is in is explicitly set on + * the actors field on a form. Otherwise, if the actors field says that owners are assigned, + * ensure this actor, or a group they're in, is an owner of this entity. + */ + public boolean isFormAssignedToUser( + @Nonnull final Urn formUrn, + @Nonnull final Urn entityUrn, + @Nonnull final Urn actorUrn, + @Nonnull final List groupsForUser, + @Nonnull final Authentication authentication) + throws Exception { + final FormInfo formInfo = getFormInfo(formUrn, authentication); + final FormActorAssignment formActorAssignment = formInfo.getActors(); + if (FormUtils.isFormAssignedToUser(formActorAssignment, actorUrn, groupsForUser)) { + return true; + } + + if (formActorAssignment.isOwners()) { + Ownership entityOwnership = getEntityOwnership(entityUrn, authentication); + return OwnershipUtils.isOwnerOfEntity(entityOwnership, actorUrn, groupsForUser); + } + + return false; + } + + /** + * Adds a new form verification association for an entity for this form on their forms aspect. If + * there was an existing verification association for this form, remove and replace it. First, + * ensure this form is of VERIFICATION type and that this form is in completedForms. + */ + public boolean verifyFormForEntity( + @Nonnull final Urn formUrn, + @Nonnull final Urn entityUrn, + @Nonnull final Authentication authentication) + throws Exception { + final FormInfo formInfo = getFormInfo(formUrn, authentication); + if (!formInfo.getType().equals(FormType.VERIFICATION)) { + throw new UnsupportedOperationException( + String.format("Form %s is not of type VERIFICATION. Cannot verify form.", formUrn)); + } + final Forms formsAspect = getEntityForms(entityUrn, authentication); + if (!isFormInCompletedForms(formUrn, formsAspect)) { + throw new RuntimeException( + String.format( + "Form %s is not in the list of completed forms for this entity. 
Skipping verification.", + formUrn)); + } + + // Remove any existing verifications for this form to patch a new one + List formVerifications = + formsAspect.getVerifications().stream() + .filter(verification -> !verification.getForm().equals(formUrn)) + .collect(Collectors.toList()); + FormVerificationAssociation newAssociation = new FormVerificationAssociation(); + newAssociation.setForm(formUrn); + newAssociation.setLastModified(createAuditStamp(authentication)); + formVerifications.add(newAssociation); + + formsAspect.setVerifications(new FormVerificationAssociationArray(formVerifications)); + + ingestForms(entityUrn, formsAspect, authentication); + return true; + } + + private boolean isFormInCompletedForms( + @Nonnull final Urn formUrn, @Nonnull final Forms formsAspect) { + return formsAspect.getCompletedForms().stream() + .anyMatch(completedForm -> completedForm.getUrn().equals(formUrn)); + } + + public FormInfo getFormInfo( + @Nonnull final Urn formUrn, @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { + final EntityResponse formInfoResponse = + entityClient.getV2( + formUrn.getEntityType(), + formUrn, + ImmutableSet.of(FORM_INFO_ASPECT_NAME), + authentication); + if (formInfoResponse != null + && formInfoResponse.getAspects().containsKey(FORM_INFO_ASPECT_NAME)) { + return new FormInfo( + formInfoResponse.getAspects().get(FORM_INFO_ASPECT_NAME).getValue().data()); + } else { + throw new RuntimeException(String.format("Form %s does not exist.", formUrn)); + } + } + + private SchemaMetadata getSchemaMetadata( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { + final EntityResponse response = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(SCHEMA_METADATA_ASPECT_NAME), + authentication); + if (response != null && response.getAspects().containsKey(SCHEMA_METADATA_ASPECT_NAME)) { + return new SchemaMetadata( + response.getAspects().get(SCHEMA_METADATA_ASPECT_NAME).getValue().data()); + } else { + throw new RuntimeException( + String.format("Schema metadata does not exist on entity %s.", entityUrn)); + } + } + + private Ownership getEntityOwnership( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { + final EntityResponse entityResponse = + entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(OWNERSHIP_ASPECT_NAME), + authentication); + if (entityResponse != null && entityResponse.getAspects().containsKey(OWNERSHIP_ASPECT_NAME)) { + return new Ownership( + entityResponse.getAspects().get(OWNERSHIP_ASPECT_NAME).getValue().data()); + } else { + throw new RuntimeException(String.format("Ownership %s does not exist.", entityUrn)); + } + } + + private void verifyEntitiesExist( + @Nonnull final List entityUrns, @Nonnull final Authentication authentication) { + entityUrns.forEach( + entityUrn -> { + try { + verifyEntityExists(entityUrn, authentication); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Issue verifying whether entity exists when assigning form to it. Entity urn: %s", + entityUrn)); + } + }); + } + + private void verifyEntityExists( + @Nonnull final Urn entityUrn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + if (!entityClient.exists(entityUrn, authentication)) { + throw new RuntimeException( + String.format("Entity %s does not exist. 
Skipping batch form assignment", entityUrn)); + } + } + + private AuditStamp createSystemAuditStamp() { + return createAuditStamp(UrnUtils.getUrn(SYSTEM_ACTOR)); + } + + private AuditStamp createAuditStamp(@Nonnull final Authentication authentication) { + return createAuditStamp(UrnUtils.getUrn(authentication.getActor().toUrnStr())); + } + + private AuditStamp createAuditStamp(@Nonnull final Urn actor) { + return new AuditStamp().setTime(System.currentTimeMillis()).setActor(actor); + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java new file mode 100644 index 0000000000000..22496b6c07806 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java @@ -0,0 +1,328 @@ +package com.linkedin.metadata.service; + +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.AuthenticationException; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.execution.ExecutionRequestResult; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.RollbackRunResult; +import com.linkedin.metadata.key.ExecutionRequestKey; +import com.linkedin.metadata.run.AspectRowSummary; +import com.linkedin.metadata.run.AspectRowSummaryArray; +import com.linkedin.metadata.run.RollbackResponse; +import com.linkedin.metadata.run.UnsafeEntityInfo; +import com.linkedin.metadata.run.UnsafeEntityInfoArray; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.timeseries.DeleteAspectValuesResult; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** Extracts logic historically in the Restli service which acts across multiple services */ +@Slf4j +@AllArgsConstructor +public class RollbackService { + public static final String ROLLING_BACK_STATUS = "ROLLING_BACK"; + public static final String ROLLED_BACK_STATUS = "ROLLED_BACK"; + public static final String ROLLBACK_FAILED_STATUS = "ROLLBACK_FAILED"; + + public static final int MAX_RESULT_SIZE = 10000; + public static final int ELASTIC_MAX_PAGE_SIZE = 10000; + public static final int DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE = 1000000; + public static final int ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; + + private final EntityService entityService; + private final SystemMetadataService systemMetadataService; + private final 
TimeseriesAspectService timeseriesAspectService; + private final boolean restApiAuthorizationEnabled; + + public List rollbackTargetAspects(@Nonnull String runId, boolean hardDelete) { + return systemMetadataService.findByRunId(runId, hardDelete, 0, MAX_RESULT_SIZE); + } + + public RollbackResponse rollbackIngestion( + @Nonnull String runId, + boolean dryRun, + boolean hardDelete, + Authorizer authorizer, + @Nonnull Authentication authentication) + throws AuthenticationException { + + if (runId.equals(DEFAULT_RUN_ID)) { + throw new IllegalArgumentException( + String.format( + "%s is a default run-id provided for non-labeled ingestion runs. You cannot delete using this reserved run-id", + runId)); + } + + if (!dryRun) { + updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS); + } + + List aspectRowsToDelete = rollbackTargetAspects(runId, hardDelete); + if (!isAuthorized(authorizer, aspectRowsToDelete, authentication)) { + throw new AuthenticationException("User is not authorized to delete entities."); + } + + log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); + if (dryRun) { + + final Map> aspectsSplitByIsKeyAspects = + aspectRowsToDelete.stream() + .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); + + final List keyAspects = aspectsSplitByIsKeyAspects.get(true); + + long entitiesDeleted = keyAspects.size(); + long aspectsReverted = aspectRowsToDelete.size(); + + final long affectedEntities = + aspectRowsToDelete.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final AspectRowSummaryArray rowSummaries = + new AspectRowSummaryArray( + aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); + + // If we are soft deleting, remove key aspects from count of aspects being deleted + if (!hardDelete) { + aspectsReverted -= keyAspects.size(); + rowSummaries.removeIf(AspectRowSummary::isKeyAspect); + } + // Compute the aspects that exist referencing the key aspects we are deleting + final List affectedAspectsList = + keyAspects.stream() + .map( + (AspectRowSummary urn) -> + systemMetadataService.findByUrn(urn.getUrn(), false, 0, MAX_RESULT_SIZE)) + .flatMap(List::stream) + .filter( + row -> + !row.getRunId().equals(runId) + && !row.isKeyAspect() + && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) + .toList(); + + long unsafeEntitiesCount = + affectedAspectsList.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final List unsafeEntityInfos = + affectedAspectsList.stream() + .map(AspectRowSummary::getUrn) + .distinct() + .map( + urn -> { + UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); + unsafeEntityInfo.setUrn(urn); + return unsafeEntityInfo; + }) + // Return at most 1 million rows + .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) + .collect(Collectors.toList()); + + return new RollbackResponse() + .setAspectsReverted(aspectsReverted) + .setEntitiesAffected(affectedEntities) + .setEntitiesDeleted(entitiesDeleted) + .setUnsafeEntitiesCount(unsafeEntitiesCount) + .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) + .setAspectRowSummaries(rowSummaries); + } + + RollbackRunResult rollbackRunResult = + entityService.rollbackRun(aspectRowsToDelete, runId, hardDelete); + final List deletedRows = rollbackRunResult.getRowsRolledBack(); + int rowsDeletedFromEntityDeletion = rollbackRunResult.getRowsDeletedFromEntityDeletion(); + + // since elastic limits how many rows we can access at once, we need to iteratively
+ // delete + while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { + sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); + aspectRowsToDelete = systemMetadataService.findByRunId(runId, hardDelete, 0, MAX_RESULT_SIZE); + log.info("{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); + log.info("deleting..."); + rollbackRunResult = entityService.rollbackRun(aspectRowsToDelete, runId, hardDelete); + deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); + rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); + } + + // Rollback timeseries aspects + DeleteAspectValuesResult timeseriesRollbackResult = + timeseriesAspectService.rollbackTimeseriesAspects(runId); + rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); + + log.info("finished deleting {} rows", deletedRows.size()); + int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; + + final Map> aspectsSplitByIsKeyAspects = + aspectRowsToDelete.stream() + .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); + + final List keyAspects = aspectsSplitByIsKeyAspects.get(true); + + final long entitiesDeleted = keyAspects.size(); + final long affectedEntities = + deletedRows.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final AspectRowSummaryArray rowSummaries = + new AspectRowSummaryArray( + aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); + + log.info("computing aspects affected by this rollback..."); + // Compute the aspects that exist referencing the key aspects we are deleting + final List affectedAspectsList = + keyAspects.stream() + .map( + (AspectRowSummary urn) -> + systemMetadataService.findByUrn(urn.getUrn(), false, 0, MAX_RESULT_SIZE)) + .flatMap(List::stream) + .filter( + row -> + !row.getRunId().equals(runId) + && !row.isKeyAspect() + && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) + .toList(); + + long affectedAspects = affectedAspectsList.size(); + long unsafeEntitiesCount = + affectedAspectsList.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final List unsafeEntityInfos = + affectedAspectsList.stream() + .map(AspectRowSummary::getUrn) + .distinct() + .map( + urn -> { + UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); + unsafeEntityInfo.setUrn(urn); + return unsafeEntityInfo; + }) + // Return at most 1 million rows + .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) + .collect(Collectors.toList()); + + log.info("calculation done."); + + updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); + + return new RollbackResponse() + .setAspectsAffected(affectedAspects) + .setAspectsReverted(aspectsReverted) + .setEntitiesAffected(affectedEntities) + .setEntitiesDeleted(entitiesDeleted) + .setUnsafeEntitiesCount(unsafeEntitiesCount) + .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) + .setAspectRowSummaries(rowSummaries); + } + + public void updateExecutionRequestStatus(@Nonnull String runId, @Nonnull String status) { + try { + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn( + new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); + EnvelopedAspect aspect = + entityService.getLatestEnvelopedAspect( + executionRequestUrn.getEntityType(), + executionRequestUrn, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + if (aspect == null) { + log.warn("Aspect for execution request with runId {} not found", runId); + } else { + 
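+        // Rewrite the executionRequestResult aspect with the new status and re-ingest it as an UPSERT.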
final MetadataChangeProposal proposal = new MetadataChangeProposal(); + ExecutionRequestResult requestResult = new ExecutionRequestResult(aspect.getValue().data()); + requestResult.setStatus(status); + proposal.setEntityUrn(executionRequestUrn); + proposal.setEntityType(Constants.EXECUTION_REQUEST_ENTITY_NAME); + proposal.setAspectName(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + proposal.setAspect(GenericRecordUtils.serializeAspect(requestResult)); + proposal.setChangeType(ChangeType.UPSERT); + + entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); + } + } catch (Exception e) { + log.error( + String.format( + "Not able to update execution result aspect with runId %s and new status %s.", + runId, status), + e); + } + } + + private boolean isAuthorized( + final Authorizer authorizer, + @Nonnull List rowSummaries, + @Nonnull Authentication authentication) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); + + List> resourceSpecs = + rowSummaries.stream() + .map(AspectRowSummary::getUrn) + .map(UrnUtils::getUrn) + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .distinct() + .collect(Collectors.toList()); + + return !restApiAuthorizationEnabled + || AuthUtil.isAuthorizedForResources( + authorizer, authentication.getActor().toUrnStr(), resourceSpecs, orGroup); + } + + private static String stringifyRowCount(int size) { + if (size < ELASTIC_MAX_PAGE_SIZE) { + return String.valueOf(size); + } else { + return "at least " + size; + } + } + + private static void sleep(int seconds) { + try { + TimeUnit.SECONDS.sleep(seconds); + } catch (InterruptedException e) { + log.error("Rollback sleep exception", e); + } + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java new file mode 100644 index 0000000000000..73e3bc130ac9d --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentManager.java @@ -0,0 +1,94 @@ +package com.linkedin.metadata.service.util; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.service.FormService; +import com.linkedin.r2.RemoteInvocationException; +import java.util.List; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SearchBasedFormAssignmentManager { + + private static final ImmutableList ENTITY_TYPES = + ImmutableList.of(Constants.DATASET_ENTITY_NAME); + + public static void apply( + DynamicFormAssignment formFilters, + Urn formUrn, + int batchFormEntityCount, + EntityClient entityClient, + Authentication authentication) + throws Exception { + + try { + int totalResults = 0; + int numResults = 0; + String scrollId = null; + FormService formService = new FormService(entityClient, authentication); + + do { + + ScrollResult results = + 
entityClient.scrollAcrossEntities( + ENTITY_TYPES, + "*", + formFilters.getFilter(), + scrollId, + "5m", + batchFormEntityCount, + null, + authentication); + + if (!results.hasEntities() + || results.getNumEntities() == 0 + || results.getEntities().isEmpty()) { + break; + } + + log.info("Search across entities results: {}.", results); + + if (results.hasEntities()) { + final List entityUrns = + results.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + formService.batchAssignFormToEntities(entityUrns, formUrn); + + if (!entityUrns.isEmpty()) { + log.info("Batch assign {} entities to form {}.", entityUrns.size(), formUrn); + } + + numResults = results.getEntities().size(); + totalResults += numResults; + scrollId = results.getScrollId(); + + log.info( + "Starting batch assign forms, count: {} running total: {}, size: {}", + batchFormEntityCount, + totalResults, + results.getEntities().size()); + + } else { + break; + } + } while (scrollId != null); + + log.info("Successfully assigned {} entities to form {}.", totalResults, formUrn); + + } catch (RemoteInvocationException e) { + log.error("Error while assigning form to entities.", e); + throw new RuntimeException(e); + } + } + + private SearchBasedFormAssignmentManager() {} +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java new file mode 100644 index 0000000000000..a20f71f550c65 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/util/SearchBasedFormAssignmentRunner.java @@ -0,0 +1,45 @@ +package com.linkedin.metadata.service.util; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.DynamicFormAssignment; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class SearchBasedFormAssignmentRunner { + + public static void assign( + DynamicFormAssignment formFilters, + Urn formUrn, + int batchFormEntityCount, + EntityClient entityClient, + Authentication authentication) { + Runnable runnable = + new Runnable() { + @Override + public void run() { + try { + SearchBasedFormAssignmentManager.apply( + formFilters, formUrn, batchFormEntityCount, entityClient, authentication); + } catch (Exception e) { + log.error( + "SearchBasedFormAssignmentRunner failed to run. 
" + + "Options: formFilters: {}, " + + "formUrn: {}, " + + "batchFormCount: {}, " + + "entityClient: {}, ", + formFilters, + formUrn, + batchFormEntityCount, + entityClient); + throw new RuntimeException("Form assignment runner error.", e); + } + } + }; + + new Thread(runnable).start(); + } + + private SearchBasedFormAssignmentRunner() {} +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java index 71c4d357ad1eb..b6bef33df1d7f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.shared; +import com.codahale.metrics.Timer; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.AbstractArrayTemplate; @@ -19,6 +20,7 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.utils.metrics.MetricUtils; import java.util.Objects; import java.util.Set; import java.util.function.Function; @@ -33,25 +35,27 @@ public class ValidationUtils { public static SearchResult validateSearchResult( final SearchResult searchResult, @Nonnull final EntityService entityService) { - if (searchResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateSearchResult").time()) { + if (searchResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + SearchResult validatedSearchResult = + new SearchResult() + .setFrom(searchResult.getFrom()) + .setMetadata(searchResult.getMetadata()) + .setPageSize(searchResult.getPageSize()) + .setNumEntities(searchResult.getNumEntities()); + + SearchEntityArray validatedEntities = + validatedUrns(searchResult.getEntities(), SearchEntity::getEntity, entityService, true) + .collect(Collectors.toCollection(SearchEntityArray::new)); + validatedSearchResult.setEntities(validatedEntities); + + return validatedSearchResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - SearchResult validatedSearchResult = - new SearchResult() - .setFrom(searchResult.getFrom()) - .setMetadata(searchResult.getMetadata()) - .setPageSize(searchResult.getPageSize()) - .setNumEntities(searchResult.getNumEntities()); - - SearchEntityArray validatedEntities = - validatedUrns(searchResult.getEntities(), SearchEntity::getEntity, entityService, true) - .collect(Collectors.toCollection(SearchEntityArray::new)); - - validatedSearchResult.setEntities(validatedEntities); - - return validatedSearchResult; } public static ScrollResult validateScrollResult( @@ -81,78 +85,85 @@ public static ScrollResult validateScrollResult( public static BrowseResult validateBrowseResult( final BrowseResult browseResult, @Nonnull final EntityService entityService) { - if (browseResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateBrowseResult").time()) { + if (browseResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + BrowseResult validatedBrowseResult = + new BrowseResult() + .setGroups(browseResult.getGroups()) + .setMetadata(browseResult.getMetadata()) + 
.setFrom(browseResult.getFrom()) + .setPageSize(browseResult.getPageSize()) + .setNumGroups(browseResult.getNumGroups()) + .setNumEntities(browseResult.getNumEntities()) + .setNumElements(browseResult.getNumElements()); + + BrowseResultEntityArray validatedEntities = + validatedUrns(browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true) + .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + validatedBrowseResult.setEntities(validatedEntities); + + return validatedBrowseResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - BrowseResult validatedBrowseResult = - new BrowseResult() - .setGroups(browseResult.getGroups()) - .setMetadata(browseResult.getMetadata()) - .setFrom(browseResult.getFrom()) - .setPageSize(browseResult.getPageSize()) - .setNumGroups(browseResult.getNumGroups()) - .setNumEntities(browseResult.getNumEntities()) - .setNumElements(browseResult.getNumElements()); - - BrowseResultEntityArray validatedEntities = - validatedUrns(browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true) - .collect(Collectors.toCollection(BrowseResultEntityArray::new)); - - validatedBrowseResult.setEntities(validatedEntities); - - return validatedBrowseResult; } public static ListResult validateListResult( final ListResult listResult, @Nonnull final EntityService entityService) { - if (listResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateListResult").time()) { + if (listResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + ListResult validatedListResult = + new ListResult() + .setStart(listResult.getStart()) + .setCount(listResult.getCount()) + .setTotal(listResult.getTotal()); + + UrnArray validatedEntities = + validatedUrns(listResult.getEntities(), Function.identity(), entityService, true) + .collect(Collectors.toCollection(UrnArray::new)); + validatedListResult.setEntities(validatedEntities); + + return validatedListResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - ListResult validatedListResult = - new ListResult() - .setStart(listResult.getStart()) - .setCount(listResult.getCount()) - .setTotal(listResult.getTotal()); - - UrnArray validatedEntities = - validatedUrns(listResult.getEntities(), Function.identity(), entityService, true) - .collect(Collectors.toCollection(UrnArray::new)); - - validatedListResult.setEntities(validatedEntities); - - return validatedListResult; } public static LineageSearchResult validateLineageSearchResult( final LineageSearchResult lineageSearchResult, @Nonnull final EntityService entityService) { - if (lineageSearchResult == null) { - return null; + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateLineageResult").time()) { + if (lineageSearchResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + LineageSearchResult validatedLineageSearchResult = + new LineageSearchResult() + .setMetadata(lineageSearchResult.getMetadata()) + .setFrom(lineageSearchResult.getFrom()) + .setPageSize(lineageSearchResult.getPageSize()) + .setNumEntities(lineageSearchResult.getNumEntities()); + + LineageSearchEntityArray validatedEntities = + validatedUrns( + lineageSearchResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + 
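+      // Attach only the entities that passed URN validation to the result returned to the caller.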
validatedLineageSearchResult.setEntities(validatedEntities); + + log.debug("Returning validated lineage search results"); + return validatedLineageSearchResult; } - Objects.requireNonNull(entityService, "entityService must not be null"); - - LineageSearchResult validatedLineageSearchResult = - new LineageSearchResult() - .setMetadata(lineageSearchResult.getMetadata()) - .setFrom(lineageSearchResult.getFrom()) - .setPageSize(lineageSearchResult.getPageSize()) - .setNumEntities(lineageSearchResult.getNumEntities()); - - LineageSearchEntityArray validatedEntities = - validatedUrns( - lineageSearchResult.getEntities(), - LineageSearchEntity::getEntity, - entityService, - true) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); - - validatedLineageSearchResult.setEntities(validatedEntities); - - return validatedLineageSearchResult; } public static EntityLineageResult validateEntityLineageResult( diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java new file mode 100644 index 0000000000000..1442f099c4703 --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/GenericTimeseriesDocument.java @@ -0,0 +1,26 @@ +package com.linkedin.metadata.timeseries; + +import com.fasterxml.jackson.annotation.JsonProperty; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Data; + +@Data +@Builder +public class GenericTimeseriesDocument { + @Nonnull private String urn; + private long timestampMillis; + + @JsonProperty("@timestamp") + private long timestamp; + + @Nonnull private Object event; + @Nullable private String messageId; + @Nullable private Object systemMetadata; + @Nullable private String eventGranularity; + private boolean isExploded; + @Nullable private String runId; + @Nullable private String partition; + @Nullable private Object partitionSpec; +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java index 54480bb700398..529e8e00ecf57 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java @@ -201,4 +201,15 @@ void upsertDocument( @Nonnull final JsonNode document); List getIndexSizes(); + + @Nonnull + TimeseriesScrollResult scrollAspects( + @Nonnull final String entityName, + @Nonnull final String aspectName, + @Nullable Filter filter, + @Nonnull List sortCriterion, + @Nullable String scrollId, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java new file mode 100644 index 0000000000000..200db2dfde8eb --- /dev/null +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesScrollResult.java @@ -0,0 +1,18 @@ +package com.linkedin.metadata.timeseries; + +import com.linkedin.metadata.aspect.EnvelopedAspect; +import java.util.List; +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +@AllArgsConstructor 
+@Data +@Builder +public class TimeseriesScrollResult { + int numResults; + int pageSize; + String scrollId; + List events; + List documents; +} diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java index 970235fc88c87..27aa9ee04cc75 100644 --- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java +++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java @@ -4,7 +4,7 @@ import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER; import com.datahub.gms.util.CSVWriter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.models.EntitySpec; diff --git a/metadata-service/war/src/main/resources/boot/data_types.json b/metadata-service/war/src/main/resources/boot/data_types.json new file mode 100644 index 0000000000000..2d7294e45bd7a --- /dev/null +++ b/metadata-service/war/src/main/resources/boot/data_types.json @@ -0,0 +1,42 @@ +[ + { + "urn": "urn:li:dataType:datahub.string", + "info": { + "qualifiedName":"datahub.string", + "displayName": "String", + "description": "A string of characters." + } + }, + { + "urn": "urn:li:dataType:datahub.number", + "info": { + "qualifiedName":"datahub.number", + "displayName": "Number", + "description": "An integer or decimal number." + } + }, + { + "urn": "urn:li:dataType:datahub.urn", + "info": { + "qualifiedName":"datahub.urn", + "displayName": "Urn", + "description": "A unique identifier for a DataHub entity." + } + }, + { + "urn": "urn:li:dataType:datahub.rich_text", + "info": { + "qualifiedName":"datahub.rich_text", + "displayName": "Rich Text", + "description": "An attributed string of characters." + } + }, + { + "urn": "urn:li:dataType:datahub.date", + "info": { + "qualifiedName":"datahub.date", + "displayName": "Date", + "description": "A specific day, without time."
+ } + } +] diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java new file mode 100644 index 0000000000000..140b64780918d --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/OwnershipUtils.java @@ -0,0 +1,20 @@ +package com.linkedin.metadata.authorization; + +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import java.util.List; +import javax.annotation.Nonnull; + +public class OwnershipUtils { + + public static boolean isOwnerOfEntity( + @Nonnull final Ownership entityOwnership, + @Nonnull final Urn actorUrn, + @Nonnull final List groupsForUser) { + return entityOwnership.getOwners().stream() + .anyMatch( + owner -> owner.getOwner().equals(actorUrn) || groupsForUser.contains(owner.getOwner())); + } + + private OwnershipUtils() {} +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java index 5f3975b066fde..6ba311cf166d4 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java @@ -3,8 +3,11 @@ import static com.linkedin.metadata.Constants.SYSTEM_ACTOR; import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import java.net.URISyntaxException; import java.time.Clock; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -16,4 +19,11 @@ public static AuditStamp createDefaultAuditStamp() { .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) .setTime(Clock.systemUTC().millis()); } + + public static AuditStamp createAuditStamp(@Nonnull String actorUrn) throws URISyntaxException { + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(actorUrn)); + auditStamp.setTime(Clock.systemUTC().millis()); + return auditStamp; + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java new file mode 100644 index 0000000000000..ebf2587418dae --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/FormUtils.java @@ -0,0 +1,49 @@ +package com.linkedin.metadata.utils; + +import com.linkedin.common.urn.Urn; +import com.linkedin.form.FormActorAssignment; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class FormUtils { + + private FormUtils() {} + + public static boolean isFormAssignedToUser( + @Nonnull final FormActorAssignment parent, + @Nonnull final Urn userUrn, + @Nonnull final List groupUrns) { + // Assigned urn and group urns + final Set assignedUserUrns = + parent.getUsers() != null + ? parent.getUsers().stream().map(Urn::toString).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Set assignedGroupUrns = + parent.getGroups() != null + ? parent.getGroups().stream().map(Urn::toString).collect(Collectors.toSet()) + : Collections.emptySet(); + + // First check whether user is directly assigned. + if (assignedUserUrns.size() > 0) { + boolean isUserAssigned = assignedUserUrns.contains(userUrn.toString()); + if (isUserAssigned) { + return true; + } + } + + // Next check whether the user is assigned indirectly, by group. 
+ if (assignedGroupUrns.size() > 0) { + boolean isUserGroupAssigned = + groupUrns.stream().anyMatch(groupUrn -> assignedGroupUrns.contains(groupUrn.toString())); + if (isUserGroupAssigned) { + return true; + } + } + + return false; + } +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java index fc28367e6c7ee..ae061a2d0c090 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java @@ -1,12 +1,17 @@ package com.linkedin.metadata.utils; import com.datahub.util.RecordUtils; +import com.linkedin.common.urn.Urn; import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.GenericPayload; import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.Collectors; import javax.annotation.Nonnull; public class GenericRecordUtils { @@ -66,4 +71,20 @@ public static GenericPayload serializePayload(@Nonnull RecordTemplate payload) { genericPayload.setContentType(GenericRecordUtils.JSON); return genericPayload; } + + @Nonnull + public static Map> entityResponseToAspectMap( + Map inputMap) { + return inputMap.entrySet().stream() + .map( + entry -> + Map.entry( + entry.getKey(), + entry.getValue().getAspects().entrySet().stream() + .map( + aspectEntry -> + Map.entry(aspectEntry.getKey(), aspectEntry.getValue().getValue())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java new file mode 100644 index 0000000000000..edf959d04a37b --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SchemaFieldUtils.java @@ -0,0 +1,22 @@ +package com.linkedin.metadata.utils; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.SchemaFieldKey; +import javax.annotation.Nonnull; + +public class SchemaFieldUtils { + + private SchemaFieldUtils() {} + + public static Urn generateSchemaFieldUrn( + @Nonnull final String resourceUrn, @Nonnull final String fieldPath) { + // we rely on schemaField fieldPaths to be encoded since we do that on the ingestion side + final String encodedFieldPath = + fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); + final SchemaFieldKey key = + new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); + return EntityKeyUtils.convertEntityKeyToUrn(key, Constants.SCHEMA_FIELD_ENTITY_NAME); + } +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java index eb58bc509838d..9df708c6e9fdc 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java @@ -7,14 +7,19 @@ import com.linkedin.metadata.query.filter.Criterion; import 
com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.FilterValue; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.net.URISyntaxException; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -142,4 +147,25 @@ public static BoolQueryBuilder filterSoftDeletedByDefault( } return filterQuery; } + + public static SortCriterion sortBy(@Nonnull String field, @Nullable SortOrder direction) { + SortCriterion sortCriterion = new SortCriterion(); + sortCriterion.setField(field); + sortCriterion.setOrder( + com.linkedin.metadata.query.filter.SortOrder.valueOf( + Optional.ofNullable(direction).orElse(SortOrder.ASCENDING).toString())); + return sortCriterion; + } + + public static Filter andFilter(Criterion... criteria) { + Filter filter = new Filter(); + filter.setOr(andCriterion(Arrays.stream(criteria))); + return filter; + } + + public static ConjunctiveCriterionArray andCriterion(Stream criteria) { + return new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(criteria.collect(Collectors.toList())))); + } } diff --git a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java index a324f9ce0195b..dfa8c627e0617 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java +++ b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java @@ -1,10 +1,10 @@ package mock; +import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import java.util.Collections; import java.util.HashMap; import java.util.Map; diff --git a/smoke-test/cypress-dev.sh b/smoke-test/cypress-dev.sh index 93f03d36cbd19..b1c6571e1a065 100755 --- a/smoke-test/cypress-dev.sh +++ b/smoke-test/cypress-dev.sh @@ -15,7 +15,7 @@ python -c 'from tests.cypress.integration_test import ingest_data; ingest_data() cd tests/cypress npm install -source ../../set-cypress-creds.sh +source "$DIR/set-cypress-creds.sh" npx cypress open \ --env "ADMIN_DISPLAYNAME=$CYPRESS_ADMIN_DISPLAYNAME,ADMIN_USERNAME=$CYPRESS_ADMIN_USERNAME,ADMIN_PASSWORD=$CYPRESS_ADMIN_PASSWORD" diff --git a/smoke-test/requests_wrapper/__init__.py b/smoke-test/requests_wrapper/__init__.py index d9956e8434a89..c2f4190e6150d 100644 --- a/smoke-test/requests_wrapper/__init__.py +++ b/smoke-test/requests_wrapper/__init__.py @@ -1,3 +1,4 @@ from .utils_requests_wrapper import CustomSession as Session from .utils_requests_wrapper import get, post from .constants import * +from requests import exceptions diff --git a/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js b/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js index 00de08e77a185..f89b70b7a7d23 100644 --- a/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js +++ 
b/smoke-test/tests/cypress/cypress/e2e/siblings/siblings.js @@ -80,7 +80,7 @@ describe('siblings', () => { cy.login(); cy.visit('/search?page=1&query=raw_orders'); - cy.contains('Showing 1 - 10 of 14 results'); + cy.contains('Showing 1 - 10 of '); cy.get('.test-search-result').should('have.length', 5); cy.get('.test-search-result-sibling-section').should('have.length', 5); diff --git a/smoke-test/tests/structured_properties/__init__.py b/smoke-test/tests/structured_properties/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/structured_properties/click_event.avsc b/smoke-test/tests/structured_properties/click_event.avsc new file mode 100644 index 0000000000000..d959dcbbdeea1 --- /dev/null +++ b/smoke-test/tests/structured_properties/click_event.avsc @@ -0,0 +1,14 @@ +{ + "namespace": "io.datahubproject", + "type": "record", + "name": "ClickEvent", + "fields": [ + { "name": "ip", "type": "string" }, + { "name": "url", "type": "string" }, + { "name": "time", "type": "long" }, + { "name": "referer", "type": ["string", "null"] }, + { "name": "user_agent", "type": ["string", "null"] }, + { "name": "user_id", "type": ["string", "null"] }, + { "name": "session_id", "type": ["string", "null"] } + ] +} diff --git a/smoke-test/tests/structured_properties/test_dataset.yaml b/smoke-test/tests/structured_properties/test_dataset.yaml new file mode 100644 index 0000000000000..2ac1cca6c6dc2 --- /dev/null +++ b/smoke-test/tests/structured_properties/test_dataset.yaml @@ -0,0 +1,19 @@ +- id: user.clicks + platform: hive + # urn: urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD) # use urn instead of id and platform + subtype: table + schema: + file: tests/structured_properties/click_event.avsc + fields: + # - id: ip + - urn: urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD),ip) + structured_properties: + io.acryl.dataManagement.deprecationDate: "2023-01-01" + properties: + retention: 365 + structured_properties: + clusterType: primary + clusterName: gold + projectNames: + - Tracking + - DataHub diff --git a/smoke-test/tests/structured_properties/test_structured_properties.py b/smoke-test/tests/structured_properties/test_structured_properties.py new file mode 100644 index 0000000000000..83994776076b0 --- /dev/null +++ b/smoke-test/tests/structured_properties/test_structured_properties.py @@ -0,0 +1,577 @@ +import logging +import os +from datahub.ingestion.graph.filters import SearchFilterRule +from tests.consistency_utils import wait_for_writes_to_sync +import tempfile +from random import randint +from tests.utilities.file_emitter import FileEmitter +from typing import Iterable, List, Optional, Union + +import pytest +# import tenacity +from datahub.api.entities.dataset.dataset import Dataset +from datahub.api.entities.structuredproperties.structuredproperties import \ + StructuredProperties +from datahub.emitter.mce_builder import make_dataset_urn, make_schema_field_urn +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph +from datahub.metadata.schema_classes import ( + EntityTypeInfoClass, PropertyValueClass, StructuredPropertiesClass, + StructuredPropertyDefinitionClass, StructuredPropertyValueAssignmentClass) +from datahub.specific.dataset import DatasetPatchBuilder +from datahub.utilities.urns.structured_properties_urn import \ + StructuredPropertyUrn +from datahub.utilities.urns.urn import Urn + +from tests.utils import 
(delete_urns, delete_urns_from_file, get_gms_url, + get_sleep_info, ingest_file_via_rest, + wait_for_writes_to_sync) + +logger = logging.getLogger(__name__) + +start_index = randint(10, 10000) +dataset_urns = [ + make_dataset_urn("snowflake", f"table_foo_{i}") + for i in range(start_index, start_index + 10) +] + +schema_field_urns = [ + make_schema_field_urn(dataset_urn, "column_1") + for dataset_urn in dataset_urns +] + +generated_urns = [d for d in dataset_urns] + [f for f in schema_field_urns] + + +default_namespace = "io.acryl.privacy" + +def create_logical_entity( + entity_name: str, +) -> Iterable[MetadataChangeProposalWrapper]: + mcp = MetadataChangeProposalWrapper( + entityUrn="urn:li:entityType:" + entity_name, + aspect=EntityTypeInfoClass( + qualifiedName="io.datahubproject." + entity_name, + displayName=entity_name, + ), + ) + return [mcp] + + +def create_test_data(filename: str): + file_emitter = FileEmitter(filename) + for mcps in create_logical_entity("dataset"): + file_emitter.emit(mcps) + + file_emitter.close() + wait_for_writes_to_sync() + +sleep_sec, sleep_times = get_sleep_info() + + +@pytest.fixture(scope="module", autouse=False) +def graph() -> DataHubGraph: + graph: DataHubGraph = DataHubGraph( + config=DatahubClientConfig(server=get_gms_url()) + ) + return graph + + +@pytest.fixture(scope="module", autouse=False) +def ingest_cleanup_data(request): + new_file, filename = tempfile.mkstemp() + try: + create_test_data(filename) + print("ingesting structured properties test data") + ingest_file_via_rest(filename) + yield + print("removing structured properties test data") + delete_urns_from_file(filename) + delete_urns(generated_urns) + wait_for_writes_to_sync() + finally: + os.remove(filename) + + +@pytest.mark.dependency() +def test_healthchecks(wait_for_healthchecks): + # Call to wait_for_healthchecks fixture will do the actual functionality. 
+ pass + + +def create_property_definition( + property_name: str, + graph: DataHubGraph, + namespace: str = default_namespace, + value_type: str = "string", + cardinality: str = "SINGLE", + allowed_values: Optional[List[PropertyValueClass]] = None, + entity_types: Optional[List[str]] = None, +): + structured_property_definition = StructuredPropertyDefinitionClass( + qualifiedName=f"{namespace}.{property_name}", + valueType=Urn.make_data_type_urn(value_type), + description="The retention policy for the dataset", + entityTypes=[Urn.make_entity_type_urn(e) for e in entity_types] + if entity_types + else [Urn.make_entity_type_urn("dataset")], + cardinality=cardinality, + allowedValues=allowed_values, + ) + + mcp = MetadataChangeProposalWrapper( + entityUrn=f"urn:li:structuredProperty:{namespace}.{property_name}", + aspect=structured_property_definition, + ) + graph.emit(mcp) + wait_for_writes_to_sync() + + +def attach_property_to_entity( + urn: str, + property_name: str, + property_value: Union[str, float, List[str | float]], + graph: DataHubGraph, + namespace: str = default_namespace +): + if isinstance(property_value, list): + property_values: List[Union[str, float]] = property_value + else: + property_values = [property_value] + + mcp = MetadataChangeProposalWrapper( + entityUrn=urn, + aspect=StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:{namespace}.{property_name}", + values=property_values, + ) + ] + ), + ) + graph.emit_mcp(mcp) + wait_for_writes_to_sync() + + +def get_property_from_entity( + urn: str, + property_name: str, + graph: DataHubGraph, +): + structured_properties: Optional[ + StructuredPropertiesClass + ] = graph.get_aspect(urn, StructuredPropertiesClass) + assert structured_properties is not None + for property in structured_properties.properties: + if ( + property.propertyUrn + == f"urn:li:structuredProperty:{property_name}" + ): + return property.values + return None + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_string(ingest_cleanup_data, graph): + property_name = "retentionPolicy" + + create_property_definition(property_name, graph) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.retentionPolicy") + + attach_property_to_entity( + dataset_urns[0], property_name, ["30d"], graph=graph + ) + + try: + attach_property_to_entity( + dataset_urns[0], property_name, 200030, graph=graph + ) + raise AssertionError( + "Should not be able to attach a number to a string property" + ) + except Exception as e: + if not isinstance(e, AssertionError): + pass + else: + raise e + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_double(ingest_cleanup_data, graph): + property_name = "expiryTime" + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + create_property_definition(property_name, graph, value_type="number") + + attach_property_to_entity( + dataset_urns[0], property_name, 2000034, graph=graph + ) + + try: + attach_property_to_entity( + dataset_urns[0], property_name, "30 days", graph=graph + ) + raise AssertionError( + "Should not be able to attach a string to a number property" + ) + except Exception as e: + if not isinstance(e, 
AssertionError): + pass + else: + raise e + + try: + attach_property_to_entity( + dataset_urns[0], property_name, [2000034, 2000035], graph=graph + ) + raise AssertionError( + "Should not be able to attach a list to a number property" + ) + except Exception as e: + if not isinstance(e, AssertionError): + pass + else: + raise e + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_double_multiple(ingest_cleanup_data, graph): + property_name = "versions" + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + + create_property_definition( + property_name, graph, value_type="number", cardinality="MULTIPLE" + ) + + attach_property_to_entity( + dataset_urns[0], property_name, [1.0, 2.0], graph=graph + ) + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_string_allowed_values( + ingest_cleanup_data, graph +): + property_name = "enumProperty" + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + + create_property_definition( + property_name, + graph, + value_type="string", + cardinality="MULTIPLE", + allowed_values=[ + PropertyValueClass(value="foo"), + PropertyValueClass(value="bar"), + ], + ) + + attach_property_to_entity( + dataset_urns[0], property_name, ["foo", "bar"], graph=graph + ) + + try: + attach_property_to_entity( + dataset_urns[0], property_name, ["foo", "baz"], graph=graph + ) + raise AssertionError( + "Should not be able to attach a value not in allowed values" + ) + except Exception as e: + if "value: {string=baz} should be one of [" in str(e): + pass + else: + raise e + + +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_definition_evolution( + ingest_cleanup_data, graph +): + property_name = "enumProperty1234" + + create_property_definition( + property_name, + graph, + value_type="string", + cardinality="MULTIPLE", + allowed_values=[ + PropertyValueClass(value="foo"), + PropertyValueClass(value="bar"), + ], + ) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{property_name}") + + try: + create_property_definition( + property_name, + graph, + value_type="string", + cardinality="SINGLE", + allowed_values=[ + PropertyValueClass(value="foo"), + PropertyValueClass(value="bar"), + ], + ) + raise AssertionError( + "Should not be able to change cardinality from MULTIPLE to SINGLE" + ) + except Exception as e: + if isinstance(e, AssertionError): + raise e + else: + pass + + +# @tenacity.retry( +# stop=tenacity.stop_after_attempt(sleep_times), +# wait=tenacity.wait_fixed(sleep_sec), +# ) +@pytest.mark.dependency(depends=["test_healthchecks"]) +def test_structured_property_schema_field(ingest_cleanup_data, graph): + property_name = ( + f"deprecationDate{randint(10, 10000)}" + ) + + create_property_definition( + property_name, + graph, + namespace="io.datahubproject.test", + value_type="date", + entity_types=["schemaField"], + ) + generated_urns.append(f"urn:li:structuredProperty:io.datahubproject.test.{property_name}") + + attach_property_to_entity( + schema_field_urns[0], property_name, "2020-10-01", graph=graph, namespace="io.datahubproject.test" + ) + + assert ( + get_property_from_entity( + schema_field_urns[0], f"io.datahubproject.test.{property_name}", 
graph=graph + ) + == ["2020-10-01"] + ) + + try: + attach_property_to_entity( + schema_field_urns[0], property_name, 200030, graph=graph, namespace="io.datahubproject.test" + ) + raise AssertionError( + "Should not be able to attach a number to a DATE property" + ) + except Exception as e: + if not isinstance(e, AssertionError): + pass + else: + raise e + + +def test_dataset_yaml_loader(ingest_cleanup_data, graph): + StructuredProperties.create( + "tests/structured_properties/test_structured_properties.yaml" + ) + + for dataset in Dataset.from_yaml( + "tests/structured_properties/test_dataset.yaml" + ): + for mcp in dataset.generate_mcp(): + graph.emit(mcp) + wait_for_writes_to_sync() + + property_name = "io.acryl.dataManagement.deprecationDate" + assert ( + get_property_from_entity( + make_schema_field_urn( + make_dataset_urn("hive", "user.clicks"), "ip" + ), + property_name, + graph=graph, + ) + == ["2023-01-01"] + ) + + dataset = Dataset.from_datahub( + graph=graph, + urn="urn:li:dataset:(urn:li:dataPlatform:hive,user.clicks,PROD)", + ) + field_name = "ip" + matching_fields = [ + f + for f in dataset.schema_metadata.fields + if Dataset._simplify_field_path(f.id) == field_name + ] + assert len(matching_fields) == 1 + assert ( + matching_fields[0].structured_properties[ + Urn.make_structured_property_urn( + "io.acryl.dataManagement.deprecationDate" + ) + ] + == ["2023-01-01"] + ) + + +def test_dataset_structured_property_validation( + ingest_cleanup_data, graph, caplog +): + from datahub.api.entities.dataset.dataset import Dataset + + property_name = "replicationSLA" + property_value = 30 + value_type = "number" + + create_property_definition( + property_name=property_name, graph=graph, value_type=value_type + ) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.replicationSLA") + + attach_property_to_entity( + dataset_urns[0], property_name, [property_value], graph=graph + ) + + assert Dataset.validate_structured_property( + f"{default_namespace}.{property_name}", property_value + ) == ( + f"{default_namespace}.{property_name}", + float(property_value), + ) + + assert ( + Dataset.validate_structured_property("testName", "testValue") is None + ) + + bad_property_value = "2023-09-20" + assert ( + Dataset.validate_structured_property( + property_name, bad_property_value + ) + is None + ) + + +def test_structured_property_search(ingest_cleanup_data, graph: DataHubGraph, caplog): + + def to_es_name(property_name, namespace=default_namespace): + namespace_field = namespace.replace(".", "_") + return f"structuredProperties.{namespace_field}_{property_name}" + + # Attach structured property to entity and to field + field_property_name = f"deprecationDate{randint(10, 10000)}" + + create_property_definition( + namespace="io.datahubproject.test", + property_name=field_property_name, + graph=graph, value_type="date", entity_types=["schemaField"] + ) + generated_urns.append(f"urn:li:structuredProperty:io.datahubproject.test.{field_property_name}") + + attach_property_to_entity( + schema_field_urns[0], field_property_name, "2020-10-01", graph=graph, namespace="io.datahubproject.test" + ) + dataset_property_name = "replicationSLA" + property_value = 30 + value_type = "number" + + create_property_definition(property_name=dataset_property_name, graph=graph, value_type=value_type) + generated_urns.append(f"urn:li:structuredProperty:{default_namespace}.{dataset_property_name}") + + attach_property_to_entity(dataset_urns[0], dataset_property_name, [property_value], graph=graph) + + # [] 
= default entities which includes datasets, does not include fields + entity_urns = list(graph.get_urns_by_filter(extraFilters=[ + { + "field": to_es_name(dataset_property_name), + "negated": "false", + "condition": "EXISTS", + } + ])) + assert len(entity_urns) == 1 + assert entity_urns[0] == dataset_urns[0] + + # Search over schema field specifically + field_structured_prop = graph.get_aspect(entity_urn=schema_field_urns[0], aspect_type=StructuredPropertiesClass) + assert field_structured_prop == StructuredPropertiesClass( + properties=[ + StructuredPropertyValueAssignmentClass( + propertyUrn=f"urn:li:structuredProperty:io.datahubproject.test.{field_property_name}", + values=["2020-10-01"] + ) + ] + ) + + # Search over entities that do not include the field + field_urns = list(graph.get_urns_by_filter(entity_types=["tag"], + extraFilters=[ + { + "field": to_es_name(field_property_name, + namespace="io.datahubproject.test"), + "negated": "false", + "condition": "EXISTS", + } + ])) + assert len(field_urns) == 0 + + # OR the two properties together to return both results + field_urns = list(graph.get_urns_by_filter(entity_types=["dataset", "tag"], + extraFilters=[ + { + "field": to_es_name(dataset_property_name), + "negated": "false", + "condition": "EXISTS", + } + ])) + assert len(field_urns) == 1 + assert dataset_urns[0] in field_urns + + +def test_dataset_structured_property_patch( + ingest_cleanup_data, graph, caplog +): + property_name = "replicationSLA" + property_value = 30 + value_type = "number" + + create_property_definition( + property_name=property_name, + graph=graph, + value_type=value_type + ) + + dataset_patcher: DatasetPatchBuilder = DatasetPatchBuilder( + urn=dataset_urns[0] + ) + + dataset_patcher.set_structured_property(StructuredPropertyUrn.make_structured_property_urn( + f"{default_namespace}.{property_name}"), property_value) + + for mcp in dataset_patcher.build(): + graph.emit(mcp) + wait_for_writes_to_sync() + + dataset = Dataset.from_datahub(graph=graph, urn=dataset_urns[0]) + assert dataset.structured_properties is not None + assert ( + [int(float(k)) for k in dataset.structured_properties[ + StructuredPropertyUrn.make_structured_property_urn( + f"{default_namespace}.{property_name}" + ) + ]] + == [property_value] + ) diff --git a/smoke-test/tests/structured_properties/test_structured_properties.yaml b/smoke-test/tests/structured_properties/test_structured_properties.yaml new file mode 100644 index 0000000000000..569a3d185165d --- /dev/null +++ b/smoke-test/tests/structured_properties/test_structured_properties.yaml @@ -0,0 +1,33 @@ +- id: clusterType + type: STRING + display_name: Cluster's type + description: "Test Cluster Type Property" + entity_types: + - dataset +- id: clusterName + type: STRING + display_name: Cluster's name + description: "Test Cluster Name Property" + entity_types: + - dataset +- id: projectNames + type: STRING + cardinality: MULTIPLE + display_name: Project Name + entity_types: + - dataset # or urn:li:logicalEntity:metamodel.datahub.dataset + - dataflow + description: "Test property for project name" + allowed_values: + - value: Tracking + description: test value 1 for project + - value: DataHub + description: test value 2 for project +- id: io.acryl.dataManagement.deprecationDate + type: DATE + display_name: Deprecation Date + entity_types: + - dataset + - dataFlow + - dataJob + - schemaField diff --git a/smoke-test/tests/telemetry/telemetry_test.py b/smoke-test/tests/telemetry/telemetry_test.py index 3127061c9f506..b7cd6fa0517df 100644 
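[Editor's note] The YAML fixture above is consumed through the same SDK helper that
test_dataset_yaml_loader exercises. A minimal standalone sketch follows, with the
caveat that the import path and the environment-resolved DataHub server are
assumptions based on the datahub package layout at the time of this PR, not taken
from the patch itself:

    from datahub.api.entities.structuredproperties.structuredproperties import (
        StructuredProperties,
    )

    # Parses each YAML entry (id, type, cardinality, entity_types, allowed_values)
    # and emits the corresponding structured property definition aspects to the
    # DataHub instance resolved from the environment (assumed behavior).
    StructuredProperties.create(
        "tests/structured_properties/test_structured_properties.yaml"
    )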
--- a/smoke-test/tests/telemetry/telemetry_test.py +++ b/smoke-test/tests/telemetry/telemetry_test.py @@ -3,9 +3,19 @@ from datahub.cli.cli_utils import get_aspects_for_entity -def test_no_clientID(): +def test_no_client_id(): client_id_urn = "urn:li:telemetry:clientId" - aspect = ["telemetryClientId"] + aspect = ["clientId"] # this is checking for the removal of the invalid aspect RemoveClientIdAspectStep.java + + res_data = json.dumps( + get_aspects_for_entity(entity_urn=client_id_urn, aspects=aspect, typed=False) + ) + assert res_data == "{}" + + +def test_no_telemetry_client_id(): + client_id_urn = "urn:li:telemetry:clientId" + aspect = ["telemetryClientId"] # telemetry expected to be disabled for tests res_data = json.dumps( get_aspects_for_entity(entity_urn=client_id_urn, aspects=aspect, typed=False) diff --git a/smoke-test/tests/utilities/__init__.py b/smoke-test/tests/utilities/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/smoke-test/tests/utilities/file_emitter.py b/smoke-test/tests/utilities/file_emitter.py new file mode 100644 index 0000000000000..27a91c360af8a --- /dev/null +++ b/smoke-test/tests/utilities/file_emitter.py @@ -0,0 +1,21 @@ +from datahub.ingestion.sink.file import FileSink, FileSinkConfig +from datahub.ingestion.api.common import PipelineContext, RecordEnvelope +from datahub.ingestion.api.sink import NoopWriteCallback +import time + + +class FileEmitter: + def __init__(self, filename: str, run_id: str = f"test_{int(time.time()*1000.0)}") -> None: + self.sink: FileSink = FileSink( + ctx=PipelineContext(run_id=run_id), + config=FileSinkConfig(filename=filename), + ) + + def emit(self, event): + self.sink.write_record_async( + record_envelope=RecordEnvelope(record=event, metadata={}), + write_callback=NoopWriteCallback(), + ) + + def close(self): + self.sink.close() \ No newline at end of file From 91ac7d91f80ac0d439c099bdf18946d09b8a50cf Mon Sep 17 00:00:00 2001 From: Raj Tekal Date: Tue, 23 Jan 2024 15:03:33 -0500 Subject: [PATCH 260/263] Indent correction --- .../linkedin/datahub/graphql/GmsGraphQLEngine.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 1e071cd1f7a6a..aa62f03f13642 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -415,8 +415,8 @@ public class GmsGraphQLEngine { private final LineageService lineageService; private final QueryService queryService; private final DataProductService dataProductService; - private final ERModelRelationService eRModelRelationService; - private final FormService formService; + private final ERModelRelationService eRModelRelationService; + private final FormService formService; private final FeatureFlags featureFlags; @@ -460,7 +460,7 @@ public class GmsGraphQLEngine { private final DataHubPolicyType dataHubPolicyType; private final DataHubRoleType dataHubRoleType; private final SchemaFieldType schemaFieldType; - private final ERModelRelationType ermodelrelationType; + private final ERModelRelationType ermodelrelationType; private final DataHubViewType dataHubViewType; private final QueryType queryType; private final DataProductType dataProductType; @@ -525,7 +525,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { 
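[Editor's note] The hunks in this "Indent correction" patch are whitespace-only;
running git diff -w over the commit should show no non-whitespace changes. Assuming
the repository's standard Gradle Spotless setup (an assumption, since the build
files are not part of this patch), the same formatting can be reproduced with
./gradlew spotlessApply, which is effectively what the follow-up "Merging with
main" patch below applies wholesale.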
this.settingsService = args.settingsService; this.lineageService = args.lineageService; this.queryService = args.queryService; - this.eRModelRelationService = args.eRModelRelationService; + this.eRModelRelationService = args.eRModelRelationService; this.dataProductService = args.dataProductService; this.formService = args.formService; @@ -567,8 +567,8 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.testType = new TestType(entityClient); this.dataHubPolicyType = new DataHubPolicyType(entityClient); this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(entityClient, featureFlags); - this.ermodelrelationType = new ERModelRelationType(entityClient, featureFlags); + this.schemaFieldType = new SchemaFieldType(entityClient, featureFlags); + this.ermodelrelationType = new ERModelRelationType(entityClient, featureFlags); this.dataHubViewType = new DataHubViewType(entityClient); this.queryType = new QueryType(entityClient); this.dataProductType = new DataProductType(entityClient); @@ -698,7 +698,7 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { configureTestResultResolvers(builder); configureRoleResolvers(builder); configureSchemaFieldResolvers(builder); - configureERModelRelationResolvers(builder); + configureERModelRelationResolvers(builder); configureEntityPathResolvers(builder); configureResolvedAuditStampResolvers(builder); configureViewResolvers(builder); From 226fa401a16e7827b1a5d5e57ddd420357f5b2a0 Mon Sep 17 00:00:00 2001 From: Raj Tekal Date: Tue, 30 Jan 2024 14:54:24 -0500 Subject: [PATCH 261/263] Merging with main --- .../datahub/graphql/GmsGraphQLEngine.java | 110 ++++++---- .../datahub/graphql/GmsGraphQLEngineArgs.java | 2 +- .../common/mappers/UrnToEntityMapper.java | 2 +- .../CreateERModelRelationResolver.java | 122 ++++++----- .../ermodelrelation/ERModelRelationType.java | 207 ++++++++++-------- .../UpdateERModelRelationResolver.java | 68 +++--- .../mappers/ERModelRelationMapper.java | 115 ++++++---- .../ERModelRelationUpdateInputMapper.java | 101 +++++---- .../java/com/linkedin/metadata/Constants.java | 3 +- .../common/urn/ERModelRelationUrn.java | 30 +-- .../ERModelRelationServiceFactory.java | 16 +- .../factory/graphql/GraphQLEngineFactory.java | 2 +- .../service/ERModelRelationService.java | 49 +++-- .../authorization/PoliciesConfig.java | 20 +- 14 files changed, 493 insertions(+), 354 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index aa62f03f13642..27d3a2938f4af 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -53,6 +53,8 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetStatsSummary; import com.linkedin.datahub.graphql.generated.Domain; +import com.linkedin.datahub.graphql.generated.ERModelRelation; +import com.linkedin.datahub.graphql.generated.ERModelRelationProperties; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.EntityRelationship; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; @@ -65,8 +67,6 @@ import com.linkedin.datahub.graphql.generated.GlossaryTermAssociation; import com.linkedin.datahub.graphql.generated.IngestionSource; import 
com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; -import com.linkedin.datahub.graphql.generated.ERModelRelation; -import com.linkedin.datahub.graphql.generated.ERModelRelationProperties; import com.linkedin.datahub.graphql.generated.LineageRelationship; import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.datahub.graphql.generated.ListDomainsResult; @@ -305,11 +305,12 @@ import com.linkedin.datahub.graphql.types.datatype.DataTypeType; import com.linkedin.datahub.graphql.types.domain.DomainType; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeType; +import com.linkedin.datahub.graphql.types.ermodelrelation.CreateERModelRelationResolver; +import com.linkedin.datahub.graphql.types.ermodelrelation.ERModelRelationType; +import com.linkedin.datahub.graphql.types.ermodelrelation.UpdateERModelRelationResolver; import com.linkedin.datahub.graphql.types.form.FormType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; -import com.linkedin.datahub.graphql.types.ermodelrelation.CreateERModelRelationResolver; -import com.linkedin.datahub.graphql.types.ermodelrelation.UpdateERModelRelationResolver; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureType; import com.linkedin.datahub.graphql.types.mlmodel.MLModelGroupType; @@ -326,7 +327,6 @@ import com.linkedin.datahub.graphql.types.tag.TagType; import com.linkedin.datahub.graphql.types.test.TestType; import com.linkedin.datahub.graphql.types.view.DataHubViewType; -import com.linkedin.datahub.graphql.types.ermodelrelation.ERModelRelationType; import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.config.DataHubConfiguration; @@ -344,13 +344,13 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.ERModelRelationService; import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; -import com.linkedin.metadata.service.ERModelRelationService; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.version.GitVersion; @@ -470,9 +470,7 @@ public class GmsGraphQLEngine { private final EntityTypeType entityTypeType; private final FormType formType; - - /** - A list of GraphQL Plugins that extend the core engine */ + /** A list of GraphQL Plugins that extend the core engine */ private final List graphQLPlugins; /** Configures the graph objects that can be fetched primary key. 
*/ @@ -925,7 +923,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) .dataFetcher("domain", getResolver((domainType))) - .dataFetcher("ermodelrelation", getResolver(ermodelrelationType)) + .dataFetcher("ermodelrelation", getResolver(ermodelrelationType)) .dataFetcher("dataPlatform", getResolver(dataPlatformType)) .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) @@ -1053,8 +1051,12 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) - .dataFetcher("updateERModelRelation", new UpdateERModelRelationResolver(this.entityClient)) - .dataFetcher("createERModelRelation", new CreateERModelRelationResolver(this.entityClient, this.eRModelRelationService)) + .dataFetcher( + "updateERModelRelation", new UpdateERModelRelationResolver(this.entityClient)) + .dataFetcher( + "createERModelRelation", + new CreateERModelRelationResolver( + this.entityClient, this.eRModelRelationService)) .dataFetcher("addTag", new AddTagResolver(entityService)) .dataFetcher("addTags", new AddTagsResolver(entityService)) .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) @@ -2026,37 +2028,59 @@ private void configureTypeExtensions(final RuntimeWiring.Builder builder) { builder.scalar(GraphQLLong); } - /** - * Configures resolvers responsible for resolving the {@link ERModelRelation} type. - */ - private void configureERModelRelationResolvers(final RuntimeWiring.Builder builder) { - builder - .type("ERModelRelation", typeWiring -> typeWiring - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ERModelRelationProperties", typeWiring -> typeWiring - .dataFetcher("datasetA", - new LoadableTypeResolver<>(datasetType, - (env) -> { - final ERModelRelationProperties ermodelrelationProperties = env.getSource(); - return ermodelrelationProperties.getDatasetA() != null ? ermodelrelationProperties.getDatasetA().getUrn() : null; - })) - .dataFetcher("datasetB", - new LoadableTypeResolver<>(datasetType, - (env) -> { - final ERModelRelationProperties ermodelrelationProperties = env.getSource(); - return ermodelrelationProperties.getDatasetB() != null ? ermodelrelationProperties.getDatasetB().getUrn() : null; - })) - ) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("owner", new OwnerTypeResolver<>(ownerTypes, - (env) -> ((Owner) env.getSource()).getOwner())) - ) - .type("InstitutionalMemoryMetadata", typeWiring -> typeWiring - .dataFetcher("author", new LoadableTypeResolver<>(corpUserType, - (env) -> ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())) - ); - } + /** Configures resolvers responsible for resolving the {@link ERModelRelation} type. 
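+   * [Editor's note] The LoadableTypeResolver wiring below resolves the datasetA
+   * and datasetB references by extracting each dataset URN and deferring the
+   * actual fetch to a shared batch loader; this description of the batching
+   * behavior is inferred from the resolver's name and usage here, not stated by
+   * the patch itself.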
*/ + private void configureERModelRelationResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "ERModelRelation", + typeWiring -> + typeWiring + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ERModelRelationProperties", + typeWiring -> + typeWiring + .dataFetcher( + "datasetA", + new LoadableTypeResolver<>( + datasetType, + (env) -> { + final ERModelRelationProperties ermodelrelationProperties = + env.getSource(); + return ermodelrelationProperties.getDatasetA() != null + ? ermodelrelationProperties.getDatasetA().getUrn() + : null; + })) + .dataFetcher( + "datasetB", + new LoadableTypeResolver<>( + datasetType, + (env) -> { + final ERModelRelationProperties ermodelrelationProperties = + env.getSource(); + return ermodelrelationProperties.getDatasetB() != null + ? ermodelrelationProperties.getDatasetB().getUrn() + : null; + }))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))); + } /** * Configures resolvers responsible for resolving the {@link diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index a0c610ee832fc..a2771dfa4d64c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -25,11 +25,11 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.ERModelRelationService; import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; -import com.linkedin.metadata.service.ERModelRelationService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.timeline.TimelineService; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 664255a088a95..a22271dceb845 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -19,11 +19,11 @@ import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.Domain; +import com.linkedin.datahub.graphql.generated.ERModelRelation; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryTerm; -import 
com.linkedin.datahub.graphql.generated.ERModelRelation; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureTable; import com.linkedin.datahub.graphql.generated.MLModel; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/CreateERModelRelationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/CreateERModelRelationResolver.java index c6070957cdbc1..2fb1f425490eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/CreateERModelRelationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/CreateERModelRelationResolver.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.types.ermodelrelation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.ERModelRelationUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.ERModelRelation; import com.linkedin.datahub.graphql.generated.ERModelRelationPropertiesInput; import com.linkedin.datahub.graphql.generated.ERModelRelationUpdateInput; import com.linkedin.datahub.graphql.types.ermodelrelation.mappers.ERModelRelationMapper; @@ -21,69 +24,88 @@ import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import com.linkedin.datahub.graphql.generated.ERModelRelation; import org.apache.commons.codec.digest.DigestUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - @Slf4j @RequiredArgsConstructor -public class CreateERModelRelationResolver implements DataFetcher> { +public class CreateERModelRelationResolver + implements DataFetcher> { - private final EntityClient _entityClient; - private final ERModelRelationService _eRModelRelationService; + private final EntityClient _entityClient; + private final ERModelRelationService _eRModelRelationService; - @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final ERModelRelationUpdateInput input = bindArgument(environment.getArgument("input"), ERModelRelationUpdateInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + final ERModelRelationUpdateInput input = + bindArgument(environment.getArgument("input"), ERModelRelationUpdateInput.class); - final ERModelRelationPropertiesInput ermodelrelationPropertiesInput = input.getProperties(); - String ermodelrelationName = ermodelrelationPropertiesInput.getName(); - String datasetA = ermodelrelationPropertiesInput.getDataSetA(); - String datasetB = ermodelrelationPropertiesInput.getDatasetB(); + final ERModelRelationPropertiesInput ermodelrelationPropertiesInput = input.getProperties(); + String ermodelrelationName = ermodelrelationPropertiesInput.getName(); + String datasetA = ermodelrelationPropertiesInput.getDataSetA(); + String datasetB = ermodelrelationPropertiesInput.getDatasetB(); - String lowDataset = datasetA; - String highDataset = datasetB; - if (datasetA.compareTo(datasetB) > 0) { - lowDataset = datasetB; - highDataset = datasetA; - } - // The following sequence mimics 
datahub.emitter.mce_builder.datahub_guid + String lowDataset = datasetA; + String highDataset = datasetB; + if (datasetA.compareTo(datasetB) > 0) { + lowDataset = datasetB; + highDataset = datasetA; + } + // The following sequence mimics datahub.emitter.mce_builder.datahub_guid - String ermodelrelationKey = - "{\"DatasetA\":\"" + lowDataset + "\",\"DatasetB\":\"" + highDataset + "\",\"ERModelRelationName\":\"" + ermodelrelationName - + "\"}"; + String ermodelrelationKey = + "{\"DatasetA\":\"" + + lowDataset + + "\",\"DatasetB\":\"" + + highDataset + + "\",\"ERModelRelationName\":\"" + + ermodelrelationName + + "\"}"; - byte[] mybytes = ermodelrelationKey.getBytes(StandardCharsets.UTF_8); + byte[] mybytes = ermodelrelationKey.getBytes(StandardCharsets.UTF_8); - String ermodelrelationKeyEncoded = new String(mybytes, StandardCharsets.UTF_8); - String ermodelrelationGuid = DigestUtils.md5Hex(ermodelrelationKeyEncoded); - log.info("ermodelrelationkey {}, ermodelrelationGuid {}", ermodelrelationKeyEncoded, ermodelrelationGuid); + String ermodelrelationKeyEncoded = new String(mybytes, StandardCharsets.UTF_8); + String ermodelrelationGuid = DigestUtils.md5Hex(ermodelrelationKeyEncoded); + log.info( + "ermodelrelationkey {}, ermodelrelationGuid {}", + ermodelrelationKeyEncoded, + ermodelrelationGuid); - ERModelRelationUrn inputUrn = new ERModelRelationUrn(ermodelrelationGuid); - QueryContext context = environment.getContext(); - final Authentication authentication = context.getAuthentication(); - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); - if (!ERModelRelationType.canCreateERModelRelation(context, Urn.createFromString(input.getProperties().getDataSetA()), - Urn.createFromString(input.getProperties().getDatasetB()))) { - throw new AuthorizationException("Unauthorized to create ermodelrelation. Please contact your DataHub administrator."); - } - return CompletableFuture.supplyAsync(() -> { + ERModelRelationUrn inputUrn = new ERModelRelationUrn(ermodelrelationGuid); + QueryContext context = environment.getContext(); + final Authentication authentication = context.getAuthentication(); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + if (!ERModelRelationType.canCreateERModelRelation( + context, + Urn.createFromString(input.getProperties().getDataSetA()), + Urn.createFromString(input.getProperties().getDatasetB()))) { + throw new AuthorizationException( + "Unauthorized to create ermodelrelation. 
Please contact your DataHub administrator."); + } + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug("Create ERModelRelation input: {}", input); + final Collection proposals = + ERModelRelationUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); try { - log.debug("Create ERModelRelation input: {}", input); - final Collection proposals = ERModelRelationUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to create ermodelrelation entity", e); - } - return ERModelRelationMapper.map(_eRModelRelationService.getERModelRelationResponse(Urn.createFromString(inputUrn.toString()), authentication)); - } catch (Exception e) { - log.error("Failed to create ERModelRelation to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to create ermodelrelation to resource with input %s", input), e); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to create ermodelrelation entity", e); } + return ERModelRelationMapper.map( + _eRModelRelationService.getERModelRelationResponse( + Urn.createFromString(inputUrn.toString()), authentication)); + } catch (Exception e) { + log.error( + "Failed to create ERModelRelation to resource with input {}, {}", + input, + e.getMessage()); + throw new RuntimeException( + String.format("Failed to create ermodelrelation to resource with input %s", input), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/ERModelRelationType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/ERModelRelationType.java index a9d933ab81c68..d0a1a4e7ec02b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/ERModelRelationType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/ERModelRelationType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.ermodelrelation; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -14,11 +17,11 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.ERModelRelation; +import com.linkedin.datahub.graphql.generated.ERModelRelationUpdateInput; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.ERModelRelation; -import com.linkedin.datahub.graphql.generated.ERModelRelationUpdateInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.BrowsableEntityType; @@ -47,24 +50,21 @@ import javax.annotation.Nonnull; import 
javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class ERModelRelationType implements com.linkedin.datahub.graphql.types.EntityType, - BrowsableEntityType, SearchableEntityType { - - - static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - ERMODELRELATION_KEY_ASPECT_NAME, - ERMODELRELATION_PROPERTIES_ASPECT_NAME, - EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME - ); +public class ERModelRelationType + implements com.linkedin.datahub.graphql.types.EntityType, + BrowsableEntityType, + SearchableEntityType { + + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + ERMODELRELATION_KEY_ASPECT_NAME, + ERMODELRELATION_PROPERTIES_ASPECT_NAME, + EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("name"); private static final String ENTITY_NAME = "ermodelrelation"; @@ -72,9 +72,11 @@ public class ERModelRelationType implements com.linkedin.datahub.graphql.types.E private final EntityClient _entityClient; private final FeatureFlags _featureFlags; - public ERModelRelationType(final EntityClient entityClient, final FeatureFlags featureFlags) { + public ERModelRelationType(final EntityClient entityClient, final FeatureFlags featureFlags) { _entityClient = entityClient; - _featureFlags = featureFlags; // TODO: check if ERModelRelation Feture is Enabled and throw error when called + _featureFlags = + featureFlags; // TODO: check if ERModelRelation Feture is Enabled and throw error when + // called } @Override @@ -93,29 +95,31 @@ public Function getKeyProvider() { } @Override - public List> batchLoad(@Nonnull final List urns, @Nonnull final QueryContext context) - throws Exception { - final List ermodelrelationUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull final List urns, @Nonnull final QueryContext context) throws Exception { + final List ermodelrelationUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - ERMODELRELATION_ENTITY_NAME, - new HashSet<>(ermodelrelationUrns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + ERMODELRELATION_ENTITY_NAME, + new HashSet<>(ermodelrelationUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : ermodelrelationUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(ERModelRelationMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(ERModelRelationMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to load ermodelrelation entity", e); @@ -124,85 +128,113 @@ public List> batchLoad(@Nonnull final List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "ermodelrelation", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "ermodelrelation", pathStr, facetFilters, start, count, context.getAuthentication()); return BrowseResultMapper.map(result); } @Nonnull @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(UrnUtils.getUrn(urn), context.getAuthentication()); + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(UrnUtils.getUrn(urn), context.getAuthentication()); return BrowsePathsMapper.map(result); } @Override - public SearchResults search(@Nonnull String query, @Nullable List filters, - int start, int count, @Nonnull QueryContext context) throws Exception { + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, - count, context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); - } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, @Nullable String field, - @Nullable Filter filters, int limit, @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } - public static boolean canUpdateERModelRelation(@Nonnull QueryContext context, ERModelRelationUrn resourceUrn, ERModelRelationUpdateInput updateInput) { - final ConjunctivePrivilegeGroup editPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + public static boolean canUpdateERModelRelation( + @Nonnull 
QueryContext context, + ERModelRelationUrn resourceUrn, + ERModelRelationUpdateInput updateInput) { + final ConjunctivePrivilegeGroup editPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); List specificPrivileges = new ArrayList<>(); if (updateInput.getEditableProperties() != null) { specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - editPrivilegesGroup, - specificPrivilegeGroup - )); + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(editPrivilegesGroup, specificPrivilegeGroup)); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - resourceUrn.getEntityType(), - resourceUrn.toString(), - orPrivilegeGroups); + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); } - public static boolean canCreateERModelRelation(@Nonnull QueryContext context, Urn datasetAUrn, Urn datasetBUrn) { - final ConjunctivePrivilegeGroup editPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - final ConjunctivePrivilegeGroup createPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_ERMODELRELATION_PRIVILEGE.getType() - )); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - editPrivilegesGroup, - createPrivilegesGroup - )); - boolean datasetAPrivilege = AuthorizationUtils.isAuthorized( + + public static boolean canCreateERModelRelation( + @Nonnull QueryContext context, Urn datasetAUrn, Urn datasetBUrn) { + final ConjunctivePrivilegeGroup editPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + final ConjunctivePrivilegeGroup createPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_ERMODELRELATION_PRIVILEGE.getType())); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
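+    // [Editor's note] Semantics implied by the types: privileges inside a
+    // ConjunctivePrivilegeGroup are AND-ed together, and the
+    // DisjunctivePrivilegeGroup OR-s the groups, so holding either EDIT_ENTITY
+    // or the dedicated CREATE_ERMODELRELATION privilege suffices. The check is
+    // then applied to datasetA and datasetB separately, and both must pass
+    // (see the trailing datasetAPrivilege && datasetBPrivilege below).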
+ DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(editPrivilegesGroup, createPrivilegesGroup)); + boolean datasetAPrivilege = + AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), datasetAUrn.getEntityType(), datasetAUrn.toString(), orPrivilegeGroups); - boolean datasetBPrivilege = AuthorizationUtils.isAuthorized( + boolean datasetBPrivilege = + AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), datasetBUrn.getEntityType(), @@ -211,4 +243,3 @@ public static boolean canCreateERModelRelation(@Nonnull QueryContext context, Ur return datasetAPrivilege && datasetBPrivilege; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/UpdateERModelRelationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/UpdateERModelRelationResolver.java index 30234fbd307be..f2200842a98f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/UpdateERModelRelationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/UpdateERModelRelationResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.ermodelrelation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.ERModelRelationUrn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,40 +18,48 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - @Slf4j @RequiredArgsConstructor public class UpdateERModelRelationResolver implements DataFetcher> { - private final EntityClient _entityClient; - @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final ERModelRelationUpdateInput input = bindArgument(environment.getArgument("input"), ERModelRelationUpdateInput.class); - final String urn = bindArgument(environment.getArgument("urn"), String.class); - ERModelRelationUrn inputUrn = ERModelRelationUrn.createFromString(urn); - QueryContext context = environment.getContext(); - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); - if (!ERModelRelationType.canUpdateERModelRelation(context, inputUrn, input)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Create ERModelRelation input: {}", input); - final Collection proposals = ERModelRelationUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); + private final EntityClient _entityClient; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final ERModelRelationUpdateInput input = + bindArgument(environment.getArgument("input"), ERModelRelationUpdateInput.class); + final String urn = bindArgument(environment.getArgument("urn"), String.class); + ERModelRelationUrn inputUrn = ERModelRelationUrn.createFromString(urn); + QueryContext context = environment.getContext(); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + if (!ERModelRelationType.canUpdateERModelRelation(context, inputUrn, input)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug("Create ERModelRelation input: {}", input); + final Collection proposals = + ERModelRelationUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to update ermodelrelation entity"), e); - } - return true; - } catch (Exception e) { - log.error("Failed to update ERModelRelation to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to update ermodelrelation to resource with input %s", input), e); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException( + String.format("Failed to update ermodelrelation entity"), e); } + return true; + } catch (Exception e) { + log.error( + "Failed to update ERModelRelation to resource with input {}, {}", + input, + e.getMessage()); + throw new RuntimeException( + String.format("Failed to update ermodelrelation to resource with input %s", input), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationMapper.java index f862852f3748f..073e59c5eee08 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.ermodelrelation.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -9,8 +11,8 @@ import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.Dataset; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ERModelRelation; +import com.linkedin.datahub.graphql.generated.EntityType; import 
com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; @@ -21,18 +23,16 @@ import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.ermodelrelation.EditableERModelRelationProperties; import com.linkedin.ermodelrelation.ERModelRelationProperties; +import com.linkedin.ermodelrelation.EditableERModelRelationProperties; import com.linkedin.metadata.key.ERModelRelationKey; import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class ERModelRelationMapper implements ModelMapper { @@ -53,61 +53,87 @@ public ERModelRelation apply(final EntityResponse entityResponse) { MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(ERMODELRELATION_KEY_ASPECT_NAME, this::mapERModelRelationKey); mappingHelper.mapToResult(ERMODELRELATION_PROPERTIES_ASPECT_NAME, this::mapProperties); - if (aspectMap != null && aspectMap.containsKey(EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME)) { - mappingHelper.mapToResult(EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + if (aspectMap != null + && aspectMap.containsKey(EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME)) { + mappingHelper.mapToResult( + EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); } if (aspectMap != null && aspectMap.containsKey(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (ermodelrelation, dataMap) -> - ermodelrelation.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (ermodelrelation, dataMap) -> + ermodelrelation.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); } if (aspectMap != null && aspectMap.containsKey(OWNERSHIP_ASPECT_NAME)) { - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (ermodelrelation, dataMap) -> + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (ermodelrelation, dataMap) -> ermodelrelation.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); } if (aspectMap != null && aspectMap.containsKey(STATUS_ASPECT_NAME)) { - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (ermodelrelation, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (ermodelrelation, dataMap) -> ermodelrelation.setStatus(StatusMapper.map(new Status(dataMap)))); } if (aspectMap != null && aspectMap.containsKey(GLOBAL_TAGS_ASPECT_NAME)) { - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (ermodelrelation, dataMap) -> this.mapGlobalTags(ermodelrelation, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (ermodelrelation, dataMap) -> this.mapGlobalTags(ermodelrelation, dataMap, entityUrn)); } if (aspectMap != null && aspectMap.containsKey(GLOSSARY_TERMS_ASPECT_NAME)) { - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (ermodelrelation, dataMap) -> - ermodelrelation.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (ermodelrelation, dataMap) -> + ermodelrelation.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); } return mappingHelper.getResult(); } - private void mapEditableProperties(@Nonnull ERModelRelation ermodelrelation, @Nonnull DataMap dataMap) { - final EditableERModelRelationProperties editableERModelRelationProperties = new EditableERModelRelationProperties(dataMap); + private void mapEditableProperties( + @Nonnull ERModelRelation ermodelrelation, @Nonnull DataMap dataMap) { + final EditableERModelRelationProperties editableERModelRelationProperties = + new EditableERModelRelationProperties(dataMap); ermodelrelation.setEditableProperties( com.linkedin.datahub.graphql.generated.ERModelRelationEditableProperties.builder() .setDescription(editableERModelRelationProperties.getDescription()) 
.setName(editableERModelRelationProperties.getName()) - .build() - ); + .build()); } - - private void mapERModelRelationKey(@Nonnull ERModelRelation ermodelrelation, @Nonnull DataMap datamap) { + private void mapERModelRelationKey( + @Nonnull ERModelRelation ermodelrelation, @Nonnull DataMap datamap) { ERModelRelationKey ermodelrelationKey = new ERModelRelationKey(datamap); ermodelrelation.setErmodelrelationId(ermodelrelationKey.getErmodelrelationId()); } private void mapProperties(@Nonnull ERModelRelation ermodelrelation, @Nonnull DataMap dataMap) { - final ERModelRelationProperties ermodelrelationProperties = new ERModelRelationProperties(dataMap); - ermodelrelation.setProperties(com.linkedin.datahub.graphql.generated.ERModelRelationProperties.builder() - .setName(ermodelrelationProperties.getName()) - .setDatasetA(createPartialDataset(ermodelrelationProperties.getDatasetA())) - .setDatasetB(createPartialDataset(ermodelrelationProperties.getDatasetB())) - .setErmodelrelationFieldMapping(mapERModelRelationFieldMappings(ermodelrelationProperties)) - .setCreatedTime(ermodelrelationProperties.hasCreated() && ermodelrelationProperties.getCreated().getTime() > 0 - ? ermodelrelationProperties.getCreated().getTime() : 0) - .build()); - if (ermodelrelationProperties.hasCreated() && ermodelrelationProperties.getCreated().hasActor()) { - ermodelrelation.getProperties().setCreatedActor(UrnToEntityMapper.map(ermodelrelationProperties.getCreated().getActor())); + final ERModelRelationProperties ermodelrelationProperties = + new ERModelRelationProperties(dataMap); + ermodelrelation.setProperties( + com.linkedin.datahub.graphql.generated.ERModelRelationProperties.builder() + .setName(ermodelrelationProperties.getName()) + .setDatasetA(createPartialDataset(ermodelrelationProperties.getDatasetA())) + .setDatasetB(createPartialDataset(ermodelrelationProperties.getDatasetB())) + .setErmodelrelationFieldMapping( + mapERModelRelationFieldMappings(ermodelrelationProperties)) + .setCreatedTime( + ermodelrelationProperties.hasCreated() + && ermodelrelationProperties.getCreated().getTime() > 0 + ? 
ermodelrelationProperties.getCreated().getTime() + : 0) + .build()); + if (ermodelrelationProperties.hasCreated() + && ermodelrelationProperties.getCreated().hasActor()) { + ermodelrelation + .getProperties() + .setCreatedActor( + UrnToEntityMapper.map(ermodelrelationProperties.getCreated().getActor())); } } + private Dataset createPartialDataset(@Nonnull Urn datasetUrn) { Dataset partialDataset = new Dataset(); @@ -115,29 +141,32 @@ private Dataset createPartialDataset(@Nonnull Urn datasetUrn) { partialDataset.setUrn(datasetUrn.toString()); return partialDataset; - } - private com.linkedin.datahub.graphql.generated.ERModelRelationFieldMapping mapERModelRelationFieldMappings( - ERModelRelationProperties ermodelrelationProperties) { + + private com.linkedin.datahub.graphql.generated.ERModelRelationFieldMapping + mapERModelRelationFieldMappings(ERModelRelationProperties ermodelrelationProperties) { return com.linkedin.datahub.graphql.generated.ERModelRelationFieldMapping.builder() - .setFieldMappings(ermodelrelationProperties.getErmodelrelationFieldMapping() - .getFieldMappings() - .stream() - .map(this::mapFieldMap) - .collect(Collectors.toList())) + .setFieldMappings( + ermodelrelationProperties.getErmodelrelationFieldMapping().getFieldMappings().stream() + .map(this::mapFieldMap) + .collect(Collectors.toList())) .build(); } - private com.linkedin.datahub.graphql.generated.FieldMap mapFieldMap(com.linkedin.ermodelrelation.FieldMap fieldMap) { + private com.linkedin.datahub.graphql.generated.FieldMap mapFieldMap( + com.linkedin.ermodelrelation.FieldMap fieldMap) { return com.linkedin.datahub.graphql.generated.FieldMap.builder() .setAfield(fieldMap.getAfield()) .setBfield(fieldMap.getBfield()) .build(); } - private void mapGlobalTags(@Nonnull ERModelRelation ermodelrelation, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull ERModelRelation ermodelrelation, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); ermodelrelation.setTags(globalTags); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationUpdateInputMapper.java index 589f645a27ea0..b5a723ee5716a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelation/mappers/ERModelRelationUpdateInputMapper.java @@ -1,34 +1,36 @@ package com.linkedin.datahub.graphql.types.ermodelrelation.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.generated.ERModelRelationEditablePropertiesUpdate; import com.linkedin.datahub.graphql.generated.ERModelRelationFieldMappingInput; -import com.linkedin.datahub.graphql.generated.ERModelRelationUpdateInput; import com.linkedin.datahub.graphql.generated.ERModelRelationPropertiesInput; -import 
+import com.linkedin.datahub.graphql.generated.ERModelRelationUpdateInput;
 import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper;
 import com.linkedin.datahub.graphql.types.mappers.InputModelMapper;
 import com.linkedin.ermodelrelation.ERModelRelationFieldMapping;
 import com.linkedin.ermodelrelation.ERModelRelationProperties;
 import com.linkedin.ermodelrelation.EditableERModelRelationProperties;
-import com.linkedin.ermodelrelation.FieldMapArray;
 import com.linkedin.ermodelrelation.FieldMap;
+import com.linkedin.ermodelrelation.FieldMapArray;
 import com.linkedin.mxe.MetadataChangeProposal;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.Collection;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class ERModelRelationUpdateInputMapper
-    implements InputModelMapper<ERModelRelationUpdateInput, Collection<MetadataChangeProposal>, Urn> {
-  public static final ERModelRelationUpdateInputMapper INSTANCE = new ERModelRelationUpdateInputMapper();
+    implements InputModelMapper<
+        ERModelRelationUpdateInput, Collection<MetadataChangeProposal>, Urn> {
+  public static final ERModelRelationUpdateInputMapper INSTANCE =
+      new ERModelRelationUpdateInputMapper();
 
-  public static Collection<MetadataChangeProposal> map(@Nonnull final ERModelRelationUpdateInput ermodelrelationUpdateInput,
+  public static Collection<MetadataChangeProposal> map(
+      @Nonnull final ERModelRelationUpdateInput ermodelrelationUpdateInput,
       @Nonnull final Urn actor) {
     return INSTANCE.apply(ermodelrelationUpdateInput, actor);
   }
@@ -36,41 +38,56 @@ public static Collection<MetadataChangeProposal> map(@Nonnull final ERModelRelat
   @Override
   public Collection<MetadataChangeProposal> apply(ERModelRelationUpdateInput input, Urn actor) {
     final Collection<MetadataChangeProposal> proposals = new ArrayList<>(8);
-    final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(ERMODELRELATION_ENTITY_NAME);
+    final UpdateMappingHelper updateMappingHelper =
+        new UpdateMappingHelper(ERMODELRELATION_ENTITY_NAME);
     final long currentTime = System.currentTimeMillis();
     final AuditStamp auditstamp = new AuditStamp();
     auditstamp.setActor(actor, SetMode.IGNORE_NULL);
     auditstamp.setTime(currentTime);
 
     if (input.getProperties() != null) {
-      com.linkedin.ermodelrelation.ERModelRelationProperties ermodelrelationProperties = createERModelRelationProperties(input.getProperties(), auditstamp);
-      proposals.add(updateMappingHelper.aspectToProposal(ermodelrelationProperties, ERMODELRELATION_PROPERTIES_ASPECT_NAME));
+      com.linkedin.ermodelrelation.ERModelRelationProperties ermodelrelationProperties =
+          createERModelRelationProperties(input.getProperties(), auditstamp);
+      proposals.add(
+          updateMappingHelper.aspectToProposal(
+              ermodelrelationProperties, ERMODELRELATION_PROPERTIES_ASPECT_NAME));
     }
     if (input.getEditableProperties() != null) {
-      final EditableERModelRelationProperties editableERModelRelationProperties = ermodelrelationEditablePropsSettings(input.getEditableProperties());
-      proposals.add(updateMappingHelper.aspectToProposal(editableERModelRelationProperties, EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME));
+      final EditableERModelRelationProperties editableERModelRelationProperties =
+          ermodelrelationEditablePropsSettings(input.getEditableProperties());
+      proposals.add(
+          updateMappingHelper.aspectToProposal(
+              editableERModelRelationProperties, EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME));
     }
     return proposals;
   }
 
-  private ERModelRelationProperties createERModelRelationProperties(ERModelRelationPropertiesInput inputProperties, AuditStamp auditstamp) {
-    com.linkedin.ermodelrelation.ERModelRelationProperties ermodelrelationProperties = new com.linkedin.ermodelrelation.ERModelRelationProperties();
+
+  private ERModelRelationProperties createERModelRelationProperties(
+      ERModelRelationPropertiesInput inputProperties, AuditStamp auditstamp) {
+    com.linkedin.ermodelrelation.ERModelRelationProperties ermodelrelationProperties =
+        new com.linkedin.ermodelrelation.ERModelRelationProperties();
     if (inputProperties.getName() != null) {
       ermodelrelationProperties.setName(inputProperties.getName());
     }
     try {
       if (inputProperties.getDataSetA() != null) {
-        ermodelrelationProperties.setDatasetA(DatasetUrn.createFromString(inputProperties.getDataSetA()));
+        ermodelrelationProperties.setDatasetA(
+            DatasetUrn.createFromString(inputProperties.getDataSetA()));
       }
       if (inputProperties.getDatasetB() != null) {
-        ermodelrelationProperties.setDatasetB(DatasetUrn.createFromString(inputProperties.getDatasetB()));
+        ermodelrelationProperties.setDatasetB(
+            DatasetUrn.createFromString(inputProperties.getDatasetB()));
       }
     } catch (URISyntaxException e) {
       e.printStackTrace();
     }
     if (inputProperties.getErmodelrelationFieldmapping() != null) {
-      ERModelRelationFieldMappingInput ermodelrelationFieldMapping = inputProperties.getErmodelrelationFieldmapping();
-      if ((ermodelrelationFieldMapping.getFieldMappings() != null && ermodelrelationFieldMapping.getFieldMappings().size() > 0)) {
-        ERModelRelationFieldMapping ermodelrelationFieldMappingUnit = ermodelrelationFieldMappingSettings(ermodelrelationFieldMapping);
+      ERModelRelationFieldMappingInput ermodelrelationFieldMapping =
+          inputProperties.getErmodelrelationFieldmapping();
+      if ((ermodelrelationFieldMapping.getFieldMappings() != null
+          && ermodelrelationFieldMapping.getFieldMappings().size() > 0)) {
+        ERModelRelationFieldMapping ermodelrelationFieldMappingUnit =
+            ermodelrelationFieldMappingSettings(ermodelrelationFieldMapping);
         ermodelrelationProperties.setErmodelrelationFieldMapping(ermodelrelationFieldMappingUnit);
       }
       if (inputProperties.getCreated() != null && inputProperties.getCreated()) {
@@ -92,36 +109,42 @@ private ERModelRelationProperties createERModelRelationProperties(ERModelRelatio
     return ermodelrelationProperties;
   }
 
-  private static ERModelRelationFieldMapping ermodelrelationFieldMappingSettings(ERModelRelationFieldMappingInput ermodelrelationFieldMapping) {
+  private static ERModelRelationFieldMapping ermodelrelationFieldMappingSettings(
+      ERModelRelationFieldMappingInput ermodelrelationFieldMapping) {
     ERModelRelationFieldMapping ermodelrelationFieldMappingUnit = new ERModelRelationFieldMapping();
 
-    if (ermodelrelationFieldMapping.getFieldMappings() != null && ermodelrelationFieldMapping.getFieldMappings().size() > 0) {
+    if (ermodelrelationFieldMapping.getFieldMappings() != null
+        && ermodelrelationFieldMapping.getFieldMappings().size() > 0) {
       com.linkedin.ermodelrelation.FieldMapArray fieldMapArray = new FieldMapArray();
-      ermodelrelationFieldMapping.getFieldMappings().forEach(fieldMappingInput -> {
-        FieldMap fieldMap = new FieldMap();
-        if (fieldMappingInput.getAfield() != null) {
-          fieldMap.setAfield(fieldMappingInput.getAfield());
-        }
-        if (fieldMappingInput.getBfield() != null) {
-          fieldMap.setBfield(fieldMappingInput.getBfield());
-        }
-        fieldMapArray.add(fieldMap);
-      });
+      ermodelrelationFieldMapping
+          .getFieldMappings()
+          .forEach(
+              fieldMappingInput -> {
+                FieldMap fieldMap = new FieldMap();
+                if (fieldMappingInput.getAfield() != null) {
+                  fieldMap.setAfield(fieldMappingInput.getAfield());
+                }
+                if (fieldMappingInput.getBfield() != null) {
+                  fieldMap.setBfield(fieldMappingInput.getBfield());
+                }
+                fieldMapArray.add(fieldMap);
+              });
       ermodelrelationFieldMappingUnit.setFieldMappings(fieldMapArray);
     }
     return ermodelrelationFieldMappingUnit;
   }
 
-  private static EditableERModelRelationProperties ermodelrelationEditablePropsSettings(ERModelRelationEditablePropertiesUpdate editPropsInput) {
-    final EditableERModelRelationProperties editableERModelRelationProperties = new EditableERModelRelationProperties();
-    if (editPropsInput.getName() != null
-        && editPropsInput.getName().trim().length() > 0) {
+
+  private static EditableERModelRelationProperties ermodelrelationEditablePropsSettings(
+      ERModelRelationEditablePropertiesUpdate editPropsInput) {
+    final EditableERModelRelationProperties editableERModelRelationProperties =
+        new EditableERModelRelationProperties();
+    if (editPropsInput.getName() != null && editPropsInput.getName().trim().length() > 0) {
       editableERModelRelationProperties.setName(editPropsInput.getName());
     }
     if (editPropsInput.getDescription() != null
-        && editPropsInput.getDescription().trim().length() > 0) {
+        && editPropsInput.getDescription().trim().length() > 0) {
       editableERModelRelationProperties.setDescription(editPropsInput.getDescription());
     }
     return editableERModelRelationProperties;
   }
 }
-
diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java
index fedcb78575ffb..7212d72b0afc2 100644
--- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java
+++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java
@@ -264,7 +264,8 @@ public class Constants {
   // ERModelRelation
   public static final String ERMODELRELATION_KEY_ASPECT_NAME = "ermodelrelationKey";
   public static final String ERMODELRELATION_PROPERTIES_ASPECT_NAME = "eRModelRelationProperties";
-  public static final String EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME = "editableERModelRelationProperties";
+  public static final String EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME =
+      "editableERModelRelationProperties";
 
   // Assertion
   public static final String ASSERTION_KEY_ASPECT_NAME = "assertionKey";
diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ERModelRelationUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ERModelRelationUrn.java
index fe9fb2dc53daa..fd69970459a84 100644
--- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ERModelRelationUrn.java
+++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ERModelRelationUrn.java
@@ -5,7 +5,6 @@
 import com.linkedin.data.template.TemplateOutputCastException;
 import java.net.URISyntaxException;
 
-
 public class ERModelRelationUrn extends Urn {
 
   public static final String ENTITY_TYPE = "ermodelrelation";
@@ -37,7 +36,8 @@ public static ERModelRelationUrn createFromUrn(Urn urn) throws URISyntaxExceptio
       try {
         return new ERModelRelationUrn((String) key.getAs(0, String.class));
       } catch (Exception var3) {
-        throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage());
+        throw new URISyntaxException(
+            urn.toString(), "Invalid URN Parameter: '" + var3.getMessage());
       }
     }
   }
@@ -49,18 +49,20 @@ public static ERModelRelationUrn deserialize(String rawUrn) throws URISyntaxExce
   static {
     Custom.initializeCustomClass(ERModelRelationUrn.class);
-    Custom.registerCoercer(new DirectCoercer<ERModelRelationUrn>() {
-      public Object coerceInput(ERModelRelationUrn object) throws ClassCastException {
-        return object.toString();
-      }
+    Custom.registerCoercer(
+        new DirectCoercer<ERModelRelationUrn>() {
+          public Object coerceInput(ERModelRelationUrn object) throws ClassCastException {
+            return object.toString();
+          }
 
-      public ERModelRelationUrn coerceOutput(Object object) throws TemplateOutputCastException {
-        try {
-          return ERModelRelationUrn.createFromString((String) object);
-        } catch (URISyntaxException e) {
-          throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e);
-        }
-      }
-    }, ERModelRelationUrn.class);
+          public ERModelRelationUrn coerceOutput(Object object) throws TemplateOutputCastException {
+            try {
+              return ERModelRelationUrn.createFromString((String) object);
+            } catch (URISyntaxException e) {
+              throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e);
+            }
+          }
+        },
+        ERModelRelationUrn.class);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java
index 93acb1fe411f6..17e7b3be2a5cd 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java
@@ -1,9 +1,10 @@
 package com.linkedin.gms.factory.ermodelrelation;
 
 import com.datahub.authentication.Authentication;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
-import com.linkedin.metadata.client.JavaEntityClient;
+import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.service.ERModelRelationService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
@@ -11,15 +12,9 @@ import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-import javax.annotation.Nonnull;
-
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class ERModelRelationServiceFactory {
-  @Autowired
-  @Qualifier("javaEntityClient")
-  private JavaEntityClient _javaEntityClient;
 
   @Autowired
   @Qualifier("systemAuthentication")
@@ -28,7 +23,8 @@ public class ERModelRelationServiceFactory {
   @Bean(name = "eRModelRelationService")
   @Scope("singleton")
   @Nonnull
-  protected ERModelRelationService getInstance() throws Exception {
-    return new ERModelRelationService(_javaEntityClient, _authentication);
+  protected ERModelRelationService getInstance(
+    @Qualifier("entityClient") final EntityClient entityClient) throws Exception {
+    return new ERModelRelationService(entityClient, _authentication);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java
index affe93b4879e8..de4117d7ba178 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java
@@ -28,11 +28,11 @@
 import com.linkedin.metadata.recommendation.RecommendationsService;
 import com.linkedin.metadata.secret.SecretService;
 import com.linkedin.metadata.service.DataProductService;
+import com.linkedin.metadata.service.ERModelRelationService;
 import com.linkedin.metadata.service.FormService;
 import com.linkedin.metadata.service.LineageService;
 import com.linkedin.metadata.service.OwnershipTypeService;
 import com.linkedin.metadata.service.QueryService;
-import com.linkedin.metadata.service.ERModelRelationService;
 import com.linkedin.metadata.service.SettingsService;
 import com.linkedin.metadata.service.ViewService;
 import com.linkedin.metadata.timeline.TimelineService;
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ERModelRelationService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ERModelRelationService.java
index 2365dbfa659ca..c6054978f4a50 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ERModelRelationService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ERModelRelationService.java
@@ -1,36 +1,36 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.Constants;
-import lombok.extern.slf4j.Slf4j;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import java.util.Objects;
 import java.util.Set;
-
-import static com.linkedin.metadata.Constants.*;
-
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import lombok.extern.slf4j.Slf4j;
 
 /**
- * This class is used to permit easy CRUD operations on a Query
- * Currently it supports creating and removing a Query.
- *
- * Note that no Authorization is performed within the service. The expectation
- * is that the caller has already verified the permissions of the active Actor.
+ * This class is used to permit easy CRUD operations on a Query. Currently it supports creating and
+ * removing a Query.
  *
+ * <p>Note that no Authorization is performed within the service. The expectation is that the caller
+ * has already verified the permissions of the active Actor.
  */
 @Slf4j
 public class ERModelRelationService extends BaseService {
 
-  public ERModelRelationService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
+  public ERModelRelationService(
+      @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
     super(entityClient, systemAuthentication);
   }
 
-  static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of(
+
+  static final Set<String> ASPECTS_TO_RESOLVE =
+      ImmutableSet.of(
           ERMODELRELATION_KEY_ASPECT_NAME,
           ERMODELRELATION_PROPERTIES_ASPECT_NAME,
           EDITABLE_ERMODELRELATION_PROPERTIES_ASPECT_NAME,
@@ -38,19 +38,20 @@ public ERModelRelationService(@Nonnull EntityClient entityClient, @Nonnull Authe
           OWNERSHIP_ASPECT_NAME,
           STATUS_ASPECT_NAME,
           GLOBAL_TAGS_ASPECT_NAME,
-          GLOSSARY_TERMS_ASPECT_NAME
-      );
+          GLOSSARY_TERMS_ASPECT_NAME);
+
   /**
-   * Returns an instance of {@link EntityResponse} for the specified ERModelRelation urn,
-   * or null if one cannot be found.
+   * Returns an instance of {@link EntityResponse} for the specified ERModelRelation urn, or null if
+   * one cannot be found.
    *
    * @param ermodelrelationUrn the urn of the Query
    * @param authentication the authentication to use
-   *
-   * @return an instance of {@link EntityResponse} for the ERModelRelation, null if it does not exist.
+   * @return an instance of {@link EntityResponse} for the ERModelRelation, null if it does not
+   *     exist.
    */
   @Nullable
-  public EntityResponse getERModelRelationResponse(@Nonnull final Urn ermodelrelationUrn, @Nonnull final Authentication authentication) {
+  public EntityResponse getERModelRelationResponse(
+      @Nonnull final Urn ermodelrelationUrn, @Nonnull final Authentication authentication) {
     Objects.requireNonNull(ermodelrelationUrn, "ermodelrelationUrn must not be null");
     Objects.requireNonNull(authentication, "authentication must not be null");
     try {
@@ -58,10 +59,10 @@ public EntityResponse getERModelRelationResponse(@Nonnull final Urn ermodelrelat
           Constants.ERMODELRELATION_ENTITY_NAME,
           ermodelrelationUrn,
           ASPECTS_TO_RESOLVE,
-          authentication
-      );
+          authentication);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to retrieve Query with urn %s", ermodelrelationUrn), e);
+      throw new RuntimeException(
+          String.format("Failed to retrieve Query with urn %s", ermodelrelationUrn), e);
     }
   }
 }
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
index 1693324b55ee4..2029555783856 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
@@ -222,10 +222,11 @@ public class PoliciesConfig {
           "Edit Embedded Content",
           "The ability to edit the embedded content for an entity.");
 
-  public static final Privilege CREATE_ERMODELRELATION_PRIVILEGE = Privilege.of(
-      "CREATE_ENTITY_ERMODELRELATION",
-      "Create ermodelrelation",
-      "The ability to add ermodelrelation on a dataset.");
+  public static final Privilege CREATE_ERMODELRELATION_PRIVILEGE =
+      Privilege.of(
+          "CREATE_ENTITY_ERMODELRELATION",
+          "Create ermodelrelation",
+          "The ability to add ermodelrelation on a dataset.");
 
   public static final List<Privilege> COMMON_ENTITY_PRIVILEGES =
       ImmutableList.of(
@@ -569,12 +570,12 @@
           EDIT_ENTITY_PRIVILEGE));
 
   // ERModelRelation Privileges
-  public static final ResourcePrivileges ERMODELRELATION_PRIVILEGES = ResourcePrivileges.of(
+  public static final ResourcePrivileges ERMODELRELATION_PRIVILEGES =
+      ResourcePrivileges.of(
           "ermodelrelation",
           "ERModelRelation",
           "update privileges for ermodelrelations",
-          COMMON_ENTITY_PRIVILEGES
-      );
+          COMMON_ENTITY_PRIVILEGES);
 
   public static final List<ResourcePrivileges> ENTITY_RESOURCE_PRIVILEGES =
       ImmutableList.of(
           DATASET_PRIVILEGES,
@@ -590,9 +591,8 @@ public class PoliciesConfig {
           CORP_GROUP_PRIVILEGES,
           CORP_USER_PRIVILEGES,
           NOTEBOOK_PRIVILEGES,
-          DATA_PRODUCT_PRIVILEGES,
-          ERMODELRELATION_PRIVILEGES
-      );
+          DATA_PRODUCT_PRIVILEGES,
+          ERMODELRELATION_PRIVILEGES);
 
   // Merge all entity specific resource privileges to create a superset of all resource privileges
   public static final ResourcePrivileges ALL_RESOURCE_PRIVILEGES =

From 43bc95c0fbc2b8c620d1be0ad3da4576288622b8 Mon Sep 17 00:00:00 2001
From: Raj Tekal
Date: Thu, 1 Feb 2024 09:11:30 -0500
Subject: [PATCH 262/263] spotless java changes

---
 .../factory/ermodelrelation/ERModelRelationServiceFactory.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java
index 17e7b3be2a5cd..c5d161099ca37 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ermodelrelation/ERModelRelationServiceFactory.java
@@ -24,7 +24,7 @@ public class ERModelRelationServiceFactory {
   @Scope("singleton")
   @Nonnull
   protected ERModelRelationService getInstance(
-    @Qualifier("entityClient") final EntityClient entityClient) throws Exception {
+      @Qualifier("entityClient") final EntityClient entityClient) throws Exception {
     return new ERModelRelationService(entityClient, _authentication);
   }
 }

From 2369e07ce12712bed06ebc0911d8077ad87f289d Mon Sep 17 00:00:00 2001
From: Raj Tekal
Date: Thu, 1 Feb 2024 21:24:00 -0500
Subject: [PATCH 263/263] Register for UI ERModelRelationEntity

---
 datahub-web-react/src/app/buildEntityRegistry.ts | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/datahub-web-react/src/app/buildEntityRegistry.ts b/datahub-web-react/src/app/buildEntityRegistry.ts
index 4f74681570802..d5ad6781b9675 100644
--- a/datahub-web-react/src/app/buildEntityRegistry.ts
+++ b/datahub-web-react/src/app/buildEntityRegistry.ts
@@ -19,6 +19,7 @@ import GlossaryNodeEntity from './entity/glossaryNode/GlossaryNodeEntity';
 import { DataPlatformEntity } from './entity/dataPlatform/DataPlatformEntity';
 import { DataProductEntity } from './entity/dataProduct/DataProductEntity';
 import { DataPlatformInstanceEntity } from './entity/dataPlatformInstance/DataPlatformInstanceEntity';
+import { ERModelRelationEntity } from './entity/ermodelrelations/ERModelRelationEntity'
 import { RoleEntity } from './entity/Access/RoleEntity';
 
 export default function buildEntityRegistry() {
@@ -44,5 +45,6 @@ export default function buildEntityRegistry() {
     registry.register(new DataPlatformEntity());
     registry.register(new DataProductEntity());
     registry.register(new DataPlatformInstanceEntity());
+    registry.register(new ERModelRelationEntity())
     return registry;
 }
\ No newline at end of file